Compare commits: doc ... 6cba4d24bb (57 commits)

Commits included: 6cba4d24bb, 52a1b4c200, 559ad5e528, fd0bd1ee5f, 0e92998f16, 9eef91ebba, 3416c327b9, 9bdaa0d32e, 206bcd6319, 3774ab8ef0, 303828392e, 5d8cbe34ef, 85c0d4a384, ae8817ad2a, 69f215e68f, 222b475223, ede515e2ca, 974fdb7a91, a294229ff0, 9600dd7e4f, 729276a2b1, 7ba7070693, 59a12d6218, fc298de548, 8b639fc0b3, 5545f618f9, 43c0938d9a, 17215b401a, 4aced1c717, 31ce34cddb, d64f7f4f27, d15235fc4c, 49bb4f41b8, a1398045ac, 1f93602102, c818463a2e, cd28563985, 8ef2d9c64e, 1510956989, ac4f0fdd19, 37da03c719, 60bc4375cf, 725fe22b80, ca18b63f27, 23f2a351a6, c7d2b89d82, 72c34b57a6, 619873c4a9, 591f6e2af4, ad00ebee9b, adf8a0cf24, 42eb26a948, f048a60540, 0463038aaa, 7eca8a270d, 84d7c977f9, 317eadce07

.gitea/workflows/build-dev.yaml (new file, 80 lines)

@@ -0,0 +1,80 @@
name: CI
on:
  push:
    branches:
      - 'dev'
jobs:
  build:
    runs-on: woryzen
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Setup Gradle
        uses: gradle/actions/setup-gradle@v3
      - name: Execute Gradle build
        run: ./gradlew build
      - name: Prepare Docker image build
        run: ./gradlew prepareDockerBuild
      - name: Get project version
        id: retrieve-version
        run: ./gradlew -q version >> "$GITHUB_OUTPUT"
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: docker-container
      - name: Login to Gitea container registry
        uses: docker/login-action@v3
        with:
          registry: gitea.woggioni.net
          username: woggioni
          password: ${{ secrets.PUBLISHER_TOKEN }}
      -
        name: Build rbcs Docker image
        uses: docker/build-push-action@v5.3.0
        with:
          context: "docker/build/docker"
          platforms: linux/amd64,linux/arm64
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:vanilla-dev
          target: release-vanilla
          cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
      -
        name: Build rbcs memcache Docker image
        uses: docker/build-push-action@v5.3.0
        with:
          context: "docker/build/docker"
          platforms: linux/amd64,linux/arm64
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:memcache-dev
          target: release-memcache
          cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
          cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/rbcs:buildx
      -
        name: Build rbcs native Docker image
        uses: docker/build-push-action@v5.3.0
        with:
          context: "docker/build/docker"
          platforms: linux/amd64
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:native-dev
          target: release-native
      -
        name: Build rbcs jlink Docker image
        uses: docker/build-push-action@v5.3.0
        with:
          context: "docker/build/docker"
          platforms: linux/amd64
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:jlink-dev
          target: release-jlink

@@ -5,7 +5,7 @@ on:
      - '*'
jobs:
  build:
    runs-on: hostinger
    runs-on: woryzen
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
@@ -39,9 +39,9 @@ jobs:
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:latest
            gitea.woggioni.net/woggioni/rbcs:${{ steps.retrieve-version.outputs.VERSION }}
          target: release
            gitea.woggioni.net/woggioni/rbcs:vanilla
            gitea.woggioni.net/woggioni/rbcs:vanilla-${{ steps.retrieve-version.outputs.VERSION }}
          target: release-vanilla
          cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
      -
        name: Build rbcs memcache Docker image
@@ -52,11 +52,37 @@ jobs:
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:latest
            gitea.woggioni.net/woggioni/rbcs:${{ steps.retrieve-version.outputs.VERSION }}
            gitea.woggioni.net/woggioni/rbcs:memcache
            gitea.woggioni.net/woggioni/rbcs:memcache-${{ steps.retrieve-version.outputs.VERSION }}
          target: release-memcache
          cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
          cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/rbcs:buildx
      -
        name: Build rbcs native Docker image
        uses: docker/build-push-action@v5.3.0
        with:
          context: "docker/build/docker"
          platforms: linux/amd64
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:native
            gitea.woggioni.net/woggioni/rbcs:native-${{ steps.retrieve-version.outputs.VERSION }}
          target: release-native
      -
        name: Build rbcs jlink Docker image
        uses: docker/build-push-action@v5.3.0
        with:
          context: "docker/build/docker"
          platforms: linux/amd64
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:jlink
            gitea.woggioni.net/woggioni/rbcs:jlink-${{ steps.retrieve-version.outputs.VERSION }}-jlink
          target: release-jlink
      - name: Publish artifacts
        env:
          PUBLISHER_TOKEN: ${{ secrets.PUBLISHER_TOKEN }}

LICENSE (new file, 20 lines)

@@ -0,0 +1,20 @@
The MIT License (MIT)

Copyright (c) 2017 Y. T. CHUNG <zonyitoo@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

README.md (new file, 433 lines)

@@ -0,0 +1,433 @@
# Remote Build Cache Server





<!--

-->

Speed up your builds by sharing and reusing unchanged build outputs across your team.

Remote Build Cache Server (RBCS) allows teams to share and reuse unchanged build and test outputs,
significantly reducing build times for both local and CI environments. By eliminating redundant work,
RBCS helps teams become more productive and efficient.

**Key Features:**
- Support for both Gradle and Maven build environments
- Pluggable storage backends (in-memory, disk-backed, memcached)
- Flexible authentication (HTTP basic or TLS certificate)
- Role-based access control
- Request throttling

## Table of Contents
- [Quickstart](#quickstart)
- [Integration with build tools](#integration-with-build-tools)
  - [Use RBCS with Gradle](#use-rbcs-with-gradle)
  - [Use RBCS with Maven](#use-rbcs-with-maven)
- [Server configuration](#server-configuration)
- [Authentication](#authentication)
  - [HTTP Basic authentication](#configure-http-basic-authentication)
  - [TLS client certificate authentication](#configure-tls-certificate-authentication)
- [Authentication & Access Control](#access-control)
- [Plugins](#plugins)
- [Client Tools](#rbcs-client)
- [Logging](#logging)
- [Performance](#performance)
- [FAQ](#faq)

Remote Build Cache Server (shortened to RBCS) allows you to share and reuse unchanged build
and test outputs across the team. This speeds up local and CI builds since cycles are not wasted
re-building components that are unaffected by new code changes. RBCS supports both Gradle and
Maven build tool environments.

It comes with pluggable storage backends: the core application offers in-memory and disk-backed storage,
and an official plugin adds memcached as a storage backend.

It supports HTTP basic authentication or, alternatively, TLS certificate authentication, role-based access control (RBAC),
and throttling.

## Quickstart

### Use the all-in-one jar file
You can download the latest version from [this link](https://gitea.woggioni.net/woggioni/-/packages/maven/net.woggioni:rbcs-cli/)

Assuming you have Java 21 or later installed, you can launch the server directly with

```bash
java -jar rbcs-cli.jar server
```

By default it starts an HTTP server bound to localhost, listening on port 8080 with no authentication
and writing data to disk, which you can use for testing.

### Use the Docker image
You can pull the latest Docker image with
```bash
docker pull gitea.woggioni.net/woggioni/rbcs:latest
```

By default it starts an HTTP server bound to localhost, listening on port 8080 with no authentication
and writing data to disk, which you can use for testing.
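
A possible way to run the container (a sketch, not an official invocation: it assumes you provide a configuration that binds to an address reachable through the published port, e.g. `0.0.0.0`, and the in-container paths shown are taken from the Kubernetes manifests later in this change):

```bash
# Publish port 8080 and mount a custom rbcs-server.xml into the container's home directory.
docker run --rm -p 8080:8080 \
  -v "$PWD/rbcs-server.xml:/home/luser/rbcs-server.xml" \
  gitea.woggioni.net/woggioni/rbcs:latest server -c rbcs-server.xml
```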

### Use the native executable
If you are on a Linux x86_64 machine you can download the native executable
from [here](https://gitea.woggioni.net/woggioni/-/packages/maven/net.woggioni:rbcs-cli/).
It behaves the same as the jar file, but it doesn't require a JVM and it has faster startup times.
Because of GraalVM's [closed-world assumption](https://www.graalvm.org/latest/reference-manual/native-image/basics/#static-analysis),
the native executable does not support plugins, so it comes with all plugins embedded into it.

> [!WARNING]
> The native executable is built with `-march=skylake`, so it may fail with SIGILL on x86 CPUs that do not support
> the full Skylake instruction set (as a rule of thumb, CPUs older than 2015)
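
As a quick sketch (the file name below is hypothetical; adjust it to whatever the downloaded artifact is called), the native executable is started just like the jar:

```bash
# Make the downloaded binary executable and start the server subcommand.
chmod +x rbcs-cli
./rbcs-cli server
```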

## Integration with build tools

### Use RBCS with Gradle

Add this to the `settings.gradle` file of your project

```groovy
buildCache {
    remote(HttpBuildCache) {
        url = 'https://rbcs.example.com/'
        push = true
        allowInsecureProtocol = false
        // The credentials block is only required if you enable
        // HTTP basic authentication on RBCS
        credentials {
            username = 'build-cache-user'
            password = 'some-complicated-password'
        }
    }
}
```

Alternatively, you can add this to `${GRADLE_HOME}/init.gradle` to configure the remote cache
at the system level

```groovy
gradle.settingsEvaluated { settings ->
    settings.buildCache {
        remote(HttpBuildCache) {
            url = 'https://rbcs.example.com/'
            push = true
            allowInsecureProtocol = false
            // The credentials block is only required if you enable
            // HTTP basic authentication on RBCS
            credentials {
                username = 'build-cache-user'
                password = 'some-complicated-password'
            }
        }
    }
}
```

Then add `org.gradle.caching=true` to your `<project>/gradle.properties` or run Gradle with `--build-cache`.

Read the [Gradle documentation](https://docs.gradle.org/current/userguide/build_cache.html) for more detailed information.

### Use RBCS with Maven

1. Create an `extensions.xml` file at `<project>/.mvn/extensions.xml` with the following content
```xml
<extensions xmlns="http://maven.apache.org/EXTENSIONS/1.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
            xsi:schemaLocation="http://maven.apache.org/EXTENSIONS/1.1.0 https://maven.apache.org/xsd/core-extensions-1.0.0.xsd">
    <extension>
        <groupId>org.apache.maven.extensions</groupId>
        <artifactId>maven-build-cache-extension</artifactId>
        <version>1.2.0</version>
    </extension>
</extensions>
```
2. Copy [maven-build-cache-config.xml](https://maven.apache.org/extensions/maven-build-cache-extension/maven-build-cache-config.xml) into the `<project>/.mvn/` folder
3. Edit the `cache/configuration/remote` element
```xml
<remote enabled="true" id="rbcs">
    <url>https://rbcs.example.com/</url>
</remote>
```
4. Run Maven with
```bash
mvn -Dmaven.build.cache.enabled=true -Dmaven.build.cache.debugOutput=true -Dmaven.build.cache.remote.save.enabled=true package
```

Alternatively, you can set those properties in your `<project>/pom.xml`.

Read [here](https://maven.apache.org/extensions/maven-build-cache-extension/remote-cache.html)
for more information.

## Server configuration
RBCS reads an XML configuration file, by default named `rbcs-server.xml`.
The expected location of the `rbcs-server.xml` file depends on the operating system;
if the configuration file is not found, a default one is created and its location is printed
on the console

```bash
user@76a90cbcd75d:~$ rbcs-cli server
2025-01-01 00:00:00,000 [INFO ] (main) n.w.r.c.impl.commands.ServerCommand -- Creating default configuration file at '/home/user/.config/rbcs/rbcs-server.xml'
```

Alternatively, the location can be changed by setting the `RBCS_CONFIGURATION_DIR` environment variable or the `net.woggioni.rbcs.conf.dir`
Java system property to the directory that contains the `rbcs-server.xml` file.
It can also be specified directly on the command line with
```bash
java -jar rbcs-cli.jar server -c /path/to/rbcs-server.xml
```

The server configuration file follows the XML format and uses XML schema for validation
(you can find the schema for the `rbcs-server.xml` configuration file [here](https://gitea.woggioni.net/woggioni/rbcs/src/branch/master/rbcs-server/src/main/resources/net/woggioni/rbcs/server/schema/rbcs-server.xsd)).

The configuration values are enclosed inside XML attributes and support system property / environment variable interpolation.
As an example, you can configure RBCS to read the server port number from the `RBCS_SERVER_PORT` environment variable
and the bind address from the `rbc.bind.address` JVM system property with

```xml
<bind host="${sys:rpc.bind.address}" port="${env:RBCS_SERVER_PORT}"/>
```

Full documentation for all tags and attributes and configuration file examples
are available [here](doc/server_configuration.md).

### Plugins
If you want to use memcache as a storage backend you'll also need to download [the memcache plugin](https://gitea.woggioni.net/woggioni/-/packages/maven/net.woggioni:rbcs-server-memcache/).

Plugins need to be stored in a folder named `plugins` located in the server's working directory
(the directory where the server process is started). They are shipped as TAR archives, so you need to extract
the content of the archive into the `plugins` directory for the server to pick them up.
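
A minimal sketch of a plugin installation, assuming the plugin archive has already been downloaded into the server's working directory (the archive name below is a placeholder for the actual file):

```bash
# Extract the plugin archive into the `plugins` folder inside the server's working directory.
mkdir -p plugins
tar -xf rbcs-server-memcache-<version>.tar -C plugins
```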

## Authentication

RBCS supports 2 authentication mechanisms:

- HTTP basic authentication
- TLS certificate authentication

### Configure HTTP basic authentication

Add a `<basic>` element to the `<authentication>` element in your `rbcs-server.xml`
```xml
<authentication>
    <basic/>
</authentication>
```

### Configure TLS certificate authentication

Add a `<client-certificate>` element to the `<authentication>` element in your `rbcs-server.xml`
```xml
<authentication>
    <client-certificate>
        <user-extractor attribute-name="CN" pattern="(.*)"/>
        <group-extractor attribute-name="O" pattern="(.*)"/>
    </client-certificate>
</authentication>
```
The `<user-extractor>` element determines how the username is extracted from the
subject's X.500 name in the TLS certificate presented by the client: `attribute-name`
is the `RelativeDistinguishedName` (RDN) identifier and `pattern` is a regular expression
whose first capture group is used as the username.
An error is thrown if the regular expression contains no groups, while additional
groups are ignored.

Similarly, the `<group-extractor>` element determines how the group name is extracted from the
subject's X.500 name in the TLS certificate presented by the client.
Note that this allows roles to be assigned to incoming requests without necessarily assigning them
a username.

## Access control

RBCS supports role-based access control (RBAC); three roles are available:
- `Reader` can perform `GET` calls
- `Writer` can perform `PUT` calls
- `Healthcheck` can perform `TRACE` calls

Roles are assigned to groups, so a user has a role only if that role belongs
to one of the groups they are a member of.

There is also a special `<anonymous>` user
which matches any request that has not been authenticated and which can be assigned
to any group like a normal user. This makes it possible to have a build cache that is
publicly readable but only writable by authenticated users (e.g. a CI/CD pipeline).

### Defining users

Users can be defined in the `<authorization>` element
```xml
<authorization>
    <users>
        <user name="user1" password="kb/vNnkn2RvyPkTN6Q07uH0F7wI7u61MkManD3NHregRukBg4KHehfbqtLTb39fZjHA+SRH+EpEWDCf+Rihr5H5C1YN5qwmArV0p8O5ptC4="/>
        <user name="user2" password="2J7MAhdIzZ3SO+JGB+K6wPhb4P5LH1L4L7yJCl5QrxNfAWRr5jTUExJRbcgbH1UfnkCbIO1p+xTDq+FCj3LFBZeMZUNZ47npN+WR7AX3VTo="/>
        <anonymous/>
    </users>
    <groups>
        <group name="readers">
            <users>
                <anonymous/>
            </users>
            <roles>
                <reader/>
            </roles>
        </group>
        <group name="writers">
            <users>
                <user ref="user1"/>
                <user ref="user2"/>
            </users>
            <roles>
                <reader/>
                <writer/>
                <healthcheck/>
            </roles>
        </group>
    </groups>
</authorization>
```

The `password` attribute is only used for HTTP Basic authentication, so it can be omitted
if you use TLS certificate authentication. It must contain a password hash that can be derived from
the actual password using the following command

```bash
java -jar rbcs-cli.jar password
```

## Reliability

RBCS implements the [TRACE](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/TRACE) HTTP method, and this functionality can be used
as a health check (note that you need the `Healthcheck` role to perform it, and the request URL must match the server's `prefix`).
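
A minimal health-check sketch, assuming the server is reachable on localhost port 8080 with no path prefix and that the caller holds the `Healthcheck` role (for instance via the anonymous user):

```bash
# Any successful response means the server is alive; -f makes curl fail on HTTP errors.
curl -fsS -X TRACE http://localhost:8080/ > /dev/null && echo "server is up"
```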

## RBCS Client

RBCS ships with a command line client that can be used for testing, benchmarking, or to manually
upload/download files to the cache. It must be configured with an `rbcs-client.xml` file,
whose location follows the same logic as the `rbcs-server.xml`.
The `rbcs-client.xml` must adhere to the [rbcs-client.xsd](rbcs-client/src/main/resources/net/woggioni/rbcs/client/schema/rbcs-client.xsd)
XML schema.

The documentation for the `rbcs-client.xml` configuration file is available [here](conf/client_configuration.md).

### GET command

```bash
java -jar rbcs-cli.jar client -p $CLIENT_PROFILE_NAME get -k $CACHE_KEY -v $FILE_WHERE_THE_VALUE_WILL_BE_STORED
```

### PUT command

```bash
java -jar rbcs-cli.jar client -p $CLIENT_PROFILE_NAME put -k $CACHE_KEY -v $FILE_TO_BE_UPLOADED
```

If you don't specify the key, a UUID key based on the file content will be used.
If you add the `-i` command line parameter, the uploaded file will be served with a
`Content-Disposition: inline` HTTP header so that browsers will attempt to render
it in the page instead of triggering a file download (in this way you can create a temporary web page).

The client will try to detect the file MIME type upon upload, but if you want to be sure you can specify
it manually with the `-t` parameter.
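
For instance (a sketch combining the options above; it assumes `-t` accepts a MIME type string):

```bash
# Upload an HTML page with an explicit MIME type and inline content disposition;
# the key is omitted, so a content-based UUID key is generated.
java -jar rbcs-cli.jar client -p $CLIENT_PROFILE_NAME put -i -t text/html -v page.html
```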

### Benchmark command

```bash
java -jar rbcs-cli.jar client -p $CLIENT_PROFILE_NAME benchmark -s 4096 -e 10000
```
This will insert 10000 randomly generated entries of 4096 bytes into RBCS, then retrieve them
and check that the retrieved values match what was inserted.
It will also print throughput stats along the way.

## Logging

RBCS uses [logback](https://logback.qos.ch/) and ships with a [default logging configuration](./conf/logback.xml) that
can be overridden with `-Dlogback.configurationFile=path/to/custom/configuration.xml`; refer to the
[Logback documentation](https://logback.qos.ch/manual/configuration.html) for more details about
how to configure Logback.
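
For example (a sketch assuming a custom configuration file at `/etc/rbcs/logback.xml`):

```bash
# The system property must come before -jar so it is picked up by the JVM.
java -Dlogback.configurationFile=/etc/rbcs/logback.xml -jar rbcs-cli.jar server
```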

## Performance

You can check performance benchmarks [here](doc/benchmarks.md).

## FAQ
### Why should I use a build cache?

#### Build Caches Improve Build & Test Performance

Building software consists of a number of steps, like compiling sources, executing tests, and linking binaries. We’ve seen that a binary artifact repository helps when such a step requires an external component by downloading the artifact from the repository rather than building it locally.
However, there are many additional steps in this build process which can be optimized to reduce the build time. An obvious strategy is to avoid executing build steps which dominate the total build time when these build steps are not needed.
Most build times are dominated by the testing step.

While binary repositories cannot capture the outcome of a test build step (only the test reports
when included in binary artifacts), build caches are designed to eliminate redundant executions
for every build step. Moreover, they generalize the concept of avoiding work associated with any
incremental step of the build, including test execution, compilation and resource processing.
The mechanism itself is comparable to a pure function: given some inputs such as source
files and environment parameters, we know that the output is always going to be the same.
As a result, we can cache it and retrieve it based on a simple cryptographic hash of the inputs.
Build caching is supported natively by some build tools.
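
As a toy illustration of that idea (not how any particular build tool actually computes its keys), a cache key can be derived by hashing the step's declared inputs and then used to look the output up in RBCS:

```bash
# Hash the inputs of a hypothetical compile step and query the cache with the digest.
KEY=$(cat build.gradle src/main/java/*.java | sha256sum | cut -d' ' -f1)
curl -f -o outputs.bin "https://rbcs.example.com/$KEY" \
  || echo "cache miss: run the step and upload its output under $KEY"
```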

#### Improve CI builds with a remote build cache

When analyzing the role of a build cache it is important to take into account the granularity
of the changes that it caches. Imagine a full build for a project with 40 to 50 modules
which fails at the last step (deployment) because the staging environment is temporarily unavailable.
Although the vast majority of the build steps (potentially thousands) succeed,
the change cannot be deployed to the staging environment.
Without a build cache one typically relies on a very complex CI configuration to reuse build step outputs,
or would have to repeat the full build once the environment is available.

Some build tools don’t support incremental builds properly. For example, outputs of a build started
from scratch may vary when compared to subsequent builds that rely on the initial build’s output.
As a result, to preserve build integrity, it’s crucial to rebuild from scratch, or ‘cleanly,’ in this
scenario.

With a build cache, only the last step needs to be executed and the build can be re-triggered
when the environment is back online. This automatically saves all of the time and
resources required across the different build steps which were successfully executed.
Instead of executing the intermediate steps, the build tool pulls the outputs from the build cache,
avoiding a lot of redundant work.

#### Share outputs with a remote build cache

One of the most important advantages of a remote build cache is the ability to share build outputs.
In most CI configurations, for example, a number of pipelines are created.
These may include one for building the sources, one for testing, one for publishing the outcomes
to a remote repository, and other pipelines to test on different platforms.
There are even situations where CI builds partially build a project (i.e. some modules and not others).

Most of those pipelines share a lot of intermediate build steps. All builds which perform testing
require the binaries to be ready. All publishing builds require all previous steps to be executed.
And because modern CI infrastructure means executing everything in containerized (isolated) environments,
significant resources are wasted by repeatedly building the same intermediate artifacts.

A remote build cache greatly reduces this overhead, by orders of magnitude, because it provides a way
for all those pipelines to share their outputs. After all, there is no point recreating an output that
is already available in the cache.

Because there are inherent dependencies between software components of a build,
introducing a build cache dramatically reduces the impact of exploding a component into multiple pieces,
allowing for increased modularity without increased overhead.

#### Make local developers more efficient with remote build caches

It is common for different teams within a company to work on different modules of a single large
application. In this case, most teams don’t care about building the other parts of the software.
By introducing a remote cache, developers immediately benefit from pre-built artifacts when checking out code.
Because it has already been built on CI, they don’t have to do it locally.

Introducing a remote cache is a huge benefit for those developers. Consider that a typical developer’s
day begins by performing a code checkout. Most likely the checked out code has already been built on CI.
Therefore, no time is wasted running the first build of the day. The remote cache provides all of the
intermediate artifacts needed. And, in the event local changes are made, the remote cache still leverages
partial cache hits for projects which are independent. As other developers in the organization request
CI builds, the remote cache continues to populate, increasing the likelihood of these remote cache hits
across team members.

benchmark/rbcs-filesystem.yml (new file, 94 lines)

@@ -0,0 +1,94 @@
apiVersion: v1
kind: ConfigMap
metadata:
  name: rbcs-server
data:
  rbcs-server.xml: |
    <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
    <rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
                 xmlns:rbcs="urn:net.woggioni.rbcs.server"
                 xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
                 xs:schemaLocation="urn:net.woggioni.rbcs.server.memcache jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd"
    >
      <bind host="0.0.0.0" port="8080" incoming-connections-backlog-size="128"/>
      <connection
          max-request-size="0xd000000"
          idle-timeout="PT15S"
          read-idle-timeout="PT30S"
          write-idle-timeout="PT30S"/>
      <event-executor use-virtual-threads="true"/>
      <cache xs:type="rbcs:fileSystemCacheType" max-age="P7D" enable-compression="false" path="/home/luser/cache" digest="SHA-224"/>
    </rbcs:server>

---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: rbcs-pvc
  namespace: default
spec:
  accessModes:
    - ReadWriteOnce
  storageClassName: local-path
  resources:
    requests:
      storage: 16Gi
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: rbcs-deployment
  labels:
    app: rbcs
spec:
  replicas: 1
  selector:
    matchLabels:
      app: rbcs
  template:
    metadata:
      labels:
        app: rbcs
    spec:
      containers:
        - name: rbcs
          image: gitea.woggioni.net/woggioni/rbcs:memcache
          imagePullPolicy: Always
          command: ["java", "-Dlogback.configurationFile=logback.xml", "-XX:MaxRAMPercentage=75","-jar", "/home/luser/rbcs.jar"]
          args: ['server', '-c', 'rbcs-server.xml']
          ports:
            - containerPort: 8080
          volumeMounts:
            - name: config-volume
              mountPath: /home/luser/rbcs-server.xml
              subPath: rbcs-server.xml
            - name: cache-volume
              mountPath: /home/luser/cache
          resources:
            requests:
              memory: "0.25Gi"
              cpu: "1"
            limits:
              memory: "0.5Gi"
              cpu: "1"
      volumes:
        - name: config-volume
          configMap:
            name: rbcs-server
        - name: cache-volume
          persistentVolumeClaim:
            claimName: rbcs-pvc
---
apiVersion: v1
kind: Service
metadata:
  name: rbcs-service
spec:
  type: LoadBalancer
  ports:
    - port: 8080
      targetPort: 8080
      protocol: TCP
  selector:
    app: rbcs

benchmark/rbcs-in-memory.yml (new file, 77 lines)

@@ -0,0 +1,77 @@
apiVersion: v1
kind: ConfigMap
metadata:
  name: rbcs-server
data:
  rbcs-server.xml: |
    <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
    <rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
                 xmlns:rbcs="urn:net.woggioni.rbcs.server"
                 xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
                 xs:schemaLocation="urn:net.woggioni.rbcs.server.memcache jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd"
    >
      <bind host="0.0.0.0" port="8080" incoming-connections-backlog-size="128"/>
      <connection
          max-request-size="0xd000000"
          idle-timeout="PT15S"
          read-idle-timeout="PT30S"
          write-idle-timeout="PT30S"/>
      <event-executor use-virtual-threads="true"/>
      <cache xs:type="rbcs:inMemoryCacheType" max-age="P7D" enable-compression="false" max-size="0x40000000" digest="SHA-224"/>
    </rbcs:server>

---

apiVersion: apps/v1
kind: Deployment
metadata:
  name: rbcs-deployment
  labels:
    app: rbcs
spec:
  replicas: 1
  selector:
    matchLabels:
      app: rbcs
  template:
    metadata:
      labels:
        app: rbcs
    spec:
      containers:
        - name: rbcs
          image: gitea.woggioni.net/woggioni/rbcs:memcache
          imagePullPolicy: Always
          command: ["java", "-Dlogback.configurationFile=logback.xml", "-XX:MaxRAMPercentage=75","-jar", "/home/luser/rbcs.jar"]
          args: ['server', '-c', 'rbcs-server.xml']
          ports:
            - containerPort: 8080
          volumeMounts:
            - name: config-volume
              mountPath: /home/luser/rbcs-server.xml
              subPath: rbcs-server.xml
          resources:
            requests:
              memory: "0.5Gi"
              cpu: "1"
            limits:
              memory: "4Gi"
              cpu: "1"
      volumes:
        - name: config-volume
          configMap:
            name: rbcs-server
---
apiVersion: v1
kind: Service
metadata:
  name: rbcs-service
spec:
  type: LoadBalancer
  ports:
    - port: 8080
      targetPort: 8080
      protocol: TCP
  selector:
    app: rbcs

benchmark/rbcs-memcache.yml (new file, 118 lines)

@@ -0,0 +1,118 @@
apiVersion: v1
kind: ConfigMap
metadata:
  name: rbcs-server
data:
  rbcs-server.xml: |
    <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
    <rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
                 xmlns:rbcs="urn:net.woggioni.rbcs.server"
                 xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
                 xs:schemaLocation="urn:net.woggioni.rbcs.server.memcache jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd"
    >
      <bind host="0.0.0.0" port="8080" incoming-connections-backlog-size="128"/>
      <connection
          max-request-size="0xd000000"
          idle-timeout="PT15S"
          read-idle-timeout="PT30S"
          write-idle-timeout="PT30S"/>
      <event-executor use-virtual-threads="true"/>
      <!--cache xs:type="rbcs:inMemoryCacheType" max-age="P7D" enable-compression="false" max-size="0x10000000" /-->
      <cache xs:type="rbcs-memcache:memcacheCacheType" max-age="P7D" chunk-size="0x1000" digest="SHA-224">
        <server host="memcached-service" port="11211" max-connections="256"/>
      </cache>
    </rbcs:server>

---

apiVersion: apps/v1
kind: Deployment
metadata:
  name: rbcs-deployment
  labels:
    app: rbcs
spec:
  replicas: 1
  selector:
    matchLabels:
      app: rbcs
  template:
    metadata:
      labels:
        app: rbcs
    spec:
      containers:
        - name: rbcs
          image: gitea.woggioni.net/woggioni/rbcs:memcache
          imagePullPolicy: Always
          command: ["java", "-Dlogback.configurationFile=logback.xml", "-XX:MaxRAMPercentage=75","-jar", "/home/luser/rbcs.jar"]
          args: ['server', '-c', 'rbcs-server.xml']
          ports:
            - containerPort: 8080
          volumeMounts:
            - name: config-volume
              mountPath: /home/luser/rbcs-server.xml
              subPath: rbcs-server.xml
          resources:
            requests:
              memory: "0.5Gi"
              cpu: "1"
            limits:
              memory: "0.5Gi"
              cpu: "3.5"
      volumes:
        - name: config-volume
          configMap:
            name: rbcs-server
---
apiVersion: v1
kind: Service
metadata:
  name: rbcs-service
spec:
  type: LoadBalancer
  ports:
    - port: 8080
      targetPort: 8080
      protocol: TCP
  selector:
    app: rbcs
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: memcached-deployment
spec:
  replicas: 1
  selector:
    matchLabels:
      app: memcached
  template:
    metadata:
      labels:
        app: memcached
    spec:
      containers:
        - name: memcached
          image: memcached
          args: ["-I", "128m", "-m", "4096", "-t", "1"]
          resources:
            requests:
              memory: "1Gi"
              cpu: "500m" # 0.5 CPU
            limits:
              memory: "5Gi"
              cpu: "500m" # 0.5 CPU
---
apiVersion: v1
kind: Service
metadata:
  name: memcached-service
spec:
  type: ClusterIP # ClusterIP makes it accessible only within the cluster
  ports:
    - port: 11211 # Default memcached port
      targetPort: 11211
      protocol: TCP
  selector:
    app: memcached

@@ -14,9 +14,7 @@ allprojects { subproject ->
    if(project.currentTag.isPresent()) {
        version = project.currentTag.map { it[0] }.get()
    } else {
        version = project.gitRevision.map { gitRevision ->
            "${getProperty('rbcs.version')}.${gitRevision[0..10]}"
        }.get()
        version = "${getProperty('rbcs.version')}-SNAPSHOT"
    }

    repositories {
@@ -24,7 +22,6 @@ allprojects { subproject ->
            url = getProperty('gitea.maven.url')
            content {
                includeModule 'net.woggioni', 'jwo'
                includeModule 'net.woggioni', 'xmemcached'
                includeGroup 'com.lys'
            }
        }
@@ -42,7 +39,6 @@ allprojects { subproject ->
        modularity.inferModulePath = true
        toolchain {
            languageVersion = JavaLanguageVersion.of(21)
            vendor = JvmVendorSpec.ORACLE
        }
    }

doc/benchmarks.md (new file, 86 lines)

@@ -0,0 +1,86 @@
# RBCS performance benchmarks

All tests were executed under the following conditions:
- CPU: Intel Celeron J3455 (4 physical cores)
- memory: 8GB DDR3L 1600 MHz
- disk: SATA3 120GB SSD
- HTTP compression: disabled
- cache compression: disabled
- digest: none
- authentication: disabled
- TLS: disabled
- network RTT: 14ms
- network bandwidth: 112 MiB/s

### In-memory cache backend

| Cache backend | CPU | CPU quota | Memory quota (GB) | Request size (bytes) | Client connections | PUT (req/s) | GET (req/s) |
|----------------|---------------------|-----------|-------------------|----------------------|--------------------|-------------|-------------|
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 128 | 10 | 7867 | 13762 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 128 | 100 | 7728 | 14180 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 512 | 10 | 7964 | 10992 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 512 | 100 | 8415 | 12478 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 4096 | 10 | 4268 | 5395 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 4096 | 100 | 5585 | 8259 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 65536 | 10 | 1063 | 1185 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 65536 | 100 | 1522 | 1366 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 128 | 10 | 11271 | 14092 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 128 | 100 | 16064 | 24201 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 512 | 10 | 11504 | 13077 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 512 | 100 | 17379 | 22094 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 4096 | 10 | 9151 | 9489 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 4096 | 100 | 13194 | 18268 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 65536 | 10 | 1590 | 1174 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 65536 | 100 | 1539 | 1561 |

### Filesystem cache backend

compression: disabled
digest: none
authentication: disabled
TLS: disabled

| Cache backend | CPU | CPU quota | Memory quota (GB) | Request size (bytes) | Client connections | PUT (req/s) | GET (req/s) |
|---------------|---------------------|-----------|-------------------|----------------------|--------------------|-------------|-------------|
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 128 | 10 | 1478 | 5771 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 128 | 100 | 3166 | 8070 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 512 | 10 | 1717 | 5895 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 512 | 100 | 1125 | 6564 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 4096 | 10 | 819 | 2509 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 4096 | 100 | 1136 | 2365 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 65536 | 10 | 584 | 632 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 65536 | 100 | 529 | 635 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 128 | 10 | 1227 | 3342 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 128 | 100 | 1156 | 4035 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 512 | 10 | 979 | 3294 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 512 | 100 | 1217 | 3888 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 4096 | 10 | 535 | 1805 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 4096 | 100 | 555 | 1910 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 65536 | 10 | 301 | 494 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 65536 | 100 | 353 | 595 |

### Memcache cache backend

compression: disabled
digest: MD5
authentication: disabled
TLS: disabled

| Cache backend | CPU | CPU quota | Memory quota (GB) | Request size (bytes) | Client connections | PUT (req/s) | GET (req/s) |
|---------------|---------------------|-----------|-------------------|----------------------|--------------------|-------------|-------------|
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 128 | 10 | 3380 | 6083 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 128 | 100 | 3323 | 4998 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 512 | 10 | 3924 | 6086 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 512 | 100 | 3440 | 5049 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 4096 | 10 | 3347 | 5255 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 4096 | 100 | 3685 | 4693 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 65536 | 10 | 1304 | 1343 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 65536 | 100 | 1481 | 1541 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 128 | 10 | 4667 | 7984 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 128 | 100 | 4044 | 8358 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 512 | 10 | 4177 | 7828 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 512 | 100 | 4079 | 8794 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 4096 | 10 | 4588 | 6869 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 4096 | 100 | 5343 | 7797 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 65536 | 10 | 1624 | 1317 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 65536 | 100 | 1633 | 1317 |

doc/client_configuration.md (new file, 125 lines)

@@ -0,0 +1,125 @@
# XML Schema Documentation: RBCS Client Configuration

This document provides detailed information about the XML schema for RBCS client configuration, which defines profiles for connecting to RBCS servers.

## Root Element

### `profiles`
The root element that contains a collection of server profiles.
- **Type**: `profilesType`
- **Contains**: Zero or more `profile` elements

## Complex Types

### `profilesType`
Defines the structure for the profiles collection.
- **Elements**:
  - `profile`: Server connection profile (0 to unbounded)

### `profileType`
Defines a server connection profile with authentication, connection settings, and retry policies.

- **Attributes**:
  - `name` (required): Name of the server profile, referenced with the '-p' parameter in rbcs-cli
  - `base-url` (required): RBCS server URL
  - `max-connections`: Maximum number of concurrent TCP connections (default: 50)
  - `connection-timeout`: Timeout for establishing connections
  - `enable-compression`: Whether to enable HTTP compression (default: true)

- **Elements** (in sequence):
  - **Authentication** (choice of one):
    - `no-auth`: Disable authentication
    - `basic-auth`: Enable HTTP basic authentication
    - `tls-client-auth`: Enable TLS certificate authentication
  - `connection` (optional): Connection timeout settings
  - `retry-policy` (optional): Retry policy for failed requests
  - `tls-trust-store` (optional): Custom truststore for server certificate validation

### `connectionType`
Defines connection timeout settings.

- **Attributes**:
  - `idle-timeout`: Close connection after inactivity period (default: PT30S - 30 seconds)
  - `read-idle-timeout`: Close connection when no read occurs (default: PT60S - 60 seconds)
  - `write-idle-timeout`: Close connection when no write occurs (default: PT60S - 60 seconds)

### `noAuthType`
Indicates no authentication should be used.
- No attributes or elements

### `basicAuthType`
Configures HTTP Basic Authentication.

- **Attributes**:
  - `user` (required): Username for authentication
  - `password` (required): Password for authentication

### `tlsClientAuthType`
Configures TLS client certificate authentication.

- **Attributes**:
  - `key-store-file` (required): Path to the keystore file
  - `key-store-password` (required): Password to open the keystore
  - `key-alias` (required): Alias of the keystore entry with the private key
  - `key-password` (optional): Private key entry's encryption password

### `retryType`
Defines retry policy using exponential backoff.

- **Attributes**:
  - `max-attempts` (required): Maximum number of retry attempts
  - `initial-delay`: Delay before first retry (default: PT1S - 1 second)
  - `exp`: Exponent for computing next delay (default: 2.0)
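
For example, assuming the current delay is multiplied by `exp` after every failed attempt, a policy with `max-attempts="4"`, the default `initial-delay` of PT1S and `exp="2.0"` would wait roughly 1 s, 2 s and 4 s between successive retries.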

### `trustStoreType`
Configures custom truststore for server certificate validation.

- **Attributes**:
  - `file` (required): Path to the truststore file
  - `password`: Truststore file password
  - `check-certificate-status`: Whether to check certificate validity using CRL/OCSP
  - `verify-server-certificate`: Whether to validate server certificates (default: true)

## Sample XML Document

```xml
<?xml version="1.0" encoding="UTF-8"?>
<profiles xmlns="urn:net.woggioni.rbcs.client">
    <!-- Profile with basic authentication -->
    <profile name="production-server"
             base-url="https://rbcs.example.com/api"
             max-connections="100"
             enable-compression="true">
        <basic-auth user="admin" password="secure_password123"/>
        <connection idle-timeout="PT45S"
                    read-idle-timeout="PT90S"
                    write-idle-timeout="PT90S"/>
        <retry-policy max-attempts="5"
                      initial-delay="PT2S"
                      exp="1.5"/>
        <tls-trust-store file="/path/to/truststore.jks"
                         password="truststore_password"
                         check-certificate-status="true"/>
    </profile>

    <!-- Profile with TLS client authentication -->
    <profile name="secure-server"
             base-url="https://secure.example.com/api"
             max-connections="25">
        <tls-client-auth key-store-file="/path/to/keystore.p12"
                         key-store-password="keystore_password"
                         key-alias="client-cert"
                         key-password="key_password"/>
        <retry-policy max-attempts="3"/>
    </profile>

    <!-- Profile with no authentication -->
    <profile name="development"
             base-url="http://localhost:8080/api"
             enable-compression="false">
        <no-auth/>
    </profile>
</profiles>
```

This sample XML document demonstrates three different profiles with various authentication methods and configuration options as defined in the schema.

doc/server_configuration.md (new file, 189 lines)

@@ -0,0 +1,189 @@
### RBCS server configuration file elements and attributes

#### Root Element: `server`
The root element that contains all server configuration.

**Attributes:**
- `path` (optional): URI path prefix for cache requests. Example: if set to "cache", requests would be made to "http://www.example.com/cache/KEY"

#### Child Elements

#### `<bind>`
Configures server socket settings.

**Attributes:**
- `host` (required): Server bind address
- `port` (required): Server port number
- `incoming-connections-backlog-size` (optional, default: 1024): Maximum queue length for incoming connection indications

#### `<connection>`
Configures connection handling parameters.

**Attributes:**
- `idle-timeout` (optional, default: PT30S): Connection timeout when no activity
- `read-idle-timeout` (optional, default: PT60S): Connection timeout when no reads
- `write-idle-timeout` (optional, default: PT60S): Connection timeout when no writes
- `max-request-size` (optional, default: 0x4000000): Maximum allowed request body size
- `chunk-size` (default: 0x10000): Maximum socket write size

#### `<event-executor>`
Configures event execution settings.

**Attributes:**
- `use-virtual-threads` (optional, default: true): Whether to use virtual threads for the server handler

#### `<cache>`
Defines the cache storage implementation. Two types are available:

##### InMemory Cache

A simple storage backend that uses a hash map to store data in memory

**Attributes:**
- `max-age` (default: P1D): Cache entry lifetime
- `max-size` (default: 0x1000000): Maximum cache size in bytes
- `digest` (default: MD5): Key hashing algorithm
- `enable-compression` (default: true): Enable deflate compression
- `compression-level` (default: -1): Compression level (-1 to 9)

##### FileSystem Cache

A storage backend that stores data in a folder on the disk

**Attributes:**
- `path`: Storage directory path
- `max-age` (default: P1D): Cache entry lifetime
- `digest` (default: MD5): Key hashing algorithm
- `enable-compression` (default: true): Enable deflate compression
- `compression-level` (default: -1): Compression level

#### `<authorization>`
Configures user and group-based access control.

##### `<users>`
List of registered users.
- Contains `<user>` elements:

  **Attributes:**
  - `name` (required): Username
  - `password` (optional): For basic authentication
- Can contain an `anonymous` element to allow for unauthenticated access

##### `<groups>`
List of user groups.
- Contains `<group>` elements:

  **Attributes:**
  - `name`: Group name
- Can contain:
  - `users`: List of user references
  - `roles`: List of roles (READER/WRITER)
  - `user-quota`: Per-user quota
  - `group-quota`: Group-wide quota

#### `<authentication>`
Configures the authentication mechanism. Options:
- `<basic>`: HTTP basic authentication
- `<client-certificate>`: TLS certificate authentication; it uses attributes of the subject's X.500 name
  to extract the username and group of the client.

Example:
```xml
<client-certificate>
    <user-extractor attribute-name="CN" pattern="(.*)"/>
    <group-extractor attribute-name="O" pattern="(.*)"/>
</client-certificate>
```
- `<none>`: No authentication

#### `<tls>`
Configures TLS encryption.

**Child Elements:**
- `<keystore>`: Server certificate configuration

  **Attributes:**
  - `file` (required): Keystore file path
  - `password`: Keystore password
  - `key-alias` (required): Private key alias
  - `key-password`: Private key password

- `<truststore>`: Client certificate verification

  **Attributes:**
  - `file` (required): Truststore file path
  - `password`: Truststore password
  - `check-certificate-status`: Enable CRL/OCSP checking
  - `require-client-certificate` (default: false): Require client certificates

----------------------------

# Complete configuration example

```xml
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
             xmlns:rbcs="urn:net.woggioni.rbcs.server"
             xs:schemaLocation="urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd"
>
    <bind host="0.0.0.0" port="8080" incoming-connections-backlog-size="1024"/>
    <connection
        max-request-size="67108864"
        idle-timeout="PT10S"
        read-idle-timeout="PT20S"
        write-idle-timeout="PT20S"
        chunk-size="0x1000"/>
    <event-executor use-virtual-threads="true"/>

    <cache xs:type="rbcs:inMemoryCacheType" max-age="P7D" enable-compression="false" max-size="0x10000000" />

    <!-- uncomment this to enable the filesystem storage backend, storing cache data in "${sys:java.io.tmpdir}/rbcs"
    <cache xs:type="rbcs:fileSystemCacheType" max-age="P7D" enable-compression="false" path="${sys:java.io.tmpdir}/rbcs"/>
    -->

    <!-- uncomment this to use memcache as the storage backend, also make sure you have
        the memcache plugin installed in the `plugins` directory if you are running
        the jar version of RBCS
    <cache xs:type="rbcs-memcache:memcacheCacheType" max-age="P7D" digest="MD5">
        <server host="127.0.0.1" port="11211" max-connections="256"/>
    </cache>
    -->

    <authorization>
        <users>
            <user name="user1" password="II+qeNLft2pZ/JVNo9F7jpjM/BqEcfsJW27NZ6dPVs8tAwHbxrJppKYsbL7J/SMl">
                <quota calls="100" period="PT1S"/>
            </user>
            <user name="user2" password="v6T9+q6/VNpvLknji3ixPiyz2YZCQMXj2FN7hvzbfc2Ig+IzAHO0iiBCH9oWuBDq"/>
            <anonymous>
                <quota calls="10" period="PT60S" initial-available-calls="10" max-available-calls="10"/>
            </anonymous>
        </users>
        <groups>
            <group name="readers">
                <users>
                    <anonymous/>
                </users>
                <roles>
                    <reader/>
                </roles>
            </group>
            <group name="writers">
                <users>
                    <user ref="user1"/>
                    <user ref="user2"/>
                </users>
                <roles>
                    <reader/>
                    <writer/>
                </roles>
            </group>
        </groups>
    </authorization>
    <authentication>
        <basic/>
    </authentication>
</rbcs:server>
```

@@ -3,9 +3,9 @@ RUN adduser -D luser
USER luser
WORKDIR /home/luser

FROM base-release AS release
FROM base-release AS release-vanilla
ADD rbcs-cli-envelope-*.jar rbcs.jar
ENTRYPOINT ["java", "-XX:+UseZGC", "-XX:+ZGenerational", "-jar", "/home/luser/rbcs.jar", "server"]
ENTRYPOINT ["java", "-Dlogback.configurationFile=logback.xml", "-XX:MaxRAMPercentage=70", "-XX:GCTimeRatio=24", "-XX:+UseZGC", "-XX:+ZGenerational", "-jar", "/home/luser/rbcs.jar"]

FROM base-release AS release-memcache
ADD --chown=luser:luser rbcs-cli-envelope-*.jar rbcs.jar
@@ -13,4 +13,30 @@ RUN mkdir plugins
WORKDIR /home/luser/plugins
RUN --mount=type=bind,source=.,target=/build/distributions tar -xf /build/distributions/rbcs-server-memcache*.tar
WORKDIR /home/luser
ENTRYPOINT ["java", "-XX:+UseZGC", "-XX:+ZGenerational", "-jar", "/home/luser/rbcs.jar", "server"]
ADD logback.xml .
ENTRYPOINT ["java", "-Dlogback.configurationFile=logback.xml", "-XX:MaxRAMPercentage=70", "-XX:GCTimeRatio=24", "-XX:+UseZGC", "-XX:+ZGenerational", "-jar", "/home/luser/rbcs.jar"]

FROM busybox:musl AS base-native
RUN mkdir -p /var/lib/rbcs /etc/rbcs
RUN adduser -D -u 1000 rbcs -h /var/lib/rbcs

FROM scratch AS release-native
COPY --from=base-native /etc/passwd /etc/passwd
COPY --from=base-native /etc/rbcs /etc/rbcs
COPY --from=base-native /var/lib/rbcs /var/lib/rbcs
ADD rbcs-cli.upx /usr/bin/rbcs-cli
ENV RBCS_CONFIGURATION_DIR="/etc/rbcs"
USER rbcs
WORKDIR /var/lib/rbcs
ENTRYPOINT ["/usr/bin/rbcs-cli", "-XX:MaximumHeapSizePercent=70"]

FROM debian:12-slim AS release-jlink
RUN mkdir -p /usr/share/java/rbcs
RUN --mount=type=bind,source=.,target=/build/distributions tar -xf /build/distributions/rbcs-cli*.tar -C /usr/share/java/rbcs
ADD --chmod=755 rbcs-cli.sh /usr/local/bin/rbcs-cli
RUN adduser -u 1000 luser
USER luser
WORKDIR /home/luser
ADD logback.xml .
ENV JAVA_OPTS=-XX:-UseJVMCICompiler\ -Dlogback.configurationFile=logback.xml\ -XX:MaxRAMPercentage=70\ -XX:GCTimeRatio=24\ -XX:+UseZGC\ -XX:+ZGenerational
ENTRYPOINT ["/usr/local/bin/rbcs-cli"]
28
docker/README.md
Normal file
@@ -0,0 +1,28 @@
# RBCS Docker images

There are four image flavours:
- vanilla
- memcache
- native
- jlink

The `vanilla` image only contains the envelope jar file with no plugins and is based on
`eclipse-temurin:21-jre-alpine`.

The `memcache` image is similar to the `vanilla` image, except that it also ships the
`rbcs-server-memcache` plugin in the `plugins` folder. Use this image if you want memcache
as the cache backend but don't want to use the `native` image.

The `native` image contains a native, statically-linked executable created with GraalVM Native Image
that has no userspace dependencies. It also embeds the memcache plugin inside the executable.
Use this image for maximum efficiency and minimal memory footprint.

The `jlink` image contains a custom Java runtime created with GraalVM's jlink
that only depends on glibc. It also contains the memcache plugin on the module path.
Use this image for best performance.

## Which image should I use?

The `native` image uses Java's SerialGC, so it is ideal for constrained environments such as containers
or small servers. If you have plenty of resources and want to squeeze out maximum throughput, consider the
`vanilla` or `memcache` image instead, then choose and fine-tune the garbage collector.

Also note that the `native` image is currently only available for the `x86_64` architecture,
while `vanilla` and `memcache` also ship an `aarch64` variant. A hedged example of running one of
these images is sketched below.
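For illustration only, a minimal sketch of how one of these images could be started. The image coordinates, tag and mounted configuration directory below are assumptions rather than a documented invocation (the native image reads its configuration from `RBCS_CONFIGURATION_DIR=/etc/rbcs`, as set in its Dockerfile stage).

```sh
# Hypothetical invocation: registry, tag and published port are placeholders
docker run --rm \
    -p 8080:8080 \
    -v "$PWD/conf:/etc/rbcs:ro" \
    gitea.woggioni.net/woggioni/rbcs:native
```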
@@ -29,7 +29,10 @@ Provider<Copy> prepareDockerBuild = tasks.register('prepareDockerBuild', Copy) {
|
||||
group = 'docker'
|
||||
into project.layout.buildDirectory.file('docker')
|
||||
from(configurations.docker)
|
||||
from(file('Dockerfile'))
|
||||
from(files('Dockerfile', 'rbcs-cli.sh'))
|
||||
from(rootProject.file('conf')) {
|
||||
include 'logback.xml'
|
||||
}
|
||||
}
|
||||
|
||||
Provider<DockerBuildImage> dockerBuild = tasks.register('dockerBuildImage', DockerBuildImage) {
|
||||
@@ -63,5 +66,3 @@ Provider<DockerPushImage> dockerPush = tasks.register('dockerPushImage', DockerP
|
||||
}
|
||||
images = [dockerTag.flatMap{ it.tag }, dockerTagMemcache.flatMap{ it.tag }]
|
||||
}
|
||||
|
||||
|
||||
|
3
docker/rbcs-cli.sh
Normal file
@@ -0,0 +1,3 @@
#!/bin/sh
DIR=/usr/share/java/rbcs
$DIR/bin/java $JAVA_OPTS -m net.woggioni.rbcs.cli "$@"
@@ -2,11 +2,10 @@ org.gradle.configuration-cache=false
|
||||
org.gradle.parallel=true
|
||||
org.gradle.caching=true
|
||||
|
||||
rbcs.version = 0.1.4
|
||||
rbcs.version = 0.3.1
|
||||
|
||||
lys.version = 2025.02.05
|
||||
lys.version = 2025.06.10
|
||||
|
||||
gitea.maven.url = https://gitea.woggioni.net/api/packages/woggioni/maven
|
||||
docker.registry.url=gitea.woggioni.net
|
||||
|
||||
jpms-check.configurationName = runtimeClasspath
|
||||
|
2
gradle/wrapper/gradle-wrapper.properties
vendored
@@ -1,6 +1,6 @@
|
||||
distributionBase=GRADLE_USER_HOME
|
||||
distributionPath=wrapper/dists
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-bin.zip
|
||||
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.2-bin.zip
|
||||
networkTimeout=10000
|
||||
validateDistributionUrl=true
|
||||
zipStoreBase=GRADLE_USER_HOME
|
||||
|
3
gradlew
vendored
@@ -86,8 +86,7 @@ done
|
||||
# shellcheck disable=SC2034
|
||||
APP_BASE_NAME=${0##*/}
|
||||
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
|
||||
APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s
|
||||
' "$PWD" ) || exit
|
||||
APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit
|
||||
|
||||
# Use the maximum available, or set MAX_FD != -1 to use that value.
|
||||
MAX_FD=maximum
|
||||
|
@@ -5,7 +5,12 @@ plugins {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation catalog.slf4j.api
|
||||
implementation project(':rbcs-common')
|
||||
api catalog.netty.common
|
||||
api catalog.netty.buffer
|
||||
api catalog.netty.handler
|
||||
api catalog.netty.codec.http
|
||||
}
|
||||
|
||||
publishing {
|
||||
|
@@ -1,7 +1,16 @@
|
||||
module net.woggioni.rbcs.api {
|
||||
requires static lombok;
|
||||
requires java.xml;
|
||||
requires io.netty.handler;
|
||||
requires io.netty.common;
|
||||
requires net.woggioni.rbcs.common;
|
||||
requires io.netty.transport;
|
||||
requires io.netty.codec.http;
|
||||
requires io.netty.buffer;
|
||||
requires org.slf4j;
|
||||
requires java.xml;
|
||||
|
||||
|
||||
exports net.woggioni.rbcs.api;
|
||||
exports net.woggioni.rbcs.api.exception;
|
||||
exports net.woggioni.rbcs.api.message;
|
||||
}
|
@@ -0,0 +1,13 @@
|
||||
package net.woggioni.rbcs.api;
|
||||
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
public interface AsyncCloseable extends AutoCloseable {
|
||||
|
||||
CompletableFuture<Void> asyncClose();
|
||||
|
||||
@Override
|
||||
default void close() throws Exception {
|
||||
asyncClose().get();
|
||||
}
|
||||
}
|
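As an editorial aside, a minimal sketch of a hypothetical implementer of this interface (not part of this change set), assuming it lives in a package where `AsyncCloseable` is visible:

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

// Hypothetical example: wraps an executor and completes the returned future
// once the executor has terminated (or the waiting thread is interrupted).
class ExampleResource implements AsyncCloseable {
    private final ExecutorService executor = Executors.newSingleThreadExecutor();

    @Override
    public CompletableFuture<Void> asyncClose() {
        executor.shutdown();
        return CompletableFuture.runAsync(() -> {
            try {
                executor.awaitTermination(10, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        });
    }
}
```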
@@ -1,14 +0,0 @@
|
||||
package net.woggioni.rbcs.api;
|
||||
|
||||
import io.netty.buffer.ByteBuf;
|
||||
import net.woggioni.rbcs.api.exception.ContentTooLargeException;
|
||||
|
||||
import java.nio.channels.ReadableByteChannel;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
|
||||
public interface Cache extends AutoCloseable {
|
||||
CompletableFuture<ReadableByteChannel> get(String key);
|
||||
|
||||
CompletableFuture<Void> put(String key, ByteBuf content) throws ContentTooLargeException;
|
||||
}
|
@@ -0,0 +1,57 @@
|
||||
package net.woggioni.rbcs.api;
|
||||
|
||||
import io.netty.channel.ChannelHandlerContext;
|
||||
import io.netty.channel.ChannelInboundHandlerAdapter;
|
||||
import io.netty.handler.codec.http.LastHttpContent;
|
||||
import io.netty.util.ReferenceCounted;
|
||||
import lombok.extern.slf4j.Slf4j;
|
||||
import net.woggioni.rbcs.api.message.CacheMessage;
|
||||
|
||||
@Slf4j
|
||||
public abstract class CacheHandler extends ChannelInboundHandlerAdapter {
|
||||
private boolean requestFinished = false;
|
||||
|
||||
abstract protected void channelRead0(ChannelHandlerContext ctx, CacheMessage msg);
|
||||
|
||||
@Override
|
||||
public void channelRead(ChannelHandlerContext ctx, Object msg) {
|
||||
if(!requestFinished && msg instanceof CacheMessage) {
|
||||
if(msg instanceof CacheMessage.LastCacheContent) requestFinished = true;
|
||||
try {
|
||||
channelRead0(ctx, (CacheMessage) msg);
|
||||
} finally {
|
||||
if(msg instanceof ReferenceCounted rc) rc.release();
|
||||
}
|
||||
} else {
|
||||
ctx.fireChannelRead(msg);
|
||||
}
|
||||
}
|
||||
|
||||
protected void sendMessageAndFlush(ChannelHandlerContext ctx, Object msg) {
|
||||
sendMessage(ctx, msg, true);
|
||||
}
|
||||
|
||||
protected void sendMessage(ChannelHandlerContext ctx, Object msg) {
|
||||
sendMessage(ctx, msg, false);
|
||||
}
|
||||
|
||||
private void sendMessage(ChannelHandlerContext ctx, Object msg, boolean flush) {
|
||||
ctx.write(msg);
|
||||
if(
|
||||
msg instanceof CacheMessage.LastCacheContent ||
|
||||
msg instanceof CacheMessage.CachePutResponse ||
|
||||
msg instanceof CacheMessage.CacheValueNotFoundResponse ||
|
||||
msg instanceof LastHttpContent
|
||||
) {
|
||||
ctx.flush();
|
||||
ctx.pipeline().remove(this);
|
||||
} else if(flush) {
|
||||
ctx.flush();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
|
||||
super.exceptionCaught(ctx, cause);
|
||||
}
|
||||
}
|
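Purely for illustration, a hedged sketch of a trivial subclass built on this base class, using the `CacheMessage` subtypes introduced later in this change set (the `NoopCacheHandler` class itself is hypothetical and not part of the diff):

```java
import io.netty.channel.ChannelHandlerContext;
import net.woggioni.rbcs.api.message.CacheMessage;

// Hypothetical example: a handler that stores nothing, answers every lookup
// with a "not found" response and acknowledges every put immediately.
class NoopCacheHandler extends CacheHandler {
    @Override
    protected void channelRead0(ChannelHandlerContext ctx, CacheMessage msg) {
        if (msg instanceof CacheMessage.CacheGetRequest get) {
            // Terminal responses are flushed and this handler is removed from
            // the pipeline by sendMessage (see above).
            sendMessageAndFlush(ctx, new CacheMessage.CacheValueNotFoundResponse(get.getKey()));
        } else if (msg instanceof CacheMessage.CachePutRequest put) {
            sendMessageAndFlush(ctx, new CacheMessage.CachePutResponse(put.getKey()));
        }
        // CacheContent / LastCacheContent chunks are released by CacheHandler.channelRead
        // after this method returns, so no explicit cleanup is needed here.
    }
}
```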
@@ -0,0 +1,15 @@
|
||||
package net.woggioni.rbcs.api;
|
||||
|
||||
import io.netty.channel.ChannelFactory;
|
||||
import io.netty.channel.EventLoopGroup;
|
||||
import io.netty.channel.socket.DatagramChannel;
|
||||
import io.netty.channel.socket.SocketChannel;
|
||||
|
||||
public interface CacheHandlerFactory extends AsyncCloseable {
|
||||
CacheHandler newHandler(
|
||||
Configuration configuration,
|
||||
EventLoopGroup eventLoopGroup,
|
||||
ChannelFactory<SocketChannel> socketChannelFactory,
|
||||
ChannelFactory<DatagramChannel> datagramChannelFactory
|
||||
);
|
||||
}
|
@@ -0,0 +1,14 @@
|
||||
package net.woggioni.rbcs.api;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
|
||||
import java.io.Serializable;
|
||||
|
||||
@Getter
|
||||
@RequiredArgsConstructor
|
||||
public class CacheValueMetadata implements Serializable {
|
||||
private final String contentDisposition;
|
||||
private final String mimeType;
|
||||
}
|
||||
|
@@ -21,6 +21,8 @@ public class Configuration {
|
||||
@NonNull
|
||||
EventExecutor eventExecutor;
|
||||
@NonNull
|
||||
RateLimiter rateLimiter;
|
||||
@NonNull
|
||||
Connection connection;
|
||||
Map<String, User> users;
|
||||
Map<String, Group> groups;
|
||||
@@ -28,6 +30,13 @@ public class Configuration {
|
||||
Authentication authentication;
|
||||
Tls tls;
|
||||
|
||||
@Value
|
||||
public static class RateLimiter {
|
||||
boolean delayRequest;
|
||||
int messageBufferSize;
|
||||
int maxQueuedMessages;
|
||||
}
|
||||
|
||||
@Value
|
||||
public static class EventExecutor {
|
||||
boolean useVirtualThreads;
|
||||
@@ -35,12 +44,11 @@ public class Configuration {
|
||||
|
||||
@Value
|
||||
public static class Connection {
|
||||
Duration readTimeout;
|
||||
Duration writeTimeout;
|
||||
Duration idleTimeout;
|
||||
Duration readIdleTimeout;
|
||||
Duration writeIdleTimeout;
|
||||
int maxRequestSize;
|
||||
int chunkSize;
|
||||
}
|
||||
|
||||
@Value
|
||||
@@ -85,17 +93,6 @@ public class Configuration {
|
||||
Group extract(X509Certificate cert);
|
||||
}
|
||||
|
||||
@Value
|
||||
public static class Throttling {
|
||||
KeyStore keyStore;
|
||||
TrustStore trustStore;
|
||||
boolean verifyClients;
|
||||
}
|
||||
|
||||
public enum ClientCertificate {
|
||||
REQUIRED, OPTIONAL
|
||||
}
|
||||
|
||||
@Value
|
||||
public static class Tls {
|
||||
KeyStore keyStore;
|
||||
@@ -135,7 +132,7 @@ public class Configuration {
|
||||
}
|
||||
|
||||
public interface Cache {
|
||||
net.woggioni.rbcs.api.Cache materialize();
|
||||
CacheHandlerFactory materialize();
|
||||
String getNamespaceURI();
|
||||
String getTypeName();
|
||||
}
|
||||
@@ -146,6 +143,7 @@ public class Configuration {
|
||||
int incomingConnectionsBacklogSize,
|
||||
String serverPath,
|
||||
EventExecutor eventExecutor,
|
||||
RateLimiter rateLimiter,
|
||||
Connection connection,
|
||||
Map<String, User> users,
|
||||
Map<String, Group> groups,
|
||||
@@ -159,6 +157,7 @@ public class Configuration {
|
||||
incomingConnectionsBacklogSize,
|
||||
serverPath != null && !serverPath.isEmpty() && !serverPath.equals("/") ? serverPath : null,
|
||||
eventExecutor,
|
||||
rateLimiter,
|
||||
connection,
|
||||
users,
|
||||
groups,
|
||||
|
@@ -1,5 +1,5 @@
|
||||
package net.woggioni.rbcs.api;
|
||||
|
||||
public enum Role {
|
||||
Reader, Writer
|
||||
Reader, Writer, Healthcheck
|
||||
}
|
@@ -0,0 +1,170 @@
|
||||
package net.woggioni.rbcs.api.message;
|
||||
|
||||
import io.netty.buffer.ByteBuf;
|
||||
import io.netty.buffer.ByteBufHolder;
|
||||
import lombok.Getter;
|
||||
import lombok.RequiredArgsConstructor;
|
||||
import net.woggioni.rbcs.api.CacheValueMetadata;
|
||||
|
||||
public sealed interface CacheMessage {
|
||||
|
||||
@Getter
|
||||
@RequiredArgsConstructor
|
||||
final class CacheGetRequest implements CacheMessage {
|
||||
private final String key;
|
||||
}
|
||||
|
||||
@Getter
|
||||
@RequiredArgsConstructor
|
||||
abstract sealed class CacheGetResponse implements CacheMessage {
|
||||
private final String key;
|
||||
}
|
||||
|
||||
@Getter
|
||||
final class CacheValueFoundResponse extends CacheGetResponse {
|
||||
private final CacheValueMetadata metadata;
|
||||
|
||||
public CacheValueFoundResponse(String key, CacheValueMetadata metadata) {
|
||||
super(key);
|
||||
this.metadata = metadata;
|
||||
}
|
||||
}
|
||||
|
||||
final class CacheValueNotFoundResponse extends CacheGetResponse {
|
||||
public CacheValueNotFoundResponse(String key) {
|
||||
super(key);
|
||||
}
|
||||
}
|
||||
|
||||
@Getter
|
||||
@RequiredArgsConstructor
|
||||
final class CachePutRequest implements CacheMessage {
|
||||
private final String key;
|
||||
private final CacheValueMetadata metadata;
|
||||
}
|
||||
|
||||
@Getter
|
||||
@RequiredArgsConstructor
|
||||
final class CachePutResponse implements CacheMessage {
|
||||
private final String key;
|
||||
}
|
||||
|
||||
@RequiredArgsConstructor
|
||||
non-sealed class CacheContent implements CacheMessage, ByteBufHolder {
|
||||
protected final ByteBuf chunk;
|
||||
|
||||
@Override
|
||||
public ByteBuf content() {
|
||||
return chunk;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CacheContent copy() {
|
||||
return replace(chunk.copy());
|
||||
}
|
||||
|
||||
@Override
|
||||
public CacheContent duplicate() {
|
||||
return new CacheContent(chunk.duplicate());
|
||||
}
|
||||
|
||||
@Override
|
||||
public CacheContent retainedDuplicate() {
|
||||
return new CacheContent(chunk.retainedDuplicate());
|
||||
}
|
||||
|
||||
@Override
|
||||
public CacheContent replace(ByteBuf content) {
|
||||
return new CacheContent(content);
|
||||
}
|
||||
|
||||
@Override
|
||||
public CacheContent retain() {
|
||||
chunk.retain();
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CacheContent retain(int increment) {
|
||||
chunk.retain(increment);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CacheContent touch() {
|
||||
chunk.touch();
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public CacheContent touch(Object hint) {
|
||||
chunk.touch(hint);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int refCnt() {
|
||||
return chunk.refCnt();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean release() {
|
||||
return chunk.release();
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean release(int decrement) {
|
||||
return chunk.release(decrement);
|
||||
}
|
||||
}
|
||||
|
||||
final class LastCacheContent extends CacheContent {
|
||||
public LastCacheContent(ByteBuf chunk) {
|
||||
super(chunk);
|
||||
}
|
||||
|
||||
@Override
|
||||
public LastCacheContent copy() {
|
||||
return replace(chunk.copy());
|
||||
}
|
||||
|
||||
@Override
|
||||
public LastCacheContent duplicate() {
|
||||
return new LastCacheContent(chunk.duplicate());
|
||||
}
|
||||
|
||||
@Override
|
||||
public LastCacheContent retainedDuplicate() {
|
||||
return new LastCacheContent(chunk.retainedDuplicate());
|
||||
}
|
||||
|
||||
@Override
|
||||
public LastCacheContent replace(ByteBuf content) {
|
||||
return new LastCacheContent(content);
|
||||
}
|
||||
|
||||
@Override
|
||||
public LastCacheContent retain() {
|
||||
super.retain();
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public LastCacheContent retain(int increment) {
|
||||
super.retain(increment);
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public LastCacheContent touch() {
|
||||
super.touch();
|
||||
return this;
|
||||
}
|
||||
|
||||
@Override
|
||||
public LastCacheContent touch(Object hint) {
|
||||
super.touch(hint);
|
||||
return this;
|
||||
}
|
||||
}
|
||||
}
|
@@ -9,12 +9,62 @@ plugins {
|
||||
id 'maven-publish'
|
||||
}
|
||||
|
||||
|
||||
import net.woggioni.gradle.envelope.EnvelopeJarTask
|
||||
import net.woggioni.gradle.graalvm.NativeImageConfigurationTask
|
||||
import net.woggioni.gradle.graalvm.NativeImagePlugin
|
||||
import net.woggioni.gradle.graalvm.NativeImageTask
|
||||
import net.woggioni.gradle.graalvm.JlinkPlugin
|
||||
import net.woggioni.gradle.graalvm.JlinkTask
|
||||
import net.woggioni.gradle.envelope.EnvelopePlugin
|
||||
import net.woggioni.gradle.graalvm.*
|
||||
|
||||
sourceSets {
|
||||
configureNativeImage {
|
||||
java {
|
||||
}
|
||||
kotlin {
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
configurations {
|
||||
|
||||
release {
|
||||
transitive = false
|
||||
canBeConsumed = true
|
||||
canBeResolved = true
|
||||
visible = true
|
||||
}
|
||||
|
||||
configureNativeImageImplementation {
|
||||
extendsFrom implementation
|
||||
}
|
||||
|
||||
configureNativeImageRuntimeOnly {
|
||||
extendsFrom runtimeOnly
|
||||
}
|
||||
|
||||
nativeImage {
|
||||
extendsFrom runtimeClasspath
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
dependencies {
|
||||
configureNativeImageImplementation project
|
||||
configureNativeImageImplementation project(':rbcs-server-memcache')
|
||||
|
||||
implementation catalog.jwo
|
||||
implementation catalog.slf4j.api
|
||||
implementation catalog.picocli
|
||||
|
||||
implementation project(':rbcs-client')
|
||||
implementation project(':rbcs-server')
|
||||
|
||||
// runtimeOnly catalog.slf4j.jdk14
|
||||
runtimeOnly catalog.logback.classic
|
||||
// runtimeOnly catalog.slf4j.simple
|
||||
nativeImage project(':rbcs-server-memcache')
|
||||
|
||||
}
|
||||
|
||||
|
||||
Property<String> mainModuleName = objects.property(String.class)
|
||||
mainModuleName.set('net.woggioni.rbcs.cli')
|
||||
@@ -25,77 +75,105 @@ tasks.named(JavaPlugin.COMPILE_JAVA_TASK_NAME, JavaCompile) {
|
||||
options.javaModuleMainClass = mainClassName
|
||||
}
|
||||
|
||||
configurations {
|
||||
release {
|
||||
transitive = false
|
||||
canBeConsumed = true
|
||||
canBeResolved = true
|
||||
visible = true
|
||||
}
|
||||
}
|
||||
Provider<Jar> jarTaskProvider = tasks.named(JavaPlugin.JAR_TASK_NAME, Jar)
|
||||
|
||||
envelopeJar {
|
||||
Provider<EnvelopeJarTask> envelopeJarTaskProvider = tasks.named(EnvelopePlugin.ENVELOPE_JAR_TASK_NAME, EnvelopeJarTask.class) {
|
||||
mainModule = mainModuleName
|
||||
mainClass = mainClassName
|
||||
|
||||
extraClasspath = ["plugins"]
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation catalog.jwo
|
||||
implementation catalog.slf4j.api
|
||||
implementation catalog.netty.codec.http
|
||||
implementation catalog.picocli
|
||||
|
||||
implementation project(':rbcs-client')
|
||||
implementation project(':rbcs-server')
|
||||
|
||||
// runtimeOnly catalog.slf4j.jdk14
|
||||
runtimeOnly catalog.logback.classic
|
||||
// runtimeOnly catalog.slf4j.simple
|
||||
}
|
||||
|
||||
Provider<EnvelopeJarTask> envelopeJarTaskProvider = tasks.named('envelopeJar', EnvelopeJarTask.class) {
|
||||
// systemProperties['java.util.logging.config.class'] = 'net.woggioni.rbcs.LoggingConfig'
|
||||
// systemProperties['log.config.source'] = 'net/woggioni/rbcs/cli/logging.properties'
|
||||
// systemProperties['java.util.logging.config.file'] = 'classpath:net/woggioni/rbcs/cli/logging.properties'
|
||||
systemProperties['logback.configurationFile'] = 'classpath:net/woggioni/rbcs/cli/logback.xml'
|
||||
systemProperties['io.netty.leakDetectionLevel'] = 'DISABLED'
|
||||
|
||||
// systemProperties['org.slf4j.simpleLogger.showDateTime'] = 'true'
|
||||
// systemProperties['org.slf4j.simpleLogger.defaultLogLevel'] = 'debug'
|
||||
// systemProperties['org.slf4j.simpleLogger.log.com.google.code.yanf4j'] = 'warn'
|
||||
// systemProperties['org.slf4j.simpleLogger.log.net.rubyeye.xmemcached'] = 'warn'
|
||||
// systemProperties['org.slf4j.simpleLogger.dateTimeFormat'] = 'yyyy-MM-dd\'T\'HH:mm:ss.SSSZ'
|
||||
}
|
||||
|
||||
tasks.named(NativeImagePlugin.CONFIGURE_NATIVE_IMAGE_TASK_NAME, NativeImageConfigurationTask) {
|
||||
mainClass = mainClassName
|
||||
mainModule = mainModuleName
|
||||
toolchain {
|
||||
languageVersion = JavaLanguageVersion.of(21)
|
||||
vendor = JvmVendorSpec.GRAAL_VM
|
||||
}
|
||||
mainClass = "net.woggioni.rbcs.cli.graal.GraalNativeImageConfiguration"
|
||||
classpath = project.files(
|
||||
configurations.configureNativeImageRuntimeClasspath,
|
||||
sourceSets.configureNativeImage.output
|
||||
)
|
||||
mergeConfiguration = false
|
||||
systemProperty('logback.configurationFile', 'classpath:net/woggioni/rbcs/cli/logback.xml')
|
||||
systemProperty('io.netty.leakDetectionLevel', 'DISABLED')
|
||||
modularity.inferModulePath = false
|
||||
enabled = true
|
||||
systemProperty('gradle.tmp.dir', temporaryDir.toString())
|
||||
}
|
||||
|
||||
tasks.named(NativeImagePlugin.NATIVE_IMAGE_TASK_NAME, NativeImageTask) {
|
||||
nativeImage {
|
||||
toolchain {
|
||||
languageVersion = JavaLanguageVersion.of(23)
|
||||
vendor = JvmVendorSpec.GRAAL_VM
|
||||
}
|
||||
mainClass = mainClassName
|
||||
mainModule = mainModuleName
|
||||
// mainModule = mainModuleName
|
||||
useMusl = true
|
||||
buildStaticImage = true
|
||||
linkAtBuildTime = false
|
||||
classpath = project.files(jarTaskProvider, configurations.nativeImage)
|
||||
compressExecutable = true
|
||||
compressionLevel = 6
|
||||
useLZMA = false
|
||||
}
|
||||
|
||||
tasks.named(JlinkPlugin.JLINK_TASK_NAME, JlinkTask) {
|
||||
Provider<UpxTask> upxTaskProvider = tasks.named(NativeImagePlugin.UPX_TASK_NAME, UpxTask) {
|
||||
}
|
||||
|
||||
Provider<JlinkTask> jlinkTaskProvider = tasks.named(JlinkPlugin.JLINK_TASK_NAME, JlinkTask) {
|
||||
toolchain {
|
||||
languageVersion = JavaLanguageVersion.of(21)
|
||||
vendor = JvmVendorSpec.GRAAL_VM
|
||||
}
|
||||
|
||||
mainClass = mainClassName
|
||||
mainModule = 'net.woggioni.rbcs.cli'
|
||||
classpath = project.files(
|
||||
configurations.configureNativeImageRuntimeClasspath,
|
||||
sourceSets.configureNativeImage.output
|
||||
)
|
||||
additionalModules = [
|
||||
'net.woggioni.rbcs.server.memcache',
|
||||
'ch.qos.logback.classic',
|
||||
'jdk.crypto.ec'
|
||||
]
|
||||
compressionLevel = 2
|
||||
stripDebug = false
|
||||
}
|
||||
|
||||
Provider<Tar> jlinkDistTarTaskProvider = tasks.named(JlinkPlugin.JLINK_DIST_TAR_TASK_NAME, Tar) {
|
||||
exclude 'lib/libjvmcicompiler.so'
|
||||
}
|
||||
|
||||
tasks.named(JavaPlugin.PROCESS_RESOURCES_TASK_NAME, ProcessResources) {
|
||||
from(rootProject.file('conf')) {
|
||||
into('net/woggioni/rbcs/cli')
|
||||
include 'logback.xml'
|
||||
include 'logging.properties'
|
||||
}
|
||||
}
|
||||
|
||||
artifacts {
|
||||
release(envelopeJarTaskProvider)
|
||||
release(upxTaskProvider)
|
||||
release(jlinkDistTarTaskProvider)
|
||||
}
|
||||
|
||||
publishing {
|
||||
publications {
|
||||
maven(MavenPublication) {
|
||||
artifact envelopeJar
|
||||
artifact(upxTaskProvider) {
|
||||
classifier = "linux-x86_64"
|
||||
extension = "exe"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
15
rbcs-cli/conf/rbcs-client.xml
Normal file
@@ -0,0 +1,15 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<rbcs-client:profiles xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:rbcs-client="urn:net.woggioni.rbcs.client"
|
||||
xs:schemaLocation="urn:net.woggioni.rbcs.client jpms://net.woggioni.rbcs.client/net/woggioni/rbcs/client/schema/rbcs-client.xsd"
|
||||
>
|
||||
<profile name="profile1" base-url="https://rbcs1.example.com/">
|
||||
<no-auth/>
|
||||
<connection write-idle-timeout="PT60S"
|
||||
read-idle-timeout="PT60S"
|
||||
idle-timeout="PT30S" />
|
||||
</profile>
|
||||
<profile name="profile2" base-url="https://rbcs2.example.com/">
|
||||
<basic-auth user="user" password="password"/>
|
||||
</profile>
|
||||
</rbcs-client:profiles>
|
53
rbcs-cli/conf/rbcs-server.xml
Normal file
@@ -0,0 +1,53 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:rbcs="urn:net.woggioni.rbcs.server"
|
||||
xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
|
||||
xs:schemaLocation="urn:net.woggioni.rbcs.server.memcache jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd"
|
||||
>
|
||||
<bind host="127.0.0.1" port="8080" incoming-connections-backlog-size="1024"/>
|
||||
<connection
|
||||
max-request-size="67108864"
|
||||
idle-timeout="PT10S"
|
||||
read-idle-timeout="PT20S"
|
||||
write-idle-timeout="PT20S"/>
|
||||
<event-executor use-virtual-threads="true"/>
|
||||
<cache xs:type="rbcs-memcache:memcacheCacheType" max-age="P7D" chunk-size="0x1000" digest="MD5">
|
||||
<server host="127.0.0.1" port="11211" max-connections="256"/>
|
||||
</cache>
|
||||
<!--cache xs:type="rbcs:inMemoryCacheType" max-age="P7D" enable-compression="false" max-size="0x10000000" /-->
|
||||
<!--cache xs:type="rbcs:fileSystemCacheType" max-age="P7D" enable-compression="false" /-->
|
||||
<authorization>
|
||||
<users>
|
||||
<user name="woggioni" password="II+qeNLft2pZ/JVNo9F7jpjM/BqEcfsJW27NZ6dPVs8tAwHbxrJppKYsbL7J/SMl">
|
||||
<quota calls="100" period="PT1S"/>
|
||||
</user>
|
||||
<user name="gitea" password="v6T9+q6/VNpvLknji3ixPiyz2YZCQMXj2FN7hvzbfc2Ig+IzAHO0iiBCH9oWuBDq"/>
|
||||
<anonymous>
|
||||
<quota calls="10" period="PT60S" initial-available-calls="10" max-available-calls="10"/>
|
||||
</anonymous>
|
||||
</users>
|
||||
<groups>
|
||||
<group name="readers">
|
||||
<users>
|
||||
<anonymous/>
|
||||
</users>
|
||||
<roles>
|
||||
<reader/>
|
||||
</roles>
|
||||
</group>
|
||||
<group name="writers">
|
||||
<users>
|
||||
<user ref="woggioni"/>
|
||||
<user ref="gitea"/>
|
||||
</users>
|
||||
<roles>
|
||||
<reader/>
|
||||
<writer/>
|
||||
</roles>
|
||||
</group>
|
||||
</groups>
|
||||
</authorization>
|
||||
<authentication>
|
||||
<none/>
|
||||
</authentication>
|
||||
</rbcs:server>
|
6
rbcs-cli/native-image/jni-config.json
Normal file
@@ -0,0 +1,6 @@
|
||||
[
|
||||
{
|
||||
"name":"java.lang.Boolean",
|
||||
"methods":[{"name":"getBoolean","parameterTypes":["java.lang.String"] }]
|
||||
}
|
||||
]
|
@@ -1,2 +1,2 @@
|
||||
Args=-H:Optimize=3 --gc=serial --initialize-at-run-time=io.netty
|
||||
Args=-O3 -march=x86-64-v2 --gc=serial --install-exit-handlers --initialize-at-run-time=io.netty --enable-url-protocols=jpms --initialize-at-build-time=net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory,net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory$JpmsHandler
|
||||
#-H:TraceClassInitialization=io.netty.handler.ssl.BouncyCastleAlpnSslUtils
|
8
rbcs-cli/native-image/predefined-classes-config.json
Normal file
@@ -0,0 +1,8 @@
|
||||
[
|
||||
{
|
||||
"type":"agent-extracted",
|
||||
"classes":[
|
||||
]
|
||||
}
|
||||
]
|
||||
|
2
rbcs-cli/native-image/proxy-config.json
Normal file
@@ -0,0 +1,2 @@
|
||||
[
|
||||
]
|
728
rbcs-cli/native-image/reflect-config.json
Normal file
@@ -0,0 +1,728 @@
|
||||
[
|
||||
{
|
||||
"name":"android.os.Build$VERSION"
|
||||
},
|
||||
{
|
||||
"name":"ch.qos.logback.classic.encoder.PatternLayoutEncoder",
|
||||
"queryAllPublicMethods":true,
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"ch.qos.logback.classic.joran.SerializedModelConfigurator",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"ch.qos.logback.classic.util.DefaultJoranConfigurator",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"ch.qos.logback.core.ConsoleAppender",
|
||||
"queryAllPublicMethods":true,
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }, {"name":"setTarget","parameterTypes":["java.lang.String"] }]
|
||||
},
|
||||
{
|
||||
"name":"ch.qos.logback.core.OutputStreamAppender",
|
||||
"methods":[{"name":"setEncoder","parameterTypes":["ch.qos.logback.core.encoder.Encoder"] }]
|
||||
},
|
||||
{
|
||||
"name":"ch.qos.logback.core.encoder.Encoder",
|
||||
"methods":[{"name":"valueOf","parameterTypes":["java.lang.String"] }]
|
||||
},
|
||||
{
|
||||
"name":"ch.qos.logback.core.encoder.LayoutWrappingEncoder",
|
||||
"methods":[{"name":"setParent","parameterTypes":["ch.qos.logback.core.spi.ContextAware"] }]
|
||||
},
|
||||
{
|
||||
"name":"ch.qos.logback.core.pattern.PatternLayoutEncoderBase",
|
||||
"methods":[{"name":"setPattern","parameterTypes":["java.lang.String"] }]
|
||||
},
|
||||
{
|
||||
"name":"ch.qos.logback.core.spi.ContextAware",
|
||||
"methods":[{"name":"valueOf","parameterTypes":["java.lang.String"] }]
|
||||
},
|
||||
{
|
||||
"name":"com.aayushatharva.brotli4j.Brotli4jLoader"
|
||||
},
|
||||
{
|
||||
"name":"com.github.luben.zstd.Zstd"
|
||||
},
|
||||
{
|
||||
"name":"com.jcraft.jzlib.JZlib"
|
||||
},
|
||||
{
|
||||
"name":"com.sun.crypto.provider.AESCipher$General",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"com.sun.crypto.provider.ARCFOURCipher",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"com.sun.crypto.provider.ChaCha20Cipher$ChaCha20Poly1305",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"com.sun.crypto.provider.DESCipher",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"com.sun.crypto.provider.DESedeCipher",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"com.sun.crypto.provider.DHParameters",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"com.sun.crypto.provider.GaloisCounterMode$AESGCM",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"com.sun.crypto.provider.HmacCore$HmacSHA512",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"com.sun.crypto.provider.PBKDF2Core$HmacSHA512",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"com.sun.crypto.provider.TlsMasterSecretGenerator",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"com.sun.org.apache.xerces.internal.impl.dv.xs.ExtendedSchemaDVFactoryImpl",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"com.sun.org.apache.xerces.internal.impl.dv.xs.SchemaDVFactoryImpl",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"groovy.lang.Closure"
|
||||
},
|
||||
{
|
||||
"name":"io.netty.bootstrap.ServerBootstrap$1"
|
||||
},
|
||||
{
|
||||
"name":"io.netty.bootstrap.ServerBootstrap$ServerBootstrapAcceptor",
|
||||
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.buffer.AbstractByteBufAllocator",
|
||||
"queryAllDeclaredMethods":true
|
||||
},
|
||||
{
|
||||
"name":"io.netty.buffer.AbstractReferenceCountedByteBuf",
|
||||
"fields":[{"name":"refCnt"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.buffer.AdaptivePoolingAllocator$Chunk",
|
||||
"fields":[{"name":"refCnt"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.buffer.AdaptivePoolingAllocator$Magazine",
|
||||
"fields":[{"name":"nextInLine"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.AbstractChannelHandlerContext",
|
||||
"fields":[{"name":"handlerState"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.ChannelDuplexHandler",
|
||||
"methods":[{"name":"bind","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"close","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"connect","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"deregister","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"disconnect","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"flush","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"read","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.ChannelHandlerAdapter",
|
||||
"methods":[{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.ChannelInboundHandlerAdapter",
|
||||
"methods":[{"name":"channelActive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRegistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelUnregistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelWritabilityChanged","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }, {"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.ChannelInitializer",
|
||||
"methods":[{"name":"channelRegistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.ChannelOutboundBuffer",
|
||||
"fields":[{"name":"totalPendingSize"}, {"name":"unwritable"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.ChannelOutboundHandlerAdapter",
|
||||
"methods":[{"name":"bind","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"connect","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"deregister","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"disconnect","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"flush","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"read","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.CombinedChannelDuplexHandler",
|
||||
"methods":[{"name":"bind","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"channelActive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRegistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelUnregistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelWritabilityChanged","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"close","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"connect","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"deregister","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"disconnect","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }, {"name":"flush","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"read","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.DefaultChannelConfig",
|
||||
"fields":[{"name":"autoRead"}, {"name":"writeBufferWaterMark"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.DefaultChannelPipeline",
|
||||
"fields":[{"name":"estimatorHandle"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.DefaultChannelPipeline$HeadContext",
|
||||
"methods":[{"name":"bind","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"channelActive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRegistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelUnregistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelWritabilityChanged","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"close","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"connect","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"deregister","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"disconnect","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }, {"name":"flush","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"read","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.DefaultChannelPipeline$TailContext",
|
||||
"methods":[{"name":"channelActive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRegistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelUnregistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelWritabilityChanged","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }, {"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.SimpleChannelInboundHandler",
|
||||
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.embedded.EmbeddedChannel$2"
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.pool.SimpleChannelPool$1"
|
||||
},
|
||||
{
|
||||
"name":"io.netty.channel.socket.nio.NioSocketChannel",
|
||||
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.ByteToMessageDecoder",
|
||||
"methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.MessageAggregator",
|
||||
"methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.MessageToByteEncoder",
|
||||
"methods":[{"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.MessageToMessageCodec",
|
||||
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.MessageToMessageDecoder",
|
||||
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.compression.JdkZlibDecoder"
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.compression.JdkZlibEncoder",
|
||||
"methods":[{"name":"close","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.http.HttpClientCodec"
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.http.HttpContentDecoder",
|
||||
"methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.http.HttpContentDecompressor"
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.http.HttpContentEncoder",
|
||||
"methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.http.HttpObjectAggregator"
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.http.HttpServerCodec"
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.codec.memcache.binary.BinaryMemcacheClientCodec"
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.stream.ChunkedWriteHandler",
|
||||
"methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelWritabilityChanged","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"flush","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.handler.timeout.IdleStateHandler",
|
||||
"methods":[{"name":"channelActive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRegistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.internal.tcnative.SSLContext"
|
||||
},
|
||||
{
|
||||
"name":"io.netty.util.AbstractReferenceCounted",
|
||||
"fields":[{"name":"refCnt"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.util.DefaultAttributeMap",
|
||||
"fields":[{"name":"attributes"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.util.DefaultAttributeMap$DefaultAttribute",
|
||||
"fields":[{"name":"attributeMap"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.util.Recycler$DefaultHandle",
|
||||
"fields":[{"name":"state"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.util.ReferenceCountUtil",
|
||||
"queryAllDeclaredMethods":true
|
||||
},
|
||||
{
|
||||
"name":"io.netty.util.concurrent.DefaultPromise",
|
||||
"fields":[{"name":"result"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.util.concurrent.SingleThreadEventExecutor",
|
||||
"fields":[{"name":"state"}, {"name":"threadProperties"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields",
|
||||
"fields":[{"name":"producerLimit"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields",
|
||||
"fields":[{"name":"consumerIndex"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields",
|
||||
"fields":[{"name":"producerIndex"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.util.internal.shaded.org.jctools.queues.MpmcArrayQueueConsumerIndexField",
|
||||
"fields":[{"name":"consumerIndex"}]
|
||||
},
|
||||
{
|
||||
"name":"io.netty.util.internal.shaded.org.jctools.queues.MpmcArrayQueueProducerIndexField",
|
||||
"fields":[{"name":"producerIndex"}]
|
||||
},
|
||||
{
|
||||
"name":"java.lang.Object",
|
||||
"allDeclaredFields":true,
|
||||
"queryAllDeclaredMethods":true
|
||||
},
|
||||
{
|
||||
"name":"java.lang.ProcessHandle",
|
||||
"methods":[{"name":"current","parameterTypes":[] }, {"name":"pid","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"java.lang.System",
|
||||
"methods":[{"name":"console","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"java.lang.Thread",
|
||||
"fields":[{"name":"threadLocalRandomProbe"}],
|
||||
"methods":[{"name":"isVirtual","parameterTypes":[] }]
|
||||
},
|
||||
{
|
||||
"name":"java.nio.Bits",
|
||||
"fields":[{"name":"MAX_MEMORY"}, {"name":"UNALIGNED"}]
|
||||
},
|
||||
{
|
||||
"name":"java.nio.Buffer",
|
||||
"fields":[{"name":"address"}]
|
||||
},
|
||||
{
|
||||
"name":"java.nio.ByteBuffer",
|
||||
"methods":[{"name":"alignedSlice","parameterTypes":["int"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.nio.DirectByteBuffer",
|
||||
"methods":[{"name":"<init>","parameterTypes":["long","long"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.nio.channels.spi.SelectorProvider",
|
||||
"methods":[{"name":"openServerSocketChannel","parameterTypes":["java.net.ProtocolFamily"] }, {"name":"openSocketChannel","parameterTypes":["java.net.ProtocolFamily"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.nio.file.Path"
|
||||
},
|
||||
{
|
||||
"name":"java.nio.file.Paths",
|
||||
"methods":[{"name":"get","parameterTypes":["java.lang.String","java.lang.String[]"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.security.AlgorithmParametersSpi"
|
||||
},
|
||||
{
|
||||
"name":"java.security.KeyStoreSpi"
|
||||
},
|
||||
{
|
||||
"name":"java.security.SecureRandomParameters"
|
||||
},
|
||||
{
|
||||
"name":"java.sql.Connection"
|
||||
},
|
||||
{
|
||||
"name":"java.sql.Driver"
|
||||
},
|
||||
{
|
||||
"name":"java.sql.DriverManager",
|
||||
"methods":[{"name":"getConnection","parameterTypes":["java.lang.String"] }, {"name":"getDriver","parameterTypes":["java.lang.String"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.sql.Time",
|
||||
"methods":[{"name":"<init>","parameterTypes":["long"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.sql.Timestamp",
|
||||
"methods":[{"name":"valueOf","parameterTypes":["java.lang.String"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.Duration",
|
||||
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.Instant",
|
||||
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.LocalDate",
|
||||
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.LocalDateTime",
|
||||
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.LocalTime",
|
||||
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.MonthDay",
|
||||
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.OffsetDateTime",
|
||||
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.OffsetTime",
|
||||
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.Period",
|
||||
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.Year",
|
||||
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.YearMonth",
|
||||
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.ZoneId",
|
||||
"methods":[{"name":"of","parameterTypes":["java.lang.String"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.ZoneOffset",
|
||||
"methods":[{"name":"of","parameterTypes":["java.lang.String"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.time.ZonedDateTime",
|
||||
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||
},
|
||||
{
|
||||
"name":"java.util.concurrent.ForkJoinTask",
|
||||
"fields":[{"name":"aux"}, {"name":"status"}]
|
||||
},
|
||||
{
|
||||
"name":"java.util.concurrent.atomic.AtomicBoolean",
|
||||
"fields":[{"name":"value"}]
|
||||
},
|
||||
{
|
||||
"name":"java.util.concurrent.atomic.AtomicReference",
|
||||
"fields":[{"name":"value"}]
|
||||
},
|
||||
{
|
||||
"name":"java.util.concurrent.atomic.Striped64",
|
||||
"fields":[{"name":"base"}, {"name":"cellsBusy"}]
|
||||
},
|
||||
{
|
||||
"name":"java.util.concurrent.atomic.Striped64$Cell",
|
||||
"fields":[{"name":"value"}]
|
||||
},
|
||||
{
|
||||
"name":"javax.security.auth.x500.X500Principal",
|
||||
"fields":[{"name":"thisX500Name"}],
|
||||
"methods":[{"name":"<init>","parameterTypes":["sun.security.x509.X500Name"] }]
|
||||
},
|
||||
{
|
||||
"name":"jdk.internal.misc.Unsafe",
|
||||
"methods":[{"name":"getUnsafe","parameterTypes":[] }]
|
||||
},
{"name":"net.woggioni.rbcs.api.CacheHandler","methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"]}]},
{"name":"net.woggioni.rbcs.cli.RemoteBuildCacheServerCli","allDeclaredFields":true,"queryAllDeclaredMethods":true},
{"name":"net.woggioni.rbcs.cli.RemoteBuildCacheServerCli$VersionProvider","allDeclaredFields":true,"queryAllDeclaredMethods":true,"methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"net.woggioni.rbcs.cli.impl.RbcsCommand","allDeclaredFields":true,"queryAllDeclaredMethods":true},
{"name":"net.woggioni.rbcs.cli.impl.commands.BenchmarkCommand","allDeclaredFields":true,"queryAllDeclaredMethods":true},
{"name":"net.woggioni.rbcs.cli.impl.commands.ClientCommand","allDeclaredFields":true,"queryAllDeclaredMethods":true},
{"name":"net.woggioni.rbcs.cli.impl.commands.GetCommand","allDeclaredFields":true,"queryAllDeclaredMethods":true},
{"name":"net.woggioni.rbcs.cli.impl.commands.HealthCheckCommand","allDeclaredFields":true,"queryAllDeclaredMethods":true},
{"name":"net.woggioni.rbcs.cli.impl.commands.PasswordHashCommand","allDeclaredFields":true,"queryAllDeclaredMethods":true},
{"name":"net.woggioni.rbcs.cli.impl.commands.PutCommand","allDeclaredFields":true,"queryAllDeclaredMethods":true},
{"name":"net.woggioni.rbcs.cli.impl.commands.ServerCommand","allDeclaredFields":true,"queryAllDeclaredMethods":true},
{"name":"net.woggioni.rbcs.cli.impl.converters.ByteSizeConverter","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"net.woggioni.rbcs.cli.impl.converters.DurationConverter","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"net.woggioni.rbcs.cli.impl.converters.OutputStreamConverter","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"net.woggioni.rbcs.client.RemoteBuildCacheClient$sendRequest$1$operationComplete$responseHandler$1","methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"]},{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"]},{"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"]}]},
{"name":"net.woggioni.rbcs.server.RemoteBuildCacheServer$HttpChunkContentCompressor","methods":[{"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"]}]},
{"name":"net.woggioni.rbcs.server.RemoteBuildCacheServer$NettyHttpBasicAuthenticator"},
{"name":"net.woggioni.rbcs.server.RemoteBuildCacheServer$ServerInitializer"},
{"name":"net.woggioni.rbcs.server.RemoteBuildCacheServer$ServerInitializer$initChannel$4","methods":[{"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"]}]},
{"name":"net.woggioni.rbcs.server.auth.AbstractNettyHttpAuthenticator","methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"]}]},
{"name":"net.woggioni.rbcs.server.cache.FileSystemCacheHandler","methods":[{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"]}]},
{"name":"net.woggioni.rbcs.server.cache.InMemoryCacheHandler","methods":[{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"]}]},
{"name":"net.woggioni.rbcs.server.exception.ExceptionHandler","methods":[{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"]}]},
{"name":"net.woggioni.rbcs.server.handler.MaxRequestSizeHandler","methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"]}]},
{"name":"net.woggioni.rbcs.server.handler.ReadTriggerDuplexHandler","methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"]},{"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"]}]},
{"name":"net.woggioni.rbcs.server.handler.ServerHandler","methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"]},{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"]},{"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"]}]},
{"name":"net.woggioni.rbcs.server.handler.TraceHandler","methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"]},{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"]}]},
{"name":"net.woggioni.rbcs.server.memcache.MemcacheCacheHandler","methods":[{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"]}]},
{"name":"net.woggioni.rbcs.server.memcache.client.MemcacheClient$sendRequest$1$operationComplete$handler$1","methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"]},{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"]}]},
{"name":"net.woggioni.rbcs.server.throttling.ThrottlingHandler","methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"]}]},
{"name":"sun.misc.Unsafe","fields":[{"name":"theUnsafe"}],"methods":[{"name":"copyMemory","parameterTypes":["java.lang.Object","long","java.lang.Object","long","long"]},{"name":"getAndAddLong","parameterTypes":["java.lang.Object","long","long"]},{"name":"getAndSetObject","parameterTypes":["java.lang.Object","long","java.lang.Object"]},{"name":"invokeCleaner","parameterTypes":["java.nio.ByteBuffer"]}]},
{"name":"sun.nio.ch.SelectorImpl","fields":[{"name":"publicSelectedKeys"},{"name":"selectedKeys"}]},
{"name":"sun.security.pkcs12.PKCS12KeyStore","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.pkcs12.PKCS12KeyStore$DualFormatPKCS12","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.provider.DSA$SHA224withDSA","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.provider.DSA$SHA256withDSA","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.provider.JavaKeyStore$JKS","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.provider.MD5","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.provider.NativePRNG","methods":[{"name":"<init>","parameterTypes":[]},{"name":"<init>","parameterTypes":["java.security.SecureRandomParameters"]}]},
{"name":"sun.security.provider.NativePRNG$NonBlocking","methods":[{"name":"<init>","parameterTypes":[]},{"name":"<init>","parameterTypes":["java.security.SecureRandomParameters"]}]},
{"name":"sun.security.provider.SHA","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.provider.SHA2$SHA224","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.provider.SHA2$SHA256","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.provider.SHA5$SHA384","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.provider.SHA5$SHA512","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.provider.X509Factory","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.rsa.PSSParameters","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.rsa.RSAKeyFactory$Legacy","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.rsa.RSAPSSSignature","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.rsa.RSASignature$SHA224withRSA","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.ssl.KeyManagerFactoryImpl$SunX509","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.ssl.SSLContextImpl$DefaultSSLContext","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.ssl.SSLContextImpl$TLSContext","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.ssl.TrustManagerFactoryImpl$PKIXFactory","methods":[{"name":"<init>","parameterTypes":[]}]},
{"name":"sun.security.x509.AuthorityInfoAccessExtension","methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"]}]},
{"name":"sun.security.x509.AuthorityKeyIdentifierExtension","methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"]}]},
{"name":"sun.security.x509.BasicConstraintsExtension","methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"]}]},
{"name":"sun.security.x509.CRLDistributionPointsExtension","methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"]}]},
{"name":"sun.security.x509.CertificatePoliciesExtension","methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"]}]},
{"name":"sun.security.x509.KeyUsageExtension","methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"]}]},
{"name":"sun.security.x509.NetscapeCertTypeExtension","methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"]}]},
{"name":"sun.security.x509.PrivateKeyUsageExtension","methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"]}]},
{"name":"sun.security.x509.SubjectAlternativeNameExtension","methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"]}]},
{"name":"sun.security.x509.SubjectKeyIdentifierExtension","methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"]}]}
]
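The reflection entries above cover the Netty handler callbacks and the JDK security classes that the CLI reaches through reflection at run time. As a minimal sketch of the kind of lookup such an entry enables (illustrative only, not part of this change), in Kotlin:

// Illustrative: this lookup only succeeds inside the native image because the
// matching "methods" entry for CacheHandler.channelRead exists in reflect-config.json.
val handlerClass = Class.forName("net.woggioni.rbcs.api.CacheHandler")
val channelRead = handlerClass.getMethod(
    "channelRead",
    io.netty.channel.ChannelHandlerContext::class.java,
    Any::class.java
)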
44
rbcs-cli/native-image/resource-config.json
Normal file
@@ -0,0 +1,44 @@
{
  "resources":{
    "includes":[
      {"pattern":"\\QMETA-INF/MANIFEST.MF\\E"},
      {"pattern":"\\QMETA-INF/services/ch.qos.logback.classic.spi.Configurator\\E"},
      {"pattern":"\\QMETA-INF/services/java.lang.System$LoggerFinder\\E"},
      {"pattern":"\\QMETA-INF/services/java.net.spi.InetAddressResolverProvider\\E"},
      {"pattern":"\\QMETA-INF/services/java.net.spi.URLStreamHandlerProvider\\E"},
      {"pattern":"\\QMETA-INF/services/java.nio.channels.spi.SelectorProvider\\E"},
      {"pattern":"\\QMETA-INF/services/java.time.zone.ZoneRulesProvider\\E"},
      {"pattern":"\\QMETA-INF/services/javax.xml.parsers.DocumentBuilderFactory\\E"},
      {"pattern":"\\QMETA-INF/services/javax.xml.parsers.SAXParserFactory\\E"},
      {"pattern":"\\QMETA-INF/services/net.woggioni.rbcs.api.CacheProvider\\E"},
      {"pattern":"\\QMETA-INF/services/org.slf4j.spi.SLF4JServiceProvider\\E"},
      {"pattern":"\\Qclasspath:net/woggioni/rbcs/cli/logback.xml\\E"},
      {"pattern":"\\Qlogback-test.scmo\\E"},
      {"pattern":"\\Qlogback.scmo\\E"},
      {"pattern":"\\Qnet/woggioni/rbcs/cli/logback.xml\\E"},
      {"pattern":"\\Qnet/woggioni/rbcs/client/schema/rbcs-client.xsd\\E"},
      {"pattern":"\\Qnet/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd\\E"},
      {"pattern":"\\Qnet/woggioni/rbcs/server/schema/rbcs-server.xsd\\E"}
    ]
  },
  "bundles":[
    {
      "name":"com.sun.org.apache.xerces.internal.impl.xpath.regex.message",
      "locales":[""]
    }
  ]
}
14
rbcs-cli/native-image/serialization-config.json
Normal file
@@ -0,0 +1,14 @@
{
  "types":[
    {"name":"java.lang.String"},
    {"name":"net.woggioni.rbcs.api.CacheValueMetadata"}
  ],
  "lambdaCapturingTypes":[],
  "proxies":[]
}
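serialization-config.json registers the types that may pass through Java object serialization inside the native image; writing an unregistered type fails at run time. A minimal, hypothetical sketch of the kind of call these two entries allow (the helper below is not part of the repository):

import java.io.ByteArrayOutputStream
import java.io.ObjectOutputStream

// Hypothetical helper: writing a String or a CacheValueMetadata through
// ObjectOutputStream works in the native image only because both types are
// listed in serialization-config.json above.
fun toBytes(value: Any): ByteArray =
    ByteArrayOutputStream().also { buffer ->
        ObjectOutputStream(buffer).use { it.writeObject(value) }
    }.toByteArray()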
@@ -0,0 +1,175 @@
package net.woggioni.rbcs.cli.graal

import java.io.ByteArrayInputStream
import java.net.URI
import java.nio.file.Path
import java.time.Duration
import java.time.temporal.ChronoUnit
import java.util.concurrent.ExecutionException
import java.util.zip.Deflater
import net.woggioni.jwo.NullOutputStream
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.api.Configuration.User
import net.woggioni.rbcs.api.Role
import net.woggioni.rbcs.cli.RemoteBuildCacheServerCli
import net.woggioni.rbcs.cli.impl.commands.BenchmarkCommand
import net.woggioni.rbcs.cli.impl.commands.GetCommand
import net.woggioni.rbcs.cli.impl.commands.HealthCheckCommand
import net.woggioni.rbcs.cli.impl.commands.PutCommand
import net.woggioni.rbcs.client.Configuration as ClientConfiguration
import net.woggioni.rbcs.client.impl.Parser as ClientConfigurationParser
import net.woggioni.rbcs.common.HostAndPort
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
import net.woggioni.rbcs.common.RBCS
import net.woggioni.rbcs.common.Xml
import net.woggioni.rbcs.server.RemoteBuildCacheServer
import net.woggioni.rbcs.server.cache.FileSystemCacheConfiguration
import net.woggioni.rbcs.server.cache.InMemoryCacheConfiguration
import net.woggioni.rbcs.server.configuration.Parser
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration

object GraalNativeImageConfiguration {
    @JvmStatic
    fun main(vararg args : String) {

        val serverURL = URI.create("file:conf/rbcs-server.xml").toURL()
        val serverDoc = serverURL.openStream().use {
            Xml.parseXml(serverURL, it)
        }
        Parser.parse(serverDoc)

        val url = URI.create("file:conf/rbcs-client.xml").toURL()
        val clientDoc = url.openStream().use {
            Xml.parseXml(url, it)
        }
        ClientConfigurationParser.parse(clientDoc)

        val PASSWORD = "password"
        val readersGroup = Configuration.Group("readers", setOf(Role.Reader, Role.Healthcheck), null, null)
        val writersGroup = Configuration.Group("writers", setOf(Role.Writer), null, null)

        val users = listOf(
            User("user1", hashPassword(PASSWORD), setOf(readersGroup), null),
            User("user2", hashPassword(PASSWORD), setOf(writersGroup), null),
            User("user3", hashPassword(PASSWORD), setOf(readersGroup, writersGroup), null),
            User("", null, setOf(readersGroup), null),
            User("user4", hashPassword(PASSWORD), setOf(readersGroup),
                Configuration.Quota(1, Duration.of(1, ChronoUnit.DAYS), 0, 1)
            ),
            User("user5", hashPassword(PASSWORD), setOf(readersGroup),
                Configuration.Quota(1, Duration.of(5, ChronoUnit.SECONDS), 0, 1)
            )
        )

        val serverPort = RBCS.getFreePort()

        val caches = listOf<Configuration.Cache>(
            InMemoryCacheConfiguration(
                maxAge = Duration.ofSeconds(3600),
                digestAlgorithm = "MD5",
                compressionLevel = Deflater.DEFAULT_COMPRESSION,
                compressionEnabled = false,
                maxSize = 0x1000000,
            ),
            FileSystemCacheConfiguration(
                Path.of(System.getProperty("java.io.tmpdir")).resolve("rbcs"),
                maxAge = Duration.ofSeconds(3600),
                digestAlgorithm = "MD5",
                compressionLevel = Deflater.DEFAULT_COMPRESSION,
                compressionEnabled = false,
            ),
            MemcacheCacheConfiguration(
                listOf(MemcacheCacheConfiguration.Server(
                    HostAndPort("127.0.0.1", 11211),
                    1000,
                    4)
                ),
                Duration.ofSeconds(60),
                "someCustomPrefix",
                "MD5",
                null,
                1,
            )
        )

        for (cache in caches) {
            val serverConfiguration = Configuration(
                "127.0.0.1",
                serverPort,
                100,
                null,
                Configuration.EventExecutor(true),
                Configuration.RateLimiter(
                    false, 0x100000, 10
                ),
                Configuration.Connection(
                    Duration.ofSeconds(10),
                    Duration.ofSeconds(15),
                    Duration.ofSeconds(15),
                    0x10000,
                    0x1000
                ),
                users.asSequence().map { it.name to it }.toMap(),
                sequenceOf(writersGroup, readersGroup).map { it.name to it }.toMap(),
                cache,
                Configuration.BasicAuthentication(),
                null,
            )

            val serverHandle = RemoteBuildCacheServer(serverConfiguration).run()

            val clientProfile = ClientConfiguration.Profile(
                URI.create("http://127.0.0.1:$serverPort/"),
                ClientConfiguration.Connection(
                    Duration.ofSeconds(5),
                    Duration.ofSeconds(5),
                    Duration.ofSeconds(7),
                    true,
                ),
                ClientConfiguration.Authentication.BasicAuthenticationCredentials("user3", PASSWORD),
                Duration.ofSeconds(3),
                10,
                true,
                ClientConfiguration.RetryPolicy(
                    3,
                    1000,
                    1.2
                ),
                ClientConfiguration.TrustStore(null, null, false, false)
            )

            HealthCheckCommand.execute(clientProfile)

            BenchmarkCommand.execute(
                clientProfile,
                1000,
                0x100,
                true
            )

            PutCommand.execute(
                clientProfile,
                "some-file.bin",
                ByteArrayInputStream(ByteArray(0x1000) { it.toByte() }),
                "application/octet-stream",
                "attachment; filename=\"some-file.bin\""
            )

            GetCommand.execute(
                clientProfile,
                "some-file.bin",
                NullOutputStream()
            )

            serverHandle.sendShutdownSignal()
            try {
                serverHandle.get()
            } catch (ee : ExecutionException) {
                // ignored: this driver only needs to exercise the shutdown code path
            }
        }
        System.setProperty("net.woggioni.rbcs.conf.dir", System.getProperty("gradle.tmp.dir"))
        RemoteBuildCacheServerCli.createCommandLine().execute("--version")
        RemoteBuildCacheServerCli.createCommandLine().execute("server", "-t", "PT10S")
    }
}
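GraalNativeImageConfiguration deliberately exercises every command and every cache backend so that the native-image tracing agent can observe the reflection, resource and serialization accesses and emit the JSON files above. One plausible way to run it for that purpose is sketched below; the agent flag is standard GraalVM, but the build wiring is not part of this diff and the output directory is only an assumption:

// Assumption: a GraalVM JDK with the tracing agent available; config-output-dir points
// at the rbcs-cli/native-image directory that holds the JSON files added in this change.
fun regenerateNativeImageConfig() {
    ProcessBuilder(
        "java",
        "-agentlib:native-image-agent=config-output-dir=rbcs-cli/native-image",
        "-cp", System.getProperty("java.class.path"),
        "net.woggioni.rbcs.cli.graal.GraalNativeImageConfiguration"
    ).inheritIO().start().waitFor()
}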
@@ -1,5 +1,6 @@
|
||||
package net.woggioni.rbcs.cli
|
||||
|
||||
import net.woggioni.jwo.Application
|
||||
import net.woggioni.rbcs.cli.impl.AbstractVersionProvider
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.cli.impl.commands.BenchmarkCommand
|
||||
@@ -10,8 +11,7 @@ import net.woggioni.rbcs.cli.impl.commands.PasswordHashCommand
|
||||
import net.woggioni.rbcs.cli.impl.commands.PutCommand
|
||||
import net.woggioni.rbcs.cli.impl.commands.ServerCommand
|
||||
import net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import net.woggioni.jwo.Application
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import picocli.CommandLine
|
||||
import picocli.CommandLine.Model.CommandSpec
|
||||
|
||||
@@ -23,15 +23,20 @@ class RemoteBuildCacheServerCli : RbcsCommand() {
|
||||
|
||||
class VersionProvider : AbstractVersionProvider()
|
||||
companion object {
|
||||
@JvmStatic
|
||||
fun main(vararg args: String) {
|
||||
private fun setPropertyIfNotPresent(key: String, value: String) {
|
||||
System.getProperty(key) ?: System.setProperty(key, value)
|
||||
}
|
||||
|
||||
fun createCommandLine() : CommandLine {
|
||||
setPropertyIfNotPresent("logback.configurationFile", "net/woggioni/rbcs/cli/logback.xml")
|
||||
setPropertyIfNotPresent("io.netty.leakDetectionLevel", "DISABLED")
|
||||
val currentClassLoader = RemoteBuildCacheServerCli::class.java.classLoader
|
||||
Thread.currentThread().contextClassLoader = currentClassLoader
|
||||
if(currentClassLoader.javaClass.name == "net.woggioni.envelope.loader.ModuleClassLoader") {
|
||||
//We're running in an envelope jar and custom URL protocols won't work
|
||||
RbcsUrlStreamHandlerFactory.install()
|
||||
}
|
||||
val log = contextLogger()
|
||||
val log = createLogger<RemoteBuildCacheServerCli>()
|
||||
val app = Application.builder("rbcs")
|
||||
.configurationDirectoryEnvVar("RBCS_CONFIGURATION_DIR")
|
||||
.configurationDirectoryPropertyKey("net.woggioni.rbcs.conf.dir")
|
||||
@@ -51,7 +56,12 @@ class RemoteBuildCacheServerCli : RbcsCommand() {
|
||||
addSubcommand(GetCommand())
|
||||
addSubcommand(HealthCheckCommand())
|
||||
})
|
||||
System.exit(commandLine.execute(*args))
|
||||
return commandLine
|
||||
}
|
||||
|
||||
@JvmStatic
|
||||
fun main(vararg args: String) {
|
||||
System.exit(createCommandLine().execute(*args))
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -1,9 +1,9 @@
|
||||
package net.woggioni.rbcs.cli.impl
|
||||
|
||||
import picocli.CommandLine
|
||||
import java.util.jar.Attributes
|
||||
import java.util.jar.JarFile
|
||||
import java.util.jar.Manifest
|
||||
import picocli.CommandLine
|
||||
|
||||
|
||||
abstract class AbstractVersionProvider : CommandLine.IVersionProvider {
|
||||
|
@@ -1,8 +1,8 @@
|
||||
package net.woggioni.rbcs.cli.impl
|
||||
|
||||
import java.nio.file.Path
|
||||
import net.woggioni.jwo.Application
|
||||
import picocli.CommandLine
|
||||
import java.nio.file.Path
|
||||
|
||||
|
||||
abstract class RbcsCommand : Runnable {
|
||||
@@ -12,7 +12,7 @@ abstract class RbcsCommand : Runnable {
|
||||
private set
|
||||
|
||||
protected fun findConfigurationFile(app: Application, fileName : String): Path {
|
||||
val confDir = app.computeConfigurationDirectory()
|
||||
val confDir = app.computeConfigurationDirectory(false)
|
||||
val configurationFile = confDir.resolve(fileName)
|
||||
return configurationFile
|
||||
}
|
||||
|
@@ -1,19 +1,25 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import net.woggioni.rbcs.common.error
|
||||
import net.woggioni.rbcs.common.info
|
||||
import net.woggioni.jwo.JWO
|
||||
import picocli.CommandLine
|
||||
import java.security.SecureRandom
|
||||
import java.time.Duration
|
||||
import java.time.Instant
|
||||
import java.time.temporal.ChronoUnit
|
||||
import java.util.concurrent.LinkedBlockingQueue
|
||||
import java.util.concurrent.Semaphore
|
||||
import java.util.concurrent.atomic.AtomicLong
|
||||
import kotlin.random.Random
|
||||
import net.woggioni.jwo.JWO
|
||||
import net.woggioni.jwo.LongMath
|
||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.cli.impl.converters.ByteSizeConverter
|
||||
import net.woggioni.rbcs.client.Configuration
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import net.woggioni.rbcs.common.debug
|
||||
import net.woggioni.rbcs.common.error
|
||||
import net.woggioni.rbcs.common.info
|
||||
import picocli.CommandLine
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "benchmark",
|
||||
@@ -21,7 +27,137 @@ import kotlin.random.Random
|
||||
showDefaultValues = true
|
||||
)
|
||||
class BenchmarkCommand : RbcsCommand() {
|
||||
private val log = contextLogger()
|
||||
companion object {
|
||||
private val log = createLogger<BenchmarkCommand>()
|
||||
|
||||
fun execute(profile : Configuration.Profile,
|
||||
numberOfEntries : Int,
|
||||
entrySize : Int,
|
||||
useRandomValue : Boolean,
|
||||
) {
|
||||
val progressThreshold = LongMath.ceilDiv(numberOfEntries.toLong(), 20)
|
||||
RemoteBuildCacheClient(profile).use { client ->
|
||||
val entryGenerator = sequence {
|
||||
val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
|
||||
while (true) {
|
||||
val key = JWO.bytesToHex(random.nextBytes(16))
|
||||
val value = if (useRandomValue) {
|
||||
random.nextBytes(entrySize)
|
||||
} else {
|
||||
val byteValue = random.nextInt().toByte()
|
||||
ByteArray(entrySize) { _ -> byteValue }
|
||||
}
|
||||
yield(key to value)
|
||||
}
|
||||
}
|
||||
|
||||
log.info {
|
||||
"Starting insertion"
|
||||
}
|
||||
val entries = let {
|
||||
val completionCounter = AtomicLong(0)
|
||||
val completionQueue = LinkedBlockingQueue<Pair<String, ByteArray>>(numberOfEntries)
|
||||
val start = Instant.now()
|
||||
val semaphore = Semaphore(profile.maxConnections * 5)
|
||||
val iterator = entryGenerator.take(numberOfEntries).iterator()
|
||||
while (completionCounter.get() < numberOfEntries) {
|
||||
if (iterator.hasNext()) {
|
||||
val entry = iterator.next()
|
||||
semaphore.acquire()
|
||||
val future =
|
||||
client.put(entry.first, entry.second, CacheValueMetadata(null, null)).thenApply { entry }
|
||||
future.whenComplete { result, ex ->
|
||||
if (ex != null) {
|
||||
log.error(ex.message, ex)
|
||||
} else {
|
||||
completionQueue.put(result)
|
||||
}
|
||||
semaphore.release()
|
||||
val completed = completionCounter.incrementAndGet()
|
||||
if (completed.mod(progressThreshold) == 0L) {
|
||||
log.debug {
|
||||
"Inserted $completed / $numberOfEntries"
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Thread.sleep(Duration.of(500, ChronoUnit.MILLIS))
|
||||
}
|
||||
}
|
||||
|
||||
val inserted = completionQueue.toList()
|
||||
val end = Instant.now()
|
||||
log.info {
|
||||
val elapsed = Duration.between(start, end).toMillis()
|
||||
val opsPerSecond = String.format("%.2f", numberOfEntries.toDouble() / elapsed * 1000)
|
||||
"Insertion rate: $opsPerSecond ops/s"
|
||||
}
|
||||
inserted
|
||||
}
|
||||
log.info {
|
||||
"Inserted ${entries.size} entries"
|
||||
}
|
||||
log.info {
|
||||
"Starting retrieval"
|
||||
}
|
||||
if (entries.isNotEmpty()) {
|
||||
val errorCounter = AtomicLong(0)
|
||||
val completionCounter = AtomicLong(0)
|
||||
val semaphore = Semaphore(profile.maxConnections * 5)
|
||||
val start = Instant.now()
|
||||
val it = entries.iterator()
|
||||
while (completionCounter.get() < entries.size) {
|
||||
if (it.hasNext()) {
|
||||
val entry = it.next()
|
||||
semaphore.acquire()
|
||||
val future = client.get(entry.first).handle { response, ex ->
|
||||
if(ex != null) {
|
||||
errorCounter.incrementAndGet()
|
||||
log.error(ex.message, ex)
|
||||
} else if (response == null) {
|
||||
errorCounter.incrementAndGet()
|
||||
log.error {
|
||||
"Missing entry for key '${entry.first}'"
|
||||
}
|
||||
} else if (!entry.second.contentEquals(response)) {
|
||||
errorCounter.incrementAndGet()
|
||||
log.error {
|
||||
"Retrieved a value different from what was inserted for key '${entry.first}': " +
|
||||
"expected '${JWO.bytesToHex(entry.second)}', got '${JWO.bytesToHex(response)}' instead"
|
||||
}
|
||||
}
|
||||
}
|
||||
future.whenComplete { _, _ ->
|
||||
val completed = completionCounter.incrementAndGet()
|
||||
if (completed.mod(progressThreshold) == 0L) {
|
||||
log.debug {
|
||||
"Retrieved $completed / ${entries.size}"
|
||||
}
|
||||
}
|
||||
semaphore.release()
|
||||
}
|
||||
} else {
|
||||
Thread.sleep(Duration.of(500, ChronoUnit.MILLIS))
|
||||
}
|
||||
}
|
||||
val end = Instant.now()
|
||||
val errors = errorCounter.get()
|
||||
val successfulRetrievals = entries.size - errors
|
||||
val successRate = successfulRetrievals.toDouble() / entries.size
|
||||
log.info {
|
||||
"Successfully retrieved ${entries.size - errors}/${entries.size} (${String.format("%.1f", successRate * 100)}%)"
|
||||
}
|
||||
log.info {
|
||||
val elapsed = Duration.between(start, end).toMillis()
|
||||
val opsPerSecond = String.format("%.2f", entries.size.toDouble() / elapsed * 1000)
|
||||
"Retrieval rate: $opsPerSecond ops/s"
|
||||
}
|
||||
} else {
|
||||
log.error("Skipping retrieval benchmark as it was not possible to insert any entry in the cache")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@CommandLine.Spec
|
||||
private lateinit var spec: CommandLine.Model.CommandSpec
|
||||
@@ -36,107 +172,28 @@ class BenchmarkCommand : RbcsCommand() {
|
||||
@CommandLine.Option(
|
||||
names = ["-s", "--size"],
|
||||
description = ["Size of a cache value in bytes"],
|
||||
paramLabel = "SIZE"
|
||||
paramLabel = "SIZE",
|
||||
converter = [ByteSizeConverter::class]
|
||||
)
|
||||
private var size = 0x1000
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-r", "--random"],
|
||||
description = ["Insert completely random byte values"]
|
||||
)
|
||||
private var randomValues = false
|
||||
|
||||
override fun run() {
|
||||
val clientCommand = spec.parent().userObject() as ClientCommand
|
||||
val profile = clientCommand.profileName.let { profileName ->
|
||||
clientCommand.configuration.profiles[profileName]
|
||||
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
|
||||
}
|
||||
RemoteBuildCacheClient(profile).use { client ->
|
||||
|
||||
val entryGenerator = sequence {
|
||||
val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
|
||||
while (true) {
|
||||
val key = JWO.bytesToHex(random.nextBytes(16))
|
||||
val content = random.nextInt().toByte()
|
||||
val value = ByteArray(size, { _ -> content })
|
||||
yield(key to value)
|
||||
}
|
||||
}
|
||||
|
||||
log.info {
|
||||
"Starting insertion"
|
||||
}
|
||||
val entries = let {
|
||||
val completionCounter = AtomicLong(0)
|
||||
val completionQueue = LinkedBlockingQueue<Pair<String, ByteArray>>(numberOfEntries)
|
||||
val start = Instant.now()
|
||||
val semaphore = Semaphore(profile.maxConnections * 3)
|
||||
val iterator = entryGenerator.take(numberOfEntries).iterator()
|
||||
while (completionCounter.get() < numberOfEntries) {
|
||||
if (iterator.hasNext()) {
|
||||
val entry = iterator.next()
|
||||
semaphore.acquire()
|
||||
val future = client.put(entry.first, entry.second).thenApply { entry }
|
||||
future.whenComplete { result, ex ->
|
||||
if (ex != null) {
|
||||
log.error(ex.message, ex)
|
||||
} else {
|
||||
completionQueue.put(result)
|
||||
}
|
||||
semaphore.release()
|
||||
completionCounter.incrementAndGet()
|
||||
}
|
||||
} else {
|
||||
Thread.sleep(0)
|
||||
}
|
||||
}
|
||||
|
||||
val inserted = completionQueue.toList()
|
||||
val end = Instant.now()
|
||||
log.info {
|
||||
val elapsed = Duration.between(start, end).toMillis()
|
||||
val opsPerSecond = String.format("%.2f", numberOfEntries.toDouble() / elapsed * 1000)
|
||||
"Insertion rate: $opsPerSecond ops/s"
|
||||
}
|
||||
inserted
|
||||
}
|
||||
log.info {
|
||||
"Inserted ${entries.size} entries"
|
||||
}
|
||||
log.info {
|
||||
"Starting retrieval"
|
||||
}
|
||||
if (entries.isNotEmpty()) {
|
||||
val completionCounter = AtomicLong(0)
|
||||
val semaphore = Semaphore(profile.maxConnections * 3)
|
||||
val start = Instant.now()
|
||||
val it = entries.iterator()
|
||||
while (completionCounter.get() < entries.size) {
|
||||
if (it.hasNext()) {
|
||||
val entry = it.next()
|
||||
val future = client.get(entry.first).thenApply {
|
||||
if (it == null) {
|
||||
log.error {
|
||||
"Missing entry for key '${entry.first}'"
|
||||
}
|
||||
} else if (!entry.second.contentEquals(it)) {
|
||||
log.error {
|
||||
"Retrieved a value different from what was inserted for key '${entry.first}'"
|
||||
}
|
||||
}
|
||||
}
|
||||
future.whenComplete { _, _ ->
|
||||
completionCounter.incrementAndGet()
|
||||
semaphore.release()
|
||||
}
|
||||
} else {
|
||||
Thread.sleep(0)
|
||||
}
|
||||
}
|
||||
val end = Instant.now()
|
||||
log.info {
|
||||
val elapsed = Duration.between(start, end).toMillis()
|
||||
val opsPerSecond = String.format("%.2f", entries.size.toDouble() / elapsed * 1000)
|
||||
"Retrieval rate: $opsPerSecond ops/s"
|
||||
}
|
||||
} else {
|
||||
log.error("Skipping retrieval benchmark as it was not possible to insert any entry in the cache")
|
||||
}
|
||||
}
|
||||
execute(
|
||||
profile,
|
||||
numberOfEntries,
|
||||
size,
|
||||
randomValues
|
||||
)
|
||||
}
|
||||
}
|
@@ -1,10 +1,12 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.jwo.Application
|
||||
import picocli.CommandLine
|
||||
import java.nio.file.Path
|
||||
import net.woggioni.jwo.Application
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.client.Configuration
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import net.woggioni.rbcs.common.debug
|
||||
import picocli.CommandLine
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "client",
|
||||
@@ -24,15 +26,20 @@ class ClientCommand(app : Application) : RbcsCommand() {
|
||||
names = ["-p", "--profile"],
|
||||
description = ["Name of the client profile to be used"],
|
||||
paramLabel = "PROFILE",
|
||||
required = true
|
||||
required = false
|
||||
)
|
||||
var profileName : String? = null
|
||||
get() = field ?: throw IllegalArgumentException("A profile name must be specified using the '-p' command line parameter")
|
||||
|
||||
val configuration : RemoteBuildCacheClient.Configuration by lazy {
|
||||
RemoteBuildCacheClient.Configuration.parse(configurationFile)
|
||||
val configuration : Configuration by lazy {
|
||||
Configuration.parse(configurationFile)
|
||||
}
|
||||
|
||||
override fun run() {
|
||||
val log = createLogger<ClientCommand>()
|
||||
log.debug {
|
||||
"Using configuration file '$configurationFile'"
|
||||
}
|
||||
println("Available profiles:")
|
||||
configuration.profiles.forEach { (profileName, _) ->
|
||||
println(profileName)
|
||||
|
@@ -1,11 +1,13 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import picocli.CommandLine
|
||||
import java.io.OutputStream
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.client.Configuration
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import picocli.CommandLine
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "get",
|
||||
@@ -13,7 +15,21 @@ import java.nio.file.Path
|
||||
showDefaultValues = true
|
||||
)
|
||||
class GetCommand : RbcsCommand() {
|
||||
private val log = contextLogger()
|
||||
companion object {
|
||||
private val log = createLogger<GetCommand>()
|
||||
|
||||
fun execute(profile : Configuration.Profile, key : String, outputStream: OutputStream) {
|
||||
RemoteBuildCacheClient(profile).use { client ->
|
||||
client.get(key).thenApply { value ->
|
||||
value?.let {
|
||||
outputStream.use {
|
||||
it.write(value)
|
||||
}
|
||||
} ?: throw NoSuchElementException("No value found for key $key")
|
||||
}.get()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@CommandLine.Spec
|
||||
private lateinit var spec: CommandLine.Model.CommandSpec
|
||||
@@ -38,14 +54,6 @@ class GetCommand : RbcsCommand() {
|
||||
clientCommand.configuration.profiles[profileName]
|
||||
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
|
||||
}
|
||||
RemoteBuildCacheClient(profile).use { client ->
|
||||
client.get(key).thenApply { value ->
|
||||
value?.let {
|
||||
(output?.let(Files::newOutputStream) ?: System.out).use {
|
||||
it.write(value)
|
||||
}
|
||||
} ?: throw NoSuchElementException("No value found for key $key")
|
||||
}.get()
|
||||
}
|
||||
execute(profile, key, (output?.let(Files::newOutputStream) ?: System.out))
|
||||
}
|
||||
}
|
@@ -1,11 +1,12 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import picocli.CommandLine
|
||||
import java.security.SecureRandom
|
||||
import kotlin.random.Random
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.client.Configuration
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import picocli.CommandLine
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "health",
|
||||
@@ -13,7 +14,30 @@ import kotlin.random.Random
|
||||
showDefaultValues = true
|
||||
)
|
||||
class HealthCheckCommand : RbcsCommand() {
|
||||
private val log = contextLogger()
|
||||
companion object{
|
||||
private val log = createLogger<HealthCheckCommand>()
|
||||
|
||||
fun execute(profile : Configuration.Profile) {
|
||||
RemoteBuildCacheClient(profile).use { client ->
|
||||
val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
|
||||
val nonce = ByteArray(0xa0)
|
||||
random.nextBytes(nonce)
|
||||
client.healthCheck(nonce).thenApply { value ->
|
||||
if(value == null) {
|
||||
throw IllegalStateException("Empty response from server")
|
||||
}
|
||||
val offset = value.size - nonce.size
|
||||
for(i in 0 until nonce.size) {
|
||||
val a = nonce[i]
|
||||
val b = value[offset + i]
|
||||
if(a != b) {
|
||||
throw IllegalStateException("Server nonce does not match")
|
||||
}
|
||||
}
|
||||
}.get()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@CommandLine.Spec
|
||||
private lateinit var spec: CommandLine.Model.CommandSpec
|
||||
@@ -24,22 +48,6 @@ class HealthCheckCommand : RbcsCommand() {
|
||||
clientCommand.configuration.profiles[profileName]
|
||||
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
|
||||
}
|
||||
RemoteBuildCacheClient(profile).use { client ->
|
||||
val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
|
||||
val nonce = ByteArray(0xa0)
|
||||
random.nextBytes(nonce)
|
||||
client.healthCheck(nonce).thenApply { value ->
|
||||
if(value == null) {
|
||||
throw IllegalStateException("Empty response from server")
|
||||
}
|
||||
for(i in 0 until nonce.size) {
|
||||
for(j in value.size - nonce.size until nonce.size) {
|
||||
if(nonce[i] != value[j]) {
|
||||
throw IllegalStateException("Server nonce does not match")
|
||||
}
|
||||
}
|
||||
}
|
||||
}.get()
|
||||
}
|
||||
execute(profile)
|
||||
}
|
||||
}
|
@@ -1,13 +1,13 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.cli.impl.converters.OutputStreamConverter
|
||||
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
|
||||
import net.woggioni.jwo.UncloseableOutputStream
|
||||
import picocli.CommandLine
|
||||
import java.io.OutputStream
|
||||
import java.io.OutputStreamWriter
|
||||
import java.io.PrintWriter
|
||||
import net.woggioni.jwo.UncloseableOutputStream
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.cli.impl.converters.OutputStreamConverter
|
||||
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
|
||||
import picocli.CommandLine
|
||||
|
||||
|
||||
@CommandLine.Command(
|
||||
|
@@ -1,11 +1,18 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.cli.impl.converters.InputStreamConverter
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import picocli.CommandLine
|
||||
import java.io.InputStream
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.util.UUID
|
||||
import net.woggioni.jwo.Hash
|
||||
import net.woggioni.jwo.JWO
|
||||
import net.woggioni.jwo.NullOutputStream
|
||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.client.Configuration
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import picocli.CommandLine
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "put",
|
||||
@@ -13,25 +20,55 @@ import java.io.InputStream
|
||||
showDefaultValues = true
|
||||
)
|
||||
class PutCommand : RbcsCommand() {
|
||||
private val log = contextLogger()
|
||||
companion object {
|
||||
private val log = createLogger<PutCommand>()
|
||||
|
||||
fun execute(profile: Configuration.Profile,
|
||||
actualKey : String,
|
||||
inputStream: InputStream,
|
||||
mimeType : String?,
|
||||
contentDisposition: String?
|
||||
) {
|
||||
RemoteBuildCacheClient(profile).use { client ->
|
||||
inputStream.use {
|
||||
client.put(actualKey, it.readAllBytes(), CacheValueMetadata(contentDisposition, mimeType))
|
||||
}.get()
|
||||
println(profile.serverURI.resolve(actualKey))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@CommandLine.Spec
|
||||
private lateinit var spec: CommandLine.Model.CommandSpec
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-k", "--key"],
|
||||
description = ["The key for the new value"],
|
||||
description = ["The key for the new value, randomly generated if omitted"],
|
||||
paramLabel = "KEY"
|
||||
)
|
||||
private var key : String = ""
|
||||
private var key : String? = null
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-i", "--inline"],
|
||||
description = ["File is to be displayed in the browser"],
|
||||
paramLabel = "INLINE",
|
||||
)
|
||||
private var inline : Boolean = false
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-t", "--type"],
|
||||
description = ["File mime type"],
|
||||
paramLabel = "MIME_TYPE",
|
||||
)
|
||||
private var mimeType : String? = null
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-v", "--value"],
|
||||
description = ["Path to a file containing the value to be added (defaults to stdin)"],
|
||||
paramLabel = "VALUE_FILE",
|
||||
converter = [InputStreamConverter::class]
|
||||
)
|
||||
private var value : InputStream = System.`in`
|
||||
private var value : Path? = null
|
||||
|
||||
override fun run() {
|
||||
val clientCommand = spec.parent().userObject() as ClientCommand
|
||||
@@ -40,9 +77,37 @@ class PutCommand : RbcsCommand() {
|
||||
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
|
||||
}
|
||||
RemoteBuildCacheClient(profile).use { client ->
|
||||
value.use {
|
||||
client.put(key, it.readAllBytes())
|
||||
}.get()
|
||||
val inputStream : InputStream
|
||||
val mimeType : String?
|
||||
val contentDisposition : String?
|
||||
val valuePath = value
|
||||
val actualKey : String?
|
||||
if(valuePath != null) {
|
||||
inputStream = Files.newInputStream(valuePath)
|
||||
mimeType = this.mimeType ?: Files.probeContentType(valuePath)
|
||||
contentDisposition = if(inline) {
|
||||
"inline"
|
||||
} else {
|
||||
"attachment; filename=\"${valuePath.fileName}\""
|
||||
}
|
||||
actualKey = key ?: let {
|
||||
val md = Hash.Algorithm.SHA512.newInputStream(Files.newInputStream(valuePath)).use {
|
||||
JWO.copy(it, NullOutputStream())
|
||||
it.messageDigest
|
||||
}
|
||||
UUID.nameUUIDFromBytes(md.digest()).toString()
|
||||
}
|
||||
} else {
|
||||
inputStream = System.`in`
|
||||
mimeType = this.mimeType
|
||||
contentDisposition = if(inline) {
|
||||
"inline"
|
||||
} else {
|
||||
null
|
||||
}
|
||||
actualKey = key ?: UUID.randomUUID().toString()
|
||||
}
|
||||
execute(profile, actualKey, inputStream, mimeType, contentDisposition)
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,19 +1,20 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.cli.impl.converters.DurationConverter
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import net.woggioni.rbcs.common.debug
|
||||
import net.woggioni.rbcs.common.info
|
||||
import net.woggioni.rbcs.server.RemoteBuildCacheServer
|
||||
import net.woggioni.rbcs.server.RemoteBuildCacheServer.Companion.DEFAULT_CONFIGURATION_URL
|
||||
import net.woggioni.jwo.Application
|
||||
import net.woggioni.jwo.JWO
|
||||
import picocli.CommandLine
|
||||
import java.io.ByteArrayOutputStream
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.time.Duration
|
||||
import java.util.concurrent.TimeUnit
|
||||
import net.woggioni.jwo.Application
|
||||
import net.woggioni.jwo.JWO
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.cli.impl.converters.DurationConverter
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import net.woggioni.rbcs.common.debug
|
||||
import net.woggioni.rbcs.common.info
|
||||
import net.woggioni.rbcs.server.RemoteBuildCacheServer
|
||||
import net.woggioni.rbcs.server.RemoteBuildCacheServer.Companion.DEFAULT_CONFIGURATION_URL
|
||||
import picocli.CommandLine
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "server",
|
||||
@@ -21,8 +22,9 @@ import java.time.Duration
|
||||
showDefaultValues = true
|
||||
)
|
||||
class ServerCommand(app : Application) : RbcsCommand() {
|
||||
|
||||
private val log = contextLogger()
|
||||
companion object {
|
||||
private val log = createLogger<ServerCommand>()
|
||||
}
|
||||
|
||||
private fun createDefaultConfigurationFile(configurationFile: Path) {
|
||||
log.info {
|
||||
@@ -57,6 +59,9 @@ class ServerCommand(app : Application) : RbcsCommand() {
|
||||
createDefaultConfigurationFile(configurationFile)
|
||||
}
|
||||
|
||||
log.debug {
|
||||
"Using configuration file '$configurationFile'"
|
||||
}
|
||||
val configuration = RemoteBuildCacheServer.loadConfiguration(configurationFile)
|
||||
log.debug {
|
||||
ByteArrayOutputStream().also {
|
||||
@@ -66,11 +71,20 @@ class ServerCommand(app : Application) : RbcsCommand() {
|
||||
}
|
||||
}
|
||||
val server = RemoteBuildCacheServer(configuration)
|
||||
server.run().use { server ->
|
||||
timeout?.let {
|
||||
Thread.sleep(it)
|
||||
server.shutdown()
|
||||
val handle = server.run()
|
||||
val shutdownHook = Thread.ofPlatform().unstarted {
|
||||
handle.sendShutdownSignal()
|
||||
try {
|
||||
handle.get(60, TimeUnit.SECONDS)
|
||||
} catch (ex : Throwable) {
|
||||
log.warn(ex.message, ex)
|
||||
}
|
||||
}
|
||||
Runtime.getRuntime().addShutdownHook(shutdownHook)
|
||||
if(timeout != null) {
|
||||
Thread.sleep(timeout)
|
||||
handle.sendShutdownSignal()
|
||||
}
|
||||
handle.get()
|
||||
}
|
||||
}
|
@@ -0,0 +1,10 @@
package net.woggioni.rbcs.cli.impl.converters

import picocli.CommandLine


class ByteSizeConverter : CommandLine.ITypeConverter<Int> {
    override fun convert(value: String): Int {
        return Integer.decode(value)
    }
}
@@ -1,7 +1,7 @@
|
||||
package net.woggioni.rbcs.cli.impl.converters
|
||||
|
||||
import picocli.CommandLine
|
||||
import java.time.Duration
|
||||
import picocli.CommandLine
|
||||
|
||||
|
||||
class DurationConverter : CommandLine.ITypeConverter<Duration> {
|
||||
|
@@ -1,9 +1,9 @@
|
||||
package net.woggioni.rbcs.cli.impl.converters
|
||||
|
||||
import picocli.CommandLine
|
||||
import java.io.InputStream
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Paths
|
||||
import picocli.CommandLine
|
||||
|
||||
|
||||
class InputStreamConverter : CommandLine.ITypeConverter<InputStream> {
|
||||
|
@@ -1,9 +1,9 @@
|
||||
package net.woggioni.rbcs.cli.impl.converters
|
||||
|
||||
import picocli.CommandLine
|
||||
import java.io.OutputStream
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Paths
|
||||
import picocli.CommandLine
|
||||
|
||||
|
||||
class OutputStreamConverter : CommandLine.ITypeConverter<OutputStream> {
|
||||
|
@@ -6,9 +6,11 @@ plugins {
dependencies {
    implementation project(':rbcs-api')
    implementation project(':rbcs-common')
    implementation catalog.picocli
    implementation catalog.slf4j.api
    implementation catalog.netty.buffer
    implementation catalog.netty.handler
    implementation catalog.netty.transport
    implementation catalog.netty.common
    implementation catalog.netty.codec.http

    testRuntimeOnly catalog.logback.classic

@@ -0,0 +1,62 @@
package net.woggioni.rbcs.client

import java.net.URI
import java.nio.file.Files
import java.nio.file.Path
import java.security.PrivateKey
import java.security.cert.X509Certificate
import java.time.Duration
import net.woggioni.rbcs.client.impl.Parser
import net.woggioni.rbcs.common.Xml

data class Configuration(
    val profiles: Map<String, Profile>
) {
    sealed class Authentication {
        data class TlsClientAuthenticationCredentials(
            val key: PrivateKey,
            val certificateChain: Array<X509Certificate>
        ) : Authentication()

        data class BasicAuthenticationCredentials(val username: String, val password: String) : Authentication()
    }

    class TrustStore (
        var file: Path?,
        var password: String?,
        var checkCertificateStatus: Boolean = false,
        var verifyServerCertificate: Boolean = true,
    )

    class RetryPolicy(
        val maxAttempts: Int,
        val initialDelayMillis: Long,
        val exp: Double
    )

    class Connection(
        val readIdleTimeout: Duration,
        val writeIdleTimeout: Duration,
        val idleTimeout: Duration,
        val requestPipelining : Boolean,
    )

    data class Profile(
        val serverURI: URI,
        val connection: Connection,
        val authentication: Authentication?,
        val connectionTimeout: Duration?,
        val maxConnections: Int,
        val compressionEnabled: Boolean,
        val retryPolicy: RetryPolicy?,
        val tlsTruststore : TrustStore?
    )

    companion object {
        fun parse(path: Path): Configuration {
            return Files.newInputStream(path).use {
                Xml.parseXml(path.toUri().toURL(), it)
            }.let(Parser::parse)
        }
    }
}
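A short usage sketch of the client Configuration type above; the profile name "default" and the conf/rbcs-client.xml path are placeholders rather than values defined by this change:

import java.nio.file.Path
import net.woggioni.rbcs.client.Configuration

// Placeholder profile name and file path, for illustration only.
fun loadProfile(): Configuration.Profile {
    val configuration = Configuration.parse(Path.of("conf/rbcs-client.xml"))
    return configuration.profiles["default"]
        ?: throw IllegalArgumentException("Profile 'default' does not exist in configuration")
}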
@@ -7,8 +7,10 @@ import io.netty.channel.Channel
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import io.netty.channel.ChannelOption
|
||||
import io.netty.channel.ChannelPipeline
|
||||
import io.netty.channel.IoEventLoopGroup
|
||||
import io.netty.channel.MultiThreadIoEventLoopGroup
|
||||
import io.netty.channel.SimpleChannelInboundHandler
|
||||
import io.netty.channel.nio.NioEventLoopGroup
|
||||
import io.netty.channel.nio.NioIoHandler
|
||||
import io.netty.channel.pool.AbstractChannelPoolHandler
|
||||
import io.netty.channel.pool.ChannelPool
|
||||
import io.netty.channel.pool.FixedChannelPool
|
||||
@@ -28,75 +30,70 @@ import io.netty.handler.codec.http.HttpVersion
|
||||
import io.netty.handler.ssl.SslContext
|
||||
import io.netty.handler.ssl.SslContextBuilder
|
||||
import io.netty.handler.stream.ChunkedWriteHandler
|
||||
import io.netty.handler.timeout.IdleState
|
||||
import io.netty.handler.timeout.IdleStateEvent
|
||||
import io.netty.handler.timeout.IdleStateHandler
|
||||
import io.netty.util.concurrent.Future
|
||||
import io.netty.util.concurrent.Future as NettyFuture
|
||||
import io.netty.util.concurrent.GenericFutureListener
|
||||
import net.woggioni.rbcs.client.impl.Parser
|
||||
import net.woggioni.rbcs.common.Xml
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import net.woggioni.rbcs.common.debug
|
||||
import net.woggioni.rbcs.common.trace
|
||||
import java.io.IOException
|
||||
import java.net.InetSocketAddress
|
||||
import java.net.URI
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.security.PrivateKey
|
||||
import java.security.cert.X509Certificate
|
||||
import java.time.Duration
|
||||
import java.util.Base64
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.concurrent.TimeUnit
|
||||
import java.util.concurrent.TimeoutException
|
||||
import java.util.concurrent.atomic.AtomicInteger
|
||||
import io.netty.util.concurrent.Future as NettyFuture
|
||||
|
||||
import javax.net.ssl.TrustManagerFactory
|
||||
import javax.net.ssl.X509TrustManager
|
||||
import kotlin.random.Random
|
||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
||||
import net.woggioni.rbcs.common.RBCS.loadKeystore
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import net.woggioni.rbcs.common.debug
|
||||
import net.woggioni.rbcs.common.trace
|
||||
|
||||
class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoCloseable {
|
||||
private val group: NioEventLoopGroup
|
||||
private var sslContext: SslContext
|
||||
private val log = contextLogger()
|
||||
private val pool: ChannelPool
|
||||
|
||||
data class Configuration(
|
||||
val profiles: Map<String, Profile>
|
||||
) {
|
||||
sealed class Authentication {
|
||||
data class TlsClientAuthenticationCredentials(
|
||||
val key: PrivateKey,
|
||||
val certificateChain: Array<X509Certificate>
|
||||
) : Authentication()
|
||||
|
||||
data class BasicAuthenticationCredentials(val username: String, val password: String) : Authentication()
|
||||
}
|
||||
|
||||
class RetryPolicy(
|
||||
val maxAttempts: Int,
|
||||
val initialDelayMillis: Long,
|
||||
val exp: Double
|
||||
)
|
||||
|
||||
data class Profile(
|
||||
val serverURI: URI,
|
||||
val authentication: Authentication?,
|
||||
val connectionTimeout: Duration?,
|
||||
val maxConnections: Int,
|
||||
val retryPolicy: RetryPolicy?,
|
||||
)
|
||||
|
||||
companion object {
|
||||
fun parse(path: Path): Configuration {
|
||||
return Files.newInputStream(path).use {
|
||||
Xml.parseXml(path.toUri().toURL(), it)
|
||||
}.let(Parser::parse)
|
||||
}
|
||||
}
|
||||
companion object {
|
||||
private val log = createLogger<RemoteBuildCacheClient>()
|
||||
}
|
||||
|
||||
private val group: IoEventLoopGroup
|
||||
private val sslContext: SslContext
|
||||
private val pool: ChannelPool
|
||||
|
||||
init {
|
||||
group = NioEventLoopGroup()
|
||||
group = MultiThreadIoEventLoopGroup(NioIoHandler.newFactory())
|
||||
sslContext = SslContextBuilder.forClient().also { builder ->
|
||||
(profile.authentication as? Configuration.Authentication.TlsClientAuthenticationCredentials)?.let { tlsClientAuthenticationCredentials ->
|
||||
builder.keyManager(
|
||||
tlsClientAuthenticationCredentials.key,
|
||||
*tlsClientAuthenticationCredentials.certificateChain
|
||||
)
|
||||
builder.apply {
|
||||
keyManager(
|
||||
tlsClientAuthenticationCredentials.key,
|
||||
*tlsClientAuthenticationCredentials.certificateChain
|
||||
)
|
||||
profile.tlsTruststore?.let { trustStore ->
|
||||
if (!trustStore.verifyServerCertificate) {
|
||||
trustManager(object : X509TrustManager {
|
||||
override fun checkClientTrusted(certChain: Array<out X509Certificate>, p1: String?) {
|
||||
}
|
||||
|
||||
override fun checkServerTrusted(certChain: Array<out X509Certificate>, p1: String?) {
|
||||
}
|
||||
|
||||
override fun getAcceptedIssuers() = null
|
||||
})
|
||||
} else {
|
||||
trustStore.file?.let {
|
||||
val ts = loadKeystore(it, trustStore.password)
|
||||
val trustManagerFactory: TrustManagerFactory =
|
||||
TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm())
|
||||
trustManagerFactory.init(ts)
|
||||
trustManager(trustManagerFactory)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}.build()
|
||||
|
||||
@@ -141,18 +138,37 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
||||
}
|
||||
|
||||
override fun channelCreated(ch: Channel) {
|
||||
val connectionId = connectionCount.getAndIncrement()
|
||||
val connectionId = connectionCount.incrementAndGet()
|
||||
log.debug {
|
||||
"Created connection $connectionId, total number of active connections: $connectionId"
|
||||
"Created connection ${ch.id().asShortText()}, total number of active connections: $connectionId"
|
||||
}
|
||||
ch.closeFuture().addListener {
|
||||
val activeConnections = connectionCount.decrementAndGet()
|
||||
log.debug {
|
||||
"Closed connection $connectionId, total number of active connections: $activeConnections"
|
||||
"Closed connection ${
|
||||
ch.id().asShortText()
|
||||
}, total number of active connections: $activeConnections"
|
||||
}
|
||||
}
|
||||
val pipeline: ChannelPipeline = ch.pipeline()
|
||||
|
||||
profile.connection.also { conn ->
|
||||
val readIdleTimeout = conn.readIdleTimeout.toMillis()
|
||||
val writeIdleTimeout = conn.writeIdleTimeout.toMillis()
|
||||
val idleTimeout = conn.idleTimeout.toMillis()
|
||||
if (readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
|
||||
pipeline.addLast(
|
||||
IdleStateHandler(
|
||||
true,
|
||||
readIdleTimeout,
|
||||
writeIdleTimeout,
|
||||
idleTimeout,
|
||||
TimeUnit.MILLISECONDS
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Add SSL handler if needed
|
||||
if ("https".equals(scheme, ignoreCase = true)) {
|
||||
pipeline.addLast("ssl", sslContext.newHandler(ch.alloc(), host, port))
|
||||
@@ -160,7 +176,9 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
||||
|
||||
// HTTP handlers
|
||||
pipeline.addLast("codec", HttpClientCodec())
|
||||
pipeline.addLast("decompressor", HttpContentDecompressor())
|
||||
if (profile.compressionEnabled) {
|
||||
pipeline.addLast("decompressor", HttpContentDecompressor())
|
||||
}
|
||||
pipeline.addLast("aggregator", HttpObjectAggregator(134217728))
|
||||
pipeline.addLast("chunked", ChunkedWriteHandler())
|
||||
}
|
||||
@@ -206,6 +224,7 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
||||
retryPolicy.initialDelayMillis.toDouble(),
|
||||
retryPolicy.exp,
|
||||
outcomeHandler,
|
||||
Random.Default,
|
||||
operation
|
||||
)
|
||||
} else {
|
||||
@@ -235,27 +254,37 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
||||
fun get(key: String): CompletableFuture<ByteArray?> {
|
||||
return executeWithRetry {
|
||||
sendRequest(profile.serverURI.resolve(key), HttpMethod.GET, null)
|
||||
}.thenApply {
|
||||
val status = it.status()
|
||||
if (it.status() == HttpResponseStatus.NOT_FOUND) {
|
||||
}.thenApply { response ->
|
||||
val status = response.status()
|
||||
if (response.status() == HttpResponseStatus.NOT_FOUND) {
|
||||
response.release()
|
||||
null
|
||||
} else if (it.status() != HttpResponseStatus.OK) {
|
||||
} else if (response.status() != HttpResponseStatus.OK) {
|
||||
response.release()
|
||||
throw HttpException(status)
|
||||
} else {
|
||||
it.content()
|
||||
response.content().also {
|
||||
it.retain()
|
||||
response.release()
|
||||
}
|
||||
}
|
||||
}.thenApply { maybeByteBuf ->
|
||||
maybeByteBuf?.let {
|
||||
val result = ByteArray(it.readableBytes())
|
||||
it.getBytes(0, result)
|
||||
maybeByteBuf?.let { buf ->
|
||||
val result = ByteArray(buf.readableBytes())
|
||||
buf.getBytes(0, result)
|
||||
buf.release()
|
||||
result
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun put(key: String, content: ByteArray): CompletableFuture<Unit> {
|
||||
fun put(key: String, content: ByteArray, metadata: CacheValueMetadata): CompletableFuture<Unit> {
|
||||
return executeWithRetry {
|
||||
sendRequest(profile.serverURI.resolve(key), HttpMethod.PUT, content)
|
||||
val extraHeaders = sequenceOf(
|
||||
metadata.mimeType?.let { HttpHeaderNames.CONTENT_TYPE to it },
|
||||
metadata.contentDisposition?.let { HttpHeaderNames.CONTENT_DISPOSITION to it }
|
||||
).filterNotNull()
|
||||
sendRequest(profile.serverURI.resolve(key), HttpMethod.PUT, content, extraHeaders.asIterable())
|
||||
}.thenApply {
|
||||
val status = it.status()
|
||||
if (it.status() != HttpResponseStatus.CREATED && it.status() != HttpResponseStatus.OK) {
|
||||
@@ -264,35 +293,83 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
||||
}
|
||||
}
|
||||
|
||||
private fun sendRequest(uri: URI, method: HttpMethod, body: ByteArray?): CompletableFuture<FullHttpResponse> {
|
||||
private fun sendRequest(
|
||||
uri: URI,
|
||||
method: HttpMethod,
|
||||
body: ByteArray?,
|
||||
extraHeaders: Iterable<Pair<CharSequence, CharSequence>>? = null
|
||||
): CompletableFuture<FullHttpResponse> {
|
||||
val responseFuture = CompletableFuture<FullHttpResponse>()
|
||||
// Custom handler for processing responses
|
||||
pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {
|
||||
|
||||
override fun operationComplete(channelFuture: Future<Channel>) {
|
||||
if (channelFuture.isSuccess) {
|
||||
val channel = channelFuture.now
|
||||
val pipeline = channel.pipeline()
|
||||
channel.pipeline().addLast("handler", object : SimpleChannelInboundHandler<FullHttpResponse>() {
|
||||
|
||||
val closeListener = GenericFutureListener<Future<Void>> {
|
||||
responseFuture.completeExceptionally(IOException("The remote server closed the connection"))
|
||||
}
|
||||
channel.closeFuture().addListener(closeListener)
|
||||
|
||||
val responseHandler = object : SimpleChannelInboundHandler<FullHttpResponse>() {
|
||||
|
||||
override fun handlerAdded(ctx: ChannelHandlerContext) {
|
||||
channel.closeFuture().removeListener(closeListener)
|
||||
}
|
||||
|
||||
override fun channelRead0(
|
||||
ctx: ChannelHandlerContext,
|
||||
response: FullHttpResponse
|
||||
) {
|
||||
pipeline.removeLast()
|
||||
pool.release(channel)
|
||||
responseFuture.complete(response)
|
||||
pipeline.remove(this)
|
||||
responseFuture.complete(response.retainedDuplicate())
|
||||
if (!profile.connection.requestPipelining) {
|
||||
pool.release(channel)
|
||||
}
|
||||
}
|
||||
|
||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
||||
ctx.newPromise()
|
||||
val ex = when (cause) {
|
||||
is DecoderException -> cause.cause
|
||||
else -> cause
|
||||
}
|
||||
responseFuture.completeExceptionally(ex)
|
||||
ctx.close()
|
||||
pipeline.removeLast()
|
||||
}
|
||||
|
||||
override fun channelInactive(ctx: ChannelHandlerContext) {
|
||||
responseFuture.completeExceptionally(IOException("The remote server closed the connection"))
|
||||
super.channelInactive(ctx)
|
||||
pool.release(channel)
|
||||
}
|
||||
})
|
||||
|
||||
override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
|
||||
if (evt is IdleStateEvent) {
|
||||
val te = when (evt.state()) {
|
||||
IdleState.READER_IDLE -> TimeoutException("Read timeout")
|
||||
IdleState.WRITER_IDLE -> TimeoutException("Write timeout")
|
||||
IdleState.ALL_IDLE -> TimeoutException("Idle timeout")
|
||||
null -> throw IllegalStateException("This should never happen")
|
||||
}
|
||||
responseFuture.completeExceptionally(te)
|
||||
super.userEventTriggered(ctx, evt)
|
||||
if (this === pipeline.last()) {
|
||||
ctx.close()
|
||||
}
|
||||
if (!profile.connection.requestPipelining) {
|
||||
pool.release(channel)
|
||||
}
|
||||
} else {
|
||||
super.userEventTriggered(ctx, evt)
|
||||
}
|
||||
}
|
||||
}
|
||||
pipeline.addLast(responseHandler)
|
||||
|
||||
|
||||
// Prepare the HTTP request
|
||||
val request: FullHttpRequest = let {
|
||||
val content: ByteBuf? = body?.takeIf(ByteArray::isNotEmpty)?.let(Unpooled::wrappedBuffer)
|
||||
@@ -302,17 +379,22 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
||||
uri.rawPath,
|
||||
content ?: Unpooled.buffer(0)
|
||||
).apply {
|
||||
// Set headers
|
||||
headers().apply {
|
||||
if (content != null) {
|
||||
set(HttpHeaderNames.CONTENT_TYPE, HttpHeaderValues.APPLICATION_OCTET_STREAM)
|
||||
set(HttpHeaderNames.CONTENT_LENGTH, content.readableBytes())
|
||||
}
|
||||
set(HttpHeaderNames.HOST, profile.serverURI.host)
|
||||
set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
|
||||
set(
|
||||
HttpHeaderNames.ACCEPT_ENCODING,
|
||||
HttpHeaderValues.GZIP.toString() + "," + HttpHeaderValues.DEFLATE.toString()
|
||||
)
|
||||
if (profile.compressionEnabled) {
|
||||
set(
|
||||
HttpHeaderNames.ACCEPT_ENCODING,
|
||||
HttpHeaderValues.GZIP.toString() + "," + HttpHeaderValues.DEFLATE.toString()
|
||||
)
|
||||
}
|
||||
extraHeaders?.forEach { (k, v) ->
|
||||
add(k, v)
|
||||
}
|
||||
// Add basic auth if configured
|
||||
(profile.authentication as? Configuration.Authentication.BasicAuthenticationCredentials)?.let { credentials ->
|
||||
val auth = "${credentials.username}:${credentials.password}"
|
||||
@@ -323,9 +405,16 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
||||
}
|
||||
}
|
||||
|
||||
// Set headers
|
||||
// Send the request
|
||||
channel.writeAndFlush(request)
|
||||
channel.writeAndFlush(request).addListener {
|
||||
if (!it.isSuccess) {
|
||||
val ex = it.cause()
|
||||
log.warn(ex.message, ex)
|
||||
}
|
||||
if (profile.connection.requestPipelining) {
|
||||
pool.release(channel)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
responseFuture.completeExceptionally(channelFuture.cause())
|
||||
}
|
@@ -1,10 +1,5 @@
|
||||
package net.woggioni.rbcs.client.impl
|
||||
|
||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.Xml.Companion.asIterable
|
||||
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
|
||||
import org.w3c.dom.Document
|
||||
import java.net.URI
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
@@ -12,12 +7,18 @@ import java.security.KeyStore
|
||||
import java.security.PrivateKey
|
||||
import java.security.cert.X509Certificate
|
||||
import java.time.Duration
|
||||
import java.time.temporal.ChronoUnit
|
||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
||||
import net.woggioni.rbcs.client.Configuration
|
||||
import net.woggioni.rbcs.common.Xml.Companion.asIterable
|
||||
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
|
||||
import org.w3c.dom.Document
|
||||
|
||||
object Parser {
|
||||
|
||||
fun parse(document: Document): RemoteBuildCacheClient.Configuration {
|
||||
fun parse(document: Document): Configuration {
|
||||
val root = document.documentElement
|
||||
val profiles = mutableMapOf<String, RemoteBuildCacheClient.Configuration.Profile>()
|
||||
val profiles = mutableMapOf<String, Configuration.Profile>()
|
||||
|
||||
for (child in root.asIterable()) {
|
||||
val tagName = child.localName
|
||||
@@ -27,8 +28,15 @@ object Parser {
|
||||
child.renderAttribute("name") ?: throw ConfigurationException("name attribute is required")
|
||||
val uri = child.renderAttribute("base-url")?.let(::URI)
|
||||
?: throw ConfigurationException("base-url attribute is required")
|
||||
var authentication: RemoteBuildCacheClient.Configuration.Authentication? = null
|
||||
var retryPolicy: RemoteBuildCacheClient.Configuration.RetryPolicy? = null
|
||||
var authentication: Configuration.Authentication? = null
|
||||
var retryPolicy: Configuration.RetryPolicy? = null
|
||||
var connection : Configuration.Connection = Configuration.Connection(
|
||||
Duration.ofSeconds(60),
|
||||
Duration.ofSeconds(60),
|
||||
Duration.ofSeconds(30),
|
||||
false
|
||||
)
|
||||
var trustStore : Configuration.TrustStore? = null
|
||||
for (gchild in child.asIterable()) {
|
||||
when (gchild.localName) {
|
||||
"tls-client-auth" -> {
|
||||
@@ -49,7 +57,7 @@ object Parser {
|
||||
.toList()
|
||||
.toTypedArray()
|
||||
authentication =
|
||||
RemoteBuildCacheClient.Configuration.Authentication.TlsClientAuthenticationCredentials(
|
||||
Configuration.Authentication.TlsClientAuthenticationCredentials(
|
||||
key,
|
||||
certChain
|
||||
)
|
||||
@@ -61,7 +69,7 @@ object Parser {
|
||||
val password = gchild.renderAttribute("password")
|
||||
?: throw ConfigurationException("password attribute is required")
|
||||
authentication =
|
||||
RemoteBuildCacheClient.Configuration.Authentication.BasicAuthenticationCredentials(
|
||||
Configuration.Authentication.BasicAuthenticationCredentials(
|
||||
username,
|
||||
password
|
||||
)
|
||||
@@ -80,12 +88,40 @@ object Parser {
|
||||
gchild.renderAttribute("exp")
|
||||
?.let(String::toDouble)
|
||||
?: 2.0f
|
||||
retryPolicy = RemoteBuildCacheClient.Configuration.RetryPolicy(
|
||||
retryPolicy = Configuration.RetryPolicy(
|
||||
maxAttempts,
|
||||
initialDelay.toMillis(),
|
||||
exp.toDouble()
|
||||
)
|
||||
}
|
||||
|
||||
"connection" -> {
|
||||
val idleTimeout = gchild.renderAttribute("idle-timeout")
|
||||
?.let(Duration::parse) ?: Duration.of(30, ChronoUnit.SECONDS)
|
||||
val readIdleTimeout = gchild.renderAttribute("read-idle-timeout")
|
||||
?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
|
||||
val writeIdleTimeout = gchild.renderAttribute("write-idle-timeout")
|
||||
?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
|
||||
val requestPipelining = gchild.renderAttribute("request-pipelining")
|
||||
?.let(String::toBoolean) ?: false
|
||||
connection = Configuration.Connection(
|
||||
readIdleTimeout,
|
||||
writeIdleTimeout,
|
||||
idleTimeout,
|
||||
requestPipelining
|
||||
)
|
||||
}
|
||||
|
||||
"tls-trust-store" -> {
|
||||
val file = gchild.renderAttribute("file")
|
||||
?.let(Path::of)
|
||||
val password = gchild.renderAttribute("password")
|
||||
val checkCertificateStatus = gchild.renderAttribute("check-certificate-status")
|
||||
?.let(String::toBoolean) ?: false
|
||||
val verifyServerCertificate = gchild.renderAttribute("verify-server-certificate")
|
||||
?.let(String::toBoolean) ?: true
|
||||
trustStore = Configuration.TrustStore(file, password, checkCertificateStatus, verifyServerCertificate)
|
||||
}
|
||||
}
|
||||
}
|
||||
val maxConnections = child.renderAttribute("max-connections")
|
||||
@@ -93,16 +129,23 @@ object Parser {
|
||||
?: 50
|
||||
val connectionTimeout = child.renderAttribute("connection-timeout")
|
||||
?.let(Duration::parse)
|
||||
profiles[name] = RemoteBuildCacheClient.Configuration.Profile(
|
||||
val compressionEnabled = child.renderAttribute("enable-compression")
|
||||
?.let(String::toBoolean)
|
||||
?: true
|
||||
|
||||
profiles[name] = Configuration.Profile(
|
||||
uri,
|
||||
connection,
|
||||
authentication,
|
||||
connectionTimeout,
|
||||
maxConnections,
|
||||
retryPolicy
|
||||
compressionEnabled,
|
||||
retryPolicy,
|
||||
trustStore
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
return RemoteBuildCacheClient.Configuration(profiles)
|
||||
return Configuration(profiles)
|
||||
}
|
||||
}
|
@@ -3,6 +3,8 @@ package net.woggioni.rbcs.client
|
||||
import io.netty.util.concurrent.EventExecutorGroup
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.concurrent.TimeUnit
|
||||
import kotlin.math.pow
|
||||
import kotlin.random.Random
|
||||
|
||||
sealed class OperationOutcome<T> {
|
||||
class Success<T>(val result: T) : OperationOutcome<T>()
|
||||
@@ -24,8 +26,10 @@ fun <T> executeWithRetry(
|
||||
initialDelay: Double,
|
||||
exp: Double,
|
||||
outcomeHandler: OutcomeHandler<T>,
|
||||
randomizer : Random?,
|
||||
cb: () -> CompletableFuture<T>
|
||||
): CompletableFuture<T> {
|
||||
|
||||
val finalResult = cb()
|
||||
var future = finalResult
|
||||
var shortCircuit = false
|
||||
@@ -46,7 +50,7 @@ fun <T> executeWithRetry(
|
||||
is OutcomeHandlerResult.Retry -> {
|
||||
val res = CompletableFuture<T>()
|
||||
val delay = run {
|
||||
val scheduledDelay = (initialDelay * Math.pow(exp, i.toDouble())).toLong()
|
||||
val scheduledDelay = (initialDelay * exp.pow(i.toDouble()) * (1.0 + (randomizer?.nextDouble(-0.5, 0.5) ?: 0.0))).toLong()
|
||||
outcomeHandlerResult.suggestedDelayMillis?.coerceAtMost(scheduledDelay) ?: scheduledDelay
|
||||
}
|
||||
eventExecutorGroup.schedule({
|
||||
|
@@ -15,36 +15,246 @@
|
||||
<xs:complexType name="profileType">
|
||||
<xs:sequence>
|
||||
<xs:choice>
|
||||
<xs:element name="no-auth" type="rbcs-client:noAuthType"/>
|
||||
<xs:element name="basic-auth" type="rbcs-client:basicAuthType"/>
|
||||
<xs:element name="tls-client-auth" type="rbcs-client:tlsClientAuthType"/>
|
||||
<xs:element name="no-auth" type="rbcs-client:noAuthType">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Disable authentication.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:element>
|
||||
<xs:element name="basic-auth" type="rbcs-client:basicAuthType">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Enable HTTP basic authentication.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:element>
|
||||
<xs:element name="tls-client-auth" type="rbcs-client:tlsClientAuthType">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Enable TLS certificate authentication.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:element>
|
||||
</xs:choice>
|
||||
<xs:element name="retry-policy" type="rbcs-client:retryType" minOccurs="0"/>
|
||||
<xs:element name="connection" type="rbcs-client:connectionType" minOccurs="0" >
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Set inactivity timeouts for connections to this server;
if not present, connections are only closed on network errors.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:element>
|
||||
<xs:element name="retry-policy" type="rbcs-client:retryType" minOccurs="0">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Set a retry policy for this server; if not present, requests won't be retried
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:element>
|
||||
<xs:element name="tls-trust-store" type="rbcs-client:trustStoreType" minOccurs="0">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
If set, specifies an alternative truststore used to validate the server certificate.
If not present, the system truststore is used.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:element>
|
||||
</xs:sequence>
|
||||
<xs:attribute name="name" type="xs:token" use="required"/>
|
||||
<xs:attribute name="base-url" type="xs:anyURI" use="required"/>
|
||||
<xs:attribute name="max-connections" type="xs:positiveInteger" default="50"/>
|
||||
<xs:attribute name="connection-timeout" type="xs:duration"/>
|
||||
<xs:attribute name="name" type="xs:token" use="required">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Name of this server profile, to be referred to from rbcs-cli with the '-p' parameter
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="base-url" type="xs:anyURI" use="required">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
RBCS server URL
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="max-connections" type="xs:positiveInteger" default="50">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Maximum number of concurrent TCP connections to open to this server
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="connection-timeout" type="xs:duration">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Maximum amount of time to wait while establishing a connection to this server
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="enable-compression" type="xs:boolean" default="true">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Enable HTTP compression when communicating to this server
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="noAuthType"/>
|
||||
<xs:complexType name="connectionType">
|
||||
<xs:attribute name="idle-timeout" type="xs:duration" use="optional" default="PT30S">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
The client will close the connection with the server
|
||||
when neither a read nor a write was performed for the specified period of time.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="read-idle-timeout" type="xs:duration" use="optional" default="PT60S">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
The client will close the connection with the server
|
||||
when no read was performed for the specified period of time.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="write-idle-timeout" type="xs:duration" use="optional" default="PT60S">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
The client will close the connection with the server
|
||||
when no write was performed for the specified period of time.
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="request-pipelining" type="xs:boolean" use="optional" default="false">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Enables HTTP/1.1 request pipelining
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="noAuthType">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Add this tag to disable authentication when talking to the RBCS server
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="basicAuthType">
|
||||
<xs:attribute name="user" type="xs:token" use="required"/>
|
||||
<xs:attribute name="password" type="xs:string" use="required"/>
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Add this tag to enable HTTP basic authentication for communication with this server.
Note that HTTP basic authentication sends credentials effectively in cleartext (only Base64-encoded)
over the network, so make sure the connection is protected by TLS (i.e. the server URL starts with "https")
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
<xs:attribute name="user" type="xs:token" use="required">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Username for HTTP basic authentication
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="password" type="xs:string" use="required">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Password used for HTTP basic authentication
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="tlsClientAuthType">
|
||||
<xs:attribute name="key-store-file" type="xs:anyURI" use="required"/>
|
||||
<xs:attribute name="key-store-password" type="xs:string" use="required"/>
|
||||
<xs:attribute name="key-alias" type="xs:token" use="required"/>
|
||||
<xs:attribute name="key-password" type="xs:string" use="optional"/>
|
||||
<xs:attribute name="key-store-file" type="xs:anyURI" use="required">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
System path to the keystore file
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="key-store-password" type="xs:string" use="required">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Password to open the keystore file
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="key-alias" type="xs:token" use="required">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Alias of the keystore entry containing the private key
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="key-password" type="xs:string" use="optional">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Private key entry's encryption password
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="retryType">
|
||||
<xs:attribute name="max-attempts" type="xs:positiveInteger" use="required"/>
|
||||
<xs:attribute name="initial-delay" type="xs:duration" default="PT1S"/>
|
||||
<xs:attribute name="exp" type="xs:double" default="2.0"/>
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Retry policy to use in case of failures, based on exponential backoff
|
||||
https://en.wikipedia.org/wiki/Exponential_backoff
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
|
||||
<xs:attribute name="max-attempts" type="xs:positiveInteger" use="required">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Maximum number of attempts, after which the call will result in an error,
|
||||
throwing an exception related to the last received failure
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="initial-delay" type="xs:duration" default="PT1S">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Delay to apply before retrying after the first failed call
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="exp" type="xs:double" default="2.0">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Base of the exponential backoff: the delay applied before attempt n is initial-delay multiplied by exp^n
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="trustStoreType">
|
||||
<xs:attribute name="file" type="xs:string" use="required">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Path to the truststore file
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="password" type="xs:string">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Truststore file password
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="check-certificate-status" type="xs:boolean">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
Whether or not to check the server certificate's validity using CRL/OCSP
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
<xs:attribute name="verify-server-certificate" type="xs:boolean" use="optional" default="true">
|
||||
<xs:annotation>
|
||||
<xs:documentation>
|
||||
If false, the client will blindly trust the certificate provided by the server
|
||||
</xs:documentation>
|
||||
</xs:annotation>
|
||||
</xs:attribute>
|
||||
</xs:complexType>
|
||||
</xs:schema>
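To illustrate how the elements and attributes documented in this schema fit together, here is a
sketch of a single client profile, modelled on the test configuration further down in this diff;
the profile name, URL, credentials and truststore path are made up for illustration, and the
element order follows the sequence defined above:

```xml
<!-- Illustrative profile only: adjust names, URL and paths to your environment -->
<profile name="example" base-url="https://rbcs.example.com/"
         max-connections="50" connection-timeout="PT10S" enable-compression="true">
    <basic-auth user="builder" password="secret"/>
    <connection idle-timeout="PT30S" read-idle-timeout="PT60S"
                write-idle-timeout="PT60S" request-pipelining="false"/>
    <retry-policy max-attempts="5" initial-delay="PT1S" exp="2.0"/>
    <tls-trust-store file="truststore.pfx" password="password"
                     check-certificate-status="false" verify-server-certificate="true"/>
</profile>
```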
|
||||
|
@@ -2,6 +2,9 @@ package net.woggioni.rbcs.client
|
||||
|
||||
import io.netty.util.concurrent.DefaultEventExecutorGroup
|
||||
import io.netty.util.concurrent.EventExecutorGroup
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.stream.Stream
|
||||
import kotlin.random.Random
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import org.junit.jupiter.api.Assertions
|
||||
import org.junit.jupiter.api.extension.ExtensionContext
|
||||
@@ -9,9 +12,6 @@ import org.junit.jupiter.params.ParameterizedTest
|
||||
import org.junit.jupiter.params.provider.Arguments
|
||||
import org.junit.jupiter.params.provider.ArgumentsProvider
|
||||
import org.junit.jupiter.params.provider.ArgumentsSource
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.stream.Stream
|
||||
import kotlin.random.Random
|
||||
|
||||
class RetryTest {
|
||||
|
||||
@@ -89,7 +89,7 @@ class RetryTest {
|
||||
val random = Random(testArgs.seed)
|
||||
|
||||
val future =
|
||||
executeWithRetry(executor, testArgs.maxAttempt, testArgs.initialDelay, testArgs.exp, outcomeHandler) {
|
||||
executeWithRetry(executor, testArgs.maxAttempt, testArgs.initialDelay, testArgs.exp, outcomeHandler, null) {
|
||||
val now = System.nanoTime()
|
||||
val result = CompletableFuture<Int>()
|
||||
executor.submit {
|
||||
@@ -129,7 +129,7 @@ class RetryTest {
|
||||
previousAttempt.first + testArgs.initialDelay * Math.pow(testArgs.exp, index.toDouble()) * 1e6
|
||||
val actualTimestamp = timestamp
|
||||
val err = Math.abs(expectedTimestamp - actualTimestamp) / expectedTimestamp
|
||||
Assertions.assertTrue(err < 1e-3)
|
||||
Assertions.assertTrue(err < 0.1)
|
||||
}
|
||||
if (index == attempts.size - 1 && index < testArgs.maxAttempt - 1) {
|
||||
/*
|
||||
|
@@ -9,6 +9,8 @@
|
||||
key-store-password="password"
|
||||
key-alias="woggioni@c962475fa38"
|
||||
key-password="key-password"/>
|
||||
<connection write-idle-timeout="PT60S" read-idle-timeout="PT60S" write-timeout="PT0S" read-timeout="PT0S" idle-timeout="PT30S" />
|
||||
<tls-trust-store file="file.pfx" password="password" check-certificate-status="false" verify-server-certificate="true"/>
|
||||
</profile>
|
||||
<profile name="profile2" base-url="https://rbcs2.example.com/">
|
||||
<basic-auth user="user" password="password"/>
|
||||
|
@@ -6,7 +6,7 @@ plugins {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation project(':rbcs-api')
|
||||
implementation catalog.netty.transport
|
||||
implementation catalog.slf4j.api
|
||||
implementation catalog.jwo
|
||||
implementation catalog.netty.buffer
|
||||
|
@@ -5,6 +5,7 @@ module net.woggioni.rbcs.common {
|
||||
requires kotlin.stdlib;
|
||||
requires net.woggioni.jwo;
|
||||
requires io.netty.buffer;
|
||||
requires io.netty.transport;
|
||||
|
||||
provides java.net.spi.URLStreamHandlerProvider with net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory;
|
||||
exports net.woggioni.rbcs.common;
|
||||
|
15
rbcs-common/src/main/kotlin/net/woggioni/rbcs/common/BB.kt
Normal file
@@ -0,0 +1,15 @@
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import io.netty.buffer.ByteBuf
|
||||
import io.netty.buffer.ByteBufAllocator
|
||||
import io.netty.buffer.CompositeByteBuf
|
||||
|
||||
fun extractChunk(buf: CompositeByteBuf, alloc: ByteBufAllocator): ByteBuf {
|
||||
val chunk = alloc.compositeBuffer()
|
||||
for (component in buf.decompose(0, buf.readableBytes())) {
|
||||
chunk.addComponent(true, component.retain())
|
||||
}
|
||||
buf.removeComponents(0, buf.numComponents())
|
||||
buf.clear()
|
||||
return chunk
|
||||
}
|
@@ -1,90 +1,173 @@
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import org.slf4j.Logger
|
||||
import org.slf4j.LoggerFactory
|
||||
import org.slf4j.event.Level
|
||||
import io.netty.channel.Channel
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.util.logging.LogManager
|
||||
import org.slf4j.Logger
|
||||
import org.slf4j.LoggerFactory
|
||||
import org.slf4j.MDC
|
||||
import org.slf4j.event.Level
|
||||
import org.slf4j.spi.LoggingEventBuilder
|
||||
|
||||
inline fun <reified T> T.contextLogger() = LoggerFactory.getLogger(T::class.java)
|
||||
inline fun <reified T> createLogger() = LoggerFactory.getLogger(T::class.java)
|
||||
|
||||
inline fun Logger.traceParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||
if(isTraceEnabled) {
|
||||
inline fun Logger.traceParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
||||
if (isTraceEnabled) {
|
||||
val (format, params) = messageBuilder()
|
||||
trace(format, params)
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.debugParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||
if(isDebugEnabled) {
|
||||
inline fun Logger.debugParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
||||
if (isDebugEnabled) {
|
||||
val (format, params) = messageBuilder()
|
||||
info(format, params)
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.infoParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||
if(isInfoEnabled) {
|
||||
inline fun Logger.infoParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
||||
if (isInfoEnabled) {
|
||||
val (format, params) = messageBuilder()
|
||||
info(format, params)
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.warnParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||
if(isWarnEnabled) {
|
||||
inline fun Logger.warnParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
||||
if (isWarnEnabled) {
|
||||
val (format, params) = messageBuilder()
|
||||
warn(format, params)
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.errorParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||
if(isErrorEnabled) {
|
||||
inline fun Logger.errorParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
||||
if (isErrorEnabled) {
|
||||
val (format, params) = messageBuilder()
|
||||
error(format, params)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
inline fun log(log : Logger,
|
||||
filter : Logger.() -> Boolean,
|
||||
loggerMethod : Logger.(String) -> Unit, messageBuilder : () -> String) {
|
||||
if(log.filter()) {
|
||||
inline fun log(
|
||||
log: Logger,
|
||||
filter: Logger.() -> Boolean,
|
||||
loggerMethod: Logger.(String) -> Unit, messageBuilder: () -> String
|
||||
) {
|
||||
if (log.filter()) {
|
||||
log.loggerMethod(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.log(level : Level, messageBuilder : () -> String) {
|
||||
if(isEnabledForLevel(level)) {
|
||||
fun withMDC(params: Array<Pair<String, String>>, cb: () -> Unit) {
|
||||
object : AutoCloseable {
|
||||
override fun close() {
|
||||
for ((key, _) in params) MDC.remove(key)
|
||||
}
|
||||
}.use {
|
||||
for ((key, value) in params) MDC.put(key, value)
|
||||
cb()
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.log(level: Level, channel: Channel, crossinline messageBuilder: (LoggingEventBuilder) -> Unit ) {
|
||||
if (isEnabledForLevel(level)) {
|
||||
val params = arrayOf<Pair<String, String>>(
|
||||
"channel-id-short" to channel.id().asShortText(),
|
||||
"channel-id-long" to channel.id().asLongText(),
|
||||
"remote-address" to channel.remoteAddress().toString(),
|
||||
"local-address" to channel.localAddress().toString(),
|
||||
)
|
||||
withMDC(params) {
|
||||
val builder = makeLoggingEventBuilder(level)
|
||||
// for ((key, value) in params) {
|
||||
// builder.addKeyValue(key, value)
|
||||
// }
|
||||
messageBuilder(builder)
|
||||
builder.log()
|
||||
}
|
||||
}
|
||||
}
|
||||
inline fun Logger.log(level: Level, channel: Channel, crossinline messageBuilder: () -> String) {
|
||||
log(level, channel) { builder ->
|
||||
builder.setMessage(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.trace(ch: Channel, crossinline messageBuilder: () -> String) {
|
||||
log(Level.TRACE, ch, messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.debug(ch: Channel, crossinline messageBuilder: () -> String) {
|
||||
log(Level.DEBUG, ch, messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.info(ch: Channel, crossinline messageBuilder: () -> String) {
|
||||
log(Level.INFO, ch, messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.warn(ch: Channel, crossinline messageBuilder: () -> String) {
|
||||
log(Level.WARN, ch, messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.error(ch: Channel, crossinline messageBuilder: () -> String) {
|
||||
log(Level.ERROR, ch, messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.trace(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
||||
log(Level.TRACE, ctx.channel(), messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.debug(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
||||
log(Level.DEBUG, ctx.channel(), messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.info(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
||||
log(Level.INFO, ctx.channel(), messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.warn(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
||||
log(Level.WARN, ctx.channel(), messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.error(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
||||
log(Level.ERROR, ctx.channel(), messageBuilder)
|
||||
}
|
||||
|
||||
|
||||
inline fun Logger.log(level: Level, messageBuilder: () -> String) {
|
||||
if (isEnabledForLevel(level)) {
|
||||
makeLoggingEventBuilder(level).log(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.trace(messageBuilder : () -> String) {
|
||||
if(isTraceEnabled) {
|
||||
inline fun Logger.trace(messageBuilder: () -> String) {
|
||||
if (isTraceEnabled) {
|
||||
trace(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.debug(messageBuilder : () -> String) {
|
||||
if(isDebugEnabled) {
|
||||
inline fun Logger.debug(messageBuilder: () -> String) {
|
||||
if (isDebugEnabled) {
|
||||
debug(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.info(messageBuilder : () -> String) {
|
||||
if(isInfoEnabled) {
|
||||
inline fun Logger.info(messageBuilder: () -> String) {
|
||||
if (isInfoEnabled) {
|
||||
info(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.warn(messageBuilder : () -> String) {
|
||||
if(isWarnEnabled) {
|
||||
inline fun Logger.warn(messageBuilder: () -> String) {
|
||||
if (isWarnEnabled) {
|
||||
warn(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.error(messageBuilder : () -> String) {
|
||||
if(isErrorEnabled) {
|
||||
inline fun Logger.error(messageBuilder: () -> String) {
|
||||
if (isErrorEnabled) {
|
||||
error(messageBuilder())
|
||||
}
|
||||
}
|
||||
@@ -94,9 +177,9 @@ class LoggingConfig {
|
||||
|
||||
init {
|
||||
val logManager = LogManager.getLogManager()
|
||||
System.getProperty("log.config.source")?.let withSource@ { source ->
|
||||
System.getProperty("log.config.source")?.let withSource@{ source ->
|
||||
val urls = LoggingConfig::class.java.classLoader.getResources(source)
|
||||
while(urls.hasMoreElements()) {
|
||||
while (urls.hasMoreElements()) {
|
||||
val url = urls.nextElement()
|
||||
url.openStream().use { inputStream ->
|
||||
logManager.readConfiguration(inputStream)
|
||||
|
@@ -7,7 +7,18 @@ import javax.crypto.SecretKeyFactory
|
||||
import javax.crypto.spec.PBEKeySpec
|
||||
|
||||
object PasswordSecurity {
|
||||
private const val KEY_LENGTH = 256
|
||||
|
||||
enum class Algorithm(
|
||||
val codeName : String,
|
||||
val keyLength : Int,
|
||||
val iterations : Int) {
|
||||
PBEWithHmacSHA512_224AndAES_256("PBEWithHmacSHA512/224AndAES_256", 64, 1),
|
||||
PBEWithHmacSHA1AndAES_256("PBEWithHmacSHA1AndAES_256",64, 1),
|
||||
PBEWithHmacSHA384AndAES_128("PBEWithHmacSHA384AndAES_128", 64,1),
|
||||
PBEWithHmacSHA384AndAES_256("PBEWithHmacSHA384AndAES_256",64,1),
|
||||
PBKDF2WithHmacSHA512("PBKDF2WithHmacSHA512",512, 1),
|
||||
PBKDF2WithHmacSHA384("PBKDF2WithHmacSHA384",384, 1);
|
||||
}
|
||||
|
||||
private fun concat(arr1: ByteArray, arr2: ByteArray): ByteArray {
|
||||
val result = ByteArray(arr1.size + arr2.size)
|
||||
@@ -23,22 +34,22 @@ object PasswordSecurity {
|
||||
return result
|
||||
}
|
||||
|
||||
fun hashPassword(password : String, salt : String? = null) : String {
|
||||
fun hashPassword(password : String, salt : String? = null, algorithm : Algorithm = Algorithm.PBKDF2WithHmacSHA512) : String {
|
||||
val actualSalt = salt?.let(Base64.getDecoder()::decode) ?: SecureRandom().run {
|
||||
val result = ByteArray(16)
|
||||
nextBytes(result)
|
||||
result
|
||||
}
|
||||
val spec: KeySpec = PBEKeySpec(password.toCharArray(), actualSalt, 10, KEY_LENGTH)
|
||||
val factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1")
|
||||
val spec: KeySpec = PBEKeySpec(password.toCharArray(), actualSalt, algorithm.iterations, algorithm.keyLength)
|
||||
val factory = SecretKeyFactory.getInstance(algorithm.codeName)
|
||||
val hash = factory.generateSecret(spec).encoded
|
||||
return String(Base64.getEncoder().encode(concat(hash, actualSalt)))
|
||||
}
|
||||
|
||||
fun decodePasswordHash(passwordHash : String) : Pair<ByteArray, ByteArray> {
|
||||
val decoded = Base64.getDecoder().decode(passwordHash)
|
||||
val hash = ByteArray(KEY_LENGTH / 8)
|
||||
val salt = ByteArray(decoded.size - KEY_LENGTH / 8)
|
||||
fun decodePasswordHash(encodedPasswordHash : String, algorithm: Algorithm = Algorithm.PBKDF2WithHmacSHA512) : Pair<ByteArray, ByteArray> {
|
||||
val decoded = Base64.getDecoder().decode(encodedPasswordHash)
|
||||
val hash = ByteArray(algorithm.keyLength / 8)
|
||||
val salt = ByteArray(decoded.size - algorithm.keyLength / 8)
|
||||
System.arraycopy(decoded, 0, hash, 0, hash.size)
|
||||
System.arraycopy(decoded, hash.size, salt, 0, salt.size)
|
||||
return hash to salt
|
||||
|
@@ -1,20 +1,55 @@
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import net.woggioni.jwo.JWO
|
||||
import java.io.IOException
|
||||
import java.net.InetAddress
|
||||
import java.net.ServerSocket
|
||||
import java.net.URI
|
||||
import java.net.URL
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.security.KeyStore
|
||||
import java.security.MessageDigest
|
||||
import java.security.cert.CertPathValidator
|
||||
import java.security.cert.CertPathValidatorException
|
||||
import java.security.cert.CertificateException
|
||||
import java.security.cert.CertificateFactory
|
||||
import java.security.cert.PKIXParameters
|
||||
import java.security.cert.PKIXRevocationChecker
|
||||
import java.security.cert.X509Certificate
|
||||
import java.util.EnumSet
|
||||
import javax.net.ssl.TrustManagerFactory
|
||||
import javax.net.ssl.X509TrustManager
|
||||
import net.woggioni.jwo.JWO
|
||||
import net.woggioni.jwo.Tuple2
|
||||
|
||||
object RBCS {
|
||||
fun String.toUrl() : URL = URL.of(URI(this), null)
|
||||
fun String.toUrl(): URL = URL.of(URI(this), null)
|
||||
|
||||
const val RBCS_NAMESPACE_URI: String = "urn:net.woggioni.rbcs.server"
|
||||
const val RBCS_PREFIX: String = "rbcs"
|
||||
const val XML_SCHEMA_NAMESPACE_URI = "http://www.w3.org/2001/XMLSchema-instance"
|
||||
|
||||
fun ByteArray.toInt(index: Int = 0): Long {
|
||||
if (index + 4 > size) throw IllegalArgumentException("Not enough bytes to decode a 32 bits integer")
|
||||
var value: Long = 0
|
||||
for (b in index until index + 4) {
|
||||
value = (value shl 8) + (get(b).toInt() and 0xFF)
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
fun ByteArray.toLong(index: Int = 0): Long {
|
||||
if (index + 8 > size) throw IllegalArgumentException("Not enough bytes to decode a 64 bits long integer")
|
||||
var value: Long = 0
|
||||
for (b in index until index + 8) {
|
||||
value = (value shl 8) + (get(b).toInt() and 0xFF)
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
fun digest(
|
||||
data: ByteArray,
|
||||
md: MessageDigest = MessageDigest.getInstance("MD5")
|
||||
md: MessageDigest
|
||||
): ByteArray {
|
||||
md.update(data)
|
||||
return md.digest()
|
||||
@@ -22,8 +57,111 @@ object RBCS {
|
||||
|
||||
fun digestString(
|
||||
data: ByteArray,
|
||||
md: MessageDigest = MessageDigest.getInstance("MD5")
|
||||
md: MessageDigest
|
||||
): String {
|
||||
return JWO.bytesToHex(digest(data, md))
|
||||
}
|
||||
|
||||
fun processCacheKey(key: String, keyPrefix: String?, digestAlgorithm: String?) : ByteArray {
|
||||
val prefixedKey = if (keyPrefix == null) {
|
||||
key
|
||||
} else {
|
||||
key + keyPrefix
|
||||
}.toByteArray(Charsets.UTF_8)
|
||||
return digestAlgorithm
|
||||
?.let(MessageDigest::getInstance)
|
||||
?.let { md ->
|
||||
digest(prefixedKey, md)
|
||||
} ?: prefixedKey
|
||||
}
|
||||
|
||||
fun Long.toIntOrNull(): Int? {
|
||||
return if (this >= Int.MIN_VALUE && this <= Int.MAX_VALUE) {
|
||||
toInt()
|
||||
} else {
|
||||
null
|
||||
}
|
||||
}
|
||||
|
||||
fun getFreePort(): Int {
|
||||
var count = 0
|
||||
while (count < 50) {
|
||||
try {
|
||||
ServerSocket(0, 50, InetAddress.getLocalHost()).use { serverSocket ->
|
||||
val candidate = serverSocket.localPort
|
||||
if (candidate > 0) {
|
||||
return candidate
|
||||
} else {
|
||||
throw RuntimeException("Got invalid port number: $candidate")
|
||||
}
|
||||
}
|
||||
} catch (ignored: IOException) {
|
||||
++count
|
||||
}
|
||||
}
|
||||
throw RuntimeException("Error trying to find an open port")
|
||||
}
|
||||
|
||||
fun loadKeystore(file: Path, password: String?): KeyStore {
|
||||
val ext = JWO.splitExtension(file)
|
||||
.map(Tuple2<String, String>::get_2)
|
||||
.orElseThrow {
|
||||
IllegalArgumentException(
|
||||
"Keystore file '${file}' must have .jks, .p12, .pfx extension"
|
||||
)
|
||||
}
|
||||
val keystore = when (ext.substring(1).lowercase()) {
|
||||
"jks" -> KeyStore.getInstance("JKS")
|
||||
"p12", "pfx" -> KeyStore.getInstance("PKCS12")
|
||||
else -> throw IllegalArgumentException(
|
||||
"Keystore file '${file}' must have .jks, .p12, .pfx extension"
|
||||
)
|
||||
}
|
||||
Files.newInputStream(file).use {
|
||||
keystore.load(it, password?.let(String::toCharArray))
|
||||
}
|
||||
return keystore
|
||||
}
|
||||
|
||||
fun getTrustManager(trustStore: KeyStore?, certificateRevocationEnabled: Boolean): X509TrustManager {
|
||||
return if (trustStore != null) {
|
||||
val certificateFactory = CertificateFactory.getInstance("X.509")
|
||||
val validator = CertPathValidator.getInstance("PKIX").apply {
|
||||
val rc = revocationChecker as PKIXRevocationChecker
|
||||
rc.options = EnumSet.of(
|
||||
PKIXRevocationChecker.Option.NO_FALLBACK
|
||||
)
|
||||
}
|
||||
val params = PKIXParameters(trustStore).apply {
|
||||
isRevocationEnabled = certificateRevocationEnabled
|
||||
}
|
||||
object : X509TrustManager {
|
||||
override fun checkClientTrusted(chain: Array<out X509Certificate>, authType: String) {
|
||||
val clientCertificateChain = certificateFactory.generateCertPath(chain.toList())
|
||||
try {
|
||||
validator.validate(clientCertificateChain, params)
|
||||
} catch (ex: CertPathValidatorException) {
|
||||
throw CertificateException(ex)
|
||||
}
|
||||
}
|
||||
|
||||
override fun checkServerTrusted(chain: Array<out X509Certificate>, authType: String) {
|
||||
throw NotImplementedError()
|
||||
}
|
||||
|
||||
private val acceptedIssuers = trustStore.aliases().asSequence()
|
||||
.filter(trustStore::isCertificateEntry)
|
||||
.map(trustStore::getCertificate)
|
||||
.map { it as X509Certificate }
|
||||
.toList()
|
||||
.toTypedArray()
|
||||
|
||||
override fun getAcceptedIssuers() = acceptedIssuers
|
||||
}
|
||||
} else {
|
||||
val trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm())
|
||||
trustManagerFactory.trustManagers.asSequence().filter { it is X509TrustManager }
|
||||
.single() as X509TrustManager
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,15 +1,5 @@
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import net.woggioni.jwo.JWO
|
||||
import org.slf4j.LoggerFactory
|
||||
import org.slf4j.event.Level
|
||||
import org.w3c.dom.Document
|
||||
import org.w3c.dom.Element
|
||||
import org.w3c.dom.Node
|
||||
import org.w3c.dom.NodeList
|
||||
import org.xml.sax.SAXNotRecognizedException
|
||||
import org.xml.sax.SAXNotSupportedException
|
||||
import org.xml.sax.SAXParseException
|
||||
import java.io.InputStream
|
||||
import java.io.OutputStream
|
||||
import java.net.URL
|
||||
@@ -26,7 +16,16 @@ import javax.xml.transform.stream.StreamResult
|
||||
import javax.xml.transform.stream.StreamSource
|
||||
import javax.xml.validation.Schema
|
||||
import javax.xml.validation.SchemaFactory
|
||||
import net.woggioni.jwo.JWO
|
||||
import org.slf4j.event.Level
|
||||
import org.w3c.dom.Document
|
||||
import org.w3c.dom.Element
|
||||
import org.w3c.dom.Node
|
||||
import org.w3c.dom.NodeList
|
||||
import org.xml.sax.ErrorHandler as ErrHandler
|
||||
import org.xml.sax.SAXNotRecognizedException
|
||||
import org.xml.sax.SAXNotSupportedException
|
||||
import org.xml.sax.SAXParseException
|
||||
|
||||
|
||||
class NodeListIterator(private val nodeList: NodeList) : Iterator<Node> {
|
||||
@@ -79,7 +78,7 @@ class Xml(val doc: Document, val element: Element) {
|
||||
class ErrorHandler(private val fileURL: URL) : ErrHandler {
|
||||
|
||||
companion object {
|
||||
private val log = LoggerFactory.getLogger(ErrorHandler::class.java)
|
||||
private val log = createLogger<ErrorHandler>()
|
||||
}
|
||||
|
||||
override fun warning(ex: SAXParseException)= err(ex, Level.WARN)
|
||||
|
@@ -0,0 +1,38 @@
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import java.security.Provider
|
||||
import java.security.Security
|
||||
import java.util.Base64
|
||||
import net.woggioni.rbcs.common.PasswordSecurity.decodePasswordHash
|
||||
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
|
||||
import org.junit.jupiter.api.Assertions
|
||||
import org.junit.jupiter.api.Test
|
||||
import org.junit.jupiter.params.ParameterizedTest
|
||||
import org.junit.jupiter.params.provider.EnumSource
|
||||
|
||||
|
||||
class PasswordHashingTest {
|
||||
|
||||
@EnumSource(PasswordSecurity.Algorithm::class)
|
||||
@ParameterizedTest
|
||||
fun test(algo: PasswordSecurity.Algorithm) {
|
||||
val password = "password"
|
||||
val encoded = hashPassword(password, algorithm = algo)
|
||||
val (_, salt) = decodePasswordHash(encoded, algo)
|
||||
Assertions.assertEquals(encoded,
|
||||
hashPassword(password, salt = salt.let(Base64.getEncoder()::encodeToString), algorithm = algo)
|
||||
)
|
||||
}
|
||||
|
||||
@Test
|
||||
fun listAvailableAlgorithms() {
|
||||
Security.getProviders().asSequence()
|
||||
.flatMap { provider: Provider -> provider.services.asSequence() }
|
||||
.filter { service: Provider.Service -> "SecretKeyFactory" == service.type }
|
||||
.map(Provider.Service::getAlgorithm)
|
||||
.forEach {
|
||||
println(it)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
45
rbcs-server-memcache/README.md
Normal file
@@ -0,0 +1,45 @@
|
||||
# RBCS Memcache plugin
|
||||
|
||||
This plugin allows RBCS to store and retrieve data from a memcache cluster.
Memcache server selection is based on the hash of the key; deflate compression is
also supported and is performed by the RBCS server.
|
||||
|
||||
## Quickstart
|
||||
The plugin can be built with
|
||||
```bash
|
||||
./gradlew rbcs-server-memcache:bundle
|
||||
```
|
||||
which creates a `.tar` archive in the `build/distributions` folder.
|
||||
The archive is supposed to be extracted inside the RBCS server's `plugins` directory.
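For example, a minimal install could look like the following sketch (the distribution
archive name and the `/opt/rbcs/plugins` destination are assumptions, adjust them to your layout):

```bash
# Build the plugin bundle, then unpack it into the RBCS server's plugins directory
./gradlew rbcs-server-memcache:bundle
tar -xf rbcs-server-memcache/build/distributions/rbcs-server-memcache-*.tar -C /opt/rbcs/plugins
```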
|
||||
|
||||
## Configuration
|
||||
|
||||
The plugin can be enabled by setting the `xs:type` attribute of the `cache` element
to `memcacheCacheType`.
|
||||
|
||||
The plugin currently supports the following configuration attributes:
|
||||
- `max-age`: the amount of time cache entries will be retained on memcache
|
||||
- `digest`: digest algorithm to use on the key before submission
|
||||
to memcache (optional, no digest is applied if omitted)
|
||||
- `compression-mode`: compression algorithm to apply to cache values before submission to memcache;
currently only `deflate` is supported (optional, if omitted compression is disabled)
|
||||
- `compression-level`: compression level to use, deflate supports compression levels from 1 to 9,
|
||||
where 1 gives the fastest compression at the expense of compression ratio (optional, 6 is used if omitted)
|
||||
```xml
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:rbcs="urn:net.woggioni.rbcs.server"
|
||||
xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
|
||||
xs:schemaLocation="urn:net.woggioni.rbcs.server.memcache jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd"
|
||||
>
|
||||
...
|
||||
<cache xs:type="rbcs-memcache:memcacheCacheType"
|
||||
max-age="P7D"
|
||||
digest="SHA-256"
|
||||
compression-mode="deflate"
|
||||
compression-level="6">
|
||||
<server host="127.0.0.1" port="11211" max-connections="256"/>
|
||||
<server host="127.0.0.1" port="11212" max-connections="256"/>
|
||||
</cache>
|
||||
...
|
||||
```
|
@@ -34,6 +34,7 @@ dependencies {
|
||||
implementation catalog.jwo
|
||||
implementation catalog.slf4j.api
|
||||
implementation catalog.netty.common
|
||||
implementation catalog.netty.handler
|
||||
implementation catalog.netty.codec.memcache
|
||||
|
||||
bundle catalog.netty.codec.memcache
|
||||
|
@@ -11,6 +11,7 @@ module net.woggioni.rbcs.server.memcache {
|
||||
requires io.netty.codec.memcache;
|
||||
requires io.netty.common;
|
||||
requires io.netty.buffer;
|
||||
requires io.netty.handler;
|
||||
requires org.slf4j;
|
||||
|
||||
provides CacheProvider with net.woggioni.rbcs.server.memcache.MemcacheCacheProvider;
|
||||
|
@@ -1,23 +0,0 @@
|
||||
package net.woggioni.rbcs.server.memcache
|
||||
|
||||
import io.netty.buffer.ByteBuf
|
||||
import net.woggioni.rbcs.api.Cache
|
||||
import net.woggioni.rbcs.server.memcache.client.MemcacheClient
|
||||
import java.nio.channels.ReadableByteChannel
|
||||
import java.util.concurrent.CompletableFuture
|
||||
|
||||
class MemcacheCache(private val cfg : MemcacheCacheConfiguration) : Cache {
|
||||
private val memcacheClient = MemcacheClient(cfg)
|
||||
|
||||
override fun get(key: String): CompletableFuture<ReadableByteChannel?> {
|
||||
return memcacheClient.get(key)
|
||||
}
|
||||
|
||||
override fun put(key: String, content: ByteBuf): CompletableFuture<Void> {
|
||||
return memcacheClient.put(key, content, cfg.maxAge)
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
memcacheClient.close()
|
||||
}
|
||||
}
|
@@ -1,23 +1,36 @@
|
||||
package net.woggioni.rbcs.server.memcache
|
||||
|
||||
import io.netty.channel.ChannelFactory
|
||||
import io.netty.channel.EventLoopGroup
|
||||
import io.netty.channel.pool.FixedChannelPool
|
||||
import io.netty.channel.socket.DatagramChannel
|
||||
import io.netty.channel.socket.SocketChannel
|
||||
import java.time.Duration
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
import java.util.concurrent.atomic.AtomicInteger
|
||||
import java.util.concurrent.atomic.AtomicReference
|
||||
import net.woggioni.rbcs.api.CacheHandler
|
||||
import net.woggioni.rbcs.api.CacheHandlerFactory
|
||||
import net.woggioni.rbcs.api.Configuration
|
||||
import net.woggioni.rbcs.common.HostAndPort
|
||||
import java.time.Duration
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import net.woggioni.rbcs.server.memcache.client.MemcacheClient
|
||||
|
||||
data class MemcacheCacheConfiguration(
|
||||
val servers: List<Server>,
|
||||
val maxAge: Duration = Duration.ofDays(1),
|
||||
val maxSize: Int = 0x100000,
|
||||
val keyPrefix : String? = null,
|
||||
val digestAlgorithm: String? = null,
|
||||
val compressionMode: CompressionMode? = null,
|
||||
val compressionLevel: Int,
|
||||
) : Configuration.Cache {
|
||||
|
||||
enum class CompressionMode {
|
||||
/**
|
||||
* Gzip mode
|
||||
*/
|
||||
GZIP,
|
||||
companion object {
|
||||
private val log = createLogger<MemcacheCacheConfiguration>()
|
||||
}
|
||||
|
||||
enum class CompressionMode {
|
||||
/**
|
||||
* Deflate mode
|
||||
*/
|
||||
@@ -25,13 +38,65 @@ data class MemcacheCacheConfiguration(
|
||||
}
|
||||
|
||||
data class Server(
|
||||
val endpoint : HostAndPort,
|
||||
val connectionTimeoutMillis : Int?,
|
||||
val maxConnections : Int
|
||||
val endpoint: HostAndPort,
|
||||
val connectionTimeoutMillis: Int?,
|
||||
val maxConnections: Int
|
||||
)
|
||||
|
||||
override fun materialize() = object : CacheHandlerFactory {
|
||||
|
||||
override fun materialize() = MemcacheCache(this)
|
||||
private val connectionPoolMap = ConcurrentHashMap<HostAndPort, FixedChannelPool>()
|
||||
|
||||
override fun newHandler(
|
||||
cfg : Configuration,
|
||||
eventLoop: EventLoopGroup,
|
||||
socketChannelFactory: ChannelFactory<SocketChannel>,
|
||||
datagramChannelFactory: ChannelFactory<DatagramChannel>,
|
||||
): CacheHandler {
|
||||
return MemcacheCacheHandler(
|
||||
MemcacheClient(
|
||||
this@MemcacheCacheConfiguration.servers,
|
||||
cfg.connection.chunkSize,
|
||||
eventLoop,
|
||||
socketChannelFactory,
|
||||
connectionPoolMap
|
||||
),
|
||||
keyPrefix,
|
||||
digestAlgorithm,
|
||||
compressionMode != null,
|
||||
compressionLevel,
|
||||
cfg.connection.chunkSize,
|
||||
maxAge
|
||||
)
|
||||
}
|
||||
|
||||
override fun asyncClose() = object : CompletableFuture<Void>() {
|
||||
init {
|
||||
val failure = AtomicReference<Throwable>(null)
|
||||
val pools = connectionPoolMap.values.toList()
|
||||
val npools = pools.size
|
||||
val finished = AtomicInteger(0)
|
||||
if (pools.isEmpty()) {
|
||||
complete(null)
|
||||
} else {
|
||||
pools.forEach { pool ->
|
||||
pool.closeAsync().addListener {
|
||||
if (!it.isSuccess) {
|
||||
failure.compareAndSet(null, it.cause())
|
||||
}
|
||||
if (finished.incrementAndGet() == npools) {
|
||||
when (val ex = failure.get()) {
|
||||
null -> complete(null)
|
||||
else -> completeExceptionally(ex)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
override fun getNamespaceURI() = "urn:net.woggioni.rbcs.server.memcache"
|
||||
|
||||
|
@@ -0,0 +1,442 @@
package net.woggioni.rbcs.server.memcache

import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.CompositeByteBuf
import io.netty.channel.Channel as NettyChannel
import io.netty.channel.ChannelHandlerContext
import io.netty.handler.codec.memcache.DefaultLastMemcacheContent
import io.netty.handler.codec.memcache.DefaultMemcacheContent
import io.netty.handler.codec.memcache.LastMemcacheContent
import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponseStatus
import io.netty.handler.codec.memcache.binary.DefaultBinaryMemcacheRequest
import java.io.ByteArrayOutputStream
import java.io.ObjectInputStream
import java.io.ObjectOutputStream
import java.nio.ByteBuffer
import java.nio.channels.Channels
import java.nio.channels.FileChannel
import java.nio.channels.ReadableByteChannel
import java.nio.file.Files
import java.nio.file.StandardOpenOption
import java.time.Duration
import java.time.Instant
import java.util.concurrent.CompletableFuture
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.InflaterOutputStream
import net.woggioni.rbcs.api.CacheHandler
import net.woggioni.rbcs.api.CacheValueMetadata
import net.woggioni.rbcs.api.exception.ContentTooLargeException
import net.woggioni.rbcs.api.message.CacheMessage
import net.woggioni.rbcs.api.message.CacheMessage.CacheContent
import net.woggioni.rbcs.api.message.CacheMessage.CacheGetRequest
import net.woggioni.rbcs.api.message.CacheMessage.CachePutRequest
import net.woggioni.rbcs.api.message.CacheMessage.CachePutResponse
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueFoundResponse
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueNotFoundResponse
import net.woggioni.rbcs.api.message.CacheMessage.LastCacheContent
import net.woggioni.rbcs.common.ByteBufInputStream
import net.woggioni.rbcs.common.ByteBufOutputStream
import net.woggioni.rbcs.common.RBCS.processCacheKey
import net.woggioni.rbcs.common.RBCS.toIntOrNull
import net.woggioni.rbcs.common.createLogger
import net.woggioni.rbcs.common.debug
import net.woggioni.rbcs.common.extractChunk
import net.woggioni.rbcs.common.trace
import net.woggioni.rbcs.server.memcache.client.MemcacheClient
import net.woggioni.rbcs.server.memcache.client.MemcacheRequestController
import net.woggioni.rbcs.server.memcache.client.MemcacheResponseHandler

class MemcacheCacheHandler(
    private val client: MemcacheClient,
    private val keyPrefix: String?,
    private val digestAlgorithm: String?,
    private val compressionEnabled: Boolean,
    private val compressionLevel: Int,
    private val chunkSize: Int,
    private val maxAge: Duration
) : CacheHandler() {
    companion object {
        private val log = createLogger<MemcacheCacheHandler>()

        private fun encodeExpiry(expiry: Duration): Int {
            val expirySeconds = expiry.toSeconds()
            return expirySeconds.toInt().takeIf { it.toLong() == expirySeconds }
                ?: Instant.ofEpochSecond(expirySeconds).epochSecond.toInt()
        }
    }

    private interface InProgressRequest {

    }

    private inner class InProgressGetRequest(
        val key: String,
        private val ctx: ChannelHandlerContext
    ) : InProgressRequest {
        private val acc = ctx.alloc().compositeBuffer()
        private val chunk = ctx.alloc().compositeBuffer()
        private val outputStream = ByteBufOutputStream(chunk).let {
            if (compressionEnabled) {
                InflaterOutputStream(it)
            } else {
                it
            }
        }
        private var responseSent = false
        private var metadataSize: Int? = null

        fun write(buf: ByteBuf) {
            acc.addComponent(true, buf.retain())
            if (metadataSize == null && acc.readableBytes() >= Int.SIZE_BYTES) {
                metadataSize = acc.readInt()
            }
            metadataSize
                ?.takeIf { !responseSent }
                ?.takeIf { acc.readableBytes() >= it }
                ?.let { mSize ->
                    val metadata = ObjectInputStream(ByteBufInputStream(acc)).use {
                        acc.retain()
                        it.readObject() as CacheValueMetadata
                    }
                    log.trace(ctx) {
                        "Sending response from cache"
                    }
                    sendMessageAndFlush(ctx, CacheValueFoundResponse(key, metadata))
                    responseSent = true
                    acc.readerIndex(Int.SIZE_BYTES + mSize)
                }
            if (responseSent) {
                acc.readBytes(outputStream, acc.readableBytes())
                if (acc.readableBytes() >= chunkSize) {
                    flush(false)
                }
            }
        }

        private fun flush(last: Boolean) {
            val toSend = extractChunk(chunk, ctx.alloc())
            val msg = if (last) {
                log.trace(ctx) {
                    "Sending last chunk to client"
                }
                LastCacheContent(toSend)
            } else {
                log.trace(ctx) {
                    "Sending chunk to client"
                }
                CacheContent(toSend)
            }
            sendMessageAndFlush(ctx, msg)
        }

        fun commit() {
            acc.release()
            chunk.retain()
            outputStream.close()
            flush(true)
            chunk.release()
        }

        fun rollback() {
            acc.release()
            outputStream.close()
        }
    }

    private inner class InProgressPutRequest(
        private val ch: NettyChannel,
        metadata: CacheValueMetadata,
        val digest: ByteBuf,
        val requestController: CompletableFuture<MemcacheRequestController>,
        private val alloc: ByteBufAllocator
    ) : InProgressRequest {
        private var totalSize = 0
        private var tmpFile: FileChannel? = null
        private val accumulator = alloc.compositeBuffer()
        private val stream = ByteBufOutputStream(accumulator).let {
            if (compressionEnabled) {
                DeflaterOutputStream(it, Deflater(compressionLevel))
            } else {
                it
            }
        }

        init {
            ByteArrayOutputStream().let { baos ->
                ObjectOutputStream(baos).use {
                    it.writeObject(metadata)
                }
                val serializedBytes = baos.toByteArray()
                accumulator.writeInt(serializedBytes.size)
                accumulator.writeBytes(serializedBytes)
            }
        }

        fun write(buf: ByteBuf) {
            totalSize += buf.readableBytes()
            buf.readBytes(stream, buf.readableBytes())
            tmpFile?.let {
                flushToDisk(it, accumulator)
            }
            if (accumulator.readableBytes() > 0x100000) {
                log.debug(ch) {
                    "Entry is too big, buffering it into a file"
                }
                val opts = arrayOf(
                    StandardOpenOption.DELETE_ON_CLOSE,
                    StandardOpenOption.READ,
                    StandardOpenOption.WRITE,
                    StandardOpenOption.TRUNCATE_EXISTING
                )
                FileChannel.open(Files.createTempFile("rbcs-memcache", ".tmp"), *opts).let { fc ->
                    tmpFile = fc
                    flushToDisk(fc, accumulator)
                }
            }
        }

        private fun flushToDisk(fc: FileChannel, buf: CompositeByteBuf) {
            val chunk = extractChunk(buf, alloc)
            fc.write(chunk.nioBuffer())
            chunk.release()
        }

        fun commit(): Pair<Int, ReadableByteChannel> {
            digest.release()
            accumulator.retain()
            stream.close()
            val fileChannel = tmpFile
            return if (fileChannel != null) {
                flushToDisk(fileChannel, accumulator)
                accumulator.release()
                fileChannel.position(0)
                val fileSize = fileChannel.size().toIntOrNull() ?: let {
                    fileChannel.close()
                    throw ContentTooLargeException("Request body is too large", null)
                }
                fileSize to fileChannel
            } else {
                accumulator.readableBytes() to Channels.newChannel(ByteBufInputStream(accumulator))
            }
        }

        fun rollback() {
            stream.close()
            digest.release()
            tmpFile?.close()
        }
    }

    private var inProgressRequest: InProgressRequest? = null

    override fun channelRead0(ctx: ChannelHandlerContext, msg: CacheMessage) {
        when (msg) {
            is CacheGetRequest -> handleGetRequest(ctx, msg)
            is CachePutRequest -> handlePutRequest(ctx, msg)
            is LastCacheContent -> handleLastCacheContent(ctx, msg)
            is CacheContent -> handleCacheContent(ctx, msg)
            else -> ctx.fireChannelRead(msg)
        }
    }

    private fun handleGetRequest(ctx: ChannelHandlerContext, msg: CacheGetRequest) {
        log.debug(ctx) {
            "Fetching ${msg.key} from memcache"
        }
        val key = ctx.alloc().buffer().also {
            it.writeBytes(processCacheKey(msg.key, keyPrefix, digestAlgorithm))
        }
        val responseHandler = object : MemcacheResponseHandler {
            override fun responseReceived(response: BinaryMemcacheResponse) {
                val status = response.status()
                when (status) {
                    BinaryMemcacheResponseStatus.SUCCESS -> {
                        log.debug(ctx) {
                            "Cache hit for key ${msg.key} on memcache"
                        }
                        inProgressRequest = InProgressGetRequest(msg.key, ctx)
                    }

                    BinaryMemcacheResponseStatus.KEY_ENOENT -> {
                        log.debug(ctx) {
                            "Cache miss for key ${msg.key} on memcache"
                        }
                        sendMessageAndFlush(ctx, CacheValueNotFoundResponse(msg.key))
                    }
                }
            }

            override fun contentReceived(content: MemcacheContent) {
                log.trace(ctx) {
                    "${if (content is LastMemcacheContent) "Last chunk" else "Chunk"} of ${
                        content.content().readableBytes()
                    } bytes received from memcache for key ${msg.key}"
                }
                (inProgressRequest as? InProgressGetRequest)?.let { inProgressGetRequest ->
                    inProgressGetRequest.write(content.content())
                    if (content is LastMemcacheContent) {
                        inProgressRequest = null
                        inProgressGetRequest.commit()
                    }
                }
            }

            override fun exceptionCaught(ex: Throwable) {
                (inProgressRequest as? InProgressGetRequest).let { inProgressGetRequest ->
                    inProgressGetRequest?.let {
                        inProgressRequest = null
                        it.rollback()
                    }
                }
                this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
            }
        }
        client.sendRequest(key.retainedDuplicate(), responseHandler).thenAccept { requestHandle ->
            log.trace(ctx) {
                "Sending GET request for key ${msg.key} to memcache"
            }
            val request = DefaultBinaryMemcacheRequest(key).apply {
                setOpcode(BinaryMemcacheOpcodes.GET)
            }
            requestHandle.sendRequest(request)
            requestHandle.sendContent(LastMemcacheContent.EMPTY_LAST_CONTENT)
        }
    }

    private fun handlePutRequest(ctx: ChannelHandlerContext, msg: CachePutRequest) {
        val key = ctx.alloc().buffer().also {
            it.writeBytes(processCacheKey(msg.key, keyPrefix, digestAlgorithm))
        }
        val responseHandler = object : MemcacheResponseHandler {
            override fun responseReceived(response: BinaryMemcacheResponse) {
                val status = response.status()
                when (status) {
                    BinaryMemcacheResponseStatus.SUCCESS -> {
                        log.debug(ctx) {
                            "Inserted key ${msg.key} into memcache"
                        }
                        sendMessageAndFlush(ctx, CachePutResponse(msg.key))
                    }

                    else -> this@MemcacheCacheHandler.exceptionCaught(ctx, MemcacheException(status))
                }
            }

            override fun contentReceived(content: MemcacheContent) {}

            override fun exceptionCaught(ex: Throwable) {
                this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
            }
        }

        val requestController = client.sendRequest(key.retainedDuplicate(), responseHandler).whenComplete { _, ex ->
            ex?.let {
                this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
            }
        }
        inProgressRequest = InProgressPutRequest(ctx.channel(), msg.metadata, key, requestController, ctx.alloc())
    }

    private fun handleCacheContent(ctx: ChannelHandlerContext, msg: CacheContent) {
        val request = inProgressRequest
        when (request) {
            is InProgressPutRequest -> {
                log.trace(ctx) {
                    "Received chunk of ${msg.content().readableBytes()} bytes for memcache"
                }
                request.write(msg.content())
            }

            is InProgressGetRequest -> {
                msg.release()
            }
        }
    }

    private fun handleLastCacheContent(ctx: ChannelHandlerContext, msg: LastCacheContent) {
        val request = inProgressRequest
        when (request) {
            is InProgressPutRequest -> {
                inProgressRequest = null
                log.trace(ctx) {
                    "Received last chunk of ${msg.content().readableBytes()} bytes for memcache"
                }
                request.write(msg.content())
                val key = request.digest.retainedDuplicate()
                val (payloadSize, payloadSource) = request.commit()
                val extras = ctx.alloc().buffer(8, 8)
                extras.writeInt(0)
                extras.writeInt(encodeExpiry(maxAge))
                val totalBodyLength = request.digest.readableBytes() + extras.readableBytes() + payloadSize
                log.trace(ctx) {
                    "Trying to send SET request to memcache"
                }
                request.requestController.whenComplete { requestController, ex ->
                    if (ex == null) {
                        log.trace(ctx) {
                            "Sending SET request to memcache"
                        }
                        requestController.sendRequest(DefaultBinaryMemcacheRequest().apply {
                            setOpcode(BinaryMemcacheOpcodes.SET)
                            setKey(key)
                            setExtras(extras)
                            setTotalBodyLength(totalBodyLength)
                        })
                        log.trace(ctx) {
                            "Sending request payload to memcache"
                        }
                        payloadSource.use { source ->
                            val bb = ByteBuffer.allocate(chunkSize)
                            while (true) {
                                val read = source.read(bb)
                                bb.limit()
                                if (read >= 0 && bb.position() < chunkSize && bb.hasRemaining()) {
                                    continue
                                }
                                val chunk = ctx.alloc().buffer(chunkSize)
                                bb.flip()
                                chunk.writeBytes(bb)
                                bb.clear()
                                log.trace(ctx) {
                                    "Sending ${chunk.readableBytes()} bytes chunk to memcache"
                                }
                                if (read < 0) {
                                    requestController.sendContent(DefaultLastMemcacheContent(chunk))
                                    break
                                } else {
                                    requestController.sendContent(DefaultMemcacheContent(chunk))
                                }
                            }
                        }
                    } else {
                        payloadSource.close()
                    }
                }
            }
        }
    }

    override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
        val request = inProgressRequest
        when (request) {
            is InProgressPutRequest -> {
                inProgressRequest = null
                request.requestController.thenAccept { controller ->
                    controller.exceptionCaught(cause)
                }
                request.rollback()
            }

            is InProgressGetRequest -> {
                inProgressRequest = null
                request.rollback()
            }
        }
        super.exceptionCaught(ctx, cause)
    }
}
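The put path above Java-serializes the CacheValueMetadata, prefixes it with a 4-byte length, and appends the payload (deflate-compressed when compression is enabled); the get path reads the same layout back. A minimal, self-contained sketch of that layout follows; the helper name and the use of plain byte arrays are illustrative only and not part of the module.

import java.io.ByteArrayOutputStream
import java.io.DataOutputStream
import java.io.ObjectOutputStream
import java.io.Serializable
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream

// Hypothetical helper reproducing the value layout written by InProgressPutRequest:
// [4-byte length][Java-serialized metadata][payload, deflated when compression is enabled]
fun encodeEntry(metadata: Serializable, payload: ByteArray, compress: Boolean, level: Int = Deflater.DEFAULT_COMPRESSION): ByteArray {
    // Serialize the metadata up front, exactly once
    val serializedMetadata = ByteArrayOutputStream().also { baos ->
        ObjectOutputStream(baos).use { it.writeObject(metadata) }
    }.toByteArray()
    val out = ByteArrayOutputStream()
    DataOutputStream(out).use { dos ->
        dos.writeInt(serializedMetadata.size)   // length prefix, as accumulator.writeInt does
        dos.write(serializedMetadata)           // metadata is stored uncompressed
        if (compress) {
            // the payload alone goes through a DeflaterOutputStream, mirroring the stream field
            DeflaterOutputStream(dos, Deflater(level)).use { it.write(payload) }
        } else {
            dos.write(payload)
        }
    }
    return out.toByteArray()
}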
@@ -1,16 +1,16 @@
package net.woggioni.rbcs.server.memcache

import java.time.Duration
import java.time.temporal.ChronoUnit
import net.woggioni.rbcs.api.CacheProvider
import net.woggioni.rbcs.api.exception.ConfigurationException
import net.woggioni.rbcs.common.RBCS
import net.woggioni.rbcs.common.HostAndPort
import net.woggioni.rbcs.common.RBCS
import net.woggioni.rbcs.common.Xml
import net.woggioni.rbcs.common.Xml.Companion.asIterable
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
import org.w3c.dom.Document
import org.w3c.dom.Element
import java.time.Duration
import java.time.temporal.ChronoUnit


class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
@@ -28,18 +28,17 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
        val maxAge = el.renderAttribute("max-age")
            ?.let(Duration::parse)
            ?: Duration.ofDays(1)
        val maxSize = el.renderAttribute("max-size")
            ?.let(String::toInt)
            ?: 0x100000
        val compressionLevel = el.renderAttribute("compression-level")
            ?.let(Integer::decode)
            ?: -1
        val compressionMode = el.renderAttribute("compression-mode")
            ?.let {
                when (it) {
                    "gzip" -> MemcacheCacheConfiguration.CompressionMode.GZIP
                    "deflate" -> MemcacheCacheConfiguration.CompressionMode.DEFLATE
                    else -> MemcacheCacheConfiguration.CompressionMode.DEFLATE
                }
            }
            ?: MemcacheCacheConfiguration.CompressionMode.DEFLATE
        val keyPrefix = el.renderAttribute("key-prefix")
        val digestAlgorithm = el.renderAttribute("digest")
        for (child in el.asIterable()) {
            when (child.nodeName) {
@@ -56,13 +55,13 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
                }
            }
        }

        return MemcacheCacheConfiguration(
            servers,
            maxAge,
            maxSize,
            keyPrefix,
            digestAlgorithm,
            compressionMode,
            compressionLevel
        )
    }

@@ -70,7 +69,6 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
        val result = doc.createElement("cache")
        Xml.of(doc, result) {
            attr("xmlns:${xmlNamespacePrefix}", xmlNamespace, namespaceURI = "http://www.w3.org/2000/xmlns/")

            attr("xs:type", "${xmlNamespacePrefix}:$xmlType", RBCS.XML_SCHEMA_NAMESPACE_URI)
            for (server in servers) {
                node("server") {
@@ -81,20 +79,23 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
                    }
                    attr("max-connections", server.maxConnections.toString())
                }

            }
            attr("max-age", maxAge.toString())
            attr("max-size", maxSize.toString())
            keyPrefix?.let {
                attr("key-prefix", it)
            }
            digestAlgorithm?.let { digestAlgorithm ->
                attr("digest", digestAlgorithm)
            }
            compressionMode?.let { compressionMode ->
                attr(
                    "compression-mode", when (compressionMode) {
                        MemcacheCacheConfiguration.CompressionMode.GZIP -> "gzip"
                        MemcacheCacheConfiguration.CompressionMode.DEFLATE -> "deflate"
                    }
                )
            }
            attr("compression-level", compressionLevel.toString())
        }
        result
    }
@@ -3,68 +3,52 @@ package net.woggioni.rbcs.server.memcache.client

import io.netty.bootstrap.Bootstrap
import io.netty.buffer.ByteBuf
import io.netty.buffer.Unpooled
import io.netty.channel.Channel
import io.netty.channel.ChannelFactory
import io.netty.channel.ChannelFutureListener
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelOption
import io.netty.channel.ChannelPipeline
import io.netty.channel.EventLoopGroup
import io.netty.channel.SimpleChannelInboundHandler
import io.netty.channel.nio.NioEventLoopGroup
import io.netty.channel.pool.AbstractChannelPoolHandler
import io.netty.channel.pool.ChannelPool
import io.netty.channel.pool.FixedChannelPool
import io.netty.channel.socket.nio.NioSocketChannel
import io.netty.handler.codec.DecoderException
import io.netty.channel.socket.SocketChannel
import io.netty.handler.codec.memcache.LastMemcacheContent
import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.MemcacheObject
import io.netty.handler.codec.memcache.binary.BinaryMemcacheClientCodec
import io.netty.handler.codec.memcache.binary.BinaryMemcacheObjectAggregator
import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponseStatus
import io.netty.handler.codec.memcache.binary.DefaultFullBinaryMemcacheRequest
import io.netty.handler.codec.memcache.binary.FullBinaryMemcacheRequest
import io.netty.handler.codec.memcache.binary.FullBinaryMemcacheResponse
import io.netty.handler.codec.memcache.binary.BinaryMemcacheRequest
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
import io.netty.util.concurrent.Future as NettyFuture
import io.netty.util.concurrent.GenericFutureListener
import net.woggioni.rbcs.common.ByteBufInputStream
import net.woggioni.rbcs.common.ByteBufOutputStream
import net.woggioni.rbcs.common.RBCS.digest
import net.woggioni.rbcs.common.HostAndPort
import net.woggioni.rbcs.common.contextLogger
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration
import net.woggioni.rbcs.server.memcache.MemcacheException
import net.woggioni.jwo.JWO
import java.io.IOException
import java.net.InetSocketAddress
import java.nio.channels.Channels
import java.nio.channels.ReadableByteChannel
import java.security.MessageDigest
import java.time.Duration
import java.time.Instant
import java.util.concurrent.CompletableFuture
import java.util.concurrent.ConcurrentHashMap
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.GZIPInputStream
import java.util.zip.GZIPOutputStream
import java.util.zip.InflaterInputStream
import io.netty.util.concurrent.Future as NettyFuture
import net.woggioni.rbcs.common.HostAndPort
import net.woggioni.rbcs.common.createLogger
import net.woggioni.rbcs.common.trace
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration
import net.woggioni.rbcs.server.memcache.MemcacheCacheHandler


class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseable {
class MemcacheClient(
    private val servers: List<MemcacheCacheConfiguration.Server>,
    private val chunkSize : Int,
    private val group: EventLoopGroup,
    private val channelFactory: ChannelFactory<SocketChannel>,
    private val connectionPool: ConcurrentHashMap<HostAndPort, FixedChannelPool>
) : AutoCloseable {

    private companion object {
        @JvmStatic
        private val log = contextLogger()
    }

    private val group: NioEventLoopGroup
    private val connectionPool: MutableMap<HostAndPort, ChannelPool> = ConcurrentHashMap()

    init {
        group = NioEventLoopGroup()
        private val log = createLogger<MemcacheCacheHandler>()
    }

    private fun newConnectionPool(server: MemcacheCacheConfiguration.Server): FixedChannelPool {
        val bootstrap = Bootstrap().apply {
            group(group)
            channel(NioSocketChannel::class.java)
            channelFactory(channelFactory)
            option(ChannelOption.SO_KEEPALIVE, true)
            remoteAddress(InetSocketAddress(server.endpoint.host, server.endpoint.port))
            server.connectionTimeoutMillis?.let {
@@ -75,35 +59,33 @@ class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseabl

            override fun channelCreated(ch: Channel) {
                val pipeline: ChannelPipeline = ch.pipeline()
                pipeline.addLast(BinaryMemcacheClientCodec())
                pipeline.addLast(BinaryMemcacheObjectAggregator(cfg.maxSize))
                pipeline.addLast(BinaryMemcacheClientCodec(chunkSize, true))
            }
        }
        return FixedChannelPool(bootstrap, channelPoolHandler, server.maxConnections)
    }


    private fun sendRequest(request: FullBinaryMemcacheRequest): CompletableFuture<FullBinaryMemcacheResponse> {

        val server = cfg.servers.let { servers ->
            if (servers.size > 1) {
                val key = request.key().duplicate()
                var checksum = 0
                while (key.readableBytes() > 4) {
                    val byte = key.readInt()
                    checksum = checksum xor byte
                }
                while (key.readableBytes() > 0) {
                    val byte = key.readByte()
                    checksum = checksum xor byte.toInt()
                }
                servers[checksum % servers.size]
            } else {
                servers.first()
    fun sendRequest(
        key: ByteBuf,
        responseHandler: MemcacheResponseHandler
    ): CompletableFuture<MemcacheRequestController> {
        val server = if (servers.size > 1) {
            var checksum = 0
            while (key.readableBytes() > 4) {
                val byte = key.readInt()
                checksum = checksum xor byte
            }
            while (key.readableBytes() > 0) {
                val byte = key.readByte()
                checksum = checksum xor byte.toInt()
            }
            servers[checksum % servers.size]
        } else {
            servers.first()
        }
        key.release()

        val response = CompletableFuture<FullBinaryMemcacheResponse>()
        val response = CompletableFuture<MemcacheRequestController>()
        // Custom handler for processing responses
        val pool = connectionPool.computeIfAbsent(server.endpoint) {
            newConnectionPool(server)
@@ -112,32 +94,92 @@ class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseabl
            override fun operationComplete(channelFuture: NettyFuture<Channel>) {
                if (channelFuture.isSuccess) {
                    val channel = channelFuture.now
                    var connectionClosedByTheRemoteServer = true
                    val closeCallback = {
                        if (connectionClosedByTheRemoteServer) {
                            val ex = IOException("The memcache server closed the connection")
                            val completed = response.completeExceptionally(ex)
                            if(!completed) responseHandler.exceptionCaught(ex)
                        }
                    }
                    val closeListener = ChannelFutureListener {
                        closeCallback()
                    }
                    channel.closeFuture().addListener(closeListener)
                    val pipeline = channel.pipeline()
                    channel.pipeline()
                        .addLast("client-handler", object : SimpleChannelInboundHandler<FullBinaryMemcacheResponse>() {
                            override fun channelRead0(
                                ctx: ChannelHandlerContext,
                                msg: FullBinaryMemcacheResponse
                            ) {
                                pipeline.removeLast()
                                pool.release(channel)
                                msg.touch("The method's caller must remember to release this")
                                response.complete(msg.retain())
                            }
                    val handler = object : SimpleChannelInboundHandler<MemcacheObject>() {

                            override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
                                val ex = when (cause) {
                                    is DecoderException -> cause.cause!!
                                    else -> cause
                        override fun handlerAdded(ctx: ChannelHandlerContext) {
                            channel.closeFuture().removeListener(closeListener)
                        }

                        override fun channelRead0(
                            ctx: ChannelHandlerContext,
                            msg: MemcacheObject
                        ) {
                            when (msg) {
                                is BinaryMemcacheResponse -> {
                                    responseHandler.responseReceived(msg)
                                }

                                is LastMemcacheContent -> {
                                    responseHandler.contentReceived(msg)
                                    pipeline.remove(this)
                                }

                                is MemcacheContent -> {
                                    responseHandler.contentReceived(msg)
                                }
                                ctx.close()
                                pipeline.removeLast()
                                pool.release(channel)
                                response.completeExceptionally(ex)
                            }
                        })
                    request.touch()
                    channel.writeAndFlush(request)
                        }

                        override fun channelInactive(ctx: ChannelHandlerContext) {
                            closeCallback()
                            ctx.fireChannelInactive()
                        }

                        override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
                            connectionClosedByTheRemoteServer = false
                            ctx.close()
                            responseHandler.exceptionCaught(cause)
                        }
                    }

                    channel.pipeline().addLast(handler)
                    response.complete(object : MemcacheRequestController {
                        private var channelReleased = false

                        override fun sendRequest(request: BinaryMemcacheRequest) {
                            channel.writeAndFlush(request)
                        }

                        override fun sendContent(content: MemcacheContent) {
                            channel.writeAndFlush(content).addListener {
                                if(content is LastMemcacheContent) {
                                    if(!channelReleased) {
                                        pool.release(channel)
                                        channelReleased = true
                                        log.trace(channel) {
                                            "Channel released"
                                        }
                                    }
                                }
                            }
                        }

                        override fun exceptionCaught(ex: Throwable) {
                            log.warn(ex.message, ex)
                            connectionClosedByTheRemoteServer = false
                            channel.close()
                            if(!channelReleased) {
                                pool.release(channel)
                                channelReleased = true
                                log.trace(channel) {
                                    "Channel released"
                                }
                            }
                        }
                    })
                } else {
                    response.completeExceptionally(channelFuture.cause())
                }
@@ -146,107 +188,6 @@ class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseabl
        return response
    }

    private fun encodeExpiry(expiry: Duration): Int {
        val expirySeconds = expiry.toSeconds()
        return expirySeconds.toInt().takeIf { it.toLong() == expirySeconds }
            ?: Instant.ofEpochSecond(expirySeconds).epochSecond.toInt()
    }

    fun get(key: String): CompletableFuture<ReadableByteChannel?> {
        val request = (cfg.digestAlgorithm
            ?.let(MessageDigest::getInstance)
            ?.let { md ->
                digest(key.toByteArray(), md)
            } ?: key.toByteArray(Charsets.UTF_8)).let { digest ->
            DefaultFullBinaryMemcacheRequest(Unpooled.wrappedBuffer(digest), null).apply {
                setOpcode(BinaryMemcacheOpcodes.GET)
            }
        }
        return sendRequest(request).thenApply { response ->
            try {
                when (val status = response.status()) {
                    BinaryMemcacheResponseStatus.SUCCESS -> {
                        val compressionMode = cfg.compressionMode
                        val content = response.content().retain()
                        content.touch()
                        if (compressionMode != null) {
                            when (compressionMode) {
                                MemcacheCacheConfiguration.CompressionMode.GZIP -> {
                                    GZIPInputStream(ByteBufInputStream(content))
                                }

                                MemcacheCacheConfiguration.CompressionMode.DEFLATE -> {
                                    InflaterInputStream(ByteBufInputStream(content))
                                }
                            }
                        } else {
                            ByteBufInputStream(content)
                        }.let(Channels::newChannel)
                    }

                    BinaryMemcacheResponseStatus.KEY_ENOENT -> {
                        null
                    }

                    else -> throw MemcacheException(status)
                }
            } finally {
                response.release()
            }
        }
    }

    fun put(key: String, content: ByteBuf, expiry: Duration, cas: Long? = null): CompletableFuture<Void> {
        val request = (cfg.digestAlgorithm
            ?.let(MessageDigest::getInstance)
            ?.let { md ->
                digest(key.toByteArray(), md)
            } ?: key.toByteArray(Charsets.UTF_8)).let { digest ->
            val extras = Unpooled.buffer(8, 8)
            extras.writeInt(0)
            extras.writeInt(encodeExpiry(expiry))
            val compressionMode = cfg.compressionMode
            content.retain()
            val payload = if (compressionMode != null) {
                val inputStream = ByteBufInputStream(content)
                val buf = content.alloc().buffer()
                buf.retain()
                val outputStream = when (compressionMode) {
                    MemcacheCacheConfiguration.CompressionMode.GZIP -> {
                        GZIPOutputStream(ByteBufOutputStream(buf))
                    }

                    MemcacheCacheConfiguration.CompressionMode.DEFLATE -> {
                        DeflaterOutputStream(ByteBufOutputStream(buf), Deflater(Deflater.DEFAULT_COMPRESSION, false))
                    }
                }
                inputStream.use { i ->
                    outputStream.use { o ->
                        JWO.copy(i, o)
                    }
                }
                buf
            } else {
                content
            }
            DefaultFullBinaryMemcacheRequest(Unpooled.wrappedBuffer(digest), extras, payload).apply {
                setOpcode(BinaryMemcacheOpcodes.SET)
                cas?.let(this::setCas)
            }
        }
        return sendRequest(request).thenApply { response ->
            try {
                when (val status = response.status()) {
                    BinaryMemcacheResponseStatus.SUCCESS -> null
                    else -> throw MemcacheException(status)
                }
            } finally {
                response.release()
            }
        }
    }


    fun shutDown(): NettyFuture<*> {
        return group.shutdownGracefully()
    }
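The multi-server branch of sendRequest picks a backend by XOR-folding the processed key and indexing into the server list. A standalone sketch of that selection, extracted here for clarity: the function name is illustrative, and unlike the original's plain %, floorMod is used to keep the index non-negative.

import io.netty.buffer.ByteBuf

// Illustrative extraction of the server-selection logic used in sendRequest
fun selectServerIndex(key: ByteBuf, serverCount: Int): Int {
    if (serverCount <= 1) return 0
    val copy = key.duplicate()                     // do not disturb the caller's reader index
    var checksum = 0
    while (copy.readableBytes() > 4) {
        checksum = checksum xor copy.readInt()     // fold four bytes at a time
    }
    while (copy.readableBytes() > 0) {
        checksum = checksum xor copy.readByte().toInt()
    }
    return Math.floorMod(checksum, serverCount)
}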
@@ -0,0 +1,13 @@
package net.woggioni.rbcs.server.memcache.client

import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.binary.BinaryMemcacheRequest

interface MemcacheRequestController {

    fun sendRequest(request : BinaryMemcacheRequest)

    fun sendContent(content : MemcacheContent)

    fun exceptionCaught(ex : Throwable)
}
@@ -0,0 +1,14 @@
package net.woggioni.rbcs.server.memcache.client

import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse

interface MemcacheResponseHandler {


    fun responseReceived(response : BinaryMemcacheResponse)

    fun contentReceived(content : MemcacheContent)

    fun exceptionCaught(ex : Throwable)
}
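Taken together, MemcacheRequestController (outbound) and MemcacheResponseHandler (inbound) form the request/response contract that MemcacheCacheHandler relies on. A hedged usage sketch follows, assuming a MemcacheClient wired as in this changeset; the key argument and the println bodies are illustrative only.

import io.netty.buffer.Unpooled
import io.netty.handler.codec.memcache.LastMemcacheContent
import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
import io.netty.handler.codec.memcache.binary.DefaultBinaryMemcacheRequest

// Illustrative GET round trip: callbacks arrive on the response handler,
// while the controller returned by sendRequest is used to stream the request out.
fun issueGet(client: MemcacheClient, rawKey: ByteArray) {
    val handler = object : MemcacheResponseHandler {
        // Response header (status, opcode, ...)
        override fun responseReceived(response: BinaryMemcacheResponse) = println("status=${response.status()}")
        // Zero or more value chunks, the last one being a LastMemcacheContent
        override fun contentReceived(content: MemcacheContent) = println("chunk of ${content.content().readableBytes()} bytes")
        override fun exceptionCaught(ex: Throwable) = ex.printStackTrace()
    }
    val key = Unpooled.wrappedBuffer(rawKey)
    client.sendRequest(key.retainedDuplicate(), handler).thenAccept { controller ->
        controller.sendRequest(DefaultBinaryMemcacheRequest(key).apply { setOpcode(BinaryMemcacheOpcodes.GET) })
        controller.sendContent(LastMemcacheContent.EMPTY_LAST_CONTENT)
    }
}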
@@ -4,7 +4,7 @@
          xmlns:rbcs="urn:net.woggioni.rbcs.server"
          xmlns:xs="http://www.w3.org/2001/XMLSchema">

    <xs:import schemaLocation="jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs.xsd" namespace="urn:net.woggioni.rbcs.server"/>
    <xs:import schemaLocation="jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd" namespace="urn:net.woggioni.rbcs.server"/>

    <xs:complexType name="memcacheServerType">
        <xs:attribute name="host" type="xs:token" use="required"/>
@@ -20,9 +20,18 @@
                    <xs:element name="server" type="rbcs-memcache:memcacheServerType"/>
                </xs:sequence>
                <xs:attribute name="max-age" type="xs:duration" default="P1D"/>
                <xs:attribute name="max-size" type="xs:unsignedInt" default="1048576"/>
                <xs:attribute name="digest" type="xs:token" />
                <xs:attribute name="chunk-size" type="rbcs:byteSizeType" default="0x10000"/>
                <xs:attribute name="key-prefix" type="xs:string" use="optional">
                    <xs:annotation>
                        <xs:documentation>
                            Prepend this string to all the keys inserted in memcache,
                            useful in case the caching backend is shared with other applications
                        </xs:documentation>
                    </xs:annotation>
                </xs:attribute>
                <xs:attribute name="digest" type="xs:token"/>
                <xs:attribute name="compression-mode" type="rbcs-memcache:compressionType"/>
                <xs:attribute name="compression-level" type="rbcs:compressionLevelType" default="-1"/>
            </xs:extension>
        </xs:complexContent>
    </xs:complexType>
@@ -30,7 +39,6 @@
    <xs:simpleType name="compressionType">
        <xs:restriction base="xs:token">
            <xs:enumeration value="deflate"/>
            <xs:enumeration value="gzip"/>
        </xs:restriction>
    </xs:simpleType>

@@ -0,0 +1,27 @@
package net.woggioni.rbcs.server.memcache.client

import io.netty.buffer.ByteBufUtil
import io.netty.buffer.Unpooled
import java.io.ByteArrayInputStream
import java.nio.ByteBuffer
import java.nio.channels.Channels
import kotlin.random.Random
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Test

class ByteBufferTest {

    @Test
    fun test() {
        val byteBuffer = ByteBuffer.allocate(0x100)
        val originalBytes = Random(101325).nextBytes(0x100)
        Channels.newChannel(ByteArrayInputStream(originalBytes)).use { source ->
            source.read(byteBuffer)
        }
        byteBuffer.flip()
        val buf = Unpooled.buffer()
        buf.writeBytes(byteBuffer)
        val finalBytes = ByteBufUtil.getBytes(buf)
        Assertions.assertArrayEquals(originalBytes, finalBytes)
    }
}
@@ -9,6 +9,9 @@ dependencies {
    implementation catalog.jwo
    implementation catalog.slf4j.api
    implementation catalog.netty.codec.http
    implementation catalog.netty.handler
    implementation catalog.netty.buffer
    implementation catalog.netty.transport

    api project(':rbcs-common')
    api project(':rbcs-api')
@@ -36,3 +39,4 @@ publishing {
}



@@ -3,27 +3,27 @@ import net.woggioni.rbcs.server.cache.FileSystemCacheProvider;
import net.woggioni.rbcs.server.cache.InMemoryCacheProvider;

module net.woggioni.rbcs.server {
    requires java.sql;
    requires java.xml;
    requires java.logging;
    requires java.naming;
    requires kotlin.stdlib;
    requires io.netty.buffer;
    requires io.netty.transport;
    requires io.netty.codec.http;
    requires io.netty.common;
    requires io.netty.handler;
    requires io.netty.codec;
    requires org.slf4j;
    requires net.woggioni.jwo;
    requires net.woggioni.rbcs.common;
    requires net.woggioni.rbcs.api;
    requires io.netty.codec.compression;
    requires io.netty.transport;
    requires io.netty.buffer;
    requires io.netty.common;
    requires io.netty.codec;
    requires org.slf4j;

    exports net.woggioni.rbcs.server;

    opens net.woggioni.rbcs.server;
    opens net.woggioni.rbcs.server.schema;


    uses CacheProvider;
    provides CacheProvider with FileSystemCacheProvider, InMemoryCacheProvider;
}
@@ -1,30 +0,0 @@
package net.woggioni.rbcs.server

import io.netty.channel.ChannelHandlerContext
import org.slf4j.Logger
import java.net.InetSocketAddress

inline fun Logger.trace(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isTraceEnabled }, { trace(it) } , messageBuilder)
}
inline fun Logger.debug(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isDebugEnabled }, { debug(it) } , messageBuilder)
}
inline fun Logger.info(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isInfoEnabled }, { info(it) } , messageBuilder)
}
inline fun Logger.warn(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isWarnEnabled }, { warn(it) } , messageBuilder)
}
inline fun Logger.error(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isErrorEnabled }, { error(it) } , messageBuilder)
}

inline fun log(log : Logger, ctx : ChannelHandlerContext,
               filter : Logger.() -> Boolean,
               loggerMethod : Logger.(String) -> Unit, messageBuilder : () -> String) {
    if(log.filter()) {
        val clientAddress = (ctx.channel().remoteAddress() as InetSocketAddress).address.hostAddress
        log.loggerMethod(clientAddress + " - " + messageBuilder())
    }
}
@@ -3,6 +3,7 @@ package net.woggioni.rbcs.server
import io.netty.bootstrap.ServerBootstrap
import io.netty.buffer.ByteBuf
import io.netty.channel.Channel
import io.netty.channel.ChannelFactory
import io.netty.channel.ChannelFuture
import io.netty.channel.ChannelHandler.Sharable
import io.netty.channel.ChannelHandlerContext
@@ -10,13 +11,19 @@ import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.channel.ChannelInitializer
import io.netty.channel.ChannelOption
import io.netty.channel.ChannelPromise
import io.netty.channel.nio.NioEventLoopGroup
import io.netty.channel.MultiThreadIoEventLoopGroup
import io.netty.channel.nio.NioIoHandler
import io.netty.channel.socket.DatagramChannel
import io.netty.channel.socket.ServerSocketChannel
import io.netty.channel.socket.SocketChannel
import io.netty.channel.socket.nio.NioDatagramChannel
import io.netty.channel.socket.nio.NioServerSocketChannel
import io.netty.channel.socket.nio.NioSocketChannel
import io.netty.handler.codec.compression.CompressionOptions
import io.netty.handler.codec.http.DefaultHttpContent
import io.netty.handler.codec.http.HttpContentCompressor
import io.netty.handler.codec.http.HttpDecoderConfig
import io.netty.handler.codec.http.HttpHeaderNames
import io.netty.handler.codec.http.HttpObjectAggregator
import io.netty.handler.codec.http.HttpRequest
import io.netty.handler.codec.http.HttpServerCodec
import io.netty.handler.ssl.ClientAuth
@@ -28,52 +35,58 @@ import io.netty.handler.timeout.IdleState
import io.netty.handler.timeout.IdleStateEvent
import io.netty.handler.timeout.IdleStateHandler
import io.netty.util.AttributeKey
import io.netty.util.concurrent.DefaultEventExecutorGroup
import io.netty.util.concurrent.EventExecutorGroup
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.api.exception.ConfigurationException
import net.woggioni.rbcs.common.RBCS.toUrl
import net.woggioni.rbcs.common.PasswordSecurity.decodePasswordHash
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
import net.woggioni.rbcs.common.Xml
import net.woggioni.rbcs.common.contextLogger
import net.woggioni.rbcs.common.debug
import net.woggioni.rbcs.common.info
import net.woggioni.rbcs.server.auth.AbstractNettyHttpAuthenticator
import net.woggioni.rbcs.server.auth.Authorizer
import net.woggioni.rbcs.server.auth.ClientCertificateValidator
import net.woggioni.rbcs.server.auth.RoleAuthorizer
import net.woggioni.rbcs.server.configuration.Parser
import net.woggioni.rbcs.server.configuration.Serializer
import net.woggioni.rbcs.server.exception.ExceptionHandler
import net.woggioni.rbcs.server.handler.ServerHandler
import net.woggioni.rbcs.server.throttling.ThrottlingHandler
import net.woggioni.jwo.JWO
import net.woggioni.jwo.Tuple2
import java.io.OutputStream
import java.net.InetSocketAddress
import java.nio.file.Files
import java.nio.file.Path
import java.security.KeyStore
import java.security.PrivateKey
import java.security.cert.X509Certificate
import java.time.Duration
import java.time.Instant
import java.util.Arrays
import java.util.Base64
import java.util.concurrent.CompletableFuture
import java.util.concurrent.Future
import java.util.concurrent.TimeUnit
import java.util.concurrent.TimeoutException
import java.util.regex.Matcher
import java.util.regex.Pattern
import javax.naming.ldap.LdapName
import javax.net.ssl.SSLPeerUnverifiedException
import net.woggioni.rbcs.api.AsyncCloseable
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.api.exception.ConfigurationException
import net.woggioni.rbcs.common.PasswordSecurity.decodePasswordHash
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
import net.woggioni.rbcs.common.RBCS.getTrustManager
import net.woggioni.rbcs.common.RBCS.loadKeystore
import net.woggioni.rbcs.common.RBCS.toUrl
import net.woggioni.rbcs.common.Xml
import net.woggioni.rbcs.common.createLogger
import net.woggioni.rbcs.common.debug
import net.woggioni.rbcs.common.info
import net.woggioni.rbcs.server.auth.AbstractNettyHttpAuthenticator
import net.woggioni.rbcs.server.auth.Authorizer
import net.woggioni.rbcs.server.auth.RoleAuthorizer
import net.woggioni.rbcs.server.configuration.Parser
import net.woggioni.rbcs.server.configuration.Serializer
import net.woggioni.rbcs.server.exception.ExceptionHandler
import net.woggioni.rbcs.server.handler.MaxRequestSizeHandler
import net.woggioni.rbcs.server.handler.ReadTriggerDuplexHandler
import net.woggioni.rbcs.server.handler.ServerHandler
import net.woggioni.rbcs.server.throttling.BucketManager
import net.woggioni.rbcs.server.throttling.ThrottlingHandler

class RemoteBuildCacheServer(private val cfg: Configuration) {
    private val log = contextLogger()

    companion object {
        private val log = createLogger<RemoteBuildCacheServer>()

        val userAttribute: AttributeKey<Configuration.User> = AttributeKey.valueOf("user")
        val groupAttribute: AttributeKey<Set<Configuration.Group>> = AttributeKey.valueOf("group")

        val DEFAULT_CONFIGURATION_URL by lazy { "classpath:net/woggioni/rbcs/server/rbcs-default.xml".toUrl() }
        val DEFAULT_CONFIGURATION_URL by lazy { "jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/rbcs-default.xml".toUrl() }
        private const val SSL_HANDLER_NAME = "sslHandler"

        fun loadConfiguration(configurationFile: Path): Configuration {
@@ -128,11 +141,12 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
                    val clientCertificate = peerCertificates.first() as X509Certificate
                    val user = userExtractor?.extract(clientCertificate)
                    val group = groupExtractor?.extract(clientCertificate)
                    val allGroups = ((user?.groups ?: emptySet()).asSequence() + sequenceOf(group).filterNotNull()).toSet()
                    val allGroups =
                        ((user?.groups ?: emptySet()).asSequence() + sequenceOf(group).filterNotNull()).toSet()
                    AuthenticationResult(user, allGroups)
                } ?: anonymousUserGroups?.let{ AuthenticationResult(null, it) }
                } ?: anonymousUserGroups?.let { AuthenticationResult(null, it) }
            } catch (es: SSLPeerUnverifiedException) {
                anonymousUserGroups?.let{ AuthenticationResult(null, it) }
                anonymousUserGroups?.let { AuthenticationResult(null, it) }
            }
        }
    }
@@ -141,7 +155,9 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
    private class NettyHttpBasicAuthenticator(
        private val users: Map<String, Configuration.User>, authorizer: Authorizer
    ) : AbstractNettyHttpAuthenticator(authorizer) {
        private val log = contextLogger()
        companion object {
            private val log = createLogger<NettyHttpBasicAuthenticator>()
        }

        override fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): AuthenticationResult? {
            val authorizationHeader = req.headers()[HttpHeaderNames.AUTHORIZATION] ?: let {
@@ -190,8 +206,9 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {

    private class ServerInitializer(
        private val cfg: Configuration,
        private val eventExecutorGroup: EventExecutorGroup
    ) : ChannelInitializer<Channel>() {
        private val channelFactory : ChannelFactory<SocketChannel>,
        private val datagramChannelFactory : ChannelFactory<DatagramChannel>,
    ) : ChannelInitializer<Channel>(), AsyncCloseable {

        companion object {
            private fun createSslCtx(tls: Configuration.Tls): SslContext {
@@ -211,9 +228,9 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
                    val clientAuth = tls.trustStore?.let { trustStore ->
                        val ts = loadKeystore(trustStore.file, trustStore.password)
                        trustManager(
                            ClientCertificateValidator.getTrustManager(ts, trustStore.isCheckCertificateStatus)
                            getTrustManager(ts, trustStore.isCheckCertificateStatus)
                        )
                        if(trustStore.isRequireClientCertificate) ClientAuth.REQUIRE
                        if (trustStore.isRequireClientCertificate) ClientAuth.REQUIRE
                        else ClientAuth.OPTIONAL
                    } ?: ClientAuth.NONE
                    clientAuth(clientAuth)
@@ -221,38 +238,12 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
                }
            }

            fun loadKeystore(file: Path, password: String?): KeyStore {
                val ext = JWO.splitExtension(file)
                    .map(Tuple2<String, String>::get_2)
                    .orElseThrow {
                        IllegalArgumentException(
                            "Keystore file '${file}' must have .jks, .p12, .pfx extension"
                        )
                    }
                val keystore = when (ext.substring(1).lowercase()) {
                    "jks" -> KeyStore.getInstance("JKS")
                    "p12", "pfx" -> KeyStore.getInstance("PKCS12")
                    else -> throw IllegalArgumentException(
                        "Keystore file '${file}' must have .jks, .p12, .pfx extension"
                    )
                }
                Files.newInputStream(file).use {
                    keystore.load(it, password?.let(String::toCharArray))
                }
                return keystore
            }
            private val log = createLogger<ServerInitializer>()
        }

        private val log = contextLogger()
        private val cacheHandlerFactory = cfg.cache.materialize()

        private val serverHandler = let {
            val cacheImplementation = cfg.cache.materialize()
            val prefix = Path.of("/").resolve(Path.of(cfg.serverPath ?: "/"))
            ServerHandler(cacheImplementation, prefix)
        }

        private val exceptionHandler = ExceptionHandler()
        private val throttlingHandler = ThrottlingHandler(cfg)
        private val bucketManager = BucketManager.from(cfg)

        private val authenticator = when (val auth = cfg.authentication) {
            is Configuration.BasicAuthentication -> NettyHttpBasicAuthenticator(cfg.users, RoleAuthorizer())
@@ -307,25 +298,13 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
                    "Closed connection ${ch.id().asShortText()} with ${ch.remoteAddress()}"
                }
            }
            ch.config().isAutoRead = false
            val pipeline = ch.pipeline()
            cfg.connection.also { conn ->
                val readTimeout = conn.readTimeout.toMillis()
                val writeTimeout = conn.writeTimeout.toMillis()
                if(readTimeout > 0 || writeTimeout > 0) {
                    pipeline.addLast(
                        IdleStateHandler(
                            false,
                            readTimeout,
                            writeTimeout,
                            0,
                            TimeUnit.MILLISECONDS
                        )
                    )
                }
                val readIdleTimeout = conn.readIdleTimeout.toMillis()
                val writeIdleTimeout = conn.writeIdleTimeout.toMillis()
                val idleTimeout = conn.idleTimeout.toMillis()
                if(readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
                if (readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
                    pipeline.addLast(
                        IdleStateHandler(
                            true,
@@ -340,16 +319,19 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
                pipeline.addLast(object : ChannelInboundHandlerAdapter() {
                    override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
                        if (evt is IdleStateEvent) {
                            when(evt.state()) {
                            when (evt.state()) {
                                IdleState.READER_IDLE -> log.debug {
                                    "Read timeout reached on channel ${ch.id().asShortText()}, closing the connection"
                                }

                                IdleState.WRITER_IDLE -> log.debug {
                                    "Write timeout reached on channel ${ch.id().asShortText()}, closing the connection"
                                }

                                IdleState.ALL_IDLE -> log.debug {
                                    "Idle timeout reached on channel ${ch.id().asShortText()}, closing the connection"
                                }

                                null -> throw IllegalStateException("This should never happen")
                            }
                            ctx.close()
@@ -359,63 +341,117 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
            sslContext?.newHandler(ch.alloc())?.also {
                pipeline.addLast(SSL_HANDLER_NAME, it)
            }
            pipeline.addLast(HttpServerCodec())
            val httpDecoderConfig = HttpDecoderConfig().apply {
                maxChunkSize = cfg.connection.chunkSize
            }
            pipeline.addLast(HttpServerCodec(httpDecoderConfig))
            pipeline.addLast(ReadTriggerDuplexHandler.NAME, ReadTriggerDuplexHandler)
            pipeline.addLast(MaxRequestSizeHandler.NAME, MaxRequestSizeHandler(cfg.connection.maxRequestSize))
            pipeline.addLast(HttpChunkContentCompressor(1024))
            pipeline.addLast(ChunkedWriteHandler())
            pipeline.addLast(HttpObjectAggregator(cfg.connection.maxRequestSize))
            authenticator?.let {
                pipeline.addLast(it)
            }
            pipeline.addLast(throttlingHandler)
            pipeline.addLast(eventExecutorGroup, serverHandler)
            pipeline.addLast(exceptionHandler)
            pipeline.addLast(ThrottlingHandler(bucketManager,cfg.rateLimiter, cfg.connection))

            val serverHandler = let {
                val prefix = Path.of("/").resolve(Path.of(cfg.serverPath ?: "/"))
                ServerHandler(prefix) {
                    cacheHandlerFactory.newHandler(cfg, ch.eventLoop(), channelFactory, datagramChannelFactory)
                }
            }
            pipeline.addLast(ServerHandler.NAME, serverHandler)
            pipeline.addLast(ExceptionHandler.NAME, ExceptionHandler)
        }

        override fun asyncClose() = cacheHandlerFactory.asyncClose()
    }

    class ServerHandle(
        httpChannelFuture: ChannelFuture,
        private val executorGroups: Iterable<EventExecutorGroup>
    ) : AutoCloseable {
        private val httpChannel: Channel = httpChannelFuture.channel()
        private val closeFuture: ChannelFuture = httpChannel.closeFuture()
        private val log = contextLogger()
        closeFuture: ChannelFuture,
        private val bossGroup: EventExecutorGroup,
        private val executorGroups: Iterable<EventExecutorGroup>,
        private val serverInitializer: AsyncCloseable,
    ) : Future<Void> by from(closeFuture, bossGroup, executorGroups, serverInitializer) {

        fun shutdown(): ChannelFuture {
            return httpChannel.close()
        }
        companion object {
            private val log = createLogger<ServerHandle>()

        override fun close() {
            try {
                closeFuture.sync()
            } finally {
                executorGroups.forEach {
                    it.shutdownGracefully().sync()
            private fun from(
                closeFuture: ChannelFuture,
                bossGroup: EventExecutorGroup,
                executorGroups: Iterable<EventExecutorGroup>,
                serverInitializer: AsyncCloseable
            ): CompletableFuture<Void> {
                val result = CompletableFuture<Void>()
                closeFuture.addListener {
                    val errors = mutableListOf<Throwable>()
                    val deadline = Instant.now().plusSeconds(20)

                    serverInitializer.asyncClose().whenCompleteAsync { _, ex ->
                        if(ex != null) {
                            log.error(ex.message, ex)
                            errors.addLast(ex)
                        }

                        executorGroups.forEach(EventExecutorGroup::shutdownGracefully)
                        bossGroup.terminationFuture().sync()

                        for (executorGroup in executorGroups) {
                            val future = executorGroup.terminationFuture()
                            try {
                                val now = Instant.now()
                                if (now > deadline) {
                                    future.get(0, TimeUnit.SECONDS)
                                } else {
                                    future.get(Duration.between(now, deadline).toMillis(), TimeUnit.MILLISECONDS)
                                }
                            }
                            catch (te: TimeoutException) {
                                errors.addLast(te)
                                log.warn("Timeout while waiting for shutdown of $executorGroup", te)
                            } catch (ex: Throwable) {
                                log.warn(ex.message, ex)
                                errors.addLast(ex)
                            }
                        }

                        if(errors.isEmpty()) {
                            result.complete(null)
                        } else {
                            result.completeExceptionally(errors.first())
                        }
                    }
                }

                return result.thenAccept {
                    log.info {
                        "RemoteBuildCacheServer has been gracefully shut down"
                    }
                }
            }
            log.info {
                "RemoteBuildCacheServer has been gracefully shut down"
            }
        }


        fun sendShutdownSignal() {
            bossGroup.shutdownGracefully()
        }
    }

    fun run(): ServerHandle {
        // Create the multithreaded event loops for the server
        val bossGroup = NioEventLoopGroup(1)
        val serverSocketChannel = NioServerSocketChannel::class.java
        val workerGroup = NioEventLoopGroup(0)
        val eventExecutorGroup = run {
            val threadFactory = if (cfg.eventExecutor.isUseVirtualThreads) {
                Thread.ofVirtual().factory()
            } else {
                null
            }
            DefaultEventExecutorGroup(Runtime.getRuntime().availableProcessors(), threadFactory)
        }
        val bossGroup = MultiThreadIoEventLoopGroup(1, NioIoHandler.newFactory())
        val channelFactory = ChannelFactory<SocketChannel> { NioSocketChannel() }
        val datagramChannelFactory = ChannelFactory<DatagramChannel> { NioDatagramChannel() }
        val serverChannelFactory = ChannelFactory<ServerSocketChannel> { NioServerSocketChannel() }
        val workerGroup = MultiThreadIoEventLoopGroup(0, NioIoHandler.newFactory())

        val serverInitializer = ServerInitializer(cfg, channelFactory, datagramChannelFactory)
        val bootstrap = ServerBootstrap().apply {
            // Configure the server
            group(bossGroup, workerGroup)
            channel(serverSocketChannel)
            childHandler(ServerInitializer(cfg, eventExecutorGroup))
            channelFactory(serverChannelFactory)
            childHandler(serverInitializer)
            option(ChannelOption.SO_BACKLOG, cfg.incomingConnectionsBacklogSize)
            childOption(ChannelOption.SO_KEEPALIVE, true)
        }
@@ -423,10 +459,16 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {

        // Bind and start to accept incoming connections.
        val bindAddress = InetSocketAddress(cfg.host, cfg.port)
        val httpChannel = bootstrap.bind(bindAddress).sync()
        val httpChannel = bootstrap.bind(bindAddress).sync().channel()
        log.info {
            "RemoteBuildCacheServer is listening on ${cfg.host}:${cfg.port}"
        }
        return ServerHandle(httpChannel, setOf(bossGroup, workerGroup, eventExecutorGroup))

        return ServerHandle(
            httpChannel.closeFuture(),
            bossGroup,
            setOf(workerGroup),
            serverInitializer
        )
    }
}
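After this change, run() returns a ServerHandle that delegates to a Future<Void> completing only after the channel closes, the cache handler factory is asynchronously closed and the event loop groups terminate, while sendShutdownSignal() triggers that sequence by shutting down the boss group. A hedged usage sketch, assuming the classes above; the main-function wiring and the configuration path argument are illustrative only.

import java.nio.file.Path

fun main(args: Array<String>) {
    // Hypothetical entry point: parse the configuration from a path passed on the command line
    val cfg = RemoteBuildCacheServer.loadConfiguration(Path.of(args[0]))
    val handle = RemoteBuildCacheServer(cfg).run()
    Runtime.getRuntime().addShutdownHook(Thread {
        // Stops the boss event loop; the handle then completes once workers and caches are closed
        handle.sendShutdownSignal()
    })
    handle.get()   // blocks until graceful shutdown has finished
}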
@@ -6,6 +6,7 @@ import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.handler.codec.http.DefaultFullHttpResponse
import io.netty.handler.codec.http.FullHttpResponse
import io.netty.handler.codec.http.HttpContent
import io.netty.handler.codec.http.HttpHeaderNames
import io.netty.handler.codec.http.HttpRequest
import io.netty.handler.codec.http.HttpResponseStatus
@@ -57,6 +58,8 @@ abstract class AbstractNettyHttpAuthenticator(private val authorizer: Authorizer
            } else {
                authorizationFailure(ctx, msg)
            }
        } else if(msg is HttpContent) {
            ctx.fireChannelRead(msg)
        }
    }

@@ -1,90 +0,0 @@
package net.woggioni.rbcs.server.auth

import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.handler.ssl.SslHandler
import io.netty.handler.ssl.SslHandshakeCompletionEvent
import java.security.KeyStore
import java.security.cert.CertPathValidator
import java.security.cert.CertPathValidatorException
import java.security.cert.CertificateException
import java.security.cert.CertificateFactory
import java.security.cert.PKIXParameters
import java.security.cert.PKIXRevocationChecker
import java.security.cert.X509Certificate
import java.util.EnumSet
import javax.net.ssl.SSLSession
import javax.net.ssl.TrustManagerFactory
import javax.net.ssl.X509TrustManager


class ClientCertificateValidator private constructor(
    private val sslHandler: SslHandler,
    private val x509TrustManager: X509TrustManager
) : ChannelInboundHandlerAdapter() {
    override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
        if (evt is SslHandshakeCompletionEvent) {
            if (evt.isSuccess) {
                val session: SSLSession = sslHandler.engine().session
                val clientCertificateChain = session.peerCertificates as Array<X509Certificate>
                val authType: String = clientCertificateChain[0].publicKey.algorithm
                x509TrustManager.checkClientTrusted(clientCertificateChain, authType)
            } else {
                // Handle the failure, for example by closing the channel.
            }
        }
        super.userEventTriggered(ctx, evt)
    }

    companion object {
        fun getTrustManager(trustStore: KeyStore?, certificateRevocationEnabled: Boolean): X509TrustManager {
            return if (trustStore != null) {
                val certificateFactory = CertificateFactory.getInstance("X.509")
                val validator = CertPathValidator.getInstance("PKIX").apply {
                    val rc = revocationChecker as PKIXRevocationChecker
                    rc.options = EnumSet.of(
                        PKIXRevocationChecker.Option.NO_FALLBACK
                    )
                }
                val params = PKIXParameters(trustStore).apply {
                    isRevocationEnabled = certificateRevocationEnabled
                }
                object : X509TrustManager {
                    override fun checkClientTrusted(chain: Array<out X509Certificate>, authType: String) {
                        val clientCertificateChain = certificateFactory.generateCertPath(chain.toList())
                        try {
                            validator.validate(clientCertificateChain, params)
                        } catch (ex: CertPathValidatorException) {
                            throw CertificateException(ex)
                        }
                    }

                    override fun checkServerTrusted(chain: Array<out X509Certificate>, authType: String) {
                        throw NotImplementedError()
                    }

                    private val acceptedIssuers = trustStore.aliases().asSequence()
                        .filter(trustStore::isCertificateEntry)
                        .map(trustStore::getCertificate)
                        .map { it as X509Certificate }
                        .toList()
                        .toTypedArray()

                    override fun getAcceptedIssuers() = acceptedIssuers
                }
            } else {
                val trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm())
                trustManagerFactory.trustManagers.asSequence().filter { it is X509TrustManager }
                    .single() as X509TrustManager
            }
        }

        fun of(
            sslHandler: SslHandler,
            trustStore: KeyStore?,
            certificateRevocationEnabled: Boolean
        ): ClientCertificateValidator {
            return ClientCertificateValidator(sslHandler, getTrustManager(trustStore, certificateRevocationEnabled))
        }
    }
}
@@ -8,8 +8,9 @@ class RoleAuthorizer : Authorizer {

    companion object {
        private val METHOD_MAP = mapOf(
            Role.Reader to setOf(HttpMethod.GET, HttpMethod.HEAD, HttpMethod.TRACE),
            Role.Writer to setOf(HttpMethod.PUT, HttpMethod.POST)
            Role.Reader to setOf(HttpMethod.GET, HttpMethod.HEAD),
            Role.Writer to setOf(HttpMethod.PUT, HttpMethod.POST),
            Role.Healthcheck to setOf(HttpMethod.TRACE)
        )
    }
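The change above narrows the Reader role to GET/HEAD and introduces a dedicated Healthcheck role for TRACE. A hedged sketch of how such a role-to-method map is typically consulted (the `Role` enum and `isAuthorized` helper here are illustrative stand-ins, not the project's actual `Authorizer` API):

```kotlin
import io.netty.handler.codec.http.HttpMethod

// Hypothetical stand-in for the project's Role enum
enum class Role { Reader, Writer, Healthcheck }

private val METHOD_MAP = mapOf(
    Role.Reader to setOf(HttpMethod.GET, HttpMethod.HEAD),
    Role.Writer to setOf(HttpMethod.PUT, HttpMethod.POST),
    Role.Healthcheck to setOf(HttpMethod.TRACE)
)

// A request is allowed if any of the caller's roles permits the request method
fun isAuthorized(roles: Set<Role>, method: HttpMethod): Boolean =
    roles.any { role -> METHOD_MAP[role]?.contains(method) ?: false }
```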
@@ -1,12 +1,11 @@
package net.woggioni.rbcs.server.cache

import io.netty.buffer.ByteBuf
import net.woggioni.rbcs.api.Cache
import net.woggioni.rbcs.common.ByteBufInputStream
import net.woggioni.rbcs.common.RBCS.digestString
import net.woggioni.rbcs.common.contextLogger
import net.woggioni.jwo.JWO
import net.woggioni.jwo.LockFile
import java.io.ByteArrayOutputStream
import java.io.InputStream
import java.io.ObjectInputStream
import java.io.ObjectOutputStream
import java.io.Serializable
import java.nio.ByteBuffer
import java.nio.channels.Channels
import java.nio.channels.FileChannel
import java.nio.file.Files
@@ -14,117 +13,157 @@ import java.nio.file.Path
import java.nio.file.StandardCopyOption
import java.nio.file.StandardOpenOption
import java.nio.file.attribute.BasicFileAttributes
import java.security.MessageDigest
import java.time.Duration
import java.time.Instant
import java.util.concurrent.CompletableFuture
import java.util.concurrent.atomic.AtomicReference
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.Inflater
import java.util.zip.InflaterInputStream
import net.woggioni.jwo.JWO
import net.woggioni.rbcs.api.AsyncCloseable
import net.woggioni.rbcs.api.CacheValueMetadata
import net.woggioni.rbcs.common.createLogger

class FileSystemCache(
    val root: Path,
    val maxAge: Duration,
    val digestAlgorithm: String?,
    val compressionEnabled: Boolean,
    val compressionLevel: Int
) : Cache {
    val maxAge: Duration
) : AsyncCloseable {

    class EntryValue(val metadata: CacheValueMetadata, val channel : FileChannel, val offset : Long, val size : Long) : Serializable

    private companion object {
        @JvmStatic
        private val log = contextLogger()
        private val log = createLogger<FileSystemCache>()
    }

    init {
        Files.createDirectories(root)
    }

    private var nextGc = AtomicReference(Instant.now().plus(maxAge))
    @Volatile
    private var running = true

    override fun get(key: String) = (digestAlgorithm
        ?.let(MessageDigest::getInstance)
        ?.let { md ->
            digestString(key.toByteArray(), md)
        } ?: key).let { digest ->
        root.resolve(digest).takeIf(Files::exists)
    private var nextGc = Instant.now()

    fun get(key: String): EntryValue? =
        root.resolve(key).takeIf(Files::exists)
            ?.let { file ->
            file.takeIf(Files::exists)?.let { file ->
                if (compressionEnabled) {
                    val inflater = Inflater()
                    Channels.newChannel(
                        InflaterInputStream(
                            Channels.newInputStream(
                                FileChannel.open(
                                    file,
                                    StandardOpenOption.READ
                                )
                            ), inflater
                        )
                    )
                } else {
                    FileChannel.open(file, StandardOpenOption.READ)
                val size = Files.size(file)
                val channel = FileChannel.open(file, StandardOpenOption.READ)
                val source = Channels.newInputStream(channel)
                val tmp = ByteArray(Integer.BYTES)
                val buffer = ByteBuffer.wrap(tmp)
                source.read(tmp)
                buffer.rewind()
                val offset = (Integer.BYTES + buffer.getInt()).toLong()
                var count = 0
                val wrapper = object : InputStream() {
                    override fun read(): Int {
                        return source.read().also {
                            if (it > 0) count += it
                        }
                    }

                    override fun read(b: ByteArray, off: Int, len: Int): Int {
                        return source.read(b, off, len).also {
                            if (it > 0) count += it
                        }
                    }

                    override fun close() {
                    }
                }
            }.also {
                gc()
            }.let {
                CompletableFuture.completedFuture(it)
                val metadata = ObjectInputStream(wrapper).use { ois ->
                    ois.readObject() as CacheValueMetadata
                }
                EntryValue(metadata, channel, offset, size)
            }

    class FileSink(metadata: CacheValueMetadata, private val path: Path, private val tmpFile: Path) {
        val channel: FileChannel

        init {
            val baos = ByteArrayOutputStream()
            ObjectOutputStream(baos).use {
                it.writeObject(metadata)
            }
            Files.newOutputStream(tmpFile).use {
                val bytes = baos.toByteArray()
                val buffer = ByteBuffer.allocate(Integer.BYTES)
                buffer.putInt(bytes.size)
                buffer.rewind()
                it.write(buffer.array())
                it.write(bytes)
            }
            channel = FileChannel.open(tmpFile, StandardOpenOption.APPEND)
        }

        fun commit() {
            channel.close()
            Files.move(tmpFile, path, StandardCopyOption.ATOMIC_MOVE)
        }

        fun rollback() {
            channel.close()
            Files.delete(path)
        }
    }

    override fun put(key: String, content: ByteBuf): CompletableFuture<Void> {
        (digestAlgorithm
            ?.let(MessageDigest::getInstance)
            ?.let { md ->
                digestString(key.toByteArray(), md)
            } ?: key).let { digest ->
            val file = root.resolve(digest)
            val tmpFile = Files.createTempFile(root, null, ".tmp")
            try {
                Files.newOutputStream(tmpFile).let {
                    if (compressionEnabled) {
                        val deflater = Deflater(compressionLevel)
                        DeflaterOutputStream(it, deflater)
                    } else {
                        it
    fun put(
        key: String,
        metadata: CacheValueMetadata,
    ): FileSink {
        val file = root.resolve(key)
        val tmpFile = Files.createTempFile(root, null, ".tmp")
        return FileSink(metadata, file, tmpFile)
    }

    private val closeFuture = object : CompletableFuture<Void>() {
        init {
            Thread.ofVirtual().name("file-system-cache-gc").start {
                try {
                    while (running) {
                        gc()
                    }
                }.use {
                    JWO.copy(ByteBufInputStream(content), it)
                    complete(null)
                } catch (ex : Throwable) {
                    completeExceptionally(ex)
                }
                Files.move(tmpFile, file, StandardCopyOption.ATOMIC_MOVE)
            } catch (t: Throwable) {
                Files.delete(tmpFile)
                throw t
            }
        }.also {
            gc()
        }
        return CompletableFuture.completedFuture(null)
    }

    private fun gc() {
        val now = Instant.now()
        val oldValue = nextGc.getAndSet(now.plus(maxAge))
        if (oldValue < now) {
            actualGc(now)
        if (nextGc < now) {
            val oldestEntry = actualGc(now)
            nextGc = (oldestEntry ?: now).plus(maxAge)
        }
        Thread.sleep(minOf(Duration.between(now, nextGc), Duration.ofSeconds(1)))
    }

    @Synchronized
    private fun actualGc(now: Instant) {
        Files.list(root).filter {
            val creationTimeStamp = Files.readAttributes(it, BasicFileAttributes::class.java)
                .creationTime()
                .toInstant()
            now > creationTimeStamp.plus(maxAge)
        }.forEach { file ->
            LockFile.acquire(file, false).use {
                Files.delete(file)
    /**
     * Returns the creation timestamp of the oldest cache entry (if any)
     */
    private fun actualGc(now: Instant): Instant? {
        var result: Instant? = null
        Files.list(root)
            .filter { path ->
                JWO.splitExtension(path)
                    .map { it._2 }
                    .map { it != ".tmp" }
                    .orElse(true)
            }
        }
            .filter {
                val creationTimeStamp = Files.readAttributes(it, BasicFileAttributes::class.java)
                    .creationTime()
                    .toInstant()
                if (result == null || creationTimeStamp < result) {
                    result = creationTimeStamp
                }
                now > creationTimeStamp.plus(maxAge)
            }.forEach(Files::delete)
        return result
    }

    override fun close() {}
    override fun asyncClose() : CompletableFuture<Void> {
        running = false
        return closeFuture
    }
}
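The rewritten `FileSystemCache` stores each entry as a 4-byte length prefix, the serialized `CacheValueMetadata`, and then the payload, with `FileSink.commit()` publishing the entry via an atomic rename and `get` recovering the payload offset from the prefix. A self-contained sketch of that on-disk layout, assuming a made-up `Metadata` class and helper names purely for illustration:

```kotlin
import java.io.ByteArrayOutputStream
import java.io.ObjectInputStream
import java.io.ObjectOutputStream
import java.io.Serializable
import java.nio.ByteBuffer
import java.nio.channels.Channels
import java.nio.channels.FileChannel
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardCopyOption
import java.nio.file.StandardOpenOption

// Stand-in for CacheValueMetadata, only to keep the sketch self-contained
data class Metadata(val mimeType: String?) : Serializable

// Write: 4-byte length prefix, serialized metadata, then the payload, all in a temp file
fun writeEntry(root: Path, key: String, metadata: Metadata, payload: ByteArray) {
    val tmp = Files.createTempFile(root, null, ".tmp")
    val header = ByteArrayOutputStream().let { baos ->
        ObjectOutputStream(baos).use { it.writeObject(metadata) }
        baos.toByteArray()
    }
    Files.newOutputStream(tmp).use {
        it.write(ByteBuffer.allocate(Integer.BYTES).putInt(header.size).array())
        it.write(header)
        it.write(payload) // FileSink appends this part through a FileChannel instead
    }
    // commit(): the atomic rename makes the entry visible only once it is complete
    Files.move(tmp, root.resolve(key), StandardCopyOption.ATOMIC_MOVE)
}

// Read: recover the payload offset from the length prefix, as FileSystemCache.get does
fun readEntry(root: Path, key: String): Pair<Metadata, ByteArray>? {
    val file = root.resolve(key).takeIf(Files::exists) ?: return null
    FileChannel.open(file, StandardOpenOption.READ).use { channel ->
        val source = Channels.newInputStream(channel)
        val prefix = ByteArray(Integer.BYTES).also { source.read(it) }
        val headerSize = ByteBuffer.wrap(prefix).getInt()
        val metadata = ObjectInputStream(source.readNBytes(headerSize).inputStream()).use {
            it.readObject() as Metadata
        }
        return metadata to source.readBytes()
    }
}
```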
@@ -1,10 +1,15 @@
package net.woggioni.rbcs.server.cache

import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.common.RBCS
import net.woggioni.jwo.Application
import io.netty.channel.ChannelFactory
import io.netty.channel.EventLoopGroup
import io.netty.channel.socket.DatagramChannel
import io.netty.channel.socket.SocketChannel
import java.nio.file.Path
import java.time.Duration
import net.woggioni.jwo.Application
import net.woggioni.rbcs.api.CacheHandlerFactory
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.common.RBCS

data class FileSystemCacheConfiguration(
    val root: Path?,
@@ -13,13 +18,19 @@ data class FileSystemCacheConfiguration(
    val compressionEnabled: Boolean,
    val compressionLevel: Int,
) : Configuration.Cache {
    override fun materialize() = FileSystemCache(
        root ?: Application.builder("rbcs").build().computeCacheDirectory(),
        maxAge,
        digestAlgorithm,
        compressionEnabled,
        compressionLevel
    )

    override fun materialize() = object : CacheHandlerFactory {
        private val cache = FileSystemCache(root ?: Application.builder("rbcs").build().computeCacheDirectory(), maxAge)

        override fun asyncClose() = cache.asyncClose()

        override fun newHandler(
            cfg : Configuration,
            eventLoop: EventLoopGroup,
            socketChannelFactory: ChannelFactory<SocketChannel>,
            datagramChannelFactory: ChannelFactory<DatagramChannel>
        ) = FileSystemCacheHandler(cache, digestAlgorithm, compressionEnabled, compressionLevel, cfg.connection.chunkSize)
    }

    override fun getNamespaceURI() = RBCS.RBCS_NAMESPACE_URI
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/FileSystemCacheHandler.kt (new file, 137 lines, vendored)
@@ -0,0 +1,137 @@
package net.woggioni.rbcs.server.cache

import io.netty.buffer.ByteBuf
import io.netty.channel.ChannelHandlerContext
import io.netty.handler.codec.http.LastHttpContent
import io.netty.handler.stream.ChunkedNioFile
import java.nio.channels.Channels
import java.util.Base64
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.InflaterInputStream
import net.woggioni.rbcs.api.CacheHandler
import net.woggioni.rbcs.api.message.CacheMessage
import net.woggioni.rbcs.api.message.CacheMessage.CacheContent
import net.woggioni.rbcs.api.message.CacheMessage.CacheGetRequest
import net.woggioni.rbcs.api.message.CacheMessage.CachePutRequest
import net.woggioni.rbcs.api.message.CacheMessage.CachePutResponse
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueFoundResponse
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueNotFoundResponse
import net.woggioni.rbcs.api.message.CacheMessage.LastCacheContent
import net.woggioni.rbcs.common.RBCS.processCacheKey

class FileSystemCacheHandler(
    private val cache: FileSystemCache,
    private val digestAlgorithm: String?,
    private val compressionEnabled: Boolean,
    private val compressionLevel: Int,
    private val chunkSize: Int
) : CacheHandler() {

    private interface InProgressRequest {
    }

    private class InProgressGetRequest(val request : CacheGetRequest) : InProgressRequest

    private inner class InProgressPutRequest(
        val key : String,
        private val fileSink : FileSystemCache.FileSink
    ) : InProgressRequest {

        private val stream = Channels.newOutputStream(fileSink.channel).let {
            if (compressionEnabled) {
                DeflaterOutputStream(it, Deflater(compressionLevel))
            } else {
                it
            }
        }

        fun write(buf: ByteBuf) {
            buf.readBytes(stream, buf.readableBytes())
        }

        fun commit() {
            stream.close()
            fileSink.commit()
        }

        fun rollback() {
            fileSink.rollback()
        }
    }

    private var inProgressRequest: InProgressRequest? = null

    override fun channelRead0(ctx: ChannelHandlerContext, msg: CacheMessage) {
        when (msg) {
            is CacheGetRequest -> handleGetRequest(ctx, msg)
            is CachePutRequest -> handlePutRequest(ctx, msg)
            is LastCacheContent -> handleLastCacheContent(ctx, msg)
            is CacheContent -> handleCacheContent(ctx, msg)
            else -> ctx.fireChannelRead(msg)
        }
    }

    private fun handleGetRequest(ctx: ChannelHandlerContext, msg: CacheGetRequest) {
        inProgressRequest = InProgressGetRequest(msg)
    }

    private fun handlePutRequest(ctx: ChannelHandlerContext, msg: CachePutRequest) {
        val key = String(Base64.getUrlEncoder().encode(processCacheKey(msg.key, null, digestAlgorithm)))
        val sink = cache.put(key, msg.metadata)
        inProgressRequest = InProgressPutRequest(msg.key, sink)
    }

    private fun handleCacheContent(ctx: ChannelHandlerContext, msg: CacheContent) {
        val request = inProgressRequest
        if(request is InProgressPutRequest) {
            request.write(msg.content())
        }
    }

    private fun handleLastCacheContent(ctx: ChannelHandlerContext, msg: LastCacheContent) {
        when(val request = inProgressRequest) {
            is InProgressPutRequest -> {
                inProgressRequest = null
                request.write(msg.content())
                request.commit()
                sendMessageAndFlush(ctx, CachePutResponse(request.key))
            }
            is InProgressGetRequest -> {
                val key = String(Base64.getUrlEncoder().encode(processCacheKey(request.request.key, null, digestAlgorithm)))
                cache.get(key)?.also { entryValue ->
                    sendMessageAndFlush(ctx, CacheValueFoundResponse(request.request.key, entryValue.metadata))
                    entryValue.channel.let { channel ->
                        if(compressionEnabled) {
                            InflaterInputStream(Channels.newInputStream(channel)).use { stream ->

                                outerLoop@
                                while (true) {
                                    val buf = ctx.alloc().heapBuffer(chunkSize)
                                    while(buf.readableBytes() < chunkSize) {
                                        val read = buf.writeBytes(stream, chunkSize)
                                        if(read < 0) {
                                            sendMessageAndFlush(ctx, LastCacheContent(buf))
                                            break@outerLoop
                                        }
                                    }
                                    sendMessageAndFlush(ctx, CacheContent(buf))
                                }
                            }
                        } else {
                            sendMessage(ctx, ChunkedNioFile(channel, entryValue.offset, entryValue.size - entryValue.offset, chunkSize))
                            sendMessageAndFlush(ctx, LastHttpContent.EMPTY_LAST_CONTENT)
                        }
                    }
                } ?: sendMessageAndFlush(ctx, CacheValueNotFoundResponse(key))
            }
        }
    }

    override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
        (inProgressRequest as? InProgressPutRequest)?.rollback()
        super.exceptionCaught(ctx, cause)
    }
}
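When a compressed entry is streamed back, the handler above slices the inflated stream into buffers of at most `chunkSize` bytes and sends the (possibly partial) final buffer as the last content message. A simplified stand-alone version of that chunking loop, using `Unpooled` buffers instead of the channel allocator:

```kotlin
import io.netty.buffer.ByteBuf
import io.netty.buffer.Unpooled
import java.io.ByteArrayInputStream
import java.io.InputStream

// Slice an InputStream into ByteBuf chunks of at most chunkSize bytes, mirroring the
// handler's loop: keep filling the current buffer until it is full or the stream ends.
fun chunk(stream: InputStream, chunkSize: Int): List<ByteBuf> {
    val chunks = mutableListOf<ByteBuf>()
    outer@ while (true) {
        val buf = Unpooled.buffer(chunkSize)
        while (buf.readableBytes() < chunkSize) {
            // writeBytes returns -1 once the stream is exhausted
            val read = buf.writeBytes(stream, chunkSize - buf.readableBytes())
            if (read < 0) {
                chunks.add(buf) // possibly partial last chunk
                break@outer
            }
        }
        chunks.add(buf)
    }
    return chunks
}

fun main() {
    val chunks = chunk(ByteArrayInputStream(ByteArray(100_000) { it.toByte() }), chunkSize = 0x10000)
    println(chunks.map { it.readableBytes() }) // [65536, 34464]
}
```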
@@ -1,18 +1,18 @@
package net.woggioni.rbcs.server.cache

import java.nio.file.Path
import java.time.Duration
import java.util.zip.Deflater
import net.woggioni.rbcs.api.CacheProvider
import net.woggioni.rbcs.common.RBCS
import net.woggioni.rbcs.common.Xml
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
import org.w3c.dom.Document
import org.w3c.dom.Element
import java.nio.file.Path
import java.time.Duration
import java.util.zip.Deflater

class FileSystemCacheProvider : CacheProvider<FileSystemCacheConfiguration> {

    override fun getXmlSchemaLocation() = "classpath:net/woggioni/rbcs/server/schema/rbcs.xsd"
    override fun getXmlSchemaLocation() = "classpath:net/woggioni/rbcs/server/schema/rbcs-server.xsd"

    override fun getXmlType() = "fileSystemCacheType"

@@ -30,14 +30,14 @@ class FileSystemCacheProvider : CacheProvider<FileSystemCacheConfiguration> {
        val compressionLevel = el.renderAttribute("compression-level")
            ?.let(String::toInt)
            ?: Deflater.DEFAULT_COMPRESSION
        val digestAlgorithm = el.renderAttribute("digest") ?: "MD5"
        val digestAlgorithm = el.renderAttribute("digest")

        return FileSystemCacheConfiguration(
            path,
            maxAge,
            digestAlgorithm,
            enableCompression,
            compressionLevel
            compressionLevel,
        )
    }

@@ -46,7 +46,9 @@ class FileSystemCacheProvider : CacheProvider<FileSystemCacheConfiguration> {
        Xml.of(doc, result) {
            val prefix = doc.lookupPrefix(RBCS.RBCS_NAMESPACE_URI)
            attr("xs:type", "${prefix}:fileSystemCacheType", RBCS.XML_SCHEMA_NAMESPACE_URI)
            attr("path", root.toString())
            root?.let {
                attr("path", it.toString())
            }
            attr("max-age", maxAge.toString())
            digestAlgorithm?.let { digestAlgorithm ->
                attr("digest", digestAlgorithm)
@@ -1,150 +1,131 @@
package net.woggioni.rbcs.server.cache

import io.netty.buffer.ByteBuf
import net.woggioni.rbcs.api.Cache
import net.woggioni.rbcs.common.ByteBufInputStream
import net.woggioni.rbcs.common.ByteBufOutputStream
import net.woggioni.rbcs.common.RBCS.digestString
import net.woggioni.rbcs.common.contextLogger
import net.woggioni.jwo.JWO
import java.nio.channels.Channels
import java.security.MessageDigest
import java.time.Duration
import java.time.Instant
import java.util.PriorityQueue
import java.util.concurrent.CompletableFuture
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.PriorityBlockingQueue
import java.util.concurrent.atomic.AtomicLong
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.Inflater
import java.util.zip.InflaterInputStream
import java.util.concurrent.TimeUnit
import java.util.concurrent.locks.ReentrantReadWriteLock
import kotlin.concurrent.withLock
import net.woggioni.rbcs.api.AsyncCloseable
import net.woggioni.rbcs.api.CacheValueMetadata
import net.woggioni.rbcs.common.createLogger

private class CacheKey(private val value: ByteArray) {
    override fun equals(other: Any?) = if (other is CacheKey) {
        value.contentEquals(other.value)
    } else false

    override fun hashCode() = value.contentHashCode()
}

class CacheEntry(
    val metadata: CacheValueMetadata,
    val content: ByteArray
)

class InMemoryCache(
    val maxAge: Duration,
    val maxSize: Long,
    val digestAlgorithm: String?,
    val compressionEnabled: Boolean,
    val compressionLevel: Int
) : Cache {
    private val maxAge: Duration,
    private val maxSize: Long
) : AsyncCloseable {

    companion object {
        @JvmStatic
        private val log = contextLogger()
        private val log = createLogger<InMemoryCache>()
    }

    private val size = AtomicLong()
    private val map = ConcurrentHashMap<String, ByteBuf>()

    private class RemovalQueueElement(val key: String, val value : ByteBuf, val expiry : Instant) : Comparable<RemovalQueueElement> {
    private var mapSize : Long = 0
    private val map = HashMap<CacheKey, CacheEntry>()
    private val lock = ReentrantReadWriteLock()
    private val cond = lock.writeLock().newCondition()

    private class RemovalQueueElement(val key: CacheKey, val value: CacheEntry, val expiry: Instant) :
        Comparable<RemovalQueueElement> {
        override fun compareTo(other: RemovalQueueElement) = expiry.compareTo(other.expiry)
    }

    private val removalQueue = PriorityBlockingQueue<RemovalQueueElement>()
    private val removalQueue = PriorityQueue<RemovalQueueElement>()

    @Volatile
    private var running = true
    private val garbageCollector = Thread {
        while(true) {
            val el = removalQueue.take()
            val buf = el.value
            val now = Instant.now()
            if(now > el.expiry) {
                val removed = map.remove(el.key, buf)
                if(removed) {
                    updateSizeAfterRemoval(buf)
                    //Decrease the reference count for map
                    buf.release()

    private val closeFuture = object : CompletableFuture<Void>() {
        init {
            Thread.ofVirtual().name("in-memory-cache-gc").start {
                try {
                    lock.writeLock().withLock {
                        while (running) {
                            val el = removalQueue.poll()
                            if(el == null) {
                                cond.await(1000, TimeUnit.MILLISECONDS)
                                continue
                            }
                            val value = el.value
                            val now = Instant.now()
                            if (now > el.expiry) {
                                val removed = map.remove(el.key, value)
                                if (removed) {
                                    updateSizeAfterRemoval(value.content)
                                }
                            } else {
                                removalQueue.offer(el)
                                val interval = minOf(Duration.between(now, el.expiry), Duration.ofSeconds(1))
                                cond.await(interval.toMillis(), TimeUnit.MILLISECONDS)
                            }
                        }
                        map.clear()
                    }
                    complete(null)
                } catch (ex: Throwable) {
                    completeExceptionally(ex)
                }
                //Decrease the reference count for removalQueue
                buf.release()
            } else {
                removalQueue.put(el)
                Thread.sleep(minOf(Duration.between(now, el.expiry), Duration.ofSeconds(1)))
            }
        }
    }.apply {
        start()
    }

    private fun removeEldest() : Long {
        while(true) {
            val el = removalQueue.take()
            val buf = el.value
            val removed = map.remove(el.key, buf)
            //Decrease the reference count for removalQueue
            buf.release()
            if(removed) {
                val newSize = updateSizeAfterRemoval(buf)
                //Decrease the reference count for map
                buf.release()
    fun removeEldest(): Long {
        while (true) {
            val el = removalQueue.poll() ?: return mapSize
            val value = el.value
            val removed = map.remove(el.key, value)
            if (removed) {
                val newSize = updateSizeAfterRemoval(value.content)
                return newSize
            }
        }
    }

    private fun updateSizeAfterRemoval(removed: ByteBuf) : Long {
        return size.updateAndGet { currentSize : Long ->
            currentSize - removed.readableBytes()
        }
    private fun updateSizeAfterRemoval(removed: ByteArray): Long {
        mapSize -= removed.size
        return mapSize
    }

    override fun close() {
    override fun asyncClose() : CompletableFuture<Void> {
        running = false
        garbageCollector.join()
        lock.writeLock().withLock {
            cond.signal()
        }
        return closeFuture
    }

    override fun get(key: String) =
        (digestAlgorithm
            ?.let(MessageDigest::getInstance)
            ?.let { md ->
                digestString(key.toByteArray(), md)
            } ?: key
        ).let { digest ->
            map[digest]
                ?.let { value ->
                    val copy = value.retainedDuplicate()
                    copy.touch("This has to be released by the caller of the cache")
                    if (compressionEnabled) {
                        val inflater = Inflater()
                        Channels.newChannel(InflaterInputStream(ByteBufInputStream(copy), inflater))
                    } else {
                        Channels.newChannel(ByteBufInputStream(copy))
                    }
                }
        }.let {
            CompletableFuture.completedFuture(it)
        }

    override fun put(key: String, content: ByteBuf) =
        (digestAlgorithm
            ?.let(MessageDigest::getInstance)
            ?.let { md ->
                digestString(key.toByteArray(), md)
            } ?: key).let { digest ->
            content.retain()
            val value = if (compressionEnabled) {
                val deflater = Deflater(compressionLevel)
                val buf = content.alloc().buffer()
                buf.retain()
                DeflaterOutputStream(ByteBufOutputStream(buf), deflater).use { outputStream ->
                    ByteBufInputStream(content).use { inputStream ->
                        JWO.copy(inputStream, outputStream)
                    }
                }
                buf
            } else {
                content
            }
            val old = map.put(digest, value)
            val delta = value.readableBytes() - (old?.readableBytes() ?: 0)
            var newSize = size.updateAndGet { currentSize : Long ->
                currentSize + delta
            }
            removalQueue.put(RemovalQueueElement(digest, value.retain(), Instant.now().plus(maxAge)))
            while(newSize > maxSize) {
                newSize = removeEldest()
            }
        }.let {
            CompletableFuture.completedFuture<Void>(null)
    fun get(key: ByteArray) = lock.readLock().withLock {
        map[CacheKey(key)]?.run {
            CacheEntry(metadata, content)
        }
    }

    fun put(
        key: ByteArray,
        value: CacheEntry,
    ) {
        val cacheKey = CacheKey(key)
        lock.writeLock().withLock {
            val oldSize = map.put(cacheKey, value)?.content?.size ?: 0
            val delta = value.content.size - oldSize
            mapSize += delta
            removalQueue.offer(RemovalQueueElement(cacheKey, value, Instant.now().plus(maxAge)))
            while (mapSize > maxSize) {
                removeEldest()
            }
        }
    }
}
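The rewritten `InMemoryCache` replaces the `ConcurrentHashMap`/`AtomicLong` pair with a plain `HashMap` guarded by a `ReentrantReadWriteLock`, a running size counter, and a `PriorityQueue` ordered by expiry that a virtual-thread GC drains. A single-threaded distillation of the same bookkeeping, with locking and the background GC omitted for brevity:

```kotlin
import java.time.Duration
import java.time.Instant
import java.util.PriorityQueue

// Stripped-down illustration of the eviction strategy above: entries are indexed by key
// and also queued by expiry, and the total payload size is capped at maxSize bytes.
class BoundedExpiringMap(private val maxAge: Duration, private val maxSize: Long) {
    private class QueueElement(val key: String, val value: ByteArray, val expiry: Instant) :
        Comparable<QueueElement> {
        override fun compareTo(other: QueueElement) = expiry.compareTo(other.expiry)
    }

    private val map = HashMap<String, ByteArray>()
    private val queue = PriorityQueue<QueueElement>()
    private var size = 0L

    fun put(key: String, value: ByteArray) {
        val old = map.put(key, value)
        size += value.size - (old?.size ?: 0)
        queue.offer(QueueElement(key, value, Instant.now().plus(maxAge)))
        while (size > maxSize) removeEldest()
    }

    fun get(key: String): ByteArray? = map[key]

    // Pop queue entries until one still present in the map is removed; stale queue entries
    // for overwritten keys are simply discarded, then the size counter is updated.
    private fun removeEldest() {
        while (true) {
            val el = queue.poll() ?: return
            if (map.remove(el.key, el.value)) {
                size -= el.value.size
                return
            }
        }
    }
}
```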
@@ -1,8 +1,13 @@
package net.woggioni.rbcs.server.cache

import io.netty.channel.ChannelFactory
import io.netty.channel.EventLoopGroup
import io.netty.channel.socket.DatagramChannel
import io.netty.channel.socket.SocketChannel
import java.time.Duration
import net.woggioni.rbcs.api.CacheHandlerFactory
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.common.RBCS
import java.time.Duration

data class InMemoryCacheConfiguration(
    val maxAge: Duration,
@@ -11,13 +16,18 @@ data class InMemoryCacheConfiguration(
    val compressionEnabled: Boolean,
    val compressionLevel: Int,
) : Configuration.Cache {
    override fun materialize() = InMemoryCache(
        maxAge,
        maxSize,
        digestAlgorithm,
        compressionEnabled,
        compressionLevel
    )
    override fun materialize() = object : CacheHandlerFactory {
        private val cache = InMemoryCache(maxAge, maxSize)

        override fun asyncClose() = cache.asyncClose()

        override fun newHandler(
            cfg : Configuration,
            eventLoop: EventLoopGroup,
            socketChannelFactory: ChannelFactory<SocketChannel>,
            datagramChannelFactory: ChannelFactory<DatagramChannel>
        ) = InMemoryCacheHandler(cache, digestAlgorithm, compressionEnabled, compressionLevel)
    }

    override fun getNamespaceURI() = RBCS.RBCS_NAMESPACE_URI
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/InMemoryCacheHandler.kt (new file, 157 lines, vendored)
@@ -0,0 +1,157 @@
package net.woggioni.rbcs.server.cache

import io.netty.buffer.ByteBuf
import io.netty.channel.ChannelHandlerContext
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.InflaterOutputStream
import net.woggioni.rbcs.api.CacheHandler
import net.woggioni.rbcs.api.message.CacheMessage
import net.woggioni.rbcs.api.message.CacheMessage.CacheContent
import net.woggioni.rbcs.api.message.CacheMessage.CacheGetRequest
import net.woggioni.rbcs.api.message.CacheMessage.CachePutRequest
import net.woggioni.rbcs.api.message.CacheMessage.CachePutResponse
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueFoundResponse
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueNotFoundResponse
import net.woggioni.rbcs.api.message.CacheMessage.LastCacheContent
import net.woggioni.rbcs.common.ByteBufOutputStream
import net.woggioni.rbcs.common.RBCS.processCacheKey

class InMemoryCacheHandler(
    private val cache: InMemoryCache,
    private val digestAlgorithm: String?,
    private val compressionEnabled: Boolean,
    private val compressionLevel: Int
) : CacheHandler() {

    private interface InProgressRequest : AutoCloseable {
    }

    private class InProgressGetRequest(val request: CacheGetRequest) : InProgressRequest {
        override fun close() {
        }
    }

    private interface InProgressPutRequest : InProgressRequest {
        val request: CachePutRequest
        val buf: ByteBuf

        fun append(buf: ByteBuf)
    }

    private inner class InProgressPlainPutRequest(ctx: ChannelHandlerContext, override val request: CachePutRequest) :
        InProgressPutRequest {
        override val buf = ctx.alloc().compositeHeapBuffer()

        override fun append(buf: ByteBuf) {
            if (buf.isDirect) {
                this.buf.writeBytes(buf)
            } else {
                this.buf.addComponent(true, buf.retain())
            }
        }

        override fun close() {
            buf.release()
        }
    }

    private inner class InProgressCompressedPutRequest(
        ctx: ChannelHandlerContext,
        override val request: CachePutRequest
    ) : InProgressPutRequest {

        override val buf = ctx.alloc().heapBuffer()

        private val stream = ByteBufOutputStream(buf).let {
            DeflaterOutputStream(it, Deflater(compressionLevel))
        }

        override fun append(buf: ByteBuf) {
            buf.readBytes(stream, buf.readableBytes())
        }

        override fun close() {
            stream.close()
        }
    }

    private var inProgressRequest: InProgressRequest? = null

    override fun channelRead0(ctx: ChannelHandlerContext, msg: CacheMessage) {
        when (msg) {
            is CacheGetRequest -> handleGetRequest(ctx, msg)
            is CachePutRequest -> handlePutRequest(ctx, msg)
            is LastCacheContent -> handleLastCacheContent(ctx, msg)
            is CacheContent -> handleCacheContent(ctx, msg)
            else -> ctx.fireChannelRead(msg)
        }
    }

    private fun handleGetRequest(ctx: ChannelHandlerContext, msg: CacheGetRequest) {
        inProgressRequest = InProgressGetRequest(msg)
    }

    private fun handlePutRequest(ctx: ChannelHandlerContext, msg: CachePutRequest) {
        inProgressRequest = if (compressionEnabled) {
            InProgressCompressedPutRequest(ctx, msg)
        } else {
            InProgressPlainPutRequest(ctx, msg)
        }
    }

    private fun handleCacheContent(ctx: ChannelHandlerContext, msg: CacheContent) {
        val req = inProgressRequest
        if (req is InProgressPutRequest) {
            req.append(msg.content())
        }
    }

    private fun handleLastCacheContent(ctx: ChannelHandlerContext, msg: LastCacheContent) {
        handleCacheContent(ctx, msg)
        when (val req = inProgressRequest) {
            is InProgressGetRequest -> {
                // this.inProgressRequest = null
                cache.get(processCacheKey(req.request.key, null, digestAlgorithm))?.let { value ->
                    sendMessageAndFlush(ctx, CacheValueFoundResponse(req.request.key, value.metadata))
                    if (compressionEnabled) {
                        val buf = ctx.alloc().heapBuffer()
                        InflaterOutputStream(ByteBufOutputStream(buf)).use {
                            it.write(value.content)
                            buf.retain()
                        }
                        sendMessage(ctx, LastCacheContent(buf))
                    } else {
                        val buf = ctx.alloc().heapBuffer()
                        ByteBufOutputStream(buf).use {
                            it.write(value.content)
                            buf.retain()
                        }
                        sendMessage(ctx, LastCacheContent(buf))
                    }
                } ?: sendMessage(ctx, CacheValueNotFoundResponse(req.request.key))
            }

            is InProgressPutRequest -> {
                this.inProgressRequest = null
                val buf = req.buf
                buf.retain()
                req.close()

                val bytes = ByteArray(buf.readableBytes()).also(buf::readBytes)
                buf.release()
                val cacheKey = processCacheKey(req.request.key, null, digestAlgorithm)
                cache.put(cacheKey, CacheEntry(req.request.metadata, bytes))
                sendMessageAndFlush(ctx, CachePutResponse(req.request.key))
            }
        }
    }

    override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
        inProgressRequest?.close()
        inProgressRequest = null
        super.exceptionCaught(ctx, cause)
    }
}
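The in-memory handler deflates PUT bodies as they stream in and, on GET, inflates the stored bytes by writing them through an `InflaterOutputStream` into the response buffer. A quick round-trip sketch of that scheme using plain byte arrays:

```kotlin
import java.io.ByteArrayOutputStream
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.InflaterOutputStream

fun main() {
    val original = "the same chunk of text repeated ".repeat(100).encodeToByteArray()

    // Compress: the DeflaterOutputStream finishes the stream when closed
    val compressed = ByteArrayOutputStream().let { baos ->
        DeflaterOutputStream(baos, Deflater(Deflater.DEFAULT_COMPRESSION)).use { it.write(original) }
        baos.toByteArray()
    }

    // Decompress: writing the stored bytes through an InflaterOutputStream restores them
    val restored = ByteArrayOutputStream().let { baos ->
        InflaterOutputStream(baos).use { it.write(compressed) }
        baos.toByteArray()
    }

    println("${original.size} -> ${compressed.size} -> ${restored.size}")
    check(restored.contentEquals(original))
}
```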
@@ -1,17 +1,17 @@
package net.woggioni.rbcs.server.cache

import java.time.Duration
import java.util.zip.Deflater
import net.woggioni.rbcs.api.CacheProvider
import net.woggioni.rbcs.common.RBCS
import net.woggioni.rbcs.common.Xml
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
import org.w3c.dom.Document
import org.w3c.dom.Element
import java.time.Duration
import java.util.zip.Deflater

class InMemoryCacheProvider : CacheProvider<InMemoryCacheConfiguration> {

    override fun getXmlSchemaLocation() = "classpath:net/woggioni/rbcs/server/schema/rbcs.xsd"
    override fun getXmlSchemaLocation() = "classpath:net/woggioni/rbcs/server/schema/rbcs-server.xsd"

    override fun getXmlType() = "inMemoryCacheType"

@@ -30,14 +30,13 @@ class InMemoryCacheProvider : CacheProvider<InMemoryCacheConfiguration> {
        val compressionLevel = el.renderAttribute("compression-level")
            ?.let(String::toInt)
            ?: Deflater.DEFAULT_COMPRESSION
        val digestAlgorithm = el.renderAttribute("digest") ?: "MD5"

        val digestAlgorithm = el.renderAttribute("digest")
        return InMemoryCacheConfiguration(
            maxAge,
            maxSize,
            digestAlgorithm,
            enableCompression,
            compressionLevel
            compressionLevel,
        )
    }
@@ -1,8 +1,8 @@
package net.woggioni.rbcs.server.configuration

import java.util.ServiceLoader
import net.woggioni.rbcs.api.CacheProvider
import net.woggioni.rbcs.api.Configuration
import java.util.ServiceLoader

object CacheSerializers {
    val index = (Configuration::class.java.module.layer?.let { layer ->
@@ -1,5 +1,8 @@
package net.woggioni.rbcs.server.configuration

import java.nio.file.Paths
import java.time.Duration
import java.time.temporal.ChronoUnit
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.api.Configuration.Authentication
import net.woggioni.rbcs.api.Configuration.BasicAuthentication
@@ -18,22 +21,19 @@ import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
import org.w3c.dom.Document
import org.w3c.dom.Element
import org.w3c.dom.TypeInfo
import java.nio.file.Paths
import java.time.Duration
import java.time.temporal.ChronoUnit

object Parser {
    fun parse(document: Document): Configuration {
        val root = document.documentElement
        val anonymousUser = User("", null, emptySet(), null)
        var connection: Configuration.Connection = Configuration.Connection(
            Duration.of(10, ChronoUnit.SECONDS),
            Duration.of(10, ChronoUnit.SECONDS),
            Duration.of(30, ChronoUnit.SECONDS),
            Duration.of(60, ChronoUnit.SECONDS),
            Duration.of(30, ChronoUnit.SECONDS),
            Duration.of(30, ChronoUnit.SECONDS),
            67108864
            Duration.of(60, ChronoUnit.SECONDS),
            0x4000000,
            0x10000
        )
        var rateLimiter = Configuration.RateLimiter(false, 0x100000, 100)
        var eventExecutor: Configuration.EventExecutor = Configuration.EventExecutor(true)
        var cache: Cache? = null
        var host = "127.0.0.1"
@@ -113,10 +113,6 @@ object Parser {
                }

                "connection" -> {
                    val writeTimeout = child.renderAttribute("write-timeout")
                        ?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
                    val readTimeout = child.renderAttribute("read-timeout")
                        ?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
                    val idleTimeout = child.renderAttribute("idle-timeout")
                        ?.let(Duration::parse) ?: Duration.of(30, ChronoUnit.SECONDS)
                    val readIdleTimeout = child.renderAttribute("read-idle-timeout")
@@ -124,23 +120,37 @@ object Parser {
                    val writeIdleTimeout = child.renderAttribute("write-idle-timeout")
                        ?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
                    val maxRequestSize = child.renderAttribute("max-request-size")
                        ?.let(String::toInt) ?: 67108864
                        ?.let(Integer::decode) ?: 0x4000000
                    val chunkSize = child.renderAttribute("chunk-size")
                        ?.let(Integer::decode) ?: 0x10000
                    connection = Configuration.Connection(
                        readTimeout,
                        writeTimeout,
                        idleTimeout,
                        readIdleTimeout,
                        writeIdleTimeout,
                        maxRequestSize
                        maxRequestSize,
                        chunkSize
                    )
                }

                "event-executor" -> {
                    val useVirtualThread = root.renderAttribute("use-virtual-threads")
                    val useVirtualThread = child.renderAttribute("use-virtual-threads")
                        ?.let(String::toBoolean) ?: true
                    eventExecutor = Configuration.EventExecutor(useVirtualThread)
                }

                "rate-limiter" -> {
                    val delayResponse = child.renderAttribute("delay-response")
                        ?.let(String::toBoolean)
                        ?: false
                    val messageBufferSize = child.renderAttribute("message-buffer-size")
                        ?.let(Integer::decode)
                        ?: 0x100000
                    val maxQueuedMessages = child.renderAttribute("max-queued-messages")
                        ?.let(Integer::decode)
                        ?: 100
                    rateLimiter = Configuration.RateLimiter(delayResponse, messageBufferSize, maxQueuedMessages)
                }

                "tls" -> {
                    var keyStore: KeyStore? = null
                    var trustStore: TrustStore? = null
@@ -188,6 +198,7 @@ object Parser {
            incomingConnectionsBacklogSize,
            serverPath,
            eventExecutor,
            rateLimiter,
            connection,
            users,
            groups,
@@ -201,6 +212,7 @@ object Parser {
            when (it.localName) {
                "reader" -> Role.Reader
                "writer" -> Role.Writer
                "healthcheck" -> Role.Healthcheck
                else -> throw UnsupportedOperationException("Illegal node '${it.localName}'")
            }
        }.toSet()
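The parser change above switches numeric attributes from `String::toInt` to `Integer::decode`, which also accepts hexadecimal literals such as the new `chunk-size` and `max-request-size` defaults. A small illustration:

```kotlin
fun main() {
    // Integer.decode handles decimal, hex (0x...) and octal literals alike
    val attributeValues = listOf("67108864", "0x4000000", "0x10000")
    println(attributeValues.map(Integer::decode)) // [67108864, 67108864, 65536]
}
```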
Some files were not shown because too many files have changed in this diff.