Compare commits: doc ... 3774ab8ef0 — 48 commits

Commit SHA1s (author and date columns were not captured):
3774ab8ef0, 303828392e, 5d8cbe34ef, 85c0d4a384, ae8817ad2a, 69f215e68f, 222b475223, ede515e2ca,
974fdb7a91, a294229ff0, 9600dd7e4f, 729276a2b1, 7ba7070693, 59a12d6218, fc298de548, 8b639fc0b3,
5545f618f9, 43c0938d9a, 17215b401a, 4aced1c717, 31ce34cddb, d64f7f4f27, d15235fc4c, 49bb4f41b8,
a1398045ac, 1f93602102, c818463a2e, cd28563985, 8ef2d9c64e, 1510956989, ac4f0fdd19, 37da03c719,
60bc4375cf, 725fe22b80, ca18b63f27, 23f2a351a6, c7d2b89d82, 72c34b57a6, 619873c4a9, 591f6e2af4,
ad00ebee9b, adf8a0cf24, 42eb26a948, f048a60540, 0463038aaa, 7eca8a270d, 84d7c977f9, 317eadce07

`.gitea/workflows/build-dev.yaml` (new file, 80 lines)

```yaml
name: CI
on:
  push:
    branches:
      - 'dev'
jobs:
  build:
    runs-on: hostinger
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Setup Gradle
        uses: gradle/actions/setup-gradle@v3
      - name: Execute Gradle build
        run: ./gradlew build
      - name: Prepare Docker image build
        run: ./gradlew prepareDockerBuild
      - name: Get project version
        id: retrieve-version
        run: ./gradlew -q version >> "$GITHUB_OUTPUT"
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: docker-container
      - name: Login to Gitea container registry
        uses: docker/login-action@v3
        with:
          registry: gitea.woggioni.net
          username: woggioni
          password: ${{ secrets.PUBLISHER_TOKEN }}
      -
        name: Build rbcs Docker image
        uses: docker/build-push-action@v5.3.0
        with:
          context: "docker/build/docker"
          platforms: linux/amd64,linux/arm64
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:vanilla-dev
          target: release-vanilla
          cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
      -
        name: Build rbcs memcache Docker image
        uses: docker/build-push-action@v5.3.0
        with:
          context: "docker/build/docker"
          platforms: linux/amd64,linux/arm64
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:memcache-dev
          target: release-memcache
          cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
          cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/rbcs:buildx
      -
        name: Build rbcs native Docker image
        uses: docker/build-push-action@v5.3.0
        with:
          context: "docker/build/docker"
          platforms: linux/amd64
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:native-dev
          target: release-native
      -
        name: Build rbcs jlink Docker image
        uses: docker/build-push-action@v5.3.0
        with:
          context: "docker/build/docker"
          platforms: linux/amd64
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:jlink-dev
          target: release-jlink
```
Changes to the existing CI workflow (file path not shown in the capture, two hunks):

```diff
@@ -39,9 +39,9 @@ jobs:
           push: true
           pull: true
           tags: |
-            gitea.woggioni.net/woggioni/rbcs:latest
-            gitea.woggioni.net/woggioni/rbcs:${{ steps.retrieve-version.outputs.VERSION }}
-          target: release
+            gitea.woggioni.net/woggioni/rbcs:vanilla
+            gitea.woggioni.net/woggioni/rbcs:vanilla-${{ steps.retrieve-version.outputs.VERSION }}
+          target: release-vanilla
           cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
       -
         name: Build rbcs memcache Docker image
@@ -52,11 +52,37 @@ jobs:
           push: true
           pull: true
           tags: |
-            gitea.woggioni.net/woggioni/rbcs:latest
-            gitea.woggioni.net/woggioni/rbcs:${{ steps.retrieve-version.outputs.VERSION }}
             gitea.woggioni.net/woggioni/rbcs:memcache
             gitea.woggioni.net/woggioni/rbcs:memcache-${{ steps.retrieve-version.outputs.VERSION }}
           target: release-memcache
           cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
           cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/rbcs:buildx
+      -
+        name: Build rbcs native Docker image
+        uses: docker/build-push-action@v5.3.0
+        with:
+          context: "docker/build/docker"
+          platforms: linux/amd64
+          push: true
+          pull: true
+          tags: |
+            gitea.woggioni.net/woggioni/rbcs:native
+            gitea.woggioni.net/woggioni/rbcs:native-${{ steps.retrieve-version.outputs.VERSION }}
+          target: release-native
+      -
+        name: Build rbcs jlink Docker image
+        uses: docker/build-push-action@v5.3.0
+        with:
+          context: "docker/build/docker"
+          platforms: linux/amd64
+          push: true
+          pull: true
+          tags: |
+            gitea.woggioni.net/woggioni/rbcs:jlink
+            gitea.woggioni.net/woggioni/rbcs:jlink-${{ steps.retrieve-version.outputs.VERSION }}-jlink
+          target: release-jlink
       - name: Publish artifacts
         env:
           PUBLISHER_TOKEN: ${{ secrets.PUBLISHER_TOKEN }}
```

`README.md` (341 changed lines, hunk @@ -1,37 +1,360 @@)

Removed from the old README: the "## Getting Started", "### Downloading the jar file", "### Using the Docker image", "## Usage", "## Configuration", "### Using RBCS with Gradle" and "### Using RBCS with Maven" sections together with their package download links. The new content follows.

# Remote Build Cache Server

*(CI status and package version badges for the build workflow, rbcs-cli and rbcs-server-memcache appear here; one badge is commented out.)*

Speed up your builds by sharing and reusing unchanged build outputs across your team.

Remote Build Cache Server (RBCS) allows teams to share and reuse unchanged build and test outputs,
significantly reducing build times for both local and CI environments. By eliminating redundant work,
RBCS helps teams become more productive and efficient.

**Key Features:**
- Support for both Gradle and Maven build environments
- Pluggable storage backends (in-memory, disk-backed, memcached)
- Flexible authentication (HTTP basic or TLS certificate)
- Role-based access control
- Request throttling

## Table of Contents
- [Quickstart](#quickstart)
- [Integration with build tools](#integration-with-build-tools)
  - [Use RBCS with Gradle](#use-rbcs-with-gradle)
  - [Use RBCS with Maven](#use-rbcs-with-maven)
- [Server configuration](#server-configuration)
- [Authentication](#authentication)
  - [HTTP Basic authentication](#configure-http-basic-authentication)
  - [TLS client certificate authentication](#configure-tls-certificate-authentication)
- [Authentication & Access Control](#access-control)
- [Plugins](#plugins)
- [Client Tools](#rbcs-client)
- [Logging](#logging)
- [Performance](#performance)
- [FAQ](#faq)

Remote Build Cache Server (shortened to RBCS) allows you to share and reuse unchanged build
and test outputs across the team. This speeds up local and CI builds since cycles are not wasted
re-building components that are unaffected by new code changes. RBCS supports both Gradle and
Maven build tool environments.

It comes with pluggable storage backends: the core application offers in-memory and disk-backed storage,
and an official plugin adds memcached as a storage backend.

It supports HTTP basic authentication or, alternatively, TLS certificate authentication, role-based access control (RBAC),
and throttling.

## Quickstart

### Use the all-in-one jar file
You can download the latest version from [this link](https://gitea.woggioni.net/woggioni/-/packages/maven/net.woggioni:rbcs-cli/)

Assuming you have Java 21 or later installed, you can launch the server directly with

```bash
java -jar rbcs-cli.jar server
```

By default it starts an HTTP server bound to localhost on port 8080, with no authentication and
writing data to disk, which you can use for testing.

### Use the Docker image
You can pull the latest Docker image with
```bash
docker pull gitea.woggioni.net/woggioni/rbcs:latest
```

By default it starts an HTTP server bound to localhost on port 8080, with no authentication and
writing data to disk, which you can use for testing.
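For example, an illustrative way to run the container locally (assuming the image entrypoint accepts the same `server` subcommand as the jar, as the Kubernetes manifests later in this diff do; host networking is used because the default configuration binds to localhost):

```bash
docker run --rm --network host gitea.woggioni.net/woggioni/rbcs:latest server
```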
### Use the native executable

If you are on a Linux x86_64 machine you can download the native executable
from [here](https://gitea.woggioni.net/woggioni/-/packages/maven/net.woggioni:rbcs-cli/).
It behaves the same as the jar file, but it doesn't require a JVM and has faster startup times.
Because of GraalVM's [closed-world assumption](https://www.graalvm.org/latest/reference-manual/native-image/basics/#static-analysis),
the native executable does not support plugins, so it comes with all plugins embedded into it.

> [!WARNING]
> The native executable is built with `-march=skylake`, so it may fail with SIGILL on x86 CPUs that do not support
> the full Skylake instruction set (as a rule of thumb, CPUs older than 2015)

## Integration with build tools

### Use RBCS with Gradle

Add this to the `settings.gradle` file of your project

```groovy
buildCache {
    remote(HttpBuildCache) {
        url = 'https://rbcs.example.com/'
        push = true
        allowInsecureProtocol = false
        // The credentials block is only required if you enable
        // HTTP basic authentication on RBCS
        credentials {
            username = 'build-cache-user'
            password = 'some-complicated-password'
        }
    }
}
```

Alternatively, you can add this to `${GRADLE_HOME}/init.gradle` to configure the remote cache
at the system level

```groovy
gradle.settingsEvaluated { settings ->
    settings.buildCache {
        remote(HttpBuildCache) {
            url = 'https://rbcs.example.com/'
            push = true
            allowInsecureProtocol = false
            // The credentials block is only required if you enable
            // HTTP basic authentication on RBCS
            credentials {
                username = 'build-cache-user'
                password = 'some-complicated-password'
            }
        }
    }
}
```

Then add `org.gradle.caching=true` to your `<project>/gradle.properties` or run Gradle with `--build-cache`.
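Concretely, either of these achieves it (illustrative commands run from the project root):

```bash
# persist the setting for this project
echo 'org.gradle.caching=true' >> gradle.properties
# or enable it for a single invocation
./gradlew --build-cache build
```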
Read the [Gradle documentation](https://docs.gradle.org/current/userguide/build_cache.html) for more detailed information.

### Use RBCS with Maven

1. Create an `extensions.xml` in `<project>/.mvn/extensions.xml` with the following content
```xml
<extensions xmlns="http://maven.apache.org/EXTENSIONS/1.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
            xsi:schemaLocation="http://maven.apache.org/EXTENSIONS/1.1.0 https://maven.apache.org/xsd/core-extensions-1.0.0.xsd">
    <extension>
        <groupId>org.apache.maven.extensions</groupId>
        <artifactId>maven-build-cache-extension</artifactId>
        <version>1.2.0</version>
    </extension>
</extensions>
```
2. Copy [maven-build-cache-config.xml](https://maven.apache.org/extensions/maven-build-cache-extension/maven-build-cache-config.xml) into the `<project>/.mvn/` folder
3. Edit the `cache/configuration/remote` element
```xml
<remote enabled="true" id="rbcs">
    <url>https://rbcs.example.com/</url>
</remote>
```
4. Run Maven with
```bash
mvn -Dmaven.build.cache.enabled=true -Dmaven.build.cache.debugOutput=true -Dmaven.build.cache.remote.save.enabled=true package
```

Alternatively you can set those properties in your `<project>/pom.xml`
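A sketch of what that could look like (property names are taken from the command above; placing them under `<properties>` is an assumption, check the extension's documentation linked below):

```xml
<!-- illustrative only: enables the remote build cache for every build of this project -->
<properties>
    <maven.build.cache.enabled>true</maven.build.cache.enabled>
    <maven.build.cache.remote.save.enabled>true</maven.build.cache.remote.save.enabled>
</properties>
```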
Read [here](https://maven.apache.org/extensions/maven-build-cache-extension/remote-cache.html)
for more information.

## Server configuration

RBCS reads an XML configuration file, by default named `rbcs-server.xml`.
The expected location of the `rbcs-server.xml` file depends on the operating system;
if the configuration file is not found, a default one is created and its location is printed
on the console:

```bash
user@76a90cbcd75d:~$ rbcs-cli server
2025-01-01 00:00:00,000 [INFO ] (main) n.w.r.c.impl.commands.ServerCommand -- Creating default configuration file at '/home/user/.config/rbcs/rbcs-server.xml'
```

Alternatively, the location can be changed by setting the `RBCS_CONFIGURATION_DIR` environment variable or the
`net.woggioni.rbcs.conf.dir` Java system property to the directory that contains the `rbcs-server.xml` file.
It can also be specified directly from the command line with
```bash
java -jar rbcs-cli.jar server -c /path/to/rbcs-server.xml
```

The server configuration file follows the XML format and uses an XML schema for validation
(you can find the schema for the `rbcs-server.xml` configuration file [here](https://gitea.woggioni.net/woggioni/rbcs/src/branch/master/rbcs-server/src/main/resources/net/woggioni/rbcs/server/schema/rbcs-server.xsd)).

The configuration values are enclosed inside XML attributes and support system property / environment variable interpolation.
As an example, you can configure RBCS to read the server port number from the `RBCS_SERVER_PORT` environment variable
and the bind address from the `rbc.bind.address` JVM system property with

```xml
<bind host="${sys:rbc.bind.address}" port="${env:RBCS_SERVER_PORT}"/>
```
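For instance, with that `<bind>` element the server could be started like this (an illustrative invocation reusing the variable names above):

```bash
RBCS_SERVER_PORT=8080 java -Drbc.bind.address=0.0.0.0 -jar rbcs-cli.jar server -c /path/to/rbcs-server.xml
```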
Full documentation for all tags and attributes, together with configuration file examples,
is available [here](doc/server_configuration.md).

### Plugins
If you want to use memcache as a storage backend you'll also need to download [the memcache plugin](https://gitea.woggioni.net/woggioni/-/packages/maven/net.woggioni:rbcs-server-memcache/)

Plugins need to be stored in a folder named `plugins` located in the server's working directory
(the directory where the server process is started). They are shipped as TAR archives, so you need to extract
the content of the archive into the `plugins` directory for the server to pick them up.
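For example (the archive name is illustrative — use the file you actually downloaded):

```bash
mkdir -p plugins
tar -xf rbcs-server-memcache-<version>.tar -C plugins
```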
## Authentication

RBCS supports 2 authentication mechanisms:

- HTTP basic authentication
- TLS certificate authentication

### Configure HTTP basic authentication

Add a `<basic>` element to the `<authentication>` element in your `rbcs-server.xml`
```xml
<authentication>
    <basic/>
</authentication>
```

### Configure TLS certificate authentication

Add a `<client-certificate>` element to the `<authentication>` element in your `rbcs-server.xml`
```xml
<authentication>
    <client-certificate>
        <user-extractor attribute-name="CN" pattern="(.*)"/>
        <group-extractor attribute-name="O" pattern="(.*)"/>
    </client-certificate>
</authentication>
```
The `<user-extractor>` element determines how the username is extracted from the
subject's X.500 name in the TLS certificate presented by the client: `attribute-name`
is the `RelativeDistinguishedName` (RDN) identifier and `pattern` is a regular expression
whose first capturing group yields the username.
An error is thrown if the regular expression contains no groups, while additional
groups are ignored.

Similarly, the `<group-extractor>` element determines how the group name is extracted from the
subject's X.500 name in the TLS certificate presented by the client.
Note that this makes it possible to assign roles to incoming requests without necessarily assigning them
a username.
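For example, with the extractors shown above a client certificate whose subject is `CN=jdoe,O=developers` (an illustrative subject) would be authenticated as user `jdoe` belonging to group `developers`.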
## Access control

RBCS supports role-based access control (RBAC); three roles are available:
- `Reader` can perform `GET` calls
- `Writer` can perform `PUT` calls
- `Healthcheck` can perform `TRACE` calls

Roles are assigned to groups, so a user has a role only if that role belongs
to one of the groups they are a member of.

There is also a special `<anonymous>` user,
which matches any request that has not been authenticated and can be assigned
to any group like a normal user. This makes it possible to have a build cache that is
publicly readable but only writable by authenticated users (e.g. a CI/CD pipeline).

### Defining users

Users can be defined in the `<authorization>` element
```xml
<authorization>
    <users>
        <user name="user1" password="kb/vNnkn2RvyPkTN6Q07uH0F7wI7u61MkManD3NHregRukBg4KHehfbqtLTb39fZjHA+SRH+EpEWDCf+Rihr5H5C1YN5qwmArV0p8O5ptC4="/>
        <user name="user2" password="2J7MAhdIzZ3SO+JGB+K6wPhb4P5LH1L4L7yJCl5QrxNfAWRr5jTUExJRbcgbH1UfnkCbIO1p+xTDq+FCj3LFBZeMZUNZ47npN+WR7AX3VTo="/>
        <anonymous/>
    </users>
    <groups>
        <group name="readers">
            <users>
                <anonymous/>
            </users>
            <roles>
                <reader/>
            </roles>
        </group>
        <group name="writers">
            <users>
                <user ref="user1"/>
                <user ref="user2"/>
            </users>
            <roles>
                <reader/>
                <writer/>
                <healthcheck/>
            </roles>
        </group>
    </groups>
</authorization>
```

The `password` attribute is only used for HTTP Basic authentication, so it can be omitted
if you use TLS certificate authentication. It must contain a password hash that can be derived from
the actual password using the following command

```bash
java -jar rbcs-cli.jar password
```

## Reliability

RBCS implements the [TRACE](https://developer.mozilla.org/en-US/docs/Web/HTTP/Methods/TRACE) HTTP method, and this functionality can be used
as a health check (note that you need the `Healthcheck` role to perform it, and the request URL must match the server's `prefix`).
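A minimal probe could look like this (an illustrative call, assuming HTTP basic authentication and the example host used earlier):

```bash
curl -X TRACE -u build-cache-user:some-complicated-password https://rbcs.example.com/
```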
## RBCS Client

RBCS ships with a command line client that can be used for testing, benchmarking, or to manually
upload/download files to the cache. It must be configured with the `rbcs-client.xml` file,
whose location follows the same logic as the `rbcs-server.xml`.
The `rbcs-client.xml` must adhere to the [rbcs-client.xsd](rbcs-client/src/main/resources/net/woggioni/rbcs/client/schema/rbcs-client.xsd)
XML schema.

The documentation for the `rbcs-client.xml` configuration file is available [here](doc/client_configuration.md)

### GET command

```bash
java -jar rbcs-cli.jar client -p $CLIENT_PROFILE_NAME get -k $CACHE_KEY -v $FILE_WHERE_THE_VALUE_WILL_BE_STORED
```

### PUT command

```bash
java -jar rbcs-cli.jar client -p $CLIENT_PROFILE_NAME put -k $CACHE_KEY -v $FILE_TO_BE_UPLOADED
```

If you don't specify the key, a UUID key based on the file content will be used;
if you add the `-i` command line parameter, the uploaded file will be served with the
`Content-Disposition: inline` HTTP header, so that the browser will attempt to render
it in the page instead of triggering a file download (in this way you can create a temporary web page).

The client will try to detect the file MIME type upon upload, but if you want to be sure you can specify
it manually with the `-t` parameter.
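For example, a hypothetical upload of an HTML report meant to render directly in the browser (the file name and MIME type value are illustrative):

```bash
java -jar rbcs-cli.jar client -p $CLIENT_PROFILE_NAME put -i -t text/html -v report.html
```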
### Benchmark command

```bash
java -jar rbcs-cli.jar client -p $CLIENT_PROFILE_NAME benchmark -s 4096 -e 10000
```
This will insert 10000 randomly generated entries of 4096 bytes into RBCS, then retrieve them
and check that the retrieved values match what was inserted.
It will also print throughput stats along the way.

## Logging

RBCS uses [logback](https://logback.qos.ch/) and ships with a [default logging configuration](./conf/logback.xml) that
can be overridden with `-Dlogback.configurationFile=path/to/custom/configuration.xml`; refer to the
[Logback documentation](https://logback.qos.ch/manual/configuration.html) for more details about
how to configure Logback.

## Performance

You can check performance benchmarks [here](doc/benchmarks.md)

## FAQ
### Why should I use a build cache?

`benchmark/rbcs-filesystem.yml` (new file, 94 lines)

```yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: rbcs-server
data:
  rbcs-server.xml: |
    <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
    <rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
                 xmlns:rbcs="urn:net.woggioni.rbcs.server"
                 xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
                 xs:schemaLocation="urn:net.woggioni.rbcs.server.memcache jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd"
    >
        <bind host="0.0.0.0" port="8080" incoming-connections-backlog-size="128"/>
        <connection
            max-request-size="0xd000000"
            idle-timeout="PT15S"
            read-idle-timeout="PT30S"
            write-idle-timeout="PT30S"/>
        <event-executor use-virtual-threads="true"/>
        <cache xs:type="rbcs:fileSystemCacheType" max-age="P7D" enable-compression="false" path="/home/luser/cache" digest="SHA-224"/>
    </rbcs:server>

---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: rbcs-pvc
  namespace: default
spec:
  accessModes:
    - ReadWriteOnce
  storageClassName: local-path
  resources:
    requests:
      storage: 16Gi
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: rbcs-deployment
  labels:
    app: rbcs
spec:
  replicas: 1
  selector:
    matchLabels:
      app: rbcs
  template:
    metadata:
      labels:
        app: rbcs
    spec:
      containers:
        - name: rbcs
          image: gitea.woggioni.net/woggioni/rbcs:memcache
          imagePullPolicy: Always
          command: ["java", "-Dlogback.configurationFile=logback.xml", "-XX:MaxRAMPercentage=75","-jar", "/home/luser/rbcs.jar"]
          args: ['server', '-c', 'rbcs-server.xml']
          ports:
            - containerPort: 8080
          volumeMounts:
            - name: config-volume
              mountPath: /home/luser/rbcs-server.xml
              subPath: rbcs-server.xml
            - name: cache-volume
              mountPath: /home/luser/cache
          resources:
            requests:
              memory: "0.25Gi"
              cpu: "1"
            limits:
              memory: "0.5Gi"
              cpu: "1"
      volumes:
        - name: config-volume
          configMap:
            name: rbcs-server
        - name: cache-volume
          persistentVolumeClaim:
            claimName: rbcs-pvc
---
apiVersion: v1
kind: Service
metadata:
  name: rbcs-service
spec:
  type: LoadBalancer
  ports:
    - port: 8080
      targetPort: 8080
      protocol: TCP
  selector:
    app: rbcs
```

`benchmark/rbcs-in-memory.yml` (new file, 77 lines)

```yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: rbcs-server
data:
  rbcs-server.xml: |
    <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
    <rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
                 xmlns:rbcs="urn:net.woggioni.rbcs.server"
                 xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
                 xs:schemaLocation="urn:net.woggioni.rbcs.server.memcache jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd"
    >
        <bind host="0.0.0.0" port="8080" incoming-connections-backlog-size="128"/>
        <connection
            max-request-size="0xd000000"
            idle-timeout="PT15S"
            read-idle-timeout="PT30S"
            write-idle-timeout="PT30S"/>
        <event-executor use-virtual-threads="true"/>
        <cache xs:type="rbcs:inMemoryCacheType" max-age="P7D" enable-compression="false" max-size="0x40000000" digest="SHA-224"/>
    </rbcs:server>

---

apiVersion: apps/v1
kind: Deployment
metadata:
  name: rbcs-deployment
  labels:
    app: rbcs
spec:
  replicas: 1
  selector:
    matchLabels:
      app: rbcs
  template:
    metadata:
      labels:
        app: rbcs
    spec:
      containers:
        - name: rbcs
          image: gitea.woggioni.net/woggioni/rbcs:memcache
          imagePullPolicy: Always
          command: ["java", "-Dlogback.configurationFile=logback.xml", "-XX:MaxRAMPercentage=75","-jar", "/home/luser/rbcs.jar"]
          args: ['server', '-c', 'rbcs-server.xml']
          ports:
            - containerPort: 8080
          volumeMounts:
            - name: config-volume
              mountPath: /home/luser/rbcs-server.xml
              subPath: rbcs-server.xml
          resources:
            requests:
              memory: "0.5Gi"
              cpu: "1"
            limits:
              memory: "4Gi"
              cpu: "1"
      volumes:
        - name: config-volume
          configMap:
            name: rbcs-server
---
apiVersion: v1
kind: Service
metadata:
  name: rbcs-service
spec:
  type: LoadBalancer
  ports:
    - port: 8080
      targetPort: 8080
      protocol: TCP
  selector:
    app: rbcs
```

`benchmark/rbcs-memcache.yml` (new file, 118 lines)

```yaml
apiVersion: v1
kind: ConfigMap
metadata:
  name: rbcs-server
data:
  rbcs-server.xml: |
    <?xml version="1.0" encoding="UTF-8" standalone="yes"?>
    <rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
                 xmlns:rbcs="urn:net.woggioni.rbcs.server"
                 xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
                 xs:schemaLocation="urn:net.woggioni.rbcs.server.memcache jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd"
    >
        <bind host="0.0.0.0" port="8080" incoming-connections-backlog-size="128"/>
        <connection
            max-request-size="0xd000000"
            idle-timeout="PT15S"
            read-idle-timeout="PT30S"
            write-idle-timeout="PT30S"/>
        <event-executor use-virtual-threads="true"/>
        <!--cache xs:type="rbcs:inMemoryCacheType" max-age="P7D" enable-compression="false" max-size="0x10000000" /-->
        <cache xs:type="rbcs-memcache:memcacheCacheType" max-age="P7D" chunk-size="0x1000" digest="SHA-224">
            <server host="memcached-service" port="11211" max-connections="256"/>
        </cache>
    </rbcs:server>

---

apiVersion: apps/v1
kind: Deployment
metadata:
  name: rbcs-deployment
  labels:
    app: rbcs
spec:
  replicas: 1
  selector:
    matchLabels:
      app: rbcs
  template:
    metadata:
      labels:
        app: rbcs
    spec:
      containers:
        - name: rbcs
          image: gitea.woggioni.net/woggioni/rbcs:memcache
          imagePullPolicy: Always
          command: ["java", "-Dlogback.configurationFile=logback.xml", "-XX:MaxRAMPercentage=75","-jar", "/home/luser/rbcs.jar"]
          args: ['server', '-c', 'rbcs-server.xml']
          ports:
            - containerPort: 8080
          volumeMounts:
            - name: config-volume
              mountPath: /home/luser/rbcs-server.xml
              subPath: rbcs-server.xml
          resources:
            requests:
              memory: "0.5Gi"
              cpu: "1"
            limits:
              memory: "0.5Gi"
              cpu: "3.5"
      volumes:
        - name: config-volume
          configMap:
            name: rbcs-server
---
apiVersion: v1
kind: Service
metadata:
  name: rbcs-service
spec:
  type: LoadBalancer
  ports:
    - port: 8080
      targetPort: 8080
      protocol: TCP
  selector:
    app: rbcs
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: memcached-deployment
spec:
  replicas: 1
  selector:
    matchLabels:
      app: memcached
  template:
    metadata:
      labels:
        app: memcached
    spec:
      containers:
        - name: memcached
          image: memcached
          args: ["-I", "128m", "-m", "4096", "-t", "1"]
          resources:
            requests:
              memory: "1Gi"
              cpu: "500m" # 0.5 CPU
            limits:
              memory: "5Gi"
              cpu: "500m" # 0.5 CPU
---
apiVersion: v1
kind: Service
metadata:
  name: memcached-service
spec:
  type: ClusterIP # ClusterIP makes it accessible only within the cluster
  ports:
    - port: 11211 # Default memcached port
      targetPort: 11211
      protocol: TCP
  selector:
    app: memcached
```
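For reference, these benchmark manifests can presumably be applied to a cluster in the usual way (an assumed invocation; pick the manifest matching the backend you want to measure):

```bash
kubectl apply -f benchmark/rbcs-memcache.yml
```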
Changes to the Gradle build script (file path not shown in the capture, three hunks):

```diff
@@ -14,9 +14,7 @@ allprojects { subproject ->
         if(project.currentTag.isPresent()) {
             version = project.currentTag.map { it[0] }.get()
         } else {
-            version = project.gitRevision.map { gitRevision ->
-                "${getProperty('rbcs.version')}.${gitRevision[0..10]}"
-            }.get()
+            version = "${getProperty('rbcs.version')}-SNAPSHOT"
         }

         repositories {
@@ -24,7 +22,6 @@ allprojects { subproject ->
                 url = getProperty('gitea.maven.url')
                 content {
                     includeModule 'net.woggioni', 'jwo'
-                    includeModule 'net.woggioni', 'xmemcached'
                     includeGroup 'com.lys'
                 }
             }
@@ -42,7 +39,6 @@ allprojects { subproject ->
             modularity.inferModulePath = true
             toolchain {
                 languageVersion = JavaLanguageVersion.of(21)
-                vendor = JvmVendorSpec.ORACLE
             }
         }

```

`doc/benchmarks.md` (new file, 86 lines)

# RBCS performance benchmarks

All tests were executed under the following conditions:
- CPU: Intel Celeron J3455 (4 physical cores)
- memory: 8GB DDR3L 1600 MHz
- disk: SATA3 120GB SSD
- HTTP compression: disabled
- cache compression: disabled
- digest: none
- authentication: disabled
- TLS: disabled
- network RTT: 14ms
- network bandwidth: 112 MiB/s
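The document does not record the exact invocation used for these runs; presumably they were produced with the CLI client's `benchmark` subcommand described in the README, along these lines:

```bash
# illustrative: 10000 entries of 4096 bytes against the profile under test
java -jar rbcs-cli.jar client -p $CLIENT_PROFILE_NAME benchmark -s 4096 -e 10000
```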
### In memory cache backend

| Cache backend | CPU | CPU quota | Memory quota (GB) | Request size (b) | Client connections | PUT (req/s) | GET (req/s) |
|---------------|---------------------|-----------|-------------------|------------------|--------------------|-------------|-------------|
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 128 | 10 | 7867 | 13762 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 128 | 100 | 7728 | 14180 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 512 | 10 | 7964 | 10992 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 512 | 100 | 8415 | 12478 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 4096 | 10 | 4268 | 5395 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 4096 | 100 | 5585 | 8259 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 65536 | 10 | 1063 | 1185 |
| in-memory | Intel Celeron J3455 | 1.00 | 4 | 65536 | 100 | 1522 | 1366 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 128 | 10 | 11271 | 14092 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 128 | 100 | 16064 | 24201 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 512 | 10 | 11504 | 13077 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 512 | 100 | 17379 | 22094 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 4096 | 10 | 9151 | 9489 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 4096 | 100 | 13194 | 18268 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 65536 | 10 | 1590 | 1174 |
| in-memory | Intel Celeron J3455 | 3.50 | 4 | 65536 | 100 | 1539 | 1561 |

### Filesystem cache backend

- compression: disabled
- digest: none
- authentication: disabled
- TLS: disabled

| Cache backend | CPU | CPU quota | Memory quota (GB) | Request size (b) | Client connections | PUT (req/s) | GET (req/s) |
|---------------|---------------------|-----------|-------------------|------------------|--------------------|-------------|-------------|
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 128 | 10 | 1478 | 5771 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 128 | 100 | 3166 | 8070 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 512 | 10 | 1717 | 5895 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 512 | 100 | 1125 | 6564 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 4096 | 10 | 819 | 2509 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 4096 | 100 | 1136 | 2365 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 65536 | 10 | 584 | 632 |
| filesystem | Intel Celeron J3455 | 1.00 | 0.5 | 65536 | 100 | 529 | 635 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 128 | 10 | 1227 | 3342 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 128 | 100 | 1156 | 4035 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 512 | 10 | 979 | 3294 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 512 | 100 | 1217 | 3888 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 4096 | 10 | 535 | 1805 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 4096 | 100 | 555 | 1910 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 65536 | 10 | 301 | 494 |
| filesystem | Intel Celeron J3455 | 3.50 | 0.5 | 65536 | 100 | 353 | 595 |

### Memcache cache backend

- compression: disabled
- digest: MD5
- authentication: disabled
- TLS: disabled

| Cache backend | CPU | CPU quota | Memory quota (GB) | Request size (b) | Client connections | PUT (req/s) | GET (req/s) |
|---------------|---------------------|-----------|-------------------|------------------|--------------------|-------------|-------------|
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 128 | 10 | 3380 | 6083 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 128 | 100 | 3323 | 4998 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 512 | 10 | 3924 | 6086 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 512 | 100 | 3440 | 5049 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 4096 | 10 | 3347 | 5255 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 4096 | 100 | 3685 | 4693 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 65536 | 10 | 1304 | 1343 |
| memcache | Intel Celeron J3455 | 1.00 | 0.25 | 65536 | 100 | 1481 | 1541 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 128 | 10 | 4667 | 7984 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 128 | 100 | 4044 | 8358 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 512 | 10 | 4177 | 7828 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 512 | 100 | 4079 | 8794 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 4096 | 10 | 4588 | 6869 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 4096 | 100 | 5343 | 7797 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 65536 | 10 | 1624 | 1317 |
| memcache | Intel Celeron J3455 | 3.50 | 0.25 | 65536 | 100 | 1633 | 1317 |

`doc/client_configuration.md` (new file, 125 lines)

# XML Schema Documentation: RBCS Client Configuration

This document provides detailed information about the XML schema for RBCS client configuration, which defines profiles for connecting to RBCS servers.

## Root Element

### `profiles`
The root element that contains a collection of server profiles.
- **Type**: `profilesType`
- **Contains**: Zero or more `profile` elements

## Complex Types

### `profilesType`
Defines the structure for the profiles collection.
- **Elements**:
  - `profile`: Server connection profile (0 to unbounded)

### `profileType`
Defines a server connection profile with authentication, connection settings, and retry policies.

- **Attributes**:
  - `name` (required): Name of the server profile, referenced with the `-p` parameter in rbcs-cli
  - `base-url` (required): RBCS server URL
  - `max-connections`: Maximum number of concurrent TCP connections (default: 50)
  - `connection-timeout`: Timeout for establishing connections
  - `enable-compression`: Whether to enable HTTP compression (default: true)

- **Elements** (in sequence):
  - **Authentication** (choice of one):
    - `no-auth`: Disable authentication
    - `basic-auth`: Enable HTTP basic authentication
    - `tls-client-auth`: Enable TLS certificate authentication
  - `connection` (optional): Connection timeout settings
  - `retry-policy` (optional): Retry policy for failed requests
  - `tls-trust-store` (optional): Custom truststore for server certificate validation

### `connectionType`
Defines connection timeout settings.

- **Attributes**:
  - `idle-timeout`: Close connection after inactivity period (default: PT30S - 30 seconds)
  - `read-idle-timeout`: Close connection when no read occurs (default: PT60S - 60 seconds)
  - `write-idle-timeout`: Close connection when no write occurs (default: PT60S - 60 seconds)

### `noAuthType`
Indicates no authentication should be used.
- No attributes or elements

### `basicAuthType`
Configures HTTP Basic Authentication.

- **Attributes**:
  - `user` (required): Username for authentication
  - `password` (required): Password for authentication

### `tlsClientAuthType`
Configures TLS client certificate authentication.

- **Attributes**:
  - `key-store-file` (required): Path to the keystore file
  - `key-store-password` (required): Password to open the keystore
  - `key-alias` (required): Alias of the keystore entry with the private key
  - `key-password` (optional): Private key entry's encryption password

### `retryType`
Defines the retry policy using exponential backoff.

- **Attributes**:
  - `max-attempts` (required): Maximum number of retry attempts
  - `initial-delay`: Delay before first retry (default: PT1S - 1 second)
  - `exp`: Exponent for computing next delay (default: 2.0)
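A plausible reading of these attributes (the schema documentation does not spell the formula out): the wait before retry attempt `n` is `initial-delay × exp^(n−1)`, so with the defaults the delays would grow roughly as 1 s, 2 s, 4 s, ... until `max-attempts` is reached.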
||||||
|
|
||||||
|
### `trustStoreType`
|
||||||
|
Configures custom truststore for server certificate validation.
|
||||||
|
|
||||||
|
- **Attributes**:
|
||||||
|
- `file` (required): Path to the truststore file
|
||||||
|
- `password`: Truststore file password
|
||||||
|
- `check-certificate-status`: Whether to check certificate validity using CRL/OCSP
|
||||||
|
- `verify-server-certificate`: Whether to validate server certificates (default: true)
|
||||||
|
|
||||||
|
## Sample XML Document
|
||||||
|
|
||||||
|
```xml
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<profiles xmlns="urn:net.woggioni.rbcs.client">
|
||||||
|
<!-- Profile with basic authentication -->
|
||||||
|
<profile name="production-server"
|
||||||
|
base-url="https://rbcs.example.com/api"
|
||||||
|
max-connections="100"
|
||||||
|
enable-compression="true">
|
||||||
|
<basic-auth user="admin" password="secure_password123"/>
|
||||||
|
<connection idle-timeout="PT45S"
|
||||||
|
read-idle-timeout="PT90S"
|
||||||
|
write-idle-timeout="PT90S"/>
|
||||||
|
<retry-policy max-attempts="5"
|
||||||
|
initial-delay="PT2S"
|
||||||
|
exp="1.5"/>
|
||||||
|
<tls-trust-store file="/path/to/truststore.jks"
|
||||||
|
password="truststore_password"
|
||||||
|
check-certificate-status="true"/>
|
||||||
|
</profile>
|
||||||
|
|
||||||
|
<!-- Profile with TLS client authentication -->
|
||||||
|
<profile name="secure-server"
|
||||||
|
base-url="https://secure.example.com/api"
|
||||||
|
max-connections="25">
|
||||||
|
<tls-client-auth key-store-file="/path/to/keystore.p12"
|
||||||
|
key-store-password="keystore_password"
|
||||||
|
key-alias="client-cert"
|
||||||
|
key-password="key_password"/>
|
||||||
|
<retry-policy max-attempts="3"/>
|
||||||
|
</profile>
|
||||||
|
|
||||||
|
<!-- Profile with no authentication -->
|
||||||
|
<profile name="development"
|
||||||
|
base-url="http://localhost:8080/api"
|
||||||
|
enable-compression="false">
|
||||||
|
<no-auth/>
|
||||||
|
</profile>
|
||||||
|
</profiles>
|
||||||
|
```
|
||||||
|
|
||||||
|
This sample XML document demonstrates three different profiles with various authentication methods and configuration options as defined in the schema.
|
189
doc/server_configuration.md
Normal file
189
doc/server_configuration.md
Normal file
@@ -0,0 +1,189 @@
|
|||||||
|
### RBCS server configuration file elements and attributes
|
||||||
|
|
||||||
|
#### Root Element: `server`
|
||||||
|
The root element that contains all server configuration.
|
||||||
|
|
||||||
|
**Attributes:**
|
||||||
|
- `path` (optional): URI path prefix for cache requests. Example: if set to "cache", requests would be made to "http://www.example.com/cache/KEY"
|
||||||
|
|
||||||
|
#### Child Elements
|
||||||
|
|
||||||
|
#### `<bind>`
|
||||||
|
Configures server socket settings.
|
||||||
|
|
||||||
|
**Attributes:**
|
||||||
|
- `host` (required): Server bind address
|
||||||
|
- `port` (required): Server port number
|
||||||
|
- `incoming-connections-backlog-size` (optional, default: 1024): Maximum queue length for incoming connection indications
|
||||||
|
|
||||||
|
#### `<connection>`
|
||||||
|
Configures connection handling parameters.
|
||||||
|
|
||||||
|
**Attributes:**
|
||||||
|
- `idle-timeout` (optional, default: PT30S): Connection timeout when no activity
|
||||||
|
- `read-idle-timeout` (optional, default: PT60S): Connection timeout when no reads
|
||||||
|
- `write-idle-timeout` (optional, default: PT60S): Connection timeout when no writes
|
||||||
|
- `max-request-size` (optional, default: 0x4000000): Maximum allowed request body size
|
||||||
|
- `chunk-size` (default: 0x10000): Maximum socket write size
|
||||||
|
|
||||||
|
#### `<event-executor>`
|
||||||
|
Configures event execution settings.
|
||||||
|
|
||||||
|
**Attributes:**
|
||||||
|
- `use-virtual-threads` (optional, default: true): Whether to use virtual threads for the server handler
|
||||||
|
|
||||||
|
#### `<cache>`
|
||||||
|
Defines cache storage implementation. Two types are available:
|
||||||
|
|
||||||
|
##### InMemory Cache
|
||||||
|
|
||||||
|
A simple storage backend that uses an hash map to store data in memory
|
||||||
|
|
||||||
|
**Attributes:**
|
||||||
|
- `max-age` (default: P1D): Cache entry lifetime
|
||||||
|
- `max-size` (default: 0x1000000): Maximum cache size in bytes
|
||||||
|
- `digest` (default: MD5): Key hashing algorithm
|
||||||
|
- `enable-compression` (default: true): Enable deflate compression
|
||||||
|
- `compression-level` (default: -1): Compression level (-1 to 9)
|
||||||
|
|
||||||
|
##### FileSystem Cache
|
||||||
|
|
||||||
|
A storage backend that stores data in a folder on the disk
|
||||||
|
|
||||||
|
**Attributes:**
|
||||||
|
- `path`: Storage directory path
|
||||||
|
- `max-age` (default: P1D): Cache entry lifetime
|
||||||
|
- `digest` (default: MD5): Key hashing algorithm
|
||||||
|
- `enable-compression` (default: true): Enable deflate compression
|
||||||
|
- `compression-level` (default: -1): Compression level
|
||||||
|
|
||||||
|
#### `<authorization>`
|
||||||
|
Configures user and group-based access control.
|
||||||
|
|
||||||
|
##### `<users>`
|
||||||
|
List of registered users.
|
||||||
|
- Contains `<user>` elements:
|
||||||
|
|
||||||
|
**Attributes:**
|
||||||
|
- `name` (required): Username
|
||||||
|
- `password` (optional): For basic authentication
|
||||||
|
- Can contain an `anonymous` element to allow for unauthenticated access
|
||||||
|
|
||||||
|
##### `<groups>`
|
||||||
|
List of user groups.
|
||||||
|
- Contains `<group>` elements:
|
||||||
|
|
||||||
|
**Attributes:**
|
||||||
|
- `name`: Group name
|
||||||
|
- Can contain:
|
||||||
|
- `users`: List of user references
|
||||||
|
- `roles`: List of roles (READER/WRITER)
|
||||||
|
- `user-quota`: Per-user quota
|
||||||
|
- `group-quota`: Group-wide quota
|
||||||
|
|
||||||
|
#### `<authentication>`
|
||||||
|
Configures authentication mechanism. Options:
|
||||||
|
- `<basic>`: HTTP basic authentication
|
||||||
|
- `<client-certificate>`: TLS certificate authentication, it uses attributes of the subject's X.500 name
|
||||||
|
to extract the username and group of the client.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
```xml
|
||||||
|
<client-certificate>
|
||||||
|
<user-extractor attribute-name="CN" pattern="(.*)"/>
|
||||||
|
<group-extractor attribute-name="O" pattern="(.*)"/>
|
||||||
|
</client-certificate>
|
||||||
|
```
|
||||||
|
- `<none>`: No authentication
|
||||||
|
|
||||||
|
#### `<tls>`
|
||||||
|
Configures TLS encryption.
|
||||||
|
|
||||||
|
**Child Elements:**
|
||||||
|
- `<keystore>`: Server certificate configuration
|
||||||
|
|
||||||
|
**Attributes:**
|
||||||
|
- `file` (required): Keystore file path
|
||||||
|
- `password`: Keystore password
|
||||||
|
- `key-alias` (required): Private key alias
|
||||||
|
- `key-password`: Private key password
|
||||||
|
|
||||||
|
- `<truststore>`: Client certificate verification
|
||||||
|
|
||||||
|
**Attributes:**
|
||||||
|
- `file` (required): Truststore file path
|
||||||
|
- `password`: Truststore password
|
||||||
|
- `check-certificate-status`: Enable CRL/OCSP checking
|
||||||
|
- `require-client-certificate` (default: false): Require client certificates
|
||||||
|
|
||||||
|
|
||||||
|
----------------------------
|
||||||
|
|
||||||
|
# Complete configuration example
|
||||||
|
|
||||||
|
```xml
|
||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||||
|
<rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xmlns:rbcs="urn:net.woggioni.rbcs.server"
|
||||||
|
xs:schemaLocation="urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd"
|
||||||
|
>
|
||||||
|
<bind host="0.0.0.0" port="8080" incoming-connections-backlog-size="1024"/>
|
||||||
|
<connection
|
||||||
|
max-request-size="67108864"
|
||||||
|
idle-timeout="PT10S"
|
||||||
|
read-idle-timeout="PT20S"
|
||||||
|
write-idle-timeout="PT20S"
|
||||||
|
chunk-size="0x1000"/>
|
||||||
|
<event-executor use-virtual-threads="true"/>
|
||||||
|
|
||||||
|
<cache xs:type="rbcs:inMemoryCacheType" max-age="P7D" enable-compression="false" max-size="0x10000000" />
|
||||||
|
|
||||||
|
<!-- uncomment this to enable the filesystem storage backend, sotring cache data in "${sys:java.io.tmpdir}/rbcs"
|
||||||
|
<cache xs:type="rbcs:fileSystemCacheType" max-age="P7D" enable-compression="false" path="${sys:java.io.tmpdir}/rbcs"/>
|
||||||
|
-->
|
||||||
|
|
||||||
|
<!-- uncomment this to use memcache as the storage backend, also make sure you have
|
||||||
|
the memcache plugin installed in the `plugins` directory if you are using running
|
||||||
|
the jar version of RBCS
|
||||||
|
<cache xs:type="rbcs-memcache:memcacheCacheType" max-age="P7D" digest="MD5">
|
||||||
|
<server host="127.0.0.1" port="11211" max-connections="256"/>
|
||||||
|
</cache>
|
||||||
|
-->
|
||||||
|
|
||||||
|
<authorization>
|
||||||
|
<users>
|
||||||
|
<user name="user1" password="II+qeNLft2pZ/JVNo9F7jpjM/BqEcfsJW27NZ6dPVs8tAwHbxrJppKYsbL7J/SMl">
|
||||||
|
<quota calls="100" period="PT1S"/>
|
||||||
|
</user>
|
||||||
|
<user name="user2" password="v6T9+q6/VNpvLknji3ixPiyz2YZCQMXj2FN7hvzbfc2Ig+IzAHO0iiBCH9oWuBDq"/>
|
||||||
|
<anonymous>
|
||||||
|
<quota calls="10" period="PT60S" initial-available-calls="10" max-available-calls="10"/>
|
||||||
|
</anonymous>
|
||||||
|
</users>
|
||||||
|
<groups>
|
||||||
|
<group name="readers">
|
||||||
|
<users>
|
||||||
|
<anonymous/>
|
||||||
|
</users>
|
||||||
|
<roles>
|
||||||
|
<reader/>
|
||||||
|
</roles>
|
||||||
|
</group>
|
||||||
|
<group name="writers">
|
||||||
|
<users>
|
||||||
|
<user ref="user1"/>
|
||||||
|
<user ref="user2"/>
|
||||||
|
</users>
|
||||||
|
<roles>
|
||||||
|
<reader/>
|
||||||
|
<writer/>
|
||||||
|
</roles>
|
||||||
|
</group>
|
||||||
|
</groups>
|
||||||
|
</authorization>
|
||||||
|
<authentication>
|
||||||
|
<basic/>
|
||||||
|
</authentication>
|
||||||
|
</rbcs:server>
|
||||||
|
|
||||||
|
```
|
@@ -3,9 +3,9 @@ RUN adduser -D luser
 USER luser
 WORKDIR /home/luser
 
-FROM base-release AS release
+FROM base-release AS release-vanilla
 ADD rbcs-cli-envelope-*.jar rbcs.jar
-ENTRYPOINT ["java", "-XX:+UseZGC", "-XX:+ZGenerational", "-jar", "/home/luser/rbcs.jar", "server"]
+ENTRYPOINT ["java", "-Dlogback.configurationFile=logback.xml", "-XX:MaxRAMPercentage=70", "-XX:GCTimeRatio=24", "-XX:+UseZGC", "-XX:+ZGenerational", "-jar", "/home/luser/rbcs.jar"]
 
 FROM base-release AS release-memcache
 ADD --chown=luser:luser rbcs-cli-envelope-*.jar rbcs.jar
@@ -13,4 +13,30 @@ RUN mkdir plugins
 WORKDIR /home/luser/plugins
 RUN --mount=type=bind,source=.,target=/build/distributions tar -xf /build/distributions/rbcs-server-memcache*.tar
 WORKDIR /home/luser
-ENTRYPOINT ["java", "-XX:+UseZGC", "-XX:+ZGenerational", "-jar", "/home/luser/rbcs.jar", "server"]
+ADD logback.xml .
+ENTRYPOINT ["java", "-Dlogback.configurationFile=logback.xml", "-XX:MaxRAMPercentage=70", "-XX:GCTimeRatio=24", "-XX:+UseZGC", "-XX:+ZGenerational", "-jar", "/home/luser/rbcs.jar"]
+
+FROM busybox:musl AS base-native
+RUN mkdir -p /var/lib/rbcs /etc/rbcs
+RUN adduser -D -u 1000 rbcs -h /var/lib/rbcs
+
+FROM scratch AS release-native
+COPY --from=base-native /etc/passwd /etc/passwd
+COPY --from=base-native /etc/rbcs /etc/rbcs
+COPY --from=base-native /var/lib/rbcs /var/lib/rbcs
+ADD rbcs-cli.upx /usr/bin/rbcs-cli
+ENV RBCS_CONFIGURATION_DIR="/etc/rbcs"
+USER rbcs
+WORKDIR /var/lib/rbcs
+ENTRYPOINT ["/usr/bin/rbcs-cli", "-XX:MaximumHeapSizePercent=70"]
+
+FROM debian:12-slim AS release-jlink
+RUN mkdir -p /usr/share/java/rbcs
+RUN --mount=type=bind,source=.,target=/build/distributions tar -xf /build/distributions/rbcs-cli*.tar -C /usr/share/java/rbcs
+ADD --chmod=755 rbcs-cli.sh /usr/local/bin/rbcs-cli
+RUN adduser -u 1000 luser
+USER luser
+WORKDIR /home/luser
+ADD logback.xml .
+ENV JAVA_OPTS=-XX:-UseJVMCICompiler\ -Dlogback.configurationFile=logback.xml\ -XX:MaxRAMPercentage=70\ -XX:GCTimeRatio=24\ -XX:+UseZGC\ -XX:+ZGenerational
+ENTRYPOINT ["/usr/local/bin/rbcs-cli"]
28
docker/README.md
Normal file
@@ -0,0 +1,28 @@
# RBCS Docker images

There are 4 image flavours:
- vanilla
- memcache
- native
- jlink

The `vanilla` image only contains the envelope jar file with no plugins and is based on `eclipse-temurin:21-jre-alpine`.

The `memcache` image is similar to the `vanilla` image, except that it also contains the `rbcs-server-memcache` plugin in the `plugins` folder. Use this image if you don't want to use the `native` image but want to use memcache as the cache backend.

The `native` image contains a native, statically-linked executable created with GraalVM Native Image that has no userspace dependencies. It also embeds the memcache plugin inside the executable. Use this image for maximum efficiency and minimal memory footprint.

The `jlink` image contains a custom Java runtime created with GraalVM's jlink that only depends on glibc. It also contains the memcache plugin on the module path. Use this image for best performance.

## Which image should I use?

The `native` image uses Java's SerialGC, so it's ideal for constrained environments like containers or small servers.
If you have plenty of resources and want to squeeze out maximum throughput, consider the `vanilla` or `memcache` image instead, then choose and fine-tune the garbage collector.

Also, the `native` image is only available for the `x86_64` architecture at the moment, while `vanilla` and `memcache` also ship an `aarch64` variant.
@@ -29,7 +29,10 @@ Provider<Copy> prepareDockerBuild = tasks.register('prepareDockerBuild', Copy) {
     group = 'docker'
     into project.layout.buildDirectory.file('docker')
     from(configurations.docker)
-    from(file('Dockerfile'))
+    from(files('Dockerfile', 'rbcs-cli.sh'))
+    from(rootProject.file('conf')) {
+        include 'logback.xml'
+    }
 }
 
 Provider<DockerBuildImage> dockerBuild = tasks.register('dockerBuildImage', DockerBuildImage) {
@@ -63,5 +66,3 @@ Provider<DockerPushImage> dockerPush = tasks.register('dockerPushImage', DockerP
     }
     images = [dockerTag.flatMap{ it.tag }, dockerTagMemcache.flatMap{ it.tag }]
 }
-
-
3
docker/rbcs-cli.sh
Normal file
@@ -0,0 +1,3 @@
#!/bin/sh
DIR=/usr/share/java/rbcs
$DIR/bin/java $JAVA_OPTS -m net.woggioni.rbcs.cli "$@"
@@ -2,11 +2,10 @@ org.gradle.configuration-cache=false
 org.gradle.parallel=true
 org.gradle.caching=true
 
-rbcs.version = 0.1.4
+rbcs.version = 0.3.0-SNAPSHOT
 
-lys.version = 2025.02.05
+lys.version = 2025.06.10
 
 gitea.maven.url = https://gitea.woggioni.net/api/packages/woggioni/maven
 docker.registry.url=gitea.woggioni.net
 
-jpms-check.configurationName = runtimeClasspath
2
gradle/wrapper/gradle-wrapper.properties
vendored
@@ -1,6 +1,6 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-bin.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-8.14.2-bin.zip
 networkTimeout=10000
 validateDistributionUrl=true
 zipStoreBase=GRADLE_USER_HOME
3
gradlew
vendored
@@ -86,8 +86,7 @@ done
 # shellcheck disable=SC2034
 APP_BASE_NAME=${0##*/}
 # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
-APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s
-' "$PWD" ) || exit
+APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit
 
 # Use the maximum available, or set MAX_FD != -1 to use that value.
 MAX_FD=maximum
@@ -5,7 +5,12 @@ plugins {
 }
 
 dependencies {
+    implementation catalog.slf4j.api
+    implementation project(':rbcs-common')
+    api catalog.netty.common
     api catalog.netty.buffer
+    api catalog.netty.handler
+    api catalog.netty.codec.http
 }
 
 publishing {
@@ -1,7 +1,16 @@
 module net.woggioni.rbcs.api {
     requires static lombok;
-    requires java.xml;
+    requires io.netty.handler;
+    requires io.netty.common;
+    requires net.woggioni.rbcs.common;
+    requires io.netty.transport;
+    requires io.netty.codec.http;
     requires io.netty.buffer;
+    requires org.slf4j;
+    requires java.xml;
+
+
     exports net.woggioni.rbcs.api;
     exports net.woggioni.rbcs.api.exception;
+    exports net.woggioni.rbcs.api.message;
 }
@@ -0,0 +1,13 @@
package net.woggioni.rbcs.api;

import java.util.concurrent.CompletableFuture;

public interface AsyncCloseable extends AutoCloseable {

    CompletableFuture<Void> asyncClose();

    @Override
    default void close() throws Exception {
        asyncClose().get();
    }
}
@@ -1,14 +0,0 @@
-package net.woggioni.rbcs.api;
-
-import io.netty.buffer.ByteBuf;
-import net.woggioni.rbcs.api.exception.ContentTooLargeException;
-
-import java.nio.channels.ReadableByteChannel;
-import java.util.concurrent.CompletableFuture;
-
-
-public interface Cache extends AutoCloseable {
-    CompletableFuture<ReadableByteChannel> get(String key);
-
-    CompletableFuture<Void> put(String key, ByteBuf content) throws ContentTooLargeException;
-}
@@ -0,0 +1,57 @@
package net.woggioni.rbcs.api;

import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.handler.codec.http.LastHttpContent;
import io.netty.util.ReferenceCounted;
import lombok.extern.slf4j.Slf4j;
import net.woggioni.rbcs.api.message.CacheMessage;

@Slf4j
public abstract class CacheHandler extends ChannelInboundHandlerAdapter {
    private boolean requestFinished = false;

    abstract protected void channelRead0(ChannelHandlerContext ctx, CacheMessage msg);

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) {
        if(!requestFinished && msg instanceof CacheMessage) {
            if(msg instanceof CacheMessage.LastCacheContent) requestFinished = true;
            try {
                channelRead0(ctx, (CacheMessage) msg);
            } finally {
                if(msg instanceof ReferenceCounted rc) rc.release();
            }
        } else {
            ctx.fireChannelRead(msg);
        }
    }

    protected void sendMessageAndFlush(ChannelHandlerContext ctx, Object msg) {
        sendMessage(ctx, msg, true);
    }

    protected void sendMessage(ChannelHandlerContext ctx, Object msg) {
        sendMessage(ctx, msg, false);
    }

    private void sendMessage(ChannelHandlerContext ctx, Object msg, boolean flush) {
        ctx.write(msg);
        if(
            msg instanceof CacheMessage.LastCacheContent ||
            msg instanceof CacheMessage.CachePutResponse ||
            msg instanceof CacheMessage.CacheValueNotFoundResponse ||
            msg instanceof LastHttpContent
        ) {
            ctx.flush();
            ctx.pipeline().remove(this);
        } else if(flush) {
            ctx.flush();
        }
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        super.exceptionCaught(ctx, cause);
    }
}
@@ -0,0 +1,15 @@
package net.woggioni.rbcs.api;

import io.netty.channel.ChannelFactory;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.socket.DatagramChannel;
import io.netty.channel.socket.SocketChannel;

public interface CacheHandlerFactory extends AsyncCloseable {
    CacheHandler newHandler(
        Configuration configuration,
        EventLoopGroup eventLoopGroup,
        ChannelFactory<SocketChannel> socketChannelFactory,
        ChannelFactory<DatagramChannel> datagramChannelFactory
    );
}
@@ -0,0 +1,13 @@
package net.woggioni.rbcs.api;

import java.io.Serializable;
import lombok.Getter;
import lombok.RequiredArgsConstructor;

@Getter
@RequiredArgsConstructor
public class CacheValueMetadata implements Serializable {
    private final String contentDisposition;
    private final String mimeType;
}

@@ -1,16 +1,15 @@
 package net.woggioni.rbcs.api;
 
 
-import lombok.EqualsAndHashCode;
-import lombok.NonNull;
-import lombok.Value;
-
 import java.nio.file.Path;
 import java.security.cert.X509Certificate;
 import java.time.Duration;
 import java.util.Map;
 import java.util.Set;
 import java.util.stream.Collectors;
+import lombok.EqualsAndHashCode;
+import lombok.NonNull;
+import lombok.Value;
 
 @Value
 public class Configuration {
@@ -35,12 +34,11 @@ public class Configuration {
 
     @Value
     public static class Connection {
-        Duration readTimeout;
-        Duration writeTimeout;
         Duration idleTimeout;
         Duration readIdleTimeout;
         Duration writeIdleTimeout;
         int maxRequestSize;
+        int chunkSize;
     }
 
     @Value
@@ -85,17 +83,6 @@ public class Configuration {
         Group extract(X509Certificate cert);
     }
 
-    @Value
-    public static class Throttling {
-        KeyStore keyStore;
-        TrustStore trustStore;
-        boolean verifyClients;
-    }
-
-    public enum ClientCertificate {
-        REQUIRED, OPTIONAL
-    }
-
     @Value
     public static class Tls {
         KeyStore keyStore;
@@ -135,7 +122,7 @@ public class Configuration {
     }
 
     public interface Cache {
-        net.woggioni.rbcs.api.Cache materialize();
+        CacheHandlerFactory materialize();
         String getNamespaceURI();
         String getTypeName();
     }
@@ -1,5 +1,5 @@
 package net.woggioni.rbcs.api;
 
 public enum Role {
-    Reader, Writer
+    Reader, Writer, Healthcheck
 }
@@ -0,0 +1,161 @@
package net.woggioni.rbcs.api.message;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufHolder;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import net.woggioni.rbcs.api.CacheValueMetadata;

public sealed interface CacheMessage {

    @Getter
    @RequiredArgsConstructor
    final class CacheGetRequest implements CacheMessage {
        private final String key;
    }

    abstract sealed class CacheGetResponse implements CacheMessage {
    }

    @Getter
    @RequiredArgsConstructor
    final class CacheValueFoundResponse extends CacheGetResponse {
        private final String key;
        private final CacheValueMetadata metadata;
    }

    final class CacheValueNotFoundResponse extends CacheGetResponse {
    }

    @Getter
    @RequiredArgsConstructor
    final class CachePutRequest implements CacheMessage {
        private final String key;
        private final CacheValueMetadata metadata;
    }

    @Getter
    @RequiredArgsConstructor
    final class CachePutResponse implements CacheMessage {
        private final String key;
    }

    @RequiredArgsConstructor
    non-sealed class CacheContent implements CacheMessage, ByteBufHolder {
        protected final ByteBuf chunk;

        @Override
        public ByteBuf content() {
            return chunk;
        }

        @Override
        public CacheContent copy() {
            return replace(chunk.copy());
        }

        @Override
        public CacheContent duplicate() {
            return new CacheContent(chunk.duplicate());
        }

        @Override
        public CacheContent retainedDuplicate() {
            return new CacheContent(chunk.retainedDuplicate());
        }

        @Override
        public CacheContent replace(ByteBuf content) {
            return new CacheContent(content);
        }

        @Override
        public CacheContent retain() {
            chunk.retain();
            return this;
        }

        @Override
        public CacheContent retain(int increment) {
            chunk.retain(increment);
            return this;
        }

        @Override
        public CacheContent touch() {
            chunk.touch();
            return this;
        }

        @Override
        public CacheContent touch(Object hint) {
            chunk.touch(hint);
            return this;
        }

        @Override
        public int refCnt() {
            return chunk.refCnt();
        }

        @Override
        public boolean release() {
            return chunk.release();
        }

        @Override
        public boolean release(int decrement) {
            return chunk.release(decrement);
        }
    }

    final class LastCacheContent extends CacheContent {
        public LastCacheContent(ByteBuf chunk) {
            super(chunk);
        }

        @Override
        public LastCacheContent copy() {
            return replace(chunk.copy());
        }

        @Override
        public LastCacheContent duplicate() {
            return new LastCacheContent(chunk.duplicate());
        }

        @Override
        public LastCacheContent retainedDuplicate() {
            return new LastCacheContent(chunk.retainedDuplicate());
        }

        @Override
        public LastCacheContent replace(ByteBuf content) {
            // wrap the supplied buffer rather than the original chunk
            return new LastCacheContent(content);
        }

        @Override
        public LastCacheContent retain() {
            super.retain();
            return this;
        }

        @Override
        public LastCacheContent retain(int increment) {
            super.retain(increment);
            return this;
        }

        @Override
        public LastCacheContent touch() {
            super.touch();
            return this;
        }

        @Override
        public LastCacheContent touch(Object hint) {
            super.touch(hint);
            return this;
        }
    }
}
@@ -9,13 +9,68 @@ plugins {
     id 'maven-publish'
 }
 
+import net.woggioni.gradle.envelope.EnvelopePlugin
 import net.woggioni.gradle.envelope.EnvelopeJarTask
 import net.woggioni.gradle.graalvm.NativeImageConfigurationTask
-import net.woggioni.gradle.graalvm.NativeImagePlugin
 import net.woggioni.gradle.graalvm.NativeImageTask
+import net.woggioni.gradle.graalvm.NativeImagePlugin
+import net.woggioni.gradle.graalvm.UpxTask
 import net.woggioni.gradle.graalvm.JlinkPlugin
 import net.woggioni.gradle.graalvm.JlinkTask
 
+
+sourceSets {
+    configureNativeImage {
+        java {
+        }
+        kotlin {
+
+        }
+    }
+}
+
+configurations {
+
+    release {
+        transitive = false
+        canBeConsumed = true
+        canBeResolved = true
+        visible = true
+    }
+
+    configureNativeImageImplementation {
+        extendsFrom implementation
+    }
+
+    configureNativeImageRuntimeOnly {
+        extendsFrom runtimeOnly
+    }
+
+    nativeImage {
+        extendsFrom runtimeClasspath
+    }
+
+}
+
+dependencies {
+    configureNativeImageImplementation project
+    configureNativeImageImplementation project(':rbcs-server-memcache')
+
+    implementation catalog.jwo
+    implementation catalog.slf4j.api
+    implementation catalog.picocli
+
+    implementation project(':rbcs-client')
+    implementation project(':rbcs-server')
+
+    // runtimeOnly catalog.slf4j.jdk14
+    runtimeOnly catalog.logback.classic
+    // runtimeOnly catalog.slf4j.simple
+    nativeImage project(':rbcs-server-memcache')
+
+}
+
+
 Property<String> mainModuleName = objects.property(String.class)
 mainModuleName.set('net.woggioni.rbcs.cli')
 Property<String> mainClassName = objects.property(String.class)
@@ -25,77 +80,105 @@ tasks.named(JavaPlugin.COMPILE_JAVA_TASK_NAME, JavaCompile) {
     options.javaModuleMainClass = mainClassName
 }
 
-configurations {
-    release {
-        transitive = false
-        canBeConsumed = true
-        canBeResolved = true
-        visible = true
-    }
-}
-
-envelopeJar {
+Provider<Jar> jarTaskProvider = tasks.named(JavaPlugin.JAR_TASK_NAME, Jar)
+
+Provider<EnvelopeJarTask> envelopeJarTaskProvider = tasks.named(EnvelopePlugin.ENVELOPE_JAR_TASK_NAME, EnvelopeJarTask.class) {
     mainModule = mainModuleName
     mainClass = mainClassName
+
     extraClasspath = ["plugins"]
-}
-
-dependencies {
-    implementation catalog.jwo
-    implementation catalog.slf4j.api
-    implementation catalog.netty.codec.http
-    implementation catalog.picocli
-
-    implementation project(':rbcs-client')
-    implementation project(':rbcs-server')
-
-    // runtimeOnly catalog.slf4j.jdk14
-    runtimeOnly catalog.logback.classic
-    // runtimeOnly catalog.slf4j.simple
-}
-
-Provider<EnvelopeJarTask> envelopeJarTaskProvider = tasks.named('envelopeJar', EnvelopeJarTask.class) {
-    // systemProperties['java.util.logging.config.class'] = 'net.woggioni.rbcs.LoggingConfig'
-    // systemProperties['log.config.source'] = 'net/woggioni/rbcs/cli/logging.properties'
-    // systemProperties['java.util.logging.config.file'] = 'classpath:net/woggioni/rbcs/cli/logging.properties'
     systemProperties['logback.configurationFile'] = 'classpath:net/woggioni/rbcs/cli/logback.xml'
     systemProperties['io.netty.leakDetectionLevel'] = 'DISABLED'
-
-    // systemProperties['org.slf4j.simpleLogger.showDateTime'] = 'true'
-    // systemProperties['org.slf4j.simpleLogger.defaultLogLevel'] = 'debug'
-    // systemProperties['org.slf4j.simpleLogger.log.com.google.code.yanf4j'] = 'warn'
-    // systemProperties['org.slf4j.simpleLogger.log.net.rubyeye.xmemcached'] = 'warn'
-    // systemProperties['org.slf4j.simpleLogger.dateTimeFormat'] = 'yyyy-MM-dd\'T\'HH:mm:ss.SSSZ'
 }
 
 tasks.named(NativeImagePlugin.CONFIGURE_NATIVE_IMAGE_TASK_NAME, NativeImageConfigurationTask) {
-    mainClass = mainClassName
-    mainModule = mainModuleName
+    toolchain {
+        languageVersion = JavaLanguageVersion.of(21)
+        vendor = JvmVendorSpec.GRAAL_VM
+    }
+    mainClass = "net.woggioni.rbcs.cli.graal.GraalNativeImageConfiguration"
+    classpath = project.files(
+        configurations.configureNativeImageRuntimeClasspath,
+        sourceSets.configureNativeImage.output
+    )
+    mergeConfiguration = false
+    systemProperty('logback.configurationFile', 'classpath:net/woggioni/rbcs/cli/logback.xml')
+    systemProperty('io.netty.leakDetectionLevel', 'DISABLED')
+    modularity.inferModulePath = false
+    enabled = true
+    systemProperty('gradle.tmp.dir', temporaryDir.toString())
 }
 
-tasks.named(NativeImagePlugin.NATIVE_IMAGE_TASK_NAME, NativeImageTask) {
+nativeImage {
+    toolchain {
+        languageVersion = JavaLanguageVersion.of(23)
+        vendor = JvmVendorSpec.GRAAL_VM
+    }
     mainClass = mainClassName
-    mainModule = mainModuleName
+    // mainModule = mainModuleName
     useMusl = true
     buildStaticImage = true
+    linkAtBuildTime = false
+    classpath = project.files(jarTaskProvider, configurations.nativeImage)
+    compressExecutable = true
+    compressionLevel = 6
+    useLZMA = false
 }
 
-tasks.named(JlinkPlugin.JLINK_TASK_NAME, JlinkTask) {
+Provider<UpxTask> upxTaskProvider = tasks.named(NativeImagePlugin.UPX_TASK_NAME, UpxTask) {
+}
+
+Provider<JlinkTask> jlinkTaskProvider = tasks.named(JlinkPlugin.JLINK_TASK_NAME, JlinkTask) {
+    toolchain {
+        languageVersion = JavaLanguageVersion.of(21)
+        vendor = JvmVendorSpec.GRAAL_VM
+    }
+
     mainClass = mainClassName
     mainModule = 'net.woggioni.rbcs.cli'
+    classpath = project.files(
+        configurations.configureNativeImageRuntimeClasspath,
+        sourceSets.configureNativeImage.output
+    )
+    additionalModules = [
+        'net.woggioni.rbcs.server.memcache',
+        'ch.qos.logback.classic',
+        'jdk.crypto.ec'
+    ]
+    compressionLevel = 2
+    stripDebug = false
+}
+
+Provider<Tar> jlinkDistTarTaskProvider = tasks.named(JlinkPlugin.JLINK_DIST_TAR_TASK_NAME, Tar) {
+    exclude 'lib/libjvmcicompiler.so'
+}
+
+tasks.named(JavaPlugin.PROCESS_RESOURCES_TASK_NAME, ProcessResources) {
+    from(rootProject.file('conf')) {
+        into('net/woggioni/rbcs/cli')
+        include 'logback.xml'
+        include 'logging.properties'
+    }
 }
 
 artifacts {
     release(envelopeJarTaskProvider)
+    release(upxTaskProvider)
+    release(jlinkDistTarTaskProvider)
 }
 
 publishing {
     publications {
         maven(MavenPublication) {
             artifact envelopeJar
+            artifact(upxTaskProvider) {
+                classifier = "linux-x86_64"
+                extension = "exe"
+            }
         }
     }
 }
15
rbcs-cli/conf/rbcs-client.xml
Normal file
@@ -0,0 +1,15 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<rbcs-client:profiles xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
                      xmlns:rbcs-client="urn:net.woggioni.rbcs.client"
                      xs:schemaLocation="urn:net.woggioni.rbcs.client jpms://net.woggioni.rbcs.client/net/woggioni/rbcs/client/schema/rbcs-client.xsd"
>
    <profile name="profile1" base-url="https://rbcs1.example.com/">
        <no-auth/>
        <connection write-idle-timeout="PT60S"
                    read-idle-timeout="PT60S"
                    idle-timeout="PT30S" />
    </profile>
    <profile name="profile2" base-url="https://rbcs2.example.com/">
        <basic-auth user="user" password="password"/>
    </profile>
</rbcs-client:profiles>
53
rbcs-cli/conf/rbcs-server.xml
Normal file
@@ -0,0 +1,53 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
             xmlns:rbcs="urn:net.woggioni.rbcs.server"
             xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
             xs:schemaLocation="urn:net.woggioni.rbcs.server.memcache jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd"
>
    <bind host="127.0.0.1" port="8080" incoming-connections-backlog-size="1024"/>
    <connection
        max-request-size="67108864"
        idle-timeout="PT10S"
        read-idle-timeout="PT20S"
        write-idle-timeout="PT20S"/>
    <event-executor use-virtual-threads="true"/>
    <cache xs:type="rbcs-memcache:memcacheCacheType" max-age="P7D" chunk-size="0x1000" digest="MD5">
        <server host="127.0.0.1" port="11211" max-connections="256"/>
    </cache>
    <!--cache xs:type="rbcs:inMemoryCacheType" max-age="P7D" enable-compression="false" max-size="0x10000000" /-->
    <!--cache xs:type="rbcs:fileSystemCacheType" max-age="P7D" enable-compression="false" /-->
    <authorization>
        <users>
            <user name="woggioni" password="II+qeNLft2pZ/JVNo9F7jpjM/BqEcfsJW27NZ6dPVs8tAwHbxrJppKYsbL7J/SMl">
                <quota calls="100" period="PT1S"/>
            </user>
            <user name="gitea" password="v6T9+q6/VNpvLknji3ixPiyz2YZCQMXj2FN7hvzbfc2Ig+IzAHO0iiBCH9oWuBDq"/>
            <anonymous>
                <quota calls="10" period="PT60S" initial-available-calls="10" max-available-calls="10"/>
            </anonymous>
        </users>
        <groups>
            <group name="readers">
                <users>
                    <anonymous/>
                </users>
                <roles>
                    <reader/>
                </roles>
            </group>
            <group name="writers">
                <users>
                    <user ref="woggioni"/>
                    <user ref="gitea"/>
                </users>
                <roles>
                    <reader/>
                    <writer/>
                </roles>
            </group>
        </groups>
    </authorization>
    <authentication>
        <none/>
    </authentication>
</rbcs:server>
6
rbcs-cli/native-image/jni-config.json
Normal file
@@ -0,0 +1,6 @@
[
{
  "name":"java.lang.Boolean",
  "methods":[{"name":"getBoolean","parameterTypes":["java.lang.String"] }]
}
]
@@ -1,2 +1,2 @@
-Args=-H:Optimize=3 --gc=serial --initialize-at-run-time=io.netty
+Args=-O3 -march=x86-64-v2 --gc=serial --install-exit-handlers --initialize-at-run-time=io.netty --enable-url-protocols=jpms --initialize-at-build-time=net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory,net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory$JpmsHandler
 #-H:TraceClassInitialization=io.netty.handler.ssl.BouncyCastleAlpnSslUtils
8
rbcs-cli/native-image/predefined-classes-config.json
Normal file
@@ -0,0 +1,8 @@
[
  {
    "type":"agent-extracted",
    "classes":[
    ]
  }
]

2
rbcs-cli/native-image/proxy-config.json
Normal file
@@ -0,0 +1,2 @@
[
]
728
rbcs-cli/native-image/reflect-config.json
Normal file
@@ -0,0 +1,728 @@
|
|||||||
|
[
|
||||||
|
{
|
||||||
|
"name":"android.os.Build$VERSION"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"ch.qos.logback.classic.encoder.PatternLayoutEncoder",
|
||||||
|
"queryAllPublicMethods":true,
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"ch.qos.logback.classic.joran.SerializedModelConfigurator",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"ch.qos.logback.classic.util.DefaultJoranConfigurator",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"ch.qos.logback.core.ConsoleAppender",
|
||||||
|
"queryAllPublicMethods":true,
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }, {"name":"setTarget","parameterTypes":["java.lang.String"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"ch.qos.logback.core.OutputStreamAppender",
|
||||||
|
"methods":[{"name":"setEncoder","parameterTypes":["ch.qos.logback.core.encoder.Encoder"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"ch.qos.logback.core.encoder.Encoder",
|
||||||
|
"methods":[{"name":"valueOf","parameterTypes":["java.lang.String"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"ch.qos.logback.core.encoder.LayoutWrappingEncoder",
|
||||||
|
"methods":[{"name":"setParent","parameterTypes":["ch.qos.logback.core.spi.ContextAware"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"ch.qos.logback.core.pattern.PatternLayoutEncoderBase",
|
||||||
|
"methods":[{"name":"setPattern","parameterTypes":["java.lang.String"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"ch.qos.logback.core.spi.ContextAware",
|
||||||
|
"methods":[{"name":"valueOf","parameterTypes":["java.lang.String"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.aayushatharva.brotli4j.Brotli4jLoader"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.github.luben.zstd.Zstd"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.crypto.provider.AESCipher$General",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.crypto.provider.ARCFOURCipher",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.crypto.provider.ChaCha20Cipher$ChaCha20Poly1305",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.crypto.provider.DESCipher",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.crypto.provider.DESedeCipher",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.crypto.provider.DHParameters",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.crypto.provider.GaloisCounterMode$AESGCM",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.crypto.provider.HmacCore$HmacSHA512",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.crypto.provider.PBKDF2Core$HmacSHA512",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.crypto.provider.TlsMasterSecretGenerator",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.org.apache.xerces.internal.impl.dv.xs.ExtendedSchemaDVFactoryImpl",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.org.apache.xerces.internal.impl.dv.xs.SchemaDVFactoryImpl",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderFactoryImpl",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"com.sun.org.apache.xerces.internal.jaxp.SAXParserFactoryImpl",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"groovy.lang.Closure"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.bootstrap.ServerBootstrap$1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.bootstrap.ServerBootstrap$ServerBootstrapAcceptor",
|
||||||
|
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.buffer.AbstractByteBufAllocator",
|
||||||
|
"queryAllDeclaredMethods":true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.buffer.AbstractReferenceCountedByteBuf",
|
||||||
|
"fields":[{"name":"refCnt"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.buffer.AdaptivePoolingAllocator$Chunk",
|
||||||
|
"fields":[{"name":"refCnt"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.buffer.AdaptivePoolingAllocator$Magazine",
|
||||||
|
"fields":[{"name":"nextInLine"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.AbstractChannelHandlerContext",
|
||||||
|
"fields":[{"name":"handlerState"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.ChannelDuplexHandler",
|
||||||
|
"methods":[{"name":"bind","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"close","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"connect","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"deregister","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"disconnect","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"flush","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"read","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.ChannelHandlerAdapter",
|
||||||
|
"methods":[{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.ChannelInboundHandlerAdapter",
|
||||||
|
"methods":[{"name":"channelActive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRegistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelUnregistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelWritabilityChanged","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }, {"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.ChannelInitializer",
|
||||||
|
"methods":[{"name":"channelRegistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.ChannelOutboundBuffer",
|
||||||
|
"fields":[{"name":"totalPendingSize"}, {"name":"unwritable"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.ChannelOutboundHandlerAdapter",
|
||||||
|
"methods":[{"name":"bind","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"connect","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"deregister","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"disconnect","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"flush","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"read","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.CombinedChannelDuplexHandler",
|
||||||
|
"methods":[{"name":"bind","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"channelActive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRegistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelUnregistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelWritabilityChanged","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"close","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"connect","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"deregister","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"disconnect","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }, {"name":"flush","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"read","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.DefaultChannelConfig",
|
||||||
|
"fields":[{"name":"autoRead"}, {"name":"writeBufferWaterMark"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.DefaultChannelPipeline",
|
||||||
|
"fields":[{"name":"estimatorHandle"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.DefaultChannelPipeline$HeadContext",
|
||||||
|
"methods":[{"name":"bind","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"channelActive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRegistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelUnregistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelWritabilityChanged","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"close","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"connect","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.net.SocketAddress","java.net.SocketAddress","io.netty.channel.ChannelPromise"] }, {"name":"deregister","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"disconnect","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }, {"name":"flush","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"read","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.DefaultChannelPipeline$TailContext",
|
||||||
|
"methods":[{"name":"channelActive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRegistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelUnregistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelWritabilityChanged","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }, {"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.SimpleChannelInboundHandler",
|
||||||
|
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.embedded.EmbeddedChannel$2"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.pool.SimpleChannelPool$1"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.channel.socket.nio.NioSocketChannel",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.ByteToMessageDecoder",
|
||||||
|
"methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.MessageAggregator",
|
||||||
|
"methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.MessageToByteEncoder",
|
||||||
|
"methods":[{"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.MessageToMessageCodec",
|
||||||
|
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.MessageToMessageDecoder",
|
||||||
|
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.compression.JdkZlibDecoder"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.compression.JdkZlibEncoder",
|
||||||
|
"methods":[{"name":"close","parameterTypes":["io.netty.channel.ChannelHandlerContext","io.netty.channel.ChannelPromise"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.http.HttpClientCodec"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.http.HttpContentDecoder",
|
||||||
|
"methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.http.HttpContentDecompressor"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.http.HttpContentEncoder",
|
||||||
|
"methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.http.HttpObjectAggregator"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.http.HttpServerCodec"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.codec.memcache.binary.BinaryMemcacheClientCodec"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.stream.ChunkedWriteHandler",
|
||||||
|
"methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelWritabilityChanged","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"flush","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.handler.timeout.IdleStateHandler",
|
||||||
|
"methods":[{"name":"channelActive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"channelRegistered","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.internal.tcnative.SSLContext"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.util.AbstractReferenceCounted",
|
||||||
|
"fields":[{"name":"refCnt"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.util.DefaultAttributeMap",
|
||||||
|
"fields":[{"name":"attributes"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.util.DefaultAttributeMap$DefaultAttribute",
|
||||||
|
"fields":[{"name":"attributeMap"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.util.Recycler$DefaultHandle",
|
||||||
|
"fields":[{"name":"state"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.util.ReferenceCountUtil",
|
||||||
|
"queryAllDeclaredMethods":true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.util.concurrent.DefaultPromise",
|
||||||
|
"fields":[{"name":"result"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.util.concurrent.SingleThreadEventExecutor",
|
||||||
|
"fields":[{"name":"state"}, {"name":"threadProperties"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueColdProducerFields",
|
||||||
|
"fields":[{"name":"producerLimit"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueConsumerFields",
|
||||||
|
"fields":[{"name":"consumerIndex"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.util.internal.shaded.org.jctools.queues.BaseMpscLinkedArrayQueueProducerFields",
|
||||||
|
"fields":[{"name":"producerIndex"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.util.internal.shaded.org.jctools.queues.MpmcArrayQueueConsumerIndexField",
|
||||||
|
"fields":[{"name":"consumerIndex"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"io.netty.util.internal.shaded.org.jctools.queues.MpmcArrayQueueProducerIndexField",
|
||||||
|
"fields":[{"name":"producerIndex"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.lang.Object",
|
||||||
|
"allDeclaredFields":true,
|
||||||
|
"queryAllDeclaredMethods":true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.lang.ProcessHandle",
|
||||||
|
"methods":[{"name":"current","parameterTypes":[] }, {"name":"pid","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.lang.System",
|
||||||
|
"methods":[{"name":"console","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.lang.Thread",
|
||||||
|
"fields":[{"name":"threadLocalRandomProbe"}],
|
||||||
|
"methods":[{"name":"isVirtual","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.nio.Bits",
|
||||||
|
"fields":[{"name":"MAX_MEMORY"}, {"name":"UNALIGNED"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.nio.Buffer",
|
||||||
|
"fields":[{"name":"address"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.nio.ByteBuffer",
|
||||||
|
"methods":[{"name":"alignedSlice","parameterTypes":["int"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.nio.DirectByteBuffer",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["long","long"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.nio.channels.spi.SelectorProvider",
|
||||||
|
"methods":[{"name":"openServerSocketChannel","parameterTypes":["java.net.ProtocolFamily"] }, {"name":"openSocketChannel","parameterTypes":["java.net.ProtocolFamily"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.nio.file.Path"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.nio.file.Paths",
|
||||||
|
"methods":[{"name":"get","parameterTypes":["java.lang.String","java.lang.String[]"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.security.AlgorithmParametersSpi"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.security.KeyStoreSpi"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.security.SecureRandomParameters"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.sql.Connection"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.sql.Driver"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.sql.DriverManager",
|
||||||
|
"methods":[{"name":"getConnection","parameterTypes":["java.lang.String"] }, {"name":"getDriver","parameterTypes":["java.lang.String"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.sql.Time",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["long"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.sql.Timestamp",
|
||||||
|
"methods":[{"name":"valueOf","parameterTypes":["java.lang.String"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.Duration",
|
||||||
|
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.Instant",
|
||||||
|
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.LocalDate",
|
||||||
|
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.LocalDateTime",
|
||||||
|
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.LocalTime",
|
||||||
|
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.MonthDay",
|
||||||
|
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.OffsetDateTime",
|
||||||
|
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.OffsetTime",
|
||||||
|
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.Period",
|
||||||
|
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.Year",
|
||||||
|
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.YearMonth",
|
||||||
|
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.ZoneId",
|
||||||
|
"methods":[{"name":"of","parameterTypes":["java.lang.String"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.ZoneOffset",
|
||||||
|
"methods":[{"name":"of","parameterTypes":["java.lang.String"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.time.ZonedDateTime",
|
||||||
|
"methods":[{"name":"parse","parameterTypes":["java.lang.CharSequence"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.util.concurrent.ForkJoinTask",
|
||||||
|
"fields":[{"name":"aux"}, {"name":"status"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.util.concurrent.atomic.AtomicBoolean",
|
||||||
|
"fields":[{"name":"value"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.util.concurrent.atomic.AtomicReference",
|
||||||
|
"fields":[{"name":"value"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.util.concurrent.atomic.Striped64",
|
||||||
|
"fields":[{"name":"base"}, {"name":"cellsBusy"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"java.util.concurrent.atomic.Striped64$Cell",
|
||||||
|
"fields":[{"name":"value"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"javax.security.auth.x500.X500Principal",
|
||||||
|
"fields":[{"name":"thisX500Name"}],
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["sun.security.x509.X500Name"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"jdk.internal.misc.Unsafe",
|
||||||
|
"methods":[{"name":"getUnsafe","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.api.CacheHandler",
|
||||||
|
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.cli.RemoteBuildCacheServerCli",
|
||||||
|
"allDeclaredFields":true,
|
||||||
|
"queryAllDeclaredMethods":true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.cli.RemoteBuildCacheServerCli$VersionProvider",
|
||||||
|
"allDeclaredFields":true,
|
||||||
|
"queryAllDeclaredMethods":true,
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.cli.impl.RbcsCommand",
|
||||||
|
"allDeclaredFields":true,
|
||||||
|
"queryAllDeclaredMethods":true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.cli.impl.commands.BenchmarkCommand",
|
||||||
|
"allDeclaredFields":true,
|
||||||
|
"queryAllDeclaredMethods":true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.cli.impl.commands.ClientCommand",
|
||||||
|
"allDeclaredFields":true,
|
||||||
|
"queryAllDeclaredMethods":true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.cli.impl.commands.GetCommand",
|
||||||
|
"allDeclaredFields":true,
|
||||||
|
"queryAllDeclaredMethods":true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.cli.impl.commands.HealthCheckCommand",
|
||||||
|
"allDeclaredFields":true,
|
||||||
|
"queryAllDeclaredMethods":true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.cli.impl.commands.PasswordHashCommand",
|
||||||
|
"allDeclaredFields":true,
|
||||||
|
"queryAllDeclaredMethods":true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.cli.impl.commands.PutCommand",
|
||||||
|
"allDeclaredFields":true,
|
||||||
|
"queryAllDeclaredMethods":true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.cli.impl.commands.ServerCommand",
|
||||||
|
"allDeclaredFields":true,
|
||||||
|
"queryAllDeclaredMethods":true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.cli.impl.converters.ByteSizeConverter",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.cli.impl.converters.DurationConverter",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.cli.impl.converters.OutputStreamConverter",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.client.RemoteBuildCacheClient$sendRequest$1$operationComplete$responseHandler$1",
|
||||||
|
"methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }, {"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.RemoteBuildCacheServer$HttpChunkContentCompressor",
|
||||||
|
"methods":[{"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.RemoteBuildCacheServer$NettyHttpBasicAuthenticator"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.RemoteBuildCacheServer$ServerInitializer"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.RemoteBuildCacheServer$ServerInitializer$initChannel$4",
|
||||||
|
"methods":[{"name":"userEventTriggered","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.auth.AbstractNettyHttpAuthenticator",
|
||||||
|
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.cache.FileSystemCacheHandler",
|
||||||
|
"methods":[{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.cache.InMemoryCacheHandler",
|
||||||
|
"methods":[{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.exception.ExceptionHandler",
|
||||||
|
"methods":[{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.handler.BlackHoleRequestHandler"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.handler.MaxRequestSizeHandler",
|
||||||
|
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.handler.ServerHandler",
|
||||||
|
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"channelReadComplete","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }, {"name":"write","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object","io.netty.channel.ChannelPromise"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.handler.TraceHandler",
|
||||||
|
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.memcache.MemcacheCacheHandler",
|
||||||
|
"methods":[{"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.memcache.client.MemcacheClient$sendRequest$1$operationComplete$handler$1",
|
||||||
|
"methods":[{"name":"channelInactive","parameterTypes":["io.netty.channel.ChannelHandlerContext"] }, {"name":"exceptionCaught","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Throwable"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"net.woggioni.rbcs.server.throttling.ThrottlingHandler",
|
||||||
|
"methods":[{"name":"channelRead","parameterTypes":["io.netty.channel.ChannelHandlerContext","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.misc.Unsafe",
|
||||||
|
"fields":[{"name":"theUnsafe"}],
|
||||||
|
"methods":[{"name":"copyMemory","parameterTypes":["java.lang.Object","long","java.lang.Object","long","long"] }, {"name":"getAndAddLong","parameterTypes":["java.lang.Object","long","long"] }, {"name":"getAndSetObject","parameterTypes":["java.lang.Object","long","java.lang.Object"] }, {"name":"invokeCleaner","parameterTypes":["java.nio.ByteBuffer"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.nio.ch.SelectorImpl",
|
||||||
|
"fields":[{"name":"publicSelectedKeys"}, {"name":"selectedKeys"}]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.pkcs12.PKCS12KeyStore",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.pkcs12.PKCS12KeyStore$DualFormatPKCS12",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.provider.DSA$SHA224withDSA",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.provider.DSA$SHA256withDSA",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.provider.MD5",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.provider.NativePRNG",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }, {"name":"<init>","parameterTypes":["java.security.SecureRandomParameters"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.provider.NativePRNG$NonBlocking",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }, {"name":"<init>","parameterTypes":["java.security.SecureRandomParameters"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.provider.SHA",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.provider.SHA2$SHA224",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.provider.SHA2$SHA256",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.provider.SHA5$SHA384",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.provider.SHA5$SHA512",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.provider.X509Factory",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.rsa.PSSParameters",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.rsa.RSAKeyFactory$Legacy",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.rsa.RSAPSSSignature",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.rsa.RSASignature$SHA224withRSA",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.ssl.KeyManagerFactoryImpl$SunX509",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.ssl.SSLContextImpl$DefaultSSLContext",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.ssl.SSLContextImpl$TLSContext",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.ssl.TrustManagerFactoryImpl$PKIXFactory",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":[] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.x509.AuthorityInfoAccessExtension",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.x509.AuthorityKeyIdentifierExtension",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.x509.BasicConstraintsExtension",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.x509.CRLDistributionPointsExtension",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.x509.CertificatePoliciesExtension",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.x509.ExtendedKeyUsageExtension",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.x509.IssuerAlternativeNameExtension",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.x509.KeyUsageExtension",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.x509.NetscapeCertTypeExtension",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.x509.PrivateKeyUsageExtension",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.x509.SubjectAlternativeNameExtension",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"] }]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name":"sun.security.x509.SubjectKeyIdentifierExtension",
|
||||||
|
"methods":[{"name":"<init>","parameterTypes":["java.lang.Boolean","java.lang.Object"] }]
|
||||||
|
}
|
||||||
|
]
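The entries above tell the GraalVM native-image builder which members must remain reachable through reflection inside the closed-world image. As an illustrative sketch (not part of the repository), the java.lang.ProcessHandle entry is what keeps a purely reflective lookup like the following working after ahead-of-time compilation:

    // Hypothetical check: without the ProcessHandle entry in reflect-config.json,
    // Class.forName/getMethod would fail at run time inside the native image.
    val processHandleClass = Class.forName("java.lang.ProcessHandle")
    val current = processHandleClass.getMethod("current").invoke(null)
    val pid = processHandleClass.getMethod("pid").invoke(current) as Long
    println("running as pid $pid")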
46
rbcs-cli/native-image/resource-config.json
Normal file
@@ -0,0 +1,46 @@
{
  "resources":{
  "includes":[{
    "pattern":"\\QMETA-INF/MANIFEST.MF\\E"
  }, {
    "pattern":"\\QMETA-INF/services/ch.qos.logback.classic.spi.Configurator\\E"
  }, {
    "pattern":"\\QMETA-INF/services/java.lang.System$LoggerFinder\\E"
  }, {
    "pattern":"\\QMETA-INF/services/java.net.spi.InetAddressResolverProvider\\E"
  }, {
    "pattern":"\\QMETA-INF/services/java.net.spi.URLStreamHandlerProvider\\E"
  }, {
    "pattern":"\\QMETA-INF/services/java.nio.channels.spi.SelectorProvider\\E"
  }, {
    "pattern":"\\QMETA-INF/services/java.time.zone.ZoneRulesProvider\\E"
  }, {
    "pattern":"\\QMETA-INF/services/javax.xml.parsers.DocumentBuilderFactory\\E"
  }, {
    "pattern":"\\QMETA-INF/services/javax.xml.parsers.SAXParserFactory\\E"
  }, {
    "pattern":"\\QMETA-INF/services/net.woggioni.rbcs.api.CacheProvider\\E"
  }, {
    "pattern":"\\QMETA-INF/services/org.slf4j.spi.SLF4JServiceProvider\\E"
  }, {
    "pattern":"\\Qclasspath:net/woggioni/rbcs/cli/logback.xml\\E"
  }, {
    "pattern":"\\Qlogback-test.scmo\\E"
  }, {
    "pattern":"\\Qlogback.scmo\\E"
  }, {
    "pattern":"\\Qnet/woggioni/rbcs/cli/logback.xml\\E"
  }, {
    "pattern":"\\Qnet/woggioni/rbcs/client/schema/rbcs-client.xsd\\E"
  }, {
    "pattern":"\\Qnet/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd\\E"
  }, {
    "pattern":"\\Qnet/woggioni/rbcs/server/rbcs-default.xml\\E"
  }, {
    "pattern":"\\Qnet/woggioni/rbcs/server/schema/rbcs-server.xsd\\E"
  }]},
  "bundles":[{
    "name":"com.sun.org.apache.xerces.internal.impl.xpath.regex.message",
    "locales":[""]
  }]
}
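Each pattern above is a \Q...\E-quoted literal, so exactly these classpath resources are bundled into the native image. A minimal sketch (not from the repository) of how one of the listed schemas is typically read back at run time:

    // Illustrative only: load one of the resources listed above from the image classpath.
    val schema = Thread.currentThread().contextClassLoader
        .getResourceAsStream("net/woggioni/rbcs/server/schema/rbcs-server.xsd")
        ?.use { it.readAllBytes() }
        ?: error("rbcs-server.xsd is not bundled in the image")
    println("rbcs-server.xsd: ${schema.size} bytes")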
14
rbcs-cli/native-image/serialization-config.json
Normal file
@@ -0,0 +1,14 @@
{
  "types":[
    {
      "name":"java.lang.String"
    },
    {
      "name":"net.woggioni.rbcs.api.CacheValueMetadata"
    }
  ],
  "lambdaCapturingTypes":[
  ],
  "proxies":[
  ]
}
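Only java.lang.String and net.woggioni.rbcs.api.CacheValueMetadata are registered for Java serialization. A hedged sketch of what this registration enables; the two-argument constructor mirrors the CacheValueMetadata(contentDisposition, mimeType) calls that appear elsewhere in this changeset, and it is assumed here that the class round-trips cleanly:

    // Sketch only: serialize and deserialize the registered type inside the native image.
    val metadata = CacheValueMetadata("attachment; filename=\"some-file.bin\"", "application/octet-stream")
    val bytes = java.io.ByteArrayOutputStream().use { bos ->
        java.io.ObjectOutputStream(bos).use { it.writeObject(metadata) }
        bos.toByteArray()
    }
    val decoded = java.io.ObjectInputStream(java.io.ByteArrayInputStream(bytes)).use { it.readObject() } as CacheValueMetadata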
@@ -0,0 +1,186 @@
package net.woggioni.rbcs.cli.graal

import net.woggioni.jwo.NullOutputStream
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.api.Configuration.User
import net.woggioni.rbcs.api.Role
import net.woggioni.rbcs.cli.RemoteBuildCacheServerCli
import net.woggioni.rbcs.cli.impl.commands.BenchmarkCommand
import net.woggioni.rbcs.cli.impl.commands.GetCommand
import net.woggioni.rbcs.cli.impl.commands.HealthCheckCommand
import net.woggioni.rbcs.cli.impl.commands.PutCommand
import net.woggioni.rbcs.common.HostAndPort
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
import net.woggioni.rbcs.common.RBCS
import net.woggioni.rbcs.common.Xml
import net.woggioni.rbcs.server.RemoteBuildCacheServer
import net.woggioni.rbcs.server.cache.FileSystemCacheConfiguration
import net.woggioni.rbcs.server.cache.InMemoryCacheConfiguration
import net.woggioni.rbcs.server.configuration.Parser
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration
import java.io.ByteArrayInputStream
import java.net.URI
import java.nio.file.Path
import java.time.Duration
import java.time.temporal.ChronoUnit
import java.util.concurrent.ExecutionException
import java.util.zip.Deflater
import net.woggioni.rbcs.client.Configuration as ClientConfiguration
import net.woggioni.rbcs.client.impl.Parser as ClientConfigurationParser

object GraalNativeImageConfiguration {
    @JvmStatic
    fun main(vararg args : String) {

        val serverURL = URI.create("file:conf/rbcs-server.xml").toURL()
        val serverDoc = serverURL.openStream().use {
            Xml.parseXml(serverURL, it)
        }
        Parser.parse(serverDoc)

        val url = URI.create("file:conf/rbcs-client.xml").toURL()
        val clientDoc = url.openStream().use {
            Xml.parseXml(url, it)
        }
        ClientConfigurationParser.parse(clientDoc)

        val PASSWORD = "password"
        val readersGroup = Configuration.Group("readers", setOf(Role.Reader, Role.Healthcheck), null, null)
        val writersGroup = Configuration.Group("writers", setOf(Role.Writer), null, null)

        val users = listOf(
            User("user1", hashPassword(PASSWORD), setOf(readersGroup), null),
            User("user2", hashPassword(PASSWORD), setOf(writersGroup), null),
            User("user3", hashPassword(PASSWORD), setOf(readersGroup, writersGroup), null),
            User("", null, setOf(readersGroup), null),
            User("user4", hashPassword(PASSWORD), setOf(readersGroup),
                Configuration.Quota(1, Duration.of(1, ChronoUnit.DAYS), 0, 1)
            ),
            User("user5", hashPassword(PASSWORD), setOf(readersGroup),
                Configuration.Quota(1, Duration.of(5, ChronoUnit.SECONDS), 0, 1)
            )
        )

        val serverPort = RBCS.getFreePort()

        val caches = listOf<Configuration.Cache>(
            InMemoryCacheConfiguration(
                maxAge = Duration.ofSeconds(3600),
                digestAlgorithm = "MD5",
                compressionLevel = Deflater.DEFAULT_COMPRESSION,
                compressionEnabled = false,
                maxSize = 0x1000000,
            ),
            FileSystemCacheConfiguration(
                Path.of(System.getProperty("java.io.tmpdir")).resolve("rbcs"),
                maxAge = Duration.ofSeconds(3600),
                digestAlgorithm = "MD5",
                compressionLevel = Deflater.DEFAULT_COMPRESSION,
                compressionEnabled = false,
            ),
            MemcacheCacheConfiguration(
                listOf(MemcacheCacheConfiguration.Server(
                    HostAndPort("127.0.0.1", 11211),
                    1000,
                    4)
                ),
                Duration.ofSeconds(60),
                "MD5",
                null,
                1,
            )
        )

        for (cache in caches) {
            val serverConfiguration = Configuration(
                "127.0.0.1",
                serverPort,
                100,
                null,
                Configuration.EventExecutor(true),
                Configuration.Connection(
                    Duration.ofSeconds(10),
                    Duration.ofSeconds(15),
                    Duration.ofSeconds(15),
                    0x10000,
                    0x1000
                ),
                users.asSequence().map { it.name to it }.toMap(),
                sequenceOf(writersGroup, readersGroup).map { it.name to it }.toMap(),
                cache,
                Configuration.BasicAuthentication(),
                null,
            )

            MemcacheCacheConfiguration(
                listOf(
                    MemcacheCacheConfiguration.Server(
                        HostAndPort("127.0.0.1", 11211),
                        1000,
                        4
                    )
                ),
                Duration.ofSeconds(60),
                "MD5",
                null,
                1,
            )

            val serverHandle = RemoteBuildCacheServer(serverConfiguration).run()

            val clientProfile = ClientConfiguration.Profile(
                URI.create("http://127.0.0.1:$serverPort/"),
                ClientConfiguration.Connection(
                    Duration.ofSeconds(5),
                    Duration.ofSeconds(5),
                    Duration.ofSeconds(7),
                    true,
                ),
                ClientConfiguration.Authentication.BasicAuthenticationCredentials("user3", PASSWORD),
                Duration.ofSeconds(3),
                10,
                true,
                ClientConfiguration.RetryPolicy(
                    3,
                    1000,
                    1.2
                ),
                ClientConfiguration.TrustStore(null, null, false, false)
            )

            HealthCheckCommand.execute(clientProfile)

            BenchmarkCommand.execute(
                clientProfile,
                1000,
                0x100,
                true
            )

            PutCommand.execute(
                clientProfile,
                "some-file.bin",
                ByteArrayInputStream(ByteArray(0x1000) { it.toByte() }),
                "application/octet-setream",
                "attachment; filename=\"some-file.bin\""
            )

            GetCommand.execute(
                clientProfile,
                "some-file.bin",
                NullOutputStream()
            )

            serverHandle.sendShutdownSignal()
            try {
                serverHandle.get()
            } catch (ee : ExecutionException) {
            }
        }
        System.setProperty("net.woggioni.rbcs.conf.dir", System.getProperty("gradle.tmp.dir"))
        RemoteBuildCacheServerCli.createCommandLine().execute("--version")
        RemoteBuildCacheServerCli.createCommandLine().execute("server", "-t", "PT10S")
    }
}
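This driver walks through server start-up, client configuration parsing and the put/get/benchmark/health-check code paths in a single run, so that every reflective, resource and serialization access the CLI performs gets exercised. Presumably it is meant to be run under the GraalVM tracing agent, along the lines of java -agentlib:native-image-agent=config-output-dir=rbcs-cli/native-image -jar <rbcs-cli jar> (the exact jar name and output directory are assumptions), which would let the reflect-config, resource-config and serialization-config files above be recorded rather than written by hand.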
@@ -1,5 +1,6 @@
 package net.woggioni.rbcs.cli
 
+import net.woggioni.jwo.Application
 import net.woggioni.rbcs.cli.impl.AbstractVersionProvider
 import net.woggioni.rbcs.cli.impl.RbcsCommand
 import net.woggioni.rbcs.cli.impl.commands.BenchmarkCommand
@@ -10,8 +11,7 @@ import net.woggioni.rbcs.cli.impl.commands.PasswordHashCommand
 import net.woggioni.rbcs.cli.impl.commands.PutCommand
 import net.woggioni.rbcs.cli.impl.commands.ServerCommand
 import net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory
-import net.woggioni.rbcs.common.contextLogger
+import net.woggioni.rbcs.common.createLogger
-import net.woggioni.jwo.Application
 import picocli.CommandLine
 import picocli.CommandLine.Model.CommandSpec
 
@@ -23,15 +23,20 @@ class RemoteBuildCacheServerCli : RbcsCommand() {
 
     class VersionProvider : AbstractVersionProvider()
     companion object {
-        @JvmStatic
-        fun main(vararg args: String) {
+        private fun setPropertyIfNotPresent(key: String, value: String) {
+            System.getProperty(key) ?: System.setProperty(key, value)
+        }
+
+        fun createCommandLine() : CommandLine {
+            setPropertyIfNotPresent("logback.configurationFile", "net/woggioni/rbcs/cli/logback.xml")
+            setPropertyIfNotPresent("io.netty.leakDetectionLevel", "DISABLED")
             val currentClassLoader = RemoteBuildCacheServerCli::class.java.classLoader
             Thread.currentThread().contextClassLoader = currentClassLoader
             if(currentClassLoader.javaClass.name == "net.woggioni.envelope.loader.ModuleClassLoader") {
                 //We're running in an envelope jar and custom URL protocols won't work
                 RbcsUrlStreamHandlerFactory.install()
             }
-            val log = contextLogger()
+            val log = createLogger<RemoteBuildCacheServerCli>()
             val app = Application.builder("rbcs")
                 .configurationDirectoryEnvVar("RBCS_CONFIGURATION_DIR")
                 .configurationDirectoryPropertyKey("net.woggioni.rbcs.conf.dir")
@@ -51,7 +56,12 @@ class RemoteBuildCacheServerCli : RbcsCommand() {
                     addSubcommand(GetCommand())
                     addSubcommand(HealthCheckCommand())
                 })
-            System.exit(commandLine.execute(*args))
+            return commandLine
+        }
+
+        @JvmStatic
+        fun main(vararg args: String) {
+            System.exit(createCommandLine().execute(*args))
         }
     }
 }
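With main() now delegating to createCommandLine(), other code can drive the same picocli command tree without terminating the JVM. A minimal sketch, reusing only calls that appear in this changeset (the import of RemoteBuildCacheServerCli is elided):

    // Build the CLI once and execute a subcommand programmatically;
    // "--version" is the same invocation used by GraalNativeImageConfiguration above.
    val exitCode = RemoteBuildCacheServerCli.createCommandLine().execute("--version")
    println("picocli exit code: $exitCode")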
@@ -12,7 +12,7 @@ abstract class RbcsCommand : Runnable {
         private set
 
     protected fun findConfigurationFile(app: Application, fileName : String): Path {
-        val confDir = app.computeConfigurationDirectory()
+        val confDir = app.computeConfigurationDirectory(false)
         val configurationFile = confDir.resolve(fileName)
         return configurationFile
     }
@@ -1,15 +1,21 @@
 package net.woggioni.rbcs.cli.impl.commands
 
+import net.woggioni.jwo.JWO
+import net.woggioni.jwo.LongMath
+import net.woggioni.rbcs.api.CacheValueMetadata
 import net.woggioni.rbcs.cli.impl.RbcsCommand
+import net.woggioni.rbcs.cli.impl.converters.ByteSizeConverter
+import net.woggioni.rbcs.client.Configuration
 import net.woggioni.rbcs.client.RemoteBuildCacheClient
-import net.woggioni.rbcs.common.contextLogger
+import net.woggioni.rbcs.common.createLogger
+import net.woggioni.rbcs.common.debug
 import net.woggioni.rbcs.common.error
 import net.woggioni.rbcs.common.info
-import net.woggioni.jwo.JWO
 import picocli.CommandLine
 import java.security.SecureRandom
 import java.time.Duration
 import java.time.Instant
+import java.time.temporal.ChronoUnit
 import java.util.concurrent.LinkedBlockingQueue
 import java.util.concurrent.Semaphore
 import java.util.concurrent.atomic.AtomicLong
@@ -21,7 +27,124 @@ import kotlin.random.Random
     showDefaultValues = true
 )
 class BenchmarkCommand : RbcsCommand() {
-    private val log = contextLogger()
+    companion object {
+        private val log = createLogger<BenchmarkCommand>()
+
+        fun execute(profile : Configuration.Profile,
+                    numberOfEntries : Int,
+                    entrySize : Int,
+                    useRandomValue : Boolean,
+        ) {
+            val progressThreshold = LongMath.ceilDiv(numberOfEntries.toLong(), 20)
+            RemoteBuildCacheClient(profile).use { client ->
+                val entryGenerator = sequence {
+                    val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
+                    while (true) {
+                        val key = JWO.bytesToHex(random.nextBytes(16))
+                        val value = if (useRandomValue) {
+                            random.nextBytes(entrySize)
+                        } else {
+                            val byteValue = random.nextInt().toByte()
+                            ByteArray(entrySize) { _ -> byteValue }
+                        }
+                        yield(key to value)
+                    }
+                }
+
+                log.info {
+                    "Starting insertion"
+                }
+                val entries = let {
+                    val completionCounter = AtomicLong(0)
+                    val completionQueue = LinkedBlockingQueue<Pair<String, ByteArray>>(numberOfEntries)
+                    val start = Instant.now()
+                    val semaphore = Semaphore(profile.maxConnections * 5)
+                    val iterator = entryGenerator.take(numberOfEntries).iterator()
+                    while (completionCounter.get() < numberOfEntries) {
+                        if (iterator.hasNext()) {
+                            val entry = iterator.next()
+                            semaphore.acquire()
+                            val future =
+                                client.put(entry.first, entry.second, CacheValueMetadata(null, null)).thenApply { entry }
+                            future.whenComplete { result, ex ->
+                                if (ex != null) {
+                                    log.error(ex.message, ex)
+                                } else {
+                                    completionQueue.put(result)
+                                }
+                                semaphore.release()
+                                val completed = completionCounter.incrementAndGet()
+                                if (completed.mod(progressThreshold) == 0L) {
+                                    log.debug {
+                                        "Inserted $completed / $numberOfEntries"
+                                    }
+                                }
+                            }
+                        } else {
+                            Thread.sleep(Duration.of(500, ChronoUnit.MILLIS))
+                        }
+                    }
+
+                    val inserted = completionQueue.toList()
+                    val end = Instant.now()
+                    log.info {
+                        val elapsed = Duration.between(start, end).toMillis()
+                        val opsPerSecond = String.format("%.2f", numberOfEntries.toDouble() / elapsed * 1000)
+                        "Insertion rate: $opsPerSecond ops/s"
+                    }
+                    inserted
+                }
+                log.info {
+                    "Inserted ${entries.size} entries"
+                }
+                log.info {
+                    "Starting retrieval"
+                }
+                if (entries.isNotEmpty()) {
+                    val completionCounter = AtomicLong(0)
+                    val semaphore = Semaphore(profile.maxConnections * 5)
+                    val start = Instant.now()
+                    val it = entries.iterator()
+                    while (completionCounter.get() < entries.size) {
+                        if (it.hasNext()) {
+                            val entry = it.next()
+                            semaphore.acquire()
+                            val future = client.get(entry.first).thenApply {
+                                if (it == null) {
+                                    log.error {
+                                        "Missing entry for key '${entry.first}'"
+                                    }
+                                } else if (!entry.second.contentEquals(it)) {
+                                    log.error {
+                                        "Retrieved a value different from what was inserted for key '${entry.first}'"
+                                    }
+                                }
+                            }
+                            future.whenComplete { _, _ ->
+                                val completed = completionCounter.incrementAndGet()
+                                if (completed.mod(progressThreshold) == 0L) {
+                                    log.debug {
+                                        "Retrieved $completed / ${entries.size}"
+                                    }
+                                }
+                                semaphore.release()
+                            }
+                        } else {
+                            Thread.sleep(Duration.of(500, ChronoUnit.MILLIS))
+                        }
+                    }
+                    val end = Instant.now()
+                    log.info {
+                        val elapsed = Duration.between(start, end).toMillis()
+                        val opsPerSecond = String.format("%.2f", entries.size.toDouble() / elapsed * 1000)
+                        "Retrieval rate: $opsPerSecond ops/s"
+                    }
+                } else {
+                    log.error("Skipping retrieval benchmark as it was not possible to insert any entry in the cache")
+                }
+            }
+        }
+    }
 
     @CommandLine.Spec
     private lateinit var spec: CommandLine.Model.CommandSpec
@@ -36,107 +159,28 @@ class BenchmarkCommand : RbcsCommand() {
     @CommandLine.Option(
         names = ["-s", "--size"],
         description = ["Size of a cache value in bytes"],
-        paramLabel = "SIZE"
+        paramLabel = "SIZE",
+        converter = [ByteSizeConverter::class]
     )
     private var size = 0x1000
 
+    @CommandLine.Option(
+        names = ["-r", "--random"],
+        description = ["Insert completely random byte values"]
+    )
+    private var randomValues = false
+
     override fun run() {
         val clientCommand = spec.parent().userObject() as ClientCommand
         val profile = clientCommand.profileName.let { profileName ->
             clientCommand.configuration.profiles[profileName]
                 ?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
         }
-        RemoteBuildCacheClient(profile).use { client ->
-            val entryGenerator = sequence {
-                val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
-                while (true) {
-                    val key = JWO.bytesToHex(random.nextBytes(16))
-                    val content = random.nextInt().toByte()
-                    val value = ByteArray(size, { _ -> content })
-                    yield(key to value)
-                }
-            }
-
-            log.info {
-                "Starting insertion"
-            }
-            val entries = let {
-                val completionCounter = AtomicLong(0)
-                val completionQueue = LinkedBlockingQueue<Pair<String, ByteArray>>(numberOfEntries)
-                val start = Instant.now()
-                val semaphore = Semaphore(profile.maxConnections * 3)
-                val iterator = entryGenerator.take(numberOfEntries).iterator()
-                while (completionCounter.get() < numberOfEntries) {
-                    if (iterator.hasNext()) {
-                        val entry = iterator.next()
-                        semaphore.acquire()
-                        val future = client.put(entry.first, entry.second).thenApply { entry }
-                        future.whenComplete { result, ex ->
-                            if (ex != null) {
-                                log.error(ex.message, ex)
-                            } else {
-                                completionQueue.put(result)
-                            }
-                            semaphore.release()
-                            completionCounter.incrementAndGet()
-                        }
-                    } else {
-                        Thread.sleep(0)
-                    }
-                }
-
-                val inserted = completionQueue.toList()
-                val end = Instant.now()
-                log.info {
-                    val elapsed = Duration.between(start, end).toMillis()
-                    val opsPerSecond = String.format("%.2f", numberOfEntries.toDouble() / elapsed * 1000)
-                    "Insertion rate: $opsPerSecond ops/s"
-                }
-                inserted
-            }
-            log.info {
-                "Inserted ${entries.size} entries"
-            }
-            log.info {
-                "Starting retrieval"
-            }
-            if (entries.isNotEmpty()) {
-                val completionCounter = AtomicLong(0)
-                val semaphore = Semaphore(profile.maxConnections * 3)
-                val start = Instant.now()
-                val it = entries.iterator()
-                while (completionCounter.get() < entries.size) {
-                    if (it.hasNext()) {
-                        val entry = it.next()
-                        val future = client.get(entry.first).thenApply {
-                            if (it == null) {
-                                log.error {
-                                    "Missing entry for key '${entry.first}'"
-                                }
-                            } else if (!entry.second.contentEquals(it)) {
-                                log.error {
-                                    "Retrieved a value different from what was inserted for key '${entry.first}'"
-                                }
-                            }
-                        }
-                        future.whenComplete { _, _ ->
-                            completionCounter.incrementAndGet()
-                            semaphore.release()
-                        }
-                    } else {
-                        Thread.sleep(0)
-                    }
-                }
-                val end = Instant.now()
-                log.info {
-                    val elapsed = Duration.between(start, end).toMillis()
-                    val opsPerSecond = String.format("%.2f", entries.size.toDouble() / elapsed * 1000)
-                    "Retrieval rate: $opsPerSecond ops/s"
-                }
-            } else {
-                log.error("Skipping retrieval benchmark as it was not possible to insert any entry in the cache")
-            }
-        }
+        execute(
+            profile,
+            numberOfEntries,
+            size,
+            randomValues
+        )
     }
 }
@@ -1,8 +1,10 @@
 package net.woggioni.rbcs.cli.impl.commands
 
-import net.woggioni.rbcs.cli.impl.RbcsCommand
-import net.woggioni.rbcs.client.RemoteBuildCacheClient
 import net.woggioni.jwo.Application
+import net.woggioni.rbcs.cli.impl.RbcsCommand
+import net.woggioni.rbcs.client.Configuration
+import net.woggioni.rbcs.common.createLogger
+import net.woggioni.rbcs.common.debug
 import picocli.CommandLine
 import java.nio.file.Path
 
@@ -24,15 +26,20 @@ class ClientCommand(app : Application) : RbcsCommand() {
         names = ["-p", "--profile"],
         description = ["Name of the client profile to be used"],
         paramLabel = "PROFILE",
-        required = true
+        required = false
     )
     var profileName : String? = null
+        get() = field ?: throw IllegalArgumentException("A profile name must be specified using the '-p' command line parameter")
 
-    val configuration : RemoteBuildCacheClient.Configuration by lazy {
-        RemoteBuildCacheClient.Configuration.parse(configurationFile)
+    val configuration : Configuration by lazy {
+        Configuration.parse(configurationFile)
     }
 
     override fun run() {
+        val log = createLogger<ClientCommand>()
+        log.debug {
+            "Using configuration file '$configurationFile'"
+        }
         println("Available profiles:")
         configuration.profiles.forEach { (profileName, _) ->
             println(profileName)
@@ -1,9 +1,11 @@
 package net.woggioni.rbcs.cli.impl.commands
 
 import net.woggioni.rbcs.cli.impl.RbcsCommand
+import net.woggioni.rbcs.client.Configuration
 import net.woggioni.rbcs.client.RemoteBuildCacheClient
-import net.woggioni.rbcs.common.contextLogger
+import net.woggioni.rbcs.common.createLogger
 import picocli.CommandLine
+import java.io.OutputStream
 import java.nio.file.Files
 import java.nio.file.Path
 
@@ -13,7 +15,21 @@ import java.nio.file.Path
     showDefaultValues = true
 )
 class GetCommand : RbcsCommand() {
-    private val log = contextLogger()
+    companion object {
+        private val log = createLogger<GetCommand>()
+
+        fun execute(profile : Configuration.Profile, key : String, outputStream: OutputStream) {
+            RemoteBuildCacheClient(profile).use { client ->
+                client.get(key).thenApply { value ->
+                    value?.let {
+                        outputStream.use {
+                            it.write(value)
+                        }
+                    } ?: throw NoSuchElementException("No value found for key $key")
+                }.get()
+            }
+        }
+    }
 
     @CommandLine.Spec
     private lateinit var spec: CommandLine.Model.CommandSpec
@@ -38,14 +54,6 @@ class GetCommand : RbcsCommand() {
             clientCommand.configuration.profiles[profileName]
                 ?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
         }
-        RemoteBuildCacheClient(profile).use { client ->
-            client.get(key).thenApply { value ->
-                value?.let {
-                    (output?.let(Files::newOutputStream) ?: System.out).use {
-                        it.write(value)
-                    }
-                } ?: throw NoSuchElementException("No value found for key $key")
-            }.get()
-        }
+        execute(profile, key, (output?.let(Files::newOutputStream) ?: System.out))
     }
 }
@@ -1,8 +1,9 @@
 package net.woggioni.rbcs.cli.impl.commands
 
 import net.woggioni.rbcs.cli.impl.RbcsCommand
+import net.woggioni.rbcs.client.Configuration
 import net.woggioni.rbcs.client.RemoteBuildCacheClient
-import net.woggioni.rbcs.common.contextLogger
+import net.woggioni.rbcs.common.createLogger
 import picocli.CommandLine
 import java.security.SecureRandom
 import kotlin.random.Random
@@ -13,7 +14,30 @@ import kotlin.random.Random
     showDefaultValues = true
 )
 class HealthCheckCommand : RbcsCommand() {
-    private val log = contextLogger()
+    companion object{
+        private val log = createLogger<HealthCheckCommand>()
+
+        fun execute(profile : Configuration.Profile) {
+            RemoteBuildCacheClient(profile).use { client ->
+                val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
+                val nonce = ByteArray(0xa0)
+                random.nextBytes(nonce)
+                client.healthCheck(nonce).thenApply { value ->
+                    if(value == null) {
+                        throw IllegalStateException("Empty response from server")
+                    }
+                    val offset = value.size - nonce.size
+                    for(i in 0 until nonce.size) {
+                        val a = nonce[i]
+                        val b = value[offset + i]
+                        if(a != b) {
+                            throw IllegalStateException("Server nonce does not match")
+                        }
+                    }
+                }.get()
+            }
+        }
+    }
 
     @CommandLine.Spec
     private lateinit var spec: CommandLine.Model.CommandSpec
@@ -24,22 +48,6 @@ class HealthCheckCommand : RbcsCommand() {
             clientCommand.configuration.profiles[profileName]
                 ?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
         }
-        RemoteBuildCacheClient(profile).use { client ->
-            val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
-            val nonce = ByteArray(0xa0)
-            random.nextBytes(nonce)
-            client.healthCheck(nonce).thenApply { value ->
-                if(value == null) {
-                    throw IllegalStateException("Empty response from server")
-                }
-                for(i in 0 until nonce.size) {
-                    for(j in value.size - nonce.size until nonce.size) {
-                        if(nonce[i] != value[j]) {
-                            throw IllegalStateException("Server nonce does not match")
-                        }
-                    }
-                }
-            }.get()
-        }
+        execute(profile)
     }
 }
@@ -1,9 +1,9 @@
 package net.woggioni.rbcs.cli.impl.commands
 
+import net.woggioni.jwo.UncloseableOutputStream
 import net.woggioni.rbcs.cli.impl.RbcsCommand
 import net.woggioni.rbcs.cli.impl.converters.OutputStreamConverter
 import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
-import net.woggioni.jwo.UncloseableOutputStream
 import picocli.CommandLine
 import java.io.OutputStream
 import java.io.OutputStreamWriter
@@ -1,11 +1,18 @@
 package net.woggioni.rbcs.cli.impl.commands
 
+import net.woggioni.jwo.Hash
+import net.woggioni.jwo.JWO
+import net.woggioni.jwo.NullOutputStream
+import net.woggioni.rbcs.api.CacheValueMetadata
 import net.woggioni.rbcs.cli.impl.RbcsCommand
-import net.woggioni.rbcs.cli.impl.converters.InputStreamConverter
+import net.woggioni.rbcs.client.Configuration
 import net.woggioni.rbcs.client.RemoteBuildCacheClient
-import net.woggioni.rbcs.common.contextLogger
+import net.woggioni.rbcs.common.createLogger
 import picocli.CommandLine
 import java.io.InputStream
+import java.nio.file.Files
+import java.nio.file.Path
+import java.util.UUID
 
 @CommandLine.Command(
     name = "put",
@@ -13,25 +20,55 @@ import java.io.InputStream
     showDefaultValues = true
 )
 class PutCommand : RbcsCommand() {
-    private val log = contextLogger()
+    companion object {
+        private val log = createLogger<PutCommand>()
+
+        fun execute(profile: Configuration.Profile,
+                    actualKey : String,
+                    inputStream: InputStream,
+                    mimeType : String?,
+                    contentDisposition: String?
+        ) {
+            RemoteBuildCacheClient(profile).use { client ->
+                inputStream.use {
+                    client.put(actualKey, it.readAllBytes(), CacheValueMetadata(contentDisposition, mimeType))
+                }.get()
+                println(profile.serverURI.resolve(actualKey))
+            }
+        }
+    }
 
     @CommandLine.Spec
     private lateinit var spec: CommandLine.Model.CommandSpec
 
     @CommandLine.Option(
         names = ["-k", "--key"],
-        description = ["The key for the new value"],
+        description = ["The key for the new value, randomly generated if omitted"],
         paramLabel = "KEY"
     )
-    private var key : String = ""
+    private var key : String? = null
+
+    @CommandLine.Option(
+        names = ["-i", "--inline"],
+        description = ["File is to be displayed in the browser"],
+        paramLabel = "INLINE",
+    )
+    private var inline : Boolean = false
+
+    @CommandLine.Option(
+        names = ["-t", "--type"],
+        description = ["File mime type"],
+        paramLabel = "MIME_TYPE",
+    )
+    private var mimeType : String? = null
 
     @CommandLine.Option(
         names = ["-v", "--value"],
         description = ["Path to a file containing the value to be added (defaults to stdin)"],
         paramLabel = "VALUE_FILE",
-        converter = [InputStreamConverter::class]
     )
-    private var value : InputStream = System.`in`
+    private var value : Path? = null
 
     override fun run() {
         val clientCommand = spec.parent().userObject() as ClientCommand
@@ -40,9 +77,37 @@ class PutCommand : RbcsCommand() {
                 ?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
         }
         RemoteBuildCacheClient(profile).use { client ->
-            value.use {
-                client.put(key, it.readAllBytes())
-            }.get()
+            val inputStream : InputStream
+            val mimeType : String?
+            val contentDisposition : String?
+            val valuePath = value
+            val actualKey : String?
+            if(valuePath != null) {
+                inputStream = Files.newInputStream(valuePath)
+                mimeType = this.mimeType ?: Files.probeContentType(valuePath)
+                contentDisposition = if(inline) {
+                    "inline"
+                } else {
+                    "attachment; filename=\"${valuePath.fileName}\""
+                }
+                actualKey = key ?: let {
+                    val md = Hash.Algorithm.SHA512.newInputStream(Files.newInputStream(valuePath)).use {
+                        JWO.copy(it, NullOutputStream())
+                        it.messageDigest
+                    }
+                    UUID.nameUUIDFromBytes(md.digest()).toString()
+                }
+            } else {
+                inputStream = System.`in`
+                mimeType = this.mimeType
+                contentDisposition = if(inline) {
+                    "inline"
+                } else {
+                    null
+                }
+                actualKey = key ?: UUID.randomUUID().toString()
+            }
+            execute(profile, actualKey, inputStream, mimeType, contentDisposition)
         }
     }
 }
@@ -1,19 +1,20 @@
 package net.woggioni.rbcs.cli.impl.commands
 
+import net.woggioni.jwo.Application
+import net.woggioni.jwo.JWO
 import net.woggioni.rbcs.cli.impl.RbcsCommand
 import net.woggioni.rbcs.cli.impl.converters.DurationConverter
-import net.woggioni.rbcs.common.contextLogger
+import net.woggioni.rbcs.common.createLogger
 import net.woggioni.rbcs.common.debug
 import net.woggioni.rbcs.common.info
 import net.woggioni.rbcs.server.RemoteBuildCacheServer
 import net.woggioni.rbcs.server.RemoteBuildCacheServer.Companion.DEFAULT_CONFIGURATION_URL
-import net.woggioni.jwo.Application
-import net.woggioni.jwo.JWO
 import picocli.CommandLine
 import java.io.ByteArrayOutputStream
 import java.nio.file.Files
 import java.nio.file.Path
 import java.time.Duration
+import java.util.concurrent.TimeUnit
 
 @CommandLine.Command(
     name = "server",
@@ -21,8 +22,9 @@ import java.time.Duration
     showDefaultValues = true
 )
 class ServerCommand(app : Application) : RbcsCommand() {
-    private val log = contextLogger()
+    companion object {
+        private val log = createLogger<ServerCommand>()
+    }
 
     private fun createDefaultConfigurationFile(configurationFile: Path) {
         log.info {
@@ -57,6 +59,9 @@ class ServerCommand(app : Application) : RbcsCommand() {
             createDefaultConfigurationFile(configurationFile)
         }
 
+        log.debug {
+            "Using configuration file '$configurationFile'"
+        }
         val configuration = RemoteBuildCacheServer.loadConfiguration(configurationFile)
         log.debug {
             ByteArrayOutputStream().also {
@@ -66,11 +71,20 @@ class ServerCommand(app : Application) : RbcsCommand() {
             }
         }
         val server = RemoteBuildCacheServer(configuration)
-        server.run().use { server ->
-            timeout?.let {
-                Thread.sleep(it)
-                server.shutdown()
+        val handle = server.run()
+        val shutdownHook = Thread.ofPlatform().unstarted {
+            handle.sendShutdownSignal()
+            try {
+                handle.get(60, TimeUnit.SECONDS)
+            } catch (ex : Throwable) {
+                log.warn(ex.message, ex)
             }
         }
+        Runtime.getRuntime().addShutdownHook(shutdownHook)
+        if(timeout != null) {
+            Thread.sleep(timeout)
+            handle.sendShutdownSignal()
+        }
+        handle.get()
     }
 }
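Review note: the server command now keeps an explicit handle, registers a JVM shutdown hook that sends a shutdown signal and waits up to 60 seconds, and uses the optional timeout only to trigger that same signal. A rough standalone sketch of the pattern; ServerHandle is a hypothetical stand-in for whatever server.run() actually returns:

    import java.util.concurrent.TimeUnit

    // Hypothetical stand-in for the object returned by RemoteBuildCacheServer.run()
    interface ServerHandle {
        fun sendShutdownSignal()
        fun get(timeout: Long, unit: TimeUnit)
    }

    fun installShutdownHook(handle: ServerHandle) {
        val hook = Thread {
            // Ask the server to stop, then grant it a bounded grace period
            handle.sendShutdownSignal()
            runCatching { handle.get(60, TimeUnit.SECONDS) }
                .onFailure { ex -> System.err.println("shutdown did not complete cleanly: ${ex.message}") }
        }
        Runtime.getRuntime().addShutdownHook(hook)
    }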
@@ -0,0 +1,10 @@
+package net.woggioni.rbcs.cli.impl.converters
+
+import picocli.CommandLine
+
+
+class ByteSizeConverter : CommandLine.ITypeConverter<Int> {
+    override fun convert(value: String): Int {
+        return Integer.decode(value)
+    }
+}
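Review note: Integer.decode also accepts 0x/0X/# hexadecimal and leading-zero octal literals, so the converter handles more than plain decimal. Illustrative values only:

    val converter = ByteSizeConverter()
    val decimal = converter.convert("1048576")   // 1 MiB written as decimal
    val hex = converter.convert("0x100000")      // the same value in hexadecimal
    check(decimal == hex)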
@@ -6,9 +6,11 @@ plugins {
 dependencies {
     implementation project(':rbcs-api')
     implementation project(':rbcs-common')
-    implementation catalog.picocli
     implementation catalog.slf4j.api
     implementation catalog.netty.buffer
+    implementation catalog.netty.handler
+    implementation catalog.netty.transport
+    implementation catalog.netty.common
     implementation catalog.netty.codec.http
 
     testRuntimeOnly catalog.logback.classic
@@ -0,0 +1,62 @@
+package net.woggioni.rbcs.client
+
+import net.woggioni.rbcs.client.impl.Parser
+import net.woggioni.rbcs.common.Xml
+import java.net.URI
+import java.nio.file.Files
+import java.nio.file.Path
+import java.security.PrivateKey
+import java.security.cert.X509Certificate
+import java.time.Duration
+
+data class Configuration(
+    val profiles: Map<String, Profile>
+) {
+    sealed class Authentication {
+        data class TlsClientAuthenticationCredentials(
+            val key: PrivateKey,
+            val certificateChain: Array<X509Certificate>
+        ) : Authentication()
+
+        data class BasicAuthenticationCredentials(val username: String, val password: String) : Authentication()
+    }
+
+    class TrustStore (
+        var file: Path?,
+        var password: String?,
+        var checkCertificateStatus: Boolean = false,
+        var verifyServerCertificate: Boolean = true,
+    )
+
+    class RetryPolicy(
+        val maxAttempts: Int,
+        val initialDelayMillis: Long,
+        val exp: Double
+    )
+
+    class Connection(
+        val readIdleTimeout: Duration,
+        val writeIdleTimeout: Duration,
+        val idleTimeout: Duration,
+        val requestPipelining : Boolean,
+    )
+
+    data class Profile(
+        val serverURI: URI,
+        val connection: Connection,
+        val authentication: Authentication?,
+        val connectionTimeout: Duration?,
+        val maxConnections: Int,
+        val compressionEnabled: Boolean,
+        val retryPolicy: RetryPolicy?,
+        val tlsTruststore : TrustStore?
+    )
+
+    companion object {
+        fun parse(path: Path): Configuration {
+            return Files.newInputStream(path).use {
+                Xml.parseXml(path.toUri().toURL(), it)
+            }.let(Parser::parse)
+        }
+    }
+}
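Review note: loading this configuration from an XML file and picking a profile would look roughly like the following; the file name and profile name are made up for illustration:

    import java.nio.file.Path

    val configuration = Configuration.parse(Path.of("rbcs-client.xml"))
    val profile = configuration.profiles["my-profile"]
        ?: error("profile 'my-profile' is not defined in the configuration")
    println("server: ${profile.serverURI}, max connections: ${profile.maxConnections}")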
@@ -7,8 +7,10 @@ import io.netty.channel.Channel
 import io.netty.channel.ChannelHandlerContext
 import io.netty.channel.ChannelOption
 import io.netty.channel.ChannelPipeline
+import io.netty.channel.IoEventLoopGroup
+import io.netty.channel.MultiThreadIoEventLoopGroup
 import io.netty.channel.SimpleChannelInboundHandler
-import io.netty.channel.nio.NioEventLoopGroup
+import io.netty.channel.nio.NioIoHandler
 import io.netty.channel.pool.AbstractChannelPoolHandler
 import io.netty.channel.pool.ChannelPool
 import io.netty.channel.pool.FixedChannelPool
@@ -28,75 +30,70 @@ import io.netty.handler.codec.http.HttpVersion
 import io.netty.handler.ssl.SslContext
 import io.netty.handler.ssl.SslContextBuilder
 import io.netty.handler.stream.ChunkedWriteHandler
+import io.netty.handler.timeout.IdleState
+import io.netty.handler.timeout.IdleStateEvent
+import io.netty.handler.timeout.IdleStateHandler
 import io.netty.util.concurrent.Future
+import io.netty.util.concurrent.Future as NettyFuture
 import io.netty.util.concurrent.GenericFutureListener
-import net.woggioni.rbcs.client.impl.Parser
-import net.woggioni.rbcs.common.Xml
-import net.woggioni.rbcs.common.contextLogger
-import net.woggioni.rbcs.common.debug
-import net.woggioni.rbcs.common.trace
+import java.io.IOException
 import java.net.InetSocketAddress
 import java.net.URI
-import java.nio.file.Files
-import java.nio.file.Path
-import java.security.PrivateKey
 import java.security.cert.X509Certificate
-import java.time.Duration
 import java.util.Base64
 import java.util.concurrent.CompletableFuture
+import java.util.concurrent.TimeUnit
+import java.util.concurrent.TimeoutException
 import java.util.concurrent.atomic.AtomicInteger
-import io.netty.util.concurrent.Future as NettyFuture
+import javax.net.ssl.TrustManagerFactory
+import javax.net.ssl.X509TrustManager
+import kotlin.random.Random
+import net.woggioni.rbcs.api.CacheValueMetadata
+import net.woggioni.rbcs.common.RBCS.loadKeystore
+import net.woggioni.rbcs.common.createLogger
+import net.woggioni.rbcs.common.debug
+import net.woggioni.rbcs.common.trace
 
 class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoCloseable {
-    private val group: NioEventLoopGroup
-    private var sslContext: SslContext
-    private val log = contextLogger()
-    private val pool: ChannelPool
-
-    data class Configuration(
-        val profiles: Map<String, Profile>
-    ) {
-        sealed class Authentication {
-            data class TlsClientAuthenticationCredentials(
-                val key: PrivateKey,
-                val certificateChain: Array<X509Certificate>
-            ) : Authentication()
-
-            data class BasicAuthenticationCredentials(val username: String, val password: String) : Authentication()
-        }
-
-        class RetryPolicy(
-            val maxAttempts: Int,
-            val initialDelayMillis: Long,
-            val exp: Double
-        )
-
-        data class Profile(
-            val serverURI: URI,
-            val authentication: Authentication?,
-            val connectionTimeout: Duration?,
-            val maxConnections: Int,
-            val retryPolicy: RetryPolicy?,
-        )
-
-        companion object {
-            fun parse(path: Path): Configuration {
-                return Files.newInputStream(path).use {
-                    Xml.parseXml(path.toUri().toURL(), it)
-                }.let(Parser::parse)
-            }
-        }
+    companion object {
+        private val log = createLogger<RemoteBuildCacheClient>()
     }
 
+    private val group: IoEventLoopGroup
+    private val sslContext: SslContext
+    private val pool: ChannelPool
+
     init {
-        group = NioEventLoopGroup()
+        group = MultiThreadIoEventLoopGroup(NioIoHandler.newFactory())
         sslContext = SslContextBuilder.forClient().also { builder ->
             (profile.authentication as? Configuration.Authentication.TlsClientAuthenticationCredentials)?.let { tlsClientAuthenticationCredentials ->
-                builder.keyManager(
-                    tlsClientAuthenticationCredentials.key,
-                    *tlsClientAuthenticationCredentials.certificateChain
-                )
+                builder.apply {
+                    keyManager(
+                        tlsClientAuthenticationCredentials.key,
+                        *tlsClientAuthenticationCredentials.certificateChain
+                    )
+                    profile.tlsTruststore?.let { trustStore ->
+                        if (!trustStore.verifyServerCertificate) {
+                            trustManager(object : X509TrustManager {
+                                override fun checkClientTrusted(certChain: Array<out X509Certificate>, p1: String?) {
+                                }
+
+                                override fun checkServerTrusted(certChain: Array<out X509Certificate>, p1: String?) {
+                                }
+
+                                override fun getAcceptedIssuers() = null
+                            })
+                        } else {
+                            trustStore.file?.let {
+                                val ts = loadKeystore(it, trustStore.password)
+                                val trustManagerFactory: TrustManagerFactory =
+                                    TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm())
+                                trustManagerFactory.init(ts)
+                                trustManager(trustManagerFactory)
+                            }
+                        }
+                    }
+                }
             }
         }.build()
 
@@ -141,18 +138,37 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
             }
 
             override fun channelCreated(ch: Channel) {
-                val connectionId = connectionCount.getAndIncrement()
+                val connectionId = connectionCount.incrementAndGet()
                 log.debug {
-                    "Created connection $connectionId, total number of active connections: $connectionId"
+                    "Created connection ${ch.id().asShortText()}, total number of active connections: $connectionId"
                 }
                 ch.closeFuture().addListener {
                     val activeConnections = connectionCount.decrementAndGet()
                     log.debug {
-                        "Closed connection $connectionId, total number of active connections: $activeConnections"
+                        "Closed connection ${
+                            ch.id().asShortText()
+                        }, total number of active connections: $activeConnections"
                     }
                 }
                 val pipeline: ChannelPipeline = ch.pipeline()
+
+                profile.connection?.also { conn ->
+                    val readIdleTimeout = conn.readIdleTimeout.toMillis()
+                    val writeIdleTimeout = conn.writeIdleTimeout.toMillis()
+                    val idleTimeout = conn.idleTimeout.toMillis()
+                    if (readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
+                        pipeline.addLast(
+                            IdleStateHandler(
+                                true,
+                                readIdleTimeout,
+                                writeIdleTimeout,
+                                idleTimeout,
+                                TimeUnit.MILLISECONDS
+                            )
+                        )
+                    }
+                }
+
                 // Add SSL handler if needed
                 if ("https".equals(scheme, ignoreCase = true)) {
                     pipeline.addLast("ssl", sslContext.newHandler(ch.alloc(), host, port))
@@ -160,7 +176,9 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
 
                 // HTTP handlers
                 pipeline.addLast("codec", HttpClientCodec())
-                pipeline.addLast("decompressor", HttpContentDecompressor())
+                if (profile.compressionEnabled) {
+                    pipeline.addLast("decompressor", HttpContentDecompressor())
+                }
                 pipeline.addLast("aggregator", HttpObjectAggregator(134217728))
                 pipeline.addLast("chunked", ChunkedWriteHandler())
             }
@@ -206,6 +224,7 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
                 retryPolicy.initialDelayMillis.toDouble(),
                 retryPolicy.exp,
                 outcomeHandler,
+                Random.Default,
                 operation
             )
         } else {
@@ -253,9 +272,13 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
         }
     }
 
-    fun put(key: String, content: ByteArray): CompletableFuture<Unit> {
+    fun put(key: String, content: ByteArray, metadata: CacheValueMetadata): CompletableFuture<Unit> {
         return executeWithRetry {
-            sendRequest(profile.serverURI.resolve(key), HttpMethod.PUT, content)
+            val extraHeaders = sequenceOf(
+                metadata.mimeType?.let { HttpHeaderNames.CONTENT_TYPE to it },
+                metadata.contentDisposition?.let { HttpHeaderNames.CONTENT_DISPOSITION to it }
+            ).filterNotNull()
+            sendRequest(profile.serverURI.resolve(key), HttpMethod.PUT, content, extraHeaders.asIterable())
         }.thenApply {
             val status = it.status()
             if (it.status() != HttpResponseStatus.CREATED && it.status() != HttpResponseStatus.OK) {
@@ -264,35 +287,90 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
         }
     }
 
-    private fun sendRequest(uri: URI, method: HttpMethod, body: ByteArray?): CompletableFuture<FullHttpResponse> {
+    private fun sendRequest(
+        uri: URI,
+        method: HttpMethod,
+        body: ByteArray?,
+        extraHeaders: Iterable<Pair<CharSequence, CharSequence>>? = null
+    ): CompletableFuture<FullHttpResponse> {
         val responseFuture = CompletableFuture<FullHttpResponse>()
         // Custom handler for processing responses

         pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {

             override fun operationComplete(channelFuture: Future<Channel>) {
                 if (channelFuture.isSuccess) {
                     val channel = channelFuture.now
                     val pipeline = channel.pipeline()
-                    channel.pipeline().addLast("handler", object : SimpleChannelInboundHandler<FullHttpResponse>() {
+
+                    val closeListener = GenericFutureListener<Future<Void>> {
+                        responseFuture.completeExceptionally(IOException("The remote server closed the connection"))
+                    }
+                    channel.closeFuture().addListener(closeListener)
+
+                    val responseHandler = object : SimpleChannelInboundHandler<FullHttpResponse>() {
+
+                        override fun handlerAdded(ctx: ChannelHandlerContext) {
+                            channel.closeFuture().removeListener(closeListener)
+                        }
+
                         override fun channelRead0(
                             ctx: ChannelHandlerContext,
                             response: FullHttpResponse
                         ) {
-                            pipeline.removeLast()
-                            pool.release(channel)
+                            pipeline.remove(this)
                             responseFuture.complete(response)
+                            if(!profile.connection.requestPipelining) {
+                                pool.release(channel)
+                            }
                         }

                         override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
+                            ctx.newPromise()
                             val ex = when (cause) {
                                 is DecoderException -> cause.cause
                                 else -> cause
                             }
                             responseFuture.completeExceptionally(ex)
                             ctx.close()
-                            pipeline.removeLast()
-                            pool.release(channel)
                         }
-                    })
+
+                        override fun channelInactive(ctx: ChannelHandlerContext) {
+                            responseFuture.completeExceptionally(IOException("The remote server closed the connection"))
+                            if(!profile.connection.requestPipelining) {
+                                pool.release(channel)
+                            }
+                            super.channelInactive(ctx)
+                        }
+
+                        override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
+                            if (evt is IdleStateEvent) {
+                                val te = when (evt.state()) {
+                                    IdleState.READER_IDLE -> TimeoutException(
+                                        "Read timeout",
+                                    )
+
+                                    IdleState.WRITER_IDLE -> TimeoutException("Write timeout")
+
+                                    IdleState.ALL_IDLE -> TimeoutException("Idle timeout")
+                                    null -> throw IllegalStateException("This should never happen")
+                                }
+                                responseFuture.completeExceptionally(te)
+                                super.userEventTriggered(ctx, evt)
+                                if (this === pipeline.last()) {
+                                    ctx.close()
+                                }
+                                if(!profile.connection.requestPipelining) {
+                                    pool.release(channel)
+                                }
+                            } else {
+                                super.userEventTriggered(ctx, evt)
+                            }
+                        }
+                    }
+                    pipeline.addLast(responseHandler)

                     // Prepare the HTTP request
                     val request: FullHttpRequest = let {
                         val content: ByteBuf? = body?.takeIf(ByteArray::isNotEmpty)?.let(Unpooled::wrappedBuffer)
@@ -302,17 +380,22 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
                             uri.rawPath,
                             content ?: Unpooled.buffer(0)
                         ).apply {
+                            // Set headers
                             headers().apply {
                                 if (content != null) {
-                                    set(HttpHeaderNames.CONTENT_TYPE, HttpHeaderValues.APPLICATION_OCTET_STREAM)
                                     set(HttpHeaderNames.CONTENT_LENGTH, content.readableBytes())
                                 }
                                 set(HttpHeaderNames.HOST, profile.serverURI.host)
                                 set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
-                                set(
-                                    HttpHeaderNames.ACCEPT_ENCODING,
-                                    HttpHeaderValues.GZIP.toString() + "," + HttpHeaderValues.DEFLATE.toString()
-                                )
+                                if (profile.compressionEnabled) {
+                                    set(
+                                        HttpHeaderNames.ACCEPT_ENCODING,
+                                        HttpHeaderValues.GZIP.toString() + "," + HttpHeaderValues.DEFLATE.toString()
+                                    )
+                                }
+                                extraHeaders?.forEach { (k, v) ->
+                                    add(k, v)
+                                }
                                 // Add basic auth if configured
                                 (profile.authentication as? Configuration.Authentication.BasicAuthenticationCredentials)?.let { credentials ->
                                     val auth = "${credentials.username}:${credentials.password}"
@@ -323,9 +406,16 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
                                 }
                             }

-                            // Set headers
                             // Send the request
-                            channel.writeAndFlush(request)
+                            channel.writeAndFlush(request).addListener {
+                                if(!it.isSuccess) {
+                                    val ex = it.cause()
+                                    log.warn(ex.message, ex)
+                                }
+                                if(profile.connection.requestPipelining) {
+                                    pool.release(channel)
+                                }
+                            }
                 } else {
                     responseFuture.completeExceptionally(channelFuture.cause())
                 }
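Review note: put() now threads the optional metadata into HTTP headers, and absent values simply produce no header. The header-assembly idiom in isolation, with illustrative values:

    import io.netty.handler.codec.http.HttpHeaderNames

    val mimeType: String? = "application/octet-stream"   // may be null
    val contentDisposition: String? = null                // may be null

    // Only the non-null pairs survive; a null value adds no header at all.
    val extraHeaders = sequenceOf(
        mimeType?.let { HttpHeaderNames.CONTENT_TYPE to it },
        contentDisposition?.let { HttpHeaderNames.CONTENT_DISPOSITION to it }
    ).filterNotNull().toList()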
@@ -1,7 +1,7 @@
 package net.woggioni.rbcs.client.impl
 
 import net.woggioni.rbcs.api.exception.ConfigurationException
-import net.woggioni.rbcs.client.RemoteBuildCacheClient
+import net.woggioni.rbcs.client.Configuration
 import net.woggioni.rbcs.common.Xml.Companion.asIterable
 import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
 import org.w3c.dom.Document
@@ -12,12 +12,13 @@ import java.security.KeyStore
 import java.security.PrivateKey
 import java.security.cert.X509Certificate
 import java.time.Duration
+import java.time.temporal.ChronoUnit
 
 object Parser {
 
-    fun parse(document: Document): RemoteBuildCacheClient.Configuration {
+    fun parse(document: Document): Configuration {
         val root = document.documentElement
-        val profiles = mutableMapOf<String, RemoteBuildCacheClient.Configuration.Profile>()
+        val profiles = mutableMapOf<String, Configuration.Profile>()
 
         for (child in root.asIterable()) {
             val tagName = child.localName
@@ -27,8 +28,15 @@ object Parser {
                         child.renderAttribute("name") ?: throw ConfigurationException("name attribute is required")
                     val uri = child.renderAttribute("base-url")?.let(::URI)
                         ?: throw ConfigurationException("base-url attribute is required")
-                    var authentication: RemoteBuildCacheClient.Configuration.Authentication? = null
-                    var retryPolicy: RemoteBuildCacheClient.Configuration.RetryPolicy? = null
+                    var authentication: Configuration.Authentication? = null
+                    var retryPolicy: Configuration.RetryPolicy? = null
+                    var connection : Configuration.Connection = Configuration.Connection(
+                        Duration.ofSeconds(60),
+                        Duration.ofSeconds(60),
+                        Duration.ofSeconds(30),
+                        false
+                    )
+                    var trustStore : Configuration.TrustStore? = null
                     for (gchild in child.asIterable()) {
                         when (gchild.localName) {
                             "tls-client-auth" -> {
@@ -49,7 +57,7 @@ object Parser {
                                     .toList()
                                     .toTypedArray()
                                 authentication =
-                                    RemoteBuildCacheClient.Configuration.Authentication.TlsClientAuthenticationCredentials(
+                                    Configuration.Authentication.TlsClientAuthenticationCredentials(
                                         key,
                                         certChain
                                     )
@@ -61,7 +69,7 @@ object Parser {
                                 val password = gchild.renderAttribute("password")
                                     ?: throw ConfigurationException("password attribute is required")
                                 authentication =
-                                    RemoteBuildCacheClient.Configuration.Authentication.BasicAuthenticationCredentials(
+                                    Configuration.Authentication.BasicAuthenticationCredentials(
                                         username,
                                         password
                                     )
@@ -80,12 +88,40 @@ object Parser {
                                 gchild.renderAttribute("exp")
                                     ?.let(String::toDouble)
                                     ?: 2.0f
-                                retryPolicy = RemoteBuildCacheClient.Configuration.RetryPolicy(
+                                retryPolicy = Configuration.RetryPolicy(
                                     maxAttempts,
                                     initialDelay.toMillis(),
                                     exp.toDouble()
                                 )
                             }
+
+                            "connection" -> {
+                                val idleTimeout = gchild.renderAttribute("idle-timeout")
+                                    ?.let(Duration::parse) ?: Duration.of(30, ChronoUnit.SECONDS)
+                                val readIdleTimeout = gchild.renderAttribute("read-idle-timeout")
+                                    ?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
+                                val writeIdleTimeout = gchild.renderAttribute("write-idle-timeout")
+                                    ?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
+                                val requestPipelining = gchild.renderAttribute("request-pipelining")
+                                    ?.let(String::toBoolean) ?: false
+                                connection = Configuration.Connection(
+                                    readIdleTimeout,
+                                    writeIdleTimeout,
+                                    idleTimeout,
+                                    requestPipelining
+                                )
+                            }
+
+                            "tls-trust-store" -> {
+                                val file = gchild.renderAttribute("file")
+                                    ?.let(Path::of)
+                                val password = gchild.renderAttribute("password")
+                                val checkCertificateStatus = gchild.renderAttribute("check-certificate-status")
+                                    ?.let(String::toBoolean) ?: false
+                                val verifyServerCertificate = gchild.renderAttribute("verify-server-certificate")
+                                    ?.let(String::toBoolean) ?: true
+                                trustStore = Configuration.TrustStore(file, password, checkCertificateStatus, verifyServerCertificate)
+                            }
                         }
                     }
                     val maxConnections = child.renderAttribute("max-connections")
@@ -93,16 +129,23 @@ object Parser {
                         ?: 50
                     val connectionTimeout = child.renderAttribute("connection-timeout")
                         ?.let(Duration::parse)
-                    profiles[name] = RemoteBuildCacheClient.Configuration.Profile(
+                    val compressionEnabled = child.renderAttribute("enable-compression")
+                        ?.let(String::toBoolean)
+                        ?: true
+
+                    profiles[name] = Configuration.Profile(
                         uri,
+                        connection,
                         authentication,
                         connectionTimeout,
                         maxConnections,
-                        retryPolicy
+                        compressionEnabled,
+                        retryPolicy,
+                        trustStore
                     )
                 }
             }
         }
-        return RemoteBuildCacheClient.Configuration(profiles)
+        return Configuration(profiles)
     }
 }
@@ -3,6 +3,8 @@ package net.woggioni.rbcs.client
 import io.netty.util.concurrent.EventExecutorGroup
 import java.util.concurrent.CompletableFuture
 import java.util.concurrent.TimeUnit
+import kotlin.math.pow
+import kotlin.random.Random
 
 sealed class OperationOutcome<T> {
     class Success<T>(val result: T) : OperationOutcome<T>()
@@ -24,8 +26,10 @@ fun <T> executeWithRetry(
     initialDelay: Double,
     exp: Double,
     outcomeHandler: OutcomeHandler<T>,
+    randomizer : Random?,
     cb: () -> CompletableFuture<T>
 ): CompletableFuture<T> {
+
     val finalResult = cb()
     var future = finalResult
     var shortCircuit = false
@@ -46,7 +50,7 @@ fun <T> executeWithRetry(
         is OutcomeHandlerResult.Retry -> {
             val res = CompletableFuture<T>()
             val delay = run {
-                val scheduledDelay = (initialDelay * Math.pow(exp, i.toDouble())).toLong()
+                val scheduledDelay = (initialDelay * exp.pow(i.toDouble()) * (1.0 + (randomizer?.nextDouble(-0.5, 0.5) ?: 0.0))).toLong()
                 outcomeHandlerResult.suggestedDelayMillis?.coerceAtMost(scheduledDelay) ?: scheduledDelay
             }
             eventExecutorGroup.schedule({
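Review note: the retry delay is now the exponential term scaled by a ±50% jitter factor whenever a randomizer is supplied. The schedule in isolation (a sketch, not the library function itself):

    import kotlin.math.pow
    import kotlin.random.Random

    // delay_i = initialDelay * exp^i, optionally multiplied by a factor in [0.5, 1.5)
    fun backoffDelayMillis(initialDelay: Double, exp: Double, attempt: Int, randomizer: Random?): Long {
        val jitter = 1.0 + (randomizer?.nextDouble(-0.5, 0.5) ?: 0.0)
        return (initialDelay * exp.pow(attempt.toDouble()) * jitter).toLong()
    }

    // e.g. initialDelay = 100.0, exp = 2.0 gives roughly 100, 200, 400, 800 ms without jitter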
@@ -15,36 +15,246 @@
     <xs:complexType name="profileType">
         <xs:sequence>
             <xs:choice>
-                <xs:element name="no-auth" type="rbcs-client:noAuthType"/>
-                <xs:element name="basic-auth" type="rbcs-client:basicAuthType"/>
-                <xs:element name="tls-client-auth" type="rbcs-client:tlsClientAuthType"/>
+                <xs:element name="no-auth" type="rbcs-client:noAuthType">
+                    <xs:annotation>
+                        <xs:documentation>
+                            Disable authentication.
+                        </xs:documentation>
+                    </xs:annotation>
+                </xs:element>
+                <xs:element name="basic-auth" type="rbcs-client:basicAuthType">
+                    <xs:annotation>
+                        <xs:documentation>
+                            Enable HTTP basic authentication.
+                        </xs:documentation>
+                    </xs:annotation>
+                </xs:element>
+                <xs:element name="tls-client-auth" type="rbcs-client:tlsClientAuthType">
+                    <xs:annotation>
+                        <xs:documentation>
+                            Enable TLS certificate authentication.
+                        </xs:documentation>
+                    </xs:annotation>
+                </xs:element>
             </xs:choice>
-            <xs:element name="retry-policy" type="rbcs-client:retryType" minOccurs="0"/>
+            <xs:element name="connection" type="rbcs-client:connectionType" minOccurs="0">
+                <xs:annotation>
+                    <xs:documentation>
+                        Set inactivity timeouts for connections to this server;
+                        if not present, connections are only closed on network errors.
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+            <xs:element name="retry-policy" type="rbcs-client:retryType" minOccurs="0">
+                <xs:annotation>
+                    <xs:documentation>
+                        Set a retry policy for this server; if not present, requests won't be retried.
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
+            <xs:element name="tls-trust-store" type="rbcs-client:trustStoreType" minOccurs="0">
+                <xs:annotation>
+                    <xs:documentation>
+                        If set, specify an alternative truststore to validate the server certificate.
+                        If not present, the system truststore is used.
+                    </xs:documentation>
+                </xs:annotation>
+            </xs:element>
         </xs:sequence>
-        <xs:attribute name="name" type="xs:token" use="required"/>
-        <xs:attribute name="base-url" type="xs:anyURI" use="required"/>
-        <xs:attribute name="max-connections" type="xs:positiveInteger" default="50"/>
-        <xs:attribute name="connection-timeout" type="xs:duration"/>
+        <xs:attribute name="name" type="xs:token" use="required">
+            <xs:annotation>
+                <xs:documentation>
+                    Name of this server profile, to be referred to from rbcs-cli with the '-p' parameter
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="base-url" type="xs:anyURI" use="required">
+            <xs:annotation>
+                <xs:documentation>
+                    RBCS server URL
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="max-connections" type="xs:positiveInteger" default="50">
+            <xs:annotation>
+                <xs:documentation>
+                    Maximum number of concurrent TCP connections to open with this server
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="connection-timeout" type="xs:duration">
+            <xs:annotation>
+                <xs:documentation>
+                    Timeout for establishing a connection to this server
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="enable-compression" type="xs:boolean" default="true">
+            <xs:annotation>
+                <xs:documentation>
+                    Enable HTTP compression when communicating to this server
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
     </xs:complexType>
 
-    <xs:complexType name="noAuthType"/>
+    <xs:complexType name="connectionType">
+        <xs:attribute name="idle-timeout" type="xs:duration" use="optional" default="PT30S">
+            <xs:annotation>
+                <xs:documentation>
+                    The client will close the connection with the server
+                    when neither a read nor a write was performed for the specified period of time.
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="read-idle-timeout" type="xs:duration" use="optional" default="PT60S">
+            <xs:annotation>
+                <xs:documentation>
+                    The client will close the connection with the server
+                    when no read was performed for the specified period of time.
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="write-idle-timeout" type="xs:duration" use="optional" default="PT60S">
+            <xs:annotation>
+                <xs:documentation>
+                    The client will close the connection with the server
+                    when no write was performed for the specified period of time.
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="request-pipelining" type="xs:boolean" use="optional" default="false">
+            <xs:annotation>
+                <xs:documentation>
+                    Enables HTTP/1.1 request pipelining
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+    </xs:complexType>
+
+    <xs:complexType name="noAuthType">
+        <xs:annotation>
+            <xs:documentation>
+                Add this tag to not use any type of authentication when talking to the RBCS server
+            </xs:documentation>
+        </xs:annotation>
+    </xs:complexType>
 
     <xs:complexType name="basicAuthType">
-        <xs:attribute name="user" type="xs:token" use="required"/>
-        <xs:attribute name="password" type="xs:string" use="required"/>
+        <xs:annotation>
+            <xs:documentation>
+                Add this tag to enable HTTP basic authentication for the communication to this server;
+                mind that HTTP basic authentication sends credentials directly over the network, so make sure
+                your communication is protected by TLS (i.e. your server's URL starts with "https").
+            </xs:documentation>
+        </xs:annotation>
+        <xs:attribute name="user" type="xs:token" use="required">
+            <xs:annotation>
+                <xs:documentation>
+                    Username for HTTP basic authentication
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="password" type="xs:string" use="required">
+            <xs:annotation>
+                <xs:documentation>
+                    Password used for HTTP basic authentication
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
     </xs:complexType>
 
     <xs:complexType name="tlsClientAuthType">
-        <xs:attribute name="key-store-file" type="xs:anyURI" use="required"/>
-        <xs:attribute name="key-store-password" type="xs:string" use="required"/>
-        <xs:attribute name="key-alias" type="xs:token" use="required"/>
-        <xs:attribute name="key-password" type="xs:string" use="optional"/>
+        <xs:attribute name="key-store-file" type="xs:anyURI" use="required">
+            <xs:annotation>
+                <xs:documentation>
+                    System path to the keystore file
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="key-store-password" type="xs:string" use="required">
+            <xs:annotation>
+                <xs:documentation>
+                    Password to open the keystore file
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="key-alias" type="xs:token" use="required">
+            <xs:annotation>
+                <xs:documentation>
+                    Alias of the keystore entry containing the private key
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="key-password" type="xs:string" use="optional">
+            <xs:annotation>
+                <xs:documentation>
+                    Private key entry's encryption password
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
     </xs:complexType>
 
     <xs:complexType name="retryType">
-        <xs:attribute name="max-attempts" type="xs:positiveInteger" use="required"/>
-        <xs:attribute name="initial-delay" type="xs:duration" default="PT1S"/>
-        <xs:attribute name="exp" type="xs:double" default="2.0"/>
+        <xs:annotation>
+            <xs:documentation>
+                Retry policy to use in case of failures, based on exponential backoff
+                https://en.wikipedia.org/wiki/Exponential_backoff
+            </xs:documentation>
+        </xs:annotation>
+
+        <xs:attribute name="max-attempts" type="xs:positiveInteger" use="required">
+            <xs:annotation>
+                <xs:documentation>
+                    Maximum number of attempts, after which the call will result in an error,
+                    throwing an exception related to the last received failure
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="initial-delay" type="xs:duration" default="PT1S">
+            <xs:annotation>
+                <xs:documentation>
+                    Delay to apply before retrying after the first failed call
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="exp" type="xs:double" default="2.0">
+            <xs:annotation>
+                <xs:documentation>
+                    Exponent to apply to compute the next delay
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+    </xs:complexType>
+
+    <xs:complexType name="trustStoreType">
+        <xs:attribute name="file" type="xs:string" use="required">
+            <xs:annotation>
+                <xs:documentation>
+                    Path to the truststore file
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="password" type="xs:string">
+            <xs:annotation>
+                <xs:documentation>
+                    Truststore file password
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="check-certificate-status" type="xs:boolean">
+            <xs:annotation>
+                <xs:documentation>
+                    Whether or not to check the server certificate validity using CRL/OCSP
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
+        <xs:attribute name="verify-server-certificate" type="xs:boolean" use="optional" default="true">
+            <xs:annotation>
+                <xs:documentation>
+                    If false, the client will blindly trust the certificate provided by the server
+                </xs:documentation>
+            </xs:annotation>
+        </xs:attribute>
     </xs:complexType>
 </xs:schema>
@@ -89,7 +89,7 @@ class RetryTest {
         val random = Random(testArgs.seed)
 
         val future =
-            executeWithRetry(executor, testArgs.maxAttempt, testArgs.initialDelay, testArgs.exp, outcomeHandler) {
+            executeWithRetry(executor, testArgs.maxAttempt, testArgs.initialDelay, testArgs.exp, outcomeHandler, null) {
                 val now = System.nanoTime()
                 val result = CompletableFuture<Int>()
                 executor.submit {
@@ -129,7 +129,7 @@ class RetryTest {
                     previousAttempt.first + testArgs.initialDelay * Math.pow(testArgs.exp, index.toDouble()) * 1e6
                 val actualTimestamp = timestamp
                 val err = Math.abs(expectedTimestamp - actualTimestamp) / expectedTimestamp
-                Assertions.assertTrue(err < 1e-3)
+                Assertions.assertTrue(err < 0.1)
             }
             if (index == attempts.size - 1 && index < testArgs.maxAttempt - 1) {
                 /*
@@ -9,6 +9,8 @@
                 key-store-password="password"
                 key-alias="woggioni@c962475fa38"
                 key-password="key-password"/>
+        <connection write-idle-timeout="PT60S" read-idle-timeout="PT60S" write-timeout="PT0S" read-timeout="PT0S" idle-timeout="PT30S" />
+        <tls-trust-store file="file.pfx" password="password" check-certificate-status="false" verify-server-certificate="true"/>
     </profile>
     <profile name="profile2" base-url="https://rbcs2.example.com/">
         <basic-auth user="user" password="password"/>
@@ -6,7 +6,7 @@ plugins {
 }
 
 dependencies {
-    implementation project(':rbcs-api')
+    implementation catalog.netty.transport
     implementation catalog.slf4j.api
     implementation catalog.jwo
     implementation catalog.netty.buffer
@@ -5,6 +5,7 @@ module net.woggioni.rbcs.common {
     requires kotlin.stdlib;
     requires net.woggioni.jwo;
     requires io.netty.buffer;
+    requires io.netty.transport;
 
     provides java.net.spi.URLStreamHandlerProvider with net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory;
     exports net.woggioni.rbcs.common;

15  rbcs-common/src/main/kotlin/net/woggioni/rbcs/common/BB.kt  Normal file
@@ -0,0 +1,15 @@
+package net.woggioni.rbcs.common
+
+import io.netty.buffer.ByteBuf
+import io.netty.buffer.ByteBufAllocator
+import io.netty.buffer.CompositeByteBuf
+
+fun extractChunk(buf: CompositeByteBuf, alloc: ByteBufAllocator): ByteBuf {
+    val chunk = alloc.compositeBuffer()
+    for (component in buf.decompose(0, buf.readableBytes())) {
+        chunk.addComponent(true, component.retain())
+    }
+    buf.removeComponents(0, buf.numComponents())
+    buf.clear()
+    return chunk
+}
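Review note: extractChunk drains an accumulating CompositeByteBuf into a fresh composite buffer, retaining each component so the caller owns the chunk, and leaves the accumulator empty for the next round. A hedged usage sketch:

    import io.netty.buffer.ByteBufAllocator
    import io.netty.buffer.Unpooled

    val allocator = ByteBufAllocator.DEFAULT
    val accumulator = allocator.compositeBuffer()
    accumulator.addComponent(true, Unpooled.copiedBuffer("hello ".toByteArray(Charsets.UTF_8)))
    accumulator.addComponent(true, Unpooled.copiedBuffer("world".toByteArray(Charsets.UTF_8)))

    val chunk = extractChunk(accumulator, allocator)   // accumulator is now empty
    try {
        println(chunk.toString(Charsets.UTF_8))        // prints "hello world"
    } finally {
        chunk.release()                                // releases the retained components
        accumulator.release()
    }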
@@ -1,90 +1,173 @@
 package net.woggioni.rbcs.common
 
+import io.netty.channel.Channel
+import io.netty.channel.ChannelHandlerContext
 import org.slf4j.Logger
 import org.slf4j.LoggerFactory
+import org.slf4j.MDC
 import org.slf4j.event.Level
+import org.slf4j.spi.LoggingEventBuilder
 import java.nio.file.Files
 import java.nio.file.Path
 import java.util.logging.LogManager
 
 inline fun <reified T> T.contextLogger() = LoggerFactory.getLogger(T::class.java)
+inline fun <reified T> createLogger() = LoggerFactory.getLogger(T::class.java)
 
-inline fun Logger.traceParam(messageBuilder : () -> Pair<String, Array<Any>>) {
-    if(isTraceEnabled) {
+inline fun Logger.traceParam(messageBuilder: () -> Pair<String, Array<Any>>) {
+    if (isTraceEnabled) {
         val (format, params) = messageBuilder()
         trace(format, params)
     }
 }
 
-inline fun Logger.debugParam(messageBuilder : () -> Pair<String, Array<Any>>) {
-    if(isDebugEnabled) {
+inline fun Logger.debugParam(messageBuilder: () -> Pair<String, Array<Any>>) {
+    if (isDebugEnabled) {
         val (format, params) = messageBuilder()
         info(format, params)
     }
 }
 
-inline fun Logger.infoParam(messageBuilder : () -> Pair<String, Array<Any>>) {
-    if(isInfoEnabled) {
+inline fun Logger.infoParam(messageBuilder: () -> Pair<String, Array<Any>>) {
+    if (isInfoEnabled) {
         val (format, params) = messageBuilder()
         info(format, params)
     }
 }
 
-inline fun Logger.warnParam(messageBuilder : () -> Pair<String, Array<Any>>) {
-    if(isWarnEnabled) {
+inline fun Logger.warnParam(messageBuilder: () -> Pair<String, Array<Any>>) {
    if (isWarnEnabled) {
         val (format, params) = messageBuilder()
         warn(format, params)
     }
 }
 
-inline fun Logger.errorParam(messageBuilder : () -> Pair<String, Array<Any>>) {
-    if(isErrorEnabled) {
+inline fun Logger.errorParam(messageBuilder: () -> Pair<String, Array<Any>>) {
+    if (isErrorEnabled) {
         val (format, params) = messageBuilder()
         error(format, params)
     }
 }
 
-inline fun log(log : Logger,
-               filter : Logger.() -> Boolean,
-               loggerMethod : Logger.(String) -> Unit, messageBuilder : () -> String) {
-    if(log.filter()) {
+
+inline fun log(
+    log: Logger,
+    filter: Logger.() -> Boolean,
+    loggerMethod: Logger.(String) -> Unit, messageBuilder: () -> String
+) {
+    if (log.filter()) {
         log.loggerMethod(messageBuilder())
     }
 }
 
-inline fun Logger.log(level : Level, messageBuilder : () -> String) {
-    if(isEnabledForLevel(level)) {
+fun withMDC(params: Array<Pair<String, String>>, cb: () -> Unit) {
+    object : AutoCloseable {
+        override fun close() {
+            for ((key, _) in params) MDC.remove(key)
+        }
+    }.use {
+        for ((key, value) in params) MDC.put(key, value)
+        cb()
+    }
+}
+
+inline fun Logger.log(level: Level, channel: Channel, crossinline messageBuilder: (LoggingEventBuilder) -> Unit ) {
+    if (isEnabledForLevel(level)) {
+        val params = arrayOf<Pair<String, String>>(
+            "channel-id-short" to channel.id().asShortText(),
+            "channel-id-long" to channel.id().asLongText(),
+            "remote-address" to channel.remoteAddress().toString(),
+            "local-address" to channel.localAddress().toString(),
+        )
+        withMDC(params) {
+            val builder = makeLoggingEventBuilder(level)
+//            for ((key, value) in params) {
+//                builder.addKeyValue(key, value)
+//            }
+            messageBuilder(builder)
+            builder.log()
+        }
+    }
+}
+inline fun Logger.log(level: Level, channel: Channel, crossinline messageBuilder: () -> String) {
+    log(level, channel) { builder ->
+        builder.setMessage(messageBuilder())
+    }
+}
+
+inline fun Logger.trace(ch: Channel, crossinline messageBuilder: () -> String) {
+    log(Level.TRACE, ch, messageBuilder)
+}
+
+inline fun Logger.debug(ch: Channel, crossinline messageBuilder: () -> String) {
+    log(Level.DEBUG, ch, messageBuilder)
+}
+
+inline fun Logger.info(ch: Channel, crossinline messageBuilder: () -> String) {
+    log(Level.INFO, ch, messageBuilder)
+}
+
+inline fun Logger.warn(ch: Channel, crossinline messageBuilder: () -> String) {
+    log(Level.WARN, ch, messageBuilder)
+}
+
+inline fun Logger.error(ch: Channel, crossinline messageBuilder: () -> String) {
+    log(Level.ERROR, ch, messageBuilder)
+}
+
+inline fun Logger.trace(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
+    log(Level.TRACE, ctx.channel(), messageBuilder)
+}
+
+inline fun Logger.debug(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
+    log(Level.DEBUG, ctx.channel(), messageBuilder)
+}
+
+inline fun Logger.info(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
+    log(Level.INFO, ctx.channel(), messageBuilder)
+}
+
+inline fun Logger.warn(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
+    log(Level.WARN, ctx.channel(), messageBuilder)
+}
+
+inline fun Logger.error(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
+    log(Level.ERROR, ctx.channel(), messageBuilder)
+}
+
+
+inline fun Logger.log(level: Level, messageBuilder: () -> String) {
+    if (isEnabledForLevel(level)) {
         makeLoggingEventBuilder(level).log(messageBuilder())
     }
 }
 
-inline fun Logger.trace(messageBuilder : () -> String) {
-    if(isTraceEnabled) {
+inline fun Logger.trace(messageBuilder: () -> String) {
+    if (isTraceEnabled) {
         trace(messageBuilder())
     }
 }
 
-inline fun Logger.debug(messageBuilder : () -> String) {
-    if(isDebugEnabled) {
+inline fun Logger.debug(messageBuilder: () -> String) {
+    if (isDebugEnabled) {
         debug(messageBuilder())
     }
 }
 
-inline fun Logger.info(messageBuilder : () -> String) {
-    if(isInfoEnabled) {
+inline fun Logger.info(messageBuilder: () -> String) {
+    if (isInfoEnabled) {
         info(messageBuilder())
     }
 }
 
-inline fun Logger.warn(messageBuilder : () -> String) {
-    if(isWarnEnabled) {
+inline fun Logger.warn(messageBuilder: () -> String) {
+    if (isWarnEnabled) {
         warn(messageBuilder())
     }
 }
 
-inline fun Logger.error(messageBuilder : () -> String) {
-    if(isErrorEnabled) {
+inline fun Logger.error(messageBuilder: () -> String) {
+    if (isErrorEnabled) {
         error(messageBuilder())
     }
 }
@@ -94,9 +177,9 @@ class LoggingConfig {
 
     init {
         val logManager = LogManager.getLogManager()
-        System.getProperty("log.config.source")?.let withSource@ { source ->
+        System.getProperty("log.config.source")?.let withSource@{ source ->
             val urls = LoggingConfig::class.java.classLoader.getResources(source)
-            while(urls.hasMoreElements()) {
+            while (urls.hasMoreElements()) {
                 val url = urls.nextElement()
                 url.openStream().use { inputStream ->
                     logManager.readConfiguration(inputStream)
@@ -7,7 +7,18 @@ import javax.crypto.SecretKeyFactory
import javax.crypto.spec.PBEKeySpec

object PasswordSecurity {
-    private const val KEY_LENGTH = 256
+    enum class Algorithm(
+        val codeName: String,
+        val keyLength: Int,
+        val iterations: Int) {
+        PBEWithHmacSHA512_224AndAES_256("PBEWithHmacSHA512/224AndAES_256", 64, 1),
+        PBEWithHmacSHA1AndAES_256("PBEWithHmacSHA1AndAES_256", 64, 1),
+        PBEWithHmacSHA384AndAES_128("PBEWithHmacSHA384AndAES_128", 64, 1),
+        PBEWithHmacSHA384AndAES_256("PBEWithHmacSHA384AndAES_256", 64, 1),
+        PBKDF2WithHmacSHA512("PBKDF2WithHmacSHA512", 512, 1),
+        PBKDF2WithHmacSHA384("PBKDF2WithHmacSHA384", 384, 1);
+    }

    private fun concat(arr1: ByteArray, arr2: ByteArray): ByteArray {
        val result = ByteArray(arr1.size + arr2.size)
@@ -23,22 +34,22 @@ object PasswordSecurity {
        return result
    }

-    fun hashPassword(password : String, salt : String? = null) : String {
+    fun hashPassword(password : String, salt : String? = null, algorithm : Algorithm = Algorithm.PBKDF2WithHmacSHA512) : String {
        val actualSalt = salt?.let(Base64.getDecoder()::decode) ?: SecureRandom().run {
            val result = ByteArray(16)
            nextBytes(result)
            result
        }
-        val spec: KeySpec = PBEKeySpec(password.toCharArray(), actualSalt, 10, KEY_LENGTH)
-        val factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1")
+        val spec: KeySpec = PBEKeySpec(password.toCharArray(), actualSalt, algorithm.iterations, algorithm.keyLength)
+        val factory = SecretKeyFactory.getInstance(algorithm.codeName)
        val hash = factory.generateSecret(spec).encoded
        return String(Base64.getEncoder().encode(concat(hash, actualSalt)))
    }

-    fun decodePasswordHash(passwordHash : String) : Pair<ByteArray, ByteArray> {
-        val decoded = Base64.getDecoder().decode(passwordHash)
-        val hash = ByteArray(KEY_LENGTH / 8)
-        val salt = ByteArray(decoded.size - KEY_LENGTH / 8)
+    fun decodePasswordHash(encodedPasswordHash : String, algorithm: Algorithm = Algorithm.PBKDF2WithHmacSHA512) : Pair<ByteArray, ByteArray> {
+        val decoded = Base64.getDecoder().decode(encodedPasswordHash)
+        val hash = ByteArray(algorithm.keyLength / 8)
+        val salt = ByteArray(decoded.size - algorithm.keyLength / 8)
        System.arraycopy(decoded, 0, hash, 0, hash.size)
        System.arraycopy(decoded, hash.size, salt, 0, salt.size)
        return hash to salt
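A minimal sketch (not part of the changeset) of how the new `algorithm` parameter is meant to be used for a hash-and-verify round trip; it mirrors the parameterized test added further down in this diff, and the constant password string is purely illustrative:

```kotlin
import net.woggioni.rbcs.common.PasswordSecurity
import net.woggioni.rbcs.common.PasswordSecurity.decodePasswordHash
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
import java.util.Base64

fun main() {
    val algo = PasswordSecurity.Algorithm.PBKDF2WithHmacSHA512

    // Hash with a freshly generated random salt; the result encodes hash + salt in Base64
    val stored = hashPassword("correct horse battery staple", algorithm = algo)

    // To verify a candidate password, recover the salt from the stored value and
    // re-hash the candidate with it: the encodings match iff the passwords match
    val (_, salt) = decodePasswordHash(stored, algo)
    val candidate = hashPassword(
        "correct horse battery staple",
        salt = Base64.getEncoder().encodeToString(salt),
        algorithm = algo
    )
    println(stored == candidate) // true
}
```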
@@ -1,9 +1,26 @@
package net.woggioni.rbcs.common

import net.woggioni.jwo.JWO
+import net.woggioni.jwo.Tuple2
+import java.io.IOException
+import java.net.InetAddress
+import java.net.ServerSocket
import java.net.URI
import java.net.URL
+import java.nio.file.Files
+import java.nio.file.Path
+import java.security.KeyStore
import java.security.MessageDigest
+import java.security.cert.CertPathValidator
+import java.security.cert.CertPathValidatorException
+import java.security.cert.CertificateException
+import java.security.cert.CertificateFactory
+import java.security.cert.PKIXParameters
+import java.security.cert.PKIXRevocationChecker
+import java.security.cert.X509Certificate
+import java.util.EnumSet
+import javax.net.ssl.TrustManagerFactory
+import javax.net.ssl.X509TrustManager

object RBCS {
    fun String.toUrl() : URL = URL.of(URI(this), null)
@@ -12,9 +29,27 @@ object RBCS {
    const val RBCS_PREFIX: String = "rbcs"
    const val XML_SCHEMA_NAMESPACE_URI = "http://www.w3.org/2001/XMLSchema-instance"

+    fun ByteArray.toInt(index : Int = 0) : Long {
+        if(index + 4 > size) throw IllegalArgumentException("Not enough bytes to decode a 32 bits integer")
+        var value : Long = 0
+        for (b in index until index + 4) {
+            value = (value shl 8) + (get(b).toInt() and 0xFF)
+        }
+        return value
+    }
+
+    fun ByteArray.toLong(index : Int = 0) : Long {
+        if(index + 8 > size) throw IllegalArgumentException("Not enough bytes to decode a 64 bits long integer")
+        var value : Long = 0
+        for (b in index until index + 8) {
+            value = (value shl 8) + (get(b).toInt() and 0xFF)
+        }
+        return value
+    }
+
    fun digest(
        data: ByteArray,
-        md: MessageDigest = MessageDigest.getInstance("MD5")
+        md: MessageDigest
    ): ByteArray {
        md.update(data)
        return md.digest()
@@ -22,8 +57,104 @@ object RBCS {

    fun digestString(
        data: ByteArray,
-        md: MessageDigest = MessageDigest.getInstance("MD5")
+        md: MessageDigest
    ): String {
        return JWO.bytesToHex(digest(data, md))
    }
+
+    fun processCacheKey(key: String, digestAlgorithm: String?) = digestAlgorithm
+        ?.let(MessageDigest::getInstance)
+        ?.let { md ->
+            digest(key.toByteArray(), md)
+        } ?: key.toByteArray(Charsets.UTF_8)
+
+    fun Long.toIntOrNull(): Int? {
+        return if (this >= Int.MIN_VALUE && this <= Int.MAX_VALUE) {
+            toInt()
+        } else {
+            null
+        }
+    }
+
+    fun getFreePort(): Int {
+        var count = 0
+        while (count < 50) {
+            try {
+                ServerSocket(0, 50, InetAddress.getLocalHost()).use { serverSocket ->
+                    val candidate = serverSocket.localPort
+                    if (candidate > 0) {
+                        return candidate
+                    } else {
+                        throw RuntimeException("Got invalid port number: $candidate")
+                    }
+                }
+            } catch (ignored: IOException) {
+                ++count
+            }
+        }
+        throw RuntimeException("Error trying to find an open port")
+    }
+
+    fun loadKeystore(file: Path, password: String?): KeyStore {
+        val ext = JWO.splitExtension(file)
+            .map(Tuple2<String, String>::get_2)
+            .orElseThrow {
+                IllegalArgumentException(
+                    "Keystore file '${file}' must have .jks, .p12, .pfx extension"
+                )
+            }
+        val keystore = when (ext.substring(1).lowercase()) {
+            "jks" -> KeyStore.getInstance("JKS")
+            "p12", "pfx" -> KeyStore.getInstance("PKCS12")
+            else -> throw IllegalArgumentException(
+                "Keystore file '${file}' must have .jks, .p12, .pfx extension"
+            )
+        }
+        Files.newInputStream(file).use {
+            keystore.load(it, password?.let(String::toCharArray))
+        }
+        return keystore
+    }
+
+    fun getTrustManager(trustStore: KeyStore?, certificateRevocationEnabled: Boolean): X509TrustManager {
+        return if (trustStore != null) {
+            val certificateFactory = CertificateFactory.getInstance("X.509")
+            val validator = CertPathValidator.getInstance("PKIX").apply {
+                val rc = revocationChecker as PKIXRevocationChecker
+                rc.options = EnumSet.of(
+                    PKIXRevocationChecker.Option.NO_FALLBACK
+                )
+            }
+            val params = PKIXParameters(trustStore).apply {
+                isRevocationEnabled = certificateRevocationEnabled
+            }
+            object : X509TrustManager {
+                override fun checkClientTrusted(chain: Array<out X509Certificate>, authType: String) {
+                    val clientCertificateChain = certificateFactory.generateCertPath(chain.toList())
+                    try {
+                        validator.validate(clientCertificateChain, params)
+                    } catch (ex: CertPathValidatorException) {
+                        throw CertificateException(ex)
+                    }
+                }
+
+                override fun checkServerTrusted(chain: Array<out X509Certificate>, authType: String) {
+                    throw NotImplementedError()
+                }
+
+                private val acceptedIssuers = trustStore.aliases().asSequence()
+                    .filter(trustStore::isCertificateEntry)
+                    .map(trustStore::getCertificate)
+                    .map { it as X509Certificate }
+                    .toList()
+                    .toTypedArray()
+
+                override fun getAcceptedIssuers() = acceptedIssuers
+            }
+        } else {
+            val trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm())
+            trustManagerFactory.trustManagers.asSequence().filter { it is X509TrustManager }
+                .single() as X509TrustManager
+        }
+    }
}
@@ -1,7 +1,6 @@
package net.woggioni.rbcs.common

import net.woggioni.jwo.JWO
-import org.slf4j.LoggerFactory
import org.slf4j.event.Level
import org.w3c.dom.Document
import org.w3c.dom.Element
@@ -79,7 +78,7 @@ class Xml(val doc: Document, val element: Element) {
    class ErrorHandler(private val fileURL: URL) : ErrHandler {

        companion object {
-            private val log = LoggerFactory.getLogger(ErrorHandler::class.java)
+            private val log = createLogger<ErrorHandler>()
        }

        override fun warning(ex: SAXParseException)= err(ex, Level.WARN)
@@ -0,0 +1,38 @@
package net.woggioni.rbcs.common

import net.woggioni.rbcs.common.PasswordSecurity.decodePasswordHash
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Test
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.EnumSource
import java.security.Provider
import java.security.Security
import java.util.Base64


class PasswordHashingTest {

    @EnumSource(PasswordSecurity.Algorithm::class)
    @ParameterizedTest
    fun test(algo: PasswordSecurity.Algorithm) {
        val password = "password"
        val encoded = hashPassword(password, algorithm = algo)
        val (_, salt) = decodePasswordHash(encoded, algo)
        Assertions.assertEquals(encoded,
            hashPassword(password, salt = salt.let(Base64.getEncoder()::encodeToString), algorithm = algo)
        )
    }

    @Test
    fun listAvailableAlgorithms() {
        Security.getProviders().asSequence()
            .flatMap { provider: Provider -> provider.services.asSequence() }
            .filter { service: Provider.Service -> "SecretKeyFactory" == service.type }
            .map(Provider.Service::getAlgorithm)
            .forEach {
                println(it)
            }
    }
}
45
rbcs-server-memcache/README.md
Normal file
@@ -0,0 +1,45 @@
# RBCS Memcache plugin

This plugin allows RBCS to store and retrieve data from a memcache cluster.
Memcache server selection is based solely on the hash of the key;
deflate compression is also supported and is performed by the RBCS server.

## Quickstart

The plugin can be built with

```bash
./gradlew rbcs-server-memcache:bundle
```

which creates a `.tar` archive in the `build/distributions` folder.
The archive is meant to be extracted inside the RBCS server's `plugins` directory,
for example as in the sketch below.
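The following install sketch is not part of the README itself; the installation path and the archive name are illustrative, since both depend on where the server is deployed and on the project version:

```bash
# Illustrative paths: adjust RBCS_HOME and the archive path/name to your setup
RBCS_HOME=/opt/rbcs
./gradlew rbcs-server-memcache:bundle
mkdir -p "$RBCS_HOME/plugins"
tar -xf rbcs-server-memcache/build/distributions/rbcs-server-memcache-*.tar -C "$RBCS_HOME/plugins"
```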
## Configuration

The plugin is enabled by setting the `xs:type` attribute of the `cache` element
to `memcacheCacheType`.

The plugin currently supports the following configuration attributes:
- `max-age`: the amount of time cache entries will be retained on memcache
- `digest`: digest algorithm to apply to the key before submission
  to memcache (optional, no digest is applied if omitted)
- `compression-mode`: compression algorithm to apply to cache values;
  currently only `deflate` is supported (optional, compression is disabled if omitted)
- `compression-level`: compression level to use; deflate supports levels from 1 to 9,
  where 1 is the fastest at the expense of compression ratio (optional, 6 is used if omitted)

```xml
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
             xmlns:rbcs="urn:net.woggioni.rbcs.server"
             xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
             xs:schemaLocation="urn:net.woggioni.rbcs.server.memcache jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd"
>
    ...
    <cache xs:type="rbcs-memcache:memcacheCacheType"
           max-age="P7D"
           digest="SHA-256"
           compression-mode="deflate"
           compression-level="6">
        <server host="127.0.0.1" port="11211" max-connections="256"/>
        <server host="127.0.0.1" port="11212" max-connections="256"/>
    </cache>
    ...
```
@@ -34,6 +34,7 @@ dependencies {
    implementation catalog.jwo
    implementation catalog.slf4j.api
    implementation catalog.netty.common
+    implementation catalog.netty.handler
    implementation catalog.netty.codec.memcache

    bundle catalog.netty.codec.memcache
@@ -11,6 +11,7 @@ module net.woggioni.rbcs.server.memcache {
    requires io.netty.codec.memcache;
    requires io.netty.common;
    requires io.netty.buffer;
+    requires io.netty.handler;
    requires org.slf4j;

    provides CacheProvider with net.woggioni.rbcs.server.memcache.MemcacheCacheProvider;
@@ -1,23 +0,0 @@
-package net.woggioni.rbcs.server.memcache
-
-import io.netty.buffer.ByteBuf
-import net.woggioni.rbcs.api.Cache
-import net.woggioni.rbcs.server.memcache.client.MemcacheClient
-import java.nio.channels.ReadableByteChannel
-import java.util.concurrent.CompletableFuture
-
-class MemcacheCache(private val cfg : MemcacheCacheConfiguration) : Cache {
-    private val memcacheClient = MemcacheClient(cfg)
-
-    override fun get(key: String): CompletableFuture<ReadableByteChannel?> {
-        return memcacheClient.get(key)
-    }
-
-    override fun put(key: String, content: ByteBuf): CompletableFuture<Void> {
-        return memcacheClient.put(key, content, cfg.maxAge)
-    }
-
-    override fun close() {
-        memcacheClient.close()
-    }
-}
@@ -1,23 +1,35 @@
package net.woggioni.rbcs.server.memcache

+import io.netty.channel.ChannelFactory
+import io.netty.channel.EventLoopGroup
+import io.netty.channel.pool.FixedChannelPool
+import io.netty.channel.socket.DatagramChannel
+import io.netty.channel.socket.SocketChannel
+import net.woggioni.rbcs.api.CacheHandler
+import net.woggioni.rbcs.api.CacheHandlerFactory
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.common.HostAndPort
+import net.woggioni.rbcs.common.createLogger
+import net.woggioni.rbcs.server.memcache.client.MemcacheClient
import java.time.Duration
+import java.util.concurrent.CompletableFuture
+import java.util.concurrent.ConcurrentHashMap
+import java.util.concurrent.atomic.AtomicInteger
+import java.util.concurrent.atomic.AtomicReference

data class MemcacheCacheConfiguration(
    val servers: List<Server>,
    val maxAge: Duration = Duration.ofDays(1),
-    val maxSize: Int = 0x100000,
    val digestAlgorithm: String? = null,
    val compressionMode: CompressionMode? = null,
+    val compressionLevel: Int,
) : Configuration.Cache {

-    enum class CompressionMode {
-        /**
-         * Gzip mode
-         */
-        GZIP,
-
+    companion object {
+        private val log = createLogger<MemcacheCacheConfiguration>()
+    }
+
+    enum class CompressionMode {
        /**
         * Deflate mode
         */
@@ -25,13 +37,64 @@ data class MemcacheCacheConfiguration(
    }

    data class Server(
-        val endpoint : HostAndPort,
-        val connectionTimeoutMillis : Int?,
-        val maxConnections : Int
+        val endpoint: HostAndPort,
+        val connectionTimeoutMillis: Int?,
+        val maxConnections: Int
    )

-    override fun materialize() = MemcacheCache(this)
+    override fun materialize() = object : CacheHandlerFactory {
+
+        private val connectionPoolMap = ConcurrentHashMap<HostAndPort, FixedChannelPool>()
+
+        override fun newHandler(
+            cfg : Configuration,
+            eventLoop: EventLoopGroup,
+            socketChannelFactory: ChannelFactory<SocketChannel>,
+            datagramChannelFactory: ChannelFactory<DatagramChannel>,
+        ): CacheHandler {
+            return MemcacheCacheHandler(
+                MemcacheClient(
+                    this@MemcacheCacheConfiguration.servers,
+                    cfg.connection.chunkSize,
+                    eventLoop,
+                    socketChannelFactory,
+                    connectionPoolMap
+                ),
+                digestAlgorithm,
+                compressionMode != null,
+                compressionLevel,
+                cfg.connection.chunkSize,
+                maxAge
+            )
+        }
+
+        override fun asyncClose() = object : CompletableFuture<Void>() {
+            init {
+                val failure = AtomicReference<Throwable>(null)
+                val pools = connectionPoolMap.values.toList()
+                val npools = pools.size
+                val finished = AtomicInteger(0)
+                if (pools.isEmpty()) {
+                    complete(null)
+                } else {
+                    pools.forEach { pool ->
+                        pool.closeAsync().addListener {
+                            if (!it.isSuccess) {
+                                failure.compareAndSet(null, it.cause())
+                            }
+                            if (finished.incrementAndGet() == npools) {
+                                when (val ex = failure.get()) {
+                                    null -> complete(null)
+                                    else -> completeExceptionally(ex)
+                                }
+                            }
+                        }
+                    }
+                }
+            }
+        }
+    }

    override fun getNamespaceURI() = "urn:net.woggioni.rbcs.server.memcache"
@@ -0,0 +1,441 @@
|
|||||||
|
package net.woggioni.rbcs.server.memcache
|
||||||
|
|
||||||
|
import io.netty.buffer.ByteBuf
|
||||||
|
import io.netty.buffer.ByteBufAllocator
|
||||||
|
import io.netty.buffer.CompositeByteBuf
|
||||||
|
import io.netty.channel.ChannelHandlerContext
|
||||||
|
import io.netty.handler.codec.memcache.DefaultLastMemcacheContent
|
||||||
|
import io.netty.handler.codec.memcache.DefaultMemcacheContent
|
||||||
|
import io.netty.handler.codec.memcache.LastMemcacheContent
|
||||||
|
import io.netty.handler.codec.memcache.MemcacheContent
|
||||||
|
import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
|
||||||
|
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
|
||||||
|
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponseStatus
|
||||||
|
import io.netty.handler.codec.memcache.binary.DefaultBinaryMemcacheRequest
|
||||||
|
import net.woggioni.rbcs.api.CacheHandler
|
||||||
|
import net.woggioni.rbcs.api.CacheValueMetadata
|
||||||
|
import net.woggioni.rbcs.api.exception.ContentTooLargeException
|
||||||
|
import net.woggioni.rbcs.api.message.CacheMessage
|
||||||
|
import net.woggioni.rbcs.api.message.CacheMessage.CacheContent
|
||||||
|
import net.woggioni.rbcs.api.message.CacheMessage.CacheGetRequest
|
||||||
|
import net.woggioni.rbcs.api.message.CacheMessage.CachePutRequest
|
||||||
|
import net.woggioni.rbcs.api.message.CacheMessage.CachePutResponse
|
||||||
|
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueFoundResponse
|
||||||
|
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueNotFoundResponse
|
||||||
|
import net.woggioni.rbcs.api.message.CacheMessage.LastCacheContent
|
||||||
|
import net.woggioni.rbcs.common.ByteBufInputStream
|
||||||
|
import net.woggioni.rbcs.common.ByteBufOutputStream
|
||||||
|
import net.woggioni.rbcs.common.RBCS.processCacheKey
|
||||||
|
import net.woggioni.rbcs.common.RBCS.toIntOrNull
|
||||||
|
import net.woggioni.rbcs.common.createLogger
|
||||||
|
import net.woggioni.rbcs.common.debug
|
||||||
|
import net.woggioni.rbcs.common.extractChunk
|
||||||
|
import net.woggioni.rbcs.common.trace
|
||||||
|
import net.woggioni.rbcs.server.memcache.client.MemcacheClient
|
||||||
|
import net.woggioni.rbcs.server.memcache.client.MemcacheRequestController
|
||||||
|
import net.woggioni.rbcs.server.memcache.client.MemcacheResponseHandler
|
||||||
|
import java.io.ByteArrayOutputStream
|
||||||
|
import java.io.ObjectInputStream
|
||||||
|
import java.io.ObjectOutputStream
|
||||||
|
import java.nio.ByteBuffer
|
||||||
|
import java.nio.channels.Channels
|
||||||
|
import java.nio.channels.FileChannel
|
||||||
|
import java.nio.channels.ReadableByteChannel
|
||||||
|
import java.nio.file.Files
|
||||||
|
import java.nio.file.StandardOpenOption
|
||||||
|
import java.time.Duration
|
||||||
|
import java.time.Instant
|
||||||
|
import java.util.concurrent.CompletableFuture
|
||||||
|
import java.util.zip.Deflater
|
||||||
|
import java.util.zip.DeflaterOutputStream
|
||||||
|
import java.util.zip.InflaterOutputStream
|
||||||
|
import io.netty.channel.Channel as NettyChannel
|
||||||
|
|
||||||
|
class MemcacheCacheHandler(
|
||||||
|
private val client: MemcacheClient,
|
||||||
|
private val digestAlgorithm: String?,
|
||||||
|
private val compressionEnabled: Boolean,
|
||||||
|
private val compressionLevel: Int,
|
||||||
|
private val chunkSize: Int,
|
||||||
|
private val maxAge: Duration
|
||||||
|
) : CacheHandler() {
|
||||||
|
companion object {
|
||||||
|
private val log = createLogger<MemcacheCacheHandler>()
|
||||||
|
|
||||||
|
private fun encodeExpiry(expiry: Duration): Int {
|
||||||
|
val expirySeconds = expiry.toSeconds()
|
||||||
|
return expirySeconds.toInt().takeIf { it.toLong() == expirySeconds }
|
||||||
|
?: Instant.ofEpochSecond(expirySeconds).epochSecond.toInt()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private interface InProgressRequest {
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
private inner class InProgressGetRequest(
|
||||||
|
val key: String,
|
||||||
|
private val ctx: ChannelHandlerContext
|
||||||
|
) : InProgressRequest {
|
||||||
|
private val acc = ctx.alloc().compositeBuffer()
|
||||||
|
private val chunk = ctx.alloc().compositeBuffer()
|
||||||
|
private val outputStream = ByteBufOutputStream(chunk).let {
|
||||||
|
if (compressionEnabled) {
|
||||||
|
InflaterOutputStream(it)
|
||||||
|
} else {
|
||||||
|
it
|
||||||
|
}
|
||||||
|
}
|
||||||
|
private var responseSent = false
|
||||||
|
private var metadataSize: Int? = null
|
||||||
|
|
||||||
|
fun write(buf: ByteBuf) {
|
||||||
|
acc.addComponent(true, buf.retain())
|
||||||
|
if (metadataSize == null && acc.readableBytes() >= Int.SIZE_BYTES) {
|
||||||
|
metadataSize = acc.readInt()
|
||||||
|
}
|
||||||
|
metadataSize
|
||||||
|
?.takeIf { !responseSent }
|
||||||
|
?.takeIf { acc.readableBytes() >= it }
|
||||||
|
?.let { mSize ->
|
||||||
|
val metadata = ObjectInputStream(ByteBufInputStream(acc)).use {
|
||||||
|
acc.retain()
|
||||||
|
it.readObject() as CacheValueMetadata
|
||||||
|
}
|
||||||
|
log.trace(ctx) {
|
||||||
|
"Sending response from cache"
|
||||||
|
}
|
||||||
|
sendMessageAndFlush(ctx, CacheValueFoundResponse(key, metadata))
|
||||||
|
responseSent = true
|
||||||
|
acc.readerIndex(Int.SIZE_BYTES + mSize)
|
||||||
|
}
|
||||||
|
if (responseSent) {
|
||||||
|
acc.readBytes(outputStream, acc.readableBytes())
|
||||||
|
if (acc.readableBytes() >= chunkSize) {
|
||||||
|
flush(false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private fun flush(last: Boolean) {
|
||||||
|
val toSend = extractChunk(chunk, ctx.alloc())
|
||||||
|
val msg = if (last) {
|
||||||
|
log.trace(ctx) {
|
||||||
|
"Sending last chunk to client"
|
||||||
|
}
|
||||||
|
LastCacheContent(toSend)
|
||||||
|
} else {
|
||||||
|
log.trace(ctx) {
|
||||||
|
"Sending chunk to client"
|
||||||
|
}
|
||||||
|
CacheContent(toSend)
|
||||||
|
}
|
||||||
|
sendMessageAndFlush(ctx, msg)
|
||||||
|
}
|
||||||
|
|
||||||
|
fun commit() {
|
||||||
|
acc.release()
|
||||||
|
chunk.retain()
|
||||||
|
outputStream.close()
|
||||||
|
flush(true)
|
||||||
|
chunk.release()
|
||||||
|
}
|
||||||
|
|
||||||
|
fun rollback() {
|
||||||
|
acc.release()
|
||||||
|
outputStream.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private inner class InProgressPutRequest(
|
||||||
|
private val ch: NettyChannel,
|
||||||
|
metadata: CacheValueMetadata,
|
||||||
|
val digest: ByteBuf,
|
||||||
|
val requestController: CompletableFuture<MemcacheRequestController>,
|
||||||
|
private val alloc: ByteBufAllocator
|
||||||
|
) : InProgressRequest {
|
||||||
|
private var totalSize = 0
|
||||||
|
private var tmpFile: FileChannel? = null
|
||||||
|
private val accumulator = alloc.compositeBuffer()
|
||||||
|
private val stream = ByteBufOutputStream(accumulator).let {
|
||||||
|
if (compressionEnabled) {
|
||||||
|
DeflaterOutputStream(it, Deflater(compressionLevel))
|
||||||
|
} else {
|
||||||
|
it
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
init {
|
||||||
|
ByteArrayOutputStream().let { baos ->
|
||||||
|
ObjectOutputStream(baos).use {
|
||||||
|
it.writeObject(metadata)
|
||||||
|
}
|
||||||
|
val serializedBytes = baos.toByteArray()
|
||||||
|
accumulator.writeInt(serializedBytes.size)
|
||||||
|
accumulator.writeBytes(serializedBytes)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fun write(buf: ByteBuf) {
|
||||||
|
totalSize += buf.readableBytes()
|
||||||
|
buf.readBytes(stream, buf.readableBytes())
|
||||||
|
tmpFile?.let {
|
||||||
|
flushToDisk(it, accumulator)
|
||||||
|
}
|
||||||
|
if (accumulator.readableBytes() > 0x100000) {
|
||||||
|
log.debug(ch) {
|
||||||
|
"Entry is too big, buffering it into a file"
|
||||||
|
}
|
||||||
|
val opts = arrayOf(
|
||||||
|
StandardOpenOption.DELETE_ON_CLOSE,
|
||||||
|
StandardOpenOption.READ,
|
||||||
|
StandardOpenOption.WRITE,
|
||||||
|
StandardOpenOption.TRUNCATE_EXISTING
|
||||||
|
)
|
||||||
|
FileChannel.open(Files.createTempFile("rbcs-memcache", ".tmp"), *opts).let { fc ->
|
||||||
|
tmpFile = fc
|
||||||
|
flushToDisk(fc, accumulator)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private fun flushToDisk(fc: FileChannel, buf: CompositeByteBuf) {
|
||||||
|
val chunk = extractChunk(buf, alloc)
|
||||||
|
fc.write(chunk.nioBuffer())
|
||||||
|
chunk.release()
|
||||||
|
}
|
||||||
|
|
||||||
|
fun commit(): Pair<Int, ReadableByteChannel> {
|
||||||
|
digest.release()
|
||||||
|
accumulator.retain()
|
||||||
|
stream.close()
|
||||||
|
val fileChannel = tmpFile
|
||||||
|
return if (fileChannel != null) {
|
||||||
|
flushToDisk(fileChannel, accumulator)
|
||||||
|
accumulator.release()
|
||||||
|
fileChannel.position(0)
|
||||||
|
val fileSize = fileChannel.size().toIntOrNull() ?: let {
|
||||||
|
fileChannel.close()
|
||||||
|
throw ContentTooLargeException("Request body is too large", null)
|
||||||
|
}
|
||||||
|
fileSize to fileChannel
|
||||||
|
} else {
|
||||||
|
accumulator.readableBytes() to Channels.newChannel(ByteBufInputStream(accumulator))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fun rollback() {
|
||||||
|
stream.close()
|
||||||
|
digest.release()
|
||||||
|
tmpFile?.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private var inProgressRequest: InProgressRequest? = null
|
||||||
|
|
||||||
|
override fun channelRead0(ctx: ChannelHandlerContext, msg: CacheMessage) {
|
||||||
|
when (msg) {
|
||||||
|
is CacheGetRequest -> handleGetRequest(ctx, msg)
|
||||||
|
is CachePutRequest -> handlePutRequest(ctx, msg)
|
||||||
|
is LastCacheContent -> handleLastCacheContent(ctx, msg)
|
||||||
|
is CacheContent -> handleCacheContent(ctx, msg)
|
||||||
|
else -> ctx.fireChannelRead(msg)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private fun handleGetRequest(ctx: ChannelHandlerContext, msg: CacheGetRequest) {
|
||||||
|
log.debug(ctx) {
|
||||||
|
"Fetching ${msg.key} from memcache"
|
||||||
|
}
|
||||||
|
val key = ctx.alloc().buffer().also {
|
||||||
|
it.writeBytes(processCacheKey(msg.key, digestAlgorithm))
|
||||||
|
}
|
||||||
|
val responseHandler = object : MemcacheResponseHandler {
|
||||||
|
override fun responseReceived(response: BinaryMemcacheResponse) {
|
||||||
|
val status = response.status()
|
||||||
|
when (status) {
|
||||||
|
BinaryMemcacheResponseStatus.SUCCESS -> {
|
||||||
|
log.debug(ctx) {
|
||||||
|
"Cache hit for key ${msg.key} on memcache"
|
||||||
|
}
|
||||||
|
inProgressRequest = InProgressGetRequest(msg.key, ctx)
|
||||||
|
}
|
||||||
|
|
||||||
|
BinaryMemcacheResponseStatus.KEY_ENOENT -> {
|
||||||
|
log.debug(ctx) {
|
||||||
|
"Cache miss for key ${msg.key} on memcache"
|
||||||
|
}
|
||||||
|
sendMessageAndFlush(ctx, CacheValueNotFoundResponse())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun contentReceived(content: MemcacheContent) {
|
||||||
|
log.trace(ctx) {
|
||||||
|
"${if (content is LastMemcacheContent) "Last chunk" else "Chunk"} of ${
|
||||||
|
content.content().readableBytes()
|
||||||
|
} bytes received from memcache for key ${msg.key}"
|
||||||
|
}
|
||||||
|
(inProgressRequest as? InProgressGetRequest)?.let { inProgressGetRequest ->
|
||||||
|
inProgressGetRequest.write(content.content())
|
||||||
|
if (content is LastMemcacheContent) {
|
||||||
|
inProgressRequest = null
|
||||||
|
inProgressGetRequest.commit()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun exceptionCaught(ex: Throwable) {
|
||||||
|
(inProgressRequest as? InProgressGetRequest).let { inProgressGetRequest ->
|
||||||
|
inProgressGetRequest?.let {
|
||||||
|
inProgressRequest = null
|
||||||
|
it.rollback()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
client.sendRequest(key.retainedDuplicate(), responseHandler).thenAccept { requestHandle ->
|
||||||
|
log.trace(ctx) {
|
||||||
|
"Sending GET request for key ${msg.key} to memcache"
|
||||||
|
}
|
||||||
|
val request = DefaultBinaryMemcacheRequest(key).apply {
|
||||||
|
setOpcode(BinaryMemcacheOpcodes.GET)
|
||||||
|
}
|
||||||
|
requestHandle.sendRequest(request)
|
||||||
|
requestHandle.sendContent(LastMemcacheContent.EMPTY_LAST_CONTENT)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private fun handlePutRequest(ctx: ChannelHandlerContext, msg: CachePutRequest) {
|
||||||
|
val key = ctx.alloc().buffer().also {
|
||||||
|
it.writeBytes(processCacheKey(msg.key, digestAlgorithm))
|
||||||
|
}
|
||||||
|
val responseHandler = object : MemcacheResponseHandler {
|
||||||
|
override fun responseReceived(response: BinaryMemcacheResponse) {
|
||||||
|
val status = response.status()
|
||||||
|
when (status) {
|
||||||
|
BinaryMemcacheResponseStatus.SUCCESS -> {
|
||||||
|
log.debug(ctx) {
|
||||||
|
"Inserted key ${msg.key} into memcache"
|
||||||
|
}
|
||||||
|
sendMessageAndFlush(ctx, CachePutResponse(msg.key))
|
||||||
|
}
|
||||||
|
|
||||||
|
else -> this@MemcacheCacheHandler.exceptionCaught(ctx, MemcacheException(status))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun contentReceived(content: MemcacheContent) {}
|
||||||
|
|
||||||
|
override fun exceptionCaught(ex: Throwable) {
|
||||||
|
this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
val requestController = client.sendRequest(key.retainedDuplicate(), responseHandler).whenComplete { _, ex ->
|
||||||
|
ex?.let {
|
||||||
|
this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
inProgressRequest = InProgressPutRequest(ctx.channel(), msg.metadata, key, requestController, ctx.alloc())
|
||||||
|
}
|
||||||
|
|
||||||
|
private fun handleCacheContent(ctx: ChannelHandlerContext, msg: CacheContent) {
|
||||||
|
val request = inProgressRequest
|
||||||
|
when (request) {
|
||||||
|
is InProgressPutRequest -> {
|
||||||
|
log.trace(ctx) {
|
||||||
|
"Received chunk of ${msg.content().readableBytes()} bytes for memcache"
|
||||||
|
}
|
||||||
|
request.write(msg.content())
|
||||||
|
}
|
||||||
|
|
||||||
|
is InProgressGetRequest -> {
|
||||||
|
msg.release()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private fun handleLastCacheContent(ctx: ChannelHandlerContext, msg: LastCacheContent) {
|
||||||
|
val request = inProgressRequest
|
||||||
|
when (request) {
|
||||||
|
is InProgressPutRequest -> {
|
||||||
|
inProgressRequest = null
|
||||||
|
log.trace(ctx) {
|
||||||
|
"Received last chunk of ${msg.content().readableBytes()} bytes for memcache"
|
||||||
|
}
|
||||||
|
request.write(msg.content())
|
||||||
|
val key = request.digest.retainedDuplicate()
|
||||||
|
val (payloadSize, payloadSource) = request.commit()
|
||||||
|
val extras = ctx.alloc().buffer(8, 8)
|
||||||
|
extras.writeInt(0)
|
||||||
|
extras.writeInt(encodeExpiry(maxAge))
|
||||||
|
val totalBodyLength = request.digest.readableBytes() + extras.readableBytes() + payloadSize
|
||||||
|
log.trace(ctx) {
|
||||||
|
"Trying to send SET request to memcache"
|
||||||
|
}
|
||||||
|
request.requestController.whenComplete { requestController, ex ->
|
||||||
|
if (ex == null) {
|
||||||
|
log.trace(ctx) {
|
||||||
|
"Sending SET request to memcache"
|
||||||
|
}
|
||||||
|
requestController.sendRequest(DefaultBinaryMemcacheRequest().apply {
|
||||||
|
setOpcode(BinaryMemcacheOpcodes.SET)
|
||||||
|
setKey(key)
|
||||||
|
setExtras(extras)
|
||||||
|
setTotalBodyLength(totalBodyLength)
|
||||||
|
})
|
||||||
|
log.trace(ctx) {
|
||||||
|
"Sending request payload to memcache"
|
||||||
|
}
|
||||||
|
payloadSource.use { source ->
|
||||||
|
val bb = ByteBuffer.allocate(chunkSize)
|
||||||
|
while (true) {
|
||||||
|
val read = source.read(bb)
|
||||||
|
bb.limit()
|
||||||
|
if (read >= 0 && bb.position() < chunkSize && bb.hasRemaining()) {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
val chunk = ctx.alloc().buffer(chunkSize)
|
||||||
|
bb.flip()
|
||||||
|
chunk.writeBytes(bb)
|
||||||
|
bb.clear()
|
||||||
|
log.trace(ctx) {
|
||||||
|
"Sending ${chunk.readableBytes()} bytes chunk to memcache"
|
||||||
|
}
|
||||||
|
if (read < 0) {
|
||||||
|
requestController.sendContent(DefaultLastMemcacheContent(chunk))
|
||||||
|
break
|
||||||
|
} else {
|
||||||
|
requestController.sendContent(DefaultMemcacheContent(chunk))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
payloadSource.close()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
||||||
|
val request = inProgressRequest
|
||||||
|
when (request) {
|
||||||
|
is InProgressPutRequest -> {
|
||||||
|
inProgressRequest = null
|
||||||
|
request.requestController.thenAccept { controller ->
|
||||||
|
controller.exceptionCaught(cause)
|
||||||
|
}
|
||||||
|
request.rollback()
|
||||||
|
}
|
||||||
|
|
||||||
|
is InProgressGetRequest -> {
|
||||||
|
inProgressRequest = null
|
||||||
|
request.rollback()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
super.exceptionCaught(ctx, cause)
|
||||||
|
}
|
||||||
|
}
|
@@ -2,8 +2,8 @@ package net.woggioni.rbcs.server.memcache

import net.woggioni.rbcs.api.CacheProvider
import net.woggioni.rbcs.api.exception.ConfigurationException
-import net.woggioni.rbcs.common.RBCS
import net.woggioni.rbcs.common.HostAndPort
+import net.woggioni.rbcs.common.RBCS
import net.woggioni.rbcs.common.Xml
import net.woggioni.rbcs.common.Xml.Companion.asIterable
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
@@ -28,18 +28,16 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
        val maxAge = el.renderAttribute("max-age")
            ?.let(Duration::parse)
            ?: Duration.ofDays(1)
-        val maxSize = el.renderAttribute("max-size")
-            ?.let(String::toInt)
-            ?: 0x100000
+        val compressionLevel = el.renderAttribute("compression-level")
+            ?.let(Integer::decode)
+            ?: -1
        val compressionMode = el.renderAttribute("compression-mode")
            ?.let {
                when (it) {
-                    "gzip" -> MemcacheCacheConfiguration.CompressionMode.GZIP
                    "deflate" -> MemcacheCacheConfiguration.CompressionMode.DEFLATE
                    else -> MemcacheCacheConfiguration.CompressionMode.DEFLATE
                }
            }
-            ?: MemcacheCacheConfiguration.CompressionMode.DEFLATE
        val digestAlgorithm = el.renderAttribute("digest")
        for (child in el.asIterable()) {
            when (child.nodeName) {
@@ -60,9 +58,9 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
        return MemcacheCacheConfiguration(
            servers,
            maxAge,
-            maxSize,
            digestAlgorithm,
            compressionMode,
+            compressionLevel
        )
    }

@@ -70,7 +68,6 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
        val result = doc.createElement("cache")
        Xml.of(doc, result) {
            attr("xmlns:${xmlNamespacePrefix}", xmlNamespace, namespaceURI = "http://www.w3.org/2000/xmlns/")
-
            attr("xs:type", "${xmlNamespacePrefix}:$xmlType", RBCS.XML_SCHEMA_NAMESPACE_URI)
            for (server in servers) {
                node("server") {
@@ -83,18 +80,17 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
                }
            }
            attr("max-age", maxAge.toString())
-            attr("max-size", maxSize.toString())
            digestAlgorithm?.let { digestAlgorithm ->
                attr("digest", digestAlgorithm)
            }
            compressionMode?.let { compressionMode ->
                attr(
                    "compression-mode", when (compressionMode) {
-                        MemcacheCacheConfiguration.CompressionMode.GZIP -> "gzip"
                        MemcacheCacheConfiguration.CompressionMode.DEFLATE -> "deflate"
                    }
                )
            }
+            attr("compression-level", compressionLevel.toString())
        }
        result
    }
@@ -3,68 +3,52 @@ package net.woggioni.rbcs.server.memcache.client
|
|||||||
|
|
||||||
import io.netty.bootstrap.Bootstrap
|
import io.netty.bootstrap.Bootstrap
|
||||||
import io.netty.buffer.ByteBuf
|
import io.netty.buffer.ByteBuf
|
||||||
import io.netty.buffer.Unpooled
|
|
||||||
import io.netty.channel.Channel
|
import io.netty.channel.Channel
|
||||||
|
import io.netty.channel.ChannelFactory
|
||||||
|
import io.netty.channel.ChannelFutureListener
|
||||||
import io.netty.channel.ChannelHandlerContext
|
import io.netty.channel.ChannelHandlerContext
|
||||||
import io.netty.channel.ChannelOption
|
import io.netty.channel.ChannelOption
|
||||||
import io.netty.channel.ChannelPipeline
|
import io.netty.channel.ChannelPipeline
|
||||||
|
import io.netty.channel.EventLoopGroup
|
||||||
import io.netty.channel.SimpleChannelInboundHandler
|
import io.netty.channel.SimpleChannelInboundHandler
|
||||||
import io.netty.channel.nio.NioEventLoopGroup
|
|
||||||
import io.netty.channel.pool.AbstractChannelPoolHandler
|
import io.netty.channel.pool.AbstractChannelPoolHandler
|
||||||
import io.netty.channel.pool.ChannelPool
|
|
||||||
import io.netty.channel.pool.FixedChannelPool
|
import io.netty.channel.pool.FixedChannelPool
|
||||||
import io.netty.channel.socket.nio.NioSocketChannel
|
import io.netty.channel.socket.SocketChannel
|
||||||
import io.netty.handler.codec.DecoderException
|
import io.netty.handler.codec.memcache.LastMemcacheContent
|
||||||
|
import io.netty.handler.codec.memcache.MemcacheContent
|
||||||
|
import io.netty.handler.codec.memcache.MemcacheObject
|
||||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheClientCodec
|
import io.netty.handler.codec.memcache.binary.BinaryMemcacheClientCodec
|
||||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheObjectAggregator
|
import io.netty.handler.codec.memcache.binary.BinaryMemcacheRequest
|
||||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
|
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
|
||||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponseStatus
|
|
||||||
import io.netty.handler.codec.memcache.binary.DefaultFullBinaryMemcacheRequest
|
|
||||||
import io.netty.handler.codec.memcache.binary.FullBinaryMemcacheRequest
|
|
||||||
import io.netty.handler.codec.memcache.binary.FullBinaryMemcacheResponse
|
|
||||||
import io.netty.util.concurrent.GenericFutureListener
|
import io.netty.util.concurrent.GenericFutureListener
|
||||||
import net.woggioni.rbcs.common.ByteBufInputStream
|
|
||||||
import net.woggioni.rbcs.common.ByteBufOutputStream
|
|
||||||
import net.woggioni.rbcs.common.RBCS.digest
|
|
||||||
import net.woggioni.rbcs.common.HostAndPort
|
import net.woggioni.rbcs.common.HostAndPort
|
||||||
import net.woggioni.rbcs.common.contextLogger
|
import net.woggioni.rbcs.common.createLogger
|
||||||
|
import net.woggioni.rbcs.common.trace
|
||||||
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration
|
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration
|
||||||
import net.woggioni.rbcs.server.memcache.MemcacheException
|
import net.woggioni.rbcs.server.memcache.MemcacheCacheHandler
|
||||||
import net.woggioni.jwo.JWO
|
import java.io.IOException
|
||||||
import java.net.InetSocketAddress
|
import java.net.InetSocketAddress
|
||||||
import java.nio.channels.Channels
|
|
||||||
import java.nio.channels.ReadableByteChannel
|
|
||||||
import java.security.MessageDigest
|
|
||||||
import java.time.Duration
|
|
||||||
import java.time.Instant
|
|
||||||
import java.util.concurrent.CompletableFuture
|
import java.util.concurrent.CompletableFuture
|
||||||
import java.util.concurrent.ConcurrentHashMap
|
import java.util.concurrent.ConcurrentHashMap
|
||||||
import java.util.zip.Deflater
|
|
||||||
import java.util.zip.DeflaterOutputStream
|
|
||||||
import java.util.zip.GZIPInputStream
|
|
||||||
import java.util.zip.GZIPOutputStream
|
|
||||||
import java.util.zip.InflaterInputStream
|
|
||||||
import io.netty.util.concurrent.Future as NettyFuture
|
import io.netty.util.concurrent.Future as NettyFuture
|
||||||
|
|
||||||
|
|
||||||
class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseable {
|
class MemcacheClient(
|
||||||
|
private val servers: List<MemcacheCacheConfiguration.Server>,
|
||||||
|
private val chunkSize : Int,
|
||||||
|
private val group: EventLoopGroup,
|
||||||
|
private val channelFactory: ChannelFactory<SocketChannel>,
|
||||||
|
private val connectionPool: ConcurrentHashMap<HostAndPort, FixedChannelPool>
|
||||||
|
) : AutoCloseable {
|
||||||
|
|
||||||
private companion object {
|
private companion object {
|
||||||
@JvmStatic
|
private val log = createLogger<MemcacheCacheHandler>()
|
||||||
private val log = contextLogger()
|
|
||||||
}
|
|
||||||
|
|
||||||
private val group: NioEventLoopGroup
|
|
||||||
private val connectionPool: MutableMap<HostAndPort, ChannelPool> = ConcurrentHashMap()
|
|
||||||
|
|
||||||
init {
|
|
||||||
group = NioEventLoopGroup()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private fun newConnectionPool(server: MemcacheCacheConfiguration.Server): FixedChannelPool {
|
private fun newConnectionPool(server: MemcacheCacheConfiguration.Server): FixedChannelPool {
|
||||||
val bootstrap = Bootstrap().apply {
|
val bootstrap = Bootstrap().apply {
|
||||||
group(group)
|
group(group)
|
||||||
channel(NioSocketChannel::class.java)
|
channelFactory(channelFactory)
|
||||||
option(ChannelOption.SO_KEEPALIVE, true)
|
option(ChannelOption.SO_KEEPALIVE, true)
|
||||||
remoteAddress(InetSocketAddress(server.endpoint.host, server.endpoint.port))
|
remoteAddress(InetSocketAddress(server.endpoint.host, server.endpoint.port))
|
||||||
server.connectionTimeoutMillis?.let {
|
server.connectionTimeoutMillis?.let {
|
||||||
@@ -75,35 +59,33 @@ class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseabl
|
|||||||
|
|
||||||
override fun channelCreated(ch: Channel) {
|
override fun channelCreated(ch: Channel) {
|
||||||
val pipeline: ChannelPipeline = ch.pipeline()
|
val pipeline: ChannelPipeline = ch.pipeline()
|
||||||
pipeline.addLast(BinaryMemcacheClientCodec())
|
pipeline.addLast(BinaryMemcacheClientCodec(chunkSize, true))
|
||||||
pipeline.addLast(BinaryMemcacheObjectAggregator(cfg.maxSize))
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return FixedChannelPool(bootstrap, channelPoolHandler, server.maxConnections)
|
return FixedChannelPool(bootstrap, channelPoolHandler, server.maxConnections)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fun sendRequest(
|
||||||
private fun sendRequest(request: FullBinaryMemcacheRequest): CompletableFuture<FullBinaryMemcacheResponse> {
|
key: ByteBuf,
|
||||||
|
responseHandler: MemcacheResponseHandler
|
||||||
val server = cfg.servers.let { servers ->
|
): CompletableFuture<MemcacheRequestController> {
|
||||||
if (servers.size > 1) {
|
val server = if (servers.size > 1) {
|
||||||
val key = request.key().duplicate()
|
var checksum = 0
|
||||||
var checksum = 0
|
while (key.readableBytes() > 4) {
|
||||||
while (key.readableBytes() > 4) {
|
val byte = key.readInt()
|
||||||
val byte = key.readInt()
|
checksum = checksum xor byte
|
||||||
checksum = checksum xor byte
|
|
||||||
}
|
|
||||||
while (key.readableBytes() > 0) {
|
|
||||||
val byte = key.readByte()
|
|
||||||
checksum = checksum xor byte.toInt()
|
|
||||||
}
|
|
||||||
servers[checksum % servers.size]
|
|
||||||
} else {
|
|
||||||
servers.first()
|
|
||||||
}
|
}
|
||||||
|
while (key.readableBytes() > 0) {
|
||||||
|
val byte = key.readByte()
|
||||||
|
checksum = checksum xor byte.toInt()
|
||||||
|
}
|
||||||
|
servers[checksum % servers.size]
|
||||||
|
} else {
|
||||||
|
servers.first()
|
||||||
}
|
}
|
||||||
|
key.release()
|
||||||
|
|
||||||
val response = CompletableFuture<FullBinaryMemcacheResponse>()
|
val response = CompletableFuture<MemcacheRequestController>()
|
||||||
// Custom handler for processing responses
|
// Custom handler for processing responses
|
||||||
val pool = connectionPool.computeIfAbsent(server.endpoint) {
|
val pool = connectionPool.computeIfAbsent(server.endpoint) {
|
||||||
newConnectionPool(server)
|
newConnectionPool(server)
|
||||||
@@ -112,32 +94,92 @@ class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseabl
|
|||||||
override fun operationComplete(channelFuture: NettyFuture<Channel>) {
|
override fun operationComplete(channelFuture: NettyFuture<Channel>) {
|
||||||
if (channelFuture.isSuccess) {
|
if (channelFuture.isSuccess) {
|
||||||
val channel = channelFuture.now
|
val channel = channelFuture.now
|
||||||
|
var connectionClosedByTheRemoteServer = true
|
||||||
|
val closeCallback = {
|
||||||
|
if (connectionClosedByTheRemoteServer) {
|
||||||
|
val ex = IOException("The memcache server closed the connection")
|
||||||
|
val completed = response.completeExceptionally(ex)
|
||||||
|
if(!completed) responseHandler.exceptionCaught(ex)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
val closeListener = ChannelFutureListener {
|
||||||
|
closeCallback()
|
||||||
|
}
|
||||||
|
channel.closeFuture().addListener(closeListener)
|
||||||
val pipeline = channel.pipeline()
|
val pipeline = channel.pipeline()
|
||||||
channel.pipeline()
|
val handler = object : SimpleChannelInboundHandler<MemcacheObject>() {
|
||||||
.addLast("client-handler", object : SimpleChannelInboundHandler<FullBinaryMemcacheResponse>() {
|
|
||||||
override fun channelRead0(
|
|
||||||
ctx: ChannelHandlerContext,
|
|
||||||
msg: FullBinaryMemcacheResponse
|
|
||||||
) {
|
|
||||||
pipeline.removeLast()
|
|
||||||
pool.release(channel)
|
|
||||||
msg.touch("The method's caller must remember to release this")
|
|
||||||
response.complete(msg.retain())
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
override fun handlerAdded(ctx: ChannelHandlerContext) {
val ex = when (cause) {
channel.closeFuture().removeListener(closeListener)
is DecoderException -> cause.cause!!
}
else -> cause
override fun channelRead0(
ctx: ChannelHandlerContext,
msg: MemcacheObject
) {
when (msg) {
is BinaryMemcacheResponse -> {
responseHandler.responseReceived(msg)
}

is LastMemcacheContent -> {
responseHandler.contentReceived(msg)
pipeline.remove(this)
}

is MemcacheContent -> {
responseHandler.contentReceived(msg)
}
ctx.close()
pipeline.removeLast()
pool.release(channel)
response.completeExceptionally(ex)
}
})
}
request.touch()
channel.writeAndFlush(request)
override fun channelInactive(ctx: ChannelHandlerContext) {
closeCallback()
ctx.fireChannelInactive()
}

override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
connectionClosedByTheRemoteServer = false
ctx.close()
responseHandler.exceptionCaught(cause)
}
}

channel.pipeline().addLast(handler)
response.complete(object : MemcacheRequestController {
private var channelReleased = false

override fun sendRequest(request: BinaryMemcacheRequest) {
channel.writeAndFlush(request)
}

override fun sendContent(content: MemcacheContent) {
channel.writeAndFlush(content).addListener {
if(content is LastMemcacheContent) {
if(!channelReleased) {
pool.release(channel)
channelReleased = true
log.trace(channel) {
"Channel released"
}
}
}
}
}

override fun exceptionCaught(ex: Throwable) {
log.warn(ex.message, ex)
connectionClosedByTheRemoteServer = false
channel.close()
if(!channelReleased) {
pool.release(channel)
channelReleased = true
log.trace(channel) {
"Channel released"
}
}
}
})
} else {
response.completeExceptionally(channelFuture.cause())
}
}
@@ -146,107 +188,6 @@ class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseable
return response
}

private fun encodeExpiry(expiry: Duration): Int {
val expirySeconds = expiry.toSeconds()
return expirySeconds.toInt().takeIf { it.toLong() == expirySeconds }
?: Instant.ofEpochSecond(expirySeconds).epochSecond.toInt()
}

fun get(key: String): CompletableFuture<ReadableByteChannel?> {
val request = (cfg.digestAlgorithm
?.let(MessageDigest::getInstance)
?.let { md ->
digest(key.toByteArray(), md)
} ?: key.toByteArray(Charsets.UTF_8)).let { digest ->
DefaultFullBinaryMemcacheRequest(Unpooled.wrappedBuffer(digest), null).apply {
setOpcode(BinaryMemcacheOpcodes.GET)
}
}
return sendRequest(request).thenApply { response ->
try {
when (val status = response.status()) {
BinaryMemcacheResponseStatus.SUCCESS -> {
val compressionMode = cfg.compressionMode
val content = response.content().retain()
content.touch()
if (compressionMode != null) {
when (compressionMode) {
MemcacheCacheConfiguration.CompressionMode.GZIP -> {
GZIPInputStream(ByteBufInputStream(content))
}

MemcacheCacheConfiguration.CompressionMode.DEFLATE -> {
InflaterInputStream(ByteBufInputStream(content))
}
}
} else {
ByteBufInputStream(content)
}.let(Channels::newChannel)
}

BinaryMemcacheResponseStatus.KEY_ENOENT -> {
null
}

else -> throw MemcacheException(status)
}
} finally {
response.release()
}
}
}

fun put(key: String, content: ByteBuf, expiry: Duration, cas: Long? = null): CompletableFuture<Void> {
val request = (cfg.digestAlgorithm
?.let(MessageDigest::getInstance)
?.let { md ->
digest(key.toByteArray(), md)
} ?: key.toByteArray(Charsets.UTF_8)).let { digest ->
val extras = Unpooled.buffer(8, 8)
extras.writeInt(0)
extras.writeInt(encodeExpiry(expiry))
val compressionMode = cfg.compressionMode
content.retain()
val payload = if (compressionMode != null) {
val inputStream = ByteBufInputStream(content)
val buf = content.alloc().buffer()
buf.retain()
val outputStream = when (compressionMode) {
MemcacheCacheConfiguration.CompressionMode.GZIP -> {
GZIPOutputStream(ByteBufOutputStream(buf))
}

MemcacheCacheConfiguration.CompressionMode.DEFLATE -> {
DeflaterOutputStream(ByteBufOutputStream(buf), Deflater(Deflater.DEFAULT_COMPRESSION, false))
}
}
inputStream.use { i ->
outputStream.use { o ->
JWO.copy(i, o)
}
}
buf
} else {
content
}
DefaultFullBinaryMemcacheRequest(Unpooled.wrappedBuffer(digest), extras, payload).apply {
setOpcode(BinaryMemcacheOpcodes.SET)
cas?.let(this::setCas)
}
}
return sendRequest(request).thenApply { response ->
try {
when (val status = response.status()) {
BinaryMemcacheResponseStatus.SUCCESS -> null
else -> throw MemcacheException(status)
}
} finally {
response.release()
}
}
}

fun shutDown(): NettyFuture<*> {
return group.shutdownGracefully()
}
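The CompletableFuture-based get/put API removed in the hunk above was driven directly by callers; a minimal usage sketch follows (the MemcacheCacheConfiguration instance `cfg` and the sample key and value are assumptions for illustration, not part of the diff):

// Hypothetical caller of the removed CompletableFuture-style client API
val client = MemcacheClient(cfg)
val value = Unpooled.wrappedBuffer("cached artifact".toByteArray(Charsets.UTF_8))
client.put("some-build-key", value, Duration.ofHours(12))
    .thenCompose { client.get("some-build-key") }            // resolves to a ReadableByteChannel?, null on cache miss
    .thenAccept { channel -> if (channel == null) println("cache miss") }
    .join()
client.shutDown()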
@@ -0,0 +1,13 @@
package net.woggioni.rbcs.server.memcache.client

import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.binary.BinaryMemcacheRequest

interface MemcacheRequestController {

    fun sendRequest(request : BinaryMemcacheRequest)

    fun sendContent(content : MemcacheContent)

    fun exceptionCaught(ex : Throwable)
}
@@ -0,0 +1,14 @@
package net.woggioni.rbcs.server.memcache.client

import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse

interface MemcacheResponseHandler {

    fun responseReceived(response : BinaryMemcacheResponse)

    fun contentReceived(content : MemcacheContent)

    fun exceptionCaught(ex : Throwable)
}
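MemcacheRequestController (write side) and MemcacheResponseHandler (read side) together describe one streamed memcache exchange; a sketch of a response handler implementation, with the body handling left as an illustrative assumption:

val handler = object : MemcacheResponseHandler {
    override fun responseReceived(response: BinaryMemcacheResponse) {
        println("memcache status: ${response.status()}")      // the response header arrives first
    }
    override fun contentReceived(content: MemcacheContent) {
        // the value body arrives afterwards, chunk by chunk, ending with a LastMemcacheContent
    }
    override fun exceptionCaught(ex: Throwable) = ex.printStackTrace()
}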
@@ -4,7 +4,7 @@
xmlns:rbcs="urn:net.woggioni.rbcs.server"
xmlns:xs="http://www.w3.org/2001/XMLSchema">

<xs:import schemaLocation="jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs.xsd" namespace="urn:net.woggioni.rbcs.server"/>
<xs:import schemaLocation="jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs-server.xsd" namespace="urn:net.woggioni.rbcs.server"/>

<xs:complexType name="memcacheServerType">
<xs:attribute name="host" type="xs:token" use="required"/>
@@ -20,9 +20,10 @@
<xs:element name="server" type="rbcs-memcache:memcacheServerType"/>
</xs:sequence>
<xs:attribute name="max-age" type="xs:duration" default="P1D"/>
<xs:attribute name="max-size" type="xs:unsignedInt" default="1048576"/>
<xs:attribute name="chunk-size" type="rbcs:byteSizeType" default="0x10000"/>
<xs:attribute name="digest" type="xs:token" />
<xs:attribute name="digest" type="xs:token"/>
<xs:attribute name="compression-mode" type="rbcs-memcache:compressionType"/>
<xs:attribute name="compression-level" type="rbcs:compressionLevelType" default="-1"/>
</xs:extension>
</xs:complexContent>
</xs:complexType>
@@ -30,7 +31,6 @@
<xs:simpleType name="compressionType">
<xs:restriction base="xs:token">
<xs:enumeration value="deflate"/>
<xs:enumeration value="gzip"/>
</xs:restriction>
</xs:simpleType>
@@ -0,0 +1,27 @@
package net.woggioni.rbcs.server.memcache.client

import io.netty.buffer.ByteBufUtil
import io.netty.buffer.Unpooled
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Test
import java.io.ByteArrayInputStream
import java.nio.ByteBuffer
import java.nio.channels.Channels
import kotlin.random.Random

class ByteBufferTest {

    @Test
    fun test() {
        val byteBuffer = ByteBuffer.allocate(0x100)
        val originalBytes = Random(101325).nextBytes(0x100)
        Channels.newChannel(ByteArrayInputStream(originalBytes)).use { source ->
            source.read(byteBuffer)
        }
        byteBuffer.flip()
        val buf = Unpooled.buffer()
        buf.writeBytes(byteBuffer)
        val finalBytes = ByteBufUtil.getBytes(buf)
        Assertions.assertArrayEquals(originalBytes, finalBytes)
    }
}
@@ -9,6 +9,9 @@ dependencies {
    implementation catalog.jwo
    implementation catalog.slf4j.api
    implementation catalog.netty.codec.http
    implementation catalog.netty.handler
    implementation catalog.netty.buffer
    implementation catalog.netty.transport

    api project(':rbcs-common')
    api project(':rbcs-api')
@@ -36,3 +39,4 @@ publishing {
    }
}
@@ -3,27 +3,26 @@ import net.woggioni.rbcs.server.cache.FileSystemCacheProvider;
import net.woggioni.rbcs.server.cache.InMemoryCacheProvider;

module net.woggioni.rbcs.server {
    requires java.sql;
    requires java.xml;
    requires java.logging;
    requires java.naming;
    requires kotlin.stdlib;
    requires io.netty.buffer;
    requires io.netty.transport;
    requires io.netty.codec.http;
    requires io.netty.common;
    requires io.netty.handler;
    requires io.netty.codec;
    requires org.slf4j;
    requires net.woggioni.jwo;
    requires net.woggioni.rbcs.common;
    requires net.woggioni.rbcs.api;
    requires io.netty.codec.compression;
    requires io.netty.transport;
    requires io.netty.buffer;
    requires io.netty.common;
    requires org.slf4j;

    exports net.woggioni.rbcs.server;

    opens net.woggioni.rbcs.server;
    opens net.woggioni.rbcs.server.schema;

    uses CacheProvider;
    provides CacheProvider with FileSystemCacheProvider, InMemoryCacheProvider;
}
@@ -1,30 +0,0 @@
package net.woggioni.rbcs.server

import io.netty.channel.ChannelHandlerContext
import org.slf4j.Logger
import java.net.InetSocketAddress

inline fun Logger.trace(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isTraceEnabled }, { trace(it) }, messageBuilder)
}
inline fun Logger.debug(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isDebugEnabled }, { debug(it) }, messageBuilder)
}
inline fun Logger.info(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isInfoEnabled }, { info(it) }, messageBuilder)
}
inline fun Logger.warn(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isWarnEnabled }, { warn(it) }, messageBuilder)
}
inline fun Logger.error(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isErrorEnabled }, { error(it) }, messageBuilder)
}

inline fun log(log : Logger, ctx : ChannelHandlerContext,
               filter : Logger.() -> Boolean,
               loggerMethod : Logger.(String) -> Unit, messageBuilder : () -> String) {
    if(log.filter()) {
        val clientAddress = (ctx.channel().remoteAddress() as InetSocketAddress).address.hostAddress
        log.loggerMethod(clientAddress + " - " + messageBuilder())
    }
}
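A typical call site for the removed extensions looked roughly like the following (illustrative; the `log` and `ctx` receivers and the message text are assumptions):

log.debug(ctx) { "request handled" }   // the extension prefixes the message with the remote client address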
@@ -3,6 +3,7 @@ package net.woggioni.rbcs.server
import io.netty.bootstrap.ServerBootstrap
import io.netty.buffer.ByteBuf
import io.netty.channel.Channel
import io.netty.channel.ChannelFactory
import io.netty.channel.ChannelFuture
import io.netty.channel.ChannelHandler.Sharable
import io.netty.channel.ChannelHandlerContext
@@ -10,13 +11,19 @@ import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.channel.ChannelInitializer
import io.netty.channel.ChannelOption
import io.netty.channel.ChannelPromise
import io.netty.channel.nio.NioEventLoopGroup
import io.netty.channel.MultiThreadIoEventLoopGroup
import io.netty.channel.nio.NioIoHandler
import io.netty.channel.socket.DatagramChannel
import io.netty.channel.socket.ServerSocketChannel
import io.netty.channel.socket.SocketChannel
import io.netty.channel.socket.nio.NioDatagramChannel
import io.netty.channel.socket.nio.NioServerSocketChannel
import io.netty.channel.socket.nio.NioSocketChannel
import io.netty.handler.codec.compression.CompressionOptions
import io.netty.handler.codec.http.DefaultHttpContent
import io.netty.handler.codec.http.HttpContentCompressor
import io.netty.handler.codec.http.HttpDecoderConfig
import io.netty.handler.codec.http.HttpHeaderNames
import io.netty.handler.codec.http.HttpObjectAggregator
import io.netty.handler.codec.http.HttpRequest
import io.netty.handler.codec.http.HttpServerCodec
import io.netty.handler.ssl.ClientAuth
@@ -28,52 +35,58 @@ import io.netty.handler.timeout.IdleState
import io.netty.handler.timeout.IdleStateEvent
import io.netty.handler.timeout.IdleStateHandler
import io.netty.util.AttributeKey
import io.netty.util.concurrent.DefaultEventExecutorGroup
import io.netty.util.concurrent.EventExecutorGroup
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.api.exception.ConfigurationException
import net.woggioni.rbcs.common.RBCS.toUrl
import net.woggioni.rbcs.common.PasswordSecurity.decodePasswordHash
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
import net.woggioni.rbcs.common.Xml
import net.woggioni.rbcs.common.contextLogger
import net.woggioni.rbcs.common.debug
import net.woggioni.rbcs.common.info
import net.woggioni.rbcs.server.auth.AbstractNettyHttpAuthenticator
import net.woggioni.rbcs.server.auth.Authorizer
import net.woggioni.rbcs.server.auth.ClientCertificateValidator
import net.woggioni.rbcs.server.auth.RoleAuthorizer
import net.woggioni.rbcs.server.configuration.Parser
import net.woggioni.rbcs.server.configuration.Serializer
import net.woggioni.rbcs.server.exception.ExceptionHandler
import net.woggioni.rbcs.server.handler.ServerHandler
import net.woggioni.rbcs.server.throttling.ThrottlingHandler
import net.woggioni.jwo.JWO
import net.woggioni.jwo.Tuple2
import java.io.OutputStream
import java.net.InetSocketAddress
import java.nio.file.Files
import java.nio.file.Path
import java.security.KeyStore
import java.security.PrivateKey
import java.security.cert.X509Certificate
import java.time.Duration
import java.time.Instant
import java.util.Arrays
import java.util.Base64
import java.util.concurrent.CompletableFuture
import java.util.concurrent.Future
import java.util.concurrent.TimeUnit
import java.util.concurrent.TimeoutException
import java.util.regex.Matcher
import java.util.regex.Pattern
import javax.naming.ldap.LdapName
import javax.net.ssl.SSLPeerUnverifiedException
import net.woggioni.rbcs.api.AsyncCloseable
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.api.exception.ConfigurationException
import net.woggioni.rbcs.common.PasswordSecurity.decodePasswordHash
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
import net.woggioni.rbcs.common.RBCS.getTrustManager
import net.woggioni.rbcs.common.RBCS.loadKeystore
import net.woggioni.rbcs.common.RBCS.toUrl
import net.woggioni.rbcs.common.Xml
import net.woggioni.rbcs.common.createLogger
import net.woggioni.rbcs.common.debug
import net.woggioni.rbcs.common.info
import net.woggioni.rbcs.server.auth.AbstractNettyHttpAuthenticator
import net.woggioni.rbcs.server.auth.Authorizer
import net.woggioni.rbcs.server.auth.RoleAuthorizer
import net.woggioni.rbcs.server.configuration.Parser
import net.woggioni.rbcs.server.configuration.Serializer
import net.woggioni.rbcs.server.exception.ExceptionHandler
import net.woggioni.rbcs.server.handler.BlackHoleRequestHandler
import net.woggioni.rbcs.server.handler.MaxRequestSizeHandler
import net.woggioni.rbcs.server.handler.ServerHandler
import net.woggioni.rbcs.server.throttling.BucketManager
import net.woggioni.rbcs.server.throttling.ThrottlingHandler

class RemoteBuildCacheServer(private val cfg: Configuration) {
private val log = contextLogger()

companion object {
private val log = createLogger<RemoteBuildCacheServer>()

val userAttribute: AttributeKey<Configuration.User> = AttributeKey.valueOf("user")
val groupAttribute: AttributeKey<Set<Configuration.Group>> = AttributeKey.valueOf("group")

val DEFAULT_CONFIGURATION_URL by lazy { "classpath:net/woggioni/rbcs/server/rbcs-default.xml".toUrl() }
val DEFAULT_CONFIGURATION_URL by lazy { "jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/rbcs-default.xml".toUrl() }
private const val SSL_HANDLER_NAME = "sslHandler"

fun loadConfiguration(configurationFile: Path): Configuration {
@@ -128,11 +141,12 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
val clientCertificate = peerCertificates.first() as X509Certificate
val user = userExtractor?.extract(clientCertificate)
val group = groupExtractor?.extract(clientCertificate)
val allGroups = ((user?.groups ?: emptySet()).asSequence() + sequenceOf(group).filterNotNull()).toSet()
val allGroups =
((user?.groups ?: emptySet()).asSequence() + sequenceOf(group).filterNotNull()).toSet()
AuthenticationResult(user, allGroups)
} ?: anonymousUserGroups?.let{ AuthenticationResult(null, it) }
} ?: anonymousUserGroups?.let { AuthenticationResult(null, it) }
} catch (es: SSLPeerUnverifiedException) {
anonymousUserGroups?.let{ AuthenticationResult(null, it) }
anonymousUserGroups?.let { AuthenticationResult(null, it) }
}
}
}
@@ -141,7 +155,9 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
private class NettyHttpBasicAuthenticator(
private val users: Map<String, Configuration.User>, authorizer: Authorizer
) : AbstractNettyHttpAuthenticator(authorizer) {
private val log = contextLogger()
companion object {
private val log = createLogger<NettyHttpBasicAuthenticator>()
}

override fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): AuthenticationResult? {
val authorizationHeader = req.headers()[HttpHeaderNames.AUTHORIZATION] ?: let {
@@ -190,8 +206,9 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {

private class ServerInitializer(
private val cfg: Configuration,
private val eventExecutorGroup: EventExecutorGroup
private val channelFactory : ChannelFactory<SocketChannel>,
) : ChannelInitializer<Channel>() {
private val datagramChannelFactory : ChannelFactory<DatagramChannel>,
) : ChannelInitializer<Channel>(), AsyncCloseable {

companion object {
private fun createSslCtx(tls: Configuration.Tls): SslContext {
@@ -211,9 +228,9 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
val clientAuth = tls.trustStore?.let { trustStore ->
val ts = loadKeystore(trustStore.file, trustStore.password)
trustManager(
ClientCertificateValidator.getTrustManager(ts, trustStore.isCheckCertificateStatus)
getTrustManager(ts, trustStore.isCheckCertificateStatus)
)
if(trustStore.isRequireClientCertificate) ClientAuth.REQUIRE
if (trustStore.isRequireClientCertificate) ClientAuth.REQUIRE
else ClientAuth.OPTIONAL
} ?: ClientAuth.NONE
clientAuth(clientAuth)
@@ -221,38 +238,12 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
}
}

fun loadKeystore(file: Path, password: String?): KeyStore {
private val log = createLogger<ServerInitializer>()
val ext = JWO.splitExtension(file)
.map(Tuple2<String, String>::get_2)
.orElseThrow {
IllegalArgumentException(
"Keystore file '${file}' must have .jks, .p12, .pfx extension"
)
}
val keystore = when (ext.substring(1).lowercase()) {
"jks" -> KeyStore.getInstance("JKS")
"p12", "pfx" -> KeyStore.getInstance("PKCS12")
else -> throw IllegalArgumentException(
"Keystore file '${file}' must have .jks, .p12, .pfx extension"
)
}
Files.newInputStream(file).use {
keystore.load(it, password?.let(String::toCharArray))
}
return keystore
}
}

private val log = contextLogger()
private val cacheHandlerFactory = cfg.cache.materialize()

private val serverHandler = let {
private val bucketManager = BucketManager.from(cfg)
val cacheImplementation = cfg.cache.materialize()
val prefix = Path.of("/").resolve(Path.of(cfg.serverPath ?: "/"))
ServerHandler(cacheImplementation, prefix)
}

private val exceptionHandler = ExceptionHandler()
private val throttlingHandler = ThrottlingHandler(cfg)

private val authenticator = when (val auth = cfg.authentication) {
is Configuration.BasicAuthentication -> NettyHttpBasicAuthenticator(cfg.users, RoleAuthorizer())
@@ -307,25 +298,13 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
"Closed connection ${ch.id().asShortText()} with ${ch.remoteAddress()}"
}
}
ch.config().setAutoRead(false)
val pipeline = ch.pipeline()
cfg.connection.also { conn ->
val readTimeout = conn.readTimeout.toMillis()
val writeTimeout = conn.writeTimeout.toMillis()
if(readTimeout > 0 || writeTimeout > 0) {
pipeline.addLast(
IdleStateHandler(
false,
readTimeout,
writeTimeout,
0,
TimeUnit.MILLISECONDS
)
)
}
val readIdleTimeout = conn.readIdleTimeout.toMillis()
val writeIdleTimeout = conn.writeIdleTimeout.toMillis()
val idleTimeout = conn.idleTimeout.toMillis()
if(readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
if (readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
pipeline.addLast(
IdleStateHandler(
true,
@@ -340,16 +319,19 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
pipeline.addLast(object : ChannelInboundHandlerAdapter() {
override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
if (evt is IdleStateEvent) {
when(evt.state()) {
when (evt.state()) {
IdleState.READER_IDLE -> log.debug {
"Read timeout reached on channel ${ch.id().asShortText()}, closing the connection"
}
IdleState.WRITER_IDLE -> log.debug {
"Write timeout reached on channel ${ch.id().asShortText()}, closing the connection"
}
IdleState.ALL_IDLE -> log.debug {
"Idle timeout reached on channel ${ch.id().asShortText()}, closing the connection"
}
null -> throw IllegalStateException("This should never happen")
}
ctx.close()
@@ -359,63 +341,117 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
sslContext?.newHandler(ch.alloc())?.also {
pipeline.addLast(SSL_HANDLER_NAME, it)
}
pipeline.addLast(HttpServerCodec())
val httpDecoderConfig = HttpDecoderConfig().apply {
maxChunkSize = cfg.connection.chunkSize
}
pipeline.addLast(HttpServerCodec(httpDecoderConfig))
pipeline.addLast(MaxRequestSizeHandler.NAME, MaxRequestSizeHandler(cfg.connection.maxRequestSize))
pipeline.addLast(HttpChunkContentCompressor(1024))
pipeline.addLast(ChunkedWriteHandler())
pipeline.addLast(HttpObjectAggregator(cfg.connection.maxRequestSize))
authenticator?.let {
pipeline.addLast(it)
}
pipeline.addLast(throttlingHandler)
pipeline.addLast(ThrottlingHandler(bucketManager, cfg.connection))
pipeline.addLast(eventExecutorGroup, serverHandler)
pipeline.addLast(exceptionHandler)
val serverHandler = let {
val prefix = Path.of("/").resolve(Path.of(cfg.serverPath ?: "/"))
ServerHandler(prefix) {
cacheHandlerFactory.newHandler(cfg, ch.eventLoop(), channelFactory, datagramChannelFactory)
}
}
pipeline.addLast(ServerHandler.NAME, serverHandler)
pipeline.addLast(ExceptionHandler.NAME, ExceptionHandler)
pipeline.addLast(BlackHoleRequestHandler.NAME, BlackHoleRequestHandler())
}

override fun asyncClose() = cacheHandlerFactory.asyncClose()
}

class ServerHandle(
httpChannelFuture: ChannelFuture,
closeFuture: ChannelFuture,
private val executorGroups: Iterable<EventExecutorGroup>
private val bossGroup: EventExecutorGroup,
) : AutoCloseable {
private val executorGroups: Iterable<EventExecutorGroup>,
private val httpChannel: Channel = httpChannelFuture.channel()
private val serverInitializer: AsyncCloseable,
private val closeFuture: ChannelFuture = httpChannel.closeFuture()
) : Future<Void> by from(closeFuture, bossGroup, executorGroups, serverInitializer) {
private val log = contextLogger()

fun shutdown(): ChannelFuture {
companion object {
return httpChannel.close()
private val log = createLogger<ServerHandle>()
}

override fun close() {
private fun from(
try {
closeFuture: ChannelFuture,
closeFuture.sync()
bossGroup: EventExecutorGroup,
} finally {
executorGroups: Iterable<EventExecutorGroup>,
executorGroups.forEach {
serverInitializer: AsyncCloseable
it.shutdownGracefully().sync()
): CompletableFuture<Void> {
val result = CompletableFuture<Void>()
closeFuture.addListener {
val errors = mutableListOf<Throwable>()
val deadline = Instant.now().plusSeconds(20)

serverInitializer.asyncClose().whenCompleteAsync { _, ex ->
if(ex != null) {
log.error(ex.message, ex)
errors.addLast(ex)
}

executorGroups.forEach(EventExecutorGroup::shutdownGracefully)
bossGroup.terminationFuture().sync()

for (executorGroup in executorGroups) {
val future = executorGroup.terminationFuture()
try {
val now = Instant.now()
if (now > deadline) {
future.get(0, TimeUnit.SECONDS)
} else {
future.get(Duration.between(now, deadline).toMillis(), TimeUnit.MILLISECONDS)
}
}
catch (te: TimeoutException) {
errors.addLast(te)
log.warn("Timeout while waiting for shutdown of $executorGroup", te)
} catch (ex: Throwable) {
log.warn(ex.message, ex)
errors.addLast(ex)
}
}

if(errors.isEmpty()) {
result.complete(null)
} else {
result.completeExceptionally(errors.first())
}
}
}

return result.thenAccept {
log.info {
"RemoteBuildCacheServer has been gracefully shut down"
}
}
}
}
log.info {
}
"RemoteBuildCacheServer has been gracefully shut down"
}
fun sendShutdownSignal() {
bossGroup.shutdownGracefully()
}
}
}

fun run(): ServerHandle {
// Create the multithreaded event loops for the server
val bossGroup = NioEventLoopGroup(1)
val bossGroup = MultiThreadIoEventLoopGroup(1, NioIoHandler.newFactory())
val serverSocketChannel = NioServerSocketChannel::class.java
val channelFactory = ChannelFactory<SocketChannel> { NioSocketChannel() }
val workerGroup = NioEventLoopGroup(0)
val datagramChannelFactory = ChannelFactory<DatagramChannel> { NioDatagramChannel() }
val eventExecutorGroup = run {
val serverChannelFactory = ChannelFactory<ServerSocketChannel> { NioServerSocketChannel() }
val threadFactory = if (cfg.eventExecutor.isUseVirtualThreads) {
val workerGroup = MultiThreadIoEventLoopGroup(0, NioIoHandler.newFactory())
Thread.ofVirtual().factory()
} else {
val serverInitializer = ServerInitializer(cfg, channelFactory, datagramChannelFactory)
null
}
DefaultEventExecutorGroup(Runtime.getRuntime().availableProcessors(), threadFactory)
}
val bootstrap = ServerBootstrap().apply {
// Configure the server
group(bossGroup, workerGroup)
channel(serverSocketChannel)
channelFactory(serverChannelFactory)
childHandler(ServerInitializer(cfg, eventExecutorGroup))
childHandler(serverInitializer)
option(ChannelOption.SO_BACKLOG, cfg.incomingConnectionsBacklogSize)
childOption(ChannelOption.SO_KEEPALIVE, true)
}
@@ -423,10 +459,16 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {

// Bind and start to accept incoming connections.
val bindAddress = InetSocketAddress(cfg.host, cfg.port)
val httpChannel = bootstrap.bind(bindAddress).sync()
val httpChannel = bootstrap.bind(bindAddress).sync().channel()
log.info {
"RemoteBuildCacheServer is listening on ${cfg.host}:${cfg.port}"
}
return ServerHandle(httpChannel, setOf(bossGroup, workerGroup, eventExecutorGroup))
return ServerHandle(
httpChannel.closeFuture(),
bossGroup,
setOf(workerGroup),
serverInitializer
)
}
}
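A minimal start/stop sketch against the reworked ServerHandle API above; the configuration file path is an assumption:

val cfg = RemoteBuildCacheServer.loadConfiguration(Path.of("rbcs-server.xml"))
val handle = RemoteBuildCacheServer(cfg).run()
Runtime.getRuntime().addShutdownHook(Thread { handle.sendShutdownSignal() })
handle.get()   // ServerHandle delegates to a Future<Void> that completes after graceful shutdown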
@@ -6,6 +6,7 @@ import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.handler.codec.http.DefaultFullHttpResponse
import io.netty.handler.codec.http.FullHttpResponse
import io.netty.handler.codec.http.HttpContent
import io.netty.handler.codec.http.HttpHeaderNames
import io.netty.handler.codec.http.HttpRequest
import io.netty.handler.codec.http.HttpResponseStatus
@@ -57,6 +58,8 @@ abstract class AbstractNettyHttpAuthenticator(private val authorizer: Authorizer)
} else {
authorizationFailure(ctx, msg)
}
} else if(msg is HttpContent) {
ctx.fireChannelRead(msg)
}
}
@@ -1,90 +0,0 @@
package net.woggioni.rbcs.server.auth

import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.handler.ssl.SslHandler
import io.netty.handler.ssl.SslHandshakeCompletionEvent
import java.security.KeyStore
import java.security.cert.CertPathValidator
import java.security.cert.CertPathValidatorException
import java.security.cert.CertificateException
import java.security.cert.CertificateFactory
import java.security.cert.PKIXParameters
import java.security.cert.PKIXRevocationChecker
import java.security.cert.X509Certificate
import java.util.EnumSet
import javax.net.ssl.SSLSession
import javax.net.ssl.TrustManagerFactory
import javax.net.ssl.X509TrustManager


class ClientCertificateValidator private constructor(
    private val sslHandler: SslHandler,
    private val x509TrustManager: X509TrustManager
) : ChannelInboundHandlerAdapter() {
    override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
        if (evt is SslHandshakeCompletionEvent) {
            if (evt.isSuccess) {
                val session: SSLSession = sslHandler.engine().session
                val clientCertificateChain = session.peerCertificates as Array<X509Certificate>
                val authType: String = clientCertificateChain[0].publicKey.algorithm
                x509TrustManager.checkClientTrusted(clientCertificateChain, authType)
            } else {
                // Handle the failure, for example by closing the channel.
            }
        }
        super.userEventTriggered(ctx, evt)
    }

    companion object {
        fun getTrustManager(trustStore: KeyStore?, certificateRevocationEnabled: Boolean): X509TrustManager {
            return if (trustStore != null) {
                val certificateFactory = CertificateFactory.getInstance("X.509")
                val validator = CertPathValidator.getInstance("PKIX").apply {
                    val rc = revocationChecker as PKIXRevocationChecker
                    rc.options = EnumSet.of(
                        PKIXRevocationChecker.Option.NO_FALLBACK
                    )
                }
                val params = PKIXParameters(trustStore).apply {
                    isRevocationEnabled = certificateRevocationEnabled
                }
                object : X509TrustManager {
                    override fun checkClientTrusted(chain: Array<out X509Certificate>, authType: String) {
                        val clientCertificateChain = certificateFactory.generateCertPath(chain.toList())
                        try {
                            validator.validate(clientCertificateChain, params)
                        } catch (ex: CertPathValidatorException) {
                            throw CertificateException(ex)
                        }
                    }

                    override fun checkServerTrusted(chain: Array<out X509Certificate>, authType: String) {
                        throw NotImplementedError()
                    }

                    private val acceptedIssuers = trustStore.aliases().asSequence()
                        .filter(trustStore::isCertificateEntry)
                        .map(trustStore::getCertificate)
                        .map { it as X509Certificate }
                        .toList()
                        .toTypedArray()

                    override fun getAcceptedIssuers() = acceptedIssuers
                }
            } else {
                val trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm())
                trustManagerFactory.trustManagers.asSequence().filter { it is X509TrustManager }
                    .single() as X509TrustManager
            }
        }

        fun of(
            sslHandler: SslHandler,
            trustStore: KeyStore?,
            certificateRevocationEnabled: Boolean
        ): ClientCertificateValidator {
            return ClientCertificateValidator(sslHandler, getTrustManager(trustStore, certificateRevocationEnabled))
        }
    }
}
@@ -8,8 +8,9 @@ class RoleAuthorizer : Authorizer {

companion object {
private val METHOD_MAP = mapOf(
Role.Reader to setOf(HttpMethod.GET, HttpMethod.HEAD, HttpMethod.TRACE),
Role.Reader to setOf(HttpMethod.GET, HttpMethod.HEAD),
Role.Writer to setOf(HttpMethod.PUT, HttpMethod.POST)
Role.Writer to setOf(HttpMethod.PUT, HttpMethod.POST),
Role.Healthcheck to setOf(HttpMethod.TRACE)
)
}
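The map above drives a simple role-to-method check; an equivalent lookup, sketched with an assumed helper name:

fun isAllowed(role: Role, method: HttpMethod): Boolean =
    METHOD_MAP[role]?.contains(method) ?: false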
@@ -1,12 +1,15 @@
package net.woggioni.rbcs.server.cache

import io.netty.buffer.ByteBuf
import net.woggioni.rbcs.api.Cache
import net.woggioni.rbcs.common.ByteBufInputStream
import net.woggioni.rbcs.common.RBCS.digestString
import net.woggioni.rbcs.common.contextLogger
import net.woggioni.jwo.JWO
import net.woggioni.jwo.LockFile
import net.woggioni.rbcs.api.AsyncCloseable
import net.woggioni.rbcs.api.CacheValueMetadata
import net.woggioni.rbcs.common.createLogger
import java.io.ByteArrayOutputStream
import java.io.InputStream
import java.io.ObjectInputStream
import java.io.ObjectOutputStream
import java.io.Serializable
import java.nio.ByteBuffer
import java.nio.channels.Channels
import java.nio.channels.FileChannel
import java.nio.file.Files
@@ -14,117 +17,153 @@ import java.nio.file.Path
import java.nio.file.StandardCopyOption
import java.nio.file.StandardOpenOption
import java.nio.file.attribute.BasicFileAttributes
import java.security.MessageDigest
import java.time.Duration
import java.time.Instant
import java.util.concurrent.CompletableFuture
import java.util.concurrent.atomic.AtomicReference
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.Inflater
import java.util.zip.InflaterInputStream

class FileSystemCache(
val root: Path,
val maxAge: Duration,
val maxAge: Duration
val digestAlgorithm: String?,
) : AsyncCloseable {
val compressionEnabled: Boolean,
val compressionLevel: Int
class EntryValue(val metadata: CacheValueMetadata, val channel : FileChannel, val offset : Long, val size : Long) : Serializable
) : Cache {

private companion object {
@JvmStatic
private val log = createLogger<FileSystemCache>()
private val log = contextLogger()
}

init {
Files.createDirectories(root)
}

private var nextGc = AtomicReference(Instant.now().plus(maxAge))
@Volatile
private var running = true

override fun get(key: String) = (digestAlgorithm
private var nextGc = Instant.now()
?.let(MessageDigest::getInstance)
?.let { md ->
fun get(key: String): EntryValue? =
digestString(key.toByteArray(), md)
root.resolve(key).takeIf(Files::exists)
} ?: key).let { digest ->
root.resolve(digest).takeIf(Files::exists)
?.let { file ->
file.takeIf(Files::exists)?.let { file ->
val size = Files.size(file)
if (compressionEnabled) {
val channel = FileChannel.open(file, StandardOpenOption.READ)
val inflater = Inflater()
val source = Channels.newInputStream(channel)
Channels.newChannel(
val tmp = ByteArray(Integer.BYTES)
InflaterInputStream(
val buffer = ByteBuffer.wrap(tmp)
Channels.newInputStream(
source.read(tmp)
FileChannel.open(
buffer.rewind()
file,
val offset = (Integer.BYTES + buffer.getInt()).toLong()
StandardOpenOption.READ
var count = 0
)
val wrapper = object : InputStream() {
), inflater
override fun read(): Int {
)
return source.read().also {
)
if (it > 0) count += it
} else {
}
FileChannel.open(file, StandardOpenOption.READ)
}

override fun read(b: ByteArray, off: Int, len: Int): Int {
return source.read(b, off, len).also {
if (it > 0) count += it
}
}

override fun close() {
}
}
}.also {
val metadata = ObjectInputStream(wrapper).use { ois ->
gc()
ois.readObject() as CacheValueMetadata
}.let {
}
CompletableFuture.completedFuture(it)
EntryValue(metadata, channel, offset, size)
}

class FileSink(metadata: CacheValueMetadata, private val path: Path, private val tmpFile: Path) {
val channel: FileChannel

init {
val baos = ByteArrayOutputStream()
ObjectOutputStream(baos).use {
it.writeObject(metadata)
}
Files.newOutputStream(tmpFile).use {
val bytes = baos.toByteArray()
val buffer = ByteBuffer.allocate(Integer.BYTES)
buffer.putInt(bytes.size)
buffer.rewind()
it.write(buffer.array())
it.write(bytes)
}
channel = FileChannel.open(tmpFile, StandardOpenOption.APPEND)
}

fun commit() {
channel.close()
Files.move(tmpFile, path, StandardCopyOption.ATOMIC_MOVE)
}

fun rollback() {
channel.close()
Files.delete(path)
}
}

override fun put(key: String, content: ByteBuf): CompletableFuture<Void> {
fun put(
(digestAlgorithm
key: String,
?.let(MessageDigest::getInstance)
metadata: CacheValueMetadata,
?.let { md ->
): FileSink {
digestString(key.toByteArray(), md)
val file = root.resolve(key)
} ?: key).let { digest ->
val tmpFile = Files.createTempFile(root, null, ".tmp")
val file = root.resolve(digest)
return FileSink(metadata, file, tmpFile)
val tmpFile = Files.createTempFile(root, null, ".tmp")
}
try {
Files.newOutputStream(tmpFile).let {
private val closeFuture = object : CompletableFuture<Void>() {
if (compressionEnabled) {
init {
val deflater = Deflater(compressionLevel)
Thread.ofVirtual().name("file-system-cache-gc").start {
DeflaterOutputStream(it, deflater)
try {
} else {
while (running) {
it
gc()
}
}
}.use {
complete(null)
JWO.copy(ByteBufInputStream(content), it)
} catch (ex : Throwable) {
completeExceptionally(ex)
}
Files.move(tmpFile, file, StandardCopyOption.ATOMIC_MOVE)
} catch (t: Throwable) {
Files.delete(tmpFile)
throw t
}
}.also {
gc()
}
return CompletableFuture.completedFuture(null)
}

private fun gc() {
val now = Instant.now()
val oldValue = nextGc.getAndSet(now.plus(maxAge))
if (nextGc < now) {
if (oldValue < now) {
val oldestEntry = actualGc(now)
actualGc(now)
nextGc = (oldestEntry ?: now).plus(maxAge)
}
Thread.sleep(minOf(Duration.between(now, nextGc), Duration.ofSeconds(1)))
}

@Synchronized
/**
private fun actualGc(now: Instant) {
* Returns the creation timestamp of the oldest cache entry (if any)
Files.list(root).filter {
*/
val creationTimeStamp = Files.readAttributes(it, BasicFileAttributes::class.java)
private fun actualGc(now: Instant): Instant? {
.creationTime()
var result: Instant? = null
.toInstant()
Files.list(root)
now > creationTimeStamp.plus(maxAge)
.filter { path ->
}.forEach { file ->
JWO.splitExtension(path)
LockFile.acquire(file, false).use {
.map { it._2 }
Files.delete(file)
.map { it != ".tmp" }
.orElse(true)
}
.filter {
val creationTimeStamp = Files.readAttributes(it, BasicFileAttributes::class.java)
.creationTime()
.toInstant()
if (result == null || creationTimeStamp < result) {
result = creationTimeStamp
}
now > creationTimeStamp.plus(maxAge)
}.forEach(Files::delete)
return result
}

override fun close() {}
override fun asyncClose() : CompletableFuture<Void> {
running = false
return closeFuture
}
}
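FileSink and get() above imply a simple on-disk entry layout: a 4-byte length prefix, the serialized CacheValueMetadata, then the payload. An illustrative stand-alone reader under that assumption (the `path` variable is assumed to point at an existing cache entry):

FileChannel.open(path, StandardOpenOption.READ).use { ch ->
    val header = ByteBuffer.allocate(Integer.BYTES)
    ch.read(header)                                       // read the metadata length prefix
    header.flip()
    val metadataSize = header.getInt()
    val payloadOffset = (Integer.BYTES + metadataSize).toLong()
    println("metadata: $metadataSize bytes, payload starts at offset $payloadOffset of ${ch.size()}")
}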
@@ -1,8 +1,13 @@
package net.woggioni.rbcs.server.cache

import io.netty.channel.ChannelFactory
import io.netty.channel.EventLoopGroup
import io.netty.channel.socket.DatagramChannel
import io.netty.channel.socket.SocketChannel
import net.woggioni.jwo.Application
import net.woggioni.rbcs.api.CacheHandlerFactory
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.common.RBCS
import net.woggioni.jwo.Application
import java.nio.file.Path
import java.time.Duration

@@ -13,13 +18,19 @@ data class FileSystemCacheConfiguration(
val compressionEnabled: Boolean,
val compressionLevel: Int,
) : Configuration.Cache {
override fun materialize() = FileSystemCache(
root ?: Application.builder("rbcs").build().computeCacheDirectory(),
override fun materialize() = object : CacheHandlerFactory {
maxAge,
private val cache = FileSystemCache(root ?: Application.builder("rbcs").build().computeCacheDirectory(), maxAge)
digestAlgorithm,
compressionEnabled,
override fun asyncClose() = cache.asyncClose()
compressionLevel
)
override fun newHandler(
cfg : Configuration,
eventLoop: EventLoopGroup,
socketChannelFactory: ChannelFactory<SocketChannel>,
datagramChannelFactory: ChannelFactory<DatagramChannel>
) = FileSystemCacheHandler(cache, digestAlgorithm, compressionEnabled, compressionLevel, cfg.connection.chunkSize)
}

override fun getNamespaceURI() = RBCS.RBCS_NAMESPACE_URI
137 rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/FileSystemCacheHandler.kt (vendored, new file)
@@ -0,0 +1,137 @@
|
|||||||
|
package net.woggioni.rbcs.server.cache
|
||||||
|
|
||||||
|
import io.netty.buffer.ByteBuf
|
||||||
|
import io.netty.channel.ChannelHandlerContext
|
||||||
|
import io.netty.handler.codec.http.LastHttpContent
|
||||||
|
import io.netty.handler.stream.ChunkedNioFile
import net.woggioni.rbcs.api.CacheHandler
import net.woggioni.rbcs.api.message.CacheMessage
import net.woggioni.rbcs.api.message.CacheMessage.CacheContent
import net.woggioni.rbcs.api.message.CacheMessage.CacheGetRequest
import net.woggioni.rbcs.api.message.CacheMessage.CachePutRequest
import net.woggioni.rbcs.api.message.CacheMessage.CachePutResponse
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueFoundResponse
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueNotFoundResponse
import net.woggioni.rbcs.api.message.CacheMessage.LastCacheContent
import net.woggioni.rbcs.common.RBCS.processCacheKey
import java.nio.channels.Channels
import java.util.Base64
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.InflaterInputStream

class FileSystemCacheHandler(
    private val cache: FileSystemCache,
    private val digestAlgorithm: String?,
    private val compressionEnabled: Boolean,
    private val compressionLevel: Int,
    private val chunkSize: Int
) : CacheHandler() {

    private interface InProgressRequest {
    }

    private class InProgressGetRequest(val request : CacheGetRequest) : InProgressRequest

    private inner class InProgressPutRequest(
        val key : String,
        private val fileSink : FileSystemCache.FileSink
    ) : InProgressRequest {

        private val stream = Channels.newOutputStream(fileSink.channel).let {
            if (compressionEnabled) {
                DeflaterOutputStream(it, Deflater(compressionLevel))
            } else {
                it
            }
        }

        fun write(buf: ByteBuf) {
            buf.readBytes(stream, buf.readableBytes())
        }

        fun commit() {
            stream.close()
            fileSink.commit()
        }

        fun rollback() {
            fileSink.rollback()
        }
    }

    private var inProgressRequest: InProgressRequest? = null

    override fun channelRead0(ctx: ChannelHandlerContext, msg: CacheMessage) {
        when (msg) {
            is CacheGetRequest -> handleGetRequest(ctx, msg)
            is CachePutRequest -> handlePutRequest(ctx, msg)
            is LastCacheContent -> handleLastCacheContent(ctx, msg)
            is CacheContent -> handleCacheContent(ctx, msg)
            else -> ctx.fireChannelRead(msg)
        }
    }

    private fun handleGetRequest(ctx: ChannelHandlerContext, msg: CacheGetRequest) {
        inProgressRequest = InProgressGetRequest(msg)
    }

    private fun handlePutRequest(ctx: ChannelHandlerContext, msg: CachePutRequest) {
        val key = String(Base64.getUrlEncoder().encode(processCacheKey(msg.key, digestAlgorithm)))
        val sink = cache.put(key, msg.metadata)
        inProgressRequest = InProgressPutRequest(msg.key, sink)
    }

    private fun handleCacheContent(ctx: ChannelHandlerContext, msg: CacheContent) {
        val request = inProgressRequest
        if(request is InProgressPutRequest) {
            request.write(msg.content())
        }
    }

    private fun handleLastCacheContent(ctx: ChannelHandlerContext, msg: LastCacheContent) {
        when(val request = inProgressRequest) {
            is InProgressPutRequest -> {
                inProgressRequest = null
                request.write(msg.content())
                request.commit()
                sendMessageAndFlush(ctx, CachePutResponse(request.key))
            }
            is InProgressGetRequest -> {
                val key = String(Base64.getUrlEncoder().encode(processCacheKey(request.request.key, digestAlgorithm)))
                cache.get(key)?.also { entryValue ->
                    sendMessageAndFlush(ctx, CacheValueFoundResponse(request.request.key, entryValue.metadata))
                    entryValue.channel.let { channel ->
                        if(compressionEnabled) {
                            InflaterInputStream(Channels.newInputStream(channel)).use { stream ->
                                outerLoop@
                                while (true) {
                                    val buf = ctx.alloc().heapBuffer(chunkSize)
                                    while(buf.readableBytes() < chunkSize) {
                                        val read = buf.writeBytes(stream, chunkSize)
                                        if(read < 0) {
                                            sendMessageAndFlush(ctx, LastCacheContent(buf))
                                            break@outerLoop
                                        }
                                    }
                                    sendMessageAndFlush(ctx, CacheContent(buf))
                                }
                            }
                        } else {
                            sendMessage(ctx, ChunkedNioFile(channel, entryValue.offset, entryValue.size - entryValue.offset, chunkSize))
                            sendMessageAndFlush(ctx, LastHttpContent.EMPTY_LAST_CONTENT)
                        }
                    }
                } ?: sendMessageAndFlush(ctx, CacheValueNotFoundResponse())
            }
        }
    }

    override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
        (inProgressRequest as? InProgressPutRequest)?.rollback()
        super.exceptionCaught(ctx, cause)
    }
}
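Note added for context (not part of the change set): when compression is enabled, FileSystemCacheHandler wraps the file sink in a DeflaterOutputStream on the put path and reads entries back through an InflaterInputStream on the get path. A minimal, self-contained sketch of that round trip using only JDK classes — the payload and compression level are arbitrary, everything else follows the java.util.zip API:

import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.InflaterInputStream

fun main() {
    val original = "cache entry payload".repeat(100).toByteArray()

    // Compress on the write path, as the put branch does with the file sink stream
    val compressed = ByteArrayOutputStream().also { sink ->
        DeflaterOutputStream(sink, Deflater(Deflater.BEST_SPEED)).use { it.write(original) }
    }.toByteArray()

    // Decompress on the read path, as the get branch does when streaming chunks back
    val restored = InflaterInputStream(ByteArrayInputStream(compressed)).use { it.readBytes() }

    check(restored.contentEquals(original))
    println("original=${original.size} bytes, compressed=${compressed.size} bytes")
}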
@@ -12,7 +12,7 @@ import java.util.zip.Deflater

 class FileSystemCacheProvider : CacheProvider<FileSystemCacheConfiguration> {

-    override fun getXmlSchemaLocation() = "classpath:net/woggioni/rbcs/server/schema/rbcs.xsd"
+    override fun getXmlSchemaLocation() = "classpath:net/woggioni/rbcs/server/schema/rbcs-server.xsd"

     override fun getXmlType() = "fileSystemCacheType"

@@ -30,14 +30,14 @@ class FileSystemCacheProvider : CacheProvider<FileSystemCacheConfiguration> {
         val compressionLevel = el.renderAttribute("compression-level")
             ?.let(String::toInt)
             ?: Deflater.DEFAULT_COMPRESSION
-        val digestAlgorithm = el.renderAttribute("digest") ?: "MD5"
+        val digestAlgorithm = el.renderAttribute("digest")

         return FileSystemCacheConfiguration(
             path,
             maxAge,
             digestAlgorithm,
             enableCompression,
-            compressionLevel
+            compressionLevel,
         )
     }

@@ -46,7 +46,9 @@ class FileSystemCacheProvider : CacheProvider<FileSystemCacheConfiguration> {
         Xml.of(doc, result) {
             val prefix = doc.lookupPrefix(RBCS.RBCS_NAMESPACE_URI)
             attr("xs:type", "${prefix}:fileSystemCacheType", RBCS.XML_SCHEMA_NAMESPACE_URI)
-            attr("path", root.toString())
+            root?.let {
+                attr("path", it.toString())
+            }
             attr("max-age", maxAge.toString())
             digestAlgorithm?.let { digestAlgorithm ->
                 attr("digest", digestAlgorithm)
@@ -1,150 +1,143 @@
 package net.woggioni.rbcs.server.cache

 import io.netty.buffer.ByteBuf
-import net.woggioni.rbcs.api.Cache
-import net.woggioni.rbcs.common.ByteBufInputStream
-import net.woggioni.rbcs.common.ByteBufOutputStream
-import net.woggioni.rbcs.common.RBCS.digestString
-import net.woggioni.rbcs.common.contextLogger
-import net.woggioni.jwo.JWO
-import java.nio.channels.Channels
-import java.security.MessageDigest
+import net.woggioni.rbcs.api.AsyncCloseable
+import net.woggioni.rbcs.api.CacheValueMetadata
+import net.woggioni.rbcs.common.createLogger
 import java.time.Duration
 import java.time.Instant
+import java.util.PriorityQueue
 import java.util.concurrent.CompletableFuture
-import java.util.concurrent.ConcurrentHashMap
-import java.util.concurrent.PriorityBlockingQueue
-import java.util.concurrent.atomic.AtomicLong
-import java.util.zip.Deflater
-import java.util.zip.DeflaterOutputStream
-import java.util.zip.Inflater
-import java.util.zip.InflaterInputStream
+import java.util.concurrent.TimeUnit
+import java.util.concurrent.locks.ReentrantReadWriteLock
+import kotlin.concurrent.withLock
+
+private class CacheKey(private val value: ByteArray) {
+    override fun equals(other: Any?) = if (other is CacheKey) {
+        value.contentEquals(other.value)
+    } else false
+
+    override fun hashCode() = value.contentHashCode()
+}
+
+class CacheEntry(
+    val metadata: CacheValueMetadata,
+    val content: ByteBuf
+)

 class InMemoryCache(
-    val maxAge: Duration,
-    val maxSize: Long,
-    val digestAlgorithm: String?,
-    val compressionEnabled: Boolean,
-    val compressionLevel: Int
-) : Cache {
+    private val maxAge: Duration,
+    private val maxSize: Long
+) : AsyncCloseable {

     companion object {
-        @JvmStatic
-        private val log = contextLogger()
+        private val log = createLogger<InMemoryCache>()
     }

-    private val size = AtomicLong()
-    private val map = ConcurrentHashMap<String, ByteBuf>()
+    private var mapSize : Long = 0
+    private val map = HashMap<CacheKey, CacheEntry>()

-    private class RemovalQueueElement(val key: String, val value : ByteBuf, val expiry : Instant) : Comparable<RemovalQueueElement> {
+    private val lock = ReentrantReadWriteLock()
+    private val cond = lock.writeLock().newCondition()
+
+    private class RemovalQueueElement(val key: CacheKey, val value: CacheEntry, val expiry: Instant) :
+        Comparable<RemovalQueueElement> {
         override fun compareTo(other: RemovalQueueElement) = expiry.compareTo(other.expiry)
     }

-    private val removalQueue = PriorityBlockingQueue<RemovalQueueElement>()
+    private val removalQueue = PriorityQueue<RemovalQueueElement>()

+    @Volatile
     private var running = true
-    private val garbageCollector = Thread {
-        while(true) {
-            val el = removalQueue.take()
-            val buf = el.value
-            val now = Instant.now()
-            if(now > el.expiry) {
-                val removed = map.remove(el.key, buf)
-                if(removed) {
-                    updateSizeAfterRemoval(buf)
-                    //Decrease the reference count for map
-                    buf.release()
-                }
-                //Decrease the reference count for removalQueue
-                buf.release()
-            } else {
-                removalQueue.put(el)
-                Thread.sleep(minOf(Duration.between(now, el.expiry), Duration.ofSeconds(1)))
-            }
-        }
-    }.apply {
-        start()
-    }
+
+    private val closeFuture = object : CompletableFuture<Void>() {
+        init {
+            Thread.ofVirtual().name("in-memory-cache-gc").start {
+                try {
+                    lock.writeLock().withLock {
+                        while (running) {
+                            val el = removalQueue.poll()
+                            if(el == null) {
+                                cond.await(1000, TimeUnit.MILLISECONDS)
+                                continue
+                            }
+                            val value = el.value
+                            val now = Instant.now()
+                            if (now > el.expiry) {
+                                val removed = map.remove(el.key, value)
+                                if (removed) {
+                                    updateSizeAfterRemoval(value.content)
+                                    //Decrease the reference count for map
+                                    value.content.release()
+                                }
+                            } else {
+                                removalQueue.offer(el)
+                                val interval = minOf(Duration.between(now, el.expiry), Duration.ofSeconds(1))
+                                cond.await(interval.toMillis(), TimeUnit.MILLISECONDS)
+                            }
+                        }
+                        map.forEach {
+                            it.value.content.release()
+                        }
+                        map.clear()
+                    }
+                    complete(null)
+                } catch (ex: Throwable) {
+                    completeExceptionally(ex)
+                }
+            }
+        }
+    }

-    private fun removeEldest() : Long {
-        while(true) {
-            val el = removalQueue.take()
-            val buf = el.value
-            val removed = map.remove(el.key, buf)
-            //Decrease the reference count for removalQueue
-            buf.release()
-            if(removed) {
-                val newSize = updateSizeAfterRemoval(buf)
+    fun removeEldest(): Long {
+        while (true) {
+            val el = removalQueue.poll() ?: return mapSize
+            val value = el.value
+            val removed = map.remove(el.key, value)
+            if (removed) {
+                val newSize = updateSizeAfterRemoval(value.content)
                 //Decrease the reference count for map
-                buf.release()
+                value.content.release()
                 return newSize
             }
         }
     }

-    private fun updateSizeAfterRemoval(removed: ByteBuf) : Long {
-        return size.updateAndGet { currentSize : Long ->
-            currentSize - removed.readableBytes()
-        }
+    private fun updateSizeAfterRemoval(removed: ByteBuf): Long {
+        mapSize -= removed.readableBytes()
+        return mapSize
     }

-    override fun close() {
+    override fun asyncClose() : CompletableFuture<Void> {
         running = false
-        garbageCollector.join()
+        lock.writeLock().withLock {
+            cond.signal()
+        }
+        return closeFuture
     }

-    override fun get(key: String) =
-        (digestAlgorithm
-            ?.let(MessageDigest::getInstance)
-            ?.let { md ->
-                digestString(key.toByteArray(), md)
-            } ?: key
-        ).let { digest ->
-            map[digest]
-                ?.let { value ->
-                    val copy = value.retainedDuplicate()
-                    copy.touch("This has to be released by the caller of the cache")
-                    if (compressionEnabled) {
-                        val inflater = Inflater()
-                        Channels.newChannel(InflaterInputStream(ByteBufInputStream(copy), inflater))
-                    } else {
-                        Channels.newChannel(ByteBufInputStream(copy))
-                    }
-                }
-        }.let {
-            CompletableFuture.completedFuture(it)
-        }
-
-    override fun put(key: String, content: ByteBuf) =
-        (digestAlgorithm
-            ?.let(MessageDigest::getInstance)
-            ?.let { md ->
-                digestString(key.toByteArray(), md)
-            } ?: key).let { digest ->
-            content.retain()
-            val value = if (compressionEnabled) {
-                val deflater = Deflater(compressionLevel)
-                val buf = content.alloc().buffer()
-                buf.retain()
-                DeflaterOutputStream(ByteBufOutputStream(buf), deflater).use { outputStream ->
-                    ByteBufInputStream(content).use { inputStream ->
-                        JWO.copy(inputStream, outputStream)
-                    }
-                }
-                buf
-            } else {
-                content
-            }
-            val old = map.put(digest, value)
-            val delta = value.readableBytes() - (old?.readableBytes() ?: 0)
-            var newSize = size.updateAndGet { currentSize : Long ->
-                currentSize + delta
-            }
-            removalQueue.put(RemovalQueueElement(digest, value.retain(), Instant.now().plus(maxAge)))
-            while(newSize > maxSize) {
-                newSize = removeEldest()
-            }
-        }.let {
-            CompletableFuture.completedFuture<Void>(null)
-        }
+    fun get(key: ByteArray) = lock.readLock().withLock {
+        map[CacheKey(key)]?.run {
+            CacheEntry(metadata, content.retainedDuplicate())
+        }
+    }
+
+    fun put(
+        key: ByteArray,
+        value: CacheEntry,
+    ) {
+        val cacheKey = CacheKey(key)
+        lock.writeLock().withLock {
+            val oldSize = map.put(cacheKey, value)?.let { old ->
+                val result = old.content.readableBytes()
+                old.content.release()
+                result
+            } ?: 0
+            val delta = value.content.readableBytes() - oldSize
+            mapSize += delta
+            removalQueue.offer(RemovalQueueElement(cacheKey, value, Instant.now().plus(maxAge)))
+            while (mapSize > maxSize) {
+                removeEldest()
+            }
+        }
+    }
 }
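Aside, for readers skimming the rewrite above: a minimal self-contained sketch (illustrative only, not the project's API) of the bookkeeping pattern the new InMemoryCache uses — a lock-guarded map plus a PriorityQueue ordered by expiry, with eviction of the eldest entries while the tracked size exceeds the limit:

import java.time.Duration
import java.time.Instant
import java.util.PriorityQueue

class BoundedStore(private val maxAge: Duration, private val maxSize: Long) {
    private class Entry(val key: String, val payload: ByteArray, val expiry: Instant)

    private val map = HashMap<String, Entry>()
    private val queue = PriorityQueue<Entry>(compareBy { it.expiry })
    private var size = 0L

    @Synchronized
    fun put(key: String, payload: ByteArray) {
        val entry = Entry(key, payload, Instant.now().plus(maxAge))
        size += payload.size - (map.put(key, entry)?.payload?.size ?: 0)
        queue.offer(entry)
        // Evict eldest entries until the total tracked size is back under the limit
        while (size > maxSize) {
            val eldest = queue.poll() ?: break
            if (map[eldest.key] === eldest) {
                map.remove(eldest.key)
                size -= eldest.payload.size
            }
        }
    }

    @Synchronized
    fun get(key: String): ByteArray? =
        map[key]?.takeIf { it.expiry.isAfter(Instant.now()) }?.payload
}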
@@ -1,5 +1,10 @@
 package net.woggioni.rbcs.server.cache

+import io.netty.channel.ChannelFactory
+import io.netty.channel.EventLoopGroup
+import io.netty.channel.socket.DatagramChannel
+import io.netty.channel.socket.SocketChannel
+import net.woggioni.rbcs.api.CacheHandlerFactory
 import net.woggioni.rbcs.api.Configuration
 import net.woggioni.rbcs.common.RBCS
 import java.time.Duration
@@ -11,13 +16,18 @@ data class InMemoryCacheConfiguration(
     val compressionEnabled: Boolean,
     val compressionLevel: Int,
 ) : Configuration.Cache {
-    override fun materialize() = InMemoryCache(
-        maxAge,
-        maxSize,
-        digestAlgorithm,
-        compressionEnabled,
-        compressionLevel
-    )
+    override fun materialize() = object : CacheHandlerFactory {
+        private val cache = InMemoryCache(maxAge, maxSize)
+
+        override fun asyncClose() = cache.asyncClose()
+
+        override fun newHandler(
+            cfg : Configuration,
+            eventLoop: EventLoopGroup,
+            socketChannelFactory: ChannelFactory<SocketChannel>,
+            datagramChannelFactory: ChannelFactory<DatagramChannel>
+        ) = InMemoryCacheHandler(cache, digestAlgorithm, compressionEnabled, compressionLevel)
+    }

     override fun getNamespaceURI() = RBCS.RBCS_NAMESPACE_URI
140
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/InMemoryCacheHandler.kt
vendored
Normal file
@@ -0,0 +1,140 @@
package net.woggioni.rbcs.server.cache

import io.netty.buffer.ByteBuf
import io.netty.channel.ChannelHandlerContext
import net.woggioni.rbcs.api.CacheHandler
import net.woggioni.rbcs.api.message.CacheMessage
import net.woggioni.rbcs.api.message.CacheMessage.*
import net.woggioni.rbcs.common.ByteBufOutputStream
import net.woggioni.rbcs.common.RBCS.processCacheKey
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.InflaterOutputStream

class InMemoryCacheHandler(
    private val cache: InMemoryCache,
    private val digestAlgorithm: String?,
    private val compressionEnabled: Boolean,
    private val compressionLevel: Int
) : CacheHandler() {

    private interface InProgressRequest : AutoCloseable {
    }

    private class InProgressGetRequest(val request : CacheGetRequest) : InProgressRequest {
        override fun close() {
        }
    }

    private interface InProgressPutRequest : InProgressRequest {
        val request: CachePutRequest
        val buf: ByteBuf

        fun append(buf: ByteBuf)
    }

    private inner class InProgressPlainPutRequest(ctx: ChannelHandlerContext, override val request: CachePutRequest) :
        InProgressPutRequest {
        override val buf = ctx.alloc().compositeHeapBuffer()

        override fun append(buf: ByteBuf) {
            if(buf.isDirect) {
                this.buf.writeBytes(buf)
            } else {
                this.buf.addComponent(true, buf.retain())
            }
        }

        override fun close() {
            buf.release()
        }
    }

    private inner class InProgressCompressedPutRequest(
        ctx: ChannelHandlerContext,
        override val request: CachePutRequest
    ) : InProgressPutRequest {

        override val buf = ctx.alloc().heapBuffer()

        private val stream = ByteBufOutputStream(buf).let {
            DeflaterOutputStream(it, Deflater(compressionLevel))
        }

        override fun append(buf: ByteBuf) {
            buf.readBytes(stream, buf.readableBytes())
        }

        override fun close() {
            stream.close()
        }
    }

    private var inProgressRequest: InProgressRequest? = null

    override fun channelRead0(ctx: ChannelHandlerContext, msg: CacheMessage) {
        when (msg) {
            is CacheGetRequest -> handleGetRequest(ctx, msg)
            is CachePutRequest -> handlePutRequest(ctx, msg)
            is LastCacheContent -> handleLastCacheContent(ctx, msg)
            is CacheContent -> handleCacheContent(ctx, msg)
            else -> ctx.fireChannelRead(msg)
        }
    }

    private fun handleGetRequest(ctx: ChannelHandlerContext, msg: CacheGetRequest) {
        inProgressRequest = InProgressGetRequest(msg)
    }

    private fun handlePutRequest(ctx: ChannelHandlerContext, msg: CachePutRequest) {
        inProgressRequest = if(compressionEnabled) {
            InProgressCompressedPutRequest(ctx, msg)
        } else {
            InProgressPlainPutRequest(ctx, msg)
        }
    }

    private fun handleCacheContent(ctx: ChannelHandlerContext, msg: CacheContent) {
        val req = inProgressRequest
        if(req is InProgressPutRequest) {
            req.append(msg.content())
        }
    }

    private fun handleLastCacheContent(ctx: ChannelHandlerContext, msg: LastCacheContent) {
        handleCacheContent(ctx, msg)
        when(val req = inProgressRequest) {
            is InProgressGetRequest -> {
                cache.get(processCacheKey(req.request.key, digestAlgorithm))?.let { value ->
                    sendMessageAndFlush(ctx, CacheValueFoundResponse(req.request.key, value.metadata))
                    if (compressionEnabled) {
                        val buf = ctx.alloc().heapBuffer()
                        InflaterOutputStream(ByteBufOutputStream(buf)).use {
                            value.content.readBytes(it, value.content.readableBytes())
                            value.content.release()
                            buf.retain()
                        }
                        sendMessage(ctx, LastCacheContent(buf))
                    } else {
                        sendMessage(ctx, LastCacheContent(value.content))
                    }
                } ?: sendMessage(ctx, CacheValueNotFoundResponse())
            }
            is InProgressPutRequest -> {
                this.inProgressRequest = null
                val buf = req.buf
                buf.retain()
                req.close()
                val cacheKey = processCacheKey(req.request.key, digestAlgorithm)
                cache.put(cacheKey, CacheEntry(req.request.metadata, buf))
                sendMessageAndFlush(ctx, CachePutResponse(req.request.key))
            }
        }
    }

    override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
        inProgressRequest?.close()
        inProgressRequest = null
        super.exceptionCaught(ctx, cause)
    }
}
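For reference, a small standalone sketch (assumes the default Netty allocator; not project code) of the CompositeByteBuf pattern used by InProgressPlainPutRequest above, which attaches heap buffers as components instead of copying them:

import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.Unpooled

fun main() {
    val allocator = ByteBufAllocator.DEFAULT
    val composite = allocator.compositeHeapBuffer()

    val heapChunk = Unpooled.wrappedBuffer("heap chunk ".toByteArray())
    // Heap buffers are added as components: no copy, one reference is handed to the composite
    composite.addComponent(true, heapChunk.retain())

    val directChunk = allocator.directBuffer().writeBytes("direct chunk".toByteArray())
    // Direct buffers are copied into the composite instead
    composite.writeBytes(directChunk)
    directChunk.release()

    println(composite.toString(Charsets.UTF_8))
    composite.release()
    heapChunk.release()
}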
@@ -11,7 +11,7 @@ import java.util.zip.Deflater

 class InMemoryCacheProvider : CacheProvider<InMemoryCacheConfiguration> {

-    override fun getXmlSchemaLocation() = "classpath:net/woggioni/rbcs/server/schema/rbcs.xsd"
+    override fun getXmlSchemaLocation() = "classpath:net/woggioni/rbcs/server/schema/rbcs-server.xsd"

     override fun getXmlType() = "inMemoryCacheType"

@@ -30,14 +30,13 @@ class InMemoryCacheProvider : CacheProvider<InMemoryCacheConfiguration> {
         val compressionLevel = el.renderAttribute("compression-level")
             ?.let(String::toInt)
             ?: Deflater.DEFAULT_COMPRESSION
-        val digestAlgorithm = el.renderAttribute("digest") ?: "MD5"
+        val digestAlgorithm = el.renderAttribute("digest")

         return InMemoryCacheConfiguration(
             maxAge,
             maxSize,
             digestAlgorithm,
             enableCompression,
-            compressionLevel
+            compressionLevel,
         )
     }
@@ -27,12 +27,11 @@ object Parser {
         val root = document.documentElement
         val anonymousUser = User("", null, emptySet(), null)
         var connection: Configuration.Connection = Configuration.Connection(
-            Duration.of(10, ChronoUnit.SECONDS),
-            Duration.of(10, ChronoUnit.SECONDS),
+            Duration.of(30, ChronoUnit.SECONDS),
             Duration.of(60, ChronoUnit.SECONDS),
-            Duration.of(30, ChronoUnit.SECONDS),
-            Duration.of(30, ChronoUnit.SECONDS),
-            67108864
+            Duration.of(60, ChronoUnit.SECONDS),
+            0x4000000,
+            0x10000
         )
         var eventExecutor: Configuration.EventExecutor = Configuration.EventExecutor(true)
         var cache: Cache? = null
@@ -113,10 +112,6 @@ object Parser {
                     }

                     "connection" -> {
-                        val writeTimeout = child.renderAttribute("write-timeout")
-                            ?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
-                        val readTimeout = child.renderAttribute("read-timeout")
-                            ?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
                         val idleTimeout = child.renderAttribute("idle-timeout")
                             ?.let(Duration::parse) ?: Duration.of(30, ChronoUnit.SECONDS)
                         val readIdleTimeout = child.renderAttribute("read-idle-timeout")
@@ -124,14 +119,15 @@ object Parser {
                         val writeIdleTimeout = child.renderAttribute("write-idle-timeout")
                             ?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
                         val maxRequestSize = child.renderAttribute("max-request-size")
-                            ?.let(String::toInt) ?: 67108864
+                            ?.let(Integer::decode) ?: 0x4000000
+                        val chunkSize = child.renderAttribute("chunk-size")
+                            ?.let(Integer::decode) ?: 0x10000
                         connection = Configuration.Connection(
-                            readTimeout,
-                            writeTimeout,
                             idleTimeout,
                             readIdleTimeout,
                             writeIdleTimeout,
-                            maxRequestSize
+                            maxRequestSize,
+                            chunkSize
                         )
                     }
@@ -201,6 +197,7 @@ object Parser {
                             when (it.localName) {
                                 "reader" -> Role.Reader
                                 "writer" -> Role.Writer
+                                "healthcheck" -> Role.Healthcheck
                                 else -> throw UnsupportedOperationException("Illegal node '${it.localName}'")
                             }
                         }.toSet()
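Side note (illustrative, not part of the diff): switching the attribute parsers from String::toInt to Integer::decode is what allows hexadecimal values such as 0x4000000 for max-request-size and chunk-size, since decode understands decimal, hex and octal prefixes:

fun main() {
    println(Integer.decode("67108864"))   // 67108864
    println(Integer.decode("0x4000000"))  // 67108864
    println(Integer.decode("0x10000"))    // 65536
    // "0x4000000".toInt() would throw NumberFormatException instead
}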
@@ -36,12 +36,11 @@ object Serializer {
             }
             node("connection") {
                 conf.connection.let { connection ->
-                    attr("read-timeout", connection.readTimeout.toString())
-                    attr("write-timeout", connection.writeTimeout.toString())
                     attr("idle-timeout", connection.idleTimeout.toString())
                     attr("read-idle-timeout", connection.readIdleTimeout.toString())
                     attr("write-idle-timeout", connection.writeIdleTimeout.toString())
                     attr("max-request-size", connection.maxRequestSize.toString())
+                    attr("chunk-size", connection.chunkSize.toString())
                 }
             }
             node("event-executor") {
@@ -3,7 +3,7 @@ package net.woggioni.rbcs.server.exception
 import io.netty.buffer.Unpooled
 import io.netty.channel.ChannelDuplexHandler
 import io.netty.channel.ChannelFutureListener
-import io.netty.channel.ChannelHandler
+import io.netty.channel.ChannelHandler.Sharable
 import io.netty.channel.ChannelHandlerContext
 import io.netty.handler.codec.DecoderException
 import io.netty.handler.codec.http.DefaultFullHttpResponse
@@ -17,10 +17,19 @@ import net.woggioni.rbcs.api.exception.CacheException
 import net.woggioni.rbcs.api.exception.ContentTooLargeException
 import net.woggioni.rbcs.common.contextLogger
 import net.woggioni.rbcs.common.debug
+import net.woggioni.rbcs.common.log
+import org.slf4j.event.Level
+import org.slf4j.spi.LoggingEventBuilder
+import java.net.ConnectException
+import java.net.SocketException
+import javax.net.ssl.SSLException
 import javax.net.ssl.SSLPeerUnverifiedException

-@ChannelHandler.Sharable
-class ExceptionHandler : ChannelDuplexHandler() {
+@Sharable
+object ExceptionHandler : ChannelDuplexHandler() {
+
+    val NAME : String = this::class.java.name

     private val log = contextLogger()

     private val NOT_AUTHORIZED: FullHttpResponse = DefaultFullHttpResponse(
@@ -29,12 +38,6 @@ class ExceptionHandler : ChannelDuplexHandler() {
         headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
     }

-    private val TOO_BIG: FullHttpResponse = DefaultFullHttpResponse(
-        HttpVersion.HTTP_1_1, HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, Unpooled.EMPTY_BUFFER
-    ).apply {
-        headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
-    }
-
     private val NOT_AVAILABLE: FullHttpResponse = DefaultFullHttpResponse(
         HttpVersion.HTTP_1_1, HttpResponseStatus.SERVICE_UNAVAILABLE, Unpooled.EMPTY_BUFFER
     ).apply {
@@ -47,10 +50,26 @@ class ExceptionHandler : ChannelDuplexHandler() {
         headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
     }

+    private val TOO_BIG: FullHttpResponse = DefaultFullHttpResponse(
+        HttpVersion.HTTP_1_1, HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, Unpooled.EMPTY_BUFFER
+    ).apply {
+        headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
+    }
+
     override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
         when (cause) {
             is DecoderException -> {
+                log.debug(cause.message, cause)
+                ctx.close()
+            }
+
+            is ConnectException -> {
                 log.error(cause.message, cause)
+                ctx.writeAndFlush(SERVER_ERROR.retainedDuplicate())
+            }
+
+            is SocketException -> {
+                log.debug(cause.message, cause)
                 ctx.close()
             }

@@ -59,10 +78,19 @@ class ExceptionHandler : ChannelDuplexHandler() {
                     .addListener(ChannelFutureListener.CLOSE_ON_FAILURE)
             }

+            is SSLException -> {
+                log.debug(cause.message, cause)
+                ctx.close()
+            }
+
             is ContentTooLargeException -> {
+                log.log(Level.DEBUG, ctx.channel()) { builder : LoggingEventBuilder ->
+                    builder.setMessage("Request body is too large")
+                }
                 ctx.writeAndFlush(TOO_BIG.retainedDuplicate())
                     .addListener(ChannelFutureListener.CLOSE_ON_FAILURE)
             }

             is ReadTimeoutException -> {
                 log.debug {
                     val channelId = ctx.channel().id().asShortText()
@@ -70,6 +98,7 @@ class ExceptionHandler : ChannelDuplexHandler() {
                 }
                 ctx.close()
             }
+
             is WriteTimeoutException -> {
                 log.debug {
                     val channelId = ctx.channel().id().asShortText()
@@ -77,11 +106,13 @@ class ExceptionHandler : ChannelDuplexHandler() {
                 }
                 ctx.close()
             }
+
             is CacheException -> {
                 log.error(cause.message, cause)
                 ctx.writeAndFlush(NOT_AVAILABLE.retainedDuplicate())
                     .addListener(ChannelFutureListener.CLOSE_ON_FAILURE)
             }
+
             else -> {
                 log.error(cause.message, cause)
                 ctx.writeAndFlush(SERVER_ERROR.retainedDuplicate())
@@ -0,0 +1,13 @@
package net.woggioni.rbcs.server.handler

import io.netty.channel.ChannelHandlerContext
import io.netty.channel.SimpleChannelInboundHandler
import io.netty.handler.codec.http.HttpContent

class BlackHoleRequestHandler : SimpleChannelInboundHandler<HttpContent>() {
    companion object {
        val NAME = BlackHoleRequestHandler::class.java.name
    }
    override fun channelRead0(ctx: ChannelHandlerContext, msg: HttpContent) {
    }
}
@@ -0,0 +1,40 @@
package net.woggioni.rbcs.server.handler

import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.handler.codec.http.HttpContent
import io.netty.handler.codec.http.HttpRequest
import net.woggioni.rbcs.api.exception.ContentTooLargeException


class MaxRequestSizeHandler(private val maxRequestSize : Int) : ChannelInboundHandlerAdapter() {
    companion object {
        val NAME = MaxRequestSizeHandler::class.java.name
    }

    private var cumulativeSize = 0

    override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
        when(msg) {
            is HttpRequest -> {
                cumulativeSize = 0
                ctx.fireChannelRead(msg)
            }
            is HttpContent -> {
                val exceeded = cumulativeSize > maxRequestSize
                if(!exceeded) {
                    cumulativeSize += msg.content().readableBytes()
                }
                if(cumulativeSize > maxRequestSize) {
                    msg.release()
                    if(!exceeded) {
                        ctx.fireExceptionCaught(ContentTooLargeException("Request body is too large", null))
                    }
                } else {
                    ctx.fireChannelRead(msg)
                }
            }
            else -> ctx.fireChannelRead(msg)
        }
    }
}
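A quick sanity-check sketch (assumed test code, not part of the change) of how MaxRequestSizeHandler could be exercised with Netty's EmbeddedChannel; the 16-byte limit and the request path are arbitrary:

import io.netty.buffer.Unpooled
import io.netty.channel.embedded.EmbeddedChannel
import io.netty.handler.codec.http.DefaultHttpContent
import io.netty.handler.codec.http.DefaultHttpRequest
import io.netty.handler.codec.http.HttpMethod
import io.netty.handler.codec.http.HttpVersion

fun main() {
    val channel = EmbeddedChannel(MaxRequestSizeHandler(16))
    channel.writeInbound(DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.PUT, "/cache/some-key"))
    // First chunk stays under the limit and is propagated downstream
    channel.writeInbound(DefaultHttpContent(Unpooled.wrappedBuffer(ByteArray(10))))
    // Second chunk pushes the cumulative size past the limit:
    // the handler releases it and fires ContentTooLargeException instead
    runCatching {
        channel.writeInbound(DefaultHttpContent(Unpooled.wrappedBuffer(ByteArray(10))))
        channel.checkException()
    }.onFailure { println("rejected: ${it.message}") }
}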
@@ -1,95 +1,197 @@
 package net.woggioni.rbcs.server.handler

-import io.netty.buffer.Unpooled
-import io.netty.channel.ChannelFutureListener
+import io.netty.channel.ChannelDuplexHandler
 import io.netty.channel.ChannelHandler
 import io.netty.channel.ChannelHandlerContext
-import io.netty.channel.DefaultFileRegion
-import io.netty.channel.SimpleChannelInboundHandler
+import io.netty.channel.ChannelPromise
 import io.netty.handler.codec.http.DefaultFullHttpResponse
+import io.netty.handler.codec.http.DefaultHttpContent
 import io.netty.handler.codec.http.DefaultHttpResponse
-import io.netty.handler.codec.http.FullHttpRequest
+import io.netty.handler.codec.http.DefaultLastHttpContent
+import io.netty.handler.codec.http.HttpContent
 import io.netty.handler.codec.http.HttpHeaderNames
 import io.netty.handler.codec.http.HttpHeaderValues
+import io.netty.handler.codec.http.HttpHeaders
 import io.netty.handler.codec.http.HttpMethod
+import io.netty.handler.codec.http.HttpRequest
 import io.netty.handler.codec.http.HttpResponseStatus
 import io.netty.handler.codec.http.HttpUtil
+import io.netty.handler.codec.http.HttpVersion
 import io.netty.handler.codec.http.LastHttpContent
-import io.netty.handler.stream.ChunkedNioStream
-import net.woggioni.rbcs.api.Cache
-import net.woggioni.rbcs.common.contextLogger
-import net.woggioni.rbcs.server.debug
-import net.woggioni.rbcs.server.warn
-import java.nio.channels.FileChannel
+import net.woggioni.rbcs.api.CacheValueMetadata
+import net.woggioni.rbcs.api.message.CacheMessage
+import net.woggioni.rbcs.api.message.CacheMessage.CacheContent
+import net.woggioni.rbcs.api.message.CacheMessage.CacheGetRequest
+import net.woggioni.rbcs.api.message.CacheMessage.CachePutRequest
+import net.woggioni.rbcs.api.message.CacheMessage.CachePutResponse
+import net.woggioni.rbcs.api.message.CacheMessage.CacheValueFoundResponse
+import net.woggioni.rbcs.api.message.CacheMessage.CacheValueNotFoundResponse
+import net.woggioni.rbcs.api.message.CacheMessage.LastCacheContent
+import net.woggioni.rbcs.common.createLogger
+import net.woggioni.rbcs.common.debug
+import net.woggioni.rbcs.common.warn
+import net.woggioni.rbcs.server.exception.ExceptionHandler
 import java.nio.file.Path

-@ChannelHandler.Sharable
-class ServerHandler(private val cache: Cache, private val serverPrefix: Path) :
-    SimpleChannelInboundHandler<FullHttpRequest>() {
+class ServerHandler(private val serverPrefix: Path, private val cacheHandlerSupplier : () -> ChannelHandler) :
+    ChannelDuplexHandler() {

-    private val log = contextLogger()
+    companion object {
+        private val log = createLogger<ServerHandler>()
+        val NAME = ServerHandler::class.java.name
+    }

-    override fun channelRead0(ctx: ChannelHandlerContext, msg: FullHttpRequest) {
-        val keepAlive: Boolean = HttpUtil.isKeepAlive(msg)
-        val method = msg.method()
-        if (method === HttpMethod.GET) {
-            val path = Path.of(msg.uri())
-            val prefix = path.parent
-            val key = path.fileName?.toString() ?: let {
-                val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.NOT_FOUND)
-                response.headers()[HttpHeaderNames.CONTENT_LENGTH] = 0
-                ctx.writeAndFlush(response)
-                return
-            }
-            if (serverPrefix == prefix) {
-                cache.get(key).thenApply { channel ->
-                    if(channel != null) {
-                        log.debug(ctx) {
-                            "Cache hit for key '$key'"
-                        }
-                        val response = DefaultHttpResponse(msg.protocolVersion(), HttpResponseStatus.OK)
-                        response.headers()[HttpHeaderNames.CONTENT_TYPE] = HttpHeaderValues.APPLICATION_OCTET_STREAM
-                        if (!keepAlive) {
-                            response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE)
-                            response.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.IDENTITY)
-                        } else {
-                            response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
-                            response.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED)
-                        }
-                        ctx.write(response)
-                        when (channel) {
-                            is FileChannel -> {
-                                val content = DefaultFileRegion(channel, 0, channel.size())
-                                if (keepAlive) {
-                                    ctx.write(content)
-                                    ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT.retainedDuplicate())
-                                } else {
-                                    ctx.writeAndFlush(content)
-                                        .addListener(ChannelFutureListener.CLOSE)
-                                }
-                            }
-                            else -> {
-                                val content = ChunkedNioStream(channel)
-                                if (keepAlive) {
-                                    ctx.write(content).addListener {
-                                        content.close()
-                                    }
-                                    ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT.retainedDuplicate())
-                                } else {
-                                    ctx.writeAndFlush(content)
-                                        .addListener(ChannelFutureListener.CLOSE)
-                                }
-                            }
-                        }
-                    } else {
-                        log.debug(ctx) {
-                            "Cache miss for key '$key'"
-                        }
-                        val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.NOT_FOUND)
-                        response.headers()[HttpHeaderNames.CONTENT_LENGTH] = 0
-                        ctx.writeAndFlush(response)
-                    }
-                }.whenComplete { _, ex -> ex?.let(ctx::fireExceptionCaught) }
+    private var httpVersion = HttpVersion.HTTP_1_1
+    private var keepAlive = true
+    private var pipelinedRequests = 0
+
+    private fun newRequest() {
+        pipelinedRequests += 1
+    }
+
+    private fun requestCompleted(ctx : ChannelHandlerContext) {
+        pipelinedRequests -= 1
+        if(pipelinedRequests == 0) ctx.read()
+    }
+
+    private fun resetRequestMetadata() {
+        httpVersion = HttpVersion.HTTP_1_1
+        keepAlive = true
+    }
+
+    private fun setRequestMetadata(req: HttpRequest) {
+        httpVersion = req.protocolVersion()
+        keepAlive = HttpUtil.isKeepAlive(req)
+    }
+
+    private fun setKeepAliveHeader(headers: HttpHeaders) {
+        if (!keepAlive) {
+            headers.set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE)
+        } else {
+            headers.set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
+        }
+    }
+
+    private var cacheRequestInProgress : Boolean = false
+
+    override fun handlerAdded(ctx: ChannelHandlerContext) {
+        ctx.read()
+    }
+
+    override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
+        when (msg) {
+            is HttpRequest -> handleRequest(ctx, msg)
+            is HttpContent -> {
+                if(cacheRequestInProgress) {
+                    if(msg is LastHttpContent) {
+                        super.channelRead(ctx, LastCacheContent(msg.content().retain()))
+                        cacheRequestInProgress = false
+                    } else {
+                        super.channelRead(ctx, CacheContent(msg.content().retain()))
+                    }
+                    msg.release()
+                } else {
+                    super.channelRead(ctx, msg)
+                }
+            }
+            else -> super.channelRead(ctx, msg)
+        }
+    }
+
+    override fun channelReadComplete(ctx: ChannelHandlerContext) {
+        super.channelReadComplete(ctx)
+        if(cacheRequestInProgress) {
+            ctx.read()
+        }
+    }
+
+    override fun write(ctx: ChannelHandlerContext, msg: Any, promise: ChannelPromise?) {
+        if (msg is CacheMessage) {
+            try {
+                when (msg) {
+                    is CachePutResponse -> {
+                        val response = DefaultFullHttpResponse(httpVersion, HttpResponseStatus.CREATED)
+                        val keyBytes = msg.key.toByteArray(Charsets.UTF_8)
+                        response.headers().apply {
+                            set(HttpHeaderNames.CONTENT_TYPE, HttpHeaderValues.TEXT_PLAIN)
+                            set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED)
+                        }
+                        setKeepAliveHeader(response.headers())
+                        ctx.write(response)
+                        val buf = ctx.alloc().buffer(keyBytes.size).apply {
+                            writeBytes(keyBytes)
+                        }
+                        ctx.writeAndFlush(DefaultLastHttpContent(buf)).also {
+                            requestCompleted(ctx)
+                        }
+                    }
+
+                    is CacheValueNotFoundResponse -> {
+                        val response = DefaultFullHttpResponse(httpVersion, HttpResponseStatus.NOT_FOUND)
+                        response.headers()[HttpHeaderNames.CONTENT_LENGTH] = 0
+                        setKeepAliveHeader(response.headers())
+                        ctx.writeAndFlush(response).also {
+                            requestCompleted(ctx)
+                        }
+                    }
+
+                    is CacheValueFoundResponse -> {
+                        val response = DefaultHttpResponse(httpVersion, HttpResponseStatus.OK)
+                        response.headers().apply {
+                            set(HttpHeaderNames.CONTENT_TYPE, msg.metadata.mimeType ?: HttpHeaderValues.APPLICATION_OCTET_STREAM)
+                            msg.metadata.contentDisposition?.let { contentDisposition ->
+                                set(HttpHeaderNames.CONTENT_DISPOSITION, contentDisposition)
+                            }
+                        }
+                        setKeepAliveHeader(response.headers())
+                        response.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED)
+                        ctx.writeAndFlush(response)
+                    }
+
+                    is LastCacheContent -> {
+                        ctx.writeAndFlush(DefaultLastHttpContent(msg.content())).also {
+                            requestCompleted(ctx)
+                        }
+                    }
+
+                    is CacheContent -> {
+                        ctx.writeAndFlush(DefaultHttpContent(msg.content()))
+                    }
+
+                    else -> throw UnsupportedOperationException("This should never happen")
+                }.let { channelFuture ->
+                    if (promise != null) {
+                        channelFuture.addListener {
+                            if (it.isSuccess) promise.setSuccess()
+                            else promise.setFailure(it.cause())
+                        }
+                    }
+                }
+            } finally {
+                resetRequestMetadata()
+            }
+        } else if(msg is LastHttpContent) {
+            ctx.write(msg, promise)
+            requestCompleted(ctx)
+        } else super.write(ctx, msg, promise)
+    }
+
+    private fun handleRequest(ctx: ChannelHandlerContext, msg: HttpRequest) {
+        setRequestMetadata(msg)
+        val method = msg.method()
+        if (method === HttpMethod.GET) {
+            val path = Path.of(msg.uri()).normalize()
+            if (path.startsWith(serverPrefix)) {
+                cacheRequestInProgress = true
+                val relativePath = serverPrefix.relativize(path)
+                val key : String = relativePath.toString()
+                newRequest()
+                val cacheHandler = cacheHandlerSupplier()
+                ctx.pipeline().addBefore(ExceptionHandler.NAME, null, cacheHandler)
+                key.let(::CacheGetRequest)
+                    .let(ctx::fireChannelRead)
+                    ?: ctx.channel().write(CacheValueNotFoundResponse())
             } else {
                 log.warn(ctx) {
                     "Got request for unhandled path '${msg.uri()}'"
@@ -99,24 +201,25 @@ class ServerHandler(private val cache: Cache, private val serverPrefix: Path) :
                 ctx.writeAndFlush(response)
             }
         } else if (method === HttpMethod.PUT) {
-            val path = Path.of(msg.uri())
-            val prefix = path.parent
-            val key = path.fileName.toString()
-
-            if (serverPrefix == prefix) {
+            val path = Path.of(msg.uri()).normalize()
+            if (path.startsWith(serverPrefix)) {
+                cacheRequestInProgress = true
+                val relativePath = serverPrefix.relativize(path)
+                val key = relativePath.toString()
                 log.debug(ctx) {
                     "Added value for key '$key' to build cache"
                 }
-                cache.put(key, msg.content()).thenRun {
-                    val response = DefaultFullHttpResponse(
-                        msg.protocolVersion(), HttpResponseStatus.CREATED,
-                        Unpooled.copiedBuffer(key.toByteArray())
-                    )
-                    response.headers()[HttpHeaderNames.CONTENT_LENGTH] = response.content().readableBytes()
-                    ctx.writeAndFlush(response)
-                }.whenComplete { _, ex ->
-                    ctx.fireExceptionCaught(ex)
-                }
+                newRequest()
+                val cacheHandler = cacheHandlerSupplier()
+                ctx.pipeline().addBefore(ExceptionHandler.NAME, null, cacheHandler)
+
+                path.fileName?.toString()
+                    ?.let {
+                        val mimeType = HttpUtil.getMimeType(msg)?.toString()
+                        CachePutRequest(key, CacheValueMetadata(msg.headers().get(HttpHeaderNames.CONTENT_DISPOSITION), mimeType))
+                    }
+                    ?.let(ctx::fireChannelRead)
+                    ?: ctx.channel().write(CacheValueNotFoundResponse())
             } else {
                 log.warn(ctx) {
                     "Got request for unhandled path '${msg.uri()}'"
@@ -125,30 +228,10 @@ class ServerHandler(private val cache: Cache, private val serverPrefix: Path) :
                 response.headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
                 ctx.writeAndFlush(response)
             }
-        } else if(method == HttpMethod.TRACE) {
-            val replayedRequestHead = ctx.alloc().buffer()
-            replayedRequestHead.writeCharSequence("TRACE ${Path.of(msg.uri())} ${msg.protocolVersion().text()}\r\n", Charsets.US_ASCII)
-            msg.headers().forEach { (key, value) ->
-                replayedRequestHead.apply {
-                    writeCharSequence(key, Charsets.US_ASCII)
-                    writeCharSequence(": ", Charsets.US_ASCII)
-                    writeCharSequence(value, Charsets.UTF_8)
-                    writeCharSequence("\r\n", Charsets.US_ASCII)
-                }
-            }
-            replayedRequestHead.writeCharSequence("\r\n", Charsets.US_ASCII)
-            val requestBody = msg.content()
-            requestBody.retain()
-            val responseBody = ctx.alloc().compositeBuffer(2).apply {
-                addComponents(true, replayedRequestHead)
-                addComponents(true, requestBody)
-            }
-            val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.OK, responseBody)
-            response.headers().apply {
-                set(HttpHeaderNames.CONTENT_TYPE, "message/http")
-                set(HttpHeaderNames.CONTENT_LENGTH, responseBody.readableBytes())
-            }
-            ctx.writeAndFlush(response)
+        } else if (method == HttpMethod.TRACE) {
+            newRequest()
+            ctx.pipeline().addBefore(ExceptionHandler.NAME, null, TraceHandler)
+            super.channelRead(ctx, msg)
         } else {
             log.warn(ctx) {
                 "Got request with unhandled method '${msg.method().name()}'"
@@ -158,4 +241,8 @@ class ServerHandler(private val cache: Cache, private val serverPrefix: Path) :
             ctx.writeAndFlush(response)
         }
     }
+
+    override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
+        super.exceptionCaught(ctx, cause)
+    }
 }
@@ -0,0 +1,55 @@
package net.woggioni.rbcs.server.handler

import io.netty.channel.ChannelHandler.Sharable
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.handler.codec.http.DefaultHttpResponse
import io.netty.handler.codec.http.HttpContent
import io.netty.handler.codec.http.HttpHeaderNames
import io.netty.handler.codec.http.HttpHeaderValues
import io.netty.handler.codec.http.HttpRequest
import io.netty.handler.codec.http.HttpResponseStatus
import io.netty.handler.codec.http.LastHttpContent
import java.nio.file.Path

@Sharable
object TraceHandler : ChannelInboundHandlerAdapter() {
    val NAME = this::class.java.name

    override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
        when(msg) {
            is HttpRequest -> {
                val response = DefaultHttpResponse(msg.protocolVersion(), HttpResponseStatus.OK)
                response.headers().apply {
                    set(HttpHeaderNames.CONTENT_TYPE, "message/http")
                    set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED)
                }
                ctx.write(response)
                val replayedRequestHead = ctx.alloc().buffer()
                replayedRequestHead.writeCharSequence(
                    "TRACE ${Path.of(msg.uri())} ${msg.protocolVersion().text()}\r\n",
                    Charsets.US_ASCII
                )
                msg.headers().forEach { (key, value) ->
                    replayedRequestHead.apply {
                        writeCharSequence(key, Charsets.US_ASCII)
                        writeCharSequence(": ", Charsets.US_ASCII)
                        writeCharSequence(value, Charsets.UTF_8)
                        writeCharSequence("\r\n", Charsets.US_ASCII)
                    }
                }
                replayedRequestHead.writeCharSequence("\r\n", Charsets.US_ASCII)
                ctx.writeAndFlush(replayedRequestHead)
            }
            is LastHttpContent -> {
                ctx.writeAndFlush(msg)
                ctx.pipeline().remove(this)
            }
            is HttpContent -> ctx.writeAndFlush(msg)
            else -> super.channelRead(ctx, msg)
        }
    }

    override fun exceptionCaught(ctx: ChannelHandlerContext?, cause: Throwable?) {
        super.exceptionCaught(ctx, cause)
    }
}
Some files were not shown because too many files have changed in this diff.