Compare commits: 0.2.0-beta...client-eve (1 commit)

Author | SHA1 | Date
---|---|---
 | ba961bd30d | 
@@ -31,7 +31,7 @@ jobs:
 username: woggioni
 password: ${{ secrets.PUBLISHER_TOKEN }}
 -
-name: Build rbcs Docker image
+name: Build gbcs Docker image
 uses: docker/build-push-action@v5.3.0
 with:
 context: "docker/build/docker"
@@ -39,12 +39,12 @@ jobs:
 push: true
 pull: true
 tags: |
-gitea.woggioni.net/woggioni/rbcs:vanilla
+gitea.woggioni.net/woggioni/gbcs:latest
-gitea.woggioni.net/woggioni/rbcs:vanilla-${{ steps.retrieve-version.outputs.VERSION }}
+gitea.woggioni.net/woggioni/gbcs:${{ steps.retrieve-version.outputs.VERSION }}
-target: release-vanilla
+target: release
-cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
+cache-from: type=registry,ref=gitea.woggioni.net/woggioni/gbcs:buildx
 -
-name: Build rbcs memcache Docker image
+name: Build gbcs memcached Docker image
 uses: docker/build-push-action@v5.3.0
 with:
 context: "docker/build/docker"
@@ -52,25 +52,11 @@ jobs:
 push: true
 pull: true
 tags: |
-gitea.woggioni.net/woggioni/rbcs:memcache
+gitea.woggioni.net/woggioni/gbcs:memcached
-gitea.woggioni.net/woggioni/rbcs:memcache-${{ steps.retrieve-version.outputs.VERSION }}
+gitea.woggioni.net/woggioni/gbcs:memcached-${{ steps.retrieve-version.outputs.VERSION }}
-target: release-memcache
+target: release-memcached
-cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
+cache-from: type=registry,ref=gitea.woggioni.net/woggioni/gbcs:buildx
-cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/rbcs:buildx
+cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/gbcs:buildx
--
-name: Build rbcs memcache Docker image
-uses: docker/build-push-action@v5.3.0
-with:
-context: "docker/build/docker"
-platforms: linux/amd64
-push: true
-pull: true
-tags: |
-gitea.woggioni.net/woggioni/rbcs:latest
-gitea.woggioni.net/woggioni/rbcs:${{ steps.retrieve-version.outputs.VERSION }}
-gitea.woggioni.net/woggioni/rbcs:native
-gitea.woggioni.net/woggioni/rbcs:native-${{ steps.retrieve-version.outputs.VERSION }}
-target: release-native
 - name: Publish artifacts
 env:
 PUBLISHER_TOKEN: ${{ secrets.PUBLISHER_TOKEN }}
2 .gitignore (vendored)
@@ -4,4 +4,4 @@
 # Ignore Gradle build output directory
 build
 
-rbcs-cli/native-image/*.json
+gbcs-cli/native-image/*.json
20 LICENSE (entire file removed, @@ -1,20 +0,0 @@)

The MIT License (MIT)

Copyright (c) 2017 Y. T. CHUNG <zonyitoo@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
209 README.md (entire file removed, @@ -1,209 +0,0 @@)

# Remote Build Cache Server

Remote Build Cache Server (shortened to RBCS) allows you to share and reuse unchanged build
and test outputs across the team. This speeds up local and CI builds, since cycles are not wasted
re-building components that are unaffected by new code changes. RBCS supports both Gradle and
Maven build tool environments.

It comes with pluggable storage backends: the core application offers in-memory or disk-backed storage,
and an official plugin adds memcached as a storage backend.

It supports HTTP basic authentication or, alternatively, TLS certificate authentication, role-based access control (RBAC),
and throttling.

## Quickstart

### Downloading the jar file
You can download the latest version from [this link](https://gitea.woggioni.net/woggioni/-/packages/maven/net.woggioni:rbcs-cli/).

Assuming you have Java 21 or later installed, you can launch the server directly with

```bash
java -jar rbcs-cli.jar server
```

By default it starts an HTTP server bound to localhost and listening on port 8080, with no authentication
and writing data to disk, which you can use for testing.

### Using the Docker image
You can pull the latest Docker image with
```bash
docker pull gitea.woggioni.net/woggioni/rbcs:latest
```

By default it starts an HTTP server bound to localhost and listening on port 8080, with no authentication
and writing data to disk, which you can use for testing.
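A minimal run sketch (not part of the original README) for trying the image locally, assuming you want the cache reachable from the host on port 8080:

```bash
# Hypothetical quick-start invocation: publish the container's port 8080 on the host.
docker run --rm -p 8080:8080 gitea.woggioni.net/woggioni/rbcs:latest
```

Since the server binds to localhost by default, reaching it from outside the container may additionally require a configuration that binds to 0.0.0.0 (see the configuration section below).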
## Usage

### Configuration
The location of the `rbcs.xml` configuration file depends on the operating system.
Alternatively, it can be changed by setting the `RBCS_CONFIGURATION_DIR` environment variable or the `net.woggioni.rbcs.conf.dir` Java system property
to the directory that contains the `rbcs.xml` file.
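A minimal sketch (not part of the original README) of pointing the server at a custom configuration directory, using the environment variable and system property named above; the `/etc/rbcs` path is only an example:

```bash
# Using the environment variable (the directory must contain rbcs.xml)
export RBCS_CONFIGURATION_DIR=/etc/rbcs
java -jar rbcs-cli.jar server

# Equivalent, using the JVM system property instead
java -Dnet.woggioni.rbcs.conf.dir=/etc/rbcs -jar rbcs-cli.jar server
```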
The server configuration file follows the XML format and uses XML Schema for validation
(you can find the schema for the main configuration file [here](https://gitea.woggioni.net/woggioni/rbcs/src/branch/master/rbcs-server/src/main/resources/net/woggioni/rbcs/server/schema/rbcs.xsd)).

The configuration values are enclosed in XML attributes and support system property / environment variable interpolation.
As an example, you can configure RBCS to read the server port number from the `RBCS_SERVER_PORT` environment variable
and the bind address from the `rbc.bind.address` JVM system property.
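The original README ends the sentence above without showing the snippet it refers to. A minimal sketch of what such a `<bind>` element could look like, assuming an `${env:...}` prefix for environment variables alongside the `${sys:...}` prefix that appears in the complete configuration example later on this page; the exact interpolation syntax should be checked against the schema:

```xml
<bind host="${sys:rbc.bind.address}" port="${env:RBCS_SERVER_PORT}"/>
```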
Full documentation for all tags and attributes is available [here](doc/server_configuration.md).

### Plugins
If you want to use memcache as a storage backend you'll also need to download [the memcache plugin](https://gitea.woggioni.net/woggioni/-/packages/maven/net.woggioni:rbcs-server-memcache/).

Plugins need to be stored in a folder named `plugins` in the server's working directory
(the directory where the server process is started). They are shipped as TAR archives, so you need to extract
the content of the archive into the `plugins` directory for the server to pick them up.
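A minimal sketch (not part of the original README) of installing the memcache plugin; the archive name follows the `rbcs-server-memcache*.tar` pattern used in the project's Dockerfile, with the version placeholder to be filled in:

```bash
mkdir -p plugins
tar -xf rbcs-server-memcache-<version>.tar -C plugins
```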
### Using RBCS with Gradle

Add this to the `settings.gradle` file of your project:

```groovy
buildCache {
    remote(HttpBuildCache) {
        url = 'https://rbcs.example.com/'
        push = true
        allowInsecureProtocol = false
        // The credentials block is only required if you enable
        // HTTP basic authentication on RBCS
        credentials {
            username = 'build-cache-user'
            password = 'some-complicated-password'
        }
    }
}
```

Alternatively, you can add this to `${GRADLE_HOME}/init.gradle` to configure the remote cache
at the system level:

```groovy
gradle.settingsEvaluated { settings ->
    settings.buildCache {
        remote(HttpBuildCache) {
            url = 'https://rbcs.example.com/'
            push = true
            allowInsecureProtocol = false
            // The credentials block is only required if you enable
            // HTTP basic authentication on RBCS
            credentials {
                username = 'build-cache-user'
                password = 'some-complicated-password'
            }
        }
    }
}
```

Then add `org.gradle.caching=true` to your `<project>/gradle.properties` or run Gradle with `--build-cache`.

Read the [Gradle documentation](https://docs.gradle.org/current/userguide/build_cache.html) for more detailed information.

### Using RBCS with Maven

1. Create an `extensions.xml` file in `<project>/.mvn/extensions.xml` with the following content
```xml
<extensions xmlns="http://maven.apache.org/EXTENSIONS/1.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
            xsi:schemaLocation="http://maven.apache.org/EXTENSIONS/1.1.0 https://maven.apache.org/xsd/core-extensions-1.0.0.xsd">
    <extension>
        <groupId>org.apache.maven.extensions</groupId>
        <artifactId>maven-build-cache-extension</artifactId>
        <version>1.2.0</version>
    </extension>
</extensions>
```
2. Copy [maven-build-cache-config.xml](https://maven.apache.org/extensions/maven-build-cache-extension/maven-build-cache-config.xml) into the `<project>/.mvn/` folder
3. Edit the `cache/configuration/remote` element
```xml
<remote enabled="true" id="rbcs">
    <url>https://rbcs.example.com/</url>
</remote>
```
4. Run Maven with
```bash
mvn -Dmaven.build.cache.enabled=true -Dmaven.build.cache.debugOutput=true -Dmaven.build.cache.remote.save.enabled=true package
```

Alternatively, you can set those properties in your `<project>/pom.xml`.
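A minimal sketch (not part of the original README) of setting the same flags as `<properties>` in `pom.xml` instead of passing them on the command line; the property names are taken from the `mvn` invocation above:

```xml
<properties>
    <maven.build.cache.enabled>true</maven.build.cache.enabled>
    <maven.build.cache.debugOutput>true</maven.build.cache.debugOutput>
    <maven.build.cache.remote.save.enabled>true</maven.build.cache.remote.save.enabled>
</properties>
```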
Read [here](https://maven.apache.org/extensions/maven-build-cache-extension/remote-cache.html)
for more information.

## FAQ
### Why should I use a build cache?

#### Build Caches Improve Build & Test Performance

Building software consists of a number of steps, like compiling sources, executing tests, and linking binaries. We've seen that a binary artifact repository helps when such a step requires an external component, by downloading the artifact from the repository rather than building it locally.
However, there are many additional steps in this build process which can be optimized to reduce the build time. An obvious strategy is to avoid executing build steps which dominate the total build time when these build steps are not needed.
Most build times are dominated by the testing step.

While binary repositories cannot capture the outcome of a test build step (only the test reports,
when included in binary artifacts), build caches are designed to eliminate redundant executions
for every build step. Moreover, they generalize the concept of avoiding work associated with any
incremental step of the build, including test execution, compilation and resource processing.
The mechanism itself is comparable to a pure function: given some inputs such as source
files and environment parameters, we know that the output is always going to be the same.
As a result, we can cache it and retrieve it based on a simple cryptographic hash of the inputs.
Build caching is supported natively by some build tools.
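A conceptual sketch (not RBCS code, and not part of the original README) of how such a cache key can be derived: hash every input of a build step, so that identical inputs always map to the same cached output.

```java
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.util.HexFormat;

// Illustrative only: derive a content-addressed cache key from a build step's inputs.
public class CacheKeyExample {
    static String cacheKey(String... inputs) throws Exception {
        MessageDigest digest = MessageDigest.getInstance("SHA-256");
        for (String input : inputs) {
            digest.update(input.getBytes(StandardCharsets.UTF_8));
        }
        return HexFormat.of().formatHex(digest.digest());
    }

    public static void main(String[] args) throws Exception {
        // The same task name, sources and compiler flags always yield the same key,
        // so the corresponding output can be fetched from the cache instead of rebuilt.
        System.out.println(cacheKey("compileJava", "class Main {}", "-release 21"));
    }
}
```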
#### Improve CI builds with a remote build cache

When analyzing the role of a build cache it is important to take into account the granularity
of the changes that it caches. Imagine a full build for a project with 40 to 50 modules
which fails at the last step (deployment) because the staging environment is temporarily unavailable.
Although the vast majority of the build steps (potentially thousands) succeed,
the change cannot be deployed to the staging environment.
Without a build cache one typically relies on a very complex CI configuration to reuse build step outputs,
or would have to repeat the full build once the environment is available.

Some build tools don't support incremental builds properly. For example, outputs of a build started
from scratch may vary when compared to subsequent builds that rely on the initial build's output.
As a result, to preserve build integrity, it's crucial to rebuild from scratch, or 'cleanly', in this
scenario.

With a build cache, only the last step needs to be executed and the build can be re-triggered
when the environment is back online. This automatically saves all of the time and
resources required across the different build steps which were successfully executed.
Instead of executing the intermediate steps, the build tool pulls the outputs from the build cache,
avoiding a lot of redundant work.

#### Share outputs with a remote build cache

One of the most important advantages of a remote build cache is the ability to share build outputs.
In most CI configurations, for example, a number of pipelines are created.
These may include one for building the sources, one for testing, one for publishing the outcomes
to a remote repository, and other pipelines to test on different platforms.
There are even situations where CI builds partially build a project (i.e. some modules and not others).

Most of those pipelines share a lot of intermediate build steps. All builds which perform testing
require the binaries to be ready. All publishing builds require all previous steps to be executed.
And because modern CI infrastructure means executing everything in containerized (isolated) environments,
significant resources are wasted by repeatedly building the same intermediate artifacts.

A remote build cache greatly reduces this overhead, by orders of magnitude, because it provides a way
for all those pipelines to share their outputs. After all, there is no point recreating an output that
is already available in the cache.

Because there are inherent dependencies between software components of a build,
introducing a build cache dramatically reduces the impact of exploding a component into multiple pieces,
allowing for increased modularity without increased overhead.

#### Make local developers more efficient with remote build caches

It is common for different teams within a company to work on different modules of a single large
application. In this case, most teams don't care about building the other parts of the software.
By introducing a remote cache, developers immediately benefit from pre-built artifacts when checking out code.
Because it has already been built on CI, they don't have to do it locally.

Introducing a remote cache is a huge benefit for those developers. Consider that a typical developer's
day begins by performing a code checkout. Most likely the checked-out code has already been built on CI.
Therefore, no time is wasted running the first build of the day. The remote cache provides all of the
intermediate artifacts needed. And, in the event local changes are made, the remote cache still leverages
partial cache hits for projects which are independent. As other developers in the organization request
CI builds, the remote cache continues to populate, increasing the likelihood of these remote cache hits
across team members.
@@ -14,7 +14,9 @@ allprojects { subproject ->
 if(project.currentTag.isPresent()) {
 version = project.currentTag.map { it[0] }.get()
 } else {
-version = "${getProperty('rbcs.version')}-SNAPSHOT"
+version = project.gitRevision.map { gitRevision ->
+"${getProperty('gbcs.version')}.${gitRevision[0..10]}"
+}.get()
 }
 
 repositories {
@@ -22,6 +24,7 @@ allprojects { subproject ->
 url = getProperty('gitea.maven.url')
 content {
 includeModule 'net.woggioni', 'jwo'
+includeModule 'net.woggioni', 'xmemcached'
 includeGroup 'com.lys'
 }
 }
@@ -38,7 +41,7 @@ allprojects { subproject ->
 withSourcesJar()
 modularity.inferModulePath = true
 toolchain {
-languageVersion = JavaLanguageVersion.of(23)
+languageVersion = JavaLanguageVersion.of(21)
 vendor = JvmVendorSpec.ORACLE
 }
 }
@@ -1,178 +0,0 @@ (entire file removed)

### RBCS server configuration file elements and attributes

#### Root Element: `server`
The root element that contains all server configuration.

**Attributes:**
- `path` (optional): URI path prefix for cache requests. Example: if set to "cache", requests would be made to "http://www.example.com/cache/KEY"

#### Child Elements

#### `<bind>`
Configures server socket settings.

**Attributes:**
- `host` (required): Server bind address
- `port` (required): Server port number
- `incoming-connections-backlog-size` (optional, default: 1024): Maximum queue length for incoming connection indications

#### `<connection>`
Configures connection handling parameters.

**Attributes:**
- `idle-timeout` (optional, default: PT30S): Connection timeout when there is no activity
- `read-idle-timeout` (optional, default: PT60S): Connection timeout when there are no reads
- `write-idle-timeout` (optional, default: PT60S): Connection timeout when there are no writes
- `max-request-size` (optional, default: 0x4000000): Maximum allowed request body size

#### `<event-executor>`
Configures event execution settings.

**Attributes:**
- `use-virtual-threads` (optional, default: true): Whether to use virtual threads for the server handler

#### `<cache>`
Defines the cache storage implementation. Two types are available:

##### InMemory Cache

A simple storage backend that uses a hash map to store data in memory.

**Attributes:**
- `max-age` (default: P1D): Cache entry lifetime
- `max-size` (default: 0x1000000): Maximum cache size in bytes
- `digest` (default: MD5): Key hashing algorithm
- `enable-compression` (default: true): Enable deflate compression
- `compression-level` (default: -1): Compression level (-1 to 9)
- `chunk-size` (default: 0x10000): Maximum socket write size

##### FileSystem Cache

A storage backend that stores data in a folder on the disk.

**Attributes:**
- `path`: Storage directory path
- `max-age` (default: P1D): Cache entry lifetime
- `digest` (default: MD5): Key hashing algorithm
- `enable-compression` (default: true): Enable deflate compression
- `compression-level` (default: -1): Compression level
- `chunk-size` (default: 0x10000): Maximum in-memory cache value size

#### `<authorization>`
Configures user and group-based access control.

##### `<users>`
List of registered users.
- Contains `<user>` elements:

**Attributes:**
- `name` (required): Username
- `password` (optional): For basic authentication
- Can contain an `anonymous` element to allow for unauthenticated access

##### `<groups>`
List of user groups.
- Contains `<group>` elements:

**Attributes:**
- `name`: Group name
- Can contain:
  - `users`: List of user references
  - `roles`: List of roles (READER/WRITER)
  - `user-quota`: Per-user quota
  - `group-quota`: Group-wide quota

#### `<authentication>`
Configures the authentication mechanism. Options:
- `<basic>`: HTTP basic authentication
- `<client-certificate>`: TLS certificate authentication; it uses attributes of the subject's X.500 name
to extract the username and group of the client.

Example:
```xml
<client-certificate>
    <user-extractor attribute-name="CN" pattern="(.*)"/>
    <group-extractor attribute-name="O" pattern="(.*)"/>
</client-certificate>
```
- `<none>`: No authentication

#### `<tls>`
Configures TLS encryption.

**Child Elements:**
- `<keystore>`: Server certificate configuration

**Attributes:**
- `file` (required): Keystore file path
- `password`: Keystore password
- `key-alias` (required): Private key alias
- `key-password`: Private key password
- `<truststore>`: Client certificate verification

**Attributes:**
- `file` (required): Truststore file path
- `password`: Truststore password
- `check-certificate-status`: Enable CRL/OCSP checking
- `require-client-certificate` (default: false): Require client certificates
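The complete example at the end of this document does not include a `<tls>` element. A minimal sketch (not part of the original document) assembled only from the attributes listed above; file names, passwords and the key alias are placeholders:

```xml
<tls>
    <keystore file="server-keystore.pfx" password="changeit" key-alias="rbcs" key-password="changeit"/>
    <truststore file="truststore.pfx" password="changeit" check-certificate-status="false" require-client-certificate="true"/>
</tls>
```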
----------------------------

# Complete configuration example

```xml
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
             xmlns:rbcs="urn:net.woggioni.rbcs.server"
             xs:schemaLocation="urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs.xsd"
>
    <bind host="0.0.0.0" port="8080" incoming-connections-backlog-size="1024"/>
    <connection
        max-request-size="67108864"
        idle-timeout="PT10S"
        read-idle-timeout="PT20S"
        write-idle-timeout="PT20S"
        read-timeout="PT5S"
        write-timeout="PT5S"/>
    <event-executor use-virtual-threads="true"/>
    <cache xs:type="rbcs:inMemoryCacheType" max-age="P7D" enable-compression="false" max-size="0x10000000" />
    <!--cache xs:type="rbcs:fileSystemCacheType" max-age="P7D" enable-compression="false" path="${sys:java.io.tmpdir}/rbcs"/-->
    <authorization>
        <users>
            <user name="user1" password="II+qeNLft2pZ/JVNo9F7jpjM/BqEcfsJW27NZ6dPVs8tAwHbxrJppKYsbL7J/SMl">
                <quota calls="100" period="PT1S"/>
            </user>
            <user name="user2" password="v6T9+q6/VNpvLknji3ixPiyz2YZCQMXj2FN7hvzbfc2Ig+IzAHO0iiBCH9oWuBDq"/>
            <anonymous>
                <quota calls="10" period="PT60S" initial-available-calls="10" max-available-calls="10"/>
            </anonymous>
        </users>
        <groups>
            <group name="readers">
                <users>
                    <anonymous/>
                </users>
                <roles>
                    <reader/>
                </roles>
            </group>
            <group name="writers">
                <users>
                    <user ref="user1"/>
                    <user ref="user2"/>
                </users>
                <roles>
                    <reader/>
                    <writer/>
                </roles>
            </group>
        </groups>
    </authorization>
    <authentication>
        <basic/>
    </authentication>
</rbcs:server>
```
@@ -3,21 +3,14 @@ RUN adduser -D luser
 USER luser
 WORKDIR /home/luser
 
-FROM base-release AS release-vanilla
+FROM base-release AS release
-ADD rbcs-cli-envelope-*.jar rbcs.jar
+ADD gbcs-cli-envelope-*.jar gbcs.jar
-ENTRYPOINT ["java", "-XX:+UseSerialGC", "-XX:GCTimeRatio=24", "-jar", "/home/luser/rbcs.jar", "server"]
+ENTRYPOINT ["java", "-jar", "/home/luser/gbcs.jar", "server"]
 
-FROM base-release AS release-memcache
+FROM base-release AS release-memcached
-ADD --chown=luser:luser rbcs-cli-envelope-*.jar rbcs.jar
+ADD --chown=luser:luser gbcs-cli-envelope-*.jar gbcs.jar
 RUN mkdir plugins
 WORKDIR /home/luser/plugins
-RUN --mount=type=bind,source=.,target=/build/distributions tar -xf /build/distributions/rbcs-server-memcache*.tar
+RUN --mount=type=bind,source=.,target=/build/distributions tar -xf /build/distributions/gbcs-server-memcached*.tar
 WORKDIR /home/luser
-ADD logback.xml .
+ENTRYPOINT ["java", "-jar", "/home/luser/gbcs.jar", "server"]
-ENTRYPOINT ["java", "-Dlogback.configurationFile=logback.xml", "-XX:+UseSerialGC", "-XX:GCTimeRatio=24", "-jar", "/home/luser/rbcs.jar", "server"]
-
-FROM scratch AS release-native
-ADD rbcs-cli.upx /rbcs/rbcs-cli
-ENV RBCS_CONFIGURATION_DIR="/rbcs"
-WORKDIR /rbcs
-ENTRYPOINT ["./rbcs/rbcs-cli"]
|
|||||||
# RBCS Docker images
|
|
||||||
There are 3 image flavours:
|
|
||||||
- vanilla
|
|
||||||
- memcache
|
|
||||||
- native
|
|
||||||
|
|
||||||
The `vanilla` image only contains the envelope
|
|
||||||
jar file with no plugins and is based on `eclipse-temurin:21-jre-alpine`
|
|
||||||
|
|
||||||
The `memcache` image is similar to the `vanilla` image, except that it also contains
|
|
||||||
the `rbcs-server-memcache` plugin in the `plugins` folder, use this image if you don't want to use the `native`
|
|
||||||
image and want to use memcache as the cache backend
|
|
||||||
|
|
||||||
The `native` image contains a native, statically-linked executable created with GraalVM
|
|
||||||
that has no userspace dependencies. It also embeds the memcache plugin inside the executable.
|
|
||||||
Use this image for maximum efficiency and minimal memory footprint.
|
|
||||||
|
|
||||||
## Which image shoud I use?
|
|
||||||
The `native` image uses Java's SerialGC, so it's ideal for constrained environment like containers or small servers,
|
|
||||||
if you have a lot of resources and want to squeeze out the maximum throughput you should consider the
|
|
||||||
`vanilla` or `memcache` image, then choose and fine tune the garbage collector.
|
|
||||||
|
|
||||||
Also the `native` image is only available for the `x86_64` architecture at the moment,
|
|
||||||
while `vanilla` and `memcache` also ship a `aarch64` variant.
|
|
@@ -18,8 +18,8 @@ configurations {
 }
 
 dependencies {
-docker project(path: ':rbcs-cli', configuration: 'release')
+docker project(path: ':gbcs-cli', configuration: 'release')
-docker project(path: ':rbcs-server-memcache', configuration: 'release')
+docker project(path: ':gbcs-server-memcached', configuration: 'release')
 }
 
 Provider<Task> cleanTaskProvider = tasks.named(BasePlugin.CLEAN_TASK_NAME) {}
@@ -30,39 +30,38 @@ Provider<Copy> prepareDockerBuild = tasks.register('prepareDockerBuild', Copy) {
 into project.layout.buildDirectory.file('docker')
 from(configurations.docker)
 from(file('Dockerfile'))
-from(rootProject.file('conf')) {
-include 'logback.xml'
-}
 }
 
 Provider<DockerBuildImage> dockerBuild = tasks.register('dockerBuildImage', DockerBuildImage) {
 group = 'docker'
 dependsOn prepareDockerBuild
-images.add('gitea.woggioni.net/woggioni/rbcs:latest')
+images.add('gitea.woggioni.net/woggioni/gbcs:latest')
-images.add("gitea.woggioni.net/woggioni/rbcs:${version}")
+images.add("gitea.woggioni.net/woggioni/gbcs:${version}")
 }
 
 Provider<DockerTagImage> dockerTag = tasks.register('dockerTagImage', DockerTagImage) {
 group = 'docker'
-repository = 'gitea.woggioni.net/woggioni/rbcs'
+repository = 'gitea.woggioni.net/woggioni/gbcs'
-imageId = 'gitea.woggioni.net/woggioni/rbcs:latest'
+imageId = 'gitea.woggioni.net/woggioni/gbcs:latest'
 tag = version
 }
 
-Provider<DockerTagImage> dockerTagMemcache = tasks.register('dockerTagMemcacheImage', DockerTagImage) {
+Provider<DockerTagImage> dockerTagMemcached = tasks.register('dockerTagMemcachedImage', DockerTagImage) {
 group = 'docker'
-repository = 'gitea.woggioni.net/woggioni/rbcs'
+repository = 'gitea.woggioni.net/woggioni/gbcs'
-imageId = 'gitea.woggioni.net/woggioni/rbcs:memcache'
+imageId = 'gitea.woggioni.net/woggioni/gbcs:memcached'
-tag = "${version}-memcache"
+tag = "${version}-memcached"
 }
 
 Provider<DockerPushImage> dockerPush = tasks.register('dockerPushImage', DockerPushImage) {
 group = 'docker'
-dependsOn dockerTag, dockerTagMemcache
+dependsOn dockerTag, dockerTagMemcached
 registryCredentials {
 url = getProperty('docker.registry.url')
 username = 'woggioni'
 password = System.getenv().get("PUBLISHER_TOKEN")
 }
-images = [dockerTag.flatMap{ it.tag }, dockerTagMemcache.flatMap{ it.tag }]
+images = [dockerTag.flatMap{ it.tag }, dockerTagMemcached.flatMap{ it.tag }]
 }
 
 
@@ -5,9 +5,6 @@ plugins {
 }
 
 dependencies {
-api catalog.netty.common
-api catalog.netty.buffer
-api catalog.netty.handler
 }
 
 publishing {
6 gbcs-api/src/main/java/module-info.java (new file, @@ -0,0 +1,6 @@)

module net.woggioni.gbcs.api {
    requires static lombok;
    requires java.xml;
    exports net.woggioni.gbcs.api;
    exports net.woggioni.gbcs.api.exception;
}
12 gbcs-api/src/main/java/net/woggioni/gbcs/api/Cache.java (new file, @@ -0,0 +1,12 @@)

package net.woggioni.gbcs.api;

import net.woggioni.gbcs.api.exception.ContentTooLargeException;

import java.nio.channels.ReadableByteChannel;


public interface Cache extends AutoCloseable {
    ReadableByteChannel get(String key);

    void put(String key, byte[] content) throws ContentTooLargeException;
}
@@ -1,4 +1,4 @@
-package net.woggioni.rbcs.api;
+package net.woggioni.gbcs.api;
 
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
@@ -1,4 +1,4 @@
-package net.woggioni.rbcs.api;
+package net.woggioni.gbcs.api;
 
 
 import lombok.EqualsAndHashCode;
@@ -35,6 +35,8 @@ public class Configuration {
 
 @Value
 public static class Connection {
+Duration readTimeout;
+Duration writeTimeout;
 Duration idleTimeout;
 Duration readIdleTimeout;
 Duration writeIdleTimeout;
@@ -54,8 +56,7 @@ public class Configuration {
 @EqualsAndHashCode.Include
 String name;
 Set<Role> roles;
-Quota groupQuota;
+Quota quota;
-Quota userQuota;
 }
 
 @Value
@@ -83,6 +84,17 @@ public class Configuration {
 Group extract(X509Certificate cert);
 }
 
+@Value
+public static class Throttling {
+KeyStore keyStore;
+TrustStore trustStore;
+boolean verifyClients;
+}
+
+public enum ClientCertificate {
+REQUIRED, OPTIONAL
+}
+
 @Value
 public static class Tls {
 KeyStore keyStore;
@@ -122,7 +134,7 @@ public class Configuration {
 }
 
 public interface Cache {
-CacheHandlerFactory materialize();
+net.woggioni.gbcs.api.Cache materialize();
 String getNamespaceURI();
 String getTypeName();
 }
@@ -1,4 +1,4 @@
-package net.woggioni.rbcs.api;
+package net.woggioni.gbcs.api;
 
 public enum Role {
 Reader, Writer
@@ -1,6 +1,6 @@
-package net.woggioni.rbcs.api.exception;
+package net.woggioni.gbcs.api.exception;
 
-public class CacheException extends RbcsException {
+public class CacheException extends GbcsException {
 public CacheException(String message, Throwable cause) {
 super(message, cause);
 }
@@ -1,6 +1,6 @@
-package net.woggioni.rbcs.api.exception;
+package net.woggioni.gbcs.api.exception;
 
-public class ConfigurationException extends RbcsException {
+public class ConfigurationException extends GbcsException {
 public ConfigurationException(String message, Throwable cause) {
 super(message, cause);
 }
@@ -1,6 +1,6 @@
-package net.woggioni.rbcs.api.exception;
+package net.woggioni.gbcs.api.exception;
 
-public class ContentTooLargeException extends RbcsException {
+public class ContentTooLargeException extends GbcsException {
 public ContentTooLargeException(String message, Throwable cause) {
 super(message, cause);
 }
@@ -0,0 +1,7 @@
+package net.woggioni.gbcs.api.exception;
+
+public class GbcsException extends RuntimeException {
+public GbcsException(String message, Throwable cause) {
+super(message, cause);
+}
+}
101
gbcs-cli/build.gradle
Normal file
101
gbcs-cli/build.gradle
Normal file
@@ -0,0 +1,101 @@
|
|||||||
|
plugins {
|
||||||
|
id 'java-library'
|
||||||
|
alias catalog.plugins.kotlin.jvm
|
||||||
|
alias catalog.plugins.envelope
|
||||||
|
alias catalog.plugins.sambal
|
||||||
|
alias catalog.plugins.graalvm.native.image
|
||||||
|
alias catalog.plugins.graalvm.jlink
|
||||||
|
alias catalog.plugins.jpms.check
|
||||||
|
id 'maven-publish'
|
||||||
|
}
|
||||||
|
|
||||||
|
import net.woggioni.gradle.envelope.EnvelopeJarTask
|
||||||
|
import net.woggioni.gradle.graalvm.NativeImageConfigurationTask
|
||||||
|
import net.woggioni.gradle.graalvm.NativeImagePlugin
|
||||||
|
import net.woggioni.gradle.graalvm.NativeImageTask
|
||||||
|
import net.woggioni.gradle.graalvm.JlinkPlugin
|
||||||
|
import net.woggioni.gradle.graalvm.JlinkTask
|
||||||
|
|
||||||
|
Property<String> mainModuleName = objects.property(String.class)
|
||||||
|
mainModuleName.set('net.woggioni.gbcs.cli')
|
||||||
|
Property<String> mainClassName = objects.property(String.class)
|
||||||
|
mainClassName.set('net.woggioni.gbcs.cli.GradleBuildCacheServerCli')
|
||||||
|
|
||||||
|
tasks.named(JavaPlugin.COMPILE_JAVA_TASK_NAME, JavaCompile) {
|
||||||
|
options.javaModuleMainClass = mainClassName
|
||||||
|
}
|
||||||
|
|
||||||
|
configurations {
|
||||||
|
release {
|
||||||
|
transitive = false
|
||||||
|
canBeConsumed = true
|
||||||
|
canBeResolved = true
|
||||||
|
visible = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
envelopeJar {
|
||||||
|
mainModule = mainModuleName
|
||||||
|
mainClass = mainClassName
|
||||||
|
|
||||||
|
extraClasspath = ["plugins"]
|
||||||
|
}
|
||||||
|
|
||||||
|
dependencies {
|
||||||
|
implementation catalog.jwo
|
||||||
|
implementation catalog.slf4j.api
|
||||||
|
implementation catalog.netty.codec.http
|
||||||
|
implementation catalog.picocli
|
||||||
|
|
||||||
|
implementation project(':gbcs-client')
|
||||||
|
implementation project(':gbcs-server')
|
||||||
|
|
||||||
|
// runtimeOnly catalog.slf4j.jdk14
|
||||||
|
runtimeOnly catalog.logback.classic
|
||||||
|
// runtimeOnly catalog.slf4j.simple
|
||||||
|
}
|
||||||
|
|
||||||
|
Provider<EnvelopeJarTask> envelopeJarTaskProvider = tasks.named('envelopeJar', EnvelopeJarTask.class) {
|
||||||
|
// systemProperties['java.util.logging.config.class'] = 'net.woggioni.gbcs.LoggingConfig'
|
||||||
|
// systemProperties['log.config.source'] = 'net/woggioni/gbcs/cli/logging.properties'
|
||||||
|
// systemProperties['java.util.logging.config.file'] = 'classpath:net/woggioni/gbcs/cli/logging.properties'
|
||||||
|
systemProperties['logback.configurationFile'] = 'classpath:net/woggioni/gbcs/cli/logback.xml'
|
||||||
|
systemProperties['io.netty.leakDetectionLevel'] = 'DISABLED'
|
||||||
|
|
||||||
|
// systemProperties['org.slf4j.simpleLogger.showDateTime'] = 'true'
|
||||||
|
// systemProperties['org.slf4j.simpleLogger.defaultLogLevel'] = 'debug'
|
||||||
|
// systemProperties['org.slf4j.simpleLogger.log.com.google.code.yanf4j'] = 'warn'
|
||||||
|
// systemProperties['org.slf4j.simpleLogger.log.net.rubyeye.xmemcached'] = 'warn'
|
||||||
|
// systemProperties['org.slf4j.simpleLogger.dateTimeFormat'] = 'yyyy-MM-dd\'T\'HH:mm:ss.SSSZ'
|
||||||
|
}
|
||||||
|
|
||||||
|
tasks.named(NativeImagePlugin.CONFIGURE_NATIVE_IMAGE_TASK_NAME, NativeImageConfigurationTask) {
|
||||||
|
mainClass = mainClassName
|
||||||
|
mainModule = mainModuleName
|
||||||
|
}
|
||||||
|
|
||||||
|
tasks.named(NativeImagePlugin.NATIVE_IMAGE_TASK_NAME, NativeImageTask) {
|
||||||
|
mainClass = mainClassName
|
||||||
|
mainModule = mainModuleName
|
||||||
|
useMusl = true
|
||||||
|
buildStaticImage = true
|
||||||
|
}
|
||||||
|
|
||||||
|
tasks.named(JlinkPlugin.JLINK_TASK_NAME, JlinkTask) {
|
||||||
|
mainClass = mainClassName
|
||||||
|
mainModule = 'net.woggioni.gbcs.cli'
|
||||||
|
}
|
||||||
|
|
||||||
|
artifacts {
|
||||||
|
release(envelopeJarTaskProvider)
|
||||||
|
}
|
||||||
|
|
||||||
|
publishing {
|
||||||
|
publications {
|
||||||
|
maven(MavenPublication) {
|
||||||
|
artifact envelopeJar
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
2
gbcs-cli/native-image/native-image.properties
Normal file
2
gbcs-cli/native-image/native-image.properties
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
Args=-H:Optimize=3 --gc=serial --initialize-at-run-time=io.netty
|
||||||
|
#-H:TraceClassInitialization=io.netty.handler.ssl.BouncyCastleAlpnSslUtils
|
18
gbcs-cli/src/main/java/module-info.java
Normal file
18
gbcs-cli/src/main/java/module-info.java
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
module net.woggioni.gbcs.cli {
|
||||||
|
requires org.slf4j;
|
||||||
|
requires net.woggioni.gbcs.server;
|
||||||
|
requires info.picocli;
|
||||||
|
requires net.woggioni.gbcs.common;
|
||||||
|
requires net.woggioni.gbcs.client;
|
||||||
|
requires kotlin.stdlib;
|
||||||
|
requires net.woggioni.jwo;
|
||||||
|
requires net.woggioni.gbcs.api;
|
||||||
|
requires io.netty.codec.http;
|
||||||
|
|
||||||
|
exports net.woggioni.gbcs.cli.impl.converters to info.picocli;
|
||||||
|
opens net.woggioni.gbcs.cli.impl.commands to info.picocli;
|
||||||
|
opens net.woggioni.gbcs.cli.impl to info.picocli;
|
||||||
|
opens net.woggioni.gbcs.cli to info.picocli, net.woggioni.gbcs.common;
|
||||||
|
|
||||||
|
exports net.woggioni.gbcs.cli;
|
||||||
|
}
|
@@ -0,0 +1,64 @@
|
|||||||
|
package net.woggioni.gbcs.cli
|
||||||
|
|
||||||
|
import net.woggioni.gbcs.common.GbcsUrlStreamHandlerFactory
|
||||||
|
import net.woggioni.gbcs.common.contextLogger
|
||||||
|
import net.woggioni.gbcs.cli.impl.AbstractVersionProvider
|
||||||
|
import net.woggioni.gbcs.cli.impl.GbcsCommand
|
||||||
|
import net.woggioni.gbcs.cli.impl.commands.BenchmarkCommand
|
||||||
|
import net.woggioni.gbcs.cli.impl.commands.ClientCommand
|
||||||
|
import net.woggioni.gbcs.cli.impl.commands.GetCommand
|
||||||
|
import net.woggioni.gbcs.cli.impl.commands.PasswordHashCommand
|
||||||
|
import net.woggioni.gbcs.cli.impl.commands.PutCommand
|
||||||
|
import net.woggioni.gbcs.cli.impl.commands.ServerCommand
|
||||||
|
import net.woggioni.jwo.Application
|
||||||
|
import picocli.CommandLine
|
||||||
|
import picocli.CommandLine.Model.CommandSpec
|
||||||
|
import java.net.URI
|
||||||
|
|
||||||
|
|
||||||
|
@CommandLine.Command(
|
||||||
|
name = "gbcs", versionProvider = GradleBuildCacheServerCli.VersionProvider::class
|
||||||
|
)
|
||||||
|
class GradleBuildCacheServerCli : GbcsCommand() {
|
||||||
|
|
||||||
|
class VersionProvider : AbstractVersionProvider()
|
||||||
|
companion object {
|
||||||
|
@JvmStatic
|
||||||
|
fun main(vararg args: String) {
|
||||||
|
Thread.currentThread().contextClassLoader = GradleBuildCacheServerCli::class.java.classLoader
|
||||||
|
GbcsUrlStreamHandlerFactory.install()
|
||||||
|
val log = contextLogger()
|
||||||
|
val app = Application.builder("gbcs")
|
||||||
|
.configurationDirectoryEnvVar("GBCS_CONFIGURATION_DIR")
|
||||||
|
.configurationDirectoryPropertyKey("net.woggioni.gbcs.conf.dir")
|
||||||
|
.build()
|
||||||
|
val gbcsCli = GradleBuildCacheServerCli()
|
||||||
|
val commandLine = CommandLine(gbcsCli)
|
||||||
|
commandLine.setExecutionExceptionHandler { ex, cl, parseResult ->
|
||||||
|
log.error(ex.message, ex)
|
||||||
|
CommandLine.ExitCode.SOFTWARE
|
||||||
|
}
|
||||||
|
commandLine.addSubcommand(ServerCommand(app))
|
||||||
|
commandLine.addSubcommand(PasswordHashCommand())
|
||||||
|
commandLine.addSubcommand(
|
||||||
|
CommandLine(ClientCommand(app)).apply {
|
||||||
|
addSubcommand(BenchmarkCommand())
|
||||||
|
addSubcommand(PutCommand())
|
||||||
|
addSubcommand(GetCommand())
|
||||||
|
})
|
||||||
|
System.exit(commandLine.execute(*args))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@CommandLine.Option(names = ["-V", "--version"], versionHelp = true)
|
||||||
|
var versionHelp = false
|
||||||
|
private set
|
||||||
|
|
||||||
|
@CommandLine.Spec
|
||||||
|
private lateinit var spec: CommandSpec
|
||||||
|
|
||||||
|
|
||||||
|
override fun run() {
|
||||||
|
spec.commandLine().usage(System.out);
|
||||||
|
}
|
||||||
|
}
|
@@ -1,4 +1,4 @@
|
|||||||
package net.woggioni.rbcs.cli.impl
|
package net.woggioni.gbcs.cli.impl
|
||||||
|
|
||||||
import picocli.CommandLine
|
import picocli.CommandLine
|
||||||
import java.util.jar.Attributes
|
import java.util.jar.Attributes
|
@@ -1,11 +1,11 @@
|
|||||||
package net.woggioni.rbcs.cli.impl
|
package net.woggioni.gbcs.cli.impl
|
||||||
|
|
||||||
import net.woggioni.jwo.Application
|
import net.woggioni.jwo.Application
|
||||||
import picocli.CommandLine
|
import picocli.CommandLine
|
||||||
import java.nio.file.Path
|
import java.nio.file.Path
|
||||||
|
|
||||||
|
|
||||||
abstract class RbcsCommand : Runnable {
|
abstract class GbcsCommand : Runnable {
|
||||||
|
|
||||||
@CommandLine.Option(names = ["-h", "--help"], usageHelp = true)
|
@CommandLine.Option(names = ["-h", "--help"], usageHelp = true)
|
||||||
var usageHelp = false
|
var usageHelp = false
|
@@ -0,0 +1,170 @@
|
|||||||
|
package net.woggioni.gbcs.cli.impl.commands
|
||||||
|
|
||||||
|
import io.netty.handler.codec.http.FullHttpRequest
|
||||||
|
import io.netty.handler.codec.http.FullHttpResponse
|
||||||
|
import net.woggioni.gbcs.common.contextLogger
|
||||||
|
import net.woggioni.gbcs.common.error
|
||||||
|
import net.woggioni.gbcs.common.info
|
||||||
|
import net.woggioni.gbcs.cli.impl.GbcsCommand
|
||||||
|
import net.woggioni.gbcs.client.GradleBuildCacheClient
|
||||||
|
import net.woggioni.gbcs.client.RequestEventListener
|
||||||
|
import net.woggioni.jwo.JWO
|
||||||
|
import picocli.CommandLine
|
||||||
|
import java.security.SecureRandom
|
||||||
|
import java.time.Duration
|
||||||
|
import java.time.Instant
|
||||||
|
import java.util.Base64
|
||||||
|
import java.util.concurrent.ExecutionException
|
||||||
|
import java.util.concurrent.Future
|
||||||
|
import java.util.concurrent.LinkedBlockingQueue
|
||||||
|
import java.util.concurrent.Semaphore
|
||||||
|
import java.util.concurrent.atomic.AtomicLong
|
||||||
|
import kotlin.random.Random
|
||||||
|
|
||||||
|
@CommandLine.Command(
|
||||||
|
name = "benchmark",
|
||||||
|
description = ["Run a load test against the server"],
|
||||||
|
showDefaultValues = true
|
||||||
|
)
|
||||||
|
class BenchmarkCommand : GbcsCommand() {
|
||||||
|
private val log = contextLogger()
|
||||||
|
|
||||||
|
@CommandLine.Spec
|
||||||
|
private lateinit var spec: CommandLine.Model.CommandSpec
|
||||||
|
|
||||||
|
@CommandLine.Option(
|
||||||
|
names = ["-e", "--entries"],
|
||||||
|
description = ["Total number of elements to be added to the cache"],
|
||||||
|
paramLabel = "NUMBER_OF_ENTRIES"
|
||||||
|
)
|
||||||
|
private var numberOfEntries = 1000
|
||||||
|
|
||||||
|
override fun run() {
|
||||||
|
val clientCommand = spec.parent().userObject() as ClientCommand
|
||||||
|
val profile = clientCommand.profileName.let { profileName ->
|
||||||
|
clientCommand.configuration.profiles[profileName]
|
||||||
|
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
|
||||||
|
}
|
||||||
|
val client = GradleBuildCacheClient(profile)
|
||||||
|
|
||||||
|
val entryGenerator = sequence {
|
||||||
|
val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
|
||||||
|
while (true) {
|
||||||
|
val key = JWO.bytesToHex(random.nextBytes(16))
|
||||||
|
val content = random.nextInt().toByte()
|
||||||
|
val value = ByteArray(0x1000, { _ -> content })
|
||||||
|
yield(key to value)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
log.info {
|
||||||
|
"Starting insertion"
|
||||||
|
}
|
||||||
|
val entries = let {
|
||||||
|
val completionCounter = AtomicLong(0)
|
||||||
|
val completionQueue = LinkedBlockingQueue<Pair<String, ByteArray>>(numberOfEntries)
|
||||||
|
val start = Instant.now()
|
||||||
|
val semaphore = Semaphore(profile.maxConnections * 3)
|
||||||
|
val totalElapsedTime = AtomicLong(0)
|
||||||
|
val iterator = entryGenerator.take(numberOfEntries).iterator()
|
||||||
|
while(completionCounter.get() < numberOfEntries) {
|
||||||
|
if(iterator.hasNext()) {
|
||||||
|
val entry = iterator.next()
|
||||||
|
semaphore.acquire()
|
||||||
|
val eventListener = object : RequestEventListener {
|
||||||
|
var start: Long? = null
|
||||||
|
override fun requestSent(req: FullHttpRequest) {
|
||||||
|
this.start = System.nanoTime()
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun responseReceived(res: FullHttpResponse) {
|
||||||
|
this.start?.let { requestStart ->
|
||||||
|
totalElapsedTime.addAndGet((System.nanoTime() - requestStart))
|
||||||
|
}
|
||||||
|
this.start = null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
val future = client.put(entry.first, entry.second, eventListener).thenApply { entry }
|
||||||
|
future.whenComplete { result, ex ->
|
||||||
|
if (ex != null) {
|
||||||
|
log.error(ex.message, ex)
|
||||||
|
} else {
|
||||||
|
completionQueue.put(result)
|
||||||
|
}
|
||||||
|
semaphore.release()
|
||||||
|
completionCounter.incrementAndGet()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
val inserted = completionQueue.toList()
|
||||||
|
val end = Instant.now()
|
||||||
|
log.info {
|
||||||
|
val elapsed = Duration.between(start, end).toMillis()
|
||||||
|
val opsPerSecond = String.format("%.2f", numberOfEntries.toDouble() / elapsed * 1000)
|
||||||
|
"Insertion rate: $opsPerSecond ops/s"
|
||||||
|
}
|
||||||
|
log.info {
|
||||||
|
val avgTxTime = String.format("%.0f", totalElapsedTime.get() / numberOfEntries.toDouble() / 1e6)
|
||||||
|
"Average time per insertion: $avgTxTime ms"
|
||||||
|
}
|
||||||
|
inserted
|
||||||
|
}
|
||||||
|
log.info {
|
||||||
|
"Inserted ${entries.size} entries"
|
||||||
|
}
|
||||||
|
log.info {
|
||||||
|
"Starting retrieval"
|
||||||
|
}
|
||||||
|
if (entries.isNotEmpty()) {
|
||||||
|
val completionCounter = AtomicLong(0)
|
||||||
|
val semaphore = Semaphore(profile.maxConnections * 3)
|
||||||
|
val start = Instant.now()
|
||||||
|
val totalElapsedTime = AtomicLong(0)
|
||||||
|
entries.forEach { entry ->
|
||||||
|
semaphore.acquire()
|
||||||
|
val eventListener = object : RequestEventListener {
|
||||||
|
var start : Long? = null
|
||||||
|
override fun requestSent(req: FullHttpRequest) {
|
||||||
|
this.start = System.nanoTime()
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun responseReceived(res: FullHttpResponse) {
|
||||||
|
this.start?.let { requestStart ->
|
||||||
|
totalElapsedTime.addAndGet((System.nanoTime() - requestStart))
|
||||||
|
}
|
||||||
|
this.start = null
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
val future = client.get(entry.first, eventListener).thenApply {
|
||||||
|
if (it == null) {
|
||||||
|
log.error {
|
||||||
|
"Missing entry for key '${entry.first}'"
|
||||||
|
}
|
||||||
|
} else if (!entry.second.contentEquals(it)) {
|
||||||
|
log.error {
|
||||||
|
"Retrieved a value different from what was inserted for key '${entry.first}'"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
future.whenComplete { _, _ ->
|
||||||
|
completionCounter.incrementAndGet()
|
||||||
|
semaphore.release()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
val end = Instant.now()
|
||||||
|
log.info {
|
||||||
|
val elapsed = Duration.between(start, end).toMillis()
|
||||||
|
val opsPerSecond = String.format("%.2f", entries.size.toDouble() / elapsed * 1000)
|
||||||
|
"Retrieval rate: $opsPerSecond ops/s"
|
||||||
|
}
|
||||||
|
log.info {
|
||||||
|
val avgTxTime = String.format("%.0f", totalElapsedTime.get() / completionCounter.toDouble() / 1e6)
|
||||||
|
"Average time per retrieval: $avgTxTime ms"
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
log.error("Skipping retrieval benchmark as it was not possible to insert any entry in the cache")
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@@ -1,24 +1,24 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.commands
|
package net.woggioni.gbcs.cli.impl.commands
|
||||||
|
|
||||||
|
import net.woggioni.gbcs.cli.impl.GbcsCommand
|
||||||
|
import net.woggioni.gbcs.client.GradleBuildCacheClient
|
||||||
import net.woggioni.jwo.Application
|
import net.woggioni.jwo.Application
|
||||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
|
||||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
|
||||||
import picocli.CommandLine
|
import picocli.CommandLine
|
||||||
import java.nio.file.Path
|
import java.nio.file.Path
|
||||||
|
|
||||||
@CommandLine.Command(
|
@CommandLine.Command(
|
||||||
name = "client",
|
name = "client",
|
||||||
description = ["RBCS client"],
|
description = ["GBCS client"],
|
||||||
showDefaultValues = true
|
showDefaultValues = true
|
||||||
)
|
)
|
||||||
class ClientCommand(app : Application) : RbcsCommand() {
|
class ClientCommand(app : Application) : GbcsCommand() {
|
||||||
|
|
||||||
@CommandLine.Option(
|
@CommandLine.Option(
|
||||||
names = ["-c", "--configuration"],
|
names = ["-c", "--configuration"],
|
||||||
description = ["Path to the client configuration file"],
|
description = ["Path to the client configuration file"],
|
||||||
paramLabel = "CONFIGURATION_FILE"
|
paramLabel = "CONFIGURATION_FILE"
|
||||||
)
|
)
|
||||||
private var configurationFile : Path = findConfigurationFile(app, "rbcs-client.xml")
|
private var configurationFile : Path = findConfigurationFile(app, "gbcs-client.xml")
|
||||||
|
|
||||||
@CommandLine.Option(
|
@CommandLine.Option(
|
||||||
names = ["-p", "--profile"],
|
names = ["-p", "--profile"],
|
||||||
@@ -28,8 +28,8 @@ class ClientCommand(app : Application) : RbcsCommand() {
|
|||||||
)
|
)
|
||||||
var profileName : String? = null
|
var profileName : String? = null
|
||||||
|
|
||||||
val configuration : RemoteBuildCacheClient.Configuration by lazy {
|
val configuration : GradleBuildCacheClient.Configuration by lazy {
|
||||||
RemoteBuildCacheClient.Configuration.parse(configurationFile)
|
GradleBuildCacheClient.Configuration.parse(configurationFile)
|
||||||
}
|
}
|
||||||
|
|
||||||
override fun run() {
|
override fun run() {
|
@@ -1,8 +1,8 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.commands
|
package net.woggioni.gbcs.cli.impl.commands
|
||||||
|
|
||||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
import net.woggioni.gbcs.common.contextLogger
|
||||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
import net.woggioni.gbcs.cli.impl.GbcsCommand
|
||||||
import net.woggioni.rbcs.common.createLogger
|
import net.woggioni.gbcs.client.GradleBuildCacheClient
|
||||||
import picocli.CommandLine
|
import picocli.CommandLine
|
||||||
import java.nio.file.Files
|
import java.nio.file.Files
|
||||||
import java.nio.file.Path
|
import java.nio.file.Path
|
||||||
@@ -12,10 +12,8 @@ import java.nio.file.Path
|
|||||||
description = ["Fetch a value from the cache with the specified key"],
|
description = ["Fetch a value from the cache with the specified key"],
|
||||||
showDefaultValues = true
|
showDefaultValues = true
|
||||||
)
|
)
|
||||||
class GetCommand : RbcsCommand() {
|
class GetCommand : GbcsCommand() {
|
||||||
companion object{
|
private val log = contextLogger()
|
||||||
private val log = createLogger<GetCommand>()
|
|
||||||
}
|
|
||||||
|
|
||||||
@CommandLine.Spec
|
@CommandLine.Spec
|
||||||
private lateinit var spec: CommandLine.Model.CommandSpec
|
private lateinit var spec: CommandLine.Model.CommandSpec
|
||||||
@@ -40,7 +38,7 @@ class GetCommand : RbcsCommand() {
|
|||||||
clientCommand.configuration.profiles[profileName]
|
clientCommand.configuration.profiles[profileName]
|
||||||
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
|
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
|
||||||
}
|
}
|
||||||
RemoteBuildCacheClient(profile).use { client ->
|
GradleBuildCacheClient(profile).use { client ->
|
||||||
client.get(key).thenApply { value ->
|
client.get(key).thenApply { value ->
|
||||||
value?.let {
|
value?.let {
|
||||||
(output?.let(Files::newOutputStream) ?: System.out).use {
|
(output?.let(Files::newOutputStream) ?: System.out).use {
|
@@ -1,9 +1,9 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.commands
|
package net.woggioni.gbcs.cli.impl.commands
|
||||||
|
|
||||||
|
import net.woggioni.gbcs.common.PasswordSecurity.hashPassword
|
||||||
|
import net.woggioni.gbcs.cli.impl.GbcsCommand
|
||||||
|
import net.woggioni.gbcs.cli.impl.converters.OutputStreamConverter
|
||||||
import net.woggioni.jwo.UncloseableOutputStream
|
import net.woggioni.jwo.UncloseableOutputStream
|
||||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
|
||||||
import net.woggioni.rbcs.cli.impl.converters.OutputStreamConverter
|
|
||||||
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
|
|
||||||
import picocli.CommandLine
|
import picocli.CommandLine
|
||||||
import java.io.OutputStream
|
import java.io.OutputStream
|
||||||
import java.io.OutputStreamWriter
|
import java.io.OutputStreamWriter
|
||||||
@@ -12,10 +12,10 @@ import java.io.PrintWriter
|
|||||||
|
|
||||||
@CommandLine.Command(
|
@CommandLine.Command(
|
||||||
name = "password",
|
name = "password",
|
||||||
description = ["Generate a password hash to add to RBCS configuration file"],
|
description = ["Generate a password hash to add to GBCS configuration file"],
|
||||||
showDefaultValues = true
|
showDefaultValues = true
|
||||||
)
|
)
|
||||||
class PasswordHashCommand : RbcsCommand() {
|
class PasswordHashCommand : GbcsCommand() {
|
||||||
@CommandLine.Option(
|
@CommandLine.Option(
|
||||||
names = ["-o", "--output-file"],
|
names = ["-o", "--output-file"],
|
||||||
description = ["Write the output to a file instead of stdout"],
|
description = ["Write the output to a file instead of stdout"],
|
@@ -0,0 +1,48 @@
|
|||||||
|
package net.woggioni.gbcs.cli.impl.commands
|
||||||
|
|
||||||
|
import net.woggioni.gbcs.common.contextLogger
|
||||||
|
import net.woggioni.gbcs.cli.impl.GbcsCommand
|
||||||
|
import net.woggioni.gbcs.cli.impl.converters.InputStreamConverter
|
||||||
|
import net.woggioni.gbcs.client.GradleBuildCacheClient
|
||||||
|
import picocli.CommandLine
|
||||||
|
import java.io.InputStream
|
||||||
|
|
||||||
|
@CommandLine.Command(
|
||||||
|
name = "put",
|
||||||
|
description = ["Add or replace a value to the cache with the specified key"],
|
||||||
|
showDefaultValues = true
|
||||||
|
)
|
||||||
|
class PutCommand : GbcsCommand() {
|
||||||
|
private val log = contextLogger()
|
||||||
|
|
||||||
|
@CommandLine.Spec
|
||||||
|
private lateinit var spec: CommandLine.Model.CommandSpec
|
||||||
|
|
||||||
|
@CommandLine.Option(
|
||||||
|
names = ["-k", "--key"],
|
||||||
|
description = ["The key for the new value"],
|
||||||
|
paramLabel = "KEY"
|
||||||
|
)
|
||||||
|
private var key : String = ""
|
||||||
|
|
||||||
|
@CommandLine.Option(
|
||||||
|
names = ["-v", "--value"],
|
||||||
|
description = ["Path to a file containing the value to be added (defaults to stdin)"],
|
||||||
|
paramLabel = "VALUE_FILE",
|
||||||
|
converter = [InputStreamConverter::class]
|
||||||
|
)
|
||||||
|
private var value : InputStream = System.`in`
|
||||||
|
|
||||||
|
override fun run() {
|
||||||
|
val clientCommand = spec.parent().userObject() as ClientCommand
|
||||||
|
val profile = clientCommand.profileName.let { profileName ->
|
||||||
|
clientCommand.configuration.profiles[profileName]
|
||||||
|
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
|
||||||
|
}
|
||||||
|
GradleBuildCacheClient(profile).use { client ->
|
||||||
|
value.use {
|
||||||
|
client.put(key, it.readAllBytes())
|
||||||
|
}.get()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1,67 @@
|
|||||||
|
package net.woggioni.gbcs.cli.impl.commands
|
||||||
|
|
||||||
|
import net.woggioni.gbcs.server.GradleBuildCacheServer
|
||||||
|
import net.woggioni.gbcs.server.GradleBuildCacheServer.Companion.DEFAULT_CONFIGURATION_URL
|
||||||
|
import net.woggioni.gbcs.api.Configuration
|
||||||
|
import net.woggioni.gbcs.common.contextLogger
|
||||||
|
import net.woggioni.gbcs.common.debug
|
||||||
|
import net.woggioni.gbcs.common.info
|
||||||
|
import net.woggioni.gbcs.cli.impl.GbcsCommand
|
||||||
|
import net.woggioni.jwo.Application
|
||||||
|
import net.woggioni.jwo.JWO
|
||||||
|
import picocli.CommandLine
|
||||||
|
import java.io.ByteArrayOutputStream
|
||||||
|
import java.nio.file.Files
|
||||||
|
import java.nio.file.Path
|
||||||
|
|
||||||
|
@CommandLine.Command(
|
||||||
|
name = "server",
|
||||||
|
description = ["GBCS server"],
|
||||||
|
showDefaultValues = true
|
||||||
|
)
|
||||||
|
class ServerCommand(app : Application) : GbcsCommand() {
|
||||||
|
|
||||||
|
private val log = contextLogger()
|
||||||
|
|
||||||
|
private fun createDefaultConfigurationFile(configurationFile: Path) {
|
||||||
|
log.info {
|
||||||
|
"Creating default configuration file at '$configurationFile'"
|
||||||
|
}
|
||||||
|
val defaultConfigurationFileResource = DEFAULT_CONFIGURATION_URL
|
||||||
|
Files.newOutputStream(configurationFile).use { outputStream ->
|
||||||
|
defaultConfigurationFileResource.openStream().use { inputStream ->
|
||||||
|
JWO.copy(inputStream, outputStream)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@CommandLine.Option(
|
||||||
|
names = ["-c", "--config-file"],
|
||||||
|
description = ["Read the application configuration from this file"],
|
||||||
|
paramLabel = "CONFIG_FILE"
|
||||||
|
)
|
||||||
|
private var configurationFile: Path = findConfigurationFile(app, "gbcs-server.xml")
|
||||||
|
|
||||||
|
val configuration : Configuration by lazy {
|
||||||
|
GradleBuildCacheServer.loadConfiguration(configurationFile)
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun run() {
|
||||||
|
if (!Files.exists(configurationFile)) {
|
||||||
|
Files.createDirectories(configurationFile.parent)
|
||||||
|
createDefaultConfigurationFile(configurationFile)
|
||||||
|
}
|
||||||
|
|
||||||
|
val configuration = GradleBuildCacheServer.loadConfiguration(configurationFile)
|
||||||
|
log.debug {
|
||||||
|
ByteArrayOutputStream().also {
|
||||||
|
GradleBuildCacheServer.dumpConfiguration(configuration, it)
|
||||||
|
}.let {
|
||||||
|
"Server configuration:\n${String(it.toByteArray())}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
val server = GradleBuildCacheServer(configuration)
|
||||||
|
server.run().use {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@@ -1,4 +1,4 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.converters
|
package net.woggioni.gbcs.cli.impl.converters
|
||||||
|
|
||||||
import picocli.CommandLine
|
import picocli.CommandLine
|
||||||
import java.io.InputStream
|
import java.io.InputStream
|
@@ -1,4 +1,4 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.converters
|
package net.woggioni.gbcs.cli.impl.converters
|
||||||
|
|
||||||
import picocli.CommandLine
|
import picocli.CommandLine
|
||||||
import java.io.OutputStream
|
import java.io.OutputStream
|
@@ -15,4 +15,6 @@
|
|||||||
<root level="info">
|
<root level="info">
|
||||||
<appender-ref ref="console"/>
|
<appender-ref ref="console"/>
|
||||||
</root>
|
</root>
|
||||||
|
<logger name="com.google.code.yanf4j" level="warn"/>
|
||||||
|
<logger name="net.rubyeye.xmemcached" level="warn"/>
|
||||||
</configuration>
|
</configuration>
|
@@ -4,13 +4,11 @@ plugins {
|
|||||||
}
|
}
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
implementation project(':rbcs-api')
|
implementation project(':gbcs-api')
|
||||||
implementation project(':rbcs-common')
|
implementation project(':gbcs-common')
|
||||||
|
implementation catalog.picocli
|
||||||
implementation catalog.slf4j.api
|
implementation catalog.slf4j.api
|
||||||
implementation catalog.netty.buffer
|
implementation catalog.netty.buffer
|
||||||
implementation catalog.netty.handler
|
|
||||||
implementation catalog.netty.transport
|
|
||||||
implementation catalog.netty.common
|
|
||||||
implementation catalog.netty.codec.http
|
implementation catalog.netty.codec.http
|
||||||
|
|
||||||
testRuntimeOnly catalog.logback.classic
|
testRuntimeOnly catalog.logback.classic
|
@@ -1,4 +1,4 @@
|
|||||||
module net.woggioni.rbcs.client {
|
module net.woggioni.gbcs.client {
|
||||||
requires io.netty.handler;
|
requires io.netty.handler;
|
||||||
requires io.netty.codec.http;
|
requires io.netty.codec.http;
|
||||||
requires io.netty.transport;
|
requires io.netty.transport;
|
||||||
@@ -6,12 +6,12 @@ module net.woggioni.rbcs.client {
|
|||||||
requires io.netty.common;
|
requires io.netty.common;
|
||||||
requires io.netty.buffer;
|
requires io.netty.buffer;
|
||||||
requires java.xml;
|
requires java.xml;
|
||||||
requires net.woggioni.rbcs.common;
|
requires net.woggioni.gbcs.common;
|
||||||
requires net.woggioni.rbcs.api;
|
requires net.woggioni.gbcs.api;
|
||||||
requires io.netty.codec;
|
requires io.netty.codec;
|
||||||
requires org.slf4j;
|
requires org.slf4j;
|
||||||
|
|
||||||
exports net.woggioni.rbcs.client;
|
exports net.woggioni.gbcs.client;
|
||||||
|
|
||||||
opens net.woggioni.rbcs.client.schema;
|
opens net.woggioni.gbcs.client.schema;
|
||||||
}
|
}
|
@@ -1,12 +1,10 @@
|
|||||||
package net.woggioni.rbcs.client
|
package net.woggioni.gbcs.client
|
||||||
|
|
||||||
import io.netty.bootstrap.Bootstrap
|
import io.netty.bootstrap.Bootstrap
|
||||||
import io.netty.buffer.ByteBuf
|
import io.netty.buffer.ByteBuf
|
||||||
import io.netty.buffer.Unpooled
|
import io.netty.buffer.Unpooled
|
||||||
import io.netty.channel.Channel
|
import io.netty.channel.Channel
|
||||||
import io.netty.channel.ChannelHandler
|
|
||||||
import io.netty.channel.ChannelHandlerContext
|
import io.netty.channel.ChannelHandlerContext
|
||||||
import io.netty.channel.ChannelInboundHandlerAdapter
|
|
||||||
import io.netty.channel.ChannelOption
|
import io.netty.channel.ChannelOption
|
||||||
import io.netty.channel.ChannelPipeline
|
import io.netty.channel.ChannelPipeline
|
||||||
import io.netty.channel.SimpleChannelInboundHandler
|
import io.netty.channel.SimpleChannelInboundHandler
|
||||||
@@ -30,19 +28,13 @@ import io.netty.handler.codec.http.HttpVersion
|
|||||||
import io.netty.handler.ssl.SslContext
|
import io.netty.handler.ssl.SslContext
|
||||||
import io.netty.handler.ssl.SslContextBuilder
|
import io.netty.handler.ssl.SslContextBuilder
|
||||||
import io.netty.handler.stream.ChunkedWriteHandler
|
import io.netty.handler.stream.ChunkedWriteHandler
|
||||||
import io.netty.handler.timeout.IdleState
|
|
||||||
import io.netty.handler.timeout.IdleStateEvent
|
|
||||||
import io.netty.handler.timeout.IdleStateHandler
|
|
||||||
import io.netty.util.concurrent.Future
|
import io.netty.util.concurrent.Future
|
||||||
import io.netty.util.concurrent.GenericFutureListener
|
import io.netty.util.concurrent.GenericFutureListener
|
||||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
import net.woggioni.gbcs.client.impl.Parser
|
||||||
import net.woggioni.rbcs.client.impl.Parser
|
import net.woggioni.gbcs.common.Xml
|
||||||
import net.woggioni.rbcs.common.RBCS.loadKeystore
|
import net.woggioni.gbcs.common.contextLogger
|
||||||
import net.woggioni.rbcs.common.Xml
|
import net.woggioni.gbcs.common.debug
|
||||||
import net.woggioni.rbcs.common.createLogger
|
import net.woggioni.gbcs.common.trace
|
||||||
import net.woggioni.rbcs.common.debug
|
|
||||||
import net.woggioni.rbcs.common.trace
|
|
||||||
import java.io.IOException
|
|
||||||
import java.net.InetSocketAddress
|
import java.net.InetSocketAddress
|
||||||
import java.net.URI
|
import java.net.URI
|
||||||
import java.nio.file.Files
|
import java.nio.file.Files
|
||||||
@@ -52,21 +44,14 @@ import java.security.cert.X509Certificate
|
|||||||
import java.time.Duration
|
import java.time.Duration
|
||||||
import java.util.Base64
|
import java.util.Base64
|
||||||
import java.util.concurrent.CompletableFuture
|
import java.util.concurrent.CompletableFuture
|
||||||
import java.util.concurrent.TimeUnit
|
|
||||||
import java.util.concurrent.TimeoutException
|
|
||||||
import java.util.concurrent.atomic.AtomicInteger
|
import java.util.concurrent.atomic.AtomicInteger
|
||||||
import javax.net.ssl.TrustManagerFactory
|
|
||||||
import javax.net.ssl.X509TrustManager
|
|
||||||
import kotlin.random.Random
|
|
||||||
import io.netty.util.concurrent.Future as NettyFuture
|
import io.netty.util.concurrent.Future as NettyFuture
|
||||||
|
|
||||||
class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoCloseable {
|
|
||||||
companion object{
|
|
||||||
private val log = createLogger<RemoteBuildCacheClient>()
|
|
||||||
}
|
|
||||||
|
|
||||||
|
class GradleBuildCacheClient(private val profile: Configuration.Profile) : AutoCloseable {
|
||||||
private val group: NioEventLoopGroup
|
private val group: NioEventLoopGroup
|
||||||
private val sslContext: SslContext
|
private var sslContext: SslContext
|
||||||
|
private val log = contextLogger()
|
||||||
private val pool: ChannelPool
|
private val pool: ChannelPool
|
||||||
|
|
||||||
data class Configuration(
|
data class Configuration(
|
||||||
@@ -81,36 +66,18 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
|||||||
data class BasicAuthenticationCredentials(val username: String, val password: String) : Authentication()
|
data class BasicAuthenticationCredentials(val username: String, val password: String) : Authentication()
|
||||||
}
|
}
|
||||||
|
|
||||||
class TrustStore (
|
|
||||||
var file: Path?,
|
|
||||||
var password: String?,
|
|
||||||
var checkCertificateStatus: Boolean = false,
|
|
||||||
var verifyServerCertificate: Boolean = true,
|
|
||||||
)
|
|
||||||
|
|
||||||
class RetryPolicy(
|
class RetryPolicy(
|
||||||
val maxAttempts: Int,
|
val maxAttempts: Int,
|
||||||
val initialDelayMillis: Long,
|
val initialDelayMillis: Long,
|
||||||
val exp: Double
|
val exp: Double
|
||||||
)
|
)
|
||||||
|
|
||||||
class Connection(
|
|
||||||
val readTimeout: Duration,
|
|
||||||
val writeTimeout: Duration,
|
|
||||||
val idleTimeout: Duration,
|
|
||||||
val readIdleTimeout: Duration,
|
|
||||||
val writeIdleTimeout: Duration
|
|
||||||
)
|
|
||||||
|
|
||||||
data class Profile(
|
data class Profile(
|
||||||
val serverURI: URI,
|
val serverURI: URI,
|
||||||
val connection: Connection?,
|
|
||||||
val authentication: Authentication?,
|
val authentication: Authentication?,
|
||||||
val connectionTimeout: Duration?,
|
val connectionTimeout: Duration?,
|
||||||
val maxConnections: Int,
|
val maxConnections: Int,
|
||||||
val compressionEnabled: Boolean,
|
|
||||||
val retryPolicy: RetryPolicy?,
|
val retryPolicy: RetryPolicy?,
|
||||||
val tlsTruststore : TrustStore?
|
|
||||||
)
|
)
|
||||||
|
|
||||||
companion object {
|
companion object {
|
||||||
@@ -126,33 +93,10 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
|||||||
group = NioEventLoopGroup()
|
group = NioEventLoopGroup()
|
||||||
sslContext = SslContextBuilder.forClient().also { builder ->
|
sslContext = SslContextBuilder.forClient().also { builder ->
|
||||||
(profile.authentication as? Configuration.Authentication.TlsClientAuthenticationCredentials)?.let { tlsClientAuthenticationCredentials ->
|
(profile.authentication as? Configuration.Authentication.TlsClientAuthenticationCredentials)?.let { tlsClientAuthenticationCredentials ->
|
||||||
builder.apply {
|
builder.keyManager(
|
||||||
keyManager(
|
tlsClientAuthenticationCredentials.key,
|
||||||
tlsClientAuthenticationCredentials.key,
|
*tlsClientAuthenticationCredentials.certificateChain
|
||||||
*tlsClientAuthenticationCredentials.certificateChain
|
)
|
||||||
)
|
|
||||||
profile.tlsTruststore?.let { trustStore ->
|
|
||||||
if(!trustStore.verifyServerCertificate) {
|
|
||||||
trustManager(object : X509TrustManager {
|
|
||||||
override fun checkClientTrusted(certChain: Array<out X509Certificate>, p1: String?) {
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun checkServerTrusted(certChain: Array<out X509Certificate>, p1: String?) {
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun getAcceptedIssuers() = null
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
trustStore.file?.let {
|
|
||||||
val ts = loadKeystore(it, trustStore.password)
|
|
||||||
val trustManagerFactory: TrustManagerFactory =
|
|
||||||
TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm())
|
|
||||||
trustManagerFactory.init(ts)
|
|
||||||
trustManager(trustManagerFactory)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}.build()
|
}.build()
|
||||||
|
|
||||||
@@ -197,50 +141,18 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
|||||||
}
|
}
|
||||||
|
|
||||||
override fun channelCreated(ch: Channel) {
|
override fun channelCreated(ch: Channel) {
|
||||||
val connectionId = connectionCount.incrementAndGet()
|
val connectionId = connectionCount.getAndIncrement()
|
||||||
log.debug {
|
log.debug {
|
||||||
"Created connection ${ch.id().asShortText()}, total number of active connections: $connectionId"
|
"Created connection $connectionId, total number of active connections: $connectionId"
|
||||||
}
|
}
|
||||||
ch.closeFuture().addListener {
|
ch.closeFuture().addListener {
|
||||||
val activeConnections = connectionCount.decrementAndGet()
|
val activeConnections = connectionCount.decrementAndGet()
|
||||||
log.debug {
|
log.debug {
|
||||||
"Closed connection ${
|
"Closed connection $connectionId, total number of active connections: $activeConnections"
|
||||||
ch.id().asShortText()
|
|
||||||
}, total number of active connections: $activeConnections"
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
val pipeline: ChannelPipeline = ch.pipeline()
|
val pipeline: ChannelPipeline = ch.pipeline()
|
||||||
|
|
||||||
profile.connection?.also { conn ->
|
|
||||||
val readTimeout = conn.readTimeout.toMillis()
|
|
||||||
val writeTimeout = conn.writeTimeout.toMillis()
|
|
||||||
if (readTimeout > 0 || writeTimeout > 0) {
|
|
||||||
pipeline.addLast(
|
|
||||||
IdleStateHandler(
|
|
||||||
false,
|
|
||||||
readTimeout,
|
|
||||||
writeTimeout,
|
|
||||||
0,
|
|
||||||
TimeUnit.MILLISECONDS
|
|
||||||
)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
val readIdleTimeout = conn.readIdleTimeout.toMillis()
|
|
||||||
val writeIdleTimeout = conn.writeIdleTimeout.toMillis()
|
|
||||||
val idleTimeout = conn.idleTimeout.toMillis()
|
|
||||||
if (readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
|
|
||||||
pipeline.addLast(
|
|
||||||
IdleStateHandler(
|
|
||||||
true,
|
|
||||||
readIdleTimeout,
|
|
||||||
writeIdleTimeout,
|
|
||||||
idleTimeout,
|
|
||||||
TimeUnit.MILLISECONDS
|
|
||||||
)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Add SSL handler if needed
|
// Add SSL handler if needed
|
||||||
if ("https".equals(scheme, ignoreCase = true)) {
|
if ("https".equals(scheme, ignoreCase = true)) {
|
||||||
pipeline.addLast("ssl", sslContext.newHandler(ch.alloc(), host, port))
|
pipeline.addLast("ssl", sslContext.newHandler(ch.alloc(), host, port))
|
||||||
@@ -248,9 +160,7 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
|||||||
|
|
||||||
// HTTP handlers
|
// HTTP handlers
|
||||||
pipeline.addLast("codec", HttpClientCodec())
|
pipeline.addLast("codec", HttpClientCodec())
|
||||||
if(profile.compressionEnabled) {
|
pipeline.addLast("decompressor", HttpContentDecompressor())
|
||||||
pipeline.addLast("decompressor", HttpContentDecompressor())
|
|
||||||
}
|
|
||||||
pipeline.addLast("aggregator", HttpObjectAggregator(134217728))
|
pipeline.addLast("aggregator", HttpObjectAggregator(134217728))
|
||||||
pipeline.addLast("chunked", ChunkedWriteHandler())
|
pipeline.addLast("chunked", ChunkedWriteHandler())
|
||||||
}
|
}
|
||||||
@@ -296,7 +206,6 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
|||||||
retryPolicy.initialDelayMillis.toDouble(),
|
retryPolicy.initialDelayMillis.toDouble(),
|
||||||
retryPolicy.exp,
|
retryPolicy.exp,
|
||||||
outcomeHandler,
|
outcomeHandler,
|
||||||
Random.Default,
|
|
||||||
operation
|
operation
|
||||||
)
|
)
|
||||||
} else {
|
} else {
|
||||||
@@ -304,28 +213,9 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fun healthCheck(nonce: ByteArray): CompletableFuture<ByteArray?> {
|
fun get(key: String, eventListener : RequestEventListener? = null): CompletableFuture<ByteArray?> {
|
||||||
return executeWithRetry {
|
return executeWithRetry {
|
||||||
sendRequest(profile.serverURI, HttpMethod.TRACE, nonce)
|
sendRequest(profile.serverURI.resolve(key), HttpMethod.GET, null, eventListener)
|
||||||
}.thenApply {
|
|
||||||
val status = it.status()
|
|
||||||
if (it.status() != HttpResponseStatus.OK) {
|
|
||||||
throw HttpException(status)
|
|
||||||
} else {
|
|
||||||
it.content()
|
|
||||||
}
|
|
||||||
}.thenApply { maybeByteBuf ->
|
|
||||||
maybeByteBuf?.let {
|
|
||||||
val result = ByteArray(it.readableBytes())
|
|
||||||
it.getBytes(0, result)
|
|
||||||
result
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fun get(key: String): CompletableFuture<ByteArray?> {
|
|
||||||
return executeWithRetry {
|
|
||||||
sendRequest(profile.serverURI.resolve(key), HttpMethod.GET, null)
|
|
||||||
}.thenApply {
|
}.thenApply {
|
||||||
val status = it.status()
|
val status = it.status()
|
||||||
if (it.status() == HttpResponseStatus.NOT_FOUND) {
|
if (it.status() == HttpResponseStatus.NOT_FOUND) {
|
||||||
@@ -344,13 +234,9 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fun put(key: String, content: ByteArray, metadata: CacheValueMetadata): CompletableFuture<Unit> {
|
fun put(key: String, content: ByteArray, eventListener : RequestEventListener? = null): CompletableFuture<Unit> {
|
||||||
return executeWithRetry {
|
return executeWithRetry {
|
||||||
val extraHeaders = sequenceOf(
|
sendRequest(profile.serverURI.resolve(key), HttpMethod.PUT, content, eventListener)
|
||||||
metadata.mimeType?.let { HttpHeaderNames.CONTENT_TYPE to it },
|
|
||||||
metadata.contentDisposition?.let { HttpHeaderNames.CONTENT_DISPOSITION to it }
|
|
||||||
).filterNotNull()
|
|
||||||
sendRequest(profile.serverURI.resolve(key), HttpMethod.PUT, content, extraHeaders.asIterable())
|
|
||||||
}.thenApply {
|
}.thenApply {
|
||||||
val status = it.status()
|
val status = it.status()
|
||||||
if (it.status() != HttpResponseStatus.CREATED && it.status() != HttpResponseStatus.OK) {
|
if (it.status() != HttpResponseStatus.CREATED && it.status() != HttpResponseStatus.OK) {
|
||||||
@@ -359,83 +245,36 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private fun sendRequest(
|
private fun sendRequest(uri: URI, method: HttpMethod, body: ByteArray?, eventListener : RequestEventListener?): CompletableFuture<FullHttpResponse> {
|
||||||
uri: URI,
|
|
||||||
method: HttpMethod,
|
|
||||||
body: ByteArray?,
|
|
||||||
extraHeaders: Iterable<Pair<CharSequence, CharSequence>>? = null
|
|
||||||
): CompletableFuture<FullHttpResponse> {
|
|
||||||
val responseFuture = CompletableFuture<FullHttpResponse>()
|
val responseFuture = CompletableFuture<FullHttpResponse>()
|
||||||
// Custom handler for processing responses
|
// Custom handler for processing responses
|
||||||
|
|
||||||
pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {
|
pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {
|
||||||
private val handlers = mutableListOf<ChannelHandler>()
|
|
||||||
|
|
||||||
fun cleanup(channel: Channel, pipeline: ChannelPipeline) {
|
|
||||||
handlers.forEach(pipeline::remove)
|
|
||||||
pool.release(channel)
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun operationComplete(channelFuture: Future<Channel>) {
|
override fun operationComplete(channelFuture: Future<Channel>) {
|
||||||
if (channelFuture.isSuccess) {
|
if (channelFuture.isSuccess) {
|
||||||
val channel = channelFuture.now
|
val channel = channelFuture.now
|
||||||
val pipeline = channel.pipeline()
|
val pipeline = channel.pipeline()
|
||||||
val timeoutHandler = object : ChannelInboundHandlerAdapter() {
|
channel.pipeline().addLast("handler", object : SimpleChannelInboundHandler<FullHttpResponse>() {
|
||||||
override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
|
|
||||||
if (evt is IdleStateEvent) {
|
|
||||||
val te = when (evt.state()) {
|
|
||||||
IdleState.READER_IDLE -> TimeoutException(
|
|
||||||
"Read timeout",
|
|
||||||
)
|
|
||||||
|
|
||||||
IdleState.WRITER_IDLE -> TimeoutException("Write timeout")
|
|
||||||
|
|
||||||
IdleState.ALL_IDLE -> TimeoutException("Idle timeout")
|
|
||||||
null -> throw IllegalStateException("This should never happen")
|
|
||||||
}
|
|
||||||
responseFuture.completeExceptionally(te)
|
|
||||||
ctx.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
val closeListener = GenericFutureListener<Future<Void>> {
|
|
||||||
responseFuture.completeExceptionally(IOException("The remote server closed the connection"))
|
|
||||||
pool.release(channel)
|
|
||||||
}
|
|
||||||
|
|
||||||
val responseHandler = object : SimpleChannelInboundHandler<FullHttpResponse>() {
|
|
||||||
override fun channelRead0(
|
override fun channelRead0(
|
||||||
ctx: ChannelHandlerContext,
|
ctx: ChannelHandlerContext,
|
||||||
response: FullHttpResponse
|
response: FullHttpResponse
|
||||||
) {
|
) {
|
||||||
channel.closeFuture().removeListener(closeListener)
|
pipeline.removeLast()
|
||||||
cleanup(channel, pipeline)
|
pool.release(channel)
|
||||||
responseFuture.complete(response)
|
responseFuture.complete(response)
|
||||||
|
eventListener?.responseReceived(response)
|
||||||
}
|
}
|
||||||
|
|
||||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
||||||
ctx.newPromise()
|
|
||||||
val ex = when (cause) {
|
val ex = when (cause) {
|
||||||
is DecoderException -> cause.cause
|
is DecoderException -> cause.cause
|
||||||
else -> cause
|
else -> cause
|
||||||
}
|
}
|
||||||
responseFuture.completeExceptionally(ex)
|
responseFuture.completeExceptionally(ex)
|
||||||
ctx.close()
|
ctx.close()
|
||||||
}
|
pipeline.removeLast()
|
||||||
|
|
||||||
override fun channelInactive(ctx: ChannelHandlerContext) {
|
|
||||||
pool.release(channel)
|
pool.release(channel)
|
||||||
responseFuture.completeExceptionally(IOException("The remote server closed the connection"))
|
|
||||||
super.channelInactive(ctx)
|
|
||||||
}
|
}
|
||||||
}
|
})
|
||||||
for (handler in arrayOf(timeoutHandler, responseHandler)) {
|
|
||||||
handlers.add(handler)
|
|
||||||
}
|
|
||||||
pipeline.addLast(timeoutHandler, responseHandler)
|
|
||||||
channel.closeFuture().addListener(closeListener)
|
|
||||||
|
|
||||||
|
|
||||||
// Prepare the HTTP request
|
// Prepare the HTTP request
|
||||||
val request: FullHttpRequest = let {
|
val request: FullHttpRequest = let {
|
||||||
val content: ByteBuf? = body?.takeIf(ByteArray::isNotEmpty)?.let(Unpooled::wrappedBuffer)
|
val content: ByteBuf? = body?.takeIf(ByteArray::isNotEmpty)?.let(Unpooled::wrappedBuffer)
|
||||||
@@ -447,19 +286,15 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
|||||||
).apply {
|
).apply {
|
||||||
headers().apply {
|
headers().apply {
|
||||||
if (content != null) {
|
if (content != null) {
|
||||||
|
set(HttpHeaderNames.CONTENT_TYPE, HttpHeaderValues.APPLICATION_OCTET_STREAM)
|
||||||
set(HttpHeaderNames.CONTENT_LENGTH, content.readableBytes())
|
set(HttpHeaderNames.CONTENT_LENGTH, content.readableBytes())
|
||||||
}
|
}
|
||||||
set(HttpHeaderNames.HOST, profile.serverURI.host)
|
set(HttpHeaderNames.HOST, profile.serverURI.host)
|
||||||
set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
|
set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
|
||||||
if(profile.compressionEnabled) {
|
set(
|
||||||
set(
|
HttpHeaderNames.ACCEPT_ENCODING,
|
||||||
HttpHeaderNames.ACCEPT_ENCODING,
|
HttpHeaderValues.GZIP.toString() + "," + HttpHeaderValues.DEFLATE.toString()
|
||||||
HttpHeaderValues.GZIP.toString() + "," + HttpHeaderValues.DEFLATE.toString()
|
)
|
||||||
)
|
|
||||||
}
|
|
||||||
extraHeaders?.forEach { (k, v) ->
|
|
||||||
add(k, v)
|
|
||||||
}
|
|
||||||
// Add basic auth if configured
|
// Add basic auth if configured
|
||||||
(profile.authentication as? Configuration.Authentication.BasicAuthenticationCredentials)?.let { credentials ->
|
(profile.authentication as? Configuration.Authentication.BasicAuthenticationCredentials)?.let { credentials ->
|
||||||
val auth = "${credentials.username}:${credentials.password}"
|
val auth = "${credentials.username}:${credentials.password}"
|
||||||
@@ -472,7 +307,11 @@ class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoC
|
|||||||
|
|
||||||
// Set headers
|
// Set headers
|
||||||
// Send the request
|
// Send the request
|
||||||
channel.writeAndFlush(request)
|
channel.writeAndFlush(request).addListener {
|
||||||
|
if(it.isSuccess) {
|
||||||
|
eventListener?.requestSent(request)
|
||||||
|
}
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
responseFuture.completeExceptionally(channelFuture.cause())
|
responseFuture.completeExceptionally(channelFuture.cause())
|
||||||
}
|
}
|
@@ -1,4 +1,4 @@
|
|||||||
package net.woggioni.rbcs.client
|
package net.woggioni.gbcs.client
|
||||||
|
|
||||||
import io.netty.handler.codec.http.HttpResponseStatus
|
import io.netty.handler.codec.http.HttpResponseStatus
|
||||||
|
|
@@ -0,0 +1,10 @@
|
|||||||
|
package net.woggioni.gbcs.client
|
||||||
|
|
||||||
|
import io.netty.handler.codec.http.FullHttpRequest
|
||||||
|
import io.netty.handler.codec.http.FullHttpResponse
|
||||||
|
|
||||||
|
interface RequestEventListener {
|
||||||
|
fun requestSent(req : FullHttpRequest) {}
|
||||||
|
fun responseReceived(res : FullHttpResponse) {}
|
||||||
|
fun exceptionCaught(ex : Throwable) {}
|
||||||
|
}
|
@@ -1,9 +1,9 @@
|
|||||||
package net.woggioni.rbcs.client.impl
|
package net.woggioni.gbcs.client.impl
|
||||||
|
|
||||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
import net.woggioni.gbcs.api.exception.ConfigurationException
|
||||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
import net.woggioni.gbcs.common.Xml.Companion.asIterable
|
||||||
import net.woggioni.rbcs.common.Xml.Companion.asIterable
|
import net.woggioni.gbcs.common.Xml.Companion.renderAttribute
|
||||||
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
|
import net.woggioni.gbcs.client.GradleBuildCacheClient
|
||||||
import org.w3c.dom.Document
|
import org.w3c.dom.Document
|
||||||
import java.net.URI
|
import java.net.URI
|
||||||
import java.nio.file.Files
|
import java.nio.file.Files
|
||||||
@@ -12,13 +12,12 @@ import java.security.KeyStore
|
|||||||
import java.security.PrivateKey
|
import java.security.PrivateKey
|
||||||
import java.security.cert.X509Certificate
|
import java.security.cert.X509Certificate
|
||||||
import java.time.Duration
|
import java.time.Duration
|
||||||
import java.time.temporal.ChronoUnit
|
|
||||||
|
|
||||||
object Parser {
|
object Parser {
|
||||||
|
|
||||||
fun parse(document: Document): RemoteBuildCacheClient.Configuration {
|
fun parse(document: Document): GradleBuildCacheClient.Configuration {
|
||||||
val root = document.documentElement
|
val root = document.documentElement
|
||||||
val profiles = mutableMapOf<String, RemoteBuildCacheClient.Configuration.Profile>()
|
val profiles = mutableMapOf<String, GradleBuildCacheClient.Configuration.Profile>()
|
||||||
|
|
||||||
for (child in root.asIterable()) {
|
for (child in root.asIterable()) {
|
||||||
val tagName = child.localName
|
val tagName = child.localName
|
||||||
@@ -28,10 +27,8 @@ object Parser {
|
|||||||
child.renderAttribute("name") ?: throw ConfigurationException("name attribute is required")
|
child.renderAttribute("name") ?: throw ConfigurationException("name attribute is required")
|
||||||
val uri = child.renderAttribute("base-url")?.let(::URI)
|
val uri = child.renderAttribute("base-url")?.let(::URI)
|
||||||
?: throw ConfigurationException("base-url attribute is required")
|
?: throw ConfigurationException("base-url attribute is required")
|
||||||
var authentication: RemoteBuildCacheClient.Configuration.Authentication? = null
|
var authentication: GradleBuildCacheClient.Configuration.Authentication? = null
|
||||||
var retryPolicy: RemoteBuildCacheClient.Configuration.RetryPolicy? = null
|
var retryPolicy: GradleBuildCacheClient.Configuration.RetryPolicy? = null
|
||||||
var connection : RemoteBuildCacheClient.Configuration.Connection? = null
|
|
||||||
var trustStore : RemoteBuildCacheClient.Configuration.TrustStore? = null
|
|
||||||
for (gchild in child.asIterable()) {
|
for (gchild in child.asIterable()) {
|
||||||
when (gchild.localName) {
|
when (gchild.localName) {
|
||||||
"tls-client-auth" -> {
|
"tls-client-auth" -> {
|
||||||
@@ -52,7 +49,7 @@ object Parser {
|
|||||||
.toList()
|
.toList()
|
||||||
.toTypedArray()
|
.toTypedArray()
|
||||||
authentication =
|
authentication =
|
||||||
RemoteBuildCacheClient.Configuration.Authentication.TlsClientAuthenticationCredentials(
|
GradleBuildCacheClient.Configuration.Authentication.TlsClientAuthenticationCredentials(
|
||||||
key,
|
key,
|
||||||
certChain
|
certChain
|
||||||
)
|
)
|
||||||
@@ -64,7 +61,7 @@ object Parser {
|
|||||||
val password = gchild.renderAttribute("password")
|
val password = gchild.renderAttribute("password")
|
||||||
?: throw ConfigurationException("password attribute is required")
|
?: throw ConfigurationException("password attribute is required")
|
||||||
authentication =
|
authentication =
|
||||||
RemoteBuildCacheClient.Configuration.Authentication.BasicAuthenticationCredentials(
|
GradleBuildCacheClient.Configuration.Authentication.BasicAuthenticationCredentials(
|
||||||
username,
|
username,
|
||||||
password
|
password
|
||||||
)
|
)
|
||||||
@@ -83,43 +80,12 @@ object Parser {
|
|||||||
gchild.renderAttribute("exp")
|
gchild.renderAttribute("exp")
|
||||||
?.let(String::toDouble)
|
?.let(String::toDouble)
|
||||||
?: 2.0f
|
?: 2.0f
|
||||||
retryPolicy = RemoteBuildCacheClient.Configuration.RetryPolicy(
|
retryPolicy = GradleBuildCacheClient.Configuration.RetryPolicy(
|
||||||
maxAttempts,
|
maxAttempts,
|
||||||
initialDelay.toMillis(),
|
initialDelay.toMillis(),
|
||||||
exp.toDouble()
|
exp.toDouble()
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
"connection" -> {
|
|
||||||
val writeTimeout = gchild.renderAttribute("write-timeout")
|
|
||||||
?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
|
|
||||||
val readTimeout = gchild.renderAttribute("read-timeout")
|
|
||||||
?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
|
|
||||||
val idleTimeout = gchild.renderAttribute("idle-timeout")
|
|
||||||
?.let(Duration::parse) ?: Duration.of(30, ChronoUnit.SECONDS)
|
|
||||||
val readIdleTimeout = gchild.renderAttribute("read-idle-timeout")
|
|
||||||
?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
|
|
||||||
val writeIdleTimeout = gchild.renderAttribute("write-idle-timeout")
|
|
||||||
?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
|
|
||||||
connection = RemoteBuildCacheClient.Configuration.Connection(
|
|
||||||
readTimeout,
|
|
||||||
writeTimeout,
|
|
||||||
idleTimeout,
|
|
||||||
readIdleTimeout,
|
|
||||||
writeIdleTimeout,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
"tls-trust-store" -> {
|
|
||||||
val file = gchild.renderAttribute("file")
|
|
||||||
?.let(Path::of)
|
|
||||||
val password = gchild.renderAttribute("password")
|
|
||||||
val checkCertificateStatus = gchild.renderAttribute("check-certificate-status")
|
|
||||||
?.let(String::toBoolean) ?: false
|
|
||||||
val verifyServerCertificate = gchild.renderAttribute("verify-server-certificate")
|
|
||||||
?.let(String::toBoolean) ?: true
|
|
||||||
trustStore = RemoteBuildCacheClient.Configuration.TrustStore(file, password, checkCertificateStatus, verifyServerCertificate)
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
val maxConnections = child.renderAttribute("max-connections")
|
val maxConnections = child.renderAttribute("max-connections")
|
||||||
@@ -127,23 +93,16 @@ object Parser {
|
|||||||
?: 50
|
?: 50
|
||||||
val connectionTimeout = child.renderAttribute("connection-timeout")
|
val connectionTimeout = child.renderAttribute("connection-timeout")
|
||||||
?.let(Duration::parse)
|
?.let(Duration::parse)
|
||||||
val compressionEnabled = child.renderAttribute("enable-compression")
|
profiles[name] = GradleBuildCacheClient.Configuration.Profile(
|
||||||
?.let(String::toBoolean)
|
|
||||||
?: true
|
|
||||||
|
|
||||||
profiles[name] = RemoteBuildCacheClient.Configuration.Profile(
|
|
||||||
uri,
|
uri,
|
||||||
connection,
|
|
||||||
authentication,
|
authentication,
|
||||||
connectionTimeout,
|
connectionTimeout,
|
||||||
maxConnections,
|
maxConnections,
|
||||||
compressionEnabled,
|
retryPolicy
|
||||||
retryPolicy,
|
|
||||||
trustStore
|
|
||||||
)
|
)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return RemoteBuildCacheClient.Configuration(profiles)
|
return GradleBuildCacheClient.Configuration(profiles)
|
||||||
}
|
}
|
||||||
}
|
}
|
@@ -1,10 +1,8 @@
|
|||||||
package net.woggioni.rbcs.client
|
package net.woggioni.gbcs.client
|
||||||
|
|
||||||
import io.netty.util.concurrent.EventExecutorGroup
|
import io.netty.util.concurrent.EventExecutorGroup
|
||||||
import java.util.concurrent.CompletableFuture
|
import java.util.concurrent.CompletableFuture
|
||||||
import java.util.concurrent.TimeUnit
|
import java.util.concurrent.TimeUnit
|
||||||
import kotlin.math.pow
|
|
||||||
import kotlin.random.Random
|
|
||||||
|
|
||||||
sealed class OperationOutcome<T> {
|
sealed class OperationOutcome<T> {
|
||||||
class Success<T>(val result: T) : OperationOutcome<T>()
|
class Success<T>(val result: T) : OperationOutcome<T>()
|
||||||
@@ -26,10 +24,8 @@ fun <T> executeWithRetry(
|
|||||||
initialDelay: Double,
|
initialDelay: Double,
|
||||||
exp: Double,
|
exp: Double,
|
||||||
outcomeHandler: OutcomeHandler<T>,
|
outcomeHandler: OutcomeHandler<T>,
|
||||||
randomizer : Random?,
|
|
||||||
cb: () -> CompletableFuture<T>
|
cb: () -> CompletableFuture<T>
|
||||||
): CompletableFuture<T> {
|
): CompletableFuture<T> {
|
||||||
|
|
||||||
val finalResult = cb()
|
val finalResult = cb()
|
||||||
var future = finalResult
|
var future = finalResult
|
||||||
var shortCircuit = false
|
var shortCircuit = false
|
||||||
@@ -50,7 +46,7 @@ fun <T> executeWithRetry(
|
|||||||
is OutcomeHandlerResult.Retry -> {
|
is OutcomeHandlerResult.Retry -> {
|
||||||
val res = CompletableFuture<T>()
|
val res = CompletableFuture<T>()
|
||||||
val delay = run {
|
val delay = run {
|
||||||
val scheduledDelay = (initialDelay * exp.pow(i.toDouble()) * (1.0 + (randomizer?.nextDouble(-0.5, 0.5) ?: 0.0))).toLong()
|
val scheduledDelay = (initialDelay * Math.pow(exp, i.toDouble())).toLong()
|
||||||
outcomeHandlerResult.suggestedDelayMillis?.coerceAtMost(scheduledDelay) ?: scheduledDelay
|
outcomeHandlerResult.suggestedDelayMillis?.coerceAtMost(scheduledDelay) ?: scheduledDelay
|
||||||
}
|
}
|
||||||
eventExecutorGroup.schedule({
|
eventExecutorGroup.schedule({
|
@@ -0,0 +1,50 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||||
|
<xs:schema targetNamespace="urn:net.woggioni.gbcs.client"
|
||||||
|
xmlns:xs="http://www.w3.org/2001/XMLSchema"
|
||||||
|
xmlns:gbcs-client="urn:net.woggioni.gbcs.client"
|
||||||
|
elementFormDefault="unqualified"
|
||||||
|
>
|
||||||
|
<xs:element name="profiles" type="gbcs-client:profilesType"/>
|
||||||
|
|
||||||
|
<xs:complexType name="profilesType">
|
||||||
|
<xs:sequence minOccurs="0">
|
||||||
|
<xs:element name="profile" type="gbcs-client:profileType" maxOccurs="unbounded"/>
|
||||||
|
</xs:sequence>
|
||||||
|
</xs:complexType>
|
||||||
|
|
||||||
|
<xs:complexType name="profileType">
|
||||||
|
<xs:sequence>
|
||||||
|
<xs:choice>
|
||||||
|
<xs:element name="no-auth" type="gbcs-client:noAuthType"/>
|
||||||
|
<xs:element name="basic-auth" type="gbcs-client:basicAuthType"/>
|
||||||
|
<xs:element name="tls-client-auth" type="gbcs-client:tlsClientAuthType"/>
|
||||||
|
</xs:choice>
|
||||||
|
<xs:element name="retry-policy" type="gbcs-client:retryType" minOccurs="0"/>
|
||||||
|
</xs:sequence>
|
||||||
|
<xs:attribute name="name" type="xs:token" use="required"/>
|
||||||
|
<xs:attribute name="base-url" type="xs:anyURI" use="required"/>
|
||||||
|
<xs:attribute name="max-connections" type="xs:positiveInteger" default="50"/>
|
||||||
|
<xs:attribute name="connection-timeout" type="xs:duration"/>
|
||||||
|
</xs:complexType>
|
||||||
|
|
||||||
|
<xs:complexType name="noAuthType"/>
|
||||||
|
|
||||||
|
<xs:complexType name="basicAuthType">
|
||||||
|
<xs:attribute name="user" type="xs:token" use="required"/>
|
||||||
|
<xs:attribute name="password" type="xs:string" use="required"/>
|
||||||
|
</xs:complexType>
|
||||||
|
|
||||||
|
<xs:complexType name="tlsClientAuthType">
|
||||||
|
<xs:attribute name="key-store-file" type="xs:anyURI" use="required"/>
|
||||||
|
<xs:attribute name="key-store-password" type="xs:string" use="required"/>
|
||||||
|
<xs:attribute name="key-alias" type="xs:token" use="required"/>
|
||||||
|
<xs:attribute name="key-password" type="xs:string" use="optional"/>
|
||||||
|
</xs:complexType>
|
||||||
|
|
||||||
|
<xs:complexType name="retryType">
|
||||||
|
<xs:attribute name="max-attempts" type="xs:positiveInteger" use="required"/>
|
||||||
|
<xs:attribute name="initial-delay" type="xs:duration" default="PT1S"/>
|
||||||
|
<xs:attribute name="exp" type="xs:double" default="2.0"/>
|
||||||
|
</xs:complexType>
|
||||||
|
|
||||||
|
</xs:schema>
|
@@ -1,9 +1,10 @@
|
|||||||
package net.woggioni.rbcs.client
|
package net.woggioni.gbcs.client
|
||||||
|
|
||||||
import io.netty.util.concurrent.DefaultEventExecutorGroup
|
import io.netty.util.concurrent.DefaultEventExecutorGroup
|
||||||
import io.netty.util.concurrent.EventExecutorGroup
|
import io.netty.util.concurrent.EventExecutorGroup
|
||||||
import net.woggioni.rbcs.common.contextLogger
|
import net.woggioni.gbcs.common.contextLogger
|
||||||
import org.junit.jupiter.api.Assertions
|
import org.junit.jupiter.api.Assertions
|
||||||
|
import org.junit.jupiter.api.Test
|
||||||
import org.junit.jupiter.api.extension.ExtensionContext
|
import org.junit.jupiter.api.extension.ExtensionContext
|
||||||
import org.junit.jupiter.params.ParameterizedTest
|
import org.junit.jupiter.params.ParameterizedTest
|
||||||
import org.junit.jupiter.params.provider.Arguments
|
import org.junit.jupiter.params.provider.Arguments
|
||||||
@@ -89,7 +90,7 @@ class RetryTest {
|
|||||||
val random = Random(testArgs.seed)
|
val random = Random(testArgs.seed)
|
||||||
|
|
||||||
val future =
|
val future =
|
||||||
executeWithRetry(executor, testArgs.maxAttempt, testArgs.initialDelay, testArgs.exp, outcomeHandler, null) {
|
executeWithRetry(executor, testArgs.maxAttempt, testArgs.initialDelay, testArgs.exp, outcomeHandler) {
|
||||||
val now = System.nanoTime()
|
val now = System.nanoTime()
|
||||||
val result = CompletableFuture<Int>()
|
val result = CompletableFuture<Int>()
|
||||||
executor.submit {
|
executor.submit {
|
||||||
@@ -129,7 +130,7 @@ class RetryTest {
|
|||||||
previousAttempt.first + testArgs.initialDelay * Math.pow(testArgs.exp, index.toDouble()) * 1e6
|
previousAttempt.first + testArgs.initialDelay * Math.pow(testArgs.exp, index.toDouble()) * 1e6
|
||||||
val actualTimestamp = timestamp
|
val actualTimestamp = timestamp
|
||||||
val err = Math.abs(expectedTimestamp - actualTimestamp) / expectedTimestamp
|
val err = Math.abs(expectedTimestamp - actualTimestamp) / expectedTimestamp
|
||||||
Assertions.assertTrue(err < 0.1)
|
Assertions.assertTrue(err < 1e-3)
|
||||||
}
|
}
|
||||||
if (index == attempts.size - 1 && index < testArgs.maxAttempt - 1) {
|
if (index == attempts.size - 1 && index < testArgs.maxAttempt - 1) {
|
||||||
/*
|
/*
|
@@ -0,0 +1,16 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||||
|
<gbcs-client:profiles xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xmlns:gbcs-client="urn:net.woggioni.gbcs.client"
|
||||||
|
xs:schemaLocation="urn:net.woggioni.gbcs.client jms://net.woggioni.gbcs.client/net/woggioni/gbcs/client/schema/gbcs-client.xsd"
|
||||||
|
>
|
||||||
|
<profile name="profile1" base-url="https://gbcs1.example.com/">
|
||||||
|
<tls-client-auth
|
||||||
|
key-store-file="keystore.pfx"
|
||||||
|
key-store-password="password"
|
||||||
|
key-alias="woggioni@c962475fa38"
|
||||||
|
key-password="key-password"/>
|
||||||
|
</profile>
|
||||||
|
<profile name="profile2" base-url="https://gbcs2.example.com/">
|
||||||
|
<basic-auth user="user" password="password"/>
|
||||||
|
</profile>
|
||||||
|
</gbcs-client:profiles>
|
@@ -6,10 +6,9 @@ plugins {
|
|||||||
}
|
}
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
implementation project(':rbcs-api')
|
implementation project(':gbcs-api')
|
||||||
implementation catalog.slf4j.api
|
implementation catalog.slf4j.api
|
||||||
implementation catalog.jwo
|
implementation catalog.jwo
|
||||||
implementation catalog.netty.buffer
|
|
||||||
}
|
}
|
||||||
|
|
||||||
publishing {
|
publishing {
|
@@ -1,12 +1,10 @@
|
|||||||
module net.woggioni.rbcs.common {
|
module net.woggioni.gbcs.common {
|
||||||
requires java.xml;
|
requires java.xml;
|
||||||
requires java.logging;
|
requires java.logging;
|
||||||
requires org.slf4j;
|
requires org.slf4j;
|
||||||
requires kotlin.stdlib;
|
requires kotlin.stdlib;
|
||||||
requires net.woggioni.jwo;
|
requires net.woggioni.jwo;
|
||||||
requires io.netty.buffer;
|
|
||||||
requires io.netty.transport;
|
|
||||||
|
|
||||||
provides java.net.spi.URLStreamHandlerProvider with net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory;
|
provides java.net.spi.URLStreamHandlerProvider with net.woggioni.gbcs.common.GbcsUrlStreamHandlerFactory;
|
||||||
exports net.woggioni.rbcs.common;
|
exports net.woggioni.gbcs.common;
|
||||||
}
|
}
|
12
gbcs-common/src/main/kotlin/net/woggioni/gbcs/common/GBCS.kt
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
package net.woggioni.gbcs.common
|
||||||
|
|
||||||
|
import java.net.URI
|
||||||
|
import java.net.URL
|
||||||
|
|
||||||
|
object GBCS {
|
||||||
|
fun String.toUrl() : URL = URL.of(URI(this), null)
|
||||||
|
|
||||||
|
const val GBCS_NAMESPACE_URI: String = "urn:net.woggioni.gbcs.server"
|
||||||
|
const val GBCS_PREFIX: String = "gbcs"
|
||||||
|
const val XML_SCHEMA_NAMESPACE_URI = "http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
}
|
@@ -1,18 +1,20 @@
|
|||||||
package net.woggioni.rbcs.common
|
package net.woggioni.gbcs.common
|
||||||
|
|
||||||
import java.io.IOException
|
import java.io.IOException
|
||||||
import java.io.InputStream
|
import java.io.InputStream
|
||||||
import java.net.URL
|
import java.net.URL
|
||||||
import java.net.URLConnection
|
import java.net.URLConnection
|
||||||
import java.net.URLStreamHandler
|
import java.net.URLStreamHandler
|
||||||
|
import java.net.URLStreamHandlerFactory
|
||||||
import java.net.spi.URLStreamHandlerProvider
|
import java.net.spi.URLStreamHandlerProvider
|
||||||
|
import java.util.Optional
|
||||||
import java.util.concurrent.atomic.AtomicBoolean
|
import java.util.concurrent.atomic.AtomicBoolean
|
||||||
import java.util.stream.Collectors
|
import java.util.stream.Collectors
|
||||||
|
|
||||||
|
|
||||||
class RbcsUrlStreamHandlerFactory : URLStreamHandlerProvider() {
|
class GbcsUrlStreamHandlerFactory : URLStreamHandlerProvider() {
|
||||||
|
|
||||||
private class ClasspathHandler(private val classLoader: ClassLoader = RbcsUrlStreamHandlerFactory::class.java.classLoader) :
|
private class ClasspathHandler(private val classLoader: ClassLoader = GbcsUrlStreamHandlerFactory::class.java.classLoader) :
|
||||||
URLStreamHandler() {
|
URLStreamHandler() {
|
||||||
|
|
||||||
override fun openConnection(u: URL): URLConnection? {
|
override fun openConnection(u: URL): URLConnection? {
|
||||||
@@ -35,17 +37,13 @@ class RbcsUrlStreamHandlerFactory : URLStreamHandlerProvider() {
|
|||||||
private class JpmsHandler : URLStreamHandler() {
|
private class JpmsHandler : URLStreamHandler() {
|
||||||
|
|
||||||
override fun openConnection(u: URL): URLConnection {
|
override fun openConnection(u: URL): URLConnection {
|
||||||
val moduleName = u.host
|
|
||||||
val thisModule = javaClass.module
|
val thisModule = javaClass.module
|
||||||
val sourceModule =
|
val sourceModule = Optional.ofNullable(thisModule)
|
||||||
thisModule
|
.map { obj: Module -> obj.layer }
|
||||||
?.let(Module::getLayer)
|
.flatMap { layer: ModuleLayer ->
|
||||||
?.let { layer: ModuleLayer ->
|
val moduleName = u.host
|
||||||
layer.findModule(moduleName).orElse(null)
|
layer.findModule(moduleName)
|
||||||
} ?: if(thisModule.layer == null) {
|
}.orElse(thisModule)
|
||||||
thisModule
|
|
||||||
} else throw ModuleNotFoundException("Module '$moduleName' not found")
|
|
||||||
|
|
||||||
return JpmsResourceURLConnection(u, sourceModule)
|
return JpmsResourceURLConnection(u, sourceModule)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -56,9 +54,7 @@ class RbcsUrlStreamHandlerFactory : URLStreamHandlerProvider() {
|
|||||||
|
|
||||||
@Throws(IOException::class)
|
@Throws(IOException::class)
|
||||||
override fun getInputStream(): InputStream {
|
override fun getInputStream(): InputStream {
|
||||||
val resource = getURL().path
|
return module.getResourceAsStream(getURL().path)
|
||||||
return module.getResourceAsStream(resource)
|
|
||||||
?: throw ResourceNotFoundException("Resource '$resource' not found in module '${module.name}'")
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -87,12 +83,12 @@ class RbcsUrlStreamHandlerFactory : URLStreamHandlerProvider() {
|
|||||||
private val installed = AtomicBoolean(false)
|
private val installed = AtomicBoolean(false)
|
||||||
fun install() {
|
fun install() {
|
||||||
if (!installed.getAndSet(true)) {
|
if (!installed.getAndSet(true)) {
|
||||||
URL.setURLStreamHandlerFactory(RbcsUrlStreamHandlerFactory())
|
URL.setURLStreamHandlerFactory(GbcsUrlStreamHandlerFactory())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private val packageMap: Map<String, List<Module>> by lazy {
|
private val packageMap: Map<String, List<Module>> by lazy {
|
||||||
RbcsUrlStreamHandlerFactory::class.java.module.layer
|
GbcsUrlStreamHandlerFactory::class.java.module.layer
|
||||||
.modules()
|
.modules()
|
||||||
.stream()
|
.stream()
|
||||||
.flatMap { m: Module ->
|
.flatMap { m: Module ->
|
@@ -1,4 +1,4 @@
|
|||||||
package net.woggioni.rbcs.common
|
package net.woggioni.gbcs.common
|
||||||
|
|
||||||
|
|
||||||
data class HostAndPort(val host: String, val port: Int = 0) {
|
data class HostAndPort(val host: String, val port: Int = 0) {
|
111
gbcs-common/src/main/kotlin/net/woggioni/gbcs/common/Logging.kt
Normal file
@@ -0,0 +1,111 @@
|
|||||||
|
package net.woggioni.gbcs.common
|
||||||
|
|
||||||
|
import org.slf4j.Logger
|
||||||
|
import org.slf4j.LoggerFactory
|
||||||
|
import org.slf4j.event.Level
|
||||||
|
import java.nio.file.Files
|
||||||
|
import java.nio.file.Path
|
||||||
|
import java.util.logging.LogManager
|
||||||
|
|
||||||
|
inline fun <reified T> T.contextLogger() = LoggerFactory.getLogger(T::class.java)
|
||||||
|
|
||||||
|
inline fun Logger.traceParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||||
|
if(isTraceEnabled) {
|
||||||
|
val (format, params) = messageBuilder()
|
||||||
|
trace(format, params)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.debugParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||||
|
if(isDebugEnabled) {
|
||||||
|
val (format, params) = messageBuilder()
|
||||||
|
info(format, params)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.infoParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||||
|
if(isInfoEnabled) {
|
||||||
|
val (format, params) = messageBuilder()
|
||||||
|
info(format, params)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.warnParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||||
|
if(isWarnEnabled) {
|
||||||
|
val (format, params) = messageBuilder()
|
||||||
|
warn(format, params)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.errorParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||||
|
if(isErrorEnabled) {
|
||||||
|
val (format, params) = messageBuilder()
|
||||||
|
error(format, params)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
inline fun log(log : Logger,
|
||||||
|
filter : Logger.() -> Boolean,
|
||||||
|
loggerMethod : Logger.(String) -> Unit, messageBuilder : () -> String) {
|
||||||
|
if(log.filter()) {
|
||||||
|
log.loggerMethod(messageBuilder())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.log(level : Level, messageBuilder : () -> String) {
|
||||||
|
if(isEnabledForLevel(level)) {
|
||||||
|
makeLoggingEventBuilder(level).log(messageBuilder())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.trace(messageBuilder : () -> String) {
|
||||||
|
if(isTraceEnabled) {
|
||||||
|
trace(messageBuilder())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.debug(messageBuilder : () -> String) {
|
||||||
|
if(isDebugEnabled) {
|
||||||
|
debug(messageBuilder())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.info(messageBuilder : () -> String) {
|
||||||
|
if(isInfoEnabled) {
|
||||||
|
info(messageBuilder())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.warn(messageBuilder : () -> String) {
|
||||||
|
if(isWarnEnabled) {
|
||||||
|
warn(messageBuilder())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.error(messageBuilder : () -> String) {
|
||||||
|
if(isErrorEnabled) {
|
||||||
|
error(messageBuilder())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class LoggingConfig {
|
||||||
|
|
||||||
|
init {
|
||||||
|
val logManager = LogManager.getLogManager()
|
||||||
|
System.getProperty("log.config.source")?.let withSource@ { source ->
|
||||||
|
val urls = LoggingConfig::class.java.classLoader.getResources(source)
|
||||||
|
while(urls.hasMoreElements()) {
|
||||||
|
val url = urls.nextElement()
|
||||||
|
url.openStream().use { inputStream ->
|
||||||
|
logManager.readConfiguration(inputStream)
|
||||||
|
return@withSource
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Path.of(source).takeIf(Files::exists)
|
||||||
|
?.let(Files::newInputStream)
|
||||||
|
?.use(logManager::readConfiguration)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1,46 @@
package net.woggioni.gbcs.common

import java.security.SecureRandom
import java.security.spec.KeySpec
import java.util.Base64
import javax.crypto.SecretKeyFactory
import javax.crypto.spec.PBEKeySpec

object PasswordSecurity {
    private const val KEY_LENGTH = 256

    private fun concat(arr1: ByteArray, arr2: ByteArray): ByteArray {
        val result = ByteArray(arr1.size + arr2.size)
        var j = 0
        for(element in arr1) {
            result[j] = element
            j += 1
        }
        for(element in arr2) {
            result[j] = element
            j += 1
        }
        return result
    }

    fun hashPassword(password : String, salt : String? = null) : String {
        val actualSalt = salt?.let(Base64.getDecoder()::decode) ?: SecureRandom().run {
            val result = ByteArray(16)
            nextBytes(result)
            result
        }
        val spec: KeySpec = PBEKeySpec(password.toCharArray(), actualSalt, 10, KEY_LENGTH)
        val factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1")
        val hash = factory.generateSecret(spec).encoded
        return String(Base64.getEncoder().encode(concat(hash, actualSalt)))
    }

    fun decodePasswordHash(passwordHash : String) : Pair<ByteArray, ByteArray> {
        val decoded = Base64.getDecoder().decode(passwordHash)
        val hash = ByteArray(KEY_LENGTH / 8)
        val salt = ByteArray(decoded.size - KEY_LENGTH / 8)
        System.arraycopy(decoded, 0, hash, 0, hash.size)
        System.arraycopy(decoded, hash.size, salt, 0, salt.size)
        return hash to salt
    }
}
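The stored string is the Base64 encoding of the PBKDF2 output followed by the salt. A verification helper is not part of this diff; a minimal sketch built on the two functions above might look like this:

    import java.security.MessageDigest
    import java.util.Base64

    fun verifyPassword(candidate: String, storedHash: String): Boolean {
        val (hash, salt) = PasswordSecurity.decodePasswordHash(storedHash)
        // Re-hash the candidate with the stored salt and compare the two digests.
        val recomputed = PasswordSecurity.hashPassword(candidate, Base64.getEncoder().encodeToString(salt))
        val (recomputedHash, _) = PasswordSecurity.decodePasswordHash(recomputed)
        return MessageDigest.isEqual(hash, recomputedHash)
    }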
@@ -1,6 +1,7 @@
-package net.woggioni.rbcs.common
+package net.woggioni.gbcs.common

 import net.woggioni.jwo.JWO
+import org.slf4j.LoggerFactory
 import org.slf4j.event.Level
 import org.w3c.dom.Document
 import org.w3c.dom.Element
@@ -78,7 +79,7 @@ class Xml(val doc: Document, val element: Element) {
     class ErrorHandler(private val fileURL: URL) : ErrHandler {

         companion object {
-            private val log = createLogger<ErrorHandler>()
+            private val log = LoggerFactory.getLogger(ErrorHandler::class.java)
         }

         override fun warning(ex: SAXParseException)= err(ex, Level.WARN)
@@ -0,0 +1 @@
net.woggioni.gbcs.common.GbcsUrlStreamHandlerFactory
@@ -6,10 +6,10 @@ plugins {
|
|||||||
|
|
||||||
configurations {
|
configurations {
|
||||||
bundle {
|
bundle {
|
||||||
|
extendsFrom runtimeClasspath
|
||||||
canBeResolved = true
|
canBeResolved = true
|
||||||
canBeConsumed = false
|
canBeConsumed = false
|
||||||
visible = false
|
visible = false
|
||||||
transitive = false
|
|
||||||
|
|
||||||
resolutionStrategy {
|
resolutionStrategy {
|
||||||
dependencies {
|
dependencies {
|
||||||
@@ -29,21 +29,10 @@ configurations {
|
|||||||
}
|
}
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
implementation project(':rbcs-common')
|
compileOnly project(':gbcs-common')
|
||||||
implementation project(':rbcs-api')
|
compileOnly project(':gbcs-api')
|
||||||
implementation catalog.jwo
|
compileOnly catalog.jwo
|
||||||
implementation catalog.slf4j.api
|
implementation catalog.xmemcached
|
||||||
implementation catalog.netty.common
|
|
||||||
implementation catalog.netty.handler
|
|
||||||
implementation catalog.netty.codec.memcache
|
|
||||||
|
|
||||||
bundle catalog.netty.codec.memcache
|
|
||||||
|
|
||||||
testRuntimeOnly catalog.logback.classic
|
|
||||||
}
|
|
||||||
|
|
||||||
tasks.named(JavaPlugin.TEST_TASK_NAME, Test) {
|
|
||||||
systemProperty("io.netty.leakDetectionLevel", "PARANOID")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Provider<Tar> bundleTask = tasks.register("bundle", Tar) {
|
Provider<Tar> bundleTask = tasks.register("bundle", Tar) {
|
gbcs-server-memcached/src/main/java/module-info.java
@@ -0,0 +1,14 @@
import net.woggioni.gbcs.api.CacheProvider;

module net.woggioni.gbcs.server.memcached {
    requires net.woggioni.gbcs.common;
    requires net.woggioni.gbcs.api;
    requires com.googlecode.xmemcached;
    requires net.woggioni.jwo;
    requires java.xml;
    requires kotlin.stdlib;

    provides CacheProvider with net.woggioni.gbcs.server.memcached.MemcachedCacheProvider;

    opens net.woggioni.gbcs.server.memcached.schema;
}
@@ -0,0 +1,59 @@
package net.woggioni.gbcs.server.memcached

import net.rubyeye.xmemcached.XMemcachedClientBuilder
import net.rubyeye.xmemcached.command.BinaryCommandFactory
import net.rubyeye.xmemcached.transcoders.CompressionMode
import net.rubyeye.xmemcached.transcoders.SerializingTranscoder
import net.woggioni.gbcs.api.Cache
import net.woggioni.gbcs.api.exception.ContentTooLargeException
import net.woggioni.gbcs.common.HostAndPort
import net.woggioni.jwo.JWO
import java.io.ByteArrayInputStream
import java.net.InetSocketAddress
import java.nio.channels.Channels
import java.nio.channels.ReadableByteChannel
import java.nio.charset.StandardCharsets
import java.security.MessageDigest
import java.time.Duration

class MemcachedCache(
    servers: List<HostAndPort>,
    private val maxAge: Duration,
    maxSize : Int,
    digestAlgorithm: String?,
    compressionMode: CompressionMode,
) : Cache {
    private val memcachedClient = XMemcachedClientBuilder(
        servers.stream().map { addr: HostAndPort -> InetSocketAddress(addr.host, addr.port) }.toList()
    ).apply {
        commandFactory = BinaryCommandFactory()
        digestAlgorithm?.let { dAlg ->
            setKeyProvider { key ->
                val md = MessageDigest.getInstance(dAlg)
                md.update(key.toByteArray(StandardCharsets.UTF_8))
                JWO.bytesToHex(md.digest())
            }
        }
        transcoder = SerializingTranscoder(maxSize).apply {
            setCompressionMode(compressionMode)
        }
    }.build()

    override fun get(key: String): ReadableByteChannel? {
        return memcachedClient.get<ByteArray>(key)
            ?.let(::ByteArrayInputStream)
            ?.let(Channels::newChannel)
    }

    override fun put(key: String, content: ByteArray) {
        try {
            memcachedClient[key, maxAge.toSeconds().toInt()] = content
        } catch (e: IllegalArgumentException) {
            throw ContentTooLargeException(e.message, e)
        }
    }

    override fun close() {
        memcachedClient.shutdown()
    }
}
@@ -0,0 +1,26 @@
package net.woggioni.gbcs.server.memcached

import net.rubyeye.xmemcached.transcoders.CompressionMode
import net.woggioni.gbcs.api.Configuration
import net.woggioni.gbcs.common.HostAndPort
import java.time.Duration

data class MemcachedCacheConfiguration(
    var servers: List<HostAndPort>,
    var maxAge: Duration = Duration.ofDays(1),
    var maxSize: Int = 0x100000,
    var digestAlgorithm: String? = null,
    var compressionMode: CompressionMode = CompressionMode.ZIP,
) : Configuration.Cache {
    override fun materialize() = MemcachedCache(
        servers,
        maxAge,
        maxSize,
        digestAlgorithm,
        compressionMode
    )

    override fun getNamespaceURI() = "urn:net.woggioni.gbcs.server.memcached"

    override fun getTypeName() = "memcachedCacheType"
}
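A minimal sketch of building this configuration in code and materializing it into a cache (the server address, TTL and digest are illustrative, and a reachable memcached instance is assumed):

    fun memcachedCacheExample() {
        val cfg = MemcachedCacheConfiguration(
            servers = listOf(HostAndPort("127.0.0.1", 11211)),
            maxAge = Duration.ofHours(6),
            digestAlgorithm = "SHA-256",
        )
        val cache = cfg.materialize()
        try {
            cache.put("some-build-key", "payload".toByteArray())
            val hit = cache.get("some-build-key") // ReadableByteChannel, or null on a miss
        } finally {
            cache.close()
        }
    }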
@@ -0,0 +1,88 @@
package net.woggioni.gbcs.server.memcached

import net.rubyeye.xmemcached.transcoders.CompressionMode
import net.woggioni.gbcs.api.CacheProvider
import net.woggioni.gbcs.api.exception.ConfigurationException
import net.woggioni.gbcs.common.GBCS
import net.woggioni.gbcs.common.HostAndPort
import net.woggioni.gbcs.common.Xml
import net.woggioni.gbcs.common.Xml.Companion.asIterable
import net.woggioni.gbcs.common.Xml.Companion.renderAttribute
import org.w3c.dom.Document
import org.w3c.dom.Element
import java.time.Duration

class MemcachedCacheProvider : CacheProvider<MemcachedCacheConfiguration> {
    override fun getXmlSchemaLocation() = "jpms://net.woggioni.gbcs.server.memcached/net/woggioni/gbcs/server/memcached/schema/gbcs-memcached.xsd"

    override fun getXmlType() = "memcachedCacheType"

    override fun getXmlNamespace() = "urn:net.woggioni.gbcs.server.memcached"

    val xmlNamespacePrefix : String
        get() = "gbcs-memcached"

    override fun deserialize(el: Element): MemcachedCacheConfiguration {
        val servers = mutableListOf<HostAndPort>()
        val maxAge = el.renderAttribute("max-age")
            ?.let(Duration::parse)
            ?: Duration.ofDays(1)
        val maxSize = el.renderAttribute("max-size")
            ?.let(String::toInt)
            ?: 0x100000
        val compressionMode = el.renderAttribute("compression-mode")
            ?.let {
                when (it) {
                    "gzip" -> CompressionMode.GZIP
                    "zip" -> CompressionMode.ZIP
                    else -> CompressionMode.ZIP
                }
            }
            ?: CompressionMode.ZIP
        val digestAlgorithm = el.renderAttribute("digest")
        for (child in el.asIterable()) {
            when (child.nodeName) {
                "server" -> {
                    val host = child.renderAttribute("host") ?: throw ConfigurationException("host attribute is required")
                    val port = child.renderAttribute("port")?.toInt() ?: throw ConfigurationException("port attribute is required")
                    servers.add(HostAndPort(host, port))
                }
            }
        }

        return MemcachedCacheConfiguration(
            servers,
            maxAge,
            maxSize,
            digestAlgorithm,
            compressionMode,
        )
    }

    override fun serialize(doc: Document, cache: MemcachedCacheConfiguration) = cache.run {
        val result = doc.createElement("cache")
        Xml.of(doc, result) {
            attr("xmlns:${xmlNamespacePrefix}", xmlNamespace, namespaceURI = "http://www.w3.org/2000/xmlns/")
            attr("xs:type", "${xmlNamespacePrefix}:$xmlType", GBCS.XML_SCHEMA_NAMESPACE_URI)
            for (server in servers) {
                node("server") {
                    attr("host", server.host)
                    attr("port", server.port.toString())
                }
            }
            attr("max-age", maxAge.toString())
            attr("max-size", maxSize.toString())
            digestAlgorithm?.let { digestAlgorithm ->
                attr("digest", digestAlgorithm)
            }
            attr(
                "compression-mode", when (compressionMode) {
                    CompressionMode.GZIP -> "gzip"
                    CompressionMode.ZIP -> "zip"
                }
            )
        }
        result
    }
}
@@ -0,0 +1 @@
net.woggioni.gbcs.server.memcached.MemcachedCacheProvider
@@ -0,0 +1,35 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<xs:schema targetNamespace="urn:net.woggioni.gbcs.server.memcached"
           xmlns:gbcs-memcached="urn:net.woggioni.gbcs.server.memcached"
           xmlns:gbcs="urn:net.woggioni.gbcs.server"
           xmlns:xs="http://www.w3.org/2001/XMLSchema">

    <xs:import schemaLocation="jpms://net.woggioni.gbcs.server/net/woggioni/gbcs/server/schema/gbcs.xsd" namespace="urn:net.woggioni.gbcs.server"/>

    <xs:complexType name="memcachedServerType">
        <xs:attribute name="host" type="xs:token" use="required"/>
        <xs:attribute name="port" type="xs:positiveInteger" use="required"/>
    </xs:complexType>

    <xs:complexType name="memcachedCacheType">
        <xs:complexContent>
            <xs:extension base="gbcs:cacheType">
                <xs:sequence maxOccurs="unbounded">
                    <xs:element name="server" type="gbcs-memcached:memcachedServerType"/>
                </xs:sequence>
                <xs:attribute name="max-age" type="xs:duration" default="P1D"/>
                <xs:attribute name="max-size" type="xs:unsignedInt" default="1048576"/>
                <xs:attribute name="digest" type="xs:token" />
                <xs:attribute name="compression-mode" type="gbcs-memcached:compressionType" default="zip"/>
            </xs:extension>
        </xs:complexContent>
    </xs:complexType>

    <xs:simpleType name="compressionType">
        <xs:restriction base="xs:token">
            <xs:enumeration value="zip"/>
            <xs:enumeration value="gzip"/>
        </xs:restriction>
    </xs:simpleType>

</xs:schema>
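For reference, a cache element of the shape that this schema accepts and that MemcachedCacheProvider.serialize above emits, shown here as a Kotlin raw string (namespace prefixes and attribute values are illustrative):

    val memcachedCacheXml = """
        <cache xmlns:gbcs-memcached="urn:net.woggioni.gbcs.server.memcached"
               xs:type="gbcs-memcached:memcachedCacheType"
               max-age="P7D"
               max-size="16777216"
               compression-mode="gzip">
            <server host="127.0.0.1" port="11211"/>
        </cache>
    """.trimIndent()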
@@ -9,12 +9,9 @@ dependencies {
|
|||||||
implementation catalog.jwo
|
implementation catalog.jwo
|
||||||
implementation catalog.slf4j.api
|
implementation catalog.slf4j.api
|
||||||
implementation catalog.netty.codec.http
|
implementation catalog.netty.codec.http
|
||||||
implementation catalog.netty.handler
|
|
||||||
implementation catalog.netty.buffer
|
|
||||||
implementation catalog.netty.transport
|
|
||||||
|
|
||||||
api project(':rbcs-common')
|
api project(':gbcs-common')
|
||||||
api project(':rbcs-api')
|
api project(':gbcs-api')
|
||||||
|
|
||||||
// runtimeOnly catalog.slf4j.jdk14
|
// runtimeOnly catalog.slf4j.jdk14
|
||||||
testRuntimeOnly catalog.logback.classic
|
testRuntimeOnly catalog.logback.classic
|
||||||
@@ -22,7 +19,7 @@ dependencies {
|
|||||||
testImplementation catalog.bcprov.jdk18on
|
testImplementation catalog.bcprov.jdk18on
|
||||||
testImplementation catalog.bcpkix.jdk18on
|
testImplementation catalog.bcpkix.jdk18on
|
||||||
|
|
||||||
testRuntimeOnly project(":rbcs-server-memcache")
|
testRuntimeOnly project(":gbcs-server-memcached")
|
||||||
}
|
}
|
||||||
|
|
||||||
test {
|
test {
|
||||||
@@ -39,4 +36,3 @@ publishing {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|
@@ -1,8 +1,8 @@
-import net.woggioni.rbcs.api.CacheProvider;
-import net.woggioni.rbcs.server.cache.FileSystemCacheProvider;
-import net.woggioni.rbcs.server.cache.InMemoryCacheProvider;
+import net.woggioni.gbcs.api.CacheProvider;
+import net.woggioni.gbcs.server.cache.FileSystemCacheProvider;
+import net.woggioni.gbcs.server.cache.InMemoryCacheProvider;

-module net.woggioni.rbcs.server {
+module net.woggioni.gbcs.server {
     requires java.sql;
     requires java.xml;
     requires java.logging;
@@ -16,13 +16,13 @@ module net.woggioni.rbcs.server {
     requires io.netty.codec;
     requires org.slf4j;
     requires net.woggioni.jwo;
-    requires net.woggioni.rbcs.common;
-    requires net.woggioni.rbcs.api;
+    requires net.woggioni.gbcs.common;
+    requires net.woggioni.gbcs.api;

-    exports net.woggioni.rbcs.server;
+    exports net.woggioni.gbcs.server;

-    opens net.woggioni.rbcs.server;
-    opens net.woggioni.rbcs.server.schema;
+    opens net.woggioni.gbcs.server;
+    opens net.woggioni.gbcs.server.schema;

     uses CacheProvider;
     provides CacheProvider with FileSystemCacheProvider, InMemoryCacheProvider;
@@ -1,9 +1,8 @@
|
|||||||
package net.woggioni.rbcs.server
|
package net.woggioni.gbcs.server
|
||||||
|
|
||||||
import io.netty.bootstrap.ServerBootstrap
|
import io.netty.bootstrap.ServerBootstrap
|
||||||
import io.netty.buffer.ByteBuf
|
import io.netty.buffer.ByteBuf
|
||||||
import io.netty.channel.Channel
|
import io.netty.channel.Channel
|
||||||
import io.netty.channel.ChannelFactory
|
|
||||||
import io.netty.channel.ChannelFuture
|
import io.netty.channel.ChannelFuture
|
||||||
import io.netty.channel.ChannelHandler.Sharable
|
import io.netty.channel.ChannelHandler.Sharable
|
||||||
import io.netty.channel.ChannelHandlerContext
|
import io.netty.channel.ChannelHandlerContext
|
||||||
@@ -12,16 +11,12 @@ import io.netty.channel.ChannelInitializer
|
|||||||
import io.netty.channel.ChannelOption
|
import io.netty.channel.ChannelOption
|
||||||
import io.netty.channel.ChannelPromise
|
import io.netty.channel.ChannelPromise
|
||||||
import io.netty.channel.nio.NioEventLoopGroup
|
import io.netty.channel.nio.NioEventLoopGroup
|
||||||
import io.netty.channel.socket.DatagramChannel
|
|
||||||
import io.netty.channel.socket.ServerSocketChannel
|
|
||||||
import io.netty.channel.socket.SocketChannel
|
|
||||||
import io.netty.channel.socket.nio.NioDatagramChannel
|
|
||||||
import io.netty.channel.socket.nio.NioServerSocketChannel
|
import io.netty.channel.socket.nio.NioServerSocketChannel
|
||||||
import io.netty.channel.socket.nio.NioSocketChannel
|
|
||||||
import io.netty.handler.codec.compression.CompressionOptions
|
import io.netty.handler.codec.compression.CompressionOptions
|
||||||
import io.netty.handler.codec.http.DefaultHttpContent
|
import io.netty.handler.codec.http.DefaultHttpContent
|
||||||
import io.netty.handler.codec.http.HttpContentCompressor
|
import io.netty.handler.codec.http.HttpContentCompressor
|
||||||
import io.netty.handler.codec.http.HttpHeaderNames
|
import io.netty.handler.codec.http.HttpHeaderNames
|
||||||
|
import io.netty.handler.codec.http.HttpObjectAggregator
|
||||||
import io.netty.handler.codec.http.HttpRequest
|
import io.netty.handler.codec.http.HttpRequest
|
||||||
import io.netty.handler.codec.http.HttpServerCodec
|
import io.netty.handler.codec.http.HttpServerCodec
|
||||||
import io.netty.handler.ssl.ClientAuth
|
import io.netty.handler.ssl.ClientAuth
|
||||||
@@ -35,57 +30,50 @@ import io.netty.handler.timeout.IdleStateHandler
|
|||||||
import io.netty.util.AttributeKey
|
import io.netty.util.AttributeKey
|
||||||
import io.netty.util.concurrent.DefaultEventExecutorGroup
|
import io.netty.util.concurrent.DefaultEventExecutorGroup
|
||||||
import io.netty.util.concurrent.EventExecutorGroup
|
import io.netty.util.concurrent.EventExecutorGroup
|
||||||
import net.woggioni.rbcs.api.AsyncCloseable
|
import net.woggioni.gbcs.api.Configuration
|
||||||
import net.woggioni.rbcs.api.Configuration
|
import net.woggioni.gbcs.api.exception.ConfigurationException
|
||||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
import net.woggioni.gbcs.common.GBCS.toUrl
|
||||||
import net.woggioni.rbcs.common.PasswordSecurity.decodePasswordHash
|
import net.woggioni.gbcs.common.PasswordSecurity.decodePasswordHash
|
||||||
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
|
import net.woggioni.gbcs.common.PasswordSecurity.hashPassword
|
||||||
import net.woggioni.rbcs.common.RBCS.getTrustManager
|
import net.woggioni.gbcs.common.Xml
|
||||||
import net.woggioni.rbcs.common.RBCS.loadKeystore
|
import net.woggioni.gbcs.common.contextLogger
|
||||||
import net.woggioni.rbcs.common.RBCS.toUrl
|
import net.woggioni.gbcs.common.debug
|
||||||
import net.woggioni.rbcs.common.Xml
|
import net.woggioni.gbcs.common.info
|
||||||
import net.woggioni.rbcs.common.createLogger
|
import net.woggioni.gbcs.server.auth.AbstractNettyHttpAuthenticator
|
||||||
import net.woggioni.rbcs.common.debug
|
import net.woggioni.gbcs.server.auth.Authorizer
|
||||||
import net.woggioni.rbcs.common.info
|
import net.woggioni.gbcs.server.auth.ClientCertificateValidator
|
||||||
import net.woggioni.rbcs.server.auth.AbstractNettyHttpAuthenticator
|
import net.woggioni.gbcs.server.auth.RoleAuthorizer
|
||||||
import net.woggioni.rbcs.server.auth.Authorizer
|
import net.woggioni.gbcs.server.configuration.Parser
|
||||||
import net.woggioni.rbcs.server.auth.RoleAuthorizer
|
import net.woggioni.gbcs.server.configuration.Serializer
|
||||||
import net.woggioni.rbcs.server.configuration.Parser
|
import net.woggioni.gbcs.server.exception.ExceptionHandler
|
||||||
import net.woggioni.rbcs.server.configuration.Serializer
|
import net.woggioni.gbcs.server.handler.ServerHandler
|
||||||
import net.woggioni.rbcs.server.exception.ExceptionHandler
|
import net.woggioni.gbcs.server.throttling.ThrottlingHandler
|
||||||
import net.woggioni.rbcs.server.handler.MaxRequestSizeHandler
|
import net.woggioni.jwo.JWO
|
||||||
import net.woggioni.rbcs.server.handler.ServerHandler
|
import net.woggioni.jwo.Tuple2
|
||||||
import net.woggioni.rbcs.server.handler.TraceHandler
|
|
||||||
import net.woggioni.rbcs.server.throttling.BucketManager
|
|
||||||
import net.woggioni.rbcs.server.throttling.ThrottlingHandler
|
|
||||||
import java.io.OutputStream
|
import java.io.OutputStream
|
||||||
import java.net.InetSocketAddress
|
import java.net.InetSocketAddress
|
||||||
import java.nio.file.Files
|
import java.nio.file.Files
|
||||||
import java.nio.file.Path
|
import java.nio.file.Path
|
||||||
|
import java.security.KeyStore
|
||||||
import java.security.PrivateKey
|
import java.security.PrivateKey
|
||||||
import java.security.cert.X509Certificate
|
import java.security.cert.X509Certificate
|
||||||
import java.time.Duration
|
|
||||||
import java.time.Instant
|
|
||||||
import java.util.Arrays
|
import java.util.Arrays
|
||||||
import java.util.Base64
|
import java.util.Base64
|
||||||
import java.util.concurrent.CompletableFuture
|
|
||||||
import java.util.concurrent.Future
|
|
||||||
import java.util.concurrent.TimeUnit
|
import java.util.concurrent.TimeUnit
|
||||||
import java.util.concurrent.TimeoutException
|
|
||||||
import java.util.regex.Matcher
|
import java.util.regex.Matcher
|
||||||
import java.util.regex.Pattern
|
import java.util.regex.Pattern
|
||||||
import javax.naming.ldap.LdapName
|
import javax.naming.ldap.LdapName
|
||||||
import javax.net.ssl.SSLPeerUnverifiedException
|
import javax.net.ssl.SSLPeerUnverifiedException
|
||||||
|
|
||||||
class RemoteBuildCacheServer(private val cfg: Configuration) {
|
class GradleBuildCacheServer(private val cfg: Configuration) {
|
||||||
|
private val log = contextLogger()
|
||||||
|
|
||||||
companion object {
|
companion object {
|
||||||
private val log = createLogger<RemoteBuildCacheServer>()
|
|
||||||
|
|
||||||
val userAttribute: AttributeKey<Configuration.User> = AttributeKey.valueOf("user")
|
val userAttribute: AttributeKey<Configuration.User> = AttributeKey.valueOf("user")
|
||||||
val groupAttribute: AttributeKey<Set<Configuration.Group>> = AttributeKey.valueOf("group")
|
val groupAttribute: AttributeKey<Set<Configuration.Group>> = AttributeKey.valueOf("group")
|
||||||
|
|
||||||
val DEFAULT_CONFIGURATION_URL by lazy { "jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/rbcs-default.xml".toUrl() }
|
val DEFAULT_CONFIGURATION_URL by lazy { "classpath:net/woggioni/gbcs/gbcs-default.xml".toUrl() }
|
||||||
private const val SSL_HANDLER_NAME = "sslHandler"
|
private const val SSL_HANDLER_NAME = "sslHandler"
|
||||||
|
|
||||||
fun loadConfiguration(configurationFile: Path): Configuration {
|
fun loadConfiguration(configurationFile: Path): Configuration {
|
||||||
@@ -140,12 +128,11 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
|
|||||||
val clientCertificate = peerCertificates.first() as X509Certificate
|
val clientCertificate = peerCertificates.first() as X509Certificate
|
||||||
val user = userExtractor?.extract(clientCertificate)
|
val user = userExtractor?.extract(clientCertificate)
|
||||||
val group = groupExtractor?.extract(clientCertificate)
|
val group = groupExtractor?.extract(clientCertificate)
|
||||||
val allGroups =
|
val allGroups = ((user?.groups ?: emptySet()).asSequence() + sequenceOf(group).filterNotNull()).toSet()
|
||||||
((user?.groups ?: emptySet()).asSequence() + sequenceOf(group).filterNotNull()).toSet()
|
|
||||||
AuthenticationResult(user, allGroups)
|
AuthenticationResult(user, allGroups)
|
||||||
} ?: anonymousUserGroups?.let { AuthenticationResult(null, it) }
|
} ?: anonymousUserGroups?.let{ AuthenticationResult(null, it) }
|
||||||
} catch (es: SSLPeerUnverifiedException) {
|
} catch (es: SSLPeerUnverifiedException) {
|
||||||
anonymousUserGroups?.let { AuthenticationResult(null, it) }
|
anonymousUserGroups?.let{ AuthenticationResult(null, it) }
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -154,9 +141,7 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
|
|||||||
private class NettyHttpBasicAuthenticator(
|
private class NettyHttpBasicAuthenticator(
|
||||||
private val users: Map<String, Configuration.User>, authorizer: Authorizer
|
private val users: Map<String, Configuration.User>, authorizer: Authorizer
|
||||||
) : AbstractNettyHttpAuthenticator(authorizer) {
|
) : AbstractNettyHttpAuthenticator(authorizer) {
|
||||||
companion object {
|
private val log = contextLogger()
|
||||||
private val log = createLogger<NettyHttpBasicAuthenticator>()
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): AuthenticationResult? {
|
override fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): AuthenticationResult? {
|
||||||
val authorizationHeader = req.headers()[HttpHeaderNames.AUTHORIZATION] ?: let {
|
val authorizationHeader = req.headers()[HttpHeaderNames.AUTHORIZATION] ?: let {
|
||||||
@@ -205,10 +190,8 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
|
|||||||
|
|
||||||
private class ServerInitializer(
|
private class ServerInitializer(
|
||||||
private val cfg: Configuration,
|
private val cfg: Configuration,
|
||||||
private val channelFactory : ChannelFactory<SocketChannel>,
|
|
||||||
private val datagramChannelFactory : ChannelFactory<DatagramChannel>,
|
|
||||||
private val eventExecutorGroup: EventExecutorGroup
|
private val eventExecutorGroup: EventExecutorGroup
|
||||||
) : ChannelInitializer<Channel>(), AsyncCloseable {
|
) : ChannelInitializer<Channel>() {
|
||||||
|
|
||||||
companion object {
|
companion object {
|
||||||
private fun createSslCtx(tls: Configuration.Tls): SslContext {
|
private fun createSslCtx(tls: Configuration.Tls): SslContext {
|
||||||
@@ -228,9 +211,9 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
|
|||||||
val clientAuth = tls.trustStore?.let { trustStore ->
|
val clientAuth = tls.trustStore?.let { trustStore ->
|
||||||
val ts = loadKeystore(trustStore.file, trustStore.password)
|
val ts = loadKeystore(trustStore.file, trustStore.password)
|
||||||
trustManager(
|
trustManager(
|
||||||
getTrustManager(ts, trustStore.isCheckCertificateStatus)
|
ClientCertificateValidator.getTrustManager(ts, trustStore.isCheckCertificateStatus)
|
||||||
)
|
)
|
||||||
if (trustStore.isRequireClientCertificate) ClientAuth.REQUIRE
|
if(trustStore.isRequireClientCertificate) ClientAuth.REQUIRE
|
||||||
else ClientAuth.OPTIONAL
|
else ClientAuth.OPTIONAL
|
||||||
} ?: ClientAuth.NONE
|
} ?: ClientAuth.NONE
|
||||||
clientAuth(clientAuth)
|
clientAuth(clientAuth)
|
||||||
@@ -238,12 +221,38 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private val log = createLogger<ServerInitializer>()
|
fun loadKeystore(file: Path, password: String?): KeyStore {
|
||||||
|
val ext = JWO.splitExtension(file)
|
||||||
|
.map(Tuple2<String, String>::get_2)
|
||||||
|
.orElseThrow {
|
||||||
|
IllegalArgumentException(
|
||||||
|
"Keystore file '${file}' must have .jks, .p12, .pfx extension"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
val keystore = when (ext.substring(1).lowercase()) {
|
||||||
|
"jks" -> KeyStore.getInstance("JKS")
|
||||||
|
"p12", "pfx" -> KeyStore.getInstance("PKCS12")
|
||||||
|
else -> throw IllegalArgumentException(
|
||||||
|
"Keystore file '${file}' must have .jks, .p12, .pfx extension"
|
||||||
|
)
|
||||||
|
}
|
||||||
|
Files.newInputStream(file).use {
|
||||||
|
keystore.load(it, password?.let(String::toCharArray))
|
||||||
|
}
|
||||||
|
return keystore
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private val cacheHandlerFactory = cfg.cache.materialize()
|
private val log = contextLogger()
|
||||||
|
|
||||||
private val bucketManager = BucketManager.from(cfg)
|
private val serverHandler = let {
|
||||||
|
val cacheImplementation = cfg.cache.materialize()
|
||||||
|
val prefix = Path.of("/").resolve(Path.of(cfg.serverPath ?: "/"))
|
||||||
|
ServerHandler(cacheImplementation, prefix)
|
||||||
|
}
|
||||||
|
|
||||||
|
private val exceptionHandler = ExceptionHandler()
|
||||||
|
private val throttlingHandler = ThrottlingHandler(cfg)
|
||||||
|
|
||||||
private val authenticator = when (val auth = cfg.authentication) {
|
private val authenticator = when (val auth = cfg.authentication) {
|
||||||
is Configuration.BasicAuthentication -> NettyHttpBasicAuthenticator(cfg.users, RoleAuthorizer())
|
is Configuration.BasicAuthentication -> NettyHttpBasicAuthenticator(cfg.users, RoleAuthorizer())
|
||||||
@@ -300,10 +309,23 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
|
|||||||
}
|
}
|
||||||
val pipeline = ch.pipeline()
|
val pipeline = ch.pipeline()
|
||||||
cfg.connection.also { conn ->
|
cfg.connection.also { conn ->
|
||||||
|
val readTimeout = conn.readTimeout.toMillis()
|
||||||
|
val writeTimeout = conn.writeTimeout.toMillis()
|
||||||
|
if(readTimeout > 0 || writeTimeout > 0) {
|
||||||
|
pipeline.addLast(
|
||||||
|
IdleStateHandler(
|
||||||
|
false,
|
||||||
|
readTimeout,
|
||||||
|
writeTimeout,
|
||||||
|
0,
|
||||||
|
TimeUnit.MILLISECONDS
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
val readIdleTimeout = conn.readIdleTimeout.toMillis()
|
val readIdleTimeout = conn.readIdleTimeout.toMillis()
|
||||||
val writeIdleTimeout = conn.writeIdleTimeout.toMillis()
|
val writeIdleTimeout = conn.writeIdleTimeout.toMillis()
|
||||||
val idleTimeout = conn.idleTimeout.toMillis()
|
val idleTimeout = conn.idleTimeout.toMillis()
|
||||||
if (readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
|
if(readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
|
||||||
pipeline.addLast(
|
pipeline.addLast(
|
||||||
IdleStateHandler(
|
IdleStateHandler(
|
||||||
true,
|
true,
|
||||||
@@ -318,19 +340,16 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
|
|||||||
pipeline.addLast(object : ChannelInboundHandlerAdapter() {
|
pipeline.addLast(object : ChannelInboundHandlerAdapter() {
|
||||||
override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
|
override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
|
||||||
if (evt is IdleStateEvent) {
|
if (evt is IdleStateEvent) {
|
||||||
when (evt.state()) {
|
when(evt.state()) {
|
||||||
IdleState.READER_IDLE -> log.debug {
|
IdleState.READER_IDLE -> log.debug {
|
||||||
"Read timeout reached on channel ${ch.id().asShortText()}, closing the connection"
|
"Read timeout reached on channel ${ch.id().asShortText()}, closing the connection"
|
||||||
}
|
}
|
||||||
|
|
||||||
IdleState.WRITER_IDLE -> log.debug {
|
IdleState.WRITER_IDLE -> log.debug {
|
||||||
"Write timeout reached on channel ${ch.id().asShortText()}, closing the connection"
|
"Write timeout reached on channel ${ch.id().asShortText()}, closing the connection"
|
||||||
}
|
}
|
||||||
|
|
||||||
IdleState.ALL_IDLE -> log.debug {
|
IdleState.ALL_IDLE -> log.debug {
|
||||||
"Idle timeout reached on channel ${ch.id().asShortText()}, closing the connection"
|
"Idle timeout reached on channel ${ch.id().asShortText()}, closing the connection"
|
||||||
}
|
}
|
||||||
|
|
||||||
null -> throw IllegalStateException("This should never happen")
|
null -> throw IllegalStateException("This should never happen")
|
||||||
}
|
}
|
||||||
ctx.close()
|
ctx.close()
|
||||||
@@ -341,112 +360,49 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
|
|||||||
pipeline.addLast(SSL_HANDLER_NAME, it)
|
pipeline.addLast(SSL_HANDLER_NAME, it)
|
||||||
}
|
}
|
||||||
pipeline.addLast(HttpServerCodec())
|
pipeline.addLast(HttpServerCodec())
|
||||||
pipeline.addLast(MaxRequestSizeHandler.NAME, MaxRequestSizeHandler(cfg.connection.maxRequestSize))
|
|
||||||
pipeline.addLast(HttpChunkContentCompressor(1024))
|
pipeline.addLast(HttpChunkContentCompressor(1024))
|
||||||
pipeline.addLast(ChunkedWriteHandler())
|
pipeline.addLast(ChunkedWriteHandler())
|
||||||
|
pipeline.addLast(HttpObjectAggregator(cfg.connection.maxRequestSize))
|
||||||
authenticator?.let {
|
authenticator?.let {
|
||||||
pipeline.addLast(it)
|
pipeline.addLast(it)
|
||||||
}
|
}
|
||||||
pipeline.addLast(ThrottlingHandler(bucketManager, cfg.connection))
|
pipeline.addLast(throttlingHandler)
|
||||||
|
pipeline.addLast(eventExecutorGroup, serverHandler)
|
||||||
val serverHandler = let {
|
pipeline.addLast(exceptionHandler)
|
||||||
val prefix = Path.of("/").resolve(Path.of(cfg.serverPath ?: "/"))
|
|
||||||
ServerHandler(prefix)
|
|
||||||
}
|
|
||||||
pipeline.addLast(eventExecutorGroup, ServerHandler.NAME, serverHandler)
|
|
||||||
|
|
||||||
pipeline.addLast(cacheHandlerFactory.newHandler(ch.eventLoop(), channelFactory, datagramChannelFactory))
|
|
||||||
pipeline.addLast(TraceHandler)
|
|
||||||
pipeline.addLast(ExceptionHandler)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
override fun asyncClose() = cacheHandlerFactory.asyncClose()
|
|
||||||
}
|
}
|
||||||
|
|
||||||
class ServerHandle(
|
class ServerHandle(
|
||||||
closeFuture: ChannelFuture,
|
httpChannelFuture: ChannelFuture,
|
||||||
private val bossGroup: EventExecutorGroup,
|
private val executorGroups: Iterable<EventExecutorGroup>
|
||||||
private val executorGroups: Iterable<EventExecutorGroup>,
|
) : AutoCloseable {
|
||||||
private val serverInitializer: AsyncCloseable,
|
private val httpChannel: Channel = httpChannelFuture.channel()
|
||||||
) : Future<Void> by from(closeFuture, executorGroups, serverInitializer) {
|
private val closeFuture: ChannelFuture = httpChannel.closeFuture()
|
||||||
|
private val log = contextLogger()
|
||||||
|
|
||||||
companion object {
|
fun shutdown(): ChannelFuture {
|
||||||
private val log = createLogger<ServerHandle>()
|
return httpChannel.close()
|
||||||
|
|
||||||
private fun from(
|
|
||||||
closeFuture: ChannelFuture,
|
|
||||||
executorGroups: Iterable<EventExecutorGroup>,
|
|
||||||
serverInitializer: AsyncCloseable
|
|
||||||
): CompletableFuture<Void> {
|
|
||||||
val result = CompletableFuture<Void>()
|
|
||||||
closeFuture.addListener {
|
|
||||||
val errors = mutableListOf<Throwable>()
|
|
||||||
val deadline = Instant.now().plusSeconds(20)
|
|
||||||
try {
|
|
||||||
serverInitializer.close()
|
|
||||||
} catch (ex: Throwable) {
|
|
||||||
log.error(ex.message, ex)
|
|
||||||
errors.addLast(ex)
|
|
||||||
}
|
|
||||||
|
|
||||||
serverInitializer.asyncClose().whenComplete { _, ex ->
|
|
||||||
if(ex != null) {
|
|
||||||
log.error(ex.message, ex)
|
|
||||||
errors.addLast(ex)
|
|
||||||
}
|
|
||||||
|
|
||||||
executorGroups.map {
|
|
||||||
it.shutdownGracefully()
|
|
||||||
}
|
|
||||||
|
|
||||||
for (executorGroup in executorGroups) {
|
|
||||||
val future = executorGroup.terminationFuture()
|
|
||||||
try {
|
|
||||||
val now = Instant.now()
|
|
||||||
if (now > deadline) {
|
|
||||||
future.get(0, TimeUnit.SECONDS)
|
|
||||||
} else {
|
|
||||||
future.get(Duration.between(now, deadline).toMillis(), TimeUnit.MILLISECONDS)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
catch (te: TimeoutException) {
|
|
||||||
errors.addLast(te)
|
|
||||||
log.warn("Timeout while waiting for shutdown of $executorGroup", te)
|
|
||||||
} catch (ex: Throwable) {
|
|
||||||
log.warn(ex.message, ex)
|
|
||||||
errors.addLast(ex)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if(errors.isEmpty()) {
|
|
||||||
result.complete(null)
|
|
||||||
} else {
|
|
||||||
result.completeExceptionally(errors.first())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return result.thenAccept {
|
|
||||||
log.info {
|
|
||||||
"RemoteBuildCacheServer has been gracefully shut down"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
override fun close() {
|
||||||
fun sendShutdownSignal() {
|
try {
|
||||||
bossGroup.shutdownGracefully()
|
closeFuture.sync()
|
||||||
|
} finally {
|
||||||
|
executorGroups.forEach {
|
||||||
|
it.shutdownGracefully().sync()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
log.info {
|
||||||
|
"GradleBuildCacheServer has been gracefully shut down"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fun run(): ServerHandle {
|
fun run(): ServerHandle {
|
||||||
// Create the multithreaded event loops for the server
|
// Create the multithreaded event loops for the server
|
||||||
val bossGroup = NioEventLoopGroup(1)
|
val bossGroup = NioEventLoopGroup(0)
|
||||||
val channelFactory = ChannelFactory<SocketChannel> { NioSocketChannel() }
|
val serverSocketChannel = NioServerSocketChannel::class.java
|
||||||
val datagramChannelFactory = ChannelFactory<DatagramChannel> { NioDatagramChannel() }
|
val workerGroup = bossGroup
|
||||||
val serverChannelFactory = ChannelFactory<ServerSocketChannel> { NioServerSocketChannel() }
|
|
||||||
val workerGroup = NioEventLoopGroup(0)
|
|
||||||
val eventExecutorGroup = run {
|
val eventExecutorGroup = run {
|
||||||
val threadFactory = if (cfg.eventExecutor.isUseVirtualThreads) {
|
val threadFactory = if (cfg.eventExecutor.isUseVirtualThreads) {
|
||||||
Thread.ofVirtual().factory()
|
Thread.ofVirtual().factory()
|
||||||
@@ -455,12 +411,11 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
|
|||||||
}
|
}
|
||||||
DefaultEventExecutorGroup(Runtime.getRuntime().availableProcessors(), threadFactory)
|
DefaultEventExecutorGroup(Runtime.getRuntime().availableProcessors(), threadFactory)
|
||||||
}
|
}
|
||||||
val serverInitializer = ServerInitializer(cfg, channelFactory, datagramChannelFactory, workerGroup)
|
|
||||||
val bootstrap = ServerBootstrap().apply {
|
val bootstrap = ServerBootstrap().apply {
|
||||||
// Configure the server
|
// Configure the server
|
||||||
group(bossGroup, workerGroup)
|
group(bossGroup, workerGroup)
|
||||||
channelFactory(serverChannelFactory)
|
channel(serverSocketChannel)
|
||||||
childHandler(serverInitializer)
|
childHandler(ServerInitializer(cfg, eventExecutorGroup))
|
||||||
option(ChannelOption.SO_BACKLOG, cfg.incomingConnectionsBacklogSize)
|
option(ChannelOption.SO_BACKLOG, cfg.incomingConnectionsBacklogSize)
|
||||||
childOption(ChannelOption.SO_KEEPALIVE, true)
|
childOption(ChannelOption.SO_KEEPALIVE, true)
|
||||||
}
|
}
|
||||||
@@ -468,16 +423,10 @@ class RemoteBuildCacheServer(private val cfg: Configuration) {
|
|||||||
|
|
||||||
// Bind and start to accept incoming connections.
|
// Bind and start to accept incoming connections.
|
||||||
val bindAddress = InetSocketAddress(cfg.host, cfg.port)
|
val bindAddress = InetSocketAddress(cfg.host, cfg.port)
|
||||||
val httpChannel = bootstrap.bind(bindAddress).sync().channel()
|
val httpChannel = bootstrap.bind(bindAddress).sync()
|
||||||
log.info {
|
log.info {
|
||||||
"RemoteBuildCacheServer is listening on ${cfg.host}:${cfg.port}"
|
"GradleBuildCacheServer is listening on ${cfg.host}:${cfg.port}"
|
||||||
}
|
}
|
||||||
|
return ServerHandle(httpChannel, setOf(bossGroup, workerGroup, eventExecutorGroup))
|
||||||
return ServerHandle(
|
|
||||||
httpChannel.closeFuture(),
|
|
||||||
bossGroup,
|
|
||||||
setOf(workerGroup, eventExecutorGroup),
|
|
||||||
serverInitializer
|
|
||||||
)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
@@ -0,0 +1,30 @@
package net.woggioni.gbcs.server

import io.netty.channel.ChannelHandlerContext
import org.slf4j.Logger
import java.net.InetSocketAddress

inline fun Logger.trace(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isTraceEnabled }, { trace(it) }, messageBuilder)
}

inline fun Logger.debug(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isDebugEnabled }, { debug(it) }, messageBuilder)
}

inline fun Logger.info(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isInfoEnabled }, { info(it) }, messageBuilder)
}

inline fun Logger.warn(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isWarnEnabled }, { warn(it) }, messageBuilder)
}

inline fun Logger.error(ctx : ChannelHandlerContext, messageBuilder : () -> String) {
    log(this, ctx, { isErrorEnabled }, { error(it) }, messageBuilder)
}

inline fun log(log : Logger, ctx : ChannelHandlerContext,
               filter : Logger.() -> Boolean,
               loggerMethod : Logger.(String) -> Unit, messageBuilder : () -> String) {
    if(log.filter()) {
        val clientAddress = (ctx.channel().remoteAddress() as InetSocketAddress).address.hostAddress
        log.loggerMethod(clientAddress + " - " + messageBuilder())
    }
}
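These wrappers prefix every message with the remote client address. A call-site sketch inside a channel handler (the handler and message are illustrative, and a TCP channel with an InetSocketAddress peer is assumed):

    import io.netty.channel.ChannelHandlerContext
    import io.netty.channel.ChannelInboundHandlerAdapter
    import org.slf4j.LoggerFactory

    class ExampleHandler : ChannelInboundHandlerAdapter() {
        private val log = LoggerFactory.getLogger(ExampleHandler::class.java)

        override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
            // Logged as "<client ip> - received ..." when DEBUG is enabled.
            log.debug(ctx) { "received ${msg.javaClass.simpleName}" }
            ctx.fireChannelRead(msg)
        }
    }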
@@ -1,4 +1,4 @@
|
|||||||
package net.woggioni.rbcs.server.auth
|
package net.woggioni.gbcs.server.auth
|
||||||
|
|
||||||
import io.netty.buffer.Unpooled
|
import io.netty.buffer.Unpooled
|
||||||
import io.netty.channel.ChannelFutureListener
|
import io.netty.channel.ChannelFutureListener
|
||||||
@@ -6,16 +6,15 @@ import io.netty.channel.ChannelHandlerContext
|
|||||||
import io.netty.channel.ChannelInboundHandlerAdapter
|
import io.netty.channel.ChannelInboundHandlerAdapter
|
||||||
import io.netty.handler.codec.http.DefaultFullHttpResponse
|
import io.netty.handler.codec.http.DefaultFullHttpResponse
|
||||||
import io.netty.handler.codec.http.FullHttpResponse
|
import io.netty.handler.codec.http.FullHttpResponse
|
||||||
import io.netty.handler.codec.http.HttpContent
|
|
||||||
import io.netty.handler.codec.http.HttpHeaderNames
|
import io.netty.handler.codec.http.HttpHeaderNames
|
||||||
import io.netty.handler.codec.http.HttpRequest
|
import io.netty.handler.codec.http.HttpRequest
|
||||||
import io.netty.handler.codec.http.HttpResponseStatus
|
import io.netty.handler.codec.http.HttpResponseStatus
|
||||||
import io.netty.handler.codec.http.HttpVersion
|
import io.netty.handler.codec.http.HttpVersion
|
||||||
import io.netty.util.ReferenceCountUtil
|
import io.netty.util.ReferenceCountUtil
|
||||||
import net.woggioni.rbcs.api.Configuration
|
import net.woggioni.gbcs.api.Configuration
|
||||||
import net.woggioni.rbcs.api.Configuration.Group
|
import net.woggioni.gbcs.api.Configuration.Group
|
||||||
import net.woggioni.rbcs.api.Role
|
import net.woggioni.gbcs.api.Role
|
||||||
import net.woggioni.rbcs.server.RemoteBuildCacheServer
|
import net.woggioni.gbcs.server.GradleBuildCacheServer
|
||||||
|
|
||||||
|
|
||||||
abstract class AbstractNettyHttpAuthenticator(private val authorizer: Authorizer) : ChannelInboundHandlerAdapter() {
|
abstract class AbstractNettyHttpAuthenticator(private val authorizer: Authorizer) : ChannelInboundHandlerAdapter() {
|
||||||
@@ -41,8 +40,8 @@ abstract class AbstractNettyHttpAuthenticator(private val authorizer: Authorizer
|
|||||||
override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
|
override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
|
||||||
if (msg is HttpRequest) {
|
if (msg is HttpRequest) {
|
||||||
val result = authenticate(ctx, msg) ?: return authenticationFailure(ctx, msg)
|
val result = authenticate(ctx, msg) ?: return authenticationFailure(ctx, msg)
|
||||||
ctx.channel().attr(RemoteBuildCacheServer.userAttribute).set(result.user)
|
ctx.channel().attr(GradleBuildCacheServer.userAttribute).set(result.user)
|
||||||
ctx.channel().attr(RemoteBuildCacheServer.groupAttribute).set(result.groups)
|
ctx.channel().attr(GradleBuildCacheServer.groupAttribute).set(result.groups)
|
||||||
|
|
||||||
val roles = (
|
val roles = (
|
||||||
(result.user?.let { user ->
|
(result.user?.let { user ->
|
||||||
@@ -58,8 +57,6 @@ abstract class AbstractNettyHttpAuthenticator(private val authorizer: Authorizer
|
|||||||
} else {
|
} else {
|
||||||
authorizationFailure(ctx, msg)
|
authorizationFailure(ctx, msg)
|
||||||
}
|
}
|
||||||
} else if(msg is HttpContent) {
|
|
||||||
ctx.fireChannelRead(msg)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@@ -1,7 +1,7 @@
|
|||||||
package net.woggioni.rbcs.server.auth
|
package net.woggioni.gbcs.server.auth
|
||||||
|
|
||||||
import io.netty.handler.codec.http.HttpRequest
|
import io.netty.handler.codec.http.HttpRequest
|
||||||
import net.woggioni.rbcs.api.Role
|
import net.woggioni.gbcs.api.Role
|
||||||
|
|
||||||
fun interface Authorizer {
|
fun interface Authorizer {
|
||||||
fun authorize(roles : Set<Role>, request: HttpRequest) : Boolean
|
fun authorize(roles : Set<Role>, request: HttpRequest) : Boolean
|
@@ -0,0 +1,90 @@
|
|||||||
|
package net.woggioni.gbcs.server.auth
|
||||||
|
|
||||||
|
import io.netty.channel.ChannelHandlerContext
|
||||||
|
import io.netty.channel.ChannelInboundHandlerAdapter
|
||||||
|
import io.netty.handler.ssl.SslHandler
|
||||||
|
import io.netty.handler.ssl.SslHandshakeCompletionEvent
|
||||||
|
import java.security.KeyStore
|
||||||
|
import java.security.cert.CertPathValidator
|
||||||
|
import java.security.cert.CertPathValidatorException
|
||||||
|
import java.security.cert.CertificateException
|
||||||
|
import java.security.cert.CertificateFactory
|
||||||
|
import java.security.cert.PKIXParameters
|
||||||
|
import java.security.cert.PKIXRevocationChecker
|
||||||
|
import java.security.cert.X509Certificate
|
||||||
|
import java.util.EnumSet
|
||||||
|
import javax.net.ssl.SSLSession
|
||||||
|
import javax.net.ssl.TrustManagerFactory
|
||||||
|
import javax.net.ssl.X509TrustManager
|
||||||
|
|
||||||
|
|
||||||
|
class ClientCertificateValidator private constructor(
|
||||||
|
private val sslHandler: SslHandler,
|
||||||
|
private val x509TrustManager: X509TrustManager
|
||||||
|
) : ChannelInboundHandlerAdapter() {
|
||||||
|
override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
|
||||||
|
if (evt is SslHandshakeCompletionEvent) {
|
||||||
|
if (evt.isSuccess) {
|
||||||
|
val session: SSLSession = sslHandler.engine().session
|
||||||
|
val clientCertificateChain = session.peerCertificates as Array<X509Certificate>
|
||||||
|
val authType: String = clientCertificateChain[0].publicKey.algorithm
|
||||||
|
x509TrustManager.checkClientTrusted(clientCertificateChain, authType)
|
||||||
|
} else {
|
||||||
|
// Handle the failure, for example by closing the channel.
|
||||||
|
}
|
||||||
|
}
|
||||||
|
super.userEventTriggered(ctx, evt)
|
||||||
|
}
|
||||||
|
|
||||||
|
companion object {
|
||||||
|
fun getTrustManager(trustStore: KeyStore?, certificateRevocationEnabled: Boolean): X509TrustManager {
|
||||||
|
return if (trustStore != null) {
|
||||||
|
val certificateFactory = CertificateFactory.getInstance("X.509")
|
||||||
|
val validator = CertPathValidator.getInstance("PKIX").apply {
|
||||||
|
val rc = revocationChecker as PKIXRevocationChecker
|
||||||
|
rc.options = EnumSet.of(
|
||||||
|
PKIXRevocationChecker.Option.NO_FALLBACK
|
||||||
|
)
|
||||||
|
}
|
||||||
|
val params = PKIXParameters(trustStore).apply {
|
||||||
|
isRevocationEnabled = certificateRevocationEnabled
|
||||||
|
}
|
||||||
|
object : X509TrustManager {
|
||||||
|
override fun checkClientTrusted(chain: Array<out X509Certificate>, authType: String) {
|
||||||
|
val clientCertificateChain = certificateFactory.generateCertPath(chain.toList())
|
||||||
|
try {
|
||||||
|
validator.validate(clientCertificateChain, params)
|
||||||
|
} catch (ex: CertPathValidatorException) {
|
||||||
|
throw CertificateException(ex)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun checkServerTrusted(chain: Array<out X509Certificate>, authType: String) {
|
||||||
|
throw NotImplementedError()
|
||||||
|
}
|
||||||
|
|
||||||
|
private val acceptedIssuers = trustStore.aliases().asSequence()
|
||||||
|
.filter(trustStore::isCertificateEntry)
|
||||||
|
.map(trustStore::getCertificate)
|
||||||
|
.map { it as X509Certificate }
|
||||||
|
.toList()
|
||||||
|
.toTypedArray()
|
||||||
|
|
||||||
|
override fun getAcceptedIssuers() = acceptedIssuers
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
val trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm())
|
||||||
|
trustManagerFactory.trustManagers.asSequence().filter { it is X509TrustManager }
|
||||||
|
.single() as X509TrustManager
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fun of(
|
||||||
|
sslHandler: SslHandler,
|
||||||
|
trustStore: KeyStore?,
|
||||||
|
certificateRevocationEnabled: Boolean
|
||||||
|
): ClientCertificateValidator {
|
||||||
|
return ClientCertificateValidator(sslHandler, getTrustManager(trustStore, certificateRevocationEnabled))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@@ -1,8 +1,8 @@
|
|||||||
package net.woggioni.rbcs.server.auth
|
package net.woggioni.gbcs.server.auth
|
||||||
|
|
||||||
import io.netty.handler.codec.http.HttpMethod
|
import io.netty.handler.codec.http.HttpMethod
|
||||||
import io.netty.handler.codec.http.HttpRequest
|
import io.netty.handler.codec.http.HttpRequest
|
||||||
import net.woggioni.rbcs.api.Role
|
import net.woggioni.gbcs.api.Role
|
||||||
|
|
||||||
class RoleAuthorizer : Authorizer {
|
class RoleAuthorizer : Authorizer {
|
||||||
|
|
gbcs-server/src/main/kotlin/net/woggioni/gbcs/server/cache/CacheUtils.kt
@@ -0,0 +1,21 @@
package net.woggioni.gbcs.server.cache

import net.woggioni.jwo.JWO
import java.security.MessageDigest

object CacheUtils {
    fun digest(
        data: ByteArray,
        md: MessageDigest = MessageDigest.getInstance("MD5")
    ): ByteArray {
        md.update(data)
        return md.digest()
    }

    fun digestString(
        data: ByteArray,
        md: MessageDigest = MessageDigest.getInstance("MD5")
    ): String {
        return JWO.bytesToHex(digest(data, md))
    }
}
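A one-line usage sketch (the key is illustrative; MD5 is the default digest):

    val hexKey = CacheUtils.digestString("some-build-key".toByteArray())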
gbcs-server/src/main/kotlin/net/woggioni/gbcs/server/cache/FileSystemCache.kt
@@ -0,0 +1,120 @@
package net.woggioni.gbcs.server.cache

import net.woggioni.gbcs.api.Cache
import net.woggioni.gbcs.common.contextLogger
import net.woggioni.gbcs.server.cache.CacheUtils.digestString
import net.woggioni.jwo.LockFile
import java.nio.channels.Channels
import java.nio.channels.FileChannel
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardCopyOption
import java.nio.file.StandardOpenOption
import java.nio.file.attribute.BasicFileAttributes
import java.security.MessageDigest
import java.time.Duration
import java.time.Instant
import java.util.concurrent.atomic.AtomicReference
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.Inflater
import java.util.zip.InflaterInputStream

class FileSystemCache(
    val root: Path,
    val maxAge: Duration,
    val digestAlgorithm: String?,
    val compressionEnabled: Boolean,
    val compressionLevel: Int
) : Cache {

    private val log = contextLogger()

    init {
        Files.createDirectories(root)
    }

    private var nextGc = AtomicReference(Instant.now().plus(maxAge))

    override fun get(key: String) = (digestAlgorithm
        ?.let(MessageDigest::getInstance)
        ?.let { md ->
            digestString(key.toByteArray(), md)
        } ?: key).let { digest ->
        root.resolve(digest).takeIf(Files::exists)
            ?.let { file ->
                file.takeIf(Files::exists)?.let { file ->
                    if (compressionEnabled) {
                        val inflater = Inflater()
                        Channels.newChannel(
                            InflaterInputStream(
                                Channels.newInputStream(
                                    FileChannel.open(
                                        file,
                                        StandardOpenOption.READ
                                    )
                                ), inflater
                            )
                        )
                    } else {
                        FileChannel.open(file, StandardOpenOption.READ)
                    }
                }
            }.also {
                gc()
            }
    }

    override fun put(key: String, content: ByteArray) {
        (digestAlgorithm
            ?.let(MessageDigest::getInstance)
            ?.let { md ->
                digestString(key.toByteArray(), md)
            } ?: key).let { digest ->
            val file = root.resolve(digest)
            val tmpFile = Files.createTempFile(root, null, ".tmp")
            try {
                Files.newOutputStream(tmpFile).let {
                    if (compressionEnabled) {
                        val deflater = Deflater(compressionLevel)
                        DeflaterOutputStream(it, deflater)
                    } else {
                        it
                    }
                }.use {
                    it.write(content)
                }
                Files.move(tmpFile, file, StandardCopyOption.ATOMIC_MOVE)
            } catch (t: Throwable) {
                Files.delete(tmpFile)
                throw t
            }
        }.also {
            gc()
        }
    }

    private fun gc() {
        val now = Instant.now()
        val oldValue = nextGc.getAndSet(now.plus(maxAge))
        if (oldValue < now) {
            actualGc(now)
        }
    }

    @Synchronized
    private fun actualGc(now: Instant) {
        Files.list(root).filter {
            val creationTimeStamp = Files.readAttributes(it, BasicFileAttributes::class.java)
                .creationTime()
                .toInstant()
            now > creationTimeStamp.plus(maxAge)
        }.forEach { file ->
            LockFile.acquire(file, false).use {
                Files.delete(file)
            }
        }
    }

    override fun close() {}
}
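A short usage sketch of the file-system cache (the directory and key are illustrative):

    fun fileSystemCacheExample() {
        val cache = FileSystemCache(
            root = Path.of("/tmp/gbcs-cache"),
            maxAge = Duration.ofDays(7),
            digestAlgorithm = "MD5",
            compressionEnabled = true,
            compressionLevel = Deflater.DEFAULT_COMPRESSION
        )
        cache.put("some-build-key", "payload".toByteArray())
        val hit = cache.get("some-build-key") // readable channel, or null on a miss
        cache.close()
    }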
gbcs-server/src/main/kotlin/net/woggioni/gbcs/server/cache/FileSystemCacheConfiguration.kt
@@ -0,0 +1,27 @@
package net.woggioni.gbcs.server.cache

import net.woggioni.gbcs.api.Configuration
import net.woggioni.gbcs.common.GBCS
import net.woggioni.jwo.Application
import java.nio.file.Path
import java.time.Duration

data class FileSystemCacheConfiguration(
    val root: Path?,
    val maxAge: Duration,
    val digestAlgorithm : String?,
    val compressionEnabled: Boolean,
    val compressionLevel: Int,
) : Configuration.Cache {
    override fun materialize() = FileSystemCache(
        root ?: Application.builder("gbcs").build().computeCacheDirectory(),
        maxAge,
        digestAlgorithm,
        compressionEnabled,
        compressionLevel
    )

    override fun getNamespaceURI() = GBCS.GBCS_NAMESPACE_URI

    override fun getTypeName() = "fileSystemCacheType"
}
@@ -1,9 +1,9 @@
-package net.woggioni.rbcs.server.cache
+package net.woggioni.gbcs.server.cache
 
-import net.woggioni.rbcs.api.CacheProvider
-import net.woggioni.rbcs.common.RBCS
-import net.woggioni.rbcs.common.Xml
-import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
+import net.woggioni.gbcs.api.CacheProvider
+import net.woggioni.gbcs.common.GBCS
+import net.woggioni.gbcs.common.Xml
+import net.woggioni.gbcs.common.Xml.Companion.renderAttribute
 import org.w3c.dom.Document
 import org.w3c.dom.Element
 import java.nio.file.Path
@@ -12,11 +12,11 @@ import java.util.zip.Deflater
 
 class FileSystemCacheProvider : CacheProvider<FileSystemCacheConfiguration> {
 
-    override fun getXmlSchemaLocation() = "classpath:net/woggioni/rbcs/server/schema/rbcs.xsd"
+    override fun getXmlSchemaLocation() = "classpath:net/woggioni/gbcs/server/schema/gbcs.xsd"
 
     override fun getXmlType() = "fileSystemCacheType"
 
-    override fun getXmlNamespace() = "urn:net.woggioni.rbcs.server"
+    override fun getXmlNamespace() = "urn:net.woggioni.gbcs.server"
 
     override fun deserialize(el: Element): FileSystemCacheConfiguration {
         val path = el.renderAttribute("path")
@@ -30,29 +30,23 @@ class FileSystemCacheProvider : CacheProvider<FileSystemCacheConfiguration> {
         val compressionLevel = el.renderAttribute("compression-level")
             ?.let(String::toInt)
             ?: Deflater.DEFAULT_COMPRESSION
-        val digestAlgorithm = el.renderAttribute("digest")
-        val chunkSize = el.renderAttribute("chunk-size")
-            ?.let(Integer::decode)
-            ?: 0x10000
+        val digestAlgorithm = el.renderAttribute("digest") ?: "MD5"
 
         return FileSystemCacheConfiguration(
             path,
             maxAge,
             digestAlgorithm,
             enableCompression,
-            compressionLevel,
-            chunkSize
+            compressionLevel
         )
     }
 
     override fun serialize(doc: Document, cache : FileSystemCacheConfiguration) = cache.run {
         val result = doc.createElement("cache")
         Xml.of(doc, result) {
-            val prefix = doc.lookupPrefix(RBCS.RBCS_NAMESPACE_URI)
-            attr("xs:type", "${prefix}:fileSystemCacheType", RBCS.XML_SCHEMA_NAMESPACE_URI)
-            root?.let {
-                attr("path", it.toString())
-            }
+            val prefix = doc.lookupPrefix(GBCS.GBCS_NAMESPACE_URI)
+            attr("xs:type", "${prefix}:fileSystemCacheType", GBCS.XML_SCHEMA_NAMESPACE_URI)
+            attr("path", root.toString())
             attr("max-age", maxAge.toString())
             digestAlgorithm?.let { digestAlgorithm ->
                 attr("digest", digestAlgorithm)
@@ -63,7 +57,6 @@ class FileSystemCacheProvider : CacheProvider<FileSystemCacheConfiguration> {
             }?.let {
                 attr("compression-level", it.toString())
             }
-            attr("chunk-size", chunkSize.toString())
         }
         result
     }
106 gbcs-server/src/main/kotlin/net/woggioni/gbcs/server/cache/InMemoryCache.kt vendored Normal file
@@ -0,0 +1,106 @@
package net.woggioni.gbcs.server.cache

import net.woggioni.gbcs.api.Cache
import net.woggioni.gbcs.server.cache.CacheUtils.digestString
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.nio.ByteBuffer
import java.nio.channels.Channels
import java.security.MessageDigest
import java.time.Duration
import java.time.Instant
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.PriorityBlockingQueue
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.atomic.AtomicReference
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.Inflater
import java.util.zip.InflaterInputStream

class InMemoryCache(
    val maxAge: Duration,
    val digestAlgorithm: String?,
    val compressionEnabled: Boolean,
    val compressionLevel: Int
) : Cache {

    private val map = ConcurrentHashMap<String, MapValue>()

    private class MapValue(val rc: AtomicInteger, val payload : AtomicReference<ByteArray>)

    private class RemovalQueueElement(val key: String, val expiry : Instant) : Comparable<RemovalQueueElement> {
        override fun compareTo(other: RemovalQueueElement)= expiry.compareTo(other.expiry)
    }

    private val removalQueue = PriorityBlockingQueue<RemovalQueueElement>()

    private var running = true
    private val garbageCollector = Thread({
        while(true) {
            val el = removalQueue.take()
            val now = Instant.now()
            if(now > el.expiry) {
                val value = map[el.key] ?: continue
                val rc = value.rc.decrementAndGet()
                if(rc == 0) {
                    map.remove(el.key)
                }
            } else {
                removalQueue.put(el)
                Thread.sleep(minOf(Duration.between(now, el.expiry), Duration.ofSeconds(1)))
            }
        }
    }).apply {
        start()
    }

    override fun close() {
        running = false
        garbageCollector.join()
    }

    override fun get(key: String) =
        (digestAlgorithm
            ?.let(MessageDigest::getInstance)
            ?.let { md ->
                digestString(key.toByteArray(), md)
            } ?: key
        ).let { digest ->
            map[digest]
                ?.let(MapValue::payload)
                ?.let(AtomicReference<ByteArray>::get)
                ?.let { value ->
                    if (compressionEnabled) {
                        val inflater = Inflater()
                        Channels.newChannel(InflaterInputStream(ByteArrayInputStream(value), inflater))
                    } else {
                        Channels.newChannel(ByteArrayInputStream(value))
                    }
                }
        }

    override fun put(key: String, content: ByteArray) {
        (digestAlgorithm
            ?.let(MessageDigest::getInstance)
            ?.let { md ->
                digestString(key.toByteArray(), md)
            } ?: key).let { digest ->
            val value = if (compressionEnabled) {
                val deflater = Deflater(compressionLevel)
                val baos = ByteArrayOutputStream()
                DeflaterOutputStream(baos, deflater).use { stream ->
                    stream.write(content)
                }
                baos.toByteArray()
            } else {
                content
            }
            val mapValue = map.computeIfAbsent(digest) {
                MapValue(AtomicInteger(0), AtomicReference())
            }
            mapValue.payload.set(value)
            removalQueue.put(RemovalQueueElement(digest, Instant.now().plus(maxAge)))
        }
    }
}
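Editor's note (not part of the diff): when compression is enabled, `InMemoryCache` deflates the payload on `put` and inflates it again on `get`, using only `java.util.zip`. A self-contained sketch of that round trip, with hypothetical variable names:

```kotlin
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.Inflater
import java.util.zip.InflaterInputStream

fun main() {
    val original = "some cached build artifact".toByteArray()

    // Compress on the way in, as put() does when compressionEnabled is true
    val deflated = ByteArrayOutputStream().also { baos ->
        DeflaterOutputStream(baos, Deflater(Deflater.DEFAULT_COMPRESSION)).use { it.write(original) }
    }.toByteArray()

    // Decompress on the way out, as get() does before handing the bytes to the caller
    val inflated = InflaterInputStream(ByteArrayInputStream(deflated), Inflater()).readBytes()

    check(inflated.contentEquals(original))
}
```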
23 gbcs-server/src/main/kotlin/net/woggioni/gbcs/server/cache/InMemoryCacheConfiguration.kt vendored Normal file
@@ -0,0 +1,23 @@
package net.woggioni.gbcs.server.cache

import net.woggioni.gbcs.api.Configuration
import net.woggioni.gbcs.common.GBCS
import java.time.Duration

data class InMemoryCacheConfiguration(
    val maxAge: Duration,
    val digestAlgorithm : String?,
    val compressionEnabled: Boolean,
    val compressionLevel: Int,
) : Configuration.Cache {
    override fun materialize() = InMemoryCache(
        maxAge,
        digestAlgorithm,
        compressionEnabled,
        compressionLevel
    )

    override fun getNamespaceURI() = GBCS.GBCS_NAMESPACE_URI

    override fun getTypeName() = "inMemoryCacheType"
}
@@ -1,56 +1,49 @@
-package net.woggioni.rbcs.server.cache
+package net.woggioni.gbcs.server.cache
 
-import net.woggioni.rbcs.api.CacheProvider
-import net.woggioni.rbcs.common.RBCS
-import net.woggioni.rbcs.common.Xml
-import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
+import net.woggioni.gbcs.api.CacheProvider
+import net.woggioni.gbcs.common.GBCS
+import net.woggioni.gbcs.common.Xml
+import net.woggioni.gbcs.common.Xml.Companion.renderAttribute
 import org.w3c.dom.Document
 import org.w3c.dom.Element
+import java.nio.file.Path
 import java.time.Duration
 import java.util.zip.Deflater
 
 class InMemoryCacheProvider : CacheProvider<InMemoryCacheConfiguration> {
 
-    override fun getXmlSchemaLocation() = "classpath:net/woggioni/rbcs/server/schema/rbcs.xsd"
+    override fun getXmlSchemaLocation() = "classpath:net/woggioni/gbcs/server/schema/gbcs.xsd"
 
     override fun getXmlType() = "inMemoryCacheType"
 
-    override fun getXmlNamespace() = "urn:net.woggioni.rbcs.server"
+    override fun getXmlNamespace() = "urn:net.woggioni.gbcs.server"
 
     override fun deserialize(el: Element): InMemoryCacheConfiguration {
         val maxAge = el.renderAttribute("max-age")
             ?.let(Duration::parse)
             ?: Duration.ofDays(1)
-        val maxSize = el.renderAttribute("max-size")
-            ?.let(java.lang.Long::decode)
-            ?: 0x1000000
         val enableCompression = el.renderAttribute("enable-compression")
             ?.let(String::toBoolean)
             ?: true
         val compressionLevel = el.renderAttribute("compression-level")
             ?.let(String::toInt)
             ?: Deflater.DEFAULT_COMPRESSION
-        val digestAlgorithm = el.renderAttribute("digest")
-        val chunkSize = el.renderAttribute("chunk-size")
-            ?.let(Integer::decode)
-            ?: 0x10000
+        val digestAlgorithm = el.renderAttribute("digest") ?: "MD5"
         return InMemoryCacheConfiguration(
             maxAge,
-            maxSize,
             digestAlgorithm,
             enableCompression,
-            compressionLevel,
-            chunkSize
+            compressionLevel
         )
     }
 
     override fun serialize(doc: Document, cache : InMemoryCacheConfiguration) = cache.run {
         val result = doc.createElement("cache")
         Xml.of(doc, result) {
-            val prefix = doc.lookupPrefix(RBCS.RBCS_NAMESPACE_URI)
-            attr("xs:type", "${prefix}:inMemoryCacheType", RBCS.XML_SCHEMA_NAMESPACE_URI)
+            val prefix = doc.lookupPrefix(GBCS.GBCS_NAMESPACE_URI)
+            attr("xs:type", "${prefix}:inMemoryCacheType", GBCS.XML_SCHEMA_NAMESPACE_URI)
             attr("max-age", maxAge.toString())
-            attr("max-size", maxSize.toString())
             digestAlgorithm?.let { digestAlgorithm ->
                 attr("digest", digestAlgorithm)
             }
@@ -60,7 +53,6 @@ class InMemoryCacheProvider : CacheProvider<InMemoryCacheConfiguration> {
             }?.let {
                 attr("compression-level", it.toString())
             }
-            attr("chunk-size", chunkSize.toString())
         }
         result
     }
@@ -1,7 +1,7 @@
-package net.woggioni.rbcs.server.configuration
+package net.woggioni.gbcs.server.configuration
 
-import net.woggioni.rbcs.api.CacheProvider
-import net.woggioni.rbcs.api.Configuration
+import net.woggioni.gbcs.api.CacheProvider
+import net.woggioni.gbcs.api.Configuration
 import java.util.ServiceLoader
 
 object CacheSerializers {
@@ -1,20 +1,20 @@
-package net.woggioni.rbcs.server.configuration
+package net.woggioni.gbcs.server.configuration
 
-import net.woggioni.rbcs.api.Configuration
-import net.woggioni.rbcs.api.Configuration.Authentication
-import net.woggioni.rbcs.api.Configuration.BasicAuthentication
-import net.woggioni.rbcs.api.Configuration.Cache
-import net.woggioni.rbcs.api.Configuration.ClientCertificateAuthentication
-import net.woggioni.rbcs.api.Configuration.Group
-import net.woggioni.rbcs.api.Configuration.KeyStore
-import net.woggioni.rbcs.api.Configuration.Tls
-import net.woggioni.rbcs.api.Configuration.TlsCertificateExtractor
-import net.woggioni.rbcs.api.Configuration.TrustStore
-import net.woggioni.rbcs.api.Configuration.User
-import net.woggioni.rbcs.api.Role
-import net.woggioni.rbcs.api.exception.ConfigurationException
-import net.woggioni.rbcs.common.Xml.Companion.asIterable
-import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
+import net.woggioni.gbcs.api.Configuration
+import net.woggioni.gbcs.api.Configuration.Authentication
+import net.woggioni.gbcs.api.Configuration.BasicAuthentication
+import net.woggioni.gbcs.api.Configuration.Cache
+import net.woggioni.gbcs.api.Configuration.ClientCertificateAuthentication
+import net.woggioni.gbcs.api.Configuration.Group
+import net.woggioni.gbcs.api.Configuration.KeyStore
+import net.woggioni.gbcs.api.Configuration.Tls
+import net.woggioni.gbcs.api.Configuration.TlsCertificateExtractor
+import net.woggioni.gbcs.api.Configuration.TrustStore
+import net.woggioni.gbcs.api.Configuration.User
+import net.woggioni.gbcs.api.Role
+import net.woggioni.gbcs.api.exception.ConfigurationException
+import net.woggioni.gbcs.common.Xml.Companion.asIterable
+import net.woggioni.gbcs.common.Xml.Companion.renderAttribute
 import org.w3c.dom.Document
 import org.w3c.dom.Element
 import org.w3c.dom.TypeInfo
@@ -27,6 +27,8 @@ object Parser {
         val root = document.documentElement
         val anonymousUser = User("", null, emptySet(), null)
         var connection: Configuration.Connection = Configuration.Connection(
+            Duration.of(10, ChronoUnit.SECONDS),
+            Duration.of(10, ChronoUnit.SECONDS),
            Duration.of(60, ChronoUnit.SECONDS),
            Duration.of(30, ChronoUnit.SECONDS),
            Duration.of(30, ChronoUnit.SECONDS),
@@ -111,6 +113,10 @@ object Parser {
                 }
 
                 "connection" -> {
+                    val writeTimeout = child.renderAttribute("write-timeout")
+                        ?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
+                    val readTimeout = child.renderAttribute("read-timeout")
+                        ?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
                     val idleTimeout = child.renderAttribute("idle-timeout")
                         ?.let(Duration::parse) ?: Duration.of(30, ChronoUnit.SECONDS)
                     val readIdleTimeout = child.renderAttribute("read-idle-timeout")
@@ -118,8 +124,10 @@ object Parser {
                     val writeIdleTimeout = child.renderAttribute("write-idle-timeout")
                         ?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
                     val maxRequestSize = child.renderAttribute("max-request-size")
-                        ?.let(Integer::decode) ?: 0x4000000
+                        ?.let(String::toInt) ?: 67108864
                     connection = Configuration.Connection(
+                        readTimeout,
+                        writeTimeout,
                         idleTimeout,
                         readIdleTimeout,
                         writeIdleTimeout,
@@ -257,8 +265,7 @@ object Parser {
         }.map { el ->
             val groupName = el.renderAttribute("name") ?: throw ConfigurationException("Group name is required")
             var roles = emptySet<Role>()
-            var userQuota: Configuration.Quota? = null
-            var groupQuota: Configuration.Quota? = null
+            var quota: Configuration.Quota? = null
             for (child in el.asIterable()) {
                 when (child.localName) {
                     "users" -> {
@@ -272,15 +279,12 @@ object Parser {
                     "roles" -> {
                         roles = parseRoles(child)
                     }
-                    "group-quota" -> {
-                        userQuota = parseQuota(child)
-                    }
-                    "user-quota" -> {
-                        groupQuota = parseQuota(child)
+                    "quota" -> {
+                        quota = parseQuota(child)
                     }
                 }
             }
-            groupName to Group(groupName, roles, userQuota, groupQuota)
+            groupName to Group(groupName, roles, quota)
         }.toMap()
         val users = knownUsersMap.map { (name, user) ->
             name to User(name, user.password, userGroups[name]?.mapNotNull { groups[it] }?.toSet() ?: emptySet(), user.quota)
@@ -1,28 +1,22 @@
-package net.woggioni.rbcs.server.configuration
+package net.woggioni.gbcs.server.configuration
 
-import net.woggioni.rbcs.api.CacheProvider
-import net.woggioni.rbcs.api.Configuration
-import net.woggioni.rbcs.common.RBCS
-import net.woggioni.rbcs.common.Xml
+import net.woggioni.gbcs.api.CacheProvider
+import net.woggioni.gbcs.api.Configuration
+import net.woggioni.gbcs.common.GBCS
+import net.woggioni.gbcs.common.Xml
 import org.w3c.dom.Document
 
 object Serializer {
 
-    private fun Xml.serializeQuota(quota : Configuration.Quota) {
-        attr("calls", quota.calls.toString())
-        attr("period", quota.period.toString())
-        attr("max-available-calls", quota.maxAvailableCalls.toString())
-        attr("initial-available-calls", quota.initialAvailableCalls.toString())
-    }
-
     fun serialize(conf : Configuration) : Document {
 
         val schemaLocations = CacheSerializers.index.values.asSequence().map {
             it.xmlNamespace to it.xmlSchemaLocation
         }.toMap()
-        return Xml.of(RBCS.RBCS_NAMESPACE_URI, RBCS.RBCS_PREFIX + ":server") {
+        return Xml.of(GBCS.GBCS_NAMESPACE_URI, GBCS.GBCS_PREFIX + ":server") {
             // attr("xmlns:xs", GradleBuildCacheServer.XML_SCHEMA_NAMESPACE_URI)
             val value = schemaLocations.asSequence().map { (k, v) -> "$k $v" }.joinToString(" ")
-            attr("xs:schemaLocation", value , namespaceURI = RBCS.XML_SCHEMA_NAMESPACE_URI)
+            attr("xs:schemaLocation", value , namespaceURI = GBCS.XML_SCHEMA_NAMESPACE_URI)
 
             conf.serverPath
                 ?.takeIf(String::isNotEmpty)
@@ -36,6 +30,8 @@ object Serializer {
             }
             node("connection") {
                 conf.connection.let { connection ->
+                    attr("read-timeout", connection.readTimeout.toString())
+                    attr("write-timeout", connection.writeTimeout.toString())
                     attr("idle-timeout", connection.idleTimeout.toString())
                     attr("read-idle-timeout", connection.readIdleTimeout.toString())
                     attr("write-idle-timeout", connection.writeIdleTimeout.toString())
@@ -60,7 +56,10 @@ object Serializer {
                         }
                         user.quota?.let { quota ->
                             node("quota") {
-                                serializeQuota(quota)
+                                attr("calls", quota.calls.toString())
+                                attr("period", quota.period.toString())
+                                attr("max-available-calls", quota.maxAvailableCalls.toString())
+                                attr("initial-available-calls", quota.initialAvailableCalls.toString())
                             }
                         }
                     }
@@ -71,7 +70,10 @@ object Serializer {
                     anonymousUser.quota?.let { quota ->
                         node("anonymous") {
                             node("quota") {
-                                serializeQuota(quota)
+                                attr("calls", quota.calls.toString())
+                                attr("period", quota.period.toString())
+                                attr("max-available-calls", quota.maxAvailableCalls.toString())
+                                attr("initial-available-calls", quota.initialAvailableCalls.toString())
                             }
                         }
                     }
@@ -111,14 +113,12 @@ object Serializer {
                         }
                     }
                 }
-                group.userQuota?.let { quota ->
-                    node("user-quota") {
-                        serializeQuota(quota)
-                    }
-                }
-                group.groupQuota?.let { quota ->
-                    node("group-quota") {
-                        serializeQuota(quota)
+                group.quota?.let { quota ->
+                    node("quota") {
+                        attr("calls", quota.calls.toString())
+                        attr("period", quota.period.toString())
+                        attr("max-available-calls", quota.maxAvailableCalls.toString())
+                        attr("initial-available-calls", quota.initialAvailableCalls.toString())
                     }
                 }
             }
@@ -1,9 +1,9 @@
-package net.woggioni.rbcs.server.exception
+package net.woggioni.gbcs.server.exception
 
 import io.netty.buffer.Unpooled
 import io.netty.channel.ChannelDuplexHandler
 import io.netty.channel.ChannelFutureListener
-import io.netty.channel.ChannelHandler.Sharable
+import io.netty.channel.ChannelHandler
 import io.netty.channel.ChannelHandlerContext
 import io.netty.handler.codec.DecoderException
 import io.netty.handler.codec.http.DefaultFullHttpResponse
@@ -13,20 +13,14 @@ import io.netty.handler.codec.http.HttpResponseStatus
 import io.netty.handler.codec.http.HttpVersion
 import io.netty.handler.timeout.ReadTimeoutException
 import io.netty.handler.timeout.WriteTimeoutException
-import net.woggioni.rbcs.api.exception.CacheException
-import net.woggioni.rbcs.api.exception.ContentTooLargeException
-import net.woggioni.rbcs.common.contextLogger
-import net.woggioni.rbcs.common.debug
-import net.woggioni.rbcs.common.log
-import org.slf4j.event.Level
-import org.slf4j.spi.LoggingEventBuilder
-import java.net.ConnectException
-import java.net.SocketException
-import javax.net.ssl.SSLException
+import net.woggioni.gbcs.api.exception.CacheException
+import net.woggioni.gbcs.api.exception.ContentTooLargeException
+import net.woggioni.gbcs.common.contextLogger
+import net.woggioni.gbcs.common.debug
 import javax.net.ssl.SSLPeerUnverifiedException
 
-@Sharable
-object ExceptionHandler : ChannelDuplexHandler() {
+@ChannelHandler.Sharable
+class ExceptionHandler : ChannelDuplexHandler() {
     private val log = contextLogger()
 
     private val NOT_AUTHORIZED: FullHttpResponse = DefaultFullHttpResponse(
@@ -35,6 +29,12 @@ object ExceptionHandler : ChannelDuplexHandler() {
         headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
     }
 
+    private val TOO_BIG: FullHttpResponse = DefaultFullHttpResponse(
+        HttpVersion.HTTP_1_1, HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, Unpooled.EMPTY_BUFFER
+    ).apply {
+        headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
+    }
+
     private val NOT_AVAILABLE: FullHttpResponse = DefaultFullHttpResponse(
         HttpVersion.HTTP_1_1, HttpResponseStatus.SERVICE_UNAVAILABLE, Unpooled.EMPTY_BUFFER
     ).apply {
@@ -47,26 +47,10 @@ object ExceptionHandler : ChannelDuplexHandler() {
         headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
     }
 
-    private val TOO_BIG: FullHttpResponse = DefaultFullHttpResponse(
-        HttpVersion.HTTP_1_1, HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, Unpooled.EMPTY_BUFFER
-    ).apply {
-        headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
-    }
-
     override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
         when (cause) {
             is DecoderException -> {
-                log.debug(cause.message, cause)
-                ctx.close()
-            }
-
-            is ConnectException -> {
                 log.error(cause.message, cause)
-                ctx.writeAndFlush(SERVER_ERROR.retainedDuplicate())
-            }
-
-            is SocketException -> {
-                log.debug(cause.message, cause)
                 ctx.close()
             }
 
@@ -75,19 +59,10 @@ object ExceptionHandler : ChannelDuplexHandler() {
                     .addListener(ChannelFutureListener.CLOSE_ON_FAILURE)
             }
-
-            is SSLException -> {
-                log.debug(cause.message, cause)
-                ctx.close()
-            }
-
             is ContentTooLargeException -> {
-                log.log(Level.DEBUG, ctx.channel()) { builder : LoggingEventBuilder ->
-                    builder.setMessage("Request body is too large")
-                }
                 ctx.writeAndFlush(TOO_BIG.retainedDuplicate())
                     .addListener(ChannelFutureListener.CLOSE_ON_FAILURE)
             }
 
             is ReadTimeoutException -> {
                 log.debug {
                     val channelId = ctx.channel().id().asShortText()
@@ -95,7 +70,6 @@ object ExceptionHandler : ChannelDuplexHandler() {
                 }
                 ctx.close()
             }
-
             is WriteTimeoutException -> {
                 log.debug {
                     val channelId = ctx.channel().id().asShortText()
@@ -103,13 +77,11 @@ object ExceptionHandler : ChannelDuplexHandler() {
                 }
                 ctx.close()
             }
-
             is CacheException -> {
                 log.error(cause.message, cause)
                 ctx.writeAndFlush(NOT_AVAILABLE.retainedDuplicate())
                     .addListener(ChannelFutureListener.CLOSE_ON_FAILURE)
             }
-
             else -> {
                 log.error(cause.message, cause)
                 ctx.writeAndFlush(SERVER_ERROR.retainedDuplicate())
@@ -0,0 +1,167 @@
package net.woggioni.gbcs.server.handler

import io.netty.buffer.Unpooled
import io.netty.channel.ChannelFutureListener
import io.netty.channel.ChannelHandler
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.DefaultFileRegion
import io.netty.channel.SimpleChannelInboundHandler
import io.netty.handler.codec.http.DefaultFullHttpResponse
import io.netty.handler.codec.http.DefaultHttpResponse
import io.netty.handler.codec.http.FullHttpRequest
import io.netty.handler.codec.http.HttpHeaderNames
import io.netty.handler.codec.http.HttpHeaderValues
import io.netty.handler.codec.http.HttpMethod
import io.netty.handler.codec.http.HttpResponseStatus
import io.netty.handler.codec.http.HttpUtil
import io.netty.handler.codec.http.LastHttpContent
import io.netty.handler.stream.ChunkedNioStream
import net.woggioni.gbcs.api.Cache
import net.woggioni.gbcs.api.exception.CacheException
import net.woggioni.gbcs.common.contextLogger
import net.woggioni.gbcs.server.debug
import net.woggioni.gbcs.server.warn
import java.nio.channels.FileChannel
import java.nio.file.Path

@ChannelHandler.Sharable
class ServerHandler(private val cache: Cache, private val serverPrefix: Path) :
    SimpleChannelInboundHandler<FullHttpRequest>() {

    private val log = contextLogger()

    override fun channelRead0(ctx: ChannelHandlerContext, msg: FullHttpRequest) {
        val keepAlive: Boolean = HttpUtil.isKeepAlive(msg)
        val method = msg.method()
        if (method === HttpMethod.GET) {
            val path = Path.of(msg.uri())
            val prefix = path.parent
            val key = path.fileName?.toString() ?: let {
                val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.NOT_FOUND)
                response.headers()[HttpHeaderNames.CONTENT_LENGTH] = 0
                ctx.writeAndFlush(response)
                return
            }
            if (serverPrefix == prefix) {
                try {
                    cache.get(key)
                } catch(ex : Throwable) {
                    throw CacheException("Error accessing the cache backend", ex)
                }?.let { channel ->
                    log.debug(ctx) {
                        "Cache hit for key '$key'"
                    }
                    val response = DefaultHttpResponse(msg.protocolVersion(), HttpResponseStatus.OK)
                    response.headers()[HttpHeaderNames.CONTENT_TYPE] = HttpHeaderValues.APPLICATION_OCTET_STREAM
                    if (!keepAlive) {
                        response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE)
                        response.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.IDENTITY)
                    } else {
                        response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
                        response.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED)
                    }
                    ctx.write(response)
                    when (channel) {
                        is FileChannel -> {
                            if (keepAlive) {
                                ctx.write(DefaultFileRegion(channel, 0, channel.size()))
                                ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT.retainedDuplicate())
                            } else {
                                ctx.writeAndFlush(DefaultFileRegion(channel, 0, channel.size()))
                                    .addListener(ChannelFutureListener.CLOSE)
                            }
                        }
                        else -> {
                            ctx.write(ChunkedNioStream(channel)).addListener { evt ->
                                channel.close()
                            }
                            ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT.retainedDuplicate())
                        }
                    }
                } ?: let {
                    log.debug(ctx) {
                        "Cache miss for key '$key'"
                    }
                    val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.NOT_FOUND)
                    response.headers()[HttpHeaderNames.CONTENT_LENGTH] = 0
                    ctx.writeAndFlush(response)
                }
            } else {
                log.warn(ctx) {
                    "Got request for unhandled path '${msg.uri()}'"
                }
                val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.BAD_REQUEST)
                response.headers()[HttpHeaderNames.CONTENT_LENGTH] = 0
                ctx.writeAndFlush(response)
            }
        } else if (method === HttpMethod.PUT) {
            val path = Path.of(msg.uri())
            val prefix = path.parent
            val key = path.fileName.toString()

            if (serverPrefix == prefix) {
                log.debug(ctx) {
                    "Added value for key '$key' to build cache"
                }
                val bodyBytes = msg.content().run {
                    if (isDirect) {
                        ByteArray(readableBytes()).also {
                            readBytes(it)
                        }
                    } else {
                        array()
                    }
                }
                try {
                    cache.put(key, bodyBytes)
                } catch(ex : Throwable) {
                    throw CacheException("Error accessing the cache backend", ex)
                }
                val response = DefaultFullHttpResponse(
                    msg.protocolVersion(), HttpResponseStatus.CREATED,
                    Unpooled.copiedBuffer(key.toByteArray())
                )
                response.headers()[HttpHeaderNames.CONTENT_LENGTH] = response.content().readableBytes()
                ctx.writeAndFlush(response)
            } else {
                log.warn(ctx) {
                    "Got request for unhandled path '${msg.uri()}'"
                }
                val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.BAD_REQUEST)
                response.headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
                ctx.writeAndFlush(response)
            }
        } else if(method == HttpMethod.TRACE) {
            val replayedRequestHead = ctx.alloc().buffer()
            replayedRequestHead.writeCharSequence("TRACE ${Path.of(msg.uri())} ${msg.protocolVersion().text()}\r\n", Charsets.US_ASCII)
            msg.headers().forEach { (key, value) ->
                replayedRequestHead.apply {
                    writeCharSequence(key, Charsets.US_ASCII)
                    writeCharSequence(": ", Charsets.US_ASCII)
                    writeCharSequence(value, Charsets.UTF_8)
                    writeCharSequence("\r\n", Charsets.US_ASCII)
                }
            }
            replayedRequestHead.writeCharSequence("\r\n", Charsets.US_ASCII)
            val requestBody = msg.content()
            requestBody.retain()
            val responseBody = ctx.alloc().compositeBuffer(2).apply {
                addComponents(true, replayedRequestHead)
                addComponents(true, requestBody)
            }
            val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.OK, responseBody)
            response.headers().apply {
                set(HttpHeaderNames.CONTENT_TYPE, "message/http")
                set(HttpHeaderNames.CONTENT_LENGTH, responseBody.readableBytes())
            }
            ctx.writeAndFlush(response)
        } else {
            log.warn(ctx) {
                "Got request with unhandled method '${msg.method().name()}'"
            }
            val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.METHOD_NOT_ALLOWED)
            response.headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
            ctx.writeAndFlush(response)
        }
    }
}
@@ -1,14 +1,14 @@
-package net.woggioni.rbcs.server.throttling
+package net.woggioni.gbcs.server.throttling
 
+import net.woggioni.gbcs.api.Configuration
 import net.woggioni.jwo.Bucket
-import net.woggioni.rbcs.api.Configuration
 import java.net.InetSocketAddress
 import java.util.Arrays
 import java.util.concurrent.ConcurrentHashMap
 import java.util.function.Function
 
 class BucketManager private constructor(
-    private val bucketsByUser: Map<Configuration.User, List<Bucket>> = HashMap(),
+    private val bucketsByUser: Map<Configuration.User, Bucket> = HashMap(),
     private val bucketsByGroup: Map<Configuration.Group, Bucket> = HashMap(),
     loader: Function<InetSocketAddress, Bucket>?
 ) {
@@ -43,27 +43,22 @@ class BucketManager private constructor(
 
     companion object {
         fun from(cfg : Configuration) : BucketManager {
-            val bucketsByUser = cfg.users.values.asSequence().map { user ->
-                val buckets = (
-                    user.quota
-                        ?.let { quota ->
-                            sequenceOf(quota)
-                        } ?: user.groups.asSequence()
-                        .mapNotNull(Configuration.Group::getUserQuota)
-                    ).map { quota ->
-                    Bucket.local(
-                        quota.maxAvailableCalls,
-                        quota.calls,
-                        quota.period,
-                        quota.initialAvailableCalls
-                    )
-                }.toList()
-                user to buckets
+            val bucketsByUser = cfg.users.values.asSequence().filter {
+                it.quota != null
+            }.map { user ->
+                val quota = user.quota
+                val bucket = Bucket.local(
+                    quota.maxAvailableCalls,
+                    quota.calls,
+                    quota.period,
+                    quota.initialAvailableCalls
+                )
+                user to bucket
             }.toMap()
             val bucketsByGroup = cfg.groups.values.asSequence().filter {
-                it.groupQuota != null
+                it.quota != null
             }.map { group ->
-                val quota = group.groupQuota
+                val quota = group.quota
                 val bucket = Bucket.local(
                     quota.maxAvailableCalls,
                     quota.calls,
@@ -0,0 +1,99 @@
package net.woggioni.gbcs.server.throttling

import io.netty.channel.ChannelHandler.Sharable
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.handler.codec.http.DefaultFullHttpResponse
import io.netty.handler.codec.http.HttpHeaderNames
import io.netty.handler.codec.http.HttpResponseStatus
import io.netty.handler.codec.http.HttpVersion
import net.woggioni.gbcs.api.Configuration
import net.woggioni.gbcs.common.contextLogger
import net.woggioni.gbcs.server.GradleBuildCacheServer
import net.woggioni.jwo.Bucket
import net.woggioni.jwo.LongMath
import java.net.InetSocketAddress
import java.time.Duration
import java.time.temporal.ChronoUnit
import java.util.concurrent.TimeUnit


@Sharable
class ThrottlingHandler(cfg: Configuration) :
    ChannelInboundHandlerAdapter() {

    private val log = contextLogger()
    private val bucketManager = BucketManager.from(cfg)

    private val connectionConfiguration = cfg.connection

    /**
     * If the suggested waiting time from the bucket is lower than this
     * amount, then the server will simply wait by itself before sending a response
     * instead of replying with 429
     */
    private val waitThreshold = minOf(
        connectionConfiguration.idleTimeout,
        connectionConfiguration.readIdleTimeout,
        connectionConfiguration.writeIdleTimeout
    ).dividedBy(2)

    override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
        val buckets = mutableListOf<Bucket>()
        val user = ctx.channel().attr(GradleBuildCacheServer.userAttribute).get()
        if (user != null) {
            bucketManager.getBucketByUser(user)?.let(buckets::add)
        }
        val groups = ctx.channel().attr(GradleBuildCacheServer.groupAttribute).get() ?: emptySet()
        if (groups.isNotEmpty()) {
            groups.forEach { group ->
                bucketManager.getBucketByGroup(group)?.let(buckets::add)
            }
        }
        if (user == null && groups.isEmpty()) {
            bucketManager.getBucketByAddress(ctx.channel().remoteAddress() as InetSocketAddress)?.let(buckets::add)
        }
        if (buckets.isEmpty()) {
            return super.channelRead(ctx, msg)
        } else {
            handleBuckets(buckets, ctx, msg, true)
        }
    }

    private fun handleBuckets(buckets : List<Bucket>, ctx : ChannelHandlerContext, msg : Any, delayResponse : Boolean) {
        var nextAttempt = -1L
        for (bucket in buckets) {
            val bucketNextAttempt = bucket.removeTokensWithEstimate(1)
            if (bucketNextAttempt > nextAttempt) {
                nextAttempt = bucketNextAttempt
            }
        }
        if(nextAttempt < 0) {
            super.channelRead(ctx, msg)
            return
        }
        val waitDuration = Duration.of(LongMath.ceilDiv(nextAttempt, 100_000_000L) * 100L, ChronoUnit.MILLIS)
        if (delayResponse && waitDuration < waitThreshold) {
            ctx.executor().schedule({
                handleBuckets(buckets, ctx, msg, false)
            }, waitDuration.toMillis(), TimeUnit.MILLISECONDS)
        } else {
            sendThrottledResponse(ctx, waitDuration)
        }
    }

    private fun sendThrottledResponse(ctx: ChannelHandlerContext, retryAfter: Duration) {
        val response = DefaultFullHttpResponse(
            HttpVersion.HTTP_1_1,
            HttpResponseStatus.TOO_MANY_REQUESTS
        )
        response.headers()[HttpHeaderNames.CONTENT_LENGTH] = 0
        retryAfter.seconds.takeIf {
            it > 0
        }?.let {
            response.headers()[HttpHeaderNames.RETRY_AFTER] = retryAfter.seconds
        }

        ctx.writeAndFlush(response)
    }
}
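Editor's note (not part of the diff): `handleBuckets` above turns the bucket's next-attempt estimate into a wait rounded up to 100 ms steps via `LongMath.ceilDiv`; the estimate appears to be in nanoseconds, given the `100_000_000` divisor and the later `toMillis()` conversion. An equivalent sketch in plain arithmetic, with hypothetical names:

```kotlin
import java.time.Duration
import java.time.temporal.ChronoUnit

// Same rounding as LongMath.ceilDiv(nextAttempt, 100_000_000L) * 100L, written with plain arithmetic.
fun waitDurationFor(nextAttemptNanos: Long): Duration {
    val hundredMillisSteps = (nextAttemptNanos + 100_000_000L - 1) / 100_000_000L
    return Duration.of(hundredMillisSteps * 100L, ChronoUnit.MILLIS)
}

fun main() {
    println(waitDurationFor(250_000_000L)) // 250 ms worth of nanoseconds rounds up to PT0.3S
}
```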
@@ -0,0 +1,2 @@
net.woggioni.gbcs.server.cache.FileSystemCacheProvider
net.woggioni.gbcs.server.cache.InMemoryCacheProvider
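Editor's note (not part of the diff): the two entries above are standard `java.util.ServiceLoader` registrations, which `CacheSerializers` loads at runtime. A self-contained sketch of the lookup mechanism, using a hypothetical stand-in interface rather than the real `net.woggioni.gbcs.api.CacheProvider`:

```kotlin
import java.util.ServiceLoader

// Stand-in for the real CacheProvider API; in the actual module the two providers
// registered in META-INF/services above would be returned by the load() call.
interface Provider {
    fun xmlType(): String
}

fun main() {
    val providersByType = ServiceLoader.load(Provider::class.java)
        .associateBy { it.xmlType() }
    println(providersByType.keys) // empty here, since this sketch registers no implementations
}
```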
@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<gbcs:server
        xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
        xmlns:gbcs="urn:net.woggioni.gbcs.server"
        xs:schemaLocation="urn:net.woggioni.gbcs.server jpms://net.woggioni.gbcs.server/net/woggioni/gbcs/server/schema/gbcs.xsd">
    <bind host="127.0.0.1" port="8080" incoming-connections-backlog-size="1024"/>
    <connection
            max-request-size="67108864"
            idle-timeout="PT30S"
            read-timeout="PT10S"
            write-timeout="PT10S"
            read-idle-timeout="PT60S"
            write-idle-timeout="PT60S"/>
    <event-executor use-virtual-threads="true"/>
    <cache xs:type="gbcs:fileSystemCacheType" path="/tmp/gbcs" max-age="P7D"/>
    <authentication>
        <none/>
    </authentication>
</gbcs:server>
@@ -0,0 +1,222 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<xs:schema targetNamespace="urn:net.woggioni.gbcs.server"
           xmlns:xs="http://www.w3.org/2001/XMLSchema"
           xmlns:gbcs="urn:net.woggioni.gbcs.server"
           elementFormDefault="unqualified">
    <xs:element name="server" type="gbcs:serverType"/>

    <xs:complexType name="serverType">
        <xs:sequence minOccurs="0">
            <xs:element name="bind" type="gbcs:bindType" maxOccurs="1"/>
            <xs:element name="connection" type="gbcs:connectionType" minOccurs="0" maxOccurs="1"/>
            <xs:element name="event-executor" type="gbcs:eventExecutorType" minOccurs="0" maxOccurs="1"/>
            <xs:element name="cache" type="gbcs:cacheType" maxOccurs="1"/>
            <xs:element name="authorization" type="gbcs:authorizationType" minOccurs="0">
                <xs:key name="userId">
                    <xs:selector xpath="users/user"/>
                    <xs:field xpath="@name"/>
                </xs:key>
                <xs:keyref name="userRef" refer="gbcs:userId">
                    <xs:selector xpath="groups/group/users/user"/>
                    <xs:field xpath="@ref"/>
                </xs:keyref>
            </xs:element>
            <xs:element name="authentication" type="gbcs:authenticationType" minOccurs="0" maxOccurs="1"/>
            <xs:element name="tls" type="gbcs:tlsType" minOccurs="0" maxOccurs="1"/>
        </xs:sequence>
        <xs:attribute name="path" type="xs:string" use="optional"/>
    </xs:complexType>

    <xs:complexType name="bindType">
        <xs:attribute name="host" type="xs:token" use="required"/>
        <xs:attribute name="port" type="xs:unsignedShort" use="required"/>
        <xs:attribute name="incoming-connections-backlog-size" type="xs:unsignedInt" use="optional" default="1024"/>
    </xs:complexType>

    <xs:complexType name="connectionType">
        <xs:attribute name="read-timeout" type="xs:duration" use="optional" default="PT0S"/>
        <xs:attribute name="write-timeout" type="xs:duration" use="optional" default="PT0S"/>
        <xs:attribute name="idle-timeout" type="xs:duration" use="optional" default="PT30S"/>
        <xs:attribute name="read-idle-timeout" type="xs:duration" use="optional" default="PT60S"/>
        <xs:attribute name="write-idle-timeout" type="xs:duration" use="optional" default="PT60S"/>
        <xs:attribute name="max-request-size" type="xs:unsignedInt" use="optional" default="67108864"/>
    </xs:complexType>

    <xs:complexType name="eventExecutorType">
        <xs:attribute name="use-virtual-threads" type="xs:boolean" use="optional" default="true"/>
    </xs:complexType>

    <xs:complexType name="cacheType" abstract="true"/>

    <xs:complexType name="inMemoryCacheType">
        <xs:complexContent>
            <xs:extension base="gbcs:cacheType">
                <xs:attribute name="max-age" type="xs:duration" default="P1D"/>
                <xs:attribute name="digest" type="xs:token" default="MD5"/>
                <xs:attribute name="enable-compression" type="xs:boolean" default="true"/>
                <xs:attribute name="compression-level" type="xs:byte" default="-1"/>
            </xs:extension>
        </xs:complexContent>
    </xs:complexType>

    <xs:complexType name="fileSystemCacheType">
        <xs:complexContent>
            <xs:extension base="gbcs:cacheType">
                <xs:attribute name="path" type="xs:string" use="required"/>
                <xs:attribute name="max-age" type="xs:duration" default="P1D"/>
                <xs:attribute name="digest" type="xs:token" default="MD5"/>
                <xs:attribute name="enable-compression" type="xs:boolean" default="true"/>
                <xs:attribute name="compression-level" type="xs:byte" default="-1"/>
            </xs:extension>
        </xs:complexContent>
    </xs:complexType>

    <xs:complexType name="tlsCertificateAuthorizationType">
        <xs:sequence>
            <xs:element name="group-extractor" type="gbcs:X500NameExtractorType" minOccurs="0"/>
            <xs:element name="user-extractor" type="gbcs:X500NameExtractorType" minOccurs="0"/>
        </xs:sequence>
    </xs:complexType>

    <xs:complexType name="X500NameExtractorType">
        <xs:attribute name="attribute-name" type="xs:token"/>
        <xs:attribute name="pattern" type="xs:token"/>
    </xs:complexType>

    <xs:complexType name="authorizationType">
        <xs:all>
            <xs:element name="users" type="gbcs:usersType"/>
            <xs:element name="groups" type="gbcs:groupsType">
                <xs:unique name="groupKey">
                    <xs:selector xpath="group"/>
                    <xs:field xpath="@name"/>
                </xs:unique>
            </xs:element>
        </xs:all>
    </xs:complexType>

    <xs:complexType name="authenticationType">
        <xs:choice>
            <xs:element name="basic"/>
            <xs:element name="client-certificate" type="gbcs:tlsCertificateAuthorizationType"/>
            <xs:element name="none"/>
        </xs:choice>
    </xs:complexType>

    <xs:complexType name="quotaType">
        <xs:attribute name="calls" type="xs:positiveInteger" use="required"/>
        <xs:attribute name="period" type="xs:duration" use="required"/>
        <xs:attribute name="max-available-calls" type="xs:positiveInteger" use="optional"/>
        <xs:attribute name="initial-available-calls" type="xs:unsignedInt" use="optional"/>
    </xs:complexType>

    <xs:complexType name="anonymousUserType">
        <xs:sequence>
            <xs:element name="quota" type="gbcs:quotaType" minOccurs="0" maxOccurs="1"/>
        </xs:sequence>
    </xs:complexType>

    <xs:complexType name="userType">
        <xs:sequence>
            <xs:element name="quota" type="gbcs:quotaType" minOccurs="0" maxOccurs="1"/>
        </xs:sequence>
        <xs:attribute name="name" type="xs:token" use="required"/>
        <xs:attribute name="password" type="xs:string" use="optional"/>
    </xs:complexType>

    <xs:complexType name="usersType">
        <xs:sequence>
            <xs:element name="user" type="gbcs:userType" minOccurs="0" maxOccurs="unbounded"/>
            <xs:element name="anonymous" type="gbcs:anonymousUserType" minOccurs="0" maxOccurs="1"/>
        </xs:sequence>
    </xs:complexType>

    <xs:complexType name="groupsType">
        <xs:sequence>
            <xs:element name="group" type="gbcs:groupType" maxOccurs="unbounded" minOccurs="0"/>
        </xs:sequence>
    </xs:complexType>

    <xs:complexType name="groupType">
        <xs:sequence>
            <xs:element name="users" type="gbcs:userRefsType" maxOccurs="1" minOccurs="0">
                <xs:unique name="userRefWriterKey">
                    <xs:selector xpath="user"/>
                    <xs:field xpath="@ref"/>
                </xs:unique>
            </xs:element>
            <xs:element name="roles" type="gbcs:rolesType" maxOccurs="1" minOccurs="0"/>
            <xs:element name="quota" type="gbcs:quotaType" minOccurs="0" maxOccurs="1"/>
        </xs:sequence>
        <xs:attribute name="name" type="xs:token"/>
    </xs:complexType>

    <xs:simpleType name="role" final="restriction" >
        <xs:restriction base="xs:token">
            <xs:enumeration value="READER" />
            <xs:enumeration value="WRITER" />
        </xs:restriction>
    </xs:simpleType>

    <xs:complexType name="rolesType">
        <xs:sequence>
            <xs:choice maxOccurs="unbounded">
                <xs:element name="writer"/>
                <xs:element name="reader"/>
            </xs:choice>
        </xs:sequence>
    </xs:complexType>

    <xs:complexType name="userRefsType">
        <xs:sequence>
            <xs:element name="user" type="gbcs:userRefType" maxOccurs="unbounded" minOccurs="0"/>
            <xs:element name="anonymous" minOccurs="0" maxOccurs="1"/>
        </xs:sequence>
    </xs:complexType>

    <xs:complexType name="userRefType">
        <xs:attribute name="ref" type="xs:string" use="required"/>
    </xs:complexType>

    <xs:complexType name="tlsType">
        <xs:all>
            <xs:element name="keystore" type="gbcs:keyStoreType" />
            <xs:element name="truststore" type="gbcs:trustStoreType" minOccurs="0"/>
        </xs:all>
    </xs:complexType>

    <xs:complexType name="keyStoreType">
        <xs:attribute name="file" type="xs:string" use="required"/>
        <xs:attribute name="password" type="xs:string"/>
        <xs:attribute name="key-alias" type="xs:string" use="required"/>
        <xs:attribute name="key-password" type="xs:string"/>
    </xs:complexType>

    <xs:complexType name="trustStoreType">
        <xs:attribute name="file" type="xs:string" use="required"/>
        <xs:attribute name="password" type="xs:string"/>
        <xs:attribute name="check-certificate-status" type="xs:boolean"/>
        <xs:attribute name="require-client-certificate" type="xs:boolean" use="optional" default="false"/>
    </xs:complexType>

    <xs:complexType name="propertiesType">
        <xs:sequence>
            <xs:element maxOccurs="unbounded" minOccurs="0" name="property" type="gbcs:propertyType"/>
        </xs:sequence>
    </xs:complexType>

    <xs:complexType name="propertyType">
        <xs:simpleContent>
            <xs:extension base="xs:string">
                <xs:attribute name="key" type="xs:string" use="required"/>
            </xs:extension>
        </xs:simpleContent>
    </xs:complexType>

    <xs:complexType name="hostAndPortType">
        <xs:attribute name="host" type="xs:string" use="required"/>
        <xs:attribute name="port" type="xs:unsignedShort" use="required"/>
    </xs:complexType>


</xs:schema>
@@ -1,4 +1,4 @@
-package net.woggioni.rbcs.server.test.utils;
+package net.woggioni.gbcs.server.test.utils;
 
 import org.bouncycastle.asn1.DERSequence;
 import org.bouncycastle.asn1.x500.X500Name;
@@ -0,0 +1,30 @@
+package net.woggioni.gbcs.server.test.utils;
+
+import net.woggioni.jwo.JWO;
+
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.ServerSocket;
+
+public class NetworkUtils {
+
+    private static final int MAX_ATTEMPTS = 50;
+
+    public static int getFreePort() {
+        int count = 0;
+        while(count < MAX_ATTEMPTS) {
+            try (ServerSocket serverSocket = new ServerSocket(0, 50, InetAddress.getLocalHost())) {
+                final var candidate = serverSocket.getLocalPort();
+                if (candidate > 0) {
+                    return candidate;
+                } else {
+                    JWO.newThrowable(RuntimeException.class, "Got invalid port number: %d", candidate);
+                    throw new RuntimeException("Error trying to find an open port");
+                }
+            } catch (IOException ignored) {
+                ++count;
+            }
+        }
+        throw new RuntimeException("Error trying to find an open port");
+    }
+}
@@ -1,11 +1,11 @@
-package net.woggioni.rbcs.server.test
+package net.woggioni.gbcs.server.test
 
-import net.woggioni.rbcs.api.Configuration
-import net.woggioni.rbcs.api.Role
-import net.woggioni.rbcs.common.RBCS.getFreePort
-import net.woggioni.rbcs.common.Xml
-import net.woggioni.rbcs.server.cache.FileSystemCacheConfiguration
-import net.woggioni.rbcs.server.configuration.Serializer
+import net.woggioni.gbcs.api.Configuration
+import net.woggioni.gbcs.api.Role
+import net.woggioni.gbcs.common.Xml
+import net.woggioni.gbcs.server.cache.FileSystemCacheConfiguration
+import net.woggioni.gbcs.server.configuration.Serializer
+import net.woggioni.gbcs.server.test.utils.NetworkUtils
 import java.net.URI
 import java.net.http.HttpRequest
 import java.nio.charset.StandardCharsets
@@ -23,9 +23,9 @@ abstract class AbstractBasicAuthServerTest : AbstractServerTest() {
 
     protected val random = Random(101325)
     protected val keyValuePair = newEntry(random)
-    protected val serverPath = "rbcs"
-    protected val readersGroup = Configuration.Group("readers", setOf(Role.Reader), null, null)
-    protected val writersGroup = Configuration.Group("writers", setOf(Role.Writer), null, null)
+    protected val serverPath = "gbcs"
+    protected val readersGroup = Configuration.Group("readers", setOf(Role.Reader), null)
+    protected val writersGroup = Configuration.Group("writers", setOf(Role.Writer), null)
 
     abstract protected val users : List<Configuration.User>
 
@@ -33,11 +33,13 @@ abstract class AbstractBasicAuthServerTest : AbstractServerTest() {
         this.cacheDir = testDir.resolve("cache")
         cfg = Configuration.of(
             "127.0.0.1",
-            getFreePort(),
+            NetworkUtils.getFreePort(),
             50,
             serverPath,
             Configuration.EventExecutor(false),
             Configuration.Connection(
+                Duration.of(10, ChronoUnit.SECONDS),
+                Duration.of(10, ChronoUnit.SECONDS),
                 Duration.of(60, ChronoUnit.SECONDS),
                 Duration.of(30, ChronoUnit.SECONDS),
                 Duration.of(30, ChronoUnit.SECONDS),
@@ -45,13 +47,11 @@ abstract class AbstractBasicAuthServerTest : AbstractServerTest() {
             ),
             users.asSequence().map { it.name to it}.toMap(),
             sequenceOf(writersGroup, readersGroup).map { it.name to it}.toMap(),
-            FileSystemCacheConfiguration(
-                this.cacheDir,
+            FileSystemCacheConfiguration(this.cacheDir,
                 maxAge = Duration.ofSeconds(3600 * 24),
                 digestAlgorithm = "MD5",
                 compressionLevel = Deflater.DEFAULT_COMPRESSION,
-                compressionEnabled = false,
-                chunkSize = 0x1000
+                compressionEnabled = false
             ),
             Configuration.BasicAuthentication(),
             null,
@@ -1,7 +1,7 @@
-package net.woggioni.rbcs.server.test
+package net.woggioni.gbcs.server.test
 
-import net.woggioni.rbcs.api.Configuration
-import net.woggioni.rbcs.server.RemoteBuildCacheServer
+import net.woggioni.gbcs.server.GradleBuildCacheServer
+import net.woggioni.gbcs.api.Configuration
 import org.junit.jupiter.api.AfterAll
 import org.junit.jupiter.api.BeforeAll
 import org.junit.jupiter.api.MethodOrderer
@@ -19,7 +19,7 @@ abstract class AbstractServerTest {
 
     protected lateinit var testDir : Path
 
-    private var serverHandle : RemoteBuildCacheServer.ServerHandle? = null
+    private var serverHandle : GradleBuildCacheServer.ServerHandle? = null
 
     @BeforeAll
     fun setUp0(@TempDir tmpDir : Path) {
@@ -39,13 +39,12 @@
     abstract fun tearDown()
 
     private fun startServer(cfg : Configuration) {
-        this.serverHandle = RemoteBuildCacheServer(cfg).run()
+        this.serverHandle = GradleBuildCacheServer(cfg).run()
    }
 
     private fun stopServer() {
-        this.serverHandle?.let {
-            it.sendShutdownSignal()
-            it.get()
+        this.serverHandle?.use {
+            it.shutdown()
         }
     }
 }
@@ -1,13 +1,14 @@
-package net.woggioni.rbcs.server.test
+package net.woggioni.gbcs.server.test
 
-import net.woggioni.rbcs.api.Configuration
-import net.woggioni.rbcs.api.Role
-import net.woggioni.rbcs.common.RBCS.getFreePort
-import net.woggioni.rbcs.common.Xml
-import net.woggioni.rbcs.server.cache.FileSystemCacheConfiguration
-import net.woggioni.rbcs.server.configuration.Serializer
-import net.woggioni.rbcs.server.test.utils.CertificateUtils
-import net.woggioni.rbcs.server.test.utils.CertificateUtils.X509Credentials
+import net.woggioni.gbcs.api.Configuration
+import net.woggioni.gbcs.api.Role
+import net.woggioni.gbcs.common.Xml
+import net.woggioni.gbcs.server.cache.FileSystemCacheConfiguration
+import net.woggioni.gbcs.server.cache.InMemoryCacheConfiguration
+import net.woggioni.gbcs.server.configuration.Serializer
+import net.woggioni.gbcs.server.test.utils.CertificateUtils
+import net.woggioni.gbcs.server.test.utils.CertificateUtils.X509Credentials
+import net.woggioni.gbcs.server.test.utils.NetworkUtils
 import org.bouncycastle.asn1.x500.X500Name
 import java.net.URI
 import java.net.http.HttpClient
@@ -30,9 +31,9 @@ import kotlin.random.Random
 abstract class AbstractTlsServerTest : AbstractServerTest() {
 
     companion object {
-        private const val CA_CERTIFICATE_ENTRY = "rbcs-ca"
-        private const val CLIENT_CERTIFICATE_ENTRY = "rbcs-client"
-        private const val SERVER_CERTIFICATE_ENTRY = "rbcs-server"
+        private const val CA_CERTIFICATE_ENTRY = "gbcs-ca"
+        private const val CLIENT_CERTIFICATE_ENTRY = "gbcs-client"
+        private const val SERVER_CERTIFICATE_ENTRY = "gbcs-server"
         private const val PASSWORD = "password"
     }
 
@@ -45,8 +46,8 @@ abstract class AbstractTlsServerTest : AbstractServerTest() {
     private lateinit var trustStore: KeyStore
     protected lateinit var ca: X509Credentials
 
-    protected val readersGroup = Configuration.Group("readers", setOf(Role.Reader), null, null)
-    protected val writersGroup = Configuration.Group("writers", setOf(Role.Writer), null, null)
+    protected val readersGroup = Configuration.Group("readers", setOf(Role.Reader), null)
+    protected val writersGroup = Configuration.Group("writers", setOf(Role.Writer), null)
     protected val random = Random(101325)
     protected val keyValuePair = newEntry(random)
     private val serverPath : String? = null
@@ -138,11 +139,13 @@ abstract class AbstractTlsServerTest : AbstractServerTest() {
         createKeyStoreAndTrustStore()
         cfg = Configuration(
             "127.0.0.1",
-            getFreePort(),
+            NetworkUtils.getFreePort(),
             100,
             serverPath,
             Configuration.EventExecutor(false),
             Configuration.Connection(
+                Duration.of(10, ChronoUnit.SECONDS),
+                Duration.of(10, ChronoUnit.SECONDS),
                 Duration.of(60, ChronoUnit.SECONDS),
                 Duration.of(30, ChronoUnit.SECONDS),
                 Duration.of(30, ChronoUnit.SECONDS),
@@ -152,10 +155,9 @@ abstract class AbstractTlsServerTest : AbstractServerTest() {
             sequenceOf(writersGroup, readersGroup).map { it.name to it }.toMap(),
             FileSystemCacheConfiguration(this.cacheDir,
                 maxAge = Duration.ofSeconds(3600 * 24),
-                compressionEnabled = false,
+                compressionEnabled = true,
                 compressionLevel = Deflater.DEFAULT_COMPRESSION,
-                digestAlgorithm = "MD5",
-                chunkSize = 0x1000
+                digestAlgorithm = "MD5"
             ),
 //            InMemoryCacheConfiguration(
 //                maxAge = Duration.ofSeconds(3600 * 24),
@@ -1,9 +1,9 @@
-package net.woggioni.rbcs.server.test
+package net.woggioni.gbcs.server.test
 
 import io.netty.handler.codec.http.HttpResponseStatus
-import net.woggioni.rbcs.api.Configuration
-import net.woggioni.rbcs.api.Role
-import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
+import net.woggioni.gbcs.api.Configuration
+import net.woggioni.gbcs.api.Role
+import net.woggioni.gbcs.common.PasswordSecurity.hashPassword
 import org.junit.jupiter.api.Assertions
 import org.junit.jupiter.api.Order
 import org.junit.jupiter.api.Test
@@ -86,7 +86,7 @@ class BasicAuthServerTest : AbstractBasicAuthServerTest() {
     @Test
     @Order(4)
     fun putAsAWriterUser() {
-        val client: HttpClient = HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build()
+        val client: HttpClient = HttpClient.newHttpClient()
 
         val (key, value) = keyValuePair
         val user = cfg.users.values.find {
@@ -1,10 +1,10 @@
-package net.woggioni.rbcs.server.test
+package net.woggioni.gbcs.server.test
 
-import net.woggioni.rbcs.common.RBCS.toUrl
-import net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory
-import net.woggioni.rbcs.common.Xml
-import net.woggioni.rbcs.server.configuration.Parser
-import net.woggioni.rbcs.server.configuration.Serializer
+import net.woggioni.gbcs.common.GBCS.toUrl
+import net.woggioni.gbcs.common.GbcsUrlStreamHandlerFactory
+import net.woggioni.gbcs.common.Xml
+import net.woggioni.gbcs.server.configuration.Parser
+import net.woggioni.gbcs.server.configuration.Serializer
 import org.junit.jupiter.api.Assertions
 import org.junit.jupiter.api.io.TempDir
 import org.junit.jupiter.params.ParameterizedTest
@@ -17,18 +17,18 @@ class ConfigurationTest {
 
     @ValueSource(
         strings = [
-            "classpath:net/woggioni/rbcs/server/test/valid/rbcs-default.xml",
-            "classpath:net/woggioni/rbcs/server/test/valid/rbcs-memcached.xml",
-            "classpath:net/woggioni/rbcs/server/test/valid/rbcs-tls.xml",
-            "classpath:net/woggioni/rbcs/server/test/valid/rbcs-memcached-tls.xml",
+            "classpath:net/woggioni/gbcs/server/test/valid/gbcs-default.xml",
+            "classpath:net/woggioni/gbcs/server/test/valid/gbcs-memcached.xml",
+            "classpath:net/woggioni/gbcs/server/test/valid/gbcs-tls.xml",
+            "classpath:net/woggioni/gbcs/server/test/valid/gbcs-memcached-tls.xml",
         ]
     )
     @ParameterizedTest
     fun test(configurationUrl: String, @TempDir testDir: Path) {
-        RbcsUrlStreamHandlerFactory.install()
+        GbcsUrlStreamHandlerFactory.install()
         val doc = Xml.parseXml(configurationUrl.toUrl())
         val cfg = Parser.parse(doc)
-        val configFile = testDir.resolve("rbcs.xml")
+        val configFile = testDir.resolve("gbcs.xml")
         Files.newOutputStream(configFile).use {
             Xml.write(Serializer.serialize(cfg), it)
         }
@@ -40,15 +40,15 @@ class ConfigurationTest {
 
     @ValueSource(
         strings = [
-            "classpath:net/woggioni/rbcs/server/test/invalid/invalid-user-ref.xml",
-            "classpath:net/woggioni/rbcs/server/test/invalid/duplicate-anonymous-user.xml",
-            "classpath:net/woggioni/rbcs/server/test/invalid/duplicate-anonymous-user2.xml",
-            "classpath:net/woggioni/rbcs/server/test/invalid/multiple-user-quota.xml",
+            "classpath:net/woggioni/gbcs/server/test/invalid/invalid-user-ref.xml",
+            "classpath:net/woggioni/gbcs/server/test/invalid/duplicate-anonymous-user.xml",
+            "classpath:net/woggioni/gbcs/server/test/invalid/duplicate-anonymous-user2.xml",
+            "classpath:net/woggioni/gbcs/server/test/invalid/multiple-user-quota.xml",
         ]
     )
     @ParameterizedTest
     fun invalidConfigurationTest(configurationUrl: String) {
-        RbcsUrlStreamHandlerFactory.install()
+        GbcsUrlStreamHandlerFactory.install()
         Assertions.assertThrows(SAXParseException::class.java) {
             Xml.parseXml(configurationUrl.toUrl())
         }
@@ -1,8 +1,8 @@
-package net.woggioni.rbcs.server.test
+package net.woggioni.gbcs.server.test
 
 import io.netty.handler.codec.http.HttpResponseStatus
-import net.woggioni.rbcs.api.Configuration
-import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
+import net.woggioni.gbcs.api.Configuration
+import net.woggioni.gbcs.common.PasswordSecurity.hashPassword
 import org.junit.jupiter.api.Assertions
 import org.junit.jupiter.api.Order
 import org.junit.jupiter.api.Test
@@ -1,7 +1,7 @@
-package net.woggioni.rbcs.server.test
+package net.woggioni.gbcs.server.test
 
 import io.netty.handler.codec.http.HttpResponseStatus
-import net.woggioni.rbcs.api.Configuration
+import net.woggioni.gbcs.api.Configuration
 import org.junit.jupiter.api.Assertions
 import org.junit.jupiter.api.Order
 import org.junit.jupiter.api.Test
@@ -1,11 +1,12 @@
-package net.woggioni.rbcs.server.test
+package net.woggioni.gbcs.server.test
 
 import io.netty.handler.codec.http.HttpResponseStatus
-import net.woggioni.rbcs.api.Configuration
-import net.woggioni.rbcs.common.RBCS.getFreePort
-import net.woggioni.rbcs.common.Xml
-import net.woggioni.rbcs.server.cache.InMemoryCacheConfiguration
-import net.woggioni.rbcs.server.configuration.Serializer
+import net.woggioni.gbcs.api.Configuration
+import net.woggioni.gbcs.common.Xml
+import net.woggioni.gbcs.server.cache.FileSystemCacheConfiguration
+import net.woggioni.gbcs.server.cache.InMemoryCacheConfiguration
+import net.woggioni.gbcs.server.configuration.Serializer
+import net.woggioni.gbcs.server.test.utils.NetworkUtils
 import org.junit.jupiter.api.Assertions
 import org.junit.jupiter.api.Order
 import org.junit.jupiter.api.Test
@@ -33,11 +34,13 @@ class NoAuthServerTest : AbstractServerTest() {
         this.cacheDir = testDir.resolve("cache")
         cfg = Configuration(
             "127.0.0.1",
-            getFreePort(),
+            NetworkUtils.getFreePort(),
             100,
             serverPath,
             Configuration.EventExecutor(false),
             Configuration.Connection(
+                Duration.of(10, ChronoUnit.SECONDS),
+                Duration.of(10, ChronoUnit.SECONDS),
                 Duration.of(60, ChronoUnit.SECONDS),
                 Duration.of(30, ChronoUnit.SECONDS),
                 Duration.of(30, ChronoUnit.SECONDS),
@@ -49,9 +52,7 @@ class NoAuthServerTest : AbstractServerTest() {
                 maxAge = Duration.ofSeconds(3600 * 24),
                 compressionEnabled = true,
                 digestAlgorithm = "MD5",
-                compressionLevel = Deflater.DEFAULT_COMPRESSION,
-                maxSize = 0x1000000,
-                chunkSize = 0x1000
+                compressionLevel = Deflater.DEFAULT_COMPRESSION
             ),
             null,
             null,
@@ -79,7 +80,7 @@ class NoAuthServerTest : AbstractServerTest() {
     @Test
     @Order(1)
     fun putWithNoAuthorizationHeader() {
-        val client: HttpClient = HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build()
+        val client: HttpClient = HttpClient.newHttpClient()
         val (key, value) = keyValuePair
 
         val requestBuilder = newRequestBuilder(key)
@@ -118,56 +119,6 @@
 
     @Test
     @Order(4)
-    fun getUnhandledPath() {
-        val client: HttpClient = HttpClient.newHttpClient()
-        val (key, _) = newEntry(random)
-        val requestBuilder = HttpRequest.newBuilder()
-            .uri(URI.create("http://${cfg.host}:${cfg.port}/some/other/path/$key"))
-        val response: HttpResponse<ByteArray> =
-            client.send(requestBuilder.build(), HttpResponse.BodyHandlers.ofByteArray())
-        Assertions.assertEquals(HttpResponseStatus.BAD_REQUEST.code(), response.statusCode())
-    }
-
-    @Test
-    @Order(5)
-    fun putUnhandledPath() {
-        val client: HttpClient = HttpClient.newHttpClient()
-        val (key, value) = newEntry(random)
-        val requestBuilder = HttpRequest.newBuilder()
-            .uri(URI.create("http://${cfg.host}:${cfg.port}/some/other/path/$key"))
-            .PUT(HttpRequest.BodyPublishers.ofByteArray(value))
-        val response: HttpResponse<ByteArray> =
-            client.send(requestBuilder.build(), HttpResponse.BodyHandlers.ofByteArray())
-        Assertions.assertEquals(HttpResponseStatus.BAD_REQUEST.code(), response.statusCode())
-    }
-
-    @Test
-    @Order(6)
-    fun getRelativeUnhandledPath() {
-        val client: HttpClient = HttpClient.newHttpClient()
-        val (key, _) = newEntry(random)
-        val requestBuilder = HttpRequest.newBuilder()
-            .uri(URI.create("http://${cfg.host}:${cfg.port}/some/nested/path/../../../some/other/path/$key"))
-        val response: HttpResponse<ByteArray> =
-            client.send(requestBuilder.build(), HttpResponse.BodyHandlers.ofByteArray())
-        Assertions.assertEquals(HttpResponseStatus.BAD_REQUEST.code(), response.statusCode())
-    }
-
-    @Test
-    @Order(7)
-    fun getRelativePath() {
-        val client: HttpClient = HttpClient.newHttpClient()
-        val (key, value) = keyValuePair
-        val requestBuilder = HttpRequest.newBuilder()
-            .uri(URI.create("http://${cfg.host}:${cfg.port}/some/other/path/../../nested/path/$key"))
-        val response: HttpResponse<ByteArray> =
-            client.send(requestBuilder.build(), HttpResponse.BodyHandlers.ofByteArray())
-        Assertions.assertEquals(HttpResponseStatus.OK.code(), response.statusCode())
-        Assertions.assertArrayEquals(value, response.body())
-    }
-
-    @Test
-    @Order(10)
     fun traceTest() {
         val client: HttpClient = HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build()
         val requestBuilder = newRequestBuilder("").method(
@@ -1,12 +1,13 @@
-package net.woggioni.rbcs.server.test
+package net.woggioni.gbcs.server.test
 
 import io.netty.handler.codec.http.HttpResponseStatus
-import net.woggioni.rbcs.api.Configuration
-import net.woggioni.rbcs.api.Role
+import net.woggioni.gbcs.api.Configuration
+import net.woggioni.gbcs.api.Role
 import org.bouncycastle.asn1.x500.X500Name
 import org.junit.jupiter.api.Assertions
 import org.junit.jupiter.api.Order
 import org.junit.jupiter.api.Test
+import org.junit.jupiter.params.provider.ArgumentsSource
 import java.net.http.HttpClient
 import java.net.http.HttpRequest
 import java.net.http.HttpResponse
@@ -1,4 +1,4 @@
-package net.woggioni.rbcs.server.test
+package net.woggioni.gbcs.server.test
 
 import org.junit.jupiter.api.Assertions
 import org.junit.jupiter.api.Test
Some files were not shown because too many files have changed in this diff.