Compare commits
1 Commits
23f2a351a6
...
0.0.2
Author | SHA1 | Date | |
---|---|---|---|
9de393c6ae
|
@@ -5,10 +5,17 @@ on:
|
|||||||
- '*'
|
- '*'
|
||||||
jobs:
|
jobs:
|
||||||
build:
|
build:
|
||||||
|
env:
|
||||||
|
RUNNER_TOOL_CACHE: /toolcache
|
||||||
runs-on: hostinger
|
runs-on: hostinger
|
||||||
steps:
|
steps:
|
||||||
- name: Checkout sources
|
- name: Checkout sources
|
||||||
uses: actions/checkout@v4
|
uses: actions/checkout@v4
|
||||||
|
- name: Setup Java
|
||||||
|
uses: actions/setup-java@v4
|
||||||
|
with:
|
||||||
|
distribution: graalvm
|
||||||
|
java-version: 21
|
||||||
- name: Setup Gradle
|
- name: Setup Gradle
|
||||||
uses: gradle/actions/setup-gradle@v3
|
uses: gradle/actions/setup-gradle@v3
|
||||||
- name: Execute Gradle build
|
- name: Execute Gradle build
|
||||||
@@ -31,7 +38,7 @@ jobs:
|
|||||||
username: woggioni
|
username: woggioni
|
||||||
password: ${{ secrets.PUBLISHER_TOKEN }}
|
password: ${{ secrets.PUBLISHER_TOKEN }}
|
||||||
-
|
-
|
||||||
name: Build rbcs Docker image
|
name: Build gbcs Docker image
|
||||||
uses: docker/build-push-action@v5.3.0
|
uses: docker/build-push-action@v5.3.0
|
||||||
with:
|
with:
|
||||||
context: "docker/build/docker"
|
context: "docker/build/docker"
|
||||||
@@ -39,12 +46,12 @@ jobs:
|
|||||||
push: true
|
push: true
|
||||||
pull: true
|
pull: true
|
||||||
tags: |
|
tags: |
|
||||||
gitea.woggioni.net/woggioni/rbcs:latest
|
gitea.woggioni.net/woggioni/gbcs:latest
|
||||||
gitea.woggioni.net/woggioni/rbcs:${{ steps.retrieve-version.outputs.VERSION }}
|
gitea.woggioni.net/woggioni/gbcs:${{ steps.retrieve-version.outputs.VERSION }}
|
||||||
target: release
|
target: release
|
||||||
cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
|
cache-from: type=registry,ref=gitea.woggioni.net/woggioni/gbcs:buildx
|
||||||
-
|
-
|
||||||
name: Build rbcs memcache Docker image
|
name: Build gbcs memcached Docker image
|
||||||
uses: docker/build-push-action@v5.3.0
|
uses: docker/build-push-action@v5.3.0
|
||||||
with:
|
with:
|
||||||
context: "docker/build/docker"
|
context: "docker/build/docker"
|
||||||
@@ -52,11 +59,11 @@ jobs:
|
|||||||
push: true
|
push: true
|
||||||
pull: true
|
pull: true
|
||||||
tags: |
|
tags: |
|
||||||
gitea.woggioni.net/woggioni/rbcs:memcache
|
gitea.woggioni.net/woggioni/gbcs:memcached
|
||||||
gitea.woggioni.net/woggioni/rbcs:memcache-${{ steps.retrieve-version.outputs.VERSION }}
|
gitea.woggioni.net/woggioni/gbcs:memcached-${{ steps.retrieve-version.outputs.VERSION }}
|
||||||
target: release-memcache
|
target: release-memcached
|
||||||
cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
|
cache-from: type=registry,ref=gitea.woggioni.net/woggioni/gbcs:buildx
|
||||||
cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/rbcs:buildx
|
cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/gbcs:buildx
|
||||||
- name: Publish artifacts
|
- name: Publish artifacts
|
||||||
env:
|
env:
|
||||||
PUBLISHER_TOKEN: ${{ secrets.PUBLISHER_TOKEN }}
|
PUBLISHER_TOKEN: ${{ secrets.PUBLISHER_TOKEN }}
|
||||||
|
2
.gitignore
vendored
2
.gitignore
vendored
@@ -4,4 +4,4 @@
|
|||||||
# Ignore Gradle build output directory
|
# Ignore Gradle build output directory
|
||||||
build
|
build
|
||||||
|
|
||||||
rbcs-cli/native-image/*.json
|
gbcs-cli/native-image/*.json
|
||||||
|
2
Dockerfile
Normal file
2
Dockerfile
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
FROM gitea.woggioni.net/woggioni/gbcs:memcached
|
||||||
|
COPY --chown=luser:luser conf/gbcs-memcached.xml /home/luser/.config/gbcs/gbcs.xml
|
20
LICENSE
20
LICENSE
@@ -1,20 +0,0 @@
|
|||||||
The MIT License (MIT)
|
|
||||||
|
|
||||||
Copyright (c) 2017 Y. T. CHUNG <zonyitoo@gmail.com>
|
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
|
||||||
this software and associated documentation files (the "Software"), to deal in
|
|
||||||
the Software without restriction, including without limitation the rights to
|
|
||||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
|
||||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
|
||||||
subject to the following conditions:
|
|
||||||
|
|
||||||
The above copyright notice and this permission notice shall be included in all
|
|
||||||
copies or substantial portions of the Software.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
||||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
|
||||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
|
||||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
|
||||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
|
||||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
|
209
README.md
209
README.md
@@ -1,209 +0,0 @@
|
|||||||
# Remote Build Cache Server
|
|
||||||
Remote Build Cache Server (shortened to RBCS) allows you to share and reuse unchanged build
|
|
||||||
and test outputs across the team. This speeds up local and CI builds since cycles are not wasted
|
|
||||||
re-building components that are unaffected by new code changes. RBCS supports both Gradle and
|
|
||||||
Maven build tool environments.
|
|
||||||
|
|
||||||
It comes with pluggable storage backends, the core application offers in-memory storage or disk-backed storage,
|
|
||||||
in addition to this there is an official plugin to use memcached as the storage backend.
|
|
||||||
|
|
||||||
It supports HTTP basic authentication or, alternatively, TLS certificate authentication, role-based access control (RBAC),
|
|
||||||
and throttling.
|
|
||||||
|
|
||||||
## Quickstart
|
|
||||||
|
|
||||||
### Downloading the jar file
|
|
||||||
You can download the latest version from [this link](https://gitea.woggioni.net/woggioni/-/packages/maven/net.woggioni:rbcs-cli/)
|
|
||||||
|
|
||||||
Assuming you have Java 21 or later installed, you can launch the server directly with
|
|
||||||
|
|
||||||
```bash
|
|
||||||
java -jar rbcs-cli.jar server
|
|
||||||
```
|
|
||||||
|
|
||||||
By default it will start an HTTP server bound to localhost and listening on port 8080 with no authentication,
|
|
||||||
writing data to the disk, that you can use for testing
|
|
||||||
|
|
||||||
### Using the Docker image
|
|
||||||
You can pull the latest Docker image with
|
|
||||||
```bash
|
|
||||||
docker pull gitea.woggioni.net/woggioni/rbcs:latest
|
|
||||||
```
|
|
||||||
|
|
||||||
By default it will start an HTTP server bound to localhost and listening on port 8080 with no authentication,
|
|
||||||
writing data to the disk, that you can use for testing
|
|
||||||
|
|
||||||
## Usage
|
|
||||||
|
|
||||||
### Configuration
|
|
||||||
The location of the `rbcs.xml` configuration file depends on the operating system,
|
|
||||||
Alternatively it can be changed setting the `RBCS_CONFIGURATION_DIR` environmental variable or `net.woggioni.rbcs.conf.dir` Java system property
|
|
||||||
to the directory that contain the `rbcs.xml` file.
|
|
||||||
|
|
||||||
The server configuration file follows the XML format and uses XML schema for validation
|
|
||||||
(you can find the schema for the main configuration file [here](https://gitea.woggioni.net/woggioni/rbcs/src/branch/master/rbcs-server/src/main/resources/net/woggioni/rbcs/server/schema/rbcs.xsd)).
|
|
||||||
|
|
||||||
The configuration values are enclosed inside XML attribute and support system property / environmental variable interpolation.
|
|
||||||
As an example, you can configure RBCS to read the server port number from the `RBCS_SERVER_PORT` environmental variable
|
|
||||||
and the bind address from the `rbc.bind.address` JVM system property with.
|
|
||||||
|
|
||||||
Full documentation for all tags and attributes is available [here](doc/server_configuration.md).
|
|
||||||
|
|
||||||
### Plugins
|
|
||||||
If you want to use memcache as a storage backend you'll also need to download [the memcache plugin](https://gitea.woggioni.net/woggioni/-/packages/maven/net.woggioni:rbcs-server-memcache/)
|
|
||||||
|
|
||||||
Plugins need to be stored in a folder named `plugins` in the located server's working directory
|
|
||||||
(the directory where the server process is started). They are shipped as TAR archives, so you need to extract
|
|
||||||
the content of the archive into the `plugins` directory for the server to pick them up.
|
|
||||||
|
|
||||||
### Using RBCS with Gradle
|
|
||||||
|
|
||||||
Add this to the `settings.gradle` file of your project
|
|
||||||
|
|
||||||
```groovy
|
|
||||||
buildCache {
|
|
||||||
remote(HttpBuildCache) {
|
|
||||||
url = 'https://rbcs.example.com/'
|
|
||||||
push = true
|
|
||||||
allowInsecureProtocol = false
|
|
||||||
// The credentials block is only required if you enable
|
|
||||||
// HTTP basic authentication on RBCS
|
|
||||||
credentials {
|
|
||||||
username = 'build-cache-user'
|
|
||||||
password = 'some-complicated-password'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
alternatively you can add this to `${GRADLE_HOME}/init.gradle` to configure the remote cache
|
|
||||||
at the system level
|
|
||||||
|
|
||||||
```groovy
|
|
||||||
gradle.settingsEvaluated { settings ->
|
|
||||||
settings.buildCache {
|
|
||||||
remote(HttpBuildCache) {
|
|
||||||
url = 'https://rbcs.example.com/'
|
|
||||||
push = true
|
|
||||||
allowInsecureProtocol = false
|
|
||||||
// The credentials block is only required if you enable
|
|
||||||
// HTTP basic authentication on RBCS
|
|
||||||
credentials {
|
|
||||||
username = 'build-cache-user'
|
|
||||||
password = 'some-complicated-password'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
add `org.gradle.caching=true` to your `<project>/gradle.properties` or run gradle with `--build-cache`.
|
|
||||||
|
|
||||||
Read [Gradle documentation](https://docs.gradle.org/current/userguide/build_cache.html) for more detailed information.
|
|
||||||
|
|
||||||
### Using RBCS with Maven
|
|
||||||
|
|
||||||
1. Create an `extensions.xml` in `<project>/.mvn/extensions.xml` with the following content
|
|
||||||
```xml
|
|
||||||
<extensions xmlns="http://maven.apache.org/EXTENSIONS/1.1.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
|
|
||||||
xsi:schemaLocation="http://maven.apache.org/EXTENSIONS/1.1.0 https://maven.apache.org/xsd/core-extensions-1.0.0.xsd">
|
|
||||||
<extension>
|
|
||||||
<groupId>org.apache.maven.extensions</groupId>
|
|
||||||
<artifactId>maven-build-cache-extension</artifactId>
|
|
||||||
<version>1.2.0</version>
|
|
||||||
</extension>
|
|
||||||
</extensions>
|
|
||||||
```
|
|
||||||
2. Copy [maven-build-cache-config.xml](https://maven.apache.org/extensions/maven-build-cache-extension/maven-build-cache-config.xml) into `<project>/.mvn/` folder
|
|
||||||
3. Edit the `cache/configuration/remote` element
|
|
||||||
```xml
|
|
||||||
<remote enabled="true" id="rbcs">
|
|
||||||
<url>https://rbcs.example.com/</url>
|
|
||||||
</remote>
|
|
||||||
```
|
|
||||||
4. Run maven with
|
|
||||||
```bash
|
|
||||||
mvn -Dmaven.build.cache.enabled=true -Dmaven.build.cache.debugOutput=true -Dmaven.build.cache.remote.save.enabled=true package
|
|
||||||
```
|
|
||||||
|
|
||||||
Alternatively you can set those properties in your `<project>/pom.xml`
|
|
||||||
|
|
||||||
|
|
||||||
Read [here](https://maven.apache.org/extensions/maven-build-cache-extension/remote-cache.html)
|
|
||||||
for more informations
|
|
||||||
|
|
||||||
## FAQ
|
|
||||||
### Why should I use a build cache?
|
|
||||||
|
|
||||||
#### Build Caches Improve Build & Test Performance
|
|
||||||
|
|
||||||
Building software consists of a number of steps, like compiling sources, executing tests, and linking binaries. We’ve seen that a binary artifact repository helps when such a step requires an external component by downloading the artifact from the repository rather than building it locally.
|
|
||||||
However, there are many additional steps in this build process which can be optimized to reduce the build time. An obvious strategy is to avoid executing build steps which dominate the total build time when these build steps are not needed.
|
|
||||||
Most build times are dominated by the testing step.
|
|
||||||
|
|
||||||
While binary repositories cannot capture the outcome of a test build step (only the test reports
|
|
||||||
when included in binary artifacts), build caches are designed to eliminate redundant executions
|
|
||||||
for every build step. Moreover, it generalizes the concept of avoiding work associated with any
|
|
||||||
incremental step of the build, including test execution, compilation and resource processing.
|
|
||||||
The mechanism itself is comparable to a pure function. That is, given some inputs such as source
|
|
||||||
files and environment parameters we know that the output is always going to be the same.
|
|
||||||
As a result, we can cache it and retrieve it based on a simple cryptographic hash of the inputs.
|
|
||||||
Build caching is supported natively by some build tools.
|
|
||||||
|
|
||||||
#### Improve CI builds with a remote build cache
|
|
||||||
|
|
||||||
When analyzing the role of a build cache it is important to take into account the granularity
|
|
||||||
of the changes that it caches. Imagine a full build for a project with 40 to 50 modules
|
|
||||||
which fails at the last step (deployment) because the staging environment is temporarily unavailable.
|
|
||||||
Although the vast majority of the build steps (potentially thousands) succeed,
|
|
||||||
the change can not be deployed to the staging environment.
|
|
||||||
Without a build cache one typically relies on a very complex CI configuration to reuse build step outputs
|
|
||||||
or would have to repeat the full build once the environment is available.
|
|
||||||
|
|
||||||
Some build tools don’t support incremental builds properly. For example, outputs of a build started
|
|
||||||
from scratch may vary when compared to subsequent builds that rely on the initial build’s output.
|
|
||||||
As a result, to preserve build integrity, it’s crucial to rebuild from scratch, or ‘cleanly,’ in this
|
|
||||||
scenario.
|
|
||||||
|
|
||||||
With a build cache, only the last step needs to be executed and the build can be re-triggered
|
|
||||||
when the environment is back online. This automatically saves all of the time and
|
|
||||||
resources required across the different build steps which were successfully executed.
|
|
||||||
Instead of executing the intermediate steps, the build tool pulls the outputs from the build cache,
|
|
||||||
avoiding a lot of redundant work
|
|
||||||
|
|
||||||
#### Share outputs with a remote build cache
|
|
||||||
|
|
||||||
One of the most important advantages of a remote build cache is the ability to share build outputs.
|
|
||||||
In most CI configurations, for example, a number of pipelines are created.
|
|
||||||
These may include one for building the sources, one for testing, one for publishing the outcomes
|
|
||||||
to a remote repository, and other pipelines to test on different platforms.
|
|
||||||
There are even situations where CI builds partially build a project (i.e. some modules and not others).
|
|
||||||
|
|
||||||
Most of those pipelines share a lot of intermediate build steps. All builds which perform testing
|
|
||||||
require the binaries to be ready. All publishing builds require all previous steps to be executed.
|
|
||||||
And because modern CI infrastructure means executing everything in containerized (isolated) environments,
|
|
||||||
significant resources are wasted by repeatedly building the same intermediate artifacts.
|
|
||||||
|
|
||||||
A remote build cache greatly reduces this overhead by orders of magnitudes because it provides a way
|
|
||||||
for all those pipelines to share their outputs. After all, there is no point recreating an output that
|
|
||||||
is already available in the cache.
|
|
||||||
|
|
||||||
Because there are inherent dependencies between software components of a build,
|
|
||||||
introducing a build cache dramatically reduces the impact of exploding a component into multiple pieces,
|
|
||||||
allowing for increased modularity without increased overhead.
|
|
||||||
|
|
||||||
#### Make local developers more efficient with remote build caches
|
|
||||||
|
|
||||||
It is common for different teams within a company to work on different modules of a single large
|
|
||||||
application. In this case, most teams don’t care about building the other parts of the software.
|
|
||||||
By introducing a remote cache developers immediately benefit from pre-built artifacts when checking out code.
|
|
||||||
Because it has already been built on CI, they don’t have to do it locally.
|
|
||||||
|
|
||||||
Introducing a remote cache is a huge benefit for those developers. Consider that a typical developer’s
|
|
||||||
day begins by performing a code checkout. Most likely the checked out code has already been built on CI.
|
|
||||||
Therefore, no time is wasted running the first build of the day. The remote cache provides all of the
|
|
||||||
intermediate artifacts needed. And, in the event local changes are made, the remote cache still leverages
|
|
||||||
partial cache hits for projects which are independent. As other developers in the organization request
|
|
||||||
CI builds, the remote cache continues to populate, increasing the likelihood of these remote cache hits
|
|
||||||
across team members.
|
|
||||||
|
|
58
build.gradle
58
build.gradle
@@ -1,12 +1,14 @@
|
|||||||
plugins {
|
plugins {
|
||||||
alias catalog.plugins.kotlin.jvm apply false
|
id 'java-library'
|
||||||
|
alias catalog.plugins.kotlin.jvm
|
||||||
alias catalog.plugins.sambal
|
alias catalog.plugins.sambal
|
||||||
alias catalog.plugins.lombok apply false
|
alias catalog.plugins.lombok
|
||||||
|
id 'maven-publish'
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
import org.jetbrains.kotlin.gradle.dsl.JvmTarget
|
|
||||||
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
|
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
|
||||||
|
import org.jetbrains.kotlin.gradle.dsl.JvmTarget
|
||||||
|
|
||||||
|
|
||||||
allprojects { subproject ->
|
allprojects { subproject ->
|
||||||
group = 'net.woggioni'
|
group = 'net.woggioni'
|
||||||
@@ -14,7 +16,9 @@ allprojects { subproject ->
|
|||||||
if(project.currentTag.isPresent()) {
|
if(project.currentTag.isPresent()) {
|
||||||
version = project.currentTag.map { it[0] }.get()
|
version = project.currentTag.map { it[0] }.get()
|
||||||
} else {
|
} else {
|
||||||
version = "${getProperty('rbcs.version')}-SNAPSHOT"
|
version = project.gitRevision.map { gitRevision ->
|
||||||
|
"${getProperty('gbcs.version')}.${gitRevision[0..10]}"
|
||||||
|
}.get()
|
||||||
}
|
}
|
||||||
|
|
||||||
repositories {
|
repositories {
|
||||||
@@ -22,6 +26,7 @@ allprojects { subproject ->
|
|||||||
url = getProperty('gitea.maven.url')
|
url = getProperty('gitea.maven.url')
|
||||||
content {
|
content {
|
||||||
includeModule 'net.woggioni', 'jwo'
|
includeModule 'net.woggioni', 'jwo'
|
||||||
|
includeModule 'net.woggioni', 'xmemcached'
|
||||||
includeGroup 'com.lys'
|
includeGroup 'com.lys'
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -43,12 +48,6 @@ allprojects { subproject ->
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
dependencies {
|
|
||||||
testImplementation catalog.junit.jupiter.api
|
|
||||||
testImplementation catalog.junit.jupiter.params
|
|
||||||
testRuntimeOnly catalog.junit.jupiter.engine
|
|
||||||
}
|
|
||||||
|
|
||||||
test {
|
test {
|
||||||
useJUnitPlatform()
|
useJUnitPlatform()
|
||||||
}
|
}
|
||||||
@@ -69,15 +68,6 @@ allprojects { subproject ->
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pluginManager.withPlugin('jacoco') {
|
|
||||||
test {
|
|
||||||
finalizedBy jacocoTestReport
|
|
||||||
}
|
|
||||||
jacocoTestReport {
|
|
||||||
dependsOn test
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pluginManager.withPlugin(catalog.plugins.kotlin.jvm.get().pluginId) {
|
pluginManager.withPlugin(catalog.plugins.kotlin.jvm.get().pluginId) {
|
||||||
tasks.withType(KotlinCompile.class) {
|
tasks.withType(KotlinCompile.class) {
|
||||||
compilerOptions.jvmTarget = JvmTarget.JVM_21
|
compilerOptions.jvmTarget = JvmTarget.JVM_21
|
||||||
@@ -112,6 +102,34 @@ allprojects { subproject ->
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
dependencies {
|
||||||
|
implementation catalog.jwo
|
||||||
|
implementation catalog.slf4j.api
|
||||||
|
implementation catalog.netty.codec.http
|
||||||
|
|
||||||
|
api project('gbcs-base')
|
||||||
|
api project('gbcs-api')
|
||||||
|
|
||||||
|
// runtimeOnly catalog.slf4j.jdk14
|
||||||
|
testRuntimeOnly catalog.logback.classic
|
||||||
|
|
||||||
|
testImplementation catalog.bcprov.jdk18on
|
||||||
|
testImplementation catalog.bcpkix.jdk18on
|
||||||
|
testImplementation catalog.junit.jupiter.api
|
||||||
|
testImplementation catalog.junit.jupiter.params
|
||||||
|
testRuntimeOnly catalog.junit.jupiter.engine
|
||||||
|
|
||||||
|
testRuntimeOnly project("gbcs-memcached")
|
||||||
|
}
|
||||||
|
|
||||||
|
publishing {
|
||||||
|
publications {
|
||||||
|
maven(MavenPublication) {
|
||||||
|
from(components["java"])
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
tasks.register('version') {
|
tasks.register('version') {
|
||||||
doLast {
|
doLast {
|
||||||
println("VERSION=$version")
|
println("VERSION=$version")
|
||||||
|
13
conf/gbcs-memcached.xml
Normal file
13
conf/gbcs-memcached.xml
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||||
|
<gbcs:server useVirtualThreads="true" xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
xmlns:gbcs="urn:net.woggioni.gbcs"
|
||||||
|
xmlns:gbcs-memcached="urn:net.woggioni.gbcs-memcached"
|
||||||
|
xs:schemaLocation="urn:net.woggioni.gbcs-memcached jpms://net.woggioni.gbcs.memcached/net/woggioni/gbcs/memcached/schema/gbcs-memcached.xsd urn:net.woggioni.gbcs jpms://net.woggioni.gbcs/net/woggioni/gbcs/schema/gbcs.xsd">
|
||||||
|
<bind host="0.0.0.0" port="13080" />
|
||||||
|
<cache xs:type="gbcs-memcached:memcachedCacheType" max-age="P7D" max-size="16777216" compression-mode="zip">
|
||||||
|
<server host="memcached" port="11211"/>
|
||||||
|
</cache>
|
||||||
|
<authentication>
|
||||||
|
<none/>
|
||||||
|
</authentication>
|
||||||
|
</gbcs:server>
|
@@ -1,18 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8" ?>
|
|
||||||
<!DOCTYPE configuration>
|
|
||||||
|
|
||||||
<configuration>
|
|
||||||
<import class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"/>
|
|
||||||
<import class="ch.qos.logback.core.ConsoleAppender"/>
|
|
||||||
|
|
||||||
<appender name="console" class="ConsoleAppender">
|
|
||||||
<target>System.err</target>
|
|
||||||
<encoder class="PatternLayoutEncoder">
|
|
||||||
<pattern>%d [%highlight(%-5level)] \(%thread\) %logger{36} -%kvp- %msg %n</pattern>
|
|
||||||
</encoder>
|
|
||||||
</appender>
|
|
||||||
|
|
||||||
<root level="info">
|
|
||||||
<appender-ref ref="console"/>
|
|
||||||
</root>
|
|
||||||
</configuration>
|
|
36
docker-compose.yml
Normal file
36
docker-compose.yml
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
networks:
|
||||||
|
default:
|
||||||
|
external: false
|
||||||
|
ipam:
|
||||||
|
driver: default
|
||||||
|
config:
|
||||||
|
- subnet: 172.118.0.0/16
|
||||||
|
ip_range: 172.118.0.0/16
|
||||||
|
gateway: 172.118.0.254
|
||||||
|
services:
|
||||||
|
gbcs:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
container_name: gbcs
|
||||||
|
restart: unless-stopped
|
||||||
|
ports:
|
||||||
|
- "127.0.0.1:8080:13080"
|
||||||
|
- "[::1]:8080:13080"
|
||||||
|
depends_on:
|
||||||
|
memcached:
|
||||||
|
condition: service_started
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
cpus: "2.00"
|
||||||
|
memory: 256M
|
||||||
|
memcached:
|
||||||
|
image: memcached
|
||||||
|
container_name: memcached
|
||||||
|
restart: unless-stopped
|
||||||
|
command: -I 64m -m 900m
|
||||||
|
deploy:
|
||||||
|
resources:
|
||||||
|
limits:
|
||||||
|
cpus: "1.00"
|
||||||
|
memory: 1G
|
@@ -1,17 +1,21 @@
|
|||||||
FROM eclipse-temurin:21-jre-alpine AS base-release
|
FROM alpine:latest AS base-release
|
||||||
|
RUN --mount=type=cache,target=/var/cache/apk apk update
|
||||||
|
RUN --mount=type=cache,target=/var/cache/apk apk add openjdk21-jre
|
||||||
RUN adduser -D luser
|
RUN adduser -D luser
|
||||||
USER luser
|
USER luser
|
||||||
WORKDIR /home/luser
|
WORKDIR /home/luser
|
||||||
|
|
||||||
FROM base-release AS release
|
FROM base-release AS release
|
||||||
ADD rbcs-cli-envelope-*.jar rbcs.jar
|
ADD gbcs-cli-envelope-*.jar gbcs.jar
|
||||||
ENTRYPOINT ["java", "-XX:+UseSerialGC", "-XX:GCTimeRatio=24", "-jar", "/home/luser/rbcs.jar", "server"]
|
ENTRYPOINT ["java", "-jar", "/home/luser/gbcs.jar"]
|
||||||
|
|
||||||
FROM base-release AS release-memcache
|
FROM base-release AS release-memcached
|
||||||
ADD --chown=luser:luser rbcs-cli-envelope-*.jar rbcs.jar
|
ADD --chown=luser:luser gbcs-cli-envelope-*.jar gbcs.jar
|
||||||
RUN mkdir plugins
|
RUN mkdir plugins
|
||||||
WORKDIR /home/luser/plugins
|
WORKDIR /home/luser/plugins
|
||||||
RUN --mount=type=bind,source=.,target=/build/distributions tar -xf /build/distributions/rbcs-server-memcache*.tar
|
RUN --mount=type=bind,source=.,target=/build/distributions tar -xf /build/distributions/gbcs-memcached*.tar
|
||||||
WORKDIR /home/luser
|
WORKDIR /home/luser
|
||||||
ADD logback.xml .
|
ENTRYPOINT ["java", "-jar", "/home/luser/gbcs.jar"]
|
||||||
ENTRYPOINT ["java", "-Dlogback.configurationFile=logback.xml", "-XX:+UseSerialGC", "-XX:GCTimeRatio=24", "-jar", "/home/luser/rbcs.jar", "server"]
|
|
||||||
|
FROM release-memcached as compose
|
||||||
|
COPY --chown=luser:luser conf/gbcs-memcached.xml /home/luser/.config/gbcs/gbcs.xml
|
@@ -18,8 +18,8 @@ configurations {
|
|||||||
}
|
}
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
docker project(path: ':rbcs-cli', configuration: 'release')
|
docker project(path: ':gbcs-cli', configuration: 'release')
|
||||||
docker project(path: ':rbcs-server-memcache', configuration: 'release')
|
docker project(path: ':gbcs-memcached', configuration: 'release')
|
||||||
}
|
}
|
||||||
|
|
||||||
Provider<Task> cleanTaskProvider = tasks.named(BasePlugin.CLEAN_TASK_NAME) {}
|
Provider<Task> cleanTaskProvider = tasks.named(BasePlugin.CLEAN_TASK_NAME) {}
|
||||||
@@ -30,39 +30,38 @@ Provider<Copy> prepareDockerBuild = tasks.register('prepareDockerBuild', Copy) {
|
|||||||
into project.layout.buildDirectory.file('docker')
|
into project.layout.buildDirectory.file('docker')
|
||||||
from(configurations.docker)
|
from(configurations.docker)
|
||||||
from(file('Dockerfile'))
|
from(file('Dockerfile'))
|
||||||
from(rootProject.file('conf')) {
|
|
||||||
include 'logback.xml'
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Provider<DockerBuildImage> dockerBuild = tasks.register('dockerBuildImage', DockerBuildImage) {
|
Provider<DockerBuildImage> dockerBuild = tasks.register('dockerBuildImage', DockerBuildImage) {
|
||||||
group = 'docker'
|
group = 'docker'
|
||||||
dependsOn prepareDockerBuild
|
dependsOn prepareDockerBuild
|
||||||
images.add('gitea.woggioni.net/woggioni/rbcs:latest')
|
images.add('gitea.woggioni.net/woggioni/gbcs:latest')
|
||||||
images.add("gitea.woggioni.net/woggioni/rbcs:${version}")
|
images.add("gitea.woggioni.net/woggioni/gbcs:${version}")
|
||||||
}
|
}
|
||||||
|
|
||||||
Provider<DockerTagImage> dockerTag = tasks.register('dockerTagImage', DockerTagImage) {
|
Provider<DockerTagImage> dockerTag = tasks.register('dockerTagImage', DockerTagImage) {
|
||||||
group = 'docker'
|
group = 'docker'
|
||||||
repository = 'gitea.woggioni.net/woggioni/rbcs'
|
repository = 'gitea.woggioni.net/woggioni/gbcs'
|
||||||
imageId = 'gitea.woggioni.net/woggioni/rbcs:latest'
|
imageId = 'gitea.woggioni.net/woggioni/gbcs:latest'
|
||||||
tag = version
|
tag = version
|
||||||
}
|
}
|
||||||
|
|
||||||
Provider<DockerTagImage> dockerTagMemcache = tasks.register('dockerTagMemcacheImage', DockerTagImage) {
|
Provider<DockerTagImage> dockerTagMemcached = tasks.register('dockerTagMemcachedImage', DockerTagImage) {
|
||||||
group = 'docker'
|
group = 'docker'
|
||||||
repository = 'gitea.woggioni.net/woggioni/rbcs'
|
repository = 'gitea.woggioni.net/woggioni/gbcs'
|
||||||
imageId = 'gitea.woggioni.net/woggioni/rbcs:memcache'
|
imageId = 'gitea.woggioni.net/woggioni/gbcs:memcached'
|
||||||
tag = "${version}-memcache"
|
tag = "${version}-memcached"
|
||||||
}
|
}
|
||||||
|
|
||||||
Provider<DockerPushImage> dockerPush = tasks.register('dockerPushImage', DockerPushImage) {
|
Provider<DockerPushImage> dockerPush = tasks.register('dockerPushImage', DockerPushImage) {
|
||||||
group = 'docker'
|
group = 'docker'
|
||||||
dependsOn dockerTag, dockerTagMemcache
|
dependsOn dockerTag, dockerTagMemcached
|
||||||
registryCredentials {
|
registryCredentials {
|
||||||
url = getProperty('docker.registry.url')
|
url = getProperty('docker.registry.url')
|
||||||
username = 'woggioni'
|
username = 'woggioni'
|
||||||
password = System.getenv().get("PUBLISHER_TOKEN")
|
password = System.getenv().get("PUBLISHER_TOKEN")
|
||||||
}
|
}
|
||||||
images = [dockerTag.flatMap{ it.tag }, dockerTagMemcache.flatMap{ it.tag }]
|
images = [dockerTag.flatMap{ it.tag }, dockerTagMemcached.flatMap{ it.tag }]
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@@ -5,9 +5,6 @@ plugins {
|
|||||||
}
|
}
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
api catalog.netty.common
|
|
||||||
api catalog.netty.buffer
|
|
||||||
api catalog.netty.handler
|
|
||||||
}
|
}
|
||||||
|
|
||||||
publishing {
|
publishing {
|
6
gbcs-api/src/main/java/module-info.java
Normal file
6
gbcs-api/src/main/java/module-info.java
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
module net.woggioni.gbcs.api {
|
||||||
|
requires static lombok;
|
||||||
|
requires java.xml;
|
||||||
|
exports net.woggioni.gbcs.api;
|
||||||
|
exports net.woggioni.gbcs.api.exception;
|
||||||
|
}
|
11
gbcs-api/src/main/java/net/woggioni/gbcs/api/Cache.java
Normal file
11
gbcs-api/src/main/java/net/woggioni/gbcs/api/Cache.java
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
package net.woggioni.gbcs.api;
|
||||||
|
|
||||||
|
import net.woggioni.gbcs.api.exception.ContentTooLargeException;
|
||||||
|
|
||||||
|
import java.nio.channels.ReadableByteChannel;
|
||||||
|
|
||||||
|
public interface Cache extends AutoCloseable {
|
||||||
|
ReadableByteChannel get(String key);
|
||||||
|
|
||||||
|
void put(String key, byte[] content) throws ContentTooLargeException;
|
||||||
|
}
|
@@ -1,4 +1,4 @@
|
|||||||
package net.woggioni.rbcs.api;
|
package net.woggioni.gbcs.api;
|
||||||
|
|
||||||
import org.w3c.dom.Document;
|
import org.w3c.dom.Document;
|
||||||
import org.w3c.dom.Element;
|
import org.w3c.dom.Element;
|
@@ -1,13 +1,11 @@
|
|||||||
package net.woggioni.rbcs.api;
|
package net.woggioni.gbcs.api;
|
||||||
|
|
||||||
|
|
||||||
import lombok.EqualsAndHashCode;
|
import lombok.EqualsAndHashCode;
|
||||||
import lombok.NonNull;
|
|
||||||
import lombok.Value;
|
import lombok.Value;
|
||||||
|
|
||||||
import java.nio.file.Path;
|
import java.nio.file.Path;
|
||||||
import java.security.cert.X509Certificate;
|
import java.security.cert.X509Certificate;
|
||||||
import java.time.Duration;
|
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.util.Set;
|
import java.util.Set;
|
||||||
import java.util.stream.Collectors;
|
import java.util.stream.Collectors;
|
||||||
@@ -16,46 +14,19 @@ import java.util.stream.Collectors;
|
|||||||
public class Configuration {
|
public class Configuration {
|
||||||
String host;
|
String host;
|
||||||
int port;
|
int port;
|
||||||
int incomingConnectionsBacklogSize;
|
|
||||||
String serverPath;
|
String serverPath;
|
||||||
@NonNull
|
|
||||||
EventExecutor eventExecutor;
|
|
||||||
@NonNull
|
|
||||||
Connection connection;
|
|
||||||
Map<String, User> users;
|
Map<String, User> users;
|
||||||
Map<String, Group> groups;
|
Map<String, Group> groups;
|
||||||
Cache cache;
|
Cache cache;
|
||||||
Authentication authentication;
|
Authentication authentication;
|
||||||
Tls tls;
|
Tls tls;
|
||||||
|
boolean useVirtualThread;
|
||||||
@Value
|
|
||||||
public static class EventExecutor {
|
|
||||||
boolean useVirtualThreads;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Value
|
|
||||||
public static class Connection {
|
|
||||||
Duration idleTimeout;
|
|
||||||
Duration readIdleTimeout;
|
|
||||||
Duration writeIdleTimeout;
|
|
||||||
int maxRequestSize;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Value
|
|
||||||
public static class Quota {
|
|
||||||
long calls;
|
|
||||||
Duration period;
|
|
||||||
long initialAvailableCalls;
|
|
||||||
long maxAvailableCalls;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Value
|
@Value
|
||||||
public static class Group {
|
public static class Group {
|
||||||
@EqualsAndHashCode.Include
|
@EqualsAndHashCode.Include
|
||||||
String name;
|
String name;
|
||||||
Set<Role> roles;
|
Set<Role> roles;
|
||||||
Quota groupQuota;
|
|
||||||
Quota userQuota;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Value
|
@Value
|
||||||
@@ -64,7 +35,7 @@ public class Configuration {
|
|||||||
String name;
|
String name;
|
||||||
String password;
|
String password;
|
||||||
Set<Group> groups;
|
Set<Group> groups;
|
||||||
Quota quota;
|
|
||||||
|
|
||||||
public Set<Role> getRoles() {
|
public Set<Role> getRoles() {
|
||||||
return groups.stream()
|
return groups.stream()
|
||||||
@@ -87,6 +58,7 @@ public class Configuration {
|
|||||||
public static class Tls {
|
public static class Tls {
|
||||||
KeyStore keyStore;
|
KeyStore keyStore;
|
||||||
TrustStore trustStore;
|
TrustStore trustStore;
|
||||||
|
boolean verifyClients;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Value
|
@Value
|
||||||
@@ -102,7 +74,6 @@ public class Configuration {
|
|||||||
Path file;
|
Path file;
|
||||||
String password;
|
String password;
|
||||||
boolean checkCertificateStatus;
|
boolean checkCertificateStatus;
|
||||||
boolean requireClientCertificate;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Value
|
@Value
|
||||||
@@ -122,7 +93,7 @@ public class Configuration {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public interface Cache {
|
public interface Cache {
|
||||||
CacheHandlerFactory materialize();
|
net.woggioni.gbcs.api.Cache materialize();
|
||||||
String getNamespaceURI();
|
String getNamespaceURI();
|
||||||
String getTypeName();
|
String getTypeName();
|
||||||
}
|
}
|
||||||
@@ -130,28 +101,24 @@ public class Configuration {
|
|||||||
public static Configuration of(
|
public static Configuration of(
|
||||||
String host,
|
String host,
|
||||||
int port,
|
int port,
|
||||||
int incomingConnectionsBacklogSize,
|
|
||||||
String serverPath,
|
String serverPath,
|
||||||
EventExecutor eventExecutor,
|
|
||||||
Connection connection,
|
|
||||||
Map<String, User> users,
|
Map<String, User> users,
|
||||||
Map<String, Group> groups,
|
Map<String, Group> groups,
|
||||||
Cache cache,
|
Cache cache,
|
||||||
Authentication authentication,
|
Authentication authentication,
|
||||||
Tls tls
|
Tls tls,
|
||||||
|
boolean useVirtualThread
|
||||||
) {
|
) {
|
||||||
return new Configuration(
|
return new Configuration(
|
||||||
host,
|
host,
|
||||||
port,
|
port,
|
||||||
incomingConnectionsBacklogSize,
|
|
||||||
serverPath != null && !serverPath.isEmpty() && !serverPath.equals("/") ? serverPath : null,
|
serverPath != null && !serverPath.isEmpty() && !serverPath.equals("/") ? serverPath : null,
|
||||||
eventExecutor,
|
|
||||||
connection,
|
|
||||||
users,
|
users,
|
||||||
groups,
|
groups,
|
||||||
cache,
|
cache,
|
||||||
authentication,
|
authentication,
|
||||||
tls
|
tls,
|
||||||
|
useVirtualThread
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
}
|
}
|
@@ -1,4 +1,4 @@
|
|||||||
package net.woggioni.rbcs.api;
|
package net.woggioni.gbcs.api;
|
||||||
|
|
||||||
public enum Role {
|
public enum Role {
|
||||||
Reader, Writer
|
Reader, Writer
|
@@ -1,6 +1,6 @@
|
|||||||
package net.woggioni.rbcs.api.exception;
|
package net.woggioni.gbcs.api.exception;
|
||||||
|
|
||||||
public class ContentTooLargeException extends RbcsException {
|
public class ContentTooLargeException extends GbcsException {
|
||||||
public ContentTooLargeException(String message, Throwable cause) {
|
public ContentTooLargeException(String message, Throwable cause) {
|
||||||
super(message, cause);
|
super(message, cause);
|
||||||
}
|
}
|
@@ -0,0 +1,7 @@
|
|||||||
|
package net.woggioni.gbcs.api.exception;
|
||||||
|
|
||||||
|
public class GbcsException extends RuntimeException {
|
||||||
|
public GbcsException(String message, Throwable cause) {
|
||||||
|
super(message, cause);
|
||||||
|
}
|
||||||
|
}
|
@@ -6,10 +6,8 @@ plugins {
|
|||||||
}
|
}
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
implementation project(':rbcs-api')
|
compileOnly project(':gbcs-api')
|
||||||
implementation catalog.slf4j.api
|
compileOnly catalog.slf4j.api
|
||||||
implementation catalog.jwo
|
|
||||||
implementation catalog.netty.buffer
|
|
||||||
}
|
}
|
||||||
|
|
||||||
publishing {
|
publishing {
|
8
gbcs-base/src/main/java/module-info.java
Normal file
8
gbcs-base/src/main/java/module-info.java
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
module net.woggioni.gbcs.base {
|
||||||
|
requires java.xml;
|
||||||
|
requires java.logging;
|
||||||
|
requires org.slf4j;
|
||||||
|
requires kotlin.stdlib;
|
||||||
|
|
||||||
|
exports net.woggioni.gbcs.base;
|
||||||
|
}
|
12
gbcs-base/src/main/kotlin/net/woggioni/gbcs/base/GBCS.kt
Normal file
12
gbcs-base/src/main/kotlin/net/woggioni/gbcs/base/GBCS.kt
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
package net.woggioni.gbcs.base
|
||||||
|
|
||||||
|
import java.net.URI
|
||||||
|
import java.net.URL
|
||||||
|
|
||||||
|
object GBCS {
|
||||||
|
fun String.toUrl() : URL = URL.of(URI(this), null)
|
||||||
|
|
||||||
|
const val GBCS_NAMESPACE_URI: String = "urn:net.woggioni.gbcs"
|
||||||
|
const val GBCS_PREFIX: String = "gbcs"
|
||||||
|
const val XML_SCHEMA_NAMESPACE_URI = "http://www.w3.org/2001/XMLSchema-instance"
|
||||||
|
}
|
@@ -1,18 +1,19 @@
|
|||||||
package net.woggioni.rbcs.common
|
package net.woggioni.gbcs.base
|
||||||
|
|
||||||
import java.io.IOException
|
import java.io.IOException
|
||||||
import java.io.InputStream
|
import java.io.InputStream
|
||||||
import java.net.URL
|
import java.net.URL
|
||||||
import java.net.URLConnection
|
import java.net.URLConnection
|
||||||
import java.net.URLStreamHandler
|
import java.net.URLStreamHandler
|
||||||
import java.net.spi.URLStreamHandlerProvider
|
import java.net.URLStreamHandlerFactory
|
||||||
|
import java.util.Optional
|
||||||
import java.util.concurrent.atomic.AtomicBoolean
|
import java.util.concurrent.atomic.AtomicBoolean
|
||||||
import java.util.stream.Collectors
|
import java.util.stream.Collectors
|
||||||
|
|
||||||
|
|
||||||
class RbcsUrlStreamHandlerFactory : URLStreamHandlerProvider() {
|
class GbcsUrlStreamHandlerFactory : URLStreamHandlerFactory {
|
||||||
|
|
||||||
private class ClasspathHandler(private val classLoader: ClassLoader = RbcsUrlStreamHandlerFactory::class.java.classLoader) :
|
private class ClasspathHandler(private val classLoader: ClassLoader = GbcsUrlStreamHandlerFactory::class.java.classLoader) :
|
||||||
URLStreamHandler() {
|
URLStreamHandler() {
|
||||||
|
|
||||||
override fun openConnection(u: URL): URLConnection? {
|
override fun openConnection(u: URL): URLConnection? {
|
||||||
@@ -35,17 +36,13 @@ class RbcsUrlStreamHandlerFactory : URLStreamHandlerProvider() {
|
|||||||
private class JpmsHandler : URLStreamHandler() {
|
private class JpmsHandler : URLStreamHandler() {
|
||||||
|
|
||||||
override fun openConnection(u: URL): URLConnection {
|
override fun openConnection(u: URL): URLConnection {
|
||||||
val moduleName = u.host
|
|
||||||
val thisModule = javaClass.module
|
val thisModule = javaClass.module
|
||||||
val sourceModule =
|
val sourceModule = Optional.ofNullable(thisModule)
|
||||||
thisModule
|
.map { obj: Module -> obj.layer }
|
||||||
?.let(Module::getLayer)
|
.flatMap { layer: ModuleLayer ->
|
||||||
?.let { layer: ModuleLayer ->
|
val moduleName = u.host
|
||||||
layer.findModule(moduleName).orElse(null)
|
layer.findModule(moduleName)
|
||||||
} ?: if(thisModule.layer == null) {
|
}.orElse(thisModule)
|
||||||
thisModule
|
|
||||||
} else throw ModuleNotFoundException("Module '$moduleName' not found")
|
|
||||||
|
|
||||||
return JpmsResourceURLConnection(u, sourceModule)
|
return JpmsResourceURLConnection(u, sourceModule)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -56,9 +53,7 @@ class RbcsUrlStreamHandlerFactory : URLStreamHandlerProvider() {
|
|||||||
|
|
||||||
@Throws(IOException::class)
|
@Throws(IOException::class)
|
||||||
override fun getInputStream(): InputStream {
|
override fun getInputStream(): InputStream {
|
||||||
val resource = getURL().path
|
return module.getResourceAsStream(getURL().path)
|
||||||
return module.getResourceAsStream(resource)
|
|
||||||
?: throw ResourceNotFoundException("Resource '$resource' not found in module '${module.name}'")
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -87,12 +82,12 @@ class RbcsUrlStreamHandlerFactory : URLStreamHandlerProvider() {
|
|||||||
private val installed = AtomicBoolean(false)
|
private val installed = AtomicBoolean(false)
|
||||||
fun install() {
|
fun install() {
|
||||||
if (!installed.getAndSet(true)) {
|
if (!installed.getAndSet(true)) {
|
||||||
URL.setURLStreamHandlerFactory(RbcsUrlStreamHandlerFactory())
|
URL.setURLStreamHandlerFactory(GbcsUrlStreamHandlerFactory())
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private val packageMap: Map<String, List<Module>> by lazy {
|
private val packageMap: Map<String, List<Module>> by lazy {
|
||||||
RbcsUrlStreamHandlerFactory::class.java.module.layer
|
GbcsUrlStreamHandlerFactory::class.java.module.layer
|
||||||
.modules()
|
.modules()
|
||||||
.stream()
|
.stream()
|
||||||
.flatMap { m: Module ->
|
.flatMap { m: Module ->
|
@@ -1,4 +1,4 @@
|
|||||||
package net.woggioni.rbcs.common
|
package net.woggioni.gbcs.base
|
||||||
|
|
||||||
|
|
||||||
data class HostAndPort(val host: String, val port: Int = 0) {
|
data class HostAndPort(val host: String, val port: Int = 0) {
|
104
gbcs-base/src/main/kotlin/net/woggioni/gbcs/base/Logging.kt
Normal file
104
gbcs-base/src/main/kotlin/net/woggioni/gbcs/base/Logging.kt
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
package net.woggioni.gbcs.base
|
||||||
|
|
||||||
|
import org.slf4j.Logger
|
||||||
|
import org.slf4j.LoggerFactory
|
||||||
|
import java.nio.file.Files
|
||||||
|
import java.nio.file.Path
|
||||||
|
import java.util.logging.LogManager
|
||||||
|
|
||||||
|
inline fun <reified T> T.contextLogger() = LoggerFactory.getLogger(T::class.java)
|
||||||
|
|
||||||
|
inline fun Logger.traceParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||||
|
if(isTraceEnabled) {
|
||||||
|
val (format, params) = messageBuilder()
|
||||||
|
trace(format, params)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.debugParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||||
|
if(isDebugEnabled) {
|
||||||
|
val (format, params) = messageBuilder()
|
||||||
|
info(format, params)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.infoParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||||
|
if(isInfoEnabled) {
|
||||||
|
val (format, params) = messageBuilder()
|
||||||
|
info(format, params)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.warnParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||||
|
if(isWarnEnabled) {
|
||||||
|
val (format, params) = messageBuilder()
|
||||||
|
warn(format, params)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.errorParam(messageBuilder : () -> Pair<String, Array<Any>>) {
|
||||||
|
if(isErrorEnabled) {
|
||||||
|
val (format, params) = messageBuilder()
|
||||||
|
error(format, params)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
inline fun log(log : Logger,
|
||||||
|
filter : Logger.() -> Boolean,
|
||||||
|
loggerMethod : Logger.(String) -> Unit, messageBuilder : () -> String) {
|
||||||
|
if(log.filter()) {
|
||||||
|
log.loggerMethod(messageBuilder())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.trace(messageBuilder : () -> String) {
|
||||||
|
if(isTraceEnabled) {
|
||||||
|
trace(messageBuilder())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.debug(messageBuilder : () -> String) {
|
||||||
|
if(isDebugEnabled) {
|
||||||
|
debug(messageBuilder())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.info(messageBuilder : () -> String) {
|
||||||
|
if(isInfoEnabled) {
|
||||||
|
info(messageBuilder())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.warn(messageBuilder : () -> String) {
|
||||||
|
if(isWarnEnabled) {
|
||||||
|
warn(messageBuilder())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
inline fun Logger.error(messageBuilder : () -> String) {
|
||||||
|
if(isErrorEnabled) {
|
||||||
|
error(messageBuilder())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class LoggingConfig {
|
||||||
|
|
||||||
|
init {
|
||||||
|
val logManager = LogManager.getLogManager()
|
||||||
|
System.getProperty("log.config.source")?.let withSource@ { source ->
|
||||||
|
val urls = LoggingConfig::class.java.classLoader.getResources(source)
|
||||||
|
while(urls.hasMoreElements()) {
|
||||||
|
val url = urls.nextElement()
|
||||||
|
url.openStream().use { inputStream ->
|
||||||
|
logManager.readConfiguration(inputStream)
|
||||||
|
return@withSource
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Path.of(source).takeIf(Files::exists)
|
||||||
|
?.let(Files::newInputStream)
|
||||||
|
?.use(logManager::readConfiguration)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1,46 @@
|
|||||||
|
package net.woggioni.gbcs.base
|
||||||
|
|
||||||
|
import java.security.SecureRandom
|
||||||
|
import java.security.spec.KeySpec
|
||||||
|
import java.util.Base64
|
||||||
|
import javax.crypto.SecretKeyFactory
|
||||||
|
import javax.crypto.spec.PBEKeySpec
|
||||||
|
|
||||||
|
object PasswordSecurity {
|
||||||
|
private const val KEY_LENGTH = 256
|
||||||
|
|
||||||
|
private fun concat(arr1: ByteArray, arr2: ByteArray): ByteArray {
|
||||||
|
val result = ByteArray(arr1.size + arr2.size)
|
||||||
|
var j = 0
|
||||||
|
for(element in arr1) {
|
||||||
|
result[j] = element
|
||||||
|
j += 1
|
||||||
|
}
|
||||||
|
for(element in arr2) {
|
||||||
|
result[j] = element
|
||||||
|
j += 1
|
||||||
|
}
|
||||||
|
return result
|
||||||
|
}
|
||||||
|
|
||||||
|
fun hashPassword(password : String, salt : String? = null) : String {
|
||||||
|
val actualSalt = salt?.let(Base64.getDecoder()::decode) ?: SecureRandom().run {
|
||||||
|
val result = ByteArray(16)
|
||||||
|
nextBytes(result)
|
||||||
|
result
|
||||||
|
}
|
||||||
|
val spec: KeySpec = PBEKeySpec(password.toCharArray(), actualSalt, 10, KEY_LENGTH)
|
||||||
|
val factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1")
|
||||||
|
val hash = factory.generateSecret(spec).encoded
|
||||||
|
return String(Base64.getEncoder().encode(concat(hash, actualSalt)))
|
||||||
|
}
|
||||||
|
|
||||||
|
fun decodePasswordHash(passwordHash : String) : Pair<ByteArray, ByteArray> {
|
||||||
|
val decoded = Base64.getDecoder().decode(passwordHash)
|
||||||
|
val hash = ByteArray(KEY_LENGTH / 8)
|
||||||
|
val salt = ByteArray(decoded.size - KEY_LENGTH / 8)
|
||||||
|
System.arraycopy(decoded, 0, hash, 0, hash.size)
|
||||||
|
System.arraycopy(decoded, hash.size, salt, 0, salt.size)
|
||||||
|
return hash to salt
|
||||||
|
}
|
||||||
|
}
|
@@ -1,7 +1,6 @@
|
|||||||
package net.woggioni.rbcs.common
|
package net.woggioni.gbcs.base
|
||||||
|
|
||||||
import net.woggioni.jwo.JWO
|
import org.slf4j.LoggerFactory
|
||||||
import org.slf4j.event.Level
|
|
||||||
import org.w3c.dom.Document
|
import org.w3c.dom.Document
|
||||||
import org.w3c.dom.Element
|
import org.w3c.dom.Element
|
||||||
import org.w3c.dom.Node
|
import org.w3c.dom.Node
|
||||||
@@ -78,39 +77,34 @@ class Xml(val doc: Document, val element: Element) {
|
|||||||
class ErrorHandler(private val fileURL: URL) : ErrHandler {
|
class ErrorHandler(private val fileURL: URL) : ErrHandler {
|
||||||
|
|
||||||
companion object {
|
companion object {
|
||||||
private val log = createLogger<ErrorHandler>()
|
private val log = LoggerFactory.getLogger(ErrorHandler::class.java)
|
||||||
}
|
}
|
||||||
|
|
||||||
override fun warning(ex: SAXParseException)= err(ex, Level.WARN)
|
override fun warning(ex: SAXParseException) {
|
||||||
|
log.warn(
|
||||||
private fun err(ex: SAXParseException, level: Level) {
|
"Problem at {}:{}:{} parsing deployment configuration: {}",
|
||||||
log.log(level) {
|
fileURL, ex.lineNumber, ex.columnNumber, ex.message
|
||||||
"Problem at ${fileURL}:${ex.lineNumber}:${ex.columnNumber} parsing deployment configuration: ${ex.message}"
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
override fun error(ex: SAXParseException) {
|
||||||
|
log.error(
|
||||||
|
"Problem at {}:{}:{} parsing deployment configuration: {}",
|
||||||
|
fileURL, ex.lineNumber, ex.columnNumber, ex.message
|
||||||
|
)
|
||||||
throw ex
|
throw ex
|
||||||
}
|
}
|
||||||
|
|
||||||
override fun error(ex: SAXParseException) = err(ex, Level.ERROR)
|
override fun fatalError(ex: SAXParseException) {
|
||||||
override fun fatalError(ex: SAXParseException) = err(ex, Level.ERROR)
|
log.error(
|
||||||
|
"Problem at {}:{}:{} parsing deployment configuration: {}",
|
||||||
|
fileURL, ex.lineNumber, ex.columnNumber, ex.message
|
||||||
|
)
|
||||||
|
throw ex
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
companion object {
|
companion object {
|
||||||
private val dictMap: Map<String, Map<String, Any>> = sequenceOf(
|
|
||||||
"env" to System.getenv().asSequence().map { (k, v) -> k to (v as Any) }.toMap(),
|
|
||||||
"sys" to System.getProperties().asSequence().map { (k, v) -> k as String to (v as Any) }.toMap()
|
|
||||||
).toMap()
|
|
||||||
|
|
||||||
private fun renderConfigurationTemplate(template: String): String {
|
|
||||||
return JWO.renderTemplate(template, emptyMap(), dictMap).replace("$$", "$")
|
|
||||||
}
|
|
||||||
|
|
||||||
fun Element.renderAttribute(name : String, namespaceURI: String? = null) = if(namespaceURI == null) {
|
|
||||||
getAttribute(name)
|
|
||||||
} else {
|
|
||||||
getAttributeNS(name, namespaceURI)
|
|
||||||
}.takeIf(String::isNotEmpty)?.let(Companion::renderConfigurationTemplate)
|
|
||||||
|
|
||||||
|
|
||||||
fun Element.asIterable() = Iterable { ElementIterator(this, null) }
|
fun Element.asIterable() = Iterable { ElementIterator(this, null) }
|
||||||
fun NodeList.asIterable() = Iterable { NodeListIterator(this) }
|
fun NodeList.asIterable() = Iterable { NodeListIterator(this) }
|
||||||
|
|
||||||
@@ -152,8 +146,8 @@ class Xml(val doc: Document, val element: Element) {
|
|||||||
dbf.isExpandEntityReferences = true
|
dbf.isExpandEntityReferences = true
|
||||||
dbf.isIgnoringComments = true
|
dbf.isIgnoringComments = true
|
||||||
dbf.isNamespaceAware = true
|
dbf.isNamespaceAware = true
|
||||||
dbf.isValidating = schemaResourceURL == null
|
dbf.isValidating = false
|
||||||
dbf.setFeature("http://apache.org/xml/features/validation/schema", true)
|
dbf.setFeature("http://apache.org/xml/features/validation/schema", true);
|
||||||
schemaResourceURL?.let {
|
schemaResourceURL?.let {
|
||||||
dbf.schema = getSchema(it)
|
dbf.schema = getSchema(it)
|
||||||
}
|
}
|
||||||
@@ -189,12 +183,7 @@ class Xml(val doc: Document, val element: Element) {
|
|||||||
transformer.transform(source, result)
|
transformer.transform(source, result)
|
||||||
}
|
}
|
||||||
|
|
||||||
fun of(
|
fun of(namespaceURI: String, qualifiedName: String, schemaResourceURL: URL? = null, cb: Xml.(el: Element) -> Unit): Document {
|
||||||
namespaceURI: String,
|
|
||||||
qualifiedName: String,
|
|
||||||
schemaResourceURL: URL? = null,
|
|
||||||
cb: Xml.(el: Element) -> Unit
|
|
||||||
): Document {
|
|
||||||
val dbf = newDocumentBuilderFactory(schemaResourceURL)
|
val dbf = newDocumentBuilderFactory(schemaResourceURL)
|
||||||
val db = dbf.newDocumentBuilder()
|
val db = dbf.newDocumentBuilder()
|
||||||
val doc = db.newDocument()
|
val doc = db.newDocument()
|
@@ -4,18 +4,22 @@ plugins {
|
|||||||
alias catalog.plugins.envelope
|
alias catalog.plugins.envelope
|
||||||
alias catalog.plugins.sambal
|
alias catalog.plugins.sambal
|
||||||
alias catalog.plugins.graalvm.native.image
|
alias catalog.plugins.graalvm.native.image
|
||||||
alias catalog.plugins.graalvm.jlink
|
|
||||||
alias catalog.plugins.jpms.check
|
|
||||||
id 'maven-publish'
|
id 'maven-publish'
|
||||||
}
|
}
|
||||||
|
|
||||||
import net.woggioni.gradle.envelope.EnvelopePlugin
|
|
||||||
import net.woggioni.gradle.envelope.EnvelopeJarTask
|
import net.woggioni.gradle.envelope.EnvelopeJarTask
|
||||||
import net.woggioni.gradle.graalvm.NativeImageConfigurationTask
|
|
||||||
import net.woggioni.gradle.graalvm.NativeImagePlugin
|
import net.woggioni.gradle.graalvm.NativeImagePlugin
|
||||||
import net.woggioni.gradle.graalvm.NativeImageTask
|
import net.woggioni.gradle.graalvm.NativeImageTask
|
||||||
import net.woggioni.gradle.graalvm.JlinkPlugin
|
import net.woggioni.gradle.graalvm.NativeImageConfigurationTask
|
||||||
import net.woggioni.gradle.graalvm.JlinkTask
|
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
|
||||||
|
import org.jetbrains.kotlin.gradle.dsl.JvmTarget
|
||||||
|
|
||||||
|
Property<String> mainClassName = objects.property(String.class)
|
||||||
|
mainClassName.set('net.woggioni.gbcs.cli.GradleBuildCacheServerCli')
|
||||||
|
|
||||||
|
tasks.named(JavaPlugin.COMPILE_JAVA_TASK_NAME, JavaCompile) {
|
||||||
|
options.javaModuleMainClass = mainClassName
|
||||||
|
}
|
||||||
|
|
||||||
configurations {
|
configurations {
|
||||||
release {
|
release {
|
||||||
@@ -26,64 +30,41 @@ configurations {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
dependencies {
|
envelopeJar {
|
||||||
implementation catalog.jwo
|
mainModule = 'net.woggioni.gbcs.cli'
|
||||||
implementation catalog.slf4j.api
|
|
||||||
implementation catalog.picocli
|
|
||||||
|
|
||||||
implementation project(':rbcs-client')
|
|
||||||
implementation project(':rbcs-server')
|
|
||||||
|
|
||||||
// runtimeOnly catalog.slf4j.jdk14
|
|
||||||
runtimeOnly catalog.logback.classic
|
|
||||||
// runtimeOnly catalog.slf4j.simple
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
Property<String> mainModuleName = objects.property(String.class)
|
|
||||||
mainModuleName.set('net.woggioni.rbcs.cli')
|
|
||||||
Property<String> mainClassName = objects.property(String.class)
|
|
||||||
mainClassName.set('net.woggioni.rbcs.cli.RemoteBuildCacheServerCli')
|
|
||||||
|
|
||||||
tasks.named(JavaPlugin.COMPILE_JAVA_TASK_NAME, JavaCompile) {
|
|
||||||
options.javaModuleMainClass = mainClassName
|
|
||||||
}
|
|
||||||
|
|
||||||
Provider<EnvelopeJarTask> envelopeJarTaskProvider = tasks.named(EnvelopePlugin.ENVELOPE_JAR_TASK_NAME, EnvelopeJarTask.class) {
|
|
||||||
mainModule = mainModuleName
|
|
||||||
mainClass = mainClassName
|
mainClass = mainClassName
|
||||||
|
|
||||||
extraClasspath = ["plugins"]
|
extraClasspath = ["plugins"]
|
||||||
|
}
|
||||||
|
|
||||||
systemProperties['logback.configurationFile'] = 'classpath:net/woggioni/rbcs/cli/logback.xml'
|
dependencies {
|
||||||
systemProperties['io.netty.leakDetectionLevel'] = 'DISABLED'
|
implementation catalog.jwo
|
||||||
|
implementation catalog.slf4j.api
|
||||||
|
implementation catalog.netty.codec.http
|
||||||
|
implementation catalog.picocli
|
||||||
|
|
||||||
|
implementation rootProject
|
||||||
|
|
||||||
|
// runtimeOnly catalog.slf4j.jdk14
|
||||||
|
runtimeOnly catalog.logback.classic
|
||||||
|
}
|
||||||
|
|
||||||
|
Provider<EnvelopeJarTask> envelopeJarTaskProvider = tasks.named('envelopeJar', EnvelopeJarTask.class) {
|
||||||
|
// systemProperties['java.util.logging.config.class'] = 'net.woggioni.gbcs.LoggingConfig'
|
||||||
|
// systemProperties['log.config.source'] = 'logging.properties'
|
||||||
|
systemProperties['logback.configurationFile'] = 'classpath:net/woggioni/gbcs/cli/logback.xml'
|
||||||
}
|
}
|
||||||
|
|
||||||
tasks.named(NativeImagePlugin.CONFIGURE_NATIVE_IMAGE_TASK_NAME, NativeImageConfigurationTask) {
|
tasks.named(NativeImagePlugin.CONFIGURE_NATIVE_IMAGE_TASK_NAME, NativeImageConfigurationTask) {
|
||||||
mainClass = mainClassName
|
mainClass = 'net.woggioni.gbcs.GraalNativeImageConfiguration'
|
||||||
mainModule = mainModuleName
|
|
||||||
}
|
}
|
||||||
|
|
||||||
tasks.named(NativeImagePlugin.NATIVE_IMAGE_TASK_NAME, NativeImageTask) {
|
tasks.named(NativeImagePlugin.NATIVE_IMAGE_TASK_NAME, NativeImageTask) {
|
||||||
mainClass = mainClassName
|
mainClass = 'net.woggioni.gbcs.GradleBuildCacheServer'
|
||||||
mainModule = mainModuleName
|
|
||||||
useMusl = true
|
useMusl = true
|
||||||
buildStaticImage = true
|
buildStaticImage = true
|
||||||
}
|
}
|
||||||
|
|
||||||
tasks.named(JlinkPlugin.JLINK_TASK_NAME, JlinkTask) {
|
|
||||||
mainClass = mainClassName
|
|
||||||
mainModule = 'net.woggioni.rbcs.cli'
|
|
||||||
}
|
|
||||||
|
|
||||||
tasks.named(JavaPlugin.PROCESS_RESOURCES_TASK_NAME, ProcessResources) {
|
|
||||||
from(rootProject.file('conf')) {
|
|
||||||
into('net/woggioni/rbcs/cli')
|
|
||||||
include 'logback.xml'
|
|
||||||
include 'logging.properties'
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
artifacts {
|
artifacts {
|
||||||
release(envelopeJarTaskProvider)
|
release(envelopeJarTaskProvider)
|
||||||
}
|
}
|
@@ -1,2 +1,2 @@
|
|||||||
Args=-H:Optimize=3 --gc=serial --initialize-at-run-time=io.netty
|
Args=-H:Optimize=3 --gc=serial
|
||||||
#-H:TraceClassInitialization=io.netty.handler.ssl.BouncyCastleAlpnSslUtils
|
#-H:TraceClassInitialization=io.netty.handler.ssl.BouncyCastleAlpnSslUtils
|
15
gbcs-cli/src/main/java/module-info.java
Normal file
15
gbcs-cli/src/main/java/module-info.java
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
module net.woggioni.gbcs.cli {
|
||||||
|
requires org.slf4j;
|
||||||
|
requires net.woggioni.gbcs;
|
||||||
|
requires info.picocli;
|
||||||
|
requires net.woggioni.gbcs.base;
|
||||||
|
requires kotlin.stdlib;
|
||||||
|
requires net.woggioni.jwo;
|
||||||
|
|
||||||
|
exports net.woggioni.gbcs.cli.impl.converters to info.picocli;
|
||||||
|
opens net.woggioni.gbcs.cli.impl.commands to info.picocli;
|
||||||
|
opens net.woggioni.gbcs.cli.impl to info.picocli;
|
||||||
|
opens net.woggioni.gbcs.cli to info.picocli, net.woggioni.gbcs.base;
|
||||||
|
|
||||||
|
exports net.woggioni.gbcs.cli;
|
||||||
|
}
|
@@ -0,0 +1,99 @@
|
|||||||
|
package net.woggioni.gbcs.cli
|
||||||
|
|
||||||
|
import net.woggioni.gbcs.GradleBuildCacheServer
|
||||||
|
import net.woggioni.gbcs.GradleBuildCacheServer.Companion.DEFAULT_CONFIGURATION_URL
|
||||||
|
import net.woggioni.gbcs.base.GbcsUrlStreamHandlerFactory
|
||||||
|
import net.woggioni.gbcs.base.contextLogger
|
||||||
|
import net.woggioni.gbcs.base.debug
|
||||||
|
import net.woggioni.gbcs.base.info
|
||||||
|
import net.woggioni.gbcs.cli.impl.AbstractVersionProvider
|
||||||
|
import net.woggioni.gbcs.cli.impl.GbcsCommand
|
||||||
|
import net.woggioni.gbcs.cli.impl.commands.PasswordHashCommand
|
||||||
|
import net.woggioni.jwo.Application
|
||||||
|
import net.woggioni.jwo.JWO
|
||||||
|
import org.slf4j.Logger
|
||||||
|
import picocli.CommandLine
|
||||||
|
import picocli.CommandLine.Model.CommandSpec
|
||||||
|
import java.io.ByteArrayOutputStream
|
||||||
|
import java.nio.file.Files
|
||||||
|
import java.nio.file.Path
|
||||||
|
|
||||||
|
|
||||||
|
@CommandLine.Command(
|
||||||
|
name = "gbcs", versionProvider = GradleBuildCacheServerCli.VersionProvider::class
|
||||||
|
)
|
||||||
|
class GradleBuildCacheServerCli(application : Application, private val log : Logger) : GbcsCommand() {
|
||||||
|
|
||||||
|
class VersionProvider : AbstractVersionProvider()
|
||||||
|
companion object {
|
||||||
|
@JvmStatic
|
||||||
|
fun main(vararg args: String) {
|
||||||
|
Thread.currentThread().contextClassLoader = GradleBuildCacheServerCli::class.java.classLoader
|
||||||
|
GbcsUrlStreamHandlerFactory.install()
|
||||||
|
val log = contextLogger()
|
||||||
|
val app = Application.builder("gbcs")
|
||||||
|
.configurationDirectoryEnvVar("GBCS_CONFIGURATION_DIR")
|
||||||
|
.configurationDirectoryPropertyKey("net.woggioni.gbcs.conf.dir")
|
||||||
|
.build()
|
||||||
|
val gbcsCli = GradleBuildCacheServerCli(app, log)
|
||||||
|
val commandLine = CommandLine(gbcsCli)
|
||||||
|
commandLine.setExecutionExceptionHandler { ex, cl, parseResult ->
|
||||||
|
log.error(ex.message, ex)
|
||||||
|
CommandLine.ExitCode.SOFTWARE
|
||||||
|
}
|
||||||
|
commandLine.addSubcommand(PasswordHashCommand())
|
||||||
|
System.exit(commandLine.execute(*args))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
@CommandLine.Option(
|
||||||
|
names = ["-c", "--config-file"],
|
||||||
|
description = ["Read the application configuration from this file"],
|
||||||
|
paramLabel = "CONFIG_FILE"
|
||||||
|
)
|
||||||
|
private var configurationFile: Path = findConfigurationFile(application)
|
||||||
|
|
||||||
|
@CommandLine.Option(names = ["-V", "--version"], versionHelp = true)
|
||||||
|
var versionHelp = false
|
||||||
|
private set
|
||||||
|
|
||||||
|
@CommandLine.Spec
|
||||||
|
private lateinit var spec: CommandSpec
|
||||||
|
|
||||||
|
private fun findConfigurationFile(app : Application): Path {
|
||||||
|
val confDir = app.computeConfigurationDirectory()
|
||||||
|
val configurationFile = confDir.resolve("gbcs.xml")
|
||||||
|
return configurationFile
|
||||||
|
}
|
||||||
|
|
||||||
|
private fun createDefaultConfigurationFile(configurationFile : Path) {
|
||||||
|
log.info {
|
||||||
|
"Creating default configuration file at '$configurationFile'"
|
||||||
|
}
|
||||||
|
val defaultConfigurationFileResource = DEFAULT_CONFIGURATION_URL
|
||||||
|
Files.newOutputStream(configurationFile).use { outputStream ->
|
||||||
|
defaultConfigurationFileResource.openStream().use { inputStream ->
|
||||||
|
JWO.copy(inputStream, outputStream)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun run() {
|
||||||
|
if (!Files.exists(configurationFile)) {
|
||||||
|
Files.createDirectories(configurationFile.parent)
|
||||||
|
createDefaultConfigurationFile(configurationFile)
|
||||||
|
}
|
||||||
|
|
||||||
|
val configuration = GradleBuildCacheServer.loadConfiguration(configurationFile)
|
||||||
|
log.debug {
|
||||||
|
ByteArrayOutputStream().also {
|
||||||
|
GradleBuildCacheServer.dumpConfiguration(configuration, it)
|
||||||
|
}.let {
|
||||||
|
"Server configuration:\n${String(it.toByteArray())}"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
val server = GradleBuildCacheServer(configuration)
|
||||||
|
server.run().use {
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@@ -1,6 +1,8 @@
|
|||||||
package net.woggioni.rbcs.cli.impl
|
package net.woggioni.gbcs.cli.impl
|
||||||
|
|
||||||
import picocli.CommandLine
|
import picocli.CommandLine
|
||||||
|
import java.net.URL
|
||||||
|
import java.util.Enumeration
|
||||||
import java.util.jar.Attributes
|
import java.util.jar.Attributes
|
||||||
import java.util.jar.JarFile
|
import java.util.jar.JarFile
|
||||||
import java.util.jar.Manifest
|
import java.util.jar.Manifest
|
@@ -0,0 +1,11 @@
|
|||||||
|
package net.woggioni.gbcs.cli.impl
|
||||||
|
|
||||||
|
import picocli.CommandLine
|
||||||
|
|
||||||
|
|
||||||
|
abstract class GbcsCommand : Runnable {
|
||||||
|
|
||||||
|
@CommandLine.Option(names = ["-h", "--help"], usageHelp = true)
|
||||||
|
var usageHelp = false
|
||||||
|
private set
|
||||||
|
}
|
@@ -1,26 +1,26 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.commands
|
package net.woggioni.gbcs.cli.impl.commands
|
||||||
|
|
||||||
|
import net.woggioni.gbcs.base.PasswordSecurity.hashPassword
|
||||||
|
import net.woggioni.gbcs.cli.impl.GbcsCommand
|
||||||
|
import net.woggioni.gbcs.cli.impl.converters.OutputStreamConverter
|
||||||
import net.woggioni.jwo.UncloseableOutputStream
|
import net.woggioni.jwo.UncloseableOutputStream
|
||||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
|
||||||
import net.woggioni.rbcs.cli.impl.converters.OutputStreamConverter
|
|
||||||
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
|
|
||||||
import picocli.CommandLine
|
import picocli.CommandLine
|
||||||
|
import java.io.BufferedWriter
|
||||||
import java.io.OutputStream
|
import java.io.OutputStream
|
||||||
import java.io.OutputStreamWriter
|
import java.io.OutputStreamWriter
|
||||||
import java.io.PrintWriter
|
|
||||||
|
|
||||||
|
|
||||||
@CommandLine.Command(
|
@CommandLine.Command(
|
||||||
name = "password",
|
name = "password",
|
||||||
description = ["Generate a password hash to add to RBCS configuration file"],
|
description = ["Generate a password hash to add to GBCS configuration file"],
|
||||||
showDefaultValues = true
|
showDefaultValues = true
|
||||||
)
|
)
|
||||||
class PasswordHashCommand : RbcsCommand() {
|
class PasswordHashCommand : GbcsCommand() {
|
||||||
@CommandLine.Option(
|
@CommandLine.Option(
|
||||||
names = ["-o", "--output-file"],
|
names = ["-o", "--output-file"],
|
||||||
description = ["Write the output to a file instead of stdout"],
|
description = ["Write the output to a file instead of stdout"],
|
||||||
converter = [OutputStreamConverter::class],
|
converter = [OutputStreamConverter::class],
|
||||||
showDefaultValue = CommandLine.Help.Visibility.NEVER,
|
defaultValue = "stdout",
|
||||||
paramLabel = "OUTPUT_FILE"
|
paramLabel = "OUTPUT_FILE"
|
||||||
)
|
)
|
||||||
private var outputStream: OutputStream = UncloseableOutputStream(System.out)
|
private var outputStream: OutputStream = UncloseableOutputStream(System.out)
|
||||||
@@ -30,8 +30,9 @@ class PasswordHashCommand : RbcsCommand() {
|
|||||||
val password2 = String(System.console().readPassword("Type your password again for confirmation:"))
|
val password2 = String(System.console().readPassword("Type your password again for confirmation:"))
|
||||||
if(password1 != password2) throw IllegalArgumentException("Passwords do not match")
|
if(password1 != password2) throw IllegalArgumentException("Passwords do not match")
|
||||||
|
|
||||||
PrintWriter(OutputStreamWriter(outputStream, Charsets.UTF_8)).use {
|
BufferedWriter(OutputStreamWriter(outputStream, Charsets.UTF_8)).use {
|
||||||
it.println(hashPassword(password1))
|
it.write(hashPassword(password1))
|
||||||
|
it.newLine()
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
@@ -1,4 +1,4 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.converters
|
package net.woggioni.gbcs.cli.impl.converters
|
||||||
|
|
||||||
import picocli.CommandLine
|
import picocli.CommandLine
|
||||||
import java.io.OutputStream
|
import java.io.OutputStream
|
@@ -12,10 +12,10 @@
|
|||||||
</encoder>
|
</encoder>
|
||||||
</appender>
|
</appender>
|
||||||
|
|
||||||
<root level="info">
|
<root level="debug">
|
||||||
<appender-ref ref="console"/>
|
<appender-ref ref="console"/>
|
||||||
</root>
|
</root>
|
||||||
<logger name="io.netty" level="info"/>
|
<logger name="io.netty" level="debug"/>
|
||||||
<logger name="com.google.code.yanf4j" level="warn"/>
|
<logger name="com.google.code.yanf4j" level="warn"/>
|
||||||
<logger name="net.rubyeye.xmemcached" level="warn"/>
|
<logger name="net.rubyeye.xmemcached" level="warn"/>
|
||||||
</configuration>
|
</configuration>
|
@@ -1,3 +1,6 @@
|
|||||||
|
import org.jetbrains.kotlin.gradle.dsl.JvmTarget
|
||||||
|
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
|
||||||
|
|
||||||
plugins {
|
plugins {
|
||||||
id 'java-library'
|
id 'java-library'
|
||||||
id 'maven-publish'
|
id 'maven-publish'
|
||||||
@@ -6,10 +9,10 @@ plugins {
|
|||||||
|
|
||||||
configurations {
|
configurations {
|
||||||
bundle {
|
bundle {
|
||||||
|
extendsFrom runtimeClasspath
|
||||||
canBeResolved = true
|
canBeResolved = true
|
||||||
canBeConsumed = false
|
canBeConsumed = false
|
||||||
visible = false
|
visible = false
|
||||||
transitive = false
|
|
||||||
|
|
||||||
resolutionStrategy {
|
resolutionStrategy {
|
||||||
dependencies {
|
dependencies {
|
||||||
@@ -29,21 +32,10 @@ configurations {
|
|||||||
}
|
}
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
implementation project(':rbcs-common')
|
compileOnly project(':gbcs-base')
|
||||||
implementation project(':rbcs-api')
|
compileOnly project(':gbcs-api')
|
||||||
implementation catalog.jwo
|
compileOnly catalog.jwo
|
||||||
implementation catalog.slf4j.api
|
implementation catalog.xmemcached
|
||||||
implementation catalog.netty.common
|
|
||||||
implementation catalog.netty.handler
|
|
||||||
implementation catalog.netty.codec.memcache
|
|
||||||
|
|
||||||
bundle catalog.netty.codec.memcache
|
|
||||||
|
|
||||||
testRuntimeOnly catalog.logback.classic
|
|
||||||
}
|
|
||||||
|
|
||||||
tasks.named(JavaPlugin.TEST_TASK_NAME, Test) {
|
|
||||||
systemProperty("io.netty.leakDetectionLevel", "PARANOID")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Provider<Tar> bundleTask = tasks.register("bundle", Tar) {
|
Provider<Tar> bundleTask = tasks.register("bundle", Tar) {
|
14
gbcs-memcached/src/main/java/module-info.java
Normal file
14
gbcs-memcached/src/main/java/module-info.java
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
import net.woggioni.gbcs.api.CacheProvider;
|
||||||
|
|
||||||
|
module net.woggioni.gbcs.memcached {
|
||||||
|
requires net.woggioni.gbcs.base;
|
||||||
|
requires net.woggioni.gbcs.api;
|
||||||
|
requires com.googlecode.xmemcached;
|
||||||
|
requires net.woggioni.jwo;
|
||||||
|
requires java.xml;
|
||||||
|
requires kotlin.stdlib;
|
||||||
|
|
||||||
|
provides CacheProvider with net.woggioni.gbcs.memcached.MemcachedCacheProvider;
|
||||||
|
|
||||||
|
opens net.woggioni.gbcs.memcached.schema;
|
||||||
|
}
|
@@ -0,0 +1,60 @@
|
|||||||
|
package net.woggioni.gbcs.memcached
|
||||||
|
|
||||||
|
import net.rubyeye.xmemcached.MemcachedClient
|
||||||
|
import net.rubyeye.xmemcached.XMemcachedClientBuilder
|
||||||
|
import net.rubyeye.xmemcached.command.BinaryCommandFactory
|
||||||
|
import net.rubyeye.xmemcached.transcoders.CompressionMode
|
||||||
|
import net.rubyeye.xmemcached.transcoders.SerializingTranscoder
|
||||||
|
import net.woggioni.gbcs.api.Cache
|
||||||
|
import net.woggioni.gbcs.api.exception.ContentTooLargeException
|
||||||
|
import net.woggioni.gbcs.base.HostAndPort
|
||||||
|
import net.woggioni.jwo.JWO
|
||||||
|
import java.io.ByteArrayInputStream
|
||||||
|
import java.net.InetSocketAddress
|
||||||
|
import java.nio.channels.Channels
|
||||||
|
import java.nio.channels.ReadableByteChannel
|
||||||
|
import java.nio.charset.StandardCharsets
|
||||||
|
import java.security.MessageDigest
|
||||||
|
import java.time.Duration
|
||||||
|
|
||||||
|
class MemcachedCache(
|
||||||
|
servers: List<HostAndPort>,
|
||||||
|
private val maxAge: Duration,
|
||||||
|
maxSize : Int,
|
||||||
|
digestAlgorithm: String?,
|
||||||
|
compressionMode: CompressionMode,
|
||||||
|
) : Cache {
|
||||||
|
private val memcachedClient = XMemcachedClientBuilder(
|
||||||
|
servers.stream().map { addr: HostAndPort -> InetSocketAddress(addr.host, addr.port) }.toList()
|
||||||
|
).apply {
|
||||||
|
commandFactory = BinaryCommandFactory()
|
||||||
|
digestAlgorithm?.let { dAlg ->
|
||||||
|
setKeyProvider { key ->
|
||||||
|
val md = MessageDigest.getInstance(dAlg)
|
||||||
|
md.update(key.toByteArray(StandardCharsets.UTF_8))
|
||||||
|
JWO.bytesToHex(md.digest())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
transcoder = SerializingTranscoder(maxSize).apply {
|
||||||
|
setCompressionMode(compressionMode)
|
||||||
|
}
|
||||||
|
}.build()
|
||||||
|
|
||||||
|
override fun get(key: String): ReadableByteChannel? {
|
||||||
|
return memcachedClient.get<ByteArray>(key)
|
||||||
|
?.let(::ByteArrayInputStream)
|
||||||
|
?.let(Channels::newChannel)
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun put(key: String, content: ByteArray) {
|
||||||
|
try {
|
||||||
|
memcachedClient[key, maxAge.toSeconds().toInt()] = content
|
||||||
|
} catch (e: IllegalArgumentException) {
|
||||||
|
throw ContentTooLargeException(e.message, e)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun close() {
|
||||||
|
memcachedClient.shutdown()
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1,26 @@
|
|||||||
|
package net.woggioni.gbcs.memcached
|
||||||
|
|
||||||
|
import net.rubyeye.xmemcached.transcoders.CompressionMode
|
||||||
|
import net.woggioni.gbcs.api.Configuration
|
||||||
|
import net.woggioni.gbcs.base.HostAndPort
|
||||||
|
import java.time.Duration
|
||||||
|
|
||||||
|
data class MemcachedCacheConfiguration(
|
||||||
|
var servers: List<HostAndPort>,
|
||||||
|
var maxAge: Duration = Duration.ofDays(1),
|
||||||
|
var maxSize: Int = 0x100000,
|
||||||
|
var digestAlgorithm: String? = null,
|
||||||
|
var compressionMode: CompressionMode = CompressionMode.ZIP,
|
||||||
|
) : Configuration.Cache {
|
||||||
|
override fun materialize() = MemcachedCache(
|
||||||
|
servers,
|
||||||
|
maxAge,
|
||||||
|
maxSize,
|
||||||
|
digestAlgorithm,
|
||||||
|
compressionMode
|
||||||
|
)
|
||||||
|
|
||||||
|
override fun getNamespaceURI() = "urn:net.woggioni.gbcs-memcached"
|
||||||
|
|
||||||
|
override fun getTypeName() = "memcachedCacheType"
|
||||||
|
}
|
@@ -0,0 +1,85 @@
|
|||||||
|
package net.woggioni.gbcs.memcached
|
||||||
|
|
||||||
|
import net.rubyeye.xmemcached.transcoders.CompressionMode
|
||||||
|
import net.woggioni.gbcs.api.CacheProvider
|
||||||
|
import net.woggioni.gbcs.base.GBCS
|
||||||
|
import net.woggioni.gbcs.base.HostAndPort
|
||||||
|
import net.woggioni.gbcs.base.Xml
|
||||||
|
import net.woggioni.gbcs.base.Xml.Companion.asIterable
|
||||||
|
import org.w3c.dom.Document
|
||||||
|
import org.w3c.dom.Element
|
||||||
|
import java.time.Duration
|
||||||
|
import java.util.zip.Deflater
|
||||||
|
|
||||||
|
class MemcachedCacheProvider : CacheProvider<MemcachedCacheConfiguration> {
|
||||||
|
override fun getXmlSchemaLocation() = "classpath:net/woggioni/gbcs/memcached/schema/gbcs-memcached.xsd"
|
||||||
|
|
||||||
|
override fun getXmlType() = "memcachedCacheType"
|
||||||
|
|
||||||
|
override fun getXmlNamespace()= "urn:net.woggioni.gbcs-memcached"
|
||||||
|
|
||||||
|
override fun deserialize(el: Element): MemcachedCacheConfiguration {
|
||||||
|
val servers = mutableListOf<HostAndPort>()
|
||||||
|
val maxAge = el.getAttribute("max-age")
|
||||||
|
.takeIf(String::isNotEmpty)
|
||||||
|
?.let(Duration::parse)
|
||||||
|
?: Duration.ofDays(1)
|
||||||
|
val maxSize = el.getAttribute("max-size")
|
||||||
|
.takeIf(String::isNotEmpty)
|
||||||
|
?.let(String::toInt)
|
||||||
|
?: 0x100000
|
||||||
|
val enableCompression = el.getAttribute("enable-compression")
|
||||||
|
.takeIf(String::isNotEmpty)
|
||||||
|
?.let(String::toBoolean)
|
||||||
|
?: false
|
||||||
|
val compressionMode = el.getAttribute("compression-mode")
|
||||||
|
.takeIf(String::isNotEmpty)
|
||||||
|
?.let {
|
||||||
|
when(it) {
|
||||||
|
"gzip" -> CompressionMode.GZIP
|
||||||
|
"zip" -> CompressionMode.ZIP
|
||||||
|
else -> CompressionMode.ZIP
|
||||||
|
}
|
||||||
|
}
|
||||||
|
?: CompressionMode.ZIP
|
||||||
|
val digestAlgorithm = el.getAttribute("digest").takeIf(String::isNotEmpty)
|
||||||
|
for (child in el.asIterable()) {
|
||||||
|
when (child.nodeName) {
|
||||||
|
"server" -> {
|
||||||
|
servers.add(HostAndPort(child.getAttribute("host"), child.getAttribute("port").toInt()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return MemcachedCacheConfiguration(
|
||||||
|
servers,
|
||||||
|
maxAge,
|
||||||
|
maxSize,
|
||||||
|
digestAlgorithm,
|
||||||
|
compressionMode,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
override fun serialize(doc: Document, cache : MemcachedCacheConfiguration) = cache.run {
|
||||||
|
val result = doc.createElementNS(xmlNamespace,"cache")
|
||||||
|
Xml.of(doc, result) {
|
||||||
|
attr("xs:type", xmlType, GBCS.XML_SCHEMA_NAMESPACE_URI)
|
||||||
|
for (server in servers) {
|
||||||
|
node("server", xmlNamespace) {
|
||||||
|
attr("host", server.host)
|
||||||
|
attr("port", server.port.toString())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
attr("max-age", maxAge.toString())
|
||||||
|
attr("max-size", maxSize.toString())
|
||||||
|
digestAlgorithm?.let { digestAlgorithm ->
|
||||||
|
attr("digest", digestAlgorithm)
|
||||||
|
}
|
||||||
|
attr("compression-mode", when(compressionMode) {
|
||||||
|
CompressionMode.GZIP -> "gzip"
|
||||||
|
CompressionMode.ZIP -> "zip"
|
||||||
|
})
|
||||||
|
}
|
||||||
|
result
|
||||||
|
}
|
||||||
|
}
|
@@ -0,0 +1 @@
|
|||||||
|
net.woggioni.gbcs.memcached.MemcachedCacheProvider
|
@@ -0,0 +1,35 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||||
|
<xs:schema targetNamespace="urn:net.woggioni.gbcs-memcached"
|
||||||
|
xmlns:gbcs-memcached="urn:net.woggioni.gbcs-memcached"
|
||||||
|
xmlns:gbcs="urn:net.woggioni.gbcs"
|
||||||
|
xmlns:xs="http://www.w3.org/2001/XMLSchema">
|
||||||
|
|
||||||
|
<xs:import schemaLocation="classpath:net/woggioni/gbcs/schema/gbcs.xsd" namespace="urn:net.woggioni.gbcs"/>
|
||||||
|
|
||||||
|
<xs:complexType name="memcachedServerType">
|
||||||
|
<xs:attribute name="host" type="xs:string" use="required"/>
|
||||||
|
<xs:attribute name="port" type="xs:positiveInteger" use="required"/>
|
||||||
|
</xs:complexType>
|
||||||
|
|
||||||
|
<xs:complexType name="memcachedCacheType">
|
||||||
|
<xs:complexContent>
|
||||||
|
<xs:extension base="gbcs:cacheType">
|
||||||
|
<xs:sequence maxOccurs="unbounded">
|
||||||
|
<xs:element name="server" type="gbcs-memcached:memcachedServerType"/>
|
||||||
|
</xs:sequence>
|
||||||
|
<xs:attribute name="max-age" type="xs:duration" default="P1D"/>
|
||||||
|
<xs:attribute name="max-size" type="xs:unsignedInt" default="1048576"/>
|
||||||
|
<xs:attribute name="digest" type="xs:token" />
|
||||||
|
<xs:attribute name="compression-type" type="gbcs-memcached:compressionType" default="deflate"/>
|
||||||
|
</xs:extension>
|
||||||
|
</xs:complexContent>
|
||||||
|
</xs:complexType>
|
||||||
|
|
||||||
|
<xs:simpleType name="compressionType">
|
||||||
|
<xs:restriction base="xs:token">
|
||||||
|
<xs:enumeration value="deflate"/>
|
||||||
|
<xs:enumeration value="gzip"/>
|
||||||
|
</xs:restriction>
|
||||||
|
</xs:simpleType>
|
||||||
|
|
||||||
|
</xs:schema>
|
@@ -2,10 +2,9 @@ org.gradle.configuration-cache=false
|
|||||||
org.gradle.parallel=true
|
org.gradle.parallel=true
|
||||||
org.gradle.caching=true
|
org.gradle.caching=true
|
||||||
|
|
||||||
rbcs.version = 0.2.0
|
gbcs.version = 0.0.1
|
||||||
|
|
||||||
lys.version = 2025.02.08
|
lys.version = 2025.01.10
|
||||||
|
|
||||||
gitea.maven.url = https://gitea.woggioni.net/api/packages/woggioni/maven
|
gitea.maven.url = https://gitea.woggioni.net/api/packages/woggioni/maven
|
||||||
docker.registry.url=gitea.woggioni.net
|
docker.registry.url=gitea.woggioni.net
|
||||||
|
|
||||||
|
@@ -1,11 +0,0 @@
|
|||||||
module net.woggioni.rbcs.api {
|
|
||||||
requires static lombok;
|
|
||||||
requires java.xml;
|
|
||||||
requires io.netty.buffer;
|
|
||||||
requires io.netty.handler;
|
|
||||||
requires io.netty.transport;
|
|
||||||
requires io.netty.common;
|
|
||||||
exports net.woggioni.rbcs.api;
|
|
||||||
exports net.woggioni.rbcs.api.exception;
|
|
||||||
exports net.woggioni.rbcs.api.message;
|
|
||||||
}
|
|
@@ -1,13 +0,0 @@
|
|||||||
package net.woggioni.rbcs.api;
|
|
||||||
|
|
||||||
import java.util.concurrent.CompletableFuture;
|
|
||||||
|
|
||||||
public interface AsyncCloseable extends AutoCloseable {
|
|
||||||
|
|
||||||
CompletableFuture<Void> asyncClose();
|
|
||||||
|
|
||||||
@Override
|
|
||||||
default void close() throws Exception {
|
|
||||||
asyncClose().get();
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,15 +0,0 @@
|
|||||||
package net.woggioni.rbcs.api;
|
|
||||||
|
|
||||||
import io.netty.channel.ChannelFactory;
|
|
||||||
import io.netty.channel.ChannelHandler;
|
|
||||||
import io.netty.channel.EventLoopGroup;
|
|
||||||
import io.netty.channel.socket.DatagramChannel;
|
|
||||||
import io.netty.channel.socket.SocketChannel;
|
|
||||||
|
|
||||||
public interface CacheHandlerFactory extends AsyncCloseable {
|
|
||||||
ChannelHandler newHandler(
|
|
||||||
EventLoopGroup eventLoopGroup,
|
|
||||||
ChannelFactory<SocketChannel> socketChannelFactory,
|
|
||||||
ChannelFactory<DatagramChannel> datagramChannelFactory
|
|
||||||
);
|
|
||||||
}
|
|
@@ -1,14 +0,0 @@
|
|||||||
package net.woggioni.rbcs.api;
|
|
||||||
|
|
||||||
import lombok.Getter;
|
|
||||||
import lombok.RequiredArgsConstructor;
|
|
||||||
|
|
||||||
import java.io.Serializable;
|
|
||||||
|
|
||||||
@Getter
|
|
||||||
@RequiredArgsConstructor
|
|
||||||
public class CacheValueMetadata implements Serializable {
|
|
||||||
private final String contentDisposition;
|
|
||||||
private final String mimeType;
|
|
||||||
}
|
|
||||||
|
|
@@ -1,11 +0,0 @@
|
|||||||
package net.woggioni.rbcs.api.exception;
|
|
||||||
|
|
||||||
public class CacheException extends RbcsException {
|
|
||||||
public CacheException(String message, Throwable cause) {
|
|
||||||
super(message, cause);
|
|
||||||
}
|
|
||||||
|
|
||||||
public CacheException(String message) {
|
|
||||||
this(message, null);
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,11 +0,0 @@
|
|||||||
package net.woggioni.rbcs.api.exception;
|
|
||||||
|
|
||||||
public class ConfigurationException extends RbcsException {
|
|
||||||
public ConfigurationException(String message, Throwable cause) {
|
|
||||||
super(message, cause);
|
|
||||||
}
|
|
||||||
|
|
||||||
public ConfigurationException(String message) {
|
|
||||||
this(message, null);
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,7 +0,0 @@
|
|||||||
package net.woggioni.rbcs.api.exception;
|
|
||||||
|
|
||||||
public class RbcsException extends RuntimeException {
|
|
||||||
public RbcsException(String message, Throwable cause) {
|
|
||||||
super(message, cause);
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,161 +0,0 @@
|
|||||||
package net.woggioni.rbcs.api.message;
|
|
||||||
|
|
||||||
import io.netty.buffer.ByteBuf;
|
|
||||||
import io.netty.buffer.ByteBufHolder;
|
|
||||||
import lombok.Getter;
|
|
||||||
import lombok.RequiredArgsConstructor;
|
|
||||||
import net.woggioni.rbcs.api.CacheValueMetadata;
|
|
||||||
|
|
||||||
public sealed interface CacheMessage {
|
|
||||||
|
|
||||||
@Getter
|
|
||||||
@RequiredArgsConstructor
|
|
||||||
final class CacheGetRequest implements CacheMessage {
|
|
||||||
private final String key;
|
|
||||||
}
|
|
||||||
|
|
||||||
abstract sealed class CacheGetResponse implements CacheMessage {
|
|
||||||
}
|
|
||||||
|
|
||||||
@Getter
|
|
||||||
@RequiredArgsConstructor
|
|
||||||
final class CacheValueFoundResponse extends CacheGetResponse {
|
|
||||||
private final String key;
|
|
||||||
private final CacheValueMetadata metadata;
|
|
||||||
}
|
|
||||||
|
|
||||||
final class CacheValueNotFoundResponse extends CacheGetResponse {
|
|
||||||
}
|
|
||||||
|
|
||||||
@Getter
|
|
||||||
@RequiredArgsConstructor
|
|
||||||
final class CachePutRequest implements CacheMessage {
|
|
||||||
private final String key;
|
|
||||||
private final CacheValueMetadata metadata;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Getter
|
|
||||||
@RequiredArgsConstructor
|
|
||||||
final class CachePutResponse implements CacheMessage {
|
|
||||||
private final String key;
|
|
||||||
}
|
|
||||||
|
|
||||||
@RequiredArgsConstructor
|
|
||||||
non-sealed class CacheContent implements CacheMessage, ByteBufHolder {
|
|
||||||
protected final ByteBuf chunk;
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public ByteBuf content() {
|
|
||||||
return chunk;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public CacheContent copy() {
|
|
||||||
return replace(chunk.copy());
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public CacheContent duplicate() {
|
|
||||||
return new CacheContent(chunk.duplicate());
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public CacheContent retainedDuplicate() {
|
|
||||||
return new CacheContent(chunk.retainedDuplicate());
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public CacheContent replace(ByteBuf content) {
|
|
||||||
return new CacheContent(content);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public CacheContent retain() {
|
|
||||||
chunk.retain();
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public CacheContent retain(int increment) {
|
|
||||||
chunk.retain(increment);
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public CacheContent touch() {
|
|
||||||
chunk.touch();
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public CacheContent touch(Object hint) {
|
|
||||||
chunk.touch(hint);
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public int refCnt() {
|
|
||||||
return chunk.refCnt();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean release() {
|
|
||||||
return chunk.release();
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public boolean release(int decrement) {
|
|
||||||
return chunk.release(decrement);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
final class LastCacheContent extends CacheContent {
|
|
||||||
public LastCacheContent(ByteBuf chunk) {
|
|
||||||
super(chunk);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public LastCacheContent copy() {
|
|
||||||
return replace(chunk.copy());
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public LastCacheContent duplicate() {
|
|
||||||
return new LastCacheContent(chunk.duplicate());
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public LastCacheContent retainedDuplicate() {
|
|
||||||
return new LastCacheContent(chunk.retainedDuplicate());
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public LastCacheContent replace(ByteBuf content) {
|
|
||||||
return new LastCacheContent(chunk);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public LastCacheContent retain() {
|
|
||||||
super.retain();
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public LastCacheContent retain(int increment) {
|
|
||||||
super.retain(increment);
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public LastCacheContent touch() {
|
|
||||||
super.touch();
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
|
||||||
public LastCacheContent touch(Object hint) {
|
|
||||||
super.touch(hint);
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,17 +0,0 @@
|
|||||||
module net.woggioni.rbcs.cli {
|
|
||||||
requires org.slf4j;
|
|
||||||
requires net.woggioni.rbcs.server;
|
|
||||||
requires info.picocli;
|
|
||||||
requires net.woggioni.rbcs.common;
|
|
||||||
requires net.woggioni.rbcs.client;
|
|
||||||
requires kotlin.stdlib;
|
|
||||||
requires net.woggioni.jwo;
|
|
||||||
requires net.woggioni.rbcs.api;
|
|
||||||
|
|
||||||
exports net.woggioni.rbcs.cli.impl.converters to info.picocli;
|
|
||||||
opens net.woggioni.rbcs.cli.impl.commands to info.picocli;
|
|
||||||
opens net.woggioni.rbcs.cli.impl to info.picocli;
|
|
||||||
opens net.woggioni.rbcs.cli to info.picocli, net.woggioni.rbcs.common;
|
|
||||||
|
|
||||||
exports net.woggioni.rbcs.cli;
|
|
||||||
}
|
|
@@ -1,69 +0,0 @@
|
|||||||
package net.woggioni.rbcs.cli
|
|
||||||
|
|
||||||
import net.woggioni.jwo.Application
|
|
||||||
import net.woggioni.rbcs.cli.impl.AbstractVersionProvider
|
|
||||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
|
||||||
import net.woggioni.rbcs.cli.impl.commands.BenchmarkCommand
|
|
||||||
import net.woggioni.rbcs.cli.impl.commands.ClientCommand
|
|
||||||
import net.woggioni.rbcs.cli.impl.commands.GetCommand
|
|
||||||
import net.woggioni.rbcs.cli.impl.commands.HealthCheckCommand
|
|
||||||
import net.woggioni.rbcs.cli.impl.commands.PasswordHashCommand
|
|
||||||
import net.woggioni.rbcs.cli.impl.commands.PutCommand
|
|
||||||
import net.woggioni.rbcs.cli.impl.commands.ServerCommand
|
|
||||||
import net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory
|
|
||||||
import net.woggioni.rbcs.common.contextLogger
|
|
||||||
import picocli.CommandLine
|
|
||||||
import picocli.CommandLine.Model.CommandSpec
|
|
||||||
|
|
||||||
|
|
||||||
@CommandLine.Command(
|
|
||||||
name = "rbcs", versionProvider = RemoteBuildCacheServerCli.VersionProvider::class
|
|
||||||
)
|
|
||||||
class RemoteBuildCacheServerCli : RbcsCommand() {
|
|
||||||
|
|
||||||
class VersionProvider : AbstractVersionProvider()
|
|
||||||
companion object {
|
|
||||||
@JvmStatic
|
|
||||||
fun main(vararg args: String) {
|
|
||||||
val currentClassLoader = RemoteBuildCacheServerCli::class.java.classLoader
|
|
||||||
Thread.currentThread().contextClassLoader = currentClassLoader
|
|
||||||
if(currentClassLoader.javaClass.name == "net.woggioni.envelope.loader.ModuleClassLoader") {
|
|
||||||
//We're running in an envelope jar and custom URL protocols won't work
|
|
||||||
RbcsUrlStreamHandlerFactory.install()
|
|
||||||
}
|
|
||||||
val log = contextLogger()
|
|
||||||
val app = Application.builder("rbcs")
|
|
||||||
.configurationDirectoryEnvVar("RBCS_CONFIGURATION_DIR")
|
|
||||||
.configurationDirectoryPropertyKey("net.woggioni.rbcs.conf.dir")
|
|
||||||
.build()
|
|
||||||
val rbcsCli = RemoteBuildCacheServerCli()
|
|
||||||
val commandLine = CommandLine(rbcsCli)
|
|
||||||
commandLine.setExecutionExceptionHandler { ex, cl, parseResult ->
|
|
||||||
log.error(ex.message, ex)
|
|
||||||
CommandLine.ExitCode.SOFTWARE
|
|
||||||
}
|
|
||||||
commandLine.addSubcommand(ServerCommand(app))
|
|
||||||
commandLine.addSubcommand(PasswordHashCommand())
|
|
||||||
commandLine.addSubcommand(
|
|
||||||
CommandLine(ClientCommand(app)).apply {
|
|
||||||
addSubcommand(BenchmarkCommand())
|
|
||||||
addSubcommand(PutCommand())
|
|
||||||
addSubcommand(GetCommand())
|
|
||||||
addSubcommand(HealthCheckCommand())
|
|
||||||
})
|
|
||||||
System.exit(commandLine.execute(*args))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@CommandLine.Option(names = ["-V", "--version"], versionHelp = true)
|
|
||||||
var versionHelp = false
|
|
||||||
private set
|
|
||||||
|
|
||||||
@CommandLine.Spec
|
|
||||||
private lateinit var spec: CommandSpec
|
|
||||||
|
|
||||||
|
|
||||||
override fun run() {
|
|
||||||
spec.commandLine().usage(System.out);
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,19 +0,0 @@
|
|||||||
package net.woggioni.rbcs.cli.impl
|
|
||||||
|
|
||||||
import net.woggioni.jwo.Application
|
|
||||||
import picocli.CommandLine
|
|
||||||
import java.nio.file.Path
|
|
||||||
|
|
||||||
|
|
||||||
abstract class RbcsCommand : Runnable {
|
|
||||||
|
|
||||||
@CommandLine.Option(names = ["-h", "--help"], usageHelp = true)
|
|
||||||
var usageHelp = false
|
|
||||||
private set
|
|
||||||
|
|
||||||
protected fun findConfigurationFile(app: Application, fileName : String): Path {
|
|
||||||
val confDir = app.computeConfigurationDirectory()
|
|
||||||
val configurationFile = confDir.resolve(fileName)
|
|
||||||
return configurationFile
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,172 +0,0 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.commands
|
|
||||||
|
|
||||||
import net.woggioni.jwo.JWO
|
|
||||||
import net.woggioni.jwo.LongMath
|
|
||||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
|
||||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
|
||||||
import net.woggioni.rbcs.cli.impl.converters.ByteSizeConverter
|
|
||||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
|
||||||
import net.woggioni.rbcs.common.createLogger
|
|
||||||
import net.woggioni.rbcs.common.debug
|
|
||||||
import net.woggioni.rbcs.common.error
|
|
||||||
import net.woggioni.rbcs.common.info
|
|
||||||
import picocli.CommandLine
|
|
||||||
import java.security.SecureRandom
|
|
||||||
import java.time.Duration
|
|
||||||
import java.time.Instant
|
|
||||||
import java.time.temporal.ChronoUnit
|
|
||||||
import java.util.concurrent.LinkedBlockingQueue
|
|
||||||
import java.util.concurrent.Semaphore
|
|
||||||
import java.util.concurrent.atomic.AtomicLong
|
|
||||||
import kotlin.random.Random
|
|
||||||
|
|
||||||
/**
 * Load-test subcommand: inserts [numberOfEntries] synthetic entries into the cache,
 * then reads every successfully inserted entry back, verifying the payloads and
 * reporting an ops/s figure for each phase.
 */
@CommandLine.Command(
    name = "benchmark",
    description = ["Run a load test against the server"],
    showDefaultValues = true
)
class BenchmarkCommand : RbcsCommand() {
    companion object {
        private val log = createLogger<BenchmarkCommand>()
    }

    @CommandLine.Spec
    private lateinit var spec: CommandLine.Model.CommandSpec

    @CommandLine.Option(
        names = ["-e", "--entries"],
        description = ["Total number of elements to be added to the cache"],
        paramLabel = "NUMBER_OF_ENTRIES"
    )
    private var numberOfEntries = 1000

    @CommandLine.Option(
        names = ["-s", "--size"],
        description = ["Size of a cache value in bytes"],
        paramLabel = "SIZE",
        converter = [ByteSizeConverter::class]
    )
    private var size = 0x1000

    @CommandLine.Option(
        names = ["-r", "--random"],
        description = ["Insert completely random byte values"]
    )
    private var randomValues = false

    override fun run() {
        val clientCommand = spec.parent().userObject() as ClientCommand
        val profile = clientCommand.profileName.let { profileName ->
            clientCommand.configuration.profiles[profileName]
                ?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
        }
        // Emit a progress line roughly every 5% of the workload
        val progressThreshold = LongMath.ceilDiv(numberOfEntries.toLong(), 20)
        RemoteBuildCacheClient(profile).use { client ->

            // Infinite stream of (key, value) pairs; keys are 16 random bytes rendered as hex
            val entryGenerator = sequence {
                val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
                while (true) {
                    val key = JWO.bytesToHex(random.nextBytes(16))
                    val value = if (randomValues) {
                        random.nextBytes(size)
                    } else {
                        // Highly compressible payload: a single repeated byte
                        val byteValue = random.nextInt().toByte()
                        ByteArray(size) { byteValue }
                    }
                    yield(key to value)
                }
            }

            log.info {
                "Starting insertion"
            }
            val entries = let {
                val completionCounter = AtomicLong(0)
                // Collects the entries that were successfully inserted, so the
                // retrieval phase only queries keys known to exist
                val completionQueue = LinkedBlockingQueue<Pair<String, ByteArray>>(numberOfEntries)
                val start = Instant.now()
                // Bound in-flight requests to a small multiple of the connection pool size
                val semaphore = Semaphore(profile.maxConnections * 5)
                val entryIterator = entryGenerator.take(numberOfEntries).iterator()
                while (completionCounter.get() < numberOfEntries) {
                    if (entryIterator.hasNext()) {
                        val entry = entryIterator.next()
                        semaphore.acquire()
                        val future = client.put(entry.first, entry.second, CacheValueMetadata(null, null)).thenApply { entry }
                        future.whenComplete { result, ex ->
                            if (ex != null) {
                                log.error(ex.message, ex)
                            } else {
                                completionQueue.put(result)
                            }
                            semaphore.release()
                            val completed = completionCounter.incrementAndGet()
                            if (completed.mod(progressThreshold) == 0L) {
                                log.debug {
                                    "Inserted $completed / $numberOfEntries"
                                }
                            }
                        }
                    } else {
                        // All requests submitted: wait for outstanding completions
                        Thread.sleep(Duration.of(500, ChronoUnit.MILLIS))
                    }
                }

                val inserted = completionQueue.toList()
                val end = Instant.now()
                log.info {
                    val elapsed = Duration.between(start, end).toMillis()
                    val opsPerSecond = String.format("%.2f", numberOfEntries.toDouble() / elapsed * 1000)
                    "Insertion rate: $opsPerSecond ops/s"
                }
                inserted
            }
            log.info {
                "Inserted ${entries.size} entries"
            }
            log.info {
                "Starting retrieval"
            }
            if (entries.isNotEmpty()) {
                val completionCounter = AtomicLong(0)
                val semaphore = Semaphore(profile.maxConnections * 5)
                val start = Instant.now()
                // NOTE: this was previously named `it`, which the implicit lambda
                // parameter of `thenApply` silently shadowed — renamed for clarity
                val entryIterator = entries.iterator()
                while (completionCounter.get() < entries.size) {
                    if (entryIterator.hasNext()) {
                        val entry = entryIterator.next()
                        semaphore.acquire()
                        val future = client.get(entry.first).thenApply { retrieved ->
                            if (retrieved == null) {
                                log.error {
                                    "Missing entry for key '${entry.first}'"
                                }
                            } else if (!entry.second.contentEquals(retrieved)) {
                                log.error {
                                    "Retrieved a value different from what was inserted for key '${entry.first}'"
                                }
                            }
                        }
                        future.whenComplete { _, _ ->
                            val completed = completionCounter.incrementAndGet()
                            if (completed.mod(progressThreshold) == 0L) {
                                log.debug {
                                    "Retrieved $completed / ${entries.size}"
                                }
                            }
                            semaphore.release()
                        }
                    } else {
                        Thread.sleep(Duration.of(500, ChronoUnit.MILLIS))
                    }
                }
                val end = Instant.now()
                log.info {
                    val elapsed = Duration.between(start, end).toMillis()
                    val opsPerSecond = String.format("%.2f", entries.size.toDouble() / elapsed * 1000)
                    "Retrieval rate: $opsPerSecond ops/s"
                }
            } else {
                log.error("Skipping retrieval benchmark as it was not possible to insert any entry in the cache")
            }
        }
    }
}
|
|
@@ -1,41 +0,0 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.commands
|
|
||||||
|
|
||||||
import net.woggioni.jwo.Application
|
|
||||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
|
||||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
|
||||||
import picocli.CommandLine
|
|
||||||
import java.nio.file.Path
|
|
||||||
|
|
||||||
/**
 * Parent command grouping all RBCS client subcommands.
 * When invoked without a subcommand it lists the profiles
 * available in the configuration file.
 */
@CommandLine.Command(
    name = "client",
    description = ["RBCS client"],
    showDefaultValues = true
)
class ClientCommand(app: Application) : RbcsCommand() {

    @CommandLine.Option(
        names = ["-c", "--configuration"],
        description = ["Path to the client configuration file"],
        paramLabel = "CONFIGURATION_FILE"
    )
    private var configurationFile: Path = findConfigurationFile(app, "rbcs-client.xml")

    @CommandLine.Option(
        names = ["-p", "--profile"],
        description = ["Name of the client profile to be used"],
        paramLabel = "PROFILE",
        required = true
    )
    var profileName: String? = null

    // Parsed lazily so subcommands that never touch it do not pay the parsing cost
    val configuration: RemoteBuildCacheClient.Configuration by lazy {
        RemoteBuildCacheClient.Configuration.parse(configurationFile)
    }

    override fun run() {
        println("Available profiles:")
        for ((name, _) in configuration.profiles) {
            println(name)
        }
    }
}
|
|
@@ -1,53 +0,0 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.commands
|
|
||||||
|
|
||||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
|
||||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
|
||||||
import net.woggioni.rbcs.common.createLogger
|
|
||||||
import picocli.CommandLine
|
|
||||||
import java.nio.file.Files
|
|
||||||
import java.nio.file.Path
|
|
||||||
|
|
||||||
/**
 * Fetches the value stored under a key and writes it to a file or to stdout.
 */
@CommandLine.Command(
    name = "get",
    description = ["Fetch a value from the cache with the specified key"],
    showDefaultValues = true
)
class GetCommand : RbcsCommand() {
    companion object {
        private val log = createLogger<GetCommand>()
    }

    @CommandLine.Spec
    private lateinit var spec: CommandLine.Model.CommandSpec

    @CommandLine.Option(
        names = ["-k", "--key"],
        description = ["The key for the new value"],
        paramLabel = "KEY"
    )
    private var key: String = ""

    @CommandLine.Option(
        names = ["-v", "--value"],
        description = ["Path to a file where the retrieved value will be written (defaults to stdout)"],
        paramLabel = "VALUE_FILE",
    )
    private var output: Path? = null

    override fun run() {
        val clientCommand = spec.parent().userObject() as ClientCommand
        val profile = clientCommand.profileName.let { profileName ->
            clientCommand.configuration.profiles[profileName]
                ?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
        }
        RemoteBuildCacheClient(profile).use { client ->
            client.get(key).thenApply { value ->
                value ?: throw NoSuchElementException("No value found for key $key")
                val destination = output
                if (destination != null) {
                    Files.newOutputStream(destination).use {
                        it.write(value)
                    }
                } else {
                    // Do not wrap System.out in `use`: closing the process-wide
                    // stdout stream would break any subsequent output
                    System.out.write(value)
                    System.out.flush()
                }
            }.get()
        }
    }
}
|
|
@@ -1,48 +0,0 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.commands
|
|
||||||
|
|
||||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
|
||||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
|
||||||
import net.woggioni.rbcs.common.createLogger
|
|
||||||
import picocli.CommandLine
|
|
||||||
import java.security.SecureRandom
|
|
||||||
import kotlin.random.Random
|
|
||||||
|
|
||||||
/**
 * Sends a random nonce to the server's health-check endpoint and verifies the
 * tail of the response echoes the nonce back, proving end-to-end connectivity.
 */
@CommandLine.Command(
    name = "health",
    description = ["Check server health"],
    showDefaultValues = true
)
class HealthCheckCommand : RbcsCommand() {
    companion object {
        private val log = createLogger<HealthCheckCommand>()
    }

    @CommandLine.Spec
    private lateinit var spec: CommandLine.Model.CommandSpec

    override fun run() {
        val clientCommand = spec.parent().userObject() as ClientCommand
        val profile = clientCommand.profileName.let { profileName ->
            clientCommand.configuration.profiles[profileName]
                ?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
        }
        RemoteBuildCacheClient(profile).use { client ->
            val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
            val nonce = ByteArray(0xa0)
            random.nextBytes(nonce)
            client.healthCheck(nonce).thenApply { value ->
                if (value == null) {
                    throw IllegalStateException("Empty response from server")
                }
                // Guard against a short reply: without this, `offset` would go
                // negative and the comparison loop would throw ArrayIndexOutOfBoundsException
                if (value.size < nonce.size) {
                    throw IllegalStateException("Server response is shorter than the nonce")
                }
                // The nonce is expected to be echoed at the end of the payload
                val offset = value.size - nonce.size
                for (i in nonce.indices) {
                    if (nonce[i] != value[offset + i]) {
                        throw IllegalStateException("Server nonce does not match")
                    }
                }
            }.get()
        }
    }
}
|
|
@@ -1,101 +0,0 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.commands
|
|
||||||
|
|
||||||
import net.woggioni.jwo.Hash
|
|
||||||
import net.woggioni.jwo.JWO
|
|
||||||
import net.woggioni.jwo.NullOutputStream
|
|
||||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
|
||||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
|
||||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
|
||||||
import net.woggioni.rbcs.common.createLogger
|
|
||||||
import picocli.CommandLine
|
|
||||||
import java.io.InputStream
|
|
||||||
import java.nio.file.Files
|
|
||||||
import java.nio.file.Path
|
|
||||||
import java.util.UUID
|
|
||||||
|
|
||||||
/**
 * Stores a value in the cache, reading it from a file or from stdin.
 * If no key is supplied, one is derived from the content digest (file input)
 * or generated randomly (stdin input); the resulting URL is printed.
 */
@CommandLine.Command(
    name = "put",
    description = ["Add or replace a value to the cache with the specified key"],
    showDefaultValues = true
)
class PutCommand : RbcsCommand() {
    companion object {
        private val log = createLogger<PutCommand>()
    }

    @CommandLine.Spec
    private lateinit var spec: CommandLine.Model.CommandSpec

    @CommandLine.Option(
        names = ["-k", "--key"],
        description = ["The key for the new value, randomly generated if omitted"],
        paramLabel = "KEY"
    )
    private var key: String? = null

    @CommandLine.Option(
        names = ["-i", "--inline"],
        description = ["File is to be displayed in the browser"],
        paramLabel = "INLINE",
    )
    private var inline: Boolean = false

    @CommandLine.Option(
        names = ["-t", "--type"],
        description = ["File mime type"],
        paramLabel = "MIME_TYPE",
    )
    private var mimeType: String? = null

    @CommandLine.Option(
        names = ["-v", "--value"],
        description = ["Path to a file containing the value to be added (defaults to stdin)"],
        paramLabel = "VALUE_FILE",
    )
    private var value: Path? = null

    override fun run() {
        val clientCommand = spec.parent().userObject() as ClientCommand
        val profile = clientCommand.profileName.let { profileName ->
            clientCommand.configuration.profiles[profileName]
                ?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
        }
        RemoteBuildCacheClient(profile).use { client ->
            val source: InputStream
            // Local renamed from `mimeType` to avoid shadowing the property of the same name
            val contentType: String?
            val contentDisposition: String?
            val actualKey: String?
            val valuePath = value
            if (valuePath != null) {
                source = Files.newInputStream(valuePath)
                contentType = this.mimeType ?: Files.probeContentType(valuePath)
                contentDisposition = when {
                    inline -> "inline"
                    else -> "attachment; filename=\"${valuePath.fileName}\""
                }
                actualKey = key ?: run {
                    // Derive a stable key from the SHA-512 digest of the file content
                    val md = Hash.Algorithm.SHA512.newInputStream(Files.newInputStream(valuePath)).use { digestStream ->
                        JWO.copy(digestStream, NullOutputStream())
                        digestStream.messageDigest
                    }
                    UUID.nameUUIDFromBytes(md.digest()).toString()
                }
            } else {
                source = System.`in`
                contentType = this.mimeType
                contentDisposition = when {
                    inline -> "inline"
                    else -> null
                }
                actualKey = key ?: UUID.randomUUID().toString()
            }
            source.use { stream ->
                client.put(actualKey, stream.readAllBytes(), CacheValueMetadata(contentDisposition, contentType))
            }.get()
            println(profile.serverURI.resolve(actualKey))
        }
    }
}
|
|
@@ -1,90 +0,0 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.commands
|
|
||||||
|
|
||||||
import net.woggioni.jwo.Application
|
|
||||||
import net.woggioni.jwo.JWO
|
|
||||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
|
||||||
import net.woggioni.rbcs.cli.impl.converters.DurationConverter
|
|
||||||
import net.woggioni.rbcs.common.createLogger
|
|
||||||
import net.woggioni.rbcs.common.debug
|
|
||||||
import net.woggioni.rbcs.common.info
|
|
||||||
import net.woggioni.rbcs.server.RemoteBuildCacheServer
|
|
||||||
import net.woggioni.rbcs.server.RemoteBuildCacheServer.Companion.DEFAULT_CONFIGURATION_URL
|
|
||||||
import picocli.CommandLine
|
|
||||||
import java.io.ByteArrayOutputStream
|
|
||||||
import java.nio.file.Files
|
|
||||||
import java.nio.file.Path
|
|
||||||
import java.time.Duration
|
|
||||||
import java.util.concurrent.TimeUnit
|
|
||||||
|
|
||||||
@CommandLine.Command(
|
|
||||||
name = "server",
|
|
||||||
description = ["RBCS server"],
|
|
||||||
showDefaultValues = true
|
|
||||||
)
|
|
||||||
class ServerCommand(app : Application) : RbcsCommand() {
|
|
||||||
companion object {
|
|
||||||
private val log = createLogger<ServerCommand>()
|
|
||||||
}
|
|
||||||
|
|
||||||
private fun createDefaultConfigurationFile(configurationFile: Path) {
|
|
||||||
log.info {
|
|
||||||
"Creating default configuration file at '$configurationFile'"
|
|
||||||
}
|
|
||||||
val defaultConfigurationFileResource = DEFAULT_CONFIGURATION_URL
|
|
||||||
Files.newOutputStream(configurationFile).use { outputStream ->
|
|
||||||
defaultConfigurationFileResource.openStream().use { inputStream ->
|
|
||||||
JWO.copy(inputStream, outputStream)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@CommandLine.Option(
|
|
||||||
names = ["-t", "--timeout"],
|
|
||||||
description = ["Exit after the specified time"],
|
|
||||||
paramLabel = "TIMEOUT",
|
|
||||||
converter = [DurationConverter::class]
|
|
||||||
)
|
|
||||||
private var timeout: Duration? = null
|
|
||||||
|
|
||||||
@CommandLine.Option(
|
|
||||||
names = ["-c", "--config-file"],
|
|
||||||
description = ["Read the application configuration from this file"],
|
|
||||||
paramLabel = "CONFIG_FILE"
|
|
||||||
)
|
|
||||||
private var configurationFile: Path = findConfigurationFile(app, "rbcs-server.xml")
|
|
||||||
|
|
||||||
override fun run() {
|
|
||||||
if (!Files.exists(configurationFile)) {
|
|
||||||
Files.createDirectories(configurationFile.parent)
|
|
||||||
createDefaultConfigurationFile(configurationFile)
|
|
||||||
}
|
|
||||||
|
|
||||||
log.debug {
|
|
||||||
"Using configuration file '$configurationFile'"
|
|
||||||
}
|
|
||||||
val configuration = RemoteBuildCacheServer.loadConfiguration(configurationFile)
|
|
||||||
log.debug {
|
|
||||||
ByteArrayOutputStream().also {
|
|
||||||
RemoteBuildCacheServer.dumpConfiguration(configuration, it)
|
|
||||||
}.let {
|
|
||||||
"Server configuration:\n${String(it.toByteArray())}"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
val server = RemoteBuildCacheServer(configuration)
|
|
||||||
val handle = server.run()
|
|
||||||
val shutdownHook = Thread.ofPlatform().unstarted {
|
|
||||||
handle.sendShutdownSignal()
|
|
||||||
try {
|
|
||||||
handle.get(60, TimeUnit.SECONDS)
|
|
||||||
} catch (ex : Throwable) {
|
|
||||||
log.warn(ex.message, ex)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Runtime.getRuntime().addShutdownHook(shutdownHook)
|
|
||||||
if(timeout != null) {
|
|
||||||
Thread.sleep(timeout)
|
|
||||||
handle.sendShutdownSignal()
|
|
||||||
}
|
|
||||||
handle.get()
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,10 +0,0 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.converters
|
|
||||||
|
|
||||||
import picocli.CommandLine
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * Converts a command-line argument into a byte count.
 * Accepts decimal, hexadecimal (`0x…`) and octal (`0…`) notation via [Integer.decode].
 */
class ByteSizeConverter : CommandLine.ITypeConverter<Int> {
    override fun convert(value: String): Int = Integer.decode(value)
}
|
|
@@ -1,11 +0,0 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.converters
|
|
||||||
|
|
||||||
import picocli.CommandLine
|
|
||||||
import java.time.Duration
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * Converts a command-line argument in ISO-8601 form (e.g. `PT30S`) into a [Duration].
 */
class DurationConverter : CommandLine.ITypeConverter<Duration> {
    override fun convert(value: String): Duration = Duration.parse(value)
}
|
|
@@ -1,13 +0,0 @@
|
|||||||
package net.woggioni.rbcs.cli.impl.converters
|
|
||||||
|
|
||||||
import picocli.CommandLine
|
|
||||||
import java.io.InputStream
|
|
||||||
import java.nio.file.Files
|
|
||||||
import java.nio.file.Paths
|
|
||||||
|
|
||||||
|
|
||||||
/**
 * Converts a command-line argument into an [InputStream] by opening the file at that path.
 * Note: the caller is responsible for closing the returned stream.
 */
class InputStreamConverter : CommandLine.ITypeConverter<InputStream> {
    override fun convert(value: String): InputStream =
        Files.newInputStream(Paths.get(value))
}
|
|
@@ -1,19 +0,0 @@
|
|||||||
// Build script for the rbcs-client module (Kotlin/JVM library)
plugins {
    id 'java-library'
    alias catalog.plugins.kotlin.jvm
}

dependencies {
    // Project modules providing the public API and shared utilities
    implementation project(':rbcs-api')
    implementation project(':rbcs-common')
    implementation catalog.slf4j.api
    // Netty networking stack used by the HTTP client
    implementation catalog.netty.buffer
    implementation catalog.netty.handler
    implementation catalog.netty.transport
    implementation catalog.netty.common
    implementation catalog.netty.codec.http

    // Logging backend is only needed when running tests
    testRuntimeOnly catalog.logback.classic
}
|
|
||||||
|
|
||||||
|
|
@@ -1,17 +0,0 @@
|
|||||||
// Module descriptor for the RBCS client library.
module net.woggioni.rbcs.client {
    // Netty networking stack used by the HTTP client implementation
    requires io.netty.handler;
    requires io.netty.codec.http;
    requires io.netty.transport;
    requires kotlin.stdlib;
    requires io.netty.common;
    requires io.netty.buffer;
    // XML parsing of the client configuration file
    requires java.xml;
    requires net.woggioni.rbcs.common;
    requires net.woggioni.rbcs.api;
    requires io.netty.codec;
    requires org.slf4j;

    // Public client API
    exports net.woggioni.rbcs.client;

    // Opened for reflective access to the bundled configuration schema resources
    opens net.woggioni.rbcs.client.schema;
}
|
|
@@ -1,457 +0,0 @@
|
|||||||
package net.woggioni.rbcs.client
|
|
||||||
|
|
||||||
import io.netty.bootstrap.Bootstrap
|
|
||||||
import io.netty.buffer.ByteBuf
|
|
||||||
import io.netty.buffer.Unpooled
|
|
||||||
import io.netty.channel.Channel
|
|
||||||
import io.netty.channel.ChannelHandler
|
|
||||||
import io.netty.channel.ChannelHandlerContext
|
|
||||||
import io.netty.channel.ChannelInboundHandlerAdapter
|
|
||||||
import io.netty.channel.ChannelOption
|
|
||||||
import io.netty.channel.ChannelPipeline
|
|
||||||
import io.netty.channel.SimpleChannelInboundHandler
|
|
||||||
import io.netty.channel.nio.NioEventLoopGroup
|
|
||||||
import io.netty.channel.pool.AbstractChannelPoolHandler
|
|
||||||
import io.netty.channel.pool.ChannelPool
|
|
||||||
import io.netty.channel.pool.FixedChannelPool
|
|
||||||
import io.netty.channel.socket.nio.NioSocketChannel
|
|
||||||
import io.netty.handler.codec.DecoderException
|
|
||||||
import io.netty.handler.codec.http.DefaultFullHttpRequest
|
|
||||||
import io.netty.handler.codec.http.FullHttpRequest
|
|
||||||
import io.netty.handler.codec.http.FullHttpResponse
|
|
||||||
import io.netty.handler.codec.http.HttpClientCodec
|
|
||||||
import io.netty.handler.codec.http.HttpContentDecompressor
|
|
||||||
import io.netty.handler.codec.http.HttpHeaderNames
|
|
||||||
import io.netty.handler.codec.http.HttpHeaderValues
|
|
||||||
import io.netty.handler.codec.http.HttpMethod
|
|
||||||
import io.netty.handler.codec.http.HttpObjectAggregator
|
|
||||||
import io.netty.handler.codec.http.HttpResponseStatus
|
|
||||||
import io.netty.handler.codec.http.HttpVersion
|
|
||||||
import io.netty.handler.ssl.SslContext
|
|
||||||
import io.netty.handler.ssl.SslContextBuilder
|
|
||||||
import io.netty.handler.stream.ChunkedWriteHandler
|
|
||||||
import io.netty.handler.timeout.IdleState
|
|
||||||
import io.netty.handler.timeout.IdleStateEvent
|
|
||||||
import io.netty.handler.timeout.IdleStateHandler
|
|
||||||
import io.netty.util.concurrent.Future
|
|
||||||
import io.netty.util.concurrent.GenericFutureListener
|
|
||||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
|
||||||
import net.woggioni.rbcs.client.impl.Parser
|
|
||||||
import net.woggioni.rbcs.common.Xml
|
|
||||||
import net.woggioni.rbcs.common.createLogger
|
|
||||||
import net.woggioni.rbcs.common.debug
|
|
||||||
import net.woggioni.rbcs.common.trace
|
|
||||||
import java.io.IOException
|
|
||||||
import java.net.InetSocketAddress
|
|
||||||
import java.net.URI
|
|
||||||
import java.nio.file.Files
|
|
||||||
import java.nio.file.Path
|
|
||||||
import java.security.PrivateKey
|
|
||||||
import java.security.cert.X509Certificate
|
|
||||||
import java.time.Duration
|
|
||||||
import java.util.Base64
|
|
||||||
import java.util.concurrent.CompletableFuture
|
|
||||||
import java.util.concurrent.TimeUnit
|
|
||||||
import java.util.concurrent.TimeoutException
|
|
||||||
import java.util.concurrent.atomic.AtomicInteger
|
|
||||||
import kotlin.random.Random
|
|
||||||
import io.netty.util.concurrent.Future as NettyFuture
|
|
||||||
|
|
||||||
class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoCloseable {
|
|
||||||
companion object{
|
|
||||||
private val log = createLogger<RemoteBuildCacheClient>()
|
|
||||||
}
|
|
||||||
|
|
||||||
private val group: NioEventLoopGroup
|
|
||||||
private var sslContext: SslContext
|
|
||||||
private val pool: ChannelPool
|
|
||||||
|
|
||||||
/**
 * Client configuration: a set of named connection profiles parsed from an XML file.
 */
data class Configuration(
    val profiles: Map<String, Profile>
) {
    /** Closed hierarchy of supported authentication mechanisms. */
    sealed class Authentication {
        // NOTE(review): Array property in a data class makes generated equals/hashCode
        // use reference identity for certificateChain — confirm structural equality
        // is not relied upon anywhere
        data class TlsClientAuthenticationCredentials(
            val key: PrivateKey,
            val certificateChain: Array<X509Certificate>
        ) : Authentication()

        data class BasicAuthenticationCredentials(val username: String, val password: String) : Authentication()
    }

    /**
     * Retry settings: up to [maxAttempts] attempts with exponential backoff starting
     * at [initialDelayMillis] and growing by factor [exp].
     */
    class RetryPolicy(
        val maxAttempts: Int,
        val initialDelayMillis: Long,
        val exp: Double
    )

    /** Per-connection timeout settings (see the IdleStateHandlers installed in the pipeline). */
    class Connection(
        val readTimeout: Duration,
        val writeTimeout: Duration,
        val idleTimeout: Duration,
        val readIdleTimeout: Duration,
        val writeIdleTimeout: Duration
    )

    /** A single named server profile selectable with the `--profile` CLI option. */
    data class Profile(
        val serverURI: URI,
        val connection: Connection?,
        val authentication: Authentication?,
        val connectionTimeout: Duration?,
        val maxConnections: Int,
        val compressionEnabled: Boolean,
        val retryPolicy: RetryPolicy?,
    )

    companion object {
        /** Parses the XML configuration file at [path] into a [Configuration]. */
        fun parse(path: Path): Configuration {
            return Files.newInputStream(path).use {
                Xml.parseXml(path.toUri().toURL(), it)
            }.let(Parser::parse)
        }
    }
}
|
|
||||||
|
|
||||||
init {
    group = NioEventLoopGroup()
    // Build the client TLS context, attaching client-certificate credentials when configured
    sslContext = SslContextBuilder.forClient().also { builder ->
        (profile.authentication as? Configuration.Authentication.TlsClientAuthenticationCredentials)?.let { tlsClientAuthenticationCredentials ->
            builder.keyManager(
                tlsClientAuthenticationCredentials.key,
                *tlsClientAuthenticationCredentials.certificateChain
            )
        }
    }.build()

    // Normalize scheme/host/port from the profile URI, filling in protocol defaults
    val (scheme, host, port) = profile.serverURI.run {
        Triple(
            if (scheme == null) "http" else profile.serverURI.scheme,
            host,
            port.takeIf { it > 0 } ?: if ("https" == scheme.lowercase()) 443 else 80
        )
    }

    val bootstrap = Bootstrap().apply {
        group(group)
        channel(NioSocketChannel::class.java)
        option(ChannelOption.TCP_NODELAY, true)
        option(ChannelOption.SO_KEEPALIVE, true)
        remoteAddress(InetSocketAddress(host, port))
        profile.connectionTimeout?.let {
            option(ChannelOption.CONNECT_TIMEOUT_MILLIS, it.toMillis().toInt())
        }
    }
    // Pool handler: instruments lease/connection counts and assembles the
    // HTTP/TLS pipeline for every newly created channel
    val channelPoolHandler = object : AbstractChannelPoolHandler() {

        @Volatile
        private var connectionCount = AtomicInteger()

        @Volatile
        private var leaseCount = AtomicInteger()

        override fun channelReleased(ch: Channel) {
            val activeLeases = leaseCount.decrementAndGet()
            log.trace {
                "Released channel ${ch.id().asShortText()}, number of active leases: $activeLeases"
            }
        }

        override fun channelAcquired(ch: Channel) {
            val activeLeases = leaseCount.getAndIncrement()
            log.trace {
                "Acquired channel ${ch.id().asShortText()}, number of active leases: $activeLeases"
            }
        }

        override fun channelCreated(ch: Channel) {
            val connectionId = connectionCount.incrementAndGet()
            log.debug {
                "Created connection ${ch.id().asShortText()}, total number of active connections: $connectionId"
            }
            ch.closeFuture().addListener {
                val activeConnections = connectionCount.decrementAndGet()
                log.debug {
                    "Closed connection ${
                        ch.id().asShortText()
                    }, total number of active connections: $activeConnections"
                }
            }
            val pipeline: ChannelPipeline = ch.pipeline()

            profile.connection?.also { conn ->
                // First IdleStateHandler (observeOutput=false): hard read/write timeouts
                val readTimeout = conn.readTimeout.toMillis()
                val writeTimeout = conn.writeTimeout.toMillis()
                if (readTimeout > 0 || writeTimeout > 0) {
                    pipeline.addLast(
                        IdleStateHandler(
                            false,
                            readTimeout,
                            writeTimeout,
                            0,
                            TimeUnit.MILLISECONDS
                        )
                    )
                }
                // Second IdleStateHandler (observeOutput=true): idle-connection timeouts
                val readIdleTimeout = conn.readIdleTimeout.toMillis()
                val writeIdleTimeout = conn.writeIdleTimeout.toMillis()
                val idleTimeout = conn.idleTimeout.toMillis()
                if (readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
                    pipeline.addLast(
                        IdleStateHandler(
                            true,
                            readIdleTimeout,
                            writeIdleTimeout,
                            idleTimeout,
                            TimeUnit.MILLISECONDS
                        )
                    )
                }
            }

            // Add SSL handler if needed
            if ("https".equals(scheme, ignoreCase = true)) {
                pipeline.addLast("ssl", sslContext.newHandler(ch.alloc(), host, port))
            }

            // HTTP handlers; the aggregator caps full responses at 128 MiB
            pipeline.addLast("codec", HttpClientCodec())
            if(profile.compressionEnabled) {
                pipeline.addLast("decompressor", HttpContentDecompressor())
            }
            pipeline.addLast("aggregator", HttpObjectAggregator(134217728))
            pipeline.addLast("chunked", ChunkedWriteHandler())
        }
    }
    pool = FixedChannelPool(bootstrap, channelPoolHandler, profile.maxConnections)
}
|
|
||||||
|
|
||||||
/**
 * Runs [operation], retrying it according to the profile's [Configuration.RetryPolicy]
 * when the response is 429/500/503 or the operation fails outright.
 * When no retry policy is configured the operation runs exactly once.
 */
private fun executeWithRetry(operation: () -> CompletableFuture<FullHttpResponse>): CompletableFuture<FullHttpResponse> {
    val retryPolicy = profile.retryPolicy
    return if (retryPolicy != null) {
        // Classifies each outcome as retryable or final
        val outcomeHandler = OutcomeHandler<FullHttpResponse> { outcome ->
            when (outcome) {
                is OperationOutcome.Success -> {
                    val response = outcome.result
                    val status = response.status()
                    when (status) {
                        HttpResponseStatus.TOO_MANY_REQUESTS -> {
                            // Honor the server's Retry-After header (seconds → millis) when parseable
                            val retryAfter = response.headers()[HttpHeaderNames.RETRY_AFTER]?.let { headerValue ->
                                try {
                                    headerValue.toLong() * 1000
                                } catch (nfe: NumberFormatException) {
                                    null
                                }
                            }
                            OutcomeHandlerResult.Retry(retryAfter)
                        }

                        // Transient server errors: retry with default backoff
                        HttpResponseStatus.INTERNAL_SERVER_ERROR, HttpResponseStatus.SERVICE_UNAVAILABLE ->
                            OutcomeHandlerResult.Retry()

                        else -> OutcomeHandlerResult.DoNotRetry()
                    }
                }

                // Network-level failures are always considered retryable
                is OperationOutcome.Failure -> {
                    OutcomeHandlerResult.Retry()
                }
            }
        }
        // Delegates to the free-standing retry helper with exponential backoff parameters
        executeWithRetry(
            group,
            retryPolicy.maxAttempts,
            retryPolicy.initialDelayMillis.toDouble(),
            retryPolicy.exp,
            outcomeHandler,
            Random.Default,
            operation
        )
    } else {
        operation()
    }
}
|
|
||||||
|
|
||||||
/**
 * Sends a TRACE request carrying [nonce] to the server root and returns the
 * response payload (expected to echo the nonce), or fails with [HttpException]
 * on any non-200 status.
 */
fun healthCheck(nonce: ByteArray): CompletableFuture<ByteArray?> {
    return executeWithRetry {
        sendRequest(profile.serverURI, HttpMethod.TRACE, nonce)
    }.thenApply { response ->
        val status = response.status()
        if (status != HttpResponseStatus.OK) {
            throw HttpException(status)
        }
        response.content()
    }.thenApply { maybeByteBuf ->
        maybeByteBuf?.let { buf ->
            val result = ByteArray(buf.readableBytes())
            // Copy starting at the buffer's readerIndex rather than absolute
            // offset 0: the two only coincide when nothing has been read from
            // the buffer, so reading from 0 is a latent bug
            buf.getBytes(buf.readerIndex(), result)
            result
        }
    }
}
|
|
||||||
|
|
||||||
/**
 * Fetches the value stored under [key].
 * Completes with `null` on 404, the payload bytes on 200, and fails with
 * [HttpException] on any other status.
 */
fun get(key: String): CompletableFuture<ByteArray?> {
    return executeWithRetry {
        sendRequest(profile.serverURI.resolve(key), HttpMethod.GET, null)
    }.thenApply { response ->
        val status = response.status()
        when {
            status == HttpResponseStatus.NOT_FOUND -> null
            status != HttpResponseStatus.OK -> throw HttpException(status)
            else -> response.content()
        }
    }.thenApply { maybeByteBuf ->
        maybeByteBuf?.let { buf ->
            val result = ByteArray(buf.readableBytes())
            // Copy from readerIndex, not absolute offset 0 — reading from 0 is
            // only correct while the buffer has never been read from
            buf.getBytes(buf.readerIndex(), result)
            result
        }
    }
}
|
|
||||||
|
|
||||||
/**
 * Stores [content] under [key], forwarding the metadata's mime type and
 * content disposition as HTTP headers when present.
 * Fails with [HttpException] unless the server answers 200 or 201.
 */
fun put(key: String, content: ByteArray, metadata: CacheValueMetadata): CompletableFuture<Unit> {
    return executeWithRetry {
        val extraHeaders = buildList<Pair<CharSequence, CharSequence>> {
            metadata.mimeType?.let { add(HttpHeaderNames.CONTENT_TYPE to it) }
            metadata.contentDisposition?.let { add(HttpHeaderNames.CONTENT_DISPOSITION to it) }
        }
        sendRequest(profile.serverURI.resolve(key), HttpMethod.PUT, content, extraHeaders)
    }.thenApply { response ->
        val status = response.status()
        if (status != HttpResponseStatus.CREATED && status != HttpResponseStatus.OK) {
            throw HttpException(status)
        }
    }
}
|
|
||||||
|
|
||||||
private fun sendRequest(
|
|
||||||
uri: URI,
|
|
||||||
method: HttpMethod,
|
|
||||||
body: ByteArray?,
|
|
||||||
extraHeaders: Iterable<Pair<CharSequence, CharSequence>>? = null
|
|
||||||
): CompletableFuture<FullHttpResponse> {
|
|
||||||
val responseFuture = CompletableFuture<FullHttpResponse>()
|
|
||||||
// Custom handler for processing responses
|
|
||||||
|
|
||||||
pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {
|
|
||||||
private val handlers = mutableListOf<ChannelHandler>()
|
|
||||||
|
|
||||||
fun cleanup(channel: Channel, pipeline: ChannelPipeline) {
|
|
||||||
handlers.forEach(pipeline::remove)
|
|
||||||
pool.release(channel)
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun operationComplete(channelFuture: Future<Channel>) {
|
|
||||||
if (channelFuture.isSuccess) {
|
|
||||||
val channel = channelFuture.now
|
|
||||||
val pipeline = channel.pipeline()
|
|
||||||
val timeoutHandler = object : ChannelInboundHandlerAdapter() {
|
|
||||||
override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
|
|
||||||
if (evt is IdleStateEvent) {
|
|
||||||
val te = when (evt.state()) {
|
|
||||||
IdleState.READER_IDLE -> TimeoutException(
|
|
||||||
"Read timeout",
|
|
||||||
)
|
|
||||||
|
|
||||||
IdleState.WRITER_IDLE -> TimeoutException("Write timeout")
|
|
||||||
|
|
||||||
IdleState.ALL_IDLE -> TimeoutException("Idle timeout")
|
|
||||||
null -> throw IllegalStateException("This should never happen")
|
|
||||||
}
|
|
||||||
responseFuture.completeExceptionally(te)
|
|
||||||
ctx.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
val closeListener = GenericFutureListener<Future<Void>> {
|
|
||||||
responseFuture.completeExceptionally(IOException("The remote server closed the connection"))
|
|
||||||
pool.release(channel)
|
|
||||||
}
|
|
||||||
|
|
||||||
val responseHandler = object : SimpleChannelInboundHandler<FullHttpResponse>() {
|
|
||||||
override fun channelRead0(
|
|
||||||
ctx: ChannelHandlerContext,
|
|
||||||
response: FullHttpResponse
|
|
||||||
) {
|
|
||||||
channel.closeFuture().removeListener(closeListener)
|
|
||||||
cleanup(channel, pipeline)
|
|
||||||
responseFuture.complete(response)
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
|
||||||
ctx.newPromise()
|
|
||||||
val ex = when (cause) {
|
|
||||||
is DecoderException -> cause.cause
|
|
||||||
else -> cause
|
|
||||||
}
|
|
||||||
responseFuture.completeExceptionally(ex)
|
|
||||||
ctx.close()
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun channelInactive(ctx: ChannelHandlerContext) {
|
|
||||||
pool.release(channel)
|
|
||||||
responseFuture.completeExceptionally(IOException("The remote server closed the connection"))
|
|
||||||
super.channelInactive(ctx)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
for (handler in arrayOf(timeoutHandler, responseHandler)) {
|
|
||||||
handlers.add(handler)
|
|
||||||
}
|
|
||||||
pipeline.addLast(timeoutHandler, responseHandler)
|
|
||||||
channel.closeFuture().addListener(closeListener)
|
|
||||||
|
|
||||||
|
|
||||||
// Prepare the HTTP request
|
|
||||||
val request: FullHttpRequest = let {
|
|
||||||
val content: ByteBuf? = body?.takeIf(ByteArray::isNotEmpty)?.let(Unpooled::wrappedBuffer)
|
|
||||||
DefaultFullHttpRequest(
|
|
||||||
HttpVersion.HTTP_1_1,
|
|
||||||
method,
|
|
||||||
uri.rawPath,
|
|
||||||
content ?: Unpooled.buffer(0)
|
|
||||||
).apply {
|
|
||||||
headers().apply {
|
|
||||||
if (content != null) {
|
|
||||||
set(HttpHeaderNames.CONTENT_LENGTH, content.readableBytes())
|
|
||||||
}
|
|
||||||
set(HttpHeaderNames.HOST, profile.serverURI.host)
|
|
||||||
set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
|
|
||||||
if(profile.compressionEnabled) {
|
|
||||||
set(
|
|
||||||
HttpHeaderNames.ACCEPT_ENCODING,
|
|
||||||
HttpHeaderValues.GZIP.toString() + "," + HttpHeaderValues.DEFLATE.toString()
|
|
||||||
)
|
|
||||||
}
|
|
||||||
extraHeaders?.forEach { (k, v) ->
|
|
||||||
add(k, v)
|
|
||||||
}
|
|
||||||
// Add basic auth if configured
|
|
||||||
(profile.authentication as? Configuration.Authentication.BasicAuthenticationCredentials)?.let { credentials ->
|
|
||||||
val auth = "${credentials.username}:${credentials.password}"
|
|
||||||
val encodedAuth = Base64.getEncoder().encodeToString(auth.toByteArray())
|
|
||||||
set(HttpHeaderNames.AUTHORIZATION, "Basic $encodedAuth")
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Set headers
|
|
||||||
// Send the request
|
|
||||||
channel.writeAndFlush(request)
|
|
||||||
} else {
|
|
||||||
responseFuture.completeExceptionally(channelFuture.cause())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
return responseFuture
|
|
||||||
}
|
|
||||||
|
|
||||||
fun shutDown(): NettyFuture<*> {
|
|
||||||
return group.shutdownGracefully()
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun close() {
|
|
||||||
shutDown().sync()
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,9 +0,0 @@
|
|||||||
package net.woggioni.rbcs.client
|
|
||||||
|
|
||||||
import io.netty.handler.codec.http.HttpResponseStatus
|
|
||||||
|
|
||||||
class HttpException(private val status : HttpResponseStatus) : RuntimeException(status.reasonPhrase()) {
|
|
||||||
|
|
||||||
override val message: String
|
|
||||||
get() = "Http status ${status.code()}: ${status.reasonPhrase()}"
|
|
||||||
}
|
|
@@ -1,136 +0,0 @@
|
|||||||
package net.woggioni.rbcs.client.impl
|
|
||||||
|
|
||||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
|
||||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
|
||||||
import net.woggioni.rbcs.common.Xml.Companion.asIterable
|
|
||||||
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
|
|
||||||
import org.w3c.dom.Document
|
|
||||||
import java.net.URI
|
|
||||||
import java.nio.file.Files
|
|
||||||
import java.nio.file.Path
|
|
||||||
import java.security.KeyStore
|
|
||||||
import java.security.PrivateKey
|
|
||||||
import java.security.cert.X509Certificate
|
|
||||||
import java.time.Duration
|
|
||||||
import java.time.temporal.ChronoUnit
|
|
||||||
|
|
||||||
object Parser {
|
|
||||||
|
|
||||||
fun parse(document: Document): RemoteBuildCacheClient.Configuration {
|
|
||||||
val root = document.documentElement
|
|
||||||
val profiles = mutableMapOf<String, RemoteBuildCacheClient.Configuration.Profile>()
|
|
||||||
|
|
||||||
for (child in root.asIterable()) {
|
|
||||||
val tagName = child.localName
|
|
||||||
when (tagName) {
|
|
||||||
"profile" -> {
|
|
||||||
val name =
|
|
||||||
child.renderAttribute("name") ?: throw ConfigurationException("name attribute is required")
|
|
||||||
val uri = child.renderAttribute("base-url")?.let(::URI)
|
|
||||||
?: throw ConfigurationException("base-url attribute is required")
|
|
||||||
var authentication: RemoteBuildCacheClient.Configuration.Authentication? = null
|
|
||||||
var retryPolicy: RemoteBuildCacheClient.Configuration.RetryPolicy? = null
|
|
||||||
var connection : RemoteBuildCacheClient.Configuration.Connection? = null
|
|
||||||
for (gchild in child.asIterable()) {
|
|
||||||
when (gchild.localName) {
|
|
||||||
"tls-client-auth" -> {
|
|
||||||
val keyStoreFile = gchild.renderAttribute("key-store-file")
|
|
||||||
val keyStorePassword =
|
|
||||||
gchild.renderAttribute("key-store-password")
|
|
||||||
val keyAlias = gchild.renderAttribute("key-alias")
|
|
||||||
val keyPassword = gchild.renderAttribute("key-password")
|
|
||||||
|
|
||||||
val keystore = KeyStore.getInstance("PKCS12").apply {
|
|
||||||
Files.newInputStream(Path.of(keyStoreFile)).use {
|
|
||||||
load(it, keyStorePassword?.toCharArray())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
val key = keystore.getKey(keyAlias, keyPassword?.toCharArray()) as PrivateKey
|
|
||||||
val certChain = keystore.getCertificateChain(keyAlias).asSequence()
|
|
||||||
.map { it as X509Certificate }
|
|
||||||
.toList()
|
|
||||||
.toTypedArray()
|
|
||||||
authentication =
|
|
||||||
RemoteBuildCacheClient.Configuration.Authentication.TlsClientAuthenticationCredentials(
|
|
||||||
key,
|
|
||||||
certChain
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
"basic-auth" -> {
|
|
||||||
val username = gchild.renderAttribute("user")
|
|
||||||
?: throw ConfigurationException("username attribute is required")
|
|
||||||
val password = gchild.renderAttribute("password")
|
|
||||||
?: throw ConfigurationException("password attribute is required")
|
|
||||||
authentication =
|
|
||||||
RemoteBuildCacheClient.Configuration.Authentication.BasicAuthenticationCredentials(
|
|
||||||
username,
|
|
||||||
password
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
"retry-policy" -> {
|
|
||||||
val maxAttempts =
|
|
||||||
gchild.renderAttribute("max-attempts")
|
|
||||||
?.let(String::toInt)
|
|
||||||
?: throw ConfigurationException("max-attempts attribute is required")
|
|
||||||
val initialDelay =
|
|
||||||
gchild.renderAttribute("initial-delay")
|
|
||||||
?.let(Duration::parse)
|
|
||||||
?: Duration.ofSeconds(1)
|
|
||||||
val exp =
|
|
||||||
gchild.renderAttribute("exp")
|
|
||||||
?.let(String::toDouble)
|
|
||||||
?: 2.0f
|
|
||||||
retryPolicy = RemoteBuildCacheClient.Configuration.RetryPolicy(
|
|
||||||
maxAttempts,
|
|
||||||
initialDelay.toMillis(),
|
|
||||||
exp.toDouble()
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
"connection" -> {
|
|
||||||
val writeTimeout = gchild.renderAttribute("write-timeout")
|
|
||||||
?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
|
|
||||||
val readTimeout = gchild.renderAttribute("read-timeout")
|
|
||||||
?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
|
|
||||||
val idleTimeout = gchild.renderAttribute("idle-timeout")
|
|
||||||
?.let(Duration::parse) ?: Duration.of(30, ChronoUnit.SECONDS)
|
|
||||||
val readIdleTimeout = gchild.renderAttribute("read-idle-timeout")
|
|
||||||
?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
|
|
||||||
val writeIdleTimeout = gchild.renderAttribute("write-idle-timeout")
|
|
||||||
?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
|
|
||||||
connection = RemoteBuildCacheClient.Configuration.Connection(
|
|
||||||
readTimeout,
|
|
||||||
writeTimeout,
|
|
||||||
idleTimeout,
|
|
||||||
readIdleTimeout,
|
|
||||||
writeIdleTimeout,
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
val maxConnections = child.renderAttribute("max-connections")
|
|
||||||
?.let(String::toInt)
|
|
||||||
?: 50
|
|
||||||
val connectionTimeout = child.renderAttribute("connection-timeout")
|
|
||||||
?.let(Duration::parse)
|
|
||||||
val compressionEnabled = child.renderAttribute("enable-compression")
|
|
||||||
?.let(String::toBoolean)
|
|
||||||
?: true
|
|
||||||
|
|
||||||
profiles[name] = RemoteBuildCacheClient.Configuration.Profile(
|
|
||||||
uri,
|
|
||||||
connection,
|
|
||||||
authentication,
|
|
||||||
connectionTimeout,
|
|
||||||
maxConnections,
|
|
||||||
compressionEnabled,
|
|
||||||
retryPolicy
|
|
||||||
)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return RemoteBuildCacheClient.Configuration(profiles)
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,79 +0,0 @@
|
|||||||
package net.woggioni.rbcs.client
|
|
||||||
|
|
||||||
import io.netty.util.concurrent.EventExecutorGroup
|
|
||||||
import java.util.concurrent.CompletableFuture
|
|
||||||
import java.util.concurrent.TimeUnit
|
|
||||||
import kotlin.math.pow
|
|
||||||
import kotlin.random.Random
|
|
||||||
|
|
||||||
sealed class OperationOutcome<T> {
|
|
||||||
class Success<T>(val result: T) : OperationOutcome<T>()
|
|
||||||
class Failure<T>(val ex: Throwable) : OperationOutcome<T>()
|
|
||||||
}
|
|
||||||
|
|
||||||
sealed class OutcomeHandlerResult {
|
|
||||||
class Retry(val suggestedDelayMillis: Long? = null) : OutcomeHandlerResult()
|
|
||||||
class DoNotRetry : OutcomeHandlerResult()
|
|
||||||
}
|
|
||||||
|
|
||||||
fun interface OutcomeHandler<T> {
|
|
||||||
fun shouldRetry(result: OperationOutcome<T>): OutcomeHandlerResult
|
|
||||||
}
|
|
||||||
|
|
||||||
fun <T> executeWithRetry(
|
|
||||||
eventExecutorGroup: EventExecutorGroup,
|
|
||||||
maxAttempts: Int,
|
|
||||||
initialDelay: Double,
|
|
||||||
exp: Double,
|
|
||||||
outcomeHandler: OutcomeHandler<T>,
|
|
||||||
randomizer : Random?,
|
|
||||||
cb: () -> CompletableFuture<T>
|
|
||||||
): CompletableFuture<T> {
|
|
||||||
|
|
||||||
val finalResult = cb()
|
|
||||||
var future = finalResult
|
|
||||||
var shortCircuit = false
|
|
||||||
for (i in 1 until maxAttempts) {
|
|
||||||
future = future.handle { result, ex ->
|
|
||||||
val operationOutcome = if (ex == null) {
|
|
||||||
OperationOutcome.Success(result)
|
|
||||||
} else {
|
|
||||||
OperationOutcome.Failure(ex.cause ?: ex)
|
|
||||||
}
|
|
||||||
if (shortCircuit) {
|
|
||||||
when(operationOutcome) {
|
|
||||||
is OperationOutcome.Failure -> throw operationOutcome.ex
|
|
||||||
is OperationOutcome.Success -> CompletableFuture.completedFuture(operationOutcome.result)
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
when(val outcomeHandlerResult = outcomeHandler.shouldRetry(operationOutcome)) {
|
|
||||||
is OutcomeHandlerResult.Retry -> {
|
|
||||||
val res = CompletableFuture<T>()
|
|
||||||
val delay = run {
|
|
||||||
val scheduledDelay = (initialDelay * exp.pow(i.toDouble()) * (1.0 + (randomizer?.nextDouble(-0.5, 0.5) ?: 0.0))).toLong()
|
|
||||||
outcomeHandlerResult.suggestedDelayMillis?.coerceAtMost(scheduledDelay) ?: scheduledDelay
|
|
||||||
}
|
|
||||||
eventExecutorGroup.schedule({
|
|
||||||
cb().handle { result, ex ->
|
|
||||||
if (ex == null) {
|
|
||||||
res.complete(result)
|
|
||||||
} else {
|
|
||||||
res.completeExceptionally(ex)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}, delay, TimeUnit.MILLISECONDS)
|
|
||||||
res
|
|
||||||
}
|
|
||||||
is OutcomeHandlerResult.DoNotRetry -> {
|
|
||||||
shortCircuit = true
|
|
||||||
when(operationOutcome) {
|
|
||||||
is OperationOutcome.Failure -> throw operationOutcome.ex
|
|
||||||
is OperationOutcome.Success -> CompletableFuture.completedFuture(operationOutcome.result)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}.thenCompose { it }
|
|
||||||
}
|
|
||||||
return future
|
|
||||||
}
|
|
@@ -1,60 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
|
||||||
<xs:schema targetNamespace="urn:net.woggioni.rbcs.client"
|
|
||||||
xmlns:xs="http://www.w3.org/2001/XMLSchema"
|
|
||||||
xmlns:rbcs-client="urn:net.woggioni.rbcs.client"
|
|
||||||
elementFormDefault="unqualified"
|
|
||||||
>
|
|
||||||
<xs:element name="profiles" type="rbcs-client:profilesType"/>
|
|
||||||
|
|
||||||
<xs:complexType name="profilesType">
|
|
||||||
<xs:sequence minOccurs="0">
|
|
||||||
<xs:element name="profile" type="rbcs-client:profileType" maxOccurs="unbounded"/>
|
|
||||||
</xs:sequence>
|
|
||||||
</xs:complexType>
|
|
||||||
|
|
||||||
<xs:complexType name="profileType">
|
|
||||||
<xs:sequence>
|
|
||||||
<xs:choice>
|
|
||||||
<xs:element name="no-auth" type="rbcs-client:noAuthType"/>
|
|
||||||
<xs:element name="basic-auth" type="rbcs-client:basicAuthType"/>
|
|
||||||
<xs:element name="tls-client-auth" type="rbcs-client:tlsClientAuthType"/>
|
|
||||||
</xs:choice>
|
|
||||||
<xs:element name="connection" type="rbcs-client:connectionType" minOccurs="0" />
|
|
||||||
<xs:element name="retry-policy" type="rbcs-client:retryType" minOccurs="0"/>
|
|
||||||
</xs:sequence>
|
|
||||||
<xs:attribute name="name" type="xs:token" use="required"/>
|
|
||||||
<xs:attribute name="base-url" type="xs:anyURI" use="required"/>
|
|
||||||
<xs:attribute name="max-connections" type="xs:positiveInteger" default="50"/>
|
|
||||||
<xs:attribute name="connection-timeout" type="xs:duration"/>
|
|
||||||
<xs:attribute name="enable-compression" type="xs:boolean" default="true"/>
|
|
||||||
</xs:complexType>
|
|
||||||
|
|
||||||
<xs:complexType name="connectionType">
|
|
||||||
<xs:attribute name="read-timeout" type="xs:duration" use="optional" default="PT0S"/>
|
|
||||||
<xs:attribute name="write-timeout" type="xs:duration" use="optional" default="PT0S"/>
|
|
||||||
<xs:attribute name="idle-timeout" type="xs:duration" use="optional" default="PT30S"/>
|
|
||||||
<xs:attribute name="read-idle-timeout" type="xs:duration" use="optional" default="PT60S"/>
|
|
||||||
<xs:attribute name="write-idle-timeout" type="xs:duration" use="optional" default="PT60S"/>
|
|
||||||
</xs:complexType>
|
|
||||||
|
|
||||||
<xs:complexType name="noAuthType"/>
|
|
||||||
|
|
||||||
<xs:complexType name="basicAuthType">
|
|
||||||
<xs:attribute name="user" type="xs:token" use="required"/>
|
|
||||||
<xs:attribute name="password" type="xs:string" use="required"/>
|
|
||||||
</xs:complexType>
|
|
||||||
|
|
||||||
<xs:complexType name="tlsClientAuthType">
|
|
||||||
<xs:attribute name="key-store-file" type="xs:anyURI" use="required"/>
|
|
||||||
<xs:attribute name="key-store-password" type="xs:string" use="required"/>
|
|
||||||
<xs:attribute name="key-alias" type="xs:token" use="required"/>
|
|
||||||
<xs:attribute name="key-password" type="xs:string" use="optional"/>
|
|
||||||
</xs:complexType>
|
|
||||||
|
|
||||||
<xs:complexType name="retryType">
|
|
||||||
<xs:attribute name="max-attempts" type="xs:positiveInteger" use="required"/>
|
|
||||||
<xs:attribute name="initial-delay" type="xs:duration" default="PT1S"/>
|
|
||||||
<xs:attribute name="exp" type="xs:double" default="2.0"/>
|
|
||||||
</xs:complexType>
|
|
||||||
|
|
||||||
</xs:schema>
|
|
@@ -1,148 +0,0 @@
|
|||||||
package net.woggioni.rbcs.client
|
|
||||||
|
|
||||||
import io.netty.util.concurrent.DefaultEventExecutorGroup
|
|
||||||
import io.netty.util.concurrent.EventExecutorGroup
|
|
||||||
import net.woggioni.rbcs.common.contextLogger
|
|
||||||
import org.junit.jupiter.api.Assertions
|
|
||||||
import org.junit.jupiter.api.extension.ExtensionContext
|
|
||||||
import org.junit.jupiter.params.ParameterizedTest
|
|
||||||
import org.junit.jupiter.params.provider.Arguments
|
|
||||||
import org.junit.jupiter.params.provider.ArgumentsProvider
|
|
||||||
import org.junit.jupiter.params.provider.ArgumentsSource
|
|
||||||
import java.util.concurrent.CompletableFuture
|
|
||||||
import java.util.stream.Stream
|
|
||||||
import kotlin.random.Random
|
|
||||||
|
|
||||||
class RetryTest {
|
|
||||||
|
|
||||||
data class TestArgs(
|
|
||||||
val seed: Int,
|
|
||||||
val maxAttempt: Int,
|
|
||||||
val initialDelay: Double,
|
|
||||||
val exp: Double,
|
|
||||||
)
|
|
||||||
|
|
||||||
class TestArguments : ArgumentsProvider {
|
|
||||||
override fun provideArguments(context: ExtensionContext): Stream<out Arguments> {
|
|
||||||
return Stream.of(
|
|
||||||
TestArgs(
|
|
||||||
seed = 101325,
|
|
||||||
maxAttempt = 5,
|
|
||||||
initialDelay = 50.0,
|
|
||||||
exp = 2.0,
|
|
||||||
),
|
|
||||||
TestArgs(
|
|
||||||
seed = 101325,
|
|
||||||
maxAttempt = 20,
|
|
||||||
initialDelay = 100.0,
|
|
||||||
exp = 1.1,
|
|
||||||
),
|
|
||||||
TestArgs(
|
|
||||||
seed = 123487,
|
|
||||||
maxAttempt = 20,
|
|
||||||
initialDelay = 100.0,
|
|
||||||
exp = 2.0,
|
|
||||||
),
|
|
||||||
TestArgs(
|
|
||||||
seed = 20082024,
|
|
||||||
maxAttempt = 10,
|
|
||||||
initialDelay = 100.0,
|
|
||||||
exp = 2.0,
|
|
||||||
)
|
|
||||||
).map {
|
|
||||||
object: Arguments {
|
|
||||||
override fun get() = arrayOf(it)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@ArgumentsSource(TestArguments::class)
|
|
||||||
@ParameterizedTest
|
|
||||||
fun test(testArgs: TestArgs) {
|
|
||||||
val log = contextLogger()
|
|
||||||
log.debug("Start")
|
|
||||||
val executor: EventExecutorGroup = DefaultEventExecutorGroup(1)
|
|
||||||
val attempts = mutableListOf<Pair<Long, OperationOutcome<Int>>>()
|
|
||||||
val outcomeHandler = OutcomeHandler<Int> { outcome ->
|
|
||||||
when(outcome) {
|
|
||||||
is OperationOutcome.Success -> {
|
|
||||||
if(outcome.result % 10 == 0) {
|
|
||||||
OutcomeHandlerResult.DoNotRetry()
|
|
||||||
} else {
|
|
||||||
OutcomeHandlerResult.Retry(null)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
is OperationOutcome.Failure -> {
|
|
||||||
when(outcome.ex) {
|
|
||||||
is IllegalStateException -> {
|
|
||||||
log.debug(outcome.ex.message, outcome.ex)
|
|
||||||
OutcomeHandlerResult.Retry(null)
|
|
||||||
}
|
|
||||||
else -> {
|
|
||||||
OutcomeHandlerResult.DoNotRetry()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
val random = Random(testArgs.seed)
|
|
||||||
|
|
||||||
val future =
|
|
||||||
executeWithRetry(executor, testArgs.maxAttempt, testArgs.initialDelay, testArgs.exp, outcomeHandler, null) {
|
|
||||||
val now = System.nanoTime()
|
|
||||||
val result = CompletableFuture<Int>()
|
|
||||||
executor.submit {
|
|
||||||
val n = random.nextInt(0, Integer.MAX_VALUE)
|
|
||||||
log.debug("Got new number: {}", n)
|
|
||||||
if(n % 3 == 0) {
|
|
||||||
val ex = IllegalStateException("Value $n can be divided by 3")
|
|
||||||
result.completeExceptionally(ex)
|
|
||||||
attempts += now to OperationOutcome.Failure(ex)
|
|
||||||
} else if(n % 7 == 0) {
|
|
||||||
val ex = RuntimeException("Value $n can be divided by 7")
|
|
||||||
result.completeExceptionally(ex)
|
|
||||||
attempts += now to OperationOutcome.Failure(ex)
|
|
||||||
} else {
|
|
||||||
result.complete(n)
|
|
||||||
attempts += now to OperationOutcome.Success(n)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
result
|
|
||||||
}
|
|
||||||
Assertions.assertTrue(attempts.size <= testArgs.maxAttempt)
|
|
||||||
val result = future.handle { res, ex ->
|
|
||||||
if(ex != null) {
|
|
||||||
val err = ex.cause ?: ex
|
|
||||||
log.debug(err.message, err)
|
|
||||||
OperationOutcome.Failure(err)
|
|
||||||
} else {
|
|
||||||
OperationOutcome.Success(res)
|
|
||||||
}
|
|
||||||
}.get()
|
|
||||||
for ((index, attempt) in attempts.withIndex()) {
|
|
||||||
val (timestamp, value) = attempt
|
|
||||||
if (index > 0) {
|
|
||||||
/* Check the delay for subsequent attempts is correct */
|
|
||||||
val previousAttempt = attempts[index - 1]
|
|
||||||
val expectedTimestamp =
|
|
||||||
previousAttempt.first + testArgs.initialDelay * Math.pow(testArgs.exp, index.toDouble()) * 1e6
|
|
||||||
val actualTimestamp = timestamp
|
|
||||||
val err = Math.abs(expectedTimestamp - actualTimestamp) / expectedTimestamp
|
|
||||||
Assertions.assertTrue(err < 1e-2)
|
|
||||||
}
|
|
||||||
if (index == attempts.size - 1 && index < testArgs.maxAttempt - 1) {
|
|
||||||
/*
|
|
||||||
* If the last attempt index is lower than the maximum number of attempts, then
|
|
||||||
* check the outcome handler returns DoNotRetry
|
|
||||||
*/
|
|
||||||
Assertions.assertTrue(outcomeHandler.shouldRetry(value) is OutcomeHandlerResult.DoNotRetry)
|
|
||||||
} else if (index < attempts.size - 1) {
|
|
||||||
/*
|
|
||||||
* If the attempt is not the last attempt check the outcome handler returns Retry
|
|
||||||
*/
|
|
||||||
Assertions.assertTrue(outcomeHandler.shouldRetry(value) is OutcomeHandlerResult.Retry)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,16 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
|
||||||
<rbcs-client:profiles xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
|
|
||||||
xmlns:rbcs-client="urn:net.woggioni.rbcs.client"
|
|
||||||
xs:schemaLocation="urn:net.woggioni.rbcs.client jms://net.woggioni.rbcs.client/net/woggioni/rbcs/client/schema/rbcs-client.xsd"
|
|
||||||
>
|
|
||||||
<profile name="profile1" base-url="https://rbcs1.example.com/">
|
|
||||||
<tls-client-auth
|
|
||||||
key-store-file="keystore.pfx"
|
|
||||||
key-store-password="password"
|
|
||||||
key-alias="woggioni@c962475fa38"
|
|
||||||
key-password="key-password"/>
|
|
||||||
</profile>
|
|
||||||
<profile name="profile2" base-url="https://rbcs2.example.com/">
|
|
||||||
<basic-auth user="user" password="password"/>
|
|
||||||
</profile>
|
|
||||||
</rbcs-client:profiles>
|
|
@@ -1,12 +0,0 @@
|
|||||||
module net.woggioni.rbcs.common {
|
|
||||||
requires java.xml;
|
|
||||||
requires java.logging;
|
|
||||||
requires org.slf4j;
|
|
||||||
requires kotlin.stdlib;
|
|
||||||
requires net.woggioni.jwo;
|
|
||||||
requires io.netty.buffer;
|
|
||||||
requires io.netty.transport;
|
|
||||||
|
|
||||||
provides java.net.spi.URLStreamHandlerProvider with net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory;
|
|
||||||
exports net.woggioni.rbcs.common;
|
|
||||||
}
|
|
@@ -1,15 +0,0 @@
|
|||||||
package net.woggioni.rbcs.common
|
|
||||||
|
|
||||||
import io.netty.buffer.ByteBuf
|
|
||||||
import io.netty.buffer.ByteBufAllocator
|
|
||||||
import io.netty.buffer.CompositeByteBuf
|
|
||||||
|
|
||||||
fun extractChunk(buf: CompositeByteBuf, alloc: ByteBufAllocator): ByteBuf {
|
|
||||||
val chunk = alloc.compositeBuffer()
|
|
||||||
for (component in buf.decompose(0, buf.readableBytes())) {
|
|
||||||
chunk.addComponent(true, component.retain())
|
|
||||||
}
|
|
||||||
buf.removeComponents(0, buf.numComponents())
|
|
||||||
buf.clear()
|
|
||||||
return chunk
|
|
||||||
}
|
|
@@ -1,25 +0,0 @@
|
|||||||
package net.woggioni.rbcs.common
|
|
||||||
|
|
||||||
import io.netty.buffer.ByteBuf
|
|
||||||
import java.io.InputStream
|
|
||||||
|
|
||||||
class ByteBufInputStream(private val buf : ByteBuf) : InputStream() {
|
|
||||||
override fun read(): Int {
|
|
||||||
return buf.takeIf {
|
|
||||||
it.readableBytes() > 0
|
|
||||||
}?.let(ByteBuf::readByte)
|
|
||||||
?.let(Byte::toInt) ?: -1
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun read(b: ByteArray, off: Int, len: Int): Int {
|
|
||||||
val readableBytes = buf.readableBytes()
|
|
||||||
if(readableBytes == 0) return -1
|
|
||||||
val result = len.coerceAtMost(readableBytes)
|
|
||||||
buf.readBytes(b, off, result)
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun close() {
|
|
||||||
buf.release()
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,18 +0,0 @@
|
|||||||
package net.woggioni.rbcs.common
|
|
||||||
|
|
||||||
import io.netty.buffer.ByteBuf
|
|
||||||
import java.io.OutputStream
|
|
||||||
|
|
||||||
class ByteBufOutputStream(private val buf : ByteBuf) : OutputStream() {
|
|
||||||
override fun write(b: Int) {
|
|
||||||
buf.writeByte(b)
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun write(b: ByteArray, off: Int, len: Int) {
|
|
||||||
buf.writeBytes(b, off, len)
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun close() {
|
|
||||||
buf.release()
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,7 +0,0 @@
|
|||||||
package net.woggioni.rbcs.common
|
|
||||||
|
|
||||||
class ResourceNotFoundException(msg : String? = null, cause: Throwable? = null) : RuntimeException(msg, cause) {
|
|
||||||
}
|
|
||||||
|
|
||||||
class ModuleNotFoundException(msg : String? = null, cause: Throwable? = null) : RuntimeException(msg, cause) {
|
|
||||||
}
|
|
@@ -1,194 +0,0 @@
|
|||||||
package net.woggioni.rbcs.common
|
|
||||||
|
|
||||||
import io.netty.channel.Channel
|
|
||||||
import io.netty.channel.ChannelHandlerContext
|
|
||||||
import org.slf4j.Logger
|
|
||||||
import org.slf4j.LoggerFactory
|
|
||||||
import org.slf4j.MDC
|
|
||||||
import org.slf4j.event.Level
|
|
||||||
import org.slf4j.spi.LoggingEventBuilder
|
|
||||||
import java.nio.file.Files
|
|
||||||
import java.nio.file.Path
|
|
||||||
import java.util.logging.LogManager
|
|
||||||
|
|
||||||
inline fun <reified T> T.contextLogger() = LoggerFactory.getLogger(T::class.java)
|
|
||||||
inline fun <reified T> createLogger() = LoggerFactory.getLogger(T::class.java)
|
|
||||||
|
|
||||||
inline fun Logger.traceParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
|
||||||
if (isTraceEnabled) {
|
|
||||||
val (format, params) = messageBuilder()
|
|
||||||
trace(format, params)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.debugParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
|
||||||
if (isDebugEnabled) {
|
|
||||||
val (format, params) = messageBuilder()
|
|
||||||
info(format, params)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.infoParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
|
||||||
if (isInfoEnabled) {
|
|
||||||
val (format, params) = messageBuilder()
|
|
||||||
info(format, params)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.warnParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
|
||||||
if (isWarnEnabled) {
|
|
||||||
val (format, params) = messageBuilder()
|
|
||||||
warn(format, params)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.errorParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
|
||||||
if (isErrorEnabled) {
|
|
||||||
val (format, params) = messageBuilder()
|
|
||||||
error(format, params)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
inline fun log(
|
|
||||||
log: Logger,
|
|
||||||
filter: Logger.() -> Boolean,
|
|
||||||
loggerMethod: Logger.(String) -> Unit, messageBuilder: () -> String
|
|
||||||
) {
|
|
||||||
if (log.filter()) {
|
|
||||||
log.loggerMethod(messageBuilder())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fun withMDC(params: Array<Pair<String, String>>, cb: () -> Unit) {
|
|
||||||
object : AutoCloseable {
|
|
||||||
override fun close() {
|
|
||||||
for ((key, _) in params) MDC.remove(key)
|
|
||||||
}
|
|
||||||
}.use {
|
|
||||||
for ((key, value) in params) MDC.put(key, value)
|
|
||||||
cb()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.log(level: Level, channel: Channel, crossinline messageBuilder: (LoggingEventBuilder) -> Unit ) {
|
|
||||||
if (isEnabledForLevel(level)) {
|
|
||||||
val params = arrayOf<Pair<String, String>>(
|
|
||||||
"channel-id-short" to channel.id().asShortText(),
|
|
||||||
"channel-id-long" to channel.id().asLongText(),
|
|
||||||
"remote-address" to channel.remoteAddress().toString(),
|
|
||||||
"local-address" to channel.localAddress().toString(),
|
|
||||||
)
|
|
||||||
withMDC(params) {
|
|
||||||
val builder = makeLoggingEventBuilder(level)
|
|
||||||
// for ((key, value) in params) {
|
|
||||||
// builder.addKeyValue(key, value)
|
|
||||||
// }
|
|
||||||
messageBuilder(builder)
|
|
||||||
builder.log()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
inline fun Logger.log(level: Level, channel: Channel, crossinline messageBuilder: () -> String) {
|
|
||||||
log(level, channel) { builder ->
|
|
||||||
builder.setMessage(messageBuilder())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.trace(ch: Channel, crossinline messageBuilder: () -> String) {
|
|
||||||
log(Level.TRACE, ch, messageBuilder)
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.debug(ch: Channel, crossinline messageBuilder: () -> String) {
|
|
||||||
log(Level.DEBUG, ch, messageBuilder)
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.info(ch: Channel, crossinline messageBuilder: () -> String) {
|
|
||||||
log(Level.INFO, ch, messageBuilder)
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.warn(ch: Channel, crossinline messageBuilder: () -> String) {
|
|
||||||
log(Level.WARN, ch, messageBuilder)
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.error(ch: Channel, crossinline messageBuilder: () -> String) {
|
|
||||||
log(Level.ERROR, ch, messageBuilder)
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.trace(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
|
||||||
log(Level.TRACE, ctx.channel(), messageBuilder)
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.debug(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
|
||||||
log(Level.DEBUG, ctx.channel(), messageBuilder)
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.info(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
|
||||||
log(Level.INFO, ctx.channel(), messageBuilder)
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.warn(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
|
||||||
log(Level.WARN, ctx.channel(), messageBuilder)
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.error(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
|
||||||
log(Level.ERROR, ctx.channel(), messageBuilder)
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
inline fun Logger.log(level: Level, messageBuilder: () -> String) {
|
|
||||||
if (isEnabledForLevel(level)) {
|
|
||||||
makeLoggingEventBuilder(level).log(messageBuilder())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.trace(messageBuilder: () -> String) {
|
|
||||||
if (isTraceEnabled) {
|
|
||||||
trace(messageBuilder())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.debug(messageBuilder: () -> String) {
|
|
||||||
if (isDebugEnabled) {
|
|
||||||
debug(messageBuilder())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.info(messageBuilder: () -> String) {
|
|
||||||
if (isInfoEnabled) {
|
|
||||||
info(messageBuilder())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.warn(messageBuilder: () -> String) {
|
|
||||||
if (isWarnEnabled) {
|
|
||||||
warn(messageBuilder())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
inline fun Logger.error(messageBuilder: () -> String) {
|
|
||||||
if (isErrorEnabled) {
|
|
||||||
error(messageBuilder())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
class LoggingConfig {
|
|
||||||
|
|
||||||
init {
|
|
||||||
val logManager = LogManager.getLogManager()
|
|
||||||
System.getProperty("log.config.source")?.let withSource@{ source ->
|
|
||||||
val urls = LoggingConfig::class.java.classLoader.getResources(source)
|
|
||||||
while (urls.hasMoreElements()) {
|
|
||||||
val url = urls.nextElement()
|
|
||||||
url.openStream().use { inputStream ->
|
|
||||||
logManager.readConfiguration(inputStream)
|
|
||||||
return@withSource
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Path.of(source).takeIf(Files::exists)
|
|
||||||
?.let(Files::newInputStream)
|
|
||||||
?.use(logManager::readConfiguration)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,57 +0,0 @@
|
|||||||
package net.woggioni.rbcs.common
|
|
||||||
|
|
||||||
import java.security.SecureRandom
|
|
||||||
import java.security.spec.KeySpec
|
|
||||||
import java.util.Base64
|
|
||||||
import javax.crypto.SecretKeyFactory
|
|
||||||
import javax.crypto.spec.PBEKeySpec
|
|
||||||
|
|
||||||
object PasswordSecurity {
|
|
||||||
|
|
||||||
enum class Algorithm(
|
|
||||||
val codeName : String,
|
|
||||||
val keyLength : Int,
|
|
||||||
val iterations : Int) {
|
|
||||||
PBEWithHmacSHA512_224AndAES_256("PBEWithHmacSHA512/224AndAES_256", 64, 1),
|
|
||||||
PBEWithHmacSHA1AndAES_256("PBEWithHmacSHA1AndAES_256",64, 1),
|
|
||||||
PBEWithHmacSHA384AndAES_128("PBEWithHmacSHA384AndAES_128", 64,1),
|
|
||||||
PBEWithHmacSHA384AndAES_256("PBEWithHmacSHA384AndAES_256",64,1),
|
|
||||||
PBKDF2WithHmacSHA512("PBKDF2WithHmacSHA512",512, 1),
|
|
||||||
PBKDF2WithHmacSHA384("PBKDF2WithHmacSHA384",384, 1);
|
|
||||||
}
|
|
||||||
|
|
||||||
private fun concat(arr1: ByteArray, arr2: ByteArray): ByteArray {
|
|
||||||
val result = ByteArray(arr1.size + arr2.size)
|
|
||||||
var j = 0
|
|
||||||
for(element in arr1) {
|
|
||||||
result[j] = element
|
|
||||||
j += 1
|
|
||||||
}
|
|
||||||
for(element in arr2) {
|
|
||||||
result[j] = element
|
|
||||||
j += 1
|
|
||||||
}
|
|
||||||
return result
|
|
||||||
}
|
|
||||||
|
|
||||||
fun hashPassword(password : String, salt : String? = null, algorithm : Algorithm = Algorithm.PBKDF2WithHmacSHA512) : String {
|
|
||||||
val actualSalt = salt?.let(Base64.getDecoder()::decode) ?: SecureRandom().run {
|
|
||||||
val result = ByteArray(16)
|
|
||||||
nextBytes(result)
|
|
||||||
result
|
|
||||||
}
|
|
||||||
val spec: KeySpec = PBEKeySpec(password.toCharArray(), actualSalt, algorithm.iterations, algorithm.keyLength)
|
|
||||||
val factory = SecretKeyFactory.getInstance(algorithm.codeName)
|
|
||||||
val hash = factory.generateSecret(spec).encoded
|
|
||||||
return String(Base64.getEncoder().encode(concat(hash, actualSalt)))
|
|
||||||
}
|
|
||||||
|
|
||||||
fun decodePasswordHash(encodedPasswordHash : String, algorithm: Algorithm = Algorithm.PBKDF2WithHmacSHA512) : Pair<ByteArray, ByteArray> {
|
|
||||||
val decoded = Base64.getDecoder().decode(encodedPasswordHash)
|
|
||||||
val hash = ByteArray(algorithm.keyLength / 8)
|
|
||||||
val salt = ByteArray(decoded.size - algorithm.keyLength / 8)
|
|
||||||
System.arraycopy(decoded, 0, hash, 0, hash.size)
|
|
||||||
System.arraycopy(decoded, hash.size, salt, 0, salt.size)
|
|
||||||
return hash to salt
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,61 +0,0 @@
|
|||||||
package net.woggioni.rbcs.common
|
|
||||||
|
|
||||||
import net.woggioni.jwo.JWO
|
|
||||||
import java.net.URI
|
|
||||||
import java.net.URL
|
|
||||||
import java.security.MessageDigest
|
|
||||||
|
|
||||||
object RBCS {
|
|
||||||
fun String.toUrl() : URL = URL.of(URI(this), null)
|
|
||||||
|
|
||||||
const val RBCS_NAMESPACE_URI: String = "urn:net.woggioni.rbcs.server"
|
|
||||||
const val RBCS_PREFIX: String = "rbcs"
|
|
||||||
const val XML_SCHEMA_NAMESPACE_URI = "http://www.w3.org/2001/XMLSchema-instance"
|
|
||||||
|
|
||||||
fun ByteArray.toInt(index : Int = 0) : Long {
|
|
||||||
if(index + 4 > size) throw IllegalArgumentException("Not enough bytes to decode a 32 bits integer")
|
|
||||||
var value : Long = 0
|
|
||||||
for (b in index until index + 4) {
|
|
||||||
value = (value shl 8) + (get(b).toInt() and 0xFF)
|
|
||||||
}
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
|
|
||||||
fun ByteArray.toLong(index : Int = 0) : Long {
|
|
||||||
if(index + 8 > size) throw IllegalArgumentException("Not enough bytes to decode a 64 bits long integer")
|
|
||||||
var value : Long = 0
|
|
||||||
for (b in index until index + 8) {
|
|
||||||
value = (value shl 8) + (get(b).toInt() and 0xFF)
|
|
||||||
}
|
|
||||||
return value
|
|
||||||
}
|
|
||||||
|
|
||||||
fun digest(
|
|
||||||
data: ByteArray,
|
|
||||||
md: MessageDigest
|
|
||||||
): ByteArray {
|
|
||||||
md.update(data)
|
|
||||||
return md.digest()
|
|
||||||
}
|
|
||||||
|
|
||||||
fun digestString(
|
|
||||||
data: ByteArray,
|
|
||||||
md: MessageDigest
|
|
||||||
): String {
|
|
||||||
return JWO.bytesToHex(digest(data, md))
|
|
||||||
}
|
|
||||||
|
|
||||||
fun processCacheKey(key: String, digestAlgorithm: String?) = digestAlgorithm
|
|
||||||
?.let(MessageDigest::getInstance)
|
|
||||||
?.let { md ->
|
|
||||||
digest(key.toByteArray(), md)
|
|
||||||
} ?: key.toByteArray(Charsets.UTF_8)
|
|
||||||
|
|
||||||
fun Long.toIntOrNull(): Int? {
|
|
||||||
return if (this >= Int.MIN_VALUE && this <= Int.MAX_VALUE) {
|
|
||||||
toInt()
|
|
||||||
} else {
|
|
||||||
null
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1 +0,0 @@
|
|||||||
net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory
|
|
@@ -1,38 +0,0 @@
|
|||||||
package net.woggioni.rbcs.common
|
|
||||||
|
|
||||||
import net.woggioni.rbcs.common.PasswordSecurity.decodePasswordHash
|
|
||||||
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
|
|
||||||
import org.junit.jupiter.api.Assertions
|
|
||||||
import org.junit.jupiter.api.Test
|
|
||||||
import org.junit.jupiter.params.ParameterizedTest
|
|
||||||
import org.junit.jupiter.params.provider.EnumSource
|
|
||||||
import java.security.Provider
|
|
||||||
import java.security.Security
|
|
||||||
import java.util.Base64
|
|
||||||
|
|
||||||
|
|
||||||
class PasswordHashingTest {
|
|
||||||
|
|
||||||
@EnumSource(PasswordSecurity.Algorithm::class)
|
|
||||||
@ParameterizedTest
|
|
||||||
fun test(algo: PasswordSecurity.Algorithm) {
|
|
||||||
val password = "password"
|
|
||||||
val encoded = hashPassword(password, algorithm = algo)
|
|
||||||
val (_, salt) = decodePasswordHash(encoded, algo)
|
|
||||||
Assertions.assertEquals(encoded,
|
|
||||||
hashPassword(password, salt = salt.let(Base64.getEncoder()::encodeToString), algorithm = algo)
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
fun listAvailableAlgorithms() {
|
|
||||||
Security.getProviders().asSequence()
|
|
||||||
.flatMap { provider: Provider -> provider.services.asSequence() }
|
|
||||||
.filter { service: Provider.Service -> "SecretKeyFactory" == service.type }
|
|
||||||
.map(Provider.Service::getAlgorithm)
|
|
||||||
.forEach {
|
|
||||||
println(it)
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,46 +0,0 @@
|
|||||||
# RBCS Memcache plugins
|
|
||||||
|
|
||||||
This plugins allows RBCs to store and retrieve data from a memcache cluster.
|
|
||||||
The memcache server selection is simply based on the hash of the key,
|
|
||||||
deflate compression is also supported and performed by the RBCS server
|
|
||||||
|
|
||||||
## Quickstart
|
|
||||||
The plugin can be built with
|
|
||||||
```bash
|
|
||||||
./gradlew rbcs-server-memcache:bundle
|
|
||||||
```
|
|
||||||
which creates a `.tar` archive in the `build/distributions` folder.
|
|
||||||
The archive is supposed to be extracted inside the RBCS server's `plugins` directory.
|
|
||||||
|
|
||||||
## Configuration
|
|
||||||
|
|
||||||
The plugin can be enabled setting the `xs:type` attribute of the `cache` element
|
|
||||||
to `memcacheCacheType`.
|
|
||||||
|
|
||||||
The plugins currently supports the following configuration attributes:
|
|
||||||
- `max-age`: the amount of time cache entries will be retained on memcache
|
|
||||||
- `digest`: digest algorithm to use on the key before submission
|
|
||||||
to memcache (optional, no digest is applied if omitted)
|
|
||||||
- `compression`: compression algorithm to apply to cache values before,
|
|
||||||
currently only `deflate` is supported (optionla, if omitted compression is disabled)
|
|
||||||
- `compression-level`: compression level to use, deflate supports compression levels from 1 to 9,
|
|
||||||
where 1 is for fast compression at the expense of speed (optional, 6 is used if omitted)
|
|
||||||
```xml
|
|
||||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
|
||||||
<rbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
|
|
||||||
xmlns:rbcs="urn:net.woggioni.rbcs.server"
|
|
||||||
xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
|
|
||||||
xs:schemaLocation="urn:net.woggioni.rbcs.server.memcache jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd urn:net.woggioni.rbcs.server jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs.xsd"
|
|
||||||
>
|
|
||||||
...
|
|
||||||
<cache xs:type="rbcs-memcache:memcacheCacheType"
|
|
||||||
max-age="P7D"
|
|
||||||
digest="SHA-256"
|
|
||||||
compression-mode="deflate"
|
|
||||||
compression-level="6"
|
|
||||||
chunk-size="0x10000">
|
|
||||||
<server host="127.0.0.1" port="11211" max-connections="256"/>
|
|
||||||
<server host="127.0.0.1" port="11212" max-connections="256"/>
|
|
||||||
</cache>
|
|
||||||
...
|
|
||||||
```
|
|
@@ -1,20 +0,0 @@
|
|||||||
import net.woggioni.rbcs.api.CacheProvider;
|
|
||||||
|
|
||||||
module net.woggioni.rbcs.server.memcache {
|
|
||||||
requires net.woggioni.rbcs.common;
|
|
||||||
requires net.woggioni.rbcs.api;
|
|
||||||
requires net.woggioni.jwo;
|
|
||||||
requires java.xml;
|
|
||||||
requires kotlin.stdlib;
|
|
||||||
requires io.netty.transport;
|
|
||||||
requires io.netty.codec;
|
|
||||||
requires io.netty.codec.memcache;
|
|
||||||
requires io.netty.common;
|
|
||||||
requires io.netty.buffer;
|
|
||||||
requires io.netty.handler;
|
|
||||||
requires org.slf4j;
|
|
||||||
|
|
||||||
provides CacheProvider with net.woggioni.rbcs.server.memcache.MemcacheCacheProvider;
|
|
||||||
|
|
||||||
opens net.woggioni.rbcs.server.memcache.schema;
|
|
||||||
}
|
|
@@ -1,4 +0,0 @@
|
|||||||
package net.woggioni.rbcs.server.memcache
|
|
||||||
|
|
||||||
class MemcacheException(status : Short, msg : String? = null, cause : Throwable? = null)
|
|
||||||
: RuntimeException(msg ?: "Memcached status $status", cause)
|
|
@@ -1,94 +0,0 @@
|
|||||||
package net.woggioni.rbcs.server.memcache
|
|
||||||
|
|
||||||
import io.netty.channel.ChannelFactory
|
|
||||||
import io.netty.channel.ChannelHandler
|
|
||||||
import io.netty.channel.EventLoopGroup
|
|
||||||
import io.netty.channel.pool.FixedChannelPool
|
|
||||||
import io.netty.channel.socket.DatagramChannel
|
|
||||||
import io.netty.channel.socket.SocketChannel
|
|
||||||
import net.woggioni.rbcs.api.CacheHandlerFactory
|
|
||||||
import net.woggioni.rbcs.api.Configuration
|
|
||||||
import net.woggioni.rbcs.common.HostAndPort
|
|
||||||
import net.woggioni.rbcs.server.memcache.client.MemcacheClient
|
|
||||||
import java.time.Duration
|
|
||||||
import java.util.concurrent.CompletableFuture
|
|
||||||
import java.util.concurrent.ConcurrentHashMap
|
|
||||||
import java.util.concurrent.atomic.AtomicInteger
|
|
||||||
import java.util.concurrent.atomic.AtomicReference
|
|
||||||
|
|
||||||
data class MemcacheCacheConfiguration(
|
|
||||||
val servers: List<Server>,
|
|
||||||
val maxAge: Duration = Duration.ofDays(1),
|
|
||||||
val digestAlgorithm: String? = null,
|
|
||||||
val compressionMode: CompressionMode? = null,
|
|
||||||
val compressionLevel: Int,
|
|
||||||
val chunkSize: Int
|
|
||||||
) : Configuration.Cache {
|
|
||||||
|
|
||||||
enum class CompressionMode {
|
|
||||||
/**
|
|
||||||
* Deflate mode
|
|
||||||
*/
|
|
||||||
DEFLATE
|
|
||||||
}
|
|
||||||
|
|
||||||
data class Server(
|
|
||||||
val endpoint: HostAndPort,
|
|
||||||
val connectionTimeoutMillis: Int?,
|
|
||||||
val maxConnections: Int
|
|
||||||
)
|
|
||||||
|
|
||||||
override fun materialize() = object : CacheHandlerFactory {
|
|
||||||
|
|
||||||
private val connectionPoolMap = ConcurrentHashMap<HostAndPort, FixedChannelPool>()
|
|
||||||
|
|
||||||
override fun newHandler(
|
|
||||||
eventLoop: EventLoopGroup,
|
|
||||||
socketChannelFactory: ChannelFactory<SocketChannel>,
|
|
||||||
datagramChannelFactory: ChannelFactory<DatagramChannel>
|
|
||||||
): ChannelHandler {
|
|
||||||
return MemcacheCacheHandler(
|
|
||||||
MemcacheClient(
|
|
||||||
this@MemcacheCacheConfiguration.servers,
|
|
||||||
chunkSize,
|
|
||||||
eventLoop,
|
|
||||||
socketChannelFactory,
|
|
||||||
connectionPoolMap
|
|
||||||
),
|
|
||||||
digestAlgorithm,
|
|
||||||
compressionMode != null,
|
|
||||||
compressionLevel,
|
|
||||||
chunkSize,
|
|
||||||
maxAge
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun asyncClose() = object : CompletableFuture<Void>() {
|
|
||||||
init {
|
|
||||||
val failure = AtomicReference<Throwable>(null)
|
|
||||||
val pools = connectionPoolMap.values.toList()
|
|
||||||
val npools = pools.size
|
|
||||||
val finished = AtomicInteger(0)
|
|
||||||
pools.forEach { pool ->
|
|
||||||
pool.closeAsync().addListener {
|
|
||||||
if (!it.isSuccess) {
|
|
||||||
failure.compareAndSet(null, it.cause())
|
|
||||||
}
|
|
||||||
if(finished.incrementAndGet() == npools) {
|
|
||||||
when(val ex = failure.get()) {
|
|
||||||
null -> complete(null)
|
|
||||||
else -> completeExceptionally(ex)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun getNamespaceURI() = "urn:net.woggioni.rbcs.server.memcache"
|
|
||||||
|
|
||||||
override fun getTypeName() = "memcacheCacheType"
|
|
||||||
}
|
|
||||||
|
|
@@ -1,409 +0,0 @@
|
|||||||
package net.woggioni.rbcs.server.memcache
|
|
||||||
|
|
||||||
import io.netty.buffer.ByteBuf
|
|
||||||
import io.netty.buffer.ByteBufAllocator
|
|
||||||
import io.netty.buffer.CompositeByteBuf
|
|
||||||
import io.netty.channel.ChannelHandlerContext
|
|
||||||
import io.netty.channel.SimpleChannelInboundHandler
|
|
||||||
import io.netty.handler.codec.memcache.DefaultLastMemcacheContent
|
|
||||||
import io.netty.handler.codec.memcache.DefaultMemcacheContent
|
|
||||||
import io.netty.handler.codec.memcache.LastMemcacheContent
|
|
||||||
import io.netty.handler.codec.memcache.MemcacheContent
|
|
||||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
|
|
||||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
|
|
||||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponseStatus
|
|
||||||
import io.netty.handler.codec.memcache.binary.DefaultBinaryMemcacheRequest
|
|
||||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
|
||||||
import net.woggioni.rbcs.api.exception.ContentTooLargeException
|
|
||||||
import net.woggioni.rbcs.api.message.CacheMessage
|
|
||||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheContent
|
|
||||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheGetRequest
|
|
||||||
import net.woggioni.rbcs.api.message.CacheMessage.CachePutRequest
|
|
||||||
import net.woggioni.rbcs.api.message.CacheMessage.CachePutResponse
|
|
||||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueFoundResponse
|
|
||||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueNotFoundResponse
|
|
||||||
import net.woggioni.rbcs.api.message.CacheMessage.LastCacheContent
|
|
||||||
import net.woggioni.rbcs.common.ByteBufInputStream
|
|
||||||
import net.woggioni.rbcs.common.ByteBufOutputStream
|
|
||||||
import net.woggioni.rbcs.common.RBCS.processCacheKey
|
|
||||||
import net.woggioni.rbcs.common.RBCS.toIntOrNull
|
|
||||||
import net.woggioni.rbcs.common.createLogger
|
|
||||||
import net.woggioni.rbcs.common.debug
|
|
||||||
import net.woggioni.rbcs.common.extractChunk
|
|
||||||
import net.woggioni.rbcs.common.trace
|
|
||||||
import net.woggioni.rbcs.server.memcache.client.MemcacheClient
|
|
||||||
import net.woggioni.rbcs.server.memcache.client.MemcacheRequestController
|
|
||||||
import net.woggioni.rbcs.server.memcache.client.MemcacheResponseHandler
|
|
||||||
import java.io.ByteArrayOutputStream
|
|
||||||
import java.io.ObjectInputStream
|
|
||||||
import java.io.ObjectOutputStream
|
|
||||||
import java.nio.ByteBuffer
|
|
||||||
import java.nio.channels.Channels
|
|
||||||
import java.nio.channels.FileChannel
|
|
||||||
import java.nio.channels.ReadableByteChannel
|
|
||||||
import java.nio.file.Files
|
|
||||||
import java.nio.file.StandardOpenOption
|
|
||||||
import java.time.Duration
|
|
||||||
import java.time.Instant
|
|
||||||
import java.util.concurrent.CompletableFuture
|
|
||||||
import java.util.zip.Deflater
|
|
||||||
import java.util.zip.DeflaterOutputStream
|
|
||||||
import java.util.zip.InflaterOutputStream
|
|
||||||
import io.netty.channel.Channel as NettyChannel
|
|
||||||
|
|
||||||
class MemcacheCacheHandler(
|
|
||||||
private val client: MemcacheClient,
|
|
||||||
private val digestAlgorithm: String?,
|
|
||||||
private val compressionEnabled: Boolean,
|
|
||||||
private val compressionLevel: Int,
|
|
||||||
private val chunkSize: Int,
|
|
||||||
private val maxAge: Duration
|
|
||||||
) : SimpleChannelInboundHandler<CacheMessage>() {
|
|
||||||
companion object {
|
|
||||||
private val log = createLogger<MemcacheCacheHandler>()
|
|
||||||
|
|
||||||
private fun encodeExpiry(expiry: Duration): Int {
|
|
||||||
val expirySeconds = expiry.toSeconds()
|
|
||||||
return expirySeconds.toInt().takeIf { it.toLong() == expirySeconds }
|
|
||||||
?: Instant.ofEpochSecond(expirySeconds).epochSecond.toInt()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private inner class InProgressGetRequest(
|
|
||||||
private val key: String,
|
|
||||||
private val ctx: ChannelHandlerContext
|
|
||||||
) {
|
|
||||||
private val acc = ctx.alloc().compositeBuffer()
|
|
||||||
private val chunk = ctx.alloc().compositeBuffer()
|
|
||||||
private val outputStream = ByteBufOutputStream(chunk).let {
|
|
||||||
if (compressionEnabled) {
|
|
||||||
InflaterOutputStream(it)
|
|
||||||
} else {
|
|
||||||
it
|
|
||||||
}
|
|
||||||
}
|
|
||||||
private var responseSent = false
|
|
||||||
private var metadataSize: Int? = null
|
|
||||||
|
|
||||||
fun write(buf: ByteBuf) {
|
|
||||||
acc.addComponent(true, buf.retain())
|
|
||||||
if (metadataSize == null && acc.readableBytes() >= Int.SIZE_BYTES) {
|
|
||||||
metadataSize = acc.readInt()
|
|
||||||
}
|
|
||||||
metadataSize
|
|
||||||
?.takeIf { !responseSent }
|
|
||||||
?.takeIf { acc.readableBytes() >= it }
|
|
||||||
?.let { mSize ->
|
|
||||||
val metadata = ObjectInputStream(ByteBufInputStream(acc)).use {
|
|
||||||
acc.retain()
|
|
||||||
it.readObject() as CacheValueMetadata
|
|
||||||
}
|
|
||||||
ctx.writeAndFlush(CacheValueFoundResponse(key, metadata))
|
|
||||||
responseSent = true
|
|
||||||
acc.readerIndex(Int.SIZE_BYTES + mSize)
|
|
||||||
}
|
|
||||||
if (responseSent) {
|
|
||||||
acc.readBytes(outputStream, acc.readableBytes())
|
|
||||||
if(acc.readableBytes() >= chunkSize) {
|
|
||||||
flush(false)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private fun flush(last : Boolean) {
|
|
||||||
val toSend = extractChunk(chunk, ctx.alloc())
|
|
||||||
val msg = if(last) {
|
|
||||||
log.trace(ctx) {
|
|
||||||
"Sending last chunk to client on channel ${ctx.channel().id().asShortText()}"
|
|
||||||
}
|
|
||||||
LastCacheContent(toSend)
|
|
||||||
} else {
|
|
||||||
log.trace(ctx) {
|
|
||||||
"Sending chunk to client on channel ${ctx.channel().id().asShortText()}"
|
|
||||||
}
|
|
||||||
CacheContent(toSend)
|
|
||||||
}
|
|
||||||
ctx.writeAndFlush(msg)
|
|
||||||
}
|
|
||||||
|
|
||||||
fun commit() {
|
|
||||||
acc.release()
|
|
||||||
chunk.retain()
|
|
||||||
outputStream.close()
|
|
||||||
flush(true)
|
|
||||||
chunk.release()
|
|
||||||
}
|
|
||||||
|
|
||||||
fun rollback() {
|
|
||||||
acc.release()
|
|
||||||
outputStream.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private inner class InProgressPutRequest(
|
|
||||||
private val ch : NettyChannel,
|
|
||||||
metadata : CacheValueMetadata,
|
|
||||||
val digest : ByteBuf,
|
|
||||||
val requestController: CompletableFuture<MemcacheRequestController>,
|
|
||||||
private val alloc: ByteBufAllocator
|
|
||||||
) {
|
|
||||||
private var totalSize = 0
|
|
||||||
private var tmpFile : FileChannel? = null
|
|
||||||
private val accumulator = alloc.compositeBuffer()
|
|
||||||
private val stream = ByteBufOutputStream(accumulator).let {
|
|
||||||
if (compressionEnabled) {
|
|
||||||
DeflaterOutputStream(it, Deflater(compressionLevel))
|
|
||||||
} else {
|
|
||||||
it
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
init {
|
|
||||||
ByteArrayOutputStream().let { baos ->
|
|
||||||
ObjectOutputStream(baos).use {
|
|
||||||
it.writeObject(metadata)
|
|
||||||
}
|
|
||||||
val serializedBytes = baos.toByteArray()
|
|
||||||
accumulator.writeInt(serializedBytes.size)
|
|
||||||
accumulator.writeBytes(serializedBytes)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fun write(buf: ByteBuf) {
|
|
||||||
totalSize += buf.readableBytes()
|
|
||||||
buf.readBytes(stream, buf.readableBytes())
|
|
||||||
tmpFile?.let {
|
|
||||||
flushToDisk(it, accumulator)
|
|
||||||
}
|
|
||||||
if(accumulator.readableBytes() > 0x100000) {
|
|
||||||
log.debug(ch) {
|
|
||||||
"Entry is too big, buffering it into a file"
|
|
||||||
}
|
|
||||||
val opts = arrayOf(
|
|
||||||
StandardOpenOption.DELETE_ON_CLOSE,
|
|
||||||
StandardOpenOption.READ,
|
|
||||||
StandardOpenOption.WRITE,
|
|
||||||
StandardOpenOption.TRUNCATE_EXISTING
|
|
||||||
)
|
|
||||||
FileChannel.open(Files.createTempFile("rbcs-memcache", ".tmp"), *opts).let { fc ->
|
|
||||||
tmpFile = fc
|
|
||||||
flushToDisk(fc, accumulator)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private fun flushToDisk(fc : FileChannel, buf : CompositeByteBuf) {
|
|
||||||
val chunk = extractChunk(buf, alloc)
|
|
||||||
fc.write(chunk.nioBuffer())
|
|
||||||
chunk.release()
|
|
||||||
}
|
|
||||||
|
|
||||||
fun commit() : Pair<Int, ReadableByteChannel> {
|
|
||||||
digest.release()
|
|
||||||
accumulator.retain()
|
|
||||||
stream.close()
|
|
||||||
val fileChannel = tmpFile
|
|
||||||
return if(fileChannel != null) {
|
|
||||||
flushToDisk(fileChannel, accumulator)
|
|
||||||
accumulator.release()
|
|
||||||
fileChannel.position(0)
|
|
||||||
val fileSize = fileChannel.size().toIntOrNull() ?: let {
|
|
||||||
fileChannel.close()
|
|
||||||
throw ContentTooLargeException("Request body is too large", null)
|
|
||||||
}
|
|
||||||
fileSize to fileChannel
|
|
||||||
} else {
|
|
||||||
accumulator.readableBytes() to Channels.newChannel(ByteBufInputStream(accumulator))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fun rollback() {
|
|
||||||
stream.close()
|
|
||||||
digest.release()
|
|
||||||
tmpFile?.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private var inProgressPutRequest: InProgressPutRequest? = null
|
|
||||||
private var inProgressGetRequest: InProgressGetRequest? = null
|
|
||||||
|
|
||||||
override fun channelRead0(ctx: ChannelHandlerContext, msg: CacheMessage) {
|
|
||||||
when (msg) {
|
|
||||||
is CacheGetRequest -> handleGetRequest(ctx, msg)
|
|
||||||
is CachePutRequest -> handlePutRequest(ctx, msg)
|
|
||||||
is LastCacheContent -> handleLastCacheContent(ctx, msg)
|
|
||||||
is CacheContent -> handleCacheContent(ctx, msg)
|
|
||||||
else -> ctx.fireChannelRead(msg)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private fun handleGetRequest(ctx: ChannelHandlerContext, msg: CacheGetRequest) {
|
|
||||||
log.debug(ctx) {
|
|
||||||
"Fetching ${msg.key} from memcache"
|
|
||||||
}
|
|
||||||
val key = ctx.alloc().buffer().also {
|
|
||||||
it.writeBytes(processCacheKey(msg.key, digestAlgorithm))
|
|
||||||
}
|
|
||||||
val responseHandler = object : MemcacheResponseHandler {
|
|
||||||
override fun responseReceived(response: BinaryMemcacheResponse) {
|
|
||||||
val status = response.status()
|
|
||||||
when (status) {
|
|
||||||
BinaryMemcacheResponseStatus.SUCCESS -> {
|
|
||||||
log.debug(ctx) {
|
|
||||||
"Cache hit for key ${msg.key} on memcache"
|
|
||||||
}
|
|
||||||
inProgressGetRequest = InProgressGetRequest(msg.key, ctx)
|
|
||||||
}
|
|
||||||
|
|
||||||
BinaryMemcacheResponseStatus.KEY_ENOENT -> {
|
|
||||||
log.debug(ctx) {
|
|
||||||
"Cache miss for key ${msg.key} on memcache"
|
|
||||||
}
|
|
||||||
ctx.writeAndFlush(CacheValueNotFoundResponse())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun contentReceived(content: MemcacheContent) {
|
|
||||||
log.trace(ctx) {
|
|
||||||
"${if(content is LastMemcacheContent) "Last chunk" else "Chunk"} of ${content.content().readableBytes()} bytes received from memcache for key ${msg.key}"
|
|
||||||
}
|
|
||||||
inProgressGetRequest?.write(content.content())
|
|
||||||
if (content is LastMemcacheContent) {
|
|
||||||
inProgressGetRequest?.commit()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun exceptionCaught(ex: Throwable) {
|
|
||||||
inProgressGetRequest?.let {
|
|
||||||
inProgressGetRequest = null
|
|
||||||
it.rollback()
|
|
||||||
}
|
|
||||||
this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
client.sendRequest(key.retainedDuplicate(), responseHandler).thenAccept { requestHandle ->
|
|
||||||
log.trace(ctx) {
|
|
||||||
"Sending GET request for key ${msg.key} to memcache"
|
|
||||||
}
|
|
||||||
val request = DefaultBinaryMemcacheRequest(key).apply {
|
|
||||||
setOpcode(BinaryMemcacheOpcodes.GET)
|
|
||||||
}
|
|
||||||
requestHandle.sendRequest(request)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private fun handlePutRequest(ctx: ChannelHandlerContext, msg: CachePutRequest) {
|
|
||||||
val key = ctx.alloc().buffer().also {
|
|
||||||
it.writeBytes(processCacheKey(msg.key, digestAlgorithm))
|
|
||||||
}
|
|
||||||
val responseHandler = object : MemcacheResponseHandler {
|
|
||||||
override fun responseReceived(response: BinaryMemcacheResponse) {
|
|
||||||
val status = response.status()
|
|
||||||
when (status) {
|
|
||||||
BinaryMemcacheResponseStatus.SUCCESS -> {
|
|
||||||
log.debug(ctx) {
|
|
||||||
"Inserted key ${msg.key} into memcache"
|
|
||||||
}
|
|
||||||
ctx.writeAndFlush(CachePutResponse(msg.key))
|
|
||||||
}
|
|
||||||
else -> this@MemcacheCacheHandler.exceptionCaught(ctx, MemcacheException(status))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun contentReceived(content: MemcacheContent) {}
|
|
||||||
|
|
||||||
override fun exceptionCaught(ex: Throwable) {
|
|
||||||
this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
val requestController = client.sendRequest(key.retainedDuplicate(), responseHandler).whenComplete { _, ex ->
|
|
||||||
ex?.let {
|
|
||||||
this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
inProgressPutRequest = InProgressPutRequest(ctx.channel(), msg.metadata, key, requestController, ctx.alloc())
|
|
||||||
}
|
|
||||||
|
|
||||||
private fun handleCacheContent(ctx: ChannelHandlerContext, msg: CacheContent) {
|
|
||||||
inProgressPutRequest?.let { request ->
|
|
||||||
log.trace(ctx) {
|
|
||||||
"Received chunk of ${msg.content().readableBytes()} bytes for memcache"
|
|
||||||
}
|
|
||||||
request.write(msg.content())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private fun handleLastCacheContent(ctx: ChannelHandlerContext, msg: LastCacheContent) {
|
|
||||||
inProgressPutRequest?.let { request ->
|
|
||||||
inProgressPutRequest = null
|
|
||||||
log.trace(ctx) {
|
|
||||||
"Received last chunk of ${msg.content().readableBytes()} bytes for memcache"
|
|
||||||
}
|
|
||||||
request.write(msg.content())
|
|
||||||
val key = request.digest.retainedDuplicate()
|
|
||||||
val (payloadSize, payloadSource) = request.commit()
|
|
||||||
val extras = ctx.alloc().buffer(8, 8)
|
|
||||||
extras.writeInt(0)
|
|
||||||
extras.writeInt(encodeExpiry(maxAge))
|
|
||||||
val totalBodyLength = request.digest.readableBytes() + extras.readableBytes() + payloadSize
|
|
||||||
request.requestController.whenComplete { requestController, ex ->
|
|
||||||
if(ex == null) {
|
|
||||||
log.trace(ctx) {
|
|
||||||
"Sending SET request to memcache"
|
|
||||||
}
|
|
||||||
requestController.sendRequest(DefaultBinaryMemcacheRequest().apply {
|
|
||||||
setOpcode(BinaryMemcacheOpcodes.SET)
|
|
||||||
setKey(key)
|
|
||||||
setExtras(extras)
|
|
||||||
setTotalBodyLength(totalBodyLength)
|
|
||||||
})
|
|
||||||
log.trace(ctx) {
|
|
||||||
"Sending request payload to memcache"
|
|
||||||
}
|
|
||||||
payloadSource.use { source ->
|
|
||||||
val bb = ByteBuffer.allocate(chunkSize)
|
|
||||||
while (true) {
|
|
||||||
val read = source.read(bb)
|
|
||||||
bb.limit()
|
|
||||||
if(read >= 0 && bb.position() < chunkSize && bb.hasRemaining()) {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
val chunk = ctx.alloc().buffer(chunkSize)
|
|
||||||
bb.flip()
|
|
||||||
chunk.writeBytes(bb)
|
|
||||||
bb.clear()
|
|
||||||
log.trace(ctx) {
|
|
||||||
"Sending ${chunk.readableBytes()} bytes chunk to memcache"
|
|
||||||
}
|
|
||||||
if(read < 0) {
|
|
||||||
requestController.sendContent(DefaultLastMemcacheContent(chunk))
|
|
||||||
break
|
|
||||||
} else {
|
|
||||||
requestController.sendContent(DefaultMemcacheContent(chunk))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
payloadSource.close()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
|
||||||
inProgressGetRequest?.let {
|
|
||||||
inProgressGetRequest = null
|
|
||||||
it.rollback()
|
|
||||||
}
|
|
||||||
inProgressPutRequest?.let {
|
|
||||||
inProgressPutRequest = null
|
|
||||||
it.requestController.thenAccept { controller ->
|
|
||||||
controller.exceptionCaught(cause)
|
|
||||||
}
|
|
||||||
it.rollback()
|
|
||||||
}
|
|
||||||
super.exceptionCaught(ctx, cause)
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,102 +0,0 @@
|
|||||||
package net.woggioni.rbcs.server.memcache
|
|
||||||
|
|
||||||
import net.woggioni.rbcs.api.CacheProvider
|
|
||||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
|
||||||
import net.woggioni.rbcs.common.HostAndPort
|
|
||||||
import net.woggioni.rbcs.common.RBCS
|
|
||||||
import net.woggioni.rbcs.common.Xml
|
|
||||||
import net.woggioni.rbcs.common.Xml.Companion.asIterable
|
|
||||||
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
|
|
||||||
import org.w3c.dom.Document
|
|
||||||
import org.w3c.dom.Element
|
|
||||||
import java.time.Duration
|
|
||||||
import java.time.temporal.ChronoUnit
|
|
||||||
|
|
||||||
|
|
||||||
class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
|
|
||||||
override fun getXmlSchemaLocation() = "jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd"
|
|
||||||
|
|
||||||
override fun getXmlType() = "memcacheCacheType"
|
|
||||||
|
|
||||||
override fun getXmlNamespace() = "urn:net.woggioni.rbcs.server.memcache"
|
|
||||||
|
|
||||||
val xmlNamespacePrefix : String
|
|
||||||
get() = "rbcs-memcache"
|
|
||||||
|
|
||||||
override fun deserialize(el: Element): MemcacheCacheConfiguration {
|
|
||||||
val servers = mutableListOf<MemcacheCacheConfiguration.Server>()
|
|
||||||
val maxAge = el.renderAttribute("max-age")
|
|
||||||
?.let(Duration::parse)
|
|
||||||
?: Duration.ofDays(1)
|
|
||||||
val chunkSize = el.renderAttribute("chunk-size")
|
|
||||||
?.let(Integer::decode)
|
|
||||||
?: 0x10000
|
|
||||||
val compressionLevel = el.renderAttribute("compression-level")
|
|
||||||
?.let(Integer::decode)
|
|
||||||
?: -1
|
|
||||||
val compressionMode = el.renderAttribute("compression-mode")
|
|
||||||
?.let {
|
|
||||||
when (it) {
|
|
||||||
"deflate" -> MemcacheCacheConfiguration.CompressionMode.DEFLATE
|
|
||||||
else -> MemcacheCacheConfiguration.CompressionMode.DEFLATE
|
|
||||||
}
|
|
||||||
}
|
|
||||||
val digestAlgorithm = el.renderAttribute("digest")
|
|
||||||
for (child in el.asIterable()) {
|
|
||||||
when (child.nodeName) {
|
|
||||||
"server" -> {
|
|
||||||
val host = child.renderAttribute("host") ?: throw ConfigurationException("host attribute is required")
|
|
||||||
val port = child.renderAttribute("port")?.toInt() ?: throw ConfigurationException("port attribute is required")
|
|
||||||
val maxConnections = child.renderAttribute("max-connections")?.toInt() ?: 1
|
|
||||||
val connectionTimeout = child.renderAttribute("connection-timeout")
|
|
||||||
?.let(Duration::parse)
|
|
||||||
?.let(Duration::toMillis)
|
|
||||||
?.let(Long::toInt)
|
|
||||||
?: 10000
|
|
||||||
servers.add(MemcacheCacheConfiguration.Server(HostAndPort(host, port), connectionTimeout, maxConnections))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return MemcacheCacheConfiguration(
|
|
||||||
servers,
|
|
||||||
maxAge,
|
|
||||||
digestAlgorithm,
|
|
||||||
compressionMode,
|
|
||||||
compressionLevel,
|
|
||||||
chunkSize
|
|
||||||
)
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun serialize(doc: Document, cache: MemcacheCacheConfiguration) = cache.run {
|
|
||||||
val result = doc.createElement("cache")
|
|
||||||
Xml.of(doc, result) {
|
|
||||||
attr("xmlns:${xmlNamespacePrefix}", xmlNamespace, namespaceURI = "http://www.w3.org/2000/xmlns/")
|
|
||||||
attr("xs:type", "${xmlNamespacePrefix}:$xmlType", RBCS.XML_SCHEMA_NAMESPACE_URI)
|
|
||||||
for (server in servers) {
|
|
||||||
node("server") {
|
|
||||||
attr("host", server.endpoint.host)
|
|
||||||
attr("port", server.endpoint.port.toString())
|
|
||||||
server.connectionTimeoutMillis?.let { connectionTimeoutMillis ->
|
|
||||||
attr("connection-timeout", Duration.of(connectionTimeoutMillis.toLong(), ChronoUnit.MILLIS).toString())
|
|
||||||
}
|
|
||||||
attr("max-connections", server.maxConnections.toString())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
attr("max-age", maxAge.toString())
|
|
||||||
attr("chunk-size", chunkSize.toString())
|
|
||||||
digestAlgorithm?.let { digestAlgorithm ->
|
|
||||||
attr("digest", digestAlgorithm)
|
|
||||||
}
|
|
||||||
compressionMode?.let { compressionMode ->
|
|
||||||
attr(
|
|
||||||
"compression-mode", when (compressionMode) {
|
|
||||||
MemcacheCacheConfiguration.CompressionMode.DEFLATE -> "deflate"
|
|
||||||
}
|
|
||||||
)
|
|
||||||
}
|
|
||||||
attr("compression-level", compressionLevel.toString())
|
|
||||||
}
|
|
||||||
result
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,214 +0,0 @@
|
|||||||
package net.woggioni.rbcs.server.memcache.client
|
|
||||||
|
|
||||||
|
|
||||||
import io.netty.bootstrap.Bootstrap
|
|
||||||
import io.netty.buffer.ByteBuf
|
|
||||||
import io.netty.channel.Channel
|
|
||||||
import io.netty.channel.ChannelFactory
|
|
||||||
import io.netty.channel.ChannelFutureListener
|
|
||||||
import io.netty.channel.ChannelHandlerContext
|
|
||||||
import io.netty.channel.ChannelOption
|
|
||||||
import io.netty.channel.ChannelPipeline
|
|
||||||
import io.netty.channel.EventLoopGroup
|
|
||||||
import io.netty.channel.SimpleChannelInboundHandler
|
|
||||||
import io.netty.channel.pool.AbstractChannelPoolHandler
|
|
||||||
import io.netty.channel.pool.ChannelPool
|
|
||||||
import io.netty.channel.pool.FixedChannelPool
|
|
||||||
import io.netty.channel.socket.SocketChannel
|
|
||||||
import io.netty.handler.codec.memcache.LastMemcacheContent
|
|
||||||
import io.netty.handler.codec.memcache.MemcacheContent
|
|
||||||
import io.netty.handler.codec.memcache.MemcacheObject
|
|
||||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheClientCodec
|
|
||||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheRequest
|
|
||||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
|
|
||||||
import io.netty.util.concurrent.GenericFutureListener
|
|
||||||
import net.woggioni.rbcs.common.HostAndPort
|
|
||||||
import net.woggioni.rbcs.common.createLogger
|
|
||||||
import net.woggioni.rbcs.common.warn
|
|
||||||
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration
|
|
||||||
import net.woggioni.rbcs.server.memcache.MemcacheCacheHandler
|
|
||||||
import java.io.IOException
|
|
||||||
import java.net.InetSocketAddress
|
|
||||||
import java.util.concurrent.CompletableFuture
|
|
||||||
import java.util.concurrent.ConcurrentHashMap
|
|
||||||
import io.netty.util.concurrent.Future as NettyFuture
|
|
||||||
|
|
||||||
|
|
||||||
class MemcacheClient(
|
|
||||||
private val servers: List<MemcacheCacheConfiguration.Server>,
|
|
||||||
private val chunkSize : Int,
|
|
||||||
private val group: EventLoopGroup,
|
|
||||||
private val channelFactory: ChannelFactory<SocketChannel>,
|
|
||||||
private val connectionPool: ConcurrentHashMap<HostAndPort, FixedChannelPool>
|
|
||||||
) : AutoCloseable {
|
|
||||||
|
|
||||||
private companion object {
|
|
||||||
private val log = createLogger<MemcacheCacheHandler>()
|
|
||||||
}
|
|
||||||
|
|
||||||
private fun newConnectionPool(server: MemcacheCacheConfiguration.Server): FixedChannelPool {
|
|
||||||
val bootstrap = Bootstrap().apply {
|
|
||||||
group(group)
|
|
||||||
channelFactory(channelFactory)
|
|
||||||
option(ChannelOption.SO_KEEPALIVE, true)
|
|
||||||
remoteAddress(InetSocketAddress(server.endpoint.host, server.endpoint.port))
|
|
||||||
server.connectionTimeoutMillis?.let {
|
|
||||||
option(ChannelOption.CONNECT_TIMEOUT_MILLIS, it)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
val channelPoolHandler = object : AbstractChannelPoolHandler() {
|
|
||||||
|
|
||||||
override fun channelCreated(ch: Channel) {
|
|
||||||
val pipeline: ChannelPipeline = ch.pipeline()
|
|
||||||
pipeline.addLast(BinaryMemcacheClientCodec(chunkSize, true))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return FixedChannelPool(bootstrap, channelPoolHandler, server.maxConnections)
|
|
||||||
}
|
|
||||||
|
|
||||||
fun sendRequest(
|
|
||||||
key: ByteBuf,
|
|
||||||
responseHandler: MemcacheResponseHandler
|
|
||||||
): CompletableFuture<MemcacheRequestController> {
|
|
||||||
val server = if (servers.size > 1) {
|
|
||||||
var checksum = 0
|
|
||||||
while (key.readableBytes() > 4) {
|
|
||||||
val byte = key.readInt()
|
|
||||||
checksum = checksum xor byte
|
|
||||||
}
|
|
||||||
while (key.readableBytes() > 0) {
|
|
||||||
val byte = key.readByte()
|
|
||||||
checksum = checksum xor byte.toInt()
|
|
||||||
}
|
|
||||||
servers[checksum % servers.size]
|
|
||||||
} else {
|
|
||||||
servers.first()
|
|
||||||
}
|
|
||||||
key.release()
|
|
||||||
|
|
||||||
val response = CompletableFuture<MemcacheRequestController>()
|
|
||||||
// Custom handler for processing responses
|
|
||||||
val pool = connectionPool.computeIfAbsent(server.endpoint) {
|
|
||||||
newConnectionPool(server)
|
|
||||||
}
|
|
||||||
pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {
|
|
||||||
override fun operationComplete(channelFuture: NettyFuture<Channel>) {
|
|
||||||
if (channelFuture.isSuccess) {
|
|
||||||
|
|
||||||
var requestSent = false
|
|
||||||
var requestBodySent = false
|
|
||||||
var requestFinished = false
|
|
||||||
var responseReceived = false
|
|
||||||
var responseBodyReceived = false
|
|
||||||
var responseFinished = false
|
|
||||||
var requestBodySize = 0
|
|
||||||
var requestBodyBytesSent = 0
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
val channel = channelFuture.now
|
|
||||||
var connectionClosedByTheRemoteServer = true
|
|
||||||
val closeCallback = {
|
|
||||||
if (connectionClosedByTheRemoteServer) {
|
|
||||||
val ex = IOException("The memcache server closed the connection")
|
|
||||||
val completed = response.completeExceptionally(ex)
|
|
||||||
if(!completed) responseHandler.exceptionCaught(ex)
|
|
||||||
log.warn {
|
|
||||||
"RequestSent: $requestSent, RequestBodySent: $requestBodySent, " +
|
|
||||||
"RequestFinished: $requestFinished, ResponseReceived: $responseReceived, " +
|
|
||||||
"ResponseBodyReceived: $responseBodyReceived, ResponseFinished: $responseFinished, " +
|
|
||||||
"RequestBodySize: $requestBodySize, RequestBodyBytesSent: $requestBodyBytesSent"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pool.release(channel)
|
|
||||||
}
|
|
||||||
val closeListener = ChannelFutureListener {
|
|
||||||
closeCallback()
|
|
||||||
}
|
|
||||||
channel.closeFuture().addListener(closeListener)
|
|
||||||
val pipeline = channel.pipeline()
|
|
||||||
val handler = object : SimpleChannelInboundHandler<MemcacheObject>() {
|
|
||||||
|
|
||||||
override fun handlerAdded(ctx: ChannelHandlerContext) {
|
|
||||||
channel.closeFuture().removeListener(closeListener)
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun channelRead0(
|
|
||||||
ctx: ChannelHandlerContext,
|
|
||||||
msg: MemcacheObject
|
|
||||||
) {
|
|
||||||
when (msg) {
|
|
||||||
is BinaryMemcacheResponse -> {
|
|
||||||
responseHandler.responseReceived(msg)
|
|
||||||
responseReceived = true
|
|
||||||
}
|
|
||||||
|
|
||||||
is LastMemcacheContent -> {
|
|
||||||
responseFinished = true
|
|
||||||
responseHandler.contentReceived(msg)
|
|
||||||
pipeline.remove(this)
|
|
||||||
pool.release(channel)
|
|
||||||
}
|
|
||||||
|
|
||||||
is MemcacheContent -> {
|
|
||||||
responseBodyReceived = true
|
|
||||||
responseHandler.contentReceived(msg)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun channelInactive(ctx: ChannelHandlerContext) {
|
|
||||||
closeCallback()
|
|
||||||
ctx.fireChannelInactive()
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
|
||||||
connectionClosedByTheRemoteServer = false
|
|
||||||
ctx.close()
|
|
||||||
pool.release(channel)
|
|
||||||
responseHandler.exceptionCaught(cause)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
channel.pipeline()
|
|
||||||
.addLast("client-handler", handler)
|
|
||||||
response.complete(object : MemcacheRequestController {
|
|
||||||
|
|
||||||
override fun sendRequest(request: BinaryMemcacheRequest) {
|
|
||||||
requestBodySize = request.totalBodyLength() - request.keyLength() - request.extrasLength()
|
|
||||||
channel.writeAndFlush(request)
|
|
||||||
requestSent = true
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun sendContent(content: MemcacheContent) {
|
|
||||||
val size = content.content().readableBytes()
|
|
||||||
channel.writeAndFlush(content).addListener {
|
|
||||||
requestBodyBytesSent += size
|
|
||||||
requestBodySent = true
|
|
||||||
if(content is LastMemcacheContent) {
|
|
||||||
requestFinished = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun exceptionCaught(ex: Throwable) {
|
|
||||||
connectionClosedByTheRemoteServer = false
|
|
||||||
channel.close()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
} else {
|
|
||||||
response.completeExceptionally(channelFuture.cause())
|
|
||||||
}
|
|
||||||
}
|
|
||||||
})
|
|
||||||
return response
|
|
||||||
}
|
|
||||||
|
|
||||||
fun shutDown(): NettyFuture<*> {
|
|
||||||
return group.shutdownGracefully()
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun close() {
|
|
||||||
shutDown().sync()
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,13 +0,0 @@
|
|||||||
package net.woggioni.rbcs.server.memcache.client
|
|
||||||
|
|
||||||
import io.netty.handler.codec.memcache.MemcacheContent
|
|
||||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheRequest
|
|
||||||
|
|
||||||
interface MemcacheRequestController {
|
|
||||||
|
|
||||||
fun sendRequest(request : BinaryMemcacheRequest)
|
|
||||||
|
|
||||||
fun sendContent(content : MemcacheContent)
|
|
||||||
|
|
||||||
fun exceptionCaught(ex : Throwable)
|
|
||||||
}
|
|
@@ -1,14 +0,0 @@
|
|||||||
package net.woggioni.rbcs.server.memcache.client
|
|
||||||
|
|
||||||
import io.netty.handler.codec.memcache.MemcacheContent
|
|
||||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
|
|
||||||
|
|
||||||
interface MemcacheResponseHandler {
|
|
||||||
|
|
||||||
|
|
||||||
fun responseReceived(response : BinaryMemcacheResponse)
|
|
||||||
|
|
||||||
fun contentReceived(content : MemcacheContent)
|
|
||||||
|
|
||||||
fun exceptionCaught(ex : Throwable)
|
|
||||||
}
|
|
@@ -1 +0,0 @@
|
|||||||
net.woggioni.rbcs.server.memcache.MemcacheCacheProvider
|
|
@@ -1,37 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
|
||||||
<xs:schema targetNamespace="urn:net.woggioni.rbcs.server.memcache"
|
|
||||||
xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
|
|
||||||
xmlns:rbcs="urn:net.woggioni.rbcs.server"
|
|
||||||
xmlns:xs="http://www.w3.org/2001/XMLSchema">
|
|
||||||
|
|
||||||
<xs:import schemaLocation="jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs.xsd" namespace="urn:net.woggioni.rbcs.server"/>
|
|
||||||
|
|
||||||
<xs:complexType name="memcacheServerType">
|
|
||||||
<xs:attribute name="host" type="xs:token" use="required"/>
|
|
||||||
<xs:attribute name="port" type="xs:positiveInteger" use="required"/>
|
|
||||||
<xs:attribute name="connection-timeout" type="xs:duration"/>
|
|
||||||
<xs:attribute name="max-connections" type="xs:positiveInteger" default="1"/>
|
|
||||||
</xs:complexType>
|
|
||||||
|
|
||||||
<xs:complexType name="memcacheCacheType">
|
|
||||||
<xs:complexContent>
|
|
||||||
<xs:extension base="rbcs:cacheType">
|
|
||||||
<xs:sequence maxOccurs="unbounded">
|
|
||||||
<xs:element name="server" type="rbcs-memcache:memcacheServerType"/>
|
|
||||||
</xs:sequence>
|
|
||||||
<xs:attribute name="max-age" type="xs:duration" default="P1D"/>
|
|
||||||
<xs:attribute name="chunk-size" type="rbcs:byteSizeType" default="0x10000"/>
|
|
||||||
<xs:attribute name="digest" type="xs:token"/>
|
|
||||||
<xs:attribute name="compression-mode" type="rbcs-memcache:compressionType"/>
|
|
||||||
<xs:attribute name="compression-level" type="rbcs:compressionLevelType" default="-1"/>
|
|
||||||
</xs:extension>
|
|
||||||
</xs:complexContent>
|
|
||||||
</xs:complexType>
|
|
||||||
|
|
||||||
<xs:simpleType name="compressionType">
|
|
||||||
<xs:restriction base="xs:token">
|
|
||||||
<xs:enumeration value="deflate"/>
|
|
||||||
</xs:restriction>
|
|
||||||
</xs:simpleType>
|
|
||||||
|
|
||||||
</xs:schema>
|
|
@@ -1,27 +0,0 @@
|
|||||||
package net.woggioni.rbcs.server.memcache.client
|
|
||||||
|
|
||||||
import io.netty.buffer.ByteBufUtil
|
|
||||||
import io.netty.buffer.Unpooled
|
|
||||||
import org.junit.jupiter.api.Assertions
|
|
||||||
import org.junit.jupiter.api.Test
|
|
||||||
import java.io.ByteArrayInputStream
|
|
||||||
import java.nio.ByteBuffer
|
|
||||||
import java.nio.channels.Channels
|
|
||||||
import kotlin.random.Random
|
|
||||||
|
|
||||||
class ByteBufferTest {
|
|
||||||
|
|
||||||
@Test
|
|
||||||
fun test() {
|
|
||||||
val byteBuffer = ByteBuffer.allocate(0x100)
|
|
||||||
val originalBytes = Random(101325).nextBytes(0x100)
|
|
||||||
Channels.newChannel(ByteArrayInputStream(originalBytes)).use { source ->
|
|
||||||
source.read(byteBuffer)
|
|
||||||
}
|
|
||||||
byteBuffer.flip()
|
|
||||||
val buf = Unpooled.buffer()
|
|
||||||
buf.writeBytes(byteBuffer)
|
|
||||||
val finalBytes = ByteBufUtil.getBytes(buf)
|
|
||||||
Assertions.assertArrayEquals(originalBytes, finalBytes)
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,42 +0,0 @@
|
|||||||
plugins {
|
|
||||||
id 'java-library'
|
|
||||||
alias catalog.plugins.kotlin.jvm
|
|
||||||
id 'jacoco'
|
|
||||||
id 'maven-publish'
|
|
||||||
}
|
|
||||||
|
|
||||||
dependencies {
|
|
||||||
implementation catalog.jwo
|
|
||||||
implementation catalog.slf4j.api
|
|
||||||
implementation catalog.netty.codec.http
|
|
||||||
implementation catalog.netty.handler
|
|
||||||
implementation catalog.netty.buffer
|
|
||||||
implementation catalog.netty.transport
|
|
||||||
|
|
||||||
api project(':rbcs-common')
|
|
||||||
api project(':rbcs-api')
|
|
||||||
|
|
||||||
// runtimeOnly catalog.slf4j.jdk14
|
|
||||||
testRuntimeOnly catalog.logback.classic
|
|
||||||
|
|
||||||
testImplementation catalog.bcprov.jdk18on
|
|
||||||
testImplementation catalog.bcpkix.jdk18on
|
|
||||||
|
|
||||||
testRuntimeOnly project(":rbcs-server-memcache")
|
|
||||||
}
|
|
||||||
|
|
||||||
test {
|
|
||||||
systemProperty("io.netty.leakDetectionLevel", "PARANOID")
|
|
||||||
systemProperty("jdk.httpclient.redirects.retrylimit", "1")
|
|
||||||
}
|
|
||||||
|
|
||||||
publishing {
|
|
||||||
publications {
|
|
||||||
maven(MavenPublication) {
|
|
||||||
from(components["java"])
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
@@ -1,29 +0,0 @@
|
|||||||
import net.woggioni.rbcs.api.CacheProvider;
|
|
||||||
import net.woggioni.rbcs.server.cache.FileSystemCacheProvider;
|
|
||||||
import net.woggioni.rbcs.server.cache.InMemoryCacheProvider;
|
|
||||||
|
|
||||||
module net.woggioni.rbcs.server {
|
|
||||||
requires java.sql;
|
|
||||||
requires java.xml;
|
|
||||||
requires java.logging;
|
|
||||||
requires java.naming;
|
|
||||||
requires kotlin.stdlib;
|
|
||||||
requires io.netty.buffer;
|
|
||||||
requires io.netty.transport;
|
|
||||||
requires io.netty.codec.http;
|
|
||||||
requires io.netty.common;
|
|
||||||
requires io.netty.handler;
|
|
||||||
requires io.netty.codec;
|
|
||||||
requires org.slf4j;
|
|
||||||
requires net.woggioni.jwo;
|
|
||||||
requires net.woggioni.rbcs.common;
|
|
||||||
requires net.woggioni.rbcs.api;
|
|
||||||
|
|
||||||
exports net.woggioni.rbcs.server;
|
|
||||||
|
|
||||||
opens net.woggioni.rbcs.server;
|
|
||||||
opens net.woggioni.rbcs.server.schema;
|
|
||||||
|
|
||||||
uses CacheProvider;
|
|
||||||
provides CacheProvider with FileSystemCacheProvider, InMemoryCacheProvider;
|
|
||||||
}
|
|
@@ -1,506 +0,0 @@
|
|||||||
package net.woggioni.rbcs.server
|
|
||||||
|
|
||||||
import io.netty.bootstrap.ServerBootstrap
|
|
||||||
import io.netty.buffer.ByteBuf
|
|
||||||
import io.netty.channel.Channel
|
|
||||||
import io.netty.channel.ChannelFactory
|
|
||||||
import io.netty.channel.ChannelFuture
|
|
||||||
import io.netty.channel.ChannelHandler.Sharable
|
|
||||||
import io.netty.channel.ChannelHandlerContext
|
|
||||||
import io.netty.channel.ChannelInboundHandlerAdapter
|
|
||||||
import io.netty.channel.ChannelInitializer
|
|
||||||
import io.netty.channel.ChannelOption
|
|
||||||
import io.netty.channel.ChannelPromise
|
|
||||||
import io.netty.channel.nio.NioEventLoopGroup
|
|
||||||
import io.netty.channel.socket.DatagramChannel
|
|
||||||
import io.netty.channel.socket.ServerSocketChannel
|
|
||||||
import io.netty.channel.socket.SocketChannel
|
|
||||||
import io.netty.channel.socket.nio.NioDatagramChannel
|
|
||||||
import io.netty.channel.socket.nio.NioServerSocketChannel
|
|
||||||
import io.netty.channel.socket.nio.NioSocketChannel
|
|
||||||
import io.netty.handler.codec.compression.CompressionOptions
|
|
||||||
import io.netty.handler.codec.http.DefaultHttpContent
|
|
||||||
import io.netty.handler.codec.http.HttpContentCompressor
|
|
||||||
import io.netty.handler.codec.http.HttpHeaderNames
|
|
||||||
import io.netty.handler.codec.http.HttpRequest
|
|
||||||
import io.netty.handler.codec.http.HttpServerCodec
|
|
||||||
import io.netty.handler.ssl.ClientAuth
|
|
||||||
import io.netty.handler.ssl.SslContext
|
|
||||||
import io.netty.handler.ssl.SslContextBuilder
|
|
||||||
import io.netty.handler.ssl.SslHandler
|
|
||||||
import io.netty.handler.stream.ChunkedWriteHandler
|
|
||||||
import io.netty.handler.timeout.IdleState
|
|
||||||
import io.netty.handler.timeout.IdleStateEvent
|
|
||||||
import io.netty.handler.timeout.IdleStateHandler
|
|
||||||
import io.netty.util.AttributeKey
|
|
||||||
import io.netty.util.concurrent.DefaultEventExecutorGroup
|
|
||||||
import io.netty.util.concurrent.EventExecutorGroup
|
|
||||||
import net.woggioni.jwo.JWO
|
|
||||||
import net.woggioni.jwo.Tuple2
|
|
||||||
import net.woggioni.rbcs.api.AsyncCloseable
|
|
||||||
import net.woggioni.rbcs.api.Configuration
|
|
||||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
|
||||||
import net.woggioni.rbcs.common.PasswordSecurity.decodePasswordHash
|
|
||||||
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
|
|
||||||
import net.woggioni.rbcs.common.RBCS.toUrl
|
|
||||||
import net.woggioni.rbcs.common.Xml
|
|
||||||
import net.woggioni.rbcs.common.createLogger
|
|
||||||
import net.woggioni.rbcs.common.debug
|
|
||||||
import net.woggioni.rbcs.common.info
|
|
||||||
import net.woggioni.rbcs.server.auth.AbstractNettyHttpAuthenticator
|
|
||||||
import net.woggioni.rbcs.server.auth.Authorizer
|
|
||||||
import net.woggioni.rbcs.server.auth.ClientCertificateValidator
|
|
||||||
import net.woggioni.rbcs.server.auth.RoleAuthorizer
|
|
||||||
import net.woggioni.rbcs.server.configuration.Parser
|
|
||||||
import net.woggioni.rbcs.server.configuration.Serializer
|
|
||||||
import net.woggioni.rbcs.server.exception.ExceptionHandler
|
|
||||||
import net.woggioni.rbcs.server.handler.MaxRequestSizeHandler
|
|
||||||
import net.woggioni.rbcs.server.handler.ServerHandler
|
|
||||||
import net.woggioni.rbcs.server.handler.TraceHandler
|
|
||||||
import net.woggioni.rbcs.server.throttling.BucketManager
|
|
||||||
import net.woggioni.rbcs.server.throttling.ThrottlingHandler
|
|
||||||
import java.io.OutputStream
|
|
||||||
import java.net.InetSocketAddress
|
|
||||||
import java.nio.file.Files
|
|
||||||
import java.nio.file.Path
|
|
||||||
import java.security.KeyStore
|
|
||||||
import java.security.PrivateKey
|
|
||||||
import java.security.cert.X509Certificate
|
|
||||||
import java.time.Duration
|
|
||||||
import java.time.Instant
|
|
||||||
import java.util.Arrays
|
|
||||||
import java.util.Base64
|
|
||||||
import java.util.concurrent.CompletableFuture
|
|
||||||
import java.util.concurrent.Future
|
|
||||||
import java.util.concurrent.TimeUnit
|
|
||||||
import java.util.concurrent.TimeoutException
|
|
||||||
import java.util.regex.Matcher
|
|
||||||
import java.util.regex.Pattern
|
|
||||||
import javax.naming.ldap.LdapName
|
|
||||||
import javax.net.ssl.SSLPeerUnverifiedException
|
|
||||||
|
|
||||||
class RemoteBuildCacheServer(private val cfg: Configuration) {
|
|
||||||
|
|
||||||
companion object {
|
|
||||||
private val log = createLogger<RemoteBuildCacheServer>()
|
|
||||||
|
|
||||||
val userAttribute: AttributeKey<Configuration.User> = AttributeKey.valueOf("user")
|
|
||||||
val groupAttribute: AttributeKey<Set<Configuration.Group>> = AttributeKey.valueOf("group")
|
|
||||||
|
|
||||||
val DEFAULT_CONFIGURATION_URL by lazy { "classpath:net/woggioni/rbcs/server/rbcs-default.xml".toUrl() }
|
|
||||||
private const val SSL_HANDLER_NAME = "sslHandler"
|
|
||||||
|
|
||||||
fun loadConfiguration(configurationFile: Path): Configuration {
|
|
||||||
val doc = Files.newInputStream(configurationFile).use {
|
|
||||||
Xml.parseXml(configurationFile.toUri().toURL(), it)
|
|
||||||
}
|
|
||||||
return Parser.parse(doc)
|
|
||||||
}
|
|
||||||
|
|
||||||
fun dumpConfiguration(conf: Configuration, outputStream: OutputStream) {
|
|
||||||
Xml.write(Serializer.serialize(conf), outputStream)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private class HttpChunkContentCompressor(
|
|
||||||
threshold: Int,
|
|
||||||
vararg compressionOptions: CompressionOptions = emptyArray()
|
|
||||||
) : HttpContentCompressor(threshold, *compressionOptions) {
|
|
||||||
override fun write(ctx: ChannelHandlerContext, msg: Any, promise: ChannelPromise) {
|
|
||||||
var message: Any? = msg
|
|
||||||
if (message is ByteBuf) {
|
|
||||||
// convert ByteBuf to HttpContent to make it work with compression. This is needed as we use the
|
|
||||||
// ChunkedWriteHandler to send files when compression is enabled.
|
|
||||||
val buff = message
|
|
||||||
if (buff.isReadable) {
|
|
||||||
// We only encode non empty buffers, as empty buffers can be used for determining when
|
|
||||||
// the content has been flushed and it confuses the HttpContentCompressor
|
|
||||||
// if we let it go
|
|
||||||
message = DefaultHttpContent(buff)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
super.write(ctx, message, promise)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Sharable
|
|
||||||
private class ClientCertificateAuthenticator(
|
|
||||||
authorizer: Authorizer,
|
|
||||||
private val anonymousUserGroups: Set<Configuration.Group>?,
|
|
||||||
private val userExtractor: Configuration.UserExtractor?,
|
|
||||||
private val groupExtractor: Configuration.GroupExtractor?,
|
|
||||||
) : AbstractNettyHttpAuthenticator(authorizer) {
|
|
||||||
|
|
||||||
override fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): AuthenticationResult? {
|
|
||||||
return try {
|
|
||||||
val sslHandler = (ctx.pipeline().get(SSL_HANDLER_NAME) as? SslHandler)
|
|
||||||
?: throw ConfigurationException("Client certificate authentication cannot be used when TLS is disabled")
|
|
||||||
val sslEngine = sslHandler.engine()
|
|
||||||
sslEngine.session.peerCertificates.takeIf {
|
|
||||||
it.isNotEmpty()
|
|
||||||
}?.let { peerCertificates ->
|
|
||||||
val clientCertificate = peerCertificates.first() as X509Certificate
|
|
||||||
val user = userExtractor?.extract(clientCertificate)
|
|
||||||
val group = groupExtractor?.extract(clientCertificate)
|
|
||||||
val allGroups =
|
|
||||||
((user?.groups ?: emptySet()).asSequence() + sequenceOf(group).filterNotNull()).toSet()
|
|
||||||
AuthenticationResult(user, allGroups)
|
|
||||||
} ?: anonymousUserGroups?.let { AuthenticationResult(null, it) }
|
|
||||||
} catch (es: SSLPeerUnverifiedException) {
|
|
||||||
anonymousUserGroups?.let { AuthenticationResult(null, it) }
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@Sharable
|
|
||||||
private class NettyHttpBasicAuthenticator(
|
|
||||||
private val users: Map<String, Configuration.User>, authorizer: Authorizer
|
|
||||||
) : AbstractNettyHttpAuthenticator(authorizer) {
|
|
||||||
companion object {
|
|
||||||
private val log = createLogger<NettyHttpBasicAuthenticator>()
|
|
||||||
}
|
|
||||||
|
|
||||||
override fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): AuthenticationResult? {
|
|
||||||
val authorizationHeader = req.headers()[HttpHeaderNames.AUTHORIZATION] ?: let {
|
|
||||||
log.debug(ctx) {
|
|
||||||
"Missing Authorization header"
|
|
||||||
}
|
|
||||||
return users[""]?.let { AuthenticationResult(it, it.groups) }
|
|
||||||
}
|
|
||||||
val cursor = authorizationHeader.indexOf(' ')
|
|
||||||
if (cursor < 0) {
|
|
||||||
log.debug(ctx) {
|
|
||||||
"Invalid Authorization header: '$authorizationHeader'"
|
|
||||||
}
|
|
||||||
return users[""]?.let { AuthenticationResult(it, it.groups) }
|
|
||||||
}
|
|
||||||
val authenticationType = authorizationHeader.substring(0, cursor)
|
|
||||||
if ("Basic" != authenticationType) {
|
|
||||||
log.debug(ctx) {
|
|
||||||
"Invalid authentication type header: '$authenticationType'"
|
|
||||||
}
|
|
||||||
return users[""]?.let { AuthenticationResult(it, it.groups) }
|
|
||||||
}
|
|
||||||
val (username, password) = Base64.getDecoder().decode(authorizationHeader.substring(cursor + 1))
|
|
||||||
.let(::String)
|
|
||||||
.let {
|
|
||||||
val colon = it.indexOf(':')
|
|
||||||
if (colon < 0) {
|
|
||||||
log.debug(ctx) {
|
|
||||||
"Missing colon from authentication"
|
|
||||||
}
|
|
||||||
return null
|
|
||||||
}
|
|
||||||
it.substring(0, colon) to it.substring(colon + 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
return username.let(users::get)?.takeIf { user ->
|
|
||||||
user.password?.let { passwordAndSalt ->
|
|
||||||
val (_, salt) = decodePasswordHash(passwordAndSalt)
|
|
||||||
hashPassword(password, Base64.getEncoder().encodeToString(salt)) == passwordAndSalt
|
|
||||||
} ?: false
|
|
||||||
}?.let { user ->
|
|
||||||
AuthenticationResult(user, user.groups)
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
private class ServerInitializer(
|
|
||||||
private val cfg: Configuration,
|
|
||||||
private val channelFactory : ChannelFactory<SocketChannel>,
|
|
||||||
private val datagramChannelFactory : ChannelFactory<DatagramChannel>,
|
|
||||||
private val eventExecutorGroup: EventExecutorGroup
|
|
||||||
) : ChannelInitializer<Channel>(), AsyncCloseable {
|
|
||||||
|
|
||||||
companion object {
|
|
||||||
private fun createSslCtx(tls: Configuration.Tls): SslContext {
|
|
||||||
val keyStore = tls.keyStore
|
|
||||||
return if (keyStore == null) {
|
|
||||||
throw IllegalArgumentException("No keystore configured")
|
|
||||||
} else {
|
|
||||||
val javaKeyStore = loadKeystore(keyStore.file, keyStore.password)
|
|
||||||
val serverKey = javaKeyStore.getKey(
|
|
||||||
keyStore.keyAlias, (keyStore.keyPassword ?: "").let(String::toCharArray)
|
|
||||||
) as PrivateKey
|
|
||||||
val serverCert: Array<X509Certificate> =
|
|
||||||
Arrays.stream(javaKeyStore.getCertificateChain(keyStore.keyAlias))
|
|
||||||
.map { it as X509Certificate }
|
|
||||||
.toArray { size -> Array<X509Certificate?>(size) { null } }
|
|
||||||
SslContextBuilder.forServer(serverKey, *serverCert).apply {
|
|
||||||
val clientAuth = tls.trustStore?.let { trustStore ->
|
|
||||||
val ts = loadKeystore(trustStore.file, trustStore.password)
|
|
||||||
trustManager(
|
|
||||||
ClientCertificateValidator.getTrustManager(ts, trustStore.isCheckCertificateStatus)
|
|
||||||
)
|
|
||||||
if (trustStore.isRequireClientCertificate) ClientAuth.REQUIRE
|
|
||||||
else ClientAuth.OPTIONAL
|
|
||||||
} ?: ClientAuth.NONE
|
|
||||||
clientAuth(clientAuth)
|
|
||||||
}.build()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fun loadKeystore(file: Path, password: String?): KeyStore {
|
|
||||||
val ext = JWO.splitExtension(file)
|
|
||||||
.map(Tuple2<String, String>::get_2)
|
|
||||||
.orElseThrow {
|
|
||||||
IllegalArgumentException(
|
|
||||||
"Keystore file '${file}' must have .jks, .p12, .pfx extension"
|
|
||||||
)
|
|
||||||
}
|
|
||||||
val keystore = when (ext.substring(1).lowercase()) {
|
|
||||||
"jks" -> KeyStore.getInstance("JKS")
|
|
||||||
"p12", "pfx" -> KeyStore.getInstance("PKCS12")
|
|
||||||
else -> throw IllegalArgumentException(
|
|
||||||
"Keystore file '${file}' must have .jks, .p12, .pfx extension"
|
|
||||||
)
|
|
||||||
}
|
|
||||||
Files.newInputStream(file).use {
|
|
||||||
keystore.load(it, password?.let(String::toCharArray))
|
|
||||||
}
|
|
||||||
return keystore
|
|
||||||
}
|
|
||||||
|
|
||||||
private val log = createLogger<ServerInitializer>()
|
|
||||||
}
|
|
||||||
|
|
||||||
// Factory producing the per-connection cache handler; materialized once from the
// configured cache backend and closed via asyncClose() below.
private val cacheHandlerFactory = cfg.cache.materialize()

// Rate-limiting buckets consumed by the ThrottlingHandler installed in initChannel.
private val bucketManager = BucketManager.from(cfg)

// Authentication handler selected from configuration; null means no authenticator
// is added to the pipeline (see initChannel).
private val authenticator = when (val auth = cfg.authentication) {
    is Configuration.BasicAuthentication -> NettyHttpBasicAuthenticator(cfg.users, RoleAuthorizer())
    is Configuration.ClientCertificateAuthentication -> {
        ClientCertificateAuthenticator(
            RoleAuthorizer(),
            // Groups attached to the "" (empty-name) user entry, if present —
            // presumably the anonymous/default user; TODO confirm.
            cfg.users[""]?.groups,
            userExtractor(auth),
            groupExtractor(auth)
        )
    }

    else -> null
}

// TLS context built from configuration; null disables TLS (no SSL handler installed).
private val sslContext: SslContext? = cfg.tls?.let(Companion::createSslCtx)
||||||
/**
 * Builds a [Configuration.UserExtractor] that maps a client certificate to a configured user,
 * or returns `null` when no user extractor is configured.
 *
 * The extractor scans the certificate subject's RDNs for the configured RDN type,
 * applies the configured regex to its value and uses capture group 1 as the user name.
 * A certificate that yields no known user makes the extractor throw.
 */
private fun userExtractor(authentication: Configuration.ClientCertificateAuthentication) =
    authentication.userExtractor?.let { extractor ->
        val compiledPattern = Pattern.compile(extractor.pattern)
        val wantedRdnType = extractor.rdnType
        Configuration.UserExtractor { cert: X509Certificate ->
            val matchingRdn = LdapName(cert.subjectX500Principal.name)
                .rdns
                .find { rdn -> rdn.type == wantedRdnType }
            val userName = matchingRdn
                ?.let { rdn -> compiledPattern.matcher(rdn.value.toString()) }
                ?.takeIf(Matcher::matches)
                ?.group(1)
            cfg.users[userName] ?: throw java.lang.RuntimeException("Failed to extract user")
        }
    }
|
|
||||||
|
|
||||||
/**
 * Builds a [Configuration.GroupExtractor] that maps a client certificate to a configured group,
 * or returns `null` when no group extractor is configured.
 *
 * Mirrors [userExtractor]: the configured RDN type is located in the certificate subject,
 * the configured regex is applied to its value, and capture group 1 names the group.
 * A certificate that yields no known group makes the extractor throw.
 */
private fun groupExtractor(authentication: Configuration.ClientCertificateAuthentication) =
    authentication.groupExtractor?.let { extractor ->
        val compiledPattern = Pattern.compile(extractor.pattern)
        val wantedRdnType = extractor.rdnType
        Configuration.GroupExtractor { cert: X509Certificate ->
            val matchingRdn = LdapName(cert.subjectX500Principal.name)
                .rdns
                .find { rdn -> rdn.type == wantedRdnType }
            val groupName = matchingRdn
                ?.let { rdn -> compiledPattern.matcher(rdn.value.toString()) }
                ?.takeIf(Matcher::matches)
                ?.group(1)
            cfg.groups[groupName] ?: throw java.lang.RuntimeException("Failed to extract group")
        }
    }
|
|
||||||
|
|
||||||
/**
 * Assembles the Netty pipeline for each accepted connection.
 *
 * Handler order (which is significant in Netty) is:
 * idle-timeout handling -> TLS (optional) -> HTTP codec -> request size limit ->
 * compression -> chunked writes -> authentication (optional) -> throttling ->
 * server request routing -> cache handler -> trace/exception handlers.
 */
override fun initChannel(ch: Channel) {
    log.debug {
        "Created connection ${ch.id().asShortText()} with ${ch.remoteAddress()}"
    }
    ch.closeFuture().addListener {
        log.debug {
            "Closed connection ${ch.id().asShortText()} with ${ch.remoteAddress()}"
        }
    }
    val pipeline = ch.pipeline()
    // Install an IdleStateHandler only when at least one timeout is configured
    // (a value of 0 disables that particular timeout).
    cfg.connection.also { conn ->
        val readIdleTimeout = conn.readIdleTimeout.toMillis()
        val writeIdleTimeout = conn.writeIdleTimeout.toMillis()
        val idleTimeout = conn.idleTimeout.toMillis()
        if (readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
            pipeline.addLast(
                IdleStateHandler(
                    true,
                    readIdleTimeout,
                    writeIdleTimeout,
                    idleTimeout,
                    TimeUnit.MILLISECONDS
                )
            )
        }
    }
    // Companion of the IdleStateHandler above: logs which timeout fired and
    // closes the connection.
    pipeline.addLast(object : ChannelInboundHandlerAdapter() {
        override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
            if (evt is IdleStateEvent) {
                when (evt.state()) {
                    IdleState.READER_IDLE -> log.debug {
                        "Read timeout reached on channel ${ch.id().asShortText()}, closing the connection"
                    }

                    IdleState.WRITER_IDLE -> log.debug {
                        "Write timeout reached on channel ${ch.id().asShortText()}, closing the connection"
                    }

                    IdleState.ALL_IDLE -> log.debug {
                        "Idle timeout reached on channel ${ch.id().asShortText()}, closing the connection"
                    }

                    null -> throw IllegalStateException("This should never happen")
                }
                ctx.close()
            }
        }
    })
    // TLS handler is added only when a TLS context was configured.
    sslContext?.newHandler(ch.alloc())?.also {
        pipeline.addLast(SSL_HANDLER_NAME, it)
    }
    pipeline.addLast(HttpServerCodec())
    pipeline.addLast(MaxRequestSizeHandler.NAME, MaxRequestSizeHandler(cfg.connection.maxRequestSize))
    // Compresses chunked responses; 1024 is the minimum content length to compress.
    pipeline.addLast(HttpChunkContentCompressor(1024))
    pipeline.addLast(ChunkedWriteHandler())
    // Authentication is optional — see the `authenticator` property.
    authenticator?.let {
        pipeline.addLast(it)
    }
    pipeline.addLast(ThrottlingHandler(bucketManager, cfg.connection))

    // Resolve the configured server path prefix against "/" so the handler
    // always sees an absolute prefix.
    val serverHandler = let {
        val prefix = Path.of("/").resolve(Path.of(cfg.serverPath ?: "/"))
        ServerHandler(prefix)
    }
    // The server handler runs on a separate executor group to keep business
    // logic off the I/O event loop.
    pipeline.addLast(eventExecutorGroup, ServerHandler.NAME, serverHandler)

    pipeline.addLast(cacheHandlerFactory.newHandler(ch.eventLoop(), channelFactory, datagramChannelFactory))
    pipeline.addLast(TraceHandler)
    pipeline.addLast(ExceptionHandler)
}
|
|
||||||
|
|
||||||
// Delegates asynchronous shutdown to the cache handler factory created from the configuration.
override fun asyncClose() = cacheHandlerFactory.asyncClose()
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Handle to a running server instance.
 *
 * Behaves as a [Future] that completes once the server has fully shut down:
 * the server channel has closed, the server initializer's resources have been
 * released and all executor groups have terminated. Use [sendShutdownSignal]
 * to initiate a graceful shutdown.
 */
class ServerHandle(
    closeFuture: ChannelFuture,
    private val bossGroup: EventExecutorGroup,
    private val executorGroups: Iterable<EventExecutorGroup>,
    private val serverInitializer: AsyncCloseable,
) : Future<Void> by from(closeFuture, executorGroups, serverInitializer) {

    companion object {
        private val log = createLogger<ServerHandle>()

        /**
         * Builds the shutdown future backing this handle: when [closeFuture]
         * fires, closes [serverInitializer], shuts down every executor group,
         * waits (bounded) for their termination and completes the returned
         * future — exceptionally with the first recorded error, if any.
         */
        private fun from(
            closeFuture: ChannelFuture,
            executorGroups: Iterable<EventExecutorGroup>,
            serverInitializer: AsyncCloseable
        ): CompletableFuture<Void> {
            val result = CompletableFuture<Void>()
            closeFuture.addListener {
                // Errors are collected rather than thrown so that every
                // shutdown step still runs; only the first error propagates.
                val errors = mutableListOf<Throwable>()
                // Overall shutdown budget: 20 seconds shared by all groups.
                val deadline = Instant.now().plusSeconds(20)
                try {
                    // NOTE(review): both close() and asyncClose() are invoked on
                    // serverInitializer — confirm close() is idempotent with respect
                    // to the asyncClose() call below.
                    serverInitializer.close()
                } catch (ex: Throwable) {
                    log.error(ex.message, ex)
                    errors.addLast(ex)
                }

                serverInitializer.asyncClose().whenComplete { _, ex ->
                    if(ex != null) {
                        log.error(ex.message, ex)
                        errors.addLast(ex)
                    }

                    // Kick off graceful shutdown of every group first, then await
                    // them; `map` is used purely for its side effect here.
                    executorGroups.map {
                        it.shutdownGracefully()
                    }

                    for (executorGroup in executorGroups) {
                        val future = executorGroup.terminationFuture()
                        try {
                            val now = Instant.now()
                            if (now > deadline) {
                                // Budget exhausted: poll without waiting so a
                                // still-running group surfaces as TimeoutException.
                                future.get(0, TimeUnit.SECONDS)
                            } else {
                                // Wait only for the remainder of the shared budget.
                                future.get(Duration.between(now, deadline).toMillis(), TimeUnit.MILLISECONDS)
                            }
                        }
                        catch (te: TimeoutException) {
                            errors.addLast(te)
                            log.warn("Timeout while waiting for shutdown of $executorGroup", te)
                        } catch (ex: Throwable) {
                            log.warn(ex.message, ex)
                            errors.addLast(ex)
                        }
                    }

                    if(errors.isEmpty()) {
                        result.complete(null)
                    } else {
                        result.completeExceptionally(errors.first())
                    }
                }
            }

            return result.thenAccept {
                log.info {
                    "RemoteBuildCacheServer has been gracefully shut down"
                }
            }
        }
    }


    /**
     * Initiates graceful shutdown by stopping the boss (acceptor) group; this
     * closes the server channel, whose close future drives the teardown
     * sequence built in [from].
     */
    fun sendShutdownSignal() {
        bossGroup.shutdownGracefully()
    }
}
|
|
||||||
|
|
||||||
/**
 * Starts the server: builds the NIO event loops, binds to the configured
 * host/port and returns a [ServerHandle] that completes on full shutdown.
 */
fun run(): ServerHandle {
    // Create the multithreaded event loops for the server
    // A single boss thread accepts connections; workers handle I/O.
    val bossGroup = NioEventLoopGroup(1)
    val channelFactory = ChannelFactory<SocketChannel> { NioSocketChannel() }
    val datagramChannelFactory = ChannelFactory<DatagramChannel> { NioDatagramChannel() }
    val serverChannelFactory = ChannelFactory<ServerSocketChannel> { NioServerSocketChannel() }
    // 0 lets Netty pick its default worker thread count.
    val workerGroup = NioEventLoopGroup(0)
    // Executor group for request handling, optionally backed by virtual threads.
    val eventExecutorGroup = run {
        val threadFactory = if (cfg.eventExecutor.isUseVirtualThreads) {
            Thread.ofVirtual().factory()
        } else {
            // null falls back to DefaultEventExecutorGroup's default thread factory.
            null
        }
        DefaultEventExecutorGroup(Runtime.getRuntime().availableProcessors(), threadFactory)
    }
    val serverInitializer = ServerInitializer(cfg, channelFactory, datagramChannelFactory, workerGroup)
    val bootstrap = ServerBootstrap().apply {
        // Configure the server
        group(bossGroup, workerGroup)
        channelFactory(serverChannelFactory)
        childHandler(serverInitializer)
        option(ChannelOption.SO_BACKLOG, cfg.incomingConnectionsBacklogSize)
        childOption(ChannelOption.SO_KEEPALIVE, true)
    }


    // Bind and start to accept incoming connections.
    val bindAddress = InetSocketAddress(cfg.host, cfg.port)
    // sync() blocks until the bind completes (or throws on failure).
    val httpChannel = bootstrap.bind(bindAddress).sync().channel()
    log.info {
        "RemoteBuildCacheServer is listening on ${cfg.host}:${cfg.port}"
    }

    return ServerHandle(
        httpChannel.closeFuture(),
        bossGroup,
        setOf(workerGroup, eventExecutorGroup),
        serverInitializer
    )
}
|
|
||||||
}
|
|
@@ -1,169 +0,0 @@
|
|||||||
package net.woggioni.rbcs.server.cache
|
|
||||||
|
|
||||||
import net.woggioni.jwo.JWO
|
|
||||||
import net.woggioni.rbcs.api.AsyncCloseable
|
|
||||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
|
||||||
import net.woggioni.rbcs.common.createLogger
|
|
||||||
import java.io.ByteArrayOutputStream
|
|
||||||
import java.io.InputStream
|
|
||||||
import java.io.ObjectInputStream
|
|
||||||
import java.io.ObjectOutputStream
|
|
||||||
import java.io.Serializable
|
|
||||||
import java.nio.ByteBuffer
|
|
||||||
import java.nio.channels.Channels
|
|
||||||
import java.nio.channels.FileChannel
|
|
||||||
import java.nio.file.Files
|
|
||||||
import java.nio.file.Path
|
|
||||||
import java.nio.file.StandardCopyOption
|
|
||||||
import java.nio.file.StandardOpenOption
|
|
||||||
import java.nio.file.attribute.BasicFileAttributes
|
|
||||||
import java.time.Duration
|
|
||||||
import java.time.Instant
|
|
||||||
import java.util.concurrent.CompletableFuture
|
|
||||||
|
|
||||||
/**
 * A file-based cache: each entry is a file under [root] whose content is a
 * 4-byte big-endian length prefix, a serialized [CacheValueMetadata] object of
 * that length, and then the raw payload.
 *
 * A background virtual thread periodically deletes entries older than [maxAge];
 * [asyncClose] stops it and returns a future completing when it has exited.
 */
class FileSystemCache(
    val root: Path,
    val maxAge: Duration
) : AsyncCloseable {

    /**
     * Handle to a stored entry: the deserialized [metadata], an open read
     * [channel] on the entry file, the byte [offset] where the payload starts
     * and the total file [size].
     *
     * NOTE(review): [FileChannel] is not serializable, so the [Serializable]
     * marker cannot actually round-trip this class — confirm it is needed.
     */
    class EntryValue(val metadata: CacheValueMetadata, val channel : FileChannel, val offset : Long, val size : Long) : Serializable

    init {
        Files.createDirectories(root)
    }

    // Flag polled by the GC thread; cleared by asyncClose().
    @Volatile
    private var running = true

    // Next time a GC pass should run; pushed forward after each pass.
    private var nextGc = Instant.now()

    /**
     * Opens the entry stored under [key], or returns `null` when absent.
     * The caller owns the returned [EntryValue.channel] and must close it.
     */
    fun get(key: String): EntryValue? =
        root.resolve(key).takeIf(Files::exists)
            ?.let { file ->
                val size = Files.size(file)
                val channel = FileChannel.open(file, StandardOpenOption.READ)
                val source = Channels.newInputStream(channel)
                // Read the 4-byte metadata length prefix. readNBytes loops until
                // the requested count is available (the original single read()
                // could legally return fewer bytes).
                val header = source.readNBytes(Integer.BYTES)
                val offset = (Integer.BYTES + ByteBuffer.wrap(header).getInt()).toLong()
                // Shield the channel from ObjectInputStream.close(): the channel
                // must stay open because it is handed out in the EntryValue.
                val nonClosingStream = object : InputStream() {
                    override fun read() = source.read()

                    override fun read(b: ByteArray, off: Int, len: Int) = source.read(b, off, len)

                    override fun close() {
                        // deliberately empty: keep the FileChannel open
                    }
                }
                val metadata = ObjectInputStream(nonClosingStream).use { ois ->
                    ois.readObject() as CacheValueMetadata
                }
                EntryValue(metadata, channel, offset, size)
            }

    /**
     * Write handle for a new entry: metadata is written eagerly to [tmpFile],
     * the payload is appended through [channel], and [commit] atomically
     * publishes the file at [path] while [rollback] discards it.
     */
    class FileSink(metadata: CacheValueMetadata, private val path: Path, private val tmpFile: Path) {
        val channel: FileChannel

        init {
            // Serialize the metadata, then write "<int length><metadata bytes>"
            // as the file header before handing the channel to the caller.
            val baos = ByteArrayOutputStream()
            ObjectOutputStream(baos).use {
                it.writeObject(metadata)
            }
            Files.newOutputStream(tmpFile).use {
                val bytes = baos.toByteArray()
                val buffer = ByteBuffer.allocate(Integer.BYTES)
                buffer.putInt(bytes.size)
                buffer.rewind()
                it.write(buffer.array())
                it.write(bytes)
            }
            channel = FileChannel.open(tmpFile, StandardOpenOption.APPEND)
        }

        /** Atomically publishes the finished entry at its final path. */
        fun commit() {
            channel.close()
            Files.move(tmpFile, path, StandardCopyOption.ATOMIC_MOVE)
        }

        /** Aborts the write and removes the partially-written temporary file. */
        fun rollback() {
            channel.close()
            // Bug fix: delete the temporary file being written. The previous code
            // deleted `path`, which does not exist until commit() moves the
            // temporary file there — rollback would throw NoSuchFileException and
            // leak the temporary file.
            Files.delete(tmpFile)
        }
    }

    /**
     * Starts writing a new entry for [key]; the data goes to a fresh temp file
     * until the returned [FileSink] is committed.
     */
    fun put(
        key: String,
        metadata: CacheValueMetadata,
    ): FileSink {
        val file = root.resolve(key)
        val tmpFile = Files.createTempFile(root, null, ".tmp")
        return FileSink(metadata, file, tmpFile)
    }

    // Completed when the background GC thread exits; doubles as the thread's owner.
    private val closeFuture = object : CompletableFuture<Void>() {
        init {
            Thread.ofVirtual().name("file-system-cache-gc").start {
                try {
                    while (running) {
                        gc()
                    }
                    complete(null)
                } catch (ex : Throwable) {
                    completeExceptionally(ex)
                }
            }
        }
    }

    // One GC iteration: run a pass when due, then sleep at most one second so
    // the `running` flag is observed promptly.
    private fun gc() {
        val now = Instant.now()
        if (nextGc < now) {
            val oldestEntry = actualGc(now)
            // The next pass is due when today's oldest surviving entry expires.
            nextGc = (oldestEntry ?: now).plus(maxAge)
        }
        Thread.sleep(minOf(Duration.between(now, nextGc), Duration.ofSeconds(1)))
    }

    /**
     * Deletes expired entries (skipping in-flight ".tmp" files) and returns the
     * creation timestamp of the oldest cache entry (if any).
     */
    private fun actualGc(now: Instant): Instant? {
        var result: Instant? = null
        // Files.list returns a stream backed by an open directory handle:
        // close it with use {} (the original leaked it).
        Files.list(root).use { entries ->
            entries
                .filter { path ->
                    JWO.splitExtension(path)
                        .map { it._2 }
                        .map { it != ".tmp" }
                        .orElse(true)
                }
                .filter {
                    val creationTimeStamp = Files.readAttributes(it, BasicFileAttributes::class.java)
                        .creationTime()
                        .toInstant()
                    if (result == null || creationTimeStamp < result) {
                        result = creationTimeStamp
                    }
                    now > creationTimeStamp.plus(maxAge)
                }.forEach(Files::delete)
        }
        return result
    }

    /** Stops the GC thread; the returned future completes once it has exited. */
    override fun asyncClose() : CompletableFuture<Void> {
        running = false
        return closeFuture
    }

    private companion object {
        private val log = createLogger<FileSystemCache>()
    }
}
|
|
@@ -1,38 +0,0 @@
|
|||||||
package net.woggioni.rbcs.server.cache
|
|
||||||
|
|
||||||
import io.netty.channel.ChannelFactory
|
|
||||||
import io.netty.channel.EventLoopGroup
|
|
||||||
import io.netty.channel.socket.DatagramChannel
|
|
||||||
import io.netty.channel.socket.SocketChannel
|
|
||||||
import net.woggioni.jwo.Application
|
|
||||||
import net.woggioni.rbcs.api.CacheHandlerFactory
|
|
||||||
import net.woggioni.rbcs.api.Configuration
|
|
||||||
import net.woggioni.rbcs.common.RBCS
|
|
||||||
import java.nio.file.Path
|
|
||||||
import java.time.Duration
|
|
||||||
|
|
||||||
/**
 * Configuration for the file-system cache backend.
 *
 * [materialize] creates the [CacheHandlerFactory] that owns a single
 * [FileSystemCache] instance; when [root] is `null` the application's default
 * cache directory is used.
 */
data class FileSystemCacheConfiguration(
    val root: Path?,
    val maxAge: Duration,
    val digestAlgorithm : String?,
    val compressionEnabled: Boolean,
    val compressionLevel: Int,
    val chunkSize: Int,
) : Configuration.Cache {

    override fun materialize(): CacheHandlerFactory = object : CacheHandlerFactory {
        // Single cache instance shared by every handler this factory creates.
        private val cache = FileSystemCache(
            root ?: Application.builder("rbcs").build().computeCacheDirectory(),
            maxAge,
        )

        override fun newHandler(
            eventLoop: EventLoopGroup,
            socketChannelFactory: ChannelFactory<SocketChannel>,
            datagramChannelFactory: ChannelFactory<DatagramChannel>
        ) = FileSystemCacheHandler(cache, digestAlgorithm, compressionEnabled, compressionLevel, chunkSize)

        // Closing the factory closes the shared cache (stops its GC thread).
        override fun asyncClose() = cache.asyncClose()
    }

    override fun getNamespaceURI() = RBCS.RBCS_NAMESPACE_URI

    override fun getTypeName() = "fileSystemCacheType"
}
|
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user