Compare commits

40 commits:

c19bc9e91e
af79e74b95
78ae21caa4
6c0eadb9fb
5fef1b932e
5e173dbf62
53b24e3d54
7d0f24fa58
1b6cf1bd96
4180df2352
c2e388b931
6c62ac85c0
89153b60f8
a2a40ab60f
45458761f3
90a5834f5f
1823d0b9ca
649cbba954
eb9ccce3be
316f64cf9d
24a49779f9
423b749db9
9ce3e7fa0a
1e6ece37a5
fc9900d821
1a78c8092b
3d1847c408
702556bfbb
06e9e7ca09
fa5bb55baa
007d0fffd6
75ebf2248f
241d95fe1c
3b7030c302
a8670277e7
03ee75266d
05a265e4b4
5af99330f8
747168cda3
225f156864
@@ -9,11 +9,6 @@ jobs:
steps:
- name: Checkout sources
uses: actions/checkout@v4
- name: Setup Java
uses: actions/setup-java@v4
with:
distribution: graalvm
java-version: 21
- name: Setup Gradle
uses: gradle/actions/setup-gradle@v3
- name: Execute Gradle build
@@ -36,7 +31,7 @@ jobs:
username: woggioni
password: ${{ secrets.PUBLISHER_TOKEN }}
-
name: Build gbcs Docker image
name: Build rbcs Docker image
uses: docker/build-push-action@v5.3.0
with:
context: "docker/build/docker"
@@ -44,12 +39,12 @@ jobs:
push: true
pull: true
tags: |
gitea.woggioni.net/woggioni/gbcs:latest
gitea.woggioni.net/woggioni/gbcs:${{ steps.retrieve-version.outputs.VERSION }}
gitea.woggioni.net/woggioni/rbcs:latest
gitea.woggioni.net/woggioni/rbcs:${{ steps.retrieve-version.outputs.VERSION }}
target: release
cache-from: type=registry,ref=gitea.woggioni.net/woggioni/gbcs:buildx
cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
-
name: Build gbcs memcached Docker image
name: Build rbcs memcache Docker image
uses: docker/build-push-action@v5.3.0
with:
context: "docker/build/docker"
@@ -57,11 +52,11 @@ jobs:
push: true
pull: true
tags: |
gitea.woggioni.net/woggioni/gbcs:memcached
gitea.woggioni.net/woggioni/gbcs:memcached-${{ steps.retrieve-version.outputs.VERSION }}
target: release-memcached
cache-from: type=registry,ref=gitea.woggioni.net/woggioni/gbcs:buildx
cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/gbcs:buildx
gitea.woggioni.net/woggioni/rbcs:memcache
gitea.woggioni.net/woggioni/rbcs:memcache-${{ steps.retrieve-version.outputs.VERSION }}
target: release-memcache
cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/rbcs:buildx
- name: Publish artifacts
env:
PUBLISHER_TOKEN: ${{ secrets.PUBLISHER_TOKEN }}
.gitignore (vendored, 2 changed lines)
@@ -4,4 +4,4 @@
# Ignore Gradle build output directory
build

gbcs-cli/native-image/*.json
rbcs-cli/native-image/*.json

@@ -1,2 +0,0 @@
FROM gitea.woggioni.net/woggioni/gbcs:memcached
COPY --chown=luser:luser conf/gbcs-memcached.xml /home/luser/.config/gbcs/gbcs.xml
LICENSE (new file, 20 lines)
@@ -0,0 +1,20 @@
The MIT License (MIT)

Copyright (c) 2017 Y. T. CHUNG <zonyitoo@gmail.com>

Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

README.md (new file, 110 lines)
@@ -0,0 +1,110 @@
# Remote Build Cache Server

Remote Build Cache Server (shortened to RBCS) allows you to share and reuse unchanged build and test outputs across the team. This speeds up local and CI builds, since cycles are not wasted rebuilding components that are unaffected by new code changes. RBCS supports both Gradle and Maven build tool environments.

## Getting Started

### Downloading the jar file

You can download the latest version from [this link](https://gitea.woggioni.net/woggioni/-/packages/maven/net.woggioni-rbcs-cli/).

If you want to use memcache as a storage backend, you'll also need to download [the memcache plugin](https://gitea.woggioni.net/woggioni/-/packages/maven/net.woggioni-rbcs-server-memcache/).

### Using the Docker image

You can pull the latest Docker image with

```bash
docker pull gitea.woggioni.net/woggioni/rbcs:latest
```
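A minimal way to try the image is sketched below; it is not taken from this repository's documentation. The container port (13080) and the configuration mount point (`/home/luser/.config/rbcs`, overridable through the `RBCS_CONFIGURATION_DIR` environment variable read by the CLI elsewhere in this changeset) are assumptions based on the sample configuration and Dockerfile in this compare, so adjust them to match your own `rbcs.xml`:

```bash
# Publish the server on localhost:8080 and mount a local directory
# containing rbcs.xml as the configuration directory (paths are illustrative).
docker run --rm \
  -p 127.0.0.1:8080:13080 \
  -v "$PWD/conf:/home/luser/.config/rbcs" \
  gitea.woggioni.net/woggioni/rbcs:latest
```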
## Usage

## Configuration

### Using RBCS with Gradle

```groovy
buildCache {
    remote(HttpBuildCache) {
        url = 'https://rbcs.example.com/'
    }
}
```
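The snippet above enables read-only use of the remote cache in `settings.gradle`. A common refinement, sketched here rather than taken from the RBCS docs, is to let only CI builds push entries and to pass credentials; `push` and `credentials` are standard properties of Gradle's `HttpBuildCache`, while the environment variable names below are placeholders:

```groovy
// settings.gradle — sketch: everyone reads from the cache, only CI writes to it
buildCache {
    remote(HttpBuildCache) {
        url = 'https://rbcs.example.com/'
        push = System.getenv('CI') != null            // populate the cache from CI builds only
        credentials {
            username = System.getenv('RBCS_USERNAME') // placeholder variable names
            password = System.getenv('RBCS_PASSWORD')
        }
    }
}
```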
### Using RBCS with Maven

See the [Maven build cache extension's remote cache documentation](https://maven.apache.org/extensions/maven-build-cache-extension/remote-cache.html) for how to point Maven at a remote HTTP cache.
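As a rough sketch, after registering `maven-build-cache-extension` in `.mvn/extensions.xml`, the remote cache is typically configured with something like the following. The element names follow the extension's documented `maven-build-cache-config.xml` format and the URL is a placeholder for your RBCS deployment; verify the exact schema against the page linked above:

```xml
<!-- .mvn/maven-build-cache-config.xml — sketch, verify against the linked documentation -->
<cache xmlns="http://maven.apache.org/BUILD-CACHE-CONFIG/1.0.0">
    <configuration>
        <enabled>true</enabled>
        <!-- point the extension at the RBCS endpoint; pushing to the remote cache
             is usually switched on separately, e.g. only from CI -->
        <remote enabled="true">
            <url>https://rbcs.example.com</url>
        </remote>
    </configuration>
</cache>
```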
## FAQ

### Why should I use a build cache?

#### Build Caches Improve Build & Test Performance

Building software consists of a number of steps, like compiling sources, executing tests, and linking binaries. We've seen that a binary artifact repository helps when such a step requires an external component: the artifact is downloaded from the repository rather than built locally. However, there are many additional steps in the build process which can be optimized to reduce the build time. An obvious strategy is to avoid executing the build steps that dominate the total build time when those steps are not needed. Most build times are dominated by the testing step.

While binary repositories cannot capture the outcome of a test build step (only the test reports, when included in binary artifacts), build caches are designed to eliminate redundant executions of every build step. They generalize the idea of avoiding the work associated with any incremental step of the build, including test execution, compilation and resource processing. The mechanism itself is comparable to a pure function: given some inputs, such as source files and environment parameters, we know that the output is always going to be the same. As a result, we can cache the output and retrieve it based on a simple cryptographic hash of the inputs. Build caching is supported natively by some build tools.
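To make the pure-function analogy concrete, here is a small sketch (illustrative only, not RBCS or any build tool's internals) of how a build step's output can be keyed by a digest of its inputs:

```kotlin
import java.security.MessageDigest

// Illustrative only: the cache key is a digest over everything that can affect the
// output of a build step (source files, tool versions, compiler flags, ...).
fun cacheKey(inputs: List<ByteArray>): String {
    val md = MessageDigest.getInstance("SHA-256")
    inputs.forEach { md.update(it) }
    return md.digest().joinToString("") { "%02x".format(it) }
}

// Reuse the stored output on a cache hit, run the real build step only on a miss.
fun <T> cachedStep(cache: MutableMap<String, T>, inputs: List<ByteArray>, build: () -> T): T =
    cache.getOrPut(cacheKey(inputs)) { build() }
```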
#### Improve CI builds with a remote build cache

When analyzing the role of a build cache it is important to take into account the granularity of the changes that it caches. Imagine a full build for a project with 40 to 50 modules which fails at the last step (deployment) because the staging environment is temporarily unavailable. Although the vast majority of the build steps (potentially thousands) succeed, the change cannot be deployed to the staging environment. Without a build cache, one typically relies on a very complex CI configuration to reuse build step outputs, or has to repeat the full build once the environment is available again.

Some build tools don't support incremental builds properly. For example, the outputs of a build started from scratch may differ from those of subsequent builds that rely on the initial build's output. As a result, to preserve build integrity, it's crucial to rebuild from scratch, or 'cleanly', in this scenario.

With a build cache, only the last step needs to be executed and the build can be re-triggered when the environment is back online. This automatically saves all of the time and resources spent across the build steps that were already executed successfully. Instead of executing the intermediate steps, the build tool pulls their outputs from the build cache, avoiding a lot of redundant work.

#### Share outputs with a remote build cache

One of the most important advantages of a remote build cache is the ability to share build outputs. In most CI configurations, for example, a number of pipelines are created. These may include one for building the sources, one for testing, one for publishing the outcomes to a remote repository, and other pipelines to test on different platforms. There are even situations where CI builds a project only partially (i.e. some modules and not others).

Most of those pipelines share a lot of intermediate build steps. All builds which perform testing require the binaries to be ready. All publishing builds require all previous steps to be executed. And because modern CI infrastructure executes everything in containerized (isolated) environments, significant resources are wasted by repeatedly building the same intermediate artifacts.

A remote build cache greatly reduces this overhead, by orders of magnitude, because it provides a way for all those pipelines to share their outputs. After all, there is no point recreating an output that is already available in the cache.

Because there are inherent dependencies between the software components of a build, introducing a build cache dramatically reduces the cost of splitting a component into multiple pieces, allowing for increased modularity without increased overhead.

#### Make local developers more efficient with remote build caches

It is common for different teams within a company to work on different modules of a single large application. In this case, most teams don't care about building the other parts of the software. By introducing a remote cache, developers immediately benefit from pre-built artifacts when checking out code: because it has already been built on CI, they don't have to build it locally.

Introducing a remote cache is a huge benefit for those developers. Consider that a typical developer's day begins by performing a code checkout. Most likely the checked-out code has already been built on CI, so no time is wasted running the first build of the day: the remote cache provides all of the intermediate artifacts needed. And in the event local changes are made, the remote cache still provides partial cache hits for the projects that are unaffected. As other developers in the organization request CI builds, the remote cache continues to populate, increasing the likelihood of these remote cache hits across team members.
@@ -1,26 +0,0 @@
|
||||
plugins {
|
||||
alias catalog.plugins.gradle.jmh
|
||||
alias catalog.plugins.lombok
|
||||
}
|
||||
|
||||
import me.champeau.jmh.JMHTask
|
||||
|
||||
dependencies {
|
||||
implementation rootProject
|
||||
|
||||
implementation catalog.jwo
|
||||
implementation catalog.xz
|
||||
implementation catalog.jackson.databind
|
||||
|
||||
jmhAnnotationProcessor catalog.lombok
|
||||
}
|
||||
|
||||
jmh {
|
||||
threads = 4
|
||||
iterations = 2
|
||||
fork = 1
|
||||
warmupIterations = 1
|
||||
warmupForks = 0
|
||||
resultFormat = 'JSON'
|
||||
}
|
||||
|
@@ -1,170 +0,0 @@
|
||||
package net.woggioni.gbcs.benchmark;
|
||||
|
||||
import lombok.Getter;
|
||||
import lombok.SneakyThrows;
|
||||
import org.openjdk.jmh.annotations.Benchmark;
|
||||
import org.openjdk.jmh.annotations.BenchmarkMode;
|
||||
import org.openjdk.jmh.annotations.Level;
|
||||
import org.openjdk.jmh.annotations.Mode;
|
||||
import org.openjdk.jmh.annotations.OutputTimeUnit;
|
||||
import org.openjdk.jmh.annotations.Scope;
|
||||
import org.openjdk.jmh.annotations.Setup;
|
||||
import org.openjdk.jmh.annotations.State;
|
||||
|
||||
import java.net.URI;
|
||||
import java.net.http.HttpClient;
|
||||
import java.net.http.HttpRequest;
|
||||
import java.net.http.HttpResponse;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.util.Arrays;
|
||||
import java.util.Base64;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Iterator;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.Properties;
|
||||
import java.util.Random;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.function.Predicate;
|
||||
|
||||
public class Main {
|
||||
|
||||
@SneakyThrows
|
||||
private static Properties loadProperties() {
|
||||
Properties properties = new Properties();
|
||||
try (final var is = Main.class.getResourceAsStream("/benchmark.properties")) {
|
||||
properties.load(is);
|
||||
}
|
||||
return properties;
|
||||
}
|
||||
|
||||
private static final Properties properties = loadProperties();
|
||||
|
||||
@State(Scope.Thread)
|
||||
public static class ExecutionPlan {
|
||||
private final Random random = new Random(101325);
|
||||
|
||||
@Getter
|
||||
private final HttpClient client = HttpClient.newHttpClient();
|
||||
|
||||
private final Map<String, byte[]> entries = new HashMap<>();
|
||||
|
||||
public final Map<String, byte[]> getEntries() {
|
||||
return Collections.unmodifiableMap(entries);
|
||||
}
|
||||
|
||||
public Map.Entry<String, byte[]> newEntry() {
|
||||
final var keyBuffer = new byte[0x20];
|
||||
random.nextBytes(keyBuffer);
|
||||
final var key = Base64.getUrlEncoder().encodeToString(keyBuffer);
|
||||
final var value = new byte[0x1000];
|
||||
random.nextBytes(value);
|
||||
return Map.entry(key, value);
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
public HttpRequest.Builder newRequestBuilder(String key) {
|
||||
final var requestBuilder = HttpRequest.newBuilder()
|
||||
.uri(getServerURI().resolve(key));
|
||||
String user = getUser();
|
||||
if (user != null) {
|
||||
requestBuilder.header("Authorization", buildAuthorizationHeader(user, getPassword()));
|
||||
}
|
||||
return requestBuilder;
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
public URI getServerURI() {
|
||||
return new URI(properties.getProperty("gbcs.server.url"));
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
public String getUser() {
|
||||
return Optional.ofNullable(properties.getProperty("gbcs.server.username"))
|
||||
.filter(Predicate.not(String::isEmpty))
|
||||
.orElse(null);
|
||||
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
public String getPassword() {
|
||||
return Optional.ofNullable(properties.getProperty("gbcs.server.password"))
|
||||
.filter(Predicate.not(String::isEmpty))
|
||||
.orElse(null);
|
||||
}
|
||||
|
||||
private String buildAuthorizationHeader(String user, String password) {
|
||||
final var b64 = Base64.getEncoder().encode(String.format("%s:%s", user, password).getBytes(StandardCharsets.UTF_8));
|
||||
return "Basic " + new String(b64);
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
@Setup(Level.Trial)
|
||||
public void setUp() {
|
||||
try (final var client = HttpClient.newHttpClient()) {
|
||||
for (int i = 0; i < 10000; i++) {
|
||||
final var pair = newEntry();
|
||||
final var requestBuilder = newRequestBuilder(pair.getKey())
|
||||
.header("Content-Type", "application/octet-stream")
|
||||
.PUT(HttpRequest.BodyPublishers.ofByteArray(pair.getValue()));
|
||||
final var response = client.send(requestBuilder.build(), HttpResponse.BodyHandlers.ofString());
|
||||
if (201 != response.statusCode()) {
|
||||
throw new IllegalStateException(Integer.toString(response.statusCode()));
|
||||
} else {
|
||||
entries.put(pair.getKey(), pair.getValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private Iterator<Map.Entry<String, byte[]>> it = null;
|
||||
|
||||
private Map.Entry<String, byte[]> nextEntry() {
|
||||
if (it == null || !it.hasNext()) {
|
||||
it = getEntries().entrySet().iterator();
|
||||
}
|
||||
return it.next();
|
||||
}
|
||||
}
|
||||
|
||||
@SneakyThrows
|
||||
@Benchmark
|
||||
@BenchmarkMode(Mode.Throughput)
|
||||
@OutputTimeUnit(TimeUnit.SECONDS)
|
||||
public void get(ExecutionPlan plan) {
|
||||
final var client = plan.getClient();
|
||||
final var entry = plan.nextEntry();
|
||||
final var requestBuilder = plan.newRequestBuilder(entry.getKey())
|
||||
.header("Accept", "application/octet-stream")
|
||||
.GET();
|
||||
final var response = client.send(requestBuilder.build(), HttpResponse.BodyHandlers.ofByteArray());
|
||||
if (200 != response.statusCode()) {
|
||||
throw new IllegalStateException(Integer.toString(response.statusCode()));
|
||||
} else {
|
||||
if (!Arrays.equals(entry.getValue(), response.body())) {
|
||||
throw new IllegalStateException("Retrieved unexpected value");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@SneakyThrows
|
||||
@Benchmark
|
||||
@BenchmarkMode(Mode.Throughput)
|
||||
@OutputTimeUnit(TimeUnit.SECONDS)
|
||||
public void put(Main.ExecutionPlan plan) {
|
||||
final var client = plan.getClient();
|
||||
final var entry = plan.nextEntry();
|
||||
|
||||
final var requestBuilder = plan.newRequestBuilder(entry.getKey())
|
||||
.header("Content-Type", "application/octet-stream")
|
||||
.PUT(HttpRequest.BodyPublishers.ofByteArray(entry.getValue()));
|
||||
|
||||
final var response = client.send(requestBuilder.build(), HttpResponse.BodyHandlers.ofByteArray());
|
||||
if (201 != response.statusCode()) {
|
||||
throw new IllegalStateException(Integer.toString(response.statusCode()));
|
||||
}
|
||||
}
|
||||
}
|
@@ -1 +0,0 @@
gbcs.server.url= http://localhost:8080

build.gradle (55 changed lines)
@@ -1,14 +1,12 @@
|
||||
plugins {
|
||||
id 'java-library'
|
||||
alias catalog.plugins.kotlin.jvm
|
||||
alias catalog.plugins.kotlin.jvm apply false
|
||||
alias catalog.plugins.sambal
|
||||
alias catalog.plugins.lombok
|
||||
id 'maven-publish'
|
||||
alias catalog.plugins.lombok apply false
|
||||
}
|
||||
|
||||
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
|
||||
import org.jetbrains.kotlin.gradle.dsl.JvmTarget
|
||||
|
||||
import org.jetbrains.kotlin.gradle.dsl.JvmTarget
|
||||
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
|
||||
|
||||
allprojects { subproject ->
|
||||
group = 'net.woggioni'
|
||||
@@ -17,7 +15,7 @@ allprojects { subproject ->
|
||||
version = project.currentTag.map { it[0] }.get()
|
||||
} else {
|
||||
version = project.gitRevision.map { gitRevision ->
|
||||
"${getProperty('gbcs.version')}.${gitRevision[0..10]}"
|
||||
"${getProperty('rbcs.version')}.${gitRevision[0..10]}"
|
||||
}.get()
|
||||
}
|
||||
|
||||
@@ -48,6 +46,12 @@ allprojects { subproject ->
|
||||
}
|
||||
}
|
||||
|
||||
dependencies {
|
||||
testImplementation catalog.junit.jupiter.api
|
||||
testImplementation catalog.junit.jupiter.params
|
||||
testRuntimeOnly catalog.junit.jupiter.engine
|
||||
}
|
||||
|
||||
test {
|
||||
useJUnitPlatform()
|
||||
}
|
||||
@@ -68,6 +72,15 @@ allprojects { subproject ->
|
||||
}
|
||||
}
|
||||
|
||||
pluginManager.withPlugin('jacoco') {
|
||||
test {
|
||||
finalizedBy jacocoTestReport
|
||||
}
|
||||
jacocoTestReport {
|
||||
dependsOn test
|
||||
}
|
||||
}
|
||||
|
||||
pluginManager.withPlugin(catalog.plugins.kotlin.jvm.get().pluginId) {
|
||||
tasks.withType(KotlinCompile.class) {
|
||||
compilerOptions.jvmTarget = JvmTarget.JVM_21
|
||||
@@ -102,34 +115,6 @@ allprojects { subproject ->
|
||||
}
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation catalog.jwo
|
||||
implementation catalog.slf4j.api
|
||||
implementation catalog.netty.codec.http
|
||||
|
||||
api project('gbcs-base')
|
||||
api project('gbcs-api')
|
||||
|
||||
// runtimeOnly catalog.slf4j.jdk14
|
||||
testRuntimeOnly catalog.logback.classic
|
||||
|
||||
testImplementation catalog.bcprov.jdk18on
|
||||
testImplementation catalog.bcpkix.jdk18on
|
||||
testImplementation catalog.junit.jupiter.api
|
||||
testImplementation catalog.junit.jupiter.params
|
||||
testRuntimeOnly catalog.junit.jupiter.engine
|
||||
|
||||
testRuntimeOnly project("gbcs-memcached")
|
||||
}
|
||||
|
||||
publishing {
|
||||
publications {
|
||||
maven(MavenPublication) {
|
||||
from(components["java"])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
tasks.register('version') {
|
||||
doLast {
|
||||
println("VERSION=$version")
|
||||
|
@@ -1,13 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<gbcs:server useVirtualThreads="true" xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:gbcs="urn:net.woggioni.gbcs"
|
||||
xmlns:gbcs-memcached="urn:net.woggioni.gbcs-memcached"
|
||||
xs:schemaLocation="urn:net.woggioni.gbcs-memcached jpms://net.woggioni.gbcs.memcached/net/woggioni/gbcs/memcached/schema/gbcs-memcached.xsd urn:net.woggioni.gbcs jpms://net.woggioni.gbcs/net/woggioni/gbcs/schema/gbcs.xsd">
|
||||
<bind host="0.0.0.0" port="13080" />
|
||||
<cache xs:type="gbcs-memcached:memcachedCacheType" max-age="P7D" max-size="16777216" compression-mode="zip">
|
||||
<server host="memcached" port="11211"/>
|
||||
</cache>
|
||||
<authentication>
|
||||
<none/>
|
||||
</authentication>
|
||||
</gbcs:server>
|
@@ -1,36 +0,0 @@
|
||||
networks:
|
||||
default:
|
||||
external: false
|
||||
ipam:
|
||||
driver: default
|
||||
config:
|
||||
- subnet: 172.118.0.0/16
|
||||
ip_range: 172.118.0.0/16
|
||||
gateway: 172.118.0.254
|
||||
services:
|
||||
gbcs:
|
||||
build:
|
||||
context: .
|
||||
container_name: gbcs
|
||||
restart: unless-stopped
|
||||
ports:
|
||||
- "127.0.0.1:8080:13080"
|
||||
- "[::1]:8080:13080"
|
||||
depends_on:
|
||||
memcached:
|
||||
condition: service_started
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
cpus: "2.00"
|
||||
memory: 256M
|
||||
memcached:
|
||||
image: memcached
|
||||
container_name: memcached
|
||||
restart: unless-stopped
|
||||
command: -I 64m -m 900m
|
||||
deploy:
|
||||
resources:
|
||||
limits:
|
||||
cpus: "1.00"
|
||||
memory: 1G
|
@@ -1,21 +1,16 @@
FROM alpine:latest AS base-release
RUN --mount=type=cache,target=/var/cache/apk apk update
RUN --mount=type=cache,target=/var/cache/apk apk add openjdk21-jre
FROM eclipse-temurin:21-jre-alpine AS base-release
RUN adduser -D luser
USER luser
WORKDIR /home/luser

FROM base-release AS release
ADD gbcs-cli-envelope-*.jar gbcs.jar
ENTRYPOINT ["java", "-jar", "/home/luser/gbcs.jar"]
ADD rbcs-cli-envelope-*.jar rbcs.jar
ENTRYPOINT ["java", "-XX:+UseZGC", "-XX:+ZGenerational", "-jar", "/home/luser/rbcs.jar", "server"]

FROM base-release AS release-memcached
ADD --chown=luser:luser gbcs-cli-envelope-*.jar gbcs.jar
FROM base-release AS release-memcache
ADD --chown=luser:luser rbcs-cli-envelope-*.jar rbcs.jar
RUN mkdir plugins
WORKDIR /home/luser/plugins
RUN --mount=type=bind,source=.,target=/build/distributions tar -xf /build/distributions/gbcs-memcached*.tar
RUN --mount=type=bind,source=.,target=/build/distributions tar -xf /build/distributions/rbcs-server-memcache*.tar
WORKDIR /home/luser
ENTRYPOINT ["java", "-jar", "/home/luser/gbcs.jar"]

FROM release-memcached as compose
COPY --chown=luser:luser conf/gbcs-memcached.xml /home/luser/.config/gbcs/gbcs.xml
ENTRYPOINT ["java", "-XX:+UseZGC", "-XX:+ZGenerational", "-jar", "/home/luser/rbcs.jar", "server"]
@@ -18,8 +18,8 @@ configurations {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
docker project(path: ':gbcs-cli', configuration: 'release')
|
||||
docker project(path: ':gbcs-memcached', configuration: 'release')
|
||||
docker project(path: ':rbcs-cli', configuration: 'release')
|
||||
docker project(path: ':rbcs-server-memcache', configuration: 'release')
|
||||
}
|
||||
|
||||
Provider<Task> cleanTaskProvider = tasks.named(BasePlugin.CLEAN_TASK_NAME) {}
|
||||
@@ -35,33 +35,33 @@ Provider<Copy> prepareDockerBuild = tasks.register('prepareDockerBuild', Copy) {
|
||||
Provider<DockerBuildImage> dockerBuild = tasks.register('dockerBuildImage', DockerBuildImage) {
|
||||
group = 'docker'
|
||||
dependsOn prepareDockerBuild
|
||||
images.add('gitea.woggioni.net/woggioni/gbcs:latest')
|
||||
images.add("gitea.woggioni.net/woggioni/gbcs:${version}")
|
||||
images.add('gitea.woggioni.net/woggioni/rbcs:latest')
|
||||
images.add("gitea.woggioni.net/woggioni/rbcs:${version}")
|
||||
}
|
||||
|
||||
Provider<DockerTagImage> dockerTag = tasks.register('dockerTagImage', DockerTagImage) {
|
||||
group = 'docker'
|
||||
repository = 'gitea.woggioni.net/woggioni/gbcs'
|
||||
imageId = 'gitea.woggioni.net/woggioni/gbcs:latest'
|
||||
repository = 'gitea.woggioni.net/woggioni/rbcs'
|
||||
imageId = 'gitea.woggioni.net/woggioni/rbcs:latest'
|
||||
tag = version
|
||||
}
|
||||
|
||||
Provider<DockerTagImage> dockerTagMemcached = tasks.register('dockerTagMemcachedImage', DockerTagImage) {
|
||||
Provider<DockerTagImage> dockerTagMemcache = tasks.register('dockerTagMemcacheImage', DockerTagImage) {
|
||||
group = 'docker'
|
||||
repository = 'gitea.woggioni.net/woggioni/gbcs'
|
||||
imageId = 'gitea.woggioni.net/woggioni/gbcs:memcached'
|
||||
tag = "${version}-memcached"
|
||||
repository = 'gitea.woggioni.net/woggioni/rbcs'
|
||||
imageId = 'gitea.woggioni.net/woggioni/rbcs:memcache'
|
||||
tag = "${version}-memcache"
|
||||
}
|
||||
|
||||
Provider<DockerPushImage> dockerPush = tasks.register('dockerPushImage', DockerPushImage) {
|
||||
group = 'docker'
|
||||
dependsOn dockerTag, dockerTagMemcached
|
||||
dependsOn dockerTag, dockerTagMemcache
|
||||
registryCredentials {
|
||||
url = getProperty('docker.registry.url')
|
||||
username = 'woggioni'
|
||||
password = System.getenv().get("PUBLISHER_TOKEN")
|
||||
}
|
||||
images = [dockerTag.flatMap{ it.tag }, dockerTagMemcached.flatMap{ it.tag }]
|
||||
images = [dockerTag.flatMap{ it.tag }, dockerTagMemcache.flatMap{ it.tag }]
|
||||
}
|
||||
|
||||
|
||||
|
@@ -1,6 +0,0 @@
|
||||
module net.woggioni.gbcs.api {
|
||||
requires static lombok;
|
||||
requires java.xml;
|
||||
exports net.woggioni.gbcs.api;
|
||||
exports net.woggioni.gbcs.api.exception;
|
||||
}
|
@@ -1,11 +0,0 @@
|
||||
package net.woggioni.gbcs.api;
|
||||
|
||||
import net.woggioni.gbcs.api.exception.ContentTooLargeException;
|
||||
|
||||
import java.nio.channels.ReadableByteChannel;
|
||||
|
||||
public interface Cache extends AutoCloseable {
|
||||
ReadableByteChannel get(String key);
|
||||
|
||||
void put(String key, byte[] content) throws ContentTooLargeException;
|
||||
}
|
@@ -1,7 +0,0 @@
|
||||
package net.woggioni.gbcs.api.exception;
|
||||
|
||||
public class GbcsException extends RuntimeException {
|
||||
public GbcsException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
}
|
@@ -1,8 +0,0 @@
|
||||
module net.woggioni.gbcs.base {
|
||||
requires java.xml;
|
||||
requires java.logging;
|
||||
requires org.slf4j;
|
||||
requires kotlin.stdlib;
|
||||
|
||||
exports net.woggioni.gbcs.base;
|
||||
}
|
@@ -1,12 +0,0 @@
|
||||
package net.woggioni.gbcs.base
|
||||
|
||||
import java.net.URI
|
||||
import java.net.URL
|
||||
|
||||
object GBCS {
|
||||
fun String.toUrl() : URL = URL.of(URI(this), null)
|
||||
|
||||
const val GBCS_NAMESPACE_URI: String = "urn:net.woggioni.gbcs"
|
||||
const val GBCS_PREFIX: String = "gbcs"
|
||||
const val XML_SCHEMA_NAMESPACE_URI = "http://www.w3.org/2001/XMLSchema-instance"
|
||||
}
|
@@ -1,15 +0,0 @@
|
||||
module net.woggioni.gbcs.cli {
|
||||
requires org.slf4j;
|
||||
requires net.woggioni.gbcs;
|
||||
requires info.picocli;
|
||||
requires net.woggioni.gbcs.base;
|
||||
requires kotlin.stdlib;
|
||||
requires net.woggioni.jwo;
|
||||
|
||||
exports net.woggioni.gbcs.cli.impl.converters to info.picocli;
|
||||
opens net.woggioni.gbcs.cli.impl.commands to info.picocli;
|
||||
opens net.woggioni.gbcs.cli.impl to info.picocli;
|
||||
opens net.woggioni.gbcs.cli to info.picocli, net.woggioni.gbcs.base;
|
||||
|
||||
exports net.woggioni.gbcs.cli;
|
||||
}
|
@@ -1,99 +0,0 @@
|
||||
package net.woggioni.gbcs.cli
|
||||
|
||||
import net.woggioni.gbcs.GradleBuildCacheServer
|
||||
import net.woggioni.gbcs.GradleBuildCacheServer.Companion.DEFAULT_CONFIGURATION_URL
|
||||
import net.woggioni.gbcs.base.GbcsUrlStreamHandlerFactory
|
||||
import net.woggioni.gbcs.base.contextLogger
|
||||
import net.woggioni.gbcs.base.debug
|
||||
import net.woggioni.gbcs.base.info
|
||||
import net.woggioni.gbcs.cli.impl.AbstractVersionProvider
|
||||
import net.woggioni.gbcs.cli.impl.GbcsCommand
|
||||
import net.woggioni.gbcs.cli.impl.commands.PasswordHashCommand
|
||||
import net.woggioni.jwo.Application
|
||||
import net.woggioni.jwo.JWO
|
||||
import org.slf4j.Logger
|
||||
import picocli.CommandLine
|
||||
import picocli.CommandLine.Model.CommandSpec
|
||||
import java.io.ByteArrayOutputStream
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "gbcs", versionProvider = GradleBuildCacheServerCli.VersionProvider::class
|
||||
)
|
||||
class GradleBuildCacheServerCli(application : Application, private val log : Logger) : GbcsCommand() {
|
||||
|
||||
class VersionProvider : AbstractVersionProvider()
|
||||
companion object {
|
||||
@JvmStatic
|
||||
fun main(vararg args: String) {
|
||||
Thread.currentThread().contextClassLoader = GradleBuildCacheServerCli::class.java.classLoader
|
||||
GbcsUrlStreamHandlerFactory.install()
|
||||
val log = contextLogger()
|
||||
val app = Application.builder("gbcs")
|
||||
.configurationDirectoryEnvVar("GBCS_CONFIGURATION_DIR")
|
||||
.configurationDirectoryPropertyKey("net.woggioni.gbcs.conf.dir")
|
||||
.build()
|
||||
val gbcsCli = GradleBuildCacheServerCli(app, log)
|
||||
val commandLine = CommandLine(gbcsCli)
|
||||
commandLine.setExecutionExceptionHandler { ex, cl, parseResult ->
|
||||
log.error(ex.message, ex)
|
||||
CommandLine.ExitCode.SOFTWARE
|
||||
}
|
||||
commandLine.addSubcommand(PasswordHashCommand())
|
||||
System.exit(commandLine.execute(*args))
|
||||
}
|
||||
}
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-c", "--config-file"],
|
||||
description = ["Read the application configuration from this file"],
|
||||
paramLabel = "CONFIG_FILE"
|
||||
)
|
||||
private var configurationFile: Path = findConfigurationFile(application)
|
||||
|
||||
@CommandLine.Option(names = ["-V", "--version"], versionHelp = true)
|
||||
var versionHelp = false
|
||||
private set
|
||||
|
||||
@CommandLine.Spec
|
||||
private lateinit var spec: CommandSpec
|
||||
|
||||
private fun findConfigurationFile(app : Application): Path {
|
||||
val confDir = app.computeConfigurationDirectory()
|
||||
val configurationFile = confDir.resolve("gbcs.xml")
|
||||
return configurationFile
|
||||
}
|
||||
|
||||
private fun createDefaultConfigurationFile(configurationFile : Path) {
|
||||
log.info {
|
||||
"Creating default configuration file at '$configurationFile'"
|
||||
}
|
||||
val defaultConfigurationFileResource = DEFAULT_CONFIGURATION_URL
|
||||
Files.newOutputStream(configurationFile).use { outputStream ->
|
||||
defaultConfigurationFileResource.openStream().use { inputStream ->
|
||||
JWO.copy(inputStream, outputStream)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun run() {
|
||||
if (!Files.exists(configurationFile)) {
|
||||
Files.createDirectories(configurationFile.parent)
|
||||
createDefaultConfigurationFile(configurationFile)
|
||||
}
|
||||
|
||||
val configuration = GradleBuildCacheServer.loadConfiguration(configurationFile)
|
||||
log.debug {
|
||||
ByteArrayOutputStream().also {
|
||||
GradleBuildCacheServer.dumpConfiguration(configuration, it)
|
||||
}.let {
|
||||
"Server configuration:\n${String(it.toByteArray())}"
|
||||
}
|
||||
}
|
||||
val server = GradleBuildCacheServer(configuration)
|
||||
server.run().use {
|
||||
}
|
||||
}
|
||||
}
|
@@ -1,11 +0,0 @@
|
||||
package net.woggioni.gbcs.cli.impl
|
||||
|
||||
import picocli.CommandLine
|
||||
|
||||
|
||||
abstract class GbcsCommand : Runnable {
|
||||
|
||||
@CommandLine.Option(names = ["-h", "--help"], usageHelp = true)
|
||||
var usageHelp = false
|
||||
private set
|
||||
}
|
@@ -1,14 +0,0 @@
|
||||
import net.woggioni.gbcs.api.CacheProvider;
|
||||
|
||||
module net.woggioni.gbcs.memcached {
|
||||
requires net.woggioni.gbcs.base;
|
||||
requires net.woggioni.gbcs.api;
|
||||
requires com.googlecode.xmemcached;
|
||||
requires net.woggioni.jwo;
|
||||
requires java.xml;
|
||||
requires kotlin.stdlib;
|
||||
|
||||
provides CacheProvider with net.woggioni.gbcs.memcached.MemcachedCacheProvider;
|
||||
|
||||
opens net.woggioni.gbcs.memcached.schema;
|
||||
}
|
@@ -1,60 +0,0 @@
|
||||
package net.woggioni.gbcs.memcached
|
||||
|
||||
import net.rubyeye.xmemcached.MemcachedClient
|
||||
import net.rubyeye.xmemcached.XMemcachedClientBuilder
|
||||
import net.rubyeye.xmemcached.command.BinaryCommandFactory
|
||||
import net.rubyeye.xmemcached.transcoders.CompressionMode
|
||||
import net.rubyeye.xmemcached.transcoders.SerializingTranscoder
|
||||
import net.woggioni.gbcs.api.Cache
|
||||
import net.woggioni.gbcs.api.exception.ContentTooLargeException
|
||||
import net.woggioni.gbcs.base.HostAndPort
|
||||
import net.woggioni.jwo.JWO
|
||||
import java.io.ByteArrayInputStream
|
||||
import java.net.InetSocketAddress
|
||||
import java.nio.channels.Channels
|
||||
import java.nio.channels.ReadableByteChannel
|
||||
import java.nio.charset.StandardCharsets
|
||||
import java.security.MessageDigest
|
||||
import java.time.Duration
|
||||
|
||||
class MemcachedCache(
|
||||
servers: List<HostAndPort>,
|
||||
private val maxAge: Duration,
|
||||
maxSize : Int,
|
||||
digestAlgorithm: String?,
|
||||
compressionMode: CompressionMode,
|
||||
) : Cache {
|
||||
private val memcachedClient = XMemcachedClientBuilder(
|
||||
servers.stream().map { addr: HostAndPort -> InetSocketAddress(addr.host, addr.port) }.toList()
|
||||
).apply {
|
||||
commandFactory = BinaryCommandFactory()
|
||||
digestAlgorithm?.let { dAlg ->
|
||||
setKeyProvider { key ->
|
||||
val md = MessageDigest.getInstance(dAlg)
|
||||
md.update(key.toByteArray(StandardCharsets.UTF_8))
|
||||
JWO.bytesToHex(md.digest())
|
||||
}
|
||||
}
|
||||
transcoder = SerializingTranscoder(maxSize).apply {
|
||||
setCompressionMode(compressionMode)
|
||||
}
|
||||
}.build()
|
||||
|
||||
override fun get(key: String): ReadableByteChannel? {
|
||||
return memcachedClient.get<ByteArray>(key)
|
||||
?.let(::ByteArrayInputStream)
|
||||
?.let(Channels::newChannel)
|
||||
}
|
||||
|
||||
override fun put(key: String, content: ByteArray) {
|
||||
try {
|
||||
memcachedClient[key, maxAge.toSeconds().toInt()] = content
|
||||
} catch (e: IllegalArgumentException) {
|
||||
throw ContentTooLargeException(e.message, e)
|
||||
}
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
memcachedClient.shutdown()
|
||||
}
|
||||
}
|
@@ -1,26 +0,0 @@
|
||||
package net.woggioni.gbcs.memcached
|
||||
|
||||
import net.rubyeye.xmemcached.transcoders.CompressionMode
|
||||
import net.woggioni.gbcs.api.Configuration
|
||||
import net.woggioni.gbcs.base.HostAndPort
|
||||
import java.time.Duration
|
||||
|
||||
data class MemcachedCacheConfiguration(
|
||||
var servers: List<HostAndPort>,
|
||||
var maxAge: Duration = Duration.ofDays(1),
|
||||
var maxSize: Int = 0x100000,
|
||||
var digestAlgorithm: String? = null,
|
||||
var compressionMode: CompressionMode = CompressionMode.ZIP,
|
||||
) : Configuration.Cache {
|
||||
override fun materialize() = MemcachedCache(
|
||||
servers,
|
||||
maxAge,
|
||||
maxSize,
|
||||
digestAlgorithm,
|
||||
compressionMode
|
||||
)
|
||||
|
||||
override fun getNamespaceURI() = "urn:net.woggioni.gbcs-memcached"
|
||||
|
||||
override fun getTypeName() = "memcachedCacheType"
|
||||
}
|
@@ -1,85 +0,0 @@
|
||||
package net.woggioni.gbcs.memcached
|
||||
|
||||
import net.rubyeye.xmemcached.transcoders.CompressionMode
|
||||
import net.woggioni.gbcs.api.CacheProvider
|
||||
import net.woggioni.gbcs.base.GBCS
|
||||
import net.woggioni.gbcs.base.HostAndPort
|
||||
import net.woggioni.gbcs.base.Xml
|
||||
import net.woggioni.gbcs.base.Xml.Companion.asIterable
|
||||
import org.w3c.dom.Document
|
||||
import org.w3c.dom.Element
|
||||
import java.time.Duration
|
||||
import java.util.zip.Deflater
|
||||
|
||||
class MemcachedCacheProvider : CacheProvider<MemcachedCacheConfiguration> {
|
||||
override fun getXmlSchemaLocation() = "classpath:net/woggioni/gbcs/memcached/schema/gbcs-memcached.xsd"
|
||||
|
||||
override fun getXmlType() = "memcachedCacheType"
|
||||
|
||||
override fun getXmlNamespace()= "urn:net.woggioni.gbcs-memcached"
|
||||
|
||||
override fun deserialize(el: Element): MemcachedCacheConfiguration {
|
||||
val servers = mutableListOf<HostAndPort>()
|
||||
val maxAge = el.getAttribute("max-age")
|
||||
.takeIf(String::isNotEmpty)
|
||||
?.let(Duration::parse)
|
||||
?: Duration.ofDays(1)
|
||||
val maxSize = el.getAttribute("max-size")
|
||||
.takeIf(String::isNotEmpty)
|
||||
?.let(String::toInt)
|
||||
?: 0x100000
|
||||
val enableCompression = el.getAttribute("enable-compression")
|
||||
.takeIf(String::isNotEmpty)
|
||||
?.let(String::toBoolean)
|
||||
?: false
|
||||
val compressionMode = el.getAttribute("compression-mode")
|
||||
.takeIf(String::isNotEmpty)
|
||||
?.let {
|
||||
when(it) {
|
||||
"gzip" -> CompressionMode.GZIP
|
||||
"zip" -> CompressionMode.ZIP
|
||||
else -> CompressionMode.ZIP
|
||||
}
|
||||
}
|
||||
?: CompressionMode.ZIP
|
||||
val digestAlgorithm = el.getAttribute("digest").takeIf(String::isNotEmpty)
|
||||
for (child in el.asIterable()) {
|
||||
when (child.nodeName) {
|
||||
"server" -> {
|
||||
servers.add(HostAndPort(child.getAttribute("host"), child.getAttribute("port").toInt()))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return MemcachedCacheConfiguration(
|
||||
servers,
|
||||
maxAge,
|
||||
maxSize,
|
||||
digestAlgorithm,
|
||||
compressionMode,
|
||||
)
|
||||
}
|
||||
|
||||
override fun serialize(doc: Document, cache : MemcachedCacheConfiguration) = cache.run {
|
||||
val result = doc.createElementNS(xmlNamespace,"cache")
|
||||
Xml.of(doc, result) {
|
||||
attr("xs:type", xmlType, GBCS.XML_SCHEMA_NAMESPACE_URI)
|
||||
for (server in servers) {
|
||||
node("server", xmlNamespace) {
|
||||
attr("host", server.host)
|
||||
attr("port", server.port.toString())
|
||||
}
|
||||
}
|
||||
attr("max-age", maxAge.toString())
|
||||
attr("max-size", maxSize.toString())
|
||||
digestAlgorithm?.let { digestAlgorithm ->
|
||||
attr("digest", digestAlgorithm)
|
||||
}
|
||||
attr("compression-mode", when(compressionMode) {
|
||||
CompressionMode.GZIP -> "gzip"
|
||||
CompressionMode.ZIP -> "zip"
|
||||
})
|
||||
}
|
||||
result
|
||||
}
|
||||
}
|
@@ -1 +0,0 @@
net.woggioni.gbcs.memcached.MemcachedCacheProvider
@@ -1,35 +0,0 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<xs:schema targetNamespace="urn:net.woggioni.gbcs-memcached"
|
||||
xmlns:gbcs-memcached="urn:net.woggioni.gbcs-memcached"
|
||||
xmlns:gbcs="urn:net.woggioni.gbcs"
|
||||
xmlns:xs="http://www.w3.org/2001/XMLSchema">
|
||||
|
||||
<xs:import schemaLocation="classpath:net/woggioni/gbcs/schema/gbcs.xsd" namespace="urn:net.woggioni.gbcs"/>
|
||||
|
||||
<xs:complexType name="memcachedServerType">
|
||||
<xs:attribute name="host" type="xs:string" use="required"/>
|
||||
<xs:attribute name="port" type="xs:positiveInteger" use="required"/>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="memcachedCacheType">
|
||||
<xs:complexContent>
|
||||
<xs:extension base="gbcs:cacheType">
|
||||
<xs:sequence maxOccurs="unbounded">
|
||||
<xs:element name="server" type="gbcs-memcached:memcachedServerType"/>
|
||||
</xs:sequence>
|
||||
<xs:attribute name="max-age" type="xs:duration" default="P1D"/>
|
||||
<xs:attribute name="max-size" type="xs:unsignedInt" default="1048576"/>
|
||||
<xs:attribute name="digest" type="xs:token" />
|
||||
<xs:attribute name="compression-type" type="gbcs-memcached:compressionType" default="deflate"/>
|
||||
</xs:extension>
|
||||
</xs:complexContent>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:simpleType name="compressionType">
|
||||
<xs:restriction base="xs:token">
|
||||
<xs:enumeration value="deflate"/>
|
||||
<xs:enumeration value="gzip"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
|
||||
</xs:schema>
|
@@ -2,9 +2,11 @@ org.gradle.configuration-cache=false
org.gradle.parallel=true
org.gradle.caching=true

gbcs.version = 0.0.1
rbcs.version = 0.1.4

lys.version = 2025.01.10
lys.version = 2025.02.05

gitea.maven.url = https://gitea.woggioni.net/api/packages/woggioni/maven
docker.registry.url=gitea.woggioni.net

jpms-check.configurationName = runtimeClasspath
@@ -5,6 +5,7 @@ plugins {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
api catalog.netty.buffer
|
||||
}
|
||||
|
||||
publishing {
|
7
rbcs-api/src/main/java/module-info.java
Normal file
7
rbcs-api/src/main/java/module-info.java
Normal file
@@ -0,0 +1,7 @@
|
||||
module net.woggioni.rbcs.api {
|
||||
requires static lombok;
|
||||
requires java.xml;
|
||||
requires io.netty.buffer;
|
||||
exports net.woggioni.rbcs.api;
|
||||
exports net.woggioni.rbcs.api.exception;
|
||||
}
|
14
rbcs-api/src/main/java/net/woggioni/rbcs/api/Cache.java
Normal file
14
rbcs-api/src/main/java/net/woggioni/rbcs/api/Cache.java
Normal file
@@ -0,0 +1,14 @@
|
||||
package net.woggioni.rbcs.api;
|
||||
|
||||
import io.netty.buffer.ByteBuf;
|
||||
import net.woggioni.rbcs.api.exception.ContentTooLargeException;
|
||||
|
||||
import java.nio.channels.ReadableByteChannel;
|
||||
import java.util.concurrent.CompletableFuture;
|
||||
|
||||
|
||||
public interface Cache extends AutoCloseable {
|
||||
CompletableFuture<ReadableByteChannel> get(String key);
|
||||
|
||||
CompletableFuture<Void> put(String key, ByteBuf content) throws ContentTooLargeException;
|
||||
}
|
@@ -1,4 +1,4 @@
|
||||
package net.woggioni.gbcs.api;
|
||||
package net.woggioni.rbcs.api;
|
||||
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Element;
|
@@ -1,11 +1,13 @@
|
||||
package net.woggioni.gbcs.api;
|
||||
package net.woggioni.rbcs.api;
|
||||
|
||||
|
||||
import lombok.EqualsAndHashCode;
|
||||
import lombok.NonNull;
|
||||
import lombok.Value;
|
||||
|
||||
import java.nio.file.Path;
|
||||
import java.security.cert.X509Certificate;
|
||||
import java.time.Duration;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.stream.Collectors;
|
||||
@@ -14,19 +16,48 @@ import java.util.stream.Collectors;
|
||||
public class Configuration {
|
||||
String host;
|
||||
int port;
|
||||
int incomingConnectionsBacklogSize;
|
||||
String serverPath;
|
||||
@NonNull
|
||||
EventExecutor eventExecutor;
|
||||
@NonNull
|
||||
Connection connection;
|
||||
Map<String, User> users;
|
||||
Map<String, Group> groups;
|
||||
Cache cache;
|
||||
Authentication authentication;
|
||||
Tls tls;
|
||||
boolean useVirtualThread;
|
||||
|
||||
@Value
|
||||
public static class EventExecutor {
|
||||
boolean useVirtualThreads;
|
||||
}
|
||||
|
||||
@Value
|
||||
public static class Connection {
|
||||
Duration readTimeout;
|
||||
Duration writeTimeout;
|
||||
Duration idleTimeout;
|
||||
Duration readIdleTimeout;
|
||||
Duration writeIdleTimeout;
|
||||
int maxRequestSize;
|
||||
}
|
||||
|
||||
@Value
|
||||
public static class Quota {
|
||||
long calls;
|
||||
Duration period;
|
||||
long initialAvailableCalls;
|
||||
long maxAvailableCalls;
|
||||
}
|
||||
|
||||
@Value
|
||||
public static class Group {
|
||||
@EqualsAndHashCode.Include
|
||||
String name;
|
||||
Set<Role> roles;
|
||||
Quota groupQuota;
|
||||
Quota userQuota;
|
||||
}
|
||||
|
||||
@Value
|
||||
@@ -35,7 +66,7 @@ public class Configuration {
|
||||
String name;
|
||||
String password;
|
||||
Set<Group> groups;
|
||||
|
||||
Quota quota;
|
||||
|
||||
public Set<Role> getRoles() {
|
||||
return groups.stream()
|
||||
@@ -55,12 +86,22 @@ public class Configuration {
|
||||
}
|
||||
|
||||
@Value
|
||||
public static class Tls {
|
||||
public static class Throttling {
|
||||
KeyStore keyStore;
|
||||
TrustStore trustStore;
|
||||
boolean verifyClients;
|
||||
}
|
||||
|
||||
public enum ClientCertificate {
|
||||
REQUIRED, OPTIONAL
|
||||
}
|
||||
|
||||
@Value
|
||||
public static class Tls {
|
||||
KeyStore keyStore;
|
||||
TrustStore trustStore;
|
||||
}
|
||||
|
||||
@Value
|
||||
public static class KeyStore {
|
||||
Path file;
|
||||
@@ -74,6 +115,7 @@ public class Configuration {
|
||||
Path file;
|
||||
String password;
|
||||
boolean checkCertificateStatus;
|
||||
boolean requireClientCertificate;
|
||||
}
|
||||
|
||||
@Value
|
||||
@@ -93,7 +135,7 @@ public class Configuration {
|
||||
}
|
||||
|
||||
public interface Cache {
|
||||
net.woggioni.gbcs.api.Cache materialize();
|
||||
net.woggioni.rbcs.api.Cache materialize();
|
||||
String getNamespaceURI();
|
||||
String getTypeName();
|
||||
}
|
||||
@@ -101,24 +143,28 @@ public class Configuration {
|
||||
public static Configuration of(
|
||||
String host,
|
||||
int port,
|
||||
int incomingConnectionsBacklogSize,
|
||||
String serverPath,
|
||||
EventExecutor eventExecutor,
|
||||
Connection connection,
|
||||
Map<String, User> users,
|
||||
Map<String, Group> groups,
|
||||
Cache cache,
|
||||
Authentication authentication,
|
||||
Tls tls,
|
||||
boolean useVirtualThread
|
||||
Tls tls
|
||||
) {
|
||||
return new Configuration(
|
||||
host,
|
||||
port,
|
||||
incomingConnectionsBacklogSize,
|
||||
serverPath != null && !serverPath.isEmpty() && !serverPath.equals("/") ? serverPath : null,
|
||||
eventExecutor,
|
||||
connection,
|
||||
users,
|
||||
groups,
|
||||
cache,
|
||||
authentication,
|
||||
tls,
|
||||
useVirtualThread
|
||||
tls
|
||||
);
|
||||
}
|
||||
}
|
@@ -1,4 +1,4 @@
|
||||
package net.woggioni.gbcs.api;
|
||||
package net.woggioni.rbcs.api;
|
||||
|
||||
public enum Role {
|
||||
Reader, Writer
|
@@ -0,0 +1,11 @@
|
||||
package net.woggioni.rbcs.api.exception;
|
||||
|
||||
public class CacheException extends RbcsException {
|
||||
public CacheException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
|
||||
public CacheException(String message) {
|
||||
this(message, null);
|
||||
}
|
||||
}
|
@@ -0,0 +1,11 @@
|
||||
package net.woggioni.rbcs.api.exception;
|
||||
|
||||
public class ConfigurationException extends RbcsException {
|
||||
public ConfigurationException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
|
||||
public ConfigurationException(String message) {
|
||||
this(message, null);
|
||||
}
|
||||
}
|
@@ -1,6 +1,6 @@
|
||||
package net.woggioni.gbcs.api.exception;
|
||||
package net.woggioni.rbcs.api.exception;
|
||||
|
||||
public class ContentTooLargeException extends GbcsException {
|
||||
public class ContentTooLargeException extends RbcsException {
|
||||
public ContentTooLargeException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
@@ -0,0 +1,7 @@
|
||||
package net.woggioni.rbcs.api.exception;
|
||||
|
||||
public class RbcsException extends RuntimeException {
|
||||
public RbcsException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
}
|
@@ -4,18 +4,22 @@ plugins {
|
||||
alias catalog.plugins.envelope
|
||||
alias catalog.plugins.sambal
|
||||
alias catalog.plugins.graalvm.native.image
|
||||
alias catalog.plugins.graalvm.jlink
|
||||
alias catalog.plugins.jpms.check
|
||||
id 'maven-publish'
|
||||
}
|
||||
|
||||
import net.woggioni.gradle.envelope.EnvelopeJarTask
|
||||
import net.woggioni.gradle.graalvm.NativeImageConfigurationTask
|
||||
import net.woggioni.gradle.graalvm.NativeImagePlugin
|
||||
import net.woggioni.gradle.graalvm.NativeImageTask
|
||||
import net.woggioni.gradle.graalvm.NativeImageConfigurationTask
|
||||
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
|
||||
import org.jetbrains.kotlin.gradle.dsl.JvmTarget
|
||||
import net.woggioni.gradle.graalvm.JlinkPlugin
|
||||
import net.woggioni.gradle.graalvm.JlinkTask
|
||||
|
||||
Property<String> mainModuleName = objects.property(String.class)
|
||||
mainModuleName.set('net.woggioni.rbcs.cli')
|
||||
Property<String> mainClassName = objects.property(String.class)
|
||||
mainClassName.set('net.woggioni.gbcs.cli.GradleBuildCacheServerCli')
|
||||
mainClassName.set('net.woggioni.rbcs.cli.RemoteBuildCacheServerCli')
|
||||
|
||||
tasks.named(JavaPlugin.COMPILE_JAVA_TASK_NAME, JavaCompile) {
|
||||
options.javaModuleMainClass = mainClassName
|
||||
@@ -31,7 +35,7 @@ configurations {
|
||||
}
|
||||
|
||||
envelopeJar {
|
||||
mainModule = 'net.woggioni.gbcs.cli'
|
||||
mainModule = mainModuleName
|
||||
mainClass = mainClassName
|
||||
|
||||
extraClasspath = ["plugins"]
|
||||
@@ -43,28 +47,45 @@ dependencies {
|
||||
implementation catalog.netty.codec.http
|
||||
implementation catalog.picocli
|
||||
|
||||
implementation rootProject
|
||||
implementation project(':rbcs-client')
|
||||
implementation project(':rbcs-server')
|
||||
|
||||
// runtimeOnly catalog.slf4j.jdk14
|
||||
runtimeOnly catalog.logback.classic
|
||||
// runtimeOnly catalog.slf4j.simple
|
||||
}
|
||||
|
||||
Provider<EnvelopeJarTask> envelopeJarTaskProvider = tasks.named('envelopeJar', EnvelopeJarTask.class) {
|
||||
// systemProperties['java.util.logging.config.class'] = 'net.woggioni.gbcs.LoggingConfig'
|
||||
// systemProperties['log.config.source'] = 'logging.properties'
|
||||
systemProperties['logback.configurationFile'] = 'classpath:net/woggioni/gbcs/cli/logback.xml'
|
||||
// systemProperties['java.util.logging.config.class'] = 'net.woggioni.rbcs.LoggingConfig'
|
||||
// systemProperties['log.config.source'] = 'net/woggioni/rbcs/cli/logging.properties'
|
||||
// systemProperties['java.util.logging.config.file'] = 'classpath:net/woggioni/rbcs/cli/logging.properties'
|
||||
systemProperties['logback.configurationFile'] = 'classpath:net/woggioni/rbcs/cli/logback.xml'
|
||||
systemProperties['io.netty.leakDetectionLevel'] = 'DISABLED'
|
||||
|
||||
// systemProperties['org.slf4j.simpleLogger.showDateTime'] = 'true'
|
||||
// systemProperties['org.slf4j.simpleLogger.defaultLogLevel'] = 'debug'
|
||||
// systemProperties['org.slf4j.simpleLogger.log.com.google.code.yanf4j'] = 'warn'
|
||||
// systemProperties['org.slf4j.simpleLogger.log.net.rubyeye.xmemcached'] = 'warn'
|
||||
// systemProperties['org.slf4j.simpleLogger.dateTimeFormat'] = 'yyyy-MM-dd\'T\'HH:mm:ss.SSSZ'
|
||||
}
|
||||
|
||||
tasks.named(NativeImagePlugin.CONFIGURE_NATIVE_IMAGE_TASK_NAME, NativeImageConfigurationTask) {
|
||||
mainClass = 'net.woggioni.gbcs.GraalNativeImageConfiguration'
|
||||
mainClass = mainClassName
|
||||
mainModule = mainModuleName
|
||||
}
|
||||
|
||||
tasks.named(NativeImagePlugin.NATIVE_IMAGE_TASK_NAME, NativeImageTask) {
|
||||
mainClass = 'net.woggioni.gbcs.GradleBuildCacheServer'
|
||||
mainClass = mainClassName
|
||||
mainModule = mainModuleName
|
||||
useMusl = true
|
||||
buildStaticImage = true
|
||||
}
|
||||
|
||||
tasks.named(JlinkPlugin.JLINK_TASK_NAME, JlinkTask) {
|
||||
mainClass = mainClassName
|
||||
mainModule = 'net.woggioni.rbcs.cli'
|
||||
}
|
||||
|
||||
artifacts {
|
||||
release(envelopeJarTaskProvider)
|
||||
}
|
@@ -1,2 +1,2 @@
Args=-H:Optimize=3 --gc=serial
Args=-H:Optimize=3 --gc=serial --initialize-at-run-time=io.netty
#-H:TraceClassInitialization=io.netty.handler.ssl.BouncyCastleAlpnSslUtils

rbcs-cli/src/main/java/module-info.java (new file, 17 lines)
@@ -0,0 +1,17 @@
|
||||
module net.woggioni.rbcs.cli {
|
||||
requires org.slf4j;
|
||||
requires net.woggioni.rbcs.server;
|
||||
requires info.picocli;
|
||||
requires net.woggioni.rbcs.common;
|
||||
requires net.woggioni.rbcs.client;
|
||||
requires kotlin.stdlib;
|
||||
requires net.woggioni.jwo;
|
||||
requires net.woggioni.rbcs.api;
|
||||
|
||||
exports net.woggioni.rbcs.cli.impl.converters to info.picocli;
|
||||
opens net.woggioni.rbcs.cli.impl.commands to info.picocli;
|
||||
opens net.woggioni.rbcs.cli.impl to info.picocli;
|
||||
opens net.woggioni.rbcs.cli to info.picocli, net.woggioni.rbcs.common;
|
||||
|
||||
exports net.woggioni.rbcs.cli;
|
||||
}
|
@@ -0,0 +1,69 @@
|
||||
package net.woggioni.rbcs.cli
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.AbstractVersionProvider
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.cli.impl.commands.BenchmarkCommand
|
||||
import net.woggioni.rbcs.cli.impl.commands.ClientCommand
|
||||
import net.woggioni.rbcs.cli.impl.commands.GetCommand
|
||||
import net.woggioni.rbcs.cli.impl.commands.HealthCheckCommand
|
||||
import net.woggioni.rbcs.cli.impl.commands.PasswordHashCommand
|
||||
import net.woggioni.rbcs.cli.impl.commands.PutCommand
|
||||
import net.woggioni.rbcs.cli.impl.commands.ServerCommand
|
||||
import net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import net.woggioni.jwo.Application
|
||||
import picocli.CommandLine
|
||||
import picocli.CommandLine.Model.CommandSpec
|
||||
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "rbcs", versionProvider = RemoteBuildCacheServerCli.VersionProvider::class
|
||||
)
|
||||
class RemoteBuildCacheServerCli : RbcsCommand() {
|
||||
|
||||
class VersionProvider : AbstractVersionProvider()
|
||||
companion object {
|
||||
@JvmStatic
|
||||
fun main(vararg args: String) {
|
||||
val currentClassLoader = RemoteBuildCacheServerCli::class.java.classLoader
|
||||
Thread.currentThread().contextClassLoader = currentClassLoader
|
||||
if(currentClassLoader.javaClass.name == "net.woggioni.envelope.loader.ModuleClassLoader") {
|
||||
//We're running in an envelope jar and custom URL protocols won't work
|
||||
RbcsUrlStreamHandlerFactory.install()
|
||||
}
|
||||
val log = contextLogger()
|
||||
val app = Application.builder("rbcs")
|
||||
.configurationDirectoryEnvVar("RBCS_CONFIGURATION_DIR")
|
||||
.configurationDirectoryPropertyKey("net.woggioni.rbcs.conf.dir")
|
||||
.build()
|
||||
val rbcsCli = RemoteBuildCacheServerCli()
|
||||
val commandLine = CommandLine(rbcsCli)
|
||||
commandLine.setExecutionExceptionHandler { ex, cl, parseResult ->
|
||||
log.error(ex.message, ex)
|
||||
CommandLine.ExitCode.SOFTWARE
|
||||
}
|
||||
commandLine.addSubcommand(ServerCommand(app))
|
||||
commandLine.addSubcommand(PasswordHashCommand())
|
||||
commandLine.addSubcommand(
|
||||
CommandLine(ClientCommand(app)).apply {
|
||||
addSubcommand(BenchmarkCommand())
|
||||
addSubcommand(PutCommand())
|
||||
addSubcommand(GetCommand())
|
||||
addSubcommand(HealthCheckCommand())
|
||||
})
|
||||
System.exit(commandLine.execute(*args))
|
||||
}
|
||||
}
|
||||
|
||||
@CommandLine.Option(names = ["-V", "--version"], versionHelp = true)
|
||||
var versionHelp = false
|
||||
private set
|
||||
|
||||
@CommandLine.Spec
|
||||
private lateinit var spec: CommandSpec
|
||||
|
||||
|
||||
override fun run() {
|
||||
spec.commandLine().usage(System.out);
|
||||
}
|
||||
}
|
@@ -1,8 +1,6 @@
|
||||
package net.woggioni.gbcs.cli.impl
|
||||
package net.woggioni.rbcs.cli.impl
|
||||
|
||||
import picocli.CommandLine
|
||||
import java.net.URL
|
||||
import java.util.Enumeration
|
||||
import java.util.jar.Attributes
|
||||
import java.util.jar.JarFile
|
||||
import java.util.jar.Manifest
|
@@ -0,0 +1,19 @@
|
||||
package net.woggioni.rbcs.cli.impl
|
||||
|
||||
import net.woggioni.jwo.Application
|
||||
import picocli.CommandLine
|
||||
import java.nio.file.Path
|
||||
|
||||
|
||||
abstract class RbcsCommand : Runnable {
|
||||
|
||||
@CommandLine.Option(names = ["-h", "--help"], usageHelp = true)
|
||||
var usageHelp = false
|
||||
private set
|
||||
|
||||
protected fun findConfigurationFile(app: Application, fileName : String): Path {
|
||||
val confDir = app.computeConfigurationDirectory()
|
||||
val configurationFile = confDir.resolve(fileName)
|
||||
return configurationFile
|
||||
}
|
||||
}
|
@@ -0,0 +1,142 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import net.woggioni.rbcs.common.error
|
||||
import net.woggioni.rbcs.common.info
|
||||
import net.woggioni.jwo.JWO
|
||||
import picocli.CommandLine
|
||||
import java.security.SecureRandom
|
||||
import java.time.Duration
|
||||
import java.time.Instant
|
||||
import java.util.concurrent.LinkedBlockingQueue
|
||||
import java.util.concurrent.Semaphore
|
||||
import java.util.concurrent.atomic.AtomicLong
|
||||
import kotlin.random.Random
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "benchmark",
|
||||
description = ["Run a load test against the server"],
|
||||
showDefaultValues = true
|
||||
)
|
||||
class BenchmarkCommand : RbcsCommand() {
|
||||
private val log = contextLogger()
|
||||
|
||||
@CommandLine.Spec
|
||||
private lateinit var spec: CommandLine.Model.CommandSpec
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-e", "--entries"],
|
||||
description = ["Total number of elements to be added to the cache"],
|
||||
paramLabel = "NUMBER_OF_ENTRIES"
|
||||
)
|
||||
private var numberOfEntries = 1000
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-s", "--size"],
|
||||
description = ["Size of a cache value in bytes"],
|
||||
paramLabel = "SIZE"
|
||||
)
|
||||
private var size = 0x1000
|
||||
|
||||
override fun run() {
|
||||
val clientCommand = spec.parent().userObject() as ClientCommand
|
||||
val profile = clientCommand.profileName.let { profileName ->
|
||||
clientCommand.configuration.profiles[profileName]
|
||||
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
|
||||
}
|
||||
RemoteBuildCacheClient(profile).use { client ->
|
||||
|
||||
val entryGenerator = sequence {
|
||||
val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
|
||||
while (true) {
|
||||
val key = JWO.bytesToHex(random.nextBytes(16))
|
||||
val content = random.nextInt().toByte()
|
||||
val value = ByteArray(size, { _ -> content })
|
||||
yield(key to value)
|
||||
}
|
||||
}
|
||||
|
||||
log.info {
|
||||
"Starting insertion"
|
||||
}
|
||||
val entries = let {
|
||||
val completionCounter = AtomicLong(0)
|
||||
val completionQueue = LinkedBlockingQueue<Pair<String, ByteArray>>(numberOfEntries)
|
||||
val start = Instant.now()
|
||||
val semaphore = Semaphore(profile.maxConnections * 3)
|
||||
val iterator = entryGenerator.take(numberOfEntries).iterator()
|
||||
while (completionCounter.get() < numberOfEntries) {
|
||||
if (iterator.hasNext()) {
|
||||
val entry = iterator.next()
|
||||
semaphore.acquire()
|
||||
val future = client.put(entry.first, entry.second).thenApply { entry }
|
||||
future.whenComplete { result, ex ->
|
||||
if (ex != null) {
|
||||
log.error(ex.message, ex)
|
||||
} else {
|
||||
completionQueue.put(result)
|
||||
}
|
||||
semaphore.release()
|
||||
completionCounter.incrementAndGet()
|
||||
}
|
||||
} else {
|
||||
Thread.sleep(0)
|
||||
}
|
||||
}
|
||||
|
||||
val inserted = completionQueue.toList()
|
||||
val end = Instant.now()
|
||||
log.info {
|
||||
val elapsed = Duration.between(start, end).toMillis()
|
||||
val opsPerSecond = String.format("%.2f", numberOfEntries.toDouble() / elapsed * 1000)
|
||||
"Insertion rate: $opsPerSecond ops/s"
|
||||
}
|
||||
inserted
|
||||
}
|
||||
log.info {
|
||||
"Inserted ${entries.size} entries"
|
||||
}
|
||||
log.info {
|
||||
"Starting retrieval"
|
||||
}
|
||||
if (entries.isNotEmpty()) {
|
||||
val completionCounter = AtomicLong(0)
|
||||
val semaphore = Semaphore(profile.maxConnections * 3)
|
||||
val start = Instant.now()
|
||||
val iterator = entries.iterator()
|
||||
while (completionCounter.get() < entries.size) {
|
||||
if (iterator.hasNext()) {
|
||||
val entry = iterator.next()
semaphore.acquire() // throttle in-flight requests, matching the release in whenComplete below
|
||||
val future = client.get(entry.first).thenApply {
|
||||
if (it == null) {
|
||||
log.error {
|
||||
"Missing entry for key '${entry.first}'"
|
||||
}
|
||||
} else if (!entry.second.contentEquals(it)) {
|
||||
log.error {
|
||||
"Retrieved a value different from what was inserted for key '${entry.first}'"
|
||||
}
|
||||
}
|
||||
}
|
||||
future.whenComplete { _, _ ->
|
||||
completionCounter.incrementAndGet()
|
||||
semaphore.release()
|
||||
}
|
||||
} else {
|
||||
Thread.sleep(0)
|
||||
}
|
||||
}
|
||||
val end = Instant.now()
|
||||
log.info {
|
||||
val elapsed = Duration.between(start, end).toMillis()
|
||||
val opsPerSecond = String.format("%.2f", entries.size.toDouble() / elapsed * 1000)
|
||||
"Retrieval rate: $opsPerSecond ops/s"
|
||||
}
|
||||
} else {
|
||||
log.error("Skipping retrieval benchmark as it was not possible to insert any entry in the cache")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
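The insertion loop above bounds the number of in-flight requests with a Semaphore sized as profile.maxConnections * 3. The sketch below isolates that pattern, with the cache client replaced by an arbitrary asynchronous operation (an assumption made purely for illustration):

import java.util.concurrent.CompletableFuture
import java.util.concurrent.Semaphore
import java.util.concurrent.atomic.AtomicLong

// Submit `total` asynchronous operations while keeping at most `maxInFlight` of them pending,
// mirroring the acquire/release bookkeeping of the benchmark loop above.
fun submitThrottled(total: Int, maxInFlight: Int, op: (Int) -> CompletableFuture<Unit>) {
    val semaphore = Semaphore(maxInFlight)
    val completed = AtomicLong(0)
    var next = 0
    while (completed.get() < total) {
        if (next < total) {
            semaphore.acquire()
            op(next++).whenComplete { _, _ ->
                semaphore.release()
                completed.incrementAndGet()
            }
        } else {
            Thread.sleep(0)
        }
    }
}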
@@ -0,0 +1,41 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.jwo.Application
|
||||
import picocli.CommandLine
|
||||
import java.nio.file.Path
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "client",
|
||||
description = ["RBCS client"],
|
||||
showDefaultValues = true
|
||||
)
|
||||
class ClientCommand(app : Application) : RbcsCommand() {
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-c", "--configuration"],
|
||||
description = ["Path to the client configuration file"],
|
||||
paramLabel = "CONFIGURATION_FILE"
|
||||
)
|
||||
private var configurationFile : Path = findConfigurationFile(app, "rbcs-client.xml")
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-p", "--profile"],
|
||||
description = ["Name of the client profile to be used"],
|
||||
paramLabel = "PROFILE",
|
||||
required = true
|
||||
)
|
||||
var profileName : String? = null
|
||||
|
||||
val configuration : RemoteBuildCacheClient.Configuration by lazy {
|
||||
RemoteBuildCacheClient.Configuration.parse(configurationFile)
|
||||
}
|
||||
|
||||
override fun run() {
|
||||
println("Available profiles:")
|
||||
configuration.profiles.forEach { (profileName, _) ->
|
||||
println(profileName)
|
||||
}
|
||||
}
|
||||
}
|
@@ -0,0 +1,51 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import picocli.CommandLine
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "get",
|
||||
description = ["Fetch a value from the cache with the specified key"],
|
||||
showDefaultValues = true
|
||||
)
|
||||
class GetCommand : RbcsCommand() {
|
||||
private val log = contextLogger()
|
||||
|
||||
@CommandLine.Spec
|
||||
private lateinit var spec: CommandLine.Model.CommandSpec
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-k", "--key"],
|
||||
description = ["The key for the new value"],
|
||||
paramLabel = "KEY"
|
||||
)
|
||||
private var key : String = ""
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-v", "--value"],
|
||||
description = ["Path to a file where the retrieved value will be written (defaults to stdout)"],
|
||||
paramLabel = "VALUE_FILE",
|
||||
)
|
||||
private var output : Path? = null
|
||||
|
||||
override fun run() {
|
||||
val clientCommand = spec.parent().userObject() as ClientCommand
|
||||
val profile = clientCommand.profileName.let { profileName ->
|
||||
clientCommand.configuration.profiles[profileName]
|
||||
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
|
||||
}
|
||||
RemoteBuildCacheClient(profile).use { client ->
|
||||
client.get(key).thenApply { value ->
|
||||
value?.let {
|
||||
(output?.let(Files::newOutputStream) ?: System.out).use {
|
||||
it.write(value)
|
||||
}
|
||||
} ?: throw NoSuchElementException("No value found for key $key")
|
||||
}.get()
|
||||
}
|
||||
}
|
||||
}
|
@@ -0,0 +1,45 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import picocli.CommandLine
|
||||
import java.security.SecureRandom
|
||||
import kotlin.random.Random
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "health",
|
||||
description = ["Check server health"],
|
||||
showDefaultValues = true
|
||||
)
|
||||
class HealthCheckCommand : RbcsCommand() {
|
||||
private val log = contextLogger()
|
||||
|
||||
@CommandLine.Spec
|
||||
private lateinit var spec: CommandLine.Model.CommandSpec
|
||||
|
||||
override fun run() {
|
||||
val clientCommand = spec.parent().userObject() as ClientCommand
|
||||
val profile = clientCommand.profileName.let { profileName ->
|
||||
clientCommand.configuration.profiles[profileName]
|
||||
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
|
||||
}
|
||||
RemoteBuildCacheClient(profile).use { client ->
|
||||
val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
|
||||
val nonce = ByteArray(0xa0)
|
||||
random.nextBytes(nonce)
|
||||
client.healthCheck(nonce).thenApply { value ->
|
||||
if(value == null) {
|
||||
throw IllegalStateException("Empty response from server")
|
||||
}
|
||||
/* The response is expected to end with the nonce that was sent */
for(i in 0 until nonce.size) {
val j = value.size - nonce.size + i
if(j < 0 || nonce[i] != value[j]) {
throw IllegalStateException("Server nonce does not match")
}
}
|
||||
}.get()
|
||||
}
|
||||
}
|
||||
}
|
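The health check sends a random nonce with a TRACE request and expects the server to echo it back, so the command only has to verify that the response ends with the bytes it sent. A self-contained sketch of that suffix check:

// Returns true when `response` ends with `nonce`; this is the property the command verifies.
fun endsWithNonce(response: ByteArray, nonce: ByteArray): Boolean {
    if (response.size < nonce.size) return false
    val offset = response.size - nonce.size
    return nonce.indices.all { i -> nonce[i] == response[offset + i] }
}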
@@ -1,26 +1,26 @@
|
||||
package net.woggioni.gbcs.cli.impl.commands
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.gbcs.base.PasswordSecurity.hashPassword
|
||||
import net.woggioni.gbcs.cli.impl.GbcsCommand
|
||||
import net.woggioni.gbcs.cli.impl.converters.OutputStreamConverter
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.cli.impl.converters.OutputStreamConverter
|
||||
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
|
||||
import net.woggioni.jwo.UncloseableOutputStream
|
||||
import picocli.CommandLine
|
||||
import java.io.BufferedWriter
|
||||
import java.io.OutputStream
|
||||
import java.io.OutputStreamWriter
|
||||
import java.io.PrintWriter
|
||||
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "password",
|
||||
description = ["Generate a password hash to add to GBCS configuration file"],
|
||||
description = ["Generate a password hash to add to RBCS configuration file"],
|
||||
showDefaultValues = true
|
||||
)
|
||||
class PasswordHashCommand : GbcsCommand() {
|
||||
class PasswordHashCommand : RbcsCommand() {
|
||||
@CommandLine.Option(
|
||||
names = ["-o", "--output-file"],
|
||||
description = ["Write the output to a file instead of stdout"],
|
||||
converter = [OutputStreamConverter::class],
|
||||
defaultValue = "stdout",
|
||||
showDefaultValue = CommandLine.Help.Visibility.NEVER,
|
||||
paramLabel = "OUTPUT_FILE"
|
||||
)
|
||||
private var outputStream: OutputStream = UncloseableOutputStream(System.out)
|
||||
@@ -30,9 +30,8 @@ class PasswordHashCommand : GbcsCommand() {
|
||||
val password2 = String(System.console().readPassword("Type your password again for confirmation:"))
|
||||
if(password1 != password2) throw IllegalArgumentException("Passwords do not match")
|
||||
|
||||
BufferedWriter(OutputStreamWriter(outputStream, Charsets.UTF_8)).use {
|
||||
it.write(hashPassword(password1))
|
||||
it.newLine()
|
||||
PrintWriter(OutputStreamWriter(outputStream, Charsets.UTF_8)).use {
|
||||
it.println(hashPassword(password1))
|
||||
}
|
||||
}
|
||||
}
|
@@ -0,0 +1,48 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.cli.impl.converters.InputStreamConverter
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import picocli.CommandLine
|
||||
import java.io.InputStream
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "put",
|
||||
description = ["Add or replace a value to the cache with the specified key"],
|
||||
showDefaultValues = true
|
||||
)
|
||||
class PutCommand : RbcsCommand() {
|
||||
private val log = contextLogger()
|
||||
|
||||
@CommandLine.Spec
|
||||
private lateinit var spec: CommandLine.Model.CommandSpec
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-k", "--key"],
|
||||
description = ["The key for the new value"],
|
||||
paramLabel = "KEY"
|
||||
)
|
||||
private var key : String = ""
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-v", "--value"],
|
||||
description = ["Path to a file containing the value to be added (defaults to stdin)"],
|
||||
paramLabel = "VALUE_FILE",
|
||||
converter = [InputStreamConverter::class]
|
||||
)
|
||||
private var value : InputStream = System.`in`
|
||||
|
||||
override fun run() {
|
||||
val clientCommand = spec.parent().userObject() as ClientCommand
|
||||
val profile = clientCommand.profileName.let { profileName ->
|
||||
clientCommand.configuration.profiles[profileName]
|
||||
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
|
||||
}
|
||||
RemoteBuildCacheClient(profile).use { client ->
|
||||
value.use {
|
||||
client.put(key, it.readAllBytes())
|
||||
}.get()
|
||||
}
|
||||
}
|
||||
}
|
@@ -0,0 +1,76 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.cli.impl.converters.DurationConverter
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import net.woggioni.rbcs.common.debug
|
||||
import net.woggioni.rbcs.common.info
|
||||
import net.woggioni.rbcs.server.RemoteBuildCacheServer
|
||||
import net.woggioni.rbcs.server.RemoteBuildCacheServer.Companion.DEFAULT_CONFIGURATION_URL
|
||||
import net.woggioni.jwo.Application
|
||||
import net.woggioni.jwo.JWO
|
||||
import picocli.CommandLine
|
||||
import java.io.ByteArrayOutputStream
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.time.Duration
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "server",
|
||||
description = ["RBCS server"],
|
||||
showDefaultValues = true
|
||||
)
|
||||
class ServerCommand(app : Application) : RbcsCommand() {
|
||||
|
||||
private val log = contextLogger()
|
||||
|
||||
private fun createDefaultConfigurationFile(configurationFile: Path) {
|
||||
log.info {
|
||||
"Creating default configuration file at '$configurationFile'"
|
||||
}
|
||||
val defaultConfigurationFileResource = DEFAULT_CONFIGURATION_URL
|
||||
Files.newOutputStream(configurationFile).use { outputStream ->
|
||||
defaultConfigurationFileResource.openStream().use { inputStream ->
|
||||
JWO.copy(inputStream, outputStream)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-t", "--timeout"],
|
||||
description = ["Exit after the specified time"],
|
||||
paramLabel = "TIMEOUT",
|
||||
converter = [DurationConverter::class]
|
||||
)
|
||||
private var timeout: Duration? = null
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-c", "--config-file"],
|
||||
description = ["Read the application configuration from this file"],
|
||||
paramLabel = "CONFIG_FILE"
|
||||
)
|
||||
private var configurationFile: Path = findConfigurationFile(app, "rbcs-server.xml")
|
||||
|
||||
override fun run() {
|
||||
if (!Files.exists(configurationFile)) {
|
||||
Files.createDirectories(configurationFile.parent)
|
||||
createDefaultConfigurationFile(configurationFile)
|
||||
}
|
||||
|
||||
val configuration = RemoteBuildCacheServer.loadConfiguration(configurationFile)
|
||||
log.debug {
|
||||
ByteArrayOutputStream().also {
|
||||
RemoteBuildCacheServer.dumpConfiguration(configuration, it)
|
||||
}.let {
|
||||
"Server configuration:\n${String(it.toByteArray())}"
|
||||
}
|
||||
}
|
||||
val server = RemoteBuildCacheServer(configuration)
|
||||
server.run().use { server ->
|
||||
timeout?.let {
|
||||
Thread.sleep(it)
|
||||
server.shutdown()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -0,0 +1,11 @@
|
||||
package net.woggioni.rbcs.cli.impl.converters
|
||||
|
||||
import picocli.CommandLine
|
||||
import java.time.Duration
|
||||
|
||||
|
||||
class DurationConverter : CommandLine.ITypeConverter<Duration> {
|
||||
override fun convert(value: String): Duration {
|
||||
return Duration.parse(value)
|
||||
}
|
||||
}
|
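The converter simply delegates to java.time.Duration.parse, so options using it (such as the server's --timeout) take ISO-8601 duration strings; for example:

import java.time.Duration

val thirtySeconds = Duration.parse("PT30S")      // 30 seconds
val twoHours = Duration.parse("PT2H")            // 2 hours
val oneAndAHalfDays = Duration.parse("P1DT12H")  // 1 day and 12 hours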
@@ -0,0 +1,13 @@
|
||||
package net.woggioni.rbcs.cli.impl.converters
|
||||
|
||||
import picocli.CommandLine
|
||||
import java.io.InputStream
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Paths
|
||||
|
||||
|
||||
class InputStreamConverter : CommandLine.ITypeConverter<InputStream> {
|
||||
override fun convert(value: String): InputStream {
|
||||
return Files.newInputStream(Paths.get(value))
|
||||
}
|
||||
}
|
@@ -1,4 +1,4 @@
|
||||
package net.woggioni.gbcs.cli.impl.converters
|
||||
package net.woggioni.rbcs.cli.impl.converters
|
||||
|
||||
import picocli.CommandLine
|
||||
import java.io.OutputStream
|
@@ -12,10 +12,7 @@
|
||||
</encoder>
|
||||
</appender>
|
||||
|
||||
<root level="debug">
|
||||
<root level="info">
|
||||
<appender-ref ref="console"/>
|
||||
</root>
|
||||
<logger name="io.netty" level="debug"/>
|
||||
<logger name="com.google.code.yanf4j" level="warn"/>
|
||||
<logger name="net.rubyeye.xmemcached" level="warn"/>
|
||||
</configuration>
|
17
rbcs-client/build.gradle
Normal file
@@ -0,0 +1,17 @@
|
||||
plugins {
|
||||
id 'java-library'
|
||||
alias catalog.plugins.kotlin.jvm
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation project(':rbcs-api')
|
||||
implementation project(':rbcs-common')
|
||||
implementation catalog.picocli
|
||||
implementation catalog.slf4j.api
|
||||
implementation catalog.netty.buffer
|
||||
implementation catalog.netty.codec.http
|
||||
|
||||
testRuntimeOnly catalog.logback.classic
|
||||
}
|
||||
|
||||
|
17
rbcs-client/src/main/java/module-info.java
Normal file
@@ -0,0 +1,17 @@
|
||||
module net.woggioni.rbcs.client {
|
||||
requires io.netty.handler;
|
||||
requires io.netty.codec.http;
|
||||
requires io.netty.transport;
|
||||
requires kotlin.stdlib;
|
||||
requires io.netty.common;
|
||||
requires io.netty.buffer;
|
||||
requires java.xml;
|
||||
requires net.woggioni.rbcs.common;
|
||||
requires net.woggioni.rbcs.api;
|
||||
requires io.netty.codec;
|
||||
requires org.slf4j;
|
||||
|
||||
exports net.woggioni.rbcs.client;
|
||||
|
||||
opens net.woggioni.rbcs.client.schema;
|
||||
}
|
344
rbcs-client/src/main/kotlin/net/woggioni/rbcs/client/Client.kt
Normal file
@@ -0,0 +1,344 @@
|
||||
package net.woggioni.rbcs.client
|
||||
|
||||
import io.netty.bootstrap.Bootstrap
|
||||
import io.netty.buffer.ByteBuf
|
||||
import io.netty.buffer.Unpooled
|
||||
import io.netty.channel.Channel
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import io.netty.channel.ChannelOption
|
||||
import io.netty.channel.ChannelPipeline
|
||||
import io.netty.channel.SimpleChannelInboundHandler
|
||||
import io.netty.channel.nio.NioEventLoopGroup
|
||||
import io.netty.channel.pool.AbstractChannelPoolHandler
|
||||
import io.netty.channel.pool.ChannelPool
|
||||
import io.netty.channel.pool.FixedChannelPool
|
||||
import io.netty.channel.socket.nio.NioSocketChannel
|
||||
import io.netty.handler.codec.DecoderException
|
||||
import io.netty.handler.codec.http.DefaultFullHttpRequest
|
||||
import io.netty.handler.codec.http.FullHttpRequest
|
||||
import io.netty.handler.codec.http.FullHttpResponse
|
||||
import io.netty.handler.codec.http.HttpClientCodec
|
||||
import io.netty.handler.codec.http.HttpContentDecompressor
|
||||
import io.netty.handler.codec.http.HttpHeaderNames
|
||||
import io.netty.handler.codec.http.HttpHeaderValues
|
||||
import io.netty.handler.codec.http.HttpMethod
|
||||
import io.netty.handler.codec.http.HttpObjectAggregator
|
||||
import io.netty.handler.codec.http.HttpResponseStatus
|
||||
import io.netty.handler.codec.http.HttpVersion
|
||||
import io.netty.handler.ssl.SslContext
|
||||
import io.netty.handler.ssl.SslContextBuilder
|
||||
import io.netty.handler.stream.ChunkedWriteHandler
|
||||
import io.netty.util.concurrent.Future
|
||||
import io.netty.util.concurrent.GenericFutureListener
|
||||
import net.woggioni.rbcs.client.impl.Parser
|
||||
import net.woggioni.rbcs.common.Xml
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import net.woggioni.rbcs.common.debug
|
||||
import net.woggioni.rbcs.common.trace
|
||||
import java.net.InetSocketAddress
|
||||
import java.net.URI
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.security.PrivateKey
|
||||
import java.security.cert.X509Certificate
|
||||
import java.time.Duration
|
||||
import java.util.Base64
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.concurrent.atomic.AtomicInteger
|
||||
import io.netty.util.concurrent.Future as NettyFuture
|
||||
|
||||
|
||||
class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoCloseable {
|
||||
private val group: NioEventLoopGroup
|
||||
private var sslContext: SslContext
|
||||
private val log = contextLogger()
|
||||
private val pool: ChannelPool
|
||||
|
||||
data class Configuration(
|
||||
val profiles: Map<String, Profile>
|
||||
) {
|
||||
sealed class Authentication {
|
||||
data class TlsClientAuthenticationCredentials(
|
||||
val key: PrivateKey,
|
||||
val certificateChain: Array<X509Certificate>
|
||||
) : Authentication()
|
||||
|
||||
data class BasicAuthenticationCredentials(val username: String, val password: String) : Authentication()
|
||||
}
|
||||
|
||||
class RetryPolicy(
|
||||
val maxAttempts: Int,
|
||||
val initialDelayMillis: Long,
|
||||
val exp: Double
|
||||
)
|
||||
|
||||
data class Profile(
|
||||
val serverURI: URI,
|
||||
val authentication: Authentication?,
|
||||
val connectionTimeout: Duration?,
|
||||
val maxConnections: Int,
|
||||
val retryPolicy: RetryPolicy?,
|
||||
)
|
||||
|
||||
companion object {
|
||||
fun parse(path: Path): Configuration {
|
||||
return Files.newInputStream(path).use {
|
||||
Xml.parseXml(path.toUri().toURL(), it)
|
||||
}.let(Parser::parse)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
init {
|
||||
group = NioEventLoopGroup()
|
||||
sslContext = SslContextBuilder.forClient().also { builder ->
|
||||
(profile.authentication as? Configuration.Authentication.TlsClientAuthenticationCredentials)?.let { tlsClientAuthenticationCredentials ->
|
||||
builder.keyManager(
|
||||
tlsClientAuthenticationCredentials.key,
|
||||
*tlsClientAuthenticationCredentials.certificateChain
|
||||
)
|
||||
}
|
||||
}.build()
|
||||
|
||||
val (scheme, host, port) = profile.serverURI.run {
|
||||
Triple(
|
||||
if (scheme == null) "http" else profile.serverURI.scheme,
|
||||
host,
|
||||
port.takeIf { it > 0 } ?: if ("https" == scheme.lowercase()) 443 else 80
|
||||
)
|
||||
}
|
||||
|
||||
val bootstrap = Bootstrap().apply {
|
||||
group(group)
|
||||
channel(NioSocketChannel::class.java)
|
||||
option(ChannelOption.TCP_NODELAY, true)
|
||||
option(ChannelOption.SO_KEEPALIVE, true)
|
||||
remoteAddress(InetSocketAddress(host, port))
|
||||
profile.connectionTimeout?.let {
|
||||
option(ChannelOption.CONNECT_TIMEOUT_MILLIS, it.toMillis().toInt())
|
||||
}
|
||||
}
|
||||
val channelPoolHandler = object : AbstractChannelPoolHandler() {
|
||||
|
||||
@Volatile
|
||||
private var connectionCount = AtomicInteger()
|
||||
|
||||
@Volatile
|
||||
private var leaseCount = AtomicInteger()
|
||||
|
||||
override fun channelReleased(ch: Channel) {
|
||||
val activeLeases = leaseCount.decrementAndGet()
|
||||
log.trace {
|
||||
"Released channel ${ch.id().asShortText()}, number of active leases: $activeLeases"
|
||||
}
|
||||
}
|
||||
|
||||
override fun channelAcquired(ch: Channel) {
|
||||
val activeLeases = leaseCount.getAndIncrement()
|
||||
log.trace {
|
||||
"Acquired channel ${ch.id().asShortText()}, number of active leases: $activeLeases"
|
||||
}
|
||||
}
|
||||
|
||||
override fun channelCreated(ch: Channel) {
|
||||
val connectionId = connectionCount.getAndIncrement()
|
||||
log.debug {
|
||||
"Created connection $connectionId, total number of active connections: $connectionId"
|
||||
}
|
||||
ch.closeFuture().addListener {
|
||||
val activeConnections = connectionCount.decrementAndGet()
|
||||
log.debug {
|
||||
"Closed connection $connectionId, total number of active connections: $activeConnections"
|
||||
}
|
||||
}
|
||||
val pipeline: ChannelPipeline = ch.pipeline()
|
||||
|
||||
// Add SSL handler if needed
|
||||
if ("https".equals(scheme, ignoreCase = true)) {
|
||||
pipeline.addLast("ssl", sslContext.newHandler(ch.alloc(), host, port))
|
||||
}
|
||||
|
||||
// HTTP handlers
|
||||
pipeline.addLast("codec", HttpClientCodec())
|
||||
pipeline.addLast("decompressor", HttpContentDecompressor())
|
||||
pipeline.addLast("aggregator", HttpObjectAggregator(134217728))
|
||||
pipeline.addLast("chunked", ChunkedWriteHandler())
|
||||
}
|
||||
}
|
||||
pool = FixedChannelPool(bootstrap, channelPoolHandler, profile.maxConnections)
|
||||
}
|
||||
|
||||
private fun executeWithRetry(operation: () -> CompletableFuture<FullHttpResponse>): CompletableFuture<FullHttpResponse> {
|
||||
val retryPolicy = profile.retryPolicy
|
||||
return if (retryPolicy != null) {
|
||||
val outcomeHandler = OutcomeHandler<FullHttpResponse> { outcome ->
|
||||
when (outcome) {
|
||||
is OperationOutcome.Success -> {
|
||||
val response = outcome.result
|
||||
val status = response.status()
|
||||
when (status) {
|
||||
HttpResponseStatus.TOO_MANY_REQUESTS -> {
|
||||
val retryAfter = response.headers()[HttpHeaderNames.RETRY_AFTER]?.let { headerValue ->
|
||||
try {
|
||||
headerValue.toLong() * 1000
|
||||
} catch (nfe: NumberFormatException) {
|
||||
null
|
||||
}
|
||||
}
|
||||
OutcomeHandlerResult.Retry(retryAfter)
|
||||
}
|
||||
|
||||
HttpResponseStatus.INTERNAL_SERVER_ERROR, HttpResponseStatus.SERVICE_UNAVAILABLE ->
|
||||
OutcomeHandlerResult.Retry()
|
||||
|
||||
else -> OutcomeHandlerResult.DoNotRetry()
|
||||
}
|
||||
}
|
||||
|
||||
is OperationOutcome.Failure -> {
|
||||
OutcomeHandlerResult.Retry()
|
||||
}
|
||||
}
|
||||
}
|
||||
executeWithRetry(
|
||||
group,
|
||||
retryPolicy.maxAttempts,
|
||||
retryPolicy.initialDelayMillis.toDouble(),
|
||||
retryPolicy.exp,
|
||||
outcomeHandler,
|
||||
operation
|
||||
)
|
||||
} else {
|
||||
operation()
|
||||
}
|
||||
}
|
||||
|
||||
fun healthCheck(nonce: ByteArray): CompletableFuture<ByteArray?> {
|
||||
return executeWithRetry {
|
||||
sendRequest(profile.serverURI, HttpMethod.TRACE, nonce)
|
||||
}.thenApply {
|
||||
val status = it.status()
|
||||
if (it.status() != HttpResponseStatus.OK) {
|
||||
throw HttpException(status)
|
||||
} else {
|
||||
it.content()
|
||||
}
|
||||
}.thenApply { maybeByteBuf ->
|
||||
maybeByteBuf?.let {
|
||||
val result = ByteArray(it.readableBytes())
|
||||
it.getBytes(0, result)
|
||||
result
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun get(key: String): CompletableFuture<ByteArray?> {
|
||||
return executeWithRetry {
|
||||
sendRequest(profile.serverURI.resolve(key), HttpMethod.GET, null)
|
||||
}.thenApply {
|
||||
val status = it.status()
|
||||
if (it.status() == HttpResponseStatus.NOT_FOUND) {
|
||||
null
|
||||
} else if (it.status() != HttpResponseStatus.OK) {
|
||||
throw HttpException(status)
|
||||
} else {
|
||||
it.content()
|
||||
}
|
||||
}.thenApply { maybeByteBuf ->
|
||||
maybeByteBuf?.let {
|
||||
val result = ByteArray(it.readableBytes())
|
||||
it.getBytes(0, result)
|
||||
result
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun put(key: String, content: ByteArray): CompletableFuture<Unit> {
|
||||
return executeWithRetry {
|
||||
sendRequest(profile.serverURI.resolve(key), HttpMethod.PUT, content)
|
||||
}.thenApply {
|
||||
val status = it.status()
|
||||
if (it.status() != HttpResponseStatus.CREATED && it.status() != HttpResponseStatus.OK) {
|
||||
throw HttpException(status)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private fun sendRequest(uri: URI, method: HttpMethod, body: ByteArray?): CompletableFuture<FullHttpResponse> {
|
||||
val responseFuture = CompletableFuture<FullHttpResponse>()
|
||||
// Custom handler for processing responses
|
||||
pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {
|
||||
override fun operationComplete(channelFuture: Future<Channel>) {
|
||||
if (channelFuture.isSuccess) {
|
||||
val channel = channelFuture.now
|
||||
val pipeline = channel.pipeline()
|
||||
channel.pipeline().addLast("handler", object : SimpleChannelInboundHandler<FullHttpResponse>() {
|
||||
override fun channelRead0(
|
||||
ctx: ChannelHandlerContext,
|
||||
response: FullHttpResponse
|
||||
) {
|
||||
pipeline.removeLast()
|
||||
pool.release(channel)
|
||||
responseFuture.complete(response)
|
||||
}
|
||||
|
||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
||||
val ex = when (cause) {
|
||||
is DecoderException -> cause.cause
|
||||
else -> cause
|
||||
}
|
||||
responseFuture.completeExceptionally(ex)
|
||||
ctx.close()
|
||||
pipeline.removeLast()
|
||||
pool.release(channel)
|
||||
}
|
||||
})
|
||||
// Prepare the HTTP request
|
||||
val request: FullHttpRequest = let {
|
||||
val content: ByteBuf? = body?.takeIf(ByteArray::isNotEmpty)?.let(Unpooled::wrappedBuffer)
|
||||
DefaultFullHttpRequest(
|
||||
HttpVersion.HTTP_1_1,
|
||||
method,
|
||||
uri.rawPath,
|
||||
content ?: Unpooled.buffer(0)
|
||||
).apply {
|
||||
headers().apply {
|
||||
if (content != null) {
|
||||
set(HttpHeaderNames.CONTENT_TYPE, HttpHeaderValues.APPLICATION_OCTET_STREAM)
|
||||
set(HttpHeaderNames.CONTENT_LENGTH, content.readableBytes())
|
||||
}
|
||||
set(HttpHeaderNames.HOST, profile.serverURI.host)
|
||||
set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
|
||||
set(
|
||||
HttpHeaderNames.ACCEPT_ENCODING,
|
||||
HttpHeaderValues.GZIP.toString() + "," + HttpHeaderValues.DEFLATE.toString()
|
||||
)
|
||||
// Add basic auth if configured
|
||||
(profile.authentication as? Configuration.Authentication.BasicAuthenticationCredentials)?.let { credentials ->
|
||||
val auth = "${credentials.username}:${credentials.password}"
|
||||
val encodedAuth = Base64.getEncoder().encodeToString(auth.toByteArray())
|
||||
set(HttpHeaderNames.AUTHORIZATION, "Basic $encodedAuth")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Set headers
|
||||
// Send the request
|
||||
channel.writeAndFlush(request)
|
||||
} else {
|
||||
responseFuture.completeExceptionally(channelFuture.cause())
|
||||
}
|
||||
}
|
||||
})
|
||||
return responseFuture
|
||||
}
|
||||
|
||||
fun shutDown(): NettyFuture<*> {
|
||||
return group.shutdownGracefully()
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
shutDown().sync()
|
||||
}
|
||||
}
|
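A minimal sketch of driving the client API directly; the configuration file name and the "default" profile are illustrative assumptions, not part of the code above:

import net.woggioni.rbcs.client.RemoteBuildCacheClient
import java.nio.file.Path

fun main() {
    val configuration = RemoteBuildCacheClient.Configuration.parse(Path.of("rbcs-client.xml"))
    val profile = configuration.profiles["default"]
        ?: throw IllegalArgumentException("Profile 'default' does not exist in configuration")
    RemoteBuildCacheClient(profile).use { client ->
        client.put("some-key", "some-value".toByteArray()).get()
        val value = client.get("some-key").get()
        println(value?.toString(Charsets.UTF_8))
    }
}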
@@ -0,0 +1,9 @@
|
||||
package net.woggioni.rbcs.client
|
||||
|
||||
import io.netty.handler.codec.http.HttpResponseStatus
|
||||
|
||||
class HttpException(private val status : HttpResponseStatus) : RuntimeException(status.reasonPhrase()) {
|
||||
|
||||
override val message: String
|
||||
get() = "Http status ${status.code()}: ${status.reasonPhrase()}"
|
||||
}
|
@@ -0,0 +1,108 @@
|
||||
package net.woggioni.rbcs.client.impl
|
||||
|
||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.Xml.Companion.asIterable
|
||||
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
|
||||
import org.w3c.dom.Document
|
||||
import java.net.URI
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.security.KeyStore
|
||||
import java.security.PrivateKey
|
||||
import java.security.cert.X509Certificate
|
||||
import java.time.Duration
|
||||
|
||||
object Parser {
|
||||
|
||||
fun parse(document: Document): RemoteBuildCacheClient.Configuration {
|
||||
val root = document.documentElement
|
||||
val profiles = mutableMapOf<String, RemoteBuildCacheClient.Configuration.Profile>()
|
||||
|
||||
for (child in root.asIterable()) {
|
||||
val tagName = child.localName
|
||||
when (tagName) {
|
||||
"profile" -> {
|
||||
val name =
|
||||
child.renderAttribute("name") ?: throw ConfigurationException("name attribute is required")
|
||||
val uri = child.renderAttribute("base-url")?.let(::URI)
|
||||
?: throw ConfigurationException("base-url attribute is required")
|
||||
var authentication: RemoteBuildCacheClient.Configuration.Authentication? = null
|
||||
var retryPolicy: RemoteBuildCacheClient.Configuration.RetryPolicy? = null
|
||||
for (gchild in child.asIterable()) {
|
||||
when (gchild.localName) {
|
||||
"tls-client-auth" -> {
|
||||
val keyStoreFile = gchild.renderAttribute("key-store-file")
|
||||
val keyStorePassword =
|
||||
gchild.renderAttribute("key-store-password")
|
||||
val keyAlias = gchild.renderAttribute("key-alias")
|
||||
val keyPassword = gchild.renderAttribute("key-password")
|
||||
|
||||
val keystore = KeyStore.getInstance("PKCS12").apply {
|
||||
Files.newInputStream(Path.of(keyStoreFile)).use {
|
||||
load(it, keyStorePassword?.toCharArray())
|
||||
}
|
||||
}
|
||||
val key = keystore.getKey(keyAlias, keyPassword?.toCharArray()) as PrivateKey
|
||||
val certChain = keystore.getCertificateChain(keyAlias).asSequence()
|
||||
.map { it as X509Certificate }
|
||||
.toList()
|
||||
.toTypedArray()
|
||||
authentication =
|
||||
RemoteBuildCacheClient.Configuration.Authentication.TlsClientAuthenticationCredentials(
|
||||
key,
|
||||
certChain
|
||||
)
|
||||
}
|
||||
|
||||
"basic-auth" -> {
|
||||
val username = gchild.renderAttribute("user")
|
||||
?: throw ConfigurationException("username attribute is required")
|
||||
val password = gchild.renderAttribute("password")
|
||||
?: throw ConfigurationException("password attribute is required")
|
||||
authentication =
|
||||
RemoteBuildCacheClient.Configuration.Authentication.BasicAuthenticationCredentials(
|
||||
username,
|
||||
password
|
||||
)
|
||||
}
|
||||
|
||||
"retry-policy" -> {
|
||||
val maxAttempts =
|
||||
gchild.renderAttribute("max-attempts")
|
||||
?.let(String::toInt)
|
||||
?: throw ConfigurationException("max-attempts attribute is required")
|
||||
val initialDelay =
|
||||
gchild.renderAttribute("initial-delay")
|
||||
?.let(Duration::parse)
|
||||
?: Duration.ofSeconds(1)
|
||||
val exp =
|
||||
gchild.renderAttribute("exp")
|
||||
?.let(String::toDouble)
|
||||
?: 2.0
|
||||
retryPolicy = RemoteBuildCacheClient.Configuration.RetryPolicy(
|
||||
maxAttempts,
|
||||
initialDelay.toMillis(),
|
||||
exp.toDouble()
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
val maxConnections = child.renderAttribute("max-connections")
|
||||
?.let(String::toInt)
|
||||
?: 50
|
||||
val connectionTimeout = child.renderAttribute("connection-timeout")
|
||||
?.let(Duration::parse)
|
||||
profiles[name] = RemoteBuildCacheClient.Configuration.Profile(
|
||||
uri,
|
||||
authentication,
|
||||
connectionTimeout,
|
||||
maxConnections,
|
||||
retryPolicy
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
return RemoteBuildCacheClient.Configuration(profiles)
|
||||
}
|
||||
}
|
@@ -0,0 +1,75 @@
|
||||
package net.woggioni.rbcs.client
|
||||
|
||||
import io.netty.util.concurrent.EventExecutorGroup
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
sealed class OperationOutcome<T> {
|
||||
class Success<T>(val result: T) : OperationOutcome<T>()
|
||||
class Failure<T>(val ex: Throwable) : OperationOutcome<T>()
|
||||
}
|
||||
|
||||
sealed class OutcomeHandlerResult {
|
||||
class Retry(val suggestedDelayMillis: Long? = null) : OutcomeHandlerResult()
|
||||
class DoNotRetry : OutcomeHandlerResult()
|
||||
}
|
||||
|
||||
fun interface OutcomeHandler<T> {
|
||||
fun shouldRetry(result: OperationOutcome<T>): OutcomeHandlerResult
|
||||
}
|
||||
|
||||
fun <T> executeWithRetry(
|
||||
eventExecutorGroup: EventExecutorGroup,
|
||||
maxAttempts: Int,
|
||||
initialDelay: Double,
|
||||
exp: Double,
|
||||
outcomeHandler: OutcomeHandler<T>,
|
||||
cb: () -> CompletableFuture<T>
|
||||
): CompletableFuture<T> {
|
||||
val finalResult = cb()
|
||||
var future = finalResult
|
||||
var shortCircuit = false
|
||||
for (i in 1 until maxAttempts) {
|
||||
future = future.handle { result, ex ->
|
||||
val operationOutcome = if (ex == null) {
|
||||
OperationOutcome.Success(result)
|
||||
} else {
|
||||
OperationOutcome.Failure(ex.cause ?: ex)
|
||||
}
|
||||
if (shortCircuit) {
|
||||
when(operationOutcome) {
|
||||
is OperationOutcome.Failure -> throw operationOutcome.ex
|
||||
is OperationOutcome.Success -> CompletableFuture.completedFuture(operationOutcome.result)
|
||||
}
|
||||
} else {
|
||||
when(val outcomeHandlerResult = outcomeHandler.shouldRetry(operationOutcome)) {
|
||||
is OutcomeHandlerResult.Retry -> {
|
||||
val res = CompletableFuture<T>()
|
||||
val delay = run {
|
||||
val scheduledDelay = (initialDelay * Math.pow(exp, i.toDouble())).toLong()
|
||||
outcomeHandlerResult.suggestedDelayMillis?.coerceAtMost(scheduledDelay) ?: scheduledDelay
|
||||
}
|
||||
eventExecutorGroup.schedule({
|
||||
cb().handle { result, ex ->
|
||||
if (ex == null) {
|
||||
res.complete(result)
|
||||
} else {
|
||||
res.completeExceptionally(ex)
|
||||
}
|
||||
}
|
||||
}, delay, TimeUnit.MILLISECONDS)
|
||||
res
|
||||
}
|
||||
is OutcomeHandlerResult.DoNotRetry -> {
|
||||
shortCircuit = true
|
||||
when(operationOutcome) {
|
||||
is OperationOutcome.Failure -> throw operationOutcome.ex
|
||||
is OperationOutcome.Success -> CompletableFuture.completedFuture(operationOutcome.result)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}.thenCompose { it }
|
||||
}
|
||||
return future
|
||||
}
|
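A small sketch of using executeWithRetry on its own, assuming the caller can see the declarations above; the flaky operation and the retry parameters are made up for illustration, and the final get() still throws if every attempt fails:

import io.netty.util.concurrent.DefaultEventExecutorGroup
import java.util.concurrent.CompletableFuture

fun main() {
    val executor = DefaultEventExecutorGroup(1)
    val outcomeHandler = OutcomeHandler<Int> { outcome ->
        when (outcome) {
            is OperationOutcome.Success -> OutcomeHandlerResult.DoNotRetry()
            is OperationOutcome.Failure -> OutcomeHandlerResult.Retry(null)
        }
    }
    // Up to 5 attempts, 100 ms initial delay, exponential backoff with factor 2
    val result = executeWithRetry(executor, 5, 100.0, 2.0, outcomeHandler) {
        CompletableFuture.supplyAsync {
            if (Math.random() < 0.5) throw IllegalStateException("transient failure") else 42
        }
    }.get()
    println(result)
    executor.shutdownGracefully()
}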
@@ -0,0 +1,50 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<xs:schema targetNamespace="urn:net.woggioni.rbcs.client"
|
||||
xmlns:xs="http://www.w3.org/2001/XMLSchema"
|
||||
xmlns:rbcs-client="urn:net.woggioni.rbcs.client"
|
||||
elementFormDefault="unqualified"
|
||||
>
|
||||
<xs:element name="profiles" type="rbcs-client:profilesType"/>
|
||||
|
||||
<xs:complexType name="profilesType">
|
||||
<xs:sequence minOccurs="0">
|
||||
<xs:element name="profile" type="rbcs-client:profileType" maxOccurs="unbounded"/>
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="profileType">
|
||||
<xs:sequence>
|
||||
<xs:choice>
|
||||
<xs:element name="no-auth" type="rbcs-client:noAuthType"/>
|
||||
<xs:element name="basic-auth" type="rbcs-client:basicAuthType"/>
|
||||
<xs:element name="tls-client-auth" type="rbcs-client:tlsClientAuthType"/>
|
||||
</xs:choice>
|
||||
<xs:element name="retry-policy" type="rbcs-client:retryType" minOccurs="0"/>
|
||||
</xs:sequence>
|
||||
<xs:attribute name="name" type="xs:token" use="required"/>
|
||||
<xs:attribute name="base-url" type="xs:anyURI" use="required"/>
|
||||
<xs:attribute name="max-connections" type="xs:positiveInteger" default="50"/>
|
||||
<xs:attribute name="connection-timeout" type="xs:duration"/>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="noAuthType"/>
|
||||
|
||||
<xs:complexType name="basicAuthType">
|
||||
<xs:attribute name="user" type="xs:token" use="required"/>
|
||||
<xs:attribute name="password" type="xs:string" use="required"/>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="tlsClientAuthType">
|
||||
<xs:attribute name="key-store-file" type="xs:anyURI" use="required"/>
|
||||
<xs:attribute name="key-store-password" type="xs:string" use="required"/>
|
||||
<xs:attribute name="key-alias" type="xs:token" use="required"/>
|
||||
<xs:attribute name="key-password" type="xs:string" use="optional"/>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="retryType">
|
||||
<xs:attribute name="max-attempts" type="xs:positiveInteger" use="required"/>
|
||||
<xs:attribute name="initial-delay" type="xs:duration" default="PT1S"/>
|
||||
<xs:attribute name="exp" type="xs:double" default="2.0"/>
|
||||
</xs:complexType>
|
||||
|
||||
</xs:schema>
|
@@ -0,0 +1,148 @@
|
||||
package net.woggioni.rbcs.client
|
||||
|
||||
import io.netty.util.concurrent.DefaultEventExecutorGroup
|
||||
import io.netty.util.concurrent.EventExecutorGroup
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import org.junit.jupiter.api.Assertions
|
||||
import org.junit.jupiter.api.extension.ExtensionContext
|
||||
import org.junit.jupiter.params.ParameterizedTest
|
||||
import org.junit.jupiter.params.provider.Arguments
|
||||
import org.junit.jupiter.params.provider.ArgumentsProvider
|
||||
import org.junit.jupiter.params.provider.ArgumentsSource
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.stream.Stream
|
||||
import kotlin.random.Random
|
||||
|
||||
class RetryTest {
|
||||
|
||||
data class TestArgs(
|
||||
val seed: Int,
|
||||
val maxAttempt: Int,
|
||||
val initialDelay: Double,
|
||||
val exp: Double,
|
||||
)
|
||||
|
||||
class TestArguments : ArgumentsProvider {
|
||||
override fun provideArguments(context: ExtensionContext): Stream<out Arguments> {
|
||||
return Stream.of(
|
||||
TestArgs(
|
||||
seed = 101325,
|
||||
maxAttempt = 5,
|
||||
initialDelay = 50.0,
|
||||
exp = 2.0,
|
||||
),
|
||||
TestArgs(
|
||||
seed = 101325,
|
||||
maxAttempt = 20,
|
||||
initialDelay = 100.0,
|
||||
exp = 1.1,
|
||||
),
|
||||
TestArgs(
|
||||
seed = 123487,
|
||||
maxAttempt = 20,
|
||||
initialDelay = 100.0,
|
||||
exp = 2.0,
|
||||
),
|
||||
TestArgs(
|
||||
seed = 20082024,
|
||||
maxAttempt = 10,
|
||||
initialDelay = 100.0,
|
||||
exp = 2.0,
|
||||
)
|
||||
).map {
|
||||
object: Arguments {
|
||||
override fun get() = arrayOf(it)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ArgumentsSource(TestArguments::class)
|
||||
@ParameterizedTest
|
||||
fun test(testArgs: TestArgs) {
|
||||
val log = contextLogger()
|
||||
log.debug("Start")
|
||||
val executor: EventExecutorGroup = DefaultEventExecutorGroup(1)
|
||||
val attempts = mutableListOf<Pair<Long, OperationOutcome<Int>>>()
|
||||
val outcomeHandler = OutcomeHandler<Int> { outcome ->
|
||||
when(outcome) {
|
||||
is OperationOutcome.Success -> {
|
||||
if(outcome.result % 10 == 0) {
|
||||
OutcomeHandlerResult.DoNotRetry()
|
||||
} else {
|
||||
OutcomeHandlerResult.Retry(null)
|
||||
}
|
||||
}
|
||||
is OperationOutcome.Failure -> {
|
||||
when(outcome.ex) {
|
||||
is IllegalStateException -> {
|
||||
log.debug(outcome.ex.message, outcome.ex)
|
||||
OutcomeHandlerResult.Retry(null)
|
||||
}
|
||||
else -> {
|
||||
OutcomeHandlerResult.DoNotRetry()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
val random = Random(testArgs.seed)
|
||||
|
||||
val future =
|
||||
executeWithRetry(executor, testArgs.maxAttempt, testArgs.initialDelay, testArgs.exp, outcomeHandler) {
|
||||
val now = System.nanoTime()
|
||||
val result = CompletableFuture<Int>()
|
||||
executor.submit {
|
||||
val n = random.nextInt(0, Integer.MAX_VALUE)
|
||||
log.debug("Got new number: {}", n)
|
||||
if(n % 3 == 0) {
|
||||
val ex = IllegalStateException("Value $n can be divided by 3")
|
||||
result.completeExceptionally(ex)
|
||||
attempts += now to OperationOutcome.Failure(ex)
|
||||
} else if(n % 7 == 0) {
|
||||
val ex = RuntimeException("Value $n can be divided by 7")
|
||||
result.completeExceptionally(ex)
|
||||
attempts += now to OperationOutcome.Failure(ex)
|
||||
} else {
|
||||
result.complete(n)
|
||||
attempts += now to OperationOutcome.Success(n)
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
Assertions.assertTrue(attempts.size <= testArgs.maxAttempt)
|
||||
val result = future.handle { res, ex ->
|
||||
if(ex != null) {
|
||||
val err = ex.cause ?: ex
|
||||
log.debug(err.message, err)
|
||||
OperationOutcome.Failure(err)
|
||||
} else {
|
||||
OperationOutcome.Success(res)
|
||||
}
|
||||
}.get()
|
||||
for ((index, attempt) in attempts.withIndex()) {
|
||||
val (timestamp, value) = attempt
|
||||
if (index > 0) {
|
||||
/* Check the delay for subsequent attempts is correct */
|
||||
val previousAttempt = attempts[index - 1]
|
||||
val expectedTimestamp =
|
||||
previousAttempt.first + testArgs.initialDelay * Math.pow(testArgs.exp, index.toDouble()) * 1e6
|
||||
val actualTimestamp = timestamp
|
||||
val err = Math.abs(expectedTimestamp - actualTimestamp) / expectedTimestamp
|
||||
Assertions.assertTrue(err < 1e-3)
|
||||
}
|
||||
if (index == attempts.size - 1 && index < testArgs.maxAttempt - 1) {
|
||||
/*
|
||||
* If the last attempt index is lower than the maximum number of attempts, then
|
||||
* check the outcome handler returns DoNotRetry
|
||||
*/
|
||||
Assertions.assertTrue(outcomeHandler.shouldRetry(value) is OutcomeHandlerResult.DoNotRetry)
|
||||
} else if (index < attempts.size - 1) {
|
||||
/*
|
||||
* If the attempt is not the last attempt check the outcome handler returns Retry
|
||||
*/
|
||||
Assertions.assertTrue(outcomeHandler.shouldRetry(value) is OutcomeHandlerResult.Retry)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
@@ -15,7 +15,7 @@
|
||||
<root level="info">
|
||||
<appender-ref ref="console"/>
|
||||
</root>
|
||||
<logger name="io.netty" level="debug"/>
|
||||
<logger name="io.netty" level="info"/>
|
||||
<logger name="com.google.code.yanf4j" level="warn"/>
|
||||
<logger name="net.rubyeye.xmemcached" level="warn"/>
|
||||
</configuration>
|
@@ -0,0 +1,16 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<rbcs-client:profiles xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:rbcs-client="urn:net.woggioni.rbcs.client"
|
||||
xs:schemaLocation="urn:net.woggioni.rbcs.client jms://net.woggioni.rbcs.client/net/woggioni/rbcs/client/schema/rbcs-client.xsd"
|
||||
>
|
||||
<profile name="profile1" base-url="https://rbcs1.example.com/">
|
||||
<tls-client-auth
|
||||
key-store-file="keystore.pfx"
|
||||
key-store-password="password"
|
||||
key-alias="woggioni@c962475fa38"
|
||||
key-password="key-password"/>
|
||||
</profile>
|
||||
<profile name="profile2" base-url="https://rbcs2.example.com/">
|
||||
<basic-auth user="user" password="password"/>
|
||||
</profile>
|
||||
</rbcs-client:profiles>
|
@@ -6,8 +6,10 @@ plugins {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
compileOnly project(':gbcs-api')
|
||||
compileOnly catalog.slf4j.api
|
||||
implementation project(':rbcs-api')
|
||||
implementation catalog.slf4j.api
|
||||
implementation catalog.jwo
|
||||
implementation catalog.netty.buffer
|
||||
}
|
||||
|
||||
publishing {
|
11
rbcs-common/src/main/java/module-info.java
Normal file
@@ -0,0 +1,11 @@
|
||||
module net.woggioni.rbcs.common {
|
||||
requires java.xml;
|
||||
requires java.logging;
|
||||
requires org.slf4j;
|
||||
requires kotlin.stdlib;
|
||||
requires net.woggioni.jwo;
|
||||
requires io.netty.buffer;
|
||||
|
||||
provides java.net.spi.URLStreamHandlerProvider with net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory;
|
||||
exports net.woggioni.rbcs.common;
|
||||
}
|
@@ -0,0 +1,25 @@
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import io.netty.buffer.ByteBuf
|
||||
import java.io.InputStream
|
||||
|
||||
class ByteBufInputStream(private val buf : ByteBuf) : InputStream() {
|
||||
override fun read(): Int {
|
||||
return buf.takeIf {
|
||||
it.readableBytes() > 0
|
||||
}?.let(ByteBuf::readByte)
|
||||
?.let(Byte::toInt) ?: -1
|
||||
}
|
||||
|
||||
override fun read(b: ByteArray, off: Int, len: Int): Int {
|
||||
val readableBytes = buf.readableBytes()
|
||||
if(readableBytes == 0) return -1
|
||||
val result = len.coerceAtMost(readableBytes)
|
||||
buf.readBytes(b, off, result)
|
||||
return result
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
buf.release()
|
||||
}
|
||||
}
|
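A quick usage sketch: the stream adapts a ByteBuf to InputStream-based APIs, and closing it releases the buffer, so it should only be wrapped around a reference the caller owns:

import io.netty.buffer.Unpooled

fun main() {
    val buf = Unpooled.wrappedBuffer("hello".toByteArray())
    ByteBufInputStream(buf).use { input ->
        println(String(input.readAllBytes()))  // prints "hello"; close() releases the buffer
    }
}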
@@ -0,0 +1,18 @@
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import io.netty.buffer.ByteBuf
|
||||
import java.io.OutputStream
|
||||
|
||||
class ByteBufOutputStream(private val buf : ByteBuf) : OutputStream() {
|
||||
override fun write(b: Int) {
|
||||
buf.writeByte(b)
|
||||
}
|
||||
|
||||
override fun write(b: ByteArray, off: Int, len: Int) {
|
||||
buf.writeBytes(b, off, len)
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
buf.release()
|
||||
}
|
||||
}
|
@@ -0,0 +1,7 @@
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
class ResourceNotFoundException(msg : String? = null, cause: Throwable? = null) : RuntimeException(msg, cause) {
|
||||
}
|
||||
|
||||
class ModuleNotFoundException(msg : String? = null, cause: Throwable? = null) : RuntimeException(msg, cause) {
|
||||
}
|
@@ -1,4 +1,4 @@
|
||||
package net.woggioni.gbcs.base
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
|
||||
data class HostAndPort(val host: String, val port: Int = 0) {
|
@@ -1,7 +1,8 @@
|
||||
package net.woggioni.gbcs.base
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import org.slf4j.Logger
|
||||
import org.slf4j.LoggerFactory
|
||||
import org.slf4j.event.Level
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.util.logging.LogManager
|
||||
@@ -52,6 +53,12 @@ inline fun log(log : Logger,
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.log(level : Level, messageBuilder : () -> String) {
|
||||
if(isEnabledForLevel(level)) {
|
||||
makeLoggingEventBuilder(level).log(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.trace(messageBuilder : () -> String) {
|
||||
if(isTraceEnabled) {
|
||||
trace(messageBuilder())
|
@@ -1,4 +1,4 @@
|
||||
package net.woggioni.gbcs.base
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import java.security.SecureRandom
|
||||
import java.security.spec.KeySpec
|
29
rbcs-common/src/main/kotlin/net/woggioni/rbcs/common/RBCS.kt
Normal file
@@ -0,0 +1,29 @@
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import net.woggioni.jwo.JWO
|
||||
import java.net.URI
|
||||
import java.net.URL
|
||||
import java.security.MessageDigest
|
||||
|
||||
object RBCS {
|
||||
fun String.toUrl() : URL = URL.of(URI(this), null)
|
||||
|
||||
const val RBCS_NAMESPACE_URI: String = "urn:net.woggioni.rbcs.server"
|
||||
const val RBCS_PREFIX: String = "rbcs"
|
||||
const val XML_SCHEMA_NAMESPACE_URI = "http://www.w3.org/2001/XMLSchema-instance"
|
||||
|
||||
fun digest(
|
||||
data: ByteArray,
|
||||
md: MessageDigest = MessageDigest.getInstance("MD5")
|
||||
): ByteArray {
|
||||
md.update(data)
|
||||
return md.digest()
|
||||
}
|
||||
|
||||
fun digestString(
|
||||
data: ByteArray,
|
||||
md: MessageDigest = MessageDigest.getInstance("MD5")
|
||||
): String {
|
||||
return JWO.bytesToHex(digest(data, md))
|
||||
}
|
||||
}
|
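A short sketch of the digest helpers above; with the default MessageDigest the result is the hex-encoded MD5 of the input:

fun main() {
    val hex = RBCS.digestString("some cache key".toByteArray())
    println(hex)  // 32 hex characters for the default MD5 digest
}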
@@ -1,19 +1,18 @@
|
||||
package net.woggioni.gbcs.base
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import java.io.IOException
|
||||
import java.io.InputStream
|
||||
import java.net.URL
|
||||
import java.net.URLConnection
|
||||
import java.net.URLStreamHandler
|
||||
import java.net.URLStreamHandlerFactory
|
||||
import java.util.Optional
|
||||
import java.net.spi.URLStreamHandlerProvider
|
||||
import java.util.concurrent.atomic.AtomicBoolean
|
||||
import java.util.stream.Collectors
|
||||
|
||||
|
||||
class GbcsUrlStreamHandlerFactory : URLStreamHandlerFactory {
|
||||
class RbcsUrlStreamHandlerFactory : URLStreamHandlerProvider() {
|
||||
|
||||
private class ClasspathHandler(private val classLoader: ClassLoader = GbcsUrlStreamHandlerFactory::class.java.classLoader) :
|
||||
private class ClasspathHandler(private val classLoader: ClassLoader = RbcsUrlStreamHandlerFactory::class.java.classLoader) :
|
||||
URLStreamHandler() {
|
||||
|
||||
override fun openConnection(u: URL): URLConnection? {
|
||||
@@ -36,13 +35,17 @@ class GbcsUrlStreamHandlerFactory : URLStreamHandlerFactory {
|
||||
private class JpmsHandler : URLStreamHandler() {
|
||||
|
||||
override fun openConnection(u: URL): URLConnection {
|
||||
val moduleName = u.host
|
||||
val thisModule = javaClass.module
|
||||
val sourceModule = Optional.ofNullable(thisModule)
|
||||
.map { obj: Module -> obj.layer }
|
||||
.flatMap { layer: ModuleLayer ->
|
||||
val moduleName = u.host
|
||||
layer.findModule(moduleName)
|
||||
}.orElse(thisModule)
|
||||
val sourceModule =
|
||||
thisModule
|
||||
?.let(Module::getLayer)
|
||||
?.let { layer: ModuleLayer ->
|
||||
layer.findModule(moduleName).orElse(null)
|
||||
} ?: if(thisModule.layer == null) {
|
||||
thisModule
|
||||
} else throw ModuleNotFoundException("Module '$moduleName' not found")
|
||||
|
||||
return JpmsResourceURLConnection(u, sourceModule)
|
||||
}
|
||||
}
|
||||
@@ -53,7 +56,9 @@ class GbcsUrlStreamHandlerFactory : URLStreamHandlerFactory {
|
||||
|
||||
@Throws(IOException::class)
|
||||
override fun getInputStream(): InputStream {
|
||||
return module.getResourceAsStream(getURL().path)
|
||||
val resource = getURL().path
|
||||
return module.getResourceAsStream(resource)
|
||||
?: throw ResourceNotFoundException("Resource '$resource' not found in module '${module.name}'")
|
||||
}
|
||||
}
|
||||
|
||||
@@ -82,12 +87,12 @@ class GbcsUrlStreamHandlerFactory : URLStreamHandlerFactory {
|
||||
private val installed = AtomicBoolean(false)
|
||||
fun install() {
|
||||
if (!installed.getAndSet(true)) {
|
||||
URL.setURLStreamHandlerFactory(GbcsUrlStreamHandlerFactory())
|
||||
URL.setURLStreamHandlerFactory(RbcsUrlStreamHandlerFactory())
|
||||
}
|
||||
}
|
||||
|
||||
private val packageMap: Map<String, List<Module>> by lazy {
|
||||
GbcsUrlStreamHandlerFactory::class.java.module.layer
|
||||
RbcsUrlStreamHandlerFactory::class.java.module.layer
|
||||
.modules()
|
||||
.stream()
|
||||
.flatMap { m: Module ->
|
@@ -1,6 +1,8 @@
|
||||
package net.woggioni.gbcs.base
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import net.woggioni.jwo.JWO
|
||||
import org.slf4j.LoggerFactory
|
||||
import org.slf4j.event.Level
|
||||
import org.w3c.dom.Document
|
||||
import org.w3c.dom.Element
|
||||
import org.w3c.dom.Node
|
||||
@@ -80,31 +82,36 @@ class Xml(val doc: Document, val element: Element) {
|
||||
private val log = LoggerFactory.getLogger(ErrorHandler::class.java)
|
||||
}
|
||||
|
||||
override fun warning(ex: SAXParseException) {
|
||||
log.warn(
|
||||
"Problem at {}:{}:{} parsing deployment configuration: {}",
|
||||
fileURL, ex.lineNumber, ex.columnNumber, ex.message
|
||||
)
|
||||
}
|
||||
override fun warning(ex: SAXParseException)= err(ex, Level.WARN)
|
||||
|
||||
override fun error(ex: SAXParseException) {
|
||||
log.error(
|
||||
"Problem at {}:{}:{} parsing deployment configuration: {}",
|
||||
fileURL, ex.lineNumber, ex.columnNumber, ex.message
|
||||
)
|
||||
private fun err(ex: SAXParseException, level: Level) {
|
||||
log.log(level) {
|
||||
"Problem at ${fileURL}:${ex.lineNumber}:${ex.columnNumber} parsing deployment configuration: ${ex.message}"
|
||||
}
|
||||
throw ex
|
||||
}
|
||||
|
||||
override fun fatalError(ex: SAXParseException) {
|
||||
log.error(
|
||||
"Problem at {}:{}:{} parsing deployment configuration: {}",
|
||||
fileURL, ex.lineNumber, ex.columnNumber, ex.message
|
||||
)
|
||||
throw ex
|
||||
}
|
||||
override fun error(ex: SAXParseException) = err(ex, Level.ERROR)
|
||||
override fun fatalError(ex: SAXParseException) = err(ex, Level.ERROR)
|
||||
}
|
||||
|
||||
companion object {
|
||||
private val dictMap: Map<String, Map<String, Any>> = sequenceOf(
|
||||
"env" to System.getenv().asSequence().map { (k, v) -> k to (v as Any) }.toMap(),
|
||||
"sys" to System.getProperties().asSequence().map { (k, v) -> k as String to (v as Any) }.toMap()
|
||||
).toMap()
|
||||
|
||||
private fun renderConfigurationTemplate(template: String): String {
|
||||
return JWO.renderTemplate(template, emptyMap(), dictMap).replace("$$", "$")
|
||||
}
|
||||
|
||||
fun Element.renderAttribute(name : String, namespaceURI: String? = null) = if(namespaceURI == null) {
|
||||
getAttribute(name)
|
||||
} else {
|
||||
getAttributeNS(namespaceURI, name)
|
||||
}.takeIf(String::isNotEmpty)?.let(Companion::renderConfigurationTemplate)
|
||||
|
||||
|
||||
fun Element.asIterable() = Iterable { ElementIterator(this, null) }
|
||||
fun NodeList.asIterable() = Iterable { NodeListIterator(this) }
|
||||
|
||||
@@ -146,29 +153,29 @@ class Xml(val doc: Document, val element: Element) {
|
||||
dbf.isExpandEntityReferences = true
|
||||
dbf.isIgnoringComments = true
|
||||
dbf.isNamespaceAware = true
|
||||
dbf.isValidating = false
|
||||
dbf.setFeature("http://apache.org/xml/features/validation/schema", true);
|
||||
dbf.isValidating = schemaResourceURL == null
|
||||
dbf.setFeature("http://apache.org/xml/features/validation/schema", true)
|
||||
schemaResourceURL?.let {
|
||||
dbf.schema = getSchema(it)
|
||||
}
|
||||
return dbf
|
||||
}
|
||||
|
||||
fun newDocumentBuilder(resource: URL, schemaResourceURL: URL?): DocumentBuilder {
|
||||
val db = newDocumentBuilderFactory(schemaResourceURL).newDocumentBuilder()
|
||||
db.setErrorHandler(ErrorHandler(resource))
|
||||
return db
|
||||
}
|
||||
fun newDocumentBuilder(resource: URL, schemaResourceURL: URL?): DocumentBuilder {
|
||||
val db = newDocumentBuilderFactory(schemaResourceURL).newDocumentBuilder()
|
||||
db.setErrorHandler(ErrorHandler(resource))
|
||||
return db
|
||||
}
|
||||
|
||||
fun parseXmlResource(resource: URL, schemaResourceURL: URL?): Document {
|
||||
val db = newDocumentBuilder(resource, schemaResourceURL)
|
||||
return resource.openStream().use(db::parse)
|
||||
}
|
||||
fun parseXmlResource(resource: URL, schemaResourceURL: URL?): Document {
|
||||
val db = newDocumentBuilder(resource, schemaResourceURL)
|
||||
return resource.openStream().use(db::parse)
|
||||
}
|
||||
|
||||
fun parseXml(sourceURL : URL, sourceStream: InputStream? = null, schemaResourceURL: URL? = null): Document {
|
||||
val db = newDocumentBuilder(sourceURL, schemaResourceURL)
|
||||
return sourceStream?.let(db::parse) ?: sourceURL.openStream().use(db::parse)
|
||||
}
|
||||
fun parseXml(sourceURL: URL, sourceStream: InputStream? = null, schemaResourceURL: URL? = null): Document {
|
||||
val db = newDocumentBuilder(sourceURL, schemaResourceURL)
|
||||
return sourceStream?.let(db::parse) ?: sourceURL.openStream().use(db::parse)
|
||||
}
|
||||
|
||||
fun write(doc: Document, output: OutputStream) {
|
||||
val transformerFactory = TransformerFactory.newInstance()
|
||||
@@ -183,7 +190,12 @@ class Xml(val doc: Document, val element: Element) {
|
||||
transformer.transform(source, result)
|
||||
}
|
||||
|
||||
fun of(namespaceURI: String, qualifiedName: String, schemaResourceURL: URL? = null, cb: Xml.(el: Element) -> Unit): Document {
|
||||
fun of(
|
||||
namespaceURI: String,
|
||||
qualifiedName: String,
|
||||
schemaResourceURL: URL? = null,
|
||||
cb: Xml.(el: Element) -> Unit
|
||||
): Document {
|
||||
val dbf = newDocumentBuilderFactory(schemaResourceURL)
|
||||
val db = dbf.newDocumentBuilder()
|
||||
val doc = db.newDocument()
|
||||
@@ -207,7 +219,7 @@ class Xml(val doc: Document, val element: Element) {
|
||||
|
||||
fun node(
|
||||
name: String,
|
||||
namespaceURI : String? = null,
|
||||
namespaceURI: String? = null,
|
||||
attrs: Map<String, String> = emptyMap(),
|
||||
cb: Xml.(el: Element) -> Unit = {}
|
||||
): Element {
|
||||
@@ -222,7 +234,7 @@ class Xml(val doc: Document, val element: Element) {
|
||||
}
|
||||
}
|
||||
|
||||
fun attr(key: String, value: String, namespaceURI : String? = null) {
|
||||
fun attr(key: String, value: String, namespaceURI: String? = null) {
|
||||
element.setAttributeNS(namespaceURI, key, value)
|
||||
}
|
||||
|
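Illustrative sketch (not part of this changeset): once the jpms:// protocol handler is registered, the Xml companion helpers above can load a configuration document and validate it against a schema bundled inside a module. The install() entry point and the schema path are assumptions based on the hunks above and on the rbcs.xsd location referenced elsewhere in this change.

// Sketch only: the install() location and the schema path are assumptions.
import net.woggioni.rbcs.common.RBCS.toUrl
import net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory
import net.woggioni.rbcs.common.Xml
import org.w3c.dom.Document
import java.net.URL

fun loadValidatedConfig(configUrl: URL): Document {
    // Registers the jpms:// URL protocol so the schema below can be resolved from its module
    RbcsUrlStreamHandlerFactory.install()
    val schema = "jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs.xsd".toUrl()
    return Xml.parseXml(configUrl, schemaResourceURL = schema)
}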
@@ -0,0 +1 @@
|
||||
net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory
|
@@ -1,6 +1,3 @@
|
||||
import org.jetbrains.kotlin.gradle.dsl.JvmTarget
|
||||
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
|
||||
|
||||
plugins {
|
||||
id 'java-library'
|
||||
id 'maven-publish'
|
||||
@@ -9,10 +6,10 @@ plugins {
|
||||
|
||||
configurations {
|
||||
bundle {
|
||||
extendsFrom runtimeClasspath
|
||||
canBeResolved = true
|
||||
canBeConsumed = false
|
||||
visible = false
|
||||
transitive = false
|
||||
|
||||
resolutionStrategy {
|
||||
dependencies {
|
||||
@@ -32,10 +29,20 @@ configurations {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
compileOnly project(':gbcs-base')
|
||||
compileOnly project(':gbcs-api')
|
||||
compileOnly catalog.jwo
|
||||
implementation catalog.xmemcached
|
||||
implementation project(':rbcs-common')
|
||||
implementation project(':rbcs-api')
|
||||
implementation catalog.jwo
|
||||
implementation catalog.slf4j.api
|
||||
implementation catalog.netty.common
|
||||
implementation catalog.netty.codec.memcache
|
||||
|
||||
bundle catalog.netty.codec.memcache
|
||||
|
||||
testRuntimeOnly catalog.logback.classic
|
||||
}
|
||||
|
||||
tasks.named(JavaPlugin.TEST_TASK_NAME, Test) {
|
||||
systemProperty("io.netty.leakDetectionLevel", "PARANOID")
|
||||
}
|
||||
|
||||
Provider<Tar> bundleTask = tasks.register("bundle", Tar) {
|
19
rbcs-server-memcache/src/main/java/module-info.java
Normal file
@@ -0,0 +1,19 @@
|
||||
import net.woggioni.rbcs.api.CacheProvider;
|
||||
|
||||
module net.woggioni.rbcs.server.memcache {
|
||||
requires net.woggioni.rbcs.common;
|
||||
requires net.woggioni.rbcs.api;
|
||||
requires net.woggioni.jwo;
|
||||
requires java.xml;
|
||||
requires kotlin.stdlib;
|
||||
requires io.netty.transport;
|
||||
requires io.netty.codec;
|
||||
requires io.netty.codec.memcache;
|
||||
requires io.netty.common;
|
||||
requires io.netty.buffer;
|
||||
requires org.slf4j;
|
||||
|
||||
provides CacheProvider with net.woggioni.rbcs.server.memcache.MemcacheCacheProvider;
|
||||
|
||||
opens net.woggioni.rbcs.server.memcache.schema;
|
||||
}
|
@@ -0,0 +1,4 @@
|
||||
package net.woggioni.rbcs.server.memcache
|
||||
|
||||
class MemcacheException(status : Short, msg : String? = null, cause : Throwable? = null)
|
||||
: RuntimeException(msg ?: "Memcached status $status", cause)
|
@@ -0,0 +1,23 @@
|
||||
package net.woggioni.rbcs.server.memcache
|
||||
|
||||
import io.netty.buffer.ByteBuf
|
||||
import net.woggioni.rbcs.api.Cache
|
||||
import net.woggioni.rbcs.server.memcache.client.MemcacheClient
|
||||
import java.nio.channels.ReadableByteChannel
|
||||
import java.util.concurrent.CompletableFuture
|
||||
|
||||
class MemcacheCache(private val cfg : MemcacheCacheConfiguration) : Cache {
|
||||
private val memcacheClient = MemcacheClient(cfg)
|
||||
|
||||
override fun get(key: String): CompletableFuture<ReadableByteChannel?> {
|
||||
return memcacheClient.get(key)
|
||||
}
|
||||
|
||||
override fun put(key: String, content: ByteBuf): CompletableFuture<Void> {
|
||||
return memcacheClient.put(key, content, cfg.maxAge)
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
memcacheClient.close()
|
||||
}
|
||||
}
|
@@ -0,0 +1,40 @@
|
||||
package net.woggioni.rbcs.server.memcache
|
||||
|
||||
import net.woggioni.rbcs.api.Configuration
|
||||
import net.woggioni.rbcs.common.HostAndPort
|
||||
import java.time.Duration
|
||||
|
||||
data class MemcacheCacheConfiguration(
|
||||
val servers: List<Server>,
|
||||
val maxAge: Duration = Duration.ofDays(1),
|
||||
val maxSize: Int = 0x100000,
|
||||
val digestAlgorithm: String? = null,
|
||||
val compressionMode: CompressionMode? = null,
|
||||
) : Configuration.Cache {
|
||||
|
||||
enum class CompressionMode {
|
||||
/**
|
||||
* Gzip mode
|
||||
*/
|
||||
GZIP,
|
||||
|
||||
/**
|
||||
* Deflate mode
|
||||
*/
|
||||
DEFLATE
|
||||
}
|
||||
|
||||
data class Server(
|
||||
val endpoint : HostAndPort,
|
||||
val connectionTimeoutMillis : Int?,
|
||||
val maxConnections : Int
|
||||
)
|
||||
|
||||
|
||||
override fun materialize() = MemcacheCache(this)
|
||||
|
||||
override fun getNamespaceURI() = "urn:net.woggioni.rbcs.server.memcache"
|
||||
|
||||
override fun getTypeName() = "memcacheCacheType"
|
||||
}
|
||||
|
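Usage sketch (the memcached endpoint, key and payload are made up for illustration): a configuration like the one above materializes into the MemcacheCache from the previous file, whose get/put methods return CompletableFuture.

// Sketch only: endpoint, key and payload are illustrative.
import io.netty.buffer.Unpooled
import net.woggioni.rbcs.common.HostAndPort
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration.Server

fun roundTrip() {
    val cfg = MemcacheCacheConfiguration(
        servers = listOf(Server(HostAndPort("127.0.0.1", 11211), connectionTimeoutMillis = 10_000, maxConnections = 1))
    )
    val cache = cfg.materialize()
    try {
        // put() completes once the SET has been acknowledged
        cache.put("some-key", Unpooled.wrappedBuffer("payload".toByteArray())).get()
        // get() completes with null on a cache miss
        cache.get("some-key").get()?.close()
    } finally {
        cache.close()
    }
}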
@@ -0,0 +1,101 @@
|
||||
package net.woggioni.rbcs.server.memcache
|
||||
|
||||
import net.woggioni.rbcs.api.CacheProvider
|
||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
||||
import net.woggioni.rbcs.common.RBCS
|
||||
import net.woggioni.rbcs.common.HostAndPort
|
||||
import net.woggioni.rbcs.common.Xml
|
||||
import net.woggioni.rbcs.common.Xml.Companion.asIterable
|
||||
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
|
||||
import org.w3c.dom.Document
|
||||
import org.w3c.dom.Element
|
||||
import java.time.Duration
|
||||
import java.time.temporal.ChronoUnit
|
||||
|
||||
|
||||
class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
|
||||
override fun getXmlSchemaLocation() = "jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd"
|
||||
|
||||
override fun getXmlType() = "memcacheCacheType"
|
||||
|
||||
override fun getXmlNamespace() = "urn:net.woggioni.rbcs.server.memcache"
|
||||
|
||||
val xmlNamespacePrefix : String
|
||||
get() = "rbcs-memcache"
|
||||
|
||||
override fun deserialize(el: Element): MemcacheCacheConfiguration {
|
||||
val servers = mutableListOf<MemcacheCacheConfiguration.Server>()
|
||||
val maxAge = el.renderAttribute("max-age")
|
||||
?.let(Duration::parse)
|
||||
?: Duration.ofDays(1)
|
||||
val maxSize = el.renderAttribute("max-size")
|
||||
?.let(String::toInt)
|
||||
?: 0x100000
|
||||
val compressionMode = el.renderAttribute("compression-mode")
|
||||
?.let {
|
||||
when (it) {
|
||||
"gzip" -> MemcacheCacheConfiguration.CompressionMode.GZIP
|
||||
"deflate" -> MemcacheCacheConfiguration.CompressionMode.DEFLATE
|
||||
else -> MemcacheCacheConfiguration.CompressionMode.DEFLATE
|
||||
}
|
||||
}
|
||||
?: MemcacheCacheConfiguration.CompressionMode.DEFLATE
|
||||
val digestAlgorithm = el.renderAttribute("digest")
|
||||
for (child in el.asIterable()) {
|
||||
when (child.nodeName) {
|
||||
"server" -> {
|
||||
val host = child.renderAttribute("host") ?: throw ConfigurationException("host attribute is required")
|
||||
val port = child.renderAttribute("port")?.toInt() ?: throw ConfigurationException("port attribute is required")
|
||||
val maxConnections = child.renderAttribute("max-connections")?.toInt() ?: 1
|
||||
val connectionTimeout = child.renderAttribute("connection-timeout")
|
||||
?.let(Duration::parse)
|
||||
?.let(Duration::toMillis)
|
||||
?.let(Long::toInt)
|
||||
?: 10000
|
||||
servers.add(MemcacheCacheConfiguration.Server(HostAndPort(host, port), connectionTimeout, maxConnections))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return MemcacheCacheConfiguration(
|
||||
servers,
|
||||
maxAge,
|
||||
maxSize,
|
||||
digestAlgorithm,
|
||||
compressionMode,
|
||||
)
|
||||
}
|
||||
|
||||
override fun serialize(doc: Document, cache: MemcacheCacheConfiguration) = cache.run {
|
||||
val result = doc.createElement("cache")
|
||||
Xml.of(doc, result) {
|
||||
attr("xmlns:${xmlNamespacePrefix}", xmlNamespace, namespaceURI = "http://www.w3.org/2000/xmlns/")
|
||||
|
||||
attr("xs:type", "${xmlNamespacePrefix}:$xmlType", RBCS.XML_SCHEMA_NAMESPACE_URI)
|
||||
for (server in servers) {
|
||||
node("server") {
|
||||
attr("host", server.endpoint.host)
|
||||
attr("port", server.endpoint.port.toString())
|
||||
server.connectionTimeoutMillis?.let { connectionTimeoutMillis ->
|
||||
attr("connection-timeout", Duration.of(connectionTimeoutMillis.toLong(), ChronoUnit.MILLIS).toString())
|
||||
}
|
||||
attr("max-connections", server.maxConnections.toString())
|
||||
}
|
||||
}
|
||||
attr("max-age", maxAge.toString())
|
||||
attr("max-size", maxSize.toString())
|
||||
digestAlgorithm?.let { digestAlgorithm ->
|
||||
attr("digest", digestAlgorithm)
|
||||
}
|
||||
compressionMode?.let { compressionMode ->
|
||||
attr(
|
||||
"compression-mode", when (compressionMode) {
|
||||
MemcacheCacheConfiguration.CompressionMode.GZIP -> "gzip"
|
||||
MemcacheCacheConfiguration.CompressionMode.DEFLATE -> "deflate"
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
}
|
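This provider is exposed both through module-info (provides CacheProvider with ...) and through the META-INF/services entry added below, so it can be discovered reflectively. A discovery sketch, assuming CacheProvider is a Java interface whose getXmlType() surfaces as xmlType in Kotlin:

// Sketch only: lists the XML type of every CacheProvider visible on the module path or classpath.
import net.woggioni.rbcs.api.CacheProvider
import java.util.ServiceLoader

fun availableCacheTypes(): List<String> =
    ServiceLoader.load(CacheProvider::class.java).map { it.xmlType }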
@@ -0,0 +1,257 @@
|
||||
package net.woggioni.rbcs.server.memcache.client
|
||||
|
||||
|
||||
import io.netty.bootstrap.Bootstrap
|
||||
import io.netty.buffer.ByteBuf
|
||||
import io.netty.buffer.Unpooled
|
||||
import io.netty.channel.Channel
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import io.netty.channel.ChannelOption
|
||||
import io.netty.channel.ChannelPipeline
|
||||
import io.netty.channel.SimpleChannelInboundHandler
|
||||
import io.netty.channel.nio.NioEventLoopGroup
|
||||
import io.netty.channel.pool.AbstractChannelPoolHandler
|
||||
import io.netty.channel.pool.ChannelPool
|
||||
import io.netty.channel.pool.FixedChannelPool
|
||||
import io.netty.channel.socket.nio.NioSocketChannel
|
||||
import io.netty.handler.codec.DecoderException
|
||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheClientCodec
|
||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheObjectAggregator
|
||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
|
||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponseStatus
|
||||
import io.netty.handler.codec.memcache.binary.DefaultFullBinaryMemcacheRequest
|
||||
import io.netty.handler.codec.memcache.binary.FullBinaryMemcacheRequest
|
||||
import io.netty.handler.codec.memcache.binary.FullBinaryMemcacheResponse
|
||||
import io.netty.util.concurrent.GenericFutureListener
|
||||
import net.woggioni.rbcs.common.ByteBufInputStream
|
||||
import net.woggioni.rbcs.common.ByteBufOutputStream
|
||||
import net.woggioni.rbcs.common.RBCS.digest
|
||||
import net.woggioni.rbcs.common.HostAndPort
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration
|
||||
import net.woggioni.rbcs.server.memcache.MemcacheException
|
||||
import net.woggioni.jwo.JWO
|
||||
import java.net.InetSocketAddress
|
||||
import java.nio.channels.Channels
|
||||
import java.nio.channels.ReadableByteChannel
|
||||
import java.security.MessageDigest
|
||||
import java.time.Duration
|
||||
import java.time.Instant
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
import java.util.zip.Deflater
|
||||
import java.util.zip.DeflaterOutputStream
|
||||
import java.util.zip.GZIPInputStream
|
||||
import java.util.zip.GZIPOutputStream
|
||||
import java.util.zip.InflaterInputStream
|
||||
import io.netty.util.concurrent.Future as NettyFuture
|
||||
|
||||
|
||||
class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseable {
|
||||
|
||||
private companion object {
|
||||
@JvmStatic
|
||||
private val log = contextLogger()
|
||||
}
|
||||
|
||||
private val group: NioEventLoopGroup
|
||||
private val connectionPool: MutableMap<HostAndPort, ChannelPool> = ConcurrentHashMap()
|
||||
|
||||
init {
|
||||
group = NioEventLoopGroup()
|
||||
}
|
||||
|
||||
private fun newConnectionPool(server: MemcacheCacheConfiguration.Server): FixedChannelPool {
|
||||
val bootstrap = Bootstrap().apply {
|
||||
group(group)
|
||||
channel(NioSocketChannel::class.java)
|
||||
option(ChannelOption.SO_KEEPALIVE, true)
|
||||
remoteAddress(InetSocketAddress(server.endpoint.host, server.endpoint.port))
|
||||
server.connectionTimeoutMillis?.let {
|
||||
option(ChannelOption.CONNECT_TIMEOUT_MILLIS, it)
|
||||
}
|
||||
}
|
||||
val channelPoolHandler = object : AbstractChannelPoolHandler() {
|
||||
|
||||
override fun channelCreated(ch: Channel) {
|
||||
val pipeline: ChannelPipeline = ch.pipeline()
|
||||
pipeline.addLast(BinaryMemcacheClientCodec())
|
||||
pipeline.addLast(BinaryMemcacheObjectAggregator(cfg.maxSize))
|
||||
}
|
||||
}
|
||||
return FixedChannelPool(bootstrap, channelPoolHandler, server.maxConnections)
|
||||
}
|
||||
|
||||
|
||||
private fun sendRequest(request: FullBinaryMemcacheRequest): CompletableFuture<FullBinaryMemcacheResponse> {
|
||||
|
||||
val server = cfg.servers.let { servers ->
|
||||
if (servers.size > 1) {
|
||||
val key = request.key().duplicate()
|
||||
var checksum = 0
|
||||
while (key.readableBytes() > 4) {
|
||||
val byte = key.readInt()
|
||||
checksum = checksum xor byte
|
||||
}
|
||||
while (key.readableBytes() > 0) {
|
||||
val byte = key.readByte()
|
||||
checksum = checksum xor byte.toInt()
|
||||
}
|
||||
servers[checksum % servers.size]
|
||||
} else {
|
||||
servers.first()
|
||||
}
|
||||
}
|
||||
|
||||
val response = CompletableFuture<FullBinaryMemcacheResponse>()
|
||||
// Custom handler for processing responses
|
||||
val pool = connectionPool.computeIfAbsent(server.endpoint) {
|
||||
newConnectionPool(server)
|
||||
}
|
||||
pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {
|
||||
override fun operationComplete(channelFuture: NettyFuture<Channel>) {
|
||||
if (channelFuture.isSuccess) {
|
||||
val channel = channelFuture.now
|
||||
val pipeline = channel.pipeline()
|
||||
channel.pipeline()
|
||||
.addLast("client-handler", object : SimpleChannelInboundHandler<FullBinaryMemcacheResponse>() {
|
||||
override fun channelRead0(
|
||||
ctx: ChannelHandlerContext,
|
||||
msg: FullBinaryMemcacheResponse
|
||||
) {
|
||||
pipeline.removeLast()
|
||||
pool.release(channel)
|
||||
msg.touch("The method's caller must remember to release this")
|
||||
response.complete(msg.retain())
|
||||
}
|
||||
|
||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
||||
val ex = when (cause) {
|
||||
is DecoderException -> cause.cause!!
|
||||
else -> cause
|
||||
}
|
||||
ctx.close()
|
||||
pipeline.removeLast()
|
||||
pool.release(channel)
|
||||
response.completeExceptionally(ex)
|
||||
}
|
||||
})
|
||||
request.touch()
|
||||
channel.writeAndFlush(request)
|
||||
} else {
|
||||
response.completeExceptionally(channelFuture.cause())
|
||||
}
|
||||
}
|
||||
})
|
||||
return response
|
||||
}
|
||||
|
||||
private fun encodeExpiry(expiry: Duration): Int {
|
||||
val expirySeconds = expiry.toSeconds()
|
||||
return expirySeconds.toInt().takeIf { it.toLong() == expirySeconds }
|
||||
?: Instant.ofEpochSecond(expirySeconds).epochSecond.toInt()
|
||||
}
|
||||
|
||||
fun get(key: String): CompletableFuture<ReadableByteChannel?> {
|
||||
val request = (cfg.digestAlgorithm
|
||||
?.let(MessageDigest::getInstance)
|
||||
?.let { md ->
|
||||
digest(key.toByteArray(), md)
|
||||
} ?: key.toByteArray(Charsets.UTF_8)).let { digest ->
|
||||
DefaultFullBinaryMemcacheRequest(Unpooled.wrappedBuffer(digest), null).apply {
|
||||
setOpcode(BinaryMemcacheOpcodes.GET)
|
||||
}
|
||||
}
|
||||
return sendRequest(request).thenApply { response ->
|
||||
try {
|
||||
when (val status = response.status()) {
|
||||
BinaryMemcacheResponseStatus.SUCCESS -> {
|
||||
val compressionMode = cfg.compressionMode
|
||||
val content = response.content().retain()
|
||||
content.touch()
|
||||
if (compressionMode != null) {
|
||||
when (compressionMode) {
|
||||
MemcacheCacheConfiguration.CompressionMode.GZIP -> {
|
||||
GZIPInputStream(ByteBufInputStream(content))
|
||||
}
|
||||
|
||||
MemcacheCacheConfiguration.CompressionMode.DEFLATE -> {
|
||||
InflaterInputStream(ByteBufInputStream(content))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
ByteBufInputStream(content)
|
||||
}.let(Channels::newChannel)
|
||||
}
|
||||
|
||||
BinaryMemcacheResponseStatus.KEY_ENOENT -> {
|
||||
null
|
||||
}
|
||||
|
||||
else -> throw MemcacheException(status)
|
||||
}
|
||||
} finally {
|
||||
response.release()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun put(key: String, content: ByteBuf, expiry: Duration, cas: Long? = null): CompletableFuture<Void> {
|
||||
val request = (cfg.digestAlgorithm
|
||||
?.let(MessageDigest::getInstance)
|
||||
?.let { md ->
|
||||
digest(key.toByteArray(), md)
|
||||
} ?: key.toByteArray(Charsets.UTF_8)).let { digest ->
|
||||
val extras = Unpooled.buffer(8, 8)
|
||||
extras.writeInt(0)
|
||||
extras.writeInt(encodeExpiry(expiry))
|
||||
val compressionMode = cfg.compressionMode
|
||||
content.retain()
|
||||
val payload = if (compressionMode != null) {
|
||||
val inputStream = ByteBufInputStream(content)
|
||||
val buf = content.alloc().buffer()
|
||||
buf.retain()
|
||||
val outputStream = when (compressionMode) {
|
||||
MemcacheCacheConfiguration.CompressionMode.GZIP -> {
|
||||
GZIPOutputStream(ByteBufOutputStream(buf))
|
||||
}
|
||||
|
||||
MemcacheCacheConfiguration.CompressionMode.DEFLATE -> {
|
||||
DeflaterOutputStream(ByteBufOutputStream(buf), Deflater(Deflater.DEFAULT_COMPRESSION, false))
|
||||
}
|
||||
}
|
||||
inputStream.use { i ->
|
||||
outputStream.use { o ->
|
||||
JWO.copy(i, o)
|
||||
}
|
||||
}
|
||||
buf
|
||||
} else {
|
||||
content
|
||||
}
|
||||
DefaultFullBinaryMemcacheRequest(Unpooled.wrappedBuffer(digest), extras, payload).apply {
|
||||
setOpcode(BinaryMemcacheOpcodes.SET)
|
||||
cas?.let(this::setCas)
|
||||
}
|
||||
}
|
||||
return sendRequest(request).thenApply { response ->
|
||||
try {
|
||||
when (val status = response.status()) {
|
||||
BinaryMemcacheResponseStatus.SUCCESS -> null
|
||||
else -> throw MemcacheException(status)
|
||||
}
|
||||
} finally {
|
||||
response.release()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
fun shutDown(): NettyFuture<*> {
|
||||
return group.shutdownGracefully()
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
shutDown().sync()
|
||||
}
|
||||
}
|
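For reference, the XOR-based server selection used by sendRequest above, restated as a pure function over the raw key bytes (illustrative only, not part of this changeset):

// Mirrors the folding above: XOR whole ints while more than four bytes remain,
// then XOR the trailing bytes one at a time, and index the server list by the result.
import java.nio.ByteBuffer

fun serverIndexFor(key: ByteArray, serverCount: Int): Int {
    var checksum = 0
    var offset = 0
    while (key.size - offset > 4) {
        checksum = checksum xor ByteBuffer.wrap(key, offset, 4).getInt()
        offset += 4
    }
    while (offset < key.size) {
        checksum = checksum xor key[offset].toInt()
        offset++
    }
    return checksum % serverCount
}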
@@ -0,0 +1 @@
|
||||
net.woggioni.rbcs.server.memcache.MemcacheCacheProvider
|
@@ -0,0 +1,37 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<xs:schema targetNamespace="urn:net.woggioni.rbcs.server.memcache"
|
||||
xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
|
||||
xmlns:rbcs="urn:net.woggioni.rbcs.server"
|
||||
xmlns:xs="http://www.w3.org/2001/XMLSchema">
|
||||
|
||||
<xs:import schemaLocation="jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs.xsd" namespace="urn:net.woggioni.rbcs.server"/>
|
||||
|
||||
<xs:complexType name="memcacheServerType">
|
||||
<xs:attribute name="host" type="xs:token" use="required"/>
|
||||
<xs:attribute name="port" type="xs:positiveInteger" use="required"/>
|
||||
<xs:attribute name="connection-timeout" type="xs:duration"/>
|
||||
<xs:attribute name="max-connections" type="xs:positiveInteger" default="1"/>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="memcacheCacheType">
|
||||
<xs:complexContent>
|
||||
<xs:extension base="rbcs:cacheType">
|
||||
<xs:sequence maxOccurs="unbounded">
|
||||
<xs:element name="server" type="rbcs-memcache:memcacheServerType"/>
|
||||
</xs:sequence>
|
||||
<xs:attribute name="max-age" type="xs:duration" default="P1D"/>
|
||||
<xs:attribute name="max-size" type="xs:unsignedInt" default="1048576"/>
|
||||
<xs:attribute name="digest" type="xs:token" />
|
||||
<xs:attribute name="compression-mode" type="rbcs-memcache:compressionType"/>
|
||||
</xs:extension>
|
||||
</xs:complexContent>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:simpleType name="compressionType">
|
||||
<xs:restriction base="xs:token">
|
||||
<xs:enumeration value="deflate"/>
|
||||
<xs:enumeration value="gzip"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
|
||||
</xs:schema>
|
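For reference, a cache element that this schema is intended to validate could look like the following fragment (all values are illustrative; the xs prefix binding on the type attribute mirrors the serializer in MemcacheCacheProvider, and the exact instance-namespace URI is an assumption):

<!-- Illustrative fragment only -->
<cache xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
       xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
       xs:type="rbcs-memcache:memcacheCacheType"
       max-age="P7D"
       max-size="67108864"
       digest="SHA-256"
       compression-mode="deflate">
    <server host="127.0.0.1" port="11211" max-connections="4" connection-timeout="PT10S"/>
</cache>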
38
rbcs-server/build.gradle
Normal file
@@ -0,0 +1,38 @@
|
||||
plugins {
|
||||
id 'java-library'
|
||||
alias catalog.plugins.kotlin.jvm
|
||||
id 'jacoco'
|
||||
id 'maven-publish'
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation catalog.jwo
|
||||
implementation catalog.slf4j.api
|
||||
implementation catalog.netty.codec.http
|
||||
|
||||
api project(':rbcs-common')
|
||||
api project(':rbcs-api')
|
||||
|
||||
// runtimeOnly catalog.slf4j.jdk14
|
||||
testRuntimeOnly catalog.logback.classic
|
||||
|
||||
testImplementation catalog.bcprov.jdk18on
|
||||
testImplementation catalog.bcpkix.jdk18on
|
||||
|
||||
testRuntimeOnly project(":rbcs-server-memcache")
|
||||
}
|
||||
|
||||
test {
|
||||
systemProperty("io.netty.leakDetectionLevel", "PARANOID")
|
||||
systemProperty("jdk.httpclient.redirects.retrylimit", "1")
|
||||
}
|
||||
|
||||
publishing {
|
||||
publications {
|
||||
maven(MavenPublication) {
|
||||
from(components["java"])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
29
rbcs-server/src/main/java/module-info.java
Normal file
@@ -0,0 +1,29 @@
|
||||
import net.woggioni.rbcs.api.CacheProvider;
|
||||
import net.woggioni.rbcs.server.cache.FileSystemCacheProvider;
|
||||
import net.woggioni.rbcs.server.cache.InMemoryCacheProvider;
|
||||
|
||||
module net.woggioni.rbcs.server {
|
||||
requires java.sql;
|
||||
requires java.xml;
|
||||
requires java.logging;
|
||||
requires java.naming;
|
||||
requires kotlin.stdlib;
|
||||
requires io.netty.buffer;
|
||||
requires io.netty.transport;
|
||||
requires io.netty.codec.http;
|
||||
requires io.netty.common;
|
||||
requires io.netty.handler;
|
||||
requires io.netty.codec;
|
||||
requires org.slf4j;
|
||||
requires net.woggioni.jwo;
|
||||
requires net.woggioni.rbcs.common;
|
||||
requires net.woggioni.rbcs.api;
|
||||
|
||||
exports net.woggioni.rbcs.server;
|
||||
|
||||
opens net.woggioni.rbcs.server;
|
||||
opens net.woggioni.rbcs.server.schema;
|
||||
|
||||
uses CacheProvider;
|
||||
provides CacheProvider with FileSystemCacheProvider, InMemoryCacheProvider;
|
||||
}
|
@@ -1,4 +1,4 @@
|
||||
package net.woggioni.gbcs
|
||||
package net.woggioni.rbcs.server
|
||||
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import org.slf4j.Logger
|
@@ -1,67 +1,57 @@
|
||||
package net.woggioni.gbcs
|
||||
package net.woggioni.rbcs.server
|
||||
|
||||
import io.netty.bootstrap.ServerBootstrap
|
||||
import io.netty.buffer.ByteBuf
|
||||
import io.netty.buffer.Unpooled
|
||||
import io.netty.channel.Channel
|
||||
import io.netty.channel.ChannelDuplexHandler
|
||||
import io.netty.channel.ChannelFuture
|
||||
import io.netty.channel.ChannelFutureListener
|
||||
import io.netty.channel.ChannelHandler.Sharable
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import io.netty.channel.ChannelInboundHandlerAdapter
|
||||
import io.netty.channel.ChannelInitializer
|
||||
import io.netty.channel.ChannelOption
|
||||
import io.netty.channel.ChannelPromise
|
||||
import io.netty.channel.DefaultFileRegion
|
||||
import io.netty.channel.SimpleChannelInboundHandler
|
||||
import io.netty.channel.nio.NioEventLoopGroup
|
||||
import io.netty.channel.socket.nio.NioServerSocketChannel
|
||||
import io.netty.handler.codec.DecoderException
|
||||
import io.netty.handler.codec.compression.CompressionOptions
|
||||
import io.netty.handler.codec.http.DefaultFullHttpResponse
|
||||
import io.netty.handler.codec.http.DefaultHttpContent
|
||||
import io.netty.handler.codec.http.DefaultHttpResponse
|
||||
import io.netty.handler.codec.http.FullHttpRequest
|
||||
import io.netty.handler.codec.http.FullHttpResponse
|
||||
import io.netty.handler.codec.http.HttpContentCompressor
|
||||
import io.netty.handler.codec.http.HttpHeaderNames
|
||||
import io.netty.handler.codec.http.HttpHeaderValues
|
||||
import io.netty.handler.codec.http.HttpMethod
|
||||
import io.netty.handler.codec.http.HttpObjectAggregator
|
||||
import io.netty.handler.codec.http.HttpRequest
|
||||
import io.netty.handler.codec.http.HttpResponseStatus
|
||||
import io.netty.handler.codec.http.HttpServerCodec
|
||||
import io.netty.handler.codec.http.HttpUtil
|
||||
import io.netty.handler.codec.http.HttpVersion
|
||||
import io.netty.handler.codec.http.LastHttpContent
|
||||
import io.netty.handler.ssl.ClientAuth
|
||||
import io.netty.handler.ssl.SslContext
|
||||
import io.netty.handler.ssl.SslContextBuilder
|
||||
import io.netty.handler.stream.ChunkedNioFile
|
||||
import io.netty.handler.stream.ChunkedNioStream
|
||||
import io.netty.handler.ssl.SslHandler
|
||||
import io.netty.handler.stream.ChunkedWriteHandler
|
||||
import io.netty.handler.timeout.IdleState
|
||||
import io.netty.handler.timeout.IdleStateEvent
|
||||
import io.netty.handler.timeout.IdleStateHandler
|
||||
import io.netty.util.AttributeKey
|
||||
import io.netty.util.concurrent.DefaultEventExecutorGroup
|
||||
import io.netty.util.concurrent.EventExecutorGroup
|
||||
import net.woggioni.gbcs.api.Cache
|
||||
import net.woggioni.gbcs.api.Configuration
|
||||
import net.woggioni.gbcs.api.Role
|
||||
import net.woggioni.gbcs.api.exception.ContentTooLargeException
|
||||
import net.woggioni.gbcs.auth.AbstractNettyHttpAuthenticator
|
||||
import net.woggioni.gbcs.auth.Authorizer
|
||||
import net.woggioni.gbcs.auth.ClientCertificateValidator
|
||||
import net.woggioni.gbcs.auth.RoleAuthorizer
|
||||
import net.woggioni.gbcs.base.GBCS.toUrl
|
||||
import net.woggioni.gbcs.base.PasswordSecurity.decodePasswordHash
|
||||
import net.woggioni.gbcs.base.PasswordSecurity.hashPassword
|
||||
import net.woggioni.gbcs.base.Xml
|
||||
import net.woggioni.gbcs.base.contextLogger
|
||||
import net.woggioni.gbcs.base.info
|
||||
import net.woggioni.gbcs.configuration.Parser
|
||||
import net.woggioni.gbcs.configuration.Serializer
|
||||
import net.woggioni.rbcs.api.Configuration
|
||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
||||
import net.woggioni.rbcs.common.RBCS.toUrl
|
||||
import net.woggioni.rbcs.common.PasswordSecurity.decodePasswordHash
|
||||
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
|
||||
import net.woggioni.rbcs.common.Xml
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import net.woggioni.rbcs.common.debug
|
||||
import net.woggioni.rbcs.common.info
|
||||
import net.woggioni.rbcs.server.auth.AbstractNettyHttpAuthenticator
|
||||
import net.woggioni.rbcs.server.auth.Authorizer
|
||||
import net.woggioni.rbcs.server.auth.ClientCertificateValidator
|
||||
import net.woggioni.rbcs.server.auth.RoleAuthorizer
|
||||
import net.woggioni.rbcs.server.configuration.Parser
|
||||
import net.woggioni.rbcs.server.configuration.Serializer
|
||||
import net.woggioni.rbcs.server.exception.ExceptionHandler
|
||||
import net.woggioni.rbcs.server.handler.ServerHandler
|
||||
import net.woggioni.rbcs.server.throttling.ThrottlingHandler
|
||||
import net.woggioni.jwo.JWO
|
||||
import net.woggioni.jwo.Tuple2
|
||||
import java.io.OutputStream
|
||||
import java.net.InetSocketAddress
|
||||
import java.nio.channels.FileChannel
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.security.KeyStore
|
||||
@@ -69,14 +59,34 @@ import java.security.PrivateKey
|
||||
import java.security.cert.X509Certificate
|
||||
import java.util.Arrays
|
||||
import java.util.Base64
|
||||
import java.util.concurrent.TimeUnit
|
||||
import java.util.regex.Matcher
|
||||
import java.util.regex.Pattern
|
||||
import javax.naming.ldap.LdapName
|
||||
import javax.net.ssl.SSLEngine
|
||||
import javax.net.ssl.SSLPeerUnverifiedException
|
||||
|
||||
class RemoteBuildCacheServer(private val cfg: Configuration) {
|
||||
private val log = contextLogger()
|
||||
|
||||
class GradleBuildCacheServer(private val cfg: Configuration) {
|
||||
companion object {
|
||||
|
||||
val userAttribute: AttributeKey<Configuration.User> = AttributeKey.valueOf("user")
|
||||
val groupAttribute: AttributeKey<Set<Configuration.Group>> = AttributeKey.valueOf("group")
|
||||
|
||||
val DEFAULT_CONFIGURATION_URL by lazy { "classpath:net/woggioni/rbcs/server/rbcs-default.xml".toUrl() }
|
||||
private const val SSL_HANDLER_NAME = "sslHandler"
|
||||
|
||||
fun loadConfiguration(configurationFile: Path): Configuration {
|
||||
val doc = Files.newInputStream(configurationFile).use {
|
||||
Xml.parseXml(configurationFile.toUri().toURL(), it)
|
||||
}
|
||||
return Parser.parse(doc)
|
||||
}
|
||||
|
||||
fun dumpConfiguration(conf: Configuration, outputStream: OutputStream) {
|
||||
Xml.write(Serializer.serialize(conf), outputStream)
|
||||
}
|
||||
}
|
||||
|
||||
private class HttpChunkContentCompressor(
|
||||
threshold: Int,
|
||||
@@ -99,61 +109,60 @@ class GradleBuildCacheServer(private val cfg: Configuration) {
|
||||
}
|
||||
}
|
||||
|
||||
@Sharable
|
||||
private class ClientCertificateAuthenticator(
|
||||
authorizer: Authorizer,
|
||||
private val sslEngine: SSLEngine,
|
||||
private val anonymousUserGroups: Set<Configuration.Group>?,
|
||||
private val userExtractor: Configuration.UserExtractor?,
|
||||
private val groupExtractor: Configuration.GroupExtractor?,
|
||||
) : AbstractNettyHttpAuthenticator(authorizer) {
|
||||
|
||||
companion object {
|
||||
private val log = contextLogger()
|
||||
}
|
||||
|
||||
override fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): Set<Role>? {
|
||||
override fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): AuthenticationResult? {
|
||||
return try {
|
||||
sslEngine.session.peerCertificates
|
||||
val sslHandler = (ctx.pipeline().get(SSL_HANDLER_NAME) as? SslHandler)
|
||||
?: throw ConfigurationException("Client certificate authentication cannot be used when TLS is disabled")
|
||||
val sslEngine = sslHandler.engine()
|
||||
sslEngine.session.peerCertificates.takeIf {
|
||||
it.isNotEmpty()
|
||||
}?.let { peerCertificates ->
|
||||
val clientCertificate = peerCertificates.first() as X509Certificate
|
||||
val user = userExtractor?.extract(clientCertificate)
|
||||
val group = groupExtractor?.extract(clientCertificate)
|
||||
val allGroups = ((user?.groups ?: emptySet()).asSequence() + sequenceOf(group).filterNotNull()).toSet()
|
||||
AuthenticationResult(user, allGroups)
|
||||
} ?: anonymousUserGroups?.let{ AuthenticationResult(null, it) }
|
||||
} catch (es: SSLPeerUnverifiedException) {
|
||||
null
|
||||
}?.takeIf {
|
||||
it.isNotEmpty()
|
||||
}?.let { peerCertificates ->
|
||||
val clientCertificate = peerCertificates.first() as X509Certificate
|
||||
val user = userExtractor?.extract(clientCertificate)
|
||||
val group = groupExtractor?.extract(clientCertificate)
|
||||
(group?.roles ?: emptySet()) + (user?.roles ?: emptySet())
|
||||
anonymousUserGroups?.let{ AuthenticationResult(null, it) }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Sharable
|
||||
private class NettyHttpBasicAuthenticator(
|
||||
private val users: Map<String, Configuration.User>, authorizer: Authorizer
|
||||
) : AbstractNettyHttpAuthenticator(authorizer) {
|
||||
private val log = contextLogger()
|
||||
|
||||
companion object {
|
||||
private val log = contextLogger()
|
||||
}
|
||||
|
||||
override fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): Set<Role>? {
|
||||
override fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): AuthenticationResult? {
|
||||
val authorizationHeader = req.headers()[HttpHeaderNames.AUTHORIZATION] ?: let {
|
||||
log.debug(ctx) {
|
||||
"Missing Authorization header"
|
||||
}
|
||||
return null
|
||||
return users[""]?.let { AuthenticationResult(it, it.groups) }
|
||||
}
|
||||
val cursor = authorizationHeader.indexOf(' ')
|
||||
if (cursor < 0) {
|
||||
log.debug(ctx) {
|
||||
"Invalid Authorization header: '$authorizationHeader'"
|
||||
}
|
||||
return null
|
||||
return users[""]?.let { AuthenticationResult(it, it.groups) }
|
||||
}
|
||||
val authenticationType = authorizationHeader.substring(0, cursor)
|
||||
if ("Basic" != authenticationType) {
|
||||
log.debug(ctx) {
|
||||
"Invalid authentication type header: '$authenticationType'"
|
||||
}
|
||||
return null
|
||||
return users[""]?.let { AuthenticationResult(it, it.groups) }
|
||||
}
|
||||
val (username, password) = Base64.getDecoder().decode(authorizationHeader.substring(cursor + 1))
|
||||
.let(::String)
|
||||
@@ -173,7 +182,9 @@ class GradleBuildCacheServer(private val cfg: Configuration) {
|
||||
val (_, salt) = decodePasswordHash(passwordAndSalt)
|
||||
hashPassword(password, Base64.getEncoder().encodeToString(salt)) == passwordAndSalt
|
||||
} ?: false
|
||||
}?.roles
|
||||
}?.let { user ->
|
||||
AuthenticationResult(user, user.groups)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -197,16 +208,15 @@ class GradleBuildCacheServer(private val cfg: Configuration) {
|
||||
.map { it as X509Certificate }
|
||||
.toArray { size -> Array<X509Certificate?>(size) { null } }
|
||||
SslContextBuilder.forServer(serverKey, *serverCert).apply {
|
||||
if (tls.isVerifyClients) {
|
||||
clientAuth(ClientAuth.OPTIONAL)
|
||||
val trustStore = tls.trustStore
|
||||
if (trustStore != null) {
|
||||
val ts = loadKeystore(trustStore.file, trustStore.password)
|
||||
trustManager(
|
||||
ClientCertificateValidator.getTrustManager(ts, trustStore.isCheckCertificateStatus)
|
||||
)
|
||||
}
|
||||
}
|
||||
val clientAuth = tls.trustStore?.let { trustStore ->
|
||||
val ts = loadKeystore(trustStore.file, trustStore.password)
|
||||
trustManager(
|
||||
ClientCertificateValidator.getTrustManager(ts, trustStore.isCheckCertificateStatus)
|
||||
)
|
||||
if(trustStore.isRequireClientCertificate) ClientAuth.REQUIRE
|
||||
else ClientAuth.OPTIONAL
|
||||
} ?: ClientAuth.NONE
|
||||
clientAuth(clientAuth)
|
||||
}.build()
|
||||
}
|
||||
}
|
||||
@@ -233,6 +243,31 @@ class GradleBuildCacheServer(private val cfg: Configuration) {
|
||||
}
|
||||
}
|
||||
|
||||
private val log = contextLogger()
|
||||
|
||||
private val serverHandler = let {
|
||||
val cacheImplementation = cfg.cache.materialize()
|
||||
val prefix = Path.of("/").resolve(Path.of(cfg.serverPath ?: "/"))
|
||||
ServerHandler(cacheImplementation, prefix)
|
||||
}
|
||||
|
||||
private val exceptionHandler = ExceptionHandler()
|
||||
private val throttlingHandler = ThrottlingHandler(cfg)
|
||||
|
||||
private val authenticator = when (val auth = cfg.authentication) {
|
||||
is Configuration.BasicAuthentication -> NettyHttpBasicAuthenticator(cfg.users, RoleAuthorizer())
|
||||
is Configuration.ClientCertificateAuthentication -> {
|
||||
ClientCertificateAuthenticator(
|
||||
RoleAuthorizer(),
|
||||
cfg.users[""]?.groups,
|
||||
userExtractor(auth),
|
||||
groupExtractor(auth)
|
||||
)
|
||||
}
|
||||
|
||||
else -> null
|
||||
}
|
||||
|
||||
private val sslContext: SslContext? = cfg.tls?.let(Companion::createSslCtx)
|
||||
|
||||
private fun userExtractor(authentication: Configuration.ClientCertificateAuthentication) =
|
||||
@@ -264,193 +299,86 @@ class GradleBuildCacheServer(private val cfg: Configuration) {
|
||||
}
|
||||
|
||||
override fun initChannel(ch: Channel) {
|
||||
val pipeline = ch.pipeline()
|
||||
val auth = cfg.authentication
|
||||
var authenticator: AbstractNettyHttpAuthenticator? = null
|
||||
if (auth is Configuration.BasicAuthentication) {
|
||||
val roleAuthorizer = RoleAuthorizer()
|
||||
authenticator = (NettyHttpBasicAuthenticator(cfg.users, roleAuthorizer))
|
||||
log.debug {
|
||||
"Created connection ${ch.id().asShortText()} with ${ch.remoteAddress()}"
|
||||
}
|
||||
if (sslContext != null) {
|
||||
val sslHandler = sslContext.newHandler(ch.alloc())
|
||||
pipeline.addLast(sslHandler)
|
||||
|
||||
if (auth is Configuration.ClientCertificateAuthentication) {
|
||||
val roleAuthorizer = RoleAuthorizer()
|
||||
authenticator = ClientCertificateAuthenticator(
|
||||
roleAuthorizer,
|
||||
sslHandler.engine(),
|
||||
userExtractor(auth),
|
||||
groupExtractor(auth)
|
||||
ch.closeFuture().addListener {
|
||||
log.debug {
|
||||
"Closed connection ${ch.id().asShortText()} with ${ch.remoteAddress()}"
|
||||
}
|
||||
}
|
||||
val pipeline = ch.pipeline()
|
||||
cfg.connection.also { conn ->
|
||||
val readTimeout = conn.readTimeout.toMillis()
|
||||
val writeTimeout = conn.writeTimeout.toMillis()
|
||||
if(readTimeout > 0 || writeTimeout > 0) {
|
||||
pipeline.addLast(
|
||||
IdleStateHandler(
|
||||
false,
|
||||
readTimeout,
|
||||
writeTimeout,
|
||||
0,
|
||||
TimeUnit.MILLISECONDS
|
||||
)
|
||||
)
|
||||
}
|
||||
val readIdleTimeout = conn.readIdleTimeout.toMillis()
|
||||
val writeIdleTimeout = conn.writeIdleTimeout.toMillis()
|
||||
val idleTimeout = conn.idleTimeout.toMillis()
|
||||
if(readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
|
||||
pipeline.addLast(
|
||||
IdleStateHandler(
|
||||
true,
|
||||
readIdleTimeout,
|
||||
writeIdleTimeout,
|
||||
idleTimeout,
|
||||
TimeUnit.MILLISECONDS
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
pipeline.addLast(object : ChannelInboundHandlerAdapter() {
|
||||
override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
|
||||
if (evt is IdleStateEvent) {
|
||||
when(evt.state()) {
|
||||
IdleState.READER_IDLE -> log.debug {
|
||||
"Read timeout reached on channel ${ch.id().asShortText()}, closing the connection"
|
||||
}
|
||||
IdleState.WRITER_IDLE -> log.debug {
|
||||
"Write timeout reached on channel ${ch.id().asShortText()}, closing the connection"
|
||||
}
|
||||
IdleState.ALL_IDLE -> log.debug {
|
||||
"Idle timeout reached on channel ${ch.id().asShortText()}, closing the connection"
|
||||
}
|
||||
null -> throw IllegalStateException("This should never happen")
|
||||
}
|
||||
ctx.close()
|
||||
}
|
||||
}
|
||||
})
|
||||
sslContext?.newHandler(ch.alloc())?.also {
|
||||
pipeline.addLast(SSL_HANDLER_NAME, it)
|
||||
}
|
||||
pipeline.addLast(HttpServerCodec())
|
||||
pipeline.addLast(HttpChunkContentCompressor(1024))
|
||||
pipeline.addLast(ChunkedWriteHandler())
|
||||
pipeline.addLast(HttpObjectAggregator(Int.MAX_VALUE))
|
||||
pipeline.addLast(HttpObjectAggregator(cfg.connection.maxRequestSize))
|
||||
authenticator?.let {
|
||||
pipeline.addLast(it)
|
||||
}
|
||||
val cacheImplementation = cfg.cache.materialize()
|
||||
val prefix = Path.of("/").resolve(Path.of(cfg.serverPath ?: "/"))
|
||||
pipeline.addLast(eventExecutorGroup, ServerHandler(cacheImplementation, prefix))
|
||||
pipeline.addLast(ExceptionHandler())
|
||||
}
|
||||
}
|
||||
|
||||
private class ExceptionHandler : ChannelDuplexHandler() {
|
||||
private val log = contextLogger()
|
||||
|
||||
private val NOT_AUTHORIZED: FullHttpResponse = DefaultFullHttpResponse(
|
||||
HttpVersion.HTTP_1_1, HttpResponseStatus.FORBIDDEN, Unpooled.EMPTY_BUFFER
|
||||
).apply {
|
||||
headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
|
||||
}
|
||||
|
||||
private val TOO_BIG: FullHttpResponse = DefaultFullHttpResponse(
|
||||
HttpVersion.HTTP_1_1, HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, Unpooled.EMPTY_BUFFER
|
||||
).apply {
|
||||
headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
|
||||
}
|
||||
|
||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
||||
when (cause) {
|
||||
is DecoderException -> {
|
||||
log.error(cause.message, cause)
|
||||
ctx.close()
|
||||
}
|
||||
|
||||
is SSLPeerUnverifiedException -> {
|
||||
ctx.writeAndFlush(NOT_AUTHORIZED.retainedDuplicate())
|
||||
.addListener(ChannelFutureListener.CLOSE_ON_FAILURE)
|
||||
}
|
||||
|
||||
is ContentTooLargeException -> {
|
||||
ctx.writeAndFlush(TOO_BIG.retainedDuplicate())
|
||||
.addListener(ChannelFutureListener.CLOSE_ON_FAILURE)
|
||||
}
|
||||
|
||||
else -> {
|
||||
log.error(cause.message, cause)
|
||||
ctx.close()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private class ServerHandler(private val cache: Cache, private val serverPrefix: Path) :
|
||||
SimpleChannelInboundHandler<FullHttpRequest>() {
|
||||
|
||||
companion object {
|
||||
private val log = contextLogger()
|
||||
}
|
||||
|
||||
override fun channelRead0(ctx: ChannelHandlerContext, msg: FullHttpRequest) {
|
||||
val keepAlive: Boolean = HttpUtil.isKeepAlive(msg)
|
||||
val method = msg.method()
|
||||
if (method === HttpMethod.GET) {
|
||||
val path = Path.of(msg.uri())
|
||||
val prefix = path.parent
|
||||
val key = path.fileName.toString()
|
||||
if (serverPrefix == prefix) {
|
||||
cache.get(key)?.let { channel ->
|
||||
log.debug(ctx) {
|
||||
"Cache hit for key '$key'"
|
||||
}
|
||||
val response = DefaultHttpResponse(msg.protocolVersion(), HttpResponseStatus.OK)
|
||||
response.headers()[HttpHeaderNames.CONTENT_TYPE] = HttpHeaderValues.APPLICATION_OCTET_STREAM
|
||||
if (!keepAlive) {
|
||||
response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE)
|
||||
response.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.IDENTITY)
|
||||
} else {
|
||||
response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
|
||||
response.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED)
|
||||
}
|
||||
ctx.write(response)
|
||||
when (channel) {
|
||||
is FileChannel -> {
|
||||
if (keepAlive) {
|
||||
ctx.write(ChunkedNioFile(channel))
|
||||
ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT)
|
||||
} else {
|
||||
ctx.writeAndFlush(DefaultFileRegion(channel, 0, channel.size()))
|
||||
.addListener(ChannelFutureListener.CLOSE)
|
||||
}
|
||||
}
|
||||
|
||||
else -> {
|
||||
ctx.write(ChunkedNioStream(channel))
|
||||
ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT)
|
||||
}
|
||||
}
|
||||
} ?: let {
|
||||
log.debug(ctx) {
|
||||
"Cache miss for key '$key'"
|
||||
}
|
||||
val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.NOT_FOUND)
|
||||
response.headers()[HttpHeaderNames.CONTENT_LENGTH] = 0
|
||||
ctx.writeAndFlush(response)
|
||||
}
|
||||
} else {
|
||||
log.warn(ctx) {
|
||||
"Got request for unhandled path '${msg.uri()}'"
|
||||
}
|
||||
val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.BAD_REQUEST)
|
||||
response.headers()[HttpHeaderNames.CONTENT_LENGTH] = 0
|
||||
ctx.writeAndFlush(response)
|
||||
}
|
||||
} else if (method === HttpMethod.PUT) {
|
||||
val path = Path.of(msg.uri())
|
||||
val prefix = path.parent
|
||||
val key = path.fileName.toString()
|
||||
|
||||
if (serverPrefix == prefix) {
|
||||
log.debug(ctx) {
|
||||
"Added value for key '$key' to build cache"
|
||||
}
|
||||
val bodyBytes = msg.content().run {
|
||||
if (isDirect) {
|
||||
ByteArray(readableBytes()).also {
|
||||
readBytes(it)
|
||||
}
|
||||
} else {
|
||||
array()
|
||||
}
|
||||
}
|
||||
cache.put(key, bodyBytes)
|
||||
val response = DefaultFullHttpResponse(
|
||||
msg.protocolVersion(), HttpResponseStatus.CREATED,
|
||||
Unpooled.copiedBuffer(key.toByteArray())
|
||||
)
|
||||
response.headers()[HttpHeaderNames.CONTENT_LENGTH] = response.content().readableBytes()
|
||||
ctx.writeAndFlush(response)
|
||||
} else {
|
||||
log.warn(ctx) {
|
||||
"Got request for unhandled path '${msg.uri()}'"
|
||||
}
|
||||
val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.BAD_REQUEST)
|
||||
response.headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
|
||||
ctx.writeAndFlush(response)
|
||||
}
|
||||
} else {
|
||||
log.warn(ctx) {
|
||||
"Got request with unhandled method '${msg.method().name()}'"
|
||||
}
|
||||
val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.BAD_REQUEST)
|
||||
response.headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
|
||||
ctx.writeAndFlush(response)
|
||||
}
|
||||
pipeline.addLast(throttlingHandler)
|
||||
pipeline.addLast(eventExecutorGroup, serverHandler)
|
||||
pipeline.addLast(exceptionHandler)
|
||||
}
|
||||
}
|
||||
|
||||
class ServerHandle(
|
||||
httpChannelFuture: ChannelFuture,
|
||||
private val executorGroups : Iterable<EventExecutorGroup>
|
||||
private val executorGroups: Iterable<EventExecutorGroup>
|
||||
) : AutoCloseable {
|
||||
private val httpChannel: Channel = httpChannelFuture.channel()
|
||||
|
||||
private val closeFuture: ChannelFuture = httpChannel.closeFuture()
|
||||
private val log = contextLogger()
|
||||
|
||||
fun shutdown(): ChannelFuture {
|
||||
return httpChannel.close()
|
||||
@@ -465,31 +393,30 @@ class GradleBuildCacheServer(private val cfg: Configuration) {
|
||||
}
|
||||
}
|
||||
log.info {
|
||||
"GradleBuildCacheServer has been gracefully shut down"
|
||||
"RemoteBuildCacheServer has been gracefully shut down"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun run(): ServerHandle {
|
||||
// Create the multithreaded event loops for the server
|
||||
val bossGroup = NioEventLoopGroup(0)
|
||||
val bossGroup = NioEventLoopGroup(1)
|
||||
val serverSocketChannel = NioServerSocketChannel::class.java
|
||||
val workerGroup = bossGroup
|
||||
val workerGroup = NioEventLoopGroup(0)
|
||||
val eventExecutorGroup = run {
|
||||
val threadFactory = if(cfg.isUseVirtualThread) {
|
||||
val threadFactory = if (cfg.eventExecutor.isUseVirtualThreads) {
|
||||
Thread.ofVirtual().factory()
|
||||
} else {
|
||||
null
|
||||
}
|
||||
DefaultEventExecutorGroup(Runtime.getRuntime().availableProcessors(), threadFactory)
|
||||
}
|
||||
// A helper class that simplifies server configuration
|
||||
val bootstrap = ServerBootstrap().apply {
|
||||
// Configure the server
|
||||
group(bossGroup, workerGroup)
|
||||
channel(serverSocketChannel)
|
||||
childHandler(ServerInitializer(cfg, eventExecutorGroup))
|
||||
option(ChannelOption.SO_BACKLOG, 128)
|
||||
option(ChannelOption.SO_BACKLOG, cfg.incomingConnectionsBacklogSize)
|
||||
childOption(ChannelOption.SO_KEEPALIVE, true)
|
||||
}
|
||||
|
||||
@@ -498,26 +425,8 @@ class GradleBuildCacheServer(private val cfg: Configuration) {
|
||||
val bindAddress = InetSocketAddress(cfg.host, cfg.port)
|
||||
val httpChannel = bootstrap.bind(bindAddress).sync()
|
||||
log.info {
|
||||
"GradleBuildCacheServer is listening on ${cfg.host}:${cfg.port}"
|
||||
"RemoteBuildCacheServer is listening on ${cfg.host}:${cfg.port}"
|
||||
}
|
||||
return ServerHandle(httpChannel, setOf(bossGroup, workerGroup, eventExecutorGroup))
|
||||
}
|
||||
|
||||
companion object {
|
||||
|
||||
val DEFAULT_CONFIGURATION_URL by lazy { "classpath:net/woggioni/gbcs/gbcs-default.xml".toUrl() }
|
||||
|
||||
fun loadConfiguration(configurationFile: Path): Configuration {
|
||||
val dbf = Xml.newDocumentBuilderFactory(null)
|
||||
val db = dbf.newDocumentBuilder()
|
||||
val doc = Files.newInputStream(configurationFile).use(db::parse)
|
||||
return Parser.parse(doc)
|
||||
}
|
||||
|
||||
fun dumpConfiguration(conf: Configuration, outputStream: OutputStream) {
|
||||
Xml.write(Serializer.serialize(conf), outputStream)
|
||||
}
|
||||
|
||||
private val log = contextLogger()
|
||||
}
|
||||
}
|
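A minimal embedding sketch based only on the public surface shown in this file (the configuration file path is made up):

// Sketch only: the configuration file path is illustrative.
import net.woggioni.rbcs.server.RemoteBuildCacheServer
import java.nio.file.Path

fun main() {
    val cfg = RemoteBuildCacheServer.loadConfiguration(Path.of("/etc/rbcs/rbcs.xml"))
    val handle = RemoteBuildCacheServer(cfg).run()
    // Netty's event loop threads keep the JVM alive; close the handle on shutdown
    Runtime.getRuntime().addShutdownHook(Thread { handle.close() })
}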
@@ -1,4 +1,4 @@
|
||||
package net.woggioni.gbcs.auth
|
||||
package net.woggioni.rbcs.server.auth
|
||||
|
||||
import io.netty.buffer.Unpooled
|
||||
import io.netty.channel.ChannelFutureListener
|
||||
@@ -11,32 +11,48 @@ import io.netty.handler.codec.http.HttpRequest
|
||||
import io.netty.handler.codec.http.HttpResponseStatus
|
||||
import io.netty.handler.codec.http.HttpVersion
|
||||
import io.netty.util.ReferenceCountUtil
|
||||
import net.woggioni.gbcs.api.Role
|
||||
import net.woggioni.rbcs.api.Configuration
|
||||
import net.woggioni.rbcs.api.Configuration.Group
|
||||
import net.woggioni.rbcs.api.Role
|
||||
import net.woggioni.rbcs.server.RemoteBuildCacheServer
|
||||
|
||||
|
||||
abstract class AbstractNettyHttpAuthenticator(private val authorizer : Authorizer)
|
||||
: ChannelInboundHandlerAdapter() {
|
||||
abstract class AbstractNettyHttpAuthenticator(private val authorizer: Authorizer) : ChannelInboundHandlerAdapter() {
|
||||
|
||||
companion object {
|
||||
private val AUTHENTICATION_FAILED: FullHttpResponse = DefaultFullHttpResponse(
|
||||
HttpVersion.HTTP_1_1, HttpResponseStatus.UNAUTHORIZED, Unpooled.EMPTY_BUFFER).apply {
|
||||
HttpVersion.HTTP_1_1, HttpResponseStatus.UNAUTHORIZED, Unpooled.EMPTY_BUFFER
|
||||
).apply {
|
||||
headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
|
||||
}
|
||||
|
||||
private val NOT_AUTHORIZED: FullHttpResponse = DefaultFullHttpResponse(
|
||||
HttpVersion.HTTP_1_1, HttpResponseStatus.FORBIDDEN, Unpooled.EMPTY_BUFFER).apply {
|
||||
HttpVersion.HTTP_1_1, HttpResponseStatus.FORBIDDEN, Unpooled.EMPTY_BUFFER
|
||||
).apply {
|
||||
headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
|
||||
}
|
||||
}
|
||||
|
||||
class AuthenticationResult(val user: Configuration.User?, val groups: Set<Group>)
|
||||
|
||||
abstract fun authenticate(ctx : ChannelHandlerContext, req : HttpRequest) : Set<Role>?
|
||||
abstract fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): AuthenticationResult?
|
||||
|
||||
override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
|
||||
if(msg is HttpRequest) {
|
||||
val roles = authenticate(ctx, msg) ?: return authenticationFailure(ctx, msg)
|
||||
if (msg is HttpRequest) {
|
||||
val result = authenticate(ctx, msg) ?: return authenticationFailure(ctx, msg)
|
||||
ctx.channel().attr(RemoteBuildCacheServer.userAttribute).set(result.user)
|
||||
ctx.channel().attr(RemoteBuildCacheServer.groupAttribute).set(result.groups)
|
||||
|
||||
val roles = (
|
||||
(result.user?.let { user ->
|
||||
user.groups.asSequence().flatMap { group ->
|
||||
group.roles.asSequence()
|
||||
}
|
||||
} ?: emptySequence<Role>()) +
|
||||
result.groups.asSequence().flatMap { it.roles.asSequence() }
|
||||
).toSet()
|
||||
val authorized = authorizer.authorize(roles, msg)
|
||||
if(authorized) {
|
||||
if (authorized) {
|
||||
super.channelRead(ctx, msg)
|
||||
} else {
|
||||
authorizationFailure(ctx, msg)
|
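The role set computed in channelRead above can be summarized as a pure function over the AuthenticationResult (equivalent formulation, added here for illustration only):

// Union of the roles granted via the user's groups and via the groups attached to the result.
import net.woggioni.rbcs.api.Role
import net.woggioni.rbcs.server.auth.AbstractNettyHttpAuthenticator.AuthenticationResult

fun effectiveRoles(result: AuthenticationResult): Set<Role> =
    (result.user?.groups.orEmpty() + result.groups).flatMap { it.roles }.toSet()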
@@ -1,7 +1,7 @@
|
||||
package net.woggioni.gbcs.auth
|
||||
package net.woggioni.rbcs.server.auth
|
||||
|
||||
import io.netty.handler.codec.http.HttpRequest
|
||||
import net.woggioni.gbcs.api.Role
|
||||
import net.woggioni.rbcs.api.Role
|
||||
|
||||
fun interface Authorizer {
|
||||
fun authorize(roles : Set<Role>, request: HttpRequest) : Boolean
|
@@ -1,24 +1,27 @@
|
||||
package net.woggioni.gbcs.auth
package net.woggioni.rbcs.server.auth

import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.handler.ssl.SslHandler
import io.netty.handler.ssl.SslHandshakeCompletionEvent
import java.security.KeyStore
import java.security.cert.CertPathValidator
import java.security.cert.CertPathValidatorException
import java.security.cert.CertificateException
import java.security.cert.CertificateFactory
import java.security.cert.PKIXParameters
import java.security.cert.PKIXRevocationChecker
import java.security.cert.X509Certificate
import java.util.EnumSet
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.handler.ssl.SslHandler
import io.netty.handler.ssl.SslHandshakeCompletionEvent
import javax.net.ssl.SSLSession
import javax.net.ssl.TrustManagerFactory
import javax.net.ssl.X509TrustManager


class ClientCertificateValidator private constructor(
    private val sslHandler : SslHandler,
    private val x509TrustManager: X509TrustManager) : ChannelInboundHandlerAdapter() {
    private val sslHandler: SslHandler,
    private val x509TrustManager: X509TrustManager
) : ChannelInboundHandlerAdapter() {
    override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
        if (evt is SslHandshakeCompletionEvent) {
            if (evt.isSuccess) {
@@ -34,13 +37,14 @@ class ClientCertificateValidator private constructor(
    }

    companion object {
        fun getTrustManager(trustStore : KeyStore?, certificateRevocationEnabled : Boolean) : X509TrustManager {
            return if(trustStore != null) {
        fun getTrustManager(trustStore: KeyStore?, certificateRevocationEnabled: Boolean): X509TrustManager {
            return if (trustStore != null) {
                val certificateFactory = CertificateFactory.getInstance("X.509")
                val validator = CertPathValidator.getInstance("PKIX").apply {
                    val rc = revocationChecker as PKIXRevocationChecker
                    rc.options = EnumSet.of(
                        PKIXRevocationChecker.Option.NO_FALLBACK)
                        PKIXRevocationChecker.Option.NO_FALLBACK
                    )
                }
                val params = PKIXParameters(trustStore).apply {
                    isRevocationEnabled = certificateRevocationEnabled
@@ -48,7 +52,11 @@ class ClientCertificateValidator private constructor(
                object : X509TrustManager {
                    override fun checkClientTrusted(chain: Array<out X509Certificate>, authType: String) {
                        val clientCertificateChain = certificateFactory.generateCertPath(chain.toList())
                        validator.validate(clientCertificateChain, params)
                        try {
                            validator.validate(clientCertificateChain, params)
                        } catch (ex: CertPathValidatorException) {
                            throw CertificateException(ex)
                        }
                    }

                    override fun checkServerTrusted(chain: Array<out X509Certificate>, authType: String) {
@@ -56,7 +64,7 @@ class ClientCertificateValidator private constructor(
                    }

                    private val acceptedIssuers = trustStore.aliases().asSequence()
                        .filter (trustStore::isCertificateEntry)
                        .filter(trustStore::isCertificateEntry)
                        .map(trustStore::getCertificate)
                        .map { it as X509Certificate }
                        .toList()
@@ -66,11 +74,16 @@ class ClientCertificateValidator private constructor(
            }
            } else {
                val trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm())
                trustManagerFactory.trustManagers.asSequence().filter { it is X509TrustManager }.single() as X509TrustManager
                trustManagerFactory.trustManagers.asSequence().filter { it is X509TrustManager }
                    .single() as X509TrustManager
            }
        }

        fun of(sslHandler : SslHandler, trustStore : KeyStore?, certificateRevocationEnabled : Boolean) : ClientCertificateValidator {
        fun of(
            sslHandler: SslHandler,
            trustStore: KeyStore?,
            certificateRevocationEnabled: Boolean
        ): ClientCertificateValidator {
            return ClientCertificateValidator(sslHandler, getTrustManager(trustStore, certificateRevocationEnabled))
        }
    }
}
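The hunks above move the client-certificate validator to the rbcs.server package and wrap PKIX path validation errors in a CertificateException. Since the validator reacts to the SslHandshakeCompletionEvent fired by the SslHandler, it has to sit after that handler in the pipeline. A minimal sketch of such wiring, assuming a prebuilt SslContext and trust store; the ServerChannelInitializer class and its parameters are hypothetical, only ClientCertificateValidator.of comes from the diff above:

import io.netty.channel.ChannelInitializer
import io.netty.channel.socket.SocketChannel
import io.netty.handler.ssl.SslContext
import java.security.KeyStore

// Hypothetical channel initializer; sslContext and trustStore are assumed to be built elsewhere.
class ServerChannelInitializer(
    private val sslContext: SslContext,
    private val trustStore: KeyStore?
) : ChannelInitializer<SocketChannel>() {
    override fun initChannel(ch: SocketChannel) {
        // TLS termination first, then the post-handshake client certificate check
        val sslHandler = sslContext.newHandler(ch.alloc())
        ch.pipeline().addLast(sslHandler)
        ch.pipeline().addLast(
            ClientCertificateValidator.of(sslHandler, trustStore, certificateRevocationEnabled = true)
        )
    }
}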
@@ -1,14 +1,14 @@
package net.woggioni.gbcs.auth
package net.woggioni.rbcs.server.auth

import io.netty.handler.codec.http.HttpMethod
import io.netty.handler.codec.http.HttpRequest
import net.woggioni.gbcs.api.Role
import net.woggioni.rbcs.api.Role

class RoleAuthorizer : Authorizer {

    companion object {
        private val METHOD_MAP = mapOf(
            Role.Reader to setOf(HttpMethod.GET, HttpMethod.HEAD),
            Role.Reader to setOf(HttpMethod.GET, HttpMethod.HEAD, HttpMethod.TRACE),
            Role.Writer to setOf(HttpMethod.PUT, HttpMethod.POST)
        )
    }
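This hunk widens the Reader role to also allow TRACE requests. The authorization entry point itself is not shown here; a hedged sketch of how a role-to-method map like METHOD_MAP could be consulted, written as if it were a member of RoleAuthorizer so it can see the private map (the isMethodAllowed helper is hypothetical):

// Hypothetical helper; Role, METHOD_MAP and HttpRequest are the ones imported in the hunk above.
private fun isMethodAllowed(roles: Set<Role>, request: HttpRequest): Boolean =
    roles.asSequence()
        .flatMap { role -> METHOD_MAP[role].orEmpty().asSequence() }
        .any { method -> method == request.method() }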
130
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/FileSystemCache.kt
vendored
Normal file
@@ -0,0 +1,130 @@
package net.woggioni.rbcs.server.cache

import io.netty.buffer.ByteBuf
import net.woggioni.rbcs.api.Cache
import net.woggioni.rbcs.common.ByteBufInputStream
import net.woggioni.rbcs.common.RBCS.digestString
import net.woggioni.rbcs.common.contextLogger
import net.woggioni.jwo.JWO
import net.woggioni.jwo.LockFile
import java.nio.channels.Channels
import java.nio.channels.FileChannel
import java.nio.file.Files
import java.nio.file.Path
import java.nio.file.StandardCopyOption
import java.nio.file.StandardOpenOption
import java.nio.file.attribute.BasicFileAttributes
import java.security.MessageDigest
import java.time.Duration
import java.time.Instant
import java.util.concurrent.CompletableFuture
import java.util.concurrent.atomic.AtomicReference
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.Inflater
import java.util.zip.InflaterInputStream

class FileSystemCache(
    val root: Path,
    val maxAge: Duration,
    val digestAlgorithm: String?,
    val compressionEnabled: Boolean,
    val compressionLevel: Int
) : Cache {

    private companion object {
        @JvmStatic
        private val log = contextLogger()
    }

    init {
        Files.createDirectories(root)
    }

    private var nextGc = AtomicReference(Instant.now().plus(maxAge))

    override fun get(key: String) = (digestAlgorithm
        ?.let(MessageDigest::getInstance)
        ?.let { md ->
            digestString(key.toByteArray(), md)
        } ?: key).let { digest ->
        root.resolve(digest).takeIf(Files::exists)
            ?.let { file ->
                file.takeIf(Files::exists)?.let { file ->
                    if (compressionEnabled) {
                        val inflater = Inflater()
                        Channels.newChannel(
                            InflaterInputStream(
                                Channels.newInputStream(
                                    FileChannel.open(
                                        file,
                                        StandardOpenOption.READ
                                    )
                                ), inflater
                            )
                        )
                    } else {
                        FileChannel.open(file, StandardOpenOption.READ)
                    }
                }
            }.also {
                gc()
            }.let {
                CompletableFuture.completedFuture(it)
            }
    }

    override fun put(key: String, content: ByteBuf): CompletableFuture<Void> {
        (digestAlgorithm
            ?.let(MessageDigest::getInstance)
            ?.let { md ->
                digestString(key.toByteArray(), md)
            } ?: key).let { digest ->
            val file = root.resolve(digest)
            val tmpFile = Files.createTempFile(root, null, ".tmp")
            try {
                Files.newOutputStream(tmpFile).let {
                    if (compressionEnabled) {
                        val deflater = Deflater(compressionLevel)
                        DeflaterOutputStream(it, deflater)
                    } else {
                        it
                    }
                }.use {
                    JWO.copy(ByteBufInputStream(content), it)
                }
                Files.move(tmpFile, file, StandardCopyOption.ATOMIC_MOVE)
            } catch (t: Throwable) {
                Files.delete(tmpFile)
                throw t
            }
        }.also {
            gc()
        }
        return CompletableFuture.completedFuture(null)
    }

    private fun gc() {
        val now = Instant.now()
        val oldValue = nextGc.getAndSet(now.plus(maxAge))
        if (oldValue < now) {
            actualGc(now)
        }
    }

    @Synchronized
    private fun actualGc(now: Instant) {
        Files.list(root).filter {
            val creationTimeStamp = Files.readAttributes(it, BasicFileAttributes::class.java)
                .creationTime()
                .toInstant()
            now > creationTimeStamp.plus(maxAge)
        }.forEach { file ->
            LockFile.acquire(file, false).use {
                Files.delete(file)
            }
        }
    }

    override fun close() {}
}
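The new FileSystemCache above stores each entry as an optionally deflate-compressed file named after the digested key, writes through a temp file moved atomically into place, and prunes expired entries lazily on access. A hedged usage sketch, assuming the Cache contract (not part of this diff) hands back a java.nio.channels.ReadableByteChannel from get; the path and settings below are placeholders:

import io.netty.buffer.Unpooled
import java.nio.channels.Channels
import java.nio.file.Path
import java.time.Duration
import java.util.zip.Deflater

fun fileSystemCacheDemo() {
    // Hypothetical cache directory and tuning values
    val cache = FileSystemCache(
        root = Path.of("/tmp/rbcs-cache"),
        maxAge = Duration.ofDays(1),
        digestAlgorithm = "MD5",
        compressionEnabled = true,
        compressionLevel = Deflater.DEFAULT_COMPRESSION
    )
    cache.put("some-key", Unpooled.wrappedBuffer("hello".toByteArray())).join()
    cache.get("some-key").join()?.let { channel ->
        // The returned channel yields the decompressed payload
        Channels.newInputStream(channel).use { println(String(it.readAllBytes())) }
    }
    cache.close()
}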
@@ -1,7 +1,7 @@
package net.woggioni.gbcs.cache
package net.woggioni.rbcs.server.cache

import net.woggioni.gbcs.api.Configuration
import net.woggioni.gbcs.base.GBCS
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.common.RBCS
import net.woggioni.jwo.Application
import java.nio.file.Path
import java.time.Duration
@@ -14,14 +14,14 @@ data class FileSystemCacheConfiguration(
    val compressionLevel: Int,
) : Configuration.Cache {
    override fun materialize() = FileSystemCache(
        root ?: Application.builder("gbcs").build().computeCacheDirectory(),
        root ?: Application.builder("rbcs").build().computeCacheDirectory(),
        maxAge,
        digestAlgorithm,
        compressionEnabled,
        compressionLevel
    )

    override fun getNamespaceURI() = GBCS.GBCS_NAMESPACE_URI
    override fun getNamespaceURI() = RBCS.RBCS_NAMESPACE_URI

    override fun getTypeName() = "fileSystemCacheType"
}
@@ -1,8 +1,9 @@
package net.woggioni.gbcs.cache
package net.woggioni.rbcs.server.cache

import net.woggioni.gbcs.api.CacheProvider
import net.woggioni.gbcs.base.GBCS
import net.woggioni.gbcs.base.Xml
import net.woggioni.rbcs.api.CacheProvider
import net.woggioni.rbcs.common.RBCS
import net.woggioni.rbcs.common.Xml
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
import org.w3c.dom.Document
import org.w3c.dom.Element
import java.nio.file.Path
@@ -11,29 +12,25 @@ import java.util.zip.Deflater

class FileSystemCacheProvider : CacheProvider<FileSystemCacheConfiguration> {

    override fun getXmlSchemaLocation() = "classpath:net/woggioni/gbcs/schema/gbcs.xsd"
    override fun getXmlSchemaLocation() = "classpath:net/woggioni/rbcs/server/schema/rbcs.xsd"

    override fun getXmlType() = "fileSystemCacheType"

    override fun getXmlNamespace() = "urn:net.woggioni.gbcs"
    override fun getXmlNamespace() = "urn:net.woggioni.rbcs.server"

    override fun deserialize(el: Element): FileSystemCacheConfiguration {
        val path = el.getAttribute("path")
            .takeIf(String::isNotEmpty)
        val path = el.renderAttribute("path")
            ?.let(Path::of)
        val maxAge = el.getAttribute("max-age")
            .takeIf(String::isNotEmpty)
        val maxAge = el.renderAttribute("max-age")
            ?.let(Duration::parse)
            ?: Duration.ofDays(1)
        val enableCompression = el.getAttribute("enable-compression")
            .takeIf(String::isNotEmpty)
        val enableCompression = el.renderAttribute("enable-compression")
            ?.let(String::toBoolean)
            ?: true
        val compressionLevel = el.getAttribute("compression-level")
            .takeIf(String::isNotEmpty)
        val compressionLevel = el.renderAttribute("compression-level")
            ?.let(String::toInt)
            ?: Deflater.DEFAULT_COMPRESSION
        val digestAlgorithm = el.getAttribute("digest").takeIf(String::isNotEmpty) ?: "MD5"
        val digestAlgorithm = el.renderAttribute("digest") ?: "MD5"

        return FileSystemCacheConfiguration(
            path,
@@ -47,8 +44,8 @@ class FileSystemCacheProvider : CacheProvider<FileSystemCacheConfiguration> {
    override fun serialize(doc: Document, cache : FileSystemCacheConfiguration) = cache.run {
        val result = doc.createElement("cache")
        Xml.of(doc, result) {
            val prefix = doc.lookupPrefix(GBCS.GBCS_NAMESPACE_URI)
            attr("xs:type", "${prefix}:fileSystemCacheType", GBCS.XML_SCHEMA_NAMESPACE_URI)
            val prefix = doc.lookupPrefix(RBCS.RBCS_NAMESPACE_URI)
            attr("xs:type", "${prefix}:fileSystemCacheType", RBCS.XML_SCHEMA_NAMESPACE_URI)
            attr("path", root.toString())
            attr("max-age", maxAge.toString())
            digestAlgorithm?.let { digestAlgorithm ->
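The deserialize changes above replace getAttribute(...).takeIf(String::isNotEmpty) chains with Xml.Companion.renderAttribute. Its implementation is not part of this diff; judging only from the call sites, it appears to yield null when the attribute is absent or empty, roughly equivalent to this hypothetical stand-in:

import org.w3c.dom.Element

// Hypothetical equivalent of the renderAttribute extension used above,
// inferred only from how the call sites changed in this hunk.
fun Element.renderAttributeSketch(name: String): String? =
    getAttribute(name).takeIf(String::isNotEmpty)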
150
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/InMemoryCache.kt
vendored
Normal file
@@ -0,0 +1,150 @@
package net.woggioni.rbcs.server.cache

import io.netty.buffer.ByteBuf
import net.woggioni.rbcs.api.Cache
import net.woggioni.rbcs.common.ByteBufInputStream
import net.woggioni.rbcs.common.ByteBufOutputStream
import net.woggioni.rbcs.common.RBCS.digestString
import net.woggioni.rbcs.common.contextLogger
import net.woggioni.jwo.JWO
import java.nio.channels.Channels
import java.security.MessageDigest
import java.time.Duration
import java.time.Instant
import java.util.concurrent.CompletableFuture
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.PriorityBlockingQueue
import java.util.concurrent.atomic.AtomicLong
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.Inflater
import java.util.zip.InflaterInputStream

class InMemoryCache(
    val maxAge: Duration,
    val maxSize: Long,
    val digestAlgorithm: String?,
    val compressionEnabled: Boolean,
    val compressionLevel: Int
) : Cache {

    companion object {
        @JvmStatic
        private val log = contextLogger()
    }

    private val size = AtomicLong()
    private val map = ConcurrentHashMap<String, ByteBuf>()

    private class RemovalQueueElement(val key: String, val value : ByteBuf, val expiry : Instant) : Comparable<RemovalQueueElement> {
        override fun compareTo(other: RemovalQueueElement) = expiry.compareTo(other.expiry)
    }

    private val removalQueue = PriorityBlockingQueue<RemovalQueueElement>()

    private var running = true
    private val garbageCollector = Thread {
        while(true) {
            val el = removalQueue.take()
            val buf = el.value
            val now = Instant.now()
            if(now > el.expiry) {
                val removed = map.remove(el.key, buf)
                if(removed) {
                    updateSizeAfterRemoval(buf)
                    //Decrease the reference count for map
                    buf.release()
                }
                //Decrease the reference count for removalQueue
                buf.release()
            } else {
                removalQueue.put(el)
                Thread.sleep(minOf(Duration.between(now, el.expiry), Duration.ofSeconds(1)))
            }
        }
    }.apply {
        start()
    }

    private fun removeEldest() : Long {
        while(true) {
            val el = removalQueue.take()
            val buf = el.value
            val removed = map.remove(el.key, buf)
            //Decrease the reference count for removalQueue
            buf.release()
            if(removed) {
                val newSize = updateSizeAfterRemoval(buf)
                //Decrease the reference count for map
                buf.release()
                return newSize
            }
        }
    }

    private fun updateSizeAfterRemoval(removed: ByteBuf) : Long {
        return size.updateAndGet { currentSize : Long ->
            currentSize - removed.readableBytes()
        }
    }

    override fun close() {
        running = false
        garbageCollector.join()
    }

    override fun get(key: String) =
        (digestAlgorithm
            ?.let(MessageDigest::getInstance)
            ?.let { md ->
                digestString(key.toByteArray(), md)
            } ?: key
        ).let { digest ->
            map[digest]
                ?.let { value ->
                    val copy = value.retainedDuplicate()
                    copy.touch("This has to be released by the caller of the cache")
                    if (compressionEnabled) {
                        val inflater = Inflater()
                        Channels.newChannel(InflaterInputStream(ByteBufInputStream(copy), inflater))
                    } else {
                        Channels.newChannel(ByteBufInputStream(copy))
                    }
                }
        }.let {
            CompletableFuture.completedFuture(it)
        }

    override fun put(key: String, content: ByteBuf) =
        (digestAlgorithm
            ?.let(MessageDigest::getInstance)
            ?.let { md ->
                digestString(key.toByteArray(), md)
            } ?: key).let { digest ->
            content.retain()
            val value = if (compressionEnabled) {
                val deflater = Deflater(compressionLevel)
                val buf = content.alloc().buffer()
                buf.retain()
                DeflaterOutputStream(ByteBufOutputStream(buf), deflater).use { outputStream ->
                    ByteBufInputStream(content).use { inputStream ->
                        JWO.copy(inputStream, outputStream)
                    }
                }
                buf
            } else {
                content
            }
            val old = map.put(digest, value)
            val delta = value.readableBytes() - (old?.readableBytes() ?: 0)
            var newSize = size.updateAndGet { currentSize : Long ->
                currentSize + delta
            }
            removalQueue.put(RemovalQueueElement(digest, value.retain(), Instant.now().plus(maxAge)))
            while(newSize > maxSize) {
                newSize = removeEldest()
            }
        }.let {
            CompletableFuture.completedFuture<Void>(null)
        }
}
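The new InMemoryCache above keeps optionally deflate-compressed entries in a ConcurrentHashMap, tracks the total payload size, and evicts by expiry and by size through the removal queue drained on a background thread. A hedged usage sketch; the exact return type of get is not part of this diff (a readable channel is assumed), the tuning values are placeholders, and per the touch() hint the caller is responsible for releasing the duplicated buffer backing the returned channel:

import io.netty.buffer.Unpooled
import java.nio.channels.Channels
import java.time.Duration
import java.util.zip.Deflater

fun inMemoryCacheDemo() {
    val cache = InMemoryCache(
        maxAge = Duration.ofHours(6),
        maxSize = 64L * 1024 * 1024,
        digestAlgorithm = "MD5",
        compressionEnabled = true,
        compressionLevel = Deflater.DEFAULT_COMPRESSION
    )
    cache.put("some-key", Unpooled.wrappedBuffer("hello".toByteArray())).join()
    cache.get("some-key").join()?.let { channel ->
        // Reads back the decompressed payload
        Channels.newInputStream(channel).use { println(String(it.readAllBytes())) }
    }
}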
25
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/InMemoryCacheConfiguration.kt
vendored
Normal file
@@ -0,0 +1,25 @@
package net.woggioni.rbcs.server.cache

import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.common.RBCS
import java.time.Duration

data class InMemoryCacheConfiguration(
    val maxAge: Duration,
    val maxSize: Long,
    val digestAlgorithm : String?,
    val compressionEnabled: Boolean,
    val compressionLevel: Int,
) : Configuration.Cache {
    override fun materialize() = InMemoryCache(
        maxAge,
        maxSize,
        digestAlgorithm,
        compressionEnabled,
        compressionLevel
    )

    override fun getNamespaceURI() = RBCS.RBCS_NAMESPACE_URI

    override fun getTypeName() = "inMemoryCacheType"
}
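InMemoryCacheConfiguration is the bridge between the parsed configuration and the runtime cache: materialize() is the factory hook that Configuration.Cache implementations provide. A hedged sketch of invoking it directly; in the running server the configuration is presumably produced by a CacheProvider from rbcs.xml rather than constructed by hand, and the values below are placeholders:

import java.time.Duration
import java.util.zip.Deflater

// Hypothetical direct construction of the configuration and its materialized cache
val inMemoryCache = InMemoryCacheConfiguration(
    maxAge = Duration.ofHours(6),
    maxSize = 64L * 1024 * 1024,
    digestAlgorithm = "MD5",
    compressionEnabled = true,
    compressionLevel = Deflater.DEFAULT_COMPRESSION
).materialize()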
Some files were not shown because too many files have changed in this diff