Compare commits
Comparing 47ec94caf2...0.2.0-alph (58 commits)

Commit SHA1s:
ad00ebee9b, adf8a0cf24, 42eb26a948, f048a60540, 0463038aaa, 7eca8a270d, 84d7c977f9, 317eadce07,
af79e74b95, 78ae21caa4, 6c0eadb9fb, 5fef1b932e, 5e173dbf62, 53b24e3d54, 7d0f24fa58, 1b6cf1bd96,
4180df2352, c2e388b931, 6c62ac85c0, 89153b60f8, a2a40ab60f, 45458761f3, 90a5834f5f, 1823d0b9ca,
649cbba954, eb9ccce3be, 316f64cf9d, 24a49779f9, 423b749db9, 9ce3e7fa0a, 1e6ece37a5, fc9900d821,
1a78c8092b, 3d1847c408, 702556bfbb, 06e9e7ca09, fa5bb55baa, 007d0fffd6, 75ebf2248f, 241d95fe1c,
3b7030c302, a8670277e7, 03ee75266d, 05a265e4b4, 5af99330f8, 747168cda3, 225f156864, 696cb74740,
59f267426c, 608a9d18de, d2c00402df, d701157b06, 01d5b1462c, d5a2c4a591, 0fdb37fb54, 688a196a52,
13f7ecc88a, f28ecca45e

.gitea/workflows/build.yaml (new file, 64 lines)
@@ -0,0 +1,64 @@
name: CI
on:
  push:
    tags:
      - '*'
jobs:
  build:
    runs-on: hostinger
    steps:
      - name: Checkout sources
        uses: actions/checkout@v4
      - name: Setup Gradle
        uses: gradle/actions/setup-gradle@v3
      - name: Execute Gradle build
        run: ./gradlew build
      - name: Prepare Docker image build
        run: ./gradlew prepareDockerBuild
      - name: Get project version
        id: retrieve-version
        run: ./gradlew -q version >> "$GITHUB_OUTPUT"
      - name: Set up QEMU
        uses: docker/setup-qemu-action@v3
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3
        with:
          driver: docker-container
      - name: Login to Gitea container registry
        uses: docker/login-action@v3
        with:
          registry: gitea.woggioni.net
          username: woggioni
          password: ${{ secrets.PUBLISHER_TOKEN }}
      - name: Build rbcs Docker image
        uses: docker/build-push-action@v5.3.0
        with:
          context: "docker/build/docker"
          platforms: linux/amd64,linux/arm64
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:latest
            gitea.woggioni.net/woggioni/rbcs:${{ steps.retrieve-version.outputs.VERSION }}
          target: release
          cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
      - name: Build rbcs memcache Docker image
        uses: docker/build-push-action@v5.3.0
        with:
          context: "docker/build/docker"
          platforms: linux/amd64,linux/arm64
          push: true
          pull: true
          tags: |
            gitea.woggioni.net/woggioni/rbcs:memcache
            gitea.woggioni.net/woggioni/rbcs:memcache-${{ steps.retrieve-version.outputs.VERSION }}
          target: release-memcache
          cache-from: type=registry,ref=gitea.woggioni.net/woggioni/rbcs:buildx
          cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/rbcs:buildx
      - name: Publish artifacts
        env:
          PUBLISHER_TOKEN: ${{ secrets.PUBLISHER_TOKEN }}
        run: ./gradlew publish
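
Note: the "Get project version" step works together with the `version` task added to build.gradle below, which prints `VERSION=<project version>`; appending that line to `$GITHUB_OUTPUT` is what makes `steps.retrieve-version.outputs.VERSION` resolvable in the image-tagging steps of this workflow.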

.gitignore (vendored, 2 changed lines)
@@ -3,3 +3,5 @@

# Ignore Gradle build output directory
build

rbcs-cli/native-image/*.json

build.gradle (116 changed lines)
@@ -1,81 +1,123 @@
plugins {
alias catalog.plugins.kotlin
alias catalog.plugins.envelope
id 'maven-publish'
alias catalog.plugins.kotlin.jvm apply false
alias catalog.plugins.sambal
alias catalog.plugins.lombok apply false
}

import net.woggioni.gradle.envelope.EnvelopeJarTask

import org.jetbrains.kotlin.gradle.dsl.JvmTarget
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile

allprojects { subproject ->
group = 'net.woggioni'

version = getProperty('gbcs.version')
if(project.currentTag.isPresent()) {
version = project.currentTag.map { it[0] }.get()
} else {
version = project.gitRevision.map { gitRevision ->
"${getProperty('rbcs.version')}.${gitRevision[0..10]}"
}.get()
}

repositories {
maven {
url = 'https://woggioni.net/mvn'
url = getProperty('gitea.maven.url')
content {
includeModule 'net.woggioni', 'jwo'
includeModule 'net.woggioni', 'xmemcached'
includeGroup 'com.lys'
}
}
mavenCentral()
}

pluginManager.withPlugin('java-library') {

ext {
jpmsModuleName = subproject.group + '.' + subproject.name.replace('-', '.')
}

java {
withSourcesJar()
modularity.inferModulePath = true
toolchain {
languageVersion = JavaLanguageVersion.of(21)
vendor = JvmVendorSpec.ORACLE
}
}

dependencies {
implementation catalog.jwo
implementation catalog.slf4j.api
implementation catalog.netty.codec.http

runtimeOnly catalog.slf4j.jdk14

testImplementation catalog.junit.jupiter.api
testImplementation catalog.junit.jupiter.params
testRuntimeOnly catalog.junit.jupiter.engine
}

java {
withSourcesJar()
test {
useJUnitPlatform()
}

tasks.withType(JavaCompile) {
modularity.inferModulePath = true
options.release = 21
}

tasks.named(JavaPlugin.COMPILE_JAVA_TASK_NAME, JavaCompile) {
modularity.inferModulePath = true
options.compilerArgs << '--patch-module' << 'net.woggioni.gbcs=' + project.sourceSets.main.output.asPath
options.release = 17
options.compilerArgumentProviders << new CommandLineArgumentProvider() {
@Override
Iterable<String> asArguments() {
return ['--patch-module', subproject.jpmsModuleName + '=' + subproject.sourceSets.main.output.asPath]
}

tasks.named("compileKotlin", KotlinCompile.class) {
kotlinOptions {
jvmTarget = 17
}
options.javaModuleVersion = version
}
}

Provider<EnvelopeJarTask> envelopeJarTaskProvider = tasks.named('envelopeJar', EnvelopeJarTask.class) {
mainModule = 'net.woggioni.gbcs'
mainClass = 'net.woggioni.gbcs.GradleBuildCacheServer'
systemProperty 'java.util.logging.config.class', 'net.woggioni.gbcs.LoggingConfig'
systemProperty 'log.config.source', 'logging.properties'
pluginManager.withPlugin('jacoco') {
test {
finalizedBy jacocoTestReport
}
jacocoTestReport {
dependsOn test
}
}

wrapper {
distributionType = Wrapper.DistributionType.BIN
gradleVersion = getProperty('gradle.version')
pluginManager.withPlugin(catalog.plugins.kotlin.jvm.get().pluginId) {
tasks.withType(KotlinCompile.class) {
compilerOptions.jvmTarget = JvmTarget.JVM_21
}
}

def envelopeJarArtifact = artifacts.add('archives', envelopeJarTaskProvider.get().archiveFile.get().asFile) {
type = 'jar'
builtBy envelopeJarTaskProvider
pluginManager.withPlugin(catalog.plugins.lombok.get().pluginId) {
lombok {
version = catalog.versions.lombok
}
}

pluginManager.withPlugin('maven-publish') {

publishing {
repositories {
maven {
url = 'https://mvn.woggioni.net/'
name = "Gitea"
url = uri(getProperty('gitea.maven.url'))

credentials(HttpHeaderCredentials) {
name = "Authorization"
value = "token ${System.getenv()["PUBLISHER_TOKEN"]}"
}
}
publications {
maven(MavenPublication) {
artifact envelopeJarArtifact

authentication {
header(HttpHeaderAuthentication)
}
}
}
}
}
}

tasks.register('version') {
doLast {
println("VERSION=$version")
}
}

conf/logback.xml (new file, 18 lines)
@@ -0,0 +1,18 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE configuration>

<configuration>
    <import class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"/>
    <import class="ch.qos.logback.core.ConsoleAppender"/>

    <appender name="console" class="ConsoleAppender">
        <target>System.err</target>
        <encoder class="PatternLayoutEncoder">
            <pattern>%d [%highlight(%-5level)] \(%thread\) %logger{36} -%kvp- %msg %n</pattern>
        </encoder>
    </appender>

    <root level="info">
        <appender-ref ref="console"/>
    </root>
</configuration>

@@ -2,8 +2,8 @@

handlers = java.util.logging.ConsoleHandler

java.util.logging.ConsoleHandler.level = FINEST
java.util.logging.ConsoleHandler.level = FINER
java.util.logging.ConsoleHandler.filter =
java.util.logging.ConsoleHandler.formatter = java.util.logging.SimpleFormatter
java.util.logging.SimpleFormatter.format = %1$tF %1$tT [%4$s] %2$s %5$s %n
java.util.logging.SimpleFormatter.format = %1$tF %1$tT [%4$s] %2$s %5$s %6$s%n
java.util.logging.ConsoleHandler.encoding =

docker/Dockerfile (new file, 17 lines)
@@ -0,0 +1,17 @@
FROM eclipse-temurin:21-jre-alpine AS base-release
RUN adduser -D luser
USER luser
WORKDIR /home/luser

FROM base-release AS release
ADD rbcs-cli-envelope-*.jar rbcs.jar
ENTRYPOINT ["java", "-XX:+UseSerialGC", "-XX:GCTimeRatio=24", "-jar", "/home/luser/rbcs.jar", "server"]

FROM base-release AS release-memcache
ADD --chown=luser:luser rbcs-cli-envelope-*.jar rbcs.jar
RUN mkdir plugins
WORKDIR /home/luser/plugins
RUN --mount=type=bind,source=.,target=/build/distributions tar -xf /build/distributions/rbcs-server-memcache*.tar
WORKDIR /home/luser
ADD logback.xml .
ENTRYPOINT ["java", "-Dlogback.configurationFile=logback.xml", "-XX:+UseSerialGC", "-XX:GCTimeRatio=24", "-jar", "/home/luser/rbcs.jar", "server"]
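
Note: the release-memcache stage unpacks the rbcs-server-memcache tar into a plugins directory next to the envelope jar; this lines up with the `extraClasspath = ["plugins"]` setting in rbcs-cli/build.gradle further down, which presumably is how the memcache plugin ends up on the application classpath at runtime.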

docker/build.gradle (new file, 68 lines)
@@ -0,0 +1,68 @@
plugins {
    id 'base'
    alias(catalog.plugins.gradle.docker)
}

import com.bmuschko.gradle.docker.tasks.image.DockerBuildImage
import com.bmuschko.gradle.docker.tasks.image.DockerPushImage
import com.bmuschko.gradle.docker.tasks.image.DockerTagImage


configurations {
    docker {
        canBeResolved = true
        transitive = false
        visible = false
        canBeConsumed = false
    }
}

dependencies {
    docker project(path: ':rbcs-cli', configuration: 'release')
    docker project(path: ':rbcs-server-memcache', configuration: 'release')
}

Provider<Task> cleanTaskProvider = tasks.named(BasePlugin.CLEAN_TASK_NAME) {}

Provider<Copy> prepareDockerBuild = tasks.register('prepareDockerBuild', Copy) {
    dependsOn cleanTaskProvider
    group = 'docker'
    into project.layout.buildDirectory.file('docker')
    from(configurations.docker)
    from(file('Dockerfile'))
    from(rootProject.file('conf')) {
        include 'logback.xml'
    }
}

Provider<DockerBuildImage> dockerBuild = tasks.register('dockerBuildImage', DockerBuildImage) {
    group = 'docker'
    dependsOn prepareDockerBuild
    images.add('gitea.woggioni.net/woggioni/rbcs:latest')
    images.add("gitea.woggioni.net/woggioni/rbcs:${version}")
}

Provider<DockerTagImage> dockerTag = tasks.register('dockerTagImage', DockerTagImage) {
    group = 'docker'
    repository = 'gitea.woggioni.net/woggioni/rbcs'
    imageId = 'gitea.woggioni.net/woggioni/rbcs:latest'
    tag = version
}

Provider<DockerTagImage> dockerTagMemcache = tasks.register('dockerTagMemcacheImage', DockerTagImage) {
    group = 'docker'
    repository = 'gitea.woggioni.net/woggioni/rbcs'
    imageId = 'gitea.woggioni.net/woggioni/rbcs:memcache'
    tag = "${version}-memcache"
}

Provider<DockerPushImage> dockerPush = tasks.register('dockerPushImage', DockerPushImage) {
    group = 'docker'
    dependsOn dockerTag, dockerTagMemcache
    registryCredentials {
        url = getProperty('docker.registry.url')
        username = 'woggioni'
        password = System.getenv().get("PUBLISHER_TOKEN")
    }
    images = [dockerTag.flatMap{ it.tag }, dockerTagMemcache.flatMap{ it.tag }]
}
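
Note: prepareDockerBuild stages the rbcs-cli envelope jar, the rbcs-server-memcache distribution, the Dockerfile and conf/logback.xml under this subproject's build directory (docker/build/docker), which matches the context directory the docker/build-push-action steps in the CI workflow point at.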

@@ -1,4 +1,11 @@
gbcs.version = 0.1-SNAPSHOT
org.gradle.configuration-cache=false
org.gradle.parallel=true
org.gradle.caching=true

rbcs.version = 0.2.0

lys.version = 2025.02.08

gitea.maven.url = https://gitea.woggioni.net/api/packages/woggioni/maven
docker.registry.url=gitea.woggioni.net

gradle.version = 7.5.1
lys.version = 0.1-SNAPSHOT

gradle/wrapper/gradle-wrapper.jar (vendored, binary file not shown)

gradle/wrapper/gradle-wrapper.properties (vendored, 4 changed lines)
@@ -1,5 +1,7 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.5.1-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.12-bin.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

gradlew (vendored, 38 changed lines)
@@ -15,6 +15,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
# SPDX-License-Identifier: Apache-2.0
#

##############################################################################
#
@@ -55,7 +57,7 @@
# Darwin, MinGW, and NonStop.
#
# (3) This script is generated from the Groovy template
# https://github.com/gradle/gradle/blob/master/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# https://github.com/gradle/gradle/blob/HEAD/platforms/jvm/plugins-application/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
# within the Gradle project.
#
# You can find Gradle at https://github.com/gradle/gradle/.
@@ -80,13 +82,12 @@ do
esac
done

APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit

APP_NAME="Gradle"
# This is normally unused
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s
' "$PWD" ) || exit

# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
@@ -133,22 +134,29 @@ location of your Java installation."
fi
else
JAVACMD=java
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
if ! command -v java >/dev/null 2>&1
then
die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.

Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
fi

# Increase the maximum file descriptors if we can.
if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
case $MAX_FD in #(
max*)
# In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
MAX_FD=$( ulimit -H -n ) ||
warn "Could not query maximum file descriptor limit"
esac
case $MAX_FD in #(
'' | soft) :;; #(
*)
# In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
# shellcheck disable=SC2039,SC3045
ulimit -n "$MAX_FD" ||
warn "Could not set maximum file descriptor limit to $MAX_FD"
esac
@@ -193,11 +201,15 @@ if "$cygwin" || "$msys" ; then
done
fi

# Collect all arguments for the java command;
# * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
# shell script including quotes and variable substitutions, so put them in
# double quotes to make sure that they get re-expanded; and
# * put everything else in single quotes, so that it's not re-expanded.

# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'

# Collect all arguments for the java command:
# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
# and any embedded shellness will be escaped.
# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
# treated as '${Hostname}' itself on the command line.

set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \

gradlew.bat (vendored, 23 changed lines)
@@ -13,6 +13,8 @@
@rem See the License for the specific language governing permissions and
@rem limitations under the License.
@rem
@rem SPDX-License-Identifier: Apache-2.0
@rem

@if "%DEBUG%"=="" @echo off
@rem ##########################################################################
@@ -26,6 +28,7 @@ if "%OS%"=="Windows_NT" setlocal

set DIRNAME=%~dp0
if "%DIRNAME%"=="" set DIRNAME=.
@rem This is normally unused
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%

@@ -42,11 +45,11 @@ set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if %ERRORLEVEL% equ 0 goto execute

echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
echo. 1>&2
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2

goto fail

@@ -56,11 +59,11 @@ set JAVA_EXE=%JAVA_HOME%/bin/java.exe

if exist "%JAVA_EXE%" goto execute

echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
echo. 1>&2
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2
echo. 1>&2
echo Please set the JAVA_HOME variable in your environment to match the 1>&2
echo location of your Java installation. 1>&2

goto fail

rbcs-api/build.gradle (new file, 18 lines)
@@ -0,0 +1,18 @@
plugins {
    id 'java-library'
    id 'maven-publish'
    alias catalog.plugins.lombok
}

dependencies {
    api catalog.netty.buffer
    api catalog.netty.handler
}

publishing {
    publications {
        maven(MavenPublication) {
            from(components["java"])
        }
    }
}

rbcs-api/src/main/java/module-info.java (new file, 10 lines)
@@ -0,0 +1,10 @@
module net.woggioni.rbcs.api {
    requires static lombok;
    requires java.xml;
    requires io.netty.buffer;
    requires io.netty.handler;
    requires io.netty.transport;
    exports net.woggioni.rbcs.api;
    exports net.woggioni.rbcs.api.exception;
    exports net.woggioni.rbcs.api.message;
}

@@ -0,0 +1,7 @@
package net.woggioni.rbcs.api;

import io.netty.channel.ChannelHandler;

public interface CacheHandlerFactory extends AutoCloseable {
    ChannelHandler newHandler();
}

@@ -0,0 +1,17 @@
package net.woggioni.rbcs.api;

import org.w3c.dom.Document;
import org.w3c.dom.Element;

public interface CacheProvider<T extends Configuration.Cache> {

    String getXmlSchemaLocation();

    String getXmlNamespace();

    String getXmlType();

    T deserialize(Element parent);

    Element serialize(Document doc, T cache);
}

@@ -0,0 +1,14 @@
package net.woggioni.rbcs.api;

import lombok.Getter;
import lombok.RequiredArgsConstructor;

import java.io.Serializable;

@Getter
@RequiredArgsConstructor
public class CacheValueMetadata implements Serializable {
    private final String contentDisposition;
    private final String mimeType;
}

rbcs-api/src/main/java/net/woggioni/rbcs/api/Configuration.java (new file, 170 lines)
@@ -0,0 +1,170 @@
package net.woggioni.rbcs.api;


import lombok.EqualsAndHashCode;
import lombok.NonNull;
import lombok.Value;

import java.nio.file.Path;
import java.security.cert.X509Certificate;
import java.time.Duration;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;

@Value
public class Configuration {
    String host;
    int port;
    int incomingConnectionsBacklogSize;
    String serverPath;
    @NonNull
    EventExecutor eventExecutor;
    @NonNull
    Connection connection;
    Map<String, User> users;
    Map<String, Group> groups;
    Cache cache;
    Authentication authentication;
    Tls tls;

    @Value
    public static class EventExecutor {
        boolean useVirtualThreads;
    }

    @Value
    public static class Connection {
        Duration readTimeout;
        Duration writeTimeout;
        Duration idleTimeout;
        Duration readIdleTimeout;
        Duration writeIdleTimeout;
        int maxRequestSize;
    }

    @Value
    public static class Quota {
        long calls;
        Duration period;
        long initialAvailableCalls;
        long maxAvailableCalls;
    }

    @Value
    public static class Group {
        @EqualsAndHashCode.Include
        String name;
        Set<Role> roles;
        Quota groupQuota;
        Quota userQuota;
    }

    @Value
    public static class User {
        @EqualsAndHashCode.Include
        String name;
        String password;
        Set<Group> groups;
        Quota quota;

        public Set<Role> getRoles() {
            return groups.stream()
                .flatMap(group -> group.getRoles().stream())
                .collect(Collectors.toSet());
        }
    }

    @FunctionalInterface
    public interface UserExtractor {
        User extract(X509Certificate cert);
    }

    @FunctionalInterface
    public interface GroupExtractor {
        Group extract(X509Certificate cert);
    }

    @Value
    public static class Throttling {
        KeyStore keyStore;
        TrustStore trustStore;
        boolean verifyClients;
    }

    public enum ClientCertificate {
        REQUIRED, OPTIONAL
    }

    @Value
    public static class Tls {
        KeyStore keyStore;
        TrustStore trustStore;
    }

    @Value
    public static class KeyStore {
        Path file;
        String password;
        String keyAlias;
        String keyPassword;
    }

    @Value
    public static class TrustStore {
        Path file;
        String password;
        boolean checkCertificateStatus;
        boolean requireClientCertificate;
    }

    @Value
    public static class TlsCertificateExtractor {
        String rdnType;
        String pattern;
    }

    public interface Authentication {}

    public static class BasicAuthentication implements Authentication {}

    @Value
    public static class ClientCertificateAuthentication implements Authentication {
        TlsCertificateExtractor userExtractor;
        TlsCertificateExtractor groupExtractor;
    }

    public interface Cache {
        CacheHandlerFactory materialize();
        String getNamespaceURI();
        String getTypeName();
    }

    public static Configuration of(
        String host,
        int port,
        int incomingConnectionsBacklogSize,
        String serverPath,
        EventExecutor eventExecutor,
        Connection connection,
        Map<String, User> users,
        Map<String, Group> groups,
        Cache cache,
        Authentication authentication,
        Tls tls
    ) {
        return new Configuration(
            host,
            port,
            incomingConnectionsBacklogSize,
            serverPath != null && !serverPath.isEmpty() && !serverPath.equals("/") ? serverPath : null,
            eventExecutor,
            connection,
            users,
            groups,
            cache,
            authentication,
            tls
        );
    }
}
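
For orientation, a minimal Kotlin sketch of how this API composes from calling code, assuming the Lombok-generated all-args constructors of the nested @Value classes; every concrete value below (host, port, backlog, timeouts, request size) is invented for illustration and cache, authentication and TLS are simply left null:

import net.woggioni.rbcs.api.Configuration
import java.time.Duration

// Illustrative values only; null cache/authentication/tls keep the sketch short
val configuration = Configuration.of(
    "127.0.0.1",                        // host
    8080,                               // port
    1024,                               // incomingConnectionsBacklogSize
    "/",                                // serverPath ("/" is normalized to null by of())
    Configuration.EventExecutor(true),  // useVirtualThreads
    Configuration.Connection(
        Duration.ofSeconds(30),         // readTimeout
        Duration.ofSeconds(30),         // writeTimeout
        Duration.ofSeconds(60),         // idleTimeout
        Duration.ofSeconds(30),         // readIdleTimeout
        Duration.ofSeconds(30),         // writeIdleTimeout
        0x4000000                       // maxRequestSize (64 MiB)
    ),
    emptyMap(),                         // users
    emptyMap(),                         // groups
    null,                               // cache
    null,                               // authentication
    null                                // tls
)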

rbcs-api/src/main/java/net/woggioni/rbcs/api/Role.java (new file, 5 lines)
@@ -0,0 +1,5 @@
package net.woggioni.rbcs.api;

public enum Role {
    Reader, Writer
}

@@ -0,0 +1,11 @@
package net.woggioni.rbcs.api.exception;

public class CacheException extends RbcsException {
    public CacheException(String message, Throwable cause) {
        super(message, cause);
    }

    public CacheException(String message) {
        this(message, null);
    }
}

@@ -0,0 +1,11 @@
package net.woggioni.rbcs.api.exception;

public class ConfigurationException extends RbcsException {
    public ConfigurationException(String message, Throwable cause) {
        super(message, cause);
    }

    public ConfigurationException(String message) {
        this(message, null);
    }
}

@@ -0,0 +1,7 @@
package net.woggioni.rbcs.api.exception;

public class ContentTooLargeException extends RbcsException {
    public ContentTooLargeException(String message, Throwable cause) {
        super(message, cause);
    }
}

@@ -0,0 +1,7 @@
package net.woggioni.rbcs.api.exception;

public class RbcsException extends RuntimeException {
    public RbcsException(String message, Throwable cause) {
        super(message, cause);
    }
}

@@ -0,0 +1,161 @@
package net.woggioni.rbcs.api.message;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufHolder;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import net.woggioni.rbcs.api.CacheValueMetadata;

public sealed interface CacheMessage {

    @Getter
    @RequiredArgsConstructor
    final class CacheGetRequest implements CacheMessage {
        private final String key;
    }

    abstract sealed class CacheGetResponse implements CacheMessage {
    }

    @Getter
    @RequiredArgsConstructor
    final class CacheValueFoundResponse extends CacheGetResponse {
        private final String key;
        private final CacheValueMetadata metadata;
    }

    final class CacheValueNotFoundResponse extends CacheGetResponse {
    }

    @Getter
    @RequiredArgsConstructor
    final class CachePutRequest implements CacheMessage {
        private final String key;
        private final CacheValueMetadata metadata;
    }

    @Getter
    @RequiredArgsConstructor
    final class CachePutResponse implements CacheMessage {
        private final String key;
    }

    @RequiredArgsConstructor
    non-sealed class CacheContent implements CacheMessage, ByteBufHolder {
        protected final ByteBuf chunk;

        @Override
        public ByteBuf content() {
            return chunk;
        }

        @Override
        public CacheContent copy() {
            return replace(chunk.copy());
        }

        @Override
        public CacheContent duplicate() {
            return new CacheContent(chunk.duplicate());
        }

        @Override
        public CacheContent retainedDuplicate() {
            return new CacheContent(chunk.retainedDuplicate());
        }

        @Override
        public CacheContent replace(ByteBuf content) {
            return new CacheContent(content);
        }

        @Override
        public CacheContent retain() {
            chunk.retain();
            return this;
        }

        @Override
        public CacheContent retain(int increment) {
            chunk.retain(increment);
            return this;
        }

        @Override
        public CacheContent touch() {
            chunk.touch();
            return this;
        }

        @Override
        public CacheContent touch(Object hint) {
            chunk.touch(hint);
            return this;
        }

        @Override
        public int refCnt() {
            return chunk.refCnt();
        }

        @Override
        public boolean release() {
            return chunk.release();
        }

        @Override
        public boolean release(int decrement) {
            return chunk.release(decrement);
        }
    }

    final class LastCacheContent extends CacheContent {
        public LastCacheContent(ByteBuf chunk) {
            super(chunk);
        }

        @Override
        public LastCacheContent copy() {
            return replace(chunk.copy());
        }

        @Override
        public LastCacheContent duplicate() {
            return new LastCacheContent(chunk.duplicate());
        }

        @Override
        public LastCacheContent retainedDuplicate() {
            return new LastCacheContent(chunk.retainedDuplicate());
        }

        @Override
        public LastCacheContent replace(ByteBuf content) {
            return new LastCacheContent(chunk);
        }

        @Override
        public LastCacheContent retain() {
            super.retain();
            return this;
        }

        @Override
        public LastCacheContent retain(int increment) {
            super.retain(increment);
            return this;
        }

        @Override
        public LastCacheContent touch() {
            super.touch();
            return this;
        }

        @Override
        public LastCacheContent touch(Object hint) {
            super.touch(hint);
            return this;
        }
    }
}
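
To make the message model above concrete, a hedged Kotlin sketch of the sequence a handler could emit for a single cache write: a CachePutRequest carrying the metadata, followed by CacheContent chunks and a terminating LastCacheContent. The key, MIME type and payload split are made up for the example; only the types shown above plus Netty's Unpooled helper are assumed.

import io.netty.buffer.Unpooled
import net.woggioni.rbcs.api.CacheValueMetadata
import net.woggioni.rbcs.api.message.CacheMessage

// A cache write expressed as its message sequence; the payload is split into
// two chunks purely to show where LastCacheContent marks the end of the value.
fun putMessages(key: String, payload: ByteArray): List<CacheMessage> {
    val half = payload.size / 2
    return listOf(
        CacheMessage.CachePutRequest(key, CacheValueMetadata(null, "application/octet-stream")),
        CacheMessage.CacheContent(Unpooled.wrappedBuffer(payload, 0, half)),
        CacheMessage.LastCacheContent(Unpooled.wrappedBuffer(payload, half, payload.size - half))
    )
}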

rbcs-cli/build.gradle (new file, 108 lines)
@@ -0,0 +1,108 @@
plugins {
    id 'java-library'
    alias catalog.plugins.kotlin.jvm
    alias catalog.plugins.envelope
    alias catalog.plugins.sambal
    alias catalog.plugins.graalvm.native.image
    alias catalog.plugins.graalvm.jlink
    alias catalog.plugins.jpms.check
    id 'maven-publish'
}

import net.woggioni.gradle.envelope.EnvelopeJarTask
import net.woggioni.gradle.graalvm.NativeImageConfigurationTask
import net.woggioni.gradle.graalvm.NativeImagePlugin
import net.woggioni.gradle.graalvm.NativeImageTask
import net.woggioni.gradle.graalvm.JlinkPlugin
import net.woggioni.gradle.graalvm.JlinkTask

Property<String> mainModuleName = objects.property(String.class)
mainModuleName.set('net.woggioni.rbcs.cli')
Property<String> mainClassName = objects.property(String.class)
mainClassName.set('net.woggioni.rbcs.cli.RemoteBuildCacheServerCli')

tasks.named(JavaPlugin.COMPILE_JAVA_TASK_NAME, JavaCompile) {
    options.javaModuleMainClass = mainClassName
}

configurations {
    release {
        transitive = false
        canBeConsumed = true
        canBeResolved = true
        visible = true
    }
}

envelopeJar {
    mainModule = mainModuleName
    mainClass = mainClassName

    extraClasspath = ["plugins"]
}

dependencies {
    implementation catalog.jwo
    implementation catalog.slf4j.api
    implementation catalog.picocli

    implementation project(':rbcs-client')
    implementation project(':rbcs-server')

    // runtimeOnly catalog.slf4j.jdk14
    runtimeOnly catalog.logback.classic
    // runtimeOnly catalog.slf4j.simple
}

Provider<EnvelopeJarTask> envelopeJarTaskProvider = tasks.named('envelopeJar', EnvelopeJarTask.class) {
    // systemProperties['java.util.logging.config.class'] = 'net.woggioni.rbcs.LoggingConfig'
    // systemProperties['log.config.source'] = 'net/woggioni/rbcs/cli/logging.properties'
    // systemProperties['java.util.logging.config.file'] = 'classpath:net/woggioni/rbcs/cli/logging.properties'
    systemProperties['logback.configurationFile'] = 'classpath:net/woggioni/rbcs/cli/logback.xml'
    systemProperties['io.netty.leakDetectionLevel'] = 'DISABLED'

    // systemProperties['org.slf4j.simpleLogger.showDateTime'] = 'true'
    // systemProperties['org.slf4j.simpleLogger.defaultLogLevel'] = 'debug'
    // systemProperties['org.slf4j.simpleLogger.log.com.google.code.yanf4j'] = 'warn'
    // systemProperties['org.slf4j.simpleLogger.log.net.rubyeye.xmemcached'] = 'warn'
    // systemProperties['org.slf4j.simpleLogger.dateTimeFormat'] = 'yyyy-MM-dd\'T\'HH:mm:ss.SSSZ'
}

tasks.named(NativeImagePlugin.CONFIGURE_NATIVE_IMAGE_TASK_NAME, NativeImageConfigurationTask) {
    mainClass = mainClassName
    mainModule = mainModuleName
}

tasks.named(NativeImagePlugin.NATIVE_IMAGE_TASK_NAME, NativeImageTask) {
    mainClass = mainClassName
    mainModule = mainModuleName
    useMusl = true
    buildStaticImage = true
}

tasks.named(JlinkPlugin.JLINK_TASK_NAME, JlinkTask) {
    mainClass = mainClassName
    mainModule = 'net.woggioni.rbcs.cli'
}

tasks.named(JavaPlugin.PROCESS_RESOURCES_TASK_NAME, ProcessResources) {
    from(rootProject.file('conf')) {
        into('net/woggioni/rbcs/cli')
        include 'logback.xml'
        include 'logging.properties'
    }
}

artifacts {
    release(envelopeJarTaskProvider)
}

publishing {
    publications {
        maven(MavenPublication) {
            artifact envelopeJar
        }
    }
}

rbcs-cli/native-image/native-image.properties (new file, 2 lines)
@@ -0,0 +1,2 @@
Args=-H:Optimize=3 --gc=serial --initialize-at-run-time=io.netty
#-H:TraceClassInitialization=io.netty.handler.ssl.BouncyCastleAlpnSslUtils

rbcs-cli/src/main/java/module-info.java (new file, 17 lines)
@@ -0,0 +1,17 @@
module net.woggioni.rbcs.cli {
    requires org.slf4j;
    requires net.woggioni.rbcs.server;
    requires info.picocli;
    requires net.woggioni.rbcs.common;
    requires net.woggioni.rbcs.client;
    requires kotlin.stdlib;
    requires net.woggioni.jwo;
    requires net.woggioni.rbcs.api;

    exports net.woggioni.rbcs.cli.impl.converters to info.picocli;
    opens net.woggioni.rbcs.cli.impl.commands to info.picocli;
    opens net.woggioni.rbcs.cli.impl to info.picocli;
    opens net.woggioni.rbcs.cli to info.picocli, net.woggioni.rbcs.common;

    exports net.woggioni.rbcs.cli;
}

@@ -0,0 +1,69 @@
package net.woggioni.rbcs.cli

import net.woggioni.jwo.Application
import net.woggioni.rbcs.cli.impl.AbstractVersionProvider
import net.woggioni.rbcs.cli.impl.RbcsCommand
import net.woggioni.rbcs.cli.impl.commands.BenchmarkCommand
import net.woggioni.rbcs.cli.impl.commands.ClientCommand
import net.woggioni.rbcs.cli.impl.commands.GetCommand
import net.woggioni.rbcs.cli.impl.commands.HealthCheckCommand
import net.woggioni.rbcs.cli.impl.commands.PasswordHashCommand
import net.woggioni.rbcs.cli.impl.commands.PutCommand
import net.woggioni.rbcs.cli.impl.commands.ServerCommand
import net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory
import net.woggioni.rbcs.common.contextLogger
import picocli.CommandLine
import picocli.CommandLine.Model.CommandSpec


@CommandLine.Command(
    name = "rbcs", versionProvider = RemoteBuildCacheServerCli.VersionProvider::class
)
class RemoteBuildCacheServerCli : RbcsCommand() {

    class VersionProvider : AbstractVersionProvider()
    companion object {
        @JvmStatic
        fun main(vararg args: String) {
            val currentClassLoader = RemoteBuildCacheServerCli::class.java.classLoader
            Thread.currentThread().contextClassLoader = currentClassLoader
            if(currentClassLoader.javaClass.name == "net.woggioni.envelope.loader.ModuleClassLoader") {
                //We're running in an envelope jar and custom URL protocols won't work
                RbcsUrlStreamHandlerFactory.install()
            }
            val log = contextLogger()
            val app = Application.builder("rbcs")
                .configurationDirectoryEnvVar("RBCS_CONFIGURATION_DIR")
                .configurationDirectoryPropertyKey("net.woggioni.rbcs.conf.dir")
                .build()
            val rbcsCli = RemoteBuildCacheServerCli()
            val commandLine = CommandLine(rbcsCli)
            commandLine.setExecutionExceptionHandler { ex, cl, parseResult ->
                log.error(ex.message, ex)
                CommandLine.ExitCode.SOFTWARE
            }
            commandLine.addSubcommand(ServerCommand(app))
            commandLine.addSubcommand(PasswordHashCommand())
            commandLine.addSubcommand(
                CommandLine(ClientCommand(app)).apply {
                    addSubcommand(BenchmarkCommand())
                    addSubcommand(PutCommand())
                    addSubcommand(GetCommand())
                    addSubcommand(HealthCheckCommand())
                })
            System.exit(commandLine.execute(*args))
        }
    }

    @CommandLine.Option(names = ["-V", "--version"], versionHelp = true)
    var versionHelp = false
        private set

    @CommandLine.Spec
    private lateinit var spec: CommandSpec


    override fun run() {
        spec.commandLine().usage(System.out);
    }
}

@@ -0,0 +1,30 @@
package net.woggioni.rbcs.cli.impl

import picocli.CommandLine
import java.util.jar.Attributes
import java.util.jar.JarFile
import java.util.jar.Manifest


abstract class AbstractVersionProvider : CommandLine.IVersionProvider {
    private val version: String
    private val vcsHash: String

    init {
        val mf = Manifest()
        javaClass.module.getResourceAsStream(JarFile.MANIFEST_NAME).use { `is` ->
            mf.read(`is`)
        }
        val mainAttributes = mf.mainAttributes
        version = mainAttributes.getValue(Attributes.Name.SPECIFICATION_VERSION) ?: throw RuntimeException("Version information not found in manifest")
        vcsHash = mainAttributes.getValue(Attributes.Name.IMPLEMENTATION_VERSION) ?: throw RuntimeException("Version information not found in manifest")
    }

    override fun getVersion(): Array<String?> {
        return if (version.endsWith("-SNAPSHOT")) {
            arrayOf(version, vcsHash)
        } else {
            arrayOf(version)
        }
    }
}

@@ -0,0 +1,19 @@
package net.woggioni.rbcs.cli.impl

import net.woggioni.jwo.Application
import picocli.CommandLine
import java.nio.file.Path


abstract class RbcsCommand : Runnable {

    @CommandLine.Option(names = ["-h", "--help"], usageHelp = true)
    var usageHelp = false
        private set

    protected fun findConfigurationFile(app: Application, fileName : String): Path {
        val confDir = app.computeConfigurationDirectory()
        val configurationFile = confDir.resolve(fileName)
        return configurationFile
    }
}

@@ -0,0 +1,172 @@
package net.woggioni.rbcs.cli.impl.commands

import net.woggioni.jwo.JWO
import net.woggioni.jwo.LongMath
import net.woggioni.rbcs.api.CacheValueMetadata
import net.woggioni.rbcs.cli.impl.RbcsCommand
import net.woggioni.rbcs.cli.impl.converters.ByteSizeConverter
import net.woggioni.rbcs.client.RemoteBuildCacheClient
import net.woggioni.rbcs.common.createLogger
import net.woggioni.rbcs.common.debug
import net.woggioni.rbcs.common.error
import net.woggioni.rbcs.common.info
import picocli.CommandLine
import java.security.SecureRandom
import java.time.Duration
import java.time.Instant
import java.time.temporal.ChronoUnit
import java.util.concurrent.LinkedBlockingQueue
import java.util.concurrent.Semaphore
import java.util.concurrent.atomic.AtomicLong
import kotlin.random.Random

@CommandLine.Command(
    name = "benchmark",
    description = ["Run a load test against the server"],
    showDefaultValues = true
)
class BenchmarkCommand : RbcsCommand() {
    companion object{
        private val log = createLogger<BenchmarkCommand>()
    }

    @CommandLine.Spec
    private lateinit var spec: CommandLine.Model.CommandSpec

    @CommandLine.Option(
        names = ["-e", "--entries"],
        description = ["Total number of elements to be added to the cache"],
        paramLabel = "NUMBER_OF_ENTRIES"
    )
    private var numberOfEntries = 1000

    @CommandLine.Option(
        names = ["-s", "--size"],
        description = ["Size of a cache value in bytes"],
        paramLabel = "SIZE",
        converter = [ByteSizeConverter::class]
    )
    private var size = 0x1000

    @CommandLine.Option(
        names = ["-r", "--random"],
        description = ["Insert completely random byte values"]
    )
    private var randomValues = false

    override fun run() {
        val clientCommand = spec.parent().userObject() as ClientCommand
        val profile = clientCommand.profileName.let { profileName ->
            clientCommand.configuration.profiles[profileName]
                ?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
        }
        val progressThreshold = LongMath.ceilDiv(numberOfEntries.toLong(), 20)
        RemoteBuildCacheClient(profile).use { client ->

            val entryGenerator = sequence {
                val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
                while (true) {
                    val key = JWO.bytesToHex(random.nextBytes(16))
                    val value = if(randomValues) {
                        random.nextBytes(size)
                    } else {
                        val byteValue = random.nextInt().toByte()
                        ByteArray(size) {_ -> byteValue}
                    }
                    yield(key to value)
                }
            }

            log.info {
                "Starting insertion"
            }
            val entries = let {
                val completionCounter = AtomicLong(0)
                val completionQueue = LinkedBlockingQueue<Pair<String, ByteArray>>(numberOfEntries)
                val start = Instant.now()
                val semaphore = Semaphore(profile.maxConnections * 5)
                val iterator = entryGenerator.take(numberOfEntries).iterator()
                while (completionCounter.get() < numberOfEntries) {
                    if (iterator.hasNext()) {
                        val entry = iterator.next()
                        semaphore.acquire()
                        val future = client.put(entry.first, entry.second, CacheValueMetadata(null, null)).thenApply { entry }
                        future.whenComplete { result, ex ->
                            if (ex != null) {
                                log.error(ex.message, ex)
                            } else {
                                completionQueue.put(result)
                            }
                            semaphore.release()
                            val completed = completionCounter.incrementAndGet()
                            if(completed.mod(progressThreshold) == 0L) {
                                log.debug {
                                    "Inserted $completed / $numberOfEntries"
                                }
                            }
                        }
                    } else {
                        Thread.sleep(Duration.of(500, ChronoUnit.MILLIS))
                    }
                }

                val inserted = completionQueue.toList()
                val end = Instant.now()
                log.info {
                    val elapsed = Duration.between(start, end).toMillis()
                    val opsPerSecond = String.format("%.2f", numberOfEntries.toDouble() / elapsed * 1000)
                    "Insertion rate: $opsPerSecond ops/s"
                }
                inserted
            }
            log.info {
                "Inserted ${entries.size} entries"
            }
            log.info {
                "Starting retrieval"
            }
            if (entries.isNotEmpty()) {
                val completionCounter = AtomicLong(0)
                val semaphore = Semaphore(profile.maxConnections * 5)
                val start = Instant.now()
                val it = entries.iterator()
                while (completionCounter.get() < entries.size) {
                    if (it.hasNext()) {
                        val entry = it.next()
                        semaphore.acquire()
                        val future = client.get(entry.first).thenApply {
                            if (it == null) {
                                log.error {
                                    "Missing entry for key '${entry.first}'"
                                }
                            } else if (!entry.second.contentEquals(it)) {
                                log.error {
                                    "Retrieved a value different from what was inserted for key '${entry.first}'"
                                }
                            }
                        }
                        future.whenComplete { _, _ ->
                            val completed = completionCounter.incrementAndGet()
                            if(completed.mod(progressThreshold) == 0L) {
                                log.debug {
                                    "Retrieved $completed / ${entries.size}"
                                }
                            }
                            semaphore.release()
                        }
                    } else {
                        Thread.sleep(Duration.of(500, ChronoUnit.MILLIS))
                    }
                }
                val end = Instant.now()
                log.info {
                    val elapsed = Duration.between(start, end).toMillis()
                    val opsPerSecond = String.format("%.2f", entries.size.toDouble() / elapsed * 1000)
                    "Retrieval rate: $opsPerSecond ops/s"
                }
            } else {
                log.error("Skipping retrieval benchmark as it was not possible to insert any entry in the cache")
            }
        }
    }
}

@@ -0,0 +1,41 @@
package net.woggioni.rbcs.cli.impl.commands

import net.woggioni.jwo.Application
import net.woggioni.rbcs.cli.impl.RbcsCommand
import net.woggioni.rbcs.client.RemoteBuildCacheClient
import picocli.CommandLine
import java.nio.file.Path

@CommandLine.Command(
    name = "client",
    description = ["RBCS client"],
    showDefaultValues = true
)
class ClientCommand(app : Application) : RbcsCommand() {

    @CommandLine.Option(
        names = ["-c", "--configuration"],
        description = ["Path to the client configuration file"],
        paramLabel = "CONFIGURATION_FILE"
    )
    private var configurationFile : Path = findConfigurationFile(app, "rbcs-client.xml")

    @CommandLine.Option(
        names = ["-p", "--profile"],
        description = ["Name of the client profile to be used"],
        paramLabel = "PROFILE",
        required = true
    )
    var profileName : String? = null

    val configuration : RemoteBuildCacheClient.Configuration by lazy {
        RemoteBuildCacheClient.Configuration.parse(configurationFile)
    }

    override fun run() {
        println("Available profiles:")
        configuration.profiles.forEach { (profileName, _) ->
            println(profileName)
        }
    }
}

@@ -0,0 +1,53 @@
package net.woggioni.rbcs.cli.impl.commands

import net.woggioni.rbcs.cli.impl.RbcsCommand
import net.woggioni.rbcs.client.RemoteBuildCacheClient
import net.woggioni.rbcs.common.createLogger
import picocli.CommandLine
import java.nio.file.Files
import java.nio.file.Path

@CommandLine.Command(
    name = "get",
    description = ["Fetch a value from the cache with the specified key"],
    showDefaultValues = true
)
class GetCommand : RbcsCommand() {
    companion object{
        private val log = createLogger<GetCommand>()
    }

    @CommandLine.Spec
    private lateinit var spec: CommandLine.Model.CommandSpec

    @CommandLine.Option(
        names = ["-k", "--key"],
        description = ["The key for the new value"],
        paramLabel = "KEY"
    )
    private var key : String = ""

    @CommandLine.Option(
        names = ["-v", "--value"],
        description = ["Path to a file where the retrieved value will be written (defaults to stdout)"],
        paramLabel = "VALUE_FILE",
    )
    private var output : Path? = null

    override fun run() {
        val clientCommand = spec.parent().userObject() as ClientCommand
        val profile = clientCommand.profileName.let { profileName ->
            clientCommand.configuration.profiles[profileName]
                ?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
        }
        RemoteBuildCacheClient(profile).use { client ->
            client.get(key).thenApply { value ->
                value?.let {
                    (output?.let(Files::newOutputStream) ?: System.out).use {
                        it.write(value)
                    }
                } ?: throw NoSuchElementException("No value found for key $key")
            }.get()
        }
    }
}

@@ -0,0 +1,48 @@
package net.woggioni.rbcs.cli.impl.commands

import net.woggioni.rbcs.cli.impl.RbcsCommand
import net.woggioni.rbcs.client.RemoteBuildCacheClient
import net.woggioni.rbcs.common.createLogger
import picocli.CommandLine
import java.security.SecureRandom
import kotlin.random.Random

@CommandLine.Command(
    name = "health",
    description = ["Check server health"],
    showDefaultValues = true
)
class HealthCheckCommand : RbcsCommand() {
    companion object{
        private val log = createLogger<HealthCheckCommand>()
    }

    @CommandLine.Spec
    private lateinit var spec: CommandLine.Model.CommandSpec

    override fun run() {
        val clientCommand = spec.parent().userObject() as ClientCommand
        val profile = clientCommand.profileName.let { profileName ->
            clientCommand.configuration.profiles[profileName]
                ?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
        }
        RemoteBuildCacheClient(profile).use { client ->
            val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
            val nonce = ByteArray(0xa0)
            random.nextBytes(nonce)
            client.healthCheck(nonce).thenApply { value ->
                if(value == null) {
                    throw IllegalStateException("Empty response from server")
                }
                val offset = value.size - nonce.size
                for(i in 0 until nonce.size) {
                    val a = nonce[i]
                    val b = value[offset + i]
                    if(a != b) {
                        throw IllegalStateException("Server nonce does not match")
                    }
                }
            }.get()
        }
    }
}

@@ -0,0 +1,37 @@
package net.woggioni.rbcs.cli.impl.commands

import net.woggioni.jwo.UncloseableOutputStream
import net.woggioni.rbcs.cli.impl.RbcsCommand
import net.woggioni.rbcs.cli.impl.converters.OutputStreamConverter
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
import picocli.CommandLine
import java.io.OutputStream
import java.io.OutputStreamWriter
import java.io.PrintWriter


@CommandLine.Command(
    name = "password",
    description = ["Generate a password hash to add to RBCS configuration file"],
    showDefaultValues = true
)
class PasswordHashCommand : RbcsCommand() {
    @CommandLine.Option(
        names = ["-o", "--output-file"],
        description = ["Write the output to a file instead of stdout"],
        converter = [OutputStreamConverter::class],
        showDefaultValue = CommandLine.Help.Visibility.NEVER,
        paramLabel = "OUTPUT_FILE"
    )
    private var outputStream: OutputStream = UncloseableOutputStream(System.out)

    override fun run() {
        val password1 = String(System.console().readPassword("Type your password:"))
        val password2 = String(System.console().readPassword("Type your password again for confirmation:"))
        if(password1 != password2) throw IllegalArgumentException("Passwords do not match")

        PrintWriter(OutputStreamWriter(outputStream, Charsets.UTF_8)).use {
            it.println(hashPassword(password1))
        }
    }
}
@@ -0,0 +1,101 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.jwo.Hash
|
||||
import net.woggioni.jwo.JWO
|
||||
import net.woggioni.jwo.NullOutputStream
|
||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import picocli.CommandLine
|
||||
import java.io.InputStream
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.util.UUID
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "put",
|
||||
description = ["Add or replace a value to the cache with the specified key"],
|
||||
showDefaultValues = true
|
||||
)
|
||||
class PutCommand : RbcsCommand() {
|
||||
companion object{
|
||||
private val log = createLogger<PutCommand>()
|
||||
}
|
||||
|
||||
|
||||
@CommandLine.Spec
|
||||
private lateinit var spec: CommandLine.Model.CommandSpec
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-k", "--key"],
|
||||
description = ["The key for the new value, randomly generated if omitted"],
|
||||
paramLabel = "KEY"
|
||||
)
|
||||
private var key : String? = null
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-i", "--inline"],
|
||||
description = ["File is to be displayed in the browser"],
|
||||
paramLabel = "INLINE",
|
||||
)
|
||||
private var inline : Boolean = false
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-t", "--type"],
|
||||
description = ["File mime type"],
|
||||
paramLabel = "MIME_TYPE",
|
||||
)
|
||||
private var mimeType : String? = null
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-v", "--value"],
|
||||
description = ["Path to a file containing the value to be added (defaults to stdin)"],
|
||||
paramLabel = "VALUE_FILE",
|
||||
)
|
||||
private var value : Path? = null
|
||||
|
||||
override fun run() {
|
||||
val clientCommand = spec.parent().userObject() as ClientCommand
|
||||
val profile = clientCommand.profileName.let { profileName ->
|
||||
clientCommand.configuration.profiles[profileName]
|
||||
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
|
||||
}
|
||||
RemoteBuildCacheClient(profile).use { client ->
|
||||
val inputStream : InputStream
|
||||
val mimeType : String?
|
||||
val contentDisposition : String?
|
||||
val valuePath = value
|
||||
val actualKey : String?
|
||||
if(valuePath != null) {
|
||||
inputStream = Files.newInputStream(valuePath)
|
||||
mimeType = this.mimeType ?: Files.probeContentType(valuePath)
|
||||
contentDisposition = if(inline) {
|
||||
"inline"
|
||||
} else {
|
||||
"attachment; filename=\"${valuePath.fileName}\""
|
||||
}
|
||||
actualKey = key ?: let {
|
||||
val md = Hash.Algorithm.SHA512.newInputStream(Files.newInputStream(valuePath)).use {
|
||||
JWO.copy(it, NullOutputStream())
|
||||
it.messageDigest
|
||||
}
|
||||
UUID.nameUUIDFromBytes(md.digest()).toString()
|
||||
}
|
||||
} else {
|
||||
inputStream = System.`in`
|
||||
mimeType = this.mimeType
|
||||
contentDisposition = if(inline) {
|
||||
"inline"
|
||||
} else {
|
||||
null
|
||||
}
|
||||
actualKey = key ?: UUID.randomUUID().toString()
|
||||
}
|
||||
inputStream.use {
|
||||
client.put(actualKey, it.readAllBytes(), CacheValueMetadata(contentDisposition, mimeType))
|
||||
}.get()
|
||||
println(profile.serverURI.resolve(actualKey))
|
||||
}
|
||||
}
|
||||
}
|
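When no key is supplied and the value is read from a file, PutCommand derives a deterministic key from the file content: the SHA-512 digest of the file is folded into a name-based UUID. Below is a minimal standalone sketch of that derivation using only the JDK, without the jwo Hash/JWO helpers used above; the function name deriveKey is a hypothetical helper introduced for illustration.

import java.nio.file.Files
import java.nio.file.Path
import java.security.MessageDigest
import java.util.UUID

// Hypothetical helper mirroring PutCommand's default key derivation:
// hash the whole file with SHA-512, then build a name-based UUID from the digest.
fun deriveKey(valuePath: Path): String {
    val md = MessageDigest.getInstance("SHA-512")
    Files.newInputStream(valuePath).use { input ->
        val buffer = ByteArray(8192)
        while (true) {
            val read = input.read(buffer)
            if (read < 0) break
            md.update(buffer, 0, read)
        }
    }
    return UUID.nameUUIDFromBytes(md.digest()).toString()
}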
@@ -0,0 +1,87 @@
|
||||
package net.woggioni.rbcs.cli.impl.commands
|
||||
|
||||
import net.woggioni.jwo.Application
|
||||
import net.woggioni.jwo.JWO
|
||||
import net.woggioni.rbcs.cli.impl.RbcsCommand
|
||||
import net.woggioni.rbcs.cli.impl.converters.DurationConverter
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import net.woggioni.rbcs.common.debug
|
||||
import net.woggioni.rbcs.common.info
|
||||
import net.woggioni.rbcs.server.RemoteBuildCacheServer
|
||||
import net.woggioni.rbcs.server.RemoteBuildCacheServer.Companion.DEFAULT_CONFIGURATION_URL
|
||||
import picocli.CommandLine
|
||||
import java.io.ByteArrayOutputStream
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.time.Duration
|
||||
import java.util.concurrent.TimeUnit
|
||||
|
||||
@CommandLine.Command(
|
||||
name = "server",
|
||||
description = ["RBCS server"],
|
||||
showDefaultValues = true
|
||||
)
|
||||
class ServerCommand(app : Application) : RbcsCommand() {
|
||||
companion object {
|
||||
private val log = createLogger<ServerCommand>()
|
||||
}
|
||||
|
||||
private fun createDefaultConfigurationFile(configurationFile: Path) {
|
||||
log.info {
|
||||
"Creating default configuration file at '$configurationFile'"
|
||||
}
|
||||
val defaultConfigurationFileResource = DEFAULT_CONFIGURATION_URL
|
||||
Files.newOutputStream(configurationFile).use { outputStream ->
|
||||
defaultConfigurationFileResource.openStream().use { inputStream ->
|
||||
JWO.copy(inputStream, outputStream)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-t", "--timeout"],
|
||||
description = ["Exit after the specified time"],
|
||||
paramLabel = "TIMEOUT",
|
||||
converter = [DurationConverter::class]
|
||||
)
|
||||
private var timeout: Duration? = null
|
||||
|
||||
@CommandLine.Option(
|
||||
names = ["-c", "--config-file"],
|
||||
description = ["Read the application configuration from this file"],
|
||||
paramLabel = "CONFIG_FILE"
|
||||
)
|
||||
private var configurationFile: Path = findConfigurationFile(app, "rbcs-server.xml")
|
||||
|
||||
override fun run() {
|
||||
if (!Files.exists(configurationFile)) {
|
||||
Files.createDirectories(configurationFile.parent)
|
||||
createDefaultConfigurationFile(configurationFile)
|
||||
}
|
||||
|
||||
val configuration = RemoteBuildCacheServer.loadConfiguration(configurationFile)
|
||||
log.debug {
|
||||
ByteArrayOutputStream().also {
|
||||
RemoteBuildCacheServer.dumpConfiguration(configuration, it)
|
||||
}.let {
|
||||
"Server configuration:\n${String(it.toByteArray())}"
|
||||
}
|
||||
}
|
||||
val server = RemoteBuildCacheServer(configuration)
|
||||
val handle = server.run()
|
||||
val shutdownHook = Thread.ofPlatform().unstarted {
|
||||
handle.sendShutdownSignal()
|
||||
try {
|
||||
handle.get(60, TimeUnit.SECONDS)
|
||||
} catch (ex : Throwable) {
|
||||
log.warn(ex.message, ex)
|
||||
}
|
||||
}
|
||||
Runtime.getRuntime().addShutdownHook(shutdownHook)
|
||||
if(timeout != null) {
|
||||
Thread.sleep(timeout)
|
||||
handle.sendShutdownSignal()
|
||||
}
|
||||
handle.get()
|
||||
}
|
||||
}
|
@@ -0,0 +1,10 @@
package net.woggioni.rbcs.cli.impl.converters

import picocli.CommandLine


class ByteSizeConverter : CommandLine.ITypeConverter<Int> {
    override fun convert(value: String): Int {
        return Integer.decode(value)
    }
}
@@ -0,0 +1,11 @@
package net.woggioni.rbcs.cli.impl.converters

import picocli.CommandLine
import java.time.Duration


class DurationConverter : CommandLine.ITypeConverter<Duration> {
    override fun convert(value: String): Duration {
        return Duration.parse(value)
    }
}
@@ -0,0 +1,13 @@
package net.woggioni.rbcs.cli.impl.converters

import picocli.CommandLine
import java.io.InputStream
import java.nio.file.Files
import java.nio.file.Paths


class InputStreamConverter : CommandLine.ITypeConverter<InputStream> {
    override fun convert(value: String): InputStream {
        return Files.newInputStream(Paths.get(value))
    }
}
@@ -0,0 +1,13 @@
package net.woggioni.rbcs.cli.impl.converters

import picocli.CommandLine
import java.io.OutputStream
import java.nio.file.Files
import java.nio.file.Paths


class OutputStreamConverter : CommandLine.ITypeConverter<OutputStream> {
    override fun convert(value: String): OutputStream {
        return Files.newOutputStream(Paths.get(value))
    }
}
19
rbcs-client/build.gradle
Normal file
@@ -0,0 +1,19 @@
plugins {
    id 'java-library'
    alias catalog.plugins.kotlin.jvm
}

dependencies {
    implementation project(':rbcs-api')
    implementation project(':rbcs-common')
    implementation catalog.slf4j.api
    implementation catalog.netty.buffer
    implementation catalog.netty.handler
    implementation catalog.netty.transport
    implementation catalog.netty.common
    implementation catalog.netty.codec.http

    testRuntimeOnly catalog.logback.classic
}
@@ -1,15 +1,17 @@
|
||||
module net.woggioni.gbcs {
|
||||
requires java.xml;
|
||||
requires java.logging;
|
||||
requires kotlin.stdlib;
|
||||
requires io.netty.buffer;
|
||||
requires io.netty.transport;
|
||||
requires io.netty.codec.http;
|
||||
requires io.netty.common;
|
||||
module net.woggioni.rbcs.client {
|
||||
requires io.netty.handler;
|
||||
requires io.netty.codec.http;
|
||||
requires io.netty.transport;
|
||||
requires kotlin.stdlib;
|
||||
requires io.netty.common;
|
||||
requires io.netty.buffer;
|
||||
requires java.xml;
|
||||
requires net.woggioni.rbcs.common;
|
||||
requires net.woggioni.rbcs.api;
|
||||
requires io.netty.codec;
|
||||
requires org.slf4j;
|
||||
requires net.woggioni.jwo;
|
||||
|
||||
exports net.woggioni.gbcs;
|
||||
exports net.woggioni.rbcs.client;
|
||||
|
||||
opens net.woggioni.rbcs.client.schema;
|
||||
}
|
457
rbcs-client/src/main/kotlin/net/woggioni/rbcs/client/Client.kt
Normal file
@@ -0,0 +1,457 @@
|
||||
package net.woggioni.rbcs.client
|
||||
|
||||
import io.netty.bootstrap.Bootstrap
|
||||
import io.netty.buffer.ByteBuf
|
||||
import io.netty.buffer.Unpooled
|
||||
import io.netty.channel.Channel
|
||||
import io.netty.channel.ChannelHandler
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import io.netty.channel.ChannelInboundHandlerAdapter
|
||||
import io.netty.channel.ChannelOption
|
||||
import io.netty.channel.ChannelPipeline
|
||||
import io.netty.channel.SimpleChannelInboundHandler
|
||||
import io.netty.channel.nio.NioEventLoopGroup
|
||||
import io.netty.channel.pool.AbstractChannelPoolHandler
|
||||
import io.netty.channel.pool.ChannelPool
|
||||
import io.netty.channel.pool.FixedChannelPool
|
||||
import io.netty.channel.socket.nio.NioSocketChannel
|
||||
import io.netty.handler.codec.DecoderException
|
||||
import io.netty.handler.codec.http.DefaultFullHttpRequest
|
||||
import io.netty.handler.codec.http.FullHttpRequest
|
||||
import io.netty.handler.codec.http.FullHttpResponse
|
||||
import io.netty.handler.codec.http.HttpClientCodec
|
||||
import io.netty.handler.codec.http.HttpContentDecompressor
|
||||
import io.netty.handler.codec.http.HttpHeaderNames
|
||||
import io.netty.handler.codec.http.HttpHeaderValues
|
||||
import io.netty.handler.codec.http.HttpMethod
|
||||
import io.netty.handler.codec.http.HttpObjectAggregator
|
||||
import io.netty.handler.codec.http.HttpResponseStatus
|
||||
import io.netty.handler.codec.http.HttpVersion
|
||||
import io.netty.handler.ssl.SslContext
|
||||
import io.netty.handler.ssl.SslContextBuilder
|
||||
import io.netty.handler.stream.ChunkedWriteHandler
|
||||
import io.netty.handler.timeout.IdleState
|
||||
import io.netty.handler.timeout.IdleStateEvent
|
||||
import io.netty.handler.timeout.IdleStateHandler
|
||||
import io.netty.util.concurrent.Future
|
||||
import io.netty.util.concurrent.GenericFutureListener
|
||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
||||
import net.woggioni.rbcs.client.impl.Parser
|
||||
import net.woggioni.rbcs.common.Xml
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import net.woggioni.rbcs.common.debug
|
||||
import net.woggioni.rbcs.common.trace
|
||||
import java.io.IOException
|
||||
import java.net.InetSocketAddress
|
||||
import java.net.URI
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.security.PrivateKey
|
||||
import java.security.cert.X509Certificate
|
||||
import java.time.Duration
|
||||
import java.util.Base64
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.concurrent.TimeUnit
|
||||
import java.util.concurrent.TimeoutException
|
||||
import java.util.concurrent.atomic.AtomicInteger
|
||||
import kotlin.random.Random
|
||||
import io.netty.util.concurrent.Future as NettyFuture
|
||||
|
||||
class RemoteBuildCacheClient(private val profile: Configuration.Profile) : AutoCloseable {
|
||||
companion object{
|
||||
private val log = createLogger<RemoteBuildCacheClient>()
|
||||
}
|
||||
|
||||
private val group: NioEventLoopGroup
|
||||
private var sslContext: SslContext
|
||||
private val pool: ChannelPool
|
||||
|
||||
data class Configuration(
|
||||
val profiles: Map<String, Profile>
|
||||
) {
|
||||
sealed class Authentication {
|
||||
data class TlsClientAuthenticationCredentials(
|
||||
val key: PrivateKey,
|
||||
val certificateChain: Array<X509Certificate>
|
||||
) : Authentication()
|
||||
|
||||
data class BasicAuthenticationCredentials(val username: String, val password: String) : Authentication()
|
||||
}
|
||||
|
||||
class RetryPolicy(
|
||||
val maxAttempts: Int,
|
||||
val initialDelayMillis: Long,
|
||||
val exp: Double
|
||||
)
|
||||
|
||||
class Connection(
|
||||
val readTimeout: Duration,
|
||||
val writeTimeout: Duration,
|
||||
val idleTimeout: Duration,
|
||||
val readIdleTimeout: Duration,
|
||||
val writeIdleTimeout: Duration
|
||||
)
|
||||
|
||||
data class Profile(
|
||||
val serverURI: URI,
|
||||
val connection: Connection?,
|
||||
val authentication: Authentication?,
|
||||
val connectionTimeout: Duration?,
|
||||
val maxConnections: Int,
|
||||
val compressionEnabled: Boolean,
|
||||
val retryPolicy: RetryPolicy?,
|
||||
)
|
||||
|
||||
companion object {
|
||||
fun parse(path: Path): Configuration {
|
||||
return Files.newInputStream(path).use {
|
||||
Xml.parseXml(path.toUri().toURL(), it)
|
||||
}.let(Parser::parse)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
init {
|
||||
group = NioEventLoopGroup()
|
||||
sslContext = SslContextBuilder.forClient().also { builder ->
|
||||
(profile.authentication as? Configuration.Authentication.TlsClientAuthenticationCredentials)?.let { tlsClientAuthenticationCredentials ->
|
||||
builder.keyManager(
|
||||
tlsClientAuthenticationCredentials.key,
|
||||
*tlsClientAuthenticationCredentials.certificateChain
|
||||
)
|
||||
}
|
||||
}.build()
|
||||
|
||||
val (scheme, host, port) = profile.serverURI.run {
|
||||
Triple(
|
||||
if (scheme == null) "http" else profile.serverURI.scheme,
|
||||
host,
|
||||
port.takeIf { it > 0 } ?: if ("https" == scheme.lowercase()) 443 else 80
|
||||
)
|
||||
}
|
||||
|
||||
val bootstrap = Bootstrap().apply {
|
||||
group(group)
|
||||
channel(NioSocketChannel::class.java)
|
||||
option(ChannelOption.TCP_NODELAY, true)
|
||||
option(ChannelOption.SO_KEEPALIVE, true)
|
||||
remoteAddress(InetSocketAddress(host, port))
|
||||
profile.connectionTimeout?.let {
|
||||
option(ChannelOption.CONNECT_TIMEOUT_MILLIS, it.toMillis().toInt())
|
||||
}
|
||||
}
|
||||
val channelPoolHandler = object : AbstractChannelPoolHandler() {
|
||||
|
||||
@Volatile
|
||||
private var connectionCount = AtomicInteger()
|
||||
|
||||
@Volatile
|
||||
private var leaseCount = AtomicInteger()
|
||||
|
||||
override fun channelReleased(ch: Channel) {
|
||||
val activeLeases = leaseCount.decrementAndGet()
|
||||
log.trace {
|
||||
"Released channel ${ch.id().asShortText()}, number of active leases: $activeLeases"
|
||||
}
|
||||
}
|
||||
|
||||
override fun channelAcquired(ch: Channel) {
|
||||
val activeLeases = leaseCount.getAndIncrement()
|
||||
log.trace {
|
||||
"Acquired channel ${ch.id().asShortText()}, number of active leases: $activeLeases"
|
||||
}
|
||||
}
|
||||
|
||||
override fun channelCreated(ch: Channel) {
|
||||
val connectionId = connectionCount.incrementAndGet()
|
||||
log.debug {
|
||||
"Created connection ${ch.id().asShortText()}, total number of active connections: $connectionId"
|
||||
}
|
||||
ch.closeFuture().addListener {
|
||||
val activeConnections = connectionCount.decrementAndGet()
|
||||
log.debug {
|
||||
"Closed connection ${
|
||||
ch.id().asShortText()
|
||||
}, total number of active connections: $activeConnections"
|
||||
}
|
||||
}
|
||||
val pipeline: ChannelPipeline = ch.pipeline()
|
||||
|
||||
profile.connection?.also { conn ->
|
||||
val readTimeout = conn.readTimeout.toMillis()
|
||||
val writeTimeout = conn.writeTimeout.toMillis()
|
||||
if (readTimeout > 0 || writeTimeout > 0) {
|
||||
pipeline.addLast(
|
||||
IdleStateHandler(
|
||||
false,
|
||||
readTimeout,
|
||||
writeTimeout,
|
||||
0,
|
||||
TimeUnit.MILLISECONDS
|
||||
)
|
||||
)
|
||||
}
|
||||
val readIdleTimeout = conn.readIdleTimeout.toMillis()
|
||||
val writeIdleTimeout = conn.writeIdleTimeout.toMillis()
|
||||
val idleTimeout = conn.idleTimeout.toMillis()
|
||||
if (readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
|
||||
pipeline.addLast(
|
||||
IdleStateHandler(
|
||||
true,
|
||||
readIdleTimeout,
|
||||
writeIdleTimeout,
|
||||
idleTimeout,
|
||||
TimeUnit.MILLISECONDS
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
// Add SSL handler if needed
|
||||
if ("https".equals(scheme, ignoreCase = true)) {
|
||||
pipeline.addLast("ssl", sslContext.newHandler(ch.alloc(), host, port))
|
||||
}
|
||||
|
||||
// HTTP handlers
|
||||
pipeline.addLast("codec", HttpClientCodec())
|
||||
if(profile.compressionEnabled) {
|
||||
pipeline.addLast("decompressor", HttpContentDecompressor())
|
||||
}
|
||||
pipeline.addLast("aggregator", HttpObjectAggregator(134217728))
|
||||
pipeline.addLast("chunked", ChunkedWriteHandler())
|
||||
}
|
||||
}
|
||||
pool = FixedChannelPool(bootstrap, channelPoolHandler, profile.maxConnections)
|
||||
}
|
||||
|
||||
private fun executeWithRetry(operation: () -> CompletableFuture<FullHttpResponse>): CompletableFuture<FullHttpResponse> {
|
||||
val retryPolicy = profile.retryPolicy
|
||||
return if (retryPolicy != null) {
|
||||
val outcomeHandler = OutcomeHandler<FullHttpResponse> { outcome ->
|
||||
when (outcome) {
|
||||
is OperationOutcome.Success -> {
|
||||
val response = outcome.result
|
||||
val status = response.status()
|
||||
when (status) {
|
||||
HttpResponseStatus.TOO_MANY_REQUESTS -> {
|
||||
val retryAfter = response.headers()[HttpHeaderNames.RETRY_AFTER]?.let { headerValue ->
|
||||
try {
|
||||
headerValue.toLong() * 1000
|
||||
} catch (nfe: NumberFormatException) {
|
||||
null
|
||||
}
|
||||
}
|
||||
OutcomeHandlerResult.Retry(retryAfter)
|
||||
}
|
||||
|
||||
HttpResponseStatus.INTERNAL_SERVER_ERROR, HttpResponseStatus.SERVICE_UNAVAILABLE ->
|
||||
OutcomeHandlerResult.Retry()
|
||||
|
||||
else -> OutcomeHandlerResult.DoNotRetry()
|
||||
}
|
||||
}
|
||||
|
||||
is OperationOutcome.Failure -> {
|
||||
OutcomeHandlerResult.Retry()
|
||||
}
|
||||
}
|
||||
}
|
||||
executeWithRetry(
|
||||
group,
|
||||
retryPolicy.maxAttempts,
|
||||
retryPolicy.initialDelayMillis.toDouble(),
|
||||
retryPolicy.exp,
|
||||
outcomeHandler,
|
||||
Random.Default,
|
||||
operation
|
||||
)
|
||||
} else {
|
||||
operation()
|
||||
}
|
||||
}
|
||||
|
||||
fun healthCheck(nonce: ByteArray): CompletableFuture<ByteArray?> {
|
||||
return executeWithRetry {
|
||||
sendRequest(profile.serverURI, HttpMethod.TRACE, nonce)
|
||||
}.thenApply {
|
||||
val status = it.status()
|
||||
if (it.status() != HttpResponseStatus.OK) {
|
||||
throw HttpException(status)
|
||||
} else {
|
||||
it.content()
|
||||
}
|
||||
}.thenApply { maybeByteBuf ->
|
||||
maybeByteBuf?.let {
|
||||
val result = ByteArray(it.readableBytes())
|
||||
it.getBytes(0, result)
|
||||
result
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun get(key: String): CompletableFuture<ByteArray?> {
|
||||
return executeWithRetry {
|
||||
sendRequest(profile.serverURI.resolve(key), HttpMethod.GET, null)
|
||||
}.thenApply {
|
||||
val status = it.status()
|
||||
if (it.status() == HttpResponseStatus.NOT_FOUND) {
|
||||
null
|
||||
} else if (it.status() != HttpResponseStatus.OK) {
|
||||
throw HttpException(status)
|
||||
} else {
|
||||
it.content()
|
||||
}
|
||||
}.thenApply { maybeByteBuf ->
|
||||
maybeByteBuf?.let {
|
||||
val result = ByteArray(it.readableBytes())
|
||||
it.getBytes(0, result)
|
||||
result
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun put(key: String, content: ByteArray, metadata: CacheValueMetadata): CompletableFuture<Unit> {
|
||||
return executeWithRetry {
|
||||
val extraHeaders = sequenceOf(
|
||||
metadata.mimeType?.let { HttpHeaderNames.CONTENT_TYPE to it },
|
||||
metadata.contentDisposition?.let { HttpHeaderNames.CONTENT_DISPOSITION to it }
|
||||
).filterNotNull()
|
||||
sendRequest(profile.serverURI.resolve(key), HttpMethod.PUT, content, extraHeaders.asIterable())
|
||||
}.thenApply {
|
||||
val status = it.status()
|
||||
if (it.status() != HttpResponseStatus.CREATED && it.status() != HttpResponseStatus.OK) {
|
||||
throw HttpException(status)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private fun sendRequest(
|
||||
uri: URI,
|
||||
method: HttpMethod,
|
||||
body: ByteArray?,
|
||||
extraHeaders: Iterable<Pair<CharSequence, CharSequence>>? = null
|
||||
): CompletableFuture<FullHttpResponse> {
|
||||
val responseFuture = CompletableFuture<FullHttpResponse>()
|
||||
// Custom handler for processing responses
|
||||
|
||||
pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {
|
||||
private val handlers = mutableListOf<ChannelHandler>()
|
||||
|
||||
fun cleanup(channel: Channel, pipeline: ChannelPipeline) {
|
||||
handlers.forEach(pipeline::remove)
|
||||
pool.release(channel)
|
||||
}
|
||||
|
||||
override fun operationComplete(channelFuture: Future<Channel>) {
|
||||
if (channelFuture.isSuccess) {
|
||||
val channel = channelFuture.now
|
||||
val pipeline = channel.pipeline()
|
||||
val timeoutHandler = object : ChannelInboundHandlerAdapter() {
|
||||
override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
|
||||
if (evt is IdleStateEvent) {
|
||||
val te = when (evt.state()) {
|
||||
IdleState.READER_IDLE -> TimeoutException(
|
||||
"Read timeout",
|
||||
)
|
||||
|
||||
IdleState.WRITER_IDLE -> TimeoutException("Write timeout")
|
||||
|
||||
IdleState.ALL_IDLE -> TimeoutException("Idle timeout")
|
||||
null -> throw IllegalStateException("This should never happen")
|
||||
}
|
||||
responseFuture.completeExceptionally(te)
|
||||
ctx.close()
|
||||
}
|
||||
}
|
||||
}
|
||||
val closeListener = GenericFutureListener<Future<Void>> {
|
||||
responseFuture.completeExceptionally(IOException("The remote server closed the connection"))
|
||||
pool.release(channel)
|
||||
}
|
||||
|
||||
val responseHandler = object : SimpleChannelInboundHandler<FullHttpResponse>() {
|
||||
override fun channelRead0(
|
||||
ctx: ChannelHandlerContext,
|
||||
response: FullHttpResponse
|
||||
) {
|
||||
channel.closeFuture().removeListener(closeListener)
|
||||
cleanup(channel, pipeline)
|
||||
responseFuture.complete(response)
|
||||
}
|
||||
|
||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
||||
ctx.newPromise()
|
||||
val ex = when (cause) {
|
||||
is DecoderException -> cause.cause
|
||||
else -> cause
|
||||
}
|
||||
responseFuture.completeExceptionally(ex)
|
||||
ctx.close()
|
||||
}
|
||||
|
||||
override fun channelInactive(ctx: ChannelHandlerContext) {
|
||||
pool.release(channel)
|
||||
responseFuture.completeExceptionally(IOException("The remote server closed the connection"))
|
||||
super.channelInactive(ctx)
|
||||
}
|
||||
}
|
||||
for (handler in arrayOf(timeoutHandler, responseHandler)) {
|
||||
handlers.add(handler)
|
||||
}
|
||||
pipeline.addLast(timeoutHandler, responseHandler)
|
||||
channel.closeFuture().addListener(closeListener)
|
||||
|
||||
|
||||
// Prepare the HTTP request
|
||||
val request: FullHttpRequest = let {
|
||||
val content: ByteBuf? = body?.takeIf(ByteArray::isNotEmpty)?.let(Unpooled::wrappedBuffer)
|
||||
DefaultFullHttpRequest(
|
||||
HttpVersion.HTTP_1_1,
|
||||
method,
|
||||
uri.rawPath,
|
||||
content ?: Unpooled.buffer(0)
|
||||
).apply {
|
||||
headers().apply {
|
||||
if (content != null) {
|
||||
set(HttpHeaderNames.CONTENT_LENGTH, content.readableBytes())
|
||||
}
|
||||
set(HttpHeaderNames.HOST, profile.serverURI.host)
|
||||
set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
|
||||
if(profile.compressionEnabled) {
|
||||
set(
|
||||
HttpHeaderNames.ACCEPT_ENCODING,
|
||||
HttpHeaderValues.GZIP.toString() + "," + HttpHeaderValues.DEFLATE.toString()
|
||||
)
|
||||
}
|
||||
extraHeaders?.forEach { (k, v) ->
|
||||
add(k, v)
|
||||
}
|
||||
// Add basic auth if configured
|
||||
(profile.authentication as? Configuration.Authentication.BasicAuthenticationCredentials)?.let { credentials ->
|
||||
val auth = "${credentials.username}:${credentials.password}"
|
||||
val encodedAuth = Base64.getEncoder().encodeToString(auth.toByteArray())
|
||||
set(HttpHeaderNames.AUTHORIZATION, "Basic $encodedAuth")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Set headers
|
||||
// Send the request
|
||||
channel.writeAndFlush(request)
|
||||
} else {
|
||||
responseFuture.completeExceptionally(channelFuture.cause())
|
||||
}
|
||||
}
|
||||
})
|
||||
return responseFuture
|
||||
}
|
||||
|
||||
fun shutDown(): NettyFuture<*> {
|
||||
return group.shutdownGracefully()
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
shutDown().sync()
|
||||
}
|
||||
}
|
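RemoteBuildCacheClient ties together the channel pool, optional TLS or basic authentication and the retry policy handled by executeWithRetry. A minimal usage sketch follows; every profile value below is an illustrative placeholder, not a default taken from the project.

import net.woggioni.rbcs.api.CacheValueMetadata
import net.woggioni.rbcs.client.RemoteBuildCacheClient
import java.net.URI
import java.time.Duration

fun main() {
    // Hypothetical profile: the URI, credentials and limits are made-up example values
    val profile = RemoteBuildCacheClient.Configuration.Profile(
        serverURI = URI("https://cache.example.com/"),
        connection = null,
        authentication = RemoteBuildCacheClient.Configuration.Authentication
            .BasicAuthenticationCredentials("user", "password"),
        connectionTimeout = Duration.ofSeconds(5),
        maxConnections = 10,
        compressionEnabled = true,
        retryPolicy = null
    )
    RemoteBuildCacheClient(profile).use { client ->
        // Store a value, then read it back; both calls return CompletableFuture
        client.put("some-key", "hello".toByteArray(), CacheValueMetadata(null, "text/plain")).get()
        val value = client.get("some-key").get()
        println(value?.toString(Charsets.UTF_8))
    }
}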
@@ -0,0 +1,9 @@
package net.woggioni.rbcs.client

import io.netty.handler.codec.http.HttpResponseStatus

class HttpException(private val status : HttpResponseStatus) : RuntimeException(status.reasonPhrase()) {

    override val message: String
        get() = "Http status ${status.code()}: ${status.reasonPhrase()}"
}
@@ -0,0 +1,136 @@
|
||||
package net.woggioni.rbcs.client.impl
|
||||
|
||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
||||
import net.woggioni.rbcs.client.RemoteBuildCacheClient
|
||||
import net.woggioni.rbcs.common.Xml.Companion.asIterable
|
||||
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
|
||||
import org.w3c.dom.Document
|
||||
import java.net.URI
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.security.KeyStore
|
||||
import java.security.PrivateKey
|
||||
import java.security.cert.X509Certificate
|
||||
import java.time.Duration
|
||||
import java.time.temporal.ChronoUnit
|
||||
|
||||
object Parser {
|
||||
|
||||
fun parse(document: Document): RemoteBuildCacheClient.Configuration {
|
||||
val root = document.documentElement
|
||||
val profiles = mutableMapOf<String, RemoteBuildCacheClient.Configuration.Profile>()
|
||||
|
||||
for (child in root.asIterable()) {
|
||||
val tagName = child.localName
|
||||
when (tagName) {
|
||||
"profile" -> {
|
||||
val name =
|
||||
child.renderAttribute("name") ?: throw ConfigurationException("name attribute is required")
|
||||
val uri = child.renderAttribute("base-url")?.let(::URI)
|
||||
?: throw ConfigurationException("base-url attribute is required")
|
||||
var authentication: RemoteBuildCacheClient.Configuration.Authentication? = null
|
||||
var retryPolicy: RemoteBuildCacheClient.Configuration.RetryPolicy? = null
|
||||
var connection : RemoteBuildCacheClient.Configuration.Connection? = null
|
||||
for (gchild in child.asIterable()) {
|
||||
when (gchild.localName) {
|
||||
"tls-client-auth" -> {
|
||||
val keyStoreFile = gchild.renderAttribute("key-store-file")
|
||||
val keyStorePassword =
|
||||
gchild.renderAttribute("key-store-password")
|
||||
val keyAlias = gchild.renderAttribute("key-alias")
|
||||
val keyPassword = gchild.renderAttribute("key-password")
|
||||
|
||||
val keystore = KeyStore.getInstance("PKCS12").apply {
|
||||
Files.newInputStream(Path.of(keyStoreFile)).use {
|
||||
load(it, keyStorePassword?.toCharArray())
|
||||
}
|
||||
}
|
||||
val key = keystore.getKey(keyAlias, keyPassword?.toCharArray()) as PrivateKey
|
||||
val certChain = keystore.getCertificateChain(keyAlias).asSequence()
|
||||
.map { it as X509Certificate }
|
||||
.toList()
|
||||
.toTypedArray()
|
||||
authentication =
|
||||
RemoteBuildCacheClient.Configuration.Authentication.TlsClientAuthenticationCredentials(
|
||||
key,
|
||||
certChain
|
||||
)
|
||||
}
|
||||
|
||||
"basic-auth" -> {
|
||||
val username = gchild.renderAttribute("user")
|
||||
?: throw ConfigurationException("username attribute is required")
|
||||
val password = gchild.renderAttribute("password")
|
||||
?: throw ConfigurationException("password attribute is required")
|
||||
authentication =
|
||||
RemoteBuildCacheClient.Configuration.Authentication.BasicAuthenticationCredentials(
|
||||
username,
|
||||
password
|
||||
)
|
||||
}
|
||||
|
||||
"retry-policy" -> {
|
||||
val maxAttempts =
|
||||
gchild.renderAttribute("max-attempts")
|
||||
?.let(String::toInt)
|
||||
?: throw ConfigurationException("max-attempts attribute is required")
|
||||
val initialDelay =
|
||||
gchild.renderAttribute("initial-delay")
|
||||
?.let(Duration::parse)
|
||||
?: Duration.ofSeconds(1)
|
||||
val exp =
|
||||
gchild.renderAttribute("exp")
|
||||
?.let(String::toDouble)
|
||||
?: 2.0
|
||||
retryPolicy = RemoteBuildCacheClient.Configuration.RetryPolicy(
|
||||
maxAttempts,
|
||||
initialDelay.toMillis(),
|
||||
exp.toDouble()
|
||||
)
|
||||
}
|
||||
|
||||
"connection" -> {
|
||||
val writeTimeout = gchild.renderAttribute("write-timeout")
|
||||
?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
|
||||
val readTimeout = gchild.renderAttribute("read-timeout")
|
||||
?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
|
||||
val idleTimeout = gchild.renderAttribute("idle-timeout")
|
||||
?.let(Duration::parse) ?: Duration.of(30, ChronoUnit.SECONDS)
|
||||
val readIdleTimeout = gchild.renderAttribute("read-idle-timeout")
|
||||
?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
|
||||
val writeIdleTimeout = gchild.renderAttribute("write-idle-timeout")
|
||||
?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
|
||||
connection = RemoteBuildCacheClient.Configuration.Connection(
|
||||
readTimeout,
|
||||
writeTimeout,
|
||||
idleTimeout,
|
||||
readIdleTimeout,
|
||||
writeIdleTimeout,
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
val maxConnections = child.renderAttribute("max-connections")
|
||||
?.let(String::toInt)
|
||||
?: 50
|
||||
val connectionTimeout = child.renderAttribute("connection-timeout")
|
||||
?.let(Duration::parse)
|
||||
val compressionEnabled = child.renderAttribute("enable-compression")
|
||||
?.let(String::toBoolean)
|
||||
?: true
|
||||
|
||||
profiles[name] = RemoteBuildCacheClient.Configuration.Profile(
|
||||
uri,
|
||||
connection,
|
||||
authentication,
|
||||
connectionTimeout,
|
||||
maxConnections,
|
||||
compressionEnabled,
|
||||
retryPolicy
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
return RemoteBuildCacheClient.Configuration(profiles)
|
||||
}
|
||||
}
|
@@ -0,0 +1,79 @@
|
||||
package net.woggioni.rbcs.client
|
||||
|
||||
import io.netty.util.concurrent.EventExecutorGroup
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.concurrent.TimeUnit
|
||||
import kotlin.math.pow
|
||||
import kotlin.random.Random
|
||||
|
||||
sealed class OperationOutcome<T> {
|
||||
class Success<T>(val result: T) : OperationOutcome<T>()
|
||||
class Failure<T>(val ex: Throwable) : OperationOutcome<T>()
|
||||
}
|
||||
|
||||
sealed class OutcomeHandlerResult {
|
||||
class Retry(val suggestedDelayMillis: Long? = null) : OutcomeHandlerResult()
|
||||
class DoNotRetry : OutcomeHandlerResult()
|
||||
}
|
||||
|
||||
fun interface OutcomeHandler<T> {
|
||||
fun shouldRetry(result: OperationOutcome<T>): OutcomeHandlerResult
|
||||
}
|
||||
|
||||
fun <T> executeWithRetry(
|
||||
eventExecutorGroup: EventExecutorGroup,
|
||||
maxAttempts: Int,
|
||||
initialDelay: Double,
|
||||
exp: Double,
|
||||
outcomeHandler: OutcomeHandler<T>,
|
||||
randomizer : Random?,
|
||||
cb: () -> CompletableFuture<T>
|
||||
): CompletableFuture<T> {
|
||||
|
||||
val finalResult = cb()
|
||||
var future = finalResult
|
||||
var shortCircuit = false
|
||||
for (i in 1 until maxAttempts) {
|
||||
future = future.handle { result, ex ->
|
||||
val operationOutcome = if (ex == null) {
|
||||
OperationOutcome.Success(result)
|
||||
} else {
|
||||
OperationOutcome.Failure(ex.cause ?: ex)
|
||||
}
|
||||
if (shortCircuit) {
|
||||
when(operationOutcome) {
|
||||
is OperationOutcome.Failure -> throw operationOutcome.ex
|
||||
is OperationOutcome.Success -> CompletableFuture.completedFuture(operationOutcome.result)
|
||||
}
|
||||
} else {
|
||||
when(val outcomeHandlerResult = outcomeHandler.shouldRetry(operationOutcome)) {
|
||||
is OutcomeHandlerResult.Retry -> {
|
||||
val res = CompletableFuture<T>()
|
||||
val delay = run {
|
||||
val scheduledDelay = (initialDelay * exp.pow(i.toDouble()) * (1.0 + (randomizer?.nextDouble(-0.5, 0.5) ?: 0.0))).toLong()
|
||||
outcomeHandlerResult.suggestedDelayMillis?.coerceAtMost(scheduledDelay) ?: scheduledDelay
|
||||
}
|
||||
eventExecutorGroup.schedule({
|
||||
cb().handle { result, ex ->
|
||||
if (ex == null) {
|
||||
res.complete(result)
|
||||
} else {
|
||||
res.completeExceptionally(ex)
|
||||
}
|
||||
}
|
||||
}, delay, TimeUnit.MILLISECONDS)
|
||||
res
|
||||
}
|
||||
is OutcomeHandlerResult.DoNotRetry -> {
|
||||
shortCircuit = true
|
||||
when(operationOutcome) {
|
||||
is OperationOutcome.Failure -> throw operationOutcome.ex
|
||||
is OperationOutcome.Success -> CompletableFuture.completedFuture(operationOutcome.result)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}.thenCompose { it }
|
||||
}
|
||||
return future
|
||||
}
|
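executeWithRetry chains up to maxAttempts asynchronous attempts, rescheduling each retry on the Netty event loop after initialDelay * exp^i milliseconds, with optional ±50% jitter and, when the outcome handler suggests a delay (e.g. from Retry-After), capped at the computed delay. The sketch below shows one way a caller might drive it; the flaky operation is invented purely for illustration.

import io.netty.util.concurrent.DefaultEventExecutorGroup
import net.woggioni.rbcs.client.OperationOutcome
import net.woggioni.rbcs.client.OutcomeHandler
import net.woggioni.rbcs.client.OutcomeHandlerResult
import net.woggioni.rbcs.client.executeWithRetry
import java.util.concurrent.CompletableFuture

fun main() {
    val executor = DefaultEventExecutorGroup(1)
    var calls = 0
    // Invented flaky operation: fails twice, then succeeds
    val operation = {
        calls += 1
        if (calls < 3) CompletableFuture.failedFuture<String>(IllegalStateException("try again"))
        else CompletableFuture.completedFuture("ok after $calls attempts")
    }
    // Retry on any failure, stop as soon as a result arrives
    val outcomeHandler = OutcomeHandler<String> { outcome ->
        when (outcome) {
            is OperationOutcome.Success -> OutcomeHandlerResult.DoNotRetry()
            is OperationOutcome.Failure -> OutcomeHandlerResult.Retry()
        }
    }
    val result = executeWithRetry(
        executor,
        maxAttempts = 5,
        initialDelay = 100.0,
        exp = 2.0,
        outcomeHandler = outcomeHandler,
        randomizer = null,
        cb = operation
    ).get()
    println(result)
    executor.shutdownGracefully()
}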
@@ -0,0 +1,60 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<xs:schema targetNamespace="urn:net.woggioni.rbcs.client"
|
||||
xmlns:xs="http://www.w3.org/2001/XMLSchema"
|
||||
xmlns:rbcs-client="urn:net.woggioni.rbcs.client"
|
||||
elementFormDefault="unqualified"
|
||||
>
|
||||
<xs:element name="profiles" type="rbcs-client:profilesType"/>
|
||||
|
||||
<xs:complexType name="profilesType">
|
||||
<xs:sequence minOccurs="0">
|
||||
<xs:element name="profile" type="rbcs-client:profileType" maxOccurs="unbounded"/>
|
||||
</xs:sequence>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="profileType">
|
||||
<xs:sequence>
|
||||
<xs:choice>
|
||||
<xs:element name="no-auth" type="rbcs-client:noAuthType"/>
|
||||
<xs:element name="basic-auth" type="rbcs-client:basicAuthType"/>
|
||||
<xs:element name="tls-client-auth" type="rbcs-client:tlsClientAuthType"/>
|
||||
</xs:choice>
|
||||
<xs:element name="connection" type="rbcs-client:connectionType" minOccurs="0" />
|
||||
<xs:element name="retry-policy" type="rbcs-client:retryType" minOccurs="0"/>
|
||||
</xs:sequence>
|
||||
<xs:attribute name="name" type="xs:token" use="required"/>
|
||||
<xs:attribute name="base-url" type="xs:anyURI" use="required"/>
|
||||
<xs:attribute name="max-connections" type="xs:positiveInteger" default="50"/>
|
||||
<xs:attribute name="connection-timeout" type="xs:duration"/>
|
||||
<xs:attribute name="enable-compression" type="xs:boolean" default="true"/>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="connectionType">
|
||||
<xs:attribute name="read-timeout" type="xs:duration" use="optional" default="PT0S"/>
|
||||
<xs:attribute name="write-timeout" type="xs:duration" use="optional" default="PT0S"/>
|
||||
<xs:attribute name="idle-timeout" type="xs:duration" use="optional" default="PT30S"/>
|
||||
<xs:attribute name="read-idle-timeout" type="xs:duration" use="optional" default="PT60S"/>
|
||||
<xs:attribute name="write-idle-timeout" type="xs:duration" use="optional" default="PT60S"/>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="noAuthType"/>
|
||||
|
||||
<xs:complexType name="basicAuthType">
|
||||
<xs:attribute name="user" type="xs:token" use="required"/>
|
||||
<xs:attribute name="password" type="xs:string" use="required"/>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="tlsClientAuthType">
|
||||
<xs:attribute name="key-store-file" type="xs:anyURI" use="required"/>
|
||||
<xs:attribute name="key-store-password" type="xs:string" use="required"/>
|
||||
<xs:attribute name="key-alias" type="xs:token" use="required"/>
|
||||
<xs:attribute name="key-password" type="xs:string" use="optional"/>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="retryType">
|
||||
<xs:attribute name="max-attempts" type="xs:positiveInteger" use="required"/>
|
||||
<xs:attribute name="initial-delay" type="xs:duration" default="PT1S"/>
|
||||
<xs:attribute name="exp" type="xs:double" default="2.0"/>
|
||||
</xs:complexType>
|
||||
|
||||
</xs:schema>
|
@@ -0,0 +1,148 @@
|
||||
package net.woggioni.rbcs.client
|
||||
|
||||
import io.netty.util.concurrent.DefaultEventExecutorGroup
|
||||
import io.netty.util.concurrent.EventExecutorGroup
|
||||
import net.woggioni.rbcs.common.contextLogger
|
||||
import org.junit.jupiter.api.Assertions
|
||||
import org.junit.jupiter.api.extension.ExtensionContext
|
||||
import org.junit.jupiter.params.ParameterizedTest
|
||||
import org.junit.jupiter.params.provider.Arguments
|
||||
import org.junit.jupiter.params.provider.ArgumentsProvider
|
||||
import org.junit.jupiter.params.provider.ArgumentsSource
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.stream.Stream
|
||||
import kotlin.random.Random
|
||||
|
||||
class RetryTest {
|
||||
|
||||
data class TestArgs(
|
||||
val seed: Int,
|
||||
val maxAttempt: Int,
|
||||
val initialDelay: Double,
|
||||
val exp: Double,
|
||||
)
|
||||
|
||||
class TestArguments : ArgumentsProvider {
|
||||
override fun provideArguments(context: ExtensionContext): Stream<out Arguments> {
|
||||
return Stream.of(
|
||||
TestArgs(
|
||||
seed = 101325,
|
||||
maxAttempt = 5,
|
||||
initialDelay = 50.0,
|
||||
exp = 2.0,
|
||||
),
|
||||
TestArgs(
|
||||
seed = 101325,
|
||||
maxAttempt = 20,
|
||||
initialDelay = 100.0,
|
||||
exp = 1.1,
|
||||
),
|
||||
TestArgs(
|
||||
seed = 123487,
|
||||
maxAttempt = 20,
|
||||
initialDelay = 100.0,
|
||||
exp = 2.0,
|
||||
),
|
||||
TestArgs(
|
||||
seed = 20082024,
|
||||
maxAttempt = 10,
|
||||
initialDelay = 100.0,
|
||||
exp = 2.0,
|
||||
)
|
||||
).map {
|
||||
object: Arguments {
|
||||
override fun get() = arrayOf(it)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ArgumentsSource(TestArguments::class)
|
||||
@ParameterizedTest
|
||||
fun test(testArgs: TestArgs) {
|
||||
val log = contextLogger()
|
||||
log.debug("Start")
|
||||
val executor: EventExecutorGroup = DefaultEventExecutorGroup(1)
|
||||
val attempts = mutableListOf<Pair<Long, OperationOutcome<Int>>>()
|
||||
val outcomeHandler = OutcomeHandler<Int> { outcome ->
|
||||
when(outcome) {
|
||||
is OperationOutcome.Success -> {
|
||||
if(outcome.result % 10 == 0) {
|
||||
OutcomeHandlerResult.DoNotRetry()
|
||||
} else {
|
||||
OutcomeHandlerResult.Retry(null)
|
||||
}
|
||||
}
|
||||
is OperationOutcome.Failure -> {
|
||||
when(outcome.ex) {
|
||||
is IllegalStateException -> {
|
||||
log.debug(outcome.ex.message, outcome.ex)
|
||||
OutcomeHandlerResult.Retry(null)
|
||||
}
|
||||
else -> {
|
||||
OutcomeHandlerResult.DoNotRetry()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
val random = Random(testArgs.seed)
|
||||
|
||||
val future =
|
||||
executeWithRetry(executor, testArgs.maxAttempt, testArgs.initialDelay, testArgs.exp, outcomeHandler, null) {
|
||||
val now = System.nanoTime()
|
||||
val result = CompletableFuture<Int>()
|
||||
executor.submit {
|
||||
val n = random.nextInt(0, Integer.MAX_VALUE)
|
||||
log.debug("Got new number: {}", n)
|
||||
if(n % 3 == 0) {
|
||||
val ex = IllegalStateException("Value $n can be divided by 3")
|
||||
result.completeExceptionally(ex)
|
||||
attempts += now to OperationOutcome.Failure(ex)
|
||||
} else if(n % 7 == 0) {
|
||||
val ex = RuntimeException("Value $n can be divided by 7")
|
||||
result.completeExceptionally(ex)
|
||||
attempts += now to OperationOutcome.Failure(ex)
|
||||
} else {
|
||||
result.complete(n)
|
||||
attempts += now to OperationOutcome.Success(n)
|
||||
}
|
||||
}
|
||||
result
|
||||
}
|
||||
Assertions.assertTrue(attempts.size <= testArgs.maxAttempt)
|
||||
val result = future.handle { res, ex ->
|
||||
if(ex != null) {
|
||||
val err = ex.cause ?: ex
|
||||
log.debug(err.message, err)
|
||||
OperationOutcome.Failure(err)
|
||||
} else {
|
||||
OperationOutcome.Success(res)
|
||||
}
|
||||
}.get()
|
||||
for ((index, attempt) in attempts.withIndex()) {
|
||||
val (timestamp, value) = attempt
|
||||
if (index > 0) {
|
||||
/* Check the delay for subsequent attempts is correct */
|
||||
val previousAttempt = attempts[index - 1]
|
||||
val expectedTimestamp =
|
||||
previousAttempt.first + testArgs.initialDelay * Math.pow(testArgs.exp, index.toDouble()) * 1e6
|
||||
val actualTimestamp = timestamp
|
||||
val err = Math.abs(expectedTimestamp - actualTimestamp) / expectedTimestamp
|
||||
Assertions.assertTrue(err < 1e-2)
|
||||
}
|
||||
if (index == attempts.size - 1 && index < testArgs.maxAttempt - 1) {
|
||||
/*
|
||||
* If the last attempt index is lower than the maximum number of attempts, then
|
||||
* check the outcome handler returns DoNotRetry
|
||||
*/
|
||||
Assertions.assertTrue(outcomeHandler.shouldRetry(value) is OutcomeHandlerResult.DoNotRetry)
|
||||
} else if (index < attempts.size - 1) {
|
||||
/*
|
||||
* If the attempt is not the last attempt check the outcome handler returns Retry
|
||||
*/
|
||||
Assertions.assertTrue(outcomeHandler.shouldRetry(value) is OutcomeHandlerResult.Retry)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
21
rbcs-client/src/test/resources/logback.xml
Normal file
@@ -0,0 +1,21 @@
|
||||
<?xml version="1.0" encoding="UTF-8" ?>
|
||||
<!DOCTYPE configuration>
|
||||
|
||||
<configuration>
|
||||
<import class="ch.qos.logback.classic.encoder.PatternLayoutEncoder"/>
|
||||
<import class="ch.qos.logback.core.ConsoleAppender"/>
|
||||
|
||||
<appender name="console" class="ConsoleAppender">
|
||||
<target>System.err</target>
|
||||
<encoder class="PatternLayoutEncoder">
|
||||
<pattern>%d [%highlight(%-5level)] \(%thread\) %logger{36} -%kvp- %msg %n</pattern>
|
||||
</encoder>
|
||||
</appender>
|
||||
|
||||
<root level="info">
|
||||
<appender-ref ref="console"/>
|
||||
</root>
|
||||
<logger name="io.netty" level="info"/>
|
||||
<logger name="com.google.code.yanf4j" level="warn"/>
|
||||
<logger name="net.rubyeye.xmemcached" level="warn"/>
|
||||
</configuration>
|
@@ -0,0 +1,16 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<rbcs-client:profiles xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
|
||||
xmlns:rbcs-client="urn:net.woggioni.rbcs.client"
|
||||
xs:schemaLocation="urn:net.woggioni.rbcs.client jms://net.woggioni.rbcs.client/net/woggioni/rbcs/client/schema/rbcs-client.xsd"
|
||||
>
|
||||
<profile name="profile1" base-url="https://rbcs1.example.com/">
|
||||
<tls-client-auth
|
||||
key-store-file="keystore.pfx"
|
||||
key-store-password="password"
|
||||
key-alias="woggioni@c962475fa38"
|
||||
key-password="key-password"/>
|
||||
</profile>
|
||||
<profile name="profile2" base-url="https://rbcs2.example.com/">
|
||||
<basic-auth user="user" password="password"/>
|
||||
</profile>
|
||||
</rbcs-client:profiles>
|
21
rbcs-common/build.gradle
Normal file
@@ -0,0 +1,21 @@
|
||||
|
||||
plugins {
|
||||
id 'java-library'
|
||||
id 'maven-publish'
|
||||
alias catalog.plugins.kotlin.jvm
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation project(':rbcs-api')
|
||||
implementation catalog.slf4j.api
|
||||
implementation catalog.jwo
|
||||
implementation catalog.netty.buffer
|
||||
}
|
||||
|
||||
publishing {
|
||||
publications {
|
||||
maven(MavenPublication) {
|
||||
from(components["java"])
|
||||
}
|
||||
}
|
||||
}
|
12
rbcs-common/src/main/java/module-info.java
Normal file
@@ -0,0 +1,12 @@
module net.woggioni.rbcs.common {
    requires java.xml;
    requires java.logging;
    requires org.slf4j;
    requires kotlin.stdlib;
    requires net.woggioni.jwo;
    requires io.netty.buffer;
    requires io.netty.transport;

    provides java.net.spi.URLStreamHandlerProvider with net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory;
    exports net.woggioni.rbcs.common;
}
15
rbcs-common/src/main/kotlin/net/woggioni/rbcs/common/BB.kt
Normal file
@@ -0,0 +1,15 @@
package net.woggioni.rbcs.common

import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.CompositeByteBuf

fun extractChunk(buf: CompositeByteBuf, alloc: ByteBufAllocator): ByteBuf {
    val chunk = alloc.compositeBuffer()
    for (component in buf.decompose(0, buf.readableBytes())) {
        chunk.addComponent(true, component.retain())
    }
    buf.removeComponents(0, buf.numComponents())
    buf.clear()
    return chunk
}
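extractChunk drains every readable component of an accumulating CompositeByteBuf into a fresh composite, retaining each component so the source can drop its own references, then resets the source. A small hedged sketch of the intended accumulate-then-drain pattern; the sample data is invented for illustration.

import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.Unpooled
import net.woggioni.rbcs.common.extractChunk

fun main() {
    val alloc = ByteBufAllocator.DEFAULT
    val accumulator = alloc.compositeBuffer()
    // Accumulate a few incoming buffers
    accumulator.addComponent(true, Unpooled.copiedBuffer("hello ".toByteArray()))
    accumulator.addComponent(true, Unpooled.copiedBuffer("world".toByteArray()))

    // Drain everything accumulated so far into a standalone chunk; the accumulator is left empty
    val chunk = extractChunk(accumulator, alloc)
    println(chunk.toString(Charsets.UTF_8)) // prints "hello world"

    chunk.release()
    accumulator.release()
}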
@@ -0,0 +1,25 @@
package net.woggioni.rbcs.common

import io.netty.buffer.ByteBuf
import java.io.InputStream

class ByteBufInputStream(private val buf : ByteBuf) : InputStream() {
    override fun read(): Int {
        // InputStream.read() must return an unsigned byte (0-255) or -1 on EOF,
        // so mask the sign bit of the byte read from the buffer
        return buf.takeIf {
            it.readableBytes() > 0
        }?.let(ByteBuf::readByte)
            ?.let { it.toInt() and 0xFF } ?: -1
    }

    override fun read(b: ByteArray, off: Int, len: Int): Int {
        val readableBytes = buf.readableBytes()
        if(readableBytes == 0) return -1
        val result = len.coerceAtMost(readableBytes)
        buf.readBytes(b, off, result)
        return result
    }

    override fun close() {
        buf.release()
    }
}
@@ -0,0 +1,18 @@
package net.woggioni.rbcs.common

import io.netty.buffer.ByteBuf
import java.io.OutputStream

class ByteBufOutputStream(private val buf : ByteBuf) : OutputStream() {
    override fun write(b: Int) {
        buf.writeByte(b)
    }

    override fun write(b: ByteArray, off: Int, len: Int) {
        buf.writeBytes(b, off, len)
    }

    override fun close() {
        buf.release()
    }
}
@@ -0,0 +1,7 @@
package net.woggioni.rbcs.common

class ResourceNotFoundException(msg : String? = null, cause: Throwable? = null) : RuntimeException(msg, cause)

class ModuleNotFoundException(msg : String? = null, cause: Throwable? = null) : RuntimeException(msg, cause)
@@ -0,0 +1,8 @@
package net.woggioni.rbcs.common


data class HostAndPort(val host: String, val port: Int = 0) {
    override fun toString(): String {
        return "$host:$port"
    }
}
194
rbcs-common/src/main/kotlin/net/woggioni/rbcs/common/Logging.kt
Normal file
@@ -0,0 +1,194 @@
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import io.netty.channel.Channel
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import org.slf4j.Logger
|
||||
import org.slf4j.LoggerFactory
|
||||
import org.slf4j.MDC
|
||||
import org.slf4j.event.Level
|
||||
import org.slf4j.spi.LoggingEventBuilder
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.util.logging.LogManager
|
||||
|
||||
inline fun <reified T> T.contextLogger() = LoggerFactory.getLogger(T::class.java)
|
||||
inline fun <reified T> createLogger() = LoggerFactory.getLogger(T::class.java)
|
||||
|
||||
inline fun Logger.traceParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
||||
if (isTraceEnabled) {
|
||||
val (format, params) = messageBuilder()
|
||||
trace(format, params)
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.debugParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
||||
if (isDebugEnabled) {
|
||||
val (format, params) = messageBuilder()
|
||||
debug(format, params)
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.infoParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
||||
if (isInfoEnabled) {
|
||||
val (format, params) = messageBuilder()
|
||||
info(format, params)
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.warnParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
||||
if (isWarnEnabled) {
|
||||
val (format, params) = messageBuilder()
|
||||
warn(format, params)
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.errorParam(messageBuilder: () -> Pair<String, Array<Any>>) {
|
||||
if (isErrorEnabled) {
|
||||
val (format, params) = messageBuilder()
|
||||
error(format, params)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
inline fun log(
|
||||
log: Logger,
|
||||
filter: Logger.() -> Boolean,
|
||||
loggerMethod: Logger.(String) -> Unit, messageBuilder: () -> String
|
||||
) {
|
||||
if (log.filter()) {
|
||||
log.loggerMethod(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
fun withMDC(params: Array<Pair<String, String>>, cb: () -> Unit) {
|
||||
object : AutoCloseable {
|
||||
override fun close() {
|
||||
for ((key, _) in params) MDC.remove(key)
|
||||
}
|
||||
}.use {
|
||||
for ((key, value) in params) MDC.put(key, value)
|
||||
cb()
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.log(level: Level, channel: Channel, crossinline messageBuilder: (LoggingEventBuilder) -> Unit ) {
|
||||
if (isEnabledForLevel(level)) {
|
||||
val params = arrayOf<Pair<String, String>>(
|
||||
"channel-id-short" to channel.id().asShortText(),
|
||||
"channel-id-long" to channel.id().asLongText(),
|
||||
"remote-address" to channel.remoteAddress().toString(),
|
||||
"local-address" to channel.localAddress().toString(),
|
||||
)
|
||||
withMDC(params) {
|
||||
val builder = makeLoggingEventBuilder(level)
|
||||
// for ((key, value) in params) {
|
||||
// builder.addKeyValue(key, value)
|
||||
// }
|
||||
messageBuilder(builder)
|
||||
builder.log()
|
||||
}
|
||||
}
|
||||
}
|
||||
inline fun Logger.log(level: Level, channel: Channel, crossinline messageBuilder: () -> String) {
|
||||
log(level, channel) { builder ->
|
||||
builder.setMessage(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.trace(ch: Channel, crossinline messageBuilder: () -> String) {
|
||||
log(Level.TRACE, ch, messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.debug(ch: Channel, crossinline messageBuilder: () -> String) {
|
||||
log(Level.DEBUG, ch, messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.info(ch: Channel, crossinline messageBuilder: () -> String) {
|
||||
log(Level.INFO, ch, messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.warn(ch: Channel, crossinline messageBuilder: () -> String) {
|
||||
log(Level.WARN, ch, messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.error(ch: Channel, crossinline messageBuilder: () -> String) {
|
||||
log(Level.ERROR, ch, messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.trace(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
||||
log(Level.TRACE, ctx.channel(), messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.debug(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
||||
log(Level.DEBUG, ctx.channel(), messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.info(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
||||
log(Level.INFO, ctx.channel(), messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.warn(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
||||
log(Level.WARN, ctx.channel(), messageBuilder)
|
||||
}
|
||||
|
||||
inline fun Logger.error(ctx: ChannelHandlerContext, crossinline messageBuilder: () -> String) {
|
||||
log(Level.ERROR, ctx.channel(), messageBuilder)
|
||||
}
|
||||
|
||||
|
||||
inline fun Logger.log(level: Level, messageBuilder: () -> String) {
|
||||
if (isEnabledForLevel(level)) {
|
||||
makeLoggingEventBuilder(level).log(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.trace(messageBuilder: () -> String) {
|
||||
if (isTraceEnabled) {
|
||||
trace(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.debug(messageBuilder: () -> String) {
|
||||
if (isDebugEnabled) {
|
||||
debug(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.info(messageBuilder: () -> String) {
|
||||
if (isInfoEnabled) {
|
||||
info(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.warn(messageBuilder: () -> String) {
|
||||
if (isWarnEnabled) {
|
||||
warn(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
inline fun Logger.error(messageBuilder: () -> String) {
|
||||
if (isErrorEnabled) {
|
||||
error(messageBuilder())
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class LoggingConfig {
|
||||
|
||||
init {
|
||||
val logManager = LogManager.getLogManager()
|
||||
System.getProperty("log.config.source")?.let withSource@{ source ->
|
||||
val urls = LoggingConfig::class.java.classLoader.getResources(source)
|
||||
while (urls.hasMoreElements()) {
|
||||
val url = urls.nextElement()
|
||||
url.openStream().use { inputStream ->
|
||||
logManager.readConfiguration(inputStream)
|
||||
return@withSource
|
||||
}
|
||||
}
|
||||
Path.of(source).takeIf(Files::exists)
|
||||
?.let(Files::newInputStream)
|
||||
?.use(logManager::readConfiguration)
|
||||
}
|
||||
}
|
||||
}
|
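The channel-aware overloads above evaluate the message lambda only when the level is enabled and expose the channel ids and peer addresses to the log pattern via MDC for the duration of a single statement. A short hedged sketch of how a Netty handler might use them; the handler class and message are illustrative only.

import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelInboundHandlerAdapter
import net.woggioni.rbcs.common.debug
import org.slf4j.LoggerFactory

class ConnectionLoggingHandler : ChannelInboundHandlerAdapter() {
    private val log = LoggerFactory.getLogger(ConnectionLoggingHandler::class.java)

    override fun channelActive(ctx: ChannelHandlerContext) {
        // The lambda is only evaluated when DEBUG is enabled; channel ids and
        // addresses are pushed into the MDC while the statement is logged
        log.debug(ctx) {
            "New connection established"
        }
        super.channelActive(ctx)
    }
}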
@@ -0,0 +1,46 @@
package net.woggioni.rbcs.common

import java.security.SecureRandom
import java.security.spec.KeySpec
import java.util.Base64
import javax.crypto.SecretKeyFactory
import javax.crypto.spec.PBEKeySpec

object PasswordSecurity {
    private const val KEY_LENGTH = 256

    private fun concat(arr1: ByteArray, arr2: ByteArray): ByteArray {
        val result = ByteArray(arr1.size + arr2.size)
        var j = 0
        for(element in arr1) {
            result[j] = element
            j += 1
        }
        for(element in arr2) {
            result[j] = element
            j += 1
        }
        return result
    }

    fun hashPassword(password : String, salt : String? = null) : String {
        val actualSalt = salt?.let(Base64.getDecoder()::decode) ?: SecureRandom().run {
            val result = ByteArray(16)
            nextBytes(result)
            result
        }
        val spec: KeySpec = PBEKeySpec(password.toCharArray(), actualSalt, 10, KEY_LENGTH)
        val factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1")
        val hash = factory.generateSecret(spec).encoded
        return String(Base64.getEncoder().encode(concat(hash, actualSalt)))
    }

    fun decodePasswordHash(passwordHash : String) : Pair<ByteArray, ByteArray> {
        val decoded = Base64.getDecoder().decode(passwordHash)
        val hash = ByteArray(KEY_LENGTH / 8)
        val salt = ByteArray(decoded.size - KEY_LENGTH / 8)
        System.arraycopy(decoded, 0, hash, 0, hash.size)
        System.arraycopy(decoded, hash.size, salt, 0, salt.size)
        return hash to salt
    }
}
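hashPassword packs the PBKDF2 hash and the salt into a single Base64 blob (hash first, then salt), so verification re-hashes a candidate password with the decoded salt and compares the stored hash prefix. A minimal hedged verification sketch built only on the two functions above; verifyPassword is a hypothetical helper, not part of the project.

import net.woggioni.rbcs.common.PasswordSecurity.decodePasswordHash
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
import java.util.Base64

// Hypothetical helper: check a candidate password against a stored hash blob
fun verifyPassword(candidate: String, storedHash: String): Boolean {
    val (expectedHash, salt) = decodePasswordHash(storedHash)
    // Re-hash the candidate with the same salt and compare the embedded hash part
    val recomputed = hashPassword(candidate, Base64.getEncoder().encodeToString(salt))
    val (recomputedHash, _) = decodePasswordHash(recomputed)
    return expectedHash.contentEquals(recomputedHash)
}

fun main() {
    val stored = hashPassword("correct horse battery staple")
    println(verifyPassword("correct horse battery staple", stored)) // true
    println(verifyPassword("wrong password", stored))               // false
}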
61
rbcs-common/src/main/kotlin/net/woggioni/rbcs/common/RBCS.kt
Normal file
@@ -0,0 +1,61 @@
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import net.woggioni.jwo.JWO
|
||||
import java.net.URI
|
||||
import java.net.URL
|
||||
import java.security.MessageDigest
|
||||
|
||||
object RBCS {
|
||||
fun String.toUrl() : URL = URL.of(URI(this), null)
|
||||
|
||||
const val RBCS_NAMESPACE_URI: String = "urn:net.woggioni.rbcs.server"
|
||||
const val RBCS_PREFIX: String = "rbcs"
|
||||
const val XML_SCHEMA_NAMESPACE_URI = "http://www.w3.org/2001/XMLSchema-instance"
|
||||
|
||||
fun ByteArray.toInt(index : Int = 0) : Long {
|
||||
if(index + 4 > size) throw IllegalArgumentException("Not enough bytes to decode a 32 bits integer")
|
||||
var value : Long = 0
|
||||
for (b in index until index + 4) {
|
||||
value = (value shl 8) + (get(b).toInt() and 0xFF)
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
fun ByteArray.toLong(index : Int = 0) : Long {
|
||||
if(index + 8 > size) throw IllegalArgumentException("Not enough bytes to decode a 64 bits long integer")
|
||||
var value : Long = 0
|
||||
for (b in index until index + 8) {
|
||||
value = (value shl 8) + (get(b).toInt() and 0xFF)
|
||||
}
|
||||
return value
|
||||
}
|
||||
|
||||
fun digest(
|
||||
data: ByteArray,
|
||||
md: MessageDigest = MessageDigest.getInstance("MD5")
|
||||
): ByteArray {
|
||||
md.update(data)
|
||||
return md.digest()
|
||||
}
|
||||
|
||||
fun digestString(
|
||||
data: ByteArray,
|
||||
md: MessageDigest = MessageDigest.getInstance("MD5")
|
||||
): String {
|
||||
return JWO.bytesToHex(digest(data, md))
|
||||
}
|
||||
|
||||
fun processCacheKey(key: String, digestAlgorithm: String?) = digestAlgorithm
|
||||
?.let(MessageDigest::getInstance)
|
||||
?.let { md ->
|
||||
digest(key.toByteArray(), md)
|
||||
} ?: key.toByteArray(Charsets.UTF_8)
|
||||
|
||||
fun Long.toIntOrNull(): Int? {
|
||||
return if (this >= Int.MIN_VALUE && this <= Int.MAX_VALUE) {
|
||||
toInt()
|
||||
} else {
|
||||
null
|
||||
}
|
||||
}
|
||||
}
|
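processCacheKey either hashes the textual key with the configured digest algorithm or falls back to its raw UTF-8 bytes. A short hedged sketch of both paths; the key string and the "SHA-256" algorithm name are example values, not project defaults.

import net.woggioni.jwo.JWO
import net.woggioni.rbcs.common.RBCS.processCacheKey

fun main() {
    // With a digest algorithm, the key is hashed before being used for storage
    val hashed = processCacheKey("gradle-task-output-1234", "SHA-256")
    println(JWO.bytesToHex(hashed))

    // Without one, the raw UTF-8 bytes of the key are used as-is
    val raw = processCacheKey("gradle-task-output-1234", null)
    println(String(raw))
}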
@@ -0,0 +1,113 @@
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import java.io.IOException
|
||||
import java.io.InputStream
|
||||
import java.net.URL
|
||||
import java.net.URLConnection
|
||||
import java.net.URLStreamHandler
|
||||
import java.net.spi.URLStreamHandlerProvider
|
||||
import java.util.concurrent.atomic.AtomicBoolean
|
||||
import java.util.stream.Collectors
|
||||
|
||||
|
||||
class RbcsUrlStreamHandlerFactory : URLStreamHandlerProvider() {
|
||||
|
||||
private class ClasspathHandler(private val classLoader: ClassLoader = RbcsUrlStreamHandlerFactory::class.java.classLoader) :
|
||||
URLStreamHandler() {
|
||||
|
||||
override fun openConnection(u: URL): URLConnection? {
|
||||
return javaClass.module
|
||||
?.takeIf { m: Module -> m.layer != null }
|
||||
?.let {
|
||||
val path = u.path
|
||||
val i = path.lastIndexOf('/')
|
||||
val packageName = path.substring(0, i).replace('/', '.')
|
||||
val modules = packageMap[packageName]!!
|
||||
ClasspathResourceURLConnection(
|
||||
u,
|
||||
modules
|
||||
)
|
||||
}
|
||||
?: classLoader.getResource(u.path)?.let(URL::openConnection)
|
||||
}
|
||||
}
|
||||
|
||||
private class JpmsHandler : URLStreamHandler() {
|
||||
|
||||
override fun openConnection(u: URL): URLConnection {
|
||||
val moduleName = u.host
|
||||
val thisModule = javaClass.module
|
||||
val sourceModule =
|
||||
thisModule
|
||||
?.let(Module::getLayer)
|
||||
?.let { layer: ModuleLayer ->
|
||||
layer.findModule(moduleName).orElse(null)
|
||||
} ?: if(thisModule.layer == null) {
|
||||
thisModule
|
||||
} else throw ModuleNotFoundException("Module '$moduleName' not found")
|
||||
|
||||
return JpmsResourceURLConnection(u, sourceModule)
|
||||
}
|
||||
}
|
||||
|
||||
private class JpmsResourceURLConnection(url: URL, private val module: Module) : URLConnection(url) {
|
||||
override fun connect() {
|
||||
}
|
||||
|
||||
@Throws(IOException::class)
|
||||
override fun getInputStream(): InputStream {
|
||||
val resource = getURL().path
|
||||
return module.getResourceAsStream(resource)
|
||||
?: throw ResourceNotFoundException("Resource '$resource' not found in module '${module.name}'")
|
||||
}
|
||||
}
|
||||
|
||||
override fun createURLStreamHandler(protocol: String): URLStreamHandler? {
|
||||
return when (protocol) {
|
||||
"classpath" -> ClasspathHandler()
|
||||
"jpms" -> JpmsHandler()
|
||||
else -> null
|
||||
}
|
||||
}
|
||||
|
||||
private class ClasspathResourceURLConnection(url: URL?, private val modules: List<Module>) :
|
||||
URLConnection(url) {
|
||||
override fun connect() {}
|
||||
|
||||
override fun getInputStream(): InputStream? {
|
||||
for (module in modules) {
|
||||
val result = module.getResourceAsStream(getURL().path)
|
||||
if (result != null) return result
|
||||
}
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
companion object {
|
||||
private val installed = AtomicBoolean(false)
|
||||
fun install() {
|
||||
if (!installed.getAndSet(true)) {
|
||||
URL.setURLStreamHandlerFactory(RbcsUrlStreamHandlerFactory())
|
||||
}
|
||||
}
|
||||
|
||||
private val packageMap: Map<String, List<Module>> by lazy {
|
||||
RbcsUrlStreamHandlerFactory::class.java.module.layer
|
||||
.modules()
|
||||
.stream()
|
||||
.flatMap { m: Module ->
|
||||
m.packages.stream()
|
||||
.map { p: String -> p to m }
|
||||
}
|
||||
.collect(
|
||||
Collectors.groupingBy(
|
||||
Pair<String, Module>::first,
|
||||
Collectors.mapping(
|
||||
Pair<String, Module>::second,
|
||||
Collectors.toUnmodifiableList<Module>()
|
||||
)
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
}
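A sketch of how the factory above might be used; the two URLs are examples taken from elsewhere in this change and assume the resources are visible to the running classpath or module layer:

    import java.net.URL
    import net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory

    fun main() {
        // Register the handlers once per JVM; repeated calls are no-ops thanks to the AtomicBoolean guard
        RbcsUrlStreamHandlerFactory.install()
        // classpath: resolves a resource through the class loader (or the module layer when present)
        val defaultConfig = URL("classpath:net/woggioni/rbcs/server/rbcs-default.xml")
        // jpms: uses the URL host as the module name and the path as the resource path
        val schema = URL("jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd")
        defaultConfig.openStream().use { println(it.readBytes().size) }
        schema.openStream().use { println(it.readBytes().size) }
    }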
|
243
rbcs-common/src/main/kotlin/net/woggioni/rbcs/common/Xml.kt
Normal file
@@ -0,0 +1,243 @@
|
||||
package net.woggioni.rbcs.common
|
||||
|
||||
import net.woggioni.jwo.JWO
|
||||
import org.slf4j.event.Level
|
||||
import org.w3c.dom.Document
|
||||
import org.w3c.dom.Element
|
||||
import org.w3c.dom.Node
|
||||
import org.w3c.dom.NodeList
|
||||
import org.xml.sax.SAXNotRecognizedException
|
||||
import org.xml.sax.SAXNotSupportedException
|
||||
import org.xml.sax.SAXParseException
|
||||
import java.io.InputStream
|
||||
import java.io.OutputStream
|
||||
import java.net.URL
|
||||
import javax.xml.XMLConstants.ACCESS_EXTERNAL_DTD
|
||||
import javax.xml.XMLConstants.ACCESS_EXTERNAL_SCHEMA
|
||||
import javax.xml.XMLConstants.FEATURE_SECURE_PROCESSING
|
||||
import javax.xml.XMLConstants.W3C_XML_SCHEMA_NS_URI
|
||||
import javax.xml.parsers.DocumentBuilder
|
||||
import javax.xml.parsers.DocumentBuilderFactory
|
||||
import javax.xml.transform.OutputKeys
|
||||
import javax.xml.transform.TransformerFactory
|
||||
import javax.xml.transform.dom.DOMSource
|
||||
import javax.xml.transform.stream.StreamResult
|
||||
import javax.xml.transform.stream.StreamSource
|
||||
import javax.xml.validation.Schema
|
||||
import javax.xml.validation.SchemaFactory
|
||||
import org.xml.sax.ErrorHandler as ErrHandler
|
||||
|
||||
|
||||
class NodeListIterator(private val nodeList: NodeList) : Iterator<Node> {
|
||||
private var cursor: Int = 0
|
||||
override fun hasNext(): Boolean {
|
||||
return cursor < nodeList.length
|
||||
}
|
||||
|
||||
override fun next(): Node {
|
||||
return if (hasNext()) nodeList.item(cursor++) else throw NoSuchElementException()
|
||||
}
|
||||
}
|
||||
|
||||
class ElementIterator(parent: Element, name: String? = null) : Iterator<Element> {
|
||||
private val it: NodeListIterator
|
||||
private val name: String?
|
||||
private var next: Element?
|
||||
|
||||
init {
|
||||
it = NodeListIterator(parent.childNodes)
|
||||
this.name = name
|
||||
next = getNext()
|
||||
}
|
||||
|
||||
override fun hasNext(): Boolean {
|
||||
return next != null
|
||||
}
|
||||
|
||||
override fun next(): Element {
|
||||
val result = next ?: throw NoSuchElementException()
|
||||
next = getNext()
|
||||
return result
|
||||
}
|
||||
|
||||
private fun getNext(): Element? {
|
||||
var result: Element? = null
|
||||
while (it.hasNext()) {
|
||||
val node: Node = it.next()
|
||||
if (node is Element && (name == null || name == node.tagName)) {
|
||||
result = node
|
||||
break
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
class Xml(val doc: Document, val element: Element) {
|
||||
|
||||
class ErrorHandler(private val fileURL: URL) : ErrHandler {
|
||||
|
||||
companion object {
|
||||
private val log = createLogger<ErrorHandler>()
|
||||
}
|
||||
|
||||
override fun warning(ex: SAXParseException) = err(ex, Level.WARN)
|
||||
|
||||
private fun err(ex: SAXParseException, level: Level) {
|
||||
log.log(level) {
|
||||
"Problem at ${fileURL}:${ex.lineNumber}:${ex.columnNumber} parsing deployment configuration: ${ex.message}"
|
||||
}
|
||||
throw ex
|
||||
}
|
||||
|
||||
override fun error(ex: SAXParseException) = err(ex, Level.ERROR)
|
||||
override fun fatalError(ex: SAXParseException) = err(ex, Level.ERROR)
|
||||
}
|
||||
|
||||
companion object {
|
||||
private val dictMap: Map<String, Map<String, Any>> = sequenceOf(
|
||||
"env" to System.getenv().asSequence().map { (k, v) -> k to (v as Any) }.toMap(),
|
||||
"sys" to System.getProperties().asSequence().map { (k, v) -> k as String to (v as Any) }.toMap()
|
||||
).toMap()
|
||||
|
||||
private fun renderConfigurationTemplate(template: String): String {
|
||||
return JWO.renderTemplate(template, emptyMap(), dictMap).replace("$$", "$")
|
||||
}
|
||||
|
||||
fun Element.renderAttribute(name : String, namespaceURI: String? = null) = if(namespaceURI == null) {
|
||||
getAttribute(name)
|
||||
} else {
|
||||
getAttributeNS(namespaceURI, name)
|
||||
}.takeIf(String::isNotEmpty)?.let(Companion::renderConfigurationTemplate)
|
||||
|
||||
|
||||
fun Element.asIterable() = Iterable { ElementIterator(this, null) }
|
||||
fun NodeList.asIterable() = Iterable { NodeListIterator(this) }
|
||||
|
||||
private fun disableProperty(dbf: DocumentBuilderFactory, propertyName: String) {
|
||||
try {
|
||||
dbf.setAttribute(propertyName, "")
|
||||
} catch (iae: IllegalArgumentException) {
|
||||
// Property not supported.
|
||||
}
|
||||
}
|
||||
|
||||
private fun disableProperty(sf: SchemaFactory, propertyName: String) {
|
||||
try {
|
||||
sf.setProperty(propertyName, "")
|
||||
} catch (ex: SAXNotRecognizedException) {
|
||||
// Property not supported.
|
||||
} catch (ex: SAXNotSupportedException) {
|
||||
}
|
||||
}
|
||||
|
||||
fun getSchema(schema: URL): Schema {
|
||||
val sf = SchemaFactory.newInstance(W3C_XML_SCHEMA_NS_URI)
|
||||
sf.setFeature(FEATURE_SECURE_PROCESSING, false)
|
||||
sf.errorHandler = ErrorHandler(schema)
|
||||
return sf.newSchema(schema)
|
||||
}
|
||||
|
||||
fun getSchema(inputStream: InputStream): Schema {
|
||||
val sf = SchemaFactory.newInstance(W3C_XML_SCHEMA_NS_URI)
|
||||
sf.setFeature(FEATURE_SECURE_PROCESSING, true)
|
||||
return sf.newSchema(StreamSource(inputStream))
|
||||
}
|
||||
|
||||
fun newDocumentBuilderFactory(schemaResourceURL: URL?): DocumentBuilderFactory {
|
||||
val dbf = DocumentBuilderFactory.newInstance()
|
||||
dbf.setFeature(FEATURE_SECURE_PROCESSING, false)
|
||||
dbf.setAttribute(ACCESS_EXTERNAL_SCHEMA, "all")
|
||||
disableProperty(dbf, ACCESS_EXTERNAL_DTD)
|
||||
dbf.isExpandEntityReferences = true
|
||||
dbf.isIgnoringComments = true
|
||||
dbf.isNamespaceAware = true
|
||||
dbf.isValidating = schemaResourceURL == null
|
||||
dbf.setFeature("http://apache.org/xml/features/validation/schema", true)
|
||||
schemaResourceURL?.let {
|
||||
dbf.schema = getSchema(it)
|
||||
}
|
||||
return dbf
|
||||
}
|
||||
|
||||
fun newDocumentBuilder(resource: URL, schemaResourceURL: URL?): DocumentBuilder {
|
||||
val db = newDocumentBuilderFactory(schemaResourceURL).newDocumentBuilder()
|
||||
db.setErrorHandler(ErrorHandler(resource))
|
||||
return db
|
||||
}
|
||||
|
||||
fun parseXmlResource(resource: URL, schemaResourceURL: URL?): Document {
|
||||
val db = newDocumentBuilder(resource, schemaResourceURL)
|
||||
return resource.openStream().use(db::parse)
|
||||
}
|
||||
|
||||
fun parseXml(sourceURL: URL, sourceStream: InputStream? = null, schemaResourceURL: URL? = null): Document {
|
||||
val db = newDocumentBuilder(sourceURL, schemaResourceURL)
|
||||
return sourceStream?.let(db::parse) ?: sourceURL.openStream().use(db::parse)
|
||||
}
|
||||
|
||||
fun write(doc: Document, output: OutputStream) {
|
||||
val transformerFactory = TransformerFactory.newInstance()
|
||||
val transformer = transformerFactory.newTransformer()
|
||||
transformer.setOutputProperty(OutputKeys.INDENT, "yes")
|
||||
transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4")
|
||||
transformer.setOutputProperty(OutputKeys.STANDALONE, "yes")
|
||||
transformer.setOutputProperty(OutputKeys.ENCODING, "UTF-8")
|
||||
val source = DOMSource(doc)
|
||||
val result = StreamResult(output)
|
||||
transformer.transform(source, result)
|
||||
}
|
||||
|
||||
fun of(
|
||||
namespaceURI: String,
|
||||
qualifiedName: String,
|
||||
schemaResourceURL: URL? = null,
|
||||
cb: Xml.(el: Element) -> Unit
|
||||
): Document {
|
||||
val dbf = newDocumentBuilderFactory(schemaResourceURL)
|
||||
val db = dbf.newDocumentBuilder()
|
||||
val doc = db.newDocument()
|
||||
val root = doc.createElementNS(namespaceURI, qualifiedName)
|
||||
.also(doc::appendChild)
|
||||
Xml(doc, root).cb(root)
|
||||
return doc
|
||||
}
|
||||
|
||||
fun of(doc: Document, el: Element, cb: Xml.(el: Element) -> Unit): Element {
|
||||
Xml(doc, el).cb(el)
|
||||
return el
|
||||
}
|
||||
|
||||
fun Element.removeChildren() {
|
||||
while (true) {
|
||||
removeChild(firstChild ?: break)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun node(
|
||||
name: String,
|
||||
namespaceURI: String? = null,
|
||||
attrs: Map<String, String> = emptyMap(),
|
||||
cb: Xml.(el: Element) -> Unit = {}
|
||||
): Element {
|
||||
val child = doc.createElementNS(namespaceURI, name)
|
||||
for ((key, value) in attrs) {
|
||||
child.setAttribute(key, value)
|
||||
}
|
||||
return child
|
||||
.also {
|
||||
element.appendChild(it)
|
||||
Xml(doc, it).cb(it)
|
||||
}
|
||||
}
|
||||
|
||||
fun attr(key: String, value: String, namespaceURI: String? = null) {
|
||||
element.setAttributeNS(namespaceURI, key, value)
|
||||
}
|
||||
|
||||
fun text(txt: String) {
|
||||
element.appendChild(doc.createTextNode(txt))
|
||||
}
|
||||
}
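An illustrative use of the builder DSL defined above (namespace and element names are placeholders):

    import net.woggioni.rbcs.common.Xml

    fun main() {
        // Build a tiny document and pretty-print it to stdout with the write() helper
        val doc = Xml.of("urn:example", "config") {
            attr("version", "1")
            node("server", attrs = mapOf("host" to "127.0.0.1", "port" to "11211")) {
                text("primary")
            }
        }
        Xml.write(doc, System.out)
    }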
|
@@ -0,0 +1 @@
net.woggioni.rbcs.common.RbcsUrlStreamHandlerFactory
69
rbcs-server-memcache/build.gradle
Normal file
@@ -0,0 +1,69 @@
|
||||
plugins {
|
||||
id 'java-library'
|
||||
id 'maven-publish'
|
||||
alias catalog.plugins.kotlin.jvm
|
||||
}
|
||||
|
||||
configurations {
|
||||
bundle {
|
||||
canBeResolved = true
|
||||
canBeConsumed = false
|
||||
visible = false
|
||||
transitive = false
|
||||
|
||||
resolutionStrategy {
|
||||
dependencies {
|
||||
exclude group: 'org.slf4j', module: 'slf4j-api'
|
||||
exclude group: 'org.jetbrains.kotlin', module: 'kotlin-stdlib'
|
||||
exclude group: 'org.jetbrains', module: 'annotations'
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
release {
|
||||
transitive = false
|
||||
canBeConsumed = true
|
||||
canBeResolved = true
|
||||
visible = true
|
||||
}
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation project(':rbcs-common')
|
||||
implementation project(':rbcs-api')
|
||||
implementation catalog.jwo
|
||||
implementation catalog.slf4j.api
|
||||
implementation catalog.netty.common
|
||||
implementation catalog.netty.handler
|
||||
implementation catalog.netty.codec.memcache
|
||||
|
||||
bundle catalog.netty.codec.memcache
|
||||
|
||||
testRuntimeOnly catalog.logback.classic
|
||||
}
|
||||
|
||||
tasks.named(JavaPlugin.TEST_TASK_NAME, Test) {
|
||||
systemProperty("io.netty.leakDetectionLevel", "PARANOID")
|
||||
}
|
||||
|
||||
Provider<Tar> bundleTask = tasks.register("bundle", Tar) {
|
||||
from(tasks.named(JavaPlugin.JAR_TASK_NAME))
|
||||
from(configurations.bundle)
|
||||
group = BasePlugin.BUILD_GROUP
|
||||
}
|
||||
|
||||
tasks.named(BasePlugin.ASSEMBLE_TASK_NAME) {
|
||||
dependsOn(bundleTask)
|
||||
}
|
||||
|
||||
artifacts {
|
||||
release(bundleTask)
|
||||
}
|
||||
|
||||
publishing {
|
||||
publications {
|
||||
maven(MavenPublication) {
|
||||
artifact bundleTask
|
||||
}
|
||||
}
|
||||
}
|
20
rbcs-server-memcache/src/main/java/module-info.java
Normal file
@@ -0,0 +1,20 @@
import net.woggioni.rbcs.api.CacheProvider;

module net.woggioni.rbcs.server.memcache {
    requires net.woggioni.rbcs.common;
    requires net.woggioni.rbcs.api;
    requires net.woggioni.jwo;
    requires java.xml;
    requires kotlin.stdlib;
    requires io.netty.transport;
    requires io.netty.codec;
    requires io.netty.codec.memcache;
    requires io.netty.common;
    requires io.netty.buffer;
    requires io.netty.handler;
    requires org.slf4j;

    provides CacheProvider with net.woggioni.rbcs.server.memcache.MemcacheCacheProvider;

    opens net.woggioni.rbcs.server.memcache.schema;
}
@@ -0,0 +1,4 @@
package net.woggioni.rbcs.server.memcache

class MemcacheException(status : Short, msg : String? = null, cause : Throwable? = null)
    : RuntimeException(msg ?: "Memcached status $status", cause)
@@ -0,0 +1,45 @@
|
||||
package net.woggioni.rbcs.server.memcache
|
||||
|
||||
import net.woggioni.rbcs.api.CacheHandlerFactory
|
||||
import net.woggioni.rbcs.api.Configuration
|
||||
import net.woggioni.rbcs.common.HostAndPort
|
||||
import net.woggioni.rbcs.server.memcache.client.MemcacheClient
|
||||
import java.time.Duration
|
||||
|
||||
data class MemcacheCacheConfiguration(
|
||||
val servers: List<Server>,
|
||||
val maxAge: Duration = Duration.ofDays(1),
|
||||
val digestAlgorithm: String? = null,
|
||||
val compressionMode: CompressionMode? = null,
|
||||
val compressionLevel: Int,
|
||||
val chunkSize : Int
|
||||
) : Configuration.Cache {
|
||||
|
||||
enum class CompressionMode {
|
||||
/**
|
||||
* Deflate mode
|
||||
*/
|
||||
DEFLATE
|
||||
}
|
||||
|
||||
data class Server(
|
||||
val endpoint : HostAndPort,
|
||||
val connectionTimeoutMillis : Int?,
|
||||
val maxConnections : Int
|
||||
)
|
||||
|
||||
|
||||
override fun materialize() = object : CacheHandlerFactory {
|
||||
private val client = MemcacheClient(this@MemcacheCacheConfiguration.servers, chunkSize)
|
||||
override fun close() {
|
||||
client.close()
|
||||
}
|
||||
|
||||
override fun newHandler() = MemcacheCacheHandler(client, digestAlgorithm, compressionMode != null, compressionLevel, chunkSize, maxAge)
|
||||
}
|
||||
|
||||
override fun getNamespaceURI() = "urn:net.woggioni.rbcs.server.memcache"
|
||||
|
||||
override fun getTypeName() = "memcacheCacheType"
|
||||
}
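For reference, a programmatic construction of this configuration (a sketch mirroring the defaults applied by the XML deserializer later in this change; host, port and tuning values are placeholders):

    import java.time.Duration
    import net.woggioni.rbcs.common.HostAndPort
    import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration

    val cacheConfig = MemcacheCacheConfiguration(
        servers = listOf(
            MemcacheCacheConfiguration.Server(
                endpoint = HostAndPort("127.0.0.1", 11211),
                connectionTimeoutMillis = 10_000,
                maxConnections = 4
            )
        ),
        maxAge = Duration.ofDays(7),
        digestAlgorithm = "MD5",
        compressionMode = MemcacheCacheConfiguration.CompressionMode.DEFLATE,
        compressionLevel = -1,
        chunkSize = 0x10000
    )
    // cacheConfig.materialize() yields the CacheHandlerFactory consumed by the server pipeline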
|
||||
|
@@ -0,0 +1,409 @@
|
||||
package net.woggioni.rbcs.server.memcache
|
||||
|
||||
import io.netty.buffer.ByteBuf
|
||||
import io.netty.buffer.ByteBufAllocator
|
||||
import io.netty.buffer.CompositeByteBuf
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import io.netty.channel.SimpleChannelInboundHandler
|
||||
import io.netty.handler.codec.memcache.DefaultLastMemcacheContent
|
||||
import io.netty.handler.codec.memcache.DefaultMemcacheContent
|
||||
import io.netty.handler.codec.memcache.LastMemcacheContent
|
||||
import io.netty.handler.codec.memcache.MemcacheContent
|
||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
|
||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
|
||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponseStatus
|
||||
import io.netty.handler.codec.memcache.binary.DefaultBinaryMemcacheRequest
|
||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
||||
import net.woggioni.rbcs.api.exception.ContentTooLargeException
|
||||
import net.woggioni.rbcs.api.message.CacheMessage
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheContent
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheGetRequest
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CachePutRequest
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CachePutResponse
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueFoundResponse
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueNotFoundResponse
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.LastCacheContent
|
||||
import net.woggioni.rbcs.common.ByteBufInputStream
|
||||
import net.woggioni.rbcs.common.ByteBufOutputStream
|
||||
import net.woggioni.rbcs.common.RBCS.processCacheKey
|
||||
import net.woggioni.rbcs.common.RBCS.toIntOrNull
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import net.woggioni.rbcs.common.debug
|
||||
import net.woggioni.rbcs.common.extractChunk
|
||||
import net.woggioni.rbcs.common.trace
|
||||
import net.woggioni.rbcs.server.memcache.client.MemcacheClient
|
||||
import net.woggioni.rbcs.server.memcache.client.MemcacheRequestController
|
||||
import net.woggioni.rbcs.server.memcache.client.MemcacheResponseHandler
|
||||
import java.io.ByteArrayOutputStream
|
||||
import java.io.ObjectInputStream
|
||||
import java.io.ObjectOutputStream
|
||||
import java.nio.ByteBuffer
|
||||
import java.nio.channels.Channels
|
||||
import java.nio.channels.FileChannel
|
||||
import java.nio.channels.ReadableByteChannel
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.StandardOpenOption
|
||||
import java.time.Duration
|
||||
import java.time.Instant
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.zip.Deflater
|
||||
import java.util.zip.DeflaterOutputStream
|
||||
import java.util.zip.InflaterOutputStream
|
||||
import io.netty.channel.Channel as NettyChannel
|
||||
|
||||
class MemcacheCacheHandler(
|
||||
private val client: MemcacheClient,
|
||||
private val digestAlgorithm: String?,
|
||||
private val compressionEnabled: Boolean,
|
||||
private val compressionLevel: Int,
|
||||
private val chunkSize: Int,
|
||||
private val maxAge: Duration
|
||||
) : SimpleChannelInboundHandler<CacheMessage>() {
|
||||
companion object {
|
||||
private val log = createLogger<MemcacheCacheHandler>()
|
||||
|
||||
private fun encodeExpiry(expiry: Duration): Int {
|
||||
val expirySeconds = expiry.toSeconds()
|
||||
return expirySeconds.toInt().takeIf { it.toLong() == expirySeconds }
|
||||
?: Instant.ofEpochSecond(expirySeconds).epochSecond.toInt()
|
||||
}
|
||||
}
|
||||
|
||||
private inner class InProgressGetRequest(
|
||||
private val key: String,
|
||||
private val ctx: ChannelHandlerContext
|
||||
) {
|
||||
private val acc = ctx.alloc().compositeBuffer()
|
||||
private val chunk = ctx.alloc().compositeBuffer()
|
||||
private val outputStream = ByteBufOutputStream(chunk).let {
|
||||
if (compressionEnabled) {
|
||||
InflaterOutputStream(it)
|
||||
} else {
|
||||
it
|
||||
}
|
||||
}
|
||||
private var responseSent = false
|
||||
private var metadataSize: Int? = null
|
||||
|
||||
fun write(buf: ByteBuf) {
|
||||
acc.addComponent(true, buf.retain())
|
||||
if (metadataSize == null && acc.readableBytes() >= Int.SIZE_BYTES) {
|
||||
metadataSize = acc.readInt()
|
||||
}
|
||||
metadataSize
|
||||
?.takeIf { !responseSent }
|
||||
?.takeIf { acc.readableBytes() >= it }
|
||||
?.let { mSize ->
|
||||
val metadata = ObjectInputStream(ByteBufInputStream(acc)).use {
|
||||
acc.retain()
|
||||
it.readObject() as CacheValueMetadata
|
||||
}
|
||||
ctx.writeAndFlush(CacheValueFoundResponse(key, metadata))
|
||||
responseSent = true
|
||||
acc.readerIndex(Int.SIZE_BYTES + mSize)
|
||||
}
|
||||
if (responseSent) {
|
||||
acc.readBytes(outputStream, acc.readableBytes())
|
||||
if(chunk.readableBytes() >= chunkSize) {
|
||||
flush(false)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private fun flush(last : Boolean) {
|
||||
val toSend = extractChunk(chunk, ctx.alloc())
|
||||
val msg = if(last) {
|
||||
log.trace(ctx) {
|
||||
"Sending last chunk to client on channel ${ctx.channel().id().asShortText()}"
|
||||
}
|
||||
LastCacheContent(toSend)
|
||||
} else {
|
||||
log.trace(ctx) {
|
||||
"Sending chunk to client on channel ${ctx.channel().id().asShortText()}"
|
||||
}
|
||||
CacheContent(toSend)
|
||||
}
|
||||
ctx.writeAndFlush(msg)
|
||||
}
|
||||
|
||||
fun commit() {
|
||||
acc.release()
|
||||
chunk.retain()
|
||||
outputStream.close()
|
||||
flush(true)
|
||||
chunk.release()
|
||||
}
|
||||
|
||||
fun rollback() {
|
||||
acc.release()
|
||||
outputStream.close()
|
||||
}
|
||||
}
|
||||
|
||||
private inner class InProgressPutRequest(
|
||||
private val ch : NettyChannel,
|
||||
metadata : CacheValueMetadata,
|
||||
val digest : ByteBuf,
|
||||
val requestController: CompletableFuture<MemcacheRequestController>,
|
||||
private val alloc: ByteBufAllocator
|
||||
) {
|
||||
private var totalSize = 0
|
||||
private var tmpFile : FileChannel? = null
|
||||
private val accumulator = alloc.compositeBuffer()
|
||||
private val stream = ByteBufOutputStream(accumulator).let {
|
||||
if (compressionEnabled) {
|
||||
DeflaterOutputStream(it, Deflater(compressionLevel))
|
||||
} else {
|
||||
it
|
||||
}
|
||||
}
|
||||
|
||||
init {
|
||||
ByteArrayOutputStream().let { baos ->
|
||||
ObjectOutputStream(baos).use {
|
||||
it.writeObject(metadata)
|
||||
}
|
||||
val serializedBytes = baos.toByteArray()
|
||||
accumulator.writeInt(serializedBytes.size)
|
||||
accumulator.writeBytes(serializedBytes)
|
||||
}
|
||||
}
|
||||
|
||||
fun write(buf: ByteBuf) {
|
||||
totalSize += buf.readableBytes()
|
||||
buf.readBytes(stream, buf.readableBytes())
|
||||
tmpFile?.let {
|
||||
flushToDisk(it, accumulator)
|
||||
}
|
||||
if(accumulator.readableBytes() > 0x100000) {
|
||||
log.debug(ch) {
|
||||
"Entry is too big, buffering it into a file"
|
||||
}
|
||||
val opts = arrayOf(
|
||||
StandardOpenOption.DELETE_ON_CLOSE,
|
||||
StandardOpenOption.READ,
|
||||
StandardOpenOption.WRITE,
|
||||
StandardOpenOption.TRUNCATE_EXISTING
|
||||
)
|
||||
FileChannel.open(Files.createTempFile("rbcs-memcache", ".tmp"), *opts).let { fc ->
|
||||
tmpFile = fc
|
||||
flushToDisk(fc, accumulator)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private fun flushToDisk(fc : FileChannel, buf : CompositeByteBuf) {
|
||||
val chunk = extractChunk(buf, alloc)
|
||||
fc.write(chunk.nioBuffer())
|
||||
chunk.release()
|
||||
}
|
||||
|
||||
fun commit() : Pair<Int, ReadableByteChannel> {
|
||||
digest.release()
|
||||
accumulator.retain()
|
||||
stream.close()
|
||||
val fileChannel = tmpFile
|
||||
return if(fileChannel != null) {
|
||||
flushToDisk(fileChannel, accumulator)
|
||||
accumulator.release()
|
||||
fileChannel.position(0)
|
||||
val fileSize = fileChannel.size().toIntOrNull() ?: let {
|
||||
fileChannel.close()
|
||||
throw ContentTooLargeException("Request body is too large", null)
|
||||
}
|
||||
fileSize to fileChannel
|
||||
} else {
|
||||
accumulator.readableBytes() to Channels.newChannel(ByteBufInputStream(accumulator))
|
||||
}
|
||||
}
|
||||
|
||||
fun rollback() {
|
||||
stream.close()
|
||||
digest.release()
|
||||
tmpFile?.close()
|
||||
}
|
||||
}
|
||||
|
||||
private var inProgressPutRequest: InProgressPutRequest? = null
|
||||
private var inProgressGetRequest: InProgressGetRequest? = null
|
||||
|
||||
override fun channelRead0(ctx: ChannelHandlerContext, msg: CacheMessage) {
|
||||
when (msg) {
|
||||
is CacheGetRequest -> handleGetRequest(ctx, msg)
|
||||
is CachePutRequest -> handlePutRequest(ctx, msg)
|
||||
is LastCacheContent -> handleLastCacheContent(ctx, msg)
|
||||
is CacheContent -> handleCacheContent(ctx, msg)
|
||||
else -> ctx.fireChannelRead(msg)
|
||||
}
|
||||
}
|
||||
|
||||
private fun handleGetRequest(ctx: ChannelHandlerContext, msg: CacheGetRequest) {
|
||||
log.debug(ctx) {
|
||||
"Fetching ${msg.key} from memcache"
|
||||
}
|
||||
val key = ctx.alloc().buffer().also {
|
||||
it.writeBytes(processCacheKey(msg.key, digestAlgorithm))
|
||||
}
|
||||
val responseHandler = object : MemcacheResponseHandler {
|
||||
override fun responseReceived(response: BinaryMemcacheResponse) {
|
||||
val status = response.status()
|
||||
when (status) {
|
||||
BinaryMemcacheResponseStatus.SUCCESS -> {
|
||||
log.debug(ctx) {
|
||||
"Cache hit for key ${msg.key} on memcache"
|
||||
}
|
||||
inProgressGetRequest = InProgressGetRequest(msg.key, ctx)
|
||||
}
|
||||
|
||||
BinaryMemcacheResponseStatus.KEY_ENOENT -> {
|
||||
log.debug(ctx) {
|
||||
"Cache miss for key ${msg.key} on memcache"
|
||||
}
|
||||
ctx.writeAndFlush(CacheValueNotFoundResponse())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun contentReceived(content: MemcacheContent) {
|
||||
log.trace(ctx) {
|
||||
"${if(content is LastMemcacheContent) "Last chunk" else "Chunk"} of ${content.content().readableBytes()} bytes received from memcache for key ${msg.key}"
|
||||
}
|
||||
inProgressGetRequest?.write(content.content())
|
||||
if (content is LastMemcacheContent) {
|
||||
inProgressGetRequest?.commit()
|
||||
}
|
||||
}
|
||||
|
||||
override fun exceptionCaught(ex: Throwable) {
|
||||
inProgressGetRequest?.let {
|
||||
inProgressGetRequest = null
|
||||
it.rollback()
|
||||
}
|
||||
this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
|
||||
}
|
||||
}
|
||||
client.sendRequest(key.retainedDuplicate(), responseHandler).thenAccept { requestHandle ->
|
||||
log.trace(ctx) {
|
||||
"Sending GET request for key ${msg.key} to memcache"
|
||||
}
|
||||
val request = DefaultBinaryMemcacheRequest(key).apply {
|
||||
setOpcode(BinaryMemcacheOpcodes.GET)
|
||||
}
|
||||
requestHandle.sendRequest(request)
|
||||
}
|
||||
}
|
||||
|
||||
private fun handlePutRequest(ctx: ChannelHandlerContext, msg: CachePutRequest) {
|
||||
val key = ctx.alloc().buffer().also {
|
||||
it.writeBytes(processCacheKey(msg.key, digestAlgorithm))
|
||||
}
|
||||
val responseHandler = object : MemcacheResponseHandler {
|
||||
override fun responseReceived(response: BinaryMemcacheResponse) {
|
||||
val status = response.status()
|
||||
when (status) {
|
||||
BinaryMemcacheResponseStatus.SUCCESS -> {
|
||||
log.debug(ctx) {
|
||||
"Inserted key ${msg.key} into memcache"
|
||||
}
|
||||
ctx.writeAndFlush(CachePutResponse(msg.key))
|
||||
}
|
||||
else -> this@MemcacheCacheHandler.exceptionCaught(ctx, MemcacheException(status))
|
||||
}
|
||||
}
|
||||
|
||||
override fun contentReceived(content: MemcacheContent) {}
|
||||
|
||||
override fun exceptionCaught(ex: Throwable) {
|
||||
this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
|
||||
}
|
||||
}
|
||||
|
||||
val requestController = client.sendRequest(key.retainedDuplicate(), responseHandler).whenComplete { _, ex ->
|
||||
ex?.let {
|
||||
this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
|
||||
}
|
||||
}
|
||||
inProgressPutRequest = InProgressPutRequest(ctx.channel(), msg.metadata, key, requestController, ctx.alloc())
|
||||
}
|
||||
|
||||
private fun handleCacheContent(ctx: ChannelHandlerContext, msg: CacheContent) {
|
||||
inProgressPutRequest?.let { request ->
|
||||
log.trace(ctx) {
|
||||
"Received chunk of ${msg.content().readableBytes()} bytes for memcache"
|
||||
}
|
||||
request.write(msg.content())
|
||||
}
|
||||
}
|
||||
|
||||
private fun handleLastCacheContent(ctx: ChannelHandlerContext, msg: LastCacheContent) {
|
||||
inProgressPutRequest?.let { request ->
|
||||
inProgressPutRequest = null
|
||||
log.trace(ctx) {
|
||||
"Received last chunk of ${msg.content().readableBytes()} bytes for memcache"
|
||||
}
|
||||
request.write(msg.content())
|
||||
val key = request.digest.retainedDuplicate()
|
||||
val (payloadSize, payloadSource) = request.commit()
|
||||
val extras = ctx.alloc().buffer(8, 8)
|
||||
extras.writeInt(0)
|
||||
extras.writeInt(encodeExpiry(maxAge))
|
||||
val totalBodyLength = request.digest.readableBytes() + extras.readableBytes() + payloadSize
|
||||
request.requestController.whenComplete { requestController, ex ->
|
||||
if(ex == null) {
|
||||
log.trace(ctx) {
|
||||
"Sending SET request to memcache"
|
||||
}
|
||||
requestController.sendRequest(DefaultBinaryMemcacheRequest().apply {
|
||||
setOpcode(BinaryMemcacheOpcodes.SET)
|
||||
setKey(key)
|
||||
setExtras(extras)
|
||||
setTotalBodyLength(totalBodyLength)
|
||||
})
|
||||
log.trace(ctx) {
|
||||
"Sending request payload to memcache"
|
||||
}
|
||||
payloadSource.use { source ->
|
||||
val bb = ByteBuffer.allocate(chunkSize)
|
||||
while (true) {
|
||||
val read = source.read(bb)
if(read >= 0 && bb.position() < chunkSize && bb.hasRemaining()) {
|
||||
continue
|
||||
}
|
||||
val chunk = ctx.alloc().buffer(chunkSize)
|
||||
bb.flip()
|
||||
chunk.writeBytes(bb)
|
||||
bb.clear()
|
||||
log.trace(ctx) {
|
||||
"Sending ${chunk.readableBytes()} bytes chunk to memcache"
|
||||
}
|
||||
if(read < 0) {
|
||||
requestController.sendContent(DefaultLastMemcacheContent(chunk))
|
||||
break
|
||||
} else {
|
||||
requestController.sendContent(DefaultMemcacheContent(chunk))
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
payloadSource.close()
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
||||
inProgressGetRequest?.let {
|
||||
inProgressGetRequest = null
|
||||
it.rollback()
|
||||
}
|
||||
inProgressPutRequest?.let {
|
||||
inProgressPutRequest = null
|
||||
it.requestController.thenAccept { controller ->
|
||||
controller.exceptionCaught(cause)
|
||||
}
|
||||
it.rollback()
|
||||
}
|
||||
super.exceptionCaught(ctx, cause)
|
||||
}
|
||||
}
|
@@ -0,0 +1,102 @@
|
||||
package net.woggioni.rbcs.server.memcache
|
||||
|
||||
import net.woggioni.rbcs.api.CacheProvider
|
||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
||||
import net.woggioni.rbcs.common.HostAndPort
|
||||
import net.woggioni.rbcs.common.RBCS
|
||||
import net.woggioni.rbcs.common.Xml
|
||||
import net.woggioni.rbcs.common.Xml.Companion.asIterable
|
||||
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
|
||||
import org.w3c.dom.Document
|
||||
import org.w3c.dom.Element
|
||||
import java.time.Duration
|
||||
import java.time.temporal.ChronoUnit
|
||||
|
||||
|
||||
class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
|
||||
override fun getXmlSchemaLocation() = "jpms://net.woggioni.rbcs.server.memcache/net/woggioni/rbcs/server/memcache/schema/rbcs-memcache.xsd"
|
||||
|
||||
override fun getXmlType() = "memcacheCacheType"
|
||||
|
||||
override fun getXmlNamespace() = "urn:net.woggioni.rbcs.server.memcache"
|
||||
|
||||
val xmlNamespacePrefix : String
|
||||
get() = "rbcs-memcache"
|
||||
|
||||
override fun deserialize(el: Element): MemcacheCacheConfiguration {
|
||||
val servers = mutableListOf<MemcacheCacheConfiguration.Server>()
|
||||
val maxAge = el.renderAttribute("max-age")
|
||||
?.let(Duration::parse)
|
||||
?: Duration.ofDays(1)
|
||||
val chunkSize = el.renderAttribute("chunk-size")
|
||||
?.let(Integer::decode)
|
||||
?: 0x10000
|
||||
val compressionLevel = el.renderAttribute("compression-level")
|
||||
?.let(Integer::decode)
|
||||
?: -1
|
||||
val compressionMode = el.renderAttribute("compression-mode")
|
||||
?.let {
|
||||
when (it) {
|
||||
"deflate" -> MemcacheCacheConfiguration.CompressionMode.DEFLATE
|
||||
else -> MemcacheCacheConfiguration.CompressionMode.DEFLATE
|
||||
}
|
||||
}
|
||||
val digestAlgorithm = el.renderAttribute("digest")
|
||||
for (child in el.asIterable()) {
|
||||
when (child.nodeName) {
|
||||
"server" -> {
|
||||
val host = child.renderAttribute("host") ?: throw ConfigurationException("host attribute is required")
|
||||
val port = child.renderAttribute("port")?.toInt() ?: throw ConfigurationException("port attribute is required")
|
||||
val maxConnections = child.renderAttribute("max-connections")?.toInt() ?: 1
|
||||
val connectionTimeout = child.renderAttribute("connection-timeout")
|
||||
?.let(Duration::parse)
|
||||
?.let(Duration::toMillis)
|
||||
?.let(Long::toInt)
|
||||
?: 10000
|
||||
servers.add(MemcacheCacheConfiguration.Server(HostAndPort(host, port), connectionTimeout, maxConnections))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return MemcacheCacheConfiguration(
|
||||
servers,
|
||||
maxAge,
|
||||
digestAlgorithm,
|
||||
compressionMode,
|
||||
compressionLevel,
|
||||
chunkSize
|
||||
)
|
||||
}
|
||||
|
||||
override fun serialize(doc: Document, cache: MemcacheCacheConfiguration) = cache.run {
|
||||
val result = doc.createElement("cache")
|
||||
Xml.of(doc, result) {
|
||||
attr("xmlns:${xmlNamespacePrefix}", xmlNamespace, namespaceURI = "http://www.w3.org/2000/xmlns/")
|
||||
attr("xs:type", "${xmlNamespacePrefix}:$xmlType", RBCS.XML_SCHEMA_NAMESPACE_URI)
|
||||
for (server in servers) {
|
||||
node("server") {
|
||||
attr("host", server.endpoint.host)
|
||||
attr("port", server.endpoint.port.toString())
|
||||
server.connectionTimeoutMillis?.let { connectionTimeoutMillis ->
|
||||
attr("connection-timeout", Duration.of(connectionTimeoutMillis.toLong(), ChronoUnit.MILLIS).toString())
|
||||
}
|
||||
attr("max-connections", server.maxConnections.toString())
|
||||
}
|
||||
}
|
||||
attr("max-age", maxAge.toString())
|
||||
attr("chunk-size", chunkSize.toString())
|
||||
digestAlgorithm?.let { digestAlgorithm ->
|
||||
attr("digest", digestAlgorithm)
|
||||
}
|
||||
compressionMode?.let { compressionMode ->
|
||||
attr(
|
||||
"compression-mode", when (compressionMode) {
|
||||
MemcacheCacheConfiguration.CompressionMode.DEFLATE -> "deflate"
|
||||
}
|
||||
)
|
||||
}
|
||||
attr("compression-level", compressionLevel.toString())
|
||||
}
|
||||
result
|
||||
}
|
||||
}
|
@@ -0,0 +1,214 @@
|
||||
package net.woggioni.rbcs.server.memcache.client
|
||||
|
||||
|
||||
import io.netty.bootstrap.Bootstrap
|
||||
import io.netty.buffer.ByteBuf
|
||||
import io.netty.channel.Channel
|
||||
import io.netty.channel.ChannelFutureListener
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import io.netty.channel.ChannelOption
|
||||
import io.netty.channel.ChannelPipeline
|
||||
import io.netty.channel.SimpleChannelInboundHandler
|
||||
import io.netty.channel.nio.NioEventLoopGroup
|
||||
import io.netty.channel.pool.AbstractChannelPoolHandler
|
||||
import io.netty.channel.pool.ChannelPool
|
||||
import io.netty.channel.pool.FixedChannelPool
|
||||
import io.netty.channel.socket.nio.NioSocketChannel
|
||||
import io.netty.handler.codec.memcache.LastMemcacheContent
|
||||
import io.netty.handler.codec.memcache.MemcacheContent
|
||||
import io.netty.handler.codec.memcache.MemcacheObject
|
||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheClientCodec
|
||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheRequest
|
||||
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
|
||||
import io.netty.util.concurrent.GenericFutureListener
|
||||
import net.woggioni.rbcs.common.HostAndPort
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import net.woggioni.rbcs.common.warn
|
||||
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration
|
||||
import net.woggioni.rbcs.server.memcache.MemcacheCacheHandler
|
||||
import java.io.IOException
|
||||
import java.net.InetSocketAddress
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
import io.netty.util.concurrent.Future as NettyFuture
|
||||
|
||||
|
||||
class MemcacheClient(private val servers: List<MemcacheCacheConfiguration.Server>, private val chunkSize : Int) : AutoCloseable {
|
||||
|
||||
private companion object {
|
||||
private val log = createLogger<MemcacheCacheHandler>()
|
||||
}
|
||||
|
||||
private val group: NioEventLoopGroup
|
||||
private val connectionPool: MutableMap<HostAndPort, ChannelPool> = ConcurrentHashMap()
|
||||
|
||||
init {
|
||||
group = NioEventLoopGroup()
|
||||
}
|
||||
|
||||
private fun newConnectionPool(server: MemcacheCacheConfiguration.Server): FixedChannelPool {
|
||||
val bootstrap = Bootstrap().apply {
|
||||
group(group)
|
||||
channel(NioSocketChannel::class.java)
|
||||
option(ChannelOption.SO_KEEPALIVE, true)
|
||||
remoteAddress(InetSocketAddress(server.endpoint.host, server.endpoint.port))
|
||||
server.connectionTimeoutMillis?.let {
|
||||
option(ChannelOption.CONNECT_TIMEOUT_MILLIS, it)
|
||||
}
|
||||
}
|
||||
val channelPoolHandler = object : AbstractChannelPoolHandler() {
|
||||
|
||||
override fun channelCreated(ch: Channel) {
|
||||
val pipeline: ChannelPipeline = ch.pipeline()
|
||||
pipeline.addLast(BinaryMemcacheClientCodec(chunkSize, true))
|
||||
}
|
||||
}
|
||||
return FixedChannelPool(bootstrap, channelPoolHandler, server.maxConnections)
|
||||
}
|
||||
|
||||
fun sendRequest(
|
||||
key: ByteBuf,
|
||||
responseHandler: MemcacheResponseHandler
|
||||
): CompletableFuture<MemcacheRequestController> {
|
||||
val server = if (servers.size > 1) {
|
||||
var checksum = 0
|
||||
while (key.readableBytes() > 4) {
|
||||
val byte = key.readInt()
|
||||
checksum = checksum xor byte
|
||||
}
|
||||
while (key.readableBytes() > 0) {
|
||||
val byte = key.readByte()
|
||||
checksum = checksum xor byte.toInt()
|
||||
}
|
||||
servers[Math.floorMod(checksum, servers.size)]
|
||||
} else {
|
||||
servers.first()
|
||||
}
|
||||
key.release()
|
||||
|
||||
val response = CompletableFuture<MemcacheRequestController>()
|
||||
// Custom handler for processing responses
|
||||
val pool = connectionPool.computeIfAbsent(server.endpoint) {
|
||||
newConnectionPool(server)
|
||||
}
|
||||
pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {
|
||||
override fun operationComplete(channelFuture: NettyFuture<Channel>) {
|
||||
if (channelFuture.isSuccess) {
|
||||
|
||||
var requestSent = false
|
||||
var requestBodySent = false
|
||||
var requestFinished = false
|
||||
var responseReceived = false
|
||||
var responseBodyReceived = false
|
||||
var responseFinished = false
|
||||
var requestBodySize = 0
|
||||
var requestBodyBytesSent = 0
|
||||
|
||||
|
||||
|
||||
val channel = channelFuture.now
|
||||
var connectionClosedByTheRemoteServer = true
|
||||
val closeCallback = {
|
||||
if (connectionClosedByTheRemoteServer) {
|
||||
val ex = IOException("The memcache server closed the connection")
|
||||
val completed = response.completeExceptionally(ex)
|
||||
if(!completed) responseHandler.exceptionCaught(ex)
|
||||
log.warn {
|
||||
"RequestSent: $requestSent, RequestBodySent: $requestBodySent, " +
|
||||
"RequestFinished: $requestFinished, ResponseReceived: $responseReceived, " +
|
||||
"ResponseBodyReceived: $responseBodyReceived, ResponseFinished: $responseFinished, " +
|
||||
"RequestBodySize: $requestBodySize, RequestBodyBytesSent: $requestBodyBytesSent"
|
||||
}
|
||||
}
|
||||
pool.release(channel)
|
||||
}
|
||||
val closeListener = ChannelFutureListener {
|
||||
closeCallback()
|
||||
}
|
||||
channel.closeFuture().addListener(closeListener)
|
||||
val pipeline = channel.pipeline()
|
||||
val handler = object : SimpleChannelInboundHandler<MemcacheObject>() {
|
||||
|
||||
override fun handlerAdded(ctx: ChannelHandlerContext) {
|
||||
channel.closeFuture().removeListener(closeListener)
|
||||
}
|
||||
|
||||
override fun channelRead0(
|
||||
ctx: ChannelHandlerContext,
|
||||
msg: MemcacheObject
|
||||
) {
|
||||
when (msg) {
|
||||
is BinaryMemcacheResponse -> {
|
||||
responseHandler.responseReceived(msg)
|
||||
responseReceived = true
|
||||
}
|
||||
|
||||
is LastMemcacheContent -> {
|
||||
responseFinished = true
|
||||
responseHandler.contentReceived(msg)
|
||||
pipeline.remove(this)
|
||||
pool.release(channel)
|
||||
}
|
||||
|
||||
is MemcacheContent -> {
|
||||
responseBodyReceived = true
|
||||
responseHandler.contentReceived(msg)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun channelInactive(ctx: ChannelHandlerContext) {
|
||||
closeCallback()
|
||||
ctx.fireChannelInactive()
|
||||
}
|
||||
|
||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
||||
connectionClosedByTheRemoteServer = false
|
||||
ctx.close()
|
||||
pool.release(channel)
|
||||
responseHandler.exceptionCaught(cause)
|
||||
}
|
||||
}
|
||||
|
||||
channel.pipeline()
|
||||
.addLast("client-handler", handler)
|
||||
response.complete(object : MemcacheRequestController {
|
||||
|
||||
override fun sendRequest(request: BinaryMemcacheRequest) {
|
||||
requestBodySize = request.totalBodyLength() - request.keyLength() - request.extrasLength()
|
||||
channel.writeAndFlush(request)
|
||||
requestSent = true
|
||||
}
|
||||
|
||||
override fun sendContent(content: MemcacheContent) {
|
||||
val size = content.content().readableBytes()
|
||||
channel.writeAndFlush(content).addListener {
|
||||
requestBodyBytesSent += size
|
||||
requestBodySent = true
|
||||
if(content is LastMemcacheContent) {
|
||||
requestFinished = true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
override fun exceptionCaught(ex: Throwable) {
|
||||
connectionClosedByTheRemoteServer = false
|
||||
channel.close()
|
||||
}
|
||||
})
|
||||
} else {
|
||||
response.completeExceptionally(channelFuture.cause())
|
||||
}
|
||||
}
|
||||
})
|
||||
return response
|
||||
}
|
||||
|
||||
fun shutDown(): NettyFuture<*> {
|
||||
return group.shutdownGracefully()
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
shutDown().sync()
|
||||
}
|
||||
}
|
@@ -0,0 +1,13 @@
package net.woggioni.rbcs.server.memcache.client

import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.binary.BinaryMemcacheRequest

interface MemcacheRequestController {

    fun sendRequest(request : BinaryMemcacheRequest)

    fun sendContent(content : MemcacheContent)

    fun exceptionCaught(ex : Throwable)
}
@@ -0,0 +1,14 @@
package net.woggioni.rbcs.server.memcache.client

import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse

interface MemcacheResponseHandler {

    fun responseReceived(response : BinaryMemcacheResponse)

    fun contentReceived(content : MemcacheContent)

    fun exceptionCaught(ex : Throwable)
}
@@ -0,0 +1 @@
net.woggioni.rbcs.server.memcache.MemcacheCacheProvider
@@ -0,0 +1,37 @@
|
||||
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||
<xs:schema targetNamespace="urn:net.woggioni.rbcs.server.memcache"
|
||||
xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
|
||||
xmlns:rbcs="urn:net.woggioni.rbcs.server"
|
||||
xmlns:xs="http://www.w3.org/2001/XMLSchema">
|
||||
|
||||
<xs:import schemaLocation="jpms://net.woggioni.rbcs.server/net/woggioni/rbcs/server/schema/rbcs.xsd" namespace="urn:net.woggioni.rbcs.server"/>
|
||||
|
||||
<xs:complexType name="memcacheServerType">
|
||||
<xs:attribute name="host" type="xs:token" use="required"/>
|
||||
<xs:attribute name="port" type="xs:positiveInteger" use="required"/>
|
||||
<xs:attribute name="connection-timeout" type="xs:duration"/>
|
||||
<xs:attribute name="max-connections" type="xs:positiveInteger" default="1"/>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:complexType name="memcacheCacheType">
|
||||
<xs:complexContent>
|
||||
<xs:extension base="rbcs:cacheType">
|
||||
<xs:sequence maxOccurs="unbounded">
|
||||
<xs:element name="server" type="rbcs-memcache:memcacheServerType"/>
|
||||
</xs:sequence>
|
||||
<xs:attribute name="max-age" type="xs:duration" default="P1D"/>
|
||||
<xs:attribute name="chunk-size" type="rbcs:byteSizeType" default="0x10000"/>
|
||||
<xs:attribute name="digest" type="xs:token" />
|
||||
<xs:attribute name="compression-mode" type="rbcs-memcache:compressionType"/>
|
||||
<xs:attribute name="compression-level" type="rbcs:compressionLevelType" default="-1"/>
|
||||
</xs:extension>
|
||||
</xs:complexContent>
|
||||
</xs:complexType>
|
||||
|
||||
<xs:simpleType name="compressionType">
|
||||
<xs:restriction base="xs:token">
|
||||
<xs:enumeration value="deflate"/>
|
||||
</xs:restriction>
|
||||
</xs:simpleType>
|
||||
|
||||
</xs:schema>
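An illustrative instance of this schema as it could appear inside the server configuration (values are placeholders; the exact prefix and namespace of the type attribute follow the serializer in MemcacheCacheProvider, shown here with the conventional XML Schema instance URI as an assumption):

    <cache xmlns:rbcs-memcache="urn:net.woggioni.rbcs.server.memcache"
           xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
           xs:type="rbcs-memcache:memcacheCacheType"
           max-age="P7D"
           chunk-size="0x10000"
           digest="MD5"
           compression-mode="deflate"
           compression-level="-1">
        <server host="127.0.0.1" port="11211" connection-timeout="PT10S" max-connections="4"/>
    </cache>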
|
@@ -0,0 +1,27 @@
|
||||
package net.woggioni.rbcs.server.memcache.client
|
||||
|
||||
import io.netty.buffer.ByteBufUtil
|
||||
import io.netty.buffer.Unpooled
|
||||
import org.junit.jupiter.api.Assertions
|
||||
import org.junit.jupiter.api.Test
|
||||
import java.io.ByteArrayInputStream
|
||||
import java.nio.ByteBuffer
|
||||
import java.nio.channels.Channels
|
||||
import kotlin.random.Random
|
||||
|
||||
class ByteBufferTest {
|
||||
|
||||
@Test
|
||||
fun test() {
|
||||
val byteBuffer = ByteBuffer.allocate(0x100)
|
||||
val originalBytes = Random(101325).nextBytes(0x100)
|
||||
Channels.newChannel(ByteArrayInputStream(originalBytes)).use { source ->
|
||||
source.read(byteBuffer)
|
||||
}
|
||||
byteBuffer.flip()
|
||||
val buf = Unpooled.buffer()
|
||||
buf.writeBytes(byteBuffer)
|
||||
val finalBytes = ByteBufUtil.getBytes(buf)
|
||||
Assertions.assertArrayEquals(originalBytes, finalBytes)
|
||||
}
|
||||
}
|
42
rbcs-server/build.gradle
Normal file
@@ -0,0 +1,42 @@
|
||||
plugins {
|
||||
id 'java-library'
|
||||
alias catalog.plugins.kotlin.jvm
|
||||
id 'jacoco'
|
||||
id 'maven-publish'
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation catalog.jwo
|
||||
implementation catalog.slf4j.api
|
||||
implementation catalog.netty.codec.http
|
||||
implementation catalog.netty.handler
|
||||
implementation catalog.netty.buffer
|
||||
implementation catalog.netty.transport
|
||||
|
||||
api project(':rbcs-common')
|
||||
api project(':rbcs-api')
|
||||
|
||||
// runtimeOnly catalog.slf4j.jdk14
|
||||
testRuntimeOnly catalog.logback.classic
|
||||
|
||||
testImplementation catalog.bcprov.jdk18on
|
||||
testImplementation catalog.bcpkix.jdk18on
|
||||
|
||||
testRuntimeOnly project(":rbcs-server-memcache")
|
||||
}
|
||||
|
||||
test {
|
||||
systemProperty("io.netty.leakDetectionLevel", "PARANOID")
|
||||
systemProperty("jdk.httpclient.redirects.retrylimit", "1")
|
||||
}
|
||||
|
||||
publishing {
|
||||
publications {
|
||||
maven(MavenPublication) {
|
||||
from(components["java"])
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
29
rbcs-server/src/main/java/module-info.java
Normal file
@@ -0,0 +1,29 @@
|
||||
import net.woggioni.rbcs.api.CacheProvider;
|
||||
import net.woggioni.rbcs.server.cache.FileSystemCacheProvider;
|
||||
import net.woggioni.rbcs.server.cache.InMemoryCacheProvider;
|
||||
|
||||
module net.woggioni.rbcs.server {
|
||||
requires java.sql;
|
||||
requires java.xml;
|
||||
requires java.logging;
|
||||
requires java.naming;
|
||||
requires kotlin.stdlib;
|
||||
requires io.netty.buffer;
|
||||
requires io.netty.transport;
|
||||
requires io.netty.codec.http;
|
||||
requires io.netty.common;
|
||||
requires io.netty.handler;
|
||||
requires io.netty.codec;
|
||||
requires org.slf4j;
|
||||
requires net.woggioni.jwo;
|
||||
requires net.woggioni.rbcs.common;
|
||||
requires net.woggioni.rbcs.api;
|
||||
|
||||
exports net.woggioni.rbcs.server;
|
||||
|
||||
opens net.woggioni.rbcs.server;
|
||||
opens net.woggioni.rbcs.server.schema;
|
||||
|
||||
uses CacheProvider;
|
||||
provides CacheProvider with FileSystemCacheProvider, InMemoryCacheProvider;
|
||||
}
|
@@ -0,0 +1,501 @@
|
||||
package net.woggioni.rbcs.server
|
||||
|
||||
import io.netty.bootstrap.ServerBootstrap
|
||||
import io.netty.buffer.ByteBuf
|
||||
import io.netty.channel.Channel
|
||||
import io.netty.channel.ChannelFuture
|
||||
import io.netty.channel.ChannelHandler.Sharable
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import io.netty.channel.ChannelInboundHandlerAdapter
|
||||
import io.netty.channel.ChannelInitializer
|
||||
import io.netty.channel.ChannelOption
|
||||
import io.netty.channel.ChannelPromise
|
||||
import io.netty.channel.nio.NioEventLoopGroup
|
||||
import io.netty.channel.socket.nio.NioServerSocketChannel
|
||||
import io.netty.handler.codec.compression.CompressionOptions
|
||||
import io.netty.handler.codec.http.DefaultHttpContent
|
||||
import io.netty.handler.codec.http.HttpContentCompressor
|
||||
import io.netty.handler.codec.http.HttpHeaderNames
|
||||
import io.netty.handler.codec.http.HttpRequest
|
||||
import io.netty.handler.codec.http.HttpServerCodec
|
||||
import io.netty.handler.ssl.ClientAuth
|
||||
import io.netty.handler.ssl.SslContext
|
||||
import io.netty.handler.ssl.SslContextBuilder
|
||||
import io.netty.handler.ssl.SslHandler
|
||||
import io.netty.handler.stream.ChunkedWriteHandler
|
||||
import io.netty.handler.timeout.IdleState
|
||||
import io.netty.handler.timeout.IdleStateEvent
|
||||
import io.netty.handler.timeout.IdleStateHandler
|
||||
import io.netty.util.AttributeKey
|
||||
import io.netty.util.concurrent.DefaultEventExecutorGroup
|
||||
import io.netty.util.concurrent.EventExecutorGroup
|
||||
import net.woggioni.jwo.JWO
|
||||
import net.woggioni.jwo.Tuple2
|
||||
import net.woggioni.rbcs.api.Configuration
|
||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
||||
import net.woggioni.rbcs.common.PasswordSecurity.decodePasswordHash
|
||||
import net.woggioni.rbcs.common.PasswordSecurity.hashPassword
|
||||
import net.woggioni.rbcs.common.RBCS.toUrl
|
||||
import net.woggioni.rbcs.common.Xml
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import net.woggioni.rbcs.common.debug
|
||||
import net.woggioni.rbcs.common.info
|
||||
import net.woggioni.rbcs.server.auth.AbstractNettyHttpAuthenticator
|
||||
import net.woggioni.rbcs.server.auth.Authorizer
|
||||
import net.woggioni.rbcs.server.auth.ClientCertificateValidator
|
||||
import net.woggioni.rbcs.server.auth.RoleAuthorizer
|
||||
import net.woggioni.rbcs.server.configuration.Parser
|
||||
import net.woggioni.rbcs.server.configuration.Serializer
|
||||
import net.woggioni.rbcs.server.exception.ExceptionHandler
|
||||
import net.woggioni.rbcs.server.handler.MaxRequestSizeHandler
|
||||
import net.woggioni.rbcs.server.handler.ServerHandler
|
||||
import net.woggioni.rbcs.server.handler.TraceHandler
|
||||
import net.woggioni.rbcs.server.throttling.BucketManager
|
||||
import net.woggioni.rbcs.server.throttling.ThrottlingHandler
|
||||
import java.io.OutputStream
|
||||
import java.net.InetSocketAddress
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.security.KeyStore
|
||||
import java.security.PrivateKey
|
||||
import java.security.cert.X509Certificate
|
||||
import java.time.Duration
|
||||
import java.time.Instant
|
||||
import java.util.Arrays
|
||||
import java.util.Base64
|
||||
import java.util.concurrent.CompletableFuture
|
||||
import java.util.concurrent.Future
|
||||
import java.util.concurrent.TimeUnit
|
||||
import java.util.concurrent.TimeoutException
|
||||
import java.util.regex.Matcher
|
||||
import java.util.regex.Pattern
|
||||
import javax.naming.ldap.LdapName
|
||||
import javax.net.ssl.SSLPeerUnverifiedException
|
||||
|
||||
class RemoteBuildCacheServer(private val cfg: Configuration) {
|
||||
|
||||
companion object {
|
||||
private val log = createLogger<RemoteBuildCacheServer>()
|
||||
|
||||
val userAttribute: AttributeKey<Configuration.User> = AttributeKey.valueOf("user")
|
||||
val groupAttribute: AttributeKey<Set<Configuration.Group>> = AttributeKey.valueOf("group")
|
||||
|
||||
val DEFAULT_CONFIGURATION_URL by lazy { "classpath:net/woggioni/rbcs/server/rbcs-default.xml".toUrl() }
|
||||
private const val SSL_HANDLER_NAME = "sslHandler"
|
||||
|
||||
fun loadConfiguration(configurationFile: Path): Configuration {
|
||||
val doc = Files.newInputStream(configurationFile).use {
|
||||
Xml.parseXml(configurationFile.toUri().toURL(), it)
|
||||
}
|
||||
return Parser.parse(doc)
|
||||
}
|
||||
|
||||
fun dumpConfiguration(conf: Configuration, outputStream: OutputStream) {
|
||||
Xml.write(Serializer.serialize(conf), outputStream)
|
||||
}
|
||||
}
|
||||
|
||||
private class HttpChunkContentCompressor(
|
||||
threshold: Int,
|
||||
vararg compressionOptions: CompressionOptions = emptyArray()
|
||||
) : HttpContentCompressor(threshold, *compressionOptions) {
|
||||
override fun write(ctx: ChannelHandlerContext, msg: Any, promise: ChannelPromise) {
|
||||
var message: Any? = msg
|
||||
if (message is ByteBuf) {
|
||||
// convert ByteBuf to HttpContent to make it work with compression. This is needed as we use the
|
||||
// ChunkedWriteHandler to send files when compression is enabled.
|
||||
val buff = message
|
||||
if (buff.isReadable) {
|
||||
// We only encode non empty buffers, as empty buffers can be used for determining when
|
||||
// the content has been flushed and it confuses the HttpContentCompressor
|
||||
// if we let it go
|
||||
message = DefaultHttpContent(buff)
|
||||
}
|
||||
}
|
||||
super.write(ctx, message, promise)
|
||||
}
|
||||
}
|
||||
|
||||
@Sharable
|
||||
private class ClientCertificateAuthenticator(
|
||||
authorizer: Authorizer,
|
||||
private val anonymousUserGroups: Set<Configuration.Group>?,
|
||||
private val userExtractor: Configuration.UserExtractor?,
|
||||
private val groupExtractor: Configuration.GroupExtractor?,
|
||||
) : AbstractNettyHttpAuthenticator(authorizer) {
|
||||
|
||||
override fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): AuthenticationResult? {
|
||||
return try {
|
||||
val sslHandler = (ctx.pipeline().get(SSL_HANDLER_NAME) as? SslHandler)
|
||||
?: throw ConfigurationException("Client certificate authentication cannot be used when TLS is disabled")
|
||||
val sslEngine = sslHandler.engine()
|
||||
sslEngine.session.peerCertificates.takeIf {
|
||||
it.isNotEmpty()
|
||||
}?.let { peerCertificates ->
|
||||
val clientCertificate = peerCertificates.first() as X509Certificate
|
||||
val user = userExtractor?.extract(clientCertificate)
|
||||
val group = groupExtractor?.extract(clientCertificate)
|
||||
val allGroups =
|
||||
((user?.groups ?: emptySet()).asSequence() + sequenceOf(group).filterNotNull()).toSet()
|
||||
AuthenticationResult(user, allGroups)
|
||||
} ?: anonymousUserGroups?.let { AuthenticationResult(null, it) }
|
||||
} catch (es: SSLPeerUnverifiedException) {
|
||||
anonymousUserGroups?.let { AuthenticationResult(null, it) }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Sharable
|
||||
private class NettyHttpBasicAuthenticator(
|
||||
private val users: Map<String, Configuration.User>, authorizer: Authorizer
|
||||
) : AbstractNettyHttpAuthenticator(authorizer) {
|
||||
companion object {
|
||||
private val log = createLogger<NettyHttpBasicAuthenticator>()
|
||||
}
|
||||
|
||||
override fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): AuthenticationResult? {
|
||||
val authorizationHeader = req.headers()[HttpHeaderNames.AUTHORIZATION] ?: let {
|
||||
log.debug(ctx) {
|
||||
"Missing Authorization header"
|
||||
}
|
||||
return users[""]?.let { AuthenticationResult(it, it.groups) }
|
||||
}
|
||||
val cursor = authorizationHeader.indexOf(' ')
|
||||
if (cursor < 0) {
|
||||
log.debug(ctx) {
|
||||
"Invalid Authorization header: '$authorizationHeader'"
|
||||
}
|
||||
return users[""]?.let { AuthenticationResult(it, it.groups) }
|
||||
}
|
||||
val authenticationType = authorizationHeader.substring(0, cursor)
|
||||
if ("Basic" != authenticationType) {
|
||||
log.debug(ctx) {
|
||||
"Invalid authentication type header: '$authenticationType'"
|
||||
}
|
||||
return users[""]?.let { AuthenticationResult(it, it.groups) }
|
||||
}
|
||||
val (username, password) = Base64.getDecoder().decode(authorizationHeader.substring(cursor + 1))
|
||||
.let(::String)
|
||||
.let {
|
||||
val colon = it.indexOf(':')
|
||||
if (colon < 0) {
|
||||
log.debug(ctx) {
|
||||
"Missing colon from authentication"
|
||||
}
|
||||
return null
|
||||
}
|
||||
it.substring(0, colon) to it.substring(colon + 1)
|
||||
}
|
||||
|
||||
return username.let(users::get)?.takeIf { user ->
|
||||
user.password?.let { passwordAndSalt ->
|
||||
val (_, salt) = decodePasswordHash(passwordAndSalt)
|
||||
hashPassword(password, Base64.getEncoder().encodeToString(salt)) == passwordAndSalt
|
||||
} ?: false
|
||||
}?.let { user ->
|
||||
AuthenticationResult(user, user.groups)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private class ServerInitializer(
|
||||
private val cfg: Configuration,
|
||||
private val eventExecutorGroup: EventExecutorGroup
|
||||
) : ChannelInitializer<Channel>(), AutoCloseable {
|
||||
|
||||
companion object {
|
||||
private fun createSslCtx(tls: Configuration.Tls): SslContext {
|
||||
val keyStore = tls.keyStore
|
||||
return if (keyStore == null) {
|
||||
throw IllegalArgumentException("No keystore configured")
|
||||
} else {
|
||||
val javaKeyStore = loadKeystore(keyStore.file, keyStore.password)
|
||||
val serverKey = javaKeyStore.getKey(
|
||||
keyStore.keyAlias, (keyStore.keyPassword ?: "").let(String::toCharArray)
|
||||
) as PrivateKey
|
||||
val serverCert: Array<X509Certificate> =
|
||||
Arrays.stream(javaKeyStore.getCertificateChain(keyStore.keyAlias))
|
||||
.map { it as X509Certificate }
|
||||
.toArray { size -> Array<X509Certificate?>(size) { null } }
|
||||
SslContextBuilder.forServer(serverKey, *serverCert).apply {
|
||||
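// Client certificates are requested (or required) only when a trust store is configured; otherwise client authentication is disabled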
val clientAuth = tls.trustStore?.let { trustStore ->
|
||||
val ts = loadKeystore(trustStore.file, trustStore.password)
|
||||
trustManager(
|
||||
ClientCertificateValidator.getTrustManager(ts, trustStore.isCheckCertificateStatus)
|
||||
)
|
||||
if (trustStore.isRequireClientCertificate) ClientAuth.REQUIRE
|
||||
else ClientAuth.OPTIONAL
|
||||
} ?: ClientAuth.NONE
|
||||
clientAuth(clientAuth)
|
||||
}.build()
|
||||
}
|
||||
}
|
||||
|
||||
fun loadKeystore(file: Path, password: String?): KeyStore {
|
||||
val ext = JWO.splitExtension(file)
|
||||
.map(Tuple2<String, String>::get_2)
|
||||
.orElseThrow {
|
||||
IllegalArgumentException(
|
||||
"Keystore file '${file}' must have .jks, .p12, .pfx extension"
|
||||
)
|
||||
}
|
||||
val keystore = when (ext.substring(1).lowercase()) {
|
||||
"jks" -> KeyStore.getInstance("JKS")
|
||||
"p12", "pfx" -> KeyStore.getInstance("PKCS12")
|
||||
else -> throw IllegalArgumentException(
|
||||
"Keystore file '${file}' must have .jks, .p12, .pfx extension"
|
||||
)
|
||||
}
|
||||
Files.newInputStream(file).use {
|
||||
keystore.load(it, password?.let(String::toCharArray))
|
||||
}
|
||||
return keystore
|
||||
}
|
||||
|
||||
private val log = createLogger<ServerInitializer>()
|
||||
}
|
||||
|
||||
private val cacheHandlerFactory = cfg.cache.materialize()
|
||||
|
||||
private val bucketManager = BucketManager.from(cfg)
|
||||
|
||||
private val authenticator = when (val auth = cfg.authentication) {
|
||||
is Configuration.BasicAuthentication -> NettyHttpBasicAuthenticator(cfg.users, RoleAuthorizer())
|
||||
is Configuration.ClientCertificateAuthentication -> {
|
||||
ClientCertificateAuthenticator(
|
||||
RoleAuthorizer(),
|
||||
cfg.users[""]?.groups,
|
||||
userExtractor(auth),
|
||||
groupExtractor(auth)
|
||||
)
|
||||
}
|
||||
|
||||
else -> null
|
||||
}
|
||||
|
||||
private val sslContext: SslContext? = cfg.tls?.let(Companion::createSslCtx)
|
||||
|
||||
private fun userExtractor(authentication: Configuration.ClientCertificateAuthentication) =
|
||||
authentication.userExtractor?.let { extractor ->
|
||||
val pattern = Pattern.compile(extractor.pattern)
|
||||
val rdnType = extractor.rdnType
|
||||
Configuration.UserExtractor { cert: X509Certificate ->
|
||||
val userName = LdapName(cert.subjectX500Principal.name).rdns.find {
|
||||
it.type == rdnType
|
||||
}?.let {
|
||||
pattern.matcher(it.value.toString())
|
||||
}?.takeIf(Matcher::matches)?.group(1)
|
||||
cfg.users[userName] ?: throw java.lang.RuntimeException("Failed to extract user")
|
||||
}
|
||||
}
|
||||
|
||||
private fun groupExtractor(authentication: Configuration.ClientCertificateAuthentication) =
|
||||
authentication.groupExtractor?.let { extractor ->
|
||||
val pattern = Pattern.compile(extractor.pattern)
|
||||
val rdnType = extractor.rdnType
|
||||
Configuration.GroupExtractor { cert: X509Certificate ->
|
||||
val groupName = LdapName(cert.subjectX500Principal.name).rdns.find {
|
||||
it.type == rdnType
|
||||
}?.let {
|
||||
pattern.matcher(it.value.toString())
|
||||
}?.takeIf(Matcher::matches)?.group(1)
|
||||
cfg.groups[groupName] ?: throw java.lang.RuntimeException("Failed to extract group")
|
||||
}
|
||||
}
|
||||
|
||||
override fun initChannel(ch: Channel) {
|
||||
log.debug {
|
||||
"Created connection ${ch.id().asShortText()} with ${ch.remoteAddress()}"
|
||||
}
|
||||
ch.closeFuture().addListener {
|
||||
log.debug {
|
||||
"Closed connection ${ch.id().asShortText()} with ${ch.remoteAddress()}"
|
||||
}
|
||||
}
|
||||
val pipeline = ch.pipeline()
|
||||
cfg.connection.also { conn ->
|
||||
val readTimeout = conn.readTimeout.toMillis()
|
||||
val writeTimeout = conn.writeTimeout.toMillis()
|
||||
if (readTimeout > 0 || writeTimeout > 0) {
|
||||
pipeline.addLast(
|
||||
IdleStateHandler(
|
||||
false,
|
||||
readTimeout,
|
||||
writeTimeout,
|
||||
0,
|
||||
TimeUnit.MILLISECONDS
|
||||
)
|
||||
)
|
||||
}
|
||||
val readIdleTimeout = conn.readIdleTimeout.toMillis()
|
||||
val writeIdleTimeout = conn.writeIdleTimeout.toMillis()
|
||||
val idleTimeout = conn.idleTimeout.toMillis()
|
||||
if (readIdleTimeout > 0 || writeIdleTimeout > 0 || idleTimeout > 0) {
|
||||
pipeline.addLast(
|
||||
IdleStateHandler(
|
||||
true,
|
||||
readIdleTimeout,
|
||||
writeIdleTimeout,
|
||||
idleTimeout,
|
||||
TimeUnit.MILLISECONDS
|
||||
)
|
||||
)
|
||||
}
|
||||
}
|
||||
pipeline.addLast(object : ChannelInboundHandlerAdapter() {
|
||||
override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
|
||||
if (evt is IdleStateEvent) {
|
||||
when (evt.state()) {
|
||||
IdleState.READER_IDLE -> log.debug {
|
||||
"Read timeout reached on channel ${ch.id().asShortText()}, closing the connection"
|
||||
}
|
||||
|
||||
IdleState.WRITER_IDLE -> log.debug {
|
||||
"Write timeout reached on channel ${ch.id().asShortText()}, closing the connection"
|
||||
}
|
||||
|
||||
IdleState.ALL_IDLE -> log.debug {
|
||||
"Idle timeout reached on channel ${ch.id().asShortText()}, closing the connection"
|
||||
}
|
||||
|
||||
null -> throw IllegalStateException("This should never happen")
|
||||
}
|
||||
ctx.close()
|
||||
}
|
||||
}
|
||||
})
|
||||
sslContext?.newHandler(ch.alloc())?.also {
|
||||
pipeline.addLast(SSL_HANDLER_NAME, it)
|
||||
}
|
||||
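// Remaining pipeline order: HTTP codec, request size limit, compression, chunked writes, optional authentication, throttling, server handler, cache handler, trace and exception handlers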
pipeline.addLast(HttpServerCodec())
|
||||
pipeline.addLast(MaxRequestSizeHandler.NAME, MaxRequestSizeHandler(cfg.connection.maxRequestSize))
|
||||
pipeline.addLast(HttpChunkContentCompressor(1024))
|
||||
pipeline.addLast(ChunkedWriteHandler())
|
||||
authenticator?.let {
|
||||
pipeline.addLast(it)
|
||||
}
|
||||
pipeline.addLast(ThrottlingHandler(bucketManager, cfg.connection))
|
||||
|
||||
val serverHandler = let {
|
||||
val prefix = Path.of("/").resolve(Path.of(cfg.serverPath ?: "/"))
|
||||
ServerHandler(prefix)
|
||||
}
|
||||
pipeline.addLast(eventExecutorGroup, ServerHandler.NAME, serverHandler)
|
||||
pipeline.addLast(cacheHandlerFactory.newHandler())
|
||||
pipeline.addLast(TraceHandler)
|
||||
pipeline.addLast(ExceptionHandler)
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
cacheHandlerFactory.close()
|
||||
}
|
||||
}
|
||||
|
||||
class ServerHandle(
|
||||
closeFuture: ChannelFuture,
|
||||
private val bossGroup: EventExecutorGroup,
|
||||
private val executorGroups: Iterable<EventExecutorGroup>,
|
||||
private val serverInitializer: AutoCloseable,
|
||||
) : Future<Void> by from(closeFuture, executorGroups, serverInitializer) {
|
||||
|
||||
companion object {
|
||||
private val log = createLogger<ServerHandle>()
|
||||
|
||||
private fun from(
|
||||
closeFuture: ChannelFuture,
|
||||
executorGroups: Iterable<EventExecutorGroup>,
|
||||
serverInitializer: AutoCloseable
|
||||
): CompletableFuture<Void> {
|
||||
val result = CompletableFuture<Void>()
|
||||
closeFuture.addListener {
|
||||
val errors = mutableListOf<Throwable>()
|
||||
val deadline = Instant.now().plusSeconds(20)
|
||||
|
||||
|
||||
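// Wait up to roughly 20 seconds overall for every executor group to terminate, collecting any failures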
for (executorGroup in executorGroups) {
|
||||
val future = executorGroup.terminationFuture()
|
||||
try {
|
||||
val now = Instant.now()
|
||||
if (now > deadline) {
|
||||
future.get(0, TimeUnit.SECONDS)
|
||||
} else {
|
||||
future.get(Duration.between(now, deadline).toMillis(), TimeUnit.MILLISECONDS)
|
||||
}
|
||||
}
|
||||
catch (te: TimeoutException) {
|
||||
errors.addLast(te)
|
||||
log.warn("Timeout while waiting for shutdown of $executorGroup", te)
|
||||
} catch (ex: Throwable) {
|
||||
log.warn(ex.message, ex)
|
||||
errors.addLast(ex)
|
||||
}
|
||||
}
|
||||
try {
|
||||
serverInitializer.close()
|
||||
} catch (ex: Throwable) {
|
||||
log.error(ex.message, ex)
|
||||
errors.addLast(ex)
|
||||
}
|
||||
if(errors.isEmpty()) {
|
||||
result.complete(null)
|
||||
} else {
|
||||
result.completeExceptionally(errors.first())
|
||||
}
|
||||
}
|
||||
|
||||
return result.thenAccept {
|
||||
log.info {
|
||||
"RemoteBuildCacheServer has been gracefully shut down"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
fun sendShutdownSignal() {
|
||||
bossGroup.shutdownGracefully()
|
||||
executorGroups.map {
|
||||
it.shutdownGracefully()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fun run(): ServerHandle {
|
||||
// Create the multithreaded event loops for the server
|
||||
val bossGroup = NioEventLoopGroup(1)
|
||||
val serverSocketChannel = NioServerSocketChannel::class.java
|
||||
val workerGroup = NioEventLoopGroup(0)
|
||||
val eventExecutorGroup = run {
|
||||
val threadFactory = if (cfg.eventExecutor.isUseVirtualThreads) {
|
||||
Thread.ofVirtual().factory()
|
||||
} else {
|
||||
null
|
||||
}
|
||||
DefaultEventExecutorGroup(Runtime.getRuntime().availableProcessors(), threadFactory)
|
||||
}
|
||||
val serverInitializer = ServerInitializer(cfg, eventExecutorGroup)
|
||||
val bootstrap = ServerBootstrap().apply {
|
||||
// Configure the server
|
||||
group(bossGroup, workerGroup)
|
||||
channel(serverSocketChannel)
|
||||
childHandler(serverInitializer)
|
||||
option(ChannelOption.SO_BACKLOG, cfg.incomingConnectionsBacklogSize)
|
||||
childOption(ChannelOption.SO_KEEPALIVE, true)
|
||||
}
|
||||
|
||||
|
||||
// Bind and start to accept incoming connections.
|
||||
val bindAddress = InetSocketAddress(cfg.host, cfg.port)
|
||||
val httpChannel = bootstrap.bind(bindAddress).sync().channel()
|
||||
log.info {
|
||||
"RemoteBuildCacheServer is listening on ${cfg.host}:${cfg.port}"
|
||||
}
|
||||
|
||||
return ServerHandle(
|
||||
httpChannel.closeFuture(),
|
||||
bossGroup,
|
||||
setOf(workerGroup, eventExecutorGroup),
|
||||
serverInitializer
|
||||
)
|
||||
}
|
||||
}
|
@@ -1,4 +1,4 @@
|
||||
package net.woggioni.gbcs
|
||||
package net.woggioni.rbcs.server.auth
|
||||
|
||||
import io.netty.buffer.Unpooled
|
||||
import io.netty.channel.ChannelFutureListener
|
||||
@@ -6,37 +6,60 @@ import io.netty.channel.ChannelHandlerContext
|
||||
import io.netty.channel.ChannelInboundHandlerAdapter
|
||||
import io.netty.handler.codec.http.DefaultFullHttpResponse
|
||||
import io.netty.handler.codec.http.FullHttpResponse
|
||||
import io.netty.handler.codec.http.HttpContent
|
||||
import io.netty.handler.codec.http.HttpHeaderNames
|
||||
import io.netty.handler.codec.http.HttpRequest
|
||||
import io.netty.handler.codec.http.HttpResponseStatus
|
||||
import io.netty.handler.codec.http.HttpVersion
|
||||
import io.netty.util.ReferenceCountUtil
|
||||
import net.woggioni.rbcs.api.Configuration
|
||||
import net.woggioni.rbcs.api.Configuration.Group
|
||||
import net.woggioni.rbcs.api.Role
|
||||
import net.woggioni.rbcs.server.RemoteBuildCacheServer
|
||||
|
||||
abstract class AbstractNettyHttpAuthenticator(private val authorizer : Authorizer)
|
||||
: ChannelInboundHandlerAdapter() {
|
||||
|
||||
private companion object {
|
||||
abstract class AbstractNettyHttpAuthenticator(private val authorizer: Authorizer) : ChannelInboundHandlerAdapter() {
|
||||
|
||||
companion object {
|
||||
private val AUTHENTICATION_FAILED: FullHttpResponse = DefaultFullHttpResponse(
|
||||
HttpVersion.HTTP_1_1, HttpResponseStatus.UNAUTHORIZED, Unpooled.EMPTY_BUFFER).apply {
|
||||
HttpVersion.HTTP_1_1, HttpResponseStatus.UNAUTHORIZED, Unpooled.EMPTY_BUFFER
|
||||
).apply {
|
||||
headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
|
||||
}
|
||||
|
||||
private val NOT_AUTHORIZED: FullHttpResponse = DefaultFullHttpResponse(
|
||||
HttpVersion.HTTP_1_1, HttpResponseStatus.FORBIDDEN, Unpooled.EMPTY_BUFFER).apply {
|
||||
HttpVersion.HTTP_1_1, HttpResponseStatus.FORBIDDEN, Unpooled.EMPTY_BUFFER
|
||||
).apply {
|
||||
headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
|
||||
}
|
||||
}
|
||||
abstract fun authenticate(ctx : ChannelHandlerContext, req : HttpRequest) : String?
|
||||
|
||||
class AuthenticationResult(val user: Configuration.User?, val groups: Set<Group>)
|
||||
|
||||
abstract fun authenticate(ctx: ChannelHandlerContext, req: HttpRequest): AuthenticationResult?
|
||||
|
||||
override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
|
||||
if (msg is HttpRequest) {
|
||||
val user = authenticate(ctx, msg) ?: return authenticationFailure(ctx, msg)
|
||||
val authorized = authorizer.authorize(user, msg)
|
||||
val result = authenticate(ctx, msg) ?: return authenticationFailure(ctx, msg)
|
||||
ctx.channel().attr(RemoteBuildCacheServer.userAttribute).set(result.user)
|
||||
ctx.channel().attr(RemoteBuildCacheServer.groupAttribute).set(result.groups)
|
||||
|
||||
val roles = (
|
||||
(result.user?.let { user ->
|
||||
user.groups.asSequence().flatMap { group ->
|
||||
group.roles.asSequence()
|
||||
}
|
||||
} ?: emptySequence<Role>()) +
|
||||
result.groups.asSequence().flatMap { it.roles.asSequence() }
|
||||
).toSet()
|
||||
val authorized = authorizer.authorize(roles, msg)
|
||||
if (authorized) {
|
||||
super.channelRead(ctx, msg)
|
||||
} else {
|
||||
authorizationFailure(ctx, msg)
|
||||
}
|
||||
} else if(msg is HttpContent) {
|
||||
ctx.fireChannelRead(msg)
|
||||
}
|
||||
}
|
||||
|
@@ -0,0 +1,8 @@
|
||||
package net.woggioni.rbcs.server.auth
|
||||
|
||||
import io.netty.handler.codec.http.HttpRequest
|
||||
import net.woggioni.rbcs.api.Role
|
||||
|
||||
fun interface Authorizer {
|
||||
fun authorize(roles : Set<Role>, request: HttpRequest) : Boolean
|
||||
}
|
@@ -0,0 +1,90 @@
|
||||
package net.woggioni.rbcs.server.auth
|
||||
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import io.netty.channel.ChannelInboundHandlerAdapter
|
||||
import io.netty.handler.ssl.SslHandler
|
||||
import io.netty.handler.ssl.SslHandshakeCompletionEvent
|
||||
import java.security.KeyStore
|
||||
import java.security.cert.CertPathValidator
|
||||
import java.security.cert.CertPathValidatorException
|
||||
import java.security.cert.CertificateException
|
||||
import java.security.cert.CertificateFactory
|
||||
import java.security.cert.PKIXParameters
|
||||
import java.security.cert.PKIXRevocationChecker
|
||||
import java.security.cert.X509Certificate
|
||||
import java.util.EnumSet
|
||||
import javax.net.ssl.SSLSession
|
||||
import javax.net.ssl.TrustManagerFactory
|
||||
import javax.net.ssl.X509TrustManager
|
||||
|
||||
|
||||
class ClientCertificateValidator private constructor(
|
||||
private val sslHandler: SslHandler,
|
||||
private val x509TrustManager: X509TrustManager
|
||||
) : ChannelInboundHandlerAdapter() {
|
||||
override fun userEventTriggered(ctx: ChannelHandlerContext, evt: Any) {
|
||||
if (evt is SslHandshakeCompletionEvent) {
|
||||
if (evt.isSuccess) {
|
||||
val session: SSLSession = sslHandler.engine().session
|
||||
val clientCertificateChain = session.peerCertificates as Array<X509Certificate>
|
||||
val authType: String = clientCertificateChain[0].publicKey.algorithm
|
||||
x509TrustManager.checkClientTrusted(clientCertificateChain, authType)
|
||||
} else {
|
||||
// Handle the failure, for example by closing the channel.
|
||||
}
|
||||
}
|
||||
super.userEventTriggered(ctx, evt)
|
||||
}
|
||||
|
||||
companion object {
|
||||
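// Builds an X509TrustManager that validates client chains against the trust store via PKIX (optionally checking revocation), falling back to the JVM default trust manager when no trust store is provided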
fun getTrustManager(trustStore: KeyStore?, certificateRevocationEnabled: Boolean): X509TrustManager {
|
||||
return if (trustStore != null) {
|
||||
val certificateFactory = CertificateFactory.getInstance("X.509")
|
||||
val validator = CertPathValidator.getInstance("PKIX").apply {
|
||||
val rc = revocationChecker as PKIXRevocationChecker
|
||||
rc.options = EnumSet.of(
|
||||
PKIXRevocationChecker.Option.NO_FALLBACK
|
||||
)
|
||||
}
|
||||
val params = PKIXParameters(trustStore).apply {
|
||||
isRevocationEnabled = certificateRevocationEnabled
|
||||
}
|
||||
object : X509TrustManager {
|
||||
override fun checkClientTrusted(chain: Array<out X509Certificate>, authType: String) {
|
||||
val clientCertificateChain = certificateFactory.generateCertPath(chain.toList())
|
||||
try {
|
||||
validator.validate(clientCertificateChain, params)
|
||||
} catch (ex: CertPathValidatorException) {
|
||||
throw CertificateException(ex)
|
||||
}
|
||||
}
|
||||
|
||||
override fun checkServerTrusted(chain: Array<out X509Certificate>, authType: String) {
|
||||
throw NotImplementedError()
|
||||
}
|
||||
|
||||
private val acceptedIssuers = trustStore.aliases().asSequence()
|
||||
.filter(trustStore::isCertificateEntry)
|
||||
.map(trustStore::getCertificate)
|
||||
.map { it as X509Certificate }
|
||||
.toList()
|
||||
.toTypedArray()
|
||||
|
||||
override fun getAcceptedIssuers() = acceptedIssuers
|
||||
}
|
||||
} else {
|
||||
val trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm())
|
||||
trustManagerFactory.trustManagers.asSequence().filter { it is X509TrustManager }
|
||||
.single() as X509TrustManager
|
||||
}
|
||||
}
|
||||
|
||||
fun of(
|
||||
sslHandler: SslHandler,
|
||||
trustStore: KeyStore?,
|
||||
certificateRevocationEnabled: Boolean
|
||||
): ClientCertificateValidator {
|
||||
return ClientCertificateValidator(sslHandler, getTrustManager(trustStore, certificateRevocationEnabled))
|
||||
}
|
||||
}
|
||||
}
|
@@ -0,0 +1,23 @@
|
||||
package net.woggioni.rbcs.server.auth
|
||||
|
||||
import io.netty.handler.codec.http.HttpMethod
|
||||
import io.netty.handler.codec.http.HttpRequest
|
||||
import net.woggioni.rbcs.api.Role
|
||||
|
||||
class RoleAuthorizer : Authorizer {
|
||||
|
||||
companion object {
|
||||
private val METHOD_MAP = mapOf(
|
||||
Role.Reader to setOf(HttpMethod.GET, HttpMethod.HEAD, HttpMethod.TRACE),
|
||||
Role.Writer to setOf(HttpMethod.PUT, HttpMethod.POST)
|
||||
)
|
||||
}
|
||||
|
||||
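// A request is authorized when any of the caller's roles allows its HTTP method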
override fun authorize(roles: Set<Role>, request: HttpRequest) : Boolean {
|
||||
val allowedMethods = roles.asSequence()
|
||||
.mapNotNull(METHOD_MAP::get)
|
||||
.flatten()
|
||||
.toSet()
|
||||
return request.method() in allowedMethods
|
||||
}
|
||||
}
|
158
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/FileSystemCache.kt
vendored
Normal file
@@ -0,0 +1,158 @@
|
||||
package net.woggioni.rbcs.server.cache
|
||||
|
||||
import net.woggioni.jwo.JWO
|
||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import java.io.ByteArrayOutputStream
|
||||
import java.io.InputStream
|
||||
import java.io.ObjectInputStream
|
||||
import java.io.ObjectOutputStream
|
||||
import java.io.Serializable
|
||||
import java.nio.ByteBuffer
|
||||
import java.nio.channels.Channels
|
||||
import java.nio.channels.FileChannel
|
||||
import java.nio.file.Files
|
||||
import java.nio.file.Path
|
||||
import java.nio.file.StandardCopyOption
|
||||
import java.nio.file.StandardOpenOption
|
||||
import java.nio.file.attribute.BasicFileAttributes
|
||||
import java.time.Duration
|
||||
import java.time.Instant
|
||||
|
||||
class FileSystemCache(
|
||||
val root: Path,
|
||||
val maxAge: Duration
|
||||
) : AutoCloseable {
|
||||
|
||||
class EntryValue(val metadata: CacheValueMetadata, val channel : FileChannel, val offset : Long, val size : Long) : Serializable
|
||||
|
||||
private companion object {
|
||||
private val log = createLogger<FileSystemCache>()
|
||||
}
|
||||
|
||||
init {
|
||||
Files.createDirectories(root)
|
||||
}
|
||||
|
||||
@Volatile
|
||||
private var running = true
|
||||
|
||||
private var nextGc = Instant.now()
|
||||
|
||||
fun get(key: String): EntryValue? =
|
||||
root.resolve(key).takeIf(Files::exists)
|
||||
?.let { file ->
|
||||
val size = Files.size(file)
|
||||
val channel = FileChannel.open(file, StandardOpenOption.READ)
|
||||
val source = Channels.newInputStream(channel)
|
||||
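// On-disk entry layout: a 4-byte length prefix, the serialized metadata, then the (possibly compressed) payload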
val tmp = ByteArray(Integer.BYTES)
|
||||
val buffer = ByteBuffer.wrap(tmp)
|
||||
source.read(tmp)
|
||||
buffer.rewind()
|
||||
val offset = (Integer.BYTES + buffer.getInt()).toLong()
|
||||
var count = 0
|
||||
val wrapper = object : InputStream() {
|
||||
override fun read(): Int {
|
||||
return source.read().also {
|
||||
if (it > 0) count += it
|
||||
}
|
||||
}
|
||||
|
||||
override fun read(b: ByteArray, off: Int, len: Int): Int {
|
||||
return source.read(b, off, len).also {
|
||||
if (it > 0) count += it
|
||||
}
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
}
|
||||
}
|
||||
val metadata = ObjectInputStream(wrapper).use { ois ->
|
||||
ois.readObject() as CacheValueMetadata
|
||||
}
|
||||
EntryValue(metadata, channel, offset, size)
|
||||
}
|
||||
|
||||
class FileSink(metadata: CacheValueMetadata, private val path: Path, private val tmpFile: Path) {
|
||||
val channel: FileChannel
|
||||
|
||||
init {
|
||||
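// Write the length-prefixed metadata header to the temporary file before the payload is appended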
val baos = ByteArrayOutputStream()
|
||||
ObjectOutputStream(baos).use {
|
||||
it.writeObject(metadata)
|
||||
}
|
||||
Files.newOutputStream(tmpFile).use {
|
||||
val bytes = baos.toByteArray()
|
||||
val buffer = ByteBuffer.allocate(Integer.BYTES)
|
||||
buffer.putInt(bytes.size)
|
||||
buffer.rewind()
|
||||
it.write(buffer.array())
|
||||
it.write(bytes)
|
||||
}
|
||||
channel = FileChannel.open(tmpFile, StandardOpenOption.APPEND)
|
||||
}
|
||||
|
||||
fun commit() {
|
||||
channel.close()
|
||||
Files.move(tmpFile, path, StandardCopyOption.ATOMIC_MOVE)
|
||||
}
|
||||
|
||||
fun rollback() {
|
||||
channel.close()
|
||||
Files.delete(tmpFile)
|
||||
}
|
||||
}
|
||||
|
||||
fun put(
|
||||
key: String,
|
||||
metadata: CacheValueMetadata,
|
||||
): FileSink {
|
||||
val file = root.resolve(key)
|
||||
val tmpFile = Files.createTempFile(root, null, ".tmp")
|
||||
return FileSink(metadata, file, tmpFile)
|
||||
}
|
||||
|
||||
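// Background virtual thread that periodically deletes entries older than maxAge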
private val garbageCollector = Thread.ofVirtual().name("file-system-cache-gc").start {
|
||||
while (running) {
|
||||
gc()
|
||||
}
|
||||
}
|
||||
|
||||
private fun gc() {
|
||||
val now = Instant.now()
|
||||
if (nextGc < now) {
|
||||
val oldestEntry = actualGc(now)
|
||||
nextGc = (oldestEntry ?: now).plus(maxAge)
|
||||
}
|
||||
Thread.sleep(minOf(Duration.between(now, nextGc), Duration.ofSeconds(1)))
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the creation timestamp of the oldest cache entry (if any)
|
||||
*/
|
||||
private fun actualGc(now: Instant): Instant? {
|
||||
var result: Instant? = null
|
||||
Files.list(root)
|
||||
.filter { path ->
|
||||
JWO.splitExtension(path)
|
||||
.map { it._2 }
|
||||
.map { it != ".tmp" }
|
||||
.orElse(true)
|
||||
}
|
||||
.filter {
|
||||
val creationTimeStamp = Files.readAttributes(it, BasicFileAttributes::class.java)
|
||||
.creationTime()
|
||||
.toInstant()
|
||||
if (result == null || creationTimeStamp < result) {
|
||||
result = creationTimeStamp
|
||||
}
|
||||
now > creationTimeStamp.plus(maxAge)
|
||||
}.forEach(Files::delete)
|
||||
return result
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
running = false
|
||||
garbageCollector.join()
|
||||
}
|
||||
}
|
32
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/FileSystemCacheConfiguration.kt
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
package net.woggioni.rbcs.server.cache
|
||||
|
||||
import net.woggioni.jwo.Application
|
||||
import net.woggioni.rbcs.api.CacheHandlerFactory
|
||||
import net.woggioni.rbcs.api.Configuration
|
||||
import net.woggioni.rbcs.common.RBCS
|
||||
import java.nio.file.Path
|
||||
import java.time.Duration
|
||||
|
||||
data class FileSystemCacheConfiguration(
|
||||
val root: Path?,
|
||||
val maxAge: Duration,
|
||||
val digestAlgorithm : String?,
|
||||
val compressionEnabled: Boolean,
|
||||
val compressionLevel: Int,
|
||||
val chunkSize: Int,
|
||||
) : Configuration.Cache {
|
||||
|
||||
override fun materialize() = object : CacheHandlerFactory {
|
||||
private val cache = FileSystemCache(root ?: Application.builder("rbcs").build().computeCacheDirectory(), maxAge)
|
||||
|
||||
override fun close() {
|
||||
cache.close()
|
||||
}
|
||||
|
||||
override fun newHandler() = FileSystemCacheHandler(cache, digestAlgorithm, compressionEnabled, compressionLevel, chunkSize)
|
||||
}
|
||||
|
||||
override fun getNamespaceURI() = RBCS.RBCS_NAMESPACE_URI
|
||||
|
||||
override fun getTypeName() = "fileSystemCacheType"
|
||||
}
|
122
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/FileSystemCacheHandler.kt
vendored
Normal file
@@ -0,0 +1,122 @@
|
||||
package net.woggioni.rbcs.server.cache
|
||||
|
||||
import io.netty.buffer.ByteBuf
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import io.netty.channel.SimpleChannelInboundHandler
|
||||
import io.netty.handler.codec.http.LastHttpContent
|
||||
import io.netty.handler.stream.ChunkedNioFile
|
||||
import net.woggioni.rbcs.api.message.CacheMessage
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheContent
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheGetRequest
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CachePutRequest
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CachePutResponse
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueFoundResponse
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueNotFoundResponse
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.LastCacheContent
|
||||
import net.woggioni.rbcs.common.RBCS.processCacheKey
|
||||
import java.nio.channels.Channels
|
||||
import java.util.Base64
|
||||
import java.util.zip.Deflater
|
||||
import java.util.zip.DeflaterOutputStream
|
||||
import java.util.zip.InflaterInputStream
|
||||
|
||||
class FileSystemCacheHandler(
|
||||
private val cache: FileSystemCache,
|
||||
private val digestAlgorithm: String?,
|
||||
private val compressionEnabled: Boolean,
|
||||
private val compressionLevel: Int,
|
||||
private val chunkSize: Int
|
||||
) : SimpleChannelInboundHandler<CacheMessage>() {
|
||||
|
||||
private inner class InProgressPutRequest(
|
||||
val key : String,
|
||||
private val fileSink : FileSystemCache.FileSink
|
||||
) {
|
||||
|
||||
private val stream = Channels.newOutputStream(fileSink.channel).let {
|
||||
if (compressionEnabled) {
|
||||
DeflaterOutputStream(it, Deflater(compressionLevel))
|
||||
} else {
|
||||
it
|
||||
}
|
||||
}
|
||||
|
||||
fun write(buf: ByteBuf) {
|
||||
buf.readBytes(stream, buf.readableBytes())
|
||||
}
|
||||
|
||||
fun commit() {
|
||||
stream.close()
|
||||
fileSink.commit()
|
||||
}
|
||||
|
||||
fun rollback() {
|
||||
fileSink.rollback()
|
||||
}
|
||||
}
|
||||
|
||||
private var inProgressPutRequest: InProgressPutRequest? = null
|
||||
|
||||
override fun channelRead0(ctx: ChannelHandlerContext, msg: CacheMessage) {
|
||||
when (msg) {
|
||||
is CacheGetRequest -> handleGetRequest(ctx, msg)
|
||||
is CachePutRequest -> handlePutRequest(ctx, msg)
|
||||
is LastCacheContent -> handleLastCacheContent(ctx, msg)
|
||||
is CacheContent -> handleCacheContent(ctx, msg)
|
||||
else -> ctx.fireChannelRead(msg)
|
||||
}
|
||||
}
|
||||
|
||||
private fun handleGetRequest(ctx: ChannelHandlerContext, msg: CacheGetRequest) {
|
||||
val key = String(Base64.getUrlEncoder().encode(processCacheKey(msg.key, digestAlgorithm)))
|
||||
cache.get(key)?.also { entryValue ->
|
||||
ctx.writeAndFlush(CacheValueFoundResponse(msg.key, entryValue.metadata))
|
||||
entryValue.channel.let { channel ->
|
||||
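// Stream the payload back in chunkSize pieces, inflating it on the fly when compression is enabled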
if(compressionEnabled) {
|
||||
InflaterInputStream(Channels.newInputStream(channel)).use { stream ->
|
||||
|
||||
outerLoop@
|
||||
while (true) {
|
||||
val buf = ctx.alloc().heapBuffer(chunkSize)
|
||||
while(buf.readableBytes() < chunkSize) {
|
||||
val read = buf.writeBytes(stream, chunkSize)
|
||||
if(read < 0) {
|
||||
ctx.writeAndFlush(LastCacheContent(buf))
|
||||
break@outerLoop
|
||||
}
|
||||
}
|
||||
ctx.writeAndFlush(CacheContent(buf))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
ctx.writeAndFlush(ChunkedNioFile(channel, entryValue.offset, entryValue.size - entryValue.offset, chunkSize))
|
||||
ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT)
|
||||
}
|
||||
}
|
||||
} ?: ctx.writeAndFlush(CacheValueNotFoundResponse())
|
||||
}
|
||||
|
||||
private fun handlePutRequest(ctx: ChannelHandlerContext, msg: CachePutRequest) {
|
||||
val key = String(Base64.getUrlEncoder().encode(processCacheKey(msg.key, digestAlgorithm)))
|
||||
val sink = cache.put(key, msg.metadata)
|
||||
inProgressPutRequest = InProgressPutRequest(msg.key, sink)
|
||||
}
|
||||
|
||||
private fun handleCacheContent(ctx: ChannelHandlerContext, msg: CacheContent) {
|
||||
inProgressPutRequest!!.write(msg.content())
|
||||
}
|
||||
|
||||
private fun handleLastCacheContent(ctx: ChannelHandlerContext, msg: LastCacheContent) {
|
||||
inProgressPutRequest?.let { request ->
|
||||
inProgressPutRequest = null
|
||||
request.write(msg.content())
|
||||
request.commit()
|
||||
ctx.writeAndFlush(CachePutResponse(request.key))
|
||||
}
|
||||
}
|
||||
|
||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
||||
inProgressPutRequest?.rollback()
|
||||
super.exceptionCaught(ctx, cause)
|
||||
}
|
||||
}
|
70
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/FileSystemCacheProvider.kt
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
package net.woggioni.rbcs.server.cache
|
||||
|
||||
import net.woggioni.rbcs.api.CacheProvider
|
||||
import net.woggioni.rbcs.common.RBCS
|
||||
import net.woggioni.rbcs.common.Xml
|
||||
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
|
||||
import org.w3c.dom.Document
|
||||
import org.w3c.dom.Element
|
||||
import java.nio.file.Path
|
||||
import java.time.Duration
|
||||
import java.util.zip.Deflater
|
||||
|
||||
class FileSystemCacheProvider : CacheProvider<FileSystemCacheConfiguration> {
|
||||
|
||||
override fun getXmlSchemaLocation() = "classpath:net/woggioni/rbcs/server/schema/rbcs.xsd"
|
||||
|
||||
override fun getXmlType() = "fileSystemCacheType"
|
||||
|
||||
override fun getXmlNamespace() = "urn:net.woggioni.rbcs.server"
|
||||
|
||||
override fun deserialize(el: Element): FileSystemCacheConfiguration {
|
||||
val path = el.renderAttribute("path")
|
||||
?.let(Path::of)
|
||||
val maxAge = el.renderAttribute("max-age")
|
||||
?.let(Duration::parse)
|
||||
?: Duration.ofDays(1)
|
||||
val enableCompression = el.renderAttribute("enable-compression")
|
||||
?.let(String::toBoolean)
|
||||
?: true
|
||||
val compressionLevel = el.renderAttribute("compression-level")
|
||||
?.let(String::toInt)
|
||||
?: Deflater.DEFAULT_COMPRESSION
|
||||
val digestAlgorithm = el.renderAttribute("digest") ?: "MD5"
|
||||
val chunkSize = el.renderAttribute("chunk-size")
|
||||
?.let(Integer::decode)
|
||||
?: 0x10000
|
||||
|
||||
return FileSystemCacheConfiguration(
|
||||
path,
|
||||
maxAge,
|
||||
digestAlgorithm,
|
||||
enableCompression,
|
||||
compressionLevel,
|
||||
chunkSize
|
||||
)
|
||||
}
|
||||
|
||||
override fun serialize(doc: Document, cache : FileSystemCacheConfiguration) = cache.run {
|
||||
val result = doc.createElement("cache")
|
||||
Xml.of(doc, result) {
|
||||
val prefix = doc.lookupPrefix(RBCS.RBCS_NAMESPACE_URI)
|
||||
attr("xs:type", "${prefix}:fileSystemCacheType", RBCS.XML_SCHEMA_NAMESPACE_URI)
|
||||
root?.let {
|
||||
attr("path", it.toString())
|
||||
}
|
||||
attr("max-age", maxAge.toString())
|
||||
digestAlgorithm?.let { digestAlgorithm ->
|
||||
attr("digest", digestAlgorithm)
|
||||
}
|
||||
attr("enable-compression", compressionEnabled.toString())
|
||||
compressionLevel.takeIf {
|
||||
it != Deflater.DEFAULT_COMPRESSION
|
||||
}?.let {
|
||||
attr("compression-level", it.toString())
|
||||
}
|
||||
attr("chunk-size", chunkSize.toString())
|
||||
}
|
||||
result
|
||||
}
|
||||
}
|
115
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/InMemoryCache.kt
vendored
Normal file
@@ -0,0 +1,115 @@
|
||||
package net.woggioni.rbcs.server.cache
|
||||
|
||||
import io.netty.buffer.ByteBuf
|
||||
import net.woggioni.rbcs.api.CacheValueMetadata
|
||||
import net.woggioni.rbcs.common.createLogger
|
||||
import java.time.Duration
|
||||
import java.time.Instant
|
||||
import java.util.concurrent.ConcurrentHashMap
|
||||
import java.util.concurrent.PriorityBlockingQueue
|
||||
import java.util.concurrent.TimeUnit
|
||||
import java.util.concurrent.atomic.AtomicLong
|
||||
|
||||
private class CacheKey(private val value: ByteArray) {
|
||||
override fun equals(other: Any?) = if (other is CacheKey) {
|
||||
value.contentEquals(other.value)
|
||||
} else false
|
||||
|
||||
override fun hashCode() = value.contentHashCode()
|
||||
}
|
||||
|
||||
class CacheEntry(
|
||||
val metadata: CacheValueMetadata,
|
||||
val content: ByteBuf
|
||||
)
|
||||
|
||||
class InMemoryCache(
|
||||
private val maxAge: Duration,
|
||||
private val maxSize: Long
|
||||
) : AutoCloseable {
|
||||
|
||||
companion object {
|
||||
private val log = createLogger<InMemoryCache>()
|
||||
}
|
||||
|
||||
private val size = AtomicLong()
|
||||
private val map = ConcurrentHashMap<CacheKey, CacheEntry>()
|
||||
|
||||
private class RemovalQueueElement(val key: CacheKey, val value: CacheEntry, val expiry: Instant) :
|
||||
Comparable<RemovalQueueElement> {
|
||||
override fun compareTo(other: RemovalQueueElement) = expiry.compareTo(other.expiry)
|
||||
}
|
||||
|
||||
private val removalQueue = PriorityBlockingQueue<RemovalQueueElement>()
|
||||
|
||||
@Volatile
|
||||
private var running = true
|
||||
|
||||
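// Expired entries are evicted by this background virtual thread, which drains the removal queue in expiry order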
private val garbageCollector = Thread.ofVirtual().name("in-memory-cache-gc").start {
|
||||
while (running) {
|
||||
val el = removalQueue.poll(1, TimeUnit.SECONDS) ?: continue
|
||||
val value = el.value
|
||||
val now = Instant.now()
|
||||
if (now > el.expiry) {
|
||||
val removed = map.remove(el.key, value)
|
||||
if (removed) {
|
||||
updateSizeAfterRemoval(value.content)
|
||||
// Decrease the reference count held by the map
|
||||
value.content.release()
|
||||
}
|
||||
} else {
|
||||
removalQueue.put(el)
|
||||
Thread.sleep(minOf(Duration.between(now, el.expiry), Duration.ofSeconds(1)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private fun removeEldest(): Long {
|
||||
while (true) {
|
||||
val el = removalQueue.take()
|
||||
val value = el.value
|
||||
val removed = map.remove(el.key, value)
|
||||
if (removed) {
|
||||
val newSize = updateSizeAfterRemoval(value.content)
|
||||
// Decrease the reference count held by the map
|
||||
value.content.release()
|
||||
return newSize
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private fun updateSizeAfterRemoval(removed: ByteBuf): Long {
|
||||
return size.updateAndGet { currentSize: Long ->
|
||||
currentSize - removed.readableBytes()
|
||||
}
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
running = false
|
||||
garbageCollector.join()
|
||||
}
|
||||
|
||||
fun get(key: ByteArray) = map[CacheKey(key)]?.run {
|
||||
CacheEntry(metadata, content.retainedDuplicate())
|
||||
}
|
||||
|
||||
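// Adjust the running size estimate for any replaced entry and evict the entries closest to expiry until the total fits within maxSize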
fun put(
|
||||
key: ByteArray,
|
||||
value: CacheEntry,
|
||||
) {
|
||||
val cacheKey = CacheKey(key)
|
||||
val oldSize = map.put(cacheKey, value)?.let { old ->
|
||||
val result = old.content.readableBytes()
|
||||
old.content.release()
|
||||
result
|
||||
} ?: 0
|
||||
val delta = value.content.readableBytes() - oldSize
|
||||
var newSize = size.updateAndGet { currentSize: Long ->
|
||||
currentSize + delta
|
||||
}
|
||||
removalQueue.put(RemovalQueueElement(cacheKey, value, Instant.now().plus(maxAge)))
|
||||
while (newSize > maxSize) {
|
||||
newSize = removeEldest()
|
||||
}
|
||||
}
|
||||
}
|
29
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/InMemoryCacheConfiguration.kt
vendored
Normal file
@@ -0,0 +1,29 @@
|
||||
package net.woggioni.rbcs.server.cache
|
||||
|
||||
import net.woggioni.rbcs.api.CacheHandlerFactory
|
||||
import net.woggioni.rbcs.api.Configuration
|
||||
import net.woggioni.rbcs.common.RBCS
|
||||
import java.time.Duration
|
||||
|
||||
data class InMemoryCacheConfiguration(
|
||||
val maxAge: Duration,
|
||||
val maxSize: Long,
|
||||
val digestAlgorithm : String?,
|
||||
val compressionEnabled: Boolean,
|
||||
val compressionLevel: Int,
|
||||
val chunkSize : Int
|
||||
) : Configuration.Cache {
|
||||
override fun materialize() = object : CacheHandlerFactory {
|
||||
private val cache = InMemoryCache(maxAge, maxSize)
|
||||
|
||||
override fun close() {
|
||||
cache.close()
|
||||
}
|
||||
|
||||
override fun newHandler() = InMemoryCacheHandler(cache, digestAlgorithm, compressionEnabled, compressionLevel)
|
||||
}
|
||||
|
||||
override fun getNamespaceURI() = RBCS.RBCS_NAMESPACE_URI
|
||||
|
||||
override fun getTypeName() = "inMemoryCacheType"
|
||||
}
|
135
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/InMemoryCacheHandler.kt
vendored
Normal file
@@ -0,0 +1,135 @@
|
||||
package net.woggioni.rbcs.server.cache
|
||||
|
||||
import io.netty.buffer.ByteBuf
|
||||
import io.netty.channel.ChannelHandlerContext
|
||||
import io.netty.channel.SimpleChannelInboundHandler
|
||||
import net.woggioni.rbcs.api.message.CacheMessage
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheContent
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheGetRequest
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CachePutRequest
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CachePutResponse
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueFoundResponse
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueNotFoundResponse
|
||||
import net.woggioni.rbcs.api.message.CacheMessage.LastCacheContent
|
||||
import net.woggioni.rbcs.common.ByteBufOutputStream
|
||||
import net.woggioni.rbcs.common.RBCS.processCacheKey
|
||||
import java.util.zip.Deflater
|
||||
import java.util.zip.DeflaterOutputStream
|
||||
import java.util.zip.InflaterOutputStream
|
||||
|
||||
class InMemoryCacheHandler(
|
||||
private val cache: InMemoryCache,
|
||||
private val digestAlgorithm: String?,
|
||||
private val compressionEnabled: Boolean,
|
||||
private val compressionLevel: Int
|
||||
) : SimpleChannelInboundHandler<CacheMessage>() {
|
||||
|
||||
private interface InProgressPutRequest : AutoCloseable {
|
||||
val request: CachePutRequest
|
||||
val buf: ByteBuf
|
||||
|
||||
fun append(buf: ByteBuf)
|
||||
}
|
||||
|
||||
private inner class InProgressPlainPutRequest(ctx: ChannelHandlerContext, override val request: CachePutRequest) :
|
||||
InProgressPutRequest {
|
||||
override val buf = ctx.alloc().compositeBuffer()
|
||||
|
||||
private val stream = ByteBufOutputStream(buf).let {
|
||||
if (compressionEnabled) {
|
||||
DeflaterOutputStream(it, Deflater(compressionLevel))
|
||||
} else {
|
||||
it
|
||||
}
|
||||
}
|
||||
|
||||
override fun append(buf: ByteBuf) {
|
||||
this.buf.addComponent(true, buf.retain())
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
buf.release()
|
||||
}
|
||||
}
|
||||
|
||||
private inner class InProgressCompressedPutRequest(
|
||||
ctx: ChannelHandlerContext,
|
||||
override val request: CachePutRequest
|
||||
) : InProgressPutRequest {
|
||||
|
||||
override val buf = ctx.alloc().heapBuffer()
|
||||
|
||||
private val stream = ByteBufOutputStream(buf).let {
|
||||
DeflaterOutputStream(it, Deflater(compressionLevel))
|
||||
}
|
||||
|
||||
override fun append(buf: ByteBuf) {
|
||||
buf.readBytes(stream, buf.readableBytes())
|
||||
}
|
||||
|
||||
override fun close() {
|
||||
stream.close()
|
||||
}
|
||||
}
|
||||
|
||||
private var inProgressPutRequest: InProgressPutRequest? = null
|
||||
|
||||
override fun channelRead0(ctx: ChannelHandlerContext, msg: CacheMessage) {
|
||||
when (msg) {
|
||||
is CacheGetRequest -> handleGetRequest(ctx, msg)
|
||||
is CachePutRequest -> handlePutRequest(ctx, msg)
|
||||
is LastCacheContent -> handleLastCacheContent(ctx, msg)
|
||||
is CacheContent -> handleCacheContent(ctx, msg)
|
||||
else -> ctx.fireChannelRead(msg)
|
||||
}
|
||||
}
|
||||
|
||||
private fun handleGetRequest(ctx: ChannelHandlerContext, msg: CacheGetRequest) {
|
||||
cache.get(processCacheKey(msg.key, digestAlgorithm))?.let { value ->
|
||||
ctx.writeAndFlush(CacheValueFoundResponse(msg.key, value.metadata))
|
||||
if (compressionEnabled) {
|
||||
val buf = ctx.alloc().heapBuffer()
|
||||
InflaterOutputStream(ByteBufOutputStream(buf)).use {
|
||||
value.content.readBytes(it, value.content.readableBytes())
|
||||
buf.retain()
|
||||
}
|
||||
ctx.writeAndFlush(LastCacheContent(buf))
|
||||
} else {
|
||||
ctx.writeAndFlush(LastCacheContent(value.content))
|
||||
}
|
||||
} ?: ctx.writeAndFlush(CacheValueNotFoundResponse())
|
||||
}
|
||||
|
||||
private fun handlePutRequest(ctx: ChannelHandlerContext, msg: CachePutRequest) {
|
||||
inProgressPutRequest = if(compressionEnabled) {
|
||||
InProgressCompressedPutRequest(ctx, msg)
|
||||
} else {
|
||||
InProgressPlainPutRequest(ctx, msg)
|
||||
}
|
||||
}
|
||||
|
||||
private fun handleCacheContent(ctx: ChannelHandlerContext, msg: CacheContent) {
|
||||
inProgressPutRequest?.append(msg.content())
|
||||
}
|
||||
|
||||
private fun handleLastCacheContent(ctx: ChannelHandlerContext, msg: LastCacheContent) {
|
||||
handleCacheContent(ctx, msg)
|
||||
inProgressPutRequest?.let { inProgressRequest ->
|
||||
inProgressPutRequest = null
|
||||
val buf = inProgressRequest.buf
|
||||
buf.retain()
|
||||
inProgressRequest.close()
|
||||
val cacheKey = processCacheKey(inProgressRequest.request.key, digestAlgorithm)
|
||||
cache.put(cacheKey, CacheEntry(inProgressRequest.request.metadata, buf))
|
||||
ctx.writeAndFlush(CachePutResponse(inProgressRequest.request.key))
|
||||
}
|
||||
}
|
||||
|
||||
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
|
||||
inProgressPutRequest?.let { req ->
|
||||
req.buf.release()
|
||||
inProgressPutRequest = null
|
||||
}
|
||||
super.exceptionCaught(ctx, cause)
|
||||
}
|
||||
}
|
67
rbcs-server/src/main/kotlin/net/woggioni/rbcs/server/cache/InMemoryCacheProvider.kt
vendored
Normal file
@@ -0,0 +1,67 @@
|
||||
package net.woggioni.rbcs.server.cache
|
||||
|
||||
import net.woggioni.rbcs.api.CacheProvider
|
||||
import net.woggioni.rbcs.common.RBCS
|
||||
import net.woggioni.rbcs.common.Xml
|
||||
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
|
||||
import org.w3c.dom.Document
|
||||
import org.w3c.dom.Element
|
||||
import java.time.Duration
|
||||
import java.util.zip.Deflater
|
||||
|
||||
class InMemoryCacheProvider : CacheProvider<InMemoryCacheConfiguration> {
|
||||
|
||||
override fun getXmlSchemaLocation() = "classpath:net/woggioni/rbcs/server/schema/rbcs.xsd"
|
||||
|
||||
override fun getXmlType() = "inMemoryCacheType"
|
||||
|
||||
override fun getXmlNamespace() = "urn:net.woggioni.rbcs.server"
|
||||
|
||||
override fun deserialize(el: Element): InMemoryCacheConfiguration {
|
||||
val maxAge = el.renderAttribute("max-age")
|
||||
?.let(Duration::parse)
|
||||
?: Duration.ofDays(1)
|
||||
val maxSize = el.renderAttribute("max-size")
|
||||
?.let(java.lang.Long::decode)
|
||||
?: 0x1000000
|
||||
val enableCompression = el.renderAttribute("enable-compression")
|
||||
?.let(String::toBoolean)
|
||||
?: true
|
||||
val compressionLevel = el.renderAttribute("compression-level")
|
||||
?.let(String::toInt)
|
||||
?: Deflater.DEFAULT_COMPRESSION
|
||||
val digestAlgorithm = el.renderAttribute("digest") ?: "MD5"
|
||||
val chunkSize = el.renderAttribute("chunk-size")
|
||||
?.let(Integer::decode)
|
||||
?: 0x10000
|
||||
return InMemoryCacheConfiguration(
|
||||
maxAge,
|
||||
maxSize,
|
||||
digestAlgorithm,
|
||||
enableCompression,
|
||||
compressionLevel,
|
||||
chunkSize
|
||||
)
|
||||
}
|
||||
|
||||
override fun serialize(doc: Document, cache : InMemoryCacheConfiguration) = cache.run {
|
||||
val result = doc.createElement("cache")
|
||||
Xml.of(doc, result) {
|
||||
val prefix = doc.lookupPrefix(RBCS.RBCS_NAMESPACE_URI)
|
||||
attr("xs:type", "${prefix}:inMemoryCacheType", RBCS.XML_SCHEMA_NAMESPACE_URI)
|
||||
attr("max-age", maxAge.toString())
|
||||
attr("max-size", maxSize.toString())
|
||||
digestAlgorithm?.let { digestAlgorithm ->
|
||||
attr("digest", digestAlgorithm)
|
||||
}
|
||||
attr("enable-compression", compressionEnabled.toString())
|
||||
compressionLevel.takeIf {
|
||||
it != Deflater.DEFAULT_COMPRESSION
|
||||
}?.let {
|
||||
attr("compression-level", it.toString())
|
||||
}
|
||||
attr("chunk-size", chunkSize.toString())
|
||||
}
|
||||
result
|
||||
}
|
||||
}
|
@@ -0,0 +1,15 @@
|
||||
package net.woggioni.rbcs.server.configuration
|
||||
|
||||
import net.woggioni.rbcs.api.CacheProvider
|
||||
import net.woggioni.rbcs.api.Configuration
|
||||
import java.util.ServiceLoader
|
||||
|
||||
object CacheSerializers {
|
||||
val index = (Configuration::class.java.module.layer?.let { layer ->
|
||||
ServiceLoader.load(layer, CacheProvider::class.java)
|
||||
} ?: ServiceLoader.load(CacheProvider::class.java))
|
||||
.asSequence()
|
||||
.map {
|
||||
(it.xmlNamespace to it.xmlType) to it
|
||||
}.toMap()
|
||||
}
|
@@ -0,0 +1,298 @@
|
||||
package net.woggioni.rbcs.server.configuration
|
||||
|
||||
import net.woggioni.rbcs.api.Configuration
|
||||
import net.woggioni.rbcs.api.Configuration.Authentication
|
||||
import net.woggioni.rbcs.api.Configuration.BasicAuthentication
|
||||
import net.woggioni.rbcs.api.Configuration.Cache
|
||||
import net.woggioni.rbcs.api.Configuration.ClientCertificateAuthentication
|
||||
import net.woggioni.rbcs.api.Configuration.Group
|
||||
import net.woggioni.rbcs.api.Configuration.KeyStore
|
||||
import net.woggioni.rbcs.api.Configuration.Tls
|
||||
import net.woggioni.rbcs.api.Configuration.TlsCertificateExtractor
|
||||
import net.woggioni.rbcs.api.Configuration.TrustStore
|
||||
import net.woggioni.rbcs.api.Configuration.User
|
||||
import net.woggioni.rbcs.api.Role
|
||||
import net.woggioni.rbcs.api.exception.ConfigurationException
|
||||
import net.woggioni.rbcs.common.Xml.Companion.asIterable
|
||||
import net.woggioni.rbcs.common.Xml.Companion.renderAttribute
|
||||
import org.w3c.dom.Document
|
||||
import org.w3c.dom.Element
|
||||
import org.w3c.dom.TypeInfo
|
||||
import java.nio.file.Paths
|
||||
import java.time.Duration
|
||||
import java.time.temporal.ChronoUnit
|
||||
|
||||
object Parser {
|
||||
fun parse(document: Document): Configuration {
|
||||
val root = document.documentElement
|
||||
val anonymousUser = User("", null, emptySet(), null)
|
||||
var connection: Configuration.Connection = Configuration.Connection(
|
||||
Duration.of(10, ChronoUnit.SECONDS),
|
||||
Duration.of(10, ChronoUnit.SECONDS),
|
||||
Duration.of(60, ChronoUnit.SECONDS),
|
||||
Duration.of(30, ChronoUnit.SECONDS),
|
||||
Duration.of(30, ChronoUnit.SECONDS),
|
||||
67108864
|
||||
)
|
||||
var eventExecutor: Configuration.EventExecutor = Configuration.EventExecutor(true)
|
||||
var cache: Cache? = null
|
||||
var host = "127.0.0.1"
|
||||
var port = 11080
|
||||
var users: Map<String, User> = mapOf(anonymousUser.name to anonymousUser)
|
||||
var groups = emptyMap<String, Group>()
|
||||
var tls: Tls? = null
|
||||
val serverPath = root.renderAttribute("path")
|
||||
var incomingConnectionsBacklogSize = 1024
|
||||
var authentication: Authentication? = null
|
||||
for (child in root.asIterable()) {
|
||||
val tagName = child.localName
|
||||
when (tagName) {
|
||||
"authentication" -> {
|
||||
for (gchild in child.asIterable()) {
|
||||
when (gchild.localName) {
|
||||
"basic" -> {
|
||||
authentication = BasicAuthentication()
|
||||
}
|
||||
|
||||
"client-certificate" -> {
|
||||
var tlsExtractorUser: TlsCertificateExtractor? = null
|
||||
var tlsExtractorGroup: TlsCertificateExtractor? = null
|
||||
for (ggchild in gchild.asIterable()) {
|
||||
when (ggchild.localName) {
|
||||
"group-extractor" -> {
|
||||
val attrName = ggchild.renderAttribute("attribute-name")
|
||||
val pattern = ggchild.renderAttribute("pattern")
|
||||
tlsExtractorGroup = TlsCertificateExtractor(attrName, pattern)
|
||||
}
|
||||
|
||||
"user-extractor" -> {
|
||||
val attrName = ggchild.renderAttribute("attribute-name")
|
||||
val pattern = ggchild.renderAttribute("pattern")
|
||||
tlsExtractorUser = TlsCertificateExtractor(attrName, pattern)
|
||||
}
|
||||
}
|
||||
}
|
||||
authentication = ClientCertificateAuthentication(tlsExtractorUser, tlsExtractorGroup)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
"authorization" -> {
|
||||
var knownUsers = sequenceOf(anonymousUser)
|
||||
for (gchild in child.asIterable()) {
|
||||
when (gchild.localName) {
|
||||
"users" -> {
|
||||
knownUsers += parseUsers(gchild)
|
||||
}
|
||||
|
||||
"groups" -> {
|
||||
val pair = parseGroups(gchild, knownUsers)
|
||||
users = pair.first
|
||||
groups = pair.second
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
"bind" -> {
|
||||
host = child.renderAttribute("host") ?: throw ConfigurationException("host attribute is required")
|
||||
port = Integer.parseInt(child.renderAttribute("port"))
|
||||
incomingConnectionsBacklogSize = child.renderAttribute("incoming-connections-backlog-size")
|
||||
?.let(Integer::parseInt)
|
||||
?: 1024
|
||||
}
|
||||
|
||||
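// The concrete cache implementation is selected from the element's xsi:type, looked up in the CacheProvider index built via ServiceLoader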
"cache" -> {
|
||||
cache = (child as TypeInfo).let { tf ->
|
||||
val typeNamespace = tf.typeNamespace
|
||||
val typeName = tf.typeName
|
||||
CacheSerializers.index[typeNamespace to typeName]
|
||||
?: throw IllegalArgumentException("Cache provider for namespace '$typeNamespace' with name '$typeName' not found")
|
||||
}.deserialize(child)
|
||||
}
|
||||
|
||||
"connection" -> {
|
||||
val writeTimeout = child.renderAttribute("write-timeout")
|
||||
?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
|
||||
val readTimeout = child.renderAttribute("read-timeout")
|
||||
?.let(Duration::parse) ?: Duration.of(0, ChronoUnit.SECONDS)
|
||||
val idleTimeout = child.renderAttribute("idle-timeout")
|
||||
?.let(Duration::parse) ?: Duration.of(30, ChronoUnit.SECONDS)
|
||||
val readIdleTimeout = child.renderAttribute("read-idle-timeout")
|
||||
?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
|
||||
val writeIdleTimeout = child.renderAttribute("write-idle-timeout")
|
||||
?.let(Duration::parse) ?: Duration.of(60, ChronoUnit.SECONDS)
|
||||
val maxRequestSize = child.renderAttribute("max-request-size")
|
||||
?.let(Integer::decode) ?: 0x4000000
|
||||
connection = Configuration.Connection(
|
||||
readTimeout,
|
||||
writeTimeout,
|
||||
idleTimeout,
|
||||
readIdleTimeout,
|
||||
writeIdleTimeout,
|
||||
maxRequestSize
|
||||
)
|
||||
}
|
||||
|
||||
"event-executor" -> {
|
||||
val useVirtualThread = child.renderAttribute("use-virtual-threads")
|
||||
?.let(String::toBoolean) ?: true
|
||||
eventExecutor = Configuration.EventExecutor(useVirtualThread)
|
||||
}
|
||||
|
||||
"tls" -> {
|
||||
var keyStore: KeyStore? = null
|
||||
var trustStore: TrustStore? = null
|
||||
|
||||
for (granChild in child.asIterable()) {
|
||||
when (granChild.localName) {
|
||||
"keystore" -> {
|
||||
val keyStoreFile = Paths.get(granChild.renderAttribute("file"))
|
||||
val keyStorePassword = granChild.renderAttribute("password")
|
||||
val keyAlias = granChild.renderAttribute("key-alias")
|
||||
val keyPassword = granChild.renderAttribute("key-password")
|
||||
keyStore = KeyStore(
|
||||
keyStoreFile,
|
||||
keyStorePassword,
|
||||
keyAlias,
|
||||
keyPassword
|
||||
)
|
||||
}
|
||||
|
||||
"truststore" -> {
|
||||
val trustStoreFile = Paths.get(granChild.renderAttribute("file"))
|
||||
val trustStorePassword = granChild.renderAttribute("password")
|
||||
val checkCertificateStatus = granChild.renderAttribute("check-certificate-status")
|
||||
?.let(String::toBoolean)
|
||||
?: false
|
||||
val requireClientCertificate = child.renderAttribute("require-client-certificate")
|
||||
?.let(String::toBoolean) ?: false
|
||||
|
||||
trustStore = TrustStore(
|
||||
trustStoreFile,
|
||||
trustStorePassword,
|
||||
checkCertificateStatus,
|
||||
requireClientCertificate
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
tls = Tls(keyStore, trustStore)
|
||||
}
|
||||
}
|
||||
}
|
||||
return Configuration.of(
|
||||
host,
|
||||
port,
|
||||
incomingConnectionsBacklogSize,
|
||||
serverPath,
|
||||
eventExecutor,
|
||||
connection,
|
||||
users,
|
||||
groups,
|
||||
cache!!,
|
||||
authentication,
|
||||
tls,
|
||||
)
|
||||
}
|
||||
|
||||
private fun parseRoles(root: Element) = root.asIterable().asSequence().map {
|
||||
when (it.localName) {
|
||||
"reader" -> Role.Reader
|
||||
"writer" -> Role.Writer
|
||||
else -> throw UnsupportedOperationException("Illegal node '${it.localName}'")
|
||||
}
|
||||
}.toSet()
|
||||
|
||||
private fun parseUserRefs(root: Element) = root.asIterable().asSequence().map {
|
||||
when (it.localName) {
|
||||
"user" -> it.renderAttribute("ref")
|
||||
"anonymous" -> ""
|
||||
else -> throw ConfigurationException("Unrecognized tag '${it.localName}'")
|
||||
}
|
||||
}
|
||||
|
||||
private fun parseQuota(el: Element): Configuration.Quota {
|
||||
val calls = el.renderAttribute("calls")
|
||||
?.let(String::toLong)
|
||||
?: throw ConfigurationException("Missing attribute 'calls'")
|
||||
val maxAvailableCalls = el.renderAttribute("max-available-calls")
|
||||
?.let(String::toLong)
|
||||
?: calls
|
||||
val initialAvailableCalls = el.renderAttribute("initial-available-calls")
|
||||
?.let(String::toLong)
|
||||
?: maxAvailableCalls
|
||||
val period = el.renderAttribute("period")
|
||||
?.let(Duration::parse)
|
||||
?: throw ConfigurationException("Missing attribute 'period'")
|
||||
return Configuration.Quota(calls, period, initialAvailableCalls, maxAvailableCalls)
|
||||
}
|
||||
|
||||
private fun parseUsers(root: Element): Sequence<User> {
|
||||
return root.asIterable().asSequence().mapNotNull { child ->
|
||||
when (child.localName) {
|
||||
"user" -> {
|
||||
val username = child.renderAttribute("name")
|
||||
val password = child.renderAttribute("password")
|
||||
var quota: Configuration.Quota? = null
|
||||
for (gchild in child.asIterable()) {
|
||||
if (gchild.localName == "quota") {
|
||||
quota = parseQuota(gchild)
|
||||
}
|
||||
}
|
||||
User(username, password, emptySet(), quota)
|
||||
}
|
||||
"anonymous" -> {
|
||||
var quota: Configuration.Quota? = null
|
||||
for (gchild in child.asIterable()) {
|
||||
if (gchild.localName == "quota") {
|
||||
quota = parseQuota(gchild)
|
||||
}
|
||||
}
|
||||
User("", null, emptySet(), quota)
|
||||
}
|
||||
else -> null
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private fun parseGroups(root: Element, knownUsers: Sequence<User>): Pair<Map<String, User>, Map<String, Group>> {
|
||||
val knownUsersMap = knownUsers.associateBy(User::getName)
|
||||
val userGroups = mutableMapOf<String, MutableSet<String>>()
|
||||
val groups = root.asIterable().asSequence().filter {
|
||||
it.localName == "group"
|
||||
}.map { el ->
|
||||
val groupName = el.renderAttribute("name") ?: throw ConfigurationException("Group name is required")
|
||||
var roles = emptySet<Role>()
|
||||
var userQuota: Configuration.Quota? = null
|
||||
var groupQuota: Configuration.Quota? = null
|
||||
for (child in el.asIterable()) {
|
||||
when (child.localName) {
|
||||
"users" -> {
|
||||
parseUserRefs(child).mapNotNull(knownUsersMap::get).forEach { user ->
|
||||
userGroups.computeIfAbsent(user.name) {
|
||||
mutableSetOf()
|
||||
}.add(groupName)
|
||||
}
|
||||
}
|
||||
|
||||
"roles" -> {
|
||||
roles = parseRoles(child)
|
||||
}
|
||||
"group-quota" -> {
|
||||
userQuota = parseQuota(child)
|
||||
}
|
||||
"user-quota" -> {
|
||||
groupQuota = parseQuota(child)
|
||||
}
|
||||
}
|
||||
}
|
||||
groupName to Group(groupName, roles, userQuota, groupQuota)
|
||||
}.toMap()
|
||||
val users = knownUsersMap.map { (name, user) ->
|
||||
name to User(name, user.password, userGroups[name]?.mapNotNull { groups[it] }?.toSet() ?: emptySet(), user.quota)
|
||||
}.toMap()
|
||||
return users to groups
|
||||
}
|
||||
}
|
@@ -0,0 +1,186 @@
package net.woggioni.rbcs.server.configuration

import net.woggioni.rbcs.api.CacheProvider
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.common.RBCS
import net.woggioni.rbcs.common.Xml
import org.w3c.dom.Document

object Serializer {

    private fun Xml.serializeQuota(quota: Configuration.Quota) {
        attr("calls", quota.calls.toString())
        attr("period", quota.period.toString())
        attr("max-available-calls", quota.maxAvailableCalls.toString())
        attr("initial-available-calls", quota.initialAvailableCalls.toString())
    }

    fun serialize(conf: Configuration): Document {
        val schemaLocations = CacheSerializers.index.values.asSequence().map {
            it.xmlNamespace to it.xmlSchemaLocation
        }.toMap()
        return Xml.of(RBCS.RBCS_NAMESPACE_URI, RBCS.RBCS_PREFIX + ":server") {
            // attr("xmlns:xs", GradleBuildCacheServer.XML_SCHEMA_NAMESPACE_URI)
            val value = schemaLocations.asSequence().map { (k, v) -> "$k $v" }.joinToString(" ")
            attr("xs:schemaLocation", value, namespaceURI = RBCS.XML_SCHEMA_NAMESPACE_URI)

            conf.serverPath
                ?.takeIf(String::isNotEmpty)
                ?.let { serverPath ->
                    attr("path", serverPath)
                }
            node("bind") {
                attr("host", conf.host)
                attr("port", conf.port.toString())
                attr("incoming-connections-backlog-size", conf.incomingConnectionsBacklogSize.toString())
            }
            node("connection") {
                conf.connection.let { connection ->
                    attr("read-timeout", connection.readTimeout.toString())
                    attr("write-timeout", connection.writeTimeout.toString())
                    attr("idle-timeout", connection.idleTimeout.toString())
                    attr("read-idle-timeout", connection.readIdleTimeout.toString())
                    attr("write-idle-timeout", connection.writeIdleTimeout.toString())
                    attr("max-request-size", connection.maxRequestSize.toString())
                }
            }
            node("event-executor") {
                attr("use-virtual-threads", conf.eventExecutor.isUseVirtualThreads.toString())
            }
            val cache = conf.cache
            val serializer: CacheProvider<Configuration.Cache> =
                (CacheSerializers.index[cache.namespaceURI to cache.typeName] as? CacheProvider<Configuration.Cache>) ?: throw NotImplementedError()
            element.appendChild(serializer.serialize(doc, cache))
            node("authorization") {
                node("users") {
                    for (user in conf.users.values) {
                        if (user.name.isNotEmpty()) {
                            node("user") {
                                attr("name", user.name)
                                user.password?.let { password ->
                                    attr("password", password)
                                }
                                user.quota?.let { quota ->
                                    node("quota") {
                                        serializeQuota(quota)
                                    }
                                }
                            }
                        }
                    }
                    conf.users[""]
                        ?.let { anonymousUser ->
                            anonymousUser.quota?.let { quota ->
                                node("anonymous") {
                                    node("quota") {
                                        serializeQuota(quota)
                                    }
                                }
                            }
                        }
                }
                node("groups") {
                    val groups = conf.users.values.asSequence()
                        .flatMap { user ->
                            user.groups.map { it to user }
                        }.groupBy(Pair<Configuration.Group, Configuration.User>::first, Pair<Configuration.Group, Configuration.User>::second)
                    for (pair in groups) {
                        val group = pair.key
                        val users = pair.value
                        node("group") {
                            attr("name", group.name)
                            if (users.isNotEmpty()) {
                                node("users") {
                                    var anonymousUser: Configuration.User? = null
                                    for (user in users) {
                                        if (user.name.isNotEmpty()) {
                                            node("user") {
                                                attr("ref", user.name)
                                            }
                                        } else {
                                            anonymousUser = user
                                        }
                                    }
                                    if (anonymousUser != null) {
                                        node("anonymous")
                                    }
                                }
                            }
                            if (group.roles.isNotEmpty()) {
                                node("roles") {
                                    for (role in group.roles) {
                                        node(role.toString().lowercase())
                                    }
                                }
                            }
                            group.userQuota?.let { quota ->
                                node("user-quota") {
                                    serializeQuota(quota)
                                }
                            }
                            group.groupQuota?.let { quota ->
                                node("group-quota") {
                                    serializeQuota(quota)
                                }
                            }
                        }
                    }
                }
            }

            conf.authentication?.let { authentication ->
                node("authentication") {
                    when (authentication) {
                        is Configuration.BasicAuthentication -> {
                            node("basic")
                        }
                        is Configuration.ClientCertificateAuthentication -> {
                            node("client-certificate") {
                                authentication.groupExtractor?.let { extractor ->
                                    node("group-extractor") {
                                        attr("attribute-name", extractor.rdnType)
                                        attr("pattern", extractor.pattern)
                                    }
                                }
                                authentication.userExtractor?.let { extractor ->
                                    node("user-extractor") {
                                        attr("attribute-name", extractor.rdnType)
                                        attr("pattern", extractor.pattern)
                                    }
                                }
                            }
                        }
                    }
                }
            }

            conf.tls?.let { tlsConfiguration ->
                node("tls") {
                    tlsConfiguration.keyStore?.let { keyStore ->
                        node("keystore") {
                            attr("file", keyStore.file.toString())
                            keyStore.password?.let { keyStorePassword ->
                                attr("password", keyStorePassword)
                            }
                            attr("key-alias", keyStore.keyAlias)
                            keyStore.keyPassword?.let { keyPassword ->
                                attr("key-password", keyPassword)
                            }
                        }
                    }

                    tlsConfiguration.trustStore?.let { trustStore ->
                        node("truststore") {
                            attr("file", trustStore.file.toString())
                            trustStore.password?.let { password ->
                                attr("password", password)
                            }
                            attr("check-certificate-status", trustStore.isCheckCertificateStatus.toString())
                            attr("require-client-certificate", trustStore.isRequireClientCertificate.toString())
                        }
                    }
                }
            }
        }
    }
}
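A minimal sketch of turning the Document returned by Serializer.serialize into XML text with the standard JAXP APIs; the helper function and the origin of conf are assumptions for illustration, not part of this diff:

import java.io.StringWriter
import javax.xml.transform.TransformerFactory
import javax.xml.transform.dom.DOMSource
import javax.xml.transform.stream.StreamResult

fun toXmlString(conf: Configuration): String {
    val doc = Serializer.serialize(conf)
    val writer = StringWriter()
    // Identity transform copies the DOM tree to the writer as serialized XML
    TransformerFactory.newInstance().newTransformer()
        .transform(DOMSource(doc), StreamResult(writer))
    return writer.toString()
}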
@@ -0,0 +1,120 @@
package net.woggioni.rbcs.server.exception

import io.netty.buffer.Unpooled
import io.netty.channel.ChannelDuplexHandler
import io.netty.channel.ChannelFutureListener
import io.netty.channel.ChannelHandler.Sharable
import io.netty.channel.ChannelHandlerContext
import io.netty.handler.codec.DecoderException
import io.netty.handler.codec.http.DefaultFullHttpResponse
import io.netty.handler.codec.http.FullHttpResponse
import io.netty.handler.codec.http.HttpHeaderNames
import io.netty.handler.codec.http.HttpResponseStatus
import io.netty.handler.codec.http.HttpVersion
import io.netty.handler.timeout.ReadTimeoutException
import io.netty.handler.timeout.WriteTimeoutException
import net.woggioni.rbcs.api.exception.CacheException
import net.woggioni.rbcs.api.exception.ContentTooLargeException
import net.woggioni.rbcs.common.contextLogger
import net.woggioni.rbcs.common.debug
import net.woggioni.rbcs.common.log
import org.slf4j.event.Level
import org.slf4j.spi.LoggingEventBuilder
import java.net.ConnectException
import java.net.SocketException
import javax.net.ssl.SSLException
import javax.net.ssl.SSLPeerUnverifiedException

@Sharable
object ExceptionHandler : ChannelDuplexHandler() {
    private val log = contextLogger()

    private val NOT_AUTHORIZED: FullHttpResponse = DefaultFullHttpResponse(
        HttpVersion.HTTP_1_1, HttpResponseStatus.FORBIDDEN, Unpooled.EMPTY_BUFFER
    ).apply {
        headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
    }

    private val NOT_AVAILABLE: FullHttpResponse = DefaultFullHttpResponse(
        HttpVersion.HTTP_1_1, HttpResponseStatus.SERVICE_UNAVAILABLE, Unpooled.EMPTY_BUFFER
    ).apply {
        headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
    }

    private val SERVER_ERROR: FullHttpResponse = DefaultFullHttpResponse(
        HttpVersion.HTTP_1_1, HttpResponseStatus.INTERNAL_SERVER_ERROR, Unpooled.EMPTY_BUFFER
    ).apply {
        headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
    }

    private val TOO_BIG: FullHttpResponse = DefaultFullHttpResponse(
        HttpVersion.HTTP_1_1, HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, Unpooled.EMPTY_BUFFER
    ).apply {
        headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
    }

    override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
        when (cause) {
            is DecoderException -> {
                log.debug(cause.message, cause)
                ctx.close()
            }

            is ConnectException -> {
                log.error(cause.message, cause)
                ctx.writeAndFlush(SERVER_ERROR.retainedDuplicate())
            }

            is SocketException -> {
                log.debug(cause.message, cause)
                ctx.close()
            }

            is SSLPeerUnverifiedException -> {
                ctx.writeAndFlush(NOT_AUTHORIZED.retainedDuplicate())
                    .addListener(ChannelFutureListener.CLOSE_ON_FAILURE)
            }

            is SSLException -> {
                log.debug(cause.message, cause)
                ctx.close()
            }

            is ContentTooLargeException -> {
                log.log(Level.DEBUG, ctx.channel()) { builder: LoggingEventBuilder ->
                    builder.setMessage("Request body is too large")
                }
                ctx.writeAndFlush(TOO_BIG.retainedDuplicate())
                    .addListener(ChannelFutureListener.CLOSE_ON_FAILURE)
            }

            is ReadTimeoutException -> {
                log.debug {
                    val channelId = ctx.channel().id().asShortText()
                    "Read timeout on channel $channelId, closing the connection"
                }
                ctx.close()
            }

            is WriteTimeoutException -> {
                log.debug {
                    val channelId = ctx.channel().id().asShortText()
                    "Write timeout on channel $channelId, closing the connection"
                }
                ctx.close()
            }

            is CacheException -> {
                log.error(cause.message, cause)
                ctx.writeAndFlush(NOT_AVAILABLE.retainedDuplicate())
                    .addListener(ChannelFutureListener.CLOSE_ON_FAILURE)
            }

            else -> {
                log.error(cause.message, cause)
                ctx.writeAndFlush(SERVER_ERROR.retainedDuplicate())
                    .addListener(ChannelFutureListener.CLOSE_ON_FAILURE)
            }
        }
    }
}
@@ -0,0 +1,28 @@
package net.woggioni.rbcs.server.handler

import io.netty.channel.ChannelHandler.Sharable
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.SimpleChannelInboundHandler
import io.netty.handler.codec.http.HttpContent
import io.netty.handler.codec.http.LastHttpContent
import net.woggioni.rbcs.api.message.CacheMessage.CacheContent
import net.woggioni.rbcs.api.message.CacheMessage.LastCacheContent

@Sharable
object CacheContentHandler : SimpleChannelInboundHandler<HttpContent>() {
    val NAME = this::class.java.name

    override fun channelRead0(ctx: ChannelHandlerContext, msg: HttpContent) {
        when (msg) {
            is LastHttpContent -> {
                ctx.fireChannelRead(LastCacheContent(msg.content().retain()))
                ctx.pipeline().remove(this)
            }
            else -> ctx.fireChannelRead(CacheContent(msg.content().retain()))
        }
    }

    override fun exceptionCaught(ctx: ChannelHandlerContext?, cause: Throwable?) {
        super.exceptionCaught(ctx, cause)
    }
}
@@ -0,0 +1,40 @@
package net.woggioni.rbcs.server.handler

import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.handler.codec.http.HttpContent
import io.netty.handler.codec.http.HttpRequest
import net.woggioni.rbcs.api.exception.ContentTooLargeException


class MaxRequestSizeHandler(private val maxRequestSize: Int) : ChannelInboundHandlerAdapter() {
    companion object {
        val NAME = MaxRequestSizeHandler::class.java.name
    }

    private var cumulativeSize = 0

    override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
        when (msg) {
            is HttpRequest -> {
                cumulativeSize = 0
                ctx.fireChannelRead(msg)
            }
            is HttpContent -> {
                val exceeded = cumulativeSize > maxRequestSize
                if (!exceeded) {
                    cumulativeSize += msg.content().readableBytes()
                }
                if (cumulativeSize > maxRequestSize) {
                    msg.release()
                    if (!exceeded) {
                        ctx.fireExceptionCaught(ContentTooLargeException("Request body is too large", null))
                    }
                } else {
                    ctx.fireChannelRead(msg)
                }
            }
            else -> ctx.fireChannelRead(msg)
        }
    }
}
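A minimal sketch of how a handler like this is typically installed in a Netty pipeline, after the HTTP codec and before the handlers that consume the request body; the initializer class and the 0x400000 limit are illustrative assumptions, not taken from this diff:

import io.netty.channel.ChannelInitializer
import io.netty.channel.socket.SocketChannel
import io.netty.handler.codec.http.HttpServerCodec

class IllustrativeInitializer : ChannelInitializer<SocketChannel>() {
    override fun initChannel(ch: SocketChannel) {
        ch.pipeline()
            .addLast(HttpServerCodec())
            // reject request bodies larger than 4 MiB
            .addLast(MaxRequestSizeHandler.NAME, MaxRequestSizeHandler(0x400000))
    }
}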
@@ -0,0 +1,230 @@
package net.woggioni.rbcs.server.handler

import io.netty.channel.ChannelDuplexHandler
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelPromise
import io.netty.handler.codec.http.DefaultFullHttpResponse
import io.netty.handler.codec.http.DefaultHttpContent
import io.netty.handler.codec.http.DefaultHttpResponse
import io.netty.handler.codec.http.DefaultLastHttpContent
import io.netty.handler.codec.http.HttpHeaderNames
import io.netty.handler.codec.http.HttpHeaderValues
import io.netty.handler.codec.http.HttpHeaders
import io.netty.handler.codec.http.HttpMethod
import io.netty.handler.codec.http.HttpRequest
import io.netty.handler.codec.http.HttpResponseStatus
import io.netty.handler.codec.http.HttpUtil
import io.netty.handler.codec.http.HttpVersion
import net.woggioni.rbcs.api.CacheValueMetadata
import net.woggioni.rbcs.api.message.CacheMessage
import net.woggioni.rbcs.api.message.CacheMessage.CacheContent
import net.woggioni.rbcs.api.message.CacheMessage.CacheGetRequest
import net.woggioni.rbcs.api.message.CacheMessage.CachePutRequest
import net.woggioni.rbcs.api.message.CacheMessage.CachePutResponse
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueFoundResponse
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueNotFoundResponse
import net.woggioni.rbcs.api.message.CacheMessage.LastCacheContent
import net.woggioni.rbcs.common.createLogger
import net.woggioni.rbcs.common.debug
import net.woggioni.rbcs.common.warn
import java.nio.file.Path
import java.util.Locale

class ServerHandler(private val serverPrefix: Path) :
    ChannelDuplexHandler() {

    companion object {
        private val log = createLogger<ServerHandler>()
        val NAME = this::class.java.name
    }

    private var httpVersion = HttpVersion.HTTP_1_1
    private var keepAlive = true

    private fun resetRequestMetadata() {
        httpVersion = HttpVersion.HTTP_1_1
        keepAlive = true
    }

    private fun setRequestMetadata(req: HttpRequest) {
        httpVersion = req.protocolVersion()
        keepAlive = HttpUtil.isKeepAlive(req)
    }

    private fun setKeepAliveHeader(headers: HttpHeaders) {
        if (!keepAlive) {
            headers.set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE)
        } else {
            headers.set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
        }
    }

    override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
        when (msg) {
            is HttpRequest -> handleRequest(ctx, msg)
            else -> super.channelRead(ctx, msg)
        }
    }

    override fun write(ctx: ChannelHandlerContext, msg: Any, promise: ChannelPromise?) {
        if (msg is CacheMessage) {
            try {
                when (msg) {
                    is CachePutResponse -> {
                        val response = DefaultFullHttpResponse(httpVersion, HttpResponseStatus.CREATED)
                        val keyBytes = msg.key.toByteArray(Charsets.UTF_8)
                        response.headers().apply {
                            set(HttpHeaderNames.CONTENT_TYPE, HttpHeaderValues.TEXT_PLAIN)
                            set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED)
                        }
                        setKeepAliveHeader(response.headers())
                        ctx.write(response)
                        val buf = ctx.alloc().buffer(keyBytes.size).apply {
                            writeBytes(keyBytes)
                        }
                        ctx.writeAndFlush(DefaultLastHttpContent(buf))
                    }

                    is CacheValueNotFoundResponse -> {
                        val response = DefaultFullHttpResponse(httpVersion, HttpResponseStatus.NOT_FOUND)
                        response.headers()[HttpHeaderNames.CONTENT_LENGTH] = 0
                        setKeepAliveHeader(response.headers())
                        ctx.writeAndFlush(response)
                    }

                    is CacheValueFoundResponse -> {
                        val response = DefaultHttpResponse(httpVersion, HttpResponseStatus.OK)
                        response.headers().apply {
                            set(HttpHeaderNames.CONTENT_TYPE, msg.metadata.mimeType ?: HttpHeaderValues.APPLICATION_OCTET_STREAM)
                            msg.metadata.contentDisposition?.let { contentDisposition ->
                                set(HttpHeaderNames.CONTENT_DISPOSITION, contentDisposition)
                            }
                        }
                        setKeepAliveHeader(response.headers())
                        response.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED)
                        ctx.writeAndFlush(response)
                    }

                    is LastCacheContent -> {
                        ctx.writeAndFlush(DefaultLastHttpContent(msg.content()))
                    }

                    is CacheContent -> {
                        ctx.writeAndFlush(DefaultHttpContent(msg.content()))
                    }

                    else -> throw UnsupportedOperationException("This should never happen")
                }.let { channelFuture ->
                    if (promise != null) {
                        channelFuture.addListener {
                            if (it.isSuccess) promise.setSuccess()
                            else promise.setFailure(it.cause())
                        }
                    }
                }
            } finally {
                resetRequestMetadata()
            }
        } else super.write(ctx, msg, promise)
    }

    private fun handleRequest(ctx: ChannelHandlerContext, msg: HttpRequest) {
        setRequestMetadata(msg)
        val method = msg.method()
        if (method === HttpMethod.GET) {
            val path = Path.of(msg.uri())
            val prefix = path.parent
            if (serverPrefix == prefix) {
                ctx.pipeline().addAfter(NAME, CacheContentHandler.NAME, CacheContentHandler)
                path.fileName?.toString()
                    ?.let(::CacheGetRequest)
                    ?.let(ctx::fireChannelRead)
                    ?: ctx.channel().write(CacheValueNotFoundResponse())
            } else {
                log.warn(ctx) {
                    "Got request for unhandled path '${msg.uri()}'"
                }
                val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.BAD_REQUEST)
                response.headers()[HttpHeaderNames.CONTENT_LENGTH] = 0
                ctx.writeAndFlush(response)
            }
        } else if (method === HttpMethod.PUT) {
            val path = Path.of(msg.uri())
            val prefix = path.parent
            val key = path.fileName.toString()

            if (serverPrefix == prefix) {
                log.debug(ctx) {
                    "Added value for key '$key' to build cache"
                }
                ctx.pipeline().addAfter(NAME, CacheContentHandler.NAME, CacheContentHandler)
                path.fileName?.toString()
                    ?.let {
                        val mimeType = HttpUtil.getMimeType(msg)?.toString()
                        CachePutRequest(key, CacheValueMetadata(msg.headers().get(HttpHeaderNames.CONTENT_DISPOSITION), mimeType))
                    }
                    ?.let(ctx::fireChannelRead)
                    ?: ctx.channel().write(CacheValueNotFoundResponse())
            } else {
                log.warn(ctx) {
                    "Got request for unhandled path '${msg.uri()}'"
                }
                val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.BAD_REQUEST)
                response.headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
                ctx.writeAndFlush(response)
            }
        } else if (method == HttpMethod.TRACE) {
            super.channelRead(ctx, msg)
        } else {
            log.warn(ctx) {
                "Got request with unhandled method '${msg.method().name()}'"
            }
            val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.METHOD_NOT_ALLOWED)
            response.headers()[HttpHeaderNames.CONTENT_LENGTH] = "0"
            ctx.writeAndFlush(response)
        }
    }


    data class ContentDisposition(val type: Type?, val fileName: String?) {
        enum class Type {
            attachment, `inline`;

            companion object {
                @JvmStatic
                fun parse(maybeString: String?) = maybeString?.let { s ->
                    try {
                        java.lang.Enum.valueOf(Type::class.java, s)
                    } catch (ex: IllegalArgumentException) {
                        null
                    }
                }
            }
        }

        companion object {
            @JvmStatic
            fun parse(contentDisposition: String): ContentDisposition {
                val parts = contentDisposition.split(";").dropLastWhile { it.isEmpty() }.toTypedArray()
                val dispositionType = parts[0].trim { it <= ' ' }.let(Type::parse) // Get the type (e.g., attachment)

                var filename: String? = null
                for (i in 1..<parts.size) {
                    val part = parts[i].trim { it <= ' ' }
                    if (part.lowercase(Locale.getDefault()).startsWith("filename=")) {
                        filename = part.substring("filename=".length).trim { it <= ' ' }.replace("\"", "")
                        break
                    }
                }
                return ContentDisposition(dispositionType, filename)
            }
        }
    }

    override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
        super.exceptionCaught(ctx, cause)
    }
}
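A minimal usage sketch of the nested ContentDisposition parser; the header value is an illustrative example, not taken from this diff:

val cd = ServerHandler.ContentDisposition.parse("attachment; filename=\"output.bin\"")
// cd.type == ServerHandler.ContentDisposition.Type.attachment
// cd.fileName == "output.bin"

Unknown disposition types yield a null type rather than an error, since Type.parse swallows IllegalArgumentException.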
@@ -0,0 +1,54 @@
package net.woggioni.rbcs.server.handler

import io.netty.channel.ChannelHandler.Sharable
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.handler.codec.http.DefaultHttpResponse
import io.netty.handler.codec.http.HttpContent
import io.netty.handler.codec.http.HttpHeaderNames
import io.netty.handler.codec.http.HttpHeaderValues
import io.netty.handler.codec.http.HttpRequest
import io.netty.handler.codec.http.HttpResponseStatus
import io.netty.handler.codec.http.LastHttpContent
import java.nio.file.Path

@Sharable
object TraceHandler : ChannelInboundHandlerAdapter() {
    val NAME = this::class.java.name

    override fun channelRead(ctx: ChannelHandlerContext, msg: Any) {
        when (msg) {
            is HttpRequest -> {
                val response = DefaultHttpResponse(msg.protocolVersion(), HttpResponseStatus.OK)
                response.headers().apply {
                    set(HttpHeaderNames.CONTENT_TYPE, "message/http")
                    set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED)
                }
                ctx.write(response)
                val replayedRequestHead = ctx.alloc().buffer()
                replayedRequestHead.writeCharSequence(
                    "TRACE ${Path.of(msg.uri())} ${msg.protocolVersion().text()}\r\n",
                    Charsets.US_ASCII
                )
                msg.headers().forEach { (key, value) ->
                    replayedRequestHead.apply {
                        writeCharSequence(key, Charsets.US_ASCII)
                        writeCharSequence(": ", Charsets.US_ASCII)
                        writeCharSequence(value, Charsets.UTF_8)
                        writeCharSequence("\r\n", Charsets.US_ASCII)
                    }
                }
                replayedRequestHead.writeCharSequence("\r\n", Charsets.US_ASCII)
                ctx.writeAndFlush(replayedRequestHead)
            }
            is LastHttpContent -> {
                ctx.writeAndFlush(msg)
            }
            is HttpContent -> ctx.writeAndFlush(msg)
            else -> super.channelRead(ctx, msg)
        }
    }

    override fun exceptionCaught(ctx: ChannelHandlerContext?, cause: Throwable?) {
        super.exceptionCaught(ctx, cause)
    }
}