Compare commits


1 Commit

Author SHA1 Message Date
ba961bd30d added optional event listener to client API 2025-01-27 23:55:59 +08:00
67 changed files with 499 additions and 989 deletions

View File

@@ -44,7 +44,7 @@ jobs:
target: release
cache-from: type=registry,ref=gitea.woggioni.net/woggioni/gbcs:buildx
-
name: Build gbcs memcache Docker image
name: Build gbcs memcached Docker image
uses: docker/build-push-action@v5.3.0
with:
context: "docker/build/docker"
@@ -52,9 +52,9 @@ jobs:
push: true
pull: true
tags: |
gitea.woggioni.net/woggioni/gbcs:memcache
gitea.woggioni.net/woggioni/gbcs:memcache-${{ steps.retrieve-version.outputs.VERSION }}
target: release-memcache
gitea.woggioni.net/woggioni/gbcs:memcached
gitea.woggioni.net/woggioni/gbcs:memcached-${{ steps.retrieve-version.outputs.VERSION }}
target: release-memcached
cache-from: type=registry,ref=gitea.woggioni.net/woggioni/gbcs:buildx
cache-to: type=registry,mode=max,compression=zstd,image-manifest=true,oci-mediatypes=true,ref=gitea.woggioni.net/woggioni/gbcs:buildx
- name: Publish artifacts

View File

@@ -5,12 +5,12 @@ WORKDIR /home/luser
FROM base-release AS release
ADD gbcs-cli-envelope-*.jar gbcs.jar
ENTRYPOINT ["java", "-XX:+UseZGC", "-XX:+ZGenerational", "-jar", "/home/luser/gbcs.jar", "server"]
ENTRYPOINT ["java", "-jar", "/home/luser/gbcs.jar", "server"]
FROM base-release AS release-memcache
FROM base-release AS release-memcached
ADD --chown=luser:luser gbcs-cli-envelope-*.jar gbcs.jar
RUN mkdir plugins
WORKDIR /home/luser/plugins
RUN --mount=type=bind,source=.,target=/build/distributions tar -xf /build/distributions/gbcs-server-memcache*.tar
RUN --mount=type=bind,source=.,target=/build/distributions tar -xf /build/distributions/gbcs-server-memcached*.tar
WORKDIR /home/luser
ENTRYPOINT ["java", "-XX:+UseZGC", "-XX:+ZGenerational", "-jar", "/home/luser/gbcs.jar", "server"]
ENTRYPOINT ["java", "-jar", "/home/luser/gbcs.jar", "server"]

View File

@@ -19,7 +19,7 @@ configurations {
dependencies {
docker project(path: ':gbcs-cli', configuration: 'release')
docker project(path: ':gbcs-server-memcache', configuration: 'release')
docker project(path: ':gbcs-server-memcached', configuration: 'release')
}
Provider<Task> cleanTaskProvider = tasks.named(BasePlugin.CLEAN_TASK_NAME) {}
@@ -46,22 +46,22 @@ Provider<DockerTagImage> dockerTag = tasks.register('dockerTagImage', DockerTagI
tag = version
}
Provider<DockerTagImage> dockerTagMemcache = tasks.register('dockerTagMemcacheImage', DockerTagImage) {
Provider<DockerTagImage> dockerTagMemcached = tasks.register('dockerTagMemcachedImage', DockerTagImage) {
group = 'docker'
repository = 'gitea.woggioni.net/woggioni/gbcs'
imageId = 'gitea.woggioni.net/woggioni/gbcs:memcache'
tag = "${version}-memcache"
imageId = 'gitea.woggioni.net/woggioni/gbcs:memcached'
tag = "${version}-memcached"
}
Provider<DockerPushImage> dockerPush = tasks.register('dockerPushImage', DockerPushImage) {
group = 'docker'
dependsOn dockerTag, dockerTagMemcache
dependsOn dockerTag, dockerTagMemcached
registryCredentials {
url = getProperty('docker.registry.url')
username = 'woggioni'
password = System.getenv().get("PUBLISHER_TOKEN")
}
images = [dockerTag.flatMap{ it.tag }, dockerTagMemcache.flatMap{ it.tag }]
images = [dockerTag.flatMap{ it.tag }, dockerTagMemcached.flatMap{ it.tag }]
}

View File

@@ -5,7 +5,6 @@ plugins {
}
dependencies {
api catalog.netty.buffer
}
publishing {

View File

@@ -1,7 +1,6 @@
module net.woggioni.gbcs.api {
requires static lombok;
requires java.xml;
requires io.netty.buffer;
exports net.woggioni.gbcs.api;
exports net.woggioni.gbcs.api.exception;
}

View File

@@ -1,14 +1,12 @@
package net.woggioni.gbcs.api;
import io.netty.buffer.ByteBuf;
import net.woggioni.gbcs.api.exception.ContentTooLargeException;
import java.nio.channels.ReadableByteChannel;
import java.util.concurrent.CompletableFuture;
public interface Cache extends AutoCloseable {
CompletableFuture<ReadableByteChannel> get(String key);
ReadableByteChannel get(String key);
CompletableFuture<Void> put(String key, ByteBuf content) throws ContentTooLargeException;
void put(String key, byte[] content) throws ContentTooLargeException;
}
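
For reference, a minimal sketch (not part of this commit) of what an implementation of the simplified, synchronous Cache API looks like after this change; the MapCache name and the ConcurrentHashMap backing store are illustrative only:

import net.woggioni.gbcs.api.Cache
import java.io.ByteArrayInputStream
import java.nio.channels.Channels
import java.nio.channels.ReadableByteChannel
import java.util.concurrent.ConcurrentHashMap

// Illustrative only: with the ByteBuf/CompletableFuture signatures gone, an
// implementation returns a ReadableByteChannel and accepts a plain byte array.
class MapCache : Cache {
    private val store = ConcurrentHashMap<String, ByteArray>()

    override fun get(key: String): ReadableByteChannel? =
        store[key]?.let { Channels.newChannel(ByteArrayInputStream(it)) }

    override fun put(key: String, content: ByteArray) {
        store[key] = content
    }

    override fun close() = store.clear()
}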

View File

@@ -27,7 +27,6 @@ public class Configuration {
Cache cache;
Authentication authentication;
Tls tls;
boolean useNativeTransport;
@Value
public static class EventExecutor {
@@ -57,8 +56,7 @@ public class Configuration {
@EqualsAndHashCode.Include
String name;
Set<Role> roles;
Quota groupQuota;
Quota userQuota;
Quota quota;
}
@Value
@@ -152,8 +150,7 @@ public class Configuration {
Map<String, Group> groups,
Cache cache,
Authentication authentication,
Tls tls,
boolean useNativeTransport
Tls tls
) {
return new Configuration(
host,
@@ -166,8 +163,7 @@ public class Configuration {
groups,
cache,
authentication,
tls,
useNativeTransport
tls
);
}
}

View File

@@ -32,13 +32,6 @@ configurations {
canBeResolved = true
visible = true
}
nativeLibraries {
transitive = false
canBeConsumed = false
canBeResolved = true
visible = false
}
}
envelopeJar {
@@ -60,8 +53,6 @@ dependencies {
// runtimeOnly catalog.slf4j.jdk14
runtimeOnly catalog.logback.classic
// runtimeOnly catalog.slf4j.simple
nativeLibraries group: 'io.netty', name: 'netty-transport-native-epoll', version: catalog.versions.netty.get(), classifier: 'linux-x86_64'
}
Provider<EnvelopeJarTask> envelopeJarTaskProvider = tasks.named('envelopeJar', EnvelopeJarTask.class) {
@@ -76,9 +67,6 @@ Provider<EnvelopeJarTask> envelopeJarTaskProvider = tasks.named('envelopeJar', E
// systemProperties['org.slf4j.simpleLogger.log.com.google.code.yanf4j'] = 'warn'
// systemProperties['org.slf4j.simpleLogger.log.net.rubyeye.xmemcached'] = 'warn'
// systemProperties['org.slf4j.simpleLogger.dateTimeFormat'] = 'yyyy-MM-dd\'T\'HH:mm:ss.SSSZ'
from {
configurations.nativeLibraries.collect { it.isDirectory() ? it : zipTree(it) }
}
}
tasks.named(NativeImagePlugin.CONFIGURE_NATIVE_IMAGE_TASK_NAME, NativeImageConfigurationTask) {

View File

@@ -7,6 +7,7 @@ module net.woggioni.gbcs.cli {
requires kotlin.stdlib;
requires net.woggioni.jwo;
requires net.woggioni.gbcs.api;
requires io.netty.codec.http;
exports net.woggioni.gbcs.cli.impl.converters to info.picocli;
opens net.woggioni.gbcs.cli.impl.commands to info.picocli;

View File

@@ -1,19 +1,19 @@
package net.woggioni.gbcs.cli
import net.woggioni.gbcs.common.GbcsUrlStreamHandlerFactory
import net.woggioni.gbcs.common.contextLogger
import net.woggioni.gbcs.cli.impl.AbstractVersionProvider
import net.woggioni.gbcs.cli.impl.GbcsCommand
import net.woggioni.gbcs.cli.impl.commands.BenchmarkCommand
import net.woggioni.gbcs.cli.impl.commands.ClientCommand
import net.woggioni.gbcs.cli.impl.commands.GetCommand
import net.woggioni.gbcs.cli.impl.commands.HealthCheckCommand
import net.woggioni.gbcs.cli.impl.commands.PasswordHashCommand
import net.woggioni.gbcs.cli.impl.commands.PutCommand
import net.woggioni.gbcs.cli.impl.commands.ServerCommand
import net.woggioni.gbcs.common.GbcsUrlStreamHandlerFactory
import net.woggioni.gbcs.common.contextLogger
import net.woggioni.jwo.Application
import picocli.CommandLine
import picocli.CommandLine.Model.CommandSpec
import java.net.URI
@CommandLine.Command(
@@ -25,12 +25,8 @@ class GradleBuildCacheServerCli : GbcsCommand() {
companion object {
@JvmStatic
fun main(vararg args: String) {
val currentClassLoader = GradleBuildCacheServerCli::class.java.classLoader
Thread.currentThread().contextClassLoader = currentClassLoader
if(currentClassLoader.javaClass.name == "net.woggioni.envelope.loader.ModuleClassLoader") {
//We're running in an envelope jar and custom URL protocols won't work
GbcsUrlStreamHandlerFactory.install()
}
Thread.currentThread().contextClassLoader = GradleBuildCacheServerCli::class.java.classLoader
GbcsUrlStreamHandlerFactory.install()
val log = contextLogger()
val app = Application.builder("gbcs")
.configurationDirectoryEnvVar("GBCS_CONFIGURATION_DIR")
@@ -49,7 +45,6 @@ class GradleBuildCacheServerCli : GbcsCommand() {
addSubcommand(BenchmarkCommand())
addSubcommand(PutCommand())
addSubcommand(GetCommand())
addSubcommand(HealthCheckCommand())
})
System.exit(commandLine.execute(*args))
}

View File

@@ -1,15 +1,21 @@
package net.woggioni.gbcs.cli.impl.commands
import net.woggioni.gbcs.cli.impl.GbcsCommand
import net.woggioni.gbcs.client.GradleBuildCacheClient
import io.netty.handler.codec.http.FullHttpRequest
import io.netty.handler.codec.http.FullHttpResponse
import net.woggioni.gbcs.common.contextLogger
import net.woggioni.gbcs.common.error
import net.woggioni.gbcs.common.info
import net.woggioni.gbcs.cli.impl.GbcsCommand
import net.woggioni.gbcs.client.GradleBuildCacheClient
import net.woggioni.gbcs.client.RequestEventListener
import net.woggioni.jwo.JWO
import picocli.CommandLine
import java.security.SecureRandom
import java.time.Duration
import java.time.Instant
import java.util.Base64
import java.util.concurrent.ExecutionException
import java.util.concurrent.Future
import java.util.concurrent.LinkedBlockingQueue
import java.util.concurrent.Semaphore
import java.util.concurrent.atomic.AtomicLong
@@ -33,110 +39,132 @@ class BenchmarkCommand : GbcsCommand() {
)
private var numberOfEntries = 1000
@CommandLine.Option(
names = ["-s", "--size"],
description = ["Size of a cache value in bytes"],
paramLabel = "SIZE"
)
private var size = 0x1000
override fun run() {
val clientCommand = spec.parent().userObject() as ClientCommand
val profile = clientCommand.profileName.let { profileName ->
clientCommand.configuration.profiles[profileName]
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
}
GradleBuildCacheClient(profile).use { client ->
val client = GradleBuildCacheClient(profile)
val entryGenerator = sequence {
val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
while (true) {
val key = JWO.bytesToHex(random.nextBytes(16))
val content = random.nextInt().toByte()
val value = ByteArray(size, { _ -> content })
yield(key to value)
val entryGenerator = sequence {
val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
while (true) {
val key = JWO.bytesToHex(random.nextBytes(16))
val content = random.nextInt().toByte()
val value = ByteArray(0x1000, { _ -> content })
yield(key to value)
}
}
log.info {
"Starting insertion"
}
val entries = let {
val completionCounter = AtomicLong(0)
val completionQueue = LinkedBlockingQueue<Pair<String, ByteArray>>(numberOfEntries)
val start = Instant.now()
val semaphore = Semaphore(profile.maxConnections * 3)
val totalElapsedTime = AtomicLong(0)
val iterator = entryGenerator.take(numberOfEntries).iterator()
while(completionCounter.get() < numberOfEntries) {
if(iterator.hasNext()) {
val entry = iterator.next()
semaphore.acquire()
val eventListener = object : RequestEventListener {
var start: Long? = null
override fun requestSent(req: FullHttpRequest) {
this.start = System.nanoTime()
}
override fun responseReceived(res: FullHttpResponse) {
this.start?.let { requestStart ->
totalElapsedTime.addAndGet((System.nanoTime() - requestStart))
}
this.start = null
}
}
val future = client.put(entry.first, entry.second, eventListener).thenApply { entry }
future.whenComplete { result, ex ->
if (ex != null) {
log.error(ex.message, ex)
} else {
completionQueue.put(result)
}
semaphore.release()
completionCounter.incrementAndGet()
}
}
}
val inserted = completionQueue.toList()
val end = Instant.now()
log.info {
"Starting insertion"
val elapsed = Duration.between(start, end).toMillis()
val opsPerSecond = String.format("%.2f", numberOfEntries.toDouble() / elapsed * 1000)
"Insertion rate: $opsPerSecond ops/s"
}
val entries = let {
val completionCounter = AtomicLong(0)
val completionQueue = LinkedBlockingQueue<Pair<String, ByteArray>>(numberOfEntries)
val start = Instant.now()
val semaphore = Semaphore(profile.maxConnections * 3)
val iterator = entryGenerator.take(numberOfEntries).iterator()
while (completionCounter.get() < numberOfEntries) {
if (iterator.hasNext()) {
val entry = iterator.next()
semaphore.acquire()
val future = client.put(entry.first, entry.second).thenApply { entry }
future.whenComplete { result, ex ->
if (ex != null) {
log.error(ex.message, ex)
} else {
completionQueue.put(result)
}
semaphore.release()
completionCounter.incrementAndGet()
log.info {
val avgTxTime = String.format("%.0f", totalElapsedTime.get() / numberOfEntries.toDouble() / 1e6)
"Average time per insertion: $avgTxTime ms"
}
inserted
}
log.info {
"Inserted ${entries.size} entries"
}
log.info {
"Starting retrieval"
}
if (entries.isNotEmpty()) {
val completionCounter = AtomicLong(0)
val semaphore = Semaphore(profile.maxConnections * 3)
val start = Instant.now()
val totalElapsedTime = AtomicLong(0)
entries.forEach { entry ->
semaphore.acquire()
val eventListener = object : RequestEventListener {
var start : Long? = null
override fun requestSent(req: FullHttpRequest) {
this.start = System.nanoTime()
}
override fun responseReceived(res: FullHttpResponse) {
this.start?.let { requestStart ->
totalElapsedTime.addAndGet((System.nanoTime() - requestStart))
}
} else {
Thread.sleep(0)
this.start = null
}
}
val inserted = completionQueue.toList()
val end = Instant.now()
log.info {
val elapsed = Duration.between(start, end).toMillis()
val opsPerSecond = String.format("%.2f", numberOfEntries.toDouble() / elapsed * 1000)
"Insertion rate: $opsPerSecond ops/s"
}
inserted
}
log.info {
"Inserted ${entries.size} entries"
}
log.info {
"Starting retrieval"
}
if (entries.isNotEmpty()) {
val completionCounter = AtomicLong(0)
val semaphore = Semaphore(profile.maxConnections * 3)
val start = Instant.now()
val it = entries.iterator()
while (completionCounter.get() < entries.size) {
if (it.hasNext()) {
val entry = it.next()
val future = client.get(entry.first).thenApply {
if (it == null) {
log.error {
"Missing entry for key '${entry.first}'"
}
} else if (!entry.second.contentEquals(it)) {
log.error {
"Retrieved a value different from what was inserted for key '${entry.first}'"
}
}
val future = client.get(entry.first, eventListener).thenApply {
if (it == null) {
log.error {
"Missing entry for key '${entry.first}'"
}
future.whenComplete { _, _ ->
completionCounter.incrementAndGet()
semaphore.release()
} else if (!entry.second.contentEquals(it)) {
log.error {
"Retrieved a value different from what was inserted for key '${entry.first}'"
}
} else {
Thread.sleep(0)
}
}
val end = Instant.now()
log.info {
val elapsed = Duration.between(start, end).toMillis()
val opsPerSecond = String.format("%.2f", entries.size.toDouble() / elapsed * 1000)
"Retrieval rate: $opsPerSecond ops/s"
future.whenComplete { _, _ ->
completionCounter.incrementAndGet()
semaphore.release()
}
} else {
log.error("Skipping retrieval benchmark as it was not possible to insert any entry in the cache")
}
val end = Instant.now()
log.info {
val elapsed = Duration.between(start, end).toMillis()
val opsPerSecond = String.format("%.2f", entries.size.toDouble() / elapsed * 1000)
"Retrieval rate: $opsPerSecond ops/s"
}
log.info {
val avgTxTime = String.format("%.0f", totalElapsedTime.get() / completionCounter.toDouble() / 1e6)
"Average time per retrieval: $avgTxTime ms"
}
} else {
log.error("Skipping retrieval benchmark as it was not possible to insert any entry in the cache")
}
}
}

View File

@@ -1,8 +1,8 @@
package net.woggioni.gbcs.cli.impl.commands
import net.woggioni.gbcs.common.contextLogger
import net.woggioni.gbcs.cli.impl.GbcsCommand
import net.woggioni.gbcs.client.GradleBuildCacheClient
import net.woggioni.gbcs.common.contextLogger
import picocli.CommandLine
import java.nio.file.Files
import java.nio.file.Path

View File

@@ -1,45 +0,0 @@
package net.woggioni.gbcs.cli.impl.commands
import net.woggioni.gbcs.cli.impl.GbcsCommand
import net.woggioni.gbcs.client.GradleBuildCacheClient
import net.woggioni.gbcs.common.contextLogger
import picocli.CommandLine
import java.security.SecureRandom
import kotlin.random.Random
@CommandLine.Command(
name = "health",
description = ["Check server health"],
showDefaultValues = true
)
class HealthCheckCommand : GbcsCommand() {
private val log = contextLogger()
@CommandLine.Spec
private lateinit var spec: CommandLine.Model.CommandSpec
override fun run() {
val clientCommand = spec.parent().userObject() as ClientCommand
val profile = clientCommand.profileName.let { profileName ->
clientCommand.configuration.profiles[profileName]
?: throw IllegalArgumentException("Profile $profileName does not exist in configuration")
}
GradleBuildCacheClient(profile).use { client ->
val random = Random(SecureRandom.getInstance("NativePRNGNonBlocking").nextLong())
val nonce = ByteArray(0xa0)
random.nextBytes(nonce)
client.healthCheck(nonce).thenApply { value ->
if(value == null) {
throw IllegalStateException("Empty response from server")
}
for(i in 0 until nonce.size) {
for(j in value.size - nonce.size until nonce.size) {
if(nonce[i] != value[j]) {
throw IllegalStateException("Server nonce does not match")
}
}
}
}.get()
}
}
}

View File

@@ -1,8 +1,8 @@
package net.woggioni.gbcs.cli.impl.commands
import net.woggioni.gbcs.common.PasswordSecurity.hashPassword
import net.woggioni.gbcs.cli.impl.GbcsCommand
import net.woggioni.gbcs.cli.impl.converters.OutputStreamConverter
import net.woggioni.gbcs.common.PasswordSecurity.hashPassword
import net.woggioni.jwo.UncloseableOutputStream
import picocli.CommandLine
import java.io.OutputStream

View File

@@ -1,9 +1,9 @@
package net.woggioni.gbcs.cli.impl.commands
import net.woggioni.gbcs.common.contextLogger
import net.woggioni.gbcs.cli.impl.GbcsCommand
import net.woggioni.gbcs.cli.impl.converters.InputStreamConverter
import net.woggioni.gbcs.client.GradleBuildCacheClient
import net.woggioni.gbcs.common.contextLogger
import picocli.CommandLine
import java.io.InputStream

View File

@@ -1,20 +1,18 @@
package net.woggioni.gbcs.cli.impl.commands
import net.woggioni.gbcs.server.GradleBuildCacheServer
import net.woggioni.gbcs.server.GradleBuildCacheServer.Companion.DEFAULT_CONFIGURATION_URL
import net.woggioni.gbcs.api.Configuration
import net.woggioni.gbcs.cli.impl.GbcsCommand
import net.woggioni.gbcs.cli.impl.converters.DurationConverter
import net.woggioni.gbcs.common.contextLogger
import net.woggioni.gbcs.common.debug
import net.woggioni.gbcs.common.info
import net.woggioni.gbcs.server.GradleBuildCacheServer
import net.woggioni.gbcs.server.GradleBuildCacheServer.Companion.DEFAULT_CONFIGURATION_URL
import net.woggioni.gbcs.cli.impl.GbcsCommand
import net.woggioni.jwo.Application
import net.woggioni.jwo.JWO
import picocli.CommandLine
import java.io.ByteArrayOutputStream
import java.nio.file.Files
import java.nio.file.Path
import java.time.Duration
@CommandLine.Command(
name = "server",
@@ -37,14 +35,6 @@ class ServerCommand(app : Application) : GbcsCommand() {
}
}
@CommandLine.Option(
names = ["-t", "--timeout"],
description = ["Exit after the specified time"],
paramLabel = "TIMEOUT",
converter = [DurationConverter::class]
)
private var timeout: Duration? = null
@CommandLine.Option(
names = ["-c", "--config-file"],
description = ["Read the application configuration from this file"],
@@ -52,6 +42,10 @@ class ServerCommand(app : Application) : GbcsCommand() {
)
private var configurationFile: Path = findConfigurationFile(app, "gbcs-server.xml")
val configuration : Configuration by lazy {
GradleBuildCacheServer.loadConfiguration(configurationFile)
}
override fun run() {
if (!Files.exists(configurationFile)) {
Files.createDirectories(configurationFile.parent)
@@ -67,11 +61,7 @@ class ServerCommand(app : Application) : GbcsCommand() {
}
}
val server = GradleBuildCacheServer(configuration)
server.run().use { server ->
timeout?.let {
Thread.sleep(it)
server.shutdown()
}
server.run().use {
}
}
}

View File

@@ -1,11 +0,0 @@
package net.woggioni.gbcs.cli.impl.converters
import picocli.CommandLine
import java.time.Duration
class DurationConverter : CommandLine.ITypeConverter<Duration> {
override fun convert(value: String): Duration {
return Duration.parse(value)
}
}

View File

@@ -15,4 +15,6 @@
<root level="info">
<appender-ref ref="console"/>
</root>
<logger name="com.google.code.yanf4j" level="warn"/>
<logger name="net.rubyeye.xmemcached" level="warn"/>
</configuration>

View File

@@ -213,28 +213,9 @@ class GradleBuildCacheClient(private val profile: Configuration.Profile) : AutoC
}
}
fun healthCheck(nonce: ByteArray): CompletableFuture<ByteArray?> {
fun get(key: String, eventListener : RequestEventListener? = null): CompletableFuture<ByteArray?> {
return executeWithRetry {
sendRequest(profile.serverURI, HttpMethod.TRACE, nonce)
}.thenApply {
val status = it.status()
if (it.status() != HttpResponseStatus.OK) {
throw HttpException(status)
} else {
it.content()
}
}.thenApply { maybeByteBuf ->
maybeByteBuf?.let {
val result = ByteArray(it.readableBytes())
it.getBytes(0, result)
result
}
}
}
fun get(key: String): CompletableFuture<ByteArray?> {
return executeWithRetry {
sendRequest(profile.serverURI.resolve(key), HttpMethod.GET, null)
sendRequest(profile.serverURI.resolve(key), HttpMethod.GET, null, eventListener)
}.thenApply {
val status = it.status()
if (it.status() == HttpResponseStatus.NOT_FOUND) {
@@ -253,9 +234,9 @@ class GradleBuildCacheClient(private val profile: Configuration.Profile) : AutoC
}
}
fun put(key: String, content: ByteArray): CompletableFuture<Unit> {
fun put(key: String, content: ByteArray, eventListener : RequestEventListener? = null): CompletableFuture<Unit> {
return executeWithRetry {
sendRequest(profile.serverURI.resolve(key), HttpMethod.PUT, content)
sendRequest(profile.serverURI.resolve(key), HttpMethod.PUT, content, eventListener)
}.thenApply {
val status = it.status()
if (it.status() != HttpResponseStatus.CREATED && it.status() != HttpResponseStatus.OK) {
@@ -264,7 +245,7 @@ class GradleBuildCacheClient(private val profile: Configuration.Profile) : AutoC
}
}
private fun sendRequest(uri: URI, method: HttpMethod, body: ByteArray?): CompletableFuture<FullHttpResponse> {
private fun sendRequest(uri: URI, method: HttpMethod, body: ByteArray?, eventListener : RequestEventListener?): CompletableFuture<FullHttpResponse> {
val responseFuture = CompletableFuture<FullHttpResponse>()
// Custom handler for processing responses
pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {
@@ -280,6 +261,7 @@ class GradleBuildCacheClient(private val profile: Configuration.Profile) : AutoC
pipeline.removeLast()
pool.release(channel)
responseFuture.complete(response)
eventListener?.responseReceived(response)
}
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
@@ -325,7 +307,11 @@ class GradleBuildCacheClient(private val profile: Configuration.Profile) : AutoC
// Set headers
// Send the request
channel.writeAndFlush(request)
channel.writeAndFlush(request).addListener {
if(it.isSuccess) {
eventListener?.requestSent(request)
}
}
} else {
responseFuture.completeExceptionally(channelFuture.cause())
}

View File

@@ -0,0 +1,10 @@
package net.woggioni.gbcs.client
import io.netty.handler.codec.http.FullHttpRequest
import io.netty.handler.codec.http.FullHttpResponse
interface RequestEventListener {
fun requestSent(req : FullHttpRequest) {}
fun responseReceived(res : FullHttpResponse) {}
fun exceptionCaught(ex : Throwable) {}
}
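
A hedged usage sketch of the new listener hook, mirroring how BenchmarkCommand above times individual requests; the timedPut helper and the way the client is obtained are assumptions, not part of the commit:

import io.netty.handler.codec.http.FullHttpRequest
import io.netty.handler.codec.http.FullHttpResponse
import net.woggioni.gbcs.client.GradleBuildCacheClient
import net.woggioni.gbcs.client.RequestEventListener

// Illustrative helper: time a single PUT round trip using the optional listener.
fun timedPut(client: GradleBuildCacheClient, key: String, value: ByteArray) {
    val listener = object : RequestEventListener {
        private var start = 0L
        override fun requestSent(req: FullHttpRequest) {
            start = System.nanoTime()
        }
        override fun responseReceived(res: FullHttpResponse) {
            println("PUT took ${(System.nanoTime() - start) / 1_000_000} ms")
        }
    }
    // The eventListener parameter defaults to null, so existing callers keep working.
    client.put(key, value, listener).get()
}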

View File

@@ -1,9 +1,9 @@
package net.woggioni.gbcs.client.impl
import net.woggioni.gbcs.api.exception.ConfigurationException
import net.woggioni.gbcs.client.GradleBuildCacheClient
import net.woggioni.gbcs.common.Xml.Companion.asIterable
import net.woggioni.gbcs.common.Xml.Companion.renderAttribute
import net.woggioni.gbcs.client.GradleBuildCacheClient
import org.w3c.dom.Document
import java.net.URI
import java.nio.file.Files

View File

@@ -4,6 +4,7 @@ import io.netty.util.concurrent.DefaultEventExecutorGroup
import io.netty.util.concurrent.EventExecutorGroup
import net.woggioni.gbcs.common.contextLogger
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Test
import org.junit.jupiter.api.extension.ExtensionContext
import org.junit.jupiter.params.ParameterizedTest
import org.junit.jupiter.params.provider.Arguments

View File

@@ -9,7 +9,6 @@ dependencies {
implementation project(':gbcs-api')
implementation catalog.slf4j.api
implementation catalog.jwo
implementation catalog.netty.buffer
}
publishing {

View File

@@ -4,7 +4,6 @@ module net.woggioni.gbcs.common {
requires org.slf4j;
requires kotlin.stdlib;
requires net.woggioni.jwo;
requires io.netty.buffer;
provides java.net.spi.URLStreamHandlerProvider with net.woggioni.gbcs.common.GbcsUrlStreamHandlerFactory;
exports net.woggioni.gbcs.common;

View File

@@ -1,25 +0,0 @@
package net.woggioni.gbcs.common
import io.netty.buffer.ByteBuf
import java.io.InputStream
class ByteBufInputStream(private val buf : ByteBuf) : InputStream() {
override fun read(): Int {
return buf.takeIf {
it.readableBytes() > 0
}?.let(ByteBuf::readByte)
?.let(Byte::toInt) ?: -1
}
override fun read(b: ByteArray, off: Int, len: Int): Int {
val readableBytes = buf.readableBytes()
if(readableBytes == 0) return -1
val result = len.coerceAtMost(readableBytes)
buf.readBytes(b, off, result)
return result
}
override fun close() {
buf.release()
}
}

View File

@@ -1,19 +0,0 @@
package net.woggioni.gbcs.common
import io.netty.buffer.ByteBuf
import java.io.InputStream
import java.io.OutputStream
class ByteBufOutputStream(private val buf : ByteBuf) : OutputStream() {
override fun write(b: Int) {
buf.writeByte(b)
}
override fun write(b: ByteArray, off: Int, len: Int) {
buf.writeBytes(b, off, len)
}
override fun close() {
buf.release()
}
}

View File

@@ -1,7 +0,0 @@
package net.woggioni.gbcs.common
class ResourceNotFoundException(msg : String? = null, cause: Throwable? = null) : RuntimeException(msg, cause) {
}
class ModuleNotFoundException(msg : String? = null, cause: Throwable? = null) : RuntimeException(msg, cause) {
}

View File

@@ -1,9 +1,7 @@
package net.woggioni.gbcs.common
import net.woggioni.jwo.JWO
import java.net.URI
import java.net.URL
import java.security.MessageDigest
object GBCS {
fun String.toUrl() : URL = URL.of(URI(this), null)
@@ -11,19 +9,4 @@ object GBCS {
const val GBCS_NAMESPACE_URI: String = "urn:net.woggioni.gbcs.server"
const val GBCS_PREFIX: String = "gbcs"
const val XML_SCHEMA_NAMESPACE_URI = "http://www.w3.org/2001/XMLSchema-instance"
fun digest(
data: ByteArray,
md: MessageDigest = MessageDigest.getInstance("MD5")
): ByteArray {
md.update(data)
return md.digest()
}
fun digestString(
data: ByteArray,
md: MessageDigest = MessageDigest.getInstance("MD5")
): String {
return JWO.bytesToHex(digest(data, md))
}
}

View File

@@ -5,6 +5,7 @@ import java.io.InputStream
import java.net.URL
import java.net.URLConnection
import java.net.URLStreamHandler
import java.net.URLStreamHandlerFactory
import java.net.spi.URLStreamHandlerProvider
import java.util.Optional
import java.util.concurrent.atomic.AtomicBoolean
@@ -36,17 +37,13 @@ class GbcsUrlStreamHandlerFactory : URLStreamHandlerProvider() {
private class JpmsHandler : URLStreamHandler() {
override fun openConnection(u: URL): URLConnection {
val moduleName = u.host
val thisModule = javaClass.module
val sourceModule =
thisModule
?.let(Module::getLayer)
?.let { layer: ModuleLayer ->
layer.findModule(moduleName).orElse(null)
} ?: if(thisModule.layer == null) {
thisModule
} else throw ModuleNotFoundException("Module '$moduleName' not found")
val sourceModule = Optional.ofNullable(thisModule)
.map { obj: Module -> obj.layer }
.flatMap { layer: ModuleLayer ->
val moduleName = u.host
layer.findModule(moduleName)
}.orElse(thisModule)
return JpmsResourceURLConnection(u, sourceModule)
}
}
@@ -57,9 +54,7 @@ class GbcsUrlStreamHandlerFactory : URLStreamHandlerProvider() {
@Throws(IOException::class)
override fun getInputStream(): InputStream {
val resource = getURL().path
return module.getResourceAsStream(resource)
?: throw ResourceNotFoundException("Resource '$resource' not found in module '${module.name}'")
return module.getResourceAsStream(getURL().path)
}
}

View File

@@ -1,19 +0,0 @@
import net.woggioni.gbcs.api.CacheProvider;
module net.woggioni.gbcs.server.memcache {
requires net.woggioni.gbcs.common;
requires net.woggioni.gbcs.api;
requires net.woggioni.jwo;
requires java.xml;
requires kotlin.stdlib;
requires io.netty.transport;
requires io.netty.codec;
requires io.netty.codec.memcache;
requires io.netty.common;
requires io.netty.buffer;
requires org.slf4j;
provides CacheProvider with net.woggioni.gbcs.server.memcache.MemcacheCacheProvider;
opens net.woggioni.gbcs.server.memcache.schema;
}

View File

@@ -1,4 +0,0 @@
package net.woggioni.gbcs.server.memcache
class MemcacheException(status : Short, msg : String? = null, cause : Throwable? = null)
: RuntimeException(msg ?: "Memcached status $status", cause)

View File

@@ -1,23 +0,0 @@
package net.woggioni.gbcs.server.memcache
import io.netty.buffer.ByteBuf
import net.woggioni.gbcs.api.Cache
import net.woggioni.gbcs.server.memcache.client.MemcacheClient
import java.nio.channels.ReadableByteChannel
import java.util.concurrent.CompletableFuture
class MemcacheCache(private val cfg : MemcacheCacheConfiguration) : Cache {
private val memcacheClient = MemcacheClient(cfg)
override fun get(key: String): CompletableFuture<ReadableByteChannel?> {
return memcacheClient.get(key)
}
override fun put(key: String, content: ByteBuf): CompletableFuture<Void> {
return memcacheClient.put(key, content, cfg.maxAge)
}
override fun close() {
memcacheClient.close()
}
}

View File

@@ -1,40 +0,0 @@
package net.woggioni.gbcs.server.memcache
import net.woggioni.gbcs.api.Configuration
import net.woggioni.gbcs.common.HostAndPort
import java.time.Duration
data class MemcacheCacheConfiguration(
val servers: List<Server>,
val maxAge: Duration = Duration.ofDays(1),
val maxSize: Int = 0x100000,
val digestAlgorithm: String? = null,
val compressionMode: CompressionMode? = null,
) : Configuration.Cache {
enum class CompressionMode {
/**
* Gzip mode
*/
GZIP,
/**
* Deflate mode
*/
DEFLATE
}
data class Server(
val endpoint : HostAndPort,
val connectionTimeoutMillis : Int?,
val maxConnections : Int
)
override fun materialize() = MemcacheCache(this)
override fun getNamespaceURI() = "urn:net.woggioni.gbcs.server.memcache"
override fun getTypeName() = "memcacheCacheType"
}

View File

@@ -1,258 +0,0 @@
package net.woggioni.gbcs.server.memcache.client
import io.netty.bootstrap.Bootstrap
import io.netty.buffer.ByteBuf
import io.netty.buffer.Unpooled
import io.netty.channel.Channel
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelOption
import io.netty.channel.ChannelPipeline
import io.netty.channel.SimpleChannelInboundHandler
import io.netty.channel.nio.NioEventLoopGroup
import io.netty.channel.pool.AbstractChannelPoolHandler
import io.netty.channel.pool.ChannelPool
import io.netty.channel.pool.FixedChannelPool
import io.netty.channel.socket.nio.NioSocketChannel
import io.netty.handler.codec.DecoderException
import io.netty.handler.codec.memcache.binary.BinaryMemcacheClientCodec
import io.netty.handler.codec.memcache.binary.BinaryMemcacheObjectAggregator
import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponseStatus
import io.netty.handler.codec.memcache.binary.DefaultFullBinaryMemcacheRequest
import io.netty.handler.codec.memcache.binary.FullBinaryMemcacheRequest
import io.netty.handler.codec.memcache.binary.FullBinaryMemcacheResponse
import io.netty.util.concurrent.GenericFutureListener
import net.woggioni.gbcs.common.ByteBufInputStream
import net.woggioni.gbcs.common.ByteBufOutputStream
import net.woggioni.gbcs.common.GBCS.digest
import net.woggioni.gbcs.common.HostAndPort
import net.woggioni.gbcs.common.contextLogger
import net.woggioni.gbcs.server.memcache.MemcacheCacheConfiguration
import net.woggioni.gbcs.server.memcache.MemcacheException
import net.woggioni.jwo.JWO
import java.io.ByteArrayOutputStream
import java.net.InetSocketAddress
import java.nio.channels.Channels
import java.nio.channels.ReadableByteChannel
import java.security.MessageDigest
import java.time.Duration
import java.time.Instant
import java.util.concurrent.CompletableFuture
import java.util.concurrent.ConcurrentHashMap
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.GZIPInputStream
import java.util.zip.GZIPOutputStream
import java.util.zip.InflaterInputStream
import io.netty.util.concurrent.Future as NettyFuture
class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseable {
private companion object {
@JvmStatic
private val log = contextLogger()
}
private val group: NioEventLoopGroup
private val connectionPool: MutableMap<HostAndPort, ChannelPool> = ConcurrentHashMap()
init {
group = NioEventLoopGroup()
}
private fun newConnectionPool(server: MemcacheCacheConfiguration.Server): FixedChannelPool {
val bootstrap = Bootstrap().apply {
group(group)
channel(NioSocketChannel::class.java)
option(ChannelOption.SO_KEEPALIVE, true)
remoteAddress(InetSocketAddress(server.endpoint.host, server.endpoint.port))
server.connectionTimeoutMillis?.let {
option(ChannelOption.CONNECT_TIMEOUT_MILLIS, it)
}
}
val channelPoolHandler = object : AbstractChannelPoolHandler() {
override fun channelCreated(ch: Channel) {
val pipeline: ChannelPipeline = ch.pipeline()
pipeline.addLast(BinaryMemcacheClientCodec())
pipeline.addLast(BinaryMemcacheObjectAggregator(Integer.MAX_VALUE))
}
}
return FixedChannelPool(bootstrap, channelPoolHandler, server.maxConnections)
}
private fun sendRequest(request: FullBinaryMemcacheRequest): CompletableFuture<FullBinaryMemcacheResponse> {
val server = cfg.servers.let { servers ->
if (servers.size > 1) {
val key = request.key().duplicate()
var checksum = 0
while (key.readableBytes() > 4) {
val byte = key.readInt()
checksum = checksum xor byte
}
while (key.readableBytes() > 0) {
val byte = key.readByte()
checksum = checksum xor byte.toInt()
}
servers[checksum % servers.size]
} else {
servers.first()
}
}
val response = CompletableFuture<FullBinaryMemcacheResponse>()
// Custom handler for processing responses
val pool = connectionPool.computeIfAbsent(server.endpoint) {
newConnectionPool(server)
}
pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {
override fun operationComplete(channelFuture: NettyFuture<Channel>) {
if (channelFuture.isSuccess) {
val channel = channelFuture.now
val pipeline = channel.pipeline()
channel.pipeline()
.addLast("client-handler", object : SimpleChannelInboundHandler<FullBinaryMemcacheResponse>() {
override fun channelRead0(
ctx: ChannelHandlerContext,
msg: FullBinaryMemcacheResponse
) {
pipeline.removeLast()
pool.release(channel)
msg.touch("The method's caller must remember to release this")
response.complete(msg.retain())
}
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
val ex = when (cause) {
is DecoderException -> cause.cause!!
else -> cause
}
ctx.close()
pipeline.removeLast()
pool.release(channel)
response.completeExceptionally(ex)
}
})
request.touch()
channel.writeAndFlush(request)
} else {
response.completeExceptionally(channelFuture.cause())
}
}
})
return response
}
private fun encodeExpiry(expiry: Duration): Int {
val expirySeconds = expiry.toSeconds()
return expirySeconds.toInt().takeIf { it.toLong() == expirySeconds }
?: Instant.ofEpochSecond(expirySeconds).epochSecond.toInt()
}
fun get(key: String): CompletableFuture<ReadableByteChannel?> {
val request = (cfg.digestAlgorithm
?.let(MessageDigest::getInstance)
?.let { md ->
digest(key.toByteArray(), md)
} ?: key.toByteArray(Charsets.UTF_8)).let { digest ->
DefaultFullBinaryMemcacheRequest(Unpooled.wrappedBuffer(digest), null).apply {
setOpcode(BinaryMemcacheOpcodes.GET)
}
}
return sendRequest(request).thenApply { response ->
try {
when (val status = response.status()) {
BinaryMemcacheResponseStatus.SUCCESS -> {
val compressionMode = cfg.compressionMode
val content = response.content().retain()
content.touch()
if (compressionMode != null) {
when (compressionMode) {
MemcacheCacheConfiguration.CompressionMode.GZIP -> {
GZIPInputStream(ByteBufInputStream(content))
}
MemcacheCacheConfiguration.CompressionMode.DEFLATE -> {
InflaterInputStream(ByteBufInputStream(content))
}
}
} else {
ByteBufInputStream(content)
}.let(Channels::newChannel)
}
BinaryMemcacheResponseStatus.KEY_ENOENT -> {
null
}
else -> throw MemcacheException(status)
}
} finally {
response.release()
}
}
}
fun put(key: String, content: ByteBuf, expiry: Duration, cas: Long? = null): CompletableFuture<Void> {
val request = (cfg.digestAlgorithm
?.let(MessageDigest::getInstance)
?.let { md ->
digest(key.toByteArray(), md)
} ?: key.toByteArray(Charsets.UTF_8)).let { digest ->
val extras = Unpooled.buffer(8, 8)
extras.writeInt(0)
extras.writeInt(encodeExpiry(expiry))
val compressionMode = cfg.compressionMode
content.retain()
val payload = if (compressionMode != null) {
val inputStream = ByteBufInputStream(content)
val buf = content.alloc().buffer()
buf.retain()
val outputStream = when (compressionMode) {
MemcacheCacheConfiguration.CompressionMode.GZIP -> {
GZIPOutputStream(ByteBufOutputStream(buf))
}
MemcacheCacheConfiguration.CompressionMode.DEFLATE -> {
DeflaterOutputStream(ByteBufOutputStream(buf), Deflater(Deflater.DEFAULT_COMPRESSION, false))
}
}
inputStream.use { i ->
outputStream.use { o ->
JWO.copy(i, o)
}
}
buf
} else {
content
}
DefaultFullBinaryMemcacheRequest(Unpooled.wrappedBuffer(digest), extras, payload).apply {
setOpcode(BinaryMemcacheOpcodes.SET)
cas?.let(this::setCas)
}
}
return sendRequest(request).thenApply { response ->
try {
when (val status = response.status()) {
BinaryMemcacheResponseStatus.SUCCESS -> null
else -> throw MemcacheException(status)
}
} finally {
response.release()
}
}
}
fun shutDown(): NettyFuture<*> {
return group.shutdownGracefully()
}
override fun close() {
shutDown().sync()
}
}

View File

@@ -1 +0,0 @@
net.woggioni.gbcs.server.memcache.MemcacheCacheProvider

View File

@@ -6,10 +6,10 @@ plugins {
configurations {
bundle {
extendsFrom runtimeClasspath
canBeResolved = true
canBeConsumed = false
visible = false
transitive = false
resolutionStrategy {
dependencies {
@@ -29,20 +29,10 @@ configurations {
}
dependencies {
implementation project(':gbcs-common')
implementation project(':gbcs-api')
implementation catalog.jwo
implementation catalog.slf4j.api
implementation catalog.netty.common
implementation catalog.netty.codec.memcache
bundle catalog.netty.codec.memcache
testRuntimeOnly catalog.logback.classic
}
tasks.named(JavaPlugin.TEST_TASK_NAME, Test) {
systemProperty("io.netty.leakDetectionLevel", "PARANOID")
compileOnly project(':gbcs-common')
compileOnly project(':gbcs-api')
compileOnly catalog.jwo
implementation catalog.xmemcached
}
Provider<Tar> bundleTask = tasks.register("bundle", Tar) {

View File

@@ -0,0 +1,14 @@
import net.woggioni.gbcs.api.CacheProvider;
module net.woggioni.gbcs.server.memcached {
requires net.woggioni.gbcs.common;
requires net.woggioni.gbcs.api;
requires com.googlecode.xmemcached;
requires net.woggioni.jwo;
requires java.xml;
requires kotlin.stdlib;
provides CacheProvider with net.woggioni.gbcs.server.memcached.MemcachedCacheProvider;
opens net.woggioni.gbcs.server.memcached.schema;
}

View File

@@ -0,0 +1,59 @@
package net.woggioni.gbcs.server.memcached
import net.rubyeye.xmemcached.XMemcachedClientBuilder
import net.rubyeye.xmemcached.command.BinaryCommandFactory
import net.rubyeye.xmemcached.transcoders.CompressionMode
import net.rubyeye.xmemcached.transcoders.SerializingTranscoder
import net.woggioni.gbcs.api.Cache
import net.woggioni.gbcs.api.exception.ContentTooLargeException
import net.woggioni.gbcs.common.HostAndPort
import net.woggioni.jwo.JWO
import java.io.ByteArrayInputStream
import java.net.InetSocketAddress
import java.nio.channels.Channels
import java.nio.channels.ReadableByteChannel
import java.nio.charset.StandardCharsets
import java.security.MessageDigest
import java.time.Duration
class MemcachedCache(
servers: List<HostAndPort>,
private val maxAge: Duration,
maxSize : Int,
digestAlgorithm: String?,
compressionMode: CompressionMode,
) : Cache {
private val memcachedClient = XMemcachedClientBuilder(
servers.stream().map { addr: HostAndPort -> InetSocketAddress(addr.host, addr.port) }.toList()
).apply {
commandFactory = BinaryCommandFactory()
digestAlgorithm?.let { dAlg ->
setKeyProvider { key ->
val md = MessageDigest.getInstance(dAlg)
md.update(key.toByteArray(StandardCharsets.UTF_8))
JWO.bytesToHex(md.digest())
}
}
transcoder = SerializingTranscoder(maxSize).apply {
setCompressionMode(compressionMode)
}
}.build()
override fun get(key: String): ReadableByteChannel? {
return memcachedClient.get<ByteArray>(key)
?.let(::ByteArrayInputStream)
?.let(Channels::newChannel)
}
override fun put(key: String, content: ByteArray) {
try {
memcachedClient[key, maxAge.toSeconds().toInt()] = content
} catch (e: IllegalArgumentException) {
throw ContentTooLargeException(e.message, e)
}
}
override fun close() {
memcachedClient.shutdown()
}
}
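
A minimal sketch (not part of this commit) of driving the new xmemcached-backed cache directly; it assumes a memcached instance reachable on localhost:11211, and the MD5 digest and other parameter values are purely illustrative (CompressionMode.ZIP matches the configuration default introduced below):

import net.rubyeye.xmemcached.transcoders.CompressionMode
import net.woggioni.gbcs.common.HostAndPort
import net.woggioni.gbcs.server.memcached.MemcachedCache
import java.nio.ByteBuffer
import java.time.Duration

fun main() {
    // Assumed: a local memcached listening on the default port.
    val cache = MemcachedCache(
        servers = listOf(HostAndPort("localhost", 11211)),
        maxAge = Duration.ofHours(1),
        maxSize = 0x100000,
        digestAlgorithm = "MD5",
        compressionMode = CompressionMode.ZIP
    )
    try {
        cache.put("some-key", "hello".toByteArray())
        cache.get("some-key")?.let { channel ->
            val buf = ByteBuffer.allocate(5)
            channel.read(buf)
            println(String(buf.array()))
        }
    } finally {
        cache.close()
    }
}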

View File

@@ -0,0 +1,26 @@
package net.woggioni.gbcs.server.memcached
import net.rubyeye.xmemcached.transcoders.CompressionMode
import net.woggioni.gbcs.api.Configuration
import net.woggioni.gbcs.common.HostAndPort
import java.time.Duration
data class MemcachedCacheConfiguration(
var servers: List<HostAndPort>,
var maxAge: Duration = Duration.ofDays(1),
var maxSize: Int = 0x100000,
var digestAlgorithm: String? = null,
var compressionMode: CompressionMode = CompressionMode.ZIP,
) : Configuration.Cache {
override fun materialize() = MemcachedCache(
servers,
maxAge,
maxSize,
digestAlgorithm,
compressionMode
)
override fun getNamespaceURI() = "urn:net.woggioni.gbcs.server.memcached"
override fun getTypeName() = "memcachedCacheType"
}

View File

@@ -1,5 +1,6 @@
package net.woggioni.gbcs.server.memcache
package net.woggioni.gbcs.server.memcached
import net.rubyeye.xmemcached.transcoders.CompressionMode
import net.woggioni.gbcs.api.CacheProvider
import net.woggioni.gbcs.api.exception.ConfigurationException
import net.woggioni.gbcs.common.GBCS
@@ -10,21 +11,19 @@ import net.woggioni.gbcs.common.Xml.Companion.renderAttribute
import org.w3c.dom.Document
import org.w3c.dom.Element
import java.time.Duration
import java.time.temporal.ChronoUnit
class MemcachedCacheProvider : CacheProvider<MemcachedCacheConfiguration> {
override fun getXmlSchemaLocation() = "jpms://net.woggioni.gbcs.server.memcached/net/woggioni/gbcs/server/memcached/schema/gbcs-memcached.xsd"
class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
override fun getXmlSchemaLocation() = "jpms://net.woggioni.gbcs.server.memcache/net/woggioni/gbcs/server/memcache/schema/gbcs-memcache.xsd"
override fun getXmlType() = "memcachedCacheType"
override fun getXmlType() = "memcacheCacheType"
override fun getXmlNamespace() = "urn:net.woggioni.gbcs.server.memcache"
override fun getXmlNamespace() = "urn:net.woggioni.gbcs.server.memcached"
val xmlNamespacePrefix : String
get() = "gbcs-memcache"
get() = "gbcs-memcached"
override fun deserialize(el: Element): MemcacheCacheConfiguration {
val servers = mutableListOf<MemcacheCacheConfiguration.Server>()
override fun deserialize(el: Element): MemcachedCacheConfiguration {
val servers = mutableListOf<HostAndPort>()
val maxAge = el.renderAttribute("max-age")
?.let(Duration::parse)
?: Duration.ofDays(1)
@@ -34,30 +33,24 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
val compressionMode = el.renderAttribute("compression-mode")
?.let {
when (it) {
"gzip" -> MemcacheCacheConfiguration.CompressionMode.GZIP
"deflate" -> MemcacheCacheConfiguration.CompressionMode.DEFLATE
else -> MemcacheCacheConfiguration.CompressionMode.DEFLATE
"gzip" -> CompressionMode.GZIP
"zip" -> CompressionMode.ZIP
else -> CompressionMode.ZIP
}
}
?: MemcacheCacheConfiguration.CompressionMode.DEFLATE
?: CompressionMode.ZIP
val digestAlgorithm = el.renderAttribute("digest")
for (child in el.asIterable()) {
when (child.nodeName) {
"server" -> {
val host = child.renderAttribute("host") ?: throw ConfigurationException("host attribute is required")
val port = child.renderAttribute("port")?.toInt() ?: throw ConfigurationException("port attribute is required")
val maxConnections = child.renderAttribute("max-connections")?.toInt() ?: 1
val connectionTimeout = child.renderAttribute("connection-timeout")
?.let(Duration::parse)
?.let(Duration::toMillis)
?.let(Long::toInt)
?: 10000
servers.add(MemcacheCacheConfiguration.Server(HostAndPort(host, port), connectionTimeout, maxConnections))
servers.add(HostAndPort(host, port))
}
}
}
return MemcacheCacheConfiguration(
return MemcachedCacheConfiguration(
servers,
maxAge,
maxSize,
@@ -66,7 +59,7 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
)
}
override fun serialize(doc: Document, cache: MemcacheCacheConfiguration) = cache.run {
override fun serialize(doc: Document, cache: MemcachedCacheConfiguration) = cache.run {
val result = doc.createElement("cache")
Xml.of(doc, result) {
attr("xmlns:${xmlNamespacePrefix}", xmlNamespace, namespaceURI = "http://www.w3.org/2000/xmlns/")
@@ -74,12 +67,8 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
attr("xs:type", "${xmlNamespacePrefix}:$xmlType", GBCS.XML_SCHEMA_NAMESPACE_URI)
for (server in servers) {
node("server") {
attr("host", server.endpoint.host)
attr("port", server.endpoint.port.toString())
server.connectionTimeoutMillis?.let { connectionTimeoutMillis ->
attr("connection-timeout", Duration.of(connectionTimeoutMillis.toLong(), ChronoUnit.MILLIS).toString())
}
attr("max-connections", server.maxConnections.toString())
attr("host", server.host)
attr("port", server.port.toString())
}
}
attr("max-age", maxAge.toString())
@@ -87,14 +76,12 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
digestAlgorithm?.let { digestAlgorithm ->
attr("digest", digestAlgorithm)
}
compressionMode?.let { compressionMode ->
attr(
"compression-mode", when (compressionMode) {
MemcacheCacheConfiguration.CompressionMode.GZIP -> "gzip"
MemcacheCacheConfiguration.CompressionMode.DEFLATE -> "deflate"
}
)
}
attr(
"compression-mode", when (compressionMode) {
CompressionMode.GZIP -> "gzip"
CompressionMode.ZIP -> "zip"
}
)
}
result
}

View File

@@ -0,0 +1 @@
net.woggioni.gbcs.server.memcached.MemcachedCacheProvider

View File

@@ -1,35 +1,33 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<xs:schema targetNamespace="urn:net.woggioni.gbcs.server.memcache"
xmlns:gbcs-memcache="urn:net.woggioni.gbcs.server.memcache"
<xs:schema targetNamespace="urn:net.woggioni.gbcs.server.memcached"
xmlns:gbcs-memcached="urn:net.woggioni.gbcs.server.memcached"
xmlns:gbcs="urn:net.woggioni.gbcs.server"
xmlns:xs="http://www.w3.org/2001/XMLSchema">
<xs:import schemaLocation="jpms://net.woggioni.gbcs.server/net/woggioni/gbcs/server/schema/gbcs.xsd" namespace="urn:net.woggioni.gbcs.server"/>
<xs:complexType name="memcacheServerType">
<xs:complexType name="memcachedServerType">
<xs:attribute name="host" type="xs:token" use="required"/>
<xs:attribute name="port" type="xs:positiveInteger" use="required"/>
<xs:attribute name="connection-timeout" type="xs:duration"/>
<xs:attribute name="max-connections" type="xs:positiveInteger" default="1"/>
</xs:complexType>
<xs:complexType name="memcacheCacheType">
<xs:complexType name="memcachedCacheType">
<xs:complexContent>
<xs:extension base="gbcs:cacheType">
<xs:sequence maxOccurs="unbounded">
<xs:element name="server" type="gbcs-memcache:memcacheServerType"/>
<xs:element name="server" type="gbcs-memcached:memcachedServerType"/>
</xs:sequence>
<xs:attribute name="max-age" type="xs:duration" default="P1D"/>
<xs:attribute name="max-size" type="xs:unsignedInt" default="1048576"/>
<xs:attribute name="digest" type="xs:token" />
<xs:attribute name="compression-mode" type="gbcs-memcache:compressionType"/>
<xs:attribute name="compression-mode" type="gbcs-memcached:compressionType" default="zip"/>
</xs:extension>
</xs:complexContent>
</xs:complexType>
<xs:simpleType name="compressionType">
<xs:restriction base="xs:token">
<xs:enumeration value="deflate"/>
<xs:enumeration value="zip"/>
<xs:enumeration value="gzip"/>
</xs:restriction>
</xs:simpleType>

View File

@@ -19,7 +19,7 @@ dependencies {
testImplementation catalog.bcprov.jdk18on
testImplementation catalog.bcpkix.jdk18on
testRuntimeOnly project(":gbcs-server-memcache")
testRuntimeOnly project(":gbcs-server-memcached")
}
test {

View File

@@ -18,7 +18,6 @@ module net.woggioni.gbcs.server {
requires net.woggioni.jwo;
requires net.woggioni.gbcs.common;
requires net.woggioni.gbcs.api;
requires io.netty.transport.classes.epoll;
exports net.woggioni.gbcs.server;

View File

@@ -10,9 +10,6 @@ import io.netty.channel.ChannelInboundHandlerAdapter
import io.netty.channel.ChannelInitializer
import io.netty.channel.ChannelOption
import io.netty.channel.ChannelPromise
import io.netty.channel.MultithreadEventLoopGroup
import io.netty.channel.epoll.EpollEventLoopGroup
import io.netty.channel.epoll.EpollServerSocketChannel
import io.netty.channel.nio.NioEventLoopGroup
import io.netty.channel.socket.nio.NioServerSocketChannel
import io.netty.handler.codec.compression.CompressionOptions
@@ -52,7 +49,6 @@ import net.woggioni.gbcs.server.exception.ExceptionHandler
import net.woggioni.gbcs.server.handler.ServerHandler
import net.woggioni.gbcs.server.throttling.ThrottlingHandler
import net.woggioni.jwo.JWO
import net.woggioni.jwo.OS
import net.woggioni.jwo.Tuple2
import java.io.OutputStream
import java.net.InetSocketAddress
@@ -403,23 +399,10 @@ class GradleBuildCacheServer(private val cfg: Configuration) {
}
fun run(): ServerHandle {
val bossGroup : MultithreadEventLoopGroup
val workerGroup : MultithreadEventLoopGroup
val serverSocketChannel : Class<*>
if(cfg.isUseNativeTransport) {
if(OS.isLinux) {
bossGroup = EpollEventLoopGroup(1)
serverSocketChannel = EpollServerSocketChannel::class.java
workerGroup = EpollEventLoopGroup(0)
} else {
throw java.lang.IllegalArgumentException("Native transport is not supported on ${OS.current.name}")
}
} else {
bossGroup = NioEventLoopGroup(1)
serverSocketChannel = NioServerSocketChannel::class.java
workerGroup = NioEventLoopGroup(0)
}
// Create the multithreaded event loops for the server
val bossGroup = NioEventLoopGroup(0)
val serverSocketChannel = NioServerSocketChannel::class.java
val workerGroup = bossGroup
val eventExecutorGroup = run {
val threadFactory = if (cfg.eventExecutor.isUseVirtualThreads) {
Thread.ofVirtual().factory()

View File

@@ -0,0 +1,21 @@
package net.woggioni.gbcs.server.cache
import net.woggioni.jwo.JWO
import java.security.MessageDigest
object CacheUtils {
fun digest(
data: ByteArray,
md: MessageDigest = MessageDigest.getInstance("MD5")
): ByteArray {
md.update(data)
return md.digest()
}
fun digestString(
data: ByteArray,
md: MessageDigest = MessageDigest.getInstance("MD5")
): String {
return JWO.bytesToHex(digest(data, md))
}
}

View File

@@ -1,11 +1,8 @@
package net.woggioni.gbcs.server.cache
import io.netty.buffer.ByteBuf
import net.woggioni.gbcs.api.Cache
import net.woggioni.gbcs.common.ByteBufInputStream
import net.woggioni.gbcs.common.GBCS.digestString
import net.woggioni.gbcs.common.contextLogger
import net.woggioni.jwo.JWO
import net.woggioni.gbcs.server.cache.CacheUtils.digestString
import net.woggioni.jwo.LockFile
import java.nio.channels.Channels
import java.nio.channels.FileChannel
@@ -17,7 +14,6 @@ import java.nio.file.attribute.BasicFileAttributes
import java.security.MessageDigest
import java.time.Duration
import java.time.Instant
import java.util.concurrent.CompletableFuture
import java.util.concurrent.atomic.AtomicReference
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
@@ -32,10 +28,7 @@ class FileSystemCache(
val compressionLevel: Int
) : Cache {
private companion object {
@JvmStatic
private val log = contextLogger()
}
private val log = contextLogger()
init {
Files.createDirectories(root)
@@ -69,12 +62,10 @@ class FileSystemCache(
}
}.also {
gc()
}.let {
CompletableFuture.completedFuture(it)
}
}
override fun put(key: String, content: ByteBuf): CompletableFuture<Void> {
override fun put(key: String, content: ByteArray) {
(digestAlgorithm
?.let(MessageDigest::getInstance)
?.let { md ->
@@ -91,7 +82,7 @@ class FileSystemCache(
it
}
}.use {
JWO.copy(ByteBufInputStream(content), it)
it.write(content)
}
Files.move(tmpFile, file, StandardCopyOption.ATOMIC_MOVE)
} catch (t: Throwable) {
@@ -101,7 +92,6 @@ class FileSystemCache(
}.also {
gc()
}
return CompletableFuture.completedFuture(null)
}
private fun gc() {

View File

@@ -1,20 +1,18 @@
package net.woggioni.gbcs.server.cache
import io.netty.buffer.ByteBuf
import net.woggioni.gbcs.api.Cache
import net.woggioni.gbcs.common.ByteBufInputStream
import net.woggioni.gbcs.common.ByteBufOutputStream
import net.woggioni.gbcs.common.GBCS.digestString
import net.woggioni.gbcs.common.contextLogger
import net.woggioni.jwo.JWO
import net.woggioni.gbcs.server.cache.CacheUtils.digestString
import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.nio.ByteBuffer
import java.nio.channels.Channels
import java.security.MessageDigest
import java.time.Duration
import java.time.Instant
import java.util.concurrent.CompletableFuture
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.PriorityBlockingQueue
import java.util.concurrent.atomic.AtomicLong
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.atomic.AtomicReference
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.Inflater
@@ -22,72 +20,41 @@ import java.util.zip.InflaterInputStream
class InMemoryCache(
val maxAge: Duration,
val maxSize: Long,
val digestAlgorithm: String?,
val compressionEnabled: Boolean,
val compressionLevel: Int
) : Cache {
companion object {
@JvmStatic
private val log = contextLogger()
}
private val map = ConcurrentHashMap<String, MapValue>()
private val size = AtomicLong()
private val map = ConcurrentHashMap<String, ByteBuf>()
private class RemovalQueueElement(val key: String, val value : ByteBuf, val expiry : Instant) : Comparable<RemovalQueueElement> {
override fun compareTo(other: RemovalQueueElement) = expiry.compareTo(other.expiry)
private class MapValue(val rc: AtomicInteger, val payload : AtomicReference<ByteArray>)
private class RemovalQueueElement(val key: String, val expiry : Instant) : Comparable<RemovalQueueElement> {
override fun compareTo(other: RemovalQueueElement)= expiry.compareTo(other.expiry)
}
private val removalQueue = PriorityBlockingQueue<RemovalQueueElement>()
private var running = true
private val garbageCollector = Thread {
private val garbageCollector = Thread({
while(true) {
val el = removalQueue.take()
val buf = el.value
val now = Instant.now()
if(now > el.expiry) {
val removed = map.remove(el.key, buf)
if(removed) {
updateSizeAfterRemoval(buf)
//Decrease the reference count for map
buf.release()
val value = map[el.key] ?: continue
val rc = value.rc.decrementAndGet()
if(rc == 0) {
map.remove(el.key)
}
//Decrease the reference count for removalQueue
buf.release()
} else {
removalQueue.put(el)
Thread.sleep(minOf(Duration.between(now, el.expiry), Duration.ofSeconds(1)))
}
}
}.apply {
}).apply {
start()
}
private fun removeEldest() : Long {
while(true) {
val el = removalQueue.take()
val buf = el.value
val removed = map.remove(el.key, buf)
//Decrease the reference count for removalQueue
buf.release()
if(removed) {
val newSize = updateSizeAfterRemoval(buf)
//Decrease the reference count for map
buf.release()
return newSize
}
}
}
private fun updateSizeAfterRemoval(removed: ByteBuf) : Long {
return size.updateAndGet { currentSize : Long ->
currentSize - removed.readableBytes()
}
}
override fun close() {
running = false
garbageCollector.join()
@@ -101,50 +68,39 @@ class InMemoryCache(
} ?: key
).let { digest ->
map[digest]
?.let(MapValue::payload)
?.let(AtomicReference<ByteArray>::get)
?.let { value ->
val copy = value.retainedDuplicate()
copy.touch("This has to be released by the caller of the cache")
if (compressionEnabled) {
val inflater = Inflater()
Channels.newChannel(InflaterInputStream(ByteBufInputStream(copy), inflater))
Channels.newChannel(InflaterInputStream(ByteArrayInputStream(value), inflater))
} else {
Channels.newChannel(ByteBufInputStream(copy))
Channels.newChannel(ByteArrayInputStream(value))
}
}
}.let {
CompletableFuture.completedFuture(it)
}
override fun put(key: String, content: ByteBuf) =
override fun put(key: String, content: ByteArray) {
(digestAlgorithm
?.let(MessageDigest::getInstance)
?.let { md ->
digestString(key.toByteArray(), md)
} ?: key).let { digest ->
content.retain()
val value = if (compressionEnabled) {
val deflater = Deflater(compressionLevel)
val buf = content.alloc().buffer()
buf.retain()
DeflaterOutputStream(ByteBufOutputStream(buf), deflater).use { outputStream ->
ByteBufInputStream(content).use { inputStream ->
JWO.copy(inputStream, outputStream)
}
val baos = ByteArrayOutputStream()
DeflaterOutputStream(baos, deflater).use { stream ->
stream.write(content)
}
buf
baos.toByteArray()
} else {
content
}
val old = map.put(digest, value)
val delta = value.readableBytes() - (old?.readableBytes() ?: 0)
var newSize = size.updateAndGet { currentSize : Long ->
currentSize + delta
val mapValue = map.computeIfAbsent(digest) {
MapValue(AtomicInteger(0), AtomicReference())
}
removalQueue.put(RemovalQueueElement(digest, value.retain(), Instant.now().plus(maxAge)))
while(newSize > maxSize) {
newSize = removeEldest()
}
}.let {
CompletableFuture.completedFuture<Void>(null)
mapValue.payload.set(value)
removalQueue.put(RemovalQueueElement(digest, Instant.now().plus(maxAge)))
}
}
}
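
A minimal sketch of driving the ByteArray-based variant of this cache, for orientation: put stores a raw byte array and get is assumed to hand back a nullable ReadableByteChannel, as the handler changes further down suggest. The constructor parameter set shown here is inferred from the class header in this hunk and is not verified against the full sources.

import java.nio.ByteBuffer
import java.nio.channels.ReadableByteChannel
import java.time.Duration
import java.util.zip.Deflater

fun inMemoryCacheSketch() {
    // Parameter names assumed from the hunk above.
    val cache = InMemoryCache(
        maxAge = Duration.ofDays(1),
        digestAlgorithm = "MD5",
        compressionEnabled = true,
        compressionLevel = Deflater.DEFAULT_COMPRESSION
    )
    cache.put("some-key", "some payload".toByteArray())
    // Assumed contract: null on a miss, a readable channel over the (possibly inflated) payload on a hit.
    val channel: ReadableByteChannel? = cache.get("some-key")
    channel?.use { ch ->
        val buf = ByteBuffer.allocate(8192)
        while (ch.read(buf) != -1) {
            buf.flip()   // hand the bytes to the consumer here
            buf.clear()
        }
    }
    cache.close()
}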

View File

@@ -6,14 +6,12 @@ import java.time.Duration
data class InMemoryCacheConfiguration(
val maxAge: Duration,
val maxSize: Long,
val digestAlgorithm : String?,
val compressionEnabled: Boolean,
val compressionLevel: Int,
) : Configuration.Cache {
override fun materialize() = InMemoryCache(
maxAge,
maxSize,
digestAlgorithm,
compressionEnabled,
compressionLevel
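
The compressionEnabled / compressionLevel pair ultimately drives a plain deflate round trip over byte arrays. The following standalone sketch uses only standard JDK streams and mirrors the put/get compression paths of the in-memory cache shown above; it is an illustration, not the project's code verbatim.

import java.io.ByteArrayInputStream
import java.io.ByteArrayOutputStream
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.Inflater
import java.util.zip.InflaterInputStream

// Compress a payload with the given level, as done on the put path when compression is enabled.
fun deflate(payload: ByteArray, level: Int): ByteArray {
    val baos = ByteArrayOutputStream()
    DeflaterOutputStream(baos, Deflater(level)).use { it.write(payload) }
    return baos.toByteArray()
}

// Decompress it again, as done on the get path before the bytes are exposed as a channel.
fun inflate(compressed: ByteArray): ByteArray =
    InflaterInputStream(ByteArrayInputStream(compressed), Inflater()).use { it.readAllBytes() }

Deflater.DEFAULT_COMPRESSION is -1, which lines up with the compression-level default of -1 declared in the XSD further down.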

View File

@@ -6,6 +6,7 @@ import net.woggioni.gbcs.common.Xml
import net.woggioni.gbcs.common.Xml.Companion.renderAttribute
import org.w3c.dom.Document
import org.w3c.dom.Element
import java.nio.file.Path
import java.time.Duration
import java.util.zip.Deflater
@@ -21,9 +22,6 @@ class InMemoryCacheProvider : CacheProvider<InMemoryCacheConfiguration> {
val maxAge = el.renderAttribute("max-age")
?.let(Duration::parse)
?: Duration.ofDays(1)
val maxSize = el.renderAttribute("max-size")
?.let(java.lang.Long::decode)
?: 0x1000000
val enableCompression = el.renderAttribute("enable-compression")
?.let(String::toBoolean)
?: true
@@ -34,7 +32,6 @@ class InMemoryCacheProvider : CacheProvider<InMemoryCacheConfiguration> {
return InMemoryCacheConfiguration(
maxAge,
maxSize,
digestAlgorithm,
enableCompression,
compressionLevel
@@ -47,7 +44,6 @@ class InMemoryCacheProvider : CacheProvider<InMemoryCacheConfiguration> {
val prefix = doc.lookupPrefix(GBCS.GBCS_NAMESPACE_URI)
attr("xs:type", "${prefix}:inMemoryCacheType", GBCS.XML_SCHEMA_NAMESPACE_URI)
attr("max-age", maxAge.toString())
attr("max-size", maxSize.toString())
digestAlgorithm?.let { digestAlgorithm ->
attr("digest", digestAlgorithm)
}

View File

@@ -44,7 +44,6 @@ object Parser {
val serverPath = root.renderAttribute("path")
var incomingConnectionsBacklogSize = 1024
var authentication: Authentication? = null
var useNativeTransport = false
for (child in root.asIterable()) {
val tagName = child.localName
when (tagName) {
@@ -137,7 +136,7 @@ object Parser {
}
"event-executor" -> {
val useVirtualThread = child.renderAttribute("use-virtual-threads")
val useVirtualThread = root.renderAttribute("use-virtual-threads")
?.let(String::toBoolean) ?: true
eventExecutor = Configuration.EventExecutor(useVirtualThread)
}
@@ -181,10 +180,6 @@ object Parser {
}
tls = Tls(keyStore, trustStore)
}
"transport" -> {
useNativeTransport = child.renderAttribute("use-native-transport")
?.let(String::toBoolean) ?: false
}
}
}
return Configuration.of(
@@ -199,7 +194,6 @@ object Parser {
cache!!,
authentication,
tls,
useNativeTransport
)
}
@@ -271,8 +265,7 @@ object Parser {
}.map { el ->
val groupName = el.renderAttribute("name") ?: throw ConfigurationException("Group name is required")
var roles = emptySet<Role>()
var userQuota: Configuration.Quota? = null
var groupQuota: Configuration.Quota? = null
var quota: Configuration.Quota? = null
for (child in el.asIterable()) {
when (child.localName) {
"users" -> {
@@ -286,15 +279,12 @@ object Parser {
"roles" -> {
roles = parseRoles(child)
}
"group-quota" -> {
userQuota = parseQuota(child)
}
"user-quota" -> {
groupQuota = parseQuota(child)
"quota" -> {
quota = parseQuota(child)
}
}
}
groupName to Group(groupName, roles, userQuota, groupQuota)
groupName to Group(groupName, roles, quota)
}.toMap()
val users = knownUsersMap.map { (name, user) ->
name to User(name, user.password, userGroups[name]?.mapNotNull { groups[it] }?.toSet() ?: emptySet(), user.quota)

View File

@@ -8,14 +8,8 @@ import org.w3c.dom.Document
object Serializer {
private fun Xml.serializeQuota(quota : Configuration.Quota) {
attr("calls", quota.calls.toString())
attr("period", quota.period.toString())
attr("max-available-calls", quota.maxAvailableCalls.toString())
attr("initial-available-calls", quota.initialAvailableCalls.toString())
}
fun serialize(conf : Configuration) : Document {
val schemaLocations = CacheSerializers.index.values.asSequence().map {
it.xmlNamespace to it.xmlSchemaLocation
}.toMap()
@@ -44,12 +38,8 @@ object Serializer {
attr("max-request-size", connection.maxRequestSize.toString())
}
}
node("transport") {
attr("use-native-transport", conf.isUseNativeTransport.toString())
}
node("event-executor") {
attr("use-virtual-threads", conf.eventExecutor.isUseVirtualThreads.toString())
attr("use-virtual-threads", conf.eventExecutor.isUseVirtualThreads.toString())
}
val cache = conf.cache
val serializer : CacheProvider<Configuration.Cache> =
@@ -66,7 +56,10 @@ object Serializer {
}
user.quota?.let { quota ->
node("quota") {
serializeQuota(quota)
attr("calls", quota.calls.toString())
attr("period", quota.period.toString())
attr("max-available-calls", quota.maxAvailableCalls.toString())
attr("initial-available-calls", quota.initialAvailableCalls.toString())
}
}
}
@@ -77,7 +70,10 @@ object Serializer {
anonymousUser.quota?.let { quota ->
node("anonymous") {
node("quota") {
serializeQuota(quota)
attr("calls", quota.calls.toString())
attr("period", quota.period.toString())
attr("max-available-calls", quota.maxAvailableCalls.toString())
attr("initial-available-calls", quota.initialAvailableCalls.toString())
}
}
}
@@ -117,14 +113,12 @@ object Serializer {
}
}
}
group.userQuota?.let { quota ->
node("user-quota") {
serializeQuota(quota)
}
}
group.groupQuota?.let { quota ->
node("group-quota") {
serializeQuota(quota)
group.quota?.let { quota ->
node("quota") {
attr("calls", quota.calls.toString())
attr("period", quota.period.toString())
attr("max-available-calls", quota.maxAvailableCalls.toString())
attr("initial-available-calls", quota.initialAvailableCalls.toString())
}
}
}

View File

@@ -17,6 +17,7 @@ import io.netty.handler.codec.http.HttpUtil
import io.netty.handler.codec.http.LastHttpContent
import io.netty.handler.stream.ChunkedNioStream
import net.woggioni.gbcs.api.Cache
import net.woggioni.gbcs.api.exception.CacheException
import net.woggioni.gbcs.common.contextLogger
import net.woggioni.gbcs.server.debug
import net.woggioni.gbcs.server.warn
@@ -42,54 +43,49 @@ class ServerHandler(private val cache: Cache, private val serverPrefix: Path) :
return
}
if (serverPrefix == prefix) {
cache.get(key).thenApply { channel ->
if(channel != null) {
log.debug(ctx) {
"Cache hit for key '$key'"
}
val response = DefaultHttpResponse(msg.protocolVersion(), HttpResponseStatus.OK)
response.headers()[HttpHeaderNames.CONTENT_TYPE] = HttpHeaderValues.APPLICATION_OCTET_STREAM
if (!keepAlive) {
response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE)
response.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.IDENTITY)
} else {
response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
response.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED)
}
ctx.write(response)
when (channel) {
is FileChannel -> {
val content = DefaultFileRegion(channel, 0, channel.size())
if (keepAlive) {
ctx.write(content)
ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT.retainedDuplicate())
} else {
ctx.writeAndFlush(content)
.addListener(ChannelFutureListener.CLOSE)
}
}
else -> {
val content = ChunkedNioStream(channel)
if (keepAlive) {
ctx.write(content).addListener {
content.close()
}
ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT.retainedDuplicate())
} else {
ctx.writeAndFlush(content)
.addListener(ChannelFutureListener.CLOSE)
}
}
}
} else {
log.debug(ctx) {
"Cache miss for key '$key'"
}
val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.NOT_FOUND)
response.headers()[HttpHeaderNames.CONTENT_LENGTH] = 0
ctx.writeAndFlush(response)
try {
cache.get(key)
} catch(ex : Throwable) {
throw CacheException("Error accessing the cache backend", ex)
}?.let { channel ->
log.debug(ctx) {
"Cache hit for key '$key'"
}
}.whenComplete { _, ex -> ex?.let(ctx::fireExceptionCaught) }
val response = DefaultHttpResponse(msg.protocolVersion(), HttpResponseStatus.OK)
response.headers()[HttpHeaderNames.CONTENT_TYPE] = HttpHeaderValues.APPLICATION_OCTET_STREAM
if (!keepAlive) {
response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.CLOSE)
response.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.IDENTITY)
} else {
response.headers().set(HttpHeaderNames.CONNECTION, HttpHeaderValues.KEEP_ALIVE)
response.headers().set(HttpHeaderNames.TRANSFER_ENCODING, HttpHeaderValues.CHUNKED)
}
ctx.write(response)
when (channel) {
is FileChannel -> {
if (keepAlive) {
ctx.write(DefaultFileRegion(channel, 0, channel.size()))
ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT.retainedDuplicate())
} else {
ctx.writeAndFlush(DefaultFileRegion(channel, 0, channel.size()))
.addListener(ChannelFutureListener.CLOSE)
}
}
else -> {
ctx.write(ChunkedNioStream(channel)).addListener { evt ->
channel.close()
}
ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT.retainedDuplicate())
}
}
} ?: let {
log.debug(ctx) {
"Cache miss for key '$key'"
}
val response = DefaultFullHttpResponse(msg.protocolVersion(), HttpResponseStatus.NOT_FOUND)
response.headers()[HttpHeaderNames.CONTENT_LENGTH] = 0
ctx.writeAndFlush(response)
}
} else {
log.warn(ctx) {
"Got request for unhandled path '${msg.uri()}'"
@@ -107,16 +103,26 @@ class ServerHandler(private val cache: Cache, private val serverPrefix: Path) :
log.debug(ctx) {
"Added value for key '$key' to build cache"
}
cache.put(key, msg.content()).thenRun {
val response = DefaultFullHttpResponse(
msg.protocolVersion(), HttpResponseStatus.CREATED,
Unpooled.copiedBuffer(key.toByteArray())
)
response.headers()[HttpHeaderNames.CONTENT_LENGTH] = response.content().readableBytes()
ctx.writeAndFlush(response)
}.whenComplete { _, ex ->
ctx.fireExceptionCaught(ex)
val bodyBytes = msg.content().run {
if (isDirect) {
ByteArray(readableBytes()).also {
readBytes(it)
}
} else {
array()
}
}
try {
cache.put(key, bodyBytes)
} catch(ex : Throwable) {
throw CacheException("Error accessing the cache backend", ex)
}
val response = DefaultFullHttpResponse(
msg.protocolVersion(), HttpResponseStatus.CREATED,
Unpooled.copiedBuffer(key.toByteArray())
)
response.headers()[HttpHeaderNames.CONTENT_LENGTH] = response.content().readableBytes()
ctx.writeAndFlush(response)
} else {
log.warn(ctx) {
"Got request for unhandled path '${msg.uri()}'"

View File

@@ -8,7 +8,7 @@ import java.util.concurrent.ConcurrentHashMap
import java.util.function.Function
class BucketManager private constructor(
private val bucketsByUser: Map<Configuration.User, List<Bucket>> = HashMap(),
private val bucketsByUser: Map<Configuration.User, Bucket> = HashMap(),
private val bucketsByGroup: Map<Configuration.Group, Bucket> = HashMap(),
loader: Function<InetSocketAddress, Bucket>?
) {
@@ -43,27 +43,22 @@ class BucketManager private constructor(
companion object {
fun from(cfg : Configuration) : BucketManager {
val bucketsByUser = cfg.users.values.asSequence().map { user ->
val buckets = (
user.quota
?.let { quota ->
sequenceOf(quota)
} ?: user.groups.asSequence()
.mapNotNull(Configuration.Group::getUserQuota)
).map { quota ->
Bucket.local(
quota.maxAvailableCalls,
quota.calls,
quota.period,
quota.initialAvailableCalls
)
}.toList()
user to buckets
val bucketsByUser = cfg.users.values.asSequence().filter {
it.quota != null
}.map { user ->
val quota = user.quota
val bucket = Bucket.local(
quota.maxAvailableCalls,
quota.calls,
quota.period,
quota.initialAvailableCalls
)
user to bucket
}.toMap()
val bucketsByGroup = cfg.groups.values.asSequence().filter {
it.groupQuota != null
it.quota != null
}.map { group ->
val quota = group.groupQuota
val quota = group.quota
val bucket = Bucket.local(
quota.maxAvailableCalls,
quota.calls,

View File

@@ -42,7 +42,7 @@ class ThrottlingHandler(cfg: Configuration) :
val buckets = mutableListOf<Bucket>()
val user = ctx.channel().attr(GradleBuildCacheServer.userAttribute).get()
if (user != null) {
bucketManager.getBucketByUser(user)?.let(buckets::addAll)
bucketManager.getBucketByUser(user)?.let(buckets::add)
}
val groups = ctx.channel().attr(GradleBuildCacheServer.groupAttribute).get() ?: emptySet()
if (groups.isNotEmpty()) {

View File

@@ -9,7 +9,6 @@
<xs:sequence minOccurs="0">
<xs:element name="bind" type="gbcs:bindType" maxOccurs="1"/>
<xs:element name="connection" type="gbcs:connectionType" minOccurs="0" maxOccurs="1"/>
<xs:element name="transport" type="gbcs:transportType" minOccurs="0" maxOccurs="1"/>
<xs:element name="event-executor" type="gbcs:eventExecutorType" minOccurs="0" maxOccurs="1"/>
<xs:element name="cache" type="gbcs:cacheType" maxOccurs="1"/>
<xs:element name="authorization" type="gbcs:authorizationType" minOccurs="0">
@@ -43,10 +42,6 @@
<xs:attribute name="max-request-size" type="xs:unsignedInt" use="optional" default="67108864"/>
</xs:complexType>
<xs:complexType name="transportType">
<xs:attribute name="use-native-transport" type="xs:boolean" use="optional" default="false"/>
</xs:complexType>
<xs:complexType name="eventExecutorType">
<xs:attribute name="use-virtual-threads" type="xs:boolean" use="optional" default="true"/>
</xs:complexType>
@@ -57,7 +52,6 @@
<xs:complexContent>
<xs:extension base="gbcs:cacheType">
<xs:attribute name="max-age" type="xs:duration" default="P1D"/>
<xs:attribute name="max-size" type="xs:token" default="0x1000000"/>
<xs:attribute name="digest" type="xs:token" default="MD5"/>
<xs:attribute name="enable-compression" type="xs:boolean" default="true"/>
<xs:attribute name="compression-level" type="xs:byte" default="-1"/>
@@ -152,8 +146,7 @@
</xs:unique>
</xs:element>
<xs:element name="roles" type="gbcs:rolesType" maxOccurs="1" minOccurs="0"/>
<xs:element name="user-quota" type="gbcs:quotaType" minOccurs="0" maxOccurs="1"/>
<xs:element name="group-quota" type="gbcs:quotaType" minOccurs="0" maxOccurs="1"/>
<xs:element name="quota" type="gbcs:quotaType" minOccurs="0" maxOccurs="1"/>
</xs:sequence>
<xs:attribute name="name" type="xs:token"/>
</xs:complexType>

View File

@@ -24,8 +24,8 @@ abstract class AbstractBasicAuthServerTest : AbstractServerTest() {
protected val random = Random(101325)
protected val keyValuePair = newEntry(random)
protected val serverPath = "gbcs"
protected val readersGroup = Configuration.Group("readers", setOf(Role.Reader), null, null)
protected val writersGroup = Configuration.Group("writers", setOf(Role.Writer), null, null)
protected val readersGroup = Configuration.Group("readers", setOf(Role.Reader), null)
protected val writersGroup = Configuration.Group("writers", setOf(Role.Writer), null)
abstract protected val users : List<Configuration.User>
@@ -55,7 +55,6 @@ abstract class AbstractBasicAuthServerTest : AbstractServerTest() {
),
Configuration.BasicAuthentication(),
null,
false,
)
Xml.write(Serializer.serialize(cfg), System.out)
}

View File

@@ -1,7 +1,7 @@
package net.woggioni.gbcs.server.test
import net.woggioni.gbcs.api.Configuration
import net.woggioni.gbcs.server.GradleBuildCacheServer
import net.woggioni.gbcs.api.Configuration
import org.junit.jupiter.api.AfterAll
import org.junit.jupiter.api.BeforeAll
import org.junit.jupiter.api.MethodOrderer

View File

@@ -4,6 +4,7 @@ import net.woggioni.gbcs.api.Configuration
import net.woggioni.gbcs.api.Role
import net.woggioni.gbcs.common.Xml
import net.woggioni.gbcs.server.cache.FileSystemCacheConfiguration
import net.woggioni.gbcs.server.cache.InMemoryCacheConfiguration
import net.woggioni.gbcs.server.configuration.Serializer
import net.woggioni.gbcs.server.test.utils.CertificateUtils
import net.woggioni.gbcs.server.test.utils.CertificateUtils.X509Credentials
@@ -45,8 +46,8 @@ abstract class AbstractTlsServerTest : AbstractServerTest() {
private lateinit var trustStore: KeyStore
protected lateinit var ca: X509Credentials
protected val readersGroup = Configuration.Group("readers", setOf(Role.Reader), null, null)
protected val writersGroup = Configuration.Group("writers", setOf(Role.Writer), null, null)
protected val readersGroup = Configuration.Group("readers", setOf(Role.Reader), null)
protected val writersGroup = Configuration.Group("writers", setOf(Role.Writer), null)
protected val random = Random(101325)
protected val keyValuePair = newEntry(random)
private val serverPath : String? = null
@@ -171,8 +172,7 @@ abstract class AbstractTlsServerTest : AbstractServerTest() {
Configuration.Tls(
Configuration.KeyStore(this.serverKeyStoreFile, null, SERVER_CERTIFICATE_ENTRY, PASSWORD),
Configuration.TrustStore(this.trustStoreFile, null, false, false),
),
false
)
)
Xml.write(Serializer.serialize(cfg), System.out)
}

View File

@@ -3,6 +3,7 @@ package net.woggioni.gbcs.server.test
import io.netty.handler.codec.http.HttpResponseStatus
import net.woggioni.gbcs.api.Configuration
import net.woggioni.gbcs.common.Xml
import net.woggioni.gbcs.server.cache.FileSystemCacheConfiguration
import net.woggioni.gbcs.server.cache.InMemoryCacheConfiguration
import net.woggioni.gbcs.server.configuration.Serializer
import net.woggioni.gbcs.server.test.utils.NetworkUtils
@@ -51,12 +52,10 @@ class NoAuthServerTest : AbstractServerTest() {
maxAge = Duration.ofSeconds(3600 * 24),
compressionEnabled = true,
digestAlgorithm = "MD5",
compressionLevel = Deflater.DEFAULT_COMPRESSION,
maxSize = 0x1000000
compressionLevel = Deflater.DEFAULT_COMPRESSION
),
null,
null,
false,
)
Xml.write(Serializer.serialize(cfg), System.out)
}

View File

@@ -7,6 +7,7 @@ import org.bouncycastle.asn1.x500.X500Name
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Order
import org.junit.jupiter.api.Test
import org.junit.jupiter.params.provider.ArgumentsSource
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse

View File

@@ -10,7 +10,6 @@
write-idle-timeout="PT11M"
idle-timeout="PT30M"
max-request-size="101325"/>
<transport use-native-transport="true"/>
<event-executor use-virtual-threads="false"/>
<cache xs:type="gbcs:fileSystemCacheType" path="/tmp/gbcs" max-age="P7D"/>
<authentication>

View File

@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<gbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
xmlns:gbcs="urn:net.woggioni.gbcs.server"
xmlns:gbcs-memcache="urn:net.woggioni.gbcs.server.memcache"
xs:schemaLocation="urn:net.woggioni.gbcs.server.memcache jpms://net.woggioni.gbcs.server.memcache/net/woggioni/gbcs/server/memcache/schema/gbcs-memcache.xsd urn:net.woggioni.gbcs.server jpms://net.woggioni.gbcs.server/net/woggioni/gbcs/server/schema/gbcs.xsd"
xmlns:gbcs-memcached="urn:net.woggioni.gbcs.server.memcached"
xs:schemaLocation="urn:net.woggioni.gbcs.server.memcached jpms://net.woggioni.gbcs.server.memcached/net/woggioni/gbcs/server/memcached/schema/gbcs-memcached.xsd urn:net.woggioni.gbcs.server jpms://net.woggioni.gbcs.server/net/woggioni/gbcs/server/schema/gbcs.xsd"
>
<bind host="0.0.0.0" port="8443" incoming-connections-backlog-size="4096"/>
<connection
@@ -13,7 +13,7 @@
read-timeout="PT5M"
write-timeout="PT5M"/>
<event-executor use-virtual-threads="true"/>
<cache xs:type="gbcs-memcache:memcacheCacheType" max-age="P7D" max-size="16777216" compression-mode="deflate">
<cache xs:type="gbcs-memcached:memcachedCacheType" max-age="P7D" max-size="16777216" compression-mode="zip">
<server host="memcached" port="11211"/>
</cache>
<authorization>

View File

@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<gbcs:server xmlns:xs="http://www.w3.org/2001/XMLSchema-instance"
xmlns:gbcs="urn:net.woggioni.gbcs.server"
xmlns:gbcs-memcache="urn:net.woggioni.gbcs.server.memcache"
xs:schemaLocation="urn:net.woggioni.gbcs.server.memcache jpms://net.woggioni.gbcs.server.memcache/net/woggioni/gbcs/server/memcache/schema/gbcs-memcache.xsd urn:net.woggioni.gbcs.server jpms://net.woggioni.gbcs.server/net/woggioni/gbcs/server/schema/gbcs.xsd">
xmlns:gbcs-memcached="urn:net.woggioni.gbcs.server.memcached"
xs:schemaLocation="urn:net.woggioni.gbcs.server.memcached jpms://net.woggioni.gbcs.server.memcached/net/woggioni/gbcs/server/memcached/schema/gbcs-memcached.xsd urn:net.woggioni.gbcs.server jpms://net.woggioni.gbcs.server/net/woggioni/gbcs/server/schema/gbcs.xsd">
<bind host="127.0.0.1" port="11443" incoming-connections-backlog-size="50"/>
<connection
write-timeout="PT25M"
@@ -12,8 +12,8 @@
idle-timeout="PT30M"
max-request-size="101325"/>
<event-executor use-virtual-threads="false"/>
<cache xs:type="gbcs-memcache:memcacheCacheType" max-age="P7D" max-size="101325" digest="SHA-256">
<server host="127.0.0.1" port="11211" max-connections="10" connection-timeout="PT20S"/>
<cache xs:type="gbcs-memcached:memcachedCacheType" max-age="P7D" max-size="101325" digest="SHA-256">
<server host="127.0.0.1" port="11211"/>
</cache>
<authentication>
<none/>

View File

@@ -32,8 +32,7 @@
<roles>
<reader/>
</roles>
<user-quota calls="30" period="PT1M"/>
<group-quota calls="10" period="PT1S"/>
<quota calls="10" period="PT1S"/>
</group>
<group name="writers">
<users>
@@ -51,7 +50,7 @@
<reader/>
<writer/>
</roles>
<group-quota calls="1000" period="P1D"/>
<quota calls="1000" period="P1D"/>
</group>
</groups>
</authorization>

View File

@@ -2,9 +2,9 @@ org.gradle.configuration-cache=false
org.gradle.parallel=true
org.gradle.caching=true
gbcs.version = 0.1.2
gbcs.version = 0.0.11
lys.version = 2025.01.31
lys.version = 2025.01.25
gitea.maven.url = https://gitea.woggioni.net/api/packages/woggioni/maven
docker.registry.url=gitea.woggioni.net

View File

@@ -27,7 +27,7 @@ rootProject.name = 'gbcs'
include 'gbcs-api'
include 'gbcs-common'
include 'gbcs-server-memcache'
include 'gbcs-server-memcached'
include 'gbcs-cli'
include 'docker'
include 'gbcs-client'