implemented request/response streaming

added metadata to cache values

added cache servlet for comparison
2025-02-19 22:36:31 +08:00
parent 0463038aaa
commit f048a60540
66 changed files with 2474 additions and 1078 deletions
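For context, "added metadata to cache values" refers to the value layout the new MemcacheCacheHandler writes to memcache: a 4-byte length prefix, the Java-serialized CacheValueMetadata, then the (optionally deflated) payload. A minimal sketch of that layout, assuming a plain byte-array payload (the encodeValue helper is invented here for illustration and mirrors InProgressPutRequest/InProgressGetRequest below):
import io.netty.buffer.ByteBufUtil
import io.netty.buffer.Unpooled
import net.woggioni.rbcs.api.CacheValueMetadata
import java.io.ByteArrayOutputStream
import java.io.ObjectOutputStream
// Illustrative helper, not part of the commit: build a memcache value the way
// InProgressPutRequest does, so that InProgressGetRequest can split it again.
fun encodeValue(metadata: CacheValueMetadata, payload: ByteArray): ByteArray {
    val serialized = ByteArrayOutputStream().also { baos ->
        ObjectOutputStream(baos).use { it.writeObject(metadata) }
    }.toByteArray()
    val buf = Unpooled.buffer(Int.SIZE_BYTES + serialized.size + payload.size)
    buf.writeInt(serialized.size)   // metadata length prefix
    buf.writeBytes(serialized)      // Java-serialized CacheValueMetadata
    buf.writeBytes(payload)         // cache payload, deflated when compression is enabled
    return ByteBufUtil.getBytes(buf)
}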


@@ -1,235 +0,0 @@
package net.woggioni.rbcs.server.memcache
import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.Unpooled
import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponseStatus
import io.netty.handler.codec.memcache.binary.DefaultBinaryMemcacheRequest
import net.woggioni.rbcs.api.Cache
import net.woggioni.rbcs.api.RequestHandle
import net.woggioni.rbcs.api.ResponseHandle
import net.woggioni.rbcs.api.event.RequestStreamingEvent
import net.woggioni.rbcs.api.event.ResponseStreamingEvent
import net.woggioni.rbcs.api.exception.ContentTooLargeException
import net.woggioni.rbcs.common.ByteBufOutputStream
import net.woggioni.rbcs.common.RBCS.digest
import net.woggioni.rbcs.common.contextLogger
import net.woggioni.rbcs.common.debug
import net.woggioni.rbcs.common.extractChunk
import net.woggioni.rbcs.server.memcache.client.MemcacheClient
import net.woggioni.rbcs.server.memcache.client.MemcacheResponseHandle
import net.woggioni.rbcs.server.memcache.client.StreamingRequestEvent
import net.woggioni.rbcs.server.memcache.client.StreamingResponseEvent
import java.security.MessageDigest
import java.time.Duration
import java.time.Instant
import java.util.concurrent.CompletableFuture
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.Inflater
import java.util.zip.InflaterOutputStream
class MemcacheCache(private val cfg: MemcacheCacheConfiguration) : Cache {
companion object {
@JvmStatic
private val log = contextLogger()
}
private val memcacheClient = MemcacheClient(cfg)
override fun get(key: String, responseHandle: ResponseHandle, alloc: ByteBufAllocator) {
val compressionMode = cfg.compressionMode
val buf = alloc.compositeBuffer()
val stream = ByteBufOutputStream(buf).let { outputStream ->
if (compressionMode != null) {
when (compressionMode) {
MemcacheCacheConfiguration.CompressionMode.DEFLATE -> {
InflaterOutputStream(
outputStream,
Inflater()
)
}
}
} else {
outputStream
}
}
val memcacheResponseHandle = object : MemcacheResponseHandle {
override fun handleEvent(evt: StreamingResponseEvent) {
when (evt) {
is StreamingResponseEvent.ResponseReceived -> {
if (evt.response.status() == BinaryMemcacheResponseStatus.SUCCESS) {
responseHandle.handleEvent(ResponseStreamingEvent.RESPONSE_RECEIVED)
} else if (evt.response.status() == BinaryMemcacheResponseStatus.KEY_ENOENT) {
responseHandle.handleEvent(ResponseStreamingEvent.NOT_FOUND)
} else {
responseHandle.handleEvent(ResponseStreamingEvent.ExceptionCaught(MemcacheException(evt.response.status())))
}
}
is StreamingResponseEvent.LastContentReceived -> {
evt.content.content().let { content ->
content.readBytes(stream, content.readableBytes())
}
buf.retain()
stream.close()
val chunk = extractChunk(buf, alloc)
buf.release()
responseHandle.handleEvent(
ResponseStreamingEvent.LastChunkReceived(
chunk
)
)
}
is StreamingResponseEvent.ContentReceived -> {
evt.content.content().let { content ->
content.readBytes(stream, content.readableBytes())
}
if (buf.readableBytes() >= cfg.chunkSize) {
val chunk = extractChunk(buf, alloc)
responseHandle.handleEvent(ResponseStreamingEvent.ChunkReceived(chunk))
}
}
is StreamingResponseEvent.ExceptionCaught -> {
responseHandle.handleEvent(ResponseStreamingEvent.ExceptionCaught(evt.exception))
}
}
}
}
memcacheClient.sendRequest(Unpooled.wrappedBuffer(key.toByteArray()), memcacheResponseHandle)
.thenApply { memcacheRequestHandle ->
val request = (cfg.digestAlgorithm
?.let(MessageDigest::getInstance)
?.let { md ->
digest(key.toByteArray(), md)
} ?: key.toByteArray(Charsets.UTF_8)
).let { digest ->
DefaultBinaryMemcacheRequest(Unpooled.wrappedBuffer(digest)).apply {
setOpcode(BinaryMemcacheOpcodes.GET)
}
}
memcacheRequestHandle.handleEvent(StreamingRequestEvent.SendRequest(request))
}.exceptionally { ex ->
responseHandle.handleEvent(ResponseStreamingEvent.ExceptionCaught(ex))
}
}
private fun encodeExpiry(expiry: Duration): Int {
val expirySeconds = expiry.toSeconds()
return expirySeconds.toInt().takeIf { it.toLong() == expirySeconds }
?: Instant.ofEpochSecond(expirySeconds).epochSecond.toInt()
}
override fun put(
key: String,
responseHandle: ResponseHandle,
alloc: ByteBufAllocator
): CompletableFuture<RequestHandle> {
val memcacheResponseHandle = object : MemcacheResponseHandle {
override fun handleEvent(evt: StreamingResponseEvent) {
when (evt) {
is StreamingResponseEvent.ResponseReceived -> {
when (evt.response.status()) {
BinaryMemcacheResponseStatus.SUCCESS -> {
responseHandle.handleEvent(ResponseStreamingEvent.RESPONSE_RECEIVED)
}
BinaryMemcacheResponseStatus.KEY_ENOENT -> {
responseHandle.handleEvent(ResponseStreamingEvent.NOT_FOUND)
}
BinaryMemcacheResponseStatus.E2BIG -> {
responseHandle.handleEvent(
ResponseStreamingEvent.ExceptionCaught(
ContentTooLargeException("Request payload is too big", null)
)
)
}
else -> {
responseHandle.handleEvent(ResponseStreamingEvent.ExceptionCaught(MemcacheException(evt.response.status())))
}
}
}
is StreamingResponseEvent.LastContentReceived -> {
responseHandle.handleEvent(
ResponseStreamingEvent.LastChunkReceived(
evt.content.content().retain()
)
)
}
is StreamingResponseEvent.ContentReceived -> {
responseHandle.handleEvent(ResponseStreamingEvent.ChunkReceived(evt.content.content().retain()))
}
is StreamingResponseEvent.ExceptionCaught -> {
responseHandle.handleEvent(ResponseStreamingEvent.ExceptionCaught(evt.exception))
}
}
}
}
val result: CompletableFuture<RequestHandle> =
memcacheClient.sendRequest(Unpooled.wrappedBuffer(key.toByteArray()), memcacheResponseHandle)
.thenApply { memcacheRequestHandle ->
val request = (cfg.digestAlgorithm
?.let(MessageDigest::getInstance)
?.let { md ->
digest(key.toByteArray(), md)
} ?: key.toByteArray(Charsets.UTF_8)).let { digest ->
val extras = Unpooled.buffer(8, 8)
extras.writeInt(0)
extras.writeInt(encodeExpiry(cfg.maxAge))
DefaultBinaryMemcacheRequest(Unpooled.wrappedBuffer(digest), extras).apply {
setOpcode(BinaryMemcacheOpcodes.SET)
}
}
// memcacheRequestHandle.handleEvent(StreamingRequestEvent.SendRequest(request))
val compressionMode = cfg.compressionMode
val buf = alloc.heapBuffer()
val stream = ByteBufOutputStream(buf).let { outputStream ->
if (compressionMode != null) {
when (compressionMode) {
MemcacheCacheConfiguration.CompressionMode.DEFLATE -> {
DeflaterOutputStream(
outputStream,
Deflater(Deflater.DEFAULT_COMPRESSION, false)
)
}
}
} else {
outputStream
}
}
RequestHandle { evt ->
when (evt) {
is RequestStreamingEvent.LastChunkReceived -> {
evt.chunk.readBytes(stream, evt.chunk.readableBytes())
buf.retain()
stream.close()
request.setTotalBodyLength(buf.readableBytes() + request.keyLength() + request.extrasLength())
memcacheRequestHandle.handleEvent(StreamingRequestEvent.SendRequest(request))
memcacheRequestHandle.handleEvent(StreamingRequestEvent.SendLastChunk(buf))
}
is RequestStreamingEvent.ChunkReceived -> {
evt.chunk.readBytes(stream, evt.chunk.readableBytes())
}
is RequestStreamingEvent.ExceptionCaught -> {
stream.close()
}
}
}
}
return result
}
override fun close() {
memcacheClient.close()
}
}


@@ -1,15 +1,17 @@
package net.woggioni.rbcs.server.memcache
import net.woggioni.rbcs.api.CacheHandlerFactory
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.common.HostAndPort
import net.woggioni.rbcs.server.memcache.client.MemcacheClient
import java.time.Duration
data class MemcacheCacheConfiguration(
val servers: List<Server>,
val maxAge: Duration = Duration.ofDays(1),
val maxSize: Int = 0x100000,
val digestAlgorithm: String? = null,
val compressionMode: CompressionMode? = null,
val compressionLevel: Int,
val chunkSize : Int
) : Configuration.Cache {
@@ -27,7 +29,14 @@ data class MemcacheCacheConfiguration(
)
override fun materialize() = MemcacheCache(this)
override fun materialize() = object : CacheHandlerFactory {
private val client = MemcacheClient(this@MemcacheCacheConfiguration.servers, chunkSize)
override fun close() {
client.close()
}
override fun newHandler() = MemcacheCacheHandler(client, digestAlgorithm, compressionMode != null, compressionLevel, chunkSize, maxAge)
}
override fun getNamespaceURI() = "urn:net.woggioni.rbcs.server.memcache"
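The factory returned by the new materialize() owns a single MemcacheClient shared by every handler, while each connection gets its own MemcacheCacheHandler. A hedged usage sketch (cfg and the pipeline it feeds are illustrative, not part of this commit):
// cfg: MemcacheCacheConfiguration
val factory = cfg.materialize()
// one handler per incoming connection, all sharing the factory's MemcacheClient
val handler = factory.newHandler()   // e.g. ch.pipeline().addLast(handler)
// ... on shutdown:
factory.close()                      // closes the shared MemcacheClient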


@@ -0,0 +1,409 @@
package net.woggioni.rbcs.server.memcache
import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.CompositeByteBuf
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.SimpleChannelInboundHandler
import io.netty.handler.codec.memcache.DefaultLastMemcacheContent
import io.netty.handler.codec.memcache.DefaultMemcacheContent
import io.netty.handler.codec.memcache.LastMemcacheContent
import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponseStatus
import io.netty.handler.codec.memcache.binary.DefaultBinaryMemcacheRequest
import net.woggioni.rbcs.api.CacheValueMetadata
import net.woggioni.rbcs.api.exception.ContentTooLargeException
import net.woggioni.rbcs.api.message.CacheMessage
import net.woggioni.rbcs.api.message.CacheMessage.CacheContent
import net.woggioni.rbcs.api.message.CacheMessage.CacheGetRequest
import net.woggioni.rbcs.api.message.CacheMessage.CachePutRequest
import net.woggioni.rbcs.api.message.CacheMessage.CachePutResponse
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueFoundResponse
import net.woggioni.rbcs.api.message.CacheMessage.CacheValueNotFoundResponse
import net.woggioni.rbcs.api.message.CacheMessage.LastCacheContent
import net.woggioni.rbcs.common.ByteBufInputStream
import net.woggioni.rbcs.common.ByteBufOutputStream
import net.woggioni.rbcs.common.RBCS.processCacheKey
import net.woggioni.rbcs.common.RBCS.toIntOrNull
import net.woggioni.rbcs.common.createLogger
import net.woggioni.rbcs.common.debug
import net.woggioni.rbcs.common.extractChunk
import net.woggioni.rbcs.common.trace
import net.woggioni.rbcs.server.memcache.client.MemcacheClient
import net.woggioni.rbcs.server.memcache.client.MemcacheRequestController
import net.woggioni.rbcs.server.memcache.client.MemcacheResponseHandler
import java.io.ByteArrayOutputStream
import java.io.ObjectInputStream
import java.io.ObjectOutputStream
import java.nio.ByteBuffer
import java.nio.channels.Channels
import java.nio.channels.FileChannel
import java.nio.channels.ReadableByteChannel
import java.nio.file.Files
import java.nio.file.StandardOpenOption
import java.time.Duration
import java.time.Instant
import java.util.concurrent.CompletableFuture
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.InflaterOutputStream
import io.netty.channel.Channel as NettyChannel
class MemcacheCacheHandler(
private val client: MemcacheClient,
private val digestAlgorithm: String?,
private val compressionEnabled: Boolean,
private val compressionLevel: Int,
private val chunkSize: Int,
private val maxAge: Duration
) : SimpleChannelInboundHandler<CacheMessage>() {
companion object {
private val log = createLogger<MemcacheCacheHandler>()
private fun encodeExpiry(expiry: Duration): Int {
val expirySeconds = expiry.toSeconds()
return expirySeconds.toInt().takeIf { it.toLong() == expirySeconds }
?: Instant.ofEpochSecond(expirySeconds).epochSecond.toInt()
}
}
private inner class InProgressGetRequest(
private val key: String,
private val ctx: ChannelHandlerContext
) {
private val acc = ctx.alloc().compositeBuffer()
private val chunk = ctx.alloc().compositeBuffer()
private val outputStream = ByteBufOutputStream(chunk).let {
if (compressionEnabled) {
InflaterOutputStream(it)
} else {
it
}
}
private var responseSent = false
private var metadataSize: Int? = null
fun write(buf: ByteBuf) {
acc.addComponent(true, buf.retain())
if (metadataSize == null && acc.readableBytes() >= Int.SIZE_BYTES) {
metadataSize = acc.readInt()
}
metadataSize
?.takeIf { !responseSent }
?.takeIf { acc.readableBytes() >= it }
?.let { mSize ->
val metadata = ObjectInputStream(ByteBufInputStream(acc)).use {
acc.retain()
it.readObject() as CacheValueMetadata
}
ctx.writeAndFlush(CacheValueFoundResponse(key, metadata))
responseSent = true
acc.readerIndex(Int.SIZE_BYTES + mSize)
}
if (responseSent) {
acc.readBytes(outputStream, acc.readableBytes())
if(acc.readableBytes() >= chunkSize) {
flush(false)
}
}
}
private fun flush(last : Boolean) {
val toSend = extractChunk(chunk, ctx.alloc())
val msg = if(last) {
log.trace(ctx) {
"Sending last chunk to client on channel ${ctx.channel().id().asShortText()}"
}
LastCacheContent(toSend)
} else {
log.trace(ctx) {
"Sending chunk to client on channel ${ctx.channel().id().asShortText()}"
}
CacheContent(toSend)
}
ctx.writeAndFlush(msg)
}
fun commit() {
acc.release()
chunk.retain()
outputStream.close()
flush(true)
chunk.release()
}
fun rollback() {
acc.release()
outputStream.close()
}
}
private inner class InProgressPutRequest(
private val ch : NettyChannel,
metadata : CacheValueMetadata,
val digest : ByteBuf,
val requestController: CompletableFuture<MemcacheRequestController>,
private val alloc: ByteBufAllocator
) {
private var totalSize = 0
private var tmpFile : FileChannel? = null
private val accumulator = alloc.compositeBuffer()
private val stream = ByteBufOutputStream(accumulator).let {
if (compressionEnabled) {
DeflaterOutputStream(it, Deflater(compressionLevel))
} else {
it
}
}
init {
ByteArrayOutputStream().let { baos ->
ObjectOutputStream(baos).use {
it.writeObject(metadata)
}
val serializedBytes = baos.toByteArray()
accumulator.writeInt(serializedBytes.size)
accumulator.writeBytes(serializedBytes)
}
}
fun write(buf: ByteBuf) {
totalSize += buf.readableBytes()
buf.readBytes(stream, buf.readableBytes())
tmpFile?.let {
flushToDisk(it, accumulator)
}
if(accumulator.readableBytes() > 0x100000) {
log.debug(ch) {
"Entry is too big, buffering it into a file"
}
val opts = arrayOf(
StandardOpenOption.DELETE_ON_CLOSE,
StandardOpenOption.READ,
StandardOpenOption.WRITE,
StandardOpenOption.TRUNCATE_EXISTING
)
FileChannel.open(Files.createTempFile("rbcs-memcache", ".tmp"), *opts).let { fc ->
tmpFile = fc
flushToDisk(fc, accumulator)
}
}
}
private fun flushToDisk(fc : FileChannel, buf : CompositeByteBuf) {
val chunk = extractChunk(buf, alloc)
fc.write(chunk.nioBuffer())
chunk.release()
}
fun commit() : Pair<Int, ReadableByteChannel> {
digest.release()
accumulator.retain()
stream.close()
val fileChannel = tmpFile
return if(fileChannel != null) {
flushToDisk(fileChannel, accumulator)
accumulator.release()
fileChannel.position(0)
val fileSize = fileChannel.size().toIntOrNull() ?: let {
fileChannel.close()
throw ContentTooLargeException("Request body is too large", null)
}
fileSize to fileChannel
} else {
accumulator.readableBytes() to Channels.newChannel(ByteBufInputStream(accumulator))
}
}
fun rollback() {
stream.close()
digest.release()
tmpFile?.close()
}
}
private var inProgressPutRequest: InProgressPutRequest? = null
private var inProgressGetRequest: InProgressGetRequest? = null
override fun channelRead0(ctx: ChannelHandlerContext, msg: CacheMessage) {
when (msg) {
is CacheGetRequest -> handleGetRequest(ctx, msg)
is CachePutRequest -> handlePutRequest(ctx, msg)
is LastCacheContent -> handleLastCacheContent(ctx, msg)
is CacheContent -> handleCacheContent(ctx, msg)
else -> ctx.fireChannelRead(msg)
}
}
private fun handleGetRequest(ctx: ChannelHandlerContext, msg: CacheGetRequest) {
log.debug(ctx) {
"Fetching ${msg.key} from memcache"
}
val key = ctx.alloc().buffer().also {
it.writeBytes(processCacheKey(msg.key, digestAlgorithm))
}
val responseHandler = object : MemcacheResponseHandler {
override fun responseReceived(response: BinaryMemcacheResponse) {
val status = response.status()
when (status) {
BinaryMemcacheResponseStatus.SUCCESS -> {
log.debug(ctx) {
"Cache hit for key ${msg.key} on memcache"
}
inProgressGetRequest = InProgressGetRequest(msg.key, ctx)
}
BinaryMemcacheResponseStatus.KEY_ENOENT -> {
log.debug(ctx) {
"Cache miss for key ${msg.key} on memcache"
}
ctx.writeAndFlush(CacheValueNotFoundResponse())
}
}
}
override fun contentReceived(content: MemcacheContent) {
log.trace(ctx) {
"${if(content is LastMemcacheContent) "Last chunk" else "Chunk"} of ${content.content().readableBytes()} bytes received from memcache for key ${msg.key}"
}
inProgressGetRequest?.write(content.content())
if (content is LastMemcacheContent) {
inProgressGetRequest?.commit()
}
}
override fun exceptionCaught(ex: Throwable) {
inProgressGetRequest?.let {
inProgressGetRequest = null
it.rollback()
}
this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
}
}
client.sendRequest(key.retainedDuplicate(), responseHandler).thenAccept { requestHandle ->
log.trace(ctx) {
"Sending GET request for key ${msg.key} to memcache"
}
val request = DefaultBinaryMemcacheRequest(key).apply {
setOpcode(BinaryMemcacheOpcodes.GET)
}
requestHandle.sendRequest(request)
}
}
private fun handlePutRequest(ctx: ChannelHandlerContext, msg: CachePutRequest) {
val key = ctx.alloc().buffer().also {
it.writeBytes(processCacheKey(msg.key, digestAlgorithm))
}
val responseHandler = object : MemcacheResponseHandler {
override fun responseReceived(response: BinaryMemcacheResponse) {
val status = response.status()
when (status) {
BinaryMemcacheResponseStatus.SUCCESS -> {
log.debug(ctx) {
"Inserted key ${msg.key} into memcache"
}
ctx.writeAndFlush(CachePutResponse(msg.key))
}
else -> this@MemcacheCacheHandler.exceptionCaught(ctx, MemcacheException(status))
}
}
override fun contentReceived(content: MemcacheContent) {}
override fun exceptionCaught(ex: Throwable) {
this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
}
}
val requestController = client.sendRequest(key.retainedDuplicate(), responseHandler).whenComplete { _, ex ->
ex?.let {
this@MemcacheCacheHandler.exceptionCaught(ctx, ex)
}
}
inProgressPutRequest = InProgressPutRequest(ctx.channel(), msg.metadata, key, requestController, ctx.alloc())
}
private fun handleCacheContent(ctx: ChannelHandlerContext, msg: CacheContent) {
inProgressPutRequest?.let { request ->
log.trace(ctx) {
"Received chunk of ${msg.content().readableBytes()} bytes for memcache"
}
request.write(msg.content())
}
}
private fun handleLastCacheContent(ctx: ChannelHandlerContext, msg: LastCacheContent) {
inProgressPutRequest?.let { request ->
inProgressPutRequest = null
log.trace(ctx) {
"Received last chunk of ${msg.content().readableBytes()} bytes for memcache"
}
request.write(msg.content())
val key = request.digest.retainedDuplicate()
val (payloadSize, payloadSource) = request.commit()
val extras = ctx.alloc().buffer(8, 8)
extras.writeInt(0)
extras.writeInt(encodeExpiry(maxAge))
val totalBodyLength = request.digest.readableBytes() + extras.readableBytes() + payloadSize
request.requestController.whenComplete { requestController, ex ->
if(ex == null) {
log.trace(ctx) {
"Sending SET request to memcache"
}
requestController.sendRequest(DefaultBinaryMemcacheRequest().apply {
setOpcode(BinaryMemcacheOpcodes.SET)
setKey(key)
setExtras(extras)
setTotalBodyLength(totalBodyLength)
})
log.trace(ctx) {
"Sending request payload to memcache"
}
payloadSource.use { source ->
val bb = ByteBuffer.allocate(chunkSize)
while (true) {
val read = source.read(bb)
bb.limit()
if(read >= 0 && bb.position() < chunkSize && bb.hasRemaining()) {
continue
}
val chunk = ctx.alloc().buffer(chunkSize)
bb.flip()
chunk.writeBytes(bb)
bb.clear()
log.trace(ctx) {
"Sending ${chunk.readableBytes()} bytes chunk to memcache"
}
if(read < 0) {
requestController.sendContent(DefaultLastMemcacheContent(chunk))
break
} else {
requestController.sendContent(DefaultMemcacheContent(chunk))
}
}
}
} else {
payloadSource.close()
}
}
}
}
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
inProgressGetRequest?.let {
inProgressGetRequest = null
it.rollback()
}
inProgressPutRequest?.let {
inProgressPutRequest = null
it.requestController.thenAccept { controller ->
controller.exceptionCaught(cause)
}
it.rollback()
}
super.exceptionCaught(ctx, cause)
}
}


@@ -28,12 +28,12 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
val maxAge = el.renderAttribute("max-age")
?.let(Duration::parse)
?: Duration.ofDays(1)
val maxSize = el.renderAttribute("max-size")
?.let(Integer::decode)
?: 0x100000
val chunkSize = el.renderAttribute("chunk-size")
?.let(Integer::decode)
?: 0x4000
?: 0x10000
val compressionLevel = el.renderAttribute("compression-level")
?.let(Integer::decode)
?: -1
val compressionMode = el.renderAttribute("compression-mode")
?.let {
when (it) {
@@ -41,7 +41,6 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
else -> MemcacheCacheConfiguration.CompressionMode.DEFLATE
}
}
?: MemcacheCacheConfiguration.CompressionMode.DEFLATE
val digestAlgorithm = el.renderAttribute("digest")
for (child in el.asIterable()) {
when (child.nodeName) {
@@ -62,9 +61,9 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
return MemcacheCacheConfiguration(
servers,
maxAge,
maxSize,
digestAlgorithm,
compressionMode,
compressionLevel,
chunkSize
)
}
@@ -85,7 +84,6 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
}
}
attr("max-age", maxAge.toString())
attr("max-size", maxSize.toString())
attr("chunk-size", chunkSize.toString())
digestAlgorithm?.let { digestAlgorithm ->
attr("digest", digestAlgorithm)
@@ -97,6 +95,7 @@ class MemcacheCacheProvider : CacheProvider<MemcacheCacheConfiguration> {
}
)
}
attr("compression-level", compressionLevel.toString())
}
result
}


@@ -1,30 +0,0 @@
package net.woggioni.rbcs.server.memcache.client
import io.netty.buffer.ByteBuf
import io.netty.handler.codec.memcache.LastMemcacheContent
import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.binary.BinaryMemcacheRequest
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
sealed interface StreamingRequestEvent {
class SendRequest(val request : BinaryMemcacheRequest) : StreamingRequestEvent
open class SendChunk(val chunk : ByteBuf) : StreamingRequestEvent
class SendLastChunk(chunk : ByteBuf) : SendChunk(chunk)
class ExceptionCaught(val exception : Throwable) : StreamingRequestEvent
}
sealed interface StreamingResponseEvent {
class ResponseReceived(val response : BinaryMemcacheResponse) : StreamingResponseEvent
open class ContentReceived(val content : MemcacheContent) : StreamingResponseEvent
class LastContentReceived(val lastContent : LastMemcacheContent) : ContentReceived(lastContent)
class ExceptionCaught(val exception : Throwable) : StreamingResponseEvent
}
interface MemcacheRequestHandle {
fun handleEvent(evt : StreamingRequestEvent)
}
interface MemcacheResponseHandle {
fun handleEvent(evt : StreamingResponseEvent)
}


@@ -4,6 +4,7 @@ package net.woggioni.rbcs.server.memcache.client
import io.netty.bootstrap.Bootstrap
import io.netty.buffer.ByteBuf
import io.netty.channel.Channel
import io.netty.channel.ChannelFutureListener
import io.netty.channel.ChannelHandlerContext
import io.netty.channel.ChannelOption
import io.netty.channel.ChannelPipeline
@@ -13,31 +14,29 @@ import io.netty.channel.pool.AbstractChannelPoolHandler
import io.netty.channel.pool.ChannelPool
import io.netty.channel.pool.FixedChannelPool
import io.netty.channel.socket.nio.NioSocketChannel
import io.netty.handler.codec.memcache.DefaultLastMemcacheContent
import io.netty.handler.codec.memcache.DefaultMemcacheContent
import io.netty.handler.codec.memcache.LastMemcacheContent
import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.MemcacheObject
import io.netty.handler.codec.memcache.binary.BinaryMemcacheClientCodec
import io.netty.handler.codec.memcache.binary.BinaryMemcacheRequest
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
import io.netty.handler.logging.LoggingHandler
import io.netty.util.concurrent.GenericFutureListener
import net.woggioni.rbcs.common.HostAndPort
import net.woggioni.rbcs.common.contextLogger
import net.woggioni.rbcs.common.debug
import net.woggioni.rbcs.common.createLogger
import net.woggioni.rbcs.common.warn
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration
import net.woggioni.rbcs.server.memcache.MemcacheCacheHandler
import java.io.IOException
import java.net.InetSocketAddress
import java.util.concurrent.CompletableFuture
import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.atomic.AtomicLong
import io.netty.util.concurrent.Future as NettyFuture
class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseable {
class MemcacheClient(private val servers: List<MemcacheCacheConfiguration.Server>, private val chunkSize : Int) : AutoCloseable {
private companion object {
@JvmStatic
private val log = contextLogger()
private val log = createLogger<MemcacheCacheHandler>()
}
private val group: NioEventLoopGroup
@@ -47,8 +46,6 @@ class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseabl
group = NioEventLoopGroup()
}
private val counter = AtomicLong(0)
private fun newConnectionPool(server: MemcacheCacheConfiguration.Server): FixedChannelPool {
val bootstrap = Bootstrap().apply {
group(group)
@@ -63,32 +60,33 @@ class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseabl
override fun channelCreated(ch: Channel) {
val pipeline: ChannelPipeline = ch.pipeline()
pipeline.addLast(BinaryMemcacheClientCodec())
pipeline.addLast(LoggingHandler())
pipeline.addLast(BinaryMemcacheClientCodec(chunkSize, true))
}
}
return FixedChannelPool(bootstrap, channelPoolHandler, server.maxConnections)
}
fun sendRequest(key: ByteBuf, responseHandle: MemcacheResponseHandle): CompletableFuture<MemcacheRequestHandle> {
val server = cfg.servers.let { servers ->
if (servers.size > 1) {
var checksum = 0
while (key.readableBytes() > 4) {
val byte = key.readInt()
checksum = checksum xor byte
}
while (key.readableBytes() > 0) {
val byte = key.readByte()
checksum = checksum xor byte.toInt()
}
servers[checksum % servers.size]
} else {
servers.first()
fun sendRequest(
key: ByteBuf,
responseHandler: MemcacheResponseHandler
): CompletableFuture<MemcacheRequestController> {
val server = if (servers.size > 1) {
var checksum = 0
while (key.readableBytes() > 4) {
val byte = key.readInt()
checksum = checksum xor byte
}
while (key.readableBytes() > 0) {
val byte = key.readByte()
checksum = checksum xor byte.toInt()
}
servers[checksum % servers.size]
} else {
servers.first()
}
key.release()
val response = CompletableFuture<MemcacheRequestHandle>()
val response = CompletableFuture<MemcacheRequestController>()
// Custom handler for processing responses
val pool = connectionPool.computeIfAbsent(server.endpoint) {
newConnectionPool(server)
@@ -96,74 +94,107 @@ class MemcacheClient(private val cfg: MemcacheCacheConfiguration) : AutoCloseabl
pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {
override fun operationComplete(channelFuture: NettyFuture<Channel>) {
if (channelFuture.isSuccess) {
var requestSent = false
var requestBodySent = false
var requestFinished = false
var responseReceived = false
var responseBodyReceived = false
var responseFinished = false
var requestBodySize = 0
var requestBodyBytesSent = 0
val channel = channelFuture.now
var connectionClosedByTheRemoteServer = true
val closeCallback = {
if (connectionClosedByTheRemoteServer) {
val ex = IOException("The memcache server closed the connection")
val completed = response.completeExceptionally(ex)
if(!completed) responseHandler.exceptionCaught(ex)
log.warn {
"RequestSent: $requestSent, RequestBodySent: $requestBodySent, " +
"RequestFinished: $requestFinished, ResponseReceived: $responseReceived, " +
"ResponseBodyReceived: $responseBodyReceived, ResponseFinished: $responseFinished, " +
"RequestBodySize: $requestBodySize, RequestBodyBytesSent: $requestBodyBytesSent"
}
}
pool.release(channel)
}
val closeListener = ChannelFutureListener {
closeCallback()
}
channel.closeFuture().addListener(closeListener)
val pipeline = channel.pipeline()
val handler = object : SimpleChannelInboundHandler<MemcacheObject>() {
override fun handlerAdded(ctx: ChannelHandlerContext) {
channel.closeFuture().removeListener(closeListener)
}
override fun channelRead0(
ctx: ChannelHandlerContext,
msg: MemcacheObject
) {
when (msg) {
is BinaryMemcacheResponse -> responseHandle.handleEvent(
StreamingResponseEvent.ResponseReceived(
msg
)
)
is BinaryMemcacheResponse -> {
responseHandler.responseReceived(msg)
responseReceived = true
}
is LastMemcacheContent -> {
responseHandle.handleEvent(
StreamingResponseEvent.LastContentReceived(
msg
)
)
pipeline.removeLast()
responseFinished = true
responseHandler.contentReceived(msg)
pipeline.remove(this)
pool.release(channel)
}
is MemcacheContent -> responseHandle.handleEvent(
StreamingResponseEvent.ContentReceived(
msg
)
)
is MemcacheContent -> {
responseBodyReceived = true
responseHandler.contentReceived(msg)
}
}
}
override fun channelInactive(ctx: ChannelHandlerContext) {
closeCallback()
ctx.fireChannelInactive()
}
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
responseHandle.handleEvent(StreamingResponseEvent.ExceptionCaught(cause))
connectionClosedByTheRemoteServer = false
ctx.close()
pipeline.removeLast()
pool.release(channel)
responseHandler.exceptionCaught(cause)
}
}
channel.pipeline()
.addLast("client-handler", handler)
response.complete(object : MemcacheRequestHandle {
override fun handleEvent(evt: StreamingRequestEvent) {
when (evt) {
is StreamingRequestEvent.SendRequest -> {
channel.writeAndFlush(evt.request)
}
response.complete(object : MemcacheRequestController {
is StreamingRequestEvent.SendLastChunk -> {
channel.writeAndFlush(DefaultLastMemcacheContent(evt.chunk))
val value = counter.incrementAndGet()
log.debug {
"Finished request counter: $value"
}
}
override fun sendRequest(request: BinaryMemcacheRequest) {
requestBodySize = request.totalBodyLength() - request.keyLength() - request.extrasLength()
channel.writeAndFlush(request)
requestSent = true
}
is StreamingRequestEvent.SendChunk -> {
channel.writeAndFlush(DefaultMemcacheContent(evt.chunk))
}
is StreamingRequestEvent.ExceptionCaught -> {
responseHandle.handleEvent(StreamingResponseEvent.ExceptionCaught(evt.exception))
channel.close()
pipeline.removeLast()
pool.release(channel)
override fun sendContent(content: MemcacheContent) {
val size = content.content().readableBytes()
channel.writeAndFlush(content).addListener {
requestBodyBytesSent += size
requestBodySent = true
if(content is LastMemcacheContent) {
requestFinished = true
}
}
}
override fun exceptionCaught(ex: Throwable) {
connectionClosedByTheRemoteServer = false
channel.close()
}
})
} else {
response.completeExceptionally(channelFuture.cause())


@@ -0,0 +1,13 @@
package net.woggioni.rbcs.server.memcache.client
import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.binary.BinaryMemcacheRequest
interface MemcacheRequestController {
fun sendRequest(request : BinaryMemcacheRequest)
fun sendContent(content : MemcacheContent)
fun exceptionCaught(ex : Throwable)
}


@@ -0,0 +1,14 @@
package net.woggioni.rbcs.server.memcache.client
import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
interface MemcacheResponseHandler {
fun responseReceived(response : BinaryMemcacheResponse)
fun contentReceived(content : MemcacheContent)
fun exceptionCaught(ex : Throwable)
}
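Together with MemcacheRequestController, this callback interface replaces the deleted event-based MemcacheRequestHandle/MemcacheResponseHandle pair. A hedged sketch of how a caller wires the two together for a GET, mirroring MemcacheCacheHandler.handleGetRequest (the fetch helper and its parameters are illustrative):
import io.netty.buffer.ByteBuf
import io.netty.handler.codec.memcache.MemcacheContent
import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
import io.netty.handler.codec.memcache.binary.DefaultBinaryMemcacheRequest
import net.woggioni.rbcs.server.memcache.client.MemcacheClient
import net.woggioni.rbcs.server.memcache.client.MemcacheResponseHandler
// Illustrative only: issue a GET and stream the response through the callbacks.
fun fetch(client: MemcacheClient, key: ByteBuf) {
    val handler = object : MemcacheResponseHandler {
        override fun responseReceived(response: BinaryMemcacheResponse) {
            // inspect response.status(): SUCCESS, KEY_ENOENT, ...
        }
        override fun contentReceived(content: MemcacheContent) {
            // consume content.content(); a LastMemcacheContent instance marks the end
        }
        override fun exceptionCaught(ex: Throwable) {
            // propagate the failure to the caller
        }
    }
    client.sendRequest(key.retainedDuplicate(), handler).thenAccept { controller ->
        controller.sendRequest(DefaultBinaryMemcacheRequest(key).apply {
            setOpcode(BinaryMemcacheOpcodes.GET)
        })
    }
}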


@@ -20,10 +20,10 @@
<xs:element name="server" type="rbcs-memcache:memcacheServerType"/>
</xs:sequence>
<xs:attribute name="max-age" type="xs:duration" default="P1D"/>
<xs:attribute name="max-size" type="rbcs:byteSize" default="1048576"/>
<xs:attribute name="chunk-size" type="rbcs:byteSize" default="0x4000"/>
<xs:attribute name="chunk-size" type="rbcs:byteSizeType" default="0x10000"/>
<xs:attribute name="digest" type="xs:token" />
<xs:attribute name="compression-mode" type="rbcs-memcache:compressionType"/>
<xs:attribute name="compression-level" type="rbcs:compressionLevelType" default="-1"/>
</xs:extension>
</xs:complexContent>
</xs:complexType>
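For reference, the new schema defaults (chunk-size 0x10000, compression-level -1, compression and digest disabled) correspond to a configuration object like the sketch below; MemcacheCacheConfiguration.Server construction is not shown in this diff, so the server list is left empty here:
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration
import java.time.Duration
// Hedged example mirroring the schema defaults above; not part of the commit.
val cfg = MemcacheCacheConfiguration(
    servers = emptyList(),       // Server construction not shown in this diff
    maxAge = Duration.ofDays(1), // max-age default P1D
    digestAlgorithm = null,      // digest attribute omitted
    compressionMode = null,      // compression-mode attribute omitted
    compressionLevel = -1,       // compression-level default
    chunkSize = 0x10000          // chunk-size default
)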


@@ -0,0 +1,27 @@
package net.woggioni.rbcs.server.memcache.client
import io.netty.buffer.ByteBufUtil
import io.netty.buffer.Unpooled
import org.junit.jupiter.api.Assertions
import org.junit.jupiter.api.Test
import java.io.ByteArrayInputStream
import java.nio.ByteBuffer
import java.nio.channels.Channels
import kotlin.random.Random
class ByteBufferTest {
@Test
fun test() {
val byteBuffer = ByteBuffer.allocate(0x100)
val originalBytes = Random(101325).nextBytes(0x100)
Channels.newChannel(ByteArrayInputStream(originalBytes)).use { source ->
source.read(byteBuffer)
}
byteBuffer.flip()
val buf = Unpooled.buffer()
buf.writeBytes(byteBuffer)
val finalBytes = ByteBufUtil.getBytes(buf)
Assertions.assertArrayEquals(originalBytes, finalBytes)
}
}