fixed server support for request pipelining

2025-03-08 11:07:21 +08:00
parent 59a12d6218
commit 7ba7070693
19 changed files with 203 additions and 235 deletions
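
Taken together, the hunks below move the memcache backend off a raw SimpleChannelInboundHandler<CacheMessage> onto the project's CacheHandler base class, funnel every cache response through sendMessageAndFlush instead of ctx.writeAndFlush, drop the ad-hoc progress flags and log.warn diagnostics that tracked a single in-flight request, and rework MemcacheClient so a pooled channel is released (exactly once, guarded by a flag) as soon as its request has been fully written rather than being tied to the response or connection lifecycle. As a rough illustration of the ordering concern behind request pipelining (this sketch is purely illustrative, not rbcs code: the class and method names are invented here), a server handling several pipelined requests on one connection has to write the responses back in arrival order even when the backend finishes them out of order:

    // Illustrative sketch only, not the rbcs CacheHandler: one way to keep
    // pipelined responses in the order their requests arrived.
    import io.netty.channel.ChannelHandlerContext
    import java.util.ArrayDeque

    class OrderedResponseQueue<T : Any> {

        class Slot<T : Any> {
            var response: T? = null
        }

        private val slots = ArrayDeque<Slot<T>>()

        // Reserve a position in the response order as soon as a request is read,
        // before any backend work starts. Assumes single-threaded (event loop) use.
        fun reserve(): Slot<T> = Slot<T>().also(slots::addLast)

        // Store a finished response and write out every response whose turn has
        // come, flushing once, so the client always sees answers in request order.
        fun complete(ctx: ChannelHandlerContext, slot: Slot<T>, response: T) {
            slot.response = response
            while (slots.isNotEmpty() && slots.peekFirst().response != null) {
                ctx.write(slots.pollFirst().response)
            }
            ctx.flush()
        }
    }

Releasing the backend connection right after the request has been written, as the MemcacheClient changes below do, is what lets several pipelined requests share one pooled memcache channel.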

MemcacheCacheConfiguration.kt

@@ -6,6 +6,7 @@ import io.netty.channel.EventLoopGroup
import io.netty.channel.pool.FixedChannelPool
import io.netty.channel.socket.DatagramChannel
import io.netty.channel.socket.SocketChannel
+import net.woggioni.rbcs.api.CacheHandler
import net.woggioni.rbcs.api.CacheHandlerFactory
import net.woggioni.rbcs.api.Configuration
import net.woggioni.rbcs.common.HostAndPort
@@ -51,7 +52,7 @@ data class MemcacheCacheConfiguration(
eventLoop: EventLoopGroup,
socketChannelFactory: ChannelFactory<SocketChannel>,
datagramChannelFactory: ChannelFactory<DatagramChannel>,
-): ChannelHandler {
+): CacheHandler {
return MemcacheCacheHandler(
MemcacheClient(
this@MemcacheCacheConfiguration.servers,

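The only change in the configuration is the factory's return type: instead of a generic Netty ChannelHandler, MemcacheCacheConfiguration now hands back the project's CacheHandler, whose source is not part of this diff. Judging from how MemcacheCacheHandler uses it below, it plausibly wraps SimpleChannelInboundHandler<CacheMessage> and adds a sendMessageAndFlush helper; the sketch below is only a guess at that shape, not the real net.woggioni.rbcs.api.CacheHandler.

    // Guessed shape of the CacheHandler base class; the real one is not shown in
    // this commit and presumably also carries the per-request bookkeeping needed
    // for pipelining.
    import io.netty.channel.ChannelHandlerContext
    import io.netty.channel.SimpleChannelInboundHandler
    import net.woggioni.rbcs.api.message.CacheMessage

    abstract class CacheHandlerSketch : SimpleChannelInboundHandler<CacheMessage>() {

        // Single exit point for responses, so subclasses never call
        // ctx.writeAndFlush directly (matching the replacements in this diff).
        protected fun sendMessageAndFlush(ctx: ChannelHandlerContext, msg: CacheMessage) {
            ctx.writeAndFlush(msg)
        }
    }
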
MemcacheCacheHandler.kt

@@ -4,7 +4,6 @@ import io.netty.buffer.ByteBuf
import io.netty.buffer.ByteBufAllocator
import io.netty.buffer.CompositeByteBuf
import io.netty.channel.ChannelHandlerContext
-import io.netty.channel.SimpleChannelInboundHandler
import io.netty.handler.codec.memcache.DefaultLastMemcacheContent
import io.netty.handler.codec.memcache.DefaultMemcacheContent
import io.netty.handler.codec.memcache.LastMemcacheContent
@@ -13,6 +12,7 @@ import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponseStatus
import io.netty.handler.codec.memcache.binary.DefaultBinaryMemcacheRequest
+import net.woggioni.rbcs.api.CacheHandler
import net.woggioni.rbcs.api.CacheValueMetadata
import net.woggioni.rbcs.api.exception.ContentTooLargeException
import net.woggioni.rbcs.api.message.CacheMessage
@@ -58,7 +58,7 @@ class MemcacheCacheHandler(
private val compressionLevel: Int,
private val chunkSize: Int,
private val maxAge: Duration
-) : SimpleChannelInboundHandler<CacheMessage>() {
+) : CacheHandler() {
companion object {
private val log = createLogger<MemcacheCacheHandler>()
@@ -98,7 +98,10 @@ class MemcacheCacheHandler(
acc.retain()
it.readObject() as CacheValueMetadata
}
-ctx.writeAndFlush(CacheValueFoundResponse(key, metadata))
+log.trace(ctx) {
+"Sending response from cache"
+}
+sendMessageAndFlush(ctx, CacheValueFoundResponse(key, metadata))
responseSent = true
acc.readerIndex(Int.SIZE_BYTES + mSize)
}
@@ -114,16 +117,16 @@ class MemcacheCacheHandler(
val toSend = extractChunk(chunk, ctx.alloc())
val msg = if(last) {
log.trace(ctx) {
"Sending last chunk to client on channel ${ctx.channel().id().asShortText()}"
"Sending last chunk to client on channel"
}
LastCacheContent(toSend)
} else {
log.trace(ctx) {
"Sending chunk to client on channel ${ctx.channel().id().asShortText()}"
"Sending chunk to client"
}
CacheContent(toSend)
}
-ctx.writeAndFlush(msg)
+sendMessageAndFlush(ctx, msg)
}
fun commit() {
@@ -259,7 +262,7 @@ class MemcacheCacheHandler(
log.debug(ctx) {
"Cache miss for key ${msg.key} on memcache"
}
-ctx.writeAndFlush(CacheValueNotFoundResponse())
+sendMessageAndFlush(ctx, CacheValueNotFoundResponse())
}
}
}
@@ -290,6 +293,7 @@ class MemcacheCacheHandler(
setOpcode(BinaryMemcacheOpcodes.GET)
}
requestHandle.sendRequest(request)
+requestHandle.sendContent(LastMemcacheContent.EMPTY_LAST_CONTENT)
}
}
@@ -305,7 +309,7 @@ class MemcacheCacheHandler(
log.debug(ctx) {
"Inserted key ${msg.key} into memcache"
}
-ctx.writeAndFlush(CachePutResponse(msg.key))
+sendMessageAndFlush(ctx, CachePutResponse(msg.key))
}
else -> this@MemcacheCacheHandler.exceptionCaught(ctx, MemcacheException(status))
}
@@ -348,6 +352,9 @@ class MemcacheCacheHandler(
extras.writeInt(0)
extras.writeInt(encodeExpiry(maxAge))
val totalBodyLength = request.digest.readableBytes() + extras.readableBytes() + payloadSize
+log.trace(ctx) {
+"Trying to send SET request to memcache"
+}
request.requestController.whenComplete { requestController, ex ->
if(ex == null) {
log.trace(ctx) {

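Two details in MemcacheCacheHandler stand out: every response now goes through sendMessageAndFlush rather than ctx.writeAndFlush, and the GET path now follows the request header with an explicit LastMemcacheContent.EMPTY_LAST_CONTENT. That empty frame marks the request as complete on the wire, which matches the client-side change further down where the pooled channel is released once the last content frame has been written. A minimal, self-contained sketch of that framing with Netty's binary memcache codec (channel setup and error handling omitted; the sendGet helper is ours, not rbcs code, and it assumes the pipeline already contains BinaryMemcacheClientCodec):

    // Sketch: issue a GET through Netty's binary memcache codec and terminate it
    // with an explicit empty last-content frame, mirroring the
    // requestHandle.sendRequest / sendContent pair added in this commit.
    import io.netty.buffer.Unpooled
    import io.netty.channel.Channel
    import io.netty.handler.codec.memcache.LastMemcacheContent
    import io.netty.handler.codec.memcache.binary.BinaryMemcacheOpcodes
    import io.netty.handler.codec.memcache.binary.DefaultBinaryMemcacheRequest
    import java.nio.charset.StandardCharsets

    fun sendGet(channel: Channel, key: String) {
        val keyBuf = Unpooled.copiedBuffer(key, StandardCharsets.UTF_8)
        val request = DefaultBinaryMemcacheRequest(keyBuf).apply {
            setOpcode(BinaryMemcacheOpcodes.GET)
            // GET carries no extras or value, so the body is just the key.
            setTotalBodyLength(keyBuf.readableBytes())
        }
        channel.write(request)
        // The empty last content completes the request on the wire, which is the
        // point where a pooled channel can be handed back for the next request.
        channel.writeAndFlush(LastMemcacheContent.EMPTY_LAST_CONTENT)
    }
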
MemcacheClient.kt

@@ -12,7 +12,6 @@ import io.netty.channel.ChannelPipeline
import io.netty.channel.EventLoopGroup
import io.netty.channel.SimpleChannelInboundHandler
import io.netty.channel.pool.AbstractChannelPoolHandler
-import io.netty.channel.pool.ChannelPool
import io.netty.channel.pool.FixedChannelPool
import io.netty.channel.socket.SocketChannel
import io.netty.handler.codec.memcache.LastMemcacheContent
@@ -24,7 +23,7 @@ import io.netty.handler.codec.memcache.binary.BinaryMemcacheResponse
import io.netty.util.concurrent.GenericFutureListener
import net.woggioni.rbcs.common.HostAndPort
import net.woggioni.rbcs.common.createLogger
-import net.woggioni.rbcs.common.warn
+import net.woggioni.rbcs.common.trace
import net.woggioni.rbcs.server.memcache.MemcacheCacheConfiguration
import net.woggioni.rbcs.server.memcache.MemcacheCacheHandler
import java.io.IOException
@@ -94,18 +93,6 @@ class MemcacheClient(
pool.acquire().addListener(object : GenericFutureListener<NettyFuture<Channel>> {
override fun operationComplete(channelFuture: NettyFuture<Channel>) {
if (channelFuture.isSuccess) {
-var requestSent = false
-var requestBodySent = false
-var requestFinished = false
-var responseReceived = false
-var responseBodyReceived = false
-var responseFinished = false
-var requestBodySize = 0
-var requestBodyBytesSent = 0
val channel = channelFuture.now
var connectionClosedByTheRemoteServer = true
val closeCallback = {
@@ -113,14 +100,7 @@ class MemcacheClient(
val ex = IOException("The memcache server closed the connection")
val completed = response.completeExceptionally(ex)
if(!completed) responseHandler.exceptionCaught(ex)
-log.warn {
-"RequestSent: $requestSent, RequestBodySent: $requestBodySent, " +
-"RequestFinished: $requestFinished, ResponseReceived: $responseReceived, " +
-"ResponseBodyReceived: $responseBodyReceived, ResponseFinished: $responseFinished, " +
-"RequestBodySize: $requestBodySize, RequestBodyBytesSent: $requestBodyBytesSent"
-}
}
-pool.release(channel)
}
val closeListener = ChannelFutureListener {
closeCallback()
@@ -140,18 +120,14 @@ class MemcacheClient(
when (msg) {
is BinaryMemcacheResponse -> {
responseHandler.responseReceived(msg)
-responseReceived = true
}
is LastMemcacheContent -> {
-responseFinished = true
responseHandler.contentReceived(msg)
pipeline.remove(this)
-pool.release(channel)
}
is MemcacheContent -> {
-responseBodyReceived = true
responseHandler.contentReceived(msg)
}
}
@@ -165,35 +141,43 @@ class MemcacheClient(
override fun exceptionCaught(ctx: ChannelHandlerContext, cause: Throwable) {
connectionClosedByTheRemoteServer = false
ctx.close()
pool.release(channel)
responseHandler.exceptionCaught(cause)
}
}
-channel.pipeline()
-.addLast("client-handler", handler)
+channel.pipeline().addLast(handler)
response.complete(object : MemcacheRequestController {
+private var channelReleased = false
override fun sendRequest(request: BinaryMemcacheRequest) {
-requestBodySize = request.totalBodyLength() - request.keyLength() - request.extrasLength()
channel.writeAndFlush(request)
-requestSent = true
}
override fun sendContent(content: MemcacheContent) {
-val size = content.content().readableBytes()
channel.writeAndFlush(content).addListener {
-requestBodyBytesSent += size
-requestBodySent = true
if(content is LastMemcacheContent) {
-requestFinished = true
+if(!channelReleased) {
+pool.release(channel)
+channelReleased = true
+log.trace(channel) {
+"Channel released"
+}
+}
}
}
}
override fun exceptionCaught(ex: Throwable) {
log.warn(ex.message, ex)
connectionClosedByTheRemoteServer = false
channel.close()
+if(!channelReleased) {
+pool.release(channel)
+channelReleased = true
+log.trace(channel) {
+"Channel released"
+}
+}
}
})
} else {