From: Lanius Trolling
Date: Thu, 4 Apr 2024 23:35:13 +0000 (-0400)
Subject: Add ETag to asset headers
X-Git-Url: https://gitweb.starshipfights.net/?a=commitdiff_plain;h=8f1b96339f4c6cdcbf6c6d8e523fdce7b5288468;p=factbooks

Add ETag to asset headers
---

diff --git a/src/jvmMain/kotlin/info/mechyrdia/Factbooks.kt b/src/jvmMain/kotlin/info/mechyrdia/Factbooks.kt
index 89b777f..e403143 100644
--- a/src/jvmMain/kotlin/info/mechyrdia/Factbooks.kt
+++ b/src/jvmMain/kotlin/info/mechyrdia/Factbooks.kt
@@ -73,11 +73,8 @@ fun Application.factbooks() {
 	}
 
 	install(ConditionalHeaders) {
-		version { _, outgoingContent ->
-			if (outgoingContent is LocalFileContent)
-				listOf(LastModifiedVersion(outgoingContent.file.lastModified()))
-			else
-				emptyList()
+		version { call, outgoingContent ->
+			getVersionHeaders(call, outgoingContent)
 		}
 	}
 
diff --git a/src/jvmMain/kotlin/info/mechyrdia/lore/asset_caching.kt b/src/jvmMain/kotlin/info/mechyrdia/lore/asset_caching.kt
new file mode 100644
index 0000000..e7ef829
--- /dev/null
+++ b/src/jvmMain/kotlin/info/mechyrdia/lore/asset_caching.kt
@@ -0,0 +1,37 @@
+package info.mechyrdia.lore
+
+import java.io.File
+import java.util.concurrent.ConcurrentHashMap
+
+abstract class FileDependentCache<T : Any> {
+	private val cache = ConcurrentHashMap<File, Entry>()
+
+	protected abstract fun processFile(file: File): T
+
+	operator fun get(file: File): T {
+		return cache.compute(file) { _, prevEntry ->
+			prevEntry?.apply {
+				updateIfNeeded(file)
+			} ?: Entry(file)
+		}!!.cachedData
+	}
+
+	private inner class Entry private constructor(lastModified: Long, cachedData: T) {
+		constructor(file: File) : this(file.lastModified(), processFile(file))
+
+		var lastModified: Long = lastModified
+			private set
+
+		var cachedData: T = cachedData
+			private set
+
+		fun updateIfNeeded(file: File) {
+			val fileLastModified = file.lastModified()
+			if (lastModified < fileLastModified) {
+				lastModified = fileLastModified
+
+				cachedData = processFile(file)
+			}
+		}
+	}
+}
diff --git a/src/jvmMain/kotlin/info/mechyrdia/lore/asset_compression.kt b/src/jvmMain/kotlin/info/mechyrdia/lore/asset_compression.kt
index 1b2420a..4dc5d12 100644
--- a/src/jvmMain/kotlin/info/mechyrdia/lore/asset_compression.kt
+++ b/src/jvmMain/kotlin/info/mechyrdia/lore/asset_compression.kt
@@ -4,18 +4,20 @@ import io.ktor.http.*
 import io.ktor.server.application.*
 import io.ktor.server.request.*
 import io.ktor.server.response.*
+import io.ktor.util.*
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.runInterruptible
 import java.io.ByteArrayOutputStream
 import java.io.File
 import java.io.FilterOutputStream
 import java.io.OutputStream
-import java.util.concurrent.ConcurrentHashMap
 import java.util.zip.DeflaterOutputStream
 import java.util.zip.GZIPOutputStream
 
-private val gzippedCache = CompressedCache.fromCompressorFactory("gzip", ::GZIPOutputStream)
-private val deflatedCache = CompressedCache.fromCompressorFactory("deflate", ::DeflaterOutputStream)
+val CompressedFileAttributeKey = AttributeKey<File>("Mechyrdia.CompressedFile")
+
+private val gzippedCache = CompressedCache("gzip", ::GZIPOutputStream)
+private val deflatedCache = CompressedCache("deflate", ::DeflaterOutputStream)
 
 private fun getCacheByEncoding(encoding: String) = when (encoding) {
 	"gzip" -> gzippedCache
@@ -33,51 +35,18 @@ private fun ApplicationCall.compressedCache(): CompressedCache? {
 suspend fun ApplicationCall.respondCompressedFile(file: File) {
 	val cache = compressedCache() ?: return respondFile(file)
 	response.header(HttpHeaders.ContentEncoding, cache.encoding)
-	val compressedBytes = runInterruptible(Dispatchers.IO) { cache.getCompressed(file) }
+	attributes.put(CompressedFileAttributeKey, file)
+	val compressedBytes = runInterruptible(Dispatchers.IO) { cache[file] }
 	respondBytes(compressedBytes)
 }
 
-private class CompressedCache(val encoding: String, private val compressor: (ByteArray) -> ByteArray) {
-	private val cache = ConcurrentHashMap<File, CompressedCacheEntry>()
-
-	fun getCompressed(file: File): ByteArray {
-		return cache.compute(file) { _, prevEntry ->
-			prevEntry?.apply {
-				updateIfNeeded(file, compressor)
-			} ?: CompressedCacheEntry(file, compressor)
-		}!!.compressedData
-	}
-
-	companion object {
-		fun fromCompressorFactory(encoding: String, compressorFactory: (OutputStream, Boolean) -> FilterOutputStream) = CompressedCache(encoding) { uncompressed ->
-			ByteArrayOutputStream().also { oStream ->
-				compressorFactory(oStream, true).use { gzip ->
-					gzip.write(uncompressed)
-					gzip.flush()
-				}
-			}.toByteArray()
-		}
-	}
-}
-
-private class CompressedCacheEntry private constructor(
-	lastModified: Long,
-	compressedData: ByteArray,
-) {
-	constructor(file: File, compressor: (ByteArray) -> ByteArray) : this(file.lastModified(), compressor(file.readBytes()))
-
-	var lastModified: Long = lastModified
-		private set
-
-	var compressedData: ByteArray = compressedData
-		private set
-
-	fun updateIfNeeded(file: File, compressor: (ByteArray) -> ByteArray) {
-		val fileLastModified = file.lastModified()
-		if (lastModified < fileLastModified) {
-			lastModified = fileLastModified
-
-			compressedData = compressor(file.readBytes())
-		}
+private class CompressedCache(val encoding: String, private val compressorFactory: (OutputStream, Boolean) -> FilterOutputStream) : FileDependentCache<ByteArray>() {
+	override fun processFile(file: File): ByteArray {
+		return ByteArrayOutputStream().also { oStream ->
+			compressorFactory(oStream, true).use { gzip ->
+				file.inputStream().use { it.copyTo(gzip) }
+				gzip.flush()
+			}
+		}.toByteArray()
 	}
 }
diff --git a/src/jvmMain/kotlin/info/mechyrdia/lore/asset_hashing.kt b/src/jvmMain/kotlin/info/mechyrdia/lore/asset_hashing.kt
new file mode 100644
index 0000000..31b3a6f
--- /dev/null
+++ b/src/jvmMain/kotlin/info/mechyrdia/lore/asset_hashing.kt
@@ -0,0 +1,83 @@
+package info.mechyrdia.lore
+
+import io.ktor.http.content.*
+import io.ktor.server.application.*
+import io.ktor.server.http.content.*
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.runInterruptible
+import java.io.File
+import java.io.IOException
+import java.io.OutputStream
+import java.security.MessageDigest
+import java.util.*
+
+private class DigestingOutputStream(stomach: MessageDigest) : OutputStream() {
+	private var stomachStore: MessageDigest? = stomach
+
+	private val stomach: MessageDigest
+		get() = stomachStore ?: throw IOException("Attempt to use DigestingOutputStream after it has been closed")
+
+	val isWritable: Boolean
+		get() = stomachStore != null
+
+	private var resultStore: ByteArray? = null
+
+	val result: ByteArray
+		get() = resultStore ?: throw IOException("Attempt to retrieve result of DigestingOutputStream before it has finished")
+
+	val isDone: Boolean
+		get() = resultStore != null
+
+	override fun write(b: Int) {
+		stomach.update(b.toByte())
+	}
+
+	override fun write(b: ByteArray) {
+		stomach.update(b)
+	}
+
+	override fun write(b: ByteArray, off: Int, len: Int) {
+		stomach.update(b, off, len)
+	}
+
+	override fun close() {
+		resultStore = stomach.digest()
+		stomachStore = null
+	}
+
+	inline fun useAndGet(block: (DigestingOutputStream) -> Unit): ByteArray {
+		use(block)
+		return result
+	}
+}
+
+private class FileHashCache(val hashAlgo: String) : FileDependentCache<ByteArray>() {
+	private val hashinator: ThreadLocal<MessageDigest> = ThreadLocal.withInitial { MessageDigest.getInstance(hashAlgo) }
+
+	override fun processFile(file: File): ByteArray {
+		return DigestingOutputStream(hashinator.get()).useAndGet { oStream ->
+			file.inputStream().use { it.copyTo(oStream) }
+		}
+	}
+}
+
+private val eTagCache = FileHashCache("SHA-384")
+
+private val b64Encoder: Base64.Encoder = Base64.getUrlEncoder()
+
+suspend fun File.eTag(): String = runInterruptible(Dispatchers.IO) {
+	b64Encoder.encodeToString(eTagCache[this])
+}
+
+private suspend fun File.getVersionHeaders() = listOf(
+	LastModifiedVersion(lastModified()),
+	EntityTagVersion(eTag()),
+)
+
+suspend fun getVersionHeaders(call: ApplicationCall, outgoingContent: OutgoingContent): List<Version> {
+	return if (outgoingContent is LocalFileContent)
+		outgoingContent.file.getVersionHeaders()
+	else
+		call.attributes.getOrNull(CompressedFileAttributeKey)?.getVersionHeaders()
+			?: emptyList()
+}
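
Not part of the patch: a minimal smoke test of the new conditional-request behaviour, assuming the factbooks() module can be installed in a Ktor testApplication and that some static asset is reachable at the placeholder path /static/style.css (the test class name and path are illustrative, not taken from the repository; ktor-server-test-host and kotlin-test are assumed to be on the test classpath). The first response should carry the ETag produced by FileHashCache (URL-safe Base64 of the file's SHA-384 digest) next to Last-Modified, and replaying the request with If-None-Match should come back 304 Not Modified.

import info.mechyrdia.factbooks
import io.ktor.client.request.*
import io.ktor.http.*
import io.ktor.server.testing.*
import kotlin.test.Test
import kotlin.test.assertEquals
import kotlin.test.assertNotNull

class ETagSmokeTest {
	@Test
	fun conditionalRequestReturnsNotModified() = testApplication {
		application { factbooks() }

		// First request: ConditionalHeaders attaches the ETag and Last-Modified
		// versions returned by getVersionHeaders().
		val first = client.get("/static/style.css")
		val eTag = assertNotNull(first.headers[HttpHeaders.ETag])

		// Replaying the request with If-None-Match should short-circuit to 304.
		val second = client.get("/static/style.css") {
			header(HttpHeaders.IfNoneMatch, eTag)
		}
		assertEquals(HttpStatusCode.NotModified, second.status)
	}
}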