Add ETag to asset headers
author      Lanius Trolling <lanius@laniustrolling.dev>
            Thu, 4 Apr 2024 23:35:13 +0000 (19:35 -0400)
committer   Lanius Trolling <lanius@laniustrolling.dev>
            Thu, 4 Apr 2024 23:35:13 +0000 (19:35 -0400)
src/jvmMain/kotlin/info/mechyrdia/Factbooks.kt
src/jvmMain/kotlin/info/mechyrdia/lore/asset_caching.kt [new file with mode: 0644]
src/jvmMain/kotlin/info/mechyrdia/lore/asset_compression.kt
src/jvmMain/kotlin/info/mechyrdia/lore/asset_hashing.kt [new file with mode: 0644]

diff --git a/src/jvmMain/kotlin/info/mechyrdia/Factbooks.kt b/src/jvmMain/kotlin/info/mechyrdia/Factbooks.kt
index 89b777f01a09422ca2a9c82dd409c64f1fc70324..e403143a5f6c9a4641bd295eeb7524f62ddde87a 100644 (file)
@@ -73,11 +73,8 @@ fun Application.factbooks() {
        }
        
        install(ConditionalHeaders) {
-               version { _, outgoingContent ->
-                       if (outgoingContent is LocalFileContent)
-                               listOf(LastModifiedVersion(outgoingContent.file.lastModified()))
-                       else
-                               emptyList()
+               version { call, outgoingContent ->
+                       getVersionHeaders(call, outgoingContent)
                }
        }
        
diff --git a/src/jvmMain/kotlin/info/mechyrdia/lore/asset_caching.kt b/src/jvmMain/kotlin/info/mechyrdia/lore/asset_caching.kt
new file mode 100644 (file)
index 0000000..e7ef829
--- /dev/null
@@ -0,0 +1,37 @@
+package info.mechyrdia.lore
+
+import java.io.File
+import java.util.concurrent.ConcurrentHashMap
+
+abstract class FileDependentCache<T> {
+       private val cache = ConcurrentHashMap<File, Entry>()
+       
+       protected abstract fun processFile(file: File): T
+       
+       operator fun get(file: File): T {
+               return cache.compute(file) { _, prevEntry ->
+                       prevEntry?.apply {
+                               updateIfNeeded(file)
+                       } ?: Entry(file)
+               }!!.cachedData
+       }
+       
+       private inner class Entry private constructor(lastModified: Long, cachedData: T) {
+               constructor(file: File) : this(file.lastModified(), processFile(file))
+               
+               var lastModified: Long = lastModified
+                       private set
+               
+               var cachedData: T = cachedData
+                       private set
+               
+               fun updateIfNeeded(file: File) {
+                       val fileLastModified = file.lastModified()
+                       if (lastModified < fileLastModified) {
+                               lastModified = fileLastModified
+                               
+                               cachedData = processFile(file)
+                       }
+               }
+       }
+}
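
Usage sketch (not part of the commit): a FileDependentCache subclass only supplies processFile; lookups reuse the cached value until the file's lastModified timestamp advances. The TextFileCache name below is hypothetical.

    import info.mechyrdia.lore.FileDependentCache
    import java.io.File

    // Hypothetical subclass: caches the decoded text of each file and
    // re-reads it only when File.lastModified() reports a newer timestamp.
    private object TextFileCache : FileDependentCache<String>() {
        override fun processFile(file: File): String = file.readText()
    }

    fun main() {
        val file = File("example.txt")     // any readable file
        val first = TextFileCache[file]    // reads and caches
        val second = TextFileCache[file]   // served from cache (timestamp unchanged)
        println(first === second)          // true until the file is modified
    }
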
diff --git a/src/jvmMain/kotlin/info/mechyrdia/lore/asset_compression.kt b/src/jvmMain/kotlin/info/mechyrdia/lore/asset_compression.kt
index 1b2420a29dfeed2d429cfad618c693b66affe7df..4dc5d123edb74d1951df7805932a0dda3eb07621 100644 (file)
@@ -4,18 +4,20 @@ import io.ktor.http.*
 import io.ktor.server.application.*
 import io.ktor.server.request.*
 import io.ktor.server.response.*
+import io.ktor.util.*
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.runInterruptible
 import java.io.ByteArrayOutputStream
 import java.io.File
 import java.io.FilterOutputStream
 import java.io.OutputStream
-import java.util.concurrent.ConcurrentHashMap
 import java.util.zip.DeflaterOutputStream
 import java.util.zip.GZIPOutputStream
 
-private val gzippedCache = CompressedCache.fromCompressorFactory("gzip", ::GZIPOutputStream)
-private val deflatedCache = CompressedCache.fromCompressorFactory("deflate", ::DeflaterOutputStream)
+val CompressedFileAttributeKey = AttributeKey<File>("Mechyrdia.CompressedFile")
+
+private val gzippedCache = CompressedCache("gzip", ::GZIPOutputStream)
+private val deflatedCache = CompressedCache("deflate", ::DeflaterOutputStream)
 
 private fun getCacheByEncoding(encoding: String) = when (encoding) {
        "gzip" -> gzippedCache
@@ -33,51 +35,18 @@ private fun ApplicationCall.compressedCache(): CompressedCache? {
 suspend fun ApplicationCall.respondCompressedFile(file: File) {
        val cache = compressedCache() ?: return respondFile(file)
        response.header(HttpHeaders.ContentEncoding, cache.encoding)
-       val compressedBytes = runInterruptible(Dispatchers.IO) { cache.getCompressed(file) }
+       attributes.put(CompressedFileAttributeKey, file)
+       val compressedBytes = runInterruptible(Dispatchers.IO) { cache[file] }
        respondBytes(compressedBytes)
 }
 
-private class CompressedCache(val encoding: String, private val compressor: (ByteArray) -> ByteArray) {
-       private val cache = ConcurrentHashMap<File, CompressedCacheEntry>()
-       
-       fun getCompressed(file: File): ByteArray {
-               return cache.compute(file) { _, prevEntry ->
-                       prevEntry?.apply {
-                               updateIfNeeded(file, compressor)
-                       } ?: CompressedCacheEntry(file, compressor)
-               }!!.compressedData
-       }
-       
-       companion object {
-               fun fromCompressorFactory(encoding: String, compressorFactory: (OutputStream, Boolean) -> FilterOutputStream) = CompressedCache(encoding) { uncompressed ->
-                       ByteArrayOutputStream().also { oStream ->
-                               compressorFactory(oStream, true).use { gzip ->
-                                       gzip.write(uncompressed)
-                                       gzip.flush()
-                               }
-                       }.toByteArray()
-               }
-       }
-}
-
-private class CompressedCacheEntry private constructor(
-       lastModified: Long,
-       compressedData: ByteArray,
-) {
-       constructor(file: File, compressor: (ByteArray) -> ByteArray) : this(file.lastModified(), compressor(file.readBytes()))
-       
-       var lastModified: Long = lastModified
-               private set
-       
-       var compressedData: ByteArray = compressedData
-               private set
-       
-       fun updateIfNeeded(file: File, compressor: (ByteArray) -> ByteArray) {
-               val fileLastModified = file.lastModified()
-               if (lastModified < fileLastModified) {
-                       lastModified = fileLastModified
-                       
-                       compressedData = compressor(file.readBytes())
-               }
+private class CompressedCache(val encoding: String, private val compressorFactory: (OutputStream, Boolean) -> FilterOutputStream) : FileDependentCache<ByteArray>() {
+       override fun processFile(file: File): ByteArray {
+               return ByteArrayOutputStream().also { oStream ->
+                       compressorFactory(oStream, true).use { gzip ->
+                               file.inputStream().use { it.copyTo(gzip) }
+                               gzip.flush()
+                       }
+               }.toByteArray()
        }
 }
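
Call-site sketch (not part of the commit; the route path and asset directory are hypothetical): respondCompressedFile negotiates gzip/deflate from Accept-Encoding, serves bytes from the per-encoding cache, and records the source file under CompressedFileAttributeKey so the ConditionalHeaders plugin can later derive version headers for the compressed response.

    import info.mechyrdia.lore.respondCompressedFile
    import io.ktor.http.*
    import io.ktor.server.application.*
    import io.ktor.server.response.*
    import io.ktor.server.routing.*
    import java.io.File

    fun Application.assetRoutes() {
        routing {
            get("/assets/{name}") {
                // Sketch only: a real handler should sanitize `name` against path traversal.
                val name = call.parameters["name"]
                    ?: return@get call.respond(HttpStatusCode.BadRequest)
                val file = File("assets", name)
                if (!file.isFile) return@get call.respond(HttpStatusCode.NotFound)
                call.respondCompressedFile(file)
            }
        }
    }
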
diff --git a/src/jvmMain/kotlin/info/mechyrdia/lore/asset_hashing.kt b/src/jvmMain/kotlin/info/mechyrdia/lore/asset_hashing.kt
new file mode 100644 (file)
index 0000000..31b3a6f
--- /dev/null
@@ -0,0 +1,83 @@
+package info.mechyrdia.lore
+
+import io.ktor.http.content.*
+import io.ktor.server.application.*
+import io.ktor.server.http.content.*
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.runInterruptible
+import java.io.File
+import java.io.IOException
+import java.io.OutputStream
+import java.security.MessageDigest
+import java.util.*
+
+private class DigestingOutputStream(stomach: MessageDigest) : OutputStream() {
+       private var stomachStore: MessageDigest? = stomach
+       
+       private val stomach: MessageDigest
+               get() = stomachStore ?: throw IOException("Attempt to use DigestingOutputStream after it has been closed")
+       
+       val isWritable: Boolean
+               get() = stomachStore != null
+       
+       private var resultStore: ByteArray? = null
+       
+       val result: ByteArray
+               get() = resultStore ?: throw IOException("Attempt to retrieve result of DigestingOutputStream before it has finished")
+       
+       val isDone: Boolean
+               get() = resultStore != null
+       
+       override fun write(b: Int) {
+               stomach.update(b.toByte())
+       }
+       
+       override fun write(b: ByteArray) {
+               stomach.update(b)
+       }
+       
+       override fun write(b: ByteArray, off: Int, len: Int) {
+               stomach.update(b, off, len)
+       }
+       
+       override fun close() {
+               resultStore = stomach.digest()
+               stomachStore = null
+       }
+       
+       inline fun useAndGet(block: (DigestingOutputStream) -> Unit): ByteArray {
+               use(block)
+               return result
+       }
+}
+
+private class FileHashCache(val hashAlgo: String) : FileDependentCache<ByteArray>() {
+       private val hashinator: ThreadLocal<MessageDigest> = ThreadLocal.withInitial { MessageDigest.getInstance(hashAlgo) }
+       
+       override fun processFile(file: File): ByteArray {
+               return DigestingOutputStream(hashinator.get()).useAndGet { oStream ->
+                       file.inputStream().use { it.copyTo(oStream) }
+               }
+       }
+}
+
+private val eTagCache = FileHashCache("SHA-384")
+
+private val b64Encoder: Base64.Encoder = Base64.getUrlEncoder()
+
+suspend fun File.eTag(): String = runInterruptible(Dispatchers.IO) {
+       b64Encoder.encodeToString(eTagCache[this])
+}
+
+private suspend fun File.getVersionHeaders() = listOf(
+       LastModifiedVersion(lastModified()),
+       EntityTagVersion(eTag()),
+)
+
+suspend fun getVersionHeaders(call: ApplicationCall, outgoingContent: OutgoingContent): List<Version> {
+       return if (outgoingContent is LocalFileContent)
+               outgoingContent.file.getVersionHeaders()
+       else
+               call.attributes.getOrNull(CompressedFileAttributeKey)?.getVersionHeaders()
+                       ?: emptyList()
+}
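
For reference, a sketch of what File.eTag() evaluates to (plain JDK only; the expectedETag and checkETag helpers are hypothetical): the URL-safe Base64 encoding of the file's SHA-384 digest, recomputed only when the file's lastModified timestamp advances.

    import info.mechyrdia.lore.eTag
    import java.io.File
    import java.security.MessageDigest
    import java.util.Base64

    // Hypothetical cross-check: recompute the digest directly and compare it
    // with the cached value returned by File.eTag().
    fun expectedETag(file: File): String {
        val digest = MessageDigest.getInstance("SHA-384").digest(file.readBytes())
        return Base64.getUrlEncoder().encodeToString(digest)
    }

    suspend fun checkETag(file: File) {
        check(expectedETag(file) == file.eTag())
    }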