}
install(ConditionalHeaders) {
- version { _, outgoingContent ->
- if (outgoingContent is LocalFileContent)
- listOf(LastModifiedVersion(outgoingContent.file.lastModified()))
- else
- emptyList()
+ version { call, outgoingContent ->
+ getVersionHeaders(call, outgoingContent)
}
}
--- /dev/null
+package info.mechyrdia.lore
+
+import java.io.File
+import java.util.concurrent.ConcurrentHashMap
+
+/**
+ * Thread-safe, last-modified-aware cache of values derived from files.
+ *
+ * Each entry remembers the file's `lastModified()` timestamp at processing
+ * time; [get] recomputes the value only when the file's current timestamp is
+ * strictly newer. Subclasses implement [processFile] to produce the cached
+ * value from the file's contents.
+ *
+ * NOTE(review): recompute granularity is bounded by filesystem mtime
+ * resolution (often 1s) — two writes within the same tick may not be
+ * detected; confirm this is acceptable for the serving use case.
+ */
+abstract class FileDependentCache<T> {
+ private val cache = ConcurrentHashMap<File, Entry>()
+
+ // Derive the cached value from the file's current contents. Called under
+ // the ConcurrentHashMap.compute() lock for this key, which serializes
+ // processing per file and prevents duplicate work by concurrent readers.
+ protected abstract fun processFile(file: File): T
+
+ // Returns the cached value for [file], refreshing it first if the file's
+ // mtime advanced since it was last processed. compute() never maps to
+ // null here (we always return an Entry), hence the !!.
+ operator fun get(file: File): T {
+ return cache.compute(file) { _, prevEntry ->
+ prevEntry?.apply {
+ updateIfNeeded(file)
+ } ?: Entry(file)
+ }!!.cachedData
+ }
+
+ // One cached value plus the mtime observed when it was produced.
+ private inner class Entry private constructor(lastModified: Long, cachedData: T) {
+ // Arguments evaluate left-to-right: the mtime is captured BEFORE
+ // processing, so a write racing with processFile() leaves a stale
+ // (older) timestamp and forces a safe re-process on the next get().
+ constructor(file: File) : this(file.lastModified(), processFile(file))
+
+ var lastModified: Long = lastModified
+ private set
+
+ var cachedData: T = cachedData
+ private set
+
+ // Re-process the file iff its mtime is strictly newer than the one
+ // recorded at the last processing.
+ fun updateIfNeeded(file: File) {
+ val fileLastModified = file.lastModified()
+ if (lastModified < fileLastModified) {
+ lastModified = fileLastModified
+
+ cachedData = processFile(file)
+ }
+ }
+ }
+}
import io.ktor.server.application.*
import io.ktor.server.request.*
import io.ktor.server.response.*
+import io.ktor.util.*
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.runInterruptible
import java.io.ByteArrayOutputStream
import java.io.File
import java.io.FilterOutputStream
import java.io.OutputStream
-import java.util.concurrent.ConcurrentHashMap
import java.util.zip.DeflaterOutputStream
import java.util.zip.GZIPOutputStream
-private val gzippedCache = CompressedCache.fromCompressorFactory("gzip", ::GZIPOutputStream)
-private val deflatedCache = CompressedCache.fromCompressorFactory("deflate", ::DeflaterOutputStream)
+val CompressedFileAttributeKey = AttributeKey<File>("Mechyrdia.CompressedFile")
+
+private val gzippedCache = CompressedCache("gzip", ::GZIPOutputStream)
+private val deflatedCache = CompressedCache("deflate", ::DeflaterOutputStream)
private fun getCacheByEncoding(encoding: String) = when (encoding) {
"gzip" -> gzippedCache
suspend fun ApplicationCall.respondCompressedFile(file: File) {
val cache = compressedCache() ?: return respondFile(file)
response.header(HttpHeaders.ContentEncoding, cache.encoding)
- val compressedBytes = runInterruptible(Dispatchers.IO) { cache.getCompressed(file) }
+ // Record the source file on the call so the ConditionalHeaders version
+ // provider (getVersionHeaders) can derive Last-Modified/ETag for the
+ // compressed response, which is otherwise an opaque byte array.
+ attributes.put(CompressedFileAttributeKey, file)
+ val compressedBytes = runInterruptible(Dispatchers.IO) { cache[file] }
respondBytes(compressedBytes)
}
-private class CompressedCache(val encoding: String, private val compressor: (ByteArray) -> ByteArray) {
- private val cache = ConcurrentHashMap<File, CompressedCacheEntry>()
-
- fun getCompressed(file: File): ByteArray {
- return cache.compute(file) { _, prevEntry ->
- prevEntry?.apply {
- updateIfNeeded(file, compressor)
- } ?: CompressedCacheEntry(file, compressor)
- }!!.compressedData
- }
-
- companion object {
- fun fromCompressorFactory(encoding: String, compressorFactory: (OutputStream, Boolean) -> FilterOutputStream) = CompressedCache(encoding) { uncompressed ->
- ByteArrayOutputStream().also { oStream ->
- compressorFactory(oStream, true).use { gzip ->
- gzip.write(uncompressed)
- gzip.flush()
- }
- }.toByteArray()
- }
- }
-}
-
-private class CompressedCacheEntry private constructor(
- lastModified: Long,
- compressedData: ByteArray,
-) {
- constructor(file: File, compressor: (ByteArray) -> ByteArray) : this(file.lastModified(), compressor(file.readBytes()))
-
- var lastModified: Long = lastModified
- private set
-
- var compressedData: ByteArray = compressedData
- private set
-
- fun updateIfNeeded(file: File, compressor: (ByteArray) -> ByteArray) {
- val fileLastModified = file.lastModified()
- if (lastModified < fileLastModified) {
- lastModified = fileLastModified
-
- compressedData = compressor(file.readBytes())
- }
+/**
+ * File cache whose value is the file's bytes run through a compressing
+ * stream (gzip or deflate), rebuilt automatically when the file changes.
+ *
+ * @param encoding value for the Content-Encoding response header.
+ * @param compressorFactory wraps an OutputStream in a compressor; the
+ *   Boolean is the JDK's syncFlush flag (GZIPOutputStream/DeflaterOutputStream).
+ */
+private class CompressedCache(val encoding: String, private val compressorFactory: (OutputStream, Boolean) -> FilterOutputStream) : FileDependentCache<ByteArray>() {
+ override fun processFile(file: File): ByteArray {
+ return ByteArrayOutputStream().also { oStream ->
+ // `gzip` is historical naming — this may equally be a deflate stream.
+ // use{} closes the compressor, which finishes the compressed trailer.
+ compressorFactory(oStream, true).use { gzip ->
+ file.inputStream().use { it.copyTo(gzip) }
+ gzip.flush()
+ }
+ }.toByteArray()
+ }
}
--- /dev/null
+package info.mechyrdia.lore
+
+import io.ktor.http.content.*
+import io.ktor.server.application.*
+import io.ktor.server.http.content.*
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.runInterruptible
+import java.io.File
+import java.io.IOException
+import java.io.OutputStream
+import java.security.MessageDigest
+import java.util.*
+
+/**
+ * OutputStream that folds every written byte into a [MessageDigest] and
+ * exposes the finished hash via [result] after [close].
+ *
+ * Lifecycle: writable until closed; [close] finalizes the digest (which also
+ * resets the underlying MessageDigest for reuse) and makes [result] available.
+ * Writing after close, or reading [result] before close, throws [IOException].
+ */
+private class DigestingOutputStream(stomach: MessageDigest) : OutputStream() {
+ // Non-null while the stream is open; nulled out by close().
+ private var stomachStore: MessageDigest? = stomach
+
+ private val stomach: MessageDigest
+ get() = stomachStore ?: throw IOException("Attempt to use DigestingOutputStream after it has been closed")
+
+ val isWritable: Boolean
+ get() = stomachStore != null
+
+ // Holds the finished digest once close() has run.
+ private var resultStore: ByteArray? = null
+
+ val result: ByteArray
+ get() = resultStore ?: throw IOException("Attempt to retrieve result of DigestingOutputStream before it has finished")
+
+ val isDone: Boolean
+ get() = resultStore != null
+
+ override fun write(b: Int) {
+ stomach.update(b.toByte())
+ }
+
+ // Override the array variants so bulk writes hit the digest directly
+ // instead of going byte-by-byte through write(Int).
+ override fun write(b: ByteArray) {
+ stomach.update(b)
+ }
+
+ override fun write(b: ByteArray, off: Int, len: Int) {
+ stomach.update(b, off, len)
+ }
+
+ override fun close() {
+ // Closeable contract: closing an already-closed stream must have no
+ // effect, so bail out instead of letting the `stomach` getter throw
+ // on a second close (e.g. a double-close from nested use{} blocks).
+ val digest = stomachStore ?: return
+ resultStore = digest.digest()
+ stomachStore = null
+ }
+
+ // Run [block] against this stream, close it, and return the digest.
+ inline fun useAndGet(block: (DigestingOutputStream) -> Unit): ByteArray {
+ use(block)
+ return result
+ }
+}
+
+/**
+ * Caches the [hashAlgo] digest of a file's contents, recomputing when the
+ * file's mtime advances (via [FileDependentCache]).
+ *
+ * A ThreadLocal MessageDigest is used because MessageDigest instances are
+ * not thread-safe; digest() (called on stream close) resets the instance,
+ * so each thread can reuse its digest across files.
+ */
+private class FileHashCache(val hashAlgo: String) : FileDependentCache<ByteArray>() {
+ private val hashinator: ThreadLocal<MessageDigest> = ThreadLocal.withInitial { MessageDigest.getInstance(hashAlgo) }
+
+ override fun processFile(file: File): ByteArray {
+ // Stream the file through the digesting stream; useAndGet closes the
+ // stream, finalizing the digest and returning the hash bytes.
+ return DigestingOutputStream(hashinator.get()).useAndGet { oStream ->
+ file.inputStream().use { it.copyTo(oStream) }
+ }
+ }
+}
+
+// Content hashes used as ETag values; SHA-384 keeps the base64 form compact
+// while avoiding collisions for cache-validation purposes.
+private val eTagCache = FileHashCache("SHA-384")
+
+// URL-safe alphabet so the ETag contains no characters needing quoting.
+// Base64.Encoder is documented thread-safe, so one shared instance is fine.
+private val b64Encoder: Base64.Encoder = Base64.getUrlEncoder()
+
+// ETag for this file's current contents; hashing happens off the event loop
+// (Dispatchers.IO) and is cached until the file's mtime changes.
+suspend fun File.eTag(): String = runInterruptible(Dispatchers.IO) {
+ b64Encoder.encodeToString(eTagCache[this])
+}
+
+// ConditionalHeaders versions for a served file: Last-Modified from the
+// filesystem mtime plus a strong content-hash ETag.
+private suspend fun File.getVersionHeaders() = listOf(
+ LastModifiedVersion(lastModified()),
+ EntityTagVersion(eTag()),
+)
+
+// Version provider for the ConditionalHeaders plugin. Resolves the backing
+// file either directly (LocalFileContent) or via the attribute stashed by
+// respondCompressedFile; responses with no known backing file get no versions.
+suspend fun getVersionHeaders(call: ApplicationCall, outgoingContent: OutgoingContent): List<Version> {
+ return if (outgoingContent is LocalFileContent)
+ outgoingContent.file.getVersionHeaders()
+ else
+ call.attributes.getOrNull(CompressedFileAttributeKey)?.getVersionHeaders()
+ ?: emptyList()
+}