<option name="modules">
<set>
<option value="$PROJECT_DIR$" />
+ <option value="$PROJECT_DIR$/cache4k" />
<option value="$PROJECT_DIR$/externals" />
<option value="$PROJECT_DIR$/fontparser" />
</set>
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="KotlinJpsPluginSettings">
- <option name="version" value="1.9.21" />
+ <option name="version" value="1.9.23" />
</component>
</project>
\ No newline at end of file
val jvmMain by getting {
dependencies {
- implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core-jvm:1.7.3")
- implementation("org.jetbrains.kotlinx:kotlinx-coroutines-jdk8:1.7.3")
- implementation("org.jetbrains.kotlinx:kotlinx-coroutines-reactive:1.7.3")
- implementation("org.jetbrains.kotlinx:kotlinx-serialization-core-jvm:1.6.2")
- implementation("org.jetbrains.kotlinx:kotlinx-serialization-json-jvm:1.6.2")
+ implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core-jvm:1.8.0")
+ implementation("org.jetbrains.kotlinx:kotlinx-coroutines-jdk8:1.8.0")
+ implementation("org.jetbrains.kotlinx:kotlinx-coroutines-reactive:1.8.0")
+ implementation("org.jetbrains.kotlinx:kotlinx-serialization-core-jvm:1.6.3")
+ implementation("org.jetbrains.kotlinx:kotlinx-serialization-json-jvm:1.6.3")
implementation("io.ktor:ktor-server-core-jvm:2.3.9")
implementation("io.ktor:ktor-server-cio-jvm:2.3.9")
implementation("com.aventrix.jnanoid:jnanoid:2.0.0")
implementation("org.mongodb:mongodb-driver-kotlin-coroutine:5.0.0")
implementation("org.mongodb:bson-kotlinx:5.0.0")
+ implementation(project(":cache4k"))
implementation("org.slf4j:slf4j-api:2.0.7")
implementation("ch.qos.logback:logback-classic:1.4.14")
dependsOn(mapDeployToAssets)
}
// One-off administration task: runs the MigrateFiles entry point with the
// same JVM and classpath as the shadow run task.
tasks.register<JavaExec>("migrateToGridFs") {
    group = "administration"

    val runShadow: JavaExec by tasks

    // Reuse the launcher (toolchain) and runtime classpath of runShadow.
    javaLauncher.convention(runShadow.javaLauncher)
    classpath = runShadow.classpath
    mainClass.set("info.mechyrdia.data.MigrateFiles")
    args = listOf("config", "gridfs")
}
+
tasks.withType<JavaExec> {
javaLauncher.set(javaToolchains.launcherFor {
languageVersion.set(JavaLanguageVersion.of(17))
--- /dev/null
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+   Copyright 2021 Yang Chen
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
--- /dev/null
plugins {
    kotlin("jvm")
}

// Upstream group id retained for this locally-included cache4k module (see LICENSE).
group = "io.github.reactivecircus.cache4k"

repositories {
    mavenCentral()
}

dependencies {
    // Runtime libraries used by the cache implementation:
    // coroutines (Mutex), atomicfu (atomic refs/locks), stately (isolated collections).
    implementation("org.jetbrains.kotlinx:kotlinx-coroutines-core-jvm:1.8.0")
    implementation("org.jetbrains.kotlinx:atomicfu:0.23.2")
    implementation("co.touchlab:stately-iso-collections:2.0.6")
}
--- /dev/null
+package io.github.reactivecircus.cache4k
+
+import kotlin.time.Duration
+import kotlin.time.TimeSource
+
+/**
+ * An in-memory key-value cache with support for time-based (expiration) and size-based evictions.
+ */
public interface Cache<in Key : Any, Value : Any> {

    /**
     * Returns the value associated with [key] in this cache, or null if there is no
     * cached value for [key].
     */
    public fun get(key: Key): Value?

    /**
     * Returns the value associated with [key] in this cache if it exists,
     * otherwise gets the value by invoking [loader], associates the value with [key] in the cache,
     * and returns the cached value.
     *
     * The default implementation serializes concurrent loads for the same [key]
     * (see [KeyedSynchronizer]), so [loader] is not run redundantly for that key.
     *
     * Any exceptions thrown by the [loader] will be propagated to the caller of this function.
     */
    public suspend fun get(key: Key, loader: suspend () -> Value): Value

    /**
     * Associates [value] with [key] in this cache. If the cache previously contained a
     * value associated with [key], the old value is replaced by [value].
     */
    public fun put(key: Key, value: Value)

    /**
     * Invokes [loader] atomically on the previous value of [key], or null if absent,
     * associates the result of [loader] with [key] in the cache, invalidating if the result
     * is null, and returns the result.
     *
     * Any exceptions thrown by the [loader] will be propagated to the caller of this function.
     */
    public suspend fun processAtomic(key: Key, loader: suspend (Value?) -> Value?): Value?

    /**
     * Discards any cached value for key [key].
     */
    public fun invalidate(key: Key)

    /**
     * Discards all entries in the cache.
     */
    public fun invalidateAll()

    /**
     * Returns a defensive copy of cache entries as [Map]; later cache mutations are
     * not reflected in the returned map.
     */
    public fun asMap(): Map<in Key, Value>

    /**
     * Main entry point for creating a [Cache].
     */
    public interface Builder<K : Any, V : Any> {

        /**
         * Specifies that each entry should be automatically removed from the cache once a fixed duration
         * has elapsed after the entry's creation or the most recent replacement of its value.
         *
         * The default builder rejects non-positive durations with an [IllegalArgumentException]
         * (see [CacheBuilderImpl.expireAfterWrite]).
         */
        public fun expireAfterWrite(duration: Duration): Builder<K, V>

        /**
         * Specifies that each entry should be automatically removed from the cache once a fixed duration
         * has elapsed after the entry's creation, the most recent replacement of its value, or its last
         * access.
         *
         * The default builder rejects non-positive durations with an [IllegalArgumentException]
         * (see [CacheBuilderImpl.expireAfterAccess]).
         */
        public fun expireAfterAccess(duration: Duration): Builder<K, V>

        /**
         * Specifies the maximum number of entries the cache may contain.
         * Cache eviction policy is based on LRU - i.e. least recently accessed entries get evicted first.
         *
         * When [size] is 0, entries will be discarded immediately and no values will be cached.
         *
         * If not set, cache size will be unlimited.
         */
        public fun maximumCacheSize(size: Long): Builder<K, V>

        /**
         * Specifies a [TimeSource] to be used for expiry checks.
         * If not specified, [TimeSource.Monotonic] will be used.
         */
        public fun timeSource(timeSource: TimeSource): Builder<K, V>

        /**
         * Specifies a [CacheEventListener] to be used to handle cache events.
         */
        public fun eventListener(listener: CacheEventListener<K, V>): Builder<K, V>

        /**
         * Builds a new instance of [Cache] with the specified configurations.
         */
        public fun build(): Cache<K, V>

        public companion object {

            /**
             * Returns a new [Cache.Builder] instance backed by the default implementation.
             */
            public operator fun <K : Any, V : Any> invoke(): Builder<K, V> = CacheBuilderImpl()
        }
    }
}
+
+/**
+ * A default implementation of [Cache.Builder].
+ */
/**
 * The default [Cache.Builder] implementation backing [Cache.Builder.invoke].
 * Collects configuration and hands it to [RealCache] in [build].
 */
internal class CacheBuilderImpl<K : Any, V : Any> : Cache.Builder<K, V> {

    private var writeExpiry: Duration = Duration.INFINITE
    private var accessExpiry: Duration = Duration.INFINITE
    private var maxEntries: Long = UNSET_LONG
    private var clock: TimeSource? = null
    private var listener: CacheEventListener<K, V>? = null

    override fun expireAfterWrite(duration: Duration): CacheBuilderImpl<K, V> {
        require(duration.isPositive()) { "expireAfterWrite duration must be positive" }
        writeExpiry = duration
        return this
    }

    override fun expireAfterAccess(duration: Duration): CacheBuilderImpl<K, V> {
        require(duration.isPositive()) { "expireAfterAccess duration must be positive" }
        accessExpiry = duration
        return this
    }

    override fun maximumCacheSize(size: Long): CacheBuilderImpl<K, V> {
        require(size >= 0) { "maximum size must not be negative" }
        maxEntries = size
        return this
    }

    override fun timeSource(timeSource: TimeSource): Cache.Builder<K, V> {
        clock = timeSource
        return this
    }

    override fun eventListener(listener: CacheEventListener<K, V>): Cache.Builder<K, V> {
        this.listener = listener
        return this
    }

    override fun build(): Cache<K, V> = RealCache(
        expireAfterWriteDuration = writeExpiry,
        expireAfterAccessDuration = accessExpiry,
        maxSize = maxEntries,
        timeSource = clock ?: TimeSource.Monotonic,
        eventListener = listener,
    )

    companion object {
        // Sentinel meaning "maximum size not configured" (size-based eviction disabled).
        internal const val UNSET_LONG: Long = -1
    }
}
--- /dev/null
+package io.github.reactivecircus.cache4k
+
+/**
+ * An event resulting from a mutative [Cache] operation.
+ */
/**
 * An event resulting from a mutative [Cache] operation.
 * Each concrete event carries the affected [key] plus the value(s) involved.
 */
public sealed interface CacheEvent<Key : Any, Value : Any> {
    public val key: Key

    /** A new entry was inserted for a previously absent [key]. */
    public class Created<Key : Any, Value : Any>(
        override val key: Key,
        public val value: Value,
    ) : CacheEvent<Key, Value> {
        override fun toString(): String = "Created(key=$key, value=$value)"

        override fun equals(other: Any?): Boolean {
            if (other === this) return true
            val that = other as? Created<*, *> ?: return false
            return key == that.key && value == that.value
        }

        override fun hashCode(): Int = 31 * key.hashCode() + value.hashCode()
    }

    /** An existing entry's value was replaced. */
    public class Updated<Key : Any, Value : Any>(
        override val key: Key,
        public val oldValue: Value,
        public val newValue: Value,
    ) : CacheEvent<Key, Value> {
        override fun toString(): String =
            "Updated(key=$key, oldValue=$oldValue, newValue=$newValue)"

        override fun equals(other: Any?): Boolean {
            if (other === this) return true
            val that = other as? Updated<*, *> ?: return false
            return key == that.key && oldValue == that.oldValue && newValue == that.newValue
        }

        override fun hashCode(): Int =
            (31 * key.hashCode() + oldValue.hashCode()) * 31 + newValue.hashCode()
    }

    /** An entry was explicitly removed via invalidation. */
    public class Removed<Key : Any, Value : Any>(
        override val key: Key,
        public val value: Value,
    ) : CacheEvent<Key, Value> {
        override fun toString(): String = "Removed(key=$key, value=$value)"

        override fun equals(other: Any?): Boolean {
            if (other === this) return true
            val that = other as? Removed<*, *> ?: return false
            return key == that.key && value == that.value
        }

        override fun hashCode(): Int = 31 * key.hashCode() + value.hashCode()
    }

    /** An entry was dropped because its time-based expiry elapsed. */
    public class Expired<Key : Any, Value : Any>(
        override val key: Key,
        public val value: Value,
    ) : CacheEvent<Key, Value> {
        override fun toString(): String = "Expired(key=$key, value=$value)"

        override fun equals(other: Any?): Boolean {
            if (other === this) return true
            val that = other as? Expired<*, *> ?: return false
            return key == that.key && value == that.value
        }

        override fun hashCode(): Int = 31 * key.hashCode() + value.hashCode()
    }

    /** An entry was dropped to keep the cache within its maximum size. */
    public class Evicted<Key : Any, Value : Any>(
        override val key: Key,
        public val value: Value,
    ) : CacheEvent<Key, Value> {
        override fun toString(): String = "Evicted(key=$key, value=$value)"

        override fun equals(other: Any?): Boolean {
            if (other === this) return true
            val that = other as? Evicted<*, *> ?: return false
            return key == that.key && value == that.value
        }

        override fun hashCode(): Int = 31 * key.hashCode() + value.hashCode()
    }
}
+
+/**
+ * Definition of the contract for implementing listeners to receive [CacheEvent]s from a [Cache].
+ */
/**
 * Definition of the contract for implementing listeners to receive [CacheEvent]s from a [Cache].
 *
 * This is a `fun interface`, so a listener may be supplied as a lambda.
 */
public fun interface CacheEventListener<Key : Any, Value : Any> {
    /**
     * Invoked on [CacheEvent] firing.
     *
     * Cache entry event firing behaviors for mutative methods:
     *
     * | Initial value | Operation | New value | Event |
     * |:-----------------|:-------------------------|:----------|:---------------------------------|
     * | {} | put(K, V) | {K: V} | Created(K, V) |
     * | {K: V1} | put(K, V2) | {K: V2} | Updated(K, V1, V2) |
     * | {K: V} | invalidate(K) | {} | Removed(K, V) |
     * | {K1: V1, K2: V2} | invalidateAll() | {} | Removed(K1, V1), Removed(K2, V2) |
     * | {K: V} | any operation, K expired | {} | Expired(K, V) |
     * | {K1: V1} | put(K2, V2), K1 evicted | {K2: V2} | Created(K2, V2), Evicted(K1, V1) |
     *
     */
    public fun onEvent(event: CacheEvent<Key, Value>)
}
--- /dev/null
+package io.github.reactivecircus.cache4k
+
+import java.util.concurrent.ConcurrentHashMap
+
+internal typealias ConcurrentMutableMap<Key, Value> = ConcurrentHashMap<Key, Value>
--- /dev/null
+package io.github.reactivecircus.cache4k
+
+import kotlinx.atomicfu.AtomicLong
+import kotlinx.atomicfu.atomic
+import kotlinx.atomicfu.update
+import kotlin.time.AbstractLongTimeSource
+import kotlin.time.Duration
+import kotlin.time.DurationUnit
+
+/**
+ * A time source that has programmatically updatable readings with support for multi-threaded access in Kotlin/Native.
+ *
+ * Implementation is identical to [kotlin.time.TestTimeSource] except the internal [reading] is an [AtomicLong].
+ */
public class FakeTimeSource : AbstractLongTimeSource(unit = DurationUnit.NANOSECONDS) {

    // Current reading in nanoseconds; atomic so the source can be advanced and
    // read from multiple threads.
    private val reading = atomic(0L)

    override fun read(): Long = reading.value

    /**
     * Advances the current reading value of this time source by the specified [duration].
     *
     * [duration] value is rounded down towards zero when converting it to a [Long] number of nanoseconds.
     * For example, if the duration being added is `0.6.nanoseconds`, the reading doesn't advance because
     * the duration value is rounded to zero nanoseconds.
     *
     * @throws IllegalStateException when the reading value overflows as the result of this operation.
     */
    public operator fun plusAssign(duration: Duration) {
        val delta = duration.toDouble(unit)
        val longDelta = delta.toLong()
        // update retries on CAS failure; overflow() throws out of the loop.
        reading.update { currentReading ->
            if (longDelta != Long.MIN_VALUE && longDelta != Long.MAX_VALUE) {
                // when delta fits in long, add it as long
                val newReading = currentReading + longDelta
                // signed-add overflow: operands share a sign but the sum's sign differs
                if (currentReading xor longDelta >= 0 && currentReading xor newReading < 0) {
                    overflow(duration)
                }
                newReading
            } else {
                // when delta is greater than long, add it as double
                val newReading = currentReading + delta
                if (newReading > Long.MAX_VALUE || newReading < Long.MIN_VALUE) {
                    overflow(duration)
                }
                newReading.toLong()
            }
        }
    }

    private fun overflow(duration: Duration) {
        // NOTE(review): interpolates the atomic wrapper itself; atomicfu's
        // toString appears to print the held value — confirm message format.
        throw IllegalStateException(
            "FakeTimeSource will overflow if its reading ${reading}ns is advanced by $duration."
        )
    }
}
--- /dev/null
+package io.github.reactivecircus.cache4k
+
+import kotlinx.atomicfu.locks.reentrantLock
+import kotlinx.atomicfu.locks.withLock
+import kotlinx.coroutines.sync.Mutex
+import kotlinx.coroutines.sync.withLock
+
+/**
+ * Provides a mechanism for performing key-based synchronization.
+ */
/**
 * Provides a mechanism for performing key-based synchronization: actions for the
 * same key are mutually exclusive, actions for different keys may run concurrently.
 *
 * A per-key [MutexEntry] is reference-counted so the map entry can be discarded
 * once no caller is using or waiting on that key's mutex.
 */
internal class KeyedSynchronizer<Key : Any> {

    // key -> (mutex, number of callers currently holding or waiting for it)
    private val keyBasedMutexes = ConcurrentMutableMap<Key, MutexEntry>()

    // Guards the counter bookkeeping in getMutex/removeMutex.
    private val mapLock = reentrantLock()

    /**
     * Executes the given [action] under a mutex associated with the [key].
     * When called concurrently, all actions associated with the same [key] are mutually exclusive.
     */
    suspend fun <T> synchronizedFor(key: Key, action: suspend () -> T): T {
        return getMutex(key).withLock {
            try {
                action()
            } finally {
                // Decrement/cleanup happens while still holding the key's mutex;
                // waiters have already incremented the counter in getMutex, so the
                // entry is only dropped when nobody else references it.
                removeMutex(key)
            }
        }
    }

    /**
     * Try to get a [MutexEntry] for the given [key] from the map.
     * If one cannot be found, create a new [MutexEntry], save it to the map, and return it.
     */
    private fun getMutex(key: Key): Mutex {
        mapLock.withLock {
            val mutexEntry = keyBasedMutexes[key] ?: MutexEntry(Mutex(), 0)
            // increment the counter to indicate a new thread is using the lock
            mutexEntry.counter++
            // save the lock entry to the map if it has just been created
            if (keyBasedMutexes[key] == null) {
                keyBasedMutexes.put(key, mutexEntry)
            }

            return mutexEntry.mutex
        }
    }

    /**
     * Remove the [MutexEntry] associated with the given [key] from the map
     * if no other thread is using the mutex.
     */
    private fun removeMutex(key: Key) {
        mapLock.withLock {
            // decrement the counter to indicate the lock is no longer needed for this thread,
            // then remove the lock entry from map if no other thread is still holding this lock
            val mutexEntry = keyBasedMutexes[key] ?: return
            mutexEntry.counter--
            if (mutexEntry.counter == 0) {
                keyBasedMutexes.remove(key)
            }
        }
    }
}
+
/**
 * A [Mutex] paired with a reference count of callers currently using or
 * waiting on it; mutated only under [KeyedSynchronizer]'s map lock.
 */
private class MutexEntry(
    val mutex: Mutex,
    var counter: Int
)
--- /dev/null
+package io.github.reactivecircus.cache4k
+
+import co.touchlab.stately.collections.IsoMutableSet
+import kotlinx.atomicfu.AtomicRef
+import kotlinx.atomicfu.atomic
+import kotlinx.atomicfu.update
+import kotlin.time.Duration
+import kotlin.time.TimeMark
+import kotlin.time.TimeSource
+
+/**
+ * A Kotlin Multiplatform [Cache] implementation powered by touchlab/Stately.
+ *
+ * Two types of evictions are supported:
+ *
+ * 1. Time-based evictions (expiration)
+ * 2. Size-based evictions
+ *
+ * Time-based evictions are enabled by specifying [expireAfterWriteDuration] and/or [expireAfterAccessDuration].
+ * When [expireAfterWriteDuration] is specified, entries will be automatically removed from the cache
+ * once a fixed duration has elapsed after the entry's creation
+ * or most recent replacement of its value.
+ * When [expireAfterAccessDuration] is specified, entries will be automatically removed from the cache
+ * once a fixed duration has elapsed after the entry's creation,
+ * the most recent replacement of its value, or its last access.
+ *
+ * Note that creation and replacement of an entry is also considered an access.
+ *
+ * Size-based evictions are enabled by specifying [maxSize]. When the size of the cache entries grows
+ * beyond [maxSize], least recently accessed entries will be evicted.
+ */
internal class RealCache<Key : Any, Value : Any>(
    val expireAfterWriteDuration: Duration,
    val expireAfterAccessDuration: Duration,
    val maxSize: Long,
    val timeSource: TimeSource,
    private val eventListener: CacheEventListener<Key, Value>?,
) : Cache<Key, Value> {

    // Primary storage: key -> entry (value + expiry/eviction metadata).
    private val cacheEntries = ConcurrentMutableMap<Key, CacheEntry<Key, Value>>()

    /**
     * Whether to perform size based evictions.
     */
    private val evictsBySize = maxSize >= 0

    /**
     * Whether to perform write-time based expiration.
     */
    private val expiresAfterWrite = expireAfterWriteDuration.isFinite()

    /**
     * Whether to perform access-time (both read and write) based expiration.
     */
    private val expiresAfterAccess = expireAfterAccessDuration.isFinite()

    /**
     * A key-based synchronizer for running cache loaders.
     */
    private val loadersSynchronizer = KeyedSynchronizer<Key>()

    /**
     * A queue of unique cache entries ordered by write time.
     * Used for performing write-time based cache expiration.
     */
    private val writeQueue: IsoMutableSet<CacheEntry<Key, Value>>? =
        takeIf { expiresAfterWrite }?.let {
            ReorderingIsoMutableSet()
        }

    /**
     * A queue of unique cache entries ordered by access time.
     * Used for performing both write-time and read-time based cache expiration
     * as well as size-based eviction.
     *
     * Note that a write is also considered an access.
     */
    private val accessQueue: IsoMutableSet<CacheEntry<Key, Value>>? =
        takeIf { expiresAfterAccess || evictsBySize }?.let {
            ReorderingIsoMutableSet()
        }

    override fun get(key: Key): Value? {
        return cacheEntries[key]?.let {
            if (it.isExpired()) {
                // clean up expired entries and return null
                expireEntries()
                null
            } else {
                // update eviction metadata
                recordRead(it)
                it.value.value
            }
        }
    }

    override suspend fun get(key: Key, loader: suspend () -> Value): Value {
        // Loads for the same key are serialized so the loader runs at most once
        // per concurrent miss for that key.
        return loadersSynchronizer.synchronizedFor(key) {
            cacheEntries[key]?.let {
                if (it.isExpired()) {
                    // clean up expired entries
                    expireEntries()
                    null
                } else {
                    // update eviction metadata
                    recordRead(it)
                    it.value.value
                }
            } ?: loader().let { loadedValue ->
                // A direct put() may have raced with the loader; prefer the value
                // that is now cached over the freshly loaded one.
                val existingValue = get(key)
                if (existingValue != null) {
                    existingValue
                } else {
                    put(key, loadedValue)
                    loadedValue
                }
            }
        }
    }

    override fun put(key: Key, value: Value) {
        expireEntries()

        val existingEntry = cacheEntries[key]
        val oldValue = existingEntry?.value?.value
        if (existingEntry != null) {
            // cache entry found
            recordWrite(existingEntry)
            existingEntry.value.value = value
        } else {
            // create a new cache entry
            val nowTimeMark = timeSource.markNow()
            val newEntry = CacheEntry(
                key = key,
                value = atomic(value),
                accessTimeMark = atomic(nowTimeMark),
                writeTimeMark = atomic(nowTimeMark),
            )
            recordWrite(newEntry)
            cacheEntries.put(key, newEntry)
        }
        // Fire Updated when a previous value existed, Created otherwise.
        onEvent(
            oldValue?.let {
                CacheEvent.Updated(key = key, oldValue = it, newValue = value)
            } ?: CacheEvent.Created(key = key, value = value)
        )

        evictEntries()
    }

    override suspend fun processAtomic(key: Key, loader: suspend (Value?) -> Value?): Value? {
        // Runs under the key's mutex so read-modify-write is atomic per key.
        return loadersSynchronizer.synchronizedFor(key) {
            val previous = cacheEntries[key]?.let {
                if (it.isExpired()) {
                    // clean up expired entries
                    expireEntries()
                    null
                } else {
                    // update eviction metadata
                    recordRead(it)
                    it.value.value
                }
            }

            val updated = loader(previous)
            if (updated == null) {
                // null result means "remove"; only invalidate if something was cached
                if (previous != null) invalidate(key)
                null
            } else {
                // referential check: skip the write when the loader returned the
                // exact same instance it was given
                if (previous !== updated) put(key, updated)
                updated
            }
        }
    }

    override fun invalidate(key: Key) {
        expireEntries()
        cacheEntries.remove(key)?.also {
            // keep the expiry/eviction queues in sync with the map
            writeQueue?.remove(it)
            accessQueue?.remove(it)
            onEvent(
                CacheEvent.Removed(
                    key = it.key,
                    value = it.value.value,
                )
            )
        }
    }

    override fun invalidateAll() {
        // Emit Removed events first (only when a listener is attached), then clear.
        if (eventListener != null) {
            cacheEntries.values.forEach { entry ->
                onEvent(
                    CacheEvent.Removed(
                        key = entry.key,
                        value = entry.value.value,
                    )
                )
            }
        }
        cacheEntries.clear()
        writeQueue?.clear()
        accessQueue?.clear()
    }

    override fun asMap(): Map<in Key, Value> {
        // associate builds a new map, so callers cannot mutate cache internals
        return cacheEntries.values.associate { entry ->
            entry.key to entry.value.value
        }
    }

    /**
     * Remove all expired entries.
     */
    private fun expireEntries() {
        val queuesToProcess = listOfNotNull(
            if (expiresAfterWrite) writeQueue else null,
            if (expiresAfterAccess) accessQueue else null
        )

        queuesToProcess.forEach { queue ->
            // access {} runs the scan under the queue's isolation lock
            queue.access {
                val iterator = queue.iterator()
                for (entry in iterator) {
                    if (entry.isExpired()) {
                        cacheEntries.remove(entry.key)
                        // remove the entry from the current queue
                        iterator.remove()
                        onEvent(
                            CacheEvent.Expired(
                                key = entry.key,
                                value = entry.value.value,
                            )
                        )
                    } else {
                        // found unexpired entry, no need to look any further
                        // (queues are ordered oldest-first by write/access time)
                        break
                    }
                }
            }
        }
    }

    /**
     * Check whether the [CacheEntry] has expired based on either access time or write time.
     */
    private fun CacheEntry<Key, Value>.isExpired(): Boolean {
        return expiresAfterAccess && (accessTimeMark.value + expireAfterAccessDuration).hasPassedNow() ||
            expiresAfterWrite && (writeTimeMark.value + expireAfterWriteDuration).hasPassedNow()
    }

    /**
     * Evict least recently accessed entries until [cacheEntries] is no longer over capacity.
     */
    private fun evictEntries() {
        if (!evictsBySize) {
            return
        }

        checkNotNull(accessQueue)

        // NOTE(review): if accessQueue ever became empty while cacheEntries is
        // still over capacity, this loop would spin; it relies on the queue and
        // the map staying in sync — confirm that invariant holds under races.
        while (cacheEntries.size > maxSize) {
            accessQueue.access {
                it.firstOrNull()?.run {
                    cacheEntries.remove(key)
                    writeQueue?.remove(this)
                    accessQueue.remove(this)
                    onEvent(
                        CacheEvent.Evicted(
                            key = key,
                            value = value.value,
                        )
                    )
                }
            }
        }
    }

    /**
     * Update the eviction metadata on the [cacheEntry] which has just been read.
     */
    private fun recordRead(cacheEntry: CacheEntry<Key, Value>) {
        if (expiresAfterAccess) {
            // advance the stored mark to "now" (mark + elapsed == current time)
            val accessTimeMark = cacheEntry.accessTimeMark.value
            cacheEntry.accessTimeMark.update { accessTimeMark + accessTimeMark.elapsedNow() }
        }
        // re-adding moves the entry to the back of the LRU order
        accessQueue?.add(cacheEntry)
    }

    /**
     * Update the eviction metadata on the [CacheEntry] which is about to be written.
     * Note that a write is also considered an access.
     */
    private fun recordWrite(cacheEntry: CacheEntry<Key, Value>) {
        if (expiresAfterAccess) {
            val accessTimeMark = cacheEntry.accessTimeMark.value
            cacheEntry.accessTimeMark.update { (accessTimeMark + accessTimeMark.elapsedNow()) }
        }
        if (expiresAfterWrite) {
            val writeTimeMark = cacheEntry.writeTimeMark.value
            cacheEntry.writeTimeMark.update { (writeTimeMark + writeTimeMark.elapsedNow()) }
        }
        accessQueue?.add(cacheEntry)
        writeQueue?.add(cacheEntry)
    }

    // Forward an event to the listener, if any.
    private fun onEvent(event: CacheEvent<Key, Value>) {
        eventListener?.onEvent(event)
    }
}
+
+/**
+ * A cache entry holds the [key] and [value] pair,
+ * along with the metadata needed to perform cache expiration and eviction.
+ */
/**
 * A cache entry holds the [key] and [value] pair,
 * along with the metadata needed to perform cache expiration and eviction.
 */
private class CacheEntry<Key : Any, Value : Any>(
    val key: Key,
    val value: AtomicRef<Value>,              // current value, replaced atomically on put
    val accessTimeMark: AtomicRef<TimeMark>,  // last read/write; drives access expiry and LRU
    val writeTimeMark: AtomicRef<TimeMark>,   // last write; drives write expiry
)
--- /dev/null
+package io.github.reactivecircus.cache4k
+
+import co.touchlab.stately.collections.IsoMutableSet
+
+/**
+ * A custom [IsoMutableSet] that updates the insertion order when an element is re-inserted,
+ * i.e. an inserted element will always be placed at the end
+ * regardless of whether the element already exists.
+ */
/**
 * A custom [IsoMutableSet] that updates the insertion order when an element is re-inserted,
 * i.e. an inserted element will always be placed at the end
 * regardless of whether the element already exists.
 */
internal class ReorderingIsoMutableSet<T> : IsoMutableSet<T>(), MutableSet<T> {
    // remove-then-add moves the element to the back; the whole sequence runs
    // under the set's isolation lock via access {}.
    override fun add(element: T): Boolean = access {
        val exists = remove(element)
        super.add(element)
        // respect the contract "true if this set did not already contain the specified element"
        !exists
    }
}
rootProject.name = "factbooks"
+include("cache4k")
include("externals")
include("fontparser")
//include("fightgame")
import info.mechyrdia.data.Id
import info.mechyrdia.data.NationData
+import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
import java.io.File
+/**
+ * Selects where stored files live. Serialized polymorphically; the @SerialName
+ * values act as the type discriminators in the JSON configuration.
+ */
+@Serializable
+sealed class FileStorageConfig {
+ // Flat files on the local filesystem, rooted at [baseDir].
+ @Serializable
+ @SerialName("flat")
+ data class Flat(val baseDir: String) : FileStorageConfig()
+
+ // Files stored in MongoDB GridFS (connection settings come from the main configuration).
+ @Serializable
+ @SerialName("gridFS")
+ data object GridFs : FileStorageConfig()
+}
+
@Serializable
data class Configuration(
val host: String = "127.0.0.1",
val isDevMode: Boolean = false,
- val rootDir: String = "..",
- val articleDir: String = "$rootDir/lore",
- val assetDir: String = "$rootDir/assets",
- val templateDir: String = "$rootDir/tpl",
- val jsonDocDir: String = "$rootDir/data",
- val scriptDir: String = "$rootDir/funcs",
- val april1Dir: String = "$rootDir/funny",
+ val storage: FileStorageConfig = FileStorageConfig.Flat(".."),
val dbName: String = "nslore",
val dbConn: String = "mongodb://localhost:27017",
val ownerNation: String = "mechyrdia",
) {
companion object {
- val CurrentConfiguration: Configuration by lazy {
+ val Current: Configuration by lazy {
val file = File(System.getProperty("factbooks.configpath", "./config.json"))
if (!file.isFile) {
if (file.exists())
}
val OwnerNationId: Id<NationData>
- get() = Id(Configuration.CurrentConfiguration.ownerNation)
+ get() = Id(Configuration.Current.ownerNation)
fun main() {
System.setProperty("logback.statusListenerClass", "ch.qos.logback.core.status.NopStatusListener")
- System.setProperty("io.ktor.development", Configuration.CurrentConfiguration.isDevMode.toString())
+ System.setProperty("io.ktor.development", Configuration.Current.isDevMode.toString())
- ConnectionHolder.initialize(Configuration.CurrentConfiguration.dbConn, Configuration.CurrentConfiguration.dbName)
+ ConnectionHolder.initialize(Configuration.Current.dbConn, Configuration.Current.dbName)
- embeddedServer(CIO, port = Configuration.CurrentConfiguration.port, host = Configuration.CurrentConfiguration.host, module = Application::factbooks).start(wait = true)
+ FileStorage.initialize()
+
+ embeddedServer(CIO, port = Configuration.Current.port, host = Configuration.Current.host, module = Application::factbooks).start(wait = true)
}
fun Application.factbooks() {
install(DefaultHeaders)
install(XForwardedHeaders) {
+ // Running behind a single NGINX reverse proxy
+ // which corresponds to the last entry in the
+ // X-Forwarded headers
useLastProxy()
}
}
install(ConditionalHeaders) {
- version { call, outgoingContent ->
- getVersionHeaders(call, outgoingContent)
+ version { call, _ ->
+ getVersionHeaders(call)
}
}
--- /dev/null
+@file:JvmName("MigrateFiles")
+
+package info.mechyrdia.data
+
+import info.mechyrdia.Configuration
+import info.mechyrdia.FileStorageConfig
+import kotlinx.coroutines.async
+import kotlinx.coroutines.awaitAll
+import kotlinx.coroutines.coroutineScope
+import kotlinx.coroutines.flow.map
+import kotlinx.coroutines.flow.toList
+import kotlinx.coroutines.runBlocking
+import kotlin.system.exitProcess
+
+/**
+ * Prints CLI usage for the migration tool and terminates the process.
+ * Declared as [Nothing] so callers can use it in any expression position.
+ */
+private fun printUsage(): Nothing {
+    // Fixed the garbled phrasing of the second line ("Both arguments are of either
+    // following format"); content is otherwise unchanged.
+    println(
+        """
+        Usage: <FROM> <TO>
+        Each argument takes one of the following formats:
+          gridfs - use GridFS (database connection indicated by config.json)
+          config - storage indicated in config file
+          file:<relative-path> - use flat-file storage
+        """.trimIndent()
+    )
+    exitProcess(-1)
+}
+
+/**
+ * Parses a CLI storage argument into a [FileStorageConfig].
+ *
+ * Accepted forms: "config" (use the configured storage), "gridfs",
+ * or "file:<relative-path>" for flat-file storage rooted at that path.
+ * Any other input prints a diagnostic and exits via [printUsage].
+ */
+private fun String.parseStorage(): FileStorageConfig = when {
+    // `when` replaces the if/else-if chain; Configuration.Current is now only
+    // touched when the "config" form is actually requested.
+    this == "config" -> Configuration.Current.storage
+    this == "gridfs" -> FileStorageConfig.GridFs
+    startsWith("file:") -> FileStorageConfig.Flat(removePrefix("file:"))
+    else -> {
+        println("Invalid format for argument value $this")
+        printUsage()
+    }
+}
+
+/**
+ * Copies a single file from [from] into [into].
+ * @return human-readable error messages; empty on success.
+ */
+private suspend fun migrateFile(path: StoragePath, from: FileStorage, into: FileStorage): List<String> {
+    val contents = from.readFile(path)
+        ?: return listOf("[Source Error] File does not exist at /$path")
+
+    return if (into.writeFile(path, contents)) emptyList()
+    else listOf("[Target Error] File at /$path cannot be written to")
+}
+
+/**
+ * Migrates every entry directly under [path], recursing into subdirectories.
+ * Shared by [migrateDir] and [migrateRoot], which previously duplicated this loop.
+ */
+private suspend fun migrateChildren(path: StoragePath, from: FileStorage, into: FileStorage): List<String> =
+    coroutineScope {
+        from.listDir(path).map { entry ->
+            // Each child is migrated concurrently; errors are gathered, not thrown.
+            async {
+                val entryPath = path / entry.name
+                when (entry.type) {
+                    StoredFileType.FILE -> migrateFile(entryPath, from, into)
+                    StoredFileType.DIRECTORY -> migrateDir(entryPath, from, into)
+                }
+            }
+        }.toList().awaitAll().flatten()
+    }
+
+/**
+ * Creates [path] in [into], then migrates its contents from [from].
+ * @return accumulated error messages; empty on success.
+ */
+private suspend fun migrateDir(path: StoragePath, from: FileStorage, into: FileStorage): List<String> {
+    if (!into.createDir(path))
+        return listOf("[Target Error] Directory at /$path cannot be created")
+
+    return migrateChildren(path, from, into)
+}
+
+/**
+ * Migrates the entire storage tree. The root itself is never created in the
+ * target, matching the original behavior of skipping createDir for the root.
+ */
+private suspend fun migrateRoot(from: FileStorage, into: FileStorage): List<String> =
+    migrateChildren(StoragePath.Root, from, into)
+
+/**
+ * CLI entry point: migrates all stored files between two storage backends.
+ * Expects exactly two arguments, each parsed by [parseStorage].
+ */
+fun main(args: Array<String>) {
+ if (args.size != 2) {
+ println("Invalid number of arguments ${args.size}, expected 2")
+ printUsage()
+ }
+
+ // Data-class/object equality makes "same backend" detection reliable here.
+ val (from, into) = args.map { it.parseStorage() }
+ if (from == into) {
+ println("Cannot migrate storage to itself")
+ printUsage()
+ }
+
+ val errors = runBlocking {
+ // Silence logback's status output before the driver initializes.
+ System.setProperty("logback.statusListenerClass", "ch.qos.logback.core.status.NopStatusListener")
+
+ ConnectionHolder.initialize(Configuration.Current.dbConn, Configuration.Current.dbName)
+
+ val fromStorage = FileStorage(from)
+ val intoStorage = FileStorage(into)
+
+ migrateRoot(fromStorage, intoStorage)
+ }
+
+ if (errors.isEmpty())
+ println("Successful migration! No errors encountered!")
+ else {
+ println("Migration encountered ${errors.size} ${errors.size.pluralize("error")}")
+ for (error in errors)
+ println(error)
+ }
+}
import kotlinx.serialization.ExperimentalSerializationApi
import kotlinx.serialization.KSerializer
-import kotlinx.serialization.Serializable
import kotlinx.serialization.SerializationException
-import kotlinx.serialization.builtins.ListSerializer
import kotlinx.serialization.descriptors.PrimitiveKind
import kotlinx.serialization.descriptors.PrimitiveSerialDescriptor
import kotlinx.serialization.descriptors.SerialDescriptor
import kotlinx.serialization.encoding.Decoder
import kotlinx.serialization.encoding.Encoder
import org.bson.BsonDateTime
-import org.bson.BsonNull
import org.bson.BsonReader
import org.bson.BsonWriter
import org.bson.codecs.Codec
import org.bson.codecs.configuration.CodecRegistry
import org.bson.codecs.kotlinx.BsonDecoder
import org.bson.codecs.kotlinx.BsonEncoder
-import org.bson.types.ObjectId
import java.time.Instant
object IdCodec : Codec<Id<*>> {
}
}
-@Serializable
-data class MongoDbMapEntry<K, V>(val key: K, val value: V)
-
-class MongoDbMapSerializer<K, V>(val keySerializer: KSerializer<K>, val valueSerializer: KSerializer<V>) : KSerializer<Map<K, V>> {
- private val innerSerializer = ListSerializer(MongoDbMapEntry.serializer(keySerializer, valueSerializer))
-
- override val descriptor: SerialDescriptor = innerSerializer.descriptor
-
- override fun serialize(encoder: Encoder, value: Map<K, V>) {
- innerSerializer.serialize(encoder, value.map { MongoDbMapEntry(it.key, it.value) })
- }
-
- override fun deserialize(decoder: Decoder): Map<K, V> {
- return innerSerializer.deserialize(decoder).associate { it.key to it.value }
- }
-}
-
object InstantSerializer : KSerializer<Instant> {
override val descriptor: SerialDescriptor = PrimitiveSerialDescriptor("InstantSerializer", PrimitiveKind.LONG)
return Instant.ofEpochMilli(decoder.decodeBsonValue().asDateTime().value)
}
}
-
-object InstantNullableSerializer : KSerializer<Instant?> {
- override val descriptor: SerialDescriptor = PrimitiveSerialDescriptor("InstantSerializer", PrimitiveKind.LONG)
-
- override fun serialize(encoder: Encoder, value: Instant?) {
- if (encoder !is BsonEncoder)
- throw SerializationException("Instant is not supported by ${encoder::class}")
-
- if (value == null)
- encoder.encodeBsonValue(BsonNull.VALUE)
- else
- encoder.encodeBsonValue(BsonDateTime(value.toEpochMilli()))
- }
-
- override fun deserialize(decoder: Decoder): Instant? {
- if (decoder !is BsonDecoder)
- throw SerializationException("Instant is not supported by ${decoder::class}")
-
- return if (decoder.decodeNotNullMark())
- Instant.ofEpochMilli(decoder.decodeBsonValue().asDateTime().value)
- else
- decoder.decodeNull()
- }
-}
-
-object ObjectIdSerializer : KSerializer<ObjectId> {
- override val descriptor: SerialDescriptor = PrimitiveSerialDescriptor("ObjectIdSerializer", PrimitiveKind.STRING)
-
- override fun serialize(encoder: Encoder, value: ObjectId) {
- if (encoder !is BsonEncoder)
- throw SerializationException("ObjectId is not supported by ${encoder::class}")
-
- encoder.encodeObjectId(value)
- }
-
- override fun deserialize(decoder: Decoder): ObjectId {
- if (decoder !is BsonDecoder)
- throw SerializationException("ObjectId is not supported by ${decoder::class}")
-
- return decoder.decodeObjectId()
- }
-}
-
-object ObjectIdNullableSerializer : KSerializer<ObjectId?> {
- override val descriptor: SerialDescriptor = PrimitiveSerialDescriptor("ObjectIdSerializer", PrimitiveKind.STRING)
-
- override fun serialize(encoder: Encoder, value: ObjectId?) {
- if (encoder !is BsonEncoder)
- throw SerializationException("ObjectId is not supported by ${encoder::class}")
-
- if (value == null)
- encoder.encodeBsonValue(BsonNull.VALUE)
- else
- encoder.encodeObjectId(value)
- }
-
- override fun deserialize(decoder: Decoder): ObjectId? {
- if (decoder !is BsonDecoder)
- throw SerializationException("ObjectId is not supported by ${decoder::class}")
-
- return if (decoder.decodeNotNullMark())
- decoder.decodeObjectId()
- else
- decoder.decodeNull()
- }
-}
import com.mongodb.client.model.Sorts
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.toList
+import kotlinx.serialization.Contextual
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
import java.time.Instant
val submittedBy: Id<NationData>,
val submittedIn: String,
- val submittedAt: @Serializable(with = InstantSerializer::class) Instant,
+ val submittedAt: @Contextual Instant,
val numEdits: Int,
- val lastEdit: @Serializable(with = InstantNullableSerializer::class) Instant?,
+ val lastEdit: @Contextual Instant?,
val contents: String
) : DataDocument<Comment> {
Table.index(Comment::submittedIn, Comment::submittedAt)
}
- suspend fun getCommentsIn(page: String): Flow<Comment> {
- return Table.select(Filters.eq(Comment::submittedIn.serialName, page), Sorts.descending(Comment::submittedAt.serialName))
+ suspend fun getCommentsIn(page: List<String>): Flow<Comment> {
+ return Table.select(Filters.eq(Comment::submittedIn.serialName, page.joinToString(separator = "/")), Sorts.descending(Comment::submittedAt.serialName))
}
suspend fun getCommentsBy(user: Id<NationData>): Flow<Comment> {
val originalPost: Id<Comment>,
val replyingPost: Id<Comment>,
- val repliedAt: @Serializable(with = InstantSerializer::class) Instant = Instant.now(),
+ val repliedAt: @Contextual Instant = Instant.now(),
) : DataDocument<CommentReplyLink> {
companion object : TableHolder<CommentReplyLink> {
override val Table = DocumentTable<CommentReplyLink>()
import com.aventrix.jnanoid.jnanoid.NanoIdUtils
import com.mongodb.ConnectionString
import com.mongodb.MongoClientSettings
+import com.mongodb.MongoDriverInformation
import com.mongodb.client.model.*
import com.mongodb.kotlin.client.coroutine.MongoClient
+import com.mongodb.kotlin.client.coroutine.MongoDatabase
+import com.mongodb.reactivestreams.client.MongoClients
+import com.mongodb.reactivestreams.client.gridfs.GridFSBucket
+import com.mongodb.reactivestreams.client.gridfs.GridFSBuckets
import info.mechyrdia.auth.SessionStorageDoc
import kotlinx.coroutines.CompletableDeferred
import kotlinx.coroutines.flow.Flow
import kotlinx.serialization.descriptors.SerialDescriptor
import kotlinx.serialization.encoding.Decoder
import kotlinx.serialization.encoding.Encoder
+import kotlinx.serialization.modules.SerializersModule
import org.bson.codecs.configuration.CodecRegistries
import org.bson.codecs.kotlinx.KotlinSerializerCodecProvider
import org.bson.conversions.Bson
import java.security.SecureRandom
+import java.time.Instant
import kotlin.reflect.KClass
import kotlin.reflect.KProperty1
+import com.mongodb.reactivestreams.client.MongoClient as JMongoClient
+import com.mongodb.reactivestreams.client.MongoDatabase as JMongoDatabase
@Serializable(IdSerializer::class)
@JvmInline
}
object ConnectionHolder {
- private lateinit var databaseName: String
+ // Completed exactly once by initialize(); every accessor suspends until then.
+ private val jDatabaseDeferred = CompletableDeferred<JMongoDatabase>()
- private val clientDeferred = CompletableDeferred<MongoClient>()
+ // Wraps the reactive-streams database in the Kotlin coroutine driver facade on each call.
+ suspend fun getDatabase() = MongoDatabase(jDatabaseDeferred.await())
- suspend fun getDatabase() = clientDeferred.await().getDatabase(databaseName)
+ // NOTE(review): creates a fresh GridFSBucket on every call -- consider caching it
+ // if this ends up on a per-request path.
+ suspend fun getBucket(): GridFSBucket = GridFSBuckets.create(jDatabaseDeferred.await())
+
+ // NOTE(review): @Contextual ObjectId is used elsewhere (GridFsEntry.file); verify that
+ // the bson-kotlinx default serializers still apply when only this module is passed to
+ // KotlinSerializerCodecProvider -- TODO confirm.
+ private val bsonSerializersModule = SerializersModule {
+ contextual(Instant::class, InstantSerializer)
+ }
fun initialize(conn: String, db: String) {
- if (clientDeferred.isCompleted)
+ if (jDatabaseDeferred.isCompleted)
error("Cannot initialize database twice")
- MongoClient.create(
- MongoClientSettings.builder()
- .codecRegistry(
- CodecRegistries.fromProviders(
- IdCodecProvider,
- KotlinSerializerCodecProvider()
+ jDatabaseDeferred.complete(
+ MongoClients.create(
+ MongoClientSettings.builder()
+ .codecRegistry(
+ CodecRegistries.fromProviders(
+ IdCodecProvider,
+ KotlinSerializerCodecProvider(bsonSerializersModule)
+ )
)
- )
- .applyConnectionString(ConnectionString(conn))
- .build()
+ .applyConnectionString(ConnectionString(conn))
+ .build(),
+ MongoDriverInformation.builder()
+ .driverName("kotlin")
+ .build()
+ ).getDatabase(db)
)
- databaseName = db
- clientDeferred.complete(MongoClient.create(conn))
-
runBlocking {
for (holder in TableHolder.entries)
launch {
import com.mongodb.client.model.Filters
import com.mongodb.client.model.Updates
import com.mongodb.reactivestreams.client.gridfs.GridFSBucket
+import info.mechyrdia.Configuration
+import info.mechyrdia.FileStorageConfig
+import info.mechyrdia.lore.StoragePathAttributeKey
+import io.github.reactivecircus.cache4k.Cache
+import io.ktor.http.*
+import io.ktor.server.application.*
+import io.ktor.server.response.*
import io.ktor.util.*
import io.ktor.util.cio.*
import io.ktor.utils.io.*
import kotlinx.coroutines.reactive.asFlow
import kotlinx.coroutines.reactive.asPublisher
import kotlinx.coroutines.reactive.awaitFirst
+import kotlinx.coroutines.reactive.awaitFirstOrNull
+import kotlinx.serialization.Contextual
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
import org.bson.types.ObjectId
+import java.io.ByteArrayOutputStream
import java.io.File
+import java.nio.ByteBuffer
import java.time.Instant
import kotlin.String
+/**
+ * Responds with the contents of the stored file at [path],
+ * or 404 Not Found when [fileStorage] has no such file.
+ */
+suspend fun ApplicationCall.respondStoredFile(fileStorage: FileStorage, path: StoragePath) {
+    val stat = fileStorage.statFile(path) ?: return respond(HttpStatusCode.NotFound)
+
+    attributes.put(StoragePathAttributeKey, path)
+    // StoragePath.name instead of elements.last(): a root path yields "" rather than
+    // throwing NoSuchElementException (unreachable today, but cheap to make safe).
+    val type = ContentType.defaultForFileExtension(path.name.substringAfterLast('.'))
+    respondBytesWriter(contentType = type, contentLength = stat.size) {
+        fileStorage.readFile(path, this)
+    }
+}
+
+/** Convenience overload using the globally configured [FileStorage.instance]. */
+suspend fun ApplicationCall.respondStoredFile(path: StoragePath) {
+    return respondStoredFile(FileStorage.instance, path)
+}
+
@JvmInline
value class StoragePath(val elements: List<String>) {
+ // Guard against path traversal: any all-dot segment (".", "..", "...") is rejected.
+ // NOTE(review): an empty-string segment also fails this check, but with the misleading
+ // "elements . or .." message -- consider a dedicated isNotEmpty require.
+ init {
+ for ((i, element) in elements.withIndex())
+ require(element.any { it != '.' }) {
+ "Cannot have elements . or .. in path, got $element at index $i in path /${elements.joinToString(separator = "/")}"
+ }
+ }
+
constructor(path: String) : this(path.split('/').filterNot(String::isEmpty))
+ // Final path segment; empty string for the root path.
+ val name: String
+ get() = elements.lastOrNull().orEmpty()
+
+ val isRoot: Boolean
+ get() = elements.isEmpty()
+
+ // Path concatenation; a string operand may itself contain '/' separators,
+ // and empty segments are dropped.
+ operator fun div(element: String) = this / element.split('/')
+ operator fun div(elementCollection: Iterable<String>) = StoragePath(elements + elementCollection.filterNot(String::isEmpty))
+
override fun toString(): String {
return elements.joinToString(separator = "/")
}
+
+ // Well-known top-level directories, replacing the per-directory settings that
+ // were removed from Configuration.
+ companion object {
+ val Root = StoragePath(emptyList())
+
+ val articleDir = Root / "lore"
+ val assetDir = Root / "assets"
+ val templateDir = Root / "tpl"
+ val jsonDocDir = Root / "data"
+ val scriptDir = Root / "funcs"
+ val april1Dir = Root / "funny"
+ }
}
enum class StoredFileType {
data class StoredFileEntry(val name: String, val type: StoredFileType)
+/** Metadata about a stored file, as reported by [FileStorage.statFile]. */
+data class StoredFileStats(
+ // Last-modified timestamp.
+ val updated: Instant,
+ // File size in bytes (used as the Content-Length when serving the file).
+ val size: Long,
+)
+
interface FileStorage {
suspend fun prepare() = Unit
+ suspend fun getType(path: StoragePath): StoredFileType?
+
suspend fun createDir(dir: StoragePath): Boolean
suspend fun listDir(dir: StoragePath): Flow<StoredFileEntry>
suspend fun deleteDir(dir: StoragePath): Boolean
+ suspend fun statFile(path: StoragePath): StoredFileStats?
+
suspend fun writeFile(path: StoragePath, content: ByteReadChannel): Boolean
+ suspend fun writeFile(path: StoragePath, content: ByteArray): Boolean
+
suspend fun readFile(path: StoragePath, content: ByteWriteChannel): Boolean
+ suspend fun readFile(path: StoragePath): ByteArray?
+
suspend fun copyFile(source: StoragePath, target: StoragePath): Boolean
suspend fun eraseFile(path: StoragePath): Boolean
suspend fun performMaintenance() = Unit
+
+    companion object {
+        /** Globally configured storage backend; assigned once by [initialize]. */
+        lateinit var instance: FileStorage
+            private set
+
+        /** Builds and prepares the [FileStorage] backend described by [config]. */
+        suspend operator fun invoke(config: FileStorageConfig): FileStorage = when (config) {
+            is FileStorageConfig.Flat -> FlatFileStorage(File(config.baseDir))
+            FileStorageConfig.GridFs -> GridFsStorage(
+                DocumentTable(),
+                ConnectionHolder.getBucket()
+            )
+        }.apply { prepare() }
+
+        // Delegates to invoke() so the config-to-backend mapping lives in exactly one
+        // place (it was previously duplicated here verbatim).
+        private suspend fun configure() {
+            instance = invoke(Configuration.Current.storage)
+        }
+
+        /** Blocking entry point used during application startup. */
+        fun initialize() = runBlocking { configure() }
+    }
}
-class FlatFileStorage(val root: File) : FileStorage {
- private fun resolveFile(path: StoragePath) = root.combineSafe(path.toString())
+private class FlatFileStorage(val root: File) : FileStorage {
+ private fun resolveFile(path: StoragePath) = if (path.isRoot) root else root.combineSafe(path.toString())
private fun renderEntry(file: File) = StoredFileEntry(file.name, if (file.isFile) StoredFileType.FILE else StoredFileType.DIRECTORY)
return true
}
+    // Ensures the file at [file] can be created: the parent directory exists and the
+    // path is not already occupied by a directory. Does not create the file itself.
+    private fun createFile(file: File): Boolean {
+        // A directory at this path would make the subsequent write throw; report the
+        // failure up front instead of returning true and crashing later.
+        if (file.isDirectory) return false
+        if (!file.exists()) {
+            val containingDir = file.parentFile
+            if (!containingDir.isDirectory && !createDir(containingDir)) return false
+        }
+        return true
+    }
+
override suspend fun prepare() {
withContext(Dispatchers.IO) {
+ if (root.exists() && !root.isDirectory)
+ root.delete()
root.mkdirs()
}
}
+    /** Reports whether [path] resolves to a file, a directory, or nothing. */
+    override suspend fun getType(path: StoragePath): StoredFileType? {
+        val target = resolveFile(path)
+        // Stat calls block; run them on Dispatchers.IO like the other file operations here.
+        return withContext(Dispatchers.IO) {
+            when {
+                target.isFile -> StoredFileType.FILE
+                target.isDirectory -> StoredFileType.DIRECTORY
+                else -> null
+            }
+        }
+    }
+
override suspend fun createDir(dir: StoragePath): Boolean {
return withContext(Dispatchers.IO) { createDir(resolveFile(dir)) }
}
override suspend fun listDir(dir: StoragePath): Flow<StoredFileEntry> {
- return withContext(Dispatchers.IO) { resolveFile(dir).listFiles()?.map { renderEntry(it) }.orEmpty().asFlow() }
+ return withContext(Dispatchers.IO) { resolveFile(dir).listFiles()?.map { renderEntry(it) }.orEmpty() }.asFlow()
}
override suspend fun deleteDir(dir: StoragePath): Boolean {
return withContext(Dispatchers.IO) { file.deleteRecursively() }
}
+    /** Returns size and mtime for the file at [path], or null if it is not a regular file. */
+    override suspend fun statFile(path: StoragePath): StoredFileStats? {
+        val file = resolveFile(path)
+        // Blocking stat on Dispatchers.IO, consistent with the other file operations.
+        return withContext(Dispatchers.IO) {
+            if (!file.isFile) null
+            else StoredFileStats(Instant.ofEpochMilli(file.lastModified()), file.length())
+        }
+    }
+
override suspend fun writeFile(path: StoragePath, content: ByteReadChannel): Boolean {
val file = resolveFile(path)
- if (!file.exists())
- if (!file.parentFile.isDirectory)
- return false
- withContext(Dispatchers.IO) {
- file.writeChannel().use { content.copyTo(this) }
+ return withContext(Dispatchers.IO) {
+ if (createFile(file)) {
+ file.writeChannel().use { content.copyTo(this) }
+ true
+ } else false
}
+ }
+
+ override suspend fun writeFile(path: StoragePath, content: ByteArray): Boolean {
+ val file = resolveFile(path)
- return true
+ return withContext(Dispatchers.IO) {
+ if (createFile(file)) {
+ file.writeBytes(content)
+ true
+ } else false
+ }
}
override suspend fun readFile(path: StoragePath, content: ByteWriteChannel): Boolean {
val file = resolveFile(path)
if (!file.isFile) return false
- withContext(Dispatchers.IO) {
- file.readChannel().copyTo(content)
- }
+ file.readChannel().copyTo(content)
return true
}
+    /** Reads the whole file at [path] into memory; null when it is not a regular file. */
+    override suspend fun readFile(path: StoragePath): ByteArray? {
+        val target = resolveFile(path)
+        if (!target.isFile) return null
+
+        // Full read happens on the IO dispatcher.
+        return withContext(Dispatchers.IO) { target.readBytes() }
+    }
+
override suspend fun copyFile(source: StoragePath, target: StoragePath): Boolean {
val sourceFile = resolveFile(source)
val targetFile = resolveFile(target)
if (!sourceFile.isFile) return false
- if (targetFile.exists()) return false
withContext(Dispatchers.IO) {
- sourceFile.copyTo(targetFile)
+ sourceFile.copyTo(targetFile, overwrite = true)
}
return true
@SerialName(MONGODB_ID_KEY)
override val id: Id<GridFsEntry>,
val path: String,
- val file: @Serializable(with = ObjectIdSerializer::class) ObjectId,
- val created: @Serializable(with = InstantSerializer::class) Instant,
- val updated: @Serializable(with = InstantSerializer::class) Instant,
+ val file: @Contextual ObjectId,
+ val created: @Contextual Instant,
+ val updated: @Contextual Instant,
) : DataDocument<GridFsEntry>
private class GridFsStorage(val table: DocumentTable<GridFsEntry>, val bucket: GridFSBucket) : FileStorage {
private suspend fun getPrefix(path: String) = table.filter(Filters.regex(GridFsEntry::path.serialName, "^${Regex.fromLiteral(path)}"))
private suspend fun deletePrefix(path: String) = table.remove(Filters.regex(GridFsEntry::path.serialName, "^${Regex.fromLiteral(path)}"))
- private fun toExactPath(path: StoragePath) = "/$path"
- private fun toPrefixPath(path: StoragePath) = "/$path/"
+ private fun toExactPath(path: StoragePath) = path.elements.joinToString(separator = "") { "/$it" }
+ private fun toPrefixPath(path: StoragePath) = "${toExactPath(path)}/"
override suspend fun prepare() {
table.unique(GridFsEntry::path)
}
+    /**
+     * A path is a FILE when an exact entry exists, a DIRECTORY when any entry
+     * lives under its prefix, and absent otherwise.
+     */
+    override suspend fun getType(path: StoragePath): StoredFileType? {
+        val exact = getExact(toExactPath(path))
+        return when {
+            exact != null -> StoredFileType.FILE
+            getPrefix(toPrefixPath(path)).count() > 0 -> StoredFileType.DIRECTORY
+            else -> null
+        }
+    }
+
override suspend fun createDir(dir: StoragePath): Boolean {
return coroutineScope {
dir.elements.indices.map { index ->
return true
}
+    /** Returns size and update time for the file entry at [path], or null if absent. */
+    override suspend fun statFile(path: StoragePath): StoredFileStats? {
+        if (path.isRoot) return null
+        val file = getExact(toExactPath(path)) ?: return null
+        // awaitFirstOrNull: a dangling table entry whose blob is gone from the bucket
+        // should report "no such file" instead of awaitFirst() throwing NoSuchElementException.
+        val gridFsFile = bucket.find(Filters.eq(MONGODB_ID_KEY, file.file)).awaitFirstOrNull()
+            ?: return null
+        return StoredFileStats(file.updated, gridFsFile.length)
+    }
+
override suspend fun writeFile(path: StoragePath, content: ByteReadChannel): Boolean {
+ if (path.isRoot) return false
if (getPrefix(toPrefixPath(path)).count() > 0) return false
val bytesPublisher = flow {
return true
}
+ /** Writes [content] as the file at [path]; false when the path is the root or an existing directory. */
+ override suspend fun writeFile(path: StoragePath, content: ByteArray): Boolean {
+ if (path.isRoot) return false
+ // Refuse to write where entries already exist under this path as a directory prefix.
+ if (getPrefix(toPrefixPath(path)).count() > 0) return false
+
+ val bytesPublisher = flow {
+ emit(ByteBuffer.wrap(content))
+ }.asPublisher(CoroutineName("grid-fs-writer") + Dispatchers.IO)
+
+ // NOTE(review): on overwrite, the blob previously referenced by this entry appears to
+ // stay in the bucket -- confirm updateExact or performMaintenance reclaims it.
+ val newId = bucket.uploadFromPublisher(path.elements.last(), bytesPublisher).awaitFirst()
+ updateExact(toExactPath(path), newId)
+ return true
+ }
+
override suspend fun readFile(path: StoragePath, content: ByteWriteChannel): Boolean {
+ if (path.isRoot) return false
val file = getExact(toExactPath(path)) ?: return false
val gridFsId = file.file
return true
}
+    /** Downloads the whole blob for the entry at [path] into memory; null when absent. */
+    override suspend fun readFile(path: StoragePath): ByteArray? {
+        if (path.isRoot) return null
+        val entry = getExact(toExactPath(path)) ?: return null
+
+        val output = ByteArrayOutputStream()
+        bucket.downloadToPublisher(entry.file).asFlow().collect { buffer ->
+            val chunk = buffer.slice().moveToByteArray()
+            withContext(Dispatchers.IO) { output.write(chunk) }
+        }
+        return output.toByteArray()
+    }
+
override suspend fun copyFile(source: StoragePath, target: StoragePath): Boolean {
+ if (source.isRoot || target.isRoot) return false
val sourceFile = getExact(toExactPath(source)) ?: return false
updateExact(toExactPath(target), sourceFile.file)
return true
}
override suspend fun eraseFile(path: StoragePath): Boolean {
+ if (path.isRoot) return false
val file = getExact(toExactPath(path)) ?: return false
bucket.delete(file.file).awaitFirst()
table.del(file.id)
suspend operator fun invoke(comments: List<Comment>, nations: MutableMap<Id<NationData>, NationData> = mutableMapOf()): List<CommentRenderData> {
return coroutineScope {
comments.map { comment ->
- val nationData = nations.getNation(comment.submittedBy)
- val htmlResult = comment.contents.parseAs(ParserTree::toCommentHtml)
-
async {
+ val nationData = nations.getNation(comment.submittedBy)
+ val htmlResult = comment.contents.parseAs(ParserTree::toCommentHtml)
+
CommentRenderData(
id = comment.id,
submittedBy = nationData,
import kotlinx.html.FlowContent
import kotlinx.html.p
import kotlinx.html.style
+import kotlinx.serialization.Contextual
import kotlinx.serialization.SerialName
import kotlinx.serialization.Serializable
import org.intellij.lang.annotations.Language
data class PageVisitTotals(
val total: Int,
val totalUnique: Int,
- val mostRecent: @Serializable(with = InstantNullableSerializer::class) Instant?
+ val mostRecent: @Contextual Instant?
)
@Serializable
val path: String,
val visitor: String,
val visits: Int = 0,
- val lastVisit: @Serializable(with = InstantSerializer::class) Instant = Instant.now()
+ val lastVisit: @Contextual Instant = Instant.now()
) : DataDocument<PageVisitData> {
companion object : TableHolder<PageVisitData> {
override val Table = DocumentTable<PageVisitData>()
package info.mechyrdia.lore
-import info.mechyrdia.Configuration
+import info.mechyrdia.data.FileStorage
+import info.mechyrdia.data.StoragePath
import io.ktor.server.application.*
-import io.ktor.util.*
-import java.io.File
import java.time.Instant
import java.time.Month
import java.time.ZoneId
}
context(ApplicationCall)
-fun redirectFileOnApril1st(requestedFile: File): File? {
+suspend fun redirectFileOnApril1st(requestedFile: StoragePath): StoragePath? {
if (!april1stMode.isEnabled) return null
- val rootDir = File(Configuration.CurrentConfiguration.rootDir)
- val requestedPath = requestedFile.absoluteFile.toRelativeString(rootDir.absoluteFile)
- val funnyFile = File(Configuration.CurrentConfiguration.april1Dir).combineSafe(requestedPath)
- return funnyFile.takeIf { it.exists() }
+ val path = StoragePath.april1Dir / requestedFile.elements
+ if (FileStorage.instance.statFile(path) == null) return null
+ return path
}
context(ApplicationCall)
-fun getAssetFile(requestedFile: File): File {
+suspend fun getAssetFile(requestedFile: StoragePath): StoragePath {
return redirectFileOnApril1st(requestedFile) ?: requestedFile
}
-suspend fun ApplicationCall.respondAsset(assetFile: File) {
+suspend fun ApplicationCall.respondAsset(assetFile: StoragePath) {
respondCompressedFile(getAssetFile(assetFile))
}
package info.mechyrdia.lore
import info.mechyrdia.Configuration
+import info.mechyrdia.data.FileStorage
+import info.mechyrdia.data.StoragePath
import info.mechyrdia.route.Root
import info.mechyrdia.route.href
import io.ktor.server.application.*
+import kotlinx.coroutines.async
+import kotlinx.coroutines.awaitAll
+import kotlinx.coroutines.coroutineScope
+import kotlinx.coroutines.flow.map
+import kotlinx.coroutines.flow.toList
import kotlinx.html.UL
import kotlinx.html.a
import kotlinx.html.li
import kotlinx.html.ul
-import java.io.File
data class ArticleNode(val name: String, val subNodes: List<ArticleNode>)
-fun rootArticleNodeList(): List<ArticleNode> = File(Configuration.CurrentConfiguration.articleDir)
- .toArticleNode()
- .subNodes
+suspend fun rootArticleNodeList(): List<ArticleNode> = StoragePath.articleDir.toArticleNode().subNodes
-fun File.toArticleNode(): ArticleNode = ArticleNode(
+/**
+ * Builds the article tree rooted at this path by listing the storage backend
+ * and recursing into every entry concurrently.
+ */
+suspend fun StoragePath.toArticleNode(): ArticleNode = ArticleNode(
 name,
- listFiles()
- .orEmpty()
- .map { it.toArticleNode() }
- .sortedBy { it.name }
- .sortedBy { it.subNodes.isEmpty() }
+ coroutineScope {
+ val path = this@toArticleNode
+ FileStorage.instance.listDir(path).map {
+ val subPath = path / it.name
+ async { subPath.toArticleNode() }
+ }.toList().awaitAll()
+ // Restore the ordering the File-based implementation applied (dropped in the
+ // storage migration): alphabetical, with leaf nodes sorted after directories.
+ .sortedBy { it.name }
+ .sortedBy { it.subNodes.isEmpty() }
+ }
 )
private val String.isViewable: Boolean
- get() = Configuration.CurrentConfiguration.isDevMode || !(endsWith(".wip") || endsWith(".old"))
+ get() = Configuration.Current.isDevMode || !(endsWith(".wip") || endsWith(".old"))
val ArticleNode.isViewable: Boolean
get() = name.isViewable
-val File.isViewable: Boolean
+val StoragePath.isViewable: Boolean
get() = name.isViewable
context(ApplicationCall)
package info.mechyrdia.lore
-import java.io.File
-import java.util.concurrent.ConcurrentHashMap
+import info.mechyrdia.data.FileStorage
+import info.mechyrdia.data.StoragePath
+import io.github.reactivecircus.cache4k.Cache
+import io.ktor.util.*
+import kotlinx.coroutines.async
+import kotlinx.coroutines.coroutineScope
+import java.time.Instant
+import kotlin.time.Duration.Companion.hours
-abstract class FileDependentCache<T> {
- private val cache = ConcurrentHashMap<File, Entry>()
-
- protected abstract fun processFile(file: File): T
-
- operator fun get(file: File): T {
- return cache.compute(file) { _, prevEntry ->
- prevEntry?.apply {
- updateIfNeeded(file)
- } ?: Entry(file)
- }!!.cachedData
- }
-
- private inner class Entry private constructor(lastModified: Long, cachedData: T) {
- constructor(file: File) : this(file.lastModified(), processFile(file))
-
- var lastModified: Long = lastModified
+val StoragePathAttributeKey = AttributeKey<StoragePath>("Mechyrdia.StoragePath")
+
+abstract class FileDependentCache<T : Any> {
+ private inner class Entry(updated: Instant, data: T) {
+ var updated: Instant = updated
private set
- var cachedData: T = cachedData
+ var data: T = data
private set
- fun updateIfNeeded(file: File) {
- val fileLastModified = file.lastModified()
- if (lastModified < fileLastModified) {
- lastModified = fileLastModified
-
- cachedData = processFile(file)
+ suspend fun updateIfNeeded(path: StoragePath): Entry? {
+ val fileUpdated = FileStorage.instance.statFile(path)?.updated ?: return null
+ if (updated < fileUpdated) {
+ updated = fileUpdated
+ data = processFile(path) ?: return null
}
+
+ return this
+ }
+ }
+
+ // Factory named after the type (idiomatic Kotlin); null when the file is missing
+ // or cannot be processed.
+ // NOTE(review): statFile and processFile run concurrently, so the recorded `updated`
+ // may not match the exact content that was read if the file changes in between --
+ // TODO confirm this race is acceptable.
+ private suspend fun Entry(path: StoragePath): Entry? {
+ val (updated, data) = coroutineScope {
+ val updated = async { FileStorage.instance.statFile(path)?.updated }
+ val data = async { processFile(path) }
+ updated.await() to data.await()
}
+
+ if (updated == null || data == null) return null
+ return Entry(updated, data)
+ }
+
+ // NOTE(review): 160 entries / 36h are hard-coded tuning values; worth a comment on
+ // how they were chosen.
+ private val cache = Cache.Builder<StoragePath, Entry>()
+ .maximumCacheSize(160)
+ .expireAfterAccess(36.hours)
+ .build()
+
+ // Transforms the raw file at [path] into the cached value; null means "not cacheable".
+ protected abstract suspend fun processFile(path: StoragePath): T?
+
+ // Returns the cached value for [path], (re)building it when missing or stale.
+ // processAtomic is presumably provided by the vendored cache4k module -- TODO confirm
+ // its atomicity guarantees for the in-place Entry mutation done by updateIfNeeded.
+ suspend fun get(path: StoragePath): T? {
+ return cache.processAtomic(path) { prev ->
+ if (prev == null)
+ Entry(path)
+ else prev.updateIfNeeded(path)
+ }?.data
}
}
package info.mechyrdia.lore
+import info.mechyrdia.data.FileStorage
+import info.mechyrdia.data.StoragePath
+import info.mechyrdia.data.respondStoredFile
import io.ktor.http.*
import io.ktor.server.application.*
import io.ktor.server.request.*
import io.ktor.server.response.*
-import io.ktor.util.*
import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.runInterruptible
+import kotlinx.coroutines.withContext
import java.io.ByteArrayOutputStream
-import java.io.File
import java.io.FilterOutputStream
import java.io.OutputStream
import java.util.zip.DeflaterOutputStream
import java.util.zip.GZIPOutputStream
-val CompressedFileAttributeKey = AttributeKey<File>("Mechyrdia.CompressedFile")
-
private val gzippedCache = CompressedCache("gzip", ::GZIPOutputStream)
private val deflatedCache = CompressedCache("deflate", ::DeflaterOutputStream)
?.first
}
-suspend fun ApplicationCall.respondCompressedFile(file: File) {
- val cache = compressedCache() ?: return respondFile(file)
+suspend fun ApplicationCall.respondCompressedFile(path: StoragePath) {
+ val cache = compressedCache() ?: return respondStoredFile(path)
+ val compressedBytes = cache.get(path) ?: return respond(HttpStatusCode.NotFound)
+ attributes.put(StoragePathAttributeKey, path)
response.header(HttpHeaders.ContentEncoding, cache.encoding)
- attributes.put(CompressedFileAttributeKey, file)
- val compressedBytes = runInterruptible(Dispatchers.IO) { cache[file] }
respondBytes(compressedBytes)
}
private class CompressedCache(val encoding: String, private val compressorFactory: (OutputStream, Boolean) -> FilterOutputStream) : FileDependentCache<ByteArray>() {
- override fun processFile(file: File): ByteArray {
- return ByteArrayOutputStream().also { oStream ->
- compressorFactory(oStream, true).use { gzip ->
- file.inputStream().use { it.copyTo(gzip) }
- gzip.flush()
- }
- }.toByteArray()
+ override suspend fun processFile(path: StoragePath): ByteArray? {
+ val fileContents = FileStorage.instance.readFile(path) ?: return null
+
+ return withContext(Dispatchers.IO) {
+ ByteArrayOutputStream().also { oStream ->
+ compressorFactory(oStream, true).use { gzip ->
+ gzip.write(fileContents)
+ gzip.flush()
+ }
+ }.toByteArray()
+ }
}
}
package info.mechyrdia.lore
+import info.mechyrdia.data.FileStorage
+import info.mechyrdia.data.StoragePath
import io.ktor.http.content.*
+import io.ktor.http.content.LastModifiedVersion
import io.ktor.server.application.*
import io.ktor.server.http.content.*
-import kotlinx.coroutines.Dispatchers
-import kotlinx.coroutines.runInterruptible
-import java.io.File
+import kotlinx.coroutines.*
import java.io.IOException
import java.io.OutputStream
import java.security.MessageDigest
private class FileHashCache(val hashAlgo: String) : FileDependentCache<ByteArray>() {
private val hashinator: ThreadLocal<MessageDigest> = ThreadLocal.withInitial { MessageDigest.getInstance(hashAlgo) }
- override fun processFile(file: File): ByteArray {
- return DigestingOutputStream(hashinator.get()).useAndGet { oStream ->
- file.inputStream().use { it.copyTo(oStream) }
+ override suspend fun processFile(path: StoragePath): ByteArray {
+ return withContext(Dispatchers.IO) {
+ DigestingOutputStream(hashinator.get()).useAndGet { oStream ->
+ oStream.write(FileStorage.instance.readFile(path) ?: ByteArray(0))
+ }
}
}
}
private val b64Encoder: Base64.Encoder = Base64.getUrlEncoder()
-suspend fun File.eTag(): String = runInterruptible(Dispatchers.IO) {
- b64Encoder.encodeToString(eTagCache[this])
-}
+suspend fun StoragePath.eTag(): String? = eTagCache.get(this)?.let(b64Encoder::encodeToString)
-private suspend fun File.getVersionHeaders() = listOf(
- LastModifiedVersion(lastModified()),
- EntityTagVersion(eTag()),
-)
+private suspend fun StoragePath.getVersionHeaders() = coroutineScope {
+ listOf(
+ async {
+ eTag()?.let {
+ EntityTagVersion(it)
+ }
+ },
+ async {
+ FileStorage.instance.statFile(this@getVersionHeaders)?.updated?.toEpochMilli()?.let {
+ LastModifiedVersion(it)
+ }
+ }
+ ).awaitAll().filterNotNull()
+}
-suspend fun getVersionHeaders(call: ApplicationCall, outgoingContent: OutgoingContent): List<Version> {
- return if (outgoingContent is LocalFileContent)
- outgoingContent.file.getVersionHeaders()
- else
- call.attributes.getOrNull(CompressedFileAttributeKey)?.getVersionHeaders()
- ?: emptyList()
+suspend fun getVersionHeaders(call: ApplicationCall): List<Version> {
+ return call.attributes.getOrNull(StoragePathAttributeKey)?.getVersionHeaders().orEmpty()
}
package info.mechyrdia.lore
-import java.io.File
-import java.util.concurrent.locks.ReentrantLock
-import kotlin.concurrent.withLock
+import info.mechyrdia.data.FileStorage
+import info.mechyrdia.data.StoragePath
+import kotlinx.coroutines.sync.Mutex
+import kotlinx.coroutines.sync.withLock
+import java.time.Instant
import kotlin.properties.ReadOnlyProperty
import kotlin.reflect.KProperty
-fun <T> fileData(file: File, loader: (File) -> T): ReadOnlyProperty<Any?, T> = object : ReadOnlyProperty<Any?, T> {
+fun <T : Any> storedData(path: StoragePath, loader: suspend (StoragePath) -> T?): ReadOnlyProperty<Any?, suspend () -> T?> = object : ReadOnlyProperty<Any?, suspend () -> T?> {
private var loadedValue: T? = null
- private var lastChanged = Long.MIN_VALUE
+ private var lastChanged = Instant.MIN
- private val lock = ReentrantLock(true)
+ private val lock = Mutex()
- override fun getValue(thisRef: Any?, property: KProperty<*>): T {
- return lock.withLock {
- val cached = loadedValue
- val lastMod = file.lastModified()
-
- @Suppress("UNCHECKED_CAST")
- if (lastChanged < lastMod) {
- lastChanged = lastMod
- loader(file).also {
- loadedValue = it
- }
- } else cached as T
+ override fun getValue(thisRef: Any?, property: KProperty<*>): suspend () -> T? {
+ return suspend {
+ lock.withLock {
+ val cached = loadedValue
+ val lastMod = FileStorage.instance.statFile(path)?.updated ?: return@withLock null
+
+ if (lastChanged < lastMod) {
+ lastChanged = lastMod
+ loader(path).also {
+ loadedValue = it
+ }
+ } else cached
+ }
}
}
}
import com.jaredrummler.fontreader.truetype.FontFileReader
import com.jaredrummler.fontreader.truetype.TTFFile
import com.jaredrummler.fontreader.util.GlyphSequence
-import info.mechyrdia.Configuration
+import info.mechyrdia.data.FileStorage
+import info.mechyrdia.data.StoragePath
import info.mechyrdia.route.KeyedEnumSerializer
import info.mechyrdia.yieldThread
-import io.ktor.util.*
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.withContext
import kotlinx.serialization.Serializable
import org.slf4j.Logger
import org.slf4j.LoggerFactory
import java.awt.geom.PathIterator
import java.awt.image.BufferedImage
import java.io.ByteArrayInputStream
-import java.io.File
import java.io.IOException
import java.nio.IntBuffer
import kotlin.properties.ReadOnlyProperty
object MechyrdiaSansFont {
private val logger: Logger = LoggerFactory.getLogger(MechyrdiaSansFont::class.java)
- fun renderTextToSvg(text: String, bold: Boolean, italic: Boolean, align: TextAlignment): String {
+ suspend fun renderTextToSvg(text: String, bold: Boolean, italic: Boolean, align: TextAlignment): String {
val (file, font) = getFont(bold, italic)
return layoutText(text, file, font, align).toSvgDocument(80.0 / file.unitsPerEm, 12.0)
}
- private val fontsRoot = File(Configuration.CurrentConfiguration.rootDir, "fonts")
- private fun fontFile(name: String) = fontsRoot.combineSafe("$name.ttf")
- private fun loadFont(fontFile: File): Pair<TTFFile, Font> {
- val bytes = fontFile.readBytes()
+ private val fontsRoot = StoragePath("fonts")
+ private fun fontFile(name: String) = fontsRoot / "$name.ttf"
+ private suspend fun loadFont(fontFile: StoragePath): Pair<TTFFile, Font>? {
+ val bytes = FileStorage.instance.readFile(fontFile) ?: return null
- val file = TTFFile(true, true)
- file.readFont(FontFileReader(ByteArrayInputStream(bytes)))
-
- val font = Font
- .createFont(Font.TRUETYPE_FONT, ByteArrayInputStream(bytes))
- .deriveFont(file.unitsPerEm.toFloat())
-
- return file to font
+ return withContext(Dispatchers.IO) {
+ val file = TTFFile(true, true)
+ file.readFont(FontFileReader(ByteArrayInputStream(bytes)))
+
+ val font = Font
+ .createFont(Font.TRUETYPE_FONT, ByteArrayInputStream(bytes))
+ .deriveFont(file.unitsPerEm.toFloat())
+
+ file to font
+ }
}
- private fun loadedFont(fontName: String): ReadOnlyProperty<Any?, Pair<TTFFile, Font>> {
- return fileData(fontFile(fontName), ::loadFont)
+ private fun loadedFont(fontName: String): ReadOnlyProperty<Any?, suspend () -> Pair<TTFFile, Font>?> {
+ return storedData(fontFile(fontName), ::loadFont)
}
private val mechyrdiaSans by loadedFont("mechyrdia-sans")
private val mechyrdiaSansI by loadedFont("mechyrdia-sans-italic")
private val mechyrdiaSansBI by loadedFont("mechyrdia-sans-bold-italic")
- private val mechyrdiaSansFonts = listOf(::mechyrdiaSans, ::mechyrdiaSansI, ::mechyrdiaSansB, ::mechyrdiaSansBI)
- private fun getFont(bold: Boolean, italic: Boolean): Pair<TTFFile, Font> {
- return mechyrdiaSansFonts[(if (bold) 2 else 0) + (if (italic) 1 else 0)].get()
+ private val mechyrdiaSansFonts = listOf(mechyrdiaSans, mechyrdiaSansI, mechyrdiaSansB, mechyrdiaSansBI)
+ private suspend fun getFont(bold: Boolean, italic: Boolean): Pair<TTFFile, Font> {
+ return mechyrdiaSansFonts[(if (bold) 2 else 0) + (if (italic) 1 else 0)]()!!
}
private fun TTFFile.getGlyph(cp: Int): Int {
package info.mechyrdia.lore
-import info.mechyrdia.Configuration
import info.mechyrdia.JsonStorageCodec
import io.ktor.util.*
import kotlinx.html.*
import kotlinx.html.org.w3c.dom.events.Event
import kotlinx.html.stream.appendHTML
import kotlinx.serialization.json.JsonPrimitive
-import java.io.File
import kotlin.text.toCharArray
typealias HtmlBuilderContext = Unit
is ParserTreeNode.Tag -> false
}
+fun ParserTree.isWhitespace() = all { it.isWhitespace() }
+
fun ParserTreeNode.isParagraph(inlineTags: Set<String>): Boolean = when (this) {
is ParserTreeNode.Text -> true
ParserTreeNode.LineBreak -> false
IMAGE(HtmlTextBodyLexerTag { _, param, content ->
val url = content.sanitizeLink()
val (width, height) = getSizeParam(param)
+ val styleValue = getImageSizeStyleValue(width, height)
if (url.endsWith(".svg")) {
- val svg = File(Configuration.CurrentConfiguration.assetDir, "images")
- .combineSafe(url)
- .readText()
- .replaceFirst("<svg", "<svg${getImageSizeAttributes(width, height)}")
-
- ({ unsafe { +svg } })
+ ({
+ iframe {
+ src = "/assets/images/$url"
+ style = styleValue
+ }
+ })
} else {
- val styleValue = getImageSizeStyleValue(width, height)
-
({
script { unsafe { +"window.appendImageThumb(\"/assets/images/$url\", \"$styleValue\");" } }
})
operator fun get(name: String): ParserTree = variables[name] ?: parent?.get(name) ?: "null".textToTree()
operator fun set(name: String, value: ParserTree) {
- variables[name] = value
+ if (parent != null && name in parent)
+ parent[name] = value
+ else
+ variables[name] = value
+ }
+
+ fun setGlobal(name: String, value: ParserTree) {
+ if (parent != null)
+ parent.setGlobal(name, value)
+ else
+ variables[name] = value
}
- operator fun set(name: String, value: String) {
- variables[name] = value.textToTree()
+ fun setLocal(name: String, value: ParserTree) {
+ variables[name] = value
}
operator fun contains(name: String): Boolean = name in variables || (parent?.contains(name) == true)
}
}
+fun interface PreProcessorFilter {
+ suspend fun execute(input: ParserTree, env: AsyncLexerTagEnvironment<PreProcessingContext, PreProcessingSubject>): ParserTree
+}
+
+fun interface PreProcessorFilterProvider : PreProcessorLexerTag {
+ suspend fun provideFilter(param: String?): PreProcessorFilter?
+
+ override suspend fun processTag(env: AsyncLexerTagEnvironment<PreProcessingContext, PreProcessingSubject>, param: String?, subNodes: ParserTree): PreProcessingSubject {
+ val filter = provideFilter(param) ?: return emptyList()
+ return filter.execute(subNodes, env)
+ }
+}
+
+abstract class PreProcessorFilterLibrary : PreProcessorFilterProvider {
+ abstract val filters: Map<String, PreProcessorFilter>
+
+ override suspend fun provideFilter(param: String?) = param?.let { filters[it] }
+
+ companion object {
+ operator fun invoke(library: Map<String, PreProcessorFilter>) = object : PreProcessorFilterLibrary() {
+ override val filters: Map<String, PreProcessorFilter> = library
+ }
+ }
+}
+
fun ParserTree.asPreProcessorList(): List<ParserTree> = mapNotNull {
if (it !is ParserTreeNode.Tag || it isNotTag "item" || it.param != null)
null
async {
k to processor(k, v)
}
- }.awaitAll()
-}.toMap()
+ }.awaitAll().toMap()
+}
enum class PreProcessorTags(val type: PreProcessorLexerTag) {
EVAL(PreProcessorLexerTag { env, param, subNodes ->
emptyList()
}),
+ SET_GLOBAL(PreProcessorLexerTag { env, param, subNodes ->
+ param?.let { varName ->
+ env.context.setGlobal(varName, env.processTree(subNodes))
+ }
+
+ emptyList()
+ }),
+ SET_LOCAL(PreProcessorLexerTag { env, param, subNodes ->
+ param?.let { varName ->
+ env.context.setLocal(varName, env.processTree(subNodes))
+ }
+
+ emptyList()
+ }),
INDEX(PreProcessorLexerTag { env, param, subNodes ->
val inputList = env.processTree(subNodes).asPreProcessorList()
}.orEmpty()
}),
MATH(PreProcessorMathOperators),
- MATH_TEST(PreProcessorMathPredicate),
LOGIC(PreProcessorLogicOperator),
+	TEST(PreProcessorInputTest),
JSON_PARSE(PreProcessorLexerTag { _, _, subNodes ->
JsonStorageCodec.parseToJsonElement(subNodes.treeToText()).toPreProcessTree()
}),
JSON_STRINGIFY(PreProcessorLexerTag { env, _, subNodes ->
env.processTree(subNodes).toPreProcessJson().toString().textToTree()
}),
+ SCRIPT(PreProcessorLexerTag { env, param, subNodes ->
+ param?.let { scriptName ->
+ PreProcessorScriptLoader.runScriptSafe(scriptName, subNodes.asPreProcessorMap(), env) {
+ it.renderInBBCode()
+ }
+ }.orEmpty()
+ }),
+ WITH_DATA_FILE(PreProcessorLexerTag { env, param, subNodes ->
+ param?.let { dataFileName ->
+ val args = FactbookLoader.loadFactbookContext(dataFileName.split('/'))
+ env.copy(context = env.context + args).processTree(subNodes)
+ }.orEmpty()
+ }),
+ IMPORT(PreProcessorLexerTag { _, param, subNodes ->
+ param?.let { templateName ->
+ PreProcessorTemplateLoader.runTemplateWith(templateName, subNodes.asPreProcessorMap())
+ }.orEmpty()
+ }),
+ INCLUDE(PreProcessorLexerTag { env, _, subNodes ->
+ PreProcessorTemplateLoader.runTemplateHere(env.processTree(subNodes).treeToText(), env)
+ }),
+ TEMPLATE(PreProcessorLexerTag { env, _, subNodes ->
+ PreProcessorTemplateLoader.loadTemplate(env.processTree(subNodes).treeToText())
+ }),
+ ;
+
+ companion object {
+ val asTags = AsyncLexerTags(entries.associate { it.name to it.type })
+ }
+}
+
+suspend fun ParserTree.preProcess(context: Map<String, ParserTree>): ParserTree {
+ return AsyncLexerTagEnvironment(
+ PreProcessingContext(context, null),
+ PreProcessorTags.asTags,
+ PreProcessorUtils,
+ PreProcessorUtils,
+ PreProcessorUtils,
+ PreProcessorUtils,
+ ).processTree(this)
}
+
+fun Exception.renderInBBCode(): ParserTree = listOf(
+ ParserTreeNode.LineBreak,
+ ParserTreeNode.Tag("b", null, listOf(ParserTreeNode.Text("${this::class.qualifiedName}: $message"))),
+ ParserTreeNode.LineBreak,
+ ParserTreeNode.Tag("ul", null,
+ stackTraceToString().split(System.lineSeparator()).map {
+ ParserTreeNode.Tag("li", null, listOf(ParserTreeNode.Text(it)))
+ }
+ ),
+ ParserTreeNode.LineBreak,
+)
package info.mechyrdia.lore
+import info.mechyrdia.JsonStorageCodec
+import info.mechyrdia.data.FileStorage
+import info.mechyrdia.data.StoragePath
+import io.github.reactivecircus.cache4k.Cache
+import io.ktor.util.*
+import kotlinx.coroutines.Dispatchers
+import kotlinx.coroutines.suspendCancellableCoroutine
+import kotlinx.coroutines.withContext
+import kotlinx.serialization.json.*
+import java.security.MessageDigest
+import java.util.function.Consumer
+import javax.script.Compilable
+import javax.script.CompiledScript
+import javax.script.ScriptEngineManager
+import javax.script.SimpleBindings
+import kotlin.coroutines.*
+import kotlin.time.Duration.Companion.days
+
+object PreProcessorTemplateLoader {
+ suspend fun loadTemplate(name: String): ParserTree {
+ val templateFile = StoragePath.templateDir / "$name.tpl"
+ val template = FileStorage.instance.readFile(templateFile) ?: return emptyList()
+ return ParserState.parseText(String(template))
+ }
+
+ suspend fun runTemplateWith(name: String, args: Map<String, ParserTree>): ParserTree {
+ return loadTemplate(name).preProcess(args)
+ }
+
+ suspend fun runTemplateHere(name: String, env: AsyncLexerTagEnvironment<PreProcessingContext, PreProcessingSubject>): ParserTree {
+ return env.processTree(loadTemplate(name))
+ }
+}
+
object PreProcessorScriptLoader {
+ private val scriptEngine = ThreadLocal.withInitial { ScriptEngineManager().getEngineByExtension("groovy") }
+ private val hasher = ThreadLocal.withInitial { MessageDigest.getInstance("SHA-256") }
+ private val cache = Cache.Builder<String, CompiledScript>()
+ .maximumCacheSize(16)
+ .expireAfterAccess(72.days)
+ .build()
+
+ suspend fun loadFunction(name: String): CompiledScript? {
+ val scriptFile = StoragePath.scriptDir / "$name.groovy"
+ val script = FileStorage.instance.readFile(scriptFile) ?: return null
+
+ val digest = hex(hasher.get().digest(script))
+ return cache.get(digest) {
+ withContext(Dispatchers.IO) {
+ (scriptEngine.get() as Compilable).compile(String(script))
+ }
+ }
+ }
+
+ fun jsonToGroovy(json: JsonElement): Any? = when (json) {
+ JsonNull -> null
+ is JsonPrimitive -> if (json.isString)
+ json.content
+ else
+ json.booleanOrNull ?: json.intOrNull ?: json.double
+
+        is JsonObject -> json.mapValues { (_, it) -> jsonToGroovy(it) }
+        is JsonArray -> json.map { jsonToGroovy(it) }
+ }
+
+ fun groovyToJson(data: Any?): JsonElement = when (data) {
+ null -> JsonNull
+ is String -> JsonPrimitive(data)
+ is Number -> JsonPrimitive(data)
+ is Boolean -> JsonPrimitive(data)
+        is List<*> -> JsonArray(data.map { groovyToJson(it) })
+        is Set<*> -> JsonArray(data.map { groovyToJson(it) })
+        is Map<*, *> -> JsonObject(data.map { (k, v) -> k.toString() to groovyToJson(v) }.toMap())
+ else -> throw ClassCastException("Expected null, String, Number, Boolean, List, Set, or Map for converted data, got $data of type ${data::class.qualifiedName}")
+ }
+
+ suspend fun runScriptInternal(script: CompiledScript, args: MutableMap<String, Any?>, env: AsyncLexerTagEnvironment<PreProcessingContext, PreProcessingSubject>): Any? {
+ return suspendCancellableCoroutine { continuation ->
+ val bindings = SimpleBindings()
+ bindings["stdlib"] = PreProcessorScriptStdlib(env, continuation.context, continuation::resumeWithException)
+ bindings["ctx"] = PreProcessorScriptVarContext { jsonToGroovy(env.context[it].toPreProcessJson()) }
+ bindings["args"] = args
+ bindings["finish"] = Consumer<Any?>(continuation::resume)
+
+ script.eval(bindings)
+ }
+ }
+
+ suspend fun runScriptSafe(scriptName: String, args: Map<String, ParserTree>, env: AsyncLexerTagEnvironment<PreProcessingContext, PreProcessingSubject>, errorHandler: (Exception) -> ParserTree): ParserTree {
+ return try {
+ val script = loadFunction(scriptName)!!
+ val internalArgs = args.mapValuesTo(mutableMapOf()) { (_, it) -> jsonToGroovy(it.toPreProcessJson()) }
+ val result = runScriptInternal(script, internalArgs, env)
+ return if (result is String)
+ ParserState.parseText(result)
+ else
+ groovyToJson(result).toPreProcessTree()
+ } catch (ex: Exception) {
+ errorHandler(ex)
+ }
+ }
+}
+
+fun interface PreProcessorScriptVarContext {
+ operator fun get(name: String): Any?
+}
+class PreProcessorScriptStdlib(private val env: AsyncLexerTagEnvironment<PreProcessingContext, PreProcessingSubject>, private val context: CoroutineContext, private val onError: (Throwable) -> Unit) {
+ fun jsonStringify(data: Any?): String {
+ return PreProcessorScriptLoader.groovyToJson(data).toString()
+ }
+
+ fun jsonParse(json: String): Any? {
+ return PreProcessorScriptLoader.jsonToGroovy(JsonStorageCodec.parseToJsonElement(json))
+ }
+
+ @JvmOverloads
+ fun loadScript(scriptName: String, args: Map<String, Any?> = emptyMap(), useResult: Consumer<Any?>) {
+ suspend {
+ val script = PreProcessorScriptLoader.loadFunction(scriptName)!!
+ val argsMutable = if (args is MutableMap) args else args.toMutableMap()
+ PreProcessorScriptLoader.runScriptInternal(script, argsMutable, env)
+ }.startCoroutine(Continuation(context) { result ->
+ result.onSuccess(useResult::accept)
+ result.onFailure(onError)
+ })
+ }
}
package info.mechyrdia.lore
+import info.mechyrdia.JsonStorageCodec
+import info.mechyrdia.data.FileStorage
+import info.mechyrdia.data.StoragePath
import kotlinx.serialization.json.*
fun JsonElement.toPreProcessTree(): ParserTree = when (this) {
}
else JsonPrimitive(unparse())
}
+
+object FactbookLoader {
+ suspend fun loadJsonData(lorePath: List<String>): JsonObject {
+ val bytes = FileStorage.instance.readFile(StoragePath.jsonDocDir / lorePath) ?: return JsonObject(emptyMap())
+ return JsonStorageCodec.parseToJsonElement(String(bytes)) as JsonObject
+ }
+
+ suspend fun loadFactbookContext(lorePath: List<String>): Map<String, ParserTree> {
+ return loadJsonData(lorePath).mapValues { (_, it) -> it.toPreProcessTree() }
+ }
+
+ suspend fun loadFactbook(lorePath: List<String>): ParserTree? {
+ val bytes = FileStorage.instance.readFile(StoragePath.articleDir / lorePath) ?: return null
+ val inputTree = ParserState.parseText(String(bytes))
+ return inputTree.preProcess(loadFactbookContext(lorePath))
+ }
+}
"sum" to PreProcessorMathVariadicOperator(List<Double>::sum),
"prod" to PreProcessorMathVariadicOperator { it.fold(1.0, Double::times) },
"mean" to PreProcessorMathVariadicOperator { it.sum() / it.size.coerceAtLeast(1) },
+
+ "eq" to PreProcessorMathPredicate { left, right -> left == right },
+ "lt" to PreProcessorMathPredicate { left, right -> left < right },
+ "gt" to PreProcessorMathPredicate { left, right -> left > right },
+ "ne" to PreProcessorMathPredicate { left, right -> left != right },
+ "le" to PreProcessorMathPredicate { left, right -> left <= right },
+ "ge" to PreProcessorMathPredicate { left, right -> left >= right },
)
}
}
fun calculate(left: Double, right: Double): Boolean
-
- companion object : PreProcessorFunctionLibrary() {
- override val functions: Map<String, PreProcessorFunction> = mapOf(
- "eq" to PreProcessorMathPredicate { left, right -> left == right },
- "lt" to PreProcessorMathPredicate { left, right -> left < right },
- "gt" to PreProcessorMathPredicate { left, right -> left > right },
- "ne" to PreProcessorMathPredicate { left, right -> left != right },
- "le" to PreProcessorMathPredicate { left, right -> left <= right },
- "ge" to PreProcessorMathPredicate { left, right -> left >= right },
- )
- }
}
fun interface PreProcessorLogicBinaryOperator : PreProcessorFunction {
env.processTree(env.context["in"])
.treeToBooleanOrNull()
?.let { "${!it}".textToTree() }
- ?: emptyList()
+ .orEmpty()
},
"and" to PreProcessorLogicBinaryOperator { left, right -> left && right },
)
}
}
+
+fun interface PreProcessorInputTest : PreProcessorFilter {
+ override suspend fun execute(input: ParserTree, env: AsyncLexerTagEnvironment<PreProcessingContext, PreProcessingSubject>): ParserTree {
+ return calculate(input).booleanToTree()
+ }
+
+ fun calculate(input: ParserTree): Boolean
+
+ companion object : PreProcessorFilterLibrary() {
+ override val filters: Map<String, PreProcessorFilter> = mapOf(
+ "empty" to PreProcessorInputTest { it.isEmpty() },
+ "blank" to PreProcessorInputTest { it.isWhitespace() },
+ "notempty" to PreProcessorInputTest { it.isNotEmpty() },
+ "notblank" to PreProcessorInputTest { !it.isWhitespace() },
+ )
+ }
+}
IMAGE(HtmlTextBodyLexerTag { _, param, content ->
val url = content.sanitizeLink()
val (width, height) = getSizeParam(param)
+ val styleValue = getRawImageSizeStyleValue(width, height)
if (url.endsWith(".svg")) {
- val svg = File(Configuration.CurrentConfiguration.assetDir, "images")
- .combineSafe(url)
- .readText()
- .replaceFirst("<svg", "<svg${getRawImageSizeAttributes(width, height)}")
-
- ({ unsafe { +svg } })
+ ({
+ iframe {
+ src = "/assets/images/$url"
+ style = styleValue
+ }
+ })
} else {
- val styleValue = getRawImageSizeStyleValue(width, height)
-
({
img(src = "/assets/images/$url") {
width?.let { attributes["data-width"] = "$it" }
package info.mechyrdia.lore
-fun <TSubject> String.parseAs(converter: ParserTree.() -> TSubject) = ParserState.parseText(this).converter()
+inline fun <TSubject> String.parseAs(converter: ParserTree.() -> TSubject) = ParserState.parseText(this).converter()
-fun <TContext> String.parseAs(context: TContext, converter: ParserTree.(TContext) -> Unit) = ParserState.parseText(this).converter(context)
+inline fun <TContext> String.parseAs(context: TContext, converter: ParserTree.(TContext) -> Unit) = ParserState.parseText(this).converter(context)
sealed class ParserTreeNode {
data class Text(val text: String) : ParserTreeNode()
+++ /dev/null
-package info.mechyrdia.lore
-
-import io.pebbletemplates.pebble.PebbleEngine
-import io.pebbletemplates.pebble.error.PebbleException
-import org.slf4j.Logger
-import org.slf4j.LoggerFactory
-import java.io.StringWriter
-
-object PreParser {
- private val logger: Logger = LoggerFactory.getLogger(PreParser::class.java)
-
- private val pebble = PebbleEngine.Builder()
- .addEscapingStrategy("none", PebbleNoEscaping)
- .defaultEscapingStrategy("none")
- .autoEscaping(false)
- .loader(PebbleTemplateLoader)
- .registerExtensionCustomizer(::PebbleExtensionCustomizer)
- .extension(PebbleFactbooksExtension)
- .build()
-
- fun preparse(name: String, content: String): String {
- return try {
- val template = pebble.getLiteralTemplate(content)
- val context = PebbleJsonLoader.loadJsonContextOrEmpty(name).toMutableMap()
- val writer = StringWriter()
- template.evaluate(writer, context)
- return writer.toString()
- } catch (ex: PebbleException) {
- logger.error("Runtime error pre-parsing factbook $name", ex)
- "[h1]Error[/h1]\n\nThere was a runtime error pre-parsing this factbook: ${ex.message}"
- }
- }
-}
+++ /dev/null
-package info.mechyrdia.lore
-
-import info.mechyrdia.Configuration
-import info.mechyrdia.JsonFileCodec
-import info.mechyrdia.JsonStorageCodec
-import io.ktor.util.*
-import io.pebbletemplates.pebble.error.PebbleException
-import io.pebbletemplates.pebble.extension.*
-import io.pebbletemplates.pebble.extension.Function
-import io.pebbletemplates.pebble.extension.escaper.EscapeFilter
-import io.pebbletemplates.pebble.extension.escaper.EscapingStrategy
-import io.pebbletemplates.pebble.extension.escaper.RawFilter
-import io.pebbletemplates.pebble.extension.escaper.SafeString
-import io.pebbletemplates.pebble.extension.i18n.i18nFunction
-import io.pebbletemplates.pebble.loader.Loader
-import io.pebbletemplates.pebble.template.EvaluationContext
-import io.pebbletemplates.pebble.template.PebbleTemplate
-import io.pebbletemplates.pebble.tokenParser.*
-import kotlinx.serialization.json.*
-import java.io.File
-import java.io.IOException
-import java.io.Reader
-import java.security.MessageDigest
-import java.util.concurrent.ConcurrentHashMap
-import javax.script.*
-import kotlin.reflect.jvm.jvmName
-
-object PebbleNoEscaping : EscapingStrategy {
- override fun escape(input: String): String {
- return input
- }
-}
-
-data class PebbleTemplateCacheKey(val fileName: String, val lastModified: Long) {
- constructor(name: String) : this(name, File(Configuration.CurrentConfiguration.templateDir).combineSafe("$name.tpl").lastModified())
-
- val file: File
- get() = File(Configuration.CurrentConfiguration.templateDir).combineSafe("$fileName.tpl")
-
- companion object {
- fun exists(name: String) = File(Configuration.CurrentConfiguration.templateDir).combineSafe("$name.tpl").isFile
- }
-}
-
-object PebbleTemplateLoader : Loader<PebbleTemplateCacheKey> {
- override fun getReader(cacheKey: PebbleTemplateCacheKey): Reader {
- return cacheKey.file.reader()
- }
-
- override fun setCharset(charset: String?) {
- // no-op
- }
-
- override fun setPrefix(prefix: String) {
- // no-op
- }
-
- override fun setSuffix(suffix: String) {
- // no-op
- }
-
- override fun resolveRelativePath(relativePath: String, anchorPath: String): String {
- if ('\n' in anchorPath) // Probably a raw template contents string
- return relativePath
-
- val templateDir = File(Configuration.CurrentConfiguration.templateDir)
- return templateDir.combineSafe("$anchorPath/$relativePath").toRelativeString(templateDir)
- }
-
- override fun createCacheKey(templateName: String): PebbleTemplateCacheKey {
- return PebbleTemplateCacheKey(templateName)
- }
-
- override fun resourceExists(templateName: String): Boolean {
- return PebbleTemplateCacheKey.exists(templateName)
- }
-}
-
-class PebbleExtensionCustomizer(delegate: Extension) : ExtensionCustomizer(delegate) {
- override fun getFilters(): MutableMap<String, Filter> {
- return super.getFilters().orEmpty().filterValues { filter ->
- filter !is EscapeFilter && filter !is RawFilter
- }.toMutableMap()
- }
-
- override fun getFunctions(): MutableMap<String, Function> {
- return super.getFunctions().orEmpty().filterValues { function ->
- function !is i18nFunction
- }.toMutableMap()
- }
-
- override fun getTokenParsers(): MutableList<TokenParser> {
- return super.getTokenParsers().orEmpty().filter { tokenParser ->
- tokenParser !is AutoEscapeTokenParser && tokenParser !is CacheTokenParser && tokenParser !is FlushTokenParser && tokenParser !is ParallelTokenParser
- }.toMutableList()
- }
-}
-
-object PebbleFactbooksExtension : AbstractExtension() {
- override fun getFilters(): MutableMap<String, Filter> {
- return mutableMapOf(
- "toJson" to PebbleToJsonFilter,
- "fromJson" to PebbleFromJsonFilter,
- "script" to PebbleScriptFilter,
- )
- }
-
- override fun getFunctions(): MutableMap<String, Function> {
- return mutableMapOf(
- "loadJson" to PebbleLoadJsonFunction,
- "script" to PebbleScriptFunction,
- )
- }
-}
-
-@JvmInline
-value class JsonPath private constructor(private val pathElements: List<String>) {
- constructor(path: String) : this(path.split('.').filterNot { it.isBlank() })
-
- operator fun component1() = pathElements.firstOrNull()
- operator fun component2() = JsonPath(pathElements.drop(1))
-
- override fun toString(): String {
- return pathElements.joinToString(separator = ".")
- }
-
- companion object {
- val Empty = JsonPath(emptyList())
- }
-}
-
-operator fun JsonElement.get(path: JsonPath): JsonElement {
- val (pathHead, pathTail) = path
- pathHead ?: return this
-
- return when (this) {
- is JsonObject -> this.getValue(pathHead)[pathTail]
- is JsonArray -> this[pathHead.toInt()][pathTail]
- is JsonPrimitive -> throw NoSuchElementException("Cannot resolve path $path on JSON primitive $this")
- }
-}
-
-@JvmInline
-value class JsonImport private constructor(private val importFrom: Pair<File, JsonPath>) {
- fun resolve(): Pair<File, JsonElement> {
- return try {
- importFrom.let { (file, path) ->
- file to JsonFileCodec.parseToJsonElement(file.readText())[path]
- }
- } catch (ex: RuntimeException) {
- val filePath = importFrom.first.toRelativeString(File(Configuration.CurrentConfiguration.jsonDocDir))
- val jsonPath = importFrom.second
- throw IllegalArgumentException("Unable to resolve JSON path $jsonPath on file $filePath", ex)
- }
- }
-
- companion object {
- operator fun invoke(statement: String, currentFile: File): JsonImport? {
- if (!statement.startsWith('@')) return null
- val splitterIndex = statement.lastIndexOf('#')
-
- val (filePath, jsonPath) = if (splitterIndex != -1)
- statement.substring(1, splitterIndex) to statement.substring(splitterIndex + 1)
- else
- statement.substring(1) to null
-
- val file = if (filePath.startsWith('/'))
- File(Configuration.CurrentConfiguration.jsonDocDir).combineSafe("$filePath.json")
- else
- currentFile.parentFile.combineSafe("$filePath.json")
-
- if (!file.isFile)
- throw IllegalArgumentException("JSON import path '$filePath' does not point to a file")
-
- return JsonImport(file to (jsonPath?.let { JsonPath(it) } ?: JsonPath.Empty))
- }
- }
-}
-
-object PebbleJsonLoader {
- private fun resolveImports(json: JsonElement, currentFile: File): JsonElement = when (json) {
- JsonNull -> JsonNull
- is JsonPrimitive -> if (json.isString) {
- JsonImport(json.content, currentFile)?.let { jsonImport ->
- val (nextFile, jsonData) = jsonImport.resolve()
- resolveImports(jsonData, nextFile)
- } ?: json
- } else json
-
- is JsonObject -> JsonObject(json.mapValues { (_, it) -> resolveImports(it, currentFile) })
- is JsonArray -> JsonArray(json.map { resolveImports(it, currentFile) })
- }
-
- fun convertJson(json: JsonElement): Any? = when (json) {
- JsonNull -> null
- is JsonPrimitive -> if (json.isString)
- json.content
- else
- json.intOrNull ?: json.double
-
- is JsonObject -> json.mapValues { (_, it) -> convertJson(it) }
- is JsonArray -> json.map { convertJson(it) }
- }
-
- fun deconvertJson(data: Any?): JsonElement = when (data) {
- null -> JsonNull
- is String -> JsonPrimitive(data)
- is SafeString -> JsonPrimitive(data.toString())
- is Number -> JsonPrimitive(data)
- is Boolean -> JsonPrimitive(data)
- is List<*> -> JsonArray(data.map { deconvertJson(it) })
- is Set<*> -> JsonArray(data.map { deconvertJson(it) })
- is Map<*, *> -> JsonObject(data.map { (k, v) -> k.toString() to deconvertJson(v) }.toMap())
- else -> throw ClassCastException("Expected null, String, Number, Boolean, List, Set, or Map for converted data, got $data of type ${data::class.jvmName}")
- }
-
- fun loadJson(name: String): JsonElement =
- File(Configuration.CurrentConfiguration.jsonDocDir)
- .combineSafe("$name.json")
- .takeIf { it.isFile }
- .let { file ->
- file ?: throw IOException("Unable to find $name as a file")
-
- val text = file.readText()
- resolveImports(JsonFileCodec.parseToJsonElement(text), file)
- }
-
- private fun loadJsonContext(name: String): Map<String, Any?> = loadJson(name).let { json ->
- val data = convertJson(json)
- if (data !is Map<*, *>)
- throw IOException("JSON Object expected in file $name, got $json")
-
- data.mapKeys { (k, _) -> k.toString() }
- }
-
- fun loadJsonContextOrEmpty(name: String): Map<String, Any?> = try {
- loadJsonContext(name)
- } catch (ex: IOException) {
- emptyMap()
- }
-}
-
-object PebbleToJsonFilter : Filter {
- override fun getArgumentNames(): MutableList<String> {
- return mutableListOf()
- }
-
- override fun apply(input: Any?, args: MutableMap<String, Any?>, self: PebbleTemplate?, context: EvaluationContext?, lineNumber: Int): Any {
- return PebbleJsonLoader.deconvertJson(input).toString()
- }
-}
-
-object PebbleFromJsonFilter : Filter {
- override fun getArgumentNames(): MutableList<String> {
- return mutableListOf()
- }
-
- override fun apply(input: Any?, args: MutableMap<String, Any?>, self: PebbleTemplate?, context: EvaluationContext?, lineNumber: Int): Any? {
- return PebbleJsonLoader.convertJson(JsonStorageCodec.parseToJsonElement(input.toString()))
- }
-}
-
-object PebbleLoadJsonFunction : Function {
- override fun getArgumentNames(): MutableList<String> {
- return mutableListOf("data", "path")
- }
-
- override fun execute(args: MutableMap<String, Any?>, self: PebbleTemplate, context: EvaluationContext, lineNumber: Int): Any? {
- val dataName = args["data"]?.toString()
- ?: throw PebbleException(null, "Missing 'data' argument", lineNumber, self.name)
-
- val data = try {
- PebbleJsonLoader.loadJson(dataName)
- } catch (ex: IOException) {
- throw PebbleException(ex, ex.message, lineNumber, self.name)
- }
-
- val dataPath = args["path"]?.toString()?.let { JsonPath(it) } ?: JsonPath.Empty
- return try {
- PebbleJsonLoader.convertJson(data[dataPath])
- } catch (ex: NoSuchElementException) {
- throw PebbleException(ex, "Unable to resolve path $dataPath in JSON $data", lineNumber, self.name)
- }
- }
-}
-
-object PebbleScriptLoader {
- private val scriptEngine = ThreadLocal.withInitial { ScriptEngineManager().getEngineByExtension("groovy") }
- private val hasher = ThreadLocal.withInitial { MessageDigest.getInstance("SHA-256") }
- private val cache = ConcurrentHashMap<String, CompiledScript>()
-
- fun loadFunction(name: String): CompiledScript? {
- val scriptFile = File(Configuration.CurrentConfiguration.scriptDir).combineSafe("$name.groovy")
- if (!scriptFile.canRead())
- return null
-
- val script = scriptFile.readText()
- val digest = hasher.get().digest(script.toByteArray()).joinToString(separator = "") { it.toUByte().toString(16) }
- return cache.computeIfAbsent(digest) {
- (scriptEngine.get() as Compilable).compile(script)
- }
- }
-
- private fun runScript(scriptName: String, script: CompiledScript, input: Any?, args: MutableMap<String, Any?>, self: PebbleTemplate, context: EvaluationContext, lineNumber: Int): Any? {
- val bindings = SimpleBindings()
- bindings["text"] = input
- bindings["stdlib"] = PebbleScriptStdlib(bindings, self, lineNumber)
- bindings["ctx"] = PebbleScriptVarContext(context::getVariable)
- bindings["args"] = args.toMutableMap().apply { remove("script") }
-
- return try {
- script.eval(bindings)
- } catch (ex: ScriptException) {
- throw PebbleException(ex, "Unhandled ScriptException from $scriptName", lineNumber, self.name)
- }
- }
-
- private fun runScript(scriptName: String, input: Any?, args: MutableMap<String, Any?>, self: PebbleTemplate, context: EvaluationContext, lineNumber: Int): Any? {
- val script = loadFunction(scriptName)
- ?: throw PebbleException(null, "Script $scriptName could not be found", lineNumber, self.name)
-
- return runScript(scriptName, script, input, args, self, context, lineNumber)
- }
-
- fun runScript(input: Any?, args: MutableMap<String, Any?>, self: PebbleTemplate, context: EvaluationContext, lineNumber: Int): Any? {
- val scriptName = args["script"]?.toString()
- ?: throw PebbleException(null, "Missing 'script' argument", lineNumber, self.name)
-
- return runScript(scriptName, input, args, self, context, lineNumber)
- }
-}
-
-class PebbleScriptStdlib(private val bindings: Bindings, private val self: PebbleTemplate, private val lineNumber: Int) {
- fun serialize(data: Any?): String {
- return PebbleJsonLoader.deconvertJson(data).toString()
- }
-
- fun deserialize(json: String): Any? {
- return PebbleJsonLoader.convertJson(JsonStorageCodec.parseToJsonElement(json))
- }
-
- @JvmOverloads
- fun loadScript(scriptName: String, env: Map<String, Any?> = emptyMap()): Any? {
- val script = PebbleScriptLoader.loadFunction(scriptName)
- ?: throw PebbleException(null, "Script $scriptName could not be found", lineNumber, self.name)
-
- val innerBindings = SimpleBindings()
- innerBindings.putAll(env)
-
- return try {
- script.eval(innerBindings)
- } catch (ex: ScriptException) {
- throw PebbleException(ex, "Unhandled ScriptException from $scriptName", lineNumber, self.name)
- }
- }
-
- fun loadScriptWith(scriptName: String, env: MutableMap<String, Any?> = mutableMapOf()): Any? {
- val script = PebbleScriptLoader.loadFunction(scriptName)
- ?: throw PebbleException(null, "Script $scriptName could not be found", lineNumber, self.name)
-
- val innerBindings = SimpleBindings()
- innerBindings.putAll(env)
-
- return try {
- script.eval(innerBindings).also { _ ->
- env.clear()
- env.putAll(innerBindings)
- }
- } catch (ex: ScriptException) {
- throw PebbleException(ex, "Unhandled ScriptException from $scriptName", lineNumber, self.name)
- }
- }
-
- fun loadScriptHere(scriptName: String): Any? {
- val script = PebbleScriptLoader.loadFunction(scriptName)
- ?: throw PebbleException(null, "Script $scriptName could not be found", lineNumber, self.name)
-
- return try {
- script.eval(bindings)
- } catch (ex: ScriptException) {
- throw PebbleException(ex, "Unhandled ScriptException from $scriptName", lineNumber, self.name)
- }
- }
-}
-
-fun interface PebbleScriptVarContext {
- operator fun get(name: String): Any?
-}
-
-object PebbleScriptFilter : Filter {
- override fun getArgumentNames(): MutableList<String> {
- return mutableListOf("script")
- }
-
- override fun apply(input: Any?, args: MutableMap<String, Any?>, self: PebbleTemplate, context: EvaluationContext, lineNumber: Int): Any? {
- return PebbleScriptLoader.runScript(input, args, self, context, lineNumber)
- }
-}
-
-object PebbleScriptFunction : Function {
- override fun getArgumentNames(): MutableList<String> {
- return mutableListOf("script")
- }
-
- override fun execute(args: MutableMap<String, Any?>, self: PebbleTemplate, context: EvaluationContext, lineNumber: Int): Any? {
- return PebbleScriptLoader.runScript(null, args, self, context, lineNumber)
- }
-}
package info.mechyrdia.lore
-import info.mechyrdia.Configuration
+import info.mechyrdia.data.StoragePath
import io.ktor.server.application.*
-import io.ktor.util.*
-import java.io.File
-fun ApplicationCall.galaxyMapPage(): File {
+fun ApplicationCall.galaxyMapPage(): StoragePath {
val themeName = when (pageTheme) {
PageTheme.SYSTEM -> "system"
PageTheme.LIGHT -> "light"
PageTheme.DARK -> "dark"
}
- return File(Configuration.CurrentConfiguration.assetDir).combineSafe("map/index-$themeName.html")
+ return StoragePath.assetDir / listOf("map", "index-$themeName.html")
}
package info.mechyrdia.lore
-import info.mechyrdia.Configuration
import info.mechyrdia.JsonFileCodec
+import info.mechyrdia.data.FileStorage
+import info.mechyrdia.data.StoragePath
import info.mechyrdia.data.currentNation
import info.mechyrdia.route.Root
import info.mechyrdia.route.createCsrfToken
import info.mechyrdia.route.href
import io.ktor.server.application.*
-import io.ktor.util.*
import kotlinx.html.*
import kotlinx.serialization.Serializable
import kotlinx.serialization.builtins.ListSerializer
-import java.io.File
import kotlin.collections.component1
import kotlin.collections.component2
import kotlin.collections.set
val text: String
)
-fun loadExternalLinks(): List<NavItem> {
- val extraLinksFile = File(Configuration.CurrentConfiguration.rootDir).combineSafe("externalLinks.json")
- val extraLinks = JsonFileCodec.decodeFromString(ListSerializer(ExternalLink.serializer()), extraLinksFile.readText())
+suspend fun loadExternalLinks(): List<NavItem> {
+ val extraLinksFile = StoragePath.Root / "externalLinks.json"
+	val extraLinksJson = String(checkNotNull(FileStorage.instance.readFile(extraLinksFile)) { "externalLinks.json is missing" })
+ val extraLinks = JsonFileCodec.decodeFromString(ListSerializer(ExternalLink.serializer()), extraLinksJson)
return if (extraLinks.isEmpty())
emptyList()
else (listOf(NavHead("See Also")) + extraLinks.map { NavLink(it.url, it.text, textIsHtml = true) })
package info.mechyrdia.lore
-import info.mechyrdia.Configuration
import info.mechyrdia.JsonFileCodec
import info.mechyrdia.data.*
import info.mechyrdia.route.KeyedEnumSerializer
import info.mechyrdia.route.Root
import info.mechyrdia.route.href
import io.ktor.server.application.*
-import io.ktor.util.*
import kotlinx.coroutines.async
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.flow.toList
import kotlinx.html.*
import kotlinx.serialization.Serializable
-import java.io.File
@Serializable
data class IntroMetaData(
}
suspend fun ApplicationCall.loreIntroPage(): HTML.() -> Unit {
- val rootDirFile = File(Configuration.CurrentConfiguration.rootDir)
+ val metaJson = String(FileStorage.instance.readFile(StoragePath.Root / "introMeta.json")!!)
+ val metaData = JsonFileCodec.decodeFromString(IntroMetaData.serializer(), metaJson)
- val metaJsonFile = rootDirFile.combineSafe("introMeta.json")
- val metaData = JsonFileCodec.decodeFromString(IntroMetaData.serializer(), metaJsonFile.readText())
-
- val htmlFile = rootDirFile.combineSafe("intro.html")
- val fileHtml = htmlFile.readText()
+ val html = String(FileStorage.instance.readFile(StoragePath.Root / "intro.html")!!)
return page(metaData.title, standardNavBar(), null, metaData.ogData) {
section {
a { id = "page-top" }
- unsafe { raw(fileHtml) }
+ unsafe { raw(html) }
}
}
}
object LoreArticleFormatSerializer : KeyedEnumSerializer<LoreArticleFormat>(LoreArticleFormat.entries, LoreArticleFormat::format)
-fun ApplicationCall.loreRawArticlePage(pagePathParts: List<String>): HTML.() -> Unit {
- val articleDir = File(Configuration.CurrentConfiguration.articleDir)
+suspend fun ApplicationCall.loreRawArticlePage(pagePath: List<String>): HTML.() -> Unit {
+ val articleDir = StoragePath.articleDir
- val pagePath = pagePathParts.joinToString(separator = "/")
- val pageFile = if (pagePath.isEmpty()) articleDir else articleDir.combineSafe(pagePath)
+ val pageFile = articleDir / pagePath
val pageNode = pageFile.toArticleNode()
- val parentPaths = if (pagePathParts.isEmpty())
+ val parentPaths = if (pagePath.isEmpty())
emptyList()
else {
- val pathDirs = pagePathParts.dropLast(1)
+ val pathDirs = pagePath.dropLast(1)
listOf(Root.LorePage(emptyList(), LoreArticleFormat.RAW_HTML) to TOC_TITLE) + pathDirs.mapIndexed { i, part ->
Root.LorePage(pathDirs.take(i + 1), LoreArticleFormat.RAW_HTML) to part.toFriendlyPageTitle()
}
}
- val isValid = pageFile.exists() && pageFile.isViewable
+ val pageType = FileStorage.instance.getType(pageFile)
+ val isValid = pageType != null && pageFile.isViewable
if (isValid) {
- if (pageFile.isDirectory) {
+ if (pageType == StoredFileType.DIRECTORY) {
val title = pagePath.takeIf { it.isNotEmpty() }?.toFriendlyIndexTitle() ?: TOC_TITLE
return rawPage(title) {
breadCrumbs(parentPaths)
h1 { +title }
ul {
- pageNode.subNodes.renderInto(this, pagePathParts, LoreArticleFormat.RAW_HTML)
+ pageNode.subNodes.renderInto(this, pagePath, LoreArticleFormat.RAW_HTML)
}
}
}
- if (pageFile.isFile) {
- val pageTemplate = pageFile.readText()
- val pageMarkup = PreParser.preparse(pagePath, pageTemplate)
- val pageHtml = pageMarkup.parseAs(ParserTree::toRawHtml)
+ val pageMarkup = FactbookLoader.loadFactbook(pagePath)
+ if (pageType == StoredFileType.FILE && pageMarkup != null) {
+ val pageHtml = pageMarkup.toRawHtml()
val pageToC = TableOfContentsBuilder()
- pageMarkup.parseAs(pageToC, ParserTree::buildToC)
+ pageMarkup.buildToC(pageToC)
return rawPage(pageToC.toPageTitle(), pageToC.toOpenGraph()) {
breadCrumbs(parentPaths)
}
}
- val title = pagePath.substringAfterLast('/').toFriendlyPageTitle()
+	val title = pagePath.lastOrNull()?.toFriendlyPageTitle() ?: TOC_TITLE
return rawPage(title) {
breadCrumbs(parentPaths)
}
}
-suspend fun ApplicationCall.loreArticlePage(pagePathParts: List<String>, format: LoreArticleFormat = LoreArticleFormat.HTML): HTML.() -> Unit {
+suspend fun ApplicationCall.loreArticlePage(pagePath: List<String>, format: LoreArticleFormat = LoreArticleFormat.HTML): HTML.() -> Unit {
val totalsData = processGuestbook()
if (format == LoreArticleFormat.RAW_HTML)
- return loreRawArticlePage(pagePathParts)
+ return loreRawArticlePage(pagePath)
- val pagePath = pagePathParts.joinToString("/")
- val pageFile = File(Configuration.CurrentConfiguration.articleDir).combineSafe(pagePath)
+ val pageFile = StoragePath.articleDir / pagePath
val pageNode = pageFile.toArticleNode()
val (canCommentAs, comments) = coroutineScope {
canCommentAs.await() to comments.await()
}
- val isValid = pageFile.exists() && pageFile.isViewable
+ val pageType = FileStorage.instance.getType(pageFile)
+ val isValid = pageType != null && pageFile.isViewable
if (isValid) {
- if (pageFile.isDirectory) {
- val navbar = standardNavBar(pagePathParts.takeIf { it.isNotEmpty() })
+ if (pageType == StoredFileType.DIRECTORY) {
+ val navbar = standardNavBar(pagePath.takeIf { it.isNotEmpty() })
val title = pagePath.takeIf { it.isNotEmpty() }?.toFriendlyIndexTitle() ?: TOC_TITLE
a { id = "page-top" }
h1 { +title }
ul {
- pageNode.subNodes.renderInto(this, pagePathParts, format = format)
+ pageNode.subNodes.renderInto(this, pagePath, format = format)
}
}
- finalSection(pagePathParts, canCommentAs, comments, totalsData)
+ finalSection(pagePath, canCommentAs, comments, totalsData)
}
}
- if (pageFile.isFile) {
- val pageTemplate = pageFile.readText()
- val pageMarkup = PreParser.preparse(pagePath, pageTemplate)
- val pageHtml = pageMarkup.parseAs(ParserTree::toFactbookHtml)
+ val pageMarkup = FactbookLoader.loadFactbook(pagePath)
+ if (pageType == StoredFileType.FILE && pageMarkup != null) {
+ val pageHtml = pageMarkup.toFactbookHtml()
val pageToC = TableOfContentsBuilder()
- pageMarkup.parseAs(pageToC, ParserTree::buildToC)
+ pageMarkup.buildToC(pageToC)
val pageNav = pageToC.toNavBar() + NavLink("#comments", "Comments", aClasses = "left")
- val navbar = standardNavBar(pagePathParts)
+ val navbar = standardNavBar(pagePath)
val sidebar = PageNavSidebar(pageNav)
return page(pageToC.toPageTitle(), navbar, sidebar, pageToC.toOpenGraph()) {
+pageHtml
- finalSection(pagePathParts, canCommentAs, comments, totalsData)
+ finalSection(pagePath, canCommentAs, comments, totalsData)
}
}
}
- val title = pagePathParts.last().toFriendlyPageTitle()
- val navbar = standardNavBar(pagePathParts)
+ val title = pagePath.last().toFriendlyPageTitle()
+ val navbar = standardNavBar(pagePath)
val sidebar = PageNavSidebar(
listOf(
NavLink("#page-top", title, aClasses = "left"),
}
}
- finalSection(pagePathParts, canCommentAs, comments, totalsData)
+ finalSection(pagePath, canCommentAs, comments, totalsData)
}
}
package info.mechyrdia.lore
-import info.mechyrdia.Configuration
import info.mechyrdia.JsonFileCodec
+import info.mechyrdia.data.FileStorage
+import info.mechyrdia.data.StoragePath
import info.mechyrdia.route.KeyedEnumSerializer
import io.ktor.http.*
import io.ktor.server.application.*
import kotlinx.serialization.builtins.ListSerializer
import kotlinx.serialization.json.buildJsonObject
import kotlinx.serialization.json.put
-import java.io.File
@Serializable
data class Quote(
"https://mechyrdia.info/lore/$link"
}
-val quotesList by fileData(File(Configuration.CurrentConfiguration.rootDir).combineSafe("quotes.json")) { jsonFile ->
- JsonFileCodec.decodeFromString(ListSerializer(Quote.serializer()), jsonFile.readText())
+private val quotesListGetter by storedData(StoragePath.Root / "quotes.json") { jsonPath ->
+ FileStorage.instance.readFile(jsonPath)?.let {
+ JsonFileCodec.decodeFromString(ListSerializer(Quote.serializer()), String(it))
+ }
}
-fun randomQuote(): Quote = quotesList.random()
+suspend fun getQuotesList() = checkNotNull(quotesListGetter()) { "quotes.json is missing or unreadable" }
+
+suspend fun randomQuote(): Quote = getQuotesList().random()
@Serializable(with = QuoteFormatSerializer::class)
enum class QuoteFormat(val format: String?) {
package info.mechyrdia.lore
-import info.mechyrdia.Configuration
-import io.ktor.util.*
-import java.io.File
-import java.time.Instant
-import kotlin.math.max
+import info.mechyrdia.data.FileStorage
+import info.mechyrdia.data.StoragePath
-private const val AVERAGE_FACTBOOK_PAGE_CHANGEFREQ = "hourly"
-private const val AVERAGE_FACTBOOK_INDEX_CHANGEFREQ = "daily"
private const val AVERAGE_FACTBOOK_INTRO_CHANGEFREQ = "daily"
+private const val AVERAGE_FACTBOOK_PAGE_CHANGEFREQ = "hourly"
private const val FACTBOOK_INTRO_PRIORITY = "0.7"
-private const val FACTBOOK_ROOT_PRIORITY = "0.6"
-private const val FACTBOOK_INDEX_PRIORITY = "0.4"
private const val FACTBOOK_PAGE_PRIORITY = "0.8"
-private val File.lastSubFilesModified: Instant?
- get() = if (isDirectory)
- (listFiles().orEmpty().mapNotNull {
- it.lastSubFilesModified
- } + Instant.ofEpochMilli(lastModified())).max()
- else null
-
-private val File.lastContentModified: Instant
- get() = lastSubFilesModified ?: Instant.ofEpochMilli(lastModified())
-
-context(Appendable)
-private fun List<ArticleNode>.renderIntoSitemap(base: String? = null) {
- val prefix by lazy(LazyThreadSafetyMode.NONE) { base?.let { "$it/" }.orEmpty() }
- for (node in this) {
- if (node.isViewable) {
- val path = "$prefix${node.name}"
-
- val file = File(Configuration.CurrentConfiguration.articleDir).combineSafe(path)
- val lastModified = file.lastContentModified
- val changeFreq = if (node.subNodes.isNotEmpty()) AVERAGE_FACTBOOK_INDEX_CHANGEFREQ else AVERAGE_FACTBOOK_PAGE_CHANGEFREQ
- val priority = if (node.subNodes.isNotEmpty()) FACTBOOK_INDEX_PRIORITY else FACTBOOK_PAGE_PRIORITY
+private suspend fun Appendable.renderLoreSitemap() {
+ for (page in allPages()) {
+ if (page.path.isViewable) {
+ val lastModified = page.stat.updated
appendLine("\t<url>")
- appendLine("\t\t<loc>https://mechyrdia.info/lore/$path</loc>")
+			appendLine("\t\t<loc>https://mechyrdia.info/lore/${page.path.elements.drop(1).joinToString("/")}</loc>")
appendLine("\t\t<lastmod>$lastModified</lastmod>")
- appendLine("\t\t<changefreq>$changeFreq</changefreq>")
- appendLine("\t\t<priority>$priority</priority>")
+ appendLine("\t\t<changefreq>$AVERAGE_FACTBOOK_PAGE_CHANGEFREQ</changefreq>")
+ appendLine("\t\t<priority>$FACTBOOK_PAGE_PRIORITY</priority>")
appendLine("\t</url>")
- node.subNodes.renderIntoSitemap(path)
}
}
}
-private fun Appendable.renderLoreSitemap() {
- val rootFile = File(Configuration.CurrentConfiguration.articleDir)
- val rootLastModified = rootFile.lastContentModified
-
- appendLine("\t<url>")
- appendLine("\t\t<loc>https://mechyrdia.info/lore</loc>")
- appendLine("\t\t<lastmod>$rootLastModified</lastmod>")
- appendLine("\t\t<changefreq>$AVERAGE_FACTBOOK_INDEX_CHANGEFREQ</changefreq>")
- appendLine("\t\t<priority>$FACTBOOK_ROOT_PRIORITY</priority>")
- appendLine("\t</url>")
-
- rootArticleNodeList().renderIntoSitemap()
-}
-
-private fun Appendable.renderIntroSitemap() {
- val introFile = File(Configuration.CurrentConfiguration.rootDir).combineSafe("intro.html")
- val introMetaFile = File(Configuration.CurrentConfiguration.rootDir).combineSafe("introMeta.json")
+private suspend fun Appendable.renderIntroSitemap() {
+ val introFile = FileStorage.instance.statFile(StoragePath.Root / "intro.html") ?: return
+ val introMetaFile = FileStorage.instance.statFile(StoragePath.Root / "introMeta.json") ?: return
- val introLastModified = Instant.ofEpochMilli(max(introFile.lastModified(), introMetaFile.lastModified()))
+ val introLastModified = maxOf(introFile.updated, introMetaFile.updated)
appendLine("\t<url>")
appendLine("\t\t<loc>https://mechyrdia.info/</loc>")
appendLine("\t</url>")
}
-fun Appendable.generateSitemap() {
+suspend fun Appendable.generateSitemap() {
appendLine("<?xml version=\"1.0\" encoding=\"UTF-8\"?>")
appendLine("<urlset xmlns=\"http://www.sitemaps.org/schemas/sitemap/0.9\">")
renderIntroSitemap()
package info.mechyrdia.lore
import com.mongodb.client.model.Sorts
-import info.mechyrdia.Configuration
import info.mechyrdia.OwnerNationId
import info.mechyrdia.data.*
import io.ktor.http.*
import io.ktor.server.application.*
import io.ktor.util.*
+import kotlinx.coroutines.async
+import kotlinx.coroutines.awaitAll
+import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.flow.filterNot
import kotlinx.coroutines.flow.take
import kotlinx.coroutines.flow.toList
-import java.io.File
import java.time.Instant
import java.time.ZoneOffset
import java.time.format.DateTimeFormatter
-context(MutableList<File>)
-private fun ArticleNode.addPages(base: String? = null) {
+data class StoragePathWithStat(val path: StoragePath, val stat: StoredFileStats)
+
+private suspend fun ArticleNode.addPages(base: StoragePath): List<StoragePathWithStat> {
if (!this.isViewable)
- return
- val path = base?.let { "$it/$name" } ?: name
- val file = File(Configuration.CurrentConfiguration.articleDir).combineSafe(path)
- if (file.isFile)
- add(file)
- else for (subNode in subNodes)
- subNode.addPages(path)
+ return emptyList()
+ val path = base / name
+ val stat = FileStorage.instance.statFile(path)
+ return if (stat != null)
+ listOf(StoragePathWithStat(path, stat))
+ else coroutineScope {
+ subNodes.map { subNode ->
+ async {
+ subNode.addPages(path)
+ }
+ }.awaitAll().flatten()
+ }
}
-private fun allPages(): List<File> {
- return buildList {
- for (node in rootArticleNodeList())
- node.addPages()
+suspend fun allPages(): List<StoragePathWithStat> {
+ return coroutineScope {
+ rootArticleNodeList().map { subNode ->
+ async {
+ subNode.addPages(StoragePath.articleDir)
+ }
+ }.awaitAll().flatten()
}
}
-fun Appendable.generateRecentPageEdits() {
- val pages = allPages().sortedByDescending { it.lastModified() }
+suspend fun Appendable.generateRecentPageEdits() {
+ val pages = allPages().sortedByDescending { it.stat.updated }
- val mostRecentChange = pages.firstOrNull()?.lastModified()?.let { Instant.ofEpochMilli(it) }
+ val mostRecentChange = pages.firstOrNull()?.stat?.updated
RssChannel(
title = "Recently Edited Factbooks | The Hour of Decision",
categories = listOf(
RssCategory(domain = "https://nationstates.net", category = "Mechyrdia")
),
- items = pages.map { page ->
- val pagePath = page.toRelativeString(File(Configuration.CurrentConfiguration.articleDir)).replace('\\', '/')
-
- val pageTemplate = page.readText()
- val pageMarkup = PreParser.preparse(pagePath, pageTemplate)
-
- val pageToC = TableOfContentsBuilder()
- pageMarkup.parseAs(pageToC, ParserTree::buildToC)
- val pageOg = pageToC.toOpenGraph()
-
- val imageEnclosure = pageOg?.image?.let { url ->
- val assetPath = url.removePrefix("https://mechyrdia.info/assets/")
- val file = File(Configuration.CurrentConfiguration.assetDir).combineSafe(assetPath)
- RssItemEnclosure(
- url = url,
- length = file.length(),
- type = ContentType.defaultForFile(file).toString()
- )
- }
-
- RssItem(
- title = pageToC.toPageTitle(),
- description = pageOg?.desc,
- link = "https://mechyrdia.info/lore/$pagePath",
- author = null,
- comments = "https://mechyrdia.info/lore/$pagePath#comments",
- enclosure = imageEnclosure,
- pubDate = Instant.ofEpochMilli(page.lastModified())
- )
+ items = coroutineScope {
+ pages.map { page ->
+ async {
+ val pageMarkup = FactbookLoader.loadFactbook(page.path.elements.drop(1)) ?: return@async null
+
+ val pageToC = TableOfContentsBuilder()
+ pageMarkup.buildToC(pageToC)
+ val pageOg = pageToC.toOpenGraph()
+
+ val imageEnclosure = pageOg?.image?.let { url ->
+ val assetPath = url.removePrefix("https://mechyrdia.info/assets/")
+ val file = StoragePath.assetDir / assetPath
+ RssItemEnclosure(
+ url = url,
+ length = FileStorage.instance.statFile(file)?.size ?: 0L,
+							type = ContentType.defaultForFileExtension(assetPath.substringAfterLast('/').substringAfterLast('.', "")).toString()
+ )
+ }
+
+ RssItem(
+ title = pageToC.toPageTitle(),
+ description = pageOg?.desc,
+						link = "https://mechyrdia.info/lore/${page.path.elements.drop(1).joinToString("/")}",
+						author = null,
+						comments = "https://mechyrdia.info/lore/${page.path.elements.drop(1).joinToString("/")}#comments",
+ enclosure = imageEnclosure,
+ pubDate = page.stat.updated
+ )
+ }
+ }.awaitAll().filterNotNull()
}
).toXml(this)
}
-suspend fun ApplicationCall.recentCommentsRssFeedGenerator(): Appendable.() -> Unit {
+suspend fun ApplicationCall.recentCommentsRssFeedGenerator(limit: Int): Appendable.() -> Unit {
val currNation = currentNation()
- val limit = request.queryParameters["limit"]?.toIntOrNull() ?: 10
-
val validLimits = 1..100
if (limit !in validLimits)
package info.mechyrdia.route
-import info.mechyrdia.Configuration
import info.mechyrdia.auth.loginPage
import info.mechyrdia.auth.loginRoute
import info.mechyrdia.auth.logoutRoute
import io.ktor.util.pipeline.*
import kotlinx.coroutines.Dispatchers
import kotlinx.coroutines.delay
-import kotlinx.coroutines.runInterruptible
-import java.io.File
+import kotlinx.coroutines.withContext
val ErrorMessageAttributeKey = AttributeKey<String>("ErrorMessage")
override suspend fun PipelineContext<Unit, ApplicationCall>.handleCall() {
with(root) { filterCall() }
- val assetPath = path.joinToString(separator = File.separator)
- val assetFile = File(Configuration.CurrentConfiguration.assetDir).combineSafe(assetPath)
-
- call.respondAsset(assetFile)
+ call.respondAsset(StoragePath.assetDir / path)
}
}
override suspend fun PipelineContext<Unit, ApplicationCall>.handleCall() {
with(root) { filterCall() }
- call.respondFile(call.galaxyMapPage())
+ call.respondStoredFile(call.galaxyMapPage())
}
}
override suspend fun PipelineContext<Unit, ApplicationCall>.handleCall() {
with(root) { filterCall() }
- call.respondFile(File(Configuration.CurrentConfiguration.rootDir).combineSafe("robots.txt"))
+ call.respondStoredFile(StoragePath.Root / "robots.txt")
}
}
}
@Resource("comments.rss")
- class RecentCommentsRss(val root: Root = Root()) : ResourceHandler {
+ class RecentCommentsRss(val limit: Int?, val root: Root = Root()) : ResourceHandler {
override suspend fun PipelineContext<Unit, ApplicationCall>.handleCall() {
with(root) { filterCall() }
- call.respondText(buildString(call.recentCommentsRssFeedGenerator()), ContentType.Application.Rss)
+ call.respondText(buildString(call.recentCommentsRssFeedGenerator(limit ?: 10)), ContentType.Application.Rss)
}
}
override suspend fun PipelineContext<Unit, ApplicationCall>.handleCall(payload: MechyrdiaSansPayload) {
with(utils) { filterCall() }
- call.respondText(runInterruptible(Dispatchers.Default) {
+ call.respondText(withContext(Dispatchers.Default) {
MechyrdiaSansFont.renderTextToSvg(payload.lines.joinToString(separator = "\n") { it.trim() }, payload.bold, payload.italic, payload.align)
}, ContentType.Image.SVG)
}