diff --git a/eco-api/src/main/java/com/willfp/eco/core/data/handlers/DataTypeSerializer.java b/eco-api/src/main/java/com/willfp/eco/core/data/handlers/DataTypeSerializer.java new file mode 100644 index 000000000..840f5e2ec --- /dev/null +++ b/eco-api/src/main/java/com/willfp/eco/core/data/handlers/DataTypeSerializer.java @@ -0,0 +1,43 @@ +package com.willfp.eco.core.data.handlers; + +import com.willfp.eco.core.data.keys.PersistentDataKey; +import com.willfp.eco.core.data.keys.PersistentDataKeyType; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Nullable; + +import java.util.UUID; + +/** + * Handles data read/write for a {@link com.willfp.eco.core.data.keys.PersistentDataKeyType} for a specific + * data handler. + */ +public abstract class DataTypeSerializer { + /** + * Create a new data type serializer. + */ + protected DataTypeSerializer() { + + } + + /** + * Read a value. + * + * @param uuid The uuid. + * @param key The key. + * @return The value. + */ + @Nullable + public abstract T readAsync(@NotNull final UUID uuid, + @NotNull final PersistentDataKey key); + + /** + * Write a value. + * + * @param uuid The uuid. + * @param key The key. + * @param value The value. + */ + public abstract void writeAsync(@NotNull final UUID uuid, + @NotNull final PersistentDataKey key, + @NotNull final T value); +} diff --git a/eco-api/src/main/java/com/willfp/eco/core/data/handlers/PersistentDataHandler.java b/eco-api/src/main/java/com/willfp/eco/core/data/handlers/PersistentDataHandler.java index 1f782f058..a74d156b0 100644 --- a/eco-api/src/main/java/com/willfp/eco/core/data/handlers/PersistentDataHandler.java +++ b/eco-api/src/main/java/com/willfp/eco/core/data/handlers/PersistentDataHandler.java @@ -5,8 +5,16 @@ import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; import java.util.Set; import java.util.UUID; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; +import java.util.concurrent.TimeUnit; public abstract class PersistentDataHandler implements Registrable { /** @@ -14,6 +22,11 @@ public abstract class PersistentDataHandler implements Registrable { */ private final String id; + /** + * The executor. + */ + private final ExecutorService executor = Executors.newCachedThreadPool(); + /** * Create a new persistent data handler. * @@ -23,9 +36,38 @@ protected PersistentDataHandler(@NotNull final String id) { this.id = id; } - @Override - public @NotNull String getID() { - return id; + /** + * Get all UUIDs with saved data. + * + * @return All saved UUIDs. + */ + protected abstract Set getSavedUUIDs(); + + /** + * Save to disk. + *
+ * If the write method already commits to disk, this method does not need to be overridden. + *
+ * This method is called asynchronously. + */ + protected void doSave() { + // Save to disk + } + + /** + * If the handler should autosave. + * + * @return If the handler should autosave. + */ + public boolean shouldAutosave() { + return true; + } + + /** + * Save the data. + */ + public final void save() { + executor.submit(this::doSave); } /** @@ -37,7 +79,18 @@ protected PersistentDataHandler(@NotNull final String id) { * @return The value, or null if not found. */ @Nullable - public abstract T read(@NotNull UUID uuid, @NotNull PersistentDataKey key); + public final T read(@NotNull final UUID uuid, + @NotNull final PersistentDataKey key) { + DataTypeSerializer serializer = key.getType().getSerializer(this); + Future future = executor.submit(() -> serializer.readAsync(uuid, key)); + + try { + return future.get(); + } catch (InterruptedException | ExecutionException e) { + e.printStackTrace(); + return null; + } + } /** * Write a key to persistent data. @@ -47,7 +100,12 @@ protected PersistentDataHandler(@NotNull final String id) { * @param value The value. * @param The type of the key. */ - public abstract void write(@NotNull UUID uuid, @NotNull PersistentDataKey key, @NotNull T value); + public final void write(@NotNull final UUID uuid, + @NotNull final PersistentDataKey key, + @NotNull final T value) { + DataTypeSerializer serializer = key.getType().getSerializer(this); + executor.submit(() -> serializer.writeAsync(uuid, key, value)); + } /** * Serialize data. @@ -56,12 +114,74 @@ protected PersistentDataHandler(@NotNull final String id) { * @return The serialized data. */ @NotNull - public abstract Set serializeData(@NotNull final Set> keys); + public final Set serializeData(@NotNull final Set> keys) { + Set profiles = new HashSet<>(); + + for (UUID uuid : getSavedUUIDs()) { + Map, Object> data = new HashMap<>(); + + for (PersistentDataKey key : keys) { + Object value = read(uuid, key); + data.put(key, value); + } + + profiles.add(new SerializedProfile(uuid, data)); + } + + return profiles; + } /** * Load profile data. * * @param data The data. */ - public abstract void loadProfileData(@NotNull Set data); + @SuppressWarnings("unchecked") + public final void loadProfileData(@NotNull Set data) { + for (SerializedProfile profile : data) { + for (Map.Entry, Object> entry : profile.data().entrySet()) { + PersistentDataKey key = entry.getKey(); + Object value = entry.getValue(); + + // This cast is safe because the data is serialized + write(profile.uuid(), (PersistentDataKey) key, value); + } + } + } + + /** + * Await outstanding writes. 
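+ * @throws InterruptedException If interrupted, or if the outstanding writes did not complete within 15 seconds.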
+ */ + public final void awaitOutstandingWrites() throws InterruptedException { + boolean success = executor.awaitTermination(15, TimeUnit.SECONDS); + + if (!success) { + throw new InterruptedException("Failed to await outstanding writes"); + } + } + + @Override + public final @NotNull String getID() { + return id; + } + + @Override + public final boolean equals(@Nullable final Object obj) { + if (this == obj) { + return true; + } + + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + PersistentDataHandler that = (PersistentDataHandler) obj; + + return id.equals(that.id); + } + + @Override + public final int hashCode() { + return id.hashCode(); + } } diff --git a/eco-api/src/main/java/com/willfp/eco/core/data/keys/PersistentDataKeyType.java b/eco-api/src/main/java/com/willfp/eco/core/data/keys/PersistentDataKeyType.java index 97bb24227..d8cd4c4e9 100644 --- a/eco-api/src/main/java/com/willfp/eco/core/data/keys/PersistentDataKeyType.java +++ b/eco-api/src/main/java/com/willfp/eco/core/data/keys/PersistentDataKeyType.java @@ -1,12 +1,16 @@ package com.willfp.eco.core.data.keys; import com.willfp.eco.core.config.interfaces.Config; +import com.willfp.eco.core.data.handlers.DataTypeSerializer; +import com.willfp.eco.core.data.handlers.PersistentDataHandler; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.math.BigDecimal; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; import java.util.Objects; /** @@ -60,24 +64,58 @@ public final class PersistentDataKeyType { */ private final String name; + /** + * The serializers for this key type. + */ + private final Map> serializers = new HashMap<>(); + + /** + * Create new PersistentDataKeyType. + * + * @param name The name. + */ + private PersistentDataKeyType(@NotNull final String name) { + VALUES.add(this); + + this.name = name; + } + /** * Get the name of the key type. * * @return The name. */ + @NotNull public String name() { return name; } /** - * Create new PersistentDataKeyType. + * Register a serializer for this key type. * - * @param name The name. + * @param handler The handler. + * @param serializer The serializer. */ - private PersistentDataKeyType(@NotNull final String name) { - VALUES.add(this); + public void registerSerializer(@NotNull final PersistentDataHandler handler, + @NotNull final DataTypeSerializer serializer) { + this.serializers.put(handler, serializer); + } - this.name = name; + /** + * Get the serializer for a handler. + * + * @param handler The handler. + * @return The serializer. 
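+ * @throws IllegalArgumentException If no serializer is registered for the handler.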
+ */ + @NotNull + public DataTypeSerializer getSerializer(@NotNull final PersistentDataHandler handler) { + DataTypeSerializer serializer = this.serializers.get(handler); + + if (serializer == null) { + throw new IllegalArgumentException("No serializer for handler: " + handler); + } + + return serializer; } @Override diff --git a/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/LegacyMySQLPersistentDataHandler.kt b/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/LegacyMySQLPersistentDataHandler.kt new file mode 100644 index 000000000..0672c8b1b --- /dev/null +++ b/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/LegacyMySQLPersistentDataHandler.kt @@ -0,0 +1,113 @@ +package com.willfp.eco.internal.spigot.data.handlers + +import com.willfp.eco.core.config.ConfigType +import com.willfp.eco.core.config.Configs +import com.willfp.eco.core.config.interfaces.Config +import com.willfp.eco.core.config.readConfig +import com.willfp.eco.core.data.handlers.DataTypeSerializer +import com.willfp.eco.core.data.handlers.PersistentDataHandler +import com.willfp.eco.core.data.keys.PersistentDataKey +import com.willfp.eco.core.data.keys.PersistentDataKeyType +import com.willfp.eco.internal.spigot.EcoSpigotPlugin +import com.zaxxer.hikari.HikariConfig +import com.zaxxer.hikari.HikariDataSource +import eu.decentsoftware.holograms.api.utils.scheduler.S +import kotlinx.serialization.Contextual +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable +import org.jetbrains.exposed.dao.id.UUIDTable +import org.jetbrains.exposed.sql.Column +import org.jetbrains.exposed.sql.Database +import org.jetbrains.exposed.sql.SchemaUtils +import org.jetbrains.exposed.sql.Table +import org.jetbrains.exposed.sql.Table.Dual.decimal +import org.jetbrains.exposed.sql.Table.Dual.double +import org.jetbrains.exposed.sql.Table.Dual.varchar +import org.jetbrains.exposed.sql.and +import org.jetbrains.exposed.sql.deleteWhere +import org.jetbrains.exposed.sql.insert +import org.jetbrains.exposed.sql.select +import org.jetbrains.exposed.sql.selectAll +import org.jetbrains.exposed.sql.transactions.transaction +import java.math.BigDecimal +import java.util.UUID + +class LegacyMySQLPersistentDataHandler( + plugin: EcoSpigotPlugin, + config: Config +) : PersistentDataHandler("mysql_legacy") { + private val dataSource = HikariDataSource(HikariConfig().apply { + driverClassName = "com.mysql.cj.jdbc.Driver" + username = config.getString("user") + password = config.getString("password") + jdbcUrl = "jdbc:mysql://" + + "${config.getString("host")}:" + + "${config.getString("port")}/" + + config.getString("database") + maximumPoolSize = config.getInt("connections") + }) + + private val database = Database.connect(dataSource) + + private val table = object : UUIDTable("eco_data") { + val data = text("json_data") + } + + init { + transaction(database) { + SchemaUtils.create(table) + } + + PersistentDataKeyType.STRING.registerSerializer(this, LegacySerializer()) + PersistentDataKeyType.BOOLEAN.registerSerializer(this, LegacySerializer()) + PersistentDataKeyType.INT.registerSerializer(this, LegacySerializer()) + PersistentDataKeyType.DOUBLE.registerSerializer(this, LegacySerializer()) + PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, LegacySerializer()) + PersistentDataKeyType.CONFIG.registerSerializer(this, LegacySerializer()) + PersistentDataKeyType.STRING_LIST.registerSerializer(this, LegacySerializer>()) + } + + 
override fun getSavedUUIDs(): Set { + return transaction(database) { + table.selectAll() + .map { it[table.id] } + .toSet() + }.map { it.value }.toSet() + } + + private inner class LegacySerializer : DataTypeSerializer() { + override fun readAsync(uuid: UUID, key: PersistentDataKey): T? { + val json = transaction(database) { + table.select { table.id eq uuid } + .limit(1) + .singleOrNull() + ?.get(table.data) + } + + if (json == null) { + return null + } + + val data = readConfig(json, ConfigType.JSON) + + val value: Any? = when (key.type) { + PersistentDataKeyType.INT -> data.getIntOrNull(key.key.toString()) + PersistentDataKeyType.DOUBLE -> data.getDoubleOrNull(key.key.toString()) + PersistentDataKeyType.STRING -> data.getStringOrNull(key.key.toString()) + PersistentDataKeyType.BOOLEAN -> data.getBoolOrNull(key.key.toString()) + PersistentDataKeyType.STRING_LIST -> data.getStringsOrNull(key.key.toString()) + PersistentDataKeyType.CONFIG -> data.getSubsectionOrNull(key.key.toString()) + PersistentDataKeyType.BIG_DECIMAL -> data.getBigDecimalOrNull(key.key.toString()) + + else -> null + } + + @Suppress("UNCHECKED_CAST") + return value as? T? + } + + override fun writeAsync(uuid: UUID, key: PersistentDataKey, value: T) { + throw UnsupportedOperationException("Legacy MySQL does not support writing") + } + } +} diff --git a/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/MongoPersistentDataHandler.kt b/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/MongoPersistentDataHandler.kt index 58d33d9e0..ba69d1e6a 100644 --- a/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/MongoPersistentDataHandler.kt +++ b/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/MongoPersistentDataHandler.kt @@ -1,107 +1,112 @@ package com.willfp.eco.internal.spigot.data.handlers +import com.mongodb.client.model.Filters +import com.mongodb.client.model.ReplaceOptions +import com.mongodb.kotlin.client.coroutine.MongoClient +import com.willfp.eco.core.config.Configs import com.willfp.eco.core.config.interfaces.Config +import com.willfp.eco.core.data.handlers.DataTypeSerializer import com.willfp.eco.core.data.handlers.PersistentDataHandler -import com.willfp.eco.core.data.handlers.SerializedProfile import com.willfp.eco.core.data.keys.PersistentDataKey import com.willfp.eco.core.data.keys.PersistentDataKeyType import com.willfp.eco.internal.spigot.EcoSpigotPlugin -import java.util.UUID -import java.util.concurrent.Executors -import com.mongodb.kotlin.client.coroutine.MongoClient -import com.willfp.eco.core.config.Configs -import com.willfp.eco.internal.spigot.data.storage.UUIDProfile import kotlinx.coroutines.flow.firstOrNull -import kotlinx.coroutines.flow.forEach +import kotlinx.coroutines.flow.toList import kotlinx.coroutines.runBlocking import kotlinx.serialization.Contextual import kotlinx.serialization.SerialName import kotlinx.serialization.Serializable -import org.bson.Document +import java.math.BigDecimal +import java.util.UUID class MongoPersistentDataHandler( - config: Config, - plugin: EcoSpigotPlugin -) : PersistentDataHandler("yaml") { - - private val url: String = config.getString("url") ?: error("MongoDB URL not found in config") - private val databaseName: String = config.getString("database") ?: error("Database name not found in config") - private val client = MongoClient.create(url) - private val database = client.getDatabase(databaseName) + plugin: EcoSpigotPlugin, + 
config: Config +) : PersistentDataHandler("mongo") { + private val client = MongoClient.create(config.getString("url")) + private val database = client.getDatabase(config.getString("database")) + + // Collection name is set for backwards compatibility private val collection = database.getCollection("uuidprofile") - private val executor = Executors.newCachedThreadPool() - override fun read(uuid: UUID, key: PersistentDataKey): T? { - return runBlocking { - doRead(uuid, key) - } - } + init { + PersistentDataKeyType.STRING.registerSerializer(this, MongoSerializer()) + PersistentDataKeyType.BOOLEAN.registerSerializer(this, MongoSerializer()) + PersistentDataKeyType.INT.registerSerializer(this, MongoSerializer()) + PersistentDataKeyType.DOUBLE.registerSerializer(this, MongoSerializer()) + PersistentDataKeyType.STRING_LIST.registerSerializer(this, MongoSerializer>()) - private suspend fun doRead(uuid: UUID, key: PersistentDataKey): T? { - val document = collection.find(Document("uuid", uuid.toString())).firstOrNull() ?: return null - val data = document.data[key.key.toString()] as? T + PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : MongoSerializer() { + override fun convertToMongo(value: BigDecimal): Any { + return value.toString() + } - return data - } + override fun convertFromMongo(value: Any): BigDecimal { + return BigDecimal(value.toString()) + } + }) - override fun write(uuid: UUID, key: PersistentDataKey, value: T) { - executor.submit { - runBlocking { - doWrite(uuid, key, value) + PersistentDataKeyType.CONFIG.registerSerializer(this, object : MongoSerializer() { + override fun convertToMongo(value: Config): Any { + return value.toMap() + } + + @Suppress("UNCHECKED_CAST") + override fun convertFromMongo(value: Any): Config { + return Configs.fromMap(value as Map) } + }) + } + + override fun getSavedUUIDs(): Set { + return runBlocking { + collection.find().toList().map { UUID.fromString(it.uuid) }.toSet() } } - private suspend fun doWrite(uuid: UUID, key: PersistentDataKey, value: T) { - val document = collection.find(Document("uuid", uuid.toString())).firstOrNull() ?: return null - document.data[key.key.toString()] = value + private open inner class MongoSerializer : DataTypeSerializer() { + override fun readAsync(uuid: UUID, key: PersistentDataKey): T? 
{ + return runBlocking { + val profile = collection.find(Filters.eq("uuid", uuid.toString())).firstOrNull() + ?: return@runBlocking null - collection.replaceOne(Document("uuid", uuid.toString()), document) - } + val value = profile.data[key.key.toString()] + ?: return@runBlocking null - override fun serializeData(keys: Set>): Set { - val profiles = mutableSetOf() - - collection.find().forEach { document -> - val uuid = UUID.fromString(document.getString("uuid")) - val data = document.get("data") as Document - val profileData = keys.associateWith { key -> - when (key.type) { - PersistentDataKeyType.STRING -> data.getString(key.key.key) - PersistentDataKeyType.BOOLEAN -> data.getBoolean(key.key.key) - PersistentDataKeyType.INT -> data.getInteger(key.key.key) - PersistentDataKeyType.DOUBLE -> data.getDouble(key.key.key) - PersistentDataKeyType.STRING_LIST -> data.getList(key.key.key, String::class.java) - PersistentDataKeyType.BIG_DECIMAL -> data.getDecimal128(key.key.key)?.bigDecimalValue() - PersistentDataKeyType.CONFIG -> data.get(key.key.key) - else -> null - } ?: key.defaultValue + convertFromMongo(value) } + } - profiles.add(SerializedProfile(uuid, profileData as Map, Any>)) + protected open fun convertToMongo(value: T): Any { + return value } - return profiles - } + override fun writeAsync(uuid: UUID, key: PersistentDataKey, value: T) { + runBlocking { + val profile = collection.find(Filters.eq("uuid", uuid.toString())).firstOrNull() + ?: UUIDProfile(uuid.toString(), mutableMapOf()) - override fun loadProfileData(data: Set) { - data.forEach { profile -> - val document = Document("uuid", profile.uuid.toString()) - val profileData = Document() + profile.data[key.key.toString()] = convertToMongo(value) - profile.data.forEach { (key, value) -> - profileData.put(key.key.key, value) + collection.replaceOne( + Filters.eq("uuid", uuid.toString()), + profile, + ReplaceOptions().upsert(true) + ) } + } - document.put("data", profileData) - collection.replaceOne(Document("uuid", profile.uuid.toString()), document, com.mongodb.client.model.ReplaceOptions().upsert(true)) + protected open fun convertFromMongo(value: Any): T { + @Suppress("UNCHECKED_CAST") + return value as T } } @Serializable - internal data class UUIDProfile( + private data class UUIDProfile( // Storing UUID as strings for serialization @SerialName("_id") val uuid: String, + // Storing NamespacedKeys as strings for serialization val data: MutableMap ) diff --git a/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/MySQLPersistentDataHandler.kt b/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/MySQLPersistentDataHandler.kt new file mode 100644 index 000000000..10f1edd37 --- /dev/null +++ b/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/MySQLPersistentDataHandler.kt @@ -0,0 +1,235 @@ +package com.willfp.eco.internal.spigot.data.handlers + +import com.willfp.eco.core.config.ConfigType +import com.willfp.eco.core.config.Configs +import com.willfp.eco.core.config.interfaces.Config +import com.willfp.eco.core.config.readConfig +import com.willfp.eco.core.data.handlers.DataTypeSerializer +import com.willfp.eco.core.data.handlers.PersistentDataHandler +import com.willfp.eco.core.data.keys.PersistentDataKey +import com.willfp.eco.core.data.keys.PersistentDataKeyType +import com.willfp.eco.internal.spigot.EcoSpigotPlugin +import com.zaxxer.hikari.HikariConfig +import com.zaxxer.hikari.HikariDataSource +import 
eu.decentsoftware.holograms.api.utils.scheduler.S +import kotlinx.coroutines.flow.toList +import kotlinx.coroutines.runBlocking +import kotlinx.serialization.Contextual +import kotlinx.serialization.SerialName +import kotlinx.serialization.Serializable +import org.jetbrains.exposed.dao.id.UUIDTable +import org.jetbrains.exposed.sql.Column +import org.jetbrains.exposed.sql.Database +import org.jetbrains.exposed.sql.SchemaUtils +import org.jetbrains.exposed.sql.SqlExpressionBuilder.eq +import org.jetbrains.exposed.sql.Table +import org.jetbrains.exposed.sql.TextColumnType +import org.jetbrains.exposed.sql.and +import org.jetbrains.exposed.sql.deleteWhere +import org.jetbrains.exposed.sql.insert +import org.jetbrains.exposed.sql.select +import org.jetbrains.exposed.sql.selectAll +import org.jetbrains.exposed.sql.transactions.transaction +import java.math.BigDecimal +import java.util.UUID + +class MySQLPersistentDataHandler( + plugin: EcoSpigotPlugin, + config: Config +) : PersistentDataHandler("mysql") { + private val dataSource = HikariDataSource(HikariConfig().apply { + driverClassName = "com.mysql.cj.jdbc.Driver" + username = config.getString("user") + password = config.getString("password") + jdbcUrl = "jdbc:mysql://" + + "${config.getString("host")}:" + + "${config.getString("port")}/" + + config.getString("database") + maximumPoolSize = config.getInt("connections") + }) + + private val prefix = config.getString("prefix") + + private val database = Database.connect(dataSource) + + init { + PersistentDataKeyType.STRING.registerSerializer(this, object : DirectStoreSerializer() { + override val table = object : KeyTable("string") { + override val value = varchar("value", 128) + } + }) + + PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : DirectStoreSerializer() { + override val table = object : KeyTable("boolean") { + override val value = bool("value") + } + }) + + PersistentDataKeyType.INT.registerSerializer(this, object : DirectStoreSerializer() { + override val table = object : KeyTable("int") { + override val value = integer("value") + } + }) + + PersistentDataKeyType.DOUBLE.registerSerializer(this, object : DirectStoreSerializer() { + override val table = object : KeyTable("double") { + override val value = double("value") + } + }) + + PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : DirectStoreSerializer() { + override val table = object : KeyTable("big_decimal") { + // 34 digits of precision, 4 digits of scale + override val value = decimal("value", 34, 4) + } + }) + + PersistentDataKeyType.CONFIG.registerSerializer(this, object : SingleValueSerializer() { + override val table = object : KeyTable("config") { + override val value = text("value") + } + + override fun convertFromStored(value: String): Config { + return readConfig(value, ConfigType.JSON) + } + + override fun convertToStored(value: Config): String { + // Store config as JSON + return if (value.type == ConfigType.JSON) { + value.toPlaintext() + } else { + Configs.fromMap(value.toMap(), ConfigType.JSON).toPlaintext() + } + } + }) + + PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : MultiValueSerializer() { + override val table = object : ListKeyTable("string_list") { + override val value = varchar("value", 128) + } + }) + } + + override fun getSavedUUIDs(): Set { + val savedUUIDs = mutableSetOf() + + for (keyType in PersistentDataKeyType.values()) { + val serializer = keyType.getSerializer(this) as MySQLSerializer<*> + savedUUIDs.addAll(serializer.getSavedUUIDs()) + } + + 
return savedUUIDs + } + + private abstract inner class MySQLSerializer : DataTypeSerializer() { + protected abstract val table: ProfileTable + + init { + transaction(database) { + SchemaUtils.create(table) + } + } + + fun getSavedUUIDs(): Set { + return transaction(database) { + table.selectAll().map { it[table.uuid] }.toSet() + } + } + } + + // T is the key type + // S is the stored value type + private abstract inner class SingleValueSerializer : MySQLSerializer() { + abstract override val table: KeyTable + + abstract fun convertToStored(value: T): S + abstract fun convertFromStored(value: S): T + + override fun readAsync(uuid: UUID, key: PersistentDataKey): T? { + val stored = transaction(database) { + table.select { (table.uuid eq uuid) and (table.key eq key.key.toString()) } + .limit(1) + .singleOrNull() + ?.get(table.value) + } + + return stored?.let { convertFromStored(it) } + } + + override fun writeAsync(uuid: UUID, key: PersistentDataKey, value: T) { + transaction(database) { + table.insert { + it[table.uuid] = uuid + it[table.key] = key.key.toString() + it[table.value] = convertToStored(value) + } + } + } + } + + private abstract inner class DirectStoreSerializer : SingleValueSerializer() { + override fun convertToStored(value: T): T { + return value + } + + override fun convertFromStored(value: T): T { + return value + } + } + + private abstract inner class MultiValueSerializer : MySQLSerializer>() { + abstract override val table: ListKeyTable + + override fun readAsync(uuid: UUID, key: PersistentDataKey>): List? { + val stored = transaction(database) { + table.select { (table.uuid eq uuid) and (table.key eq key.key.toString()) } + .orderBy(table.index) + .map { it[table.value] } + } + + return stored + } + + override fun writeAsync(uuid: UUID, key: PersistentDataKey>, value: List) { + transaction(database) { + table.deleteWhere { (table.uuid eq uuid) and (table.key eq key.key.toString()) } + + value.forEachIndexed { index, t -> + table.insert { + it[table.uuid] = uuid + it[table.key] = key.key.toString() + it[table.index] = index + it[table.value] = t + } + } + } + } + } + + private abstract inner class ProfileTable(name: String) : Table(prefix + name) { + val uuid = uuid("uuid") + } + + private abstract inner class KeyTable(name: String) : ProfileTable(name) { + val key = varchar("key", 128) + abstract val value: Column + + override val primaryKey = PrimaryKey(uuid, key) + + init { + uniqueIndex() + } + } + + private abstract inner class ListKeyTable(name: String) : ProfileTable(name) { + val key = varchar("key", 128) + val index = integer("index") + abstract val value: Column + + override val primaryKey = PrimaryKey(uuid, key, index) + + init { + uniqueIndex() + } + } +} diff --git a/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/YamlPersistentDataHandler.kt b/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/YamlPersistentDataHandler.kt index 7b10d4bc1..4a1b3e1f4 100644 --- a/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/YamlPersistentDataHandler.kt +++ b/eco-core/core-plugin/src/main/kotlin/com/willfp/eco/internal/spigot/data/handlers/YamlPersistentDataHandler.kt @@ -1,10 +1,13 @@ package com.willfp.eco.internal.spigot.data.handlers +import com.willfp.eco.core.config.interfaces.Config +import com.willfp.eco.core.data.handlers.DataTypeSerializer import com.willfp.eco.core.data.handlers.PersistentDataHandler import com.willfp.eco.core.data.handlers.SerializedProfile import 
com.willfp.eco.core.data.keys.PersistentDataKey import com.willfp.eco.core.data.keys.PersistentDataKeyType import com.willfp.eco.internal.spigot.EcoSpigotPlugin +import java.math.BigDecimal import java.util.UUID class YamlPersistentDataHandler( @@ -12,52 +15,59 @@ class YamlPersistentDataHandler( ) : PersistentDataHandler("yaml") { private val dataYml = plugin.dataYml - @Suppress("UNCHECKED_CAST") - override fun read(uuid: UUID, key: PersistentDataKey): T? { - // Separate `as T?` for each branch to prevent compiler warnings. - val value = when (key.type) { - PersistentDataKeyType.INT -> dataYml.getIntOrNull("player.$uuid.${key.key}") as T? - PersistentDataKeyType.DOUBLE -> dataYml.getDoubleOrNull("player.$uuid.${key.key}") as T? - PersistentDataKeyType.STRING -> dataYml.getStringOrNull("player.$uuid.${key.key}") as T? - PersistentDataKeyType.BOOLEAN -> dataYml.getBoolOrNull("player.$uuid.${key.key}") as T? - PersistentDataKeyType.STRING_LIST -> dataYml.getStringsOrNull("player.$uuid.${key.key}") as T? - PersistentDataKeyType.CONFIG -> dataYml.getSubsectionOrNull("player.$uuid.${key.key}") as T? - PersistentDataKeyType.BIG_DECIMAL -> dataYml.getBigDecimalOrNull("player.$uuid.${key.key}") as T? - - else -> null - } + init { + PersistentDataKeyType.STRING.registerSerializer(this, object : YamlSerializer() { + override fun read(config: Config, key: String) = config.getStringOrNull(key) + }) + + PersistentDataKeyType.BOOLEAN.registerSerializer(this, object : YamlSerializer() { + override fun read(config: Config, key: String) = config.getBoolOrNull(key) + }) + + PersistentDataKeyType.INT.registerSerializer(this, object : YamlSerializer() { + override fun read(config: Config, key: String) = config.getIntOrNull(key) + }) + + PersistentDataKeyType.DOUBLE.registerSerializer(this, object : YamlSerializer() { + override fun read(config: Config, key: String) = config.getDoubleOrNull(key) + }) + + PersistentDataKeyType.STRING_LIST.registerSerializer(this, object : YamlSerializer>() { + override fun read(config: Config, key: String) = config.getStringsOrNull(key) + }) - return value + PersistentDataKeyType.CONFIG.registerSerializer(this, object : YamlSerializer() { + override fun read(config: Config, key: String) = config.getSubsectionOrNull(key) + }) + + PersistentDataKeyType.BIG_DECIMAL.registerSerializer(this, object : YamlSerializer() { + override fun read(config: Config, key: String) = config.getBigDecimalOrNull(key) + }) } - override fun write(uuid: UUID, key: PersistentDataKey, value: T) { - dataYml.set("player.$uuid.$key", value) + override fun getSavedUUIDs(): Set { + return dataYml.getSubsection("player").getKeys(false) + .map { UUID.fromString(it) } + .toSet() } - override fun serializeData(keys: Set>): Set { - val profiles = mutableSetOf() - val uuids = dataYml.getSubsection("player").getKeys(false).map { UUID.fromString(it) } + override fun shouldAutosave(): Boolean { + return true + } - for (uuid in uuids) { - val data = mutableMapOf, Any>() + override fun doSave() { + dataYml.save() + } - for (key in keys) { - data[key] = read(uuid, key) ?: continue - } + private abstract inner class YamlSerializer: DataTypeSerializer() { + protected abstract fun read(config: Config, key: String): T? - profiles.add(SerializedProfile(uuid, data)) + final override fun readAsync(uuid: UUID, key: PersistentDataKey): T? 
{ + return read(dataYml, "player.$uuid.${key.key}") } - return profiles - } - - override fun loadProfileData(data: Set) { - for (profile in data) { - for ((key, value) in profile.data) { - // Dirty cast, but it's fine because we know it's the same type - @Suppress("UNCHECKED_CAST") - write(profile.uuid, key as PersistentDataKey, value as Any) - } + final override fun writeAsync(uuid: UUID, key: PersistentDataKey, value: T) { + dataYml.set("player.$uuid.${key.key}", value) } } } diff --git a/eco-core/core-plugin/src/main/resources/config.yml b/eco-core/core-plugin/src/main/resources/config.yml index 92feac624..206a827a1 100644 --- a/eco-core/core-plugin/src/main/resources/config.yml +++ b/eco-core/core-plugin/src/main/resources/config.yml @@ -20,12 +20,13 @@ mongodb: database: "eco" mysql: - # How many threads to execute statements on. Higher numbers can be faster however - # very high numbers can cause issues with OS configuration. If writes are taking - # too long, increase this value. - threads: 2 + # The table prefix to use for all tables. + prefix: "eco_" + # The maximum number of MySQL connections. connections: 10 + + # Connection details for MySQL. host: localhost port: 3306 database: database
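
Note (not part of the diff): the sketch below shows how a handler plugs into the new serializer API introduced above, assuming only the classes added in this change (PersistentDataHandler, DataTypeSerializer, PersistentDataKey, PersistentDataKeyType). The handler name InMemoryPersistentDataHandler, its id "in_memory", and the backing map are hypothetical and exist purely for illustration; the registration pattern mirrors YamlPersistentDataHandler. Because every writeAsync lands directly in the backing store, doSave() is left as the no-op default described in its javadoc.

import com.willfp.eco.core.data.handlers.DataTypeSerializer
import com.willfp.eco.core.data.handlers.PersistentDataHandler
import com.willfp.eco.core.data.keys.PersistentDataKey
import com.willfp.eco.core.data.keys.PersistentDataKeyType
import java.util.UUID
import java.util.concurrent.ConcurrentHashMap

// Hypothetical handler: keeps all data in memory, purely to show how a handler
// registers one DataTypeSerializer per PersistentDataKeyType in its constructor.
class InMemoryPersistentDataHandler : PersistentDataHandler("in_memory") {
    // uuid -> (key string -> value)
    private val storage = ConcurrentHashMap<UUID, MutableMap<String, Any>>()

    init {
        // read()/write() in PersistentDataHandler look these serializers up via
        // PersistentDataKeyType#getSerializer(handler) and run them on the executor.
        // BIG_DECIMAL and CONFIG serializers are omitted here for brevity.
        PersistentDataKeyType.STRING.registerSerializer(this, MapSerializer<String>())
        PersistentDataKeyType.BOOLEAN.registerSerializer(this, MapSerializer<Boolean>())
        PersistentDataKeyType.INT.registerSerializer(this, MapSerializer<Int>())
        PersistentDataKeyType.DOUBLE.registerSerializer(this, MapSerializer<Double>())
        PersistentDataKeyType.STRING_LIST.registerSerializer(this, MapSerializer<List<String>>())
    }

    override fun getSavedUUIDs(): Set<UUID> = storage.keys

    // doSave() is not overridden: writes commit to the backing store immediately.

    private inner class MapSerializer<T : Any> : DataTypeSerializer<T>() {
        override fun readAsync(uuid: UUID, key: PersistentDataKey<T>): T? {
            @Suppress("UNCHECKED_CAST")
            return storage[uuid]?.get(key.key.toString()) as? T
        }

        override fun writeAsync(uuid: UUID, key: PersistentDataKey<T>, value: T) {
            storage.getOrPut(uuid) { mutableMapOf() }[key.key.toString()] = value
        }
    }
}

Callers keep using PersistentDataHandler#read and #write as before; a handler now only decides where each key type's serializer stores its values, while serializeData and loadProfileData are implemented once in the base class on top of getSavedUUIDs and the registered serializers.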