Big changes to the database system

This is a modified version of the original commit b2180b0c73.

I changed/removed:
* project version bump (a lot of unrelated file changes; wrong version with regard to the breaking changes)
* Hacky changes to the NMS class
* Reverted changes to SSpawner (we want to keep the class variables final,
and there doesn't seem to be a big advantage in overwriting basically the whole class's content
when you could just create a new instance instead, as intended)
* Commit name (old name was `(changes will be in next commit)`)
ceze88 2023-06-11 12:54:39 +02:00 committed by Christian Koop
parent 61205da3b4
commit ee9d6016bf
18 changed files with 1185 additions and 395 deletions

View File

@ -83,6 +83,10 @@
<include>net.kyori:*</include>
<include>org.apache.commons:commons-lang3</include>
<include>org.apache.commons:commons-text</include>
<include>org.jooq:jooq</include>
<include>org.reactivestreams:reactive-streams</include>
<include>org.mariadb.jdbc:mariadb-java-client</include>
<include>com.h2database:h2</include>
</includes>
</artifactSet>
@ -106,6 +110,26 @@
<pattern>org.apache.commons</pattern>
<shadedPattern>com.craftaro.core.third_party.org.apache.commons</shadedPattern>
</relocation>
<relocation>
<pattern>org.jooq</pattern>
<shadedPattern>com.craftaro.core.third_party.org.jooq</shadedPattern>
</relocation>
<relocation>
<pattern>org.reactivestreams</pattern>
<shadedPattern>com.craftaro.core.third_party.org.reactivestreams</shadedPattern>
</relocation>
<relocation>
<pattern>org.mariadb.jdbc</pattern>
<shadedPattern>com.craftaro.core.third_party.org.mariadb.jdbc</shadedPattern>
</relocation>
<relocation>
<pattern>org.h2</pattern>
<shadedPattern>com.craftaro.core.third_party.org.h2</shadedPattern>
</relocation>
</relocations>
</configuration>
</execution>
@ -170,6 +194,7 @@
<scope>compile</scope>
</dependency>
<!-- DB Stuff Start -->
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>
@ -177,6 +202,23 @@
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.jooq</groupId>
<artifactId>jooq</artifactId>
<version>3.14.0</version>
</dependency>
<dependency>
<groupId>org.mariadb.jdbc</groupId>
<artifactId>mariadb-java-client</artifactId>
<version>3.0.8</version>
</dependency>
<dependency>
<groupId>com.h2database</groupId>
<artifactId>h2</artifactId>
<version>1.4.200</version>
</dependency>
<!-- DB Stuff End -->
<!-- Start Plugin Hooks -->
<dependency>
<groupId>com.gmail.filoghost.holographicdisplays</groupId>
@ -235,9 +277,9 @@
</dependency>
<dependency>
<groupId>com.songoda</groupId>
<artifactId>UltimateStacker</artifactId>
<version>2.4.0</version>
<groupId>com.craftaro</groupId>
<artifactId>UltimateStackerAPI</artifactId>
<version>1.0</version>
<scope>provided</scope>
</dependency>

View File

@ -1,12 +1,14 @@
package com.craftaro.core;
import com.craftaro.core.compatibility.CompatibleMaterial;
import com.craftaro.core.configuration.Config;
import com.craftaro.core.database.DataManager;
import com.craftaro.core.database.DataMigration;
import com.craftaro.core.database.DatabaseType;
import com.craftaro.core.locale.Locale;
import com.craftaro.core.utils.Metrics;
import com.craftaro.core.verification.CraftaroProductVerification;
import com.craftaro.core.verification.ProductVerificationStatus;
import com.craftaro.core.compatibility.CompatibleMaterial;
import com.craftaro.core.database.DataManagerAbstract;
import de.tr7zw.changeme.nbtapi.utils.MinecraftVersion;
import org.bukkit.Bukkit;
import org.bukkit.ChatColor;
@ -14,13 +16,17 @@ import org.bukkit.command.CommandSender;
import org.bukkit.configuration.file.FileConfiguration;
import org.bukkit.plugin.java.JavaPlugin;
import java.io.File;
import java.sql.Connection;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.logging.Level;
public abstract class SongodaPlugin extends JavaPlugin {
protected Locale locale;
protected Config config = new Config(this);
protected Config databaseConfig;
protected DataManager dataManager;
protected long dataLoadDelay = 20L;
private boolean licensePreventedPluginLoad = false;
@ -30,6 +36,9 @@ public abstract class SongodaPlugin extends JavaPlugin {
/* NBT-API */
MinecraftVersion.getLogger().setLevel(Level.WARNING);
MinecraftVersion.disableUpdateCheck();
// Disable tips and logo for Jooq
System.setProperty("org.jooq.no-tips", "true");
System.setProperty("org.jooq.no-logo", "true");
}
public abstract void onPluginLoad();
@ -167,6 +176,11 @@ public abstract class SongodaPlugin extends JavaPlugin {
ChatColor.RED, "Disabling", ChatColor.GRAY));
onPluginDisable();
try (Connection connection = this.dataManager.getDatabaseConnector().getConnection()) {
connection.close();
this.dataManager.getDatabaseConnector().closeConnection();
} catch (Exception ignored) {
}
console.sendMessage(ChatColor.GREEN + "=============================");
console.sendMessage(" "); // blank line to separate chatter
@ -199,43 +213,6 @@ public abstract class SongodaPlugin extends JavaPlugin {
return false;
}
protected void shutdownDataManager(DataManagerAbstract dataManager) {
// 3 minutes is overkill, but we just want to make sure
shutdownDataManager(dataManager, 15, TimeUnit.MINUTES.toSeconds(3));
}
protected void shutdownDataManager(DataManagerAbstract dataManager, int reportInterval, long secondsUntilForceShutdown) {
dataManager.shutdownTaskQueue();
while (!dataManager.isTaskQueueTerminated() && secondsUntilForceShutdown > 0) {
long secondsToWait = Math.min(reportInterval, secondsUntilForceShutdown);
try {
if (dataManager.waitForShutdown(secondsToWait, TimeUnit.SECONDS)) {
break;
}
getLogger().info(String.format("A DataManager is currently working on %d tasks... " +
"We are giving him another %d seconds until we forcefully shut him down " +
"(continuing to report in %d second intervals)",
dataManager.getTaskQueueSize(), secondsUntilForceShutdown, reportInterval));
} catch (InterruptedException ignore) {
} finally {
secondsUntilForceShutdown -= secondsToWait;
}
}
if (!dataManager.isTaskQueueTerminated()) {
int unfinishedTasks = dataManager.forceShutdownTaskQueue().size();
if (unfinishedTasks > 0) {
getLogger().log(Level.WARNING,
String.format("A DataManager has been forcefully terminated with %d unfinished tasks - " +
"This can be a serious problem, please report it to us (Craftaro / Songoda)!", unfinishedTasks));
}
}
}
protected void emergencyStop() {
this.emergencyStop = true;
@ -258,4 +235,85 @@ public abstract class SongodaPlugin extends JavaPlugin {
emergencyStop();
}
//New database stuff
public Config getDatabaseConfig() {
File databaseFile = new File(getDataFolder(), "database.yml");
if (!databaseFile.exists()) {
saveResource("database.yml", false);
}
if (this.databaseConfig == null) {
this.databaseConfig = new Config(databaseFile);
this.databaseConfig.load();
}
return this.databaseConfig;
}
/**
* Get the DataManager for this plugin.
* Note: Make sure to call initDatabase() in onPluginEnable() before using this.
*
* @return DataManager for this plugin.
*/
public DataManager getDataManager() {
return dataManager;
}
/**
* Initialize the DataManager for this plugin and convert from SQLite to H2 if needed.
*/
protected void initDatabase() {
initDatabase(Collections.emptyList());
}
/**
* Initialize the DataManager for this plugin and convert from SQLite to H2 if needed.
*
* @param migrations List of migrations to run.
*/
protected void initDatabase(List<DataMigration> migrations) {
boolean legacy = this.config.contains("MySQL");
boolean isSQLite = !this.config.getBoolean("MySQL.Enabled", false);
if (legacy && isSQLite) {
this.config.set("MySQL", null);
this.dataManager = new DataManager(this, migrations, DatabaseType.SQLITE);
} else if (legacy) {
//Copy credentials from the old config to the new database config (make sure database.yml is loaded first)
getDatabaseConfig().set("MySQL.Hostname", this.config.getString("MySQL.Hostname", "localhost"));
this.databaseConfig.set("MySQL.Port", this.config.getInt("MySQL.Port", 3306));
this.databaseConfig.set("MySQL.Database", this.config.getString("MySQL.Database", "database"));
this.databaseConfig.set("MySQL.Username", this.config.getString("MySQL.Username", "username"));
this.databaseConfig.set("MySQL.Password", this.config.getString("MySQL.Password", "password"));
this.databaseConfig.set("MySQL.Pool Size", this.config.getInt("MySQL.Pool Size", 5));
this.databaseConfig.set("MySQL.Use SSL", this.config.getBoolean("MySQL.Use SSL", false));
this.dataManager = new DataManager(this, migrations);
} else {
this.dataManager = new DataManager(this, migrations);
}
if (dataManager.getDatabaseConnector().isInitialized()) {
//Check if the type is SQLite
if (dataManager.getDatabaseConnector().getType() == DatabaseType.SQLITE) {
//Let's convert it to H2
DataManager newDataManager = DataMigration.convert(this, DatabaseType.H2);
if (newDataManager != null && newDataManager.getDatabaseConnector().isInitialized()) {
//Set the new data manager
setDataManager(newDataManager);
}
}
}
}
/**
* Set the DataManager for this plugin.
* Used for converting from one database to another.
*/
public void setDataManager(DataManager dataManager) {
if (dataManager == null) throw new IllegalArgumentException("DataManager cannot be null!");
if (this.dataManager == dataManager) return;
//Make sure to shut down the old data manager.
if (this.dataManager != null) {
this.dataManager.shutdown();
}
this.dataManager = dataManager;
}
}
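For orientation, here is a minimal sketch (not part of this commit) of how a plugin is expected to use the reworked lifecycle: call initDatabase() from onPluginEnable() and let SongodaPlugin close the connector on disable. The class and package names are hypothetical, and any further abstract members of SongodaPlugin are omitted for brevity.

package com.example;

import com.craftaro.core.SongodaPlugin;
import com.craftaro.core.database.DataMigration;

import java.util.Collections;
import java.util.List;

// Hypothetical plugin showing the intended call order for the new database API.
public class ExamplePlugin extends SongodaPlugin {
    @Override
    public void onPluginLoad() {
        // nothing to do before the server is ready
    }

    @Override
    public void onPluginEnable() {
        // Loads database.yml, opens the connector, runs migrations and,
        // if an old SQLite file exists, converts it to H2.
        List<DataMigration> migrations = Collections.emptyList();
        initDatabase(migrations);
        // From here on getDataManager() is safe to use.
    }

    @Override
    public void onPluginDisable() {
        // SongodaPlugin itself closes the DataManager's connector in onDisable().
    }
}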

View File

@ -0,0 +1,46 @@
package com.craftaro.core.database;
import java.util.Map;
import java.util.UUID;
public interface Data {
/**
* Gets the auto increment id of this data
*
* @return The auto increment id or -1 if not applicable
*/
default int getId() {
return -1;
}
/**
* Gets the unique id of this data
*
* @return The unique id or null if not applicable
*/
default UUID getUniqueId() {
return null;
}
/**
* Serializes the data into a map
* to save to the database
*
* @return The serialized data
*/
Map<String, Object> serialize();
/**
* Method used to deserialize the data
*
* @param map The map to deserialize
*/
Data deserialize(Map<String, Object> map);
/**
* No plugin prefix is required for the table
* @return The table name where the data should be stored
*/
String getTableName();
}
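To illustrate the contract above, a hypothetical Data implementation could look like the sketch below (not part of this commit). The table and column names are made up, and lower-case column keys are assumed to match the H2 MODE=MySQL / DATABASE_TO_LOWER setup used by the connectors.

package com.example;

import com.craftaro.core.database.Data;

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

// Hypothetical example: a simple per-player record keyed by UUID.
public class PlayerHome implements Data {
    private UUID owner;
    private String world;
    private double x, y, z;

    public PlayerHome() {
        // no-args constructor used by DataManager#load via reflection
    }

    public PlayerHome(UUID owner, String world, double x, double y, double z) {
        this.owner = owner;
        this.world = world;
        this.x = x;
        this.y = y;
        this.z = z;
    }

    @Override
    public UUID getUniqueId() {
        // getId() keeps its default of -1, so DataManager keys the row by this uuid
        return this.owner;
    }

    @Override
    public Map<String, Object> serialize() {
        Map<String, Object> map = new HashMap<>();
        map.put("uuid", this.owner.toString());
        map.put("world", this.world);
        map.put("x", this.x);
        map.put("y", this.y);
        map.put("z", this.z);
        return map;
    }

    @Override
    public Data deserialize(Map<String, Object> map) {
        // assumes lower-case column keys in the Record#intoMap() result
        this.owner = UUID.fromString((String) map.get("uuid"));
        this.world = (String) map.get("world");
        this.x = ((Number) map.get("x")).doubleValue();
        this.y = ((Number) map.get("y")).doubleValue();
        this.z = ((Number) map.get("z")).doubleValue();
        return this;
    }

    @Override
    public String getTableName() {
        return "homes"; // DataManager prepends the plugin's table prefix
    }
}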

View File

@ -0,0 +1,441 @@
package com.craftaro.core.database;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.craftaro.core.SongodaPlugin;
import com.craftaro.core.configuration.Config;
import org.bukkit.Bukkit;
import org.jetbrains.annotations.NotNull;
import org.jooq.Query;
import org.jooq.Record;
import org.jooq.impl.DSL;
import java.io.File;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
public class DataManager {
protected final SongodaPlugin plugin;
protected final Config databaseConfig;
private final List<DataMigration> migrations;
protected DatabaseConnector databaseConnector;
protected DatabaseType type;
private final Map<String, AtomicInteger> autoIncrementCache = new HashMap<>();
protected final ExecutorService asyncPool = new ThreadPoolExecutor(1, 5, 30L, TimeUnit.SECONDS, new LinkedBlockingQueue<>(), new ThreadFactoryBuilder().setNameFormat(getClass().getSimpleName()+"-Database-Async-%d").build());
@Deprecated
private static final Map<String, LinkedList<Runnable>> queues = new HashMap<>();
public DataManager(SongodaPlugin plugin, List<DataMigration> migrations) {
this.plugin = plugin;
this.migrations = migrations;
this.databaseConfig = plugin.getDatabaseConfig();
load(null);
}
public DataManager(SongodaPlugin plugin, List<DataMigration> migrations, DatabaseType forcedType) {
this.plugin = plugin;
this.migrations = migrations;
this.databaseConfig = plugin.getDatabaseConfig();
load(forcedType);
}
private void load(DatabaseType forcedType) {
try {
String databaseType = databaseConfig.getString("Connection Settings.Type").toUpperCase();
if (forcedType != null) {
databaseType = forcedType.name();
}
switch (databaseType) {
case "MYSQL": {
this.databaseConnector = new MySQLConnector(plugin, databaseConfig);
break;
}
case "MARIADB": {
this.databaseConnector = new MariaDBConnector(plugin, databaseConfig);
break;
}
case "SQLITE": {
//Lets check if we have the sqlite file in the plugin folder
File databaseFile = new File(plugin.getDataFolder(), plugin.getName().toLowerCase()+".db");
if (databaseFile.exists()) {
//Let's start with SQLite so the old data can be converted to H2
this.databaseConnector = new SQLiteConnector(plugin);
} else {
//No old data to convert, let's use H2 directly
this.databaseConnector = new H2Connector(plugin);
}
break;
}
default: {
//H2
this.databaseConnector = new H2Connector(plugin);
break;
}
}
this.type = databaseConnector.getType();
this.plugin.getLogger().info("Data handler connected using " + databaseConnector.getType().name() + ".");
} catch (Exception ex) {
this.plugin.getLogger().severe("Fatal error trying to connect to database. Please make sure all your connection settings are correct and try again. Plugin has been disabled.");
ex.printStackTrace();
Bukkit.getPluginManager().disablePlugin(this.plugin);
}
runMigrations();
}
/**
* @return the database connector
*/
public DatabaseConnector getDatabaseConnector() {
return databaseConnector;
}
/**
* @return the prefix to be used by all table names
*/
public String getTablePrefix() {
return this.plugin.getDescription().getName().toLowerCase() + '_';
}
/**
* Runs any needed data migrations
*/
public void runMigrations() {
try (Connection connection = this.databaseConnector.getConnection()) {
int currentMigration = -1;
boolean migrationsExist;
DatabaseMetaData meta = connection.getMetaData();
ResultSet res = meta.getTables(null, null, this.getMigrationsTableName(), new String[] {"TABLE"});
migrationsExist = res.next();
if (!migrationsExist) {
// No migration table exists, create one
String createTable = "CREATE TABLE " + this.getMigrationsTableName() + " (migration_version INT NOT NULL)";
try (PreparedStatement statement = connection.prepareStatement(createTable)) {
statement.execute();
}
// Insert primary row into migration table
String insertRow = "INSERT INTO " + this.getMigrationsTableName() + " VALUES (?)";
try (PreparedStatement statement = connection.prepareStatement(insertRow)) {
statement.setInt(1, -1);
statement.execute();
}
} else {
// Grab the current migration version
String selectVersion = "SELECT migration_version FROM " + this.getMigrationsTableName();
try (PreparedStatement statement = connection.prepareStatement(selectVersion)) {
ResultSet result = statement.executeQuery();
result.next();
currentMigration = result.getInt("migration_version");
}
}
// Grab required migrations
int finalCurrentMigration = currentMigration;
List<DataMigration> requiredMigrations = this.migrations.stream()
.filter(x -> x.getRevision() > finalCurrentMigration)
.sorted(Comparator.comparingInt(DataMigration::getRevision))
.collect(Collectors.toList());
// Nothing to migrate, abort
if (requiredMigrations.isEmpty()) {
return;
}
// Migrate the data
for (DataMigration dataMigration : requiredMigrations) {
dataMigration.migrate(databaseConnector, getTablePrefix());
}
// Set the new current migration to be the highest migrated to
currentMigration = requiredMigrations.stream()
.map(DataMigration::getRevision)
.max(Integer::compareTo)
.orElse(-1);
String updateVersion = "UPDATE " + this.getMigrationsTableName() + " SET migration_version = ?";
try (PreparedStatement statement = connection.prepareStatement(updateVersion)) {
statement.setInt(1, currentMigration);
statement.execute();
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
/**
* @return the name of the migrations table
*/
private String getMigrationsTableName() {
return getTablePrefix() + "migrations";
}
/**
* @return The next auto increment value for the given table
*/
public synchronized int getNextId(String table) {
String prefixedTable = getTablePrefix() + table;
if (!this.autoIncrementCache.containsKey(prefixedTable)) {
databaseConnector.connectDSL(context -> {
// context.select(DSL.max(DSL.field("id"))).from(prefixedTable).fetchOptional().ifPresentOrElse(record -> {
// if (record.get(0, Integer.class) == null) {
// this.autoIncrementCache.put(prefixedTable, new AtomicInteger(1));
// return;
// }
// this.autoIncrementCache.put(prefixedTable, new AtomicInteger(record.get(0, Integer.class)));
// }, () -> this.autoIncrementCache.put(prefixedTable, new AtomicInteger(1)));
//
//recreate upper method using java 8 syntax
Optional<Integer> max = context.select(DSL.max(DSL.field("id"))).from(prefixedTable).fetchOptional().map(record -> record.get(0, Integer.class));
this.autoIncrementCache.put(prefixedTable, new AtomicInteger(max.orElse(1)));
});
}
return this.autoIncrementCache.get(prefixedTable).incrementAndGet();
}
/**
* Saves the data to the database
*/
public void save(Data data) {
asyncPool.execute(() -> {
databaseConnector.connectDSL(context -> {
context.insertInto(DSL.table(getTablePrefix() + data.getTableName()))
.set(data.serialize())
.onConflict(DSL.field("id")).doUpdate()
.set(data.serialize())
.where(data.getId() != -1 ? DSL.field("id").eq(data.getId()) : DSL.field("uuid").eq(data.getUniqueId().toString()))
.execute();
});
});
}
/**
* Saves the data to the database synchronously
*/
public void saveSync(Data data) {
databaseConnector.connectDSL(context -> {
context.insertInto(DSL.table(getTablePrefix() + data.getTableName()))
.set(data.serialize())
.onConflict(DSL.field("id")).doUpdate()
.set(data.serialize())
.where(data.getId() != -1 ? DSL.field("id").eq(data.getId()) : DSL.field("uuid").eq(data.getUniqueId().toString()))
.execute();
});
}
/**
* Saves the data in batch to the database
*/
public void saveBatch(Collection<Data> dataBatch) {
asyncPool.execute(() -> {
databaseConnector.connectDSL(context -> {
List<Query> queries = new ArrayList<>();
for (Data data : dataBatch) {
queries.add(context.insertInto(DSL.table(getTablePrefix() + data.getTableName()))
.set(data.serialize())
.onConflict(DSL.field("id")).doUpdate()
.set(data.serialize())
.where(data.getId() != -1 ? DSL.field("id").eq(data.getId()) : DSL.field("uuid").eq(data.getUniqueId().toString())));
}
context.batch(queries).execute();
});
});
}
/**
* Saves the data in batch to the database
*/
public void saveBatchSync(Collection<Data> dataBatch) {
databaseConnector.connectDSL(context -> {
List<Query> queries = new ArrayList<>();
for (Data data : dataBatch) {
queries.add(context.insertInto(DSL.table(getTablePrefix() + data.getTableName()))
.set(data.serialize())
.onConflict(DSL.field("id")).doUpdate()
.set(data.serialize())
.where(data.getId() != -1 ? DSL.field("id").eq(data.getId()) : DSL.field("uuid").eq(data.getUniqueId().toString())));
}
context.batch(queries).execute();
});
}
/**
* Deletes the data from the database
*/
public void delete(Data data) {
asyncPool.execute(() -> {
databaseConnector.connectDSL(context -> {
context.delete(DSL.table(getTablePrefix() + data.getTableName()))
.where(data.getId() != -1 ? DSL.field("id").eq(data.getId()) : DSL.field("uuid").eq(data.getUniqueId().toString()))
.execute();
});
});
}
/**
* Loads the data from the database
* @param id The id of the data
* @return The loaded data
*/
@SuppressWarnings("unchecked")
public <T extends Data> T load(int id, Class<?> clazz, String table) {
try {
AtomicReference<Data> data = new AtomicReference<>((Data) clazz.getConstructor().newInstance());
databaseConnector.connectDSL(context -> {
try {
data.set((Data) clazz.getDeclaredConstructor().newInstance());
data.get().deserialize(Objects.requireNonNull(context.select()
.from(DSL.table(getTablePrefix() + table))
.where(DSL.field("id").eq(id))
.fetchOne())
.intoMap());
} catch (Exception ex) {
ex.printStackTrace();
}
});
return (T) data.get();
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
/**
* Loads the data from the database
* @param uuid The uuid of the data
* @return The loaded data
*/
@SuppressWarnings("unchecked")
public <T extends Data> T load(UUID uuid, Class<?> clazz, String table) {
try {
AtomicReference<Data> data = new AtomicReference<>((Data) clazz.getConstructor().newInstance());
AtomicBoolean found = new AtomicBoolean(false);
databaseConnector.connectDSL(context -> {
try {
data.set((Data) clazz.getDeclaredConstructor().newInstance());
data.get().deserialize(Objects.requireNonNull(context.select()
.from(DSL.table(getTablePrefix() + table))
.where(DSL.field("uuid").eq(uuid.toString()))
.fetchOne())
.intoMap());
found.set(true);
} catch (NullPointerException ignored) {
} catch (Exception ex) {
ex.printStackTrace();
}
});
if (found.get()) {
return (T) data.get();
} else {
return null;
}
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
/**
* Loads the data in batch from the database
* @return The loaded data
*/
@SuppressWarnings("unchecked")
public <T extends Data> List<T> loadBatch(Class<?> clazz, String table) {
try {
List<Data> dataList = Collections.synchronizedList(new ArrayList<>());
databaseConnector.connectDSL(context -> {
try {
for (@NotNull Record record : Objects.requireNonNull(context.select()
.from(DSL.table(getTablePrefix() + table))
.fetchArray())) {
Data data = (Data)clazz.getDeclaredConstructor().newInstance();
data.deserialize(record.intoMap());
dataList.add(data);
}
} catch (Exception ex) {
ex.printStackTrace();
}
});
return (List<T>) dataList;
} catch (Exception ex) {
throw new RuntimeException(ex);
}
}
/**
* Close the database and shutdown the async pool
*/
public void shutdown() {
asyncPool.shutdown();
databaseConnector.closeConnection();
}
/**
* Force shutdown the async pool and close the database
* @return Tasks that didn't finish in the async pool
*/
public List<Runnable> shutdownNow() {
databaseConnector.closeConnection();
return asyncPool.shutdownNow();
}
public void shutdownTaskQueue() {
this.asyncPool.shutdown();
}
public List<Runnable> forceShutdownTaskQueue() {
return this.asyncPool.shutdownNow();
}
public boolean isTaskQueueTerminated() {
return this.asyncPool.isTerminated();
}
public long getTaskQueueSize() {
if (this.asyncPool instanceof ThreadPoolExecutor) {
return ((ThreadPoolExecutor) this.asyncPool).getTaskCount();
}
return -1;
}
/**
* @see ExecutorService#awaitTermination(long, TimeUnit)
*/
public boolean waitForShutdown(long timeout, TimeUnit unit) throws InterruptedException {
return this.asyncPool.awaitTermination(timeout, unit);
}
public String getSyntax(String string, DatabaseType type) {
if (this.type == type) {
return string;
}
return "";
}
}
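A rough usage sketch for the new DataManager (not part of this commit), reusing the hypothetical PlayerHome class from the Data example above; the "homes" table name is likewise made up.

package com.example;

import com.craftaro.core.SongodaPlugin;
import com.craftaro.core.database.DataManager;

import java.util.List;
import java.util.UUID;

// Illustrative only; assumes PlayerHome from the Data sketch above.
class DataManagerUsageExample {
    void example(SongodaPlugin plugin, UUID playerId) {
        DataManager dataManager = plugin.getDataManager();

        // Queued on the async pool as an INSERT ... ON CONFLICT DO UPDATE
        dataManager.save(new PlayerHome(playerId, "world", 10, 64, 70));

        // Blocking load by uuid; returns null when no matching row exists
        PlayerHome home = dataManager.load(playerId, PlayerHome.class, "homes");

        // Load everything from the table, typically during start-up
        List<PlayerHome> allHomes = dataManager.loadBatch(PlayerHome.class, "homes");
    }
}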

View File

@ -1,208 +0,0 @@
package com.craftaro.core.database;
import org.bukkit.Bukkit;
import org.bukkit.plugin.Plugin;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
public class DataManagerAbstract {
protected final DatabaseConnector databaseConnector;
protected final Plugin plugin;
protected final DatabaseType type;
protected final ExecutorService asyncPool = Executors.newSingleThreadExecutor();
@Deprecated
private static final Map<String, LinkedList<Runnable>> queues = new HashMap<>();
public DataManagerAbstract(DatabaseConnector databaseConnector, Plugin plugin) {
this.databaseConnector = databaseConnector;
this.plugin = plugin;
this.type = databaseConnector.getType();
}
/**
* @return the prefix to be used by all table names
*/
public String getTablePrefix() {
return this.plugin.getDescription().getName().toLowerCase() + '_';
}
/**
* Deprecated because it is often times not accurate to its use case. (+race-conditions)
*/
@Deprecated
protected int lastInsertedId(Connection connection) {
return lastInsertedId(connection, null);
}
/**
* Deprecated because it is often times not accurate to its use case. (+race-conditions)
*/
@Deprecated
protected int lastInsertedId(Connection connection, String table) {
String select = "SELECT * FROM " + this.getTablePrefix() + table + " ORDER BY id DESC LIMIT 1";
String query;
if (this.databaseConnector instanceof SQLiteConnector) {
query = table == null ? "SELECT last_insert_rowid()" : select;
} else {
query = table == null ? "SELECT LAST_INSERT_ID()" : select;
}
int id = -1;
try (Statement statement = connection.createStatement()) {
ResultSet result = statement.executeQuery(query);
result.next();
id = result.getInt(1);
} catch (SQLException ex) {
ex.printStackTrace();
}
return id;
}
/**
* Queue a task to be run asynchronously. <br>
*
* @param runnable task to run
*/
@Deprecated
public void async(Runnable runnable) {
Bukkit.getScheduler().runTaskAsynchronously(this.plugin, runnable);
}
/**
* Queue a task to be run asynchronously with all the
* advantages of CompletableFuture api <br>
*
* @param runnable task to run
*/
public CompletableFuture<Void> asyncFuture(Runnable runnable) {
return CompletableFuture.runAsync(runnable, this.asyncPool);
}
/**
* Queue a task to be run synchronously.
*
* @param runnable task to run on the next server tick
*/
public void sync(Runnable runnable) {
Bukkit.getScheduler().runTask(this.plugin, runnable);
}
public void runAsync(Runnable runnable) {
runAsync(runnable, null);
}
// FIXME: The problem with a single threaded async queue is that the database implementations and this queue
// are **not** thread-safe in any way. The connection is not pooled or anything...
// So the actual problem is that plugins just queue way too much tasks on bulk which it just shouldn't need to do...
public void runAsync(Runnable task, Consumer<Throwable> callback) {
this.asyncPool.execute(() -> {
try {
task.run();
if (callback != null) {
callback.accept(null);
}
} catch (Throwable th) {
if (callback != null) {
callback.accept(th);
return;
}
th.printStackTrace();
}
});
}
public void shutdownTaskQueue() {
this.asyncPool.shutdown();
}
public List<Runnable> forceShutdownTaskQueue() {
return this.asyncPool.shutdownNow();
}
public boolean isTaskQueueTerminated() {
return this.asyncPool.isTerminated();
}
public long getTaskQueueSize() {
if (this.asyncPool instanceof ThreadPoolExecutor) {
return ((ThreadPoolExecutor) this.asyncPool).getTaskCount();
}
return -1;
}
/**
* @see ExecutorService#awaitTermination(long, TimeUnit)
*/
public boolean waitForShutdown(long timeout, TimeUnit unit) throws InterruptedException {
return this.asyncPool.awaitTermination(timeout, unit);
}
/**
* Queue tasks to be run asynchronously.
*
* @param runnable task to put into queue.
* @param queueKey the queue key to add the runnable to.
*/
@Deprecated
public void queueAsync(Runnable runnable, String queueKey) {
if (queueKey == null) {
return;
}
List<Runnable> queue = queues.computeIfAbsent(queueKey, t -> new LinkedList<>());
queue.add(runnable);
if (queue.size() == 1) {
runQueue(queueKey);
}
}
@Deprecated
private void runQueue(String queueKey) {
doQueue(queueKey, (s) -> {
if (!queues.get(queueKey).isEmpty()) {
runQueue(queueKey);
}
});
}
@Deprecated
private void doQueue(String queueKey, Consumer<Boolean> callback) {
Runnable runnable = queues.get(queueKey).getFirst();
async(() -> {
runnable.run();
sync(() -> {
queues.get(queueKey).remove(runnable);
callback.accept(true);
});
});
}
public String getSyntax(String string, DatabaseType type) {
if (this.type == type) {
return string;
}
return "";
}
}

View File

@ -1,7 +1,16 @@
package com.craftaro.core.database;
import com.craftaro.core.SongodaCore;
import com.craftaro.core.SongodaPlugin;
import java.io.File;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
public abstract class DataMigration {
private final int revision;
@ -10,7 +19,7 @@ public abstract class DataMigration {
this.revision = revision;
}
public abstract void migrate(Connection connection, String tablePrefix) throws SQLException;
public abstract void migrate(DatabaseConnector connector, String tablePrefix) throws SQLException;
/**
* @return the revision number of this migration
@ -18,4 +27,83 @@ public abstract class DataMigration {
public int getRevision() {
return this.revision;
}
/**
* @param plugin The plugin to convert data for
* @param toType The new database type
* @return The new data manager instance
*/
public static DataManager convert(SongodaPlugin plugin, DatabaseType toType) {
DataManager from = plugin.getDataManager();
if (from.getDatabaseConnector().getType() == toType) {
plugin.getLogger().severe("Cannot convert to the same database type!");
return null;
}
DataManager to = new DataManager(plugin, Collections.emptyList(), toType);
if (!to.getDatabaseConnector().isInitialized()) {
plugin.getLogger().severe("Invalid database configuration for " + toType.name() +"! Please check your "+plugin.getName()+"/database.yml file.");
return null;
}
DatabaseConnector fromConnector = from.getDatabaseConnector();
DatabaseConnector toConnector = to.getDatabaseConnector();
Connection fromConnection;
Connection toConnection = null;
try {
fromConnection = fromConnector.getConnection();
toConnection = toConnector.getConnection();
toConnection.setAutoCommit(false);
// Retrieve the list of tables from the old database
List<String> tableNames = new ArrayList<>();
try (ResultSet rs = fromConnection.getMetaData().getTables(null, null, null, new String[] {"TABLE"})) {
while (rs.next()) {
String tableName = rs.getString("TABLE_NAME");
tableNames.add(tableName);
}
}
// Transfer the data from the old database to the new database
for (String tableName : tableNames) {
try (
PreparedStatement fromStmt = fromConnection.prepareStatement("SELECT * FROM " + tableName);
ResultSet rs = fromStmt.executeQuery();
PreparedStatement toStmt = toConnection.prepareStatement("INSERT INTO " + tableName + " VALUES (" + String.join(",", Collections.nCopies(rs.getMetaData().getColumnCount(), "?")) + ")")
) {
while (rs.next()) {
for (int i = 1; i <= rs.getMetaData().getColumnCount(); i++) {
toStmt.setObject(i, rs.getObject(i));
}
toStmt.executeUpdate();
}
}
}
toConnection.commit();
} catch (Exception e) {
if (toConnection != null)
try {
toConnection.rollback();
} catch (SQLException e1) {
e1.printStackTrace();
SongodaCore.getInstance().getLogger().severe("Failed to rollback data for the new database");
}
e.printStackTrace();
SongodaCore.getInstance().getLogger().severe("Failed to migrate data from " + from.getDatabaseConnector().getType() + " to " + to.getDatabaseConnector().getType());
return null;
}
SongodaCore.getInstance().getLogger().info("Successfully migrated data from " + from.getDatabaseConnector().getType() + " to " + to.getDatabaseConnector().getType());
fromConnector.closeConnection();
//Get rid of the old database file
File databaseFile = new File(plugin.getDataFolder(), plugin.getName().toLowerCase()+".db");
if (databaseFile.exists()) {
//rename it to .old
databaseFile.renameTo(new File(plugin.getDataFolder(), plugin.getName().toLowerCase()+".old"));
}
return to;
}
}
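A hypothetical migration written against the new migrate(DatabaseConnector, String) signature might look like this (illustrative only, not part of this commit; the table layout matches the PlayerHome sketch above).

package com.example;

import com.craftaro.core.database.DataMigration;
import com.craftaro.core.database.DatabaseConnector;

import java.sql.PreparedStatement;
import java.sql.SQLException;

// Revision 1: creates the hypothetical "homes" table.
public class _1_CreateHomesMigration extends DataMigration {
    public _1_CreateHomesMigration() {
        super(1);
    }

    @Override
    public void migrate(DatabaseConnector connector, String tablePrefix) throws SQLException {
        connector.connect(connection -> {
            try (PreparedStatement statement = connection.prepareStatement(
                    "CREATE TABLE IF NOT EXISTS " + tablePrefix + "homes (" +
                            "uuid VARCHAR(36) NOT NULL PRIMARY KEY, " +
                            "world VARCHAR(64) NOT NULL, " +
                            "x DOUBLE NOT NULL, y DOUBLE NOT NULL, z DOUBLE NOT NULL)")) {
                statement.execute();
            }
        });
    }
}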

View File

@ -1,106 +0,0 @@
package com.craftaro.core.database;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
public class DataMigrationManager {
private final List<DataMigration> migrations;
private final DatabaseConnector databaseConnector;
private final DataManagerAbstract dataManagerAbstract;
public DataMigrationManager(DatabaseConnector databaseConnector, DataManagerAbstract dataManagerAbstract, DataMigration... migrations) {
this.databaseConnector = databaseConnector;
this.dataManagerAbstract = dataManagerAbstract;
this.migrations = Arrays.asList(migrations);
}
/**
* Runs any needed data migrations
*/
public void runMigrations() {
try (Connection connection = this.databaseConnector.getConnection()) {
int currentMigration = -1;
boolean migrationsExist;
String query;
if (this.databaseConnector instanceof SQLiteConnector) {
query = "SELECT 1 FROM sqlite_master WHERE type = 'table' AND name = ?";
} else {
query = "SHOW TABLES LIKE ?";
}
try (PreparedStatement statement = connection.prepareStatement(query)) {
statement.setString(1, this.getMigrationsTableName());
migrationsExist = statement.executeQuery().next();
}
if (!migrationsExist) {
// No migration table exists, create one
String createTable = "CREATE TABLE " + this.getMigrationsTableName() + " (migration_version INT NOT NULL)";
try (PreparedStatement statement = connection.prepareStatement(createTable)) {
statement.execute();
}
// Insert primary row into migration table
String insertRow = "INSERT INTO " + this.getMigrationsTableName() + " VALUES (?)";
try (PreparedStatement statement = connection.prepareStatement(insertRow)) {
statement.setInt(1, -1);
statement.execute();
}
} else {
// Grab the current migration version
String selectVersion = "SELECT migration_version FROM " + this.getMigrationsTableName();
try (PreparedStatement statement = connection.prepareStatement(selectVersion)) {
ResultSet result = statement.executeQuery();
result.next();
currentMigration = result.getInt("migration_version");
}
}
// Grab required migrations
int finalCurrentMigration = currentMigration;
List<DataMigration> requiredMigrations = this.migrations.stream()
.filter(x -> x.getRevision() > finalCurrentMigration)
.sorted(Comparator.comparingInt(DataMigration::getRevision))
.collect(Collectors.toList());
// Nothing to migrate, abort
if (requiredMigrations.isEmpty()) {
return;
}
// Migrate the data
for (DataMigration dataMigration : requiredMigrations) {
dataMigration.migrate(connection, this.dataManagerAbstract.getTablePrefix());
}
// Set the new current migration to be the highest migrated to
currentMigration = requiredMigrations.stream()
.map(DataMigration::getRevision)
.max(Integer::compareTo)
.orElse(-1);
String updateVersion = "UPDATE " + this.getMigrationsTableName() + " SET migration_version = ?";
try (PreparedStatement statement = connection.prepareStatement(updateVersion)) {
statement.setInt(1, currentMigration);
statement.execute();
}
} catch (Exception ex) {
ex.printStackTrace();
}
}
/**
* @return the name of the migrations table
*/
private String getMigrationsTableName() {
return this.dataManagerAbstract.getTablePrefix() + "migrations";
}
}

View File

@ -1,5 +1,7 @@
package com.craftaro.core.database;
import org.jooq.DSLContext;
import java.sql.Connection;
import java.sql.SQLException;
@ -23,6 +25,29 @@ public interface DatabaseConnector {
*/
void connect(ConnectionCallback callback);
/**
* Executes a callback with a Connection passed and automatically closes it when finished
*
* @param callback The callback to execute once the connection is retrieved
* @return The result of the callback
*/
OptionalResult connectOptional(ConnectionOptionalCallback callback);
/**
* Executes a callback with a DSLContext passed and automatically closes it when finished
*
* @param callback The callback to execute once the connection is retrieved
*/
void connectDSL(DSLContextCallback callback);
/**
* Executes a callback with a DSLContext passed and automatically closes it when finished
*
* @param callback The callback to execute once the connection is retrieved
* @return The result of the callback
*/
OptionalResult connectDSLOptional(DSLContextOptionalCallback callback);
/**
* Wraps a connection in a callback which will automagically handle catching sql errors
*/
@ -30,7 +55,40 @@ public interface DatabaseConnector {
void accept(Connection connection) throws SQLException;
}
Connection getConnection();
/**
* Wraps a connection in a callback which will
* automagically handle catching sql errors
* Can return a value
*/
interface ConnectionOptionalCallback {
OptionalResult accept(Connection connection) throws SQLException;
}
/**
* Wraps a connection in a callback which will automagically handle catching sql errors
*/
interface DSLContextCallback {
void accept(DSLContext context) throws SQLException;
}
/**
* Wraps a connection in a callback which will
* automagically handle catching sql errors
* Can return a value
*/
interface DSLContextOptionalCallback {
OptionalResult accept(DSLContext context) throws SQLException;
}
/**
* Gets a connection from the database
* @return The connection
*/
Connection getConnection() throws SQLException;
/**
* Gets the database type
* @return The database type
*/
DatabaseType getType();
}
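The new DSL callbacks can be used roughly like this (illustrative sketch, not part of this commit; the table and column names are hypothetical).

import com.craftaro.core.database.DatabaseConnector;
import com.craftaro.core.database.OptionalResult;
import org.jooq.impl.DSL;

import java.util.UUID;

// Sketch of the connectDSL / connectDSLOptional callbacks.
class ConnectorExample {
    void insertAndCount(DatabaseConnector connector, String table) {
        // connectDSL: the connector opens a pooled Connection, wraps it in a DSLContext
        // with the matching dialect and closes it when the callback returns.
        connector.connectDSL(context ->
                context.insertInto(DSL.table(table))
                        .set(DSL.field("uuid"), UUID.randomUUID().toString())
                        .execute());

        // connectDSLOptional: same, but a value can be passed back via OptionalResult.
        int rows = connector.connectDSLOptional(context ->
                        OptionalResult.of(context.fetchCount(DSL.table(table))))
                .getOrDefault(0);
        System.out.println(table + " now has " + rows + " rows");
    }
}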

View File

@ -1,8 +1,26 @@
package com.craftaro.core.database;
import org.jooq.SQLDialect;
public enum DatabaseType {
MARIADB,
MYSQL,
SQLITE
H2,
SQLITE;
public SQLDialect getDialect() {
switch (this) {
case MARIADB:
return SQLDialect.MARIADB;
case MYSQL:
return SQLDialect.MYSQL;
case SQLITE:
return SQLDialect.SQLITE;
case H2:
return SQLDialect.H2;
default:
return SQLDialect.DEFAULT;
}
}
}

View File

@ -0,0 +1,109 @@
package com.craftaro.core.database;
import com.craftaro.core.SongodaCore;
import com.craftaro.core.SongodaPlugin;
import com.craftaro.core.configuration.Config;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.bukkit.plugin.Plugin;
import org.jooq.SQLDialect;
import org.jooq.impl.DSL;
import java.sql.Connection;
import java.sql.SQLException;
public class H2Connector implements DatabaseConnector {
private final Plugin plugin;
private HikariDataSource hikari;
private boolean initializedSuccessfully;
public H2Connector(SongodaPlugin plugin) {
this(plugin, plugin.getDatabaseConfig());
}
public H2Connector(Plugin plugin, Config databaseConfig) {
this.plugin = plugin;
int poolSize = databaseConfig.getInt("Connection Settings.Pool Size");
String password = databaseConfig.getString("Connection Settings.Password");
String username = databaseConfig.getString("Connection Settings.Username");
HikariConfig config = new HikariConfig();
config.setDriverClassName("com.craftaro.core.third_party.org.h2.Driver");
config.setJdbcUrl("jdbc:h2:./h2_" + plugin.getDataFolder().getPath().replaceAll("\\\\", "/") + "/" + plugin.getDescription().getName().toLowerCase()+ ";AUTO_RECONNECT=TRUE;MODE=MySQL;DATABASE_TO_LOWER=TRUE;CASE_INSENSITIVE_IDENTIFIERS=TRUE");
config.setUsername(username);
config.setPassword(password);
config.setMaximumPoolSize(poolSize);
try {
this.hikari = new HikariDataSource(config);
this.initializedSuccessfully = true;
} catch (Exception ex) {
ex.printStackTrace();
this.initializedSuccessfully = false;
}
}
@Override
public boolean isInitialized() {
return this.initializedSuccessfully;
}
@Override
public void closeConnection() {
this.hikari.close();
}
@Override
public void connect(ConnectionCallback callback) {
try (Connection connection = this.hikari.getConnection()) {
callback.accept(connection);
} catch (SQLException ex) {
this.plugin.getLogger().severe("An error occurred executing an H2 query: " + ex.getMessage());
ex.printStackTrace();
}
}
@Override
public OptionalResult connectOptional(ConnectionOptionalCallback callback) {
try (Connection connection = getConnection()) {
return callback.accept(connection);
} catch (Exception ex) {
SongodaCore.getInstance().getLogger().severe("An error occurred executing an H2 query: " + ex.getMessage());
ex.printStackTrace();
}
return OptionalResult.empty();
}
@Override
public void connectDSL(DSLContextCallback callback) {
try (Connection connection = getConnection()){
callback.accept(DSL.using(connection, SQLDialect.MYSQL));
} catch (Exception ex) {
this.plugin.getLogger().severe("An error occurred executing an H2 query: " + ex.getMessage());
ex.printStackTrace();
}
}
@Override
public OptionalResult connectDSLOptional(DSLContextOptionalCallback callback) {
try (Connection connection = getConnection()) {
return callback.accept(DSL.using(connection, SQLDialect.MYSQL));
} catch (Exception ex) {
SongodaCore.getInstance().getLogger().severe("An error occurred executing an H2 query: " + ex.getMessage());
ex.printStackTrace();
}
return OptionalResult.empty();
}
@Override
public Connection getConnection() throws SQLException {
return this.hikari.getConnection();
}
@Override
public DatabaseType getType() {
return DatabaseType.H2;
}
}

View File

@ -1,22 +1,42 @@
package com.craftaro.core.database;
import com.craftaro.core.SongodaCore;
import com.craftaro.core.SongodaPlugin;
import com.craftaro.core.configuration.Config;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.bukkit.plugin.Plugin;
import org.jooq.SQLDialect;
import org.jooq.impl.DSL;
import java.sql.Connection;
import java.sql.SQLException;
public class MariaDBConnector implements DatabaseConnector {
private final Plugin plugin;
private final SongodaPlugin plugin;
private HikariDataSource hikari;
private boolean initializedSuccessfully;
public MariaDBConnector(Plugin plugin, String hostname, int port, String database, String username, String password, boolean useSSL, int poolSize) {
public MariaDBConnector(SongodaPlugin plugin) {
this(plugin, plugin.getDatabaseConfig());
}
public MariaDBConnector(SongodaPlugin plugin, Config databaseConfig) {
this.plugin = plugin;
plugin.getLogger().info("connecting to " + hostname + " : " + port);
String hostname = databaseConfig.getString("Connection Settings.Hostname");
int port = databaseConfig.getInt("Connection Settings.Port");
String database = databaseConfig.getString("Connection Settings.Database");
String username = databaseConfig.getString("Connection Settings.Username");
String password = databaseConfig.getString("Connection Settings.Password");
boolean useSSL = databaseConfig.getBoolean("Connection Settings.Use SSL");
int poolSize = databaseConfig.getInt("Connection Settings.Pool Size");
try {
Class.forName("com.craftaro.core.third_party.org.mariadb.jdbc.Driver");
} catch (ClassNotFoundException e) {
throw new RuntimeException(e);
}
HikariConfig config = new HikariConfig();
config.setJdbcUrl("jdbc:mariadb://" + hostname + ":" + port + "/" + database + "?useSSL=" + useSSL);
@ -28,6 +48,7 @@ public class MariaDBConnector implements DatabaseConnector {
this.hikari = new HikariDataSource(config);
this.initializedSuccessfully = true;
} catch (Exception ex) {
ex.printStackTrace();
this.initializedSuccessfully = false;
}
}
@ -42,9 +63,8 @@ public class MariaDBConnector implements DatabaseConnector {
this.hikari.close();
}
@Deprecated
@Override
public void connect(DatabaseConnector.ConnectionCallback callback) {
public void connect(ConnectionCallback callback) {
try (Connection connection = this.hikari.getConnection()) {
callback.accept(connection);
} catch (SQLException ex) {
@ -54,13 +74,40 @@ public class MariaDBConnector implements DatabaseConnector {
}
@Override
public Connection getConnection() {
try {
return this.hikari.getConnection();
public OptionalResult connectOptional(ConnectionOptionalCallback callback) {
try (Connection connection = getConnection()) {
return callback.accept(connection);
} catch (Exception ex) {
SongodaCore.getInstance().getLogger().severe("An error occurred executing a MySQL query: " + ex.getMessage());
ex.printStackTrace();
}
return null;
return OptionalResult.empty();
}
@Override
public void connectDSL(DSLContextCallback callback) {
try (Connection connection = getConnection()){
callback.accept(DSL.using(connection, SQLDialect.MARIADB));
} catch (Exception ex) {
this.plugin.getLogger().severe("An error occurred executing a MySQL query: " + ex.getMessage());
ex.printStackTrace();
}
}
@Override
public OptionalResult connectDSLOptional(DSLContextOptionalCallback callback) {
try (Connection connection = getConnection()) {
return callback.accept(DSL.using(connection, SQLDialect.MARIADB));
} catch (Exception ex) {
SongodaCore.getInstance().getLogger().severe("An error occurred executing a MySQL query: " + ex.getMessage());
ex.printStackTrace();
}
return OptionalResult.empty();
}
@Override
public Connection getConnection() throws SQLException {
return this.hikari.getConnection();
}
@Override

View File

@ -1,8 +1,13 @@
package com.craftaro.core.database;
import com.craftaro.core.SongodaCore;
import com.craftaro.core.SongodaPlugin;
import com.craftaro.core.configuration.Config;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.bukkit.plugin.Plugin;
import org.jooq.SQLDialect;
import org.jooq.impl.DSL;
import java.sql.Connection;
import java.sql.SQLException;
@ -12,10 +17,22 @@ public class MySQLConnector implements DatabaseConnector {
private HikariDataSource hikari;
private boolean initializedSuccessfully;
public MySQLConnector(Plugin plugin, String hostname, int port, String database, String username, String password, boolean useSSL, int poolSize) {
public MySQLConnector(SongodaPlugin plugin) {
this(plugin, plugin.getDatabaseConfig());
}
public MySQLConnector(Plugin plugin, Config databaseConfig) {
this.plugin = plugin;
plugin.getLogger().info("connecting to " + hostname + " : " + port);
String hostname = databaseConfig.getString("Connection Settings.Hostname");
int port = databaseConfig.getInt("Connection Settings.Port");
String database = databaseConfig.getString("Connection Settings.Database");
String username = databaseConfig.getString("Connection Settings.Username");
String password = databaseConfig.getString("Connection Settings.Password");
boolean useSSL = databaseConfig.getBoolean("Connection Settings.Use SSL");
int poolSize = databaseConfig.getInt("Connection Settings.Pool Size");
plugin.getLogger().info("Connecting to " + hostname + " : " + port + " using MySQL");
HikariConfig config = new HikariConfig();
config.setJdbcUrl("jdbc:mysql://" + hostname + ":" + port + "/" + database + "?useSSL=" + useSSL);
@ -41,7 +58,6 @@ public class MySQLConnector implements DatabaseConnector {
this.hikari.close();
}
@Deprecated
@Override
public void connect(ConnectionCallback callback) {
try (Connection connection = this.hikari.getConnection()) {
@ -53,13 +69,40 @@ public class MySQLConnector implements DatabaseConnector {
}
@Override
public Connection getConnection() {
try {
return this.hikari.getConnection();
public OptionalResult connectOptional(ConnectionOptionalCallback callback) {
try (Connection connection = getConnection()) {
return callback.accept(connection);
} catch (Exception ex) {
SongodaCore.getInstance().getLogger().severe("An error occurred executing a MySQL query: " + ex.getMessage());
ex.printStackTrace();
}
return null;
return OptionalResult.empty();
}
@Override
public void connectDSL(DSLContextCallback callback) {
try (Connection connection = getConnection()){
callback.accept(DSL.using(connection, SQLDialect.MYSQL));
} catch (Exception ex) {
this.plugin.getLogger().severe("An error occurred executing a MySQL query: " + ex.getMessage());
ex.printStackTrace();
}
}
@Override
public OptionalResult connectDSLOptional(DSLContextOptionalCallback callback) {
try (Connection connection = getConnection()) {
return callback.accept(DSL.using(connection, SQLDialect.MYSQL));
} catch (Exception ex) {
SongodaCore.getInstance().getLogger().severe("An error occurred executing a MySQL query: " + ex.getMessage());
ex.printStackTrace();
}
return OptionalResult.empty();
}
@Override
public Connection getConnection() throws SQLException {
return this.hikari.getConnection();
}
@Override

View File

@ -0,0 +1,32 @@
package com.craftaro.core.database;
public class OptionalResult {
private final Object value;
private final boolean present;
public OptionalResult(Object value, boolean present) {
this.value = value;
this.present = present;
}
public <T> T get(Class<T> clazz) {
return clazz.cast(value);
}
public boolean isPresent() {
return present;
}
public <V> V getOrDefault(V defaultValue) {
return present ? (V) value : defaultValue;
}
public static OptionalResult empty() {
return new OptionalResult(null, false);
}
public static <T> OptionalResult of(T value) {
return new OptionalResult(value, true);
}
}

View File

@ -1,12 +1,16 @@
package com.craftaro.core.database;
import com.craftaro.core.SongodaCore;
import org.bukkit.plugin.Plugin;
import org.jooq.SQLDialect;
import org.jooq.impl.DSL;
import java.io.File;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
@Deprecated
public class SQLiteConnector implements DatabaseConnector {
private final Plugin plugin;
private final String connectionString;
@ -39,7 +43,6 @@ public class SQLiteConnector implements DatabaseConnector {
}
}
@Deprecated
@Override
public void connect(ConnectionCallback callback) {
try {
@ -50,10 +53,42 @@ public class SQLiteConnector implements DatabaseConnector {
}
}
@Override
public OptionalResult connectOptional(ConnectionOptionalCallback callback) {
try (Connection connection = getConnection()) {
return callback.accept(connection);
} catch (Exception ex) {
SongodaCore.getInstance().getLogger().severe("An error occurred executing a SQLite query: " + ex.getMessage());
ex.printStackTrace();
}
return OptionalResult.empty();
}
@Override
public void connectDSL(DSLContextCallback callback) {
try (Connection connection = getConnection()){
callback.accept(DSL.using(connection, SQLDialect.SQLITE));
} catch (Exception ex) {
this.plugin.getLogger().severe("An error occurred executing a SQLite query: " + ex.getMessage());
ex.printStackTrace();
}
}
@Override
public OptionalResult connectDSLOptional(DSLContextOptionalCallback callback) {
try (Connection connection = getConnection()) {
return callback.accept(DSL.using(connection, SQLDialect.SQLITE));
} catch (Exception ex) {
SongodaCore.getInstance().getLogger().severe("An error occurred executing a SQLite query: " + ex.getMessage());
ex.printStackTrace();
}
return OptionalResult.empty();
}
@Override
public Connection getConnection() {
try {
if (this.connection == null || this.connection.isClosed()) {
if (this.connection == null || this.connection.isClosed() || !this.connection.isValid(2)) {
try {
this.connection = DriverManager.getConnection(this.connectionString);
} catch (SQLException ex) {

View File

@ -0,0 +1,67 @@
package com.craftaro.core.database;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import java.util.HashMap;
import java.util.Map;
public class SerializedLocation {
private String world;
private double x;
private double y;
private double z;
private float pitch = 0;
private float yaw = 0;
public SerializedLocation(Location location) {
this.world = location.getWorld().getName();
this.x = location.getX();
this.y = location.getY();
this.z = location.getZ();
if (location.getPitch() != 0 && location.getYaw() != 0) {
this.pitch = location.getPitch();
this.yaw = location.getYaw();
}
}
public static Location of(Map<String, Object> map) {
return new Location(Bukkit.getWorld((String) map.get("world")),
(double) map.get("x"),
(double) map.get("y"),
(double) map.get("z"),
(float) map.getOrDefault("yaw", 0.0f),
(float) map.getOrDefault("pitch", 0.0f));
}
public static Map<String, Object> of(Location location) {
Map<String, Object> map = new HashMap<>();
map.put("world", location.getWorld().getName());
map.put("x", location.getX());
map.put("y", location.getY());
map.put("z", location.getZ());
if (location.getPitch() != 0 && location.getYaw() != 0) {
map.put("pitch", location.getPitch());
map.put("yaw", location.getYaw());
}
return map;
}
public Location asLocation() {
return new Location(Bukkit.getWorld(world), x, y, z, yaw, pitch);
}
public Map<String, Object> asMap() {
Map<String, Object> map = new HashMap<>();
map.put("world", world);
map.put("x", x);
map.put("y", y);
map.put("z", z);
if (pitch != 0 && yaw != 0) {
map.put("pitch", pitch);
map.put("yaw", yaw);
}
return map;
}
}
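A short sketch (not part of this commit) of how a Data implementation could use these helpers to store and restore a Location; the owner/home parameters are hypothetical.

import com.craftaro.core.database.SerializedLocation;
import org.bukkit.Location;

import java.util.HashMap;
import java.util.Map;
import java.util.UUID;

// Embeds a Location in the serialized row and rebuilds it on load.
class SerializedLocationExample {
    Map<String, Object> serialize(UUID owner, Location home) {
        Map<String, Object> row = new HashMap<>();
        row.put("uuid", owner.toString());
        row.putAll(SerializedLocation.of(home)); // world, x, y, z (+ yaw/pitch when set)
        return row;
    }

    Location deserialize(Map<String, Object> row) {
        return SerializedLocation.of(row);
    }
}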

View File

@ -1,16 +1,18 @@
package com.craftaro.core.hooks.stackers;
import com.songoda.ultimatestacker.stackable.entity.EntityStack;
import com.craftaro.ultimatestacker.api.UltimateStackerAPI;
import com.craftaro.ultimatestacker.api.stack.entity.EntityStack;
import org.bukkit.Bukkit;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.Item;
import org.bukkit.entity.LivingEntity;
import org.bukkit.plugin.Plugin;
public class UltimateStacker extends Stacker {
private final com.songoda.ultimatestacker.UltimateStacker plugin;
private final Plugin plugin;
public UltimateStacker() {
this.plugin = com.songoda.ultimatestacker.UltimateStacker.getInstance();
this.plugin = Bukkit.getPluginManager().getPlugin("UltimateStacker");
}
@Override
@ -35,37 +37,38 @@ public class UltimateStacker extends Stacker {
@Override
public void setItemAmount(Item item, int amount) {
com.songoda.ultimatestacker.UltimateStacker.updateItemAmount(item, amount);
UltimateStackerAPI.getStackedItemManager().getStackedItem(item, true).setAmount(amount);
}
@Override
public int getItemAmount(Item item) {
return com.songoda.ultimatestacker.UltimateStacker.getActualItemAmount(item);
return UltimateStackerAPI.getStackedItemManager().getActualItemAmount(item);
}
@Override
public boolean isStacked(LivingEntity entity) {
return plugin.getEntityStackManager().isStackedEntity(entity);
return UltimateStackerAPI.getEntityStackManager().isStackedEntity(entity);
}
@Override
public int getSize(LivingEntity entity) {
return isStacked(entity) ? plugin.getEntityStackManager().getStack(entity).getAmount() : 0;
return isStacked(entity) ? UltimateStackerAPI.getEntityStackManager().getStackedEntity(entity).getAmount() : 0;
}
@Override
public void remove(LivingEntity entity, int amount) {
EntityStack stack = plugin.getEntityStackManager().getStack(entity);
stack.removeEntityFromStack(amount);
EntityStack stack = UltimateStackerAPI.getEntityStackManager().getStackedEntity(entity);
stack.take(amount);
}
@Override
public void add(LivingEntity entity, int amount) {
plugin.getEntityStackManager().addStack(entity, amount);
EntityStack stack = UltimateStackerAPI.getEntityStackManager().getStackedEntity(entity);
stack.add(amount);
}
@Override
public int getMinStackSize(EntityType type) {
return ((Plugin) plugin).getConfig().getInt("Entities.Min Stack Amount");
return ((Plugin) plugin).getConfig().getInt("Entities.Min Stack Amount", 1);
}
}

View File

@ -1,8 +1,7 @@
package com.craftaro.core.lootables.loot;
import com.craftaro.core.SongodaCore;
import com.songoda.ultimatestacker.UltimateStacker;
import com.songoda.ultimatestacker.settings.Settings;
import com.craftaro.ultimatestacker.api.UltimateStackerAPI;
import org.bukkit.Bukkit;
import org.bukkit.Material;
import org.bukkit.entity.LivingEntity;
@ -69,7 +68,7 @@ public class DropUtils {
private static void dropItems(List<ItemStack> items, EntityDeathEvent event) {
if (SongodaCore.isRegistered("UltimateStacker")) {
List<StackedItem> stacks = new ArrayList<>();
int maxSize = Settings.MAX_STACK_ITEMS.getInt()-64;
int maxSize = UltimateStackerAPI.getSettings().getMaxItemStackSize() - 64;
for (ItemStack item : items) {
StackedItem stack = stacks.stream().filter(stackedItem -> stackedItem.getItem().getType() == item.getType()).findFirst().orElse(null);
if (stack == null) {
@ -83,9 +82,9 @@ public class DropUtils {
}
stack.setamount(newAmount);
}
Bukkit.getScheduler().runTask(UltimateStacker.getInstance(), () -> {
Bukkit.getScheduler().runTask(UltimateStackerAPI.getPlugin(), () -> {
for (StackedItem stack : stacks) {
UltimateStacker.spawnStackedItem(stack.getItem(), stack.getAmount(), event.getEntity().getLocation());
UltimateStackerAPI.getStackedItemManager().createStack(stack.getItem(), event.getEntity().getLocation(), stack.getAmount());
}
});
return;

View File

@ -0,0 +1,18 @@
Connection Settings:
# Supported: MARIADB, MYSQL, H2
# Default: H2
Type: H2
# Hostname for remote databases
Hostname: localhost
# Port for remote databases
Port: 3306
# Username for remote databases
Username: username
# Password for remote databases
Password: password
# Database name for remote databases
Database: database
# Maximum number of connections to the database
Pool Size: 5
# Use SSL for remote databases
Use SSL: false