fix: A lot of issues introduced after migrating to the new database API

The new database API introduced a lot of breaking changes and changed behaviour sadly.
When migrating this plugin to the new API these were not taken into account properly and
essentially broke the whole database/storage layer of the plugin.

* SQL statements that were valid in SQLite but are not in H2, which is now the default
* H2 jdbc driver not being shaded into the final jar
* catching exceptions, logging a message, and then continuing code execution (in the core)
* introducing race conditions
* ...

These fixes sometimes rely on the non-multi-threaded APIs provided now, which will most certainly
come with a performance hit on larger servers.
But at least it works again... tbh I do not really want to spend too much time fixing this stuff...

A potential issue still exists in the use of `DataManager#getNextId` in `AnchorManagerImpl`.
Right now it returns a value based on the largest ID currently in the database – completely circumventing
the whole auto_increment characteristic of the table schema...
This is another changed behaviour that gets introduced, opening the possibility for conflicting IDs
*outside* of the database table itself. The ID can be used as a reference in other places.
This commit is contained in:
Christian Koop 2023-10-24 01:42:04 +02:00
parent 9e850c5a49
commit 2eec844447
No known key found for this signature in database
GPG Key ID: 89A8181384E010A3
7 changed files with 15 additions and 130 deletions

View File

@ -56,6 +56,7 @@
<excludeDefaults>false</excludeDefaults>
<includes>
<include>**/nms/v*/**</include>
<include>**/core/third_party/org/h2/**</include>
</includes>
</filter>
</filters>

View File

@ -66,7 +66,7 @@ public class AnchorManagerImpl implements AnchorManager {
protected void saveAll() {
for (Set<Anchor> anchorSet : this.anchors.values()) {
Collection<Data> asData = new ArrayList<>(anchorSet.size());
this.dataManager.saveBatch(asData);
this.dataManager.saveBatchSync(asData);
}
}
@ -121,7 +121,7 @@ public class AnchorManagerImpl implements AnchorManager {
if (tmpAnchors != null) {
Collection<Data> asData = new ArrayList<>(tmpAnchors.size());
this.dataManager.saveBatch(asData);
this.dataManager.saveBatchSync(asData);
for (Anchor anchor : tmpAnchors) {
((AnchorImpl) anchor).deInit(this.plugin);
@ -236,7 +236,7 @@ public class AnchorManagerImpl implements AnchorManager {
}
Anchor anchor = new AnchorImpl(dataManager.getNextId("anchors"), owner, loc, ticks);
this.dataManager.save(anchor);
this.dataManager.saveSync(anchor);
Bukkit.getScheduler().runTask(this.plugin, () -> { //TODO: Do we need to run this sync, or we are already on the main thread?
Block block = loc.getBlock();
block.setType(Settings.MATERIAL.getMaterial().parseMaterial());
@ -290,7 +290,7 @@ public class AnchorManagerImpl implements AnchorManager {
anchor.getLocation().add(.5, .5, .5), 100, .5, .5, .5);
((AnchorImpl) anchor).deInit(this.plugin);
this.dataManager.delete(anchor);
this.dataManager.deleteSync(anchor);
}
/* Anchor access */

View File

@ -4,8 +4,6 @@ import com.craftaro.core.SongodaCore;
import com.craftaro.core.SongodaPlugin;
import com.craftaro.core.commands.CommandManager;
import com.craftaro.core.configuration.Config;
import com.craftaro.core.database.DatabaseConnector;
import com.craftaro.core.database.SQLiteConnector;
import com.craftaro.core.gui.GuiManager;
import com.craftaro.core.hooks.EconomyManager;
import com.craftaro.core.hooks.HologramManager;
@ -16,9 +14,8 @@ import com.craftaro.epicanchors.commands.sub.GiveCommand;
import com.craftaro.epicanchors.commands.sub.ReloadCommand;
import com.craftaro.epicanchors.commands.sub.SettingsCommand;
import com.craftaro.epicanchors.commands.sub.ShowCommand;
import com.craftaro.epicanchors.files.DataManager;
import com.craftaro.epicanchors.files.Settings;
import com.craftaro.epicanchors.files.migration.AnchorMigration;
import com.craftaro.epicanchors.files.migration._2_AnchorMigration;
import com.craftaro.epicanchors.files.migration._1_InitialMigration;
import com.craftaro.epicanchors.listener.AnchorListener;
import com.craftaro.epicanchors.listener.BlockListener;
@ -47,15 +44,7 @@ public final class EpicAnchors extends SongodaPlugin {
public void onPluginEnable() {
SongodaCore.registerPlugin(this, 31, XMaterial.END_PORTAL_FRAME);
// Initialize database
// this.getLogger().info("Initializing SQLite...");
// DatabaseConnector dbCon = new SQLiteConnector(this);
// this.dataManager = new DataManager(dbCon, this);
// AnchorMigration anchorMigration = new AnchorMigration(dbCon, this.dataManager, new _1_InitialMigration());
// anchorMigration.runMigrations();
// anchorMigration.migrateLegacyData(this);
initDatabase(Arrays.asList(new _1_InitialMigration(), new AnchorMigration()));
initDatabase(Arrays.asList(new _1_InitialMigration(), new _2_AnchorMigration()));
this.anchorManager = new AnchorManagerImpl(this, this.dataManager);
EpicAnchorsApi.initApi(this.anchorManager);

View File

@ -1,97 +0,0 @@
package com.craftaro.epicanchors.files;
import com.craftaro.core.database.DatabaseConnector;
import com.craftaro.epicanchors.AnchorImpl;
import com.craftaro.epicanchors.api.Anchor;
import com.craftaro.epicanchors.files.migration.AnchorMigration;
import com.craftaro.epicanchors.utils.Callback;
import com.craftaro.epicanchors.utils.UpdateCallback;
import com.craftaro.epicanchors.utils.Utils;
import org.bukkit.Bukkit;
import org.bukkit.Location;
import org.bukkit.World;
import org.bukkit.plugin.Plugin;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Objects;
import java.util.UUID;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
public class DataManager {
// public void updateAnchors(Collection<Anchor> anchors, UpdateCallback callback) {
// this.databaseConnector.connect((con) -> {
// con.setAutoCommit(false);
//
// SQLException err = null;
//
// for (Anchor anchor : anchors) {
// try (PreparedStatement ps = con.prepareStatement("UPDATE " + this.anchorTable +
// " SET ticks_left =? WHERE id =?;")) {
// ps.setInt(1, anchor.getTicksLeft());
// ps.setInt(2, anchor.getDbId());
//
// ps.executeUpdate();
// } catch (SQLException ex) {
// err = ex;
// break;
// }
// }
//
// if (err == null) {
// con.commit();
//
// resolveUpdateCallback(callback, null);
// } else {
// con.rollback();
//
// resolveUpdateCallback(callback, err);
// }
//
// con.setAutoCommit(true);
// });
// }
//
// public void deleteAnchorAsync(Anchor anchor) {
// deleteAnchorAsync(anchor, null);
// }
//
// public void deleteAnchorAsync(Anchor anchor, UpdateCallback callback) {
// this.thread.execute(() ->
// this.databaseConnector.connect((con) -> {
// try (PreparedStatement ps = con.prepareStatement("DELETE FROM " + this.anchorTable +
// " WHERE id =?;")) {
// ps.setInt(1, anchor.getDbId());
//
// ps.executeUpdate();
//
// resolveUpdateCallback(callback, null);
// } catch (Exception ex) {
// resolveUpdateCallback(callback, ex);
// }
// })
// );
// }
//
// public static String getTableName(String prefix, String name) {
// String result = prefix + name;
//
// if (!result.matches("[a-z0-9_]+")) {
// throw new IllegalStateException("The generated table name '" + result + "' contains invalid characters");
// }
//
// return result;
// }
}

View File

@ -1,7 +1,6 @@
package com.craftaro.epicanchors.files.migration;
import com.craftaro.core.database.DataMigration;
import com.craftaro.epicanchors.EpicAnchors;
import java.sql.Connection;
import java.sql.SQLException;
@ -15,15 +14,14 @@ public class _1_InitialMigration extends DataMigration {
@Override
public void migrate(Connection connection, String tablePrefix) throws SQLException {
try (Statement statement = connection.createStatement()) {
statement.execute("CREATE TABLE " + EpicAnchors.getPlugin(EpicAnchors.class).getDataManager().getTablePrefix() + "anchors (" +
"id INTEGER NOT NULL," +
statement.execute("CREATE TABLE " + tablePrefix + "anchors (" +
"id INTEGER NOT NULL PRIMARY KEY auto_increment," +
"world_name TEXT NOT NULL," +
"x INTEGER NOT NULL," +
"y INTEGER NOT NULL," +
"z INTEGER NOT NULL," +
"ticks_left INTEGER NOT NULL," +
"owner VARCHAR(36)," +
"PRIMARY KEY(id AUTOINCREMENT)" +
"owner VARCHAR(36)" +
");");
}
}

View File

@ -18,8 +18,8 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.logging.Level;
public class AnchorMigration extends DataMigration {
public AnchorMigration() {
public class _2_AnchorMigration extends DataMigration {
public _2_AnchorMigration() {
super(2);
}

View File

@ -1,8 +1,6 @@
package com.craftaro.epicanchors.utils;
import com.craftaro.core.database.Data;
import com.craftaro.core.database.DataManager;
import com.craftaro.core.third_party.org.jooq.Queries;
import com.craftaro.core.third_party.org.jooq.Query;
import com.craftaro.core.third_party.org.jooq.Record1;
import com.craftaro.core.third_party.org.jooq.Result;
@ -10,17 +8,13 @@ import com.craftaro.core.third_party.org.jooq.impl.DSL;
import com.craftaro.epicanchors.AnchorImpl;
import com.craftaro.epicanchors.EpicAnchors;
import com.craftaro.epicanchors.api.Anchor;
import com.craftaro.epicanchors.files.migration.AnchorMigration;
import com.craftaro.epicanchors.files.migration._2_AnchorMigration;
import org.bukkit.World;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
public class DataHelper {
@ -68,7 +62,7 @@ public class DataHelper {
}
public static void migrateAnchor(List<AnchorMigration.LegacyAnchorEntry> anchorQueue, UpdateCallback callback) {
public static void migrateAnchor(List<_2_AnchorMigration.LegacyAnchorEntry> anchorQueue, UpdateCallback callback) {
DataManager dataManager = EpicAnchors.getPlugin(EpicAnchors.class).getDataManager();
//recreate it with Jooq
@ -77,7 +71,7 @@ public class DataHelper {
connection.setAutoCommit(false);
try {
List<Query> queries = new ArrayList<>();
for (AnchorMigration.LegacyAnchorEntry entry : anchorQueue) {
for (_2_AnchorMigration.LegacyAnchorEntry entry : anchorQueue) {
queries.add(dslContext.insertInto(DSL.table(dataManager.getTablePrefix() + "anchors"))
.columns(
DSL.field("world_name"),