More storage refactoring - write group nodes into their own section in flatfile types (#502)

This commit is contained in:
Luck 2017-10-22 14:22:52 +01:00
parent f109cb684a
commit 26c813c07b
No known key found for this signature in database
GPG Key ID: EFA9B3EC5FD90F8B
27 changed files with 793 additions and 588 deletions

View File

@ -39,6 +39,7 @@ import me.lucko.luckperms.common.commands.abstraction.SubCommand;
import me.lucko.luckperms.common.commands.sender.Sender;
import me.lucko.luckperms.common.config.ConfigKeys;
import me.lucko.luckperms.common.constants.CommandPermission;
import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer;
import me.lucko.luckperms.common.locale.CommandSpec;
import me.lucko.luckperms.common.locale.LocaleManager;
import me.lucko.luckperms.common.locale.Message;
@ -176,7 +177,7 @@ public class HolderEditor<T extends PermissionHolder> extends SubCommand<T> {
for (NodeModel node : nodes) {
JsonObject attributes = new JsonObject();
attributes.addProperty("permission", node.getPermission());
attributes.addProperty("value", node.isValue());
attributes.addProperty("value", node.getValue());
if (!node.getServer().equals("global")) {
attributes.addProperty("server", node.getServer());
@ -191,7 +192,7 @@ public class HolderEditor<T extends PermissionHolder> extends SubCommand<T> {
}
if (!node.getContexts().isEmpty()) {
attributes.add("context", node.getContextsAsJson());
attributes.add("context", ContextSetJsonSerializer.serializeContextSet(node.getContexts()));
}
arr.add(attributes);

View File

@ -40,6 +40,7 @@ import me.lucko.luckperms.common.commands.abstraction.SingleCommand;
import me.lucko.luckperms.common.commands.sender.Sender;
import me.lucko.luckperms.common.commands.utils.Util;
import me.lucko.luckperms.common.constants.CommandPermission;
import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer;
import me.lucko.luckperms.common.locale.CommandSpec;
import me.lucko.luckperms.common.locale.LocaleManager;
import me.lucko.luckperms.common.locale.Message;
@ -191,7 +192,7 @@ public class ApplyEditsCommand extends SingleCommand {
if (data.has("context") && data.get("context").isJsonObject()) {
JsonObject contexts = data.get("context").getAsJsonObject();
context = NodeModel.deserializeContextSet(contexts).makeImmutable();
context = ContextSetJsonSerializer.deserializeContextSet(contexts).makeImmutable();
}
nodes.add(NodeModel.of(permission, value, server, world, expiry, context));

View File

@ -34,7 +34,7 @@ import com.google.gson.GsonBuilder;
import com.google.gson.JsonObject;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer;
import java.io.BufferedReader;
import java.io.BufferedWriter;
@ -75,16 +75,16 @@ public class ContextsFile {
JsonObject data = new Gson().fromJson(reader, JsonObject.class);
if (data.has("context")) {
staticContexts = NodeModel.deserializeContextSet(data.get("context").getAsJsonObject()).makeImmutable();
staticContexts = ContextSetJsonSerializer.deserializeContextSet(data.get("context").getAsJsonObject()).makeImmutable();
save = true;
}
if (data.has("static-contexts")) {
staticContexts = NodeModel.deserializeContextSet(data.get("static-contexts").getAsJsonObject()).makeImmutable();
staticContexts = ContextSetJsonSerializer.deserializeContextSet(data.get("static-contexts").getAsJsonObject()).makeImmutable();
}
if (data.has("default-contexts")) {
defaultContexts = NodeModel.deserializeContextSet(data.get("default-contexts").getAsJsonObject()).makeImmutable();
defaultContexts = ContextSetJsonSerializer.deserializeContextSet(data.get("default-contexts").getAsJsonObject()).makeImmutable();
}
} catch (IOException e) {
@ -102,8 +102,8 @@ public class ContextsFile {
try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
JsonObject data = new JsonObject();
data.add("static-contexts", NodeModel.serializeContextSet(staticContexts));
data.add("default-contexts", NodeModel.serializeContextSet(defaultContexts));
data.add("static-contexts", ContextSetJsonSerializer.serializeContextSet(staticContexts));
data.add("default-contexts", ContextSetJsonSerializer.serializeContextSet(defaultContexts));
new GsonBuilder().setPrettyPrinting().create().toJson(data, writer);
writer.flush();

View File

@ -0,0 +1,86 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.contexts;
import lombok.experimental.UtilityClass;
import com.google.common.base.Preconditions;
import me.lucko.luckperms.api.context.ContextSet;
import me.lucko.luckperms.api.context.MutableContextSet;
import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.SimpleConfigurationNode;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
@UtilityClass
public class ContextSetConfigurateSerializer {

    /**
     * Serializes a {@link ContextSet} to a Configurate node.
     *
     * <p>Keys with a single value are written as a scalar; keys with multiple
     * values are written as a list. Keys with no values are omitted.</p>
     *
     * @param contextSet the context set to serialize
     * @return a configuration node representing the set
     */
    public static ConfigurationNode serializeContextSet(ContextSet contextSet) {
        ConfigurationNode root = SimpleConfigurationNode.root();
        for (Map.Entry<String, Collection<String>> entry : contextSet.toMultimap().asMap().entrySet()) {
            List<String> entries = new ArrayList<>(entry.getValue());
            if (entries.size() == 1) {
                // single value - write as a plain scalar
                root.getNode(entry.getKey()).setValue(entries.get(0));
            } else if (!entries.isEmpty()) {
                // multiple values - write as a list
                root.getNode(entry.getKey()).setValue(entries);
            }
        }
        return root;
    }

    /**
     * Deserializes a context set from a Configurate node.
     *
     * <p>Accepts both scalar and list values per key, mirroring the output of
     * {@link #serializeContextSet(ContextSet)}.</p>
     *
     * @param data the node to read from; must have map children
     * @return a mutable context set containing the parsed entries
     * @throws IllegalArgumentException if {@code data} is not a map node
     */
    public static MutableContextSet deserializeContextSet(ConfigurationNode data) {
        Preconditions.checkArgument(data.hasMapChildren());
        MutableContextSet result = MutableContextSet.create();
        for (Map.Entry<Object, ? extends ConfigurationNode> entry : data.getChildrenMap().entrySet()) {
            String key = entry.getKey().toString();
            ConfigurationNode node = entry.getValue();
            if (node.hasListChildren()) {
                for (ConfigurationNode child : node.getChildrenList()) {
                    result.add(key, child.getString());
                }
            } else {
                result.add(key, node.getString());
            }
        }
        return result;
    }
}

View File

@ -0,0 +1,90 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.contexts;
import lombok.experimental.UtilityClass;
import com.google.common.base.Preconditions;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import me.lucko.luckperms.api.context.ContextSet;
import me.lucko.luckperms.api.context.MutableContextSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
@UtilityClass
public class ContextSetJsonSerializer {

    /**
     * Serializes a {@link ContextSet} to a {@link JsonObject}.
     *
     * <p>Keys with a single value are written as a JSON string; keys with
     * multiple values are written as a JSON array of strings. Keys with no
     * values are omitted.</p>
     *
     * @param contextSet the context set to serialize
     * @return a json object representing the set
     */
    public static JsonObject serializeContextSet(ContextSet contextSet) {
        JsonObject output = new JsonObject();
        for (Map.Entry<String, Collection<String>> entry : contextSet.toMultimap().asMap().entrySet()) {
            List<String> entries = new ArrayList<>(entry.getValue());
            if (entries.size() == 1) {
                // single value - write as a plain string property
                output.addProperty(entry.getKey(), entries.get(0));
            } else if (!entries.isEmpty()) {
                // multiple values - write as an array of strings
                JsonArray array = new JsonArray();
                for (String value : entries) {
                    array.add(new JsonPrimitive(value));
                }
                output.add(entry.getKey(), array);
            }
        }
        return output;
    }

    /**
     * Deserializes a context set from a json element.
     *
     * <p>Accepts both string and array values per key, mirroring the output of
     * {@link #serializeContextSet(ContextSet)}.</p>
     *
     * @param element the element to read from; must be a json object
     * @return a mutable context set containing the parsed entries
     * @throws IllegalArgumentException if {@code element} is not a json object
     */
    public static MutableContextSet deserializeContextSet(JsonElement element) {
        Preconditions.checkArgument(element.isJsonObject());
        MutableContextSet result = MutableContextSet.create();
        for (Map.Entry<String, JsonElement> entry : element.getAsJsonObject().entrySet()) {
            JsonElement value = entry.getValue();
            if (value.isJsonArray()) {
                for (JsonElement member : value.getAsJsonArray()) {
                    result.add(entry.getKey(), member.getAsString());
                }
            } else {
                result.add(entry.getKey(), value.getAsString());
            }
        }
        return result;
    }
}

View File

@ -25,44 +25,21 @@
package me.lucko.luckperms.common.node;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NonNull;
import lombok.ToString;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.context.ContextSet;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.api.context.MutableContextSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
* An stripped down version of {@link Node}, without methods and cached values for handling permission lookups.
* A stripped-down version of {@link Node}, without methods and cached values
* for handling permission lookups.
*
* All values are non-null.
*/
@Getter
@ToString
@EqualsAndHashCode
@AllArgsConstructor(staticName = "of")
public final class NodeModel {
private static final Gson GSON = new Gson();
public static NodeModel fromNode(Node node) {
return NodeModel.of(
NodeModel model = of(
node.getPermission(),
node.getValuePrimitive(),
node.getServer().orElse("global"),
@ -70,42 +47,67 @@ public final class NodeModel {
node.isTemporary() ? node.getExpiryUnixTime() : 0L,
node.getContexts().makeImmutable()
);
model.node = node;
return model;
}
public static NodeModel deserialize(String permission, boolean value, String server, String world, long expiry, String contexts) {
JsonObject context = GSON.fromJson(contexts, JsonObject.class);
return of(permission, value, server, world, expiry, deserializeContextSet(context).makeImmutable());
public static NodeModel of(String permission, boolean value, String server, String world, long expiry, ImmutableContextSet contexts) {
return new NodeModel(permission, value, server, world, expiry, contexts);
}
@NonNull
private final String permission;
@NonNull
private final boolean value;
@NonNull
private final String server;
@NonNull
private final String world;
@NonNull
private final long expiry;
@NonNull
private final ImmutableContextSet contexts;
private Node node = null;
public String serializeContext() {
return GSON.toJson(getContextsAsJson());
private NodeModel(@NonNull String permission, boolean value, @NonNull String server, @NonNull String world, long expiry, @NonNull ImmutableContextSet contexts) {
this.permission = permission;
this.value = value;
this.server = server;
this.world = world;
this.expiry = expiry;
this.contexts = contexts;
}
public JsonObject getContextsAsJson() {
return serializeContextSet(contexts);
}
public Node toNode() {
public synchronized Node toNode() {
if (node == null) {
Node.Builder builder = NodeFactory.newBuilder(permission);
builder.setValue(value);
builder.setServer(server);
builder.setWorld(world);
builder.setExpiry(expiry);
builder.withExtraContext(contexts);
return builder.build();
node = builder.build();
}
return node;
}
public String getPermission() {
return this.permission;
}
public boolean getValue() {
return this.value;
}
public String getServer() {
return this.server;
}
public String getWorld() {
return this.world;
}
public long getExpiry() {
return this.expiry;
}
public ImmutableContextSet getContexts() {
return this.contexts;
}
public NodeModel setPermission(String permission) {
@ -132,47 +134,38 @@ public final class NodeModel {
return of(permission, value, server, world, expiry, contexts);
}
public static JsonObject serializeContextSet(ContextSet contextSet) {
JsonObject data = new JsonObject();
Map<String, Collection<String>> map = contextSet.toMultimap().asMap();
public boolean equals(Object o) {
if (o == this) return true;
if (!(o instanceof NodeModel)) return false;
final NodeModel other = (NodeModel) o;
map.forEach((k, v) -> {
List<String> values = new ArrayList<>(v);
int size = values.size();
if (size == 1) {
data.addProperty(k, values.get(0));
} else if (size > 1) {
JsonArray arr = new JsonArray();
for (String s : values) {
arr.add(new JsonPrimitive(s));
}
data.add(k, arr);
}
});
return data;
return this.getPermission().equals(other.getPermission()) &&
this.getValue() == other.getValue() &&
this.getServer().equals(other.getServer()) &&
this.getWorld().equals(other.getWorld()) &&
this.getExpiry() == other.getExpiry() &&
this.getContexts().equals(other.getContexts());
}
public static MutableContextSet deserializeContextSet(JsonElement element) {
Preconditions.checkArgument(element.isJsonObject());
JsonObject data = element.getAsJsonObject();
ImmutableSetMultimap.Builder<String, String> map = ImmutableSetMultimap.builder();
for (Map.Entry<String, JsonElement> e : data.entrySet()) {
String k = e.getKey();
JsonElement v = e.getValue();
if (v.isJsonArray()) {
JsonArray values = v.getAsJsonArray();
for (JsonElement value : values) {
map.put(k, value.getAsString());
}
} else {
map.put(k, v.getAsString());
}
public int hashCode() {
final int PRIME = 59;
int result = 1;
result = result * PRIME + this.getPermission().hashCode();
result = result * PRIME + Boolean.hashCode(this.getValue());
result = result * PRIME + this.getServer().hashCode();
result = result * PRIME + this.getWorld().hashCode();
result = result * PRIME + Long.hashCode(this.getExpiry());
result = result * PRIME + this.getContexts().hashCode();
return result;
}
return MutableContextSet.fromMultimap(map.build());
public String toString() {
return "NodeModel(" +
"permission=" + this.getPermission() + ", " +
"value=" + this.getValue() + ", " +
"server=" + this.getServer() + ", " +
"world=" + this.getWorld() + ", " +
"expiry=" + this.getExpiry() + ", " +
"contexts=" + this.getContexts() + ")";
}
}

View File

@ -41,7 +41,7 @@ import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.common.model.Track;
import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.AbstractBacking;
import me.lucko.luckperms.common.storage.backing.AbstractDao;
import me.lucko.luckperms.common.storage.wrappings.BufferedOutputStorage;
import me.lucko.luckperms.common.storage.wrappings.PhasedStorage;
@ -53,11 +53,11 @@ import java.util.concurrent.CompletableFuture;
import java.util.function.Supplier;
/**
* Converts a {@link AbstractBacking} to use {@link CompletableFuture}s
* Converts a {@link AbstractDao} to use {@link CompletableFuture}s
*/
@RequiredArgsConstructor(access = AccessLevel.PRIVATE)
public class AbstractStorage implements Storage {
public static Storage wrap(LuckPermsPlugin plugin, AbstractBacking backing) {
public static Storage create(LuckPermsPlugin plugin, AbstractDao backing) {
BufferedOutputStorage bufferedDs = BufferedOutputStorage.wrap(PhasedStorage.wrap(new AbstractStorage(plugin, backing)), 250L);
plugin.getScheduler().asyncRepeating(bufferedDs, 2L);
return bufferedDs;
@ -66,19 +66,19 @@ public class AbstractStorage implements Storage {
private final LuckPermsPlugin plugin;
@Delegate(types = Delegated.class)
private final AbstractBacking backing;
private final AbstractDao dao;
@Getter
private final StorageDelegate delegate;
private AbstractStorage(LuckPermsPlugin plugin, AbstractBacking backing) {
private AbstractStorage(LuckPermsPlugin plugin, AbstractDao dao) {
this.plugin = plugin;
this.backing = backing;
this.dao = dao;
this.delegate = new StorageDelegate(plugin, this);
}
private <T> CompletableFuture<T> makeFuture(Supplier<T> supplier) {
return CompletableFuture.supplyAsync(supplier, backing.getPlugin().getScheduler().async());
return CompletableFuture.supplyAsync(supplier, dao.getPlugin().getScheduler().async());
}
@Override
@ -88,23 +88,23 @@ public class AbstractStorage implements Storage {
@Override
public CompletableFuture<Boolean> logAction(LogEntry entry) {
return makeFuture(() -> backing.logAction(entry));
return makeFuture(() -> dao.logAction(entry));
}
@Override
public CompletableFuture<Log> getLog() {
return makeFuture(backing::getLog);
return makeFuture(dao::getLog);
}
@Override
public CompletableFuture<Boolean> applyBulkUpdate(BulkUpdate bulkUpdate) {
return makeFuture(() -> backing.applyBulkUpdate(bulkUpdate));
return makeFuture(() -> dao.applyBulkUpdate(bulkUpdate));
}
@Override
public CompletableFuture<Boolean> loadUser(UUID uuid, String username) {
return makeFuture(() -> {
if (backing.loadUser(uuid, username)) {
if (dao.loadUser(uuid, username)) {
User u = plugin.getUserManager().getIfLoaded(uuid);
if (u != null) {
plugin.getApiProvider().getEventFactory().handleUserLoad(u);
@ -117,23 +117,23 @@ public class AbstractStorage implements Storage {
@Override
public CompletableFuture<Boolean> saveUser(User user) {
return makeFuture(() -> backing.saveUser(user));
return makeFuture(() -> dao.saveUser(user));
}
@Override
public CompletableFuture<Set<UUID>> getUniqueUsers() {
return makeFuture(backing::getUniqueUsers);
return makeFuture(dao::getUniqueUsers);
}
@Override
public CompletableFuture<List<HeldPermission<UUID>>> getUsersWithPermission(String permission) {
return makeFuture(() -> backing.getUsersWithPermission(permission));
return makeFuture(() -> dao.getUsersWithPermission(permission));
}
@Override
public CompletableFuture<Boolean> createAndLoadGroup(String name, CreationCause cause) {
return makeFuture(() -> {
if (backing.createAndLoadGroup(name)) {
if (dao.createAndLoadGroup(name)) {
Group g = plugin.getGroupManager().getIfLoaded(name);
if (g != null) {
plugin.getApiProvider().getEventFactory().handleGroupCreate(g, cause);
@ -147,7 +147,7 @@ public class AbstractStorage implements Storage {
@Override
public CompletableFuture<Boolean> loadGroup(String name) {
return makeFuture(() -> {
if (backing.loadGroup(name)) {
if (dao.loadGroup(name)) {
Group g = plugin.getGroupManager().getIfLoaded(name);
if (g != null) {
plugin.getApiProvider().getEventFactory().handleGroupLoad(g);
@ -161,7 +161,7 @@ public class AbstractStorage implements Storage {
@Override
public CompletableFuture<Boolean> loadAllGroups() {
return makeFuture(() -> {
if (backing.loadAllGroups()) {
if (dao.loadAllGroups()) {
plugin.getApiProvider().getEventFactory().handleGroupLoadAll();
return true;
}
@ -171,13 +171,13 @@ public class AbstractStorage implements Storage {
@Override
public CompletableFuture<Boolean> saveGroup(Group group) {
return makeFuture(() -> backing.saveGroup(group));
return makeFuture(() -> dao.saveGroup(group));
}
@Override
public CompletableFuture<Boolean> deleteGroup(Group group, DeletionCause cause) {
return makeFuture(() -> {
if (backing.deleteGroup(group)) {
if (dao.deleteGroup(group)) {
plugin.getApiProvider().getEventFactory().handleGroupDelete(group, cause);
return true;
}
@ -187,13 +187,13 @@ public class AbstractStorage implements Storage {
@Override
public CompletableFuture<List<HeldPermission<String>>> getGroupsWithPermission(String permission) {
return makeFuture(() -> backing.getGroupsWithPermission(permission));
return makeFuture(() -> dao.getGroupsWithPermission(permission));
}
@Override
public CompletableFuture<Boolean> createAndLoadTrack(String name, CreationCause cause) {
return makeFuture(() -> {
if (backing.createAndLoadTrack(name)) {
if (dao.createAndLoadTrack(name)) {
Track t = plugin.getTrackManager().getIfLoaded(name);
if (t != null) {
plugin.getApiProvider().getEventFactory().handleTrackCreate(t, cause);
@ -207,7 +207,7 @@ public class AbstractStorage implements Storage {
@Override
public CompletableFuture<Boolean> loadTrack(String name) {
return makeFuture(() -> {
if (backing.loadTrack(name)) {
if (dao.loadTrack(name)) {
Track t = plugin.getTrackManager().getIfLoaded(name);
if (t != null) {
plugin.getApiProvider().getEventFactory().handleTrackLoad(t);
@ -221,7 +221,7 @@ public class AbstractStorage implements Storage {
@Override
public CompletableFuture<Boolean> loadAllTracks() {
return makeFuture(() -> {
if (backing.loadAllTracks()) {
if (dao.loadAllTracks()) {
plugin.getApiProvider().getEventFactory().handleTrackLoadAll();
return true;
}
@ -231,13 +231,13 @@ public class AbstractStorage implements Storage {
@Override
public CompletableFuture<Boolean> saveTrack(Track track) {
return makeFuture(() -> backing.saveTrack(track));
return makeFuture(() -> dao.saveTrack(track));
}
@Override
public CompletableFuture<Boolean> deleteTrack(Track track, DeletionCause cause) {
return makeFuture(() -> {
if (backing.deleteTrack(track)) {
if (dao.deleteTrack(track)) {
plugin.getApiProvider().getEventFactory().handleTrackDelete(track, cause);
return true;
}
@ -247,17 +247,17 @@ public class AbstractStorage implements Storage {
@Override
public CompletableFuture<Boolean> saveUUIDData(UUID uuid, String username) {
return makeFuture(() -> backing.saveUUIDData(uuid, username));
return makeFuture(() -> dao.saveUUIDData(uuid, username));
}
@Override
public CompletableFuture<UUID> getUUID(String username) {
return makeFuture(() -> backing.getUUID(username));
return makeFuture(() -> dao.getUUID(username));
}
@Override
public CompletableFuture<String> getName(UUID uuid) {
return makeFuture(() -> backing.getName(uuid));
return makeFuture(() -> dao.getName(uuid));
}
private interface Delegated {

View File

@ -35,7 +35,7 @@ import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.common.model.Track;
import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.AbstractBacking;
import me.lucko.luckperms.common.storage.backing.AbstractDao;
import java.util.LinkedHashMap;
import java.util.List;
@ -43,11 +43,11 @@ import java.util.Map;
import java.util.Set;
import java.util.UUID;
public class SplitBacking extends AbstractBacking {
private final Map<String, AbstractBacking> backing;
public class SplitStorageDao extends AbstractDao {
private final Map<String, AbstractDao> backing;
private final Map<String, String> types;
protected SplitBacking(LuckPermsPlugin plugin, Map<String, AbstractBacking> backing, Map<String, String> types) {
protected SplitStorageDao(LuckPermsPlugin plugin, Map<String, AbstractDao> backing, Map<String, String> types) {
super(plugin, "Split Storage");
this.backing = ImmutableMap.copyOf(backing);
this.types = ImmutableMap.copyOf(types);
@ -56,8 +56,8 @@ public class SplitBacking extends AbstractBacking {
@Override
public void init() {
boolean success = true;
backing.values().forEach(AbstractBacking::init);
for (AbstractBacking ds : backing.values()) {
backing.values().forEach(AbstractDao::init);
for (AbstractDao ds : backing.values()) {
if (!ds.isAcceptingLogins()) {
success = false;
}
@ -68,14 +68,14 @@ public class SplitBacking extends AbstractBacking {
@Override
public void shutdown() {
backing.values().forEach(AbstractBacking::shutdown);
backing.values().forEach(AbstractDao::shutdown);
}
@Override
public Map<String, String> getMeta() {
Map<String, String> ret = new LinkedHashMap<>();
ret.put("Types", types.toString());
for (AbstractBacking backing : backing.values()) {
for (AbstractDao backing : backing.values()) {
ret.putAll(backing.getMeta());
}
return ret;

View File

@ -31,16 +31,16 @@ import com.google.common.collect.ImmutableSet;
import me.lucko.luckperms.common.config.ConfigKeys;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.AbstractBacking;
import me.lucko.luckperms.common.storage.backing.file.HOCONBacking;
import me.lucko.luckperms.common.storage.backing.file.JSONBacking;
import me.lucko.luckperms.common.storage.backing.file.YAMLBacking;
import me.lucko.luckperms.common.storage.backing.mongodb.MongoDBBacking;
import me.lucko.luckperms.common.storage.backing.sql.SQLBacking;
import me.lucko.luckperms.common.storage.backing.sql.provider.H2Provider;
import me.lucko.luckperms.common.storage.backing.sql.provider.MySQLProvider;
import me.lucko.luckperms.common.storage.backing.sql.provider.PostgreSQLProvider;
import me.lucko.luckperms.common.storage.backing.sql.provider.SQLiteProvider;
import me.lucko.luckperms.common.storage.backing.AbstractDao;
import me.lucko.luckperms.common.storage.backing.file.HoconDao;
import me.lucko.luckperms.common.storage.backing.file.JsonDao;
import me.lucko.luckperms.common.storage.backing.file.YamlDao;
import me.lucko.luckperms.common.storage.backing.mongodb.MongoDao;
import me.lucko.luckperms.common.storage.backing.sql.SqlDao;
import me.lucko.luckperms.common.storage.backing.sql.provider.file.H2ConnectionFactory;
import me.lucko.luckperms.common.storage.backing.sql.provider.file.SQLiteConnectionFactory;
import me.lucko.luckperms.common.storage.backing.sql.provider.remote.MySqlConnectionFactory;
import me.lucko.luckperms.common.storage.backing.sql.provider.remote.PostgreConnectionFactory;
import me.lucko.luckperms.common.utils.ImmutableCollectors;
import java.io.File;
@ -91,13 +91,13 @@ public class StorageFactory {
Set<String> neededTypes = new HashSet<>();
neededTypes.addAll(types.values());
Map<String, AbstractBacking> backing = new HashMap<>();
Map<String, AbstractDao> backing = new HashMap<>();
for (String type : neededTypes) {
backing.put(type, makeBacking(StorageType.parse(type), plugin));
backing.put(type, makeDao(StorageType.parse(type), plugin));
}
storage = AbstractStorage.wrap(plugin, new SplitBacking(plugin, backing, types));
storage = AbstractStorage.create(plugin, new SplitStorageDao(plugin, backing, types));
} else {
String method = plugin.getConfiguration().get(ConfigKeys.STORAGE_METHOD);
@ -115,52 +115,52 @@ public class StorageFactory {
}
private static Storage makeInstance(StorageType type, LuckPermsPlugin plugin) {
return AbstractStorage.wrap(plugin, makeBacking(type, plugin));
return AbstractStorage.create(plugin, makeDao(type, plugin));
}
private static AbstractBacking makeBacking(StorageType method, LuckPermsPlugin plugin) {
private static AbstractDao makeDao(StorageType method, LuckPermsPlugin plugin) {
switch (method) {
case MARIADB:
return new SQLBacking(plugin, new MySQLProvider(
return new SqlDao(plugin, new MySqlConnectionFactory(
"MariaDB",
"org.mariadb.jdbc.MySQLDataSource",
plugin.getConfiguration().get(ConfigKeys.DATABASE_VALUES)),
plugin.getConfiguration().get(ConfigKeys.SQL_TABLE_PREFIX)
);
case MYSQL:
return new SQLBacking(plugin, new MySQLProvider(
return new SqlDao(plugin, new MySqlConnectionFactory(
"MySQL",
"com.mysql.jdbc.jdbc2.optional.MysqlDataSource",
plugin.getConfiguration().get(ConfigKeys.DATABASE_VALUES)),
plugin.getConfiguration().get(ConfigKeys.SQL_TABLE_PREFIX)
);
case SQLITE:
return new SQLBacking(plugin, new SQLiteProvider(
return new SqlDao(plugin, new SQLiteConnectionFactory(
new File(plugin.getDataDirectory(), "luckperms-sqlite.db")),
plugin.getConfiguration().get(ConfigKeys.SQL_TABLE_PREFIX)
);
case H2:
return new SQLBacking(plugin, new H2Provider(
return new SqlDao(plugin, new H2ConnectionFactory(
new File(plugin.getDataDirectory(), "luckperms-h2")),
plugin.getConfiguration().get(ConfigKeys.SQL_TABLE_PREFIX)
);
case POSTGRESQL:
return new SQLBacking(plugin, new PostgreSQLProvider(
return new SqlDao(plugin, new PostgreConnectionFactory(
plugin.getConfiguration().get(ConfigKeys.DATABASE_VALUES)),
plugin.getConfiguration().get(ConfigKeys.SQL_TABLE_PREFIX)
);
case MONGODB:
return new MongoDBBacking(
return new MongoDao(
plugin,
plugin.getConfiguration().get(ConfigKeys.DATABASE_VALUES),
plugin.getConfiguration().get(ConfigKeys.MONGODB_COLLECTION_PREFIX)
);
case YAML:
return new YAMLBacking(plugin, "yaml-storage");
return new YamlDao(plugin, "yaml-storage");
case HOCON:
return new HOCONBacking(plugin, "hocon-storage");
return new HoconDao(plugin, "hocon-storage");
default:
return new JSONBacking(plugin, "json-storage");
return new JsonDao(plugin, "json-storage");
}
}
}

View File

@ -46,7 +46,7 @@ import java.util.Set;
import java.util.UUID;
@RequiredArgsConstructor(access = AccessLevel.PROTECTED)
public abstract class AbstractBacking {
public abstract class AbstractDao {
@Getter
protected final LuckPermsPlugin plugin;

View File

@ -27,22 +27,19 @@ package me.lucko.luckperms.common.storage.backing.file;
import lombok.Getter;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Iterables;
import com.google.common.reflect.TypeToken;
import me.lucko.luckperms.api.HeldPermission;
import me.lucko.luckperms.api.LogEntry;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.context.ContextSet;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.api.context.MutableContextSet;
import me.lucko.luckperms.common.actionlog.Log;
import me.lucko.luckperms.common.bulkupdate.BulkUpdate;
import me.lucko.luckperms.common.commands.utils.Util;
import me.lucko.luckperms.common.constants.Constants;
import me.lucko.luckperms.common.contexts.ContextSetConfigurateSerializer;
import me.lucko.luckperms.common.managers.GenericUserManager;
import me.lucko.luckperms.common.managers.GroupManager;
import me.lucko.luckperms.common.managers.TrackManager;
@ -53,9 +50,9 @@ import me.lucko.luckperms.common.node.NodeHeldPermission;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.references.UserIdentifier;
import me.lucko.luckperms.common.storage.backing.AbstractBacking;
import me.lucko.luckperms.common.storage.backing.legacy.LegacyJSONSchemaMigration;
import me.lucko.luckperms.common.storage.backing.legacy.LegacyYAMLSchemaMigration;
import me.lucko.luckperms.common.storage.backing.AbstractDao;
import me.lucko.luckperms.common.storage.backing.legacy.LegacyJsonMigration;
import me.lucko.luckperms.common.storage.backing.legacy.LegacyYamlMigration;
import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.SimpleConfigurationNode;
@ -67,6 +64,7 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashSet;
@ -82,7 +80,7 @@ import java.util.logging.LogRecord;
import java.util.logging.Logger;
import java.util.stream.Collectors;
public abstract class ConfigurateBacking extends AbstractBacking {
public abstract class ConfigurateDao extends AbstractDao {
private static final String LOG_FORMAT = "%s(%s): [%s] %s(%s) --> %s";
private final Logger actionLogger = Logger.getLogger("luckperms_actions");
@ -100,7 +98,7 @@ public abstract class ConfigurateBacking extends AbstractBacking {
private File groupsDirectory;
private File tracksDirectory;
protected ConfigurateBacking(LuckPermsPlugin plugin, String name, String fileExtension, String dataFolderName) {
protected ConfigurateDao(LuckPermsPlugin plugin, String name, String fileExtension, String dataFolderName) {
super(plugin, name);
this.fileExtension = fileExtension;
this.dataFolderName = dataFolderName;
@ -210,15 +208,15 @@ public abstract class ConfigurateBacking extends AbstractBacking {
plugin.getLog().severe("Starting migration from legacy schema. This could take a while....");
plugin.getLog().severe("Please do not stop your server while the migration takes place.");
if (this instanceof YAMLBacking) {
if (this instanceof YamlDao) {
try {
new LegacyYAMLSchemaMigration(plugin, (YAMLBacking) this, oldData, data).run();
new LegacyYamlMigration(plugin, (YamlDao) this, oldData, data).run();
} catch (Exception e) {
e.printStackTrace();
}
} else if (this instanceof JSONBacking) {
} else if (this instanceof JsonDao) {
try {
new LegacyJSONSchemaMigration(plugin, (JSONBacking) this, oldData, data).run();
new LegacyJsonMigration(plugin, (JsonDao) this, oldData, data).run();
} catch (Exception e) {
e.printStackTrace();
}
@ -361,7 +359,7 @@ public abstract class ConfigurateBacking extends AbstractBacking {
reportException("bulk update", e);
return false;
}
return false;
return true;
}
@Override
@ -372,7 +370,7 @@ public abstract class ConfigurateBacking extends AbstractBacking {
ConfigurationNode object = readFile(StorageLocation.USER, uuid.toString());
if (object != null) {
String name = object.getNode("name").getString();
user.getPrimaryGroup().setStoredValue(object.getNode(this instanceof JSONBacking ? "primaryGroup" : "primary-group").getString());
user.getPrimaryGroup().setStoredValue(object.getNode(this instanceof JsonDao ? "primaryGroup" : "primary-group").getString());
Set<NodeModel> data = readNodes(object);
Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
@ -412,7 +410,7 @@ public abstract class ConfigurateBacking extends AbstractBacking {
ConfigurationNode data = SimpleConfigurationNode.root();
data.getNode("uuid").setValue(user.getUuid().toString());
data.getNode("name").setValue(user.getName().orElse("null"));
data.getNode(this instanceof JSONBacking ? "primaryGroup" : "primary-group").setValue(user.getPrimaryGroup().getStoredValue().orElse("default"));
data.getNode(this instanceof JsonDao ? "primaryGroup" : "primary-group").setValue(user.getPrimaryGroup().getStoredValue().orElse("default"));
Set<NodeModel> nodes = user.getEnduringNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
writeNodes(data, nodes);
@ -499,8 +497,11 @@ public abstract class ConfigurateBacking extends AbstractBacking {
@Override
public boolean loadGroup(String name) {
Group group = plugin.getGroupManager().getOrMake(name);
Group group = plugin.getGroupManager().getIfLoaded(name);
if (group != null) {
group.getIoLock().lock();
}
try {
ConfigurationNode object = readFile(StorageLocation.GROUP, name);
@ -508,6 +509,11 @@ public abstract class ConfigurateBacking extends AbstractBacking {
return false;
}
if (group == null) {
group = plugin.getGroupManager().getOrMake(name);
group.getIoLock().lock();
}
Set<NodeModel> data = readNodes(object);
Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
group.setEnduringNodes(nodes);
@ -515,8 +521,10 @@ public abstract class ConfigurateBacking extends AbstractBacking {
} catch (Exception e) {
return reportException(name, e);
} finally {
if (group != null) {
group.getIoLock().unlock();
}
}
return true;
}
@ -633,8 +641,11 @@ public abstract class ConfigurateBacking extends AbstractBacking {
@Override
public boolean loadTrack(String name) {
Track track = plugin.getTrackManager().getOrMake(name);
Track track = plugin.getTrackManager().getIfLoaded(name);
if (track != null) {
track.getIoLock().lock();
}
try {
ConfigurationNode object = readFile(StorageLocation.TRACK, name);
@ -642,14 +653,21 @@ public abstract class ConfigurateBacking extends AbstractBacking {
return false;
}
if (track == null) {
track = plugin.getTrackManager().getOrMake(name);
track.getIoLock().lock();
}
List<String> groups = object.getNode("groups").getList(TypeToken.of(String.class));
track.setGroups(groups);
} catch (Exception e) {
return reportException(name, e);
} finally {
if (track != null) {
track.getIoLock().unlock();
}
}
return true;
}
@ -720,6 +738,45 @@ public abstract class ConfigurateBacking extends AbstractBacking {
return uuidCache.lookupUsername(uuid);
}
private static Collection<NodeModel> readAttributes(ConfigurationNode entry, String permission) {
    // Attribute map for this node entry; every key below is optional.
    Map<Object, ? extends ConfigurationNode> attributes = entry.getChildrenMap();

    // Fall back to the implicit defaults when an attribute is absent.
    boolean value = attributes.containsKey("value") ? attributes.get("value").getBoolean() : true;
    String server = attributes.containsKey("server") ? attributes.get("server").getString() : "global";
    String world = attributes.containsKey("world") ? attributes.get("world").getString() : "global";
    long expiry = attributes.containsKey("expiry") ? attributes.get("expiry").getLong() : 0L;

    ImmutableContextSet context = ImmutableContextSet.empty();
    ConfigurationNode contexts = attributes.get("context");
    if (contexts != null && contexts.hasMapChildren()) {
        context = ContextSetConfigurateSerializer.deserializeContextSet(contexts).makeImmutable();
    }

    // "luckperms.batch" entries carry a "permissions" list; each listed
    // permission shares the attributes parsed above.
    ConfigurationNode batch = attributes.get("permissions");
    if (permission.startsWith("luckperms.batch") && batch != null && batch.hasListChildren()) {
        List<NodeModel> nodes = new ArrayList<>();
        for (ConfigurationNode child : batch.getChildrenList()) {
            nodes.add(NodeModel.of(child.getString(), value, server, world, expiry, context));
        }
        return nodes;
    }
    return Collections.singleton(NodeModel.of(permission, value, server, world, expiry, context));
}
private static Set<NodeModel> readNodes(ConfigurationNode data) {
Set<NodeModel> nodes = new HashSet<>();
@ -743,66 +800,40 @@ public abstract class ConfigurateBacking extends AbstractBacking {
}
String permission = entry.getKey().toString();
Map<Object, ? extends ConfigurationNode> attributes = entry.getValue().getChildrenMap();
boolean value = true;
String server = "global";
String world = "global";
long expiry = 0L;
ImmutableContextSet context = ImmutableContextSet.empty();
if (attributes.containsKey("value")) {
value = attributes.get("value").getBoolean();
nodes.addAll(readAttributes(entry.getValue(), permission));
}
if (attributes.containsKey("server")) {
server = attributes.get("server").getString();
}
if (attributes.containsKey("world")) {
world = attributes.get("world").getString();
}
if (attributes.containsKey("expiry")) {
expiry = attributes.get("expiry").getLong();
}
if (attributes.containsKey("context") && attributes.get("context").hasMapChildren()) {
ConfigurationNode contexts = attributes.get("context");
context = deserializeContextSet(contexts).makeImmutable();
if (data.getNode("parents").hasListChildren()) {
List<? extends ConfigurationNode> parts = data.getNode("parents").getChildrenList();
for (ConfigurationNode ent : parts) {
String stringValue = ent.getValue(Types::strictAsString);
if (stringValue != null) {
nodes.add(NodeModel.of("group." + stringValue, true, "global", "global", 0L, ImmutableContextSet.empty()));
continue;
}
final ConfigurationNode batchAttribute = attributes.get("permissions");
if (permission.startsWith("luckperms.batch") && batchAttribute != null && batchAttribute.hasListChildren()) {
for (ConfigurationNode element : batchAttribute.getChildrenList()) {
nodes.add(NodeModel.of(element.getString(), value, server, world, expiry, context));
if (!ent.hasMapChildren()) {
continue;
}
} else {
nodes.add(NodeModel.of(permission, value, server, world, expiry, context));
Map.Entry<Object, ? extends ConfigurationNode> entry = Iterables.getFirst(ent.getChildrenMap().entrySet(), null);
if (entry == null || !entry.getValue().hasMapChildren()) {
continue;
}
String permission = "group." + entry.getKey().toString();
nodes.addAll(readAttributes(entry.getValue(), permission));
}
}
return nodes;
}
private static void writeNodes(ConfigurationNode to, Set<NodeModel> nodes) {
ConfigurationNode arr = SimpleConfigurationNode.root();
for (NodeModel node : nodes) {
// just a raw, default node.
boolean single = node.isValue() &&
node.getServer().equalsIgnoreCase("global") &&
node.getWorld().equalsIgnoreCase("global") &&
node.getExpiry() == 0L &&
node.getContexts().isEmpty();
// just add a string to the list.
if (single) {
arr.getAppendedNode().setValue(node.getPermission());
continue;
}
private static ConfigurationNode writeAttributes(NodeModel node) {
ConfigurationNode attributes = SimpleConfigurationNode.root();
attributes.getNode("value").setValue(node.isValue());
attributes.getNode("value").setValue(node.getValue());
if (!node.getServer().equals("global")) {
attributes.getNode("server").setValue(node.getServer());
@ -817,55 +848,54 @@ public abstract class ConfigurateBacking extends AbstractBacking {
}
if (!node.getContexts().isEmpty()) {
attributes.getNode("context").setValue(serializeContextSet(node.getContexts()));
attributes.getNode("context").setValue(ContextSetConfigurateSerializer.serializeContextSet(node.getContexts()));
}
ConfigurationNode perm = SimpleConfigurationNode.root();
perm.getNode(node.getPermission()).setValue(attributes);
arr.getAppendedNode().setValue(perm);
return attributes;
}
to.getNode("permissions").setValue(arr);
private static void writeNodes(ConfigurationNode to, Set<NodeModel> nodes) {
ConfigurationNode permsSection = SimpleConfigurationNode.root();
ConfigurationNode parentsSection = SimpleConfigurationNode.root();
for (NodeModel node : nodes) {
// just a raw, default node.
boolean single = node.getValue() &&
node.getServer().equalsIgnoreCase("global") &&
node.getWorld().equalsIgnoreCase("global") &&
node.getExpiry() == 0L &&
node.getContexts().isEmpty();
// try to parse out the group
String group = node.toNode().isGroupNode() ? node.toNode().getGroupName() : null;
// just add a string to the list.
if (single) {
if (group != null) {
parentsSection.getAppendedNode().setValue(group);
continue;
}
private static ConfigurationNode serializeContextSet(ContextSet contextSet) {
ConfigurationNode data = SimpleConfigurationNode.root();
Map<String, Collection<String>> map = contextSet.toMultimap().asMap();
map.forEach((k, v) -> {
List<String> values = new ArrayList<>(v);
int size = values.size();
if (size == 1) {
data.getNode(k).setValue(values.get(0));
} else if (size > 1) {
data.getNode(k).setValue(values);
}
});
return data;
permsSection.getAppendedNode().setValue(node.getPermission());
continue;
}
private static MutableContextSet deserializeContextSet(ConfigurationNode data) {
Preconditions.checkArgument(data.hasMapChildren());
Map<Object, ? extends ConfigurationNode> dataMap = data.getChildrenMap();
ImmutableSetMultimap.Builder<String, String> map = ImmutableSetMultimap.builder();
for (Map.Entry<Object, ? extends ConfigurationNode> e : dataMap.entrySet()) {
String k = e.getKey().toString();
ConfigurationNode v = e.getValue();
if (v.hasListChildren()) {
List<? extends ConfigurationNode> values = v.getChildrenList();
for (ConfigurationNode value : values) {
map.put(k, value.getString());
}
} else {
map.put(k, v.getString());
}
if (group != null) {
ConfigurationNode ent = SimpleConfigurationNode.root();
ent.getNode(group).setValue(writeAttributes(node));
parentsSection.getAppendedNode().setValue(ent);
continue;
}
return MutableContextSet.fromMultimap(map.build());
ConfigurationNode ent = SimpleConfigurationNode.root();
ent.getNode(node.getPermission()).setValue(writeAttributes(node));
permsSection.getAppendedNode().setValue(ent);
}
to.getNode("permissions").setValue(permsSection);
to.getNode("parents").setValue(parentsSection);
}
}

View File

@ -35,9 +35,9 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
public class HOCONBacking extends ConfigurateBacking {
public class HoconDao extends ConfigurateDao {
public HOCONBacking(LuckPermsPlugin plugin, String dataFolderName) {
public HoconDao(LuckPermsPlugin plugin, String dataFolderName) {
super(plugin, "HOCON", ".conf", dataFolderName);
}

View File

@ -35,9 +35,9 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
public class JSONBacking extends ConfigurateBacking {
public class JsonDao extends ConfigurateDao {
public JSONBacking(LuckPermsPlugin plugin, String dataFolderName) {
public JsonDao(LuckPermsPlugin plugin, String dataFolderName) {
super(plugin, "JSON", ".json", dataFolderName);
}

View File

@ -37,9 +37,9 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
public class YAMLBacking extends ConfigurateBacking {
public class YamlDao extends ConfigurateDao {
public YAMLBacking(LuckPermsPlugin plugin, String dataFolderName) {
public YamlDao(LuckPermsPlugin plugin, String dataFolderName) {
super(plugin, "YAML", ".yml", dataFolderName);
}

View File

@ -34,10 +34,11 @@ import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer;
import me.lucko.luckperms.common.node.NodeFactory;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.file.JSONBacking;
import me.lucko.luckperms.common.storage.backing.file.JsonDao;
import java.io.BufferedReader;
import java.io.BufferedWriter;
@ -53,23 +54,21 @@ import java.util.stream.Collectors;
@SuppressWarnings("unchecked")
@RequiredArgsConstructor
public class LegacyJSONSchemaMigration implements Runnable {
public class LegacyJsonMigration implements Runnable {
private final Gson gson = new GsonBuilder().setPrettyPrinting().create();
private final LuckPermsPlugin plugin;
private final JSONBacking backing;
private final JsonDao backing;
private final File oldDataFolder;
private final File newDataFolder;
private boolean writeElementToFile(File file, JsonElement element) {
private void writeElementToFile(File file, JsonElement element) {
try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
gson.toJson(element, writer);
writer.flush();
return true;
} catch (Throwable t) {
plugin.getLog().warn("Exception whilst writing to file: " + file.getAbsolutePath());
t.printStackTrace();
return false;
}
}
@ -213,7 +212,7 @@ public class LegacyJSONSchemaMigration implements Runnable {
for (NodeModel node : nodes) {
// just a raw, default node.
boolean single = node.isValue() &&
boolean single = node.getValue() &&
node.getServer().equalsIgnoreCase("global") &&
node.getWorld().equalsIgnoreCase("global") &&
node.getExpiry() == 0L &&
@ -226,7 +225,7 @@ public class LegacyJSONSchemaMigration implements Runnable {
}
JsonObject attributes = new JsonObject();
attributes.addProperty("value", node.isValue());
attributes.addProperty("value", node.getValue());
if (!node.getServer().equals("global")) {
attributes.addProperty("server", node.getServer());
@ -241,7 +240,7 @@ public class LegacyJSONSchemaMigration implements Runnable {
}
if (!node.getContexts().isEmpty()) {
attributes.add("context", node.getContextsAsJson());
attributes.add("context", ContextSetJsonSerializer.serializeContextSet(node.getContexts()));
}
JsonObject perm = new JsonObject();

View File

@ -30,9 +30,10 @@ import lombok.RequiredArgsConstructor;
import com.google.common.collect.Lists;
import com.google.gson.reflect.TypeToken;
import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer;
import me.lucko.luckperms.common.node.NodeFactory;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.storage.backing.sql.SQLBacking;
import me.lucko.luckperms.common.storage.backing.sql.SqlDao;
import java.lang.reflect.Type;
import java.sql.Connection;
@ -50,9 +51,9 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
@RequiredArgsConstructor
public class LegacySQLSchemaMigration implements Runnable {
public class LegacySqlMigration implements Runnable {
private static final Type NODE_MAP_TYPE = new TypeToken<Map<String, Boolean>>() {}.getType();
private final SQLBacking backing;
private final SqlDao backing;
@Override
public void run() {
@ -162,11 +163,11 @@ public class LegacySQLSchemaMigration implements Runnable {
for (NodeModel nd : nodes) {
ps.setString(1, uuid.toString());
ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.isValue());
ps.setBoolean(3, nd.getValue());
ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry());
ps.setString(7, nd.serializeContext());
ps.setString(7, backing.getGson().toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts())));
ps.addBatch();
}
ps.executeBatch();
@ -241,11 +242,11 @@ public class LegacySQLSchemaMigration implements Runnable {
for (NodeModel nd : nodes) {
ps.setString(1, name);
ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.isValue());
ps.setBoolean(3, nd.getValue());
ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry());
ps.setString(7, nd.serializeContext());
ps.setString(7, backing.getGson().toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts())));
ps.addBatch();
}
ps.executeBatch();

View File

@ -30,7 +30,7 @@ import lombok.RequiredArgsConstructor;
import me.lucko.luckperms.common.node.NodeFactory;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.file.YAMLBacking;
import me.lucko.luckperms.common.storage.backing.file.YamlDao;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
@ -53,12 +53,14 @@ import java.util.stream.Collectors;
@SuppressWarnings("unchecked")
@RequiredArgsConstructor
public class LegacyYAMLSchemaMigration implements Runnable {
public class LegacyYamlMigration implements Runnable {
private final LuckPermsPlugin plugin;
private final YAMLBacking backing;
private final YamlDao backing;
private final File oldDataFolder;
private final File newDataFolder;
private final Yaml yaml = getYaml();
private static Yaml getYaml() {
DumperOptions options = new DumperOptions();
options.setAllowUnicode(true);
@ -66,21 +68,19 @@ public class LegacyYAMLSchemaMigration implements Runnable {
return new Yaml(options);
}
public boolean writeMapToFile(File file, Map<String, Object> values) {
public void writeMapToFile(File file, Map<String, Object> values) {
try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
getYaml().dump(values, writer);
yaml.dump(values, writer);
writer.flush();
return true;
} catch (Throwable t) {
plugin.getLog().warn("Exception whilst writing to file: " + file.getAbsolutePath());
t.printStackTrace();
return false;
}
}
public Map<String, Object> readMapFromFile(File file) {
try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
return (Map<String, Object>) getYaml().load(reader);
return (Map<String, Object>) yaml.load(reader);
} catch (Throwable t) {
plugin.getLog().warn("Exception whilst reading from file: " + file.getAbsolutePath());
t.printStackTrace();
@ -211,7 +211,7 @@ public class LegacyYAMLSchemaMigration implements Runnable {
for (NodeModel node : nodes) {
// just a raw, default node.
boolean single = node.isValue() &&
boolean single = node.getValue() &&
node.getServer().equalsIgnoreCase("global") &&
node.getWorld().equalsIgnoreCase("global") &&
node.getExpiry() == 0L &&
@ -228,7 +228,7 @@ public class LegacyYAMLSchemaMigration implements Runnable {
// create a map of node attributes
Map<String, Object> attributes = new LinkedHashMap<>();
attributes.put("value", node.isValue());
attributes.put("value", node.getValue());
if (!node.getServer().equals("global")) {
attributes.put("server", node.getServer());

View File

@ -53,7 +53,7 @@ import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.references.UserIdentifier;
import me.lucko.luckperms.common.storage.DatastoreConfiguration;
import me.lucko.luckperms.common.storage.backing.AbstractBacking;
import me.lucko.luckperms.common.storage.backing.AbstractDao;
import org.bson.Document;
@ -64,24 +64,14 @@ import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Objects;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.function.Function;
import java.util.stream.Collectors;
@SuppressWarnings("unchecked")
public class MongoDBBacking extends AbstractBacking {
private static <T> T call(Callable<T> c, T def) {
try {
return c.call();
} catch (Exception e) {
e.printStackTrace();
return def;
}
}
public class MongoDao extends AbstractDao {
private final DatastoreConfiguration configuration;
private MongoClient mongoClient;
@ -90,12 +80,18 @@ public class MongoDBBacking extends AbstractBacking {
@Getter
private final String prefix;
public MongoDBBacking(LuckPermsPlugin plugin, DatastoreConfiguration configuration, String prefix) {
public MongoDao(LuckPermsPlugin plugin, DatastoreConfiguration configuration, String prefix) {
super(plugin, "MongoDB");
this.configuration = configuration;
this.prefix = prefix;
}
// Logs an unexpected i/o exception through the plugin logger and dumps the
// stack trace. Always returns false so boolean-returning storage operations
// can fail with a plain `return reportException(e);` from their catch block.
private boolean reportException(Exception ex) {
plugin.getLog().warn("Exception thrown whilst performing i/o: ");
ex.printStackTrace();
return false;
}
@Override
public void init() {
MongoCredential credential = null;
@ -162,7 +158,7 @@ public class MongoDBBacking extends AbstractBacking {
@Override
public boolean logAction(LogEntry entry) {
return call(() -> {
try {
MongoCollection<Document> c = database.getCollection(prefix + "action");
//noinspection deprecation
@ -179,14 +175,16 @@ public class MongoDBBacking extends AbstractBacking {
}
c.insertOne(doc, new InsertOneOptions());
} catch (Exception e) {
return reportException(e);
}
return true;
}, false);
}
@Override
public Log getLog() {
return call(() -> {
final Log.Builder log = Log.builder();
Log.Builder log = Log.builder();
try {
MongoCollection<Document> c = database.getCollection(prefix + "action");
try (MongoCursor<Document> cursor = c.find().iterator()) {
@ -210,14 +208,16 @@ public class MongoDBBacking extends AbstractBacking {
log.add(e);
}
}
} catch (Exception e) {
reportException(e);
return null;
}
return log.build();
}, null);
}
@Override
public boolean applyBulkUpdate(BulkUpdate bulkUpdate) {
return call(() -> {
try {
if (bulkUpdate.getDataType().isIncludingUsers()) {
MongoCollection<Document> c = database.getCollection(prefix + "users");
@ -234,15 +234,14 @@ public class MongoDBBacking extends AbstractBacking {
nodes.add(NodeModel.fromNode(node));
}
Set<Node> results = nodes.stream()
.map(n -> Optional.ofNullable(bulkUpdate.apply(n)))
.filter(Optional::isPresent)
.map(Optional::get)
.map(NodeModel::toNode)
Set<NodeModel> results = nodes.stream()
.map(bulkUpdate::apply)
.filter(Objects::nonNull)
.collect(Collectors.toSet());
if (!nodes.equals(results)) {
Document permsDoc = new Document();
for (Map.Entry<String, Boolean> e : convert(exportToLegacy(results)).entrySet()) {
for (Map.Entry<String, Boolean> e : convert(exportToLegacy(results.stream().map(NodeModel::toNode).collect(Collectors.toList()))).entrySet()) {
permsDoc.append(e.getKey(), e.getValue());
}
@ -251,6 +250,7 @@ public class MongoDBBacking extends AbstractBacking {
}
}
}
}
if (bulkUpdate.getDataType().isIncludingGroups()) {
MongoCollection<Document> c = database.getCollection(prefix + "groups");
@ -268,15 +268,14 @@ public class MongoDBBacking extends AbstractBacking {
nodes.add(NodeModel.fromNode(node));
}
Set<Node> results = nodes.stream()
.map(n -> Optional.ofNullable(bulkUpdate.apply(n)))
.filter(Optional::isPresent)
.map(Optional::get)
.map(NodeModel::toNode)
Set<NodeModel> results = nodes.stream()
.map(bulkUpdate::apply)
.filter(Objects::nonNull)
.collect(Collectors.toSet());
if (!nodes.equals(results)) {
Document permsDoc = new Document();
for (Map.Entry<String, Boolean> e : convert(exportToLegacy(results)).entrySet()) {
for (Map.Entry<String, Boolean> e : convert(exportToLegacy(results.stream().map(NodeModel::toNode).collect(Collectors.toList()))).entrySet()) {
permsDoc.append(e.getKey(), e.getValue());
}
@ -285,9 +284,12 @@ public class MongoDBBacking extends AbstractBacking {
}
}
}
}
} catch (Exception e) {
reportException(e);
return false;
}
return true;
}, false);
}
@Override
@ -295,7 +297,6 @@ public class MongoDBBacking extends AbstractBacking {
User user = plugin.getUserManager().getOrMake(UserIdentifier.of(uuid, username));
user.getIoLock().lock();
try {
return call(() -> {
MongoCollection<Document> c = database.getCollection(prefix + "users");
try (MongoCursor<Document> cursor = c.find(new Document("_id", user.getUuid())).iterator()) {
@ -325,31 +326,24 @@ public class MongoDBBacking extends AbstractBacking {
}
}
}
return true;
}, false);
} catch (Exception e) {
return reportException(e);
} finally {
user.getIoLock().unlock();
user.getRefreshBuffer().requestDirectly();
}
return true;
}
@Override
public boolean saveUser(User user) {
if (!GenericUserManager.shouldSave(user)) {
user.getIoLock().lock();
try {
return call(() -> {
if (!GenericUserManager.shouldSave(user)) {
MongoCollection<Document> c = database.getCollection(prefix + "users");
return c.deleteOne(new Document("_id", user.getUuid())).wasAcknowledged();
}, false);
} finally {
user.getIoLock().unlock();
}
}
user.getIoLock().lock();
try {
return call(() -> {
MongoCollection<Document> c = database.getCollection(prefix + "users");
try (MongoCursor<Document> cursor = c.find(new Document("_id", user.getUuid())).iterator()) {
if (!cursor.hasNext()) {
@ -358,17 +352,18 @@ public class MongoDBBacking extends AbstractBacking {
c.replaceOne(new Document("_id", user.getUuid()), fromUser(user));
}
}
return true;
}, false);
} catch (Exception e) {
return reportException(e);
} finally {
user.getIoLock().unlock();
}
return true;
}
@Override
public Set<UUID> getUniqueUsers() {
Set<UUID> uuids = new HashSet<>();
boolean success = call(() -> {
try {
MongoCollection<Document> c = database.getCollection(prefix + "users");
try (MongoCursor<Document> cursor = c.find().iterator()) {
@ -377,17 +372,16 @@ public class MongoDBBacking extends AbstractBacking {
uuids.add(d.get("_id", UUID.class));
}
}
return true;
}, false);
return success ? uuids : null;
} catch (Exception e) {
return null;
}
return uuids;
}
@Override
public List<HeldPermission<UUID>> getUsersWithPermission(String permission) {
ImmutableList.Builder<HeldPermission<UUID>> held = ImmutableList.builder();
boolean success = call(() -> {
try {
MongoCollection<Document> c = database.getCollection(prefix + "users");
try (MongoCursor<Document> cursor = c.find().iterator()) {
@ -407,10 +401,11 @@ public class MongoDBBacking extends AbstractBacking {
}
}
}
return true;
}, false);
return success ? held.build() : null;
} catch (Exception e) {
reportException(e);
return null;
}
return held.build();
}
@Override
@ -418,7 +413,6 @@ public class MongoDBBacking extends AbstractBacking {
Group group = plugin.getGroupManager().getOrMake(name);
group.getIoLock().lock();
try {
return call(() -> {
MongoCollection<Document> c = database.getCollection(prefix + "groups");
try (MongoCursor<Document> cursor = c.find(new Document("_id", group.getName())).iterator()) {
@ -433,11 +427,12 @@ public class MongoDBBacking extends AbstractBacking {
c.insertOne(fromGroup(group));
}
}
return true;
}, false);
} catch (Exception e) {
return reportException(e);
} finally {
group.getIoLock().unlock();
}
return true;
}
@Override
@ -445,7 +440,6 @@ public class MongoDBBacking extends AbstractBacking {
Group group = plugin.getGroupManager().getOrMake(name);
group.getIoLock().lock();
try {
return call(() -> {
MongoCollection<Document> c = database.getCollection(prefix + "groups");
try (MongoCursor<Document> cursor = c.find(new Document("_id", group.getName())).iterator()) {
@ -460,7 +454,8 @@ public class MongoDBBacking extends AbstractBacking {
}
return false;
}
}, false);
} catch (Exception e) {
return reportException(e);
} finally {
group.getIoLock().unlock();
}
@ -469,39 +464,36 @@ public class MongoDBBacking extends AbstractBacking {
@Override
public boolean loadAllGroups() {
List<String> groups = new ArrayList<>();
boolean success = call(() -> {
try {
MongoCollection<Document> c = database.getCollection(prefix + "groups");
boolean b = true;
try (MongoCursor<Document> cursor = c.find().iterator()) {
while (cursor.hasNext()) {
String name = cursor.next().getString("_id");
if (!loadGroup(name)) {
b = false;
}
loadGroup(name);
groups.add(name);
}
}
return b;
}, false);
} catch (Exception e) {
reportException(e);
return false;
}
if (success) {
GroupManager gm = plugin.getGroupManager();
gm.getAll().values().stream()
.filter(g -> !groups.contains(g.getName()))
.forEach(gm::unload);
}
return success;
return true;
}
@Override
public boolean saveGroup(Group group) {
group.getIoLock().lock();
try {
return call(() -> {
MongoCollection<Document> c = database.getCollection(prefix + "groups");
return c.replaceOne(new Document("_id", group.getName()), fromGroup(group)).wasAcknowledged();
}, false);
} catch (Exception e) {
return reportException(e);
} finally {
group.getIoLock().unlock();
}
@ -510,24 +502,25 @@ public class MongoDBBacking extends AbstractBacking {
@Override
public boolean deleteGroup(Group group) {
group.getIoLock().lock();
boolean success;
try {
success = call(() -> {
MongoCollection<Document> c = database.getCollection(prefix + "groups");
return c.deleteOne(new Document("_id", group.getName())).wasAcknowledged();
}, false);
if (!c.deleteOne(new Document("_id", group.getName())).wasAcknowledged()) {
throw new RuntimeException();
}
} catch (Exception e) {
return reportException(e);
} finally {
group.getIoLock().unlock();
}
if (success) plugin.getGroupManager().unload(group);
return success;
plugin.getGroupManager().unload(group);
return true;
}
@Override
public List<HeldPermission<String>> getGroupsWithPermission(String permission) {
ImmutableList.Builder<HeldPermission<String>> held = ImmutableList.builder();
boolean success = call(() -> {
try {
MongoCollection<Document> c = database.getCollection(prefix + "groups");
try (MongoCursor<Document> cursor = c.find().iterator()) {
@ -547,10 +540,11 @@ public class MongoDBBacking extends AbstractBacking {
}
}
}
return true;
}, false);
return success ? held.build() : null;
} catch (Exception e) {
reportException(e);
return null;
}
return held.build();
}
@Override
@ -558,7 +552,6 @@ public class MongoDBBacking extends AbstractBacking {
Track track = plugin.getTrackManager().getOrMake(name);
track.getIoLock().lock();
try {
return call(() -> {
MongoCollection<Document> c = database.getCollection(prefix + "tracks");
try (MongoCursor<Document> cursor = c.find(new Document("_id", track.getName())).iterator()) {
@ -569,11 +562,12 @@ public class MongoDBBacking extends AbstractBacking {
track.setGroups((List<String>) d.get("groups"));
}
}
return true;
}, false);
} catch (Exception e) {
return reportException(e);
} finally {
track.getIoLock().unlock();
}
return true;
}
@Override
@ -581,7 +575,6 @@ public class MongoDBBacking extends AbstractBacking {
Track track = plugin.getTrackManager().getOrMake(name);
track.getIoLock().lock();
try {
return call(() -> {
MongoCollection<Document> c = database.getCollection(prefix + "tracks");
try (MongoCursor<Document> cursor = c.find(new Document("_id", track.getName())).iterator()) {
@ -592,7 +585,8 @@ public class MongoDBBacking extends AbstractBacking {
}
return false;
}
}, false);
} catch (Exception e) {
return reportException(e);
} finally {
track.getIoLock().unlock();
}
@ -601,39 +595,36 @@ public class MongoDBBacking extends AbstractBacking {
@Override
public boolean loadAllTracks() {
List<String> tracks = new ArrayList<>();
boolean success = call(() -> {
try {
MongoCollection<Document> c = database.getCollection(prefix + "tracks");
boolean b = true;
try (MongoCursor<Document> cursor = c.find().iterator()) {
while (cursor.hasNext()) {
String name = cursor.next().getString("_id");
if (!loadTrack(name)) {
b = false;
}
loadTrack(name);
tracks.add(name);
}
}
return b;
}, false);
} catch (Exception e) {
reportException(e);
return false;
}
if (success) {
TrackManager tm = plugin.getTrackManager();
tm.getAll().values().stream()
.filter(t -> !tracks.contains(t.getName()))
.forEach(tm::unload);
}
return success;
return true;
}
@Override
public boolean saveTrack(Track track) {
track.getIoLock().lock();
try {
return call(() -> {
MongoCollection<Document> c = database.getCollection(prefix + "tracks");
return c.replaceOne(new Document("_id", track.getName()), fromTrack(track)).wasAcknowledged();
}, false);
} catch (Exception e) {
return reportException(e);
} finally {
track.getIoLock().unlock();
}
@ -642,23 +633,24 @@ public class MongoDBBacking extends AbstractBacking {
@Override
public boolean deleteTrack(Track track) {
track.getIoLock().lock();
boolean success;
try {
success = call(() -> {
MongoCollection<Document> c = database.getCollection(prefix + "tracks");
return c.deleteOne(new Document("_id", track.getName())).wasAcknowledged();
}, false);
if (!c.deleteOne(new Document("_id", track.getName())).wasAcknowledged()) {
throw new RuntimeException();
}
} catch (Exception e) {
return reportException(e);
} finally {
track.getIoLock().unlock();
}
if (success) plugin.getTrackManager().unload(track);
return success;
plugin.getTrackManager().unload(track);
return true;
}
@Override
public boolean saveUUIDData(UUID uuid, String username) {
return call(() -> {
try {
MongoCollection<Document> c = database.getCollection(prefix + "uuid");
try (MongoCursor<Document> cursor = c.find(new Document("_id", uuid)).iterator()) {
@ -668,14 +660,15 @@ public class MongoDBBacking extends AbstractBacking {
c.insertOne(new Document("_id", uuid).append("name", username.toLowerCase()));
}
}
} catch (Exception e) {
return reportException(e);
}
return true;
}, false);
}
@Override
public UUID getUUID(String username) {
return call(() -> {
try {
MongoCollection<Document> c = database.getCollection(prefix + "uuid");
try (MongoCursor<Document> cursor = c.find(new Document("name", username.toLowerCase())).iterator()) {
@ -684,12 +677,15 @@ public class MongoDBBacking extends AbstractBacking {
}
}
return null;
}, null);
} catch (Exception e) {
reportException(e);
return null;
}
}
@Override
public String getName(UUID uuid) {
return call(() -> {
try {
MongoCollection<Document> c = database.getCollection(prefix + "uuid");
try (MongoCursor<Document> cursor = c.find(new Document("_id", uuid)).iterator()) {
@ -698,7 +694,10 @@ public class MongoDBBacking extends AbstractBacking {
}
}
return null;
}, null);
} catch (Exception e) {
reportException(e);
return null;
}
}
/* MongoDB does not allow '.' or '$' in key names.

View File

@ -32,21 +32,17 @@ import java.sql.Connection;
import java.sql.SQLException;
@AllArgsConstructor
public class WrappedConnection implements Connection {
public class NonClosableConnection implements Connection {
@Delegate(excludes = Exclude.class)
private Connection delegate;
private final boolean shouldClose;
@Override
public void close() throws SQLException {
if (shouldClose) {
delegate.close();
}
}
private interface Exclude {
void close();
void close() throws SQLException;
}
}

View File

@ -30,6 +30,7 @@ import lombok.Getter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import com.google.gson.reflect.TypeToken;
import me.lucko.luckperms.api.HeldPermission;
@ -37,6 +38,7 @@ import me.lucko.luckperms.api.LogEntry;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.common.actionlog.Log;
import me.lucko.luckperms.common.bulkupdate.BulkUpdate;
import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer;
import me.lucko.luckperms.common.managers.GenericUserManager;
import me.lucko.luckperms.common.managers.GroupManager;
import me.lucko.luckperms.common.managers.TrackManager;
@ -47,9 +49,9 @@ import me.lucko.luckperms.common.node.NodeHeldPermission;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.references.UserIdentifier;
import me.lucko.luckperms.common.storage.backing.AbstractBacking;
import me.lucko.luckperms.common.storage.backing.legacy.LegacySQLSchemaMigration;
import me.lucko.luckperms.common.storage.backing.sql.provider.SQLProvider;
import me.lucko.luckperms.common.storage.backing.AbstractDao;
import me.lucko.luckperms.common.storage.backing.legacy.LegacySqlMigration;
import me.lucko.luckperms.common.storage.backing.sql.provider.AbstractConnectionFactory;
import java.io.BufferedReader;
import java.io.InputStream;
@ -72,7 +74,7 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Function;
import java.util.stream.Collectors;
public class SQLBacking extends AbstractBacking {
public class SqlDao extends AbstractDao {
private static final Type LIST_STRING_TYPE = new TypeToken<List<String>>(){}.getType();
private static final String USER_PERMISSIONS_SELECT = "SELECT permission, value, server, world, expiry, contexts FROM {prefix}user_permissions WHERE uuid=?";
@ -118,12 +120,12 @@ public class SQLBacking extends AbstractBacking {
private final Gson gson;
@Getter
private final SQLProvider provider;
private final AbstractConnectionFactory provider;
@Getter
private final Function<String, String> prefix;
public SQLBacking(LuckPermsPlugin plugin, SQLProvider provider, String prefix) {
public SqlDao(LuckPermsPlugin plugin, AbstractConnectionFactory provider, String prefix) {
super(plugin, provider.getName());
this.provider = provider;
this.prefix = s -> s.replace("{prefix}", prefix);
@ -189,7 +191,7 @@ public class SQLBacking extends AbstractBacking {
plugin.getLog().severe("Starting migration from legacy schema. This could take a while....");
plugin.getLog().severe("Please do not stop your server while the migration takes place.");
new LegacySQLSchemaMigration(this).run();
new LegacySqlMigration(this).run();
}
}
@ -322,7 +324,7 @@ public class SQLBacking extends AbstractBacking {
String world = rs.getString("world");
long expiry = rs.getLong("expiry");
String contexts = rs.getString("contexts");
data.add(NodeModel.deserialize(permission, value, server, world, expiry, contexts));
data.add(deserializeNode(permission, value, server, world, expiry, contexts));
}
}
}
@ -422,7 +424,7 @@ public class SQLBacking extends AbstractBacking {
String world = rs.getString("world");
long expiry = rs.getLong("expiry");
String contexts = rs.getString("contexts");
remote.add(NodeModel.deserialize(permission, value, server, world, expiry, contexts));
remote.add(deserializeNode(permission, value, server, world, expiry, contexts));
}
}
}
@ -443,11 +445,11 @@ public class SQLBacking extends AbstractBacking {
for (NodeModel nd : toRemove) {
ps.setString(1, user.getUuid().toString());
ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.isValue());
ps.setBoolean(3, nd.getValue());
ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry());
ps.setString(7, nd.serializeContext());
ps.setString(7, gson.toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts())));
ps.addBatch();
}
ps.executeBatch();
@ -464,11 +466,11 @@ public class SQLBacking extends AbstractBacking {
for (NodeModel nd : toAdd) {
ps.setString(1, user.getUuid().toString());
ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.isValue());
ps.setBoolean(3, nd.getValue());
ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry());
ps.setString(7, nd.serializeContext());
ps.setString(7, gson.toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts())));
ps.addBatch();
}
ps.executeBatch();
@ -551,7 +553,7 @@ public class SQLBacking extends AbstractBacking {
long expiry = rs.getLong("expiry");
String contexts = rs.getString("contexts");
NodeModel data = NodeModel.deserialize(permission, value, server, world, expiry, contexts);
NodeModel data = deserializeNode(permission, value, server, world, expiry, contexts);
held.add(NodeHeldPermission.of(holder, data));
}
}
@ -633,7 +635,7 @@ public class SQLBacking extends AbstractBacking {
String world = rs.getString("world");
long expiry = rs.getLong("expiry");
String contexts = rs.getString("contexts");
data.add(NodeModel.deserialize(permission, value, server, world, expiry, contexts));
data.add(deserializeNode(permission, value, server, world, expiry, contexts));
}
}
}
@ -720,7 +722,7 @@ public class SQLBacking extends AbstractBacking {
String world = rs.getString("world");
long expiry = rs.getLong("expiry");
String contexts = rs.getString("contexts");
remote.add(NodeModel.deserialize(permission, value, server, world, expiry, contexts));
remote.add(deserializeNode(permission, value, server, world, expiry, contexts));
}
}
}
@ -742,11 +744,11 @@ public class SQLBacking extends AbstractBacking {
for (NodeModel nd : toRemove) {
ps.setString(1, group.getName());
ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.isValue());
ps.setBoolean(3, nd.getValue());
ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry());
ps.setString(7, nd.serializeContext());
ps.setString(7, gson.toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts())));
ps.addBatch();
}
ps.executeBatch();
@ -763,11 +765,11 @@ public class SQLBacking extends AbstractBacking {
for (NodeModel nd : toAdd) {
ps.setString(1, group.getName());
ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.isValue());
ps.setBoolean(3, nd.getValue());
ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry());
ps.setString(7, nd.serializeContext());
ps.setString(7, gson.toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts())));
ps.addBatch();
}
ps.executeBatch();
@ -825,7 +827,7 @@ public class SQLBacking extends AbstractBacking {
long expiry = rs.getLong("expiry");
String contexts = rs.getString("contexts");
NodeModel data = NodeModel.deserialize(permission, value, server, world, expiry, contexts);
NodeModel data = deserializeNode(permission, value, server, world, expiry, contexts);
held.add(NodeHeldPermission.of(holder, data));
}
}
@ -1113,4 +1115,9 @@ public class SQLBacking extends AbstractBacking {
return Maps.immutableEntry(toAdd, toRemove);
}
private NodeModel deserializeNode(String permission, boolean value, String server, String world, long expiry, String contexts) {
JsonObject context = gson.fromJson(contexts, JsonObject.class);
return NodeModel.of(permission, value, server, world, expiry, ContextSetJsonSerializer.deserializeContextSet(context).makeImmutable());
}
}

View File

@ -28,14 +28,13 @@ package me.lucko.luckperms.common.storage.backing.sql.provider;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import me.lucko.luckperms.common.storage.backing.sql.WrappedConnection;
import java.sql.Connection;
import java.sql.SQLException;
import java.util.Collections;
import java.util.Map;
@RequiredArgsConstructor
public abstract class SQLProvider {
public abstract class AbstractConnectionFactory {
@Getter
private final String name;
@ -48,6 +47,6 @@ public abstract class SQLProvider {
return Collections.emptyMap();
}
public abstract WrappedConnection getConnection() throws SQLException;
public abstract Connection getConnection() throws SQLException;
}

View File

@ -23,9 +23,10 @@
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.backing.sql.provider;
package me.lucko.luckperms.common.storage.backing.sql.provider.file;
import me.lucko.luckperms.common.storage.backing.sql.WrappedConnection;
import me.lucko.luckperms.common.storage.backing.sql.NonClosableConnection;
import me.lucko.luckperms.common.storage.backing.sql.provider.AbstractConnectionFactory;
import java.io.File;
import java.sql.Connection;
@ -34,14 +35,14 @@ import java.sql.SQLException;
import java.text.DecimalFormat;
import java.util.concurrent.locks.ReentrantLock;
abstract class FlatfileProvider extends SQLProvider {
abstract class FlatfileConnectionFactory extends AbstractConnectionFactory {
protected static final DecimalFormat DF = new DecimalFormat("#.00");
protected final File file;
private final ReentrantLock lock = new ReentrantLock();
private WrappedConnection connection;
private Connection connection;
FlatfileProvider(String name, File file) {
FlatfileConnectionFactory(String name, File file) {
super(name);
this.file = file;
}
@ -62,7 +63,7 @@ abstract class FlatfileProvider extends SQLProvider {
}
@Override
public WrappedConnection getConnection() throws SQLException {
public Connection getConnection() throws SQLException {
lock.lock();
try {
if (this.connection == null || this.connection.isClosed()) {
@ -72,7 +73,7 @@ abstract class FlatfileProvider extends SQLProvider {
Connection connection = DriverManager.getConnection(getDriverId() + ":" + file.getAbsolutePath());
if (connection != null) {
this.connection = new WrappedConnection(connection, false);
this.connection = new NonClosableConnection(connection);
}
}
@ -81,7 +82,7 @@ abstract class FlatfileProvider extends SQLProvider {
}
if (this.connection == null) {
throw new SQLException("Connection is null");
throw new SQLException("Unable to get a connection.");
}
return this.connection;

View File

@ -23,14 +23,14 @@
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.backing.sql.provider;
package me.lucko.luckperms.common.storage.backing.sql.provider.file;
import java.io.File;
import java.util.LinkedHashMap;
import java.util.Map;
public class H2Provider extends FlatfileProvider {
public H2Provider(File file) {
public class H2ConnectionFactory extends FlatfileConnectionFactory {
public H2ConnectionFactory(File file) {
super("H2", file);
// backwards compat
@ -46,7 +46,8 @@ public class H2Provider extends FlatfileProvider {
File databaseFile = new File(super.file.getParent(), "luckperms-h2.mv.db");
if (databaseFile.exists()) {
ret.put("File Size", DF.format(databaseFile.length() / 1048576) + "MB");
double size = databaseFile.length() / 1048576;
ret.put("File Size", DF.format(size) + "MB");
} else {
ret.put("File Size", "0MB");
}

View File

@ -23,14 +23,14 @@
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.backing.sql.provider;
package me.lucko.luckperms.common.storage.backing.sql.provider.file;
import java.io.File;
import java.util.LinkedHashMap;
import java.util.Map;
public class SQLiteProvider extends FlatfileProvider {
public SQLiteProvider(File file) {
public class SQLiteConnectionFactory extends FlatfileConnectionFactory {
public SQLiteConnectionFactory(File file) {
super("SQLite", file);
// backwards compat
@ -46,7 +46,8 @@ public class SQLiteProvider extends FlatfileProvider {
File databaseFile = new File(super.file.getParent(), "luckperms-sqlite.db");
if (databaseFile.exists()) {
ret.put("File Size", DF.format(databaseFile.length() / 1048576) + "MB");
double size = databaseFile.length() / 1048576;
ret.put("File Size", DF.format(size) + "MB");
} else {
ret.put("File Size", "0MB");
}

View File

@ -23,13 +23,13 @@
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.backing.sql.provider;
package me.lucko.luckperms.common.storage.backing.sql.provider.remote;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import me.lucko.luckperms.common.storage.DatastoreConfiguration;
import me.lucko.luckperms.common.storage.backing.sql.WrappedConnection;
import me.lucko.luckperms.common.storage.backing.sql.provider.AbstractConnectionFactory;
import java.sql.Connection;
import java.sql.SQLException;
@ -38,13 +38,13 @@ import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
public class MySQLProvider extends SQLProvider {
public class MySqlConnectionFactory extends AbstractConnectionFactory {
private final DatastoreConfiguration configuration;
private final String driverClass;
private HikariDataSource hikari;
public MySQLProvider(String name, String driverClass, DatastoreConfiguration configuration) {
public MySqlConnectionFactory(String name, String driverClass, DatastoreConfiguration configuration) {
super(name);
this.configuration = configuration;
this.driverClass = driverClass;
@ -143,11 +143,11 @@ public class MySQLProvider extends SQLProvider {
}
@Override
public WrappedConnection getConnection() throws SQLException {
public Connection getConnection() throws SQLException {
Connection connection = hikari.getConnection();
if (connection == null) {
throw new SQLException("Connection is null");
throw new SQLException("Unable to get a connection from the pool.");
}
return new WrappedConnection(connection, true);
return connection;
}
}

View File

@ -23,13 +23,13 @@
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.backing.sql.provider;
package me.lucko.luckperms.common.storage.backing.sql.provider.remote;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import me.lucko.luckperms.common.storage.DatastoreConfiguration;
import me.lucko.luckperms.common.storage.backing.sql.WrappedConnection;
import me.lucko.luckperms.common.storage.backing.sql.provider.AbstractConnectionFactory;
import java.sql.Connection;
import java.sql.SQLException;
@ -38,12 +38,12 @@ import java.util.LinkedHashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
public class PostgreSQLProvider extends SQLProvider {
public class PostgreConnectionFactory extends AbstractConnectionFactory {
private final DatastoreConfiguration configuration;
private HikariDataSource hikari;
public PostgreSQLProvider(DatastoreConfiguration configuration) {
public PostgreConnectionFactory(DatastoreConfiguration configuration) {
super("PostgreSQL");
this.configuration = configuration;
}
@ -117,11 +117,11 @@ public class PostgreSQLProvider extends SQLProvider {
}
@Override
public WrappedConnection getConnection() throws SQLException {
public Connection getConnection() throws SQLException {
Connection connection = hikari.getConnection();
if (connection == null) {
throw new SQLException("Connection is null");
throw new SQLException("Unable to get a connection from the pool.");
}
return new WrappedConnection(connection, true);
return connection;
}
}

View File

@ -36,7 +36,7 @@ import com.google.gson.JsonObject;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.common.contexts.ContextSetComparator;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer;
import me.lucko.luckperms.common.node.NodeWithContextComparator;
import me.lucko.luckperms.sponge.service.calculated.CalculatedSubjectData;
import me.lucko.luckperms.sponge.service.model.LPPermissionService;
@ -107,7 +107,7 @@ public class SubjectStorageModel {
JsonObject context = section.get("context").getAsJsonObject();
JsonObject data = section.get("data").getAsJsonObject();
ImmutableContextSet contextSet = NodeModel.deserializeContextSet(context).makeImmutable();
ImmutableContextSet contextSet = ContextSetJsonSerializer.deserializeContextSet(context).makeImmutable();
ImmutableMap.Builder<String, Boolean> perms = ImmutableMap.builder();
for (Map.Entry<String, JsonElement> perm : data.entrySet()) {
perms.put(perm.getKey(), perm.getValue().getAsBoolean());
@ -130,7 +130,7 @@ public class SubjectStorageModel {
JsonObject context = section.get("context").getAsJsonObject();
JsonObject data = section.get("data").getAsJsonObject();
ImmutableContextSet contextSet = NodeModel.deserializeContextSet(context).makeImmutable();
ImmutableContextSet contextSet = ContextSetJsonSerializer.deserializeContextSet(context).makeImmutable();
ImmutableMap.Builder<String, String> opts = ImmutableMap.builder();
for (Map.Entry<String, JsonElement> opt : data.entrySet()) {
opts.put(opt.getKey(), opt.getValue().getAsString());
@ -153,7 +153,7 @@ public class SubjectStorageModel {
JsonObject context = section.get("context").getAsJsonObject();
JsonArray data = section.get("data").getAsJsonArray();
ImmutableContextSet contextSet = NodeModel.deserializeContextSet(context).makeImmutable();
ImmutableContextSet contextSet = ContextSetJsonSerializer.deserializeContextSet(context).makeImmutable();
ImmutableList.Builder<SubjectReference> pars = ImmutableList.builder();
for (JsonElement p : data) {
if (!p.isJsonObject()) {
@ -189,7 +189,7 @@ public class SubjectStorageModel {
}
JsonObject section = new JsonObject();
section.add("context", NodeModel.serializeContextSet(e.getKey()));
section.add("context", ContextSetJsonSerializer.serializeContextSet(e.getKey()));
JsonObject data = new JsonObject();
@ -213,7 +213,7 @@ public class SubjectStorageModel {
}
JsonObject section = new JsonObject();
section.add("context", NodeModel.serializeContextSet(e.getKey()));
section.add("context", ContextSetJsonSerializer.serializeContextSet(e.getKey()));
JsonObject data = new JsonObject();
@ -237,7 +237,7 @@ public class SubjectStorageModel {
}
JsonObject section = new JsonObject();
section.add("context", NodeModel.serializeContextSet(e.getKey()));
section.add("context", ContextSetJsonSerializer.serializeContextSet(e.getKey()));
JsonArray data = new JsonArray();
for (SubjectReference ref : e.getValue()) {