Mirror of https://github.com/LuckPerms/LuckPerms.git (synced 2024-11-28 13:45:20 +01:00)

Commit 85c7a7db8d (parent 1e134df27d)
Implement new file layout for YAML / JSON storage files (#211)
PermissionHolder:

@@ -69,7 +69,6 @@ import java.util.Optional;
 import java.util.OptionalInt;
 import java.util.Set;
 import java.util.SortedSet;
-import java.util.TreeMap;
 import java.util.TreeSet;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
@@ -1357,7 +1356,7 @@ public abstract class PermissionHolder {
 }

 public static Map<String, Boolean> exportToLegacy(Set<Node> nodes) {
-Map<String, Boolean> m = new TreeMap<>((o1, o2) -> PriorityComparator.get().compareStrings(o1, o2));
+Map<String, Boolean> m = new HashMap<>();
 for (Node node : nodes) {
 m.put(node.toSerializedNode(), node.getValue());
 }
StorageFactory:

@@ -132,9 +132,9 @@ public class StorageFactory {
 case MONGODB:
 return new MongoDBBacking(plugin, plugin.getConfiguration().get(ConfigKeys.DATABASE_VALUES));
 case YAML:
-return new YAMLBacking(plugin, plugin.getDataDirectory());
+return new YAMLBacking(plugin, plugin.getDataDirectory(), "yaml-storage");
 default:
-return new JSONBacking(plugin, plugin.getDataDirectory());
+return new JSONBacking(plugin, plugin.getDataDirectory(), "json-storage");
 }
 }
 }
FlatfileBacking:

@@ -22,12 +22,20 @@

 package me.lucko.luckperms.common.storage.backing;

+import lombok.Getter;

 import me.lucko.luckperms.api.LogEntry;
 import me.lucko.luckperms.common.commands.utils.Util;
 import me.lucko.luckperms.common.constants.Constants;
+import me.lucko.luckperms.common.core.model.Group;
+import me.lucko.luckperms.common.core.model.Track;
 import me.lucko.luckperms.common.core.model.User;
 import me.lucko.luckperms.common.data.Log;
+import me.lucko.luckperms.common.managers.GroupManager;
+import me.lucko.luckperms.common.managers.TrackManager;
 import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
+import me.lucko.luckperms.common.storage.backing.utils.LegacyJSONSchemaMigration;
+import me.lucko.luckperms.common.storage.backing.utils.LegacyYAMLSchemaMigration;

 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -35,37 +43,56 @@ import java.io.File;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
+import java.util.Arrays;
 import java.util.Date;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.Properties;
+import java.util.Set;
 import java.util.UUID;
+import java.util.concurrent.Callable;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.logging.FileHandler;
 import java.util.logging.Formatter;
 import java.util.logging.Level;
 import java.util.logging.LogRecord;
 import java.util.logging.Logger;
+import java.util.stream.Collectors;

-abstract class FlatfileBacking extends AbstractBacking {
+public abstract class FlatfileBacking extends AbstractBacking {
 private static final String LOG_FORMAT = "%s(%s): [%s] %s(%s) --> %s";

+protected static <T> T call(Callable<T> c, T def) {
+try {
+return c.call();
+} catch (Exception e) {
+e.printStackTrace();
+return def;
+}
+}

 private final Logger actionLogger = Logger.getLogger("lp_actions");
 private Map<String, String> uuidCache = new ConcurrentHashMap<>();

 private final File pluginDir;

+@Getter
 private final String fileExtension;

+private final String dataFolderName;

 private File uuidData;
 private File actionLog;
-File usersDir;
-File groupsDir;
-File tracksDir;
+protected File usersDir;
+protected File groupsDir;
+protected File tracksDir;

-FlatfileBacking(LuckPermsPlugin plugin, String name, File pluginDir, String fileExtension) {
+FlatfileBacking(LuckPermsPlugin plugin, String name, File pluginDir, String fileExtension, String dataFolderName) {
 super(plugin, name);
 this.pluginDir = pluginDir;
 this.fileExtension = fileExtension;
+this.dataFolderName = dataFolderName;
 }

 @Override
@@ -100,9 +127,31 @@ abstract class FlatfileBacking extends AbstractBacking {
 }

 private void setupFiles() throws IOException {
-File data = new File(pluginDir, "data");
+File data = new File(pluginDir, dataFolderName);
 data.mkdirs();

+// Perform schema migration
+File oldData = new File(pluginDir, "data");
+if (oldData.exists()) {
+plugin.getLog().severe("===== Legacy Schema Migration =====");
+plugin.getLog().severe("Starting migration from legacy schema. This could take a while....");
+plugin.getLog().severe("Please do not stop your server while the migration takes place.");

+if (this instanceof YAMLBacking) {
+try {
+new LegacyYAMLSchemaMigration(plugin, (YAMLBacking) this, oldData, data).run();
+} catch (Exception e) {
+e.printStackTrace();
+}
+} else if (this instanceof JSONBacking) {
+try {
+new LegacyJSONSchemaMigration(plugin, (JSONBacking) this, oldData, data).run();
+} catch (Exception e) {
+e.printStackTrace();
+}
+}
+}

 usersDir = new File(data, "users");
 usersDir.mkdir();

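Note on the new layout: each flatfile backing now writes to its own folder ("yaml-storage" or "json-storage", passed in from StorageFactory above) instead of the shared "data" folder, and an existing "data" folder triggers the legacy schema migration shown in this hunk. A rough sketch of the resulting directory structure, assuming the usual users/groups/tracks sub-folders (illustrative only, not part of the diff):

<plugin data directory>/
    data/                  (old location, converted on first start after the update)
    json-storage/
        users/<uuid>.json
        groups/<name>.json
        tracks/<name>.json
    yaml-storage/
        users/<uuid>.yml
        groups/<name>.yml
        tracks/<name>.yml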
@@ -186,29 +235,83 @@ abstract class FlatfileBacking extends AbstractBacking {
 return Log.builder().build();
 }

-private Map<String, String> getUUIDCache() {
-Map<String, String> cache = new HashMap<>();
-try (BufferedReader reader = Files.newBufferedReader(uuidData.toPath(), StandardCharsets.UTF_8)) {
-Properties props = new Properties();
-props.load(reader);
-for (String key : props.stringPropertyNames()) {
-cache.put(key, props.getProperty(key));
-}
-} catch (IOException e) {
-e.printStackTrace();
-}
-return cache;
+@Override
+public Set<UUID> getUniqueUsers() {
+String[] fileNames = usersDir.list((dir, name) -> name.endsWith(fileExtension));
+if (fileNames == null) return null;
+return Arrays.stream(fileNames)
+.map(s -> s.substring(0, s.length() - fileExtension.length()))
+.map(UUID::fromString)
+.collect(Collectors.toSet());
 }

-private void saveUUIDCache(Map<String, String> cache) {
-try (BufferedWriter writer = Files.newBufferedWriter(uuidData.toPath(), StandardCharsets.UTF_8)) {
-Properties properties = new Properties();
-properties.putAll(cache);
-properties.store(writer, null);
-writer.flush();
-} catch (IOException e) {
-e.printStackTrace();
+@Override
+public boolean loadAllGroups() {
+String[] fileNames = groupsDir.list((dir, name) -> name.endsWith(fileExtension));
+if (fileNames == null) return false;
+List<String> groups = Arrays.stream(fileNames)
+.map(s -> s.substring(0, s.length() - fileExtension.length()))
+.collect(Collectors.toList());
+groups.forEach(this::loadGroup);

+GroupManager gm = plugin.getGroupManager();
+gm.getAll().values().stream()
+.filter(g -> !groups.contains(g.getName()))
+.forEach(gm::unload);
+return true;
+}

+@Override
+public boolean deleteGroup(Group group) {
+group.getIoLock().lock();
+try {
+return call(() -> {
+File groupFile = new File(groupsDir, group.getName() + fileExtension);
+registerFileAction("groups", groupFile);

+if (groupFile.exists()) {
+groupFile.delete();
+}
+return true;
+}, false);
+} finally {
+group.getIoLock().unlock();
+}
+}

+@Override
+public boolean loadAllTracks() {
+String[] fileNames = tracksDir.list((dir, name) -> name.endsWith(fileExtension));
+if (fileNames == null) return false;
+List<String> tracks = Arrays.stream(fileNames)
+.map(s -> s.substring(0, s.length() - fileExtension.length()))
+.collect(Collectors.toList());

+tracks.forEach(this::loadTrack);

+TrackManager tm = plugin.getTrackManager();
+tm.getAll().values().stream()
+.filter(t -> !tracks.contains(t.getName()))
+.forEach(tm::unload);
+return true;
+}

+@Override
+public boolean deleteTrack(Track track) {
+track.getIoLock().lock();
+try {
+return call(() -> {
+File trackFile = new File(tracksDir, track.getName() + fileExtension);
+registerFileAction("tracks", trackFile);

+if (trackFile.exists()) {
+trackFile.delete();
+}
+return true;
+}, false);
+} finally {
+track.getIoLock().unlock();
 }
 }

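The new getUniqueUsers() above derives the user list purely from the file names in the users directory. A minimal, self-contained sketch of that mapping outside the plugin (the directory path and file names are made up for illustration):

import java.io.File;
import java.util.Arrays;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;

public class UniqueUsersExample {
    public static void main(String[] args) {
        String fileExtension = ".json"; // ".yml" for the YAML backing
        File usersDir = new File("json-storage/users"); // hypothetical path

        // list only the storage files for this backing
        String[] fileNames = usersDir.list((dir, name) -> name.endsWith(fileExtension));
        if (fileNames == null) {
            return; // directory missing or unreadable, mirrors the null return above
        }

        // strip the extension and parse the remainder as a UUID, as FlatfileBacking#getUniqueUsers does
        Set<UUID> uniqueUsers = Arrays.stream(fileNames)
                .map(s -> s.substring(0, s.length() - fileExtension.length()))
                .map(UUID::fromString)
                .collect(Collectors.toSet());

        System.out.println(uniqueUsers);
    }
}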
@@ -235,4 +338,30 @@ abstract class FlatfileBacking extends AbstractBacking {
 }
 return null;
 }

+private Map<String, String> getUUIDCache() {
+Map<String, String> cache = new HashMap<>();

+try (BufferedReader reader = Files.newBufferedReader(uuidData.toPath(), StandardCharsets.UTF_8)) {
+Properties props = new Properties();
+props.load(reader);
+for (String key : props.stringPropertyNames()) {
+cache.put(key, props.getProperty(key));
+}
+} catch (IOException e) {
+e.printStackTrace();
+}
+return cache;
+}

+private void saveUUIDCache(Map<String, String> cache) {
+try (BufferedWriter writer = Files.newBufferedWriter(uuidData.toPath(), StandardCharsets.UTF_8)) {
+Properties properties = new Properties();
+properties.putAll(cache);
+properties.store(writer, null);
+writer.flush();
+} catch (IOException e) {
+e.printStackTrace();
+}
+}
 }
JSONBacking:

@@ -23,22 +23,25 @@
 package me.lucko.luckperms.common.storage.backing;

 import com.google.common.collect.ImmutableList;
-import com.google.gson.stream.JsonReader;
-import com.google.gson.stream.JsonWriter;
+import com.google.common.collect.ImmutableSetMultimap;
+import com.google.common.collect.Iterables;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.JsonArray;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;

 import me.lucko.luckperms.api.HeldPermission;
 import me.lucko.luckperms.api.Node;
-import me.lucko.luckperms.common.core.NodeFactory;
+import me.lucko.luckperms.common.core.PriorityComparator;
 import me.lucko.luckperms.common.core.UserIdentifier;
 import me.lucko.luckperms.common.core.model.Group;
 import me.lucko.luckperms.common.core.model.Track;
 import me.lucko.luckperms.common.core.model.User;
-import me.lucko.luckperms.common.managers.GroupManager;
-import me.lucko.luckperms.common.managers.TrackManager;
 import me.lucko.luckperms.common.managers.impl.GenericUserManager;
 import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
+import me.lucko.luckperms.common.storage.backing.utils.NodeDataHolder;
 import me.lucko.luckperms.common.storage.holder.NodeHeldPermission;
-import me.lucko.luckperms.common.utils.ThrowingFunction;

 import java.io.BufferedReader;
 import java.io.BufferedWriter;
@@ -47,56 +50,44 @@ import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
+import java.util.Collection;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
-import java.util.concurrent.Callable;
+import java.util.function.Function;
 import java.util.stream.Collectors;

-import static me.lucko.luckperms.common.core.model.PermissionHolder.exportToLegacy;

 @SuppressWarnings("ResultOfMethodCallIgnored")
 public class JSONBacking extends FlatfileBacking {
-private static <T> T call(Callable<T> c, T def) {
-try {
-return c.call();
-} catch (Exception e) {
-e.printStackTrace();
-return def;
-}
+private final Gson gson;
+public JSONBacking(LuckPermsPlugin plugin, File pluginDir, String dataFolderName) {
+super(plugin, "JSON", pluginDir, ".json", dataFolderName);
+gson = new GsonBuilder().setPrettyPrinting().create();
 }

-public JSONBacking(LuckPermsPlugin plugin, File pluginDir) {
-super(plugin, "JSON", pluginDir, ".json");
-}
+public boolean writeElementToFile(File file, JsonElement element) {

-private boolean fileToWriter(File file, ThrowingFunction<JsonWriter, Boolean> writeOperation) {
-boolean success = false;
 try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
-try (JsonWriter jsonWriter = new JsonWriter(writer)) {
-jsonWriter.setIndent("    "); // 4 spaces
-success = writeOperation.apply(jsonWriter);
-jsonWriter.flush();
-}
-} catch (Exception e) {
+gson.toJson(element, writer);
+writer.flush();
+return true;
+} catch (Throwable t) {
 plugin.getLog().warn("Exception whilst writing to file: " + file.getAbsolutePath());
-e.printStackTrace();
+t.printStackTrace();
+return false;
 }
-return success;
 }

-private boolean fileToReader(File file, ThrowingFunction<JsonReader, Boolean> readOperation) {
+public boolean readObjectFromFile(File file, Function<JsonObject, Boolean> readOperation) {
 boolean success = false;
 try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
-try (JsonReader jsonReader = new JsonReader(reader)) {
-success = readOperation.apply(jsonReader);
-}
-} catch (Exception e) {
+JsonObject object = gson.fromJson(reader, JsonObject.class);
+success = readOperation.apply(object);
+} catch (Throwable t) {
 plugin.getLog().warn("Exception whilst reading from file: " + file.getAbsolutePath());
-e.printStackTrace();
+t.printStackTrace();
 }
 return success;
 }
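The JSON backing now routes all file I/O through Gson instead of hand-driven JsonReader/JsonWriter streams: writeElementToFile dumps a JsonElement with a pretty-printing Gson instance, and readObjectFromFile parses the whole file into a JsonObject before handing it to the callback. A minimal, self-contained round-trip using the same Gson calls (the file name and contents are invented for illustration):

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonObject;

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;

public class GsonRoundTrip {
    public static void main(String[] args) throws IOException {
        Gson gson = new GsonBuilder().setPrettyPrinting().create();
        Path file = Path.of("example.json"); // hypothetical file

        // write side, the equivalent of writeElementToFile
        JsonObject data = new JsonObject();
        data.addProperty("name", "example");
        try (BufferedWriter writer = Files.newBufferedWriter(file, StandardCharsets.UTF_8)) {
            gson.toJson(data, writer);
            writer.flush();
        }

        // read side, the equivalent of readObjectFromFile
        try (BufferedReader reader = Files.newBufferedReader(file, StandardCharsets.UTF_8)) {
            JsonObject object = gson.fromJson(reader, JsonObject.class);
            System.out.println(object.get("name").getAsString());
        }
    }
}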
@@ -111,25 +102,13 @@ public class JSONBacking extends FlatfileBacking {
 registerFileAction("users", userFile);

 if (userFile.exists()) {
-return fileToReader(userFile, reader -> {
-reader.beginObject();
-reader.nextName(); // uuid record
-reader.nextString(); // uuid
-reader.nextName(); // name record
-String name = reader.nextString(); // name
-reader.nextName(); // primaryGroup record
-user.getPrimaryGroup().setStoredValue(reader.nextString()); // primaryGroup
-reader.nextName(); // perms
-reader.beginObject();
-Map<String, Boolean> map = new HashMap<>();
-while (reader.hasNext()) {
-String node = reader.nextName();
-boolean b = reader.nextBoolean();
-map.put(node, b);
-}
-user.setNodes(map);
-reader.endObject();
-reader.endObject();
+return readObjectFromFile(userFile, object -> {
+String name = object.get("name").getAsString();
+user.getPrimaryGroup().setStoredValue(object.get("primaryGroup").getAsString());
+Set<NodeDataHolder> data = deserializePermissions(object.get("permissions").getAsJsonArray());
+Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet());
+user.setNodes(nodes);

 boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false);

@@ -142,21 +121,9 @@ public class JSONBacking extends FlatfileBacking {
 }

 if (save) {
-fileToWriter(userFile, writer -> {
-writer.beginObject();
-writer.name("uuid").value(user.getUuid().toString());
-writer.name("name").value(user.getName());
-writer.name("primaryGroup").value(user.getPrimaryGroup().getStoredValue());
-writer.name("perms");
-writer.beginObject();
-for (Map.Entry<String, Boolean> e : exportToLegacy(user.getNodes()).entrySet()) {
-writer.name(e.getKey()).value(e.getValue().booleanValue());
-}
-writer.endObject();
-writer.endObject();
-return true;
-});
+saveUser(user);
 }

 return true;
 });
 } else {
@@ -198,20 +165,15 @@ public class JSONBacking extends FlatfileBacking {
 }
 }

-return fileToWriter(userFile, writer -> {
-writer.beginObject();
-writer.name("uuid").value(user.getUuid().toString());
-writer.name("name").value(user.getName());
-writer.name("primaryGroup").value(user.getPrimaryGroup().getStoredValue());
-writer.name("perms");
-writer.beginObject();
-for (Map.Entry<String, Boolean> e : exportToLegacy(user.getNodes()).entrySet()) {
-writer.name(e.getKey()).value(e.getValue().booleanValue());
-}
-writer.endObject();
-writer.endObject();
-return true;
-});
+JsonObject data = new JsonObject();
+data.addProperty("uuid", user.getUuid().toString());
+data.addProperty("name", user.getName());
+data.addProperty("primaryGroup", user.getPrimaryGroup().getStoredValue());
+Set<NodeDataHolder> nodes = user.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
+data.add("permissions", serializePermissions(nodes));
+return writeElementToFile(userFile, data);
 }, false);
 } finally {
 user.getIoLock().unlock();
@@ -227,33 +189,17 @@ public class JSONBacking extends FlatfileBacking {
 for (File file : files) {
 registerFileAction("users", file);

-Map<String, Boolean> nodes = new HashMap<>();
-fileToReader(file, reader -> {
-reader.beginObject();
-reader.nextName(); // uuid record
-reader.nextString(); // uuid
-reader.nextName(); // name record
-reader.nextString(); // name
-reader.nextName(); // primaryGroup record
-reader.nextString(); // primaryGroup
-reader.nextName(); //perms
-reader.beginObject();
-while (reader.hasNext()) {
-String node = reader.nextName();
-boolean b = reader.nextBoolean();
-nodes.put(node, b);
-}
+Set<NodeDataHolder> nodes = new HashSet<>();
+readObjectFromFile(file, object -> {
+nodes.addAll(deserializePermissions(object.get("permissions").getAsJsonArray()));

-reader.endObject();
-reader.endObject();
 return true;
 });

 boolean shouldDelete = false;
 if (nodes.size() == 1) {
-for (Map.Entry<String, Boolean> e : nodes.entrySet()) {
+for (NodeDataHolder e : nodes) {
 // There's only one
-shouldDelete = e.getKey().equalsIgnoreCase("group.default") && e.getValue();
+shouldDelete = e.getPermission().equalsIgnoreCase("group.default") && e.isValue();
 }
 }

@@ -265,16 +211,6 @@ public class JSONBacking extends FlatfileBacking {
 }, false);
 }

-@Override
-public Set<UUID> getUniqueUsers() {
-String[] fileNames = usersDir.list((dir, name) -> name.endsWith(".json"));
-if (fileNames == null) return null;
-return Arrays.stream(fileNames)
-.map(s -> s.substring(0, s.length() - 5))
-.map(UUID::fromString)
-.collect(Collectors.toSet());
-}

 @Override
 public List<HeldPermission<UUID>> getUsersWithPermission(String permission) {
 ImmutableList.Builder<HeldPermission<UUID>> held = ImmutableList.builder();
@@ -286,35 +222,19 @@ public class JSONBacking extends FlatfileBacking {
 registerFileAction("users", file);

 UUID holder = UUID.fromString(file.getName().substring(0, file.getName().length() - 5));
-Map<String, Boolean> nodes = new HashMap<>();
-fileToReader(file, reader -> {
-reader.beginObject();
-reader.nextName(); // uuid record
-reader.nextString(); // uuid
-reader.nextName(); // name record
-reader.nextString(); // name
-reader.nextName(); // primaryGroup record
-reader.nextString(); // primaryGroup
-reader.nextName(); //perms
-reader.beginObject();
-while (reader.hasNext()) {
-String node = reader.nextName();
-boolean b = reader.nextBoolean();
-nodes.put(node, b);
-}
+Set<NodeDataHolder> nodes = new HashSet<>();

-reader.endObject();
-reader.endObject();
+readObjectFromFile(file, object -> {
+nodes.addAll(deserializePermissions(object.get("permissions").getAsJsonArray()));
 return true;
 });

-for (Map.Entry<String, Boolean> e : nodes.entrySet()) {
-Node node = NodeFactory.fromSerialisedNode(e.getKey(), e.getValue());
-if (!node.getPermission().equalsIgnoreCase(permission)) {
+for (NodeDataHolder e : nodes) {
+if (!e.getPermission().equalsIgnoreCase(permission)) {
 continue;
 }

-held.add(NodeHeldPermission.of(holder, node));
+held.add(NodeHeldPermission.of(holder, e));
 }
 }
 return true;
@@ -332,22 +252,10 @@ public class JSONBacking extends FlatfileBacking {
 registerFileAction("groups", groupFile);

 if (groupFile.exists()) {
-return fileToReader(groupFile, reader -> {
-reader.beginObject();
-reader.nextName(); // name record
-reader.nextString(); // name
-reader.nextName(); //perms
-reader.beginObject();
-Map<String, Boolean> map = new HashMap<>();
-while (reader.hasNext()) {
-String node = reader.nextName();
-boolean b = reader.nextBoolean();
-map.put(node, b);
-}
-group.setNodes(map);
+return readObjectFromFile(groupFile, object -> {
+Set<NodeDataHolder> data = deserializePermissions(object.get("permissions").getAsJsonArray());
+Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet());
+group.setNodes(nodes);

-reader.endObject();
-reader.endObject();
 return true;
 });
 } else {
@@ -358,18 +266,13 @@ public class JSONBacking extends FlatfileBacking {
 return false;
 }

-return fileToWriter(groupFile, writer -> {
-writer.beginObject();
-writer.name("name").value(group.getName());
-writer.name("perms");
-writer.beginObject();
-for (Map.Entry<String, Boolean> e : exportToLegacy(group.getNodes()).entrySet()) {
-writer.name(e.getKey()).value(e.getValue().booleanValue());
-}
-writer.endObject();
-writer.endObject();
-return true;
-});
+JsonObject data = new JsonObject();
+data.addProperty("name", group.getName());
+Set<NodeDataHolder> nodes = group.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
+data.add("permissions", serializePermissions(nodes));
+return writeElementToFile(groupFile, data);
 }
 }, false);
 } finally {
@@ -386,21 +289,10 @@ public class JSONBacking extends FlatfileBacking {
 File groupFile = new File(groupsDir, name + ".json");
 registerFileAction("groups", groupFile);

-return groupFile.exists() && fileToReader(groupFile, reader -> {
-reader.beginObject();
-reader.nextName(); // name record
-reader.nextString(); // name
-reader.nextName(); // perms
-reader.beginObject();
-Map<String, Boolean> map = new HashMap<>();
-while (reader.hasNext()) {
-String node = reader.nextName();
-boolean b = reader.nextBoolean();
-map.put(node, b);
-}
-group.setNodes(map);
-reader.endObject();
-reader.endObject();
+return groupFile.exists() && readObjectFromFile(groupFile, object -> {
+Set<NodeDataHolder> data = deserializePermissions(object.get("permissions").getAsJsonArray());
+Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet());
+group.setNodes(nodes);
 return true;
 });
 }, false);
@@ -409,23 +301,6 @@ public class JSONBacking extends FlatfileBacking {
 }
 }

-@Override
-public boolean loadAllGroups() {
-String[] fileNames = groupsDir.list((dir, name) -> name.endsWith(".json"));
-if (fileNames == null) return false;
-List<String> groups = Arrays.stream(fileNames)
-.map(s -> s.substring(0, s.length() - 5))
-.collect(Collectors.toList());

-groups.forEach(this::loadGroup);

-GroupManager gm = plugin.getGroupManager();
-gm.getAll().values().stream()
-.filter(g -> !groups.contains(g.getName()))
-.forEach(gm::unload);
-return true;
-}

 @Override
 public boolean saveGroup(Group group) {
 group.getIoLock().lock();
@@ -443,36 +318,11 @@ public class JSONBacking extends FlatfileBacking {
 }
 }

-return fileToWriter(groupFile, writer -> {
-writer.beginObject();
-writer.name("name").value(group.getName());
-writer.name("perms");
-writer.beginObject();
-for (Map.Entry<String, Boolean> e : exportToLegacy(group.getNodes()).entrySet()) {
-writer.name(e.getKey()).value(e.getValue().booleanValue());
-}
-writer.endObject();
-writer.endObject();
-return true;
-});
-}, false);
-} finally {
-group.getIoLock().unlock();
-}
-}
+JsonObject data = new JsonObject();
+data.addProperty("name", group.getName());
+Set<NodeDataHolder> nodes = group.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
+data.add("permissions", serializePermissions(nodes));
+return writeElementToFile(groupFile, data);

-@Override
-public boolean deleteGroup(Group group) {
-group.getIoLock().lock();
-try {
-return call(() -> {
-File groupFile = new File(groupsDir, group.getName() + ".json");
-registerFileAction("groups", groupFile);

-if (groupFile.exists()) {
-groupFile.delete();
-}
-return true;
 }, false);
 } finally {
 group.getIoLock().unlock();
@@ -490,31 +340,18 @@ public class JSONBacking extends FlatfileBacking {
 registerFileAction("groups", file);

 String holder = file.getName().substring(0, file.getName().length() - 5);
-Map<String, Boolean> nodes = new HashMap<>();
-fileToReader(file, reader -> {
-reader.beginObject();
-reader.nextName(); // name record
-reader.nextString(); // name
-reader.nextName(); // perms
-reader.beginObject();
-while (reader.hasNext()) {
-String node = reader.nextName();
-boolean b = reader.nextBoolean();
-nodes.put(node, b);
-}
+Set<NodeDataHolder> nodes = new HashSet<>();
+readObjectFromFile(file, element -> {
+nodes.addAll(deserializePermissions(element.get("permissions").getAsJsonArray()));

-reader.endObject();
-reader.endObject();
 return true;
 });

-for (Map.Entry<String, Boolean> e : nodes.entrySet()) {
-Node node = NodeFactory.fromSerialisedNode(e.getKey(), e.getValue());
-if (!node.getPermission().equalsIgnoreCase(permission)) {
+for (NodeDataHolder e : nodes) {
+if (!e.getPermission().equalsIgnoreCase(permission)) {
 continue;
 }

-held.add(NodeHeldPermission.of(holder, node));
+held.add(NodeHeldPermission.of(holder, e));
 }
 }
 return true;
@@ -532,19 +369,12 @@ public class JSONBacking extends FlatfileBacking {
 registerFileAction("tracks", trackFile);

 if (trackFile.exists()) {
-return fileToReader(trackFile, reader -> {
-reader.beginObject();
-reader.nextName(); // name record
-reader.nextString(); // name
-reader.nextName(); // groups record
-reader.beginArray();
+return readObjectFromFile(trackFile, element -> {
 List<String> groups = new ArrayList<>();
-while (reader.hasNext()) {
-groups.add(reader.nextString());
+for (JsonElement g : element.get("groups").getAsJsonArray()) {
+groups.add(g.getAsString());
 }
 track.setGroups(groups);
-reader.endArray();
-reader.endObject();
 return true;
 });
 } else {
@@ -555,18 +385,15 @@ public class JSONBacking extends FlatfileBacking {
 return false;
 }

-return fileToWriter(trackFile, writer -> {
-writer.beginObject();
-writer.name("name").value(track.getName());
-writer.name("groups");
-writer.beginArray();
+JsonObject data = new JsonObject();
+data.addProperty("name", track.getName());
+JsonArray groups = new JsonArray();
 for (String s : track.getGroups()) {
-writer.value(s);
+groups.add(s);
 }
-writer.endArray();
-writer.endObject();
-return true;
-});
+data.add("groups", groups);
+return writeElementToFile(trackFile, data);
 }
 }, false);
 } finally {
@@ -583,19 +410,12 @@ public class JSONBacking extends FlatfileBacking {
 File trackFile = new File(tracksDir, name + ".json");
 registerFileAction("tracks", trackFile);

-return trackFile.exists() && fileToReader(trackFile, reader -> {
-reader.beginObject();
-reader.nextName(); // name record
-reader.nextString(); // name
-reader.nextName(); // groups
-reader.beginArray();
+return trackFile.exists() && readObjectFromFile(trackFile, element -> {
 List<String> groups = new ArrayList<>();
-while (reader.hasNext()) {
-groups.add(reader.nextString());
+for (JsonElement g : element.get("groups").getAsJsonArray()) {
+groups.add(g.getAsString());
 }
 track.setGroups(groups);
-reader.endArray();
-reader.endObject();
 return true;
 });

@@ -605,23 +425,6 @@ public class JSONBacking extends FlatfileBacking {
 }
 }

-@Override
-public boolean loadAllTracks() {
-String[] fileNames = tracksDir.list((dir, name) -> name.endsWith(".json"));
-if (fileNames == null) return false;
-List<String> tracks = Arrays.stream(fileNames)
-.map(s -> s.substring(0, s.length() - 5))
-.collect(Collectors.toList());

-tracks.forEach(this::loadTrack);

-TrackManager tm = plugin.getTrackManager();
-tm.getAll().values().stream()
-.filter(t -> !tracks.contains(t.getName()))
-.forEach(tm::unload);
-return true;
-}

 @Override
 public boolean saveTrack(Track track) {
 track.getIoLock().lock();
@@ -639,39 +442,139 @@ public class JSONBacking extends FlatfileBacking {
 }
 }

-return fileToWriter(trackFile, writer -> {
-writer.beginObject();
-writer.name("name").value(track.getName());
-writer.name("groups");
-writer.beginArray();
+JsonObject data = new JsonObject();
+data.addProperty("name", track.getName());
+JsonArray groups = new JsonArray();
 for (String s : track.getGroups()) {
-writer.value(s);
+groups.add(s);
 }
-writer.endArray();
-writer.endObject();
-return true;
-});
+data.add("groups", groups);
+return writeElementToFile(trackFile, data);
 }, false);
 } finally {
 track.getIoLock().unlock();
 }
 }

-@Override
-public boolean deleteTrack(Track track) {
-track.getIoLock().lock();
-try {
-return call(() -> {
-File trackFile = new File(tracksDir, track.getName() + ".json");
-registerFileAction("tracks", trackFile);
+public static Set<NodeDataHolder> deserializePermissions(JsonArray permissionsSection) {
+Set<NodeDataHolder> nodes = new HashSet<>();

-if (trackFile.exists()) {
-trackFile.delete();
+for (JsonElement ent : permissionsSection) {
+if (!ent.isJsonObject()) {
+continue;
 }
-return true;
-}, false);
-} finally {
-track.getIoLock().unlock();
+JsonObject data = ent.getAsJsonObject();
+Map.Entry<String, JsonElement> entry = Iterables.getFirst(data.entrySet(), null);
+if (entry == null || !entry.getValue().isJsonObject()) {
+continue;
+}

+String permission = entry.getKey();
+JsonObject attributes = entry.getValue().getAsJsonObject();

+boolean value = true;
+String server = "global";
+String world = "global";
+long expiry = 0L;
+ImmutableSetMultimap context = ImmutableSetMultimap.of();

+if (attributes.has("value")) {
+value = attributes.get("value").getAsBoolean();
+}
+if (attributes.has("server")) {
+server = attributes.get("server").getAsString();
+}
+if (attributes.has("world")) {
+world = attributes.get("world").getAsString();
+}
+if (attributes.has("expiry")) {
+expiry = attributes.get("expiry").getAsLong();
+}

+if (attributes.has("context") && attributes.get("context").isJsonObject()) {
+JsonObject contexts = attributes.get("context").getAsJsonObject();
+ImmutableSetMultimap.Builder<String, String> map = ImmutableSetMultimap.builder();

+for (Map.Entry<String, JsonElement> e : contexts.entrySet()) {
+JsonElement val = e.getValue();
+if (val.isJsonArray()) {
+JsonArray vals = val.getAsJsonArray();
+for (JsonElement element : vals) {
+map.put(e.getKey(), element.getAsString());
+}
+} else {
+map.put(e.getKey(), val.getAsString());
 }
 }

+context = map.build();
+}

+nodes.add(NodeDataHolder.of(permission, value, server, world, expiry, context));
+}

+return nodes;
+}

+public static JsonArray serializePermissions(Set<NodeDataHolder> nodes) {
+List<JsonObject> data = new ArrayList<>();

+for (NodeDataHolder node : nodes) {
+JsonObject attributes = new JsonObject();
+attributes.addProperty("value", node.isValue());

+if (!node.getServer().equals("global")) {
+attributes.addProperty("server", node.getServer());
+}

+if (!node.getWorld().equals("global")) {
+attributes.addProperty("world", node.getWorld());
+}

+if (node.getExpiry() != 0L) {
+attributes.addProperty("expiry", node.getExpiry());
+}

+if (!node.getContexts().isEmpty()) {
+JsonObject context = new JsonObject();
+Map<String, Collection<String>> map = node.getContexts().asMap();

+for (Map.Entry<String, Collection<String>> e : map.entrySet()) {
+List<String> vals = new ArrayList<>(e.getValue());
+int size = vals.size();

+if (size == 1) {
+context.addProperty(e.getKey(), vals.get(0));
+} else if (size > 1) {
+JsonArray arr = new JsonArray();
+for (String s : vals) {
+arr.add(s);
+}
+context.add(e.getKey(), arr);
+}
+}

+attributes.add("context", context);
+}

+JsonObject perm = new JsonObject();
+perm.add(node.getPermission(), attributes);
+data.add(perm);
+}

+data.sort((o1, o2) -> PriorityComparator.get().compareStrings(
+Iterables.getFirst(o1.entrySet(), null).getKey(),
+Iterables.getFirst(o2.entrySet(), null).getKey()
+));

+JsonArray arr = new JsonArray();
+for (JsonObject o : data) {
+arr.add(o);
+}

+return arr;
+}
 }
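With saveUser/saveGroup/saveTrack now delegating to serializePermissions, a user file written by the JSON backing stores each node as a small object keyed by the permission string, with only the non-default attributes attached (value is always written; server, world, expiry and context only when they differ from the defaults). A hand-written example of what such a file could look like; the UUID, names and nodes are invented, and the key order depends on Gson's pretty printer:

{
  "uuid": "c1d6f9d4-6c2f-4e8e-8f3a-1b2c3d4e5f60",
  "name": "someuser",
  "primaryGroup": "default",
  "permissions": [
    {
      "group.default": {
        "value": true
      }
    },
    {
      "some.permission": {
        "value": false,
        "server": "lobby",
        "expiry": 1500000000,
        "context": {
          "gamemode": "survival"
        }
      }
    }
  ]
}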
SQLBacking:

@@ -42,7 +42,7 @@ import me.lucko.luckperms.common.managers.TrackManager;
 import me.lucko.luckperms.common.managers.impl.GenericUserManager;
 import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
 import me.lucko.luckperms.common.storage.backing.sqlprovider.SQLProvider;
-import me.lucko.luckperms.common.storage.backing.utils.LegacySchemaMigration;
+import me.lucko.luckperms.common.storage.backing.utils.LegacySQLSchemaMigration;
 import me.lucko.luckperms.common.storage.backing.utils.NodeDataHolder;
 import me.lucko.luckperms.common.storage.holder.NodeHeldPermission;

@@ -179,7 +179,7 @@ public class SQLBacking extends AbstractBacking {
 plugin.getLog().severe("Starting migration from legacy schema. This could take a while....");
 plugin.getLog().severe("Please do not stop your server while the migration takes place.");

-new LegacySchemaMigration(this).run();
+new LegacySQLSchemaMigration(this).run();
 }
 }

YAMLBacking:

@@ -23,18 +23,19 @@
 package me.lucko.luckperms.common.storage.backing;

 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSetMultimap;
+import com.google.common.collect.Iterables;

 import me.lucko.luckperms.api.HeldPermission;
 import me.lucko.luckperms.api.Node;
-import me.lucko.luckperms.common.core.NodeFactory;
+import me.lucko.luckperms.common.core.PriorityComparator;
 import me.lucko.luckperms.common.core.UserIdentifier;
 import me.lucko.luckperms.common.core.model.Group;
 import me.lucko.luckperms.common.core.model.Track;
 import me.lucko.luckperms.common.core.model.User;
-import me.lucko.luckperms.common.managers.GroupManager;
-import me.lucko.luckperms.common.managers.TrackManager;
 import me.lucko.luckperms.common.managers.impl.GenericUserManager;
 import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
+import me.lucko.luckperms.common.storage.backing.utils.NodeDataHolder;
 import me.lucko.luckperms.common.storage.holder.NodeHeldPermission;

 import org.yaml.snakeyaml.DumperOptions;
@@ -46,18 +47,18 @@ import java.io.File;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Files;
-import java.util.Arrays;
+import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
-import java.util.concurrent.Callable;
 import java.util.function.Function;
 import java.util.stream.Collectors;

-import static me.lucko.luckperms.common.core.model.PermissionHolder.exportToLegacy;

 @SuppressWarnings({"unchecked", "ResultOfMethodCallIgnored"})
 public class YAMLBacking extends FlatfileBacking {
 private static Yaml getYaml() {
@@ -67,20 +68,11 @@ public class YAMLBacking extends FlatfileBacking {
 return new Yaml(options);
 }

-private static <T> T call(Callable<T> c, T def) {
-try {
-return c.call();
-} catch (Exception e) {
-e.printStackTrace();
-return def;
-}
+public YAMLBacking(LuckPermsPlugin plugin, File pluginDir, String dataFolderName) {
+super(plugin, "YAML", pluginDir, ".yml", dataFolderName);
 }

-public YAMLBacking(LuckPermsPlugin plugin, File pluginDir) {
-super(plugin, "YAML", pluginDir, ".yml");
-}
+public boolean writeMapToFile(File file, Map<String, Object> values) {

-private boolean writeMapToFile(File file, Map<String, Object> values) {
 try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
 getYaml().dump(values, writer);
 writer.flush();
@@ -92,7 +84,7 @@ public class YAMLBacking extends FlatfileBacking {
 }
 }

-private boolean readMapFromFile(File file, Function<Map<String, Object>, Boolean> readOperation) {
+public boolean readMapFromFile(File file, Function<Map<String, Object>, Boolean> readOperation) {
 boolean success = false;
 try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
 success = readOperation.apply((Map<String, Object>) getYaml().load(reader));
@@ -116,8 +108,10 @@ public class YAMLBacking extends FlatfileBacking {
 // User exists, let's load.
 String name = (String) values.get("name");
 user.getPrimaryGroup().setStoredValue((String) values.get("primary-group"));
-Map<String, Boolean> perms = (Map<String, Boolean>) values.get("perms");
-user.setNodes(perms);
+Set<NodeDataHolder> data = deserializePermissions((List<Object>) values.get("permissions"));
+Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet());
+user.setNodes(nodes);

 boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false);

@@ -130,12 +124,7 @@ public class YAMLBacking extends FlatfileBacking {
 }

 if (save) {
-Map<String, Object> data = new HashMap<>();
-data.put("uuid", user.getUuid().toString());
-data.put("name", user.getName());
-data.put("primary-group", user.getPrimaryGroup().getStoredValue());
-data.put("perms", exportToLegacy(user.getNodes()));
-writeMapToFile(userFile, data);
+saveUser(user);
 }
 return true;
 });
@@ -177,11 +166,14 @@ public class YAMLBacking extends FlatfileBacking {
 }
 }

-Map<String, Object> values = new HashMap<>();
+Map<String, Object> values = new LinkedHashMap<>();
 values.put("uuid", user.getUuid().toString());
 values.put("name", user.getName());
 values.put("primary-group", user.getPrimaryGroup().getStoredValue());
-values.put("perms", exportToLegacy(user.getNodes()));
+Set<NodeDataHolder> data = user.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
+values.put("permissions", serializePermissions(data));

 return writeMapToFile(userFile, values);
 }, false);
 } finally {
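The YAML backing mirrors the same shape with a top-level map (uuid, name, primary-group, permissions). Its own serializePermissions/deserializePermissions are outside this excerpt, so assuming they produce the same per-node structure as the JSON version above, a saved user file could look roughly like the following; exact indentation and quoting depend on the SnakeYAML DumperOptions configured in getYaml():

uuid: c1d6f9d4-6c2f-4e8e-8f3a-1b2c3d4e5f60
name: someuser
primary-group: default
permissions:
- group.default:
    value: true
- some.permission:
    value: false
    server: lobby
    expiry: 1500000000
    context:
      gamemode: survival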
@@ -197,18 +189,18 @@ public class YAMLBacking extends FlatfileBacking {

 for (File file : files) {
 registerFileAction("users", file);
-Map<String, Boolean> nodes = new HashMap<>();
+Set<NodeDataHolder> nodes = new HashSet<>();
 readMapFromFile(file, values -> {
-Map<String, Boolean> perms = (Map<String, Boolean>) values.get("perms");
-nodes.putAll(perms);
+nodes.addAll(deserializePermissions((List<Object>) values.get("permissions")));
 return true;
 });

 boolean shouldDelete = false;
 if (nodes.size() == 1) {
-for (Map.Entry<String, Boolean> e : nodes.entrySet()) {
+for (NodeDataHolder e : nodes) {
 // There's only one
-shouldDelete = e.getKey().equalsIgnoreCase("group.default") && e.getValue();
+shouldDelete = e.getPermission().equalsIgnoreCase("group.default") && e.isValue();
 }
 }

@@ -220,16 +212,6 @@ public class YAMLBacking extends FlatfileBacking {
 }, false);
 }

-@Override
-public Set<UUID> getUniqueUsers() {
-String[] fileNames = usersDir.list((dir, name) -> name.endsWith(".yml"));
-if (fileNames == null) return null;
-return Arrays.stream(fileNames)
-.map(s -> s.substring(0, s.length() - 4))
-.map(UUID::fromString)
-.collect(Collectors.toSet());
-}

 @Override
 public List<HeldPermission<UUID>> getUsersWithPermission(String permission) {
 ImmutableList.Builder<HeldPermission<UUID>> held = ImmutableList.builder();
@@ -241,20 +223,18 @@ public class YAMLBacking extends FlatfileBacking {
 registerFileAction("users", file);

 UUID holder = UUID.fromString(file.getName().substring(0, file.getName().length() - 4));
-Map<String, Boolean> nodes = new HashMap<>();
+Set<NodeDataHolder> nodes = new HashSet<>();
 readMapFromFile(file, values -> {
-Map<String, Boolean> perms = (Map<String, Boolean>) values.get("perms");
-nodes.putAll(perms);
+nodes.addAll(deserializePermissions((List<Object>) values.get("permissions")));
 return true;
 });

-for (Map.Entry<String, Boolean> e : nodes.entrySet()) {
-Node node = NodeFactory.fromSerialisedNode(e.getKey(), e.getValue());
-if (!node.getPermission().equalsIgnoreCase(permission)) {
+for (NodeDataHolder e : nodes) {
+if (!e.getPermission().equalsIgnoreCase(permission)) {
 continue;
 }

-held.add(NodeHeldPermission.of(holder, node));
+held.add(NodeHeldPermission.of(holder, e));
 }
 }
 return true;
@ -270,10 +250,12 @@ public class YAMLBacking extends FlatfileBacking {
            return call(() -> {
                File groupFile = new File(groupsDir, name + ".yml");
                registerFileAction("groups", groupFile);

                if (groupFile.exists()) {
                    return readMapFromFile(groupFile, values -> {
-                       Map<String, Boolean> perms = (Map<String, Boolean>) values.get("perms");
-                       group.setNodes(perms);
+                       Set<NodeDataHolder> data = deserializePermissions((List<Object>) values.get("permissions"));
+                       Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet());
+                       group.setNodes(nodes);
                        return true;
                    });
                } else {
@ -284,9 +266,10 @@ public class YAMLBacking extends FlatfileBacking {
                        return false;
                    }

-                   Map<String, Object> values = new HashMap<>();
+                   Map<String, Object> values = new LinkedHashMap<>();
                    values.put("name", group.getName());
-                   values.put("perms", exportToLegacy(group.getNodes()));
+                   Set<NodeDataHolder> data = group.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
+                   values.put("permissions", serializePermissions(data));
                    return writeMapToFile(groupFile, values);
                }
            }, false);
@ -303,35 +286,19 @@ public class YAMLBacking extends FlatfileBacking {
            return call(() -> {
                File groupFile = new File(groupsDir, name + ".yml");
                registerFileAction("groups", groupFile);

                return groupFile.exists() && readMapFromFile(groupFile, values -> {
-                   Map<String, Boolean> perms = (Map<String, Boolean>) values.get("perms");
-                   group.setNodes(perms);
+                   Set<NodeDataHolder> data = deserializePermissions((List<Object>) values.get("permissions"));
+                   Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet());
+                   group.setNodes(nodes);
                    return true;
                });

            }, false);
        } finally {
            group.getIoLock().unlock();
        }
    }

-   @Override
-   public boolean loadAllGroups() {
-       String[] fileNames = groupsDir.list((dir, name) -> name.endsWith(".yml"));
-       if (fileNames == null) return false;
-       List<String> groups = Arrays.stream(fileNames)
-               .map(s -> s.substring(0, s.length() - 4))
-               .collect(Collectors.toList());
-
-       groups.forEach(this::loadGroup);
-
-       GroupManager gm = plugin.getGroupManager();
-       gm.getAll().values().stream()
-               .filter(g -> !groups.contains(g.getName()))
-               .forEach(gm::unload);
-       return true;
-   }
-
    @Override
    public boolean saveGroup(Group group) {
        group.getIoLock().lock();
@ -339,6 +306,7 @@ public class YAMLBacking extends FlatfileBacking {
            return call(() -> {
                File groupFile = new File(groupsDir, group.getName() + ".yml");
                registerFileAction("groups", groupFile);

                if (!groupFile.exists()) {
                    try {
                        groupFile.createNewFile();
@ -348,9 +316,10 @@ public class YAMLBacking extends FlatfileBacking {
                    }
                }

-               Map<String, Object> values = new HashMap<>();
+               Map<String, Object> values = new LinkedHashMap<>();
                values.put("name", group.getName());
-               values.put("perms", exportToLegacy(group.getNodes()));
+               Set<NodeDataHolder> data = group.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
+               values.put("permissions", serializePermissions(data));
                return writeMapToFile(groupFile, values);
            }, false);
        } finally {
@ -358,23 +327,6 @@ public class YAMLBacking extends FlatfileBacking {
        }
    }

-   @Override
-   public boolean deleteGroup(Group group) {
-       group.getIoLock().lock();
-       try {
-           return call(() -> {
-               File groupFile = new File(groupsDir, group.getName() + ".yml");
-               registerFileAction("groups", groupFile);
-               if (groupFile.exists()) {
-                   groupFile.delete();
-               }
-               return true;
-           }, false);
-       } finally {
-           group.getIoLock().unlock();
-       }
-   }
-
    @Override
    public List<HeldPermission<String>> getGroupsWithPermission(String permission) {
        ImmutableList.Builder<HeldPermission<String>> held = ImmutableList.builder();
@ -386,20 +338,18 @@ public class YAMLBacking extends FlatfileBacking {
                    registerFileAction("groups", file);

                    String holder = file.getName().substring(0, file.getName().length() - 4);
-                   Map<String, Boolean> nodes = new HashMap<>();
+                   Set<NodeDataHolder> nodes = new HashSet<>();
                    readMapFromFile(file, values -> {
-                       Map<String, Boolean> perms = (Map<String, Boolean>) values.get("perms");
-                       nodes.putAll(perms);
+                       nodes.addAll(deserializePermissions((List<Object>) values.get("permissions")));
                        return true;
                    });

-                   for (Map.Entry<String, Boolean> e : nodes.entrySet()) {
+                   for (NodeDataHolder e : nodes) {
-                       Node node = NodeFactory.fromSerialisedNode(e.getKey(), e.getValue());
-                       if (!node.getPermission().equalsIgnoreCase(permission)) {
+                       if (!e.getPermission().equalsIgnoreCase(permission)) {
                            continue;
                        }

-                       held.add(NodeHeldPermission.of(holder, node));
+                       held.add(NodeHeldPermission.of(holder, e));
                    }
                }
                return true;
@ -429,7 +379,7 @@ public class YAMLBacking extends FlatfileBacking {
                        return false;
                    }

-                   Map<String, Object> values = new HashMap<>();
+                   Map<String, Object> values = new LinkedHashMap<>();
                    values.put("name", track.getName());
                    values.put("groups", track.getGroups());

@ -460,23 +410,6 @@ public class YAMLBacking extends FlatfileBacking {
        }
    }

-   @Override
-   public boolean loadAllTracks() {
-       String[] fileNames = tracksDir.list((dir, name) -> name.endsWith(".yml"));
-       if (fileNames == null) return false;
-       List<String> tracks = Arrays.stream(fileNames)
-               .map(s -> s.substring(0, s.length() - 4))
-               .collect(Collectors.toList());
-
-       tracks.forEach(this::loadTrack);
-
-       TrackManager tm = plugin.getTrackManager();
-       tm.getAll().values().stream()
-               .filter(t -> !tracks.contains(t.getName()))
-               .forEach(tm::unload);
-       return true;
-   }
-
    @Override
    public boolean saveTrack(Track track) {
        track.getIoLock().lock();
@ -494,7 +427,7 @@ public class YAMLBacking extends FlatfileBacking {
                    }
                }

-               Map<String, Object> values = new HashMap<>();
+               Map<String, Object> values = new LinkedHashMap<>();
                values.put("name", track.getName());
                values.put("groups", track.getGroups());
                return writeMapToFile(trackFile, values);
@ -504,21 +437,121 @@ public class YAMLBacking extends FlatfileBacking {
        }
    }

-   @Override
-   public boolean deleteTrack(Track track) {
-       track.getIoLock().lock();
-       try {
-           return call(() -> {
-               File trackFile = new File(tracksDir, track.getName() + ".yml");
-               registerFileAction("tracks", trackFile);
-
-               if (trackFile.exists()) {
-                   trackFile.delete();
-               }
-               return true;
-           }, false);
-       } finally {
-           track.getIoLock().unlock();
-       }
-   }
-
+   public static Set<NodeDataHolder> deserializePermissions(List<Object> permissionsSection) {
+       Set<NodeDataHolder> nodes = new HashSet<>();
+
+       for (Object perm : permissionsSection) {
+           if (!(perm instanceof Map)) {
+               continue;
+           }
+
+           Map<String, Object> data = (Map<String, Object>) perm;
+           Map.Entry<String, Object> entry = Iterables.getFirst(data.entrySet(), null);
+
+           if (entry == null) {
+               continue;
+           }
+
+           String permission = entry.getKey();
+
+           if (entry.getValue() != null && entry.getValue() instanceof Map) {
+               Map<String, Object> attributes = (Map<String, Object>) entry.getValue();
+
+               boolean value = true;
+               String server = "global";
+               String world = "global";
+               long expiry = 0L;
+               ImmutableSetMultimap context = ImmutableSetMultimap.of();
+
+               if (attributes.containsKey("value")) {
+                   value = (boolean) attributes.get("value");
+               }
+               if (attributes.containsKey("server")) {
+                   server = attributes.get("server").toString();
+               }
+               if (attributes.containsKey("world")) {
+                   world = attributes.get("world").toString();
+               }
+               if (attributes.containsKey("expiry")) {
+                   Object exp = attributes.get("expiry");
+                   if (exp instanceof Long || exp.getClass().isPrimitive()) {
+                       expiry = (long) exp;
+                   } else {
+                       expiry = (int) exp;
+                   }
+               }
+
+               if (attributes.get("context") != null && attributes.get("context") instanceof Map) {
+                   Map<String, Object> contexts = (Map<String, Object>) attributes.get("context");
+                   ImmutableSetMultimap.Builder<String, String> map = ImmutableSetMultimap.builder();
+
+                   for (Map.Entry<String, Object> e : contexts.entrySet()) {
+                       Object val = e.getValue();
+                       if (val instanceof List) {
+                           map.putAll(e.getKey(), ((List<String>) val));
+                       } else {
+                           map.put(e.getKey(), val.toString());
+                       }
+                   }
+
+                   context = map.build();
+               }
+
+               nodes.add(NodeDataHolder.of(permission, value, server, world, expiry, context));
+           }
+       }
+
+       return nodes;
+   }
+
+   public static List<Map<String, Object>> serializePermissions(Set<NodeDataHolder> nodes) {
+       List<Map<String, Object>> data = new ArrayList<>();
+
+       for (NodeDataHolder node : nodes) {
+           Map<String, Object> attributes = new LinkedHashMap<>();
+           attributes.put("value", node.isValue());
+
+           if (!node.getServer().equals("global")) {
+               attributes.put("server", node.getServer());
+           }
+
+           if (!node.getWorld().equals("global")) {
+               attributes.put("world", node.getWorld());
+           }
+
+           if (node.getExpiry() != 0L) {
+               attributes.put("expiry", node.getExpiry());
+           }
+
+           if (!node.getContexts().isEmpty()) {
+               Map<String, Object> context = new HashMap<>();
+               Map<String, Collection<String>> map = node.getContexts().asMap();
+
+               for (Map.Entry<String, Collection<String>> e : map.entrySet()) {
+                   List<String> vals = new ArrayList<>(e.getValue());
+                   int size = vals.size();
+
+                   if (size == 1) {
+                       context.put(e.getKey(), vals.get(0));
+                   } else if (size > 1) {
+                       context.put(e.getKey(), vals);
+                   }
+               }
+
+               attributes.put("context", context);
+           }
+
+           Map<String, Object> perm = new HashMap<>();
+           perm.put(node.getPermission(), attributes);
+           data.add(perm);
+       }
+
+       data.sort((o1, o2) -> PriorityComparator.get().compareStrings(
+               Iterables.getFirst(o1.keySet(), ""),
+               Iterables.getFirst(o2.keySet(), ""))
+       );
+
+       return data;
+   }
    }
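To make the per-node schema these two helpers agree on easier to see, here is a small illustrative sketch (not from the commit; the permission, server, world, expiry and context values are invented, and SnakeYAML is assumed to be on the classpath). Each list element is a single-key map from the permission string to an attribute map: "value" is always written, "server"/"world" are omitted when "global", "expiry" is omitted when 0, and each context key maps to either a single string or a list of strings, which is exactly the set of shapes deserializePermissions checks for.

import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class NodeSchemaSketch {
    public static void main(String[] args) {
        // Attribute map for one node, mirroring what serializePermissions builds.
        Map<String, Object> attributes = new LinkedHashMap<>();
        attributes.put("value", true);
        attributes.put("server", "factions");               // omitted when "global"
        attributes.put("world", "nether");                  // omitted when "global"
        attributes.put("expiry", 1893456000L);              // omitted when 0
        Map<String, Object> context = new LinkedHashMap<>();
        context.put("gamemode", "survival");                // single value -> plain string
        context.put("team", Arrays.asList("red", "blue"));  // multiple values -> list
        attributes.put("context", context);

        // Each element of the "permissions" list is keyed by the permission itself.
        Map<String, Object> entry = new LinkedHashMap<>();
        entry.put("some.example.permission", attributes);

        List<Map<String, Object>> permissions = new ArrayList<>();
        permissions.add(entry);

        // Dump in block style to show roughly how the section appears on disk.
        DumperOptions options = new DumperOptions();
        options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
        System.out.println(new Yaml(options).dump(permissions));
    }
}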
@ -0,0 +1,180 @@
/*
 * Copyright (c) 2016 Lucko (Luck) <luck@lucko.me>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package me.lucko.luckperms.common.storage.backing.utils;

import lombok.RequiredArgsConstructor;

import com.google.gson.JsonElement;
import com.google.gson.JsonObject;

import me.lucko.luckperms.common.core.NodeFactory;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.JSONBacking;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

@SuppressWarnings("unchecked")
@RequiredArgsConstructor
public class LegacyJSONSchemaMigration implements Runnable {
    private final LuckPermsPlugin plugin;
    private final JSONBacking backing;
    private final File oldDataFolder;
    private final File newDataFolder;

    @Override
    public void run() {
        plugin.getLog().warn("Moving existing files to their new location.");
        relocateFile(oldDataFolder, newDataFolder, "actions.log");
        relocateFile(oldDataFolder, newDataFolder, "uuidcache.txt");
        relocateFile(oldDataFolder, newDataFolder, "tracks");

        plugin.getLog().warn("Migrating group files");
        File oldGroupsDir = new File(oldDataFolder, "groups");
        if (oldGroupsDir.exists() && oldGroupsDir.isDirectory()) {
            File newGroupsDir = new File(newDataFolder, "groups");
            newGroupsDir.mkdir();

            File[] toMigrate = oldGroupsDir.listFiles((dir, name) -> name.endsWith(backing.getFileExtension()));
            if (toMigrate != null) {
                for (File oldFile : toMigrate) {
                    try {
                        File replacementFile = new File(newGroupsDir, oldFile.getName());

                        AtomicReference<String> name = new AtomicReference<>(null);
                        Map<String, Boolean> perms = new HashMap<>();
                        backing.readObjectFromFile(oldFile, values -> {
                            name.set(values.get("name").getAsString());
                            JsonObject permsSection = values.get("perms").getAsJsonObject();
                            for (Map.Entry<String, JsonElement> e : permsSection.entrySet()) {
                                perms.put(e.getKey(), e.getValue().getAsBoolean());
                            }
                            return true;
                        });

                        Set<NodeDataHolder> nodes = perms.entrySet().stream()
                                .map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue()))
                                .map(NodeDataHolder::fromNode)
                                .collect(Collectors.toSet());

                        if (!replacementFile.exists()) {
                            try {
                                replacementFile.createNewFile();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }

                        JsonObject data = new JsonObject();
                        data.addProperty("name", name.get());
                        data.add("permissions", JSONBacking.serializePermissions(nodes));
                        backing.writeElementToFile(replacementFile, data);

                        oldFile.delete();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }

        plugin.getLog().warn("Migrated group files, now migrating user files.");

        File oldUsersDir = new File(oldDataFolder, "users");
        if (oldUsersDir.exists() && oldUsersDir.isDirectory()) {
            File newUsersDir = new File(newDataFolder, "users");
            newUsersDir.mkdir();

            File[] toMigrate = oldUsersDir.listFiles((dir, name) -> name.endsWith(backing.getFileExtension()));
            if (toMigrate != null) {
                for (File oldFile : toMigrate) {
                    try {
                        File replacementFile = new File(newUsersDir, oldFile.getName());

                        AtomicReference<String> uuid = new AtomicReference<>(null);
                        AtomicReference<String> name = new AtomicReference<>(null);
                        AtomicReference<String> primaryGroup = new AtomicReference<>(null);
                        Map<String, Boolean> perms = new HashMap<>();
                        backing.readObjectFromFile(oldFile, values -> {
                            uuid.set(values.get("uuid").getAsString());
                            name.set(values.get("name").getAsString());
                            primaryGroup.set(values.get("primaryGroup").getAsString());
                            JsonObject permsSection = values.get("perms").getAsJsonObject();
                            for (Map.Entry<String, JsonElement> e : permsSection.entrySet()) {
                                perms.put(e.getKey(), e.getValue().getAsBoolean());
                            }
                            return true;
                        });

                        Set<NodeDataHolder> nodes = perms.entrySet().stream()
                                .map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue()))
                                .map(NodeDataHolder::fromNode)
                                .collect(Collectors.toSet());

                        if (!replacementFile.exists()) {
                            try {
                                replacementFile.createNewFile();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }

                        JsonObject data = new JsonObject();
                        data.addProperty("uuid", uuid.get());
                        data.addProperty("name", name.get());
                        data.addProperty("primaryGroup", primaryGroup.get());
                        data.add("permissions", JSONBacking.serializePermissions(nodes));
                        backing.writeElementToFile(replacementFile, data);

                        oldFile.delete();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        plugin.getLog().warn("Migrated user files.");

        // rename the old data file
        oldDataFolder.renameTo(new File(oldDataFolder.getParent(), "old-data-backup"));

        plugin.getLog().warn("Legacy schema migration complete.");
    }

    private static void relocateFile(File dirFrom, File dirTo, String fileName) {
        File file = new File(dirFrom, fileName);
        if (file.exists()) {
            try {
                Files.move(file.toPath(), new File(dirTo, fileName).toPath());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
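As a rough before/after sketch of what this JSON migration does to a group file (not part of the commit; the group name and permission below are invented), using Gson directly: the legacy flat "perms" object of permission-to-boolean entries becomes a "permissions" array of per-node objects keyed by the permission string.

import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonObject;

public class JsonLayoutSketch {
    public static void main(String[] args) {
        // Legacy layout: a flat "perms" object.
        JsonObject legacy = new JsonObject();
        legacy.addProperty("name", "default");
        JsonObject perms = new JsonObject();
        perms.addProperty("luckperms.user.info", true); // invented example permission
        legacy.add("perms", perms);

        // New layout: a "permissions" array, one object per node,
        // keyed by the permission with an attribute object as the value.
        JsonObject migrated = new JsonObject();
        migrated.addProperty("name", "default");
        JsonArray permissions = new JsonArray();
        JsonObject entry = new JsonObject();
        JsonObject attributes = new JsonObject();
        attributes.addProperty("value", true);
        entry.add("luckperms.user.info", attributes);
        permissions.add(entry);
        migrated.add("permissions", permissions);

        System.out.println(new GsonBuilder().setPrettyPrinting().create().toJson(legacy));
        System.out.println(new GsonBuilder().setPrettyPrinting().create().toJson(migrated));
    }
}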
@ -45,13 +45,13 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

@RequiredArgsConstructor
-public class LegacySchemaMigration implements Runnable {
+public class LegacySQLSchemaMigration implements Runnable {
    private static final Type NODE_MAP_TYPE = new TypeToken<Map<String, Boolean>>() {}.getType();
    private final SQLBacking backing;

    @Override
    public void run() {
-       backing.getPlugin().getLog().info("Collecting UUID data from the old tables.");
+       backing.getPlugin().getLog().warn("Collecting UUID data from the old tables.");

        Map<UUID, String> uuidData = new HashMap<>();
        try (Connection c = backing.getProvider().getConnection()) {
@ -70,7 +70,7 @@ public class LegacySchemaMigration implements Runnable {
            e.printStackTrace();
        }

-       backing.getPlugin().getLog().info("Found " + uuidData.size() + " uuid data entries. Copying to new tables...");
+       backing.getPlugin().getLog().warn("Found " + uuidData.size() + " uuid data entries. Copying to new tables...");

        List<Map.Entry<UUID, String>> uuidEntries = uuidData.entrySet().stream().collect(Collectors.toList());
        List<List<Map.Entry<UUID, String>>> partitionedUuidEntries = Lists.partition(uuidEntries, 100);
@ -95,8 +95,8 @@ public class LegacySchemaMigration implements Runnable {
        uuidEntries.clear();
        partitionedUuidEntries.clear();

-       backing.getPlugin().getLog().info("Migrated all uuid data.");
-       backing.getPlugin().getLog().info("Starting user data migration.");
+       backing.getPlugin().getLog().warn("Migrated all uuid data.");
+       backing.getPlugin().getLog().warn("Starting user data migration.");

        Set<UUID> users = new HashSet<>();
        try (Connection c = backing.getProvider().getConnection()) {
@ -115,7 +115,7 @@ public class LegacySchemaMigration implements Runnable {
            e.printStackTrace();
        }

-       backing.getPlugin().getLog().info("Found " + users.size() + " user data entries. Copying to new tables...");
+       backing.getPlugin().getLog().warn("Found " + users.size() + " user data entries. Copying to new tables...");

        AtomicInteger userCounter = new AtomicInteger(0);
        for (UUID uuid : users) {
@ -184,14 +184,14 @@ public class LegacySchemaMigration implements Runnable {

            int i = userCounter.incrementAndGet();
            if (i % 100 == 0) {
-               backing.getPlugin().getLog().info("Migrated " + i + " users so far...");
+               backing.getPlugin().getLog().warn("Migrated " + i + " users so far...");
            }
        }

        users.clear();

-       backing.getPlugin().getLog().info("Migrated all user data.");
-       backing.getPlugin().getLog().info("Starting group data migration.");
+       backing.getPlugin().getLog().warn("Migrated all user data.");
+       backing.getPlugin().getLog().warn("Starting group data migration.");

        Map<String, String> groupData = new HashMap<>();
        try (Connection c = backing.getProvider().getConnection()) {
@ -206,7 +206,7 @@ public class LegacySchemaMigration implements Runnable {
            e.printStackTrace();
        }

-       backing.getPlugin().getLog().info("Found " + groupData.size() + " group data entries. Copying to new tables...");
+       backing.getPlugin().getLog().warn("Found " + groupData.size() + " group data entries. Copying to new tables...");
        for (Map.Entry<String, String> e : groupData.entrySet()) {
            String name = e.getKey();
            String permsJson = e.getValue();
@ -251,9 +251,9 @@ public class LegacySchemaMigration implements Runnable {
        }

        groupData.clear();
-       backing.getPlugin().getLog().info("Migrated all group data.");
+       backing.getPlugin().getLog().warn("Migrated all group data.");

-       backing.getPlugin().getLog().info("Renaming action and track tables.");
+       backing.getPlugin().getLog().warn("Renaming action and track tables.");
        try (Connection c = backing.getProvider().getConnection()) {
            try (PreparedStatement ps = c.prepareStatement(backing.getPrefix().apply("DROP TABLE {prefix}actions"))) {
                ps.execute();
@ -272,6 +272,6 @@ public class LegacySchemaMigration implements Runnable {
            ex.printStackTrace();
        }

-       backing.getPlugin().getLog().info("Legacy schema migration complete.");
+       backing.getPlugin().getLog().warn("Legacy schema migration complete.");
    }
}
@ -0,0 +1,172 @@
/*
 * Copyright (c) 2016 Lucko (Luck) <luck@lucko.me>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package me.lucko.luckperms.common.storage.backing.utils;

import lombok.RequiredArgsConstructor;

import me.lucko.luckperms.common.core.NodeFactory;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.YAMLBacking;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;

@SuppressWarnings("unchecked")
@RequiredArgsConstructor
public class LegacyYAMLSchemaMigration implements Runnable {
    private final LuckPermsPlugin plugin;
    private final YAMLBacking backing;
    private final File oldDataFolder;
    private final File newDataFolder;

    @Override
    public void run() {
        plugin.getLog().warn("Moving existing files to their new location.");
        relocateFile(oldDataFolder, newDataFolder, "actions.log");
        relocateFile(oldDataFolder, newDataFolder, "uuidcache.txt");
        relocateFile(oldDataFolder, newDataFolder, "tracks");

        plugin.getLog().warn("Migrating group files");
        File oldGroupsDir = new File(oldDataFolder, "groups");
        if (oldGroupsDir.exists() && oldGroupsDir.isDirectory()) {
            File newGroupsDir = new File(newDataFolder, "groups");
            newGroupsDir.mkdir();

            File[] toMigrate = oldGroupsDir.listFiles((dir, name) -> name.endsWith(backing.getFileExtension()));
            if (toMigrate != null) {
                for (File oldFile : toMigrate) {
                    try {
                        File replacementFile = new File(newGroupsDir, oldFile.getName());

                        AtomicReference<String> name = new AtomicReference<>(null);
                        Map<String, Boolean> perms = new HashMap<>();
                        backing.readMapFromFile(oldFile, values -> {
                            name.set((String) values.get("name"));
                            perms.putAll((Map<String, Boolean>) values.get("perms"));
                            return true;
                        });

                        Set<NodeDataHolder> nodes = perms.entrySet().stream()
                                .map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue()))
                                .map(NodeDataHolder::fromNode)
                                .collect(Collectors.toSet());

                        if (!replacementFile.exists()) {
                            try {
                                replacementFile.createNewFile();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }

                        Map<String, Object> values = new LinkedHashMap<>();
                        values.put("name", name.get());
                        values.put("permissions", YAMLBacking.serializePermissions(nodes));
                        backing.writeMapToFile(replacementFile, values);

                        oldFile.delete();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }

        plugin.getLog().warn("Migrated group files, now migrating user files.");

        File oldUsersDir = new File(oldDataFolder, "users");
        if (oldUsersDir.exists() && oldUsersDir.isDirectory()) {
            File newUsersDir = new File(newDataFolder, "users");
            newUsersDir.mkdir();

            File[] toMigrate = oldUsersDir.listFiles((dir, name) -> name.endsWith(backing.getFileExtension()));
            if (toMigrate != null) {
                for (File oldFile : toMigrate) {
                    try {
                        File replacementFile = new File(newUsersDir, oldFile.getName());

                        AtomicReference<String> uuid = new AtomicReference<>(null);
                        AtomicReference<String> name = new AtomicReference<>(null);
                        AtomicReference<String> primaryGroup = new AtomicReference<>(null);
                        Map<String, Boolean> perms = new HashMap<>();
                        backing.readMapFromFile(oldFile, values -> {
                            uuid.set((String) values.get("uuid"));
                            name.set((String) values.get("name"));
                            primaryGroup.set((String) values.get("primary-group"));
                            perms.putAll((Map<String, Boolean>) values.get("perms"));
                            return true;
                        });

                        Set<NodeDataHolder> nodes = perms.entrySet().stream()
                                .map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue()))
                                .map(NodeDataHolder::fromNode)
                                .collect(Collectors.toSet());

                        if (!replacementFile.exists()) {
                            try {
                                replacementFile.createNewFile();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }

                        Map<String, Object> values = new LinkedHashMap<>();
                        values.put("uuid", uuid.get());
                        values.put("name", name.get());
                        values.put("primary-group", primaryGroup.get());
                        values.put("permissions", YAMLBacking.serializePermissions(nodes));
                        backing.writeMapToFile(replacementFile, values);

                        oldFile.delete();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        plugin.getLog().warn("Migrated user files.");

        // rename the old data file
        oldDataFolder.renameTo(new File(oldDataFolder.getParent(), "old-data-backup"));

        plugin.getLog().warn("Legacy schema migration complete.");
    }

    private static void relocateFile(File dirFrom, File dirTo, String fileName) {
        File file = new File(dirFrom, fileName);
        if (file.exists()) {
            try {
                Files.move(file.toPath(), new File(dirTo, fileName).toPath());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}
@ -1,44 +0,0 @@
/*
 * Copyright (c) 2016 Lucko (Luck) <luck@lucko.me>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package me.lucko.luckperms.common.utils;

import java.util.Objects;

public interface ThrowingFunction<T, R> {

    R apply(T t) throws Exception;

    default <V> ThrowingFunction<V, R> compose(ThrowingFunction<? super V, ? extends T> before) {
        Objects.requireNonNull(before);
        return (V v) -> apply(before.apply(v));
    }

    default <V> ThrowingFunction<T, V> andThen(ThrowingFunction<? super R, ? extends V> after) {
        Objects.requireNonNull(after);
        return (T t) -> after.apply(apply(t));
    }

    static <T> ThrowingFunction<T, T> identity() {
        return t -> t;
    }
}