Implement new file layout for YAML / JSON storage files (#211)

This commit is contained in:
Luck 2017-03-19 15:37:30 +00:00
parent 1e134df27d
commit 85c7a7db8d
No known key found for this signature in database
GPG Key ID: EFA9B3EC5FD90F8B
10 changed files with 918 additions and 546 deletions

View File

@ -69,7 +69,6 @@ import java.util.Optional;
import java.util.OptionalInt;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
@ -1357,7 +1356,7 @@ public abstract class PermissionHolder {
}
public static Map<String, Boolean> exportToLegacy(Set<Node> nodes) {
Map<String, Boolean> m = new TreeMap<>((o1, o2) -> PriorityComparator.get().compareStrings(o1, o2));
Map<String, Boolean> m = new HashMap<>();
for (Node node : nodes) {
m.put(node.toSerializedNode(), node.getValue());
}

View File

@ -132,9 +132,9 @@ public class StorageFactory {
case MONGODB:
return new MongoDBBacking(plugin, plugin.getConfiguration().get(ConfigKeys.DATABASE_VALUES));
case YAML:
return new YAMLBacking(plugin, plugin.getDataDirectory());
return new YAMLBacking(plugin, plugin.getDataDirectory(), "yaml-storage");
default:
return new JSONBacking(plugin, plugin.getDataDirectory());
return new JSONBacking(plugin, plugin.getDataDirectory(), "json-storage");
}
}
}

View File

@ -22,12 +22,20 @@
package me.lucko.luckperms.common.storage.backing;
import lombok.Getter;
import me.lucko.luckperms.api.LogEntry;
import me.lucko.luckperms.common.commands.utils.Util;
import me.lucko.luckperms.common.constants.Constants;
import me.lucko.luckperms.common.core.model.Group;
import me.lucko.luckperms.common.core.model.Track;
import me.lucko.luckperms.common.core.model.User;
import me.lucko.luckperms.common.data.Log;
import me.lucko.luckperms.common.managers.GroupManager;
import me.lucko.luckperms.common.managers.TrackManager;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.utils.LegacyJSONSchemaMigration;
import me.lucko.luckperms.common.storage.backing.utils.LegacyYAMLSchemaMigration;
import java.io.BufferedReader;
import java.io.BufferedWriter;
@ -35,37 +43,56 @@ import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.logging.FileHandler;
import java.util.logging.Formatter;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import java.util.stream.Collectors;
abstract class FlatfileBacking extends AbstractBacking {
public abstract class FlatfileBacking extends AbstractBacking {
private static final String LOG_FORMAT = "%s(%s): [%s] %s(%s) --> %s";
/**
 * Runs the given callable, substituting a fallback value if it throws.
 * The exception is printed (best-effort logging) and then swallowed.
 *
 * @param callable the operation to execute
 * @param fallback the value to return when the operation fails
 * @param <T> the result type
 * @return the callable's result, or {@code fallback} on any exception
 */
protected static <T> T call(Callable<T> callable, T fallback) {
    T result;
    try {
        result = callable.call();
    } catch (Exception ex) {
        // deliberate best-effort behavior: report and continue with the fallback
        ex.printStackTrace();
        result = fallback;
    }
    return result;
}
private final Logger actionLogger = Logger.getLogger("lp_actions");
private Map<String, String> uuidCache = new ConcurrentHashMap<>();
private final File pluginDir;
@Getter
private final String fileExtension;
private final String dataFolderName;
private File uuidData;
private File actionLog;
File usersDir;
File groupsDir;
File tracksDir;
protected File usersDir;
protected File groupsDir;
protected File tracksDir;
FlatfileBacking(LuckPermsPlugin plugin, String name, File pluginDir, String fileExtension) {
FlatfileBacking(LuckPermsPlugin plugin, String name, File pluginDir, String fileExtension, String dataFolderName) {
super(plugin, name);
this.pluginDir = pluginDir;
this.fileExtension = fileExtension;
this.dataFolderName = dataFolderName;
}
@Override
@ -100,9 +127,31 @@ abstract class FlatfileBacking extends AbstractBacking {
}
private void setupFiles() throws IOException {
File data = new File(pluginDir, "data");
File data = new File(pluginDir, dataFolderName);
data.mkdirs();
// Perform schema migration
File oldData = new File(pluginDir, "data");
if (oldData.exists()) {
plugin.getLog().severe("===== Legacy Schema Migration =====");
plugin.getLog().severe("Starting migration from legacy schema. This could take a while....");
plugin.getLog().severe("Please do not stop your server while the migration takes place.");
if (this instanceof YAMLBacking) {
try {
new LegacyYAMLSchemaMigration(plugin, (YAMLBacking) this, oldData, data).run();
} catch (Exception e) {
e.printStackTrace();
}
} else if (this instanceof JSONBacking) {
try {
new LegacyJSONSchemaMigration(plugin, (JSONBacking) this, oldData, data).run();
} catch (Exception e) {
e.printStackTrace();
}
}
}
usersDir = new File(data, "users");
usersDir.mkdir();
@ -186,29 +235,83 @@ abstract class FlatfileBacking extends AbstractBacking {
return Log.builder().build();
}
private Map<String, String> getUUIDCache() {
Map<String, String> cache = new HashMap<>();
try (BufferedReader reader = Files.newBufferedReader(uuidData.toPath(), StandardCharsets.UTF_8)) {
Properties props = new Properties();
props.load(reader);
for (String key : props.stringPropertyNames()) {
cache.put(key, props.getProperty(key));
}
} catch (IOException e) {
e.printStackTrace();
}
return cache;
/**
 * Lists the users directory and returns the UUID of every stored user file.
 *
 * @return the set of stored user UUIDs, or null if the directory could not be listed
 */
@Override
public Set<UUID> getUniqueUsers() {
    String[] entries = usersDir.list((dir, name) -> name.endsWith(fileExtension));
    if (entries == null) {
        return null;
    }
    int suffixLength = fileExtension.length();
    return Arrays.stream(entries)
            // strip the file extension to recover the serialized UUID
            .map(fileName -> fileName.substring(0, fileName.length() - suffixLength))
            .map(UUID::fromString)
            .collect(Collectors.toSet());
}
private void saveUUIDCache(Map<String, String> cache) {
try (BufferedWriter writer = Files.newBufferedWriter(uuidData.toPath(), StandardCharsets.UTF_8)) {
Properties properties = new Properties();
properties.putAll(cache);
properties.store(writer, null);
writer.flush();
} catch (IOException e) {
e.printStackTrace();
/**
 * Loads every group file found on disk, then unloads any group still held
 * in memory whose backing file no longer exists.
 *
 * @return true if the groups directory could be listed, false otherwise
 */
@Override
public boolean loadAllGroups() {
    String[] entries = groupsDir.list((dir, name) -> name.endsWith(fileExtension));
    if (entries == null) {
        return false;
    }
    int suffixLength = fileExtension.length();
    List<String> onDisk = Arrays.stream(entries)
            .map(fileName -> fileName.substring(0, fileName.length() - suffixLength))
            .collect(Collectors.toList());
    for (String groupName : onDisk) {
        loadGroup(groupName);
    }
    // drop any loaded group that no longer has a file backing it
    GroupManager manager = plugin.getGroupManager();
    manager.getAll().values().stream()
            .filter(g -> !onDisk.contains(g.getName()))
            .forEach(manager::unload);
    return true;
}
/**
 * Deletes the group's data file from disk, if one exists.
 * The group's I/O lock is held for the duration of the operation.
 *
 * @param group the group to delete
 * @return true on success (including when no file existed), false if an exception occurred
 */
@Override
public boolean deleteGroup(Group group) {
    group.getIoLock().lock();
    try {
        return call(() -> {
            File target = new File(groupsDir, group.getName() + fileExtension);
            registerFileAction("groups", target);
            // a missing file is not an error — the end state is the same
            if (target.exists()) {
                target.delete();
            }
            return true;
        }, false);
    } finally {
        group.getIoLock().unlock();
    }
}
/**
 * Loads every track file found on disk, then unloads any track still held
 * in memory whose backing file no longer exists.
 *
 * @return true if the tracks directory could be listed, false otherwise
 */
@Override
public boolean loadAllTracks() {
    String[] entries = tracksDir.list((dir, name) -> name.endsWith(fileExtension));
    if (entries == null) {
        return false;
    }
    int suffixLength = fileExtension.length();
    List<String> onDisk = Arrays.stream(entries)
            .map(fileName -> fileName.substring(0, fileName.length() - suffixLength))
            .collect(Collectors.toList());
    for (String trackName : onDisk) {
        loadTrack(trackName);
    }
    // drop any loaded track that no longer has a file backing it
    TrackManager manager = plugin.getTrackManager();
    manager.getAll().values().stream()
            .filter(t -> !onDisk.contains(t.getName()))
            .forEach(manager::unload);
    return true;
}
/**
 * Deletes the track's data file from disk, if one exists.
 * The track's I/O lock is held for the duration of the operation.
 *
 * @param track the track to delete
 * @return true on success (including when no file existed), false if an exception occurred
 */
@Override
public boolean deleteTrack(Track track) {
    track.getIoLock().lock();
    try {
        return call(() -> {
            File target = new File(tracksDir, track.getName() + fileExtension);
            registerFileAction("tracks", target);
            // a missing file is not an error — the end state is the same
            if (target.exists()) {
                target.delete();
            }
            return true;
        }, false);
    } finally {
        track.getIoLock().unlock();
    }
}
@ -235,4 +338,30 @@ abstract class FlatfileBacking extends AbstractBacking {
}
return null;
}
/**
 * Reads the uuid cache entries from the uuid data file, which is stored in
 * {@link java.util.Properties} format. I/O failures are logged and result
 * in whatever entries were read so far (usually an empty map).
 *
 * @return a mutable map of the cached key/value pairs
 */
private Map<String, String> getUUIDCache() {
    Map<String, String> cache = new HashMap<>();
    try (BufferedReader reader = Files.newBufferedReader(uuidData.toPath(), StandardCharsets.UTF_8)) {
        Properties properties = new Properties();
        properties.load(reader);
        for (String propertyName : properties.stringPropertyNames()) {
            cache.put(propertyName, properties.getProperty(propertyName));
        }
    } catch (IOException ex) {
        ex.printStackTrace();
    }
    return cache;
}
/**
 * Persists the uuid cache entries to the uuid data file in
 * {@link java.util.Properties} format. I/O failures are logged and swallowed.
 *
 * @param cache the key/value pairs to write out
 */
private void saveUUIDCache(Map<String, String> cache) {
    Properties properties = new Properties();
    properties.putAll(cache);
    try (BufferedWriter writer = Files.newBufferedWriter(uuidData.toPath(), StandardCharsets.UTF_8)) {
        properties.store(writer, null);
        writer.flush();
    } catch (IOException ex) {
        ex.printStackTrace();
    }
}
}

View File

@ -23,22 +23,25 @@
package me.lucko.luckperms.common.storage.backing;
import com.google.common.collect.ImmutableList;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Iterables;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import me.lucko.luckperms.api.HeldPermission;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.common.core.NodeFactory;
import me.lucko.luckperms.common.core.PriorityComparator;
import me.lucko.luckperms.common.core.UserIdentifier;
import me.lucko.luckperms.common.core.model.Group;
import me.lucko.luckperms.common.core.model.Track;
import me.lucko.luckperms.common.core.model.User;
import me.lucko.luckperms.common.managers.GroupManager;
import me.lucko.luckperms.common.managers.TrackManager;
import me.lucko.luckperms.common.managers.impl.GenericUserManager;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.utils.NodeDataHolder;
import me.lucko.luckperms.common.storage.holder.NodeHeldPermission;
import me.lucko.luckperms.common.utils.ThrowingFunction;
import java.io.BufferedReader;
import java.io.BufferedWriter;
@ -47,56 +50,44 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.function.Function;
import java.util.stream.Collectors;
import static me.lucko.luckperms.common.core.model.PermissionHolder.exportToLegacy;
@SuppressWarnings("ResultOfMethodCallIgnored")
public class JSONBacking extends FlatfileBacking {
private static <T> T call(Callable<T> c, T def) {
try {
return c.call();
} catch (Exception e) {
e.printStackTrace();
return def;
}
// Shared Gson instance for all (de)serialization in this backing.
// Pretty-printing keeps the storage files human-editable.
private final Gson gson;

/**
 * Creates a new JSON flatfile backing.
 *
 * @param plugin the plugin instance
 * @param pluginDir the plugin's data directory
 * @param dataFolderName the storage folder name inside pluginDir (e.g. "json-storage")
 */
public JSONBacking(LuckPermsPlugin plugin, File pluginDir, String dataFolderName) {
    super(plugin, "JSON", pluginDir, ".json", dataFolderName);
    this.gson = new GsonBuilder().setPrettyPrinting().create();
}
private boolean fileToWriter(File file, ThrowingFunction<JsonWriter, Boolean> writeOperation) {
boolean success = false;
public boolean writeElementToFile(File file, JsonElement element) {
try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
try (JsonWriter jsonWriter = new JsonWriter(writer)) {
jsonWriter.setIndent(" "); // 4 spaces
success = writeOperation.apply(jsonWriter);
jsonWriter.flush();
}
} catch (Exception e) {
gson.toJson(element, writer);
writer.flush();
return true;
} catch (Throwable t) {
plugin.getLog().warn("Exception whilst writing to file: " + file.getAbsolutePath());
e.printStackTrace();
t.printStackTrace();
return false;
}
return success;
}
private boolean fileToReader(File file, ThrowingFunction<JsonReader, Boolean> readOperation) {
public boolean readObjectFromFile(File file, Function<JsonObject, Boolean> readOperation) {
boolean success = false;
try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
try (JsonReader jsonReader = new JsonReader(reader)) {
success = readOperation.apply(jsonReader);
}
} catch (Exception e) {
JsonObject object = gson.fromJson(reader, JsonObject.class);
success = readOperation.apply(object);
} catch (Throwable t) {
plugin.getLog().warn("Exception whilst reading from file: " + file.getAbsolutePath());
e.printStackTrace();
t.printStackTrace();
}
return success;
}
@ -111,25 +102,13 @@ public class JSONBacking extends FlatfileBacking {
registerFileAction("users", userFile);
if (userFile.exists()) {
return fileToReader(userFile, reader -> {
reader.beginObject();
reader.nextName(); // uuid record
reader.nextString(); // uuid
reader.nextName(); // name record
String name = reader.nextString(); // name
reader.nextName(); // primaryGroup record
user.getPrimaryGroup().setStoredValue(reader.nextString()); // primaryGroup
reader.nextName(); // perms
reader.beginObject();
Map<String, Boolean> map = new HashMap<>();
while (reader.hasNext()) {
String node = reader.nextName();
boolean b = reader.nextBoolean();
map.put(node, b);
}
user.setNodes(map);
reader.endObject();
reader.endObject();
return readObjectFromFile(userFile, object -> {
String name = object.get("name").getAsString();
user.getPrimaryGroup().setStoredValue(object.get("primaryGroup").getAsString());
Set<NodeDataHolder> data = deserializePermissions(object.get("permissions").getAsJsonArray());
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet());
user.setNodes(nodes);
boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false);
@ -142,21 +121,9 @@ public class JSONBacking extends FlatfileBacking {
}
if (save) {
fileToWriter(userFile, writer -> {
writer.beginObject();
writer.name("uuid").value(user.getUuid().toString());
writer.name("name").value(user.getName());
writer.name("primaryGroup").value(user.getPrimaryGroup().getStoredValue());
writer.name("perms");
writer.beginObject();
for (Map.Entry<String, Boolean> e : exportToLegacy(user.getNodes()).entrySet()) {
writer.name(e.getKey()).value(e.getValue().booleanValue());
}
writer.endObject();
writer.endObject();
return true;
});
saveUser(user);
}
return true;
});
} else {
@ -198,20 +165,15 @@ public class JSONBacking extends FlatfileBacking {
}
}
return fileToWriter(userFile, writer -> {
writer.beginObject();
writer.name("uuid").value(user.getUuid().toString());
writer.name("name").value(user.getName());
writer.name("primaryGroup").value(user.getPrimaryGroup().getStoredValue());
writer.name("perms");
writer.beginObject();
for (Map.Entry<String, Boolean> e : exportToLegacy(user.getNodes()).entrySet()) {
writer.name(e.getKey()).value(e.getValue().booleanValue());
}
writer.endObject();
writer.endObject();
return true;
});
JsonObject data = new JsonObject();
data.addProperty("uuid", user.getUuid().toString());
data.addProperty("name", user.getName());
data.addProperty("primaryGroup", user.getPrimaryGroup().getStoredValue());
Set<NodeDataHolder> nodes = user.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
data.add("permissions", serializePermissions(nodes));
return writeElementToFile(userFile, data);
}, false);
} finally {
user.getIoLock().unlock();
@ -227,33 +189,17 @@ public class JSONBacking extends FlatfileBacking {
for (File file : files) {
registerFileAction("users", file);
Map<String, Boolean> nodes = new HashMap<>();
fileToReader(file, reader -> {
reader.beginObject();
reader.nextName(); // uuid record
reader.nextString(); // uuid
reader.nextName(); // name record
reader.nextString(); // name
reader.nextName(); // primaryGroup record
reader.nextString(); // primaryGroup
reader.nextName(); //perms
reader.beginObject();
while (reader.hasNext()) {
String node = reader.nextName();
boolean b = reader.nextBoolean();
nodes.put(node, b);
}
reader.endObject();
reader.endObject();
Set<NodeDataHolder> nodes = new HashSet<>();
readObjectFromFile(file, object -> {
nodes.addAll(deserializePermissions(object.get("permissions").getAsJsonArray()));
return true;
});
boolean shouldDelete = false;
if (nodes.size() == 1) {
for (Map.Entry<String, Boolean> e : nodes.entrySet()) {
for (NodeDataHolder e : nodes) {
// There's only one
shouldDelete = e.getKey().equalsIgnoreCase("group.default") && e.getValue();
shouldDelete = e.getPermission().equalsIgnoreCase("group.default") && e.isValue();
}
}
@ -265,16 +211,6 @@ public class JSONBacking extends FlatfileBacking {
}, false);
}
@Override
public Set<UUID> getUniqueUsers() {
String[] fileNames = usersDir.list((dir, name) -> name.endsWith(".json"));
if (fileNames == null) return null;
return Arrays.stream(fileNames)
.map(s -> s.substring(0, s.length() - 5))
.map(UUID::fromString)
.collect(Collectors.toSet());
}
@Override
public List<HeldPermission<UUID>> getUsersWithPermission(String permission) {
ImmutableList.Builder<HeldPermission<UUID>> held = ImmutableList.builder();
@ -286,35 +222,19 @@ public class JSONBacking extends FlatfileBacking {
registerFileAction("users", file);
UUID holder = UUID.fromString(file.getName().substring(0, file.getName().length() - 5));
Map<String, Boolean> nodes = new HashMap<>();
fileToReader(file, reader -> {
reader.beginObject();
reader.nextName(); // uuid record
reader.nextString(); // uuid
reader.nextName(); // name record
reader.nextString(); // name
reader.nextName(); // primaryGroup record
reader.nextString(); // primaryGroup
reader.nextName(); //perms
reader.beginObject();
while (reader.hasNext()) {
String node = reader.nextName();
boolean b = reader.nextBoolean();
nodes.put(node, b);
}
Set<NodeDataHolder> nodes = new HashSet<>();
reader.endObject();
reader.endObject();
readObjectFromFile(file, object -> {
nodes.addAll(deserializePermissions(object.get("permissions").getAsJsonArray()));
return true;
});
for (Map.Entry<String, Boolean> e : nodes.entrySet()) {
Node node = NodeFactory.fromSerialisedNode(e.getKey(), e.getValue());
if (!node.getPermission().equalsIgnoreCase(permission)) {
for (NodeDataHolder e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) {
continue;
}
held.add(NodeHeldPermission.of(holder, node));
held.add(NodeHeldPermission.of(holder, e));
}
}
return true;
@ -332,22 +252,10 @@ public class JSONBacking extends FlatfileBacking {
registerFileAction("groups", groupFile);
if (groupFile.exists()) {
return fileToReader(groupFile, reader -> {
reader.beginObject();
reader.nextName(); // name record
reader.nextString(); // name
reader.nextName(); //perms
reader.beginObject();
Map<String, Boolean> map = new HashMap<>();
while (reader.hasNext()) {
String node = reader.nextName();
boolean b = reader.nextBoolean();
map.put(node, b);
}
group.setNodes(map);
reader.endObject();
reader.endObject();
return readObjectFromFile(groupFile, object -> {
Set<NodeDataHolder> data = deserializePermissions(object.get("permissions").getAsJsonArray());
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet());
group.setNodes(nodes);
return true;
});
} else {
@ -358,18 +266,13 @@ public class JSONBacking extends FlatfileBacking {
return false;
}
return fileToWriter(groupFile, writer -> {
writer.beginObject();
writer.name("name").value(group.getName());
writer.name("perms");
writer.beginObject();
for (Map.Entry<String, Boolean> e : exportToLegacy(group.getNodes()).entrySet()) {
writer.name(e.getKey()).value(e.getValue().booleanValue());
}
writer.endObject();
writer.endObject();
return true;
});
JsonObject data = new JsonObject();
data.addProperty("name", group.getName());
Set<NodeDataHolder> nodes = group.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
data.add("permissions", serializePermissions(nodes));
return writeElementToFile(groupFile, data);
}
}, false);
} finally {
@ -386,21 +289,10 @@ public class JSONBacking extends FlatfileBacking {
File groupFile = new File(groupsDir, name + ".json");
registerFileAction("groups", groupFile);
return groupFile.exists() && fileToReader(groupFile, reader -> {
reader.beginObject();
reader.nextName(); // name record
reader.nextString(); // name
reader.nextName(); // perms
reader.beginObject();
Map<String, Boolean> map = new HashMap<>();
while (reader.hasNext()) {
String node = reader.nextName();
boolean b = reader.nextBoolean();
map.put(node, b);
}
group.setNodes(map);
reader.endObject();
reader.endObject();
return groupFile.exists() && readObjectFromFile(groupFile, object -> {
Set<NodeDataHolder> data = deserializePermissions(object.get("permissions").getAsJsonArray());
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet());
group.setNodes(nodes);
return true;
});
}, false);
@ -409,23 +301,6 @@ public class JSONBacking extends FlatfileBacking {
}
}
@Override
public boolean loadAllGroups() {
String[] fileNames = groupsDir.list((dir, name) -> name.endsWith(".json"));
if (fileNames == null) return false;
List<String> groups = Arrays.stream(fileNames)
.map(s -> s.substring(0, s.length() - 5))
.collect(Collectors.toList());
groups.forEach(this::loadGroup);
GroupManager gm = plugin.getGroupManager();
gm.getAll().values().stream()
.filter(g -> !groups.contains(g.getName()))
.forEach(gm::unload);
return true;
}
@Override
public boolean saveGroup(Group group) {
group.getIoLock().lock();
@ -443,36 +318,11 @@ public class JSONBacking extends FlatfileBacking {
}
}
return fileToWriter(groupFile, writer -> {
writer.beginObject();
writer.name("name").value(group.getName());
writer.name("perms");
writer.beginObject();
for (Map.Entry<String, Boolean> e : exportToLegacy(group.getNodes()).entrySet()) {
writer.name(e.getKey()).value(e.getValue().booleanValue());
}
writer.endObject();
writer.endObject();
return true;
});
}, false);
} finally {
group.getIoLock().unlock();
}
}
@Override
public boolean deleteGroup(Group group) {
group.getIoLock().lock();
try {
return call(() -> {
File groupFile = new File(groupsDir, group.getName() + ".json");
registerFileAction("groups", groupFile);
if (groupFile.exists()) {
groupFile.delete();
}
return true;
JsonObject data = new JsonObject();
data.addProperty("name", group.getName());
Set<NodeDataHolder> nodes = group.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
data.add("permissions", serializePermissions(nodes));
return writeElementToFile(groupFile, data);
}, false);
} finally {
group.getIoLock().unlock();
@ -490,31 +340,18 @@ public class JSONBacking extends FlatfileBacking {
registerFileAction("groups", file);
String holder = file.getName().substring(0, file.getName().length() - 5);
Map<String, Boolean> nodes = new HashMap<>();
fileToReader(file, reader -> {
reader.beginObject();
reader.nextName(); // name record
reader.nextString(); // name
reader.nextName(); // perms
reader.beginObject();
while (reader.hasNext()) {
String node = reader.nextName();
boolean b = reader.nextBoolean();
nodes.put(node, b);
}
reader.endObject();
reader.endObject();
Set<NodeDataHolder> nodes = new HashSet<>();
readObjectFromFile(file, element -> {
nodes.addAll(deserializePermissions(element.get("permissions").getAsJsonArray()));
return true;
});
for (Map.Entry<String, Boolean> e : nodes.entrySet()) {
Node node = NodeFactory.fromSerialisedNode(e.getKey(), e.getValue());
if (!node.getPermission().equalsIgnoreCase(permission)) {
for (NodeDataHolder e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) {
continue;
}
held.add(NodeHeldPermission.of(holder, node));
held.add(NodeHeldPermission.of(holder, e));
}
}
return true;
@ -532,19 +369,12 @@ public class JSONBacking extends FlatfileBacking {
registerFileAction("tracks", trackFile);
if (trackFile.exists()) {
return fileToReader(trackFile, reader -> {
reader.beginObject();
reader.nextName(); // name record
reader.nextString(); // name
reader.nextName(); // groups record
reader.beginArray();
return readObjectFromFile(trackFile, element -> {
List<String> groups = new ArrayList<>();
while (reader.hasNext()) {
groups.add(reader.nextString());
for (JsonElement g : element.get("groups").getAsJsonArray()) {
groups.add(g.getAsString());
}
track.setGroups(groups);
reader.endArray();
reader.endObject();
return true;
});
} else {
@ -555,18 +385,15 @@ public class JSONBacking extends FlatfileBacking {
return false;
}
return fileToWriter(trackFile, writer -> {
writer.beginObject();
writer.name("name").value(track.getName());
writer.name("groups");
writer.beginArray();
JsonObject data = new JsonObject();
data.addProperty("name", track.getName());
JsonArray groups = new JsonArray();
for (String s : track.getGroups()) {
writer.value(s);
groups.add(s);
}
writer.endArray();
writer.endObject();
return true;
});
data.add("groups", groups);
return writeElementToFile(trackFile, data);
}
}, false);
} finally {
@ -583,19 +410,12 @@ public class JSONBacking extends FlatfileBacking {
File trackFile = new File(tracksDir, name + ".json");
registerFileAction("tracks", trackFile);
return trackFile.exists() && fileToReader(trackFile, reader -> {
reader.beginObject();
reader.nextName(); // name record
reader.nextString(); // name
reader.nextName(); // groups
reader.beginArray();
return trackFile.exists() && readObjectFromFile(trackFile, element -> {
List<String> groups = new ArrayList<>();
while (reader.hasNext()) {
groups.add(reader.nextString());
for (JsonElement g : element.get("groups").getAsJsonArray()) {
groups.add(g.getAsString());
}
track.setGroups(groups);
reader.endArray();
reader.endObject();
return true;
});
@ -605,23 +425,6 @@ public class JSONBacking extends FlatfileBacking {
}
}
@Override
public boolean loadAllTracks() {
String[] fileNames = tracksDir.list((dir, name) -> name.endsWith(".json"));
if (fileNames == null) return false;
List<String> tracks = Arrays.stream(fileNames)
.map(s -> s.substring(0, s.length() - 5))
.collect(Collectors.toList());
tracks.forEach(this::loadTrack);
TrackManager tm = plugin.getTrackManager();
tm.getAll().values().stream()
.filter(t -> !tracks.contains(t.getName()))
.forEach(tm::unload);
return true;
}
@Override
public boolean saveTrack(Track track) {
track.getIoLock().lock();
@ -639,39 +442,139 @@ public class JSONBacking extends FlatfileBacking {
}
}
return fileToWriter(trackFile, writer -> {
writer.beginObject();
writer.name("name").value(track.getName());
writer.name("groups");
writer.beginArray();
JsonObject data = new JsonObject();
data.addProperty("name", track.getName());
JsonArray groups = new JsonArray();
for (String s : track.getGroups()) {
writer.value(s);
groups.add(s);
}
writer.endArray();
writer.endObject();
return true;
});
data.add("groups", groups);
return writeElementToFile(trackFile, data);
}, false);
} finally {
track.getIoLock().unlock();
}
}
@Override
public boolean deleteTrack(Track track) {
track.getIoLock().lock();
try {
return call(() -> {
File trackFile = new File(tracksDir, track.getName() + ".json");
registerFileAction("tracks", trackFile);
public static Set<NodeDataHolder> deserializePermissions(JsonArray permissionsSection) {
Set<NodeDataHolder> nodes = new HashSet<>();
if (trackFile.exists()) {
trackFile.delete();
for (JsonElement ent : permissionsSection) {
if (!ent.isJsonObject()) {
continue;
}
return true;
}, false);
} finally {
track.getIoLock().unlock();
JsonObject data = ent.getAsJsonObject();
Map.Entry<String, JsonElement> entry = Iterables.getFirst(data.entrySet(), null);
if (entry == null || !entry.getValue().isJsonObject()) {
continue;
}
String permission = entry.getKey();
JsonObject attributes = entry.getValue().getAsJsonObject();
boolean value = true;
String server = "global";
String world = "global";
long expiry = 0L;
ImmutableSetMultimap context = ImmutableSetMultimap.of();
if (attributes.has("value")) {
value = attributes.get("value").getAsBoolean();
}
if (attributes.has("server")) {
server = attributes.get("server").getAsString();
}
if (attributes.has("world")) {
world = attributes.get("world").getAsString();
}
if (attributes.has("expiry")) {
expiry = attributes.get("expiry").getAsLong();
}
if (attributes.has("context") && attributes.get("context").isJsonObject()) {
JsonObject contexts = attributes.get("context").getAsJsonObject();
ImmutableSetMultimap.Builder<String, String> map = ImmutableSetMultimap.builder();
for (Map.Entry<String, JsonElement> e : contexts.entrySet()) {
JsonElement val = e.getValue();
if (val.isJsonArray()) {
JsonArray vals = val.getAsJsonArray();
for (JsonElement element : vals) {
map.put(e.getKey(), element.getAsString());
}
} else {
map.put(e.getKey(), val.getAsString());
}
}
context = map.build();
}
nodes.add(NodeDataHolder.of(permission, value, server, world, expiry, context));
}
return nodes;
}
/**
 * Serializes a set of nodes into the JSON storage format: an array of
 * single-key objects, each mapping the permission string to an object of
 * attributes. Attributes equal to their defaults (global server/world,
 * no expiry, empty context) are omitted to keep the output compact, and the
 * array is sorted for a stable, diff-friendly file layout.
 *
 * @param nodes the nodes to serialize
 * @return the JSON array representation
 */
public static JsonArray serializePermissions(Set<NodeDataHolder> nodes) {
    List<JsonObject> data = new ArrayList<>();

    for (NodeDataHolder node : nodes) {
        JsonObject attributes = new JsonObject();
        attributes.addProperty("value", node.isValue());

        if (!node.getServer().equals("global")) {
            attributes.addProperty("server", node.getServer());
        }
        if (!node.getWorld().equals("global")) {
            attributes.addProperty("world", node.getWorld());
        }
        if (node.getExpiry() != 0L) {
            attributes.addProperty("expiry", node.getExpiry());
        }

        if (!node.getContexts().isEmpty()) {
            JsonObject context = new JsonObject();
            Map<String, Collection<String>> map = node.getContexts().asMap();

            for (Map.Entry<String, Collection<String>> e : map.entrySet()) {
                List<String> vals = new ArrayList<>(e.getValue());
                int size = vals.size();

                if (size == 1) {
                    // single value: write as a plain string property
                    // (fixed stray empty statement ";;" from the original)
                    context.addProperty(e.getKey(), vals.get(0));
                } else if (size > 1) {
                    // multiple values: write as an array
                    JsonArray arr = new JsonArray();
                    for (String s : vals) {
                        arr.add(s);
                    }
                    context.add(e.getKey(), arr);
                }
            }

            attributes.add("context", context);
        }

        JsonObject perm = new JsonObject();
        perm.add(node.getPermission(), attributes);
        data.add(perm);
    }

    // stable ordering by permission priority, keyed on each object's single key
    data.sort((o1, o2) -> PriorityComparator.get().compareStrings(
            Iterables.getFirst(o1.entrySet(), null).getKey(),
            Iterables.getFirst(o2.entrySet(), null).getKey()
    ));

    JsonArray arr = new JsonArray();
    for (JsonObject o : data) {
        arr.add(o);
    }
    return arr;
}
}

View File

@ -42,7 +42,7 @@ import me.lucko.luckperms.common.managers.TrackManager;
import me.lucko.luckperms.common.managers.impl.GenericUserManager;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.sqlprovider.SQLProvider;
import me.lucko.luckperms.common.storage.backing.utils.LegacySchemaMigration;
import me.lucko.luckperms.common.storage.backing.utils.LegacySQLSchemaMigration;
import me.lucko.luckperms.common.storage.backing.utils.NodeDataHolder;
import me.lucko.luckperms.common.storage.holder.NodeHeldPermission;
@ -179,7 +179,7 @@ public class SQLBacking extends AbstractBacking {
plugin.getLog().severe("Starting migration from legacy schema. This could take a while....");
plugin.getLog().severe("Please do not stop your server while the migration takes place.");
new LegacySchemaMigration(this).run();
new LegacySQLSchemaMigration(this).run();
}
}

View File

@ -23,18 +23,19 @@
package me.lucko.luckperms.common.storage.backing;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Iterables;
import me.lucko.luckperms.api.HeldPermission;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.common.core.NodeFactory;
import me.lucko.luckperms.common.core.PriorityComparator;
import me.lucko.luckperms.common.core.UserIdentifier;
import me.lucko.luckperms.common.core.model.Group;
import me.lucko.luckperms.common.core.model.Track;
import me.lucko.luckperms.common.core.model.User;
import me.lucko.luckperms.common.managers.GroupManager;
import me.lucko.luckperms.common.managers.TrackManager;
import me.lucko.luckperms.common.managers.impl.GenericUserManager;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.utils.NodeDataHolder;
import me.lucko.luckperms.common.storage.holder.NodeHeldPermission;
import org.yaml.snakeyaml.DumperOptions;
@ -46,18 +47,18 @@ import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.Arrays;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.function.Function;
import java.util.stream.Collectors;
import static me.lucko.luckperms.common.core.model.PermissionHolder.exportToLegacy;
@SuppressWarnings({"unchecked", "ResultOfMethodCallIgnored"})
public class YAMLBacking extends FlatfileBacking {
private static Yaml getYaml() {
@ -67,20 +68,11 @@ public class YAMLBacking extends FlatfileBacking {
return new Yaml(options);
}
private static <T> T call(Callable<T> c, T def) {
try {
return c.call();
} catch (Exception e) {
e.printStackTrace();
return def;
}
/**
 * Creates a YAML flatfile backing.
 *
 * @param plugin the plugin instance
 * @param pluginDir the plugin's root data directory
 * @param dataFolderName the name of the sub-folder to keep storage files in
 *                       (e.g. "yaml-storage" — see StorageFactory)
 */
public YAMLBacking(LuckPermsPlugin plugin, File pluginDir, String dataFolderName) {
    // ".yml" is the file extension used for all user/group/track files
    super(plugin, "YAML", pluginDir, ".yml", dataFolderName);
}
public YAMLBacking(LuckPermsPlugin plugin, File pluginDir) {
super(plugin, "YAML", pluginDir, ".yml");
}
private boolean writeMapToFile(File file, Map<String, Object> values) {
public boolean writeMapToFile(File file, Map<String, Object> values) {
try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
getYaml().dump(values, writer);
writer.flush();
@ -92,7 +84,7 @@ public class YAMLBacking extends FlatfileBacking {
}
}
private boolean readMapFromFile(File file, Function<Map<String, Object>, Boolean> readOperation) {
public boolean readMapFromFile(File file, Function<Map<String, Object>, Boolean> readOperation) {
boolean success = false;
try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
success = readOperation.apply((Map<String, Object>) getYaml().load(reader));
@ -116,8 +108,10 @@ public class YAMLBacking extends FlatfileBacking {
// User exists, let's load.
String name = (String) values.get("name");
user.getPrimaryGroup().setStoredValue((String) values.get("primary-group"));
Map<String, Boolean> perms = (Map<String, Boolean>) values.get("perms");
user.setNodes(perms);
Set<NodeDataHolder> data = deserializePermissions((List<Object>) values.get("permissions"));
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet());
user.setNodes(nodes);
boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false);
@ -130,12 +124,7 @@ public class YAMLBacking extends FlatfileBacking {
}
if (save) {
Map<String, Object> data = new HashMap<>();
data.put("uuid", user.getUuid().toString());
data.put("name", user.getName());
data.put("primary-group", user.getPrimaryGroup().getStoredValue());
data.put("perms", exportToLegacy(user.getNodes()));
writeMapToFile(userFile, data);
saveUser(user);
}
return true;
});
@ -177,11 +166,14 @@ public class YAMLBacking extends FlatfileBacking {
}
}
Map<String, Object> values = new HashMap<>();
Map<String, Object> values = new LinkedHashMap<>();
values.put("uuid", user.getUuid().toString());
values.put("name", user.getName());
values.put("primary-group", user.getPrimaryGroup().getStoredValue());
values.put("perms", exportToLegacy(user.getNodes()));
Set<NodeDataHolder> data = user.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
values.put("permissions", serializePermissions(data));
return writeMapToFile(userFile, values);
}, false);
} finally {
@ -197,18 +189,18 @@ public class YAMLBacking extends FlatfileBacking {
for (File file : files) {
registerFileAction("users", file);
Map<String, Boolean> nodes = new HashMap<>();
Set<NodeDataHolder> nodes = new HashSet<>();
readMapFromFile(file, values -> {
Map<String, Boolean> perms = (Map<String, Boolean>) values.get("perms");
nodes.putAll(perms);
nodes.addAll(deserializePermissions((List<Object>) values.get("permissions")));
return true;
});
boolean shouldDelete = false;
if (nodes.size() == 1) {
for (Map.Entry<String, Boolean> e : nodes.entrySet()) {
for (NodeDataHolder e : nodes) {
// There's only one
shouldDelete = e.getKey().equalsIgnoreCase("group.default") && e.getValue();
shouldDelete = e.getPermission().equalsIgnoreCase("group.default") && e.isValue();
}
}
@ -220,16 +212,6 @@ public class YAMLBacking extends FlatfileBacking {
}, false);
}
@Override
public Set<UUID> getUniqueUsers() {
String[] fileNames = usersDir.list((dir, name) -> name.endsWith(".yml"));
if (fileNames == null) return null;
return Arrays.stream(fileNames)
.map(s -> s.substring(0, s.length() - 4))
.map(UUID::fromString)
.collect(Collectors.toSet());
}
@Override
public List<HeldPermission<UUID>> getUsersWithPermission(String permission) {
ImmutableList.Builder<HeldPermission<UUID>> held = ImmutableList.builder();
@ -241,20 +223,18 @@ public class YAMLBacking extends FlatfileBacking {
registerFileAction("users", file);
UUID holder = UUID.fromString(file.getName().substring(0, file.getName().length() - 4));
Map<String, Boolean> nodes = new HashMap<>();
Set<NodeDataHolder> nodes = new HashSet<>();
readMapFromFile(file, values -> {
Map<String, Boolean> perms = (Map<String, Boolean>) values.get("perms");
nodes.putAll(perms);
nodes.addAll(deserializePermissions((List<Object>) values.get("permissions")));
return true;
});
for (Map.Entry<String, Boolean> e : nodes.entrySet()) {
Node node = NodeFactory.fromSerialisedNode(e.getKey(), e.getValue());
if (!node.getPermission().equalsIgnoreCase(permission)) {
for (NodeDataHolder e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) {
continue;
}
held.add(NodeHeldPermission.of(holder, node));
held.add(NodeHeldPermission.of(holder, e));
}
}
return true;
@ -270,10 +250,12 @@ public class YAMLBacking extends FlatfileBacking {
return call(() -> {
File groupFile = new File(groupsDir, name + ".yml");
registerFileAction("groups", groupFile);
if (groupFile.exists()) {
return readMapFromFile(groupFile, values -> {
Map<String, Boolean> perms = (Map<String, Boolean>) values.get("perms");
group.setNodes(perms);
Set<NodeDataHolder> data = deserializePermissions((List<Object>) values.get("permissions"));
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet());
group.setNodes(nodes);
return true;
});
} else {
@ -284,9 +266,10 @@ public class YAMLBacking extends FlatfileBacking {
return false;
}
Map<String, Object> values = new HashMap<>();
Map<String, Object> values = new LinkedHashMap<>();
values.put("name", group.getName());
values.put("perms", exportToLegacy(group.getNodes()));
Set<NodeDataHolder> data = group.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
values.put("permissions", serializePermissions(data));
return writeMapToFile(groupFile, values);
}
}, false);
@ -303,35 +286,19 @@ public class YAMLBacking extends FlatfileBacking {
return call(() -> {
File groupFile = new File(groupsDir, name + ".yml");
registerFileAction("groups", groupFile);
return groupFile.exists() && readMapFromFile(groupFile, values -> {
Map<String, Boolean> perms = (Map<String, Boolean>) values.get("perms");
group.setNodes(perms);
Set<NodeDataHolder> data = deserializePermissions((List<Object>) values.get("permissions"));
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet());
group.setNodes(nodes);
return true;
});
}, false);
} finally {
group.getIoLock().unlock();
}
}
@Override
public boolean loadAllGroups() {
String[] fileNames = groupsDir.list((dir, name) -> name.endsWith(".yml"));
if (fileNames == null) return false;
List<String> groups = Arrays.stream(fileNames)
.map(s -> s.substring(0, s.length() - 4))
.collect(Collectors.toList());
groups.forEach(this::loadGroup);
GroupManager gm = plugin.getGroupManager();
gm.getAll().values().stream()
.filter(g -> !groups.contains(g.getName()))
.forEach(gm::unload);
return true;
}
@Override
public boolean saveGroup(Group group) {
group.getIoLock().lock();
@ -339,6 +306,7 @@ public class YAMLBacking extends FlatfileBacking {
return call(() -> {
File groupFile = new File(groupsDir, group.getName() + ".yml");
registerFileAction("groups", groupFile);
if (!groupFile.exists()) {
try {
groupFile.createNewFile();
@ -348,9 +316,10 @@ public class YAMLBacking extends FlatfileBacking {
}
}
Map<String, Object> values = new HashMap<>();
Map<String, Object> values = new LinkedHashMap<>();
values.put("name", group.getName());
values.put("perms", exportToLegacy(group.getNodes()));
Set<NodeDataHolder> data = group.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
values.put("permissions", serializePermissions(data));
return writeMapToFile(groupFile, values);
}, false);
} finally {
@ -358,23 +327,6 @@ public class YAMLBacking extends FlatfileBacking {
}
}
@Override
public boolean deleteGroup(Group group) {
group.getIoLock().lock();
try {
return call(() -> {
File groupFile = new File(groupsDir, group.getName() + ".yml");
registerFileAction("groups", groupFile);
if (groupFile.exists()) {
groupFile.delete();
}
return true;
}, false);
} finally {
group.getIoLock().unlock();
}
}
@Override
public List<HeldPermission<String>> getGroupsWithPermission(String permission) {
ImmutableList.Builder<HeldPermission<String>> held = ImmutableList.builder();
@ -386,20 +338,18 @@ public class YAMLBacking extends FlatfileBacking {
registerFileAction("groups", file);
String holder = file.getName().substring(0, file.getName().length() - 4);
Map<String, Boolean> nodes = new HashMap<>();
Set<NodeDataHolder> nodes = new HashSet<>();
readMapFromFile(file, values -> {
Map<String, Boolean> perms = (Map<String, Boolean>) values.get("perms");
nodes.putAll(perms);
nodes.addAll(deserializePermissions((List<Object>) values.get("permissions")));
return true;
});
for (Map.Entry<String, Boolean> e : nodes.entrySet()) {
Node node = NodeFactory.fromSerialisedNode(e.getKey(), e.getValue());
if (!node.getPermission().equalsIgnoreCase(permission)) {
for (NodeDataHolder e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) {
continue;
}
held.add(NodeHeldPermission.of(holder, node));
held.add(NodeHeldPermission.of(holder, e));
}
}
return true;
@ -429,7 +379,7 @@ public class YAMLBacking extends FlatfileBacking {
return false;
}
Map<String, Object> values = new HashMap<>();
Map<String, Object> values = new LinkedHashMap<>();
values.put("name", track.getName());
values.put("groups", track.getGroups());
@ -460,23 +410,6 @@ public class YAMLBacking extends FlatfileBacking {
}
}
@Override
public boolean loadAllTracks() {
String[] fileNames = tracksDir.list((dir, name) -> name.endsWith(".yml"));
if (fileNames == null) return false;
List<String> tracks = Arrays.stream(fileNames)
.map(s -> s.substring(0, s.length() - 4))
.collect(Collectors.toList());
tracks.forEach(this::loadTrack);
TrackManager tm = plugin.getTrackManager();
tm.getAll().values().stream()
.filter(t -> !tracks.contains(t.getName()))
.forEach(tm::unload);
return true;
}
@Override
public boolean saveTrack(Track track) {
track.getIoLock().lock();
@ -494,7 +427,7 @@ public class YAMLBacking extends FlatfileBacking {
}
}
Map<String, Object> values = new HashMap<>();
Map<String, Object> values = new LinkedHashMap<>();
values.put("name", track.getName());
values.put("groups", track.getGroups());
return writeMapToFile(trackFile, values);
@ -504,21 +437,121 @@ public class YAMLBacking extends FlatfileBacking {
}
}
@Override
public boolean deleteTrack(Track track) {
track.getIoLock().lock();
try {
return call(() -> {
File trackFile = new File(tracksDir, track.getName() + ".yml");
registerFileAction("tracks", trackFile);
public static Set<NodeDataHolder> deserializePermissions(List<Object> permissionsSection) {
Set<NodeDataHolder> nodes = new HashSet<>();
if (trackFile.exists()) {
trackFile.delete();
for (Object perm : permissionsSection) {
if (!(perm instanceof Map)) {
continue;
}
return true;
}, false);
} finally {
track.getIoLock().unlock();
Map<String, Object> data = (Map<String, Object>) perm;
Map.Entry<String, Object> entry = Iterables.getFirst(data.entrySet(), null);
if (entry == null) {
continue;
}
String permission = entry.getKey();
if (entry.getValue() != null && entry.getValue() instanceof Map) {
Map<String, Object> attributes = (Map<String, Object>) entry.getValue();
boolean value = true;
String server = "global";
String world = "global";
long expiry = 0L;
ImmutableSetMultimap context = ImmutableSetMultimap.of();
if (attributes.containsKey("value")) {
value = (boolean) attributes.get("value");
}
if (attributes.containsKey("server")) {
server = attributes.get("server").toString();
}
if (attributes.containsKey("world")) {
world = attributes.get("world").toString();
}
if (attributes.containsKey("expiry")) {
Object exp = attributes.get("expiry");
if (exp instanceof Long || exp.getClass().isPrimitive()) {
expiry = (long) exp;
} else {
expiry = (int) exp;
}
}
if (attributes.get("context") != null && attributes.get("context") instanceof Map) {
Map<String, Object> contexts = (Map<String, Object>) attributes.get("context");
ImmutableSetMultimap.Builder<String, String> map = ImmutableSetMultimap.builder();
for (Map.Entry<String, Object> e : contexts.entrySet()) {
Object val = e.getValue();
if (val instanceof List) {
map.putAll(e.getKey(), ((List<String>) val));
} else {
map.put(e.getKey(), val.toString());
}
}
context = map.build();
}
nodes.add(NodeDataHolder.of(permission, value, server, world, expiry, context));
}
}
return nodes;
}
/**
 * Serialises a set of {@link NodeDataHolder}s into the list-of-maps structure
 * written to the "permissions" section of YAML storage files.
 *
 * <p>Each node becomes a single-entry map of permission -&gt; attributes. The
 * "value" attribute is always written; "server", "world", "expiry" and
 * "context" are only written when they differ from their defaults. The output
 * list is sorted with the plugin's priority comparator so file contents are
 * stable between saves.</p>
 *
 * @param nodes the nodes to serialise
 * @return the serialised list, suitable for writeMapToFile
 */
public static List<Map<String, Object>> serializePermissions(Set<NodeDataHolder> nodes) {
    List<Map<String, Object>> data = new ArrayList<>();

    for (NodeDataHolder node : nodes) {
        // LinkedHashMap keeps attribute order deterministic in the output file,
        // consistent with the other maps written by this backing
        Map<String, Object> attributes = new LinkedHashMap<>();
        attributes.put("value", node.isValue());

        if (!node.getServer().equals("global")) {
            attributes.put("server", node.getServer());
        }
        if (!node.getWorld().equals("global")) {
            attributes.put("world", node.getWorld());
        }
        if (node.getExpiry() != 0L) {
            attributes.put("expiry", node.getExpiry());
        }

        if (!node.getContexts().isEmpty()) {
            Map<String, Object> context = new LinkedHashMap<>();
            Map<String, Collection<String>> map = node.getContexts().asMap();

            for (Map.Entry<String, Collection<String>> e : map.entrySet()) {
                List<String> vals = new ArrayList<>(e.getValue());
                int size = vals.size();

                if (size == 1) {
                    // collapse single values so they serialise as a plain scalar
                    context.put(e.getKey(), vals.get(0));
                } else if (size > 1) {
                    context.put(e.getKey(), vals);
                }
            }

            attributes.put("context", context);
        }

        Map<String, Object> perm = new LinkedHashMap<>();
        perm.put(node.getPermission(), attributes);
        data.add(perm);
    }

    // stable ordering by the (single) permission key of each entry
    data.sort((o1, o2) -> PriorityComparator.get().compareStrings(
            Iterables.getFirst(o1.keySet(), ""),
            Iterables.getFirst(o2.keySet(), ""))
    );
    return data;
}
}

View File

@ -0,0 +1,180 @@
/*
* Copyright (c) 2016 Lucko (Luck) <luck@lucko.me>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.backing.utils;
import lombok.RequiredArgsConstructor;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import me.lucko.luckperms.common.core.NodeFactory;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.JSONBacking;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
/**
 * One-shot migration task: converts legacy JSON storage files, which held
 * permissions as a flat "perms" object of serialised-node -> boolean, into
 * the new layout where permissions are written as a structured "permissions"
 * array (see JSONBacking.serializePermissions).
 *
 * Files that did not change format (actions.log, uuidcache.txt, the tracks
 * directory) are simply moved into the new data folder. Once complete, the
 * old data folder is renamed to "old-data-backup".
 *
 * NOTE(review): per-file failures are logged via printStackTrace and skipped,
 * so a partially-migrated file set is possible - presumably acceptable for a
 * best-effort migration; verify before reusing this pattern.
 */
@SuppressWarnings("unchecked")
@RequiredArgsConstructor
public class LegacyJSONSchemaMigration implements Runnable {
    private final LuckPermsPlugin plugin;
    private final JSONBacking backing;
    // legacy storage root being migrated from
    private final File oldDataFolder;
    // new storage root being migrated to
    private final File newDataFolder;

    @Override
    public void run() {
        plugin.getLog().warn("Moving existing files to their new location.");
        // these keep the same format - they only need to be relocated
        relocateFile(oldDataFolder, newDataFolder, "actions.log");
        relocateFile(oldDataFolder, newDataFolder, "uuidcache.txt");
        relocateFile(oldDataFolder, newDataFolder, "tracks");

        plugin.getLog().warn("Migrating group files");
        File oldGroupsDir = new File(oldDataFolder, "groups");
        if (oldGroupsDir.exists() && oldGroupsDir.isDirectory()) {
            File newGroupsDir = new File(newDataFolder, "groups");
            newGroupsDir.mkdir();

            File[] toMigrate = oldGroupsDir.listFiles((dir, name) -> name.endsWith(backing.getFileExtension()));
            if (toMigrate != null) {
                for (File oldFile : toMigrate) {
                    try {
                        File replacementFile = new File(newGroupsDir, oldFile.getName());

                        // read the group name and legacy "perms" map out of the old file
                        AtomicReference<String> name = new AtomicReference<>(null);
                        Map<String, Boolean> perms = new HashMap<>();
                        backing.readObjectFromFile(oldFile, values -> {
                            name.set(values.get("name").getAsString());
                            JsonObject permsSection = values.get("perms").getAsJsonObject();
                            for (Map.Entry<String, JsonElement> e : permsSection.entrySet()) {
                                perms.put(e.getKey(), e.getValue().getAsBoolean());
                            }
                            return true;
                        });

                        // convert each legacy serialised node string to the new holder form
                        Set<NodeDataHolder> nodes = perms.entrySet().stream()
                                .map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue()))
                                .map(NodeDataHolder::fromNode)
                                .collect(Collectors.toSet());

                        if (!replacementFile.exists()) {
                            try {
                                replacementFile.createNewFile();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }

                        // write the new-format file, then remove the legacy copy
                        JsonObject data = new JsonObject();
                        data.addProperty("name", name.get());
                        data.add("permissions", JSONBacking.serializePermissions(nodes));
                        backing.writeElementToFile(replacementFile, data);

                        oldFile.delete();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        plugin.getLog().warn("Migrated group files, now migrating user files.");

        File oldUsersDir = new File(oldDataFolder, "users");
        if (oldUsersDir.exists() && oldUsersDir.isDirectory()) {
            File newUsersDir = new File(newDataFolder, "users");
            newUsersDir.mkdir();

            File[] toMigrate = oldUsersDir.listFiles((dir, name) -> name.endsWith(backing.getFileExtension()));
            if (toMigrate != null) {
                for (File oldFile : toMigrate) {
                    try {
                        File replacementFile = new File(newUsersDir, oldFile.getName());

                        // read uuid / name / primary group and the legacy "perms" map.
                        // note: the legacy JSON layout used the camelCase key "primaryGroup"
                        AtomicReference<String> uuid = new AtomicReference<>(null);
                        AtomicReference<String> name = new AtomicReference<>(null);
                        AtomicReference<String> primaryGroup = new AtomicReference<>(null);
                        Map<String, Boolean> perms = new HashMap<>();
                        backing.readObjectFromFile(oldFile, values -> {
                            uuid.set(values.get("uuid").getAsString());
                            name.set(values.get("name").getAsString());
                            primaryGroup.set(values.get("primaryGroup").getAsString());
                            JsonObject permsSection = values.get("perms").getAsJsonObject();
                            for (Map.Entry<String, JsonElement> e : permsSection.entrySet()) {
                                perms.put(e.getKey(), e.getValue().getAsBoolean());
                            }
                            return true;
                        });

                        // convert each legacy serialised node string to the new holder form
                        Set<NodeDataHolder> nodes = perms.entrySet().stream()
                                .map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue()))
                                .map(NodeDataHolder::fromNode)
                                .collect(Collectors.toSet());

                        if (!replacementFile.exists()) {
                            try {
                                replacementFile.createNewFile();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }

                        // write the new-format file, then remove the legacy copy
                        JsonObject data = new JsonObject();
                        data.addProperty("uuid", uuid.get());
                        data.addProperty("name", name.get());
                        data.addProperty("primaryGroup", primaryGroup.get());
                        data.add("permissions", JSONBacking.serializePermissions(nodes));
                        backing.writeElementToFile(replacementFile, data);

                        oldFile.delete();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        plugin.getLog().warn("Migrated user files.");

        // rename the old data file
        oldDataFolder.renameTo(new File(oldDataFolder.getParent(), "old-data-backup"));

        plugin.getLog().warn("Legacy schema migration complete.");
    }

    /**
     * Moves the file (or directory) named fileName from dirFrom into dirTo,
     * if it exists. Failures are logged and otherwise ignored.
     */
    private static void relocateFile(File dirFrom, File dirTo, String fileName) {
        File file = new File(dirFrom, fileName);
        if (file.exists()) {
            try {
                Files.move(file.toPath(), new File(dirTo, fileName).toPath());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

View File

@ -45,13 +45,13 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
@RequiredArgsConstructor
public class LegacySchemaMigration implements Runnable {
public class LegacySQLSchemaMigration implements Runnable {
private static final Type NODE_MAP_TYPE = new TypeToken<Map<String, Boolean>>() {}.getType();
private final SQLBacking backing;
@Override
public void run() {
backing.getPlugin().getLog().info("Collecting UUID data from the old tables.");
backing.getPlugin().getLog().warn("Collecting UUID data from the old tables.");
Map<UUID, String> uuidData = new HashMap<>();
try (Connection c = backing.getProvider().getConnection()) {
@ -70,7 +70,7 @@ public class LegacySchemaMigration implements Runnable {
e.printStackTrace();
}
backing.getPlugin().getLog().info("Found " + uuidData.size() + " uuid data entries. Copying to new tables...");
backing.getPlugin().getLog().warn("Found " + uuidData.size() + " uuid data entries. Copying to new tables...");
List<Map.Entry<UUID, String>> uuidEntries = uuidData.entrySet().stream().collect(Collectors.toList());
List<List<Map.Entry<UUID, String>>> partitionedUuidEntries = Lists.partition(uuidEntries, 100);
@ -95,8 +95,8 @@ public class LegacySchemaMigration implements Runnable {
uuidEntries.clear();
partitionedUuidEntries.clear();
backing.getPlugin().getLog().info("Migrated all uuid data.");
backing.getPlugin().getLog().info("Starting user data migration.");
backing.getPlugin().getLog().warn("Migrated all uuid data.");
backing.getPlugin().getLog().warn("Starting user data migration.");
Set<UUID> users = new HashSet<>();
try (Connection c = backing.getProvider().getConnection()) {
@ -115,7 +115,7 @@ public class LegacySchemaMigration implements Runnable {
e.printStackTrace();
}
backing.getPlugin().getLog().info("Found " + users.size() + " user data entries. Copying to new tables...");
backing.getPlugin().getLog().warn("Found " + users.size() + " user data entries. Copying to new tables...");
AtomicInteger userCounter = new AtomicInteger(0);
for (UUID uuid : users) {
@ -184,14 +184,14 @@ public class LegacySchemaMigration implements Runnable {
int i = userCounter.incrementAndGet();
if (i % 100 == 0) {
backing.getPlugin().getLog().info("Migrated " + i + " users so far...");
backing.getPlugin().getLog().warn("Migrated " + i + " users so far...");
}
}
users.clear();
backing.getPlugin().getLog().info("Migrated all user data.");
backing.getPlugin().getLog().info("Starting group data migration.");
backing.getPlugin().getLog().warn("Migrated all user data.");
backing.getPlugin().getLog().warn("Starting group data migration.");
Map<String, String> groupData = new HashMap<>();
try (Connection c = backing.getProvider().getConnection()) {
@ -206,7 +206,7 @@ public class LegacySchemaMigration implements Runnable {
e.printStackTrace();
}
backing.getPlugin().getLog().info("Found " + groupData.size() + " group data entries. Copying to new tables...");
backing.getPlugin().getLog().warn("Found " + groupData.size() + " group data entries. Copying to new tables...");
for (Map.Entry<String, String> e : groupData.entrySet()) {
String name = e.getKey();
String permsJson = e.getValue();
@ -251,9 +251,9 @@ public class LegacySchemaMigration implements Runnable {
}
groupData.clear();
backing.getPlugin().getLog().info("Migrated all group data.");
backing.getPlugin().getLog().warn("Migrated all group data.");
backing.getPlugin().getLog().info("Renaming action and track tables.");
backing.getPlugin().getLog().warn("Renaming action and track tables.");
try (Connection c = backing.getProvider().getConnection()) {
try (PreparedStatement ps = c.prepareStatement(backing.getPrefix().apply("DROP TABLE {prefix}actions"))) {
ps.execute();
@ -272,6 +272,6 @@ public class LegacySchemaMigration implements Runnable {
ex.printStackTrace();
}
backing.getPlugin().getLog().info("Legacy schema migration complete.");
backing.getPlugin().getLog().warn("Legacy schema migration complete.");
}
}

View File

@ -0,0 +1,172 @@
/*
* Copyright (c) 2016 Lucko (Luck) <luck@lucko.me>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.backing.utils;
import lombok.RequiredArgsConstructor;
import me.lucko.luckperms.common.core.NodeFactory;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.YAMLBacking;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
/**
 * One-shot migration task: converts legacy YAML storage files, which held
 * permissions as a flat "perms" map of serialised-node -> boolean, into the
 * new layout where permissions are written as a structured "permissions"
 * list (see YAMLBacking.serializePermissions).
 *
 * Files that did not change format (actions.log, uuidcache.txt, the tracks
 * directory) are simply moved into the new data folder. Once complete, the
 * old data folder is renamed to "old-data-backup".
 *
 * NOTE(review): per-file failures are logged via printStackTrace and skipped,
 * so a partially-migrated file set is possible - presumably acceptable for a
 * best-effort migration; verify before reusing this pattern.
 */
@SuppressWarnings("unchecked")
@RequiredArgsConstructor
public class LegacyYAMLSchemaMigration implements Runnable {
    private final LuckPermsPlugin plugin;
    private final YAMLBacking backing;
    // legacy storage root being migrated from
    private final File oldDataFolder;
    // new storage root being migrated to
    private final File newDataFolder;

    @Override
    public void run() {
        plugin.getLog().warn("Moving existing files to their new location.");
        // these keep the same format - they only need to be relocated
        relocateFile(oldDataFolder, newDataFolder, "actions.log");
        relocateFile(oldDataFolder, newDataFolder, "uuidcache.txt");
        relocateFile(oldDataFolder, newDataFolder, "tracks");

        plugin.getLog().warn("Migrating group files");
        File oldGroupsDir = new File(oldDataFolder, "groups");
        if (oldGroupsDir.exists() && oldGroupsDir.isDirectory()) {
            File newGroupsDir = new File(newDataFolder, "groups");
            newGroupsDir.mkdir();

            File[] toMigrate = oldGroupsDir.listFiles((dir, name) -> name.endsWith(backing.getFileExtension()));
            if (toMigrate != null) {
                for (File oldFile : toMigrate) {
                    try {
                        File replacementFile = new File(newGroupsDir, oldFile.getName());

                        // read the group name and legacy "perms" map out of the old file
                        AtomicReference<String> name = new AtomicReference<>(null);
                        Map<String, Boolean> perms = new HashMap<>();
                        backing.readMapFromFile(oldFile, values -> {
                            name.set((String) values.get("name"));
                            perms.putAll((Map<String, Boolean>) values.get("perms"));
                            return true;
                        });

                        // convert each legacy serialised node string to the new holder form
                        Set<NodeDataHolder> nodes = perms.entrySet().stream()
                                .map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue()))
                                .map(NodeDataHolder::fromNode)
                                .collect(Collectors.toSet());

                        if (!replacementFile.exists()) {
                            try {
                                replacementFile.createNewFile();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }

                        // write the new-format file, then remove the legacy copy.
                        // LinkedHashMap keeps key order stable in the output
                        Map<String, Object> values = new LinkedHashMap<>();
                        values.put("name", name.get());
                        values.put("permissions", YAMLBacking.serializePermissions(nodes));
                        backing.writeMapToFile(replacementFile, values);

                        oldFile.delete();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        plugin.getLog().warn("Migrated group files, now migrating user files.");

        File oldUsersDir = new File(oldDataFolder, "users");
        if (oldUsersDir.exists() && oldUsersDir.isDirectory()) {
            File newUsersDir = new File(newDataFolder, "users");
            newUsersDir.mkdir();

            File[] toMigrate = oldUsersDir.listFiles((dir, name) -> name.endsWith(backing.getFileExtension()));
            if (toMigrate != null) {
                for (File oldFile : toMigrate) {
                    try {
                        File replacementFile = new File(newUsersDir, oldFile.getName());

                        // read uuid / name / "primary-group" and the legacy "perms" map
                        AtomicReference<String> uuid = new AtomicReference<>(null);
                        AtomicReference<String> name = new AtomicReference<>(null);
                        AtomicReference<String> primaryGroup = new AtomicReference<>(null);
                        Map<String, Boolean> perms = new HashMap<>();
                        backing.readMapFromFile(oldFile, values -> {
                            uuid.set((String) values.get("uuid"));
                            name.set((String) values.get("name"));
                            primaryGroup.set((String) values.get("primary-group"));
                            perms.putAll((Map<String, Boolean>) values.get("perms"));
                            return true;
                        });

                        // convert each legacy serialised node string to the new holder form
                        Set<NodeDataHolder> nodes = perms.entrySet().stream()
                                .map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue()))
                                .map(NodeDataHolder::fromNode)
                                .collect(Collectors.toSet());

                        if (!replacementFile.exists()) {
                            try {
                                replacementFile.createNewFile();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }

                        // write the new-format file, then remove the legacy copy
                        Map<String, Object> values = new LinkedHashMap<>();
                        values.put("uuid", uuid.get());
                        values.put("name", name.get());
                        values.put("primary-group", primaryGroup.get());
                        values.put("permissions", YAMLBacking.serializePermissions(nodes));
                        backing.writeMapToFile(replacementFile, values);

                        oldFile.delete();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        plugin.getLog().warn("Migrated user files.");

        // rename the old data file
        oldDataFolder.renameTo(new File(oldDataFolder.getParent(), "old-data-backup"));

        plugin.getLog().warn("Legacy schema migration complete.");
    }

    /**
     * Moves the file (or directory) named fileName from dirFrom into dirTo,
     * if it exists. Failures are logged and otherwise ignored.
     */
    private static void relocateFile(File dirFrom, File dirTo, String fileName) {
        File file = new File(dirFrom, fileName);
        if (file.exists()) {
            try {
                Files.move(file.toPath(), new File(dirTo, fileName).toPath());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }
}

View File

@ -1,44 +0,0 @@
/*
* Copyright (c) 2016 Lucko (Luck) <luck@lucko.me>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.utils;
import java.util.Objects;
/**
 * A {@link java.util.function.Function} analogue whose application is
 * permitted to throw a checked {@link Exception}.
 *
 * @param <T> the input type
 * @param <R> the result type
 */
public interface ThrowingFunction<T, R> {

    /**
     * Applies this function to the given argument.
     *
     * @param t the input
     * @return the function result
     * @throws Exception if the computation fails
     */
    R apply(T t) throws Exception;

    /**
     * Returns a composed function that first applies {@code before}, then
     * applies this function to the intermediate result.
     *
     * @param before the function to apply first, must not be null
     * @param <V> the input type of the composed function
     * @return the composed function
     */
    default <V> ThrowingFunction<V, R> compose(ThrowingFunction<? super V, ? extends T> before) {
        Objects.requireNonNull(before);
        return input -> this.apply(before.apply(input));
    }

    /**
     * Returns a composed function that first applies this function, then
     * applies {@code after} to the result.
     *
     * @param after the function to apply afterwards, must not be null
     * @param <V> the result type of the composed function
     * @return the composed function
     */
    default <V> ThrowingFunction<T, V> andThen(ThrowingFunction<? super R, ? extends V> after) {
        Objects.requireNonNull(after);
        return input -> after.apply(this.apply(input));
    }

    /**
     * Returns a function that always yields its input unchanged.
     *
     * @param <T> the input and result type
     * @return the identity function
     */
    static <T> ThrowingFunction<T, T> identity() {
        return input -> input;
    }
}