Rewrite flatfile storage (YAML & JSON) to use configurate, add HOCON storage method

Luck 2017-10-21 23:20:45 +01:00
parent aa0be40124
commit fa4bb0447a
No known key found for this signature in database
GPG Key ID: EFA9B3EC5FD90F8B
17 changed files with 1289 additions and 1463 deletions
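For server owners, the practical effect is a new flatfile option alongside json and yaml; a minimal sketch of enabling it, assuming the standard "storage-method" config key (the key itself is not visible in the hunks below):

storage-method: hocon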

@ -263,7 +263,7 @@ vault-debug: false
# Which storage method the plugin should use.
#
# See: https://github.com/lucko/LuckPerms/wiki/Choosing-a-Storage-type
-# Currently supported: mysql, mariadb, postgresql, sqlite, h2, json, yaml, mongodb
+# Currently supported: mysql, mariadb, postgresql, sqlite, h2, json, yaml, hocon, mongodb
#
# Fill out connection info below if you're using MySQL, MariaDB, PostgreSQL or MongoDB
# If your MySQL server supports it, the "mariadb" option is preferred over "mysql".

@ -197,7 +197,7 @@ meta-formatting:
# Which storage method the plugin should use.
#
# See: https://github.com/lucko/LuckPerms/wiki/Choosing-a-Storage-type
-# Currently supported: mysql, mariadb, postgresql, sqlite, h2, json, yaml, mongodb
+# Currently supported: mysql, mariadb, postgresql, sqlite, h2, json, yaml, hocon, mongodb
#
# Fill out connection info below if you're using MySQL, MariaDB, PostgreSQL or MongoDB
# If your MySQL server supports it, the "mariadb" option is preferred over "mysql".

@ -56,6 +56,50 @@
</exclusion>
</exclusions>
</dependency>
<!-- configurate -->
<dependency>
<groupId>ninja.leaping.configurate</groupId>
<artifactId>configurate-core</artifactId>
<version>3.3</version>
<scope>compile</scope>
<optional>true</optional>
<exclusions>
<exclusion>
<groupId>com.google.guava</groupId>
<artifactId>guava</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>ninja.leaping.configurate</groupId>
<artifactId>configurate-yaml</artifactId>
<version>3.3</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.yaml</groupId>
<artifactId>snakeyaml</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>ninja.leaping.configurate</groupId>
<artifactId>configurate-gson</artifactId>
<version>3.3</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>com.google.code.gson</groupId>
<artifactId>gson</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>ninja.leaping.configurate</groupId>
<artifactId>configurate-hocon</artifactId>
<version>3.3</version>
<scope>provided</scope>
</dependency>
<!-- HikariCP -->
<dependency>
<groupId>com.zaxxer</groupId>

@ -42,7 +42,13 @@ public enum Dependency {
SLF4J_SIMPLE("org.slf4j", "slf4j-simple", "1.7.25"),
SLF4J_API("org.slf4j", "slf4j-api", "1.7.25"),
MONGODB_DRIVER("org.mongodb", "mongo-java-driver", "3.5.0"),
-JEDIS("https://github.com/lucko/jedis/releases/download/jedis-2.9.1-shaded/jedis-2.9.1-shaded.jar", "2.9.1-shaded");
+JEDIS("https://github.com/lucko/jedis/releases/download/jedis-2.9.1-shaded/jedis-2.9.1-shaded.jar", "2.9.1-shaded"),
CONFIGURATE_CORE("ninja.leaping.configurate", "configurate-core", "3.3"),
CONFIGURATE_GSON("ninja.leaping.configurate", "configurate-gson", "3.3"),
CONFIGURATE_YAML("ninja.leaping.configurate", "configurate-yaml", "3.3"),
CONFIGURATE_HOCON("ninja.leaping.configurate", "configurate-hocon", "3.3"),
HOCON_CONFIG("com.typesafe", "config", "1.3.1");
private final String url;
private final String version;

@ -30,6 +30,7 @@ import lombok.experimental.UtilityClass;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import me.lucko.luckperms.api.PlatformType;
import me.lucko.luckperms.common.config.ConfigKeys;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.StorageType;
@ -67,8 +68,9 @@ public class DependencyManager {
}
private static final Map<StorageType, List<Dependency>> STORAGE_DEPENDENCIES = ImmutableMap.<StorageType, List<Dependency>>builder()
-.put(StorageType.JSON, ImmutableList.of())
+.put(StorageType.JSON, ImmutableList.of(Dependency.CONFIGURATE_CORE, Dependency.CONFIGURATE_GSON))
-.put(StorageType.YAML, ImmutableList.of())
+.put(StorageType.YAML, ImmutableList.of(Dependency.CONFIGURATE_CORE, Dependency.CONFIGURATE_YAML))
.put(StorageType.HOCON, ImmutableList.of(Dependency.HOCON_CONFIG, Dependency.CONFIGURATE_CORE, Dependency.CONFIGURATE_HOCON))
.put(StorageType.MONGODB, ImmutableList.of(Dependency.MONGODB_DRIVER))
.put(StorageType.MARIADB, ImmutableList.of(Dependency.MARIADB_DRIVER, Dependency.SLF4J_API, Dependency.SLF4J_SIMPLE, Dependency.HIKARI))
.put(StorageType.MYSQL, ImmutableList.of(Dependency.MYSQL_DRIVER, Dependency.SLF4J_API, Dependency.SLF4J_SIMPLE, Dependency.HIKARI))
@ -93,6 +95,15 @@ public class DependencyManager {
dependencies.remove(Dependency.SLF4J_SIMPLE);
}
// don't load configurate dependencies on sponge
if (plugin.getServerType() == PlatformType.SPONGE) {
dependencies.remove(Dependency.CONFIGURATE_CORE);
dependencies.remove(Dependency.CONFIGURATE_GSON);
dependencies.remove(Dependency.CONFIGURATE_YAML);
dependencies.remove(Dependency.CONFIGURATE_HOCON);
dependencies.remove(Dependency.HOCON_CONFIG);
}
loadDependencies(plugin, dependencies);
}

@ -32,6 +32,7 @@ import com.google.common.collect.ImmutableSet;
import me.lucko.luckperms.common.config.ConfigKeys;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.AbstractBacking;
import me.lucko.luckperms.common.storage.backing.file.HOCONBacking;
import me.lucko.luckperms.common.storage.backing.file.JSONBacking;
import me.lucko.luckperms.common.storage.backing.file.YAMLBacking;
import me.lucko.luckperms.common.storage.backing.mongodb.MongoDBBacking;
@ -155,9 +156,11 @@ public class StorageFactory {
plugin.getConfiguration().get(ConfigKeys.MONGODB_COLLECTION_PREFIX)
);
case YAML:
-return new YAMLBacking(plugin, plugin.getDataDirectory(), "yaml-storage");
+return new YAMLBacking(plugin, "yaml-storage");
case HOCON:
return new HOCONBacking(plugin, "hocon-storage");
default:
-return new JSONBacking(plugin, plugin.getDataDirectory(), "json-storage");
+return new JSONBacking(plugin, "json-storage");
}
}
}

@ -35,6 +35,7 @@ public enum StorageType {
JSON("JSON", "json", "flatfile"), JSON("JSON", "json", "flatfile"),
YAML("YAML", "yaml", "yml"), YAML("YAML", "yaml", "yml"),
HOCON("HOCON", "hocon"),
MONGODB("MongoDB", "mongodb"), MONGODB("MongoDB", "mongodb"),
MARIADB("MariaDB", "mariadb"), MARIADB("MariaDB", "mariadb"),
MYSQL("MySQL", "mysql"), MYSQL("MySQL", "mysql"),

@ -0,0 +1,853 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.backing.file;
import lombok.Getter;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Iterables;
import com.google.common.reflect.TypeToken;
import me.lucko.luckperms.api.HeldPermission;
import me.lucko.luckperms.api.LogEntry;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.context.ContextSet;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.api.context.MutableContextSet;
import me.lucko.luckperms.common.actionlog.Log;
import me.lucko.luckperms.common.bulkupdate.BulkUpdate;
import me.lucko.luckperms.common.commands.utils.Util;
import me.lucko.luckperms.common.constants.Constants;
import me.lucko.luckperms.common.managers.GenericUserManager;
import me.lucko.luckperms.common.managers.GroupManager;
import me.lucko.luckperms.common.managers.TrackManager;
import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.common.model.Track;
import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.node.NodeHeldPermission;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.references.UserIdentifier;
import me.lucko.luckperms.common.storage.backing.AbstractBacking;
import me.lucko.luckperms.common.storage.backing.legacy.LegacyJSONSchemaMigration;
import me.lucko.luckperms.common.storage.backing.legacy.LegacyYAMLSchemaMigration;
import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.SimpleConfigurationNode;
import ninja.leaping.configurate.Types;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.UUID;
import java.util.logging.FileHandler;
import java.util.logging.Formatter;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import java.util.stream.Collectors;
public abstract class ConfigurateBacking extends AbstractBacking {
private static final String LOG_FORMAT = "%s(%s): [%s] %s(%s) --> %s";
private final Logger actionLogger = Logger.getLogger("luckperms_actions");
private final FileUuidCache uuidCache = new FileUuidCache();
@Getter
private final String fileExtension;
private final String dataFolderName;
private File uuidDataFile;
private File actionLogFile;
private File usersDirectory;
private File groupsDirectory;
private File tracksDirectory;
protected ConfigurateBacking(LuckPermsPlugin plugin, String name, String fileExtension, String dataFolderName) {
super(plugin, name);
this.fileExtension = fileExtension;
this.dataFolderName = dataFolderName;
}
private ConfigurationNode readFile(StorageLocation location, String name) throws IOException {
File file = new File(getDirectory(location), name + fileExtension);
registerFileAction(location, file);
return readFile(file);
}
protected abstract ConfigurationNode readFile(File file) throws IOException;
private void saveFile(StorageLocation location, String name, ConfigurationNode node) throws IOException {
File file = new File(getDirectory(location), name + fileExtension);
registerFileAction(location, file);
saveFile(file, node);
}
protected abstract void saveFile(File file, ConfigurationNode node) throws IOException;
private File getDirectory(StorageLocation location) {
switch (location) {
case USER:
return usersDirectory;
case GROUP:
return groupsDirectory;
case TRACK:
return tracksDirectory;
default:
throw new RuntimeException();
}
}
private FilenameFilter getFileTypeFilter() {
return (dir, name) -> name.endsWith(fileExtension);
}
private boolean reportException(String file, Exception ex) {
plugin.getLog().warn("Exception thrown whilst performing i/o: " + file);
ex.printStackTrace();
return false;
}
private void registerFileAction(StorageLocation type, File file) {
plugin.applyToFileWatcher(fileWatcher -> fileWatcher.registerChange(type, file.getName()));
}
@Override
public void init() {
try {
setupFiles();
} catch (IOException e) {
e.printStackTrace();
return;
}
uuidCache.load(uuidDataFile);
try {
FileHandler fh = new FileHandler(actionLogFile.getAbsolutePath(), 0, 1, true);
fh.setFormatter(new Formatter() {
@Override
public String format(LogRecord record) {
return new Date(record.getMillis()).toString() + ": " + record.getMessage() + "\n";
}
});
actionLogger.addHandler(fh);
actionLogger.setUseParentHandlers(false);
actionLogger.setLevel(Level.ALL);
actionLogger.setFilter(record -> true);
} catch (Exception e) {
e.printStackTrace();
}
setAcceptingLogins(true);
}
private void setupFiles() throws IOException {
File data = new File(plugin.getDataDirectory(), dataFolderName);
// Try to perform schema migration
File oldData = new File(plugin.getDataDirectory(), "data");
if (!data.exists() && oldData.exists()) {
data.mkdirs();
plugin.getLog().severe("===== Legacy Schema Migration =====");
plugin.getLog().severe("Starting migration from legacy schema. This could take a while....");
plugin.getLog().severe("Please do not stop your server while the migration takes place.");
if (this instanceof YAMLBacking) {
try {
new LegacyYAMLSchemaMigration(plugin, (YAMLBacking) this, oldData, data).run();
} catch (Exception e) {
e.printStackTrace();
}
} else if (this instanceof JSONBacking) {
try {
new LegacyJSONSchemaMigration(plugin, (JSONBacking) this, oldData, data).run();
} catch (Exception e) {
e.printStackTrace();
}
}
} else {
data.mkdirs();
}
usersDirectory = new File(data, "users");
usersDirectory.mkdir();
groupsDirectory = new File(data, "groups");
groupsDirectory.mkdir();
tracksDirectory = new File(data, "tracks");
tracksDirectory.mkdir();
uuidDataFile = new File(data, "uuidcache.txt");
uuidDataFile.createNewFile();
actionLogFile = new File(data, "actions.log");
actionLogFile.createNewFile();
// Listen for file changes.
plugin.applyToFileWatcher(watcher -> {
watcher.subscribe("user", usersDirectory.toPath(), s -> {
if (!s.endsWith(fileExtension)) {
return;
}
String user = s.substring(0, s.length() - fileExtension.length());
UUID uuid = Util.parseUuid(user);
if (uuid == null) {
return;
}
User u = plugin.getUserManager().getIfLoaded(uuid);
if (u != null) {
plugin.getLog().info("[FileWatcher] Refreshing user " + u.getFriendlyName());
plugin.getStorage().loadUser(uuid, "null");
}
});
watcher.subscribe("group", groupsDirectory.toPath(), s -> {
if (!s.endsWith(fileExtension)) {
return;
}
String groupName = s.substring(0, s.length() - fileExtension.length());
plugin.getLog().info("[FileWatcher] Refreshing group " + groupName);
plugin.getUpdateTaskBuffer().request();
});
watcher.subscribe("track", tracksDirectory.toPath(), s -> {
if (!s.endsWith(fileExtension)) {
return;
}
String trackName = s.substring(0, s.length() - fileExtension.length());
plugin.getLog().info("[FileWatcher] Refreshing track " + trackName);
plugin.getStorage().loadAllTracks();
});
});
}
@Override
public void shutdown() {
uuidCache.save(uuidDataFile);
}
@Override
public boolean logAction(LogEntry entry) {
//noinspection deprecation
actionLogger.info(String.format(LOG_FORMAT,
(entry.getActor().equals(Constants.CONSOLE_UUID) ? "" : entry.getActor() + " "),
entry.getActorName(),
Character.toString(entry.getType()),
(entry.getActed() == null ? "" : entry.getActed().toString() + " "),
entry.getActedName(),
entry.getAction())
);
return true;
}
@Override
public Log getLog() {
// Flatfile doesn't support viewing log data from in-game. You can just read the file in a text editor.
return Log.builder().build();
}
@Override
public boolean applyBulkUpdate(BulkUpdate bulkUpdate) {
try {
if (bulkUpdate.getDataType().isIncludingUsers()) {
File[] files = getDirectory(StorageLocation.USER).listFiles(getFileTypeFilter());
if (files == null) {
throw new IllegalStateException("Users directory matched no files.");
}
for (File file : files) {
try {
registerFileAction(StorageLocation.USER, file);
ConfigurationNode object = readFile(file);
Set<NodeModel> nodes = readNodes(object);
Set<NodeModel> results = nodes.stream()
.map(bulkUpdate::apply)
.filter(Objects::nonNull)
.collect(Collectors.toSet());
if (!nodes.equals(results)) {
writeNodes(object, results);
}
} catch (Exception e) {
reportException(file.getName(), e);
}
}
}
if (bulkUpdate.getDataType().isIncludingGroups()) {
File[] files = getDirectory(StorageLocation.GROUP).listFiles(getFileTypeFilter());
if (files == null) {
throw new IllegalStateException("Groups directory matched no files.");
}
for (File file : files) {
try {
registerFileAction(StorageLocation.GROUP, file);
ConfigurationNode object = readFile(file);
Set<NodeModel> nodes = readNodes(object);
Set<NodeModel> results = nodes.stream()
.map(bulkUpdate::apply)
.filter(Objects::nonNull)
.collect(Collectors.toSet());
if (!nodes.equals(results)) {
writeNodes(object, results);
}
} catch (Exception e) {
reportException(file.getName(), e);
}
}
}
} catch (Exception e) {
reportException("bulk update", e);
return false;
}
return true;
}
@Override
public boolean loadUser(UUID uuid, String username) {
User user = plugin.getUserManager().getOrMake(UserIdentifier.of(uuid, username));
user.getIoLock().lock();
try {
ConfigurationNode object = readFile(StorageLocation.USER, uuid.toString());
if (object != null) {
String name = object.getNode("name").getString();
user.getPrimaryGroup().setStoredValue(object.getNode(this instanceof JSONBacking ? "primaryGroup" : "primary-group").getString());
Set<NodeModel> data = readNodes(object);
Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
user.setEnduringNodes(nodes);
user.setName(name, true);
boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false);
if (user.getName().isPresent() && (name == null || !user.getName().get().equalsIgnoreCase(name))) {
save = true;
}
if (save) {
saveUser(user);
}
} else {
if (GenericUserManager.shouldSave(user)) {
user.clearNodes();
user.getPrimaryGroup().setStoredValue(null);
plugin.getUserManager().giveDefaultIfNeeded(user, false);
}
}
} catch (Exception e) {
return reportException(uuid.toString(), e);
} finally {
user.getIoLock().unlock();
}
return true;
}
@Override
public boolean saveUser(User user) {
user.getIoLock().lock();
try {
if (!GenericUserManager.shouldSave(user)) {
saveFile(StorageLocation.USER, user.getUuid().toString(), null);
} else {
ConfigurationNode data = SimpleConfigurationNode.root();
data.getNode("uuid").setValue(user.getUuid().toString());
data.getNode("name").setValue(user.getName().orElse("null"));
data.getNode(this instanceof JSONBacking ? "primaryGroup" : "primary-group").setValue(user.getPrimaryGroup().getStoredValue().orElse("default"));
Set<NodeModel> nodes = user.getEnduringNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
writeNodes(data, nodes);
saveFile(StorageLocation.USER, user.getUuid().toString(), data);
}
} catch (Exception e) {
return reportException(user.getUuid().toString(), e);
} finally {
user.getIoLock().unlock();
}
return true;
}
@Override
public Set<UUID> getUniqueUsers() {
String[] fileNames = usersDirectory.list(getFileTypeFilter());
if (fileNames == null) return null;
return Arrays.stream(fileNames)
.map(s -> s.substring(0, s.length() - fileExtension.length()))
.map(UUID::fromString)
.collect(Collectors.toSet());
}
@Override
public List<HeldPermission<UUID>> getUsersWithPermission(String permission) {
ImmutableList.Builder<HeldPermission<UUID>> held = ImmutableList.builder();
try {
File[] files = getDirectory(StorageLocation.USER).listFiles(getFileTypeFilter());
if (files == null) {
throw new IllegalStateException("Users directory matched no files.");
}
for (File file : files) {
try {
registerFileAction(StorageLocation.USER, file);
ConfigurationNode object = readFile(file);
UUID holder = UUID.fromString(file.getName().substring(0, file.getName().length() - fileExtension.length()));
Set<NodeModel> nodes = readNodes(object);
for (NodeModel e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) {
continue;
}
held.add(NodeHeldPermission.of(holder, e));
}
} catch (Exception e) {
reportException(file.getName(), e);
}
}
} catch (Exception e) {
reportException("users", e);
return null;
}
return held.build();
}
@Override
public boolean createAndLoadGroup(String name) {
Group group = plugin.getGroupManager().getOrMake(name);
group.getIoLock().lock();
try {
ConfigurationNode object = readFile(StorageLocation.GROUP, name);
if (object != null) {
Set<NodeModel> data = readNodes(object);
Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
group.setEnduringNodes(nodes);
} else {
ConfigurationNode data = SimpleConfigurationNode.root();
data.getNode("name").setValue(group.getName());
Set<NodeModel> nodes = group.getEnduringNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
writeNodes(data, nodes);
saveFile(StorageLocation.GROUP, name, data);
}
} catch (Exception e) {
return reportException(name, e);
} finally {
group.getIoLock().unlock();
}
return true;
}
@Override
public boolean loadGroup(String name) {
Group group = plugin.getGroupManager().getOrMake(name);
group.getIoLock().lock();
try {
ConfigurationNode object = readFile(StorageLocation.GROUP, name);
if (object == null) {
return false;
}
Set<NodeModel> data = readNodes(object);
Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
group.setEnduringNodes(nodes);
} catch (Exception e) {
return reportException(name, e);
} finally {
group.getIoLock().unlock();
}
return true;
}
@Override
public boolean loadAllGroups() {
String[] fileNames = groupsDirectory.list(getFileTypeFilter());
if (fileNames == null) return false;
List<String> groups = Arrays.stream(fileNames)
.map(s -> s.substring(0, s.length() - fileExtension.length()))
.collect(Collectors.toList());
groups.forEach(this::loadGroup);
GroupManager gm = plugin.getGroupManager();
gm.getAll().values().stream()
.filter(g -> !groups.contains(g.getName()))
.forEach(gm::unload);
return true;
}
@Override
public boolean saveGroup(Group group) {
group.getIoLock().lock();
try {
ConfigurationNode data = SimpleConfigurationNode.root();
data.getNode("name").setValue(group.getName());
Set<NodeModel> nodes = group.getEnduringNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
writeNodes(data, nodes);
saveFile(StorageLocation.GROUP, group.getName(), data);
} catch (Exception e) {
return reportException(group.getName(), e);
} finally {
group.getIoLock().unlock();
}
return true;
}
@Override
public boolean deleteGroup(Group group) {
group.getIoLock().lock();
try {
File groupFile = new File(groupsDirectory, group.getName() + fileExtension);
registerFileAction(StorageLocation.GROUP, groupFile);
if (groupFile.exists()) {
groupFile.delete();
}
} catch (Exception e) {
return reportException(group.getName(), e);
} finally {
group.getIoLock().unlock();
}
return true;
}
@Override
public List<HeldPermission<String>> getGroupsWithPermission(String permission) {
ImmutableList.Builder<HeldPermission<String>> held = ImmutableList.builder();
try {
File[] files = getDirectory(StorageLocation.GROUP).listFiles(getFileTypeFilter());
if (files == null) {
throw new IllegalStateException("Groups directory matched no files.");
}
for (File file : files) {
try {
registerFileAction(StorageLocation.GROUP, file);
ConfigurationNode object = readFile(file);
String holder = file.getName().substring(0, file.getName().length() - fileExtension.length());
Set<NodeModel> nodes = readNodes(object);
for (NodeModel e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) {
continue;
}
held.add(NodeHeldPermission.of(holder, e));
}
} catch (Exception e) {
reportException(file.getName(), e);
}
}
} catch (Exception e) {
reportException("groups", e);
return null;
}
return held.build();
}
@Override
public boolean createAndLoadTrack(String name) {
Track track = plugin.getTrackManager().getOrMake(name);
track.getIoLock().lock();
try {
ConfigurationNode object = readFile(StorageLocation.TRACK, name);
if (object != null) {
List<String> groups = object.getNode("groups").getList(TypeToken.of(String.class));
track.setGroups(groups);
} else {
ConfigurationNode data = SimpleConfigurationNode.root();
data.getNode("name").setValue(name);
data.getNode("groups").setValue(track.getGroups());
saveFile(StorageLocation.TRACK, name, data);
}
} catch (Exception e) {
return reportException(name, e);
} finally {
track.getIoLock().unlock();
}
return true;
}
@Override
public boolean loadTrack(String name) {
Track track = plugin.getTrackManager().getOrMake(name);
track.getIoLock().lock();
try {
ConfigurationNode object = readFile(StorageLocation.TRACK, name);
if (object == null) {
return false;
}
List<String> groups = object.getNode("groups").getList(TypeToken.of(String.class));
track.setGroups(groups);
} catch (Exception e) {
return reportException(name, e);
} finally {
track.getIoLock().unlock();
}
return true;
}
@Override
public boolean loadAllTracks() {
String[] fileNames = tracksDirectory.list(getFileTypeFilter());
if (fileNames == null) return false;
List<String> tracks = Arrays.stream(fileNames)
.map(s -> s.substring(0, s.length() - fileExtension.length()))
.collect(Collectors.toList());
tracks.forEach(this::loadTrack);
TrackManager tm = plugin.getTrackManager();
tm.getAll().values().stream()
.filter(t -> !tracks.contains(t.getName()))
.forEach(tm::unload);
return true;
}
@Override
public boolean saveTrack(Track track) {
track.getIoLock().lock();
try {
ConfigurationNode data = SimpleConfigurationNode.root();
data.getNode("name").setValue(track.getName());
data.getNode("groups").setValue(track.getGroups());
saveFile(StorageLocation.TRACK, track.getName(), data);
} catch (Exception e) {
return reportException(track.getName(), e);
} finally {
track.getIoLock().unlock();
}
return true;
}
@Override
public boolean deleteTrack(Track track) {
track.getIoLock().lock();
try {
File trackFile = new File(tracksDirectory, track.getName() + fileExtension);
registerFileAction(StorageLocation.TRACK, trackFile);
if (trackFile.exists()) {
trackFile.delete();
}
} catch (Exception e) {
return reportException(track.getName(), e);
} finally {
track.getIoLock().unlock();
}
return true;
}
@Override
public boolean saveUUIDData(UUID uuid, String username) {
uuidCache.addMapping(uuid, username);
return true;
}
@Override
public UUID getUUID(String username) {
return uuidCache.lookupUUID(username);
}
@Override
public String getName(UUID uuid) {
return uuidCache.lookupUsername(uuid);
}
private static Set<NodeModel> readNodes(ConfigurationNode data) {
Set<NodeModel> nodes = new HashSet<>();
if (data.getNode("permissions").hasListChildren()) {
List<? extends ConfigurationNode> parts = data.getNode("permissions").getChildrenList();
for (ConfigurationNode ent : parts) {
String stringValue = ent.getValue(Types::strictAsString);
if (stringValue != null) {
nodes.add(NodeModel.of(stringValue, true, "global", "global", 0L, ImmutableContextSet.empty()));
continue;
}
if (!ent.hasMapChildren()) {
continue;
}
Map.Entry<Object, ? extends ConfigurationNode> entry = Iterables.getFirst(ent.getChildrenMap().entrySet(), null);
if (entry == null || !entry.getValue().hasMapChildren()) {
continue;
}
String permission = entry.getKey().toString();
Map<Object, ? extends ConfigurationNode> attributes = entry.getValue().getChildrenMap();
boolean value = true;
String server = "global";
String world = "global";
long expiry = 0L;
ImmutableContextSet context = ImmutableContextSet.empty();
if (attributes.containsKey("value")) {
value = attributes.get("value").getBoolean();
}
if (attributes.containsKey("server")) {
server = attributes.get("server").getString();
}
if (attributes.containsKey("world")) {
world = attributes.get("world").getString();
}
if (attributes.containsKey("expiry")) {
expiry = attributes.get("expiry").getLong();
}
if (attributes.containsKey("context") && attributes.get("context").hasMapChildren()) {
ConfigurationNode contexts = attributes.get("context");
context = deserializeContextSet(contexts).makeImmutable();
}
final ConfigurationNode batchAttribute = attributes.get("permissions");
if (permission.startsWith("luckperms.batch") && batchAttribute != null && batchAttribute.hasListChildren()) {
for (ConfigurationNode element : batchAttribute.getChildrenList()) {
nodes.add(NodeModel.of(element.getString(), value, server, world, expiry, context));
}
} else {
nodes.add(NodeModel.of(permission, value, server, world, expiry, context));
}
}
}
return nodes;
}
private static void writeNodes(ConfigurationNode to, Set<NodeModel> nodes) {
ConfigurationNode arr = SimpleConfigurationNode.root();
for (NodeModel node : nodes) {
// just a raw, default node.
boolean single = node.isValue() &&
node.getServer().equalsIgnoreCase("global") &&
node.getWorld().equalsIgnoreCase("global") &&
node.getExpiry() == 0L &&
node.getContexts().isEmpty();
// just add a string to the list.
if (single) {
arr.getAppendedNode().setValue(node.getPermission());
continue;
}
ConfigurationNode attributes = SimpleConfigurationNode.root();
attributes.getNode("value").setValue(node.isValue());
if (!node.getServer().equals("global")) {
attributes.getNode("server").setValue(node.getServer());
}
if (!node.getWorld().equals("global")) {
attributes.getNode("world").setValue(node.getWorld());
}
if (node.getExpiry() != 0L) {
attributes.getNode("expiry").setValue(node.getExpiry());
}
if (!node.getContexts().isEmpty()) {
attributes.getNode("context").setValue(serializeContextSet(node.getContexts()));
}
ConfigurationNode perm = SimpleConfigurationNode.root();
perm.getNode(node.getPermission()).setValue(attributes);
arr.getAppendedNode().setValue(perm);
}
to.getNode("permissions").setValue(arr);
}
private static ConfigurationNode serializeContextSet(ContextSet contextSet) {
ConfigurationNode data = SimpleConfigurationNode.root();
Map<String, Collection<String>> map = contextSet.toMultimap().asMap();
map.forEach((k, v) -> {
List<String> values = new ArrayList<>(v);
int size = values.size();
if (size == 1) {
data.getNode(k).setValue(values.get(0));
} else if (size > 1) {
data.getNode(k).setValue(values);
}
});
return data;
}
private static MutableContextSet deserializeContextSet(ConfigurationNode data) {
Preconditions.checkArgument(data.hasMapChildren());
Map<Object, ? extends ConfigurationNode> dataMap = data.getChildrenMap();
ImmutableSetMultimap.Builder<String, String> map = ImmutableSetMultimap.builder();
for (Map.Entry<Object, ? extends ConfigurationNode> e : dataMap.entrySet()) {
String k = e.getKey().toString();
ConfigurationNode v = e.getValue();
if (v.hasListChildren()) {
List<? extends ConfigurationNode> values = v.getChildrenList();
for (ConfigurationNode value : values) {
map.put(k, value.getString());
}
} else {
map.put(k, v.getString());
}
}
return MutableContextSet.fromMultimap(map.build());
}
}
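For reference, writeNodes above stores each node either as a bare string (a simple global node) or as a single-entry map of permission to attributes (value, server, world, expiry, context), and readNodes accepts both forms. A group file written by the rewritten YAML backing would therefore look roughly like the following sketch; the group name and permission values are illustrative, not taken from the commit:

name: admin
permissions:
- luckperms.*
- some.permission:
    value: true
    server: factions
    expiry: 1514764800
    context:
      gamemode: survival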

@ -83,8 +83,8 @@ public class FileWatcher implements Runnable {
}, 40L);
}
-public void registerChange(String id, String filename) {
+public void registerChange(StorageLocation location, String fileName) {
-internalChanges.put(id + "/" + filename, System.currentTimeMillis());
+internalChanges.put(location.name().toLowerCase() + "/" + fileName, System.currentTimeMillis());
}
public void close() {
@ -124,12 +124,17 @@ public class FileWatcher implements Runnable {
Path file = path.resolve(context);
String fileName = context.toString();
// ignore temporary changes
if (fileName.endsWith(".tmp")) {
continue;
}
if (internalChanges.containsKey(id + "/" + fileName)) {
// This file was modified by the system.
continue;
}
-registerChange(id, fileName);
+internalChanges.put(id + "/" + fileName, System.currentTimeMillis());
plugin.getLog().info("[FileWatcher] Detected change in file: " + file.toString());

@ -1,334 +0,0 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.backing.file;
import lombok.Getter;
import me.lucko.luckperms.api.LogEntry;
import me.lucko.luckperms.common.actionlog.Log;
import me.lucko.luckperms.common.commands.utils.Util;
import me.lucko.luckperms.common.constants.Constants;
import me.lucko.luckperms.common.managers.GroupManager;
import me.lucko.luckperms.common.managers.TrackManager;
import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.common.model.Track;
import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.AbstractBacking;
import me.lucko.luckperms.common.storage.backing.legacy.LegacyJSONSchemaMigration;
import me.lucko.luckperms.common.storage.backing.legacy.LegacyYAMLSchemaMigration;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.Callable;
import java.util.logging.FileHandler;
import java.util.logging.Formatter;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import java.util.stream.Collectors;
public abstract class FlatfileBacking extends AbstractBacking {
private static final String LOG_FORMAT = "%s(%s): [%s] %s(%s) --> %s";
private final Logger actionLogger = Logger.getLogger("luckperms_actions");
private final FileUuidCache uuidCache = new FileUuidCache();
private final File pluginDir;
@Getter
private final String fileExtension;
private final String dataFolderName;
private File uuidData;
private File actionLog;
protected File usersDir;
protected File groupsDir;
protected File tracksDir;
FlatfileBacking(LuckPermsPlugin plugin, String name, File pluginDir, String fileExtension, String dataFolderName) {
super(plugin, name);
this.pluginDir = pluginDir;
this.fileExtension = fileExtension;
this.dataFolderName = dataFolderName;
}
@Override
public void init() {
try {
setupFiles();
} catch (IOException e) {
e.printStackTrace();
return;
}
uuidCache.load(uuidData);
try {
FileHandler fh = new FileHandler(actionLog.getAbsolutePath(), 0, 1, true);
fh.setFormatter(new Formatter() {
@Override
public String format(LogRecord record) {
return new Date(record.getMillis()).toString() + ": " + record.getMessage() + "\n";
}
});
actionLogger.addHandler(fh);
actionLogger.setUseParentHandlers(false);
actionLogger.setLevel(Level.ALL);
actionLogger.setFilter(record -> true);
} catch (Exception e) {
e.printStackTrace();
}
setAcceptingLogins(true);
}
private void setupFiles() throws IOException {
File data = new File(pluginDir, dataFolderName);
// Try to perform schema migration
File oldData = new File(pluginDir, "data");
if (!data.exists() && oldData.exists()) {
data.mkdirs();
plugin.getLog().severe("===== Legacy Schema Migration =====");
plugin.getLog().severe("Starting migration from legacy schema. This could take a while....");
plugin.getLog().severe("Please do not stop your server while the migration takes place.");
if (this instanceof YAMLBacking) {
try {
new LegacyYAMLSchemaMigration(plugin, (YAMLBacking) this, oldData, data).run();
} catch (Exception e) {
e.printStackTrace();
}
} else if (this instanceof JSONBacking) {
try {
new LegacyJSONSchemaMigration(plugin, (JSONBacking) this, oldData, data).run();
} catch (Exception e) {
e.printStackTrace();
}
}
} else {
data.mkdirs();
}
usersDir = new File(data, "users");
usersDir.mkdir();
groupsDir = new File(data, "groups");
groupsDir.mkdir();
tracksDir = new File(data, "tracks");
tracksDir.mkdir();
uuidData = new File(data, "uuidcache.txt");
uuidData.createNewFile();
actionLog = new File(data, "actions.log");
actionLog.createNewFile();
// Listen for file changes.
plugin.applyToFileWatcher(watcher -> {
watcher.subscribe("users", usersDir.toPath(), s -> {
if (!s.endsWith(fileExtension)) {
return;
}
String user = s.substring(0, s.length() - fileExtension.length());
UUID uuid = Util.parseUuid(user);
if (uuid == null) {
return;
}
User u = plugin.getUserManager().getIfLoaded(uuid);
if (u != null) {
plugin.getLog().info("[FileWatcher] Refreshing user " + u.getFriendlyName());
plugin.getStorage().loadUser(uuid, "null");
}
});
watcher.subscribe("groups", groupsDir.toPath(), s -> {
if (!s.endsWith(fileExtension)) {
return;
}
String groupName = s.substring(0, s.length() - fileExtension.length());
plugin.getLog().info("[FileWatcher] Refreshing group " + groupName);
plugin.getUpdateTaskBuffer().request();
});
watcher.subscribe("tracks", tracksDir.toPath(), s -> {
if (!s.endsWith(fileExtension)) {
return;
}
String trackName = s.substring(0, s.length() - fileExtension.length());
plugin.getLog().info("[FileWatcher] Refreshing track " + trackName);
plugin.getStorage().loadAllTracks();
});
});
}
@Override
public void shutdown() {
uuidCache.save(uuidData);
}
protected void registerFileAction(String type, File file) {
plugin.applyToFileWatcher(fileWatcher -> fileWatcher.registerChange(type, file.getName()));
}
protected <T> T call(String file, Callable<T> c, T def) {
try {
return c.call();
} catch (Exception e) {
plugin.getLog().warn("Exception thrown whilst performing i/o: " + file);
e.printStackTrace();
return def;
}
}
@Override
public boolean logAction(LogEntry entry) {
//noinspection deprecation
actionLogger.info(String.format(LOG_FORMAT,
(entry.getActor().equals(Constants.CONSOLE_UUID) ? "" : entry.getActor() + " "),
entry.getActorName(),
Character.toString(entry.getType()),
(entry.getActed() == null ? "" : entry.getActed().toString() + " "),
entry.getActedName(),
entry.getAction())
);
return true;
}
@Override
public Log getLog() {
// Flatfile doesn't support viewing log data from in-game. You can just read the file in a text editor.
return Log.builder().build();
}
@Override
public Set<UUID> getUniqueUsers() {
String[] fileNames = usersDir.list((dir, name) -> name.endsWith(fileExtension));
if (fileNames == null) return null;
return Arrays.stream(fileNames)
.map(s -> s.substring(0, s.length() - fileExtension.length()))
.map(UUID::fromString)
.collect(Collectors.toSet());
}
@Override
public boolean loadAllGroups() {
String[] fileNames = groupsDir.list((dir, name) -> name.endsWith(fileExtension));
if (fileNames == null) return false;
List<String> groups = Arrays.stream(fileNames)
.map(s -> s.substring(0, s.length() - fileExtension.length()))
.collect(Collectors.toList());
groups.forEach(this::loadGroup);
GroupManager gm = plugin.getGroupManager();
gm.getAll().values().stream()
.filter(g -> !groups.contains(g.getName()))
.forEach(gm::unload);
return true;
}
@Override
public boolean deleteGroup(Group group) {
group.getIoLock().lock();
try {
return call(group.getName(), () -> {
File groupFile = new File(groupsDir, group.getName() + fileExtension);
registerFileAction("groups", groupFile);
if (groupFile.exists()) {
groupFile.delete();
}
return true;
}, false);
} finally {
group.getIoLock().unlock();
}
}
@Override
public boolean loadAllTracks() {
String[] fileNames = tracksDir.list((dir, name) -> name.endsWith(fileExtension));
if (fileNames == null) return false;
List<String> tracks = Arrays.stream(fileNames)
.map(s -> s.substring(0, s.length() - fileExtension.length()))
.collect(Collectors.toList());
tracks.forEach(this::loadTrack);
TrackManager tm = plugin.getTrackManager();
tm.getAll().values().stream()
.filter(t -> !tracks.contains(t.getName()))
.forEach(tm::unload);
return true;
}
@Override
public boolean deleteTrack(Track track) {
track.getIoLock().lock();
try {
return call(track.getName(), () -> {
File trackFile = new File(tracksDir, track.getName() + fileExtension);
registerFileAction("tracks", trackFile);
if (trackFile.exists()) {
trackFile.delete();
}
return true;
}, false);
} finally {
track.getIoLock().unlock();
}
}
@Override
public boolean saveUUIDData(UUID uuid, String username) {
uuidCache.addMapping(uuid, username);
return true;
}
@Override
public UUID getUUID(String username) {
return uuidCache.lookupUUID(username);
}
@Override
public String getName(UUID uuid) {
return uuidCache.lookupUsername(uuid);
}
}

@ -0,0 +1,72 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.backing.file;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.hocon.HoconConfigurationLoader;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
public class HOCONBacking extends ConfigurateBacking {
public HOCONBacking(LuckPermsPlugin plugin, String dataFolderName) {
super(plugin, "HOCON", ".conf", dataFolderName);
}
@Override
protected ConfigurationNode readFile(File file) throws IOException {
if (!file.exists()) {
return null;
}
HoconConfigurationLoader loader = HoconConfigurationLoader.builder()
.setSource(() -> Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8))
.setSink(() -> Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8))
.build();
return loader.load();
}
@Override
protected void saveFile(File file, ConfigurationNode node) throws IOException {
if (node == null && file.exists()) {
file.delete();
return;
}
HoconConfigurationLoader loader = HoconConfigurationLoader.builder()
.setSource(() -> Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8))
.setSink(() -> Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8))
.build();
loader.save(node);
}
}
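The rewritten YAMLBacking is referenced by StorageFactory above but does not appear in this excerpt; it presumably mirrors the HOCON class, swapping in configurate's YAMLConfigurationLoader. A minimal sketch under that assumption (illustrative, not the committed source):

package me.lucko.luckperms.common.storage.backing.file;

import me.lucko.luckperms.common.plugin.LuckPermsPlugin;

import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.yaml.YAMLConfigurationLoader;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

public class YAMLBacking extends ConfigurateBacking {
    public YAMLBacking(LuckPermsPlugin plugin, String dataFolderName) {
        // ".yml" matches the extension used by the previous YAML storage
        super(plugin, "YAML", ".yml", dataFolderName);
    }

    // helper for this sketch: builds a loader with the same reader/writer wiring
    // as the HOCON backing above
    private static YAMLConfigurationLoader loaderFor(File file) {
        return YAMLConfigurationLoader.builder()
                .setSource(() -> Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8))
                .setSink(() -> Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8))
                .build();
    }

    @Override
    protected ConfigurationNode readFile(File file) throws IOException {
        if (!file.exists()) {
            return null;
        }
        return loaderFor(file).load();
    }

    @Override
    protected void saveFile(File file, ConfigurationNode node) throws IOException {
        if (node == null && file.exists()) {
            file.delete();
            return;
        }
        loaderFor(file).save(node);
    }
}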

@ -25,551 +25,50 @@
package me.lucko.luckperms.common.storage.backing.file;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterables;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import me.lucko.luckperms.api.HeldPermission;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.common.bulkupdate.BulkUpdate;
import me.lucko.luckperms.common.managers.GenericUserManager;
import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.common.model.Track;
import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.node.NodeHeldPermission;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.references.UserIdentifier;
-import java.io.BufferedReader;
+import ninja.leaping.configurate.ConfigurationNode;
-import java.io.BufferedWriter;
+import ninja.leaping.configurate.gson.GsonConfigurationLoader;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
-@SuppressWarnings("ResultOfMethodCallIgnored")
-public class JSONBacking extends FlatfileBacking {
+public class JSONBacking extends ConfigurateBacking {
private final Gson gson;
-public JSONBacking(LuckPermsPlugin plugin, File pluginDir, String dataFolderName) {
+public JSONBacking(LuckPermsPlugin plugin, String dataFolderName) {
-super(plugin, "JSON", pluginDir, ".json", dataFolderName);
+super(plugin, "JSON", ".json", dataFolderName);
gson = new GsonBuilder().setPrettyPrinting().create();
}
-public boolean writeElementToFile(File file, JsonElement element) {
+@Override
-try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
+protected ConfigurationNode readFile(File file) throws IOException {
-gson.toJson(element, writer);
+if (!file.exists()) {
writer.flush();
return true;
} catch (Throwable t) {
plugin.getLog().warn("Exception whilst writing to file: " + file.getAbsolutePath());
t.printStackTrace();
return false;
}
}
public JsonObject readObjectFromFile(File file) {
try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
return gson.fromJson(reader, JsonObject.class);
} catch (Throwable t) {
plugin.getLog().warn("Exception whilst reading from file: " + file.getAbsolutePath());
t.printStackTrace();
return null;
}
GsonConfigurationLoader loader = GsonConfigurationLoader.builder()
.setIndent(2)
.setSource(() -> Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8))
.setSink(() -> Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8))
.build();
return loader.load();
} }
@Override
-public boolean applyBulkUpdate(BulkUpdate bulkUpdate) {
+protected void saveFile(File file, ConfigurationNode node) throws IOException {
-return call("null", () -> {
+if (node == null && file.exists()) {
-if (bulkUpdate.getDataType().isIncludingUsers()) {
+file.delete();
-File[] files = usersDir.listFiles((dir, name1) -> name1.endsWith(".json"));
+return;
if (files == null) return false;
for (File file : files) {
call(file.getName(), () -> {
registerFileAction("users", file);
JsonObject object = readObjectFromFile(file);
Set<NodeModel> nodes = new HashSet<>();
nodes.addAll(deserializePermissions(object.get("permissions").getAsJsonArray()));
Set<NodeModel> results = nodes.stream()
.map(n -> Optional.ofNullable(bulkUpdate.apply(n)))
.filter(Optional::isPresent)
.map(Optional::get)
.collect(Collectors.toSet());
object.add("permissions", serializePermissions(results));
writeElementToFile(file, object);
return true;
}, true);
}
}
if (bulkUpdate.getDataType().isIncludingGroups()) {
File[] files = groupsDir.listFiles((dir, name1) -> name1.endsWith(".json"));
if (files == null) return false;
for (File file : files) {
call(file.getName(), () -> {
registerFileAction("groups", file);
JsonObject object = readObjectFromFile(file);
Set<NodeModel> nodes = new HashSet<>();
nodes.addAll(deserializePermissions(object.get("permissions").getAsJsonArray()));
Set<NodeModel> results = nodes.stream()
.map(n -> Optional.ofNullable(bulkUpdate.apply(n)))
.filter(Optional::isPresent)
.map(Optional::get)
.collect(Collectors.toSet());
object.add("permissions", serializePermissions(results));
writeElementToFile(file, object);
return true;
}, true);
}
}
return true;
}, false);
}
@Override
public boolean loadUser(UUID uuid, String username) {
User user = plugin.getUserManager().getOrMake(UserIdentifier.of(uuid, username));
user.getIoLock().lock();
try {
return call(uuid.toString(), () -> {
File userFile = new File(usersDir, uuid.toString() + ".json");
registerFileAction("users", userFile);
if (userFile.exists()) {
JsonObject object = readObjectFromFile(userFile);
String name = object.get("name").getAsString();
user.getPrimaryGroup().setStoredValue(object.get("primaryGroup").getAsString());
Set<NodeModel> data = deserializePermissions(object.get("permissions").getAsJsonArray());
Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
user.setEnduringNodes(nodes);
user.setName(name, true);
boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false);
if (user.getName().isPresent() && (name == null || !user.getName().get().equalsIgnoreCase(name))) {
save = true;
}
if (save) {
saveUser(user);
}
return true;
} else {
if (GenericUserManager.shouldSave(user)) {
user.clearNodes();
user.getPrimaryGroup().setStoredValue(null);
plugin.getUserManager().giveDefaultIfNeeded(user, false);
}
return true;
}
}, false);
} finally {
user.getIoLock().unlock();
user.getRefreshBuffer().requestDirectly();
}
}
@Override
public boolean saveUser(User user) {
user.getIoLock().lock();
try {
return call(user.getUuid().toString(), () -> {
File userFile = new File(usersDir, user.getUuid().toString() + ".json");
registerFileAction("users", userFile);
if (!GenericUserManager.shouldSave(user)) {
if (userFile.exists()) {
userFile.delete();
}
return true;
}
if (!userFile.exists()) {
userFile.createNewFile();
}
JsonObject data = new JsonObject();
data.addProperty("uuid", user.getUuid().toString());
data.addProperty("name", user.getName().orElse("null"));
data.addProperty("primaryGroup", user.getPrimaryGroup().getStoredValue().orElse("default"));
Set<NodeModel> nodes = user.getEnduringNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
data.add("permissions", serializePermissions(nodes));
return writeElementToFile(userFile, data);
}, false);
} finally {
user.getIoLock().unlock();
}
}
@Override
public List<HeldPermission<UUID>> getUsersWithPermission(String permission) {
ImmutableList.Builder<HeldPermission<UUID>> held = ImmutableList.builder();
boolean success = call("null", () -> {
File[] files = usersDir.listFiles((dir, name1) -> name1.endsWith(".json"));
if (files == null) return false;
for (File file : files) {
call(file.getName(), () -> {
registerFileAction("users", file);
UUID holder = UUID.fromString(file.getName().substring(0, file.getName().length() - 5));
JsonObject object = readObjectFromFile(file);
Set<NodeModel> nodes = new HashSet<>();
nodes.addAll(deserializePermissions(object.get("permissions").getAsJsonArray()));
for (NodeModel e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) {
continue;
}
held.add(NodeHeldPermission.of(holder, e));
}
return true;
}, true);
}
return true;
}, false);
return success ? held.build() : null;
}
@Override
public boolean createAndLoadGroup(String name) {
Group group = plugin.getGroupManager().getOrMake(name);
group.getIoLock().lock();
try {
return call(name, () -> {
File groupFile = new File(groupsDir, name + ".json");
registerFileAction("groups", groupFile);
if (groupFile.exists()) {
JsonObject object = readObjectFromFile(groupFile);
Set<NodeModel> data = deserializePermissions(object.get("permissions").getAsJsonArray());
Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
group.setEnduringNodes(nodes);
return true;
} else {
groupFile.createNewFile();
JsonObject data = new JsonObject();
data.addProperty("name", group.getName());
Set<NodeModel> nodes = group.getEnduringNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
data.add("permissions", serializePermissions(nodes));
return writeElementToFile(groupFile, data);
}
}, false);
} finally {
group.getIoLock().unlock();
}
}
@Override
public boolean loadGroup(String name) {
Group group = plugin.getGroupManager().getOrMake(name);
group.getIoLock().lock();
try {
return call(name, () -> {
File groupFile = new File(groupsDir, name + ".json");
registerFileAction("groups", groupFile);
if (!groupFile.exists()) {
return false;
}
JsonObject object = readObjectFromFile(groupFile);
Set<NodeModel> data = deserializePermissions(object.get("permissions").getAsJsonArray());
Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
group.setEnduringNodes(nodes);
return true;
}, false);
} finally {
group.getIoLock().unlock();
}
}
@Override
public boolean saveGroup(Group group) {
group.getIoLock().lock();
try {
return call(group.getName(), () -> {
File groupFile = new File(groupsDir, group.getName() + ".json");
registerFileAction("groups", groupFile);
if (!groupFile.exists()) {
groupFile.createNewFile();
}
JsonObject data = new JsonObject();
data.addProperty("name", group.getName());
Set<NodeModel> nodes = group.getEnduringNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
data.add("permissions", serializePermissions(nodes));
return writeElementToFile(groupFile, data);
}, false);
} finally {
group.getIoLock().unlock();
}
}
@Override
public List<HeldPermission<String>> getGroupsWithPermission(String permission) {
ImmutableList.Builder<HeldPermission<String>> held = ImmutableList.builder();
boolean success = call("null", () -> {
File[] files = groupsDir.listFiles((dir, name1) -> name1.endsWith(".json"));
if (files == null) return false;
for (File file : files) {
call(file.getName(), () -> {
registerFileAction("groups", file);
String holder = file.getName().substring(0, file.getName().length() - 5);
JsonObject object = readObjectFromFile(file);
Set<NodeModel> nodes = new HashSet<>();
nodes.addAll(deserializePermissions(object.get("permissions").getAsJsonArray()));
for (NodeModel e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) {
continue;
}
held.add(NodeHeldPermission.of(holder, e));
}
return true;
}, true);
}
return true;
}, false);
return success ? held.build() : null;
}
@Override
public boolean createAndLoadTrack(String name) {
Track track = plugin.getTrackManager().getOrMake(name);
track.getIoLock().lock();
try {
return call(name, () -> {
File trackFile = new File(tracksDir, name + ".json");
registerFileAction("tracks", trackFile);
if (trackFile.exists()) {
JsonObject object = readObjectFromFile(trackFile);
List<String> groups = new ArrayList<>();
for (JsonElement g : object.get("groups").getAsJsonArray()) {
groups.add(g.getAsString());
}
track.setGroups(groups);
return true;
} else {
trackFile.createNewFile();
JsonObject data = new JsonObject();
data.addProperty("name", track.getName());
JsonArray groups = new JsonArray();
for (String s : track.getGroups()) {
groups.add(new JsonPrimitive(s));
}
data.add("groups", groups);
return writeElementToFile(trackFile, data);
}
}, false);
} finally {
track.getIoLock().unlock();
}
}
@Override
public boolean loadTrack(String name) {
Track track = plugin.getTrackManager().getOrMake(name);
track.getIoLock().lock();
try {
return call(name, () -> {
File trackFile = new File(tracksDir, name + ".json");
registerFileAction("tracks", trackFile);
if (!trackFile.exists()) {
return false;
}
JsonObject object = readObjectFromFile(trackFile);
List<String> groups = new ArrayList<>();
for (JsonElement g : object.get("groups").getAsJsonArray()) {
groups.add(g.getAsString());
}
track.setGroups(groups);
return true;
}, false);
} finally {
track.getIoLock().unlock();
}
}
@Override
public boolean saveTrack(Track track) {
track.getIoLock().lock();
try {
return call(track.getName(), () -> {
File trackFile = new File(tracksDir, track.getName() + ".json");
registerFileAction("tracks", trackFile);
if (!trackFile.exists()) {
trackFile.createNewFile();
}
JsonObject data = new JsonObject();
data.addProperty("name", track.getName());
JsonArray groups = new JsonArray();
for (String s : track.getGroups()) {
groups.add(new JsonPrimitive(s));
}
data.add("groups", groups);
return writeElementToFile(trackFile, data);
}, false);
} finally {
track.getIoLock().unlock();
}
}
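Track files are smaller still; the code above produces something like the following (illustrative group names):

{
  "name": "staff",
  "groups": [
    "default",
    "moderator",
    "admin"
  ]
}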
public static Set<NodeModel> deserializePermissions(JsonArray permissionsSection) {
Set<NodeModel> nodes = new HashSet<>();
for (JsonElement ent : permissionsSection) {
if (ent.isJsonPrimitive() && ent.getAsJsonPrimitive().isString()) {
String permission = ent.getAsJsonPrimitive().getAsString();
nodes.add(NodeModel.of(permission, true, "global", "global", 0L, ImmutableContextSet.empty()));
continue;
}
if (!ent.isJsonObject()) {
continue;
}
JsonObject data = ent.getAsJsonObject();
Map.Entry<String, JsonElement> entry = Iterables.getFirst(data.entrySet(), null);
if (entry == null || !entry.getValue().isJsonObject()) {
continue;
}
String permission = entry.getKey();
JsonObject attributes = entry.getValue().getAsJsonObject();
boolean value = true;
String server = "global";
String world = "global";
long expiry = 0L;
ImmutableContextSet context = ImmutableContextSet.empty();
if (attributes.has("value")) {
value = attributes.get("value").getAsBoolean();
}
if (attributes.has("server")) {
server = attributes.get("server").getAsString();
}
if (attributes.has("world")) {
world = attributes.get("world").getAsString();
}
if (attributes.has("expiry")) {
expiry = attributes.get("expiry").getAsLong();
}
if (attributes.has("context") && attributes.get("context").isJsonObject()) {
JsonObject contexts = attributes.get("context").getAsJsonObject();
context = NodeModel.deserializeContextSet(contexts).makeImmutable();
}
final JsonElement batchAttribute = attributes.get("permissions");
if (permission.startsWith("luckperms.batch") && batchAttribute != null && batchAttribute.isJsonArray()) {
for (JsonElement element : batchAttribute.getAsJsonArray()) {
nodes.add(NodeModel.of(element.getAsString(), value, server, world, expiry, context));
}
} else {
nodes.add(NodeModel.of(permission, value, server, world, expiry, context));
}
        }
        return nodes;
    }
    public static JsonArray serializePermissions(Set<NodeModel> nodes) {
JsonArray arr = new JsonArray();
for (NodeModel node : nodes) {
// just a raw, default node.
boolean single = node.isValue() &&
node.getServer().equalsIgnoreCase("global") &&
node.getWorld().equalsIgnoreCase("global") &&
node.getExpiry() == 0L &&
node.getContexts().isEmpty();
// just add a string to the list.
if (single) {
arr.add(new JsonPrimitive(node.getPermission()));
continue;
}
JsonObject attributes = new JsonObject();
attributes.addProperty("value", node.isValue());
if (!node.getServer().equals("global")) {
attributes.addProperty("server", node.getServer());
}
if (!node.getWorld().equals("global")) {
attributes.addProperty("world", node.getWorld());
}
if (node.getExpiry() != 0L) {
attributes.addProperty("expiry", node.getExpiry());
}
if (!node.getContexts().isEmpty()) {
attributes.add("context", node.getContextsAsJson());
}
JsonObject perm = new JsonObject();
perm.add(node.getPermission(), attributes);
arr.add(perm);
}
return arr;
    }
}
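The replacement JSONBacking on the other side of this diff keeps only the Gson wiring and hands everything else to the shared ConfigurateBacking base. A sketch of the new class, assuming the same constructor shape and readFile/saveFile hooks as the new YAMLBacking shown further down; only the GsonConfigurationLoader construction and the loader.save(node) call are visible in this hunk, so the readFile body is an assumption:

package me.lucko.luckperms.common.storage.backing.file;

import me.lucko.luckperms.common.plugin.LuckPermsPlugin;

import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.gson.GsonConfigurationLoader;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

public class JSONBacking extends ConfigurateBacking {

    public JSONBacking(LuckPermsPlugin plugin, String dataFolderName) {
        super(plugin, "JSON", ".json", dataFolderName);
    }

    @Override
    protected ConfigurationNode readFile(File file) throws IOException {
        if (!file.exists()) {
            return null;
        }
        // assumed: mirrors the YAML readFile, loading via a Gson-backed Configurate loader
        GsonConfigurationLoader loader = GsonConfigurationLoader.builder()
                .setIndent(2)
                .setSource(() -> Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8))
                .setSink(() -> Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8))
                .build();
        return loader.load();
    }

    @Override
    protected void saveFile(File file, ConfigurationNode node) throws IOException {
        if (node == null && file.exists()) {
            file.delete();
            return;
        }
        // the builder below is taken from the save path shown in this hunk
        GsonConfigurationLoader loader = GsonConfigurationLoader.builder()
                .setIndent(2)
                .setSource(() -> Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8))
                .setSink(() -> Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8))
                .build();
        loader.save(node);
    }
}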
View File
@ -0,0 +1,32 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.backing.file;
public enum StorageLocation {
USER, GROUP, TRACK
}
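The new backings extend a shared ConfigurateBacking base that is referenced but not shown in this diff. A hypothetical skeleton, consistent with the super(plugin, name, extension, dataFolderName) call and the two hooks used by the subclasses; the real class also carries the node (de)serialisation and per-file handling that previously lived in each backing, and its superclass is not shown here:

package me.lucko.luckperms.common.storage.backing.file;

import me.lucko.luckperms.common.plugin.LuckPermsPlugin;

import ninja.leaping.configurate.ConfigurationNode;

import java.io.File;
import java.io.IOException;

public abstract class ConfigurateBacking {
    protected final LuckPermsPlugin plugin;
    private final String name;
    private final String fileExtension;
    private final String dataFolderName;

    protected ConfigurateBacking(LuckPermsPlugin plugin, String name, String fileExtension, String dataFolderName) {
        this.plugin = plugin;
        this.name = name;
        this.fileExtension = fileExtension;
        this.dataFolderName = dataFolderName;
    }

    // reads the file into a Configurate node, or returns null if it does not exist
    protected abstract ConfigurationNode readFile(File file) throws IOException;

    // writes the node back to disk; a null node means the file should be deleted
    protected abstract void saveFile(File file, ConfigurationNode node) throws IOException;
}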
View File
@ -25,600 +25,53 @@
package me.lucko.luckperms.common.storage.backing.file;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Iterables;
import me.lucko.luckperms.api.HeldPermission;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.common.bulkupdate.BulkUpdate;
import me.lucko.luckperms.common.managers.GenericUserManager;
import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.common.model.Track;
import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.node.NodeHeldPermission;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.references.UserIdentifier;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
@SuppressWarnings({"unchecked", "ResultOfMethodCallIgnored"})
public class YAMLBacking extends FlatfileBacking {
    private static Yaml getYaml() {
        DumperOptions options = new DumperOptions();
        options.setAllowUnicode(true);
        options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
        return new Yaml(options);
    }
    public YAMLBacking(LuckPermsPlugin plugin, File pluginDir, String dataFolderName) {
        super(plugin, "YAML", pluginDir, ".yml", dataFolderName);
    }
public boolean writeMapToFile(File file, Map<String, Object> values) {
try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
getYaml().dump(values, writer);
writer.flush();
return true;
} catch (Throwable t) {
plugin.getLog().warn("Exception whilst writing to file: " + file.getAbsolutePath());
t.printStackTrace();
return false;
}
}
public Map<String, Object> readMapFromFile(File file) {
try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
return (Map<String, Object>) getYaml().load(reader);
} catch (Throwable t) {
plugin.getLog().warn("Exception whilst reading from file: " + file.getAbsolutePath());
t.printStackTrace();
            return null;
        }
    }
    @Override
    public boolean applyBulkUpdate(BulkUpdate bulkUpdate) {
        return call("null", () -> {
            if (bulkUpdate.getDataType().isIncludingUsers()) {
                File[] files = usersDir.listFiles((dir, name1) -> name1.endsWith(".yml"));
if (files == null) return false;
for (File file : files) {
call(file.getName(), () -> {
registerFileAction("users", file);
Map<String, Object> values = readMapFromFile(file);
Set<NodeModel> nodes = new HashSet<>();
nodes.addAll(deserializePermissions((List<Object>) values.get("permissions")));
Set<NodeModel> results = nodes.stream()
.map(n -> Optional.ofNullable(bulkUpdate.apply(n)))
.filter(Optional::isPresent)
.map(Optional::get)
.collect(Collectors.toSet());
values.put("permissions", serializePermissions(results));
writeMapToFile(file, values);
return true;
}, true);
}
}
if (bulkUpdate.getDataType().isIncludingGroups()) {
File[] files = groupsDir.listFiles((dir, name1) -> name1.endsWith(".yml"));
if (files == null) return false;
for (File file : files) {
call(file.getName(), () -> {
registerFileAction("groups", file);
Map<String, Object> values = readMapFromFile(file);
Set<NodeModel> nodes = new HashSet<>();
nodes.addAll(deserializePermissions((List<Object>) values.get("permissions")));
Set<NodeModel> results = nodes.stream()
.map(n -> Optional.ofNullable(bulkUpdate.apply(n)))
.filter(Optional::isPresent)
.map(Optional::get)
.collect(Collectors.toSet());
values.put("permissions", serializePermissions(results));
writeMapToFile(file, values);
return true;
}, true);
}
}
return true;
}, false);
}
@Override
public boolean loadUser(UUID uuid, String username) {
User user = plugin.getUserManager().getOrMake(UserIdentifier.of(uuid, username));
user.getIoLock().lock();
try {
return call(uuid.toString(), () -> {
File userFile = new File(usersDir, uuid.toString() + ".yml");
registerFileAction("users", userFile);
if (userFile.exists()) {
Map<String, Object> values = readMapFromFile(userFile);
// User exists, let's load.
String name = (String) values.get("name");
user.getPrimaryGroup().setStoredValue((String) values.get("primary-group"));
Set<NodeModel> data = deserializePermissions((List<Object>) values.get("permissions"));
Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
user.setEnduringNodes(nodes);
user.setName(name, true);
boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false);
if (user.getName().isPresent() && (name == null || !user.getName().get().equalsIgnoreCase(name))) {
save = true;
}
if (save) {
saveUser(user);
}
return true;
} else {
if (GenericUserManager.shouldSave(user)) {
user.clearNodes();
user.getPrimaryGroup().setStoredValue(null);
plugin.getUserManager().giveDefaultIfNeeded(user, false);
}
return true;
}
}, false);
} finally {
user.getIoLock().unlock();
user.getRefreshBuffer().requestDirectly();
}
}
@Override
public boolean saveUser(User user) {
user.getIoLock().lock();
try {
return call(user.getUuid().toString(), () -> {
File userFile = new File(usersDir, user.getUuid().toString() + ".yml");
registerFileAction("users", userFile);
if (!GenericUserManager.shouldSave(user)) {
if (userFile.exists()) {
userFile.delete();
}
return true;
}
if (!userFile.exists()) {
userFile.createNewFile();
}
Map<String, Object> values = new LinkedHashMap<>();
values.put("uuid", user.getUuid().toString());
values.put("name", user.getName().orElse("null"));
values.put("primary-group", user.getPrimaryGroup().getStoredValue().orElse("default"));
Set<NodeModel> data = user.getEnduringNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
values.put("permissions", serializePermissions(data));
return writeMapToFile(userFile, values);
}, false);
} finally {
user.getIoLock().unlock();
}
}
@Override
public List<HeldPermission<UUID>> getUsersWithPermission(String permission) {
ImmutableList.Builder<HeldPermission<UUID>> held = ImmutableList.builder();
boolean success = call("null", () -> {
File[] files = usersDir.listFiles((dir, name1) -> name1.endsWith(".yml"));
if (files == null) return false;
for (File file : files) {
call(file.getName(), () -> {
registerFileAction("users", file);
UUID holder = UUID.fromString(file.getName().substring(0, file.getName().length() - 4));
Map<String, Object> values = readMapFromFile(file);
Set<NodeModel> nodes = new HashSet<>();
nodes.addAll(deserializePermissions((List<Object>) values.get("permissions")));
for (NodeModel e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) {
continue;
}
held.add(NodeHeldPermission.of(holder, e));
}
return true;
}, true);
}
return true;
}, false);
return success ? held.build() : null;
}
@Override
public boolean createAndLoadGroup(String name) {
Group group = plugin.getGroupManager().getOrMake(name);
group.getIoLock().lock();
try {
return call(name, () -> {
File groupFile = new File(groupsDir, name + ".yml");
registerFileAction("groups", groupFile);
if (groupFile.exists()) {
Map<String, Object> values = readMapFromFile(groupFile);
Set<NodeModel> data = deserializePermissions((List<Object>) values.get("permissions"));
Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
group.setEnduringNodes(nodes);
return true;
} else {
groupFile.createNewFile();
Map<String, Object> values = new LinkedHashMap<>();
values.put("name", group.getName());
Set<NodeModel> data = group.getEnduringNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
values.put("permissions", serializePermissions(data));
return writeMapToFile(groupFile, values);
}
}, false);
} finally {
group.getIoLock().unlock();
}
}
@Override
public boolean loadGroup(String name) {
Group group = plugin.getGroupManager().getOrMake(name);
group.getIoLock().lock();
try {
return call(name, () -> {
File groupFile = new File(groupsDir, name + ".yml");
registerFileAction("groups", groupFile);
if (!groupFile.exists()) {
return false;
}
Map<String, Object> values = readMapFromFile(groupFile);
Set<NodeModel> data = deserializePermissions((List<Object>) values.get("permissions"));
Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
group.setEnduringNodes(nodes);
return true;
}, false);
} finally {
group.getIoLock().unlock();
}
}
@Override
public boolean saveGroup(Group group) {
group.getIoLock().lock();
try {
return call(group.getName(), () -> {
File groupFile = new File(groupsDir, group.getName() + ".yml");
registerFileAction("groups", groupFile);
if (!groupFile.exists()) {
groupFile.createNewFile();
}
Map<String, Object> values = new LinkedHashMap<>();
values.put("name", group.getName());
Set<NodeModel> data = group.getEnduringNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
values.put("permissions", serializePermissions(data));
return writeMapToFile(groupFile, values);
}, false);
} finally {
group.getIoLock().unlock();
}
}
@Override
public List<HeldPermission<String>> getGroupsWithPermission(String permission) {
ImmutableList.Builder<HeldPermission<String>> held = ImmutableList.builder();
boolean success = call("null", () -> {
File[] files = groupsDir.listFiles((dir, name1) -> name1.endsWith(".yml"));
if (files == null) return false;
for (File file : files) {
call(file.getName(), () -> {
registerFileAction("groups", file);
String holder = file.getName().substring(0, file.getName().length() - 4);
Map<String, Object> values = readMapFromFile(file);
Set<NodeModel> nodes = new HashSet<>();
nodes.addAll(deserializePermissions((List<Object>) values.get("permissions")));
for (NodeModel e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) {
continue;
}
held.add(NodeHeldPermission.of(holder, e));
}
return true;
}, true);
}
return true;
}, false);
return success ? held.build() : null;
}
@Override
public boolean createAndLoadTrack(String name) {
Track track = plugin.getTrackManager().getOrMake(name);
track.getIoLock().lock();
try {
return call(name, () -> {
File trackFile = new File(tracksDir, name + ".yml");
registerFileAction("tracks", trackFile);
if (trackFile.exists()) {
Map<String, Object> values = readMapFromFile(trackFile);
track.setGroups((List<String>) values.get("groups"));
return true;
} else {
trackFile.createNewFile();
Map<String, Object> values = new LinkedHashMap<>();
values.put("name", track.getName());
values.put("groups", track.getGroups());
return writeMapToFile(trackFile, values);
}
}, false);
} finally {
track.getIoLock().unlock();
}
}
@Override
public boolean loadTrack(String name) {
Track track = plugin.getTrackManager().getOrMake(name);
track.getIoLock().lock();
try {
return call(name, () -> {
File trackFile = new File(tracksDir, name + ".yml");
registerFileAction("tracks", trackFile);
if (!trackFile.exists()) {
return false;
}
Map<String, Object> values = readMapFromFile(trackFile);
track.setGroups((List<String>) values.get("groups"));
return true;
}, false);
} finally {
track.getIoLock().unlock();
}
}
@Override
public boolean saveTrack(Track track) {
track.getIoLock().lock();
try {
return call(track.getName(), () -> {
File trackFile = new File(tracksDir, track.getName() + ".yml");
registerFileAction("tracks", trackFile);
if (!trackFile.exists()) {
trackFile.createNewFile();
}
Map<String, Object> values = new LinkedHashMap<>();
values.put("name", track.getName());
values.put("groups", track.getGroups());
return writeMapToFile(trackFile, values);
}, false);
} finally {
track.getIoLock().unlock();
}
}
public static Set<NodeModel> deserializePermissions(List<Object> permissionsSection) {
Set<NodeModel> nodes = new HashSet<>();
for (Object perm : permissionsSection) {
// each object in the permission list is either a String or Map.
// just a permission with no extra context.
if (perm instanceof String) {
String permission = (String) perm;
nodes.add(NodeModel.of(permission, true, "global", "global", 0L, ImmutableContextSet.empty()));
continue;
}
// it must be a map at this point.
if (!(perm instanceof Map)) {
continue;
}
if (((Map) perm).isEmpty()) {
continue;
}
// the permission object, should only contain one entry.
Map<String, Object> data = (Map<String, Object>) perm;
// get the only entry in the map. the key is the permission, the object is the attributes associated with it.
Map.Entry<String, Object> entry = Iterables.getFirst(data.entrySet(), null);
String permission = entry.getKey();
if (entry.getValue() != null && entry.getValue() instanceof Map) {
Map<String, Object> attributes = (Map<String, Object>) entry.getValue();
boolean value = true;
String server = "global";
String world = "global";
long expiry = 0L;
ImmutableSetMultimap context = ImmutableSetMultimap.of();
if (attributes.containsKey("value")) {
value = (boolean) attributes.get("value");
}
if (attributes.containsKey("server")) {
server = attributes.get("server").toString();
}
if (attributes.containsKey("world")) {
world = attributes.get("world").toString();
}
if (attributes.containsKey("expiry")) {
Object exp = attributes.get("expiry");
if (exp instanceof Long || exp.getClass().isPrimitive()) {
expiry = (long) exp;
} else {
expiry = (int) exp;
}
}
if (attributes.get("context") != null && attributes.get("context") instanceof Map) {
Map<String, Object> contexts = (Map<String, Object>) attributes.get("context");
ImmutableSetMultimap.Builder<String, String> map = ImmutableSetMultimap.builder();
for (Map.Entry<String, Object> e : contexts.entrySet()) {
Object val = e.getValue();
if (val instanceof List) {
map.putAll(e.getKey(), ((List<String>) val));
} else {
map.put(e.getKey(), val.toString());
}
}
context = map.build();
}
if (permission.startsWith("luckperms.batch") && attributes.get("permissions") instanceof List) {
final List<String> batchPerms = (List<String>) attributes.get("permissions");
for (String rawPerm : batchPerms) {
nodes.add(NodeModel.of(rawPerm, value, server, world, expiry, ImmutableContextSet.fromMultimap(context)));
}
} else {
nodes.add(NodeModel.of(permission, value, server, world, expiry, ImmutableContextSet.fromMultimap(context)));
}
}
        }
        return nodes;
    }
    /**
* Serializes a set of nodes to a format which can be serialised by SnakeYAML.
* (Maps, Lists and raw types)
*
* Returns a list of objects.
*
* Each object is either instanceof String, just a raw permission node with value=true and default context
*
* OR
*
* Is a Map of String to Map. The map contains only one entry, where the key is the permission string, and the value
* is a map containing the attributes of the node.
*
* @param nodes the nodes to serialize
     * @return a SnakeYAML friendly representation of the nodes
*/
public static List<Object> serializePermissions(Set<NodeModel> nodes) {
List<Object> data = new ArrayList<>();
for (NodeModel node : nodes) {
// just a raw, default node.
boolean single = node.isValue() &&
node.getServer().equalsIgnoreCase("global") &&
node.getWorld().equalsIgnoreCase("global") &&
node.getExpiry() == 0L &&
node.getContexts().isEmpty();
// just add a string to the list.
if (single) {
data.add(node.getPermission());
continue;
}
// otherwise, this node has some other special context which needs to be saved.
// we serialise this way so it gets represented nicely in YAML.
// create a map of node attributes
Map<String, Object> attributes = new LinkedHashMap<>();
attributes.put("value", node.isValue());
if (!node.getServer().equals("global")) {
attributes.put("server", node.getServer());
}
if (!node.getWorld().equals("global")) {
attributes.put("world", node.getWorld());
}
if (node.getExpiry() != 0L) {
attributes.put("expiry", node.getExpiry());
}
if (!node.getContexts().isEmpty()) {
Map<String, Object> context = new HashMap<>();
Map<String, Collection<String>> map = node.getContexts().toMultimap().asMap();
for (Map.Entry<String, Collection<String>> e : map.entrySet()) {
List<String> vals = new ArrayList<>(e.getValue());
int size = vals.size();
if (size == 1) {
context.put(e.getKey(), vals.get(0));
} else if (size > 1) {
context.put(e.getKey(), vals);
}
}
attributes.put("context", context);
}
// create a new map to represent this entry in the list
// the map will only contain one entry. (the permission --> attributes)
Map<String, Object> perm = new HashMap<>();
// add the node to the map
perm.put(node.getPermission(), attributes);
// add the map to the object list, and continue
data.add(perm);
}
return data;
    }
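As a concrete example of the format described in the comment above, one default node plus one server-specific temporary node with a context would serialise to roughly the following (illustrative values, shown under the "permissions" key written by the save methods):

permissions:
- some.permission.node
- another.permission:
    value: true
    server: factions
    expiry: 1514764800
    context:
      gamemode: survival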
}
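Reassembled from the added lines in this hunk, the YAMLBacking that replaces the class above is just a thin pair of Configurate overrides:

package me.lucko.luckperms.common.storage.backing.file;

import me.lucko.luckperms.common.plugin.LuckPermsPlugin;

import org.yaml.snakeyaml.DumperOptions;

import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.yaml.YAMLConfigurationLoader;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

public class YAMLBacking extends ConfigurateBacking {

    public YAMLBacking(LuckPermsPlugin plugin, String dataFolderName) {
        super(plugin, "YAML", ".yml", dataFolderName);
    }

    @Override
    protected ConfigurationNode readFile(File file) throws IOException {
        if (!file.exists()) {
            return null;
        }
        YAMLConfigurationLoader loader = YAMLConfigurationLoader.builder()
                .setFlowStyle(DumperOptions.FlowStyle.BLOCK)
                .setSource(() -> Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8))
                .setSink(() -> Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8))
                .build();
        return loader.load();
    }

    @Override
    protected void saveFile(File file, ConfigurationNode node) throws IOException {
        if (node == null && file.exists()) {
            file.delete();
            return;
        }
        YAMLConfigurationLoader loader = YAMLConfigurationLoader.builder()
                .setFlowStyle(DumperOptions.FlowStyle.BLOCK)
                .setIndent(2)
                .setSource(() -> Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8))
                .setSink(() -> Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8))
                .build();
        loader.save(node);
    }
}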
View File
@ -27,16 +27,23 @@ package me.lucko.luckperms.common.storage.backing.legacy;
import lombok.RequiredArgsConstructor;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import me.lucko.luckperms.common.node.NodeFactory;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.file.JSONBacking;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.LinkedHashSet;
@ -47,11 +54,35 @@ import java.util.stream.Collectors;
@SuppressWarnings("unchecked")
@RequiredArgsConstructor
public class LegacyJSONSchemaMigration implements Runnable {
    private final Gson gson = new GsonBuilder().setPrettyPrinting().create();
    private final LuckPermsPlugin plugin;
    private final JSONBacking backing;
    private final File oldDataFolder;
    private final File newDataFolder;
private boolean writeElementToFile(File file, JsonElement element) {
try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
gson.toJson(element, writer);
writer.flush();
return true;
} catch (Throwable t) {
plugin.getLog().warn("Exception whilst writing to file: " + file.getAbsolutePath());
t.printStackTrace();
return false;
}
}
private JsonObject readObjectFromFile(File file) {
try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
return gson.fromJson(reader, JsonObject.class);
} catch (Throwable t) {
plugin.getLog().warn("Exception whilst reading from file: " + file.getAbsolutePath());
t.printStackTrace();
return null;
}
}
    @Override
    public void run() {
        plugin.getLog().warn("Moving existing files to their new location.");
@ -71,7 +102,7 @@ public class LegacyJSONSchemaMigration implements Runnable {
            try {
                File replacementFile = new File(newGroupsDir, oldFile.getName());
                JsonObject values = readObjectFromFile(oldFile);
                Map<String, Boolean> perms = new HashMap<>();
                String name = values.get("name").getAsString();
@ -96,8 +127,8 @@ public class LegacyJSONSchemaMigration implements Runnable {
                JsonObject data = new JsonObject();
                data.addProperty("name", name);
                data.add("permissions", serializePermissions(nodes));
                writeElementToFile(replacementFile, data);
                oldFile.delete();
            } catch (Exception e) {
@ -120,7 +151,7 @@ public class LegacyJSONSchemaMigration implements Runnable {
            try {
                File replacementFile = new File(newUsersDir, oldFile.getName());
                JsonObject values = readObjectFromFile(oldFile);
                Map<String, Boolean> perms = new HashMap<>();
                String uuid = values.get("uuid").getAsString();
@ -148,8 +179,8 @@ public class LegacyJSONSchemaMigration implements Runnable {
data.addProperty("uuid", uuid); data.addProperty("uuid", uuid);
data.addProperty("name", name); data.addProperty("name", name);
data.addProperty("primaryGroup", primaryGroup); data.addProperty("primaryGroup", primaryGroup);
data.add("permissions", JSONBacking.serializePermissions(nodes)); data.add("permissions", serializePermissions(nodes));
backing.writeElementToFile(replacementFile, data); writeElementToFile(replacementFile, data);
oldFile.delete(); oldFile.delete();
} catch (Exception e) { } catch (Exception e) {
@ -176,4 +207,48 @@ public class LegacyJSONSchemaMigration implements Runnable {
            }
        }
    }
private static JsonArray serializePermissions(Set<NodeModel> nodes) {
JsonArray arr = new JsonArray();
for (NodeModel node : nodes) {
// just a raw, default node.
boolean single = node.isValue() &&
node.getServer().equalsIgnoreCase("global") &&
node.getWorld().equalsIgnoreCase("global") &&
node.getExpiry() == 0L &&
node.getContexts().isEmpty();
// just add a string to the list.
if (single) {
arr.add(new JsonPrimitive(node.getPermission()));
continue;
}
JsonObject attributes = new JsonObject();
attributes.addProperty("value", node.isValue());
if (!node.getServer().equals("global")) {
attributes.addProperty("server", node.getServer());
}
if (!node.getWorld().equals("global")) {
attributes.addProperty("world", node.getWorld());
}
if (node.getExpiry() != 0L) {
attributes.addProperty("expiry", node.getExpiry());
}
if (!node.getContexts().isEmpty()) {
attributes.add("context", node.getContextsAsJson());
}
JsonObject perm = new JsonObject();
perm.add(node.getPermission(), attributes);
arr.add(perm);
}
return arr;
}
}
View File
@ -32,12 +32,21 @@ import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.file.YAMLBacking;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
@ -50,6 +59,35 @@ public class LegacyYAMLSchemaMigration implements Runnable {
    private final File oldDataFolder;
    private final File newDataFolder;
private static Yaml getYaml() {
DumperOptions options = new DumperOptions();
options.setAllowUnicode(true);
options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
return new Yaml(options);
}
public boolean writeMapToFile(File file, Map<String, Object> values) {
try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
getYaml().dump(values, writer);
writer.flush();
return true;
} catch (Throwable t) {
plugin.getLog().warn("Exception whilst writing to file: " + file.getAbsolutePath());
t.printStackTrace();
return false;
}
}
public Map<String, Object> readMapFromFile(File file) {
try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
return (Map<String, Object>) getYaml().load(reader);
} catch (Throwable t) {
plugin.getLog().warn("Exception whilst reading from file: " + file.getAbsolutePath());
t.printStackTrace();
return null;
}
}
    @Override
    public void run() {
        plugin.getLog().warn("Moving existing files to their new location.");
@ -69,7 +107,7 @@ public class LegacyYAMLSchemaMigration implements Runnable {
            try {
                File replacementFile = new File(newGroupsDir, oldFile.getName());
                Map<String, Object> data = readMapFromFile(oldFile);
                Map<String, Boolean> perms = new HashMap<>();
                String name = (String) data.get("name");
@ -90,8 +128,8 @@ public class LegacyYAMLSchemaMigration implements Runnable {
                Map<String, Object> values = new LinkedHashMap<>();
                values.put("name", name);
                values.put("permissions", serializePermissions(nodes));
                writeMapToFile(replacementFile, values);
                oldFile.delete();
            } catch (Exception e) {
@ -114,7 +152,7 @@ public class LegacyYAMLSchemaMigration implements Runnable {
            try {
                File replacementFile = new File(newUsersDir, oldFile.getName());
                Map<String, Object> data = readMapFromFile(oldFile);
                Map<String, Boolean> perms = new HashMap<>();
                String uuid = (String) data.get("uuid");
@ -139,8 +177,8 @@ public class LegacyYAMLSchemaMigration implements Runnable {
values.put("uuid", uuid); values.put("uuid", uuid);
values.put("name", name); values.put("name", name);
values.put("primary-group", primaryGroup); values.put("primary-group", primaryGroup);
values.put("permissions", YAMLBacking.serializePermissions(nodes)); values.put("permissions", serializePermissions(nodes));
backing.writeMapToFile(replacementFile, values); writeMapToFile(replacementFile, values);
oldFile.delete(); oldFile.delete();
} catch (Exception e) { } catch (Exception e) {
@ -167,4 +205,72 @@ public class LegacyYAMLSchemaMigration implements Runnable {
            }
        }
    }
private static List<Object> serializePermissions(Set<NodeModel> nodes) {
List<Object> data = new ArrayList<>();
for (NodeModel node : nodes) {
// just a raw, default node.
boolean single = node.isValue() &&
node.getServer().equalsIgnoreCase("global") &&
node.getWorld().equalsIgnoreCase("global") &&
node.getExpiry() == 0L &&
node.getContexts().isEmpty();
// just add a string to the list.
if (single) {
data.add(node.getPermission());
continue;
}
// otherwise, this node has some other special context which needs to be saved.
// we serialise this way so it gets represented nicely in YAML.
// create a map of node attributes
Map<String, Object> attributes = new LinkedHashMap<>();
attributes.put("value", node.isValue());
if (!node.getServer().equals("global")) {
attributes.put("server", node.getServer());
}
if (!node.getWorld().equals("global")) {
attributes.put("world", node.getWorld());
}
if (node.getExpiry() != 0L) {
attributes.put("expiry", node.getExpiry());
}
if (!node.getContexts().isEmpty()) {
Map<String, Object> context = new HashMap<>();
Map<String, Collection<String>> map = node.getContexts().toMultimap().asMap();
for (Map.Entry<String, Collection<String>> e : map.entrySet()) {
List<String> vals = new ArrayList<>(e.getValue());
int size = vals.size();
if (size == 1) {
context.put(e.getKey(), vals.get(0));
} else if (size > 1) {
context.put(e.getKey(), vals);
}
}
attributes.put("context", context);
}
// create a new map to represent this entry in the list
// the map will only contain one entry. (the permission --> attributes)
Map<String, Object> perm = new HashMap<>();
// add the node to the map
perm.put(node.getPermission(), attributes);
// add the map to the object list, and continue
data.add(perm);
}
return data;
}
}
View File
@ -212,7 +212,7 @@ meta-formatting {
# Which storage method the plugin should use.
#
# See: https://github.com/lucko/LuckPerms/wiki/Choosing-a-Storage-type
# Currently supported: mysql, mariadb, postgresql, sqlite, h2, json, yaml, hocon, mongodb
#
# Fill out connection info below if you're using MySQL, MariaDB, PostgreSQL or MongoDB
# If your MySQL server supports it, the "mariadb" option is preferred over "mysql".
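The new hocon storage option is not shown elsewhere in this section; presumably its backing mirrors the YAML and JSON classes above, swapping in Configurate's HoconConfigurationLoader from the configurate-hocon dependency. A hypothetical sketch; the class name, the .conf extension and the constructor shape are assumptions:

package me.lucko.luckperms.common.storage.backing.file;

import me.lucko.luckperms.common.plugin.LuckPermsPlugin;

import ninja.leaping.configurate.ConfigurationNode;
import ninja.leaping.configurate.hocon.HoconConfigurationLoader;

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;

public class HOCONBacking extends ConfigurateBacking {

    public HOCONBacking(LuckPermsPlugin plugin, String dataFolderName) {
        super(plugin, "HOCON", ".conf", dataFolderName);
    }

    @Override
    protected ConfigurationNode readFile(File file) throws IOException {
        if (!file.exists()) {
            return null;
        }
        HoconConfigurationLoader loader = HoconConfigurationLoader.builder()
                .setSource(() -> Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8))
                .setSink(() -> Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8))
                .build();
        return loader.load();
    }

    @Override
    protected void saveFile(File file, ConfigurationNode node) throws IOException {
        if (node == null && file.exists()) {
            file.delete();
            return;
        }
        HoconConfigurationLoader loader = HoconConfigurationLoader.builder()
                .setSource(() -> Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8))
                .setSink(() -> Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8))
                .build();
        loader.save(node);
    }
}

Selecting it would then be a matter of pointing the storage method option shown above at hocon.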