Remove character constraints on group names, cleanup legacy node (de)serialization system

This commit is contained in:
Luck 2017-11-23 20:05:38 +00:00
parent 3afeb30795
commit a7cb7ec1af
No known key found for this signature in database
GPG Key ID: EFA9B3EC5FD90F8B
10 changed files with 134 additions and 143 deletions

View File

@ -138,7 +138,7 @@ public class AssignmentExpression {
private PermissionToken(String permission) { private PermissionToken(String permission) {
this.permission = permission; this.permission = permission;
this.node = LegacyNodeFactory.fromSerializedNode(permission, true); this.node = LegacyNodeFactory.fromLegacyString(permission, true);
} }
@Override @Override

View File

@ -51,8 +51,8 @@ public class AssignmentRule {
this.hasTrueExpression = AssignmentExpression.compile(hasTrueExpression); this.hasTrueExpression = AssignmentExpression.compile(hasTrueExpression);
this.hasFalseExpression = AssignmentExpression.compile(hasFalseExpression); this.hasFalseExpression = AssignmentExpression.compile(hasFalseExpression);
this.lacksExpression = AssignmentExpression.compile(lacksExpression); this.lacksExpression = AssignmentExpression.compile(lacksExpression);
this.toGive = toGive.stream().map(s -> LegacyNodeFactory.fromSerializedNode(s, true)).collect(ImmutableCollectors.toList());; this.toGive = toGive.stream().map(s -> LegacyNodeFactory.fromLegacyString(s, true)).collect(ImmutableCollectors.toList());;
this.toTake = toTake.stream().map(s -> LegacyNodeFactory.fromSerializedNode(s, true)).collect(ImmutableCollectors.toList()); this.toTake = toTake.stream().map(s -> LegacyNodeFactory.fromLegacyString(s, true)).collect(ImmutableCollectors.toList());
this.setPrimaryGroup = setPrimaryGroup; this.setPrimaryGroup = setPrimaryGroup;
} }

View File

@ -35,8 +35,6 @@ import java.util.regex.Pattern;
@UtilityClass @UtilityClass
public class DataConstraints { public class DataConstraints {
public static final Pattern RESERVED_CHARACTERS_PATTERN = Pattern.compile("[\\/\\$\\.]");
public static final int MAX_PERMISSION_LENGTH = 200; public static final int MAX_PERMISSION_LENGTH = 200;
public static final int MAX_TRACK_NAME_LENGTH = 36; public static final int MAX_TRACK_NAME_LENGTH = 36;
@ -85,10 +83,6 @@ public class DataConstraints {
return false; return false;
} }
if (RESERVED_CHARACTERS_PATTERN.matcher(s).find()) {
return false;
}
return true; return true;
}; };
@ -97,10 +91,6 @@ public class DataConstraints {
return false; return false;
} }
if (RESERVED_CHARACTERS_PATTERN.matcher(s).find()) {
return false;
}
return true; return true;
}; };
@ -113,10 +103,6 @@ public class DataConstraints {
return false; return false;
} }
if (RESERVED_CHARACTERS_PATTERN.matcher(s).find()) {
return false;
}
return true; return true;
}; };
@ -125,10 +111,6 @@ public class DataConstraints {
return false; return false;
} }
if (RESERVED_CHARACTERS_PATTERN.matcher(s).find()) {
return false;
}
return true; return true;
}; };
@ -143,10 +125,6 @@ public class DataConstraints {
return false; return false;
} }
if (RESERVED_CHARACTERS_PATTERN.matcher(s).find()) {
return false;
}
return true; return true;
}; };
@ -159,10 +137,6 @@ public class DataConstraints {
return false; return false;
} }
if (RESERVED_CHARACTERS_PATTERN.matcher(s).find()) {
return false;
}
return true; return true;
}; };

View File

@ -54,17 +54,6 @@ public final class ImmutableNode implements Node {
*/ */
private static final int NODE_SEPARATOR_CHAR = Character.getNumericValue('.'); private static final int NODE_SEPARATOR_CHAR = Character.getNumericValue('.');
/**
* The characters which are delimited when serializing a permission string
*/
private static final String[] PERMISSION_DELIMITERS = new String[]{"/", "-", "$", "(", ")", "=", ","};
/**
* The characters which are delimited when serializing a server or world string
*/
private static final String[] SERVER_WORLD_DELIMITERS = new String[]{"/", "-"};
/* /*
* NODE STATE * NODE STATE
* *
@ -146,12 +135,12 @@ public final class ImmutableNode implements Node {
world = standardizeServerWorld(world); world = standardizeServerWorld(world);
// define core attributes // define core attributes
this.permission = NodeFactory.unescapeDelimiters(permission, PERMISSION_DELIMITERS).intern(); this.permission = LegacyNodeFactory.unescapeDelimiters(permission, LegacyNodeFactory.PERMISSION_DELIMITERS).intern();
this.value = value; this.value = value;
this.override = override; this.override = override;
this.expireAt = expireAt; this.expireAt = expireAt;
this.server = internString(NodeFactory.unescapeDelimiters(server, SERVER_WORLD_DELIMITERS)); this.server = internString(LegacyNodeFactory.unescapeDelimiters(server, LegacyNodeFactory.SERVER_WORLD_DELIMITERS));
this.world = internString(NodeFactory.unescapeDelimiters(world, SERVER_WORLD_DELIMITERS)); this.world = internString(LegacyNodeFactory.unescapeDelimiters(world, LegacyNodeFactory.SERVER_WORLD_DELIMITERS));
this.contexts = contexts == null ? ContextSet.empty() : contexts.makeImmutable(); this.contexts = contexts == null ? ContextSet.empty() : contexts.makeImmutable();
// define cached state // define cached state

View File

@ -40,6 +40,26 @@ import java.util.regex.Pattern;
@UtilityClass @UtilityClass
public class LegacyNodeFactory { public class LegacyNodeFactory {
/**
* The characters which are delimited when serializing a permission string
*/
static final String[] PERMISSION_DELIMITERS = new String[]{"/", "-", "$", "(", ")", "=", ","};
/**
* The characters which are delimited when serializing a server or world string
*/
static final String[] SERVER_WORLD_DELIMITERS = new String[]{"/", "-"};
/**
* The characters which are delimited when serializing a context set
*/
static final String[] CONTEXT_DELIMITERS = new String[]{"=", "(", ")", ","};
/**
* The characters which are delimited when serializing meta/prefix/suffix strings
*/
private static final String[] GENERIC_DELIMITERS = new String[]{".", "/", "-", "$"};
// legacy node format delimiters // legacy node format delimiters
private static final Pattern LEGACY_SERVER_DELIM = PatternCache.compileDelimitedMatcher("/", "\\"); private static final Pattern LEGACY_SERVER_DELIM = PatternCache.compileDelimitedMatcher("/", "\\");
private static final Splitter LEGACY_SERVER_SPLITTER = Splitter.on(LEGACY_SERVER_DELIM).limit(2); private static final Splitter LEGACY_SERVER_SPLITTER = Splitter.on(LEGACY_SERVER_DELIM).limit(2);
@ -47,8 +67,44 @@ public class LegacyNodeFactory {
private static final Splitter LEGACY_WORLD_SPLITTER = Splitter.on(LEGACY_WORLD_DELIM).limit(2); private static final Splitter LEGACY_WORLD_SPLITTER = Splitter.on(LEGACY_WORLD_DELIM).limit(2);
private static final Pattern LEGACY_EXPIRY_DELIM = PatternCache.compileDelimitedMatcher("$", "\\"); private static final Pattern LEGACY_EXPIRY_DELIM = PatternCache.compileDelimitedMatcher("$", "\\");
private static final Splitter LEGACY_EXPIRY_SPLITTER = Splitter.on(LEGACY_EXPIRY_DELIM).limit(2); private static final Splitter LEGACY_EXPIRY_SPLITTER = Splitter.on(LEGACY_EXPIRY_DELIM).limit(2);
// splits the "(key=value,key=value)" context prefix off the front of a legacy node string;
// limit(2) keeps everything after the first unescaped ")" intact as the permission part
private static final Pattern LEGACY_CONTEXT_DELIM = PatternCache.compileDelimitedMatcher(")", "\\");
private static final Splitter CONTEXT_SPLITTER = Splitter.on(LEGACY_CONTEXT_DELIM).limit(2);
// splits the context body: "," (unescaped) separates pairs, "=" (unescaped) separates key from value
private static final Pattern LEGACY_CONTEXT_PAIR_DELIM = PatternCache.compileDelimitedMatcher(",", "\\");
private static final Pattern LEGACY_CONTEXT_PAIR_PART_DELIM = PatternCache.compileDelimitedMatcher("=", "\\");
private static final Splitter.MapSplitter LEGACY_CONTEXT_PART_SPLITTER = Splitter.on(LEGACY_CONTEXT_PAIR_DELIM)
.withKeyValueSeparator(Splitter.on(LEGACY_CONTEXT_PAIR_PART_DELIM));
// NOTE(review): the next line is a side-by-side diff artifact — it shows the OLD
// signature (fromSerializedNode) and the NEW signature (toSerializedNode) merged
// onto one line. The body below is the new method: it serializes a Node back into
// the legacy "server-world/(contexts)permission$expiry" string format.
public static Node fromSerializedNode(String s, boolean b) { public static String toSerializedNode(Node node) {
StringBuilder builder = new StringBuilder();
// prefix: "server/" or "server-world/"; a world with no server uses "global-world/"
if (node.getServer().orElse(null) != null) {
builder.append(escapeDelimiters(node.getServer().orElse(null), SERVER_WORLD_DELIMITERS));
if (node.getWorld().orElse(null) != null) {
builder.append("-").append(escapeDelimiters(node.getWorld().orElse(null), SERVER_WORLD_DELIMITERS));
}
builder.append("/");
} else {
if (node.getWorld().orElse(null) != null) {
builder.append("global-").append(escapeDelimiters(node.getWorld().orElse(null), SERVER_WORLD_DELIMITERS)).append("/");
}
}
// contexts: "(key=value,key=value)" with delimiter characters escaped
if (!node.getContexts().isEmpty()) {
builder.append("(");
for (Map.Entry<String, String> entry : node.getContexts().toSet()) {
builder.append(escapeDelimiters(entry.getKey(), CONTEXT_DELIMITERS))
.append("=").append(escapeDelimiters(entry.getValue(), CONTEXT_DELIMITERS)).append(",");
}
// drop the trailing "," appended by the loop
builder.deleteCharAt(builder.length() - 1);
builder.append(")");
}
builder.append(escapeDelimiters(node.getPermission(), PERMISSION_DELIMITERS));
// temporary nodes carry their expiry as a "$<unix time>" suffix
if (node.isTemporary()) builder.append("$").append(node.getExpiryUnixTime());
return builder.toString();
}
public static Node fromLegacyString(String s, boolean b) {
if (b) { if (b) {
return builderFromLegacyString(s, true).build(); return builderFromLegacyString(s, true).build();
} else { } else {
@ -115,6 +171,51 @@ public class LegacyNodeFactory {
} }
} }
/**
 * Escapes the generic delimiter characters in the given string.
 *
 * @param s the string to escape
 * @return the escaped string
 * @throws NullPointerException if the string is null
 */
static String escapeCharacters(String s) {
    if (s == null) throw new NullPointerException();
    return escapeDelimiters(s, GENERIC_DELIMITERS);
}
/**
 * Reverses the escaping applied by {@link #escapeCharacters(String)}, additionally
 * translating the ancient {SEP}/{FSEP}/{DSEP} placeholder format.
 *
 * @param s the string to unescape
 * @return the unescaped string
 * @throws NullPointerException if the string is null
 */
static String unescapeCharacters(String s) {
    if (s == null) throw new NullPointerException();

    // super old hack - this format is no longer used for escaping,
    // but we'll keep supporting it when unescaping
    String result = s.replace("{SEP}", ".")
            .replace("{FSEP}", "/")
            .replace("{DSEP}", "$");

    return unescapeDelimiters(result, GENERIC_DELIMITERS);
}
/**
 * Prefixes every occurrence of each delimiter string with a backslash.
 *
 * @param s the string to escape, may be null
 * @param delimiters the delimiter strings to escape
 * @return the escaped string, or null if the input was null
 */
private static String escapeDelimiters(String s, String... delimiters) {
    if (s == null) {
        return null;
    }

    String result = s;
    for (String delimiter : delimiters) {
        result = result.replace(delimiter, "\\" + delimiter);
    }
    return result;
}
/**
 * Removes the backslash escaping applied by {@code escapeDelimiters} for the
 * given delimiter strings.
 *
 * @param s the string to unescape, may be null
 * @param delimiters the delimiter strings to unescape
 * @return the unescaped string, or null if the input was null
 */
static String unescapeDelimiters(String s, String... delimiters) {
    if (s == null) {
        return null;
    }

    String result = s;
    for (String delimiter : delimiters) {
        result = result.replace("\\" + delimiter, delimiter);
    }
    return result;
}
private static final class LegacyNodeBuilder extends NodeBuilder { private static final class LegacyNodeBuilder extends NodeBuilder {
private static final Pattern NODE_CONTEXTS_PATTERN = Pattern.compile("\\(.+\\).*"); private static final Pattern NODE_CONTEXTS_PATTERN = Pattern.compile("\\(.+\\).*");
@ -122,14 +223,17 @@ public class LegacyNodeFactory {
if (!NODE_CONTEXTS_PATTERN.matcher(permission).matches()) { if (!NODE_CONTEXTS_PATTERN.matcher(permission).matches()) {
this.permission = permission; this.permission = permission;
} else { } else {
List<String> contextParts = Splitter.on(PatternCache.compileDelimitedMatcher(")", "\\")).limit(2).splitToList(permission.substring(1)); List<String> contextParts = CONTEXT_SPLITTER.splitToList(permission.substring(1));
// 0 = context, 1 = node // 0 = context, 1 = node
this.permission = contextParts.get(1); this.permission = contextParts.get(1);
try { try {
Map<String, String> map = Splitter.on(PatternCache.compileDelimitedMatcher(",", "\\")).withKeyValueSeparator(Splitter.on(PatternCache.compileDelimitedMatcher("=", "\\"))).split(contextParts.get(0)); Map<String, String> map = LEGACY_CONTEXT_PART_SPLITTER.split(contextParts.get(0));
for (Map.Entry<String, String> e : map.entrySet()) { for (Map.Entry<String, String> e : map.entrySet()) {
this.withExtraContext(NodeFactory.unescapeDelimiters(e.getKey(), "=", "(", ")", ","), NodeFactory.unescapeDelimiters(e.getValue(), "=", "(", ")", ",")); this.withExtraContext(
unescapeDelimiters(e.getKey(), CONTEXT_DELIMITERS),
unescapeDelimiters(e.getValue(), CONTEXT_DELIMITERS)
);
} }
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {

View File

@ -40,14 +40,13 @@ import java.util.Iterator;
import java.util.Map; import java.util.Map;
/** /**
* Utility class to make Node(Builder) instances from serialised strings or existing Nodes * Utility class to make Node(Builder) instances from strings or existing Nodes
*/ */
@UtilityClass @UtilityClass
public class NodeFactory { public class NodeFactory {
// used to split prefix/suffix/meta nodes // used to split prefix/suffix/meta nodes
private static final Splitter META_SPLITTER = Splitter.on(PatternCache.compileDelimitedMatcher(".", "\\")).limit(2); private static final Splitter META_SPLITTER = Splitter.on(PatternCache.compileDelimitedMatcher(".", "\\")).limit(2);
private static final String[] DELIMS = new String[]{".", "/", "-", "$"};
public static Node.Builder newBuilder(String s) { public static Node.Builder newBuilder(String s) {
return new NodeBuilder(s); return new NodeBuilder(s);
@ -65,7 +64,7 @@ public class NodeFactory {
return makeSuffixNode(100, value); return makeSuffixNode(100, value);
} }
return new NodeBuilder("meta." + escapeCharacters(key) + "." + escapeCharacters(value)); return new NodeBuilder("meta." + LegacyNodeFactory.escapeCharacters(key) + "." + LegacyNodeFactory.escapeCharacters(value));
} }
public static Node.Builder makeChatMetaNode(ChatMetaType type, int priority, String s) { public static Node.Builder makeChatMetaNode(ChatMetaType type, int priority, String s) {
@ -73,11 +72,11 @@ public class NodeFactory {
} }
public static Node.Builder makePrefixNode(int priority, String prefix) { public static Node.Builder makePrefixNode(int priority, String prefix) {
return new NodeBuilder("prefix." + priority + "." + escapeCharacters(prefix)); return new NodeBuilder("prefix." + priority + "." + LegacyNodeFactory.escapeCharacters(prefix));
} }
public static Node.Builder makeSuffixNode(int priority, String suffix) { public static Node.Builder makeSuffixNode(int priority, String suffix) {
return new NodeBuilder("suffix." + priority + "." + escapeCharacters(suffix)); return new NodeBuilder("suffix." + priority + "." + LegacyNodeFactory.escapeCharacters(suffix));
} }
public static String nodeAsCommand(Node node, String id, boolean group, boolean set) { public static String nodeAsCommand(Node node, String id, boolean group, boolean set) {
@ -173,49 +172,6 @@ public class NodeFactory {
return sb; return sb;
} }
public static String escapeCharacters(String s) {
if (s == null) {
throw new NullPointerException();
}
return escapeDelimiters(s, DELIMS);
}
public static String unescapeCharacters(String s) {
if (s == null) {
throw new NullPointerException();
}
s = s.replace("{SEP}", ".");
s = s.replace("{FSEP}", "/");
s = s.replace("{DSEP}", "$");
s = unescapeDelimiters(s, DELIMS);
return s;
}
public static String escapeDelimiters(String s, String... delims) {
if (s == null) {
return null;
}
for (String delim : delims) {
s = s.replace(delim, "\\" + delim);
}
return s;
}
public static String unescapeDelimiters(String s, String... delims) {
if (s == null) {
return null;
}
for (String delim : delims) {
s = s.replace("\\" + delim, delim);
}
return s;
}
public static String parseGroupNode(String s) { public static String parseGroupNode(String s) {
String lower = s.toLowerCase(); String lower = s.toLowerCase();
if (!lower.startsWith("group.")) { if (!lower.startsWith("group.")) {
@ -237,7 +193,7 @@ public class NodeFactory {
if (!metaParts.hasNext()) return null; if (!metaParts.hasNext()) return null;
String value = metaParts.next(); String value = metaParts.next();
return Maps.immutableEntry(unescapeCharacters(key).intern(), unescapeCharacters(value).intern()); return Maps.immutableEntry(LegacyNodeFactory.unescapeCharacters(key).intern(), LegacyNodeFactory.unescapeCharacters(value).intern());
} }
private static Map.Entry<Integer, String> parseChatMetaNode(String type, String s) { private static Map.Entry<Integer, String> parseChatMetaNode(String type, String s) {
@ -255,7 +211,7 @@ public class NodeFactory {
try { try {
int p = Integer.parseInt(priority); int p = Integer.parseInt(priority);
String v = unescapeCharacters(value).intern(); String v = LegacyNodeFactory.unescapeCharacters(value).intern();
return Maps.immutableEntry(p, v); return Maps.immutableEntry(p, v);
} catch (NumberFormatException e) { } catch (NumberFormatException e) {
return null; return null;

View File

@ -112,7 +112,7 @@ public class LegacyJsonMigration implements Runnable {
Set<NodeModel> nodes = perms.entrySet().stream() Set<NodeModel> nodes = perms.entrySet().stream()
.map(e -> LegacyNodeFactory.fromSerializedNode(e.getKey(), e.getValue())) .map(e -> LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue()))
.map(NodeModel::fromNode) .map(NodeModel::fromNode)
.collect(Collectors.toCollection(LinkedHashSet::new)); .collect(Collectors.toCollection(LinkedHashSet::new));
@ -162,7 +162,7 @@ public class LegacyJsonMigration implements Runnable {
} }
Set<NodeModel> nodes = perms.entrySet().stream() Set<NodeModel> nodes = perms.entrySet().stream()
.map(e -> LegacyNodeFactory.fromSerializedNode(e.getKey(), e.getValue())) .map(e -> LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue()))
.map(NodeModel::fromNode) .map(NodeModel::fromNode)
.collect(Collectors.toCollection(LinkedHashSet::new)); .collect(Collectors.toCollection(LinkedHashSet::new));

View File

@ -154,7 +154,7 @@ public class LegacySqlMigration implements Runnable {
} }
Set<NodeModel> nodes = convertedPerms.entrySet().stream() Set<NodeModel> nodes = convertedPerms.entrySet().stream()
.map(e -> LegacyNodeFactory.fromSerializedNode(e.getKey(), e.getValue())) .map(e -> LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue()))
.map(NodeModel::fromNode) .map(NodeModel::fromNode)
.collect(Collectors.toSet()); .collect(Collectors.toSet());
@ -233,7 +233,7 @@ public class LegacySqlMigration implements Runnable {
} }
Set<NodeModel> nodes = convertedPerms.entrySet().stream() Set<NodeModel> nodes = convertedPerms.entrySet().stream()
.map(ent -> LegacyNodeFactory.fromSerializedNode(ent.getKey(), ent.getValue())) .map(ent -> LegacyNodeFactory.fromLegacyString(ent.getKey(), ent.getValue()))
.map(NodeModel::fromNode) .map(NodeModel::fromNode)
.collect(Collectors.toSet()); .collect(Collectors.toSet());

View File

@ -114,7 +114,7 @@ public class LegacyYamlMigration implements Runnable {
perms.putAll((Map<String, Boolean>) data.get("perms")); perms.putAll((Map<String, Boolean>) data.get("perms"));
Set<NodeModel> nodes = perms.entrySet().stream() Set<NodeModel> nodes = perms.entrySet().stream()
.map(e -> LegacyNodeFactory.fromSerializedNode(e.getKey(), e.getValue())) .map(e -> LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue()))
.map(NodeModel::fromNode) .map(NodeModel::fromNode)
.collect(Collectors.toCollection(LinkedHashSet::new)); .collect(Collectors.toCollection(LinkedHashSet::new));
@ -161,7 +161,7 @@ public class LegacyYamlMigration implements Runnable {
perms.putAll((Map<String, Boolean>) data.get("perms")); perms.putAll((Map<String, Boolean>) data.get("perms"));
Set<NodeModel> nodes = perms.entrySet().stream() Set<NodeModel> nodes = perms.entrySet().stream()
.map(e -> LegacyNodeFactory.fromSerializedNode(e.getKey(), e.getValue())) .map(e -> LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue()))
.map(NodeModel::fromNode) .map(NodeModel::fromNode)
.collect(Collectors.toCollection(LinkedHashSet::new)); .collect(Collectors.toCollection(LinkedHashSet::new));

View File

@ -49,7 +49,6 @@ import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.common.model.Track; import me.lucko.luckperms.common.model.Track;
import me.lucko.luckperms.common.model.User; import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.node.LegacyNodeFactory; import me.lucko.luckperms.common.node.LegacyNodeFactory;
import me.lucko.luckperms.common.node.NodeFactory;
import me.lucko.luckperms.common.node.NodeHeldPermission; import me.lucko.luckperms.common.node.NodeHeldPermission;
import me.lucko.luckperms.common.node.NodeModel; import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin; import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
@ -232,7 +231,7 @@ public class MongoDao extends AbstractDao {
Set<NodeModel> nodes = new HashSet<>(); Set<NodeModel> nodes = new HashSet<>();
for (Map.Entry<String, Boolean> e : perms.entrySet()) { for (Map.Entry<String, Boolean> e : perms.entrySet()) {
Node node = LegacyNodeFactory.fromSerializedNode(e.getKey(), e.getValue()); Node node = LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue());
nodes.add(NodeModel.fromNode(node)); nodes.add(NodeModel.fromNode(node));
} }
@ -266,7 +265,7 @@ public class MongoDao extends AbstractDao {
Set<NodeModel> nodes = new HashSet<>(); Set<NodeModel> nodes = new HashSet<>();
for (Map.Entry<String, Boolean> e : perms.entrySet()) { for (Map.Entry<String, Boolean> e : perms.entrySet()) {
Node node = LegacyNodeFactory.fromSerializedNode(e.getKey(), e.getValue()); Node node = LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue());
nodes.add(NodeModel.fromNode(node)); nodes.add(NodeModel.fromNode(node));
} }
@ -306,7 +305,7 @@ public class MongoDao extends AbstractDao {
// User exists, let's load. // User exists, let's load.
Document d = cursor.next(); Document d = cursor.next();
user.setEnduringNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream() user.setEnduringNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream()
.map(e -> LegacyNodeFactory.fromSerializedNode(e.getKey(), e.getValue())) .map(e -> LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue()))
.collect(Collectors.toSet()) .collect(Collectors.toSet())
); );
user.getPrimaryGroup().setStoredValue(d.getString("primaryGroup")); user.getPrimaryGroup().setStoredValue(d.getString("primaryGroup"));
@ -394,7 +393,7 @@ public class MongoDao extends AbstractDao {
Map<String, Boolean> perms = revert((Map<String, Boolean>) d.get("perms")); Map<String, Boolean> perms = revert((Map<String, Boolean>) d.get("perms"));
for (Map.Entry<String, Boolean> e : perms.entrySet()) { for (Map.Entry<String, Boolean> e : perms.entrySet()) {
Node node = LegacyNodeFactory.fromSerializedNode(e.getKey(), e.getValue()); Node node = LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue());
if (!node.getPermission().equalsIgnoreCase(permission)) { if (!node.getPermission().equalsIgnoreCase(permission)) {
continue; continue;
} }
@ -422,7 +421,7 @@ public class MongoDao extends AbstractDao {
// Group exists, let's load. // Group exists, let's load.
Document d = cursor.next(); Document d = cursor.next();
group.setEnduringNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream() group.setEnduringNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream()
.map(e -> LegacyNodeFactory.fromSerializedNode(e.getKey(), e.getValue())) .map(e -> LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue()))
.collect(Collectors.toSet()) .collect(Collectors.toSet())
); );
} else { } else {
@ -452,7 +451,7 @@ public class MongoDao extends AbstractDao {
Document d = cursor.next(); Document d = cursor.next();
group.setEnduringNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream() group.setEnduringNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream()
.map(e -> LegacyNodeFactory.fromSerializedNode(e.getKey(), e.getValue())) .map(e -> LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue()))
.collect(Collectors.toSet()) .collect(Collectors.toSet())
); );
} }
@ -535,7 +534,7 @@ public class MongoDao extends AbstractDao {
Map<String, Boolean> perms = revert((Map<String, Boolean>) d.get("perms")); Map<String, Boolean> perms = revert((Map<String, Boolean>) d.get("perms"));
for (Map.Entry<String, Boolean> e : perms.entrySet()) { for (Map.Entry<String, Boolean> e : perms.entrySet()) {
Node node = LegacyNodeFactory.fromSerializedNode(e.getKey(), e.getValue()); Node node = LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue());
if (!node.getPermission().equalsIgnoreCase(permission)) { if (!node.getPermission().equalsIgnoreCase(permission)) {
continue; continue;
} }
@ -755,39 +754,8 @@ public class MongoDao extends AbstractDao {
Map<String, Boolean> m = new HashMap<>(); Map<String, Boolean> m = new HashMap<>();
for (Node node : nodes) { for (Node node : nodes) {
//noinspection deprecation //noinspection deprecation
m.put(toSerializedNode(node), node.getValuePrimitive()); m.put(LegacyNodeFactory.toSerializedNode(node), node.getValuePrimitive());
} }
return m; return m;
} }
private static final String[] SERVER_WORLD_DELIMITERS = new String[]{"/", "-"};
private static String toSerializedNode(Node node) {
StringBuilder builder = new StringBuilder();
if (node.getServer().orElse(null) != null) {
builder.append(NodeFactory.escapeDelimiters(node.getServer().orElse(null), SERVER_WORLD_DELIMITERS));
if (node.getWorld().orElse(null) != null) {
builder.append("-").append(NodeFactory.escapeDelimiters(node.getWorld().orElse(null), SERVER_WORLD_DELIMITERS));
}
builder.append("/");
} else {
if (node.getWorld().orElse(null) != null) {
builder.append("global-").append(NodeFactory.escapeDelimiters(node.getWorld().orElse(null), SERVER_WORLD_DELIMITERS)).append("/");
}
}
if (!node.getContexts().isEmpty()) {
builder.append("(");
for (Map.Entry<String, String> entry : node.getContexts().toSet()) {
builder.append(NodeFactory.escapeDelimiters(entry.getKey(), "=", "(", ")", ",")).append("=").append(NodeFactory.escapeDelimiters(entry.getValue(), "=", "(", ")", ",")).append(",");
}
builder.deleteCharAt(builder.length() - 1);
builder.append(")");
}
builder.append(NodeFactory.escapeDelimiters(node.getPermission(), "/", "-", "$", "(", ")", "=", ","));
if (node.isTemporary()) builder.append("$").append(node.getExpiryUnixTime());
return builder.toString();
}
} }