Refactor NodeModel into a more useful class, remove duplicate context serialization methods

This commit is contained in:
Luck 2017-04-04 19:19:31 +01:00
parent 7259e6be0a
commit ea00ec64af
No known key found for this signature in database
GPG Key ID: EFA9B3EC5FD90F8B
16 changed files with 297 additions and 339 deletions

View File

@ -32,7 +32,7 @@ public class NodeFactoryDelegate implements me.lucko.luckperms.api.NodeFactory {
@Override @Override
public Node fromSerialisedNode(@NonNull String serialisedPermission, boolean value) { public Node fromSerialisedNode(@NonNull String serialisedPermission, boolean value) {
return NodeFactory.fromSerialisedNode(serialisedPermission, value); return NodeFactory.fromSerializedNode(serialisedPermission, value);
} }
@Override @Override
@ -47,7 +47,7 @@ public class NodeFactoryDelegate implements me.lucko.luckperms.api.NodeFactory {
@Override @Override
public Node.Builder newBuilderFromSerialisedNode(@NonNull String serialisedPermission, boolean value) { public Node.Builder newBuilderFromSerialisedNode(@NonNull String serialisedPermission, boolean value) {
return NodeFactory.builderFromSerialisedNode(serialisedPermission, value); return NodeFactory.builderFromSerializedNode(serialisedPermission, value);
} }
@Override @Override

View File

@ -3,14 +3,12 @@ package me.lucko.luckperms.common.config;
import lombok.Getter; import lombok.Getter;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.gson.Gson; import com.google.gson.Gson;
import com.google.gson.GsonBuilder; import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject; import com.google.gson.JsonObject;
import me.lucko.luckperms.api.context.ImmutableContextSet; import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.common.core.NodeModel;
import java.io.BufferedReader; import java.io.BufferedReader;
import java.io.BufferedWriter; import java.io.BufferedWriter;
@ -18,7 +16,6 @@ import java.io.File;
import java.io.IOException; import java.io.IOException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.nio.file.Files; import java.nio.file.Files;
import java.util.Map;
@RequiredArgsConstructor @RequiredArgsConstructor
public class StaticContextsFile { public class StaticContextsFile {
@ -49,21 +46,7 @@ public class StaticContextsFile {
} }
JsonObject contexts = data.get("context").getAsJsonObject(); JsonObject contexts = data.get("context").getAsJsonObject();
ImmutableSetMultimap.Builder<String, String> map = ImmutableSetMultimap.builder(); contextSet = NodeModel.deserializeContextSet(contexts).makeImmutable();
for (Map.Entry<String, JsonElement> e : contexts.entrySet()) {
JsonElement val = e.getValue();
if (val.isJsonArray()) {
JsonArray vals = val.getAsJsonArray();
for (JsonElement element : vals) {
map.put(e.getKey(), element.getAsString());
}
} else {
map.put(e.getKey(), val.getAsString());
}
}
contextSet = ImmutableContextSet.fromMultimap(map.build());
} catch (IOException e) { } catch (IOException e) {
e.printStackTrace(); e.printStackTrace();
} }

View File

@ -43,12 +43,12 @@ import java.util.Map;
@UtilityClass @UtilityClass
public class NodeFactory { public class NodeFactory {
private static final LoadingCache<String, Node> CACHE = Caffeine.newBuilder() private static final LoadingCache<String, Node> CACHE = Caffeine.newBuilder()
.build(s -> builderFromSerialisedNode(s, true).build()); .build(s -> builderFromSerializedNode(s, true).build());
private static final LoadingCache<String, Node> CACHE_NEGATED = Caffeine.newBuilder() private static final LoadingCache<String, Node> CACHE_NEGATED = Caffeine.newBuilder()
.build(s -> builderFromSerialisedNode(s, false).build()); .build(s -> builderFromSerializedNode(s, false).build());
public static Node fromSerialisedNode(String s, Boolean b) { public static Node fromSerializedNode(String s, Boolean b) {
try { try {
return b ? CACHE.get(s) : CACHE_NEGATED.get(s); return b ? CACHE.get(s) : CACHE_NEGATED.get(s);
} catch (Exception e) { } catch (Exception e) {
@ -60,7 +60,7 @@ public class NodeFactory {
return new NodeBuilder(s, false); return new NodeBuilder(s, false);
} }
public static Node.Builder builderFromSerialisedNode(String s, Boolean b) { public static Node.Builder builderFromSerializedNode(String s, Boolean b) {
// if contains / // if contains /
if (Patterns.compileDelimitedMatcher("/", "\\").matcher(s).find()) { if (Patterns.compileDelimitedMatcher("/", "\\").matcher(s).find()) {
List<String> parts = Splitter.on(Patterns.compileDelimitedMatcher("/", "\\")).limit(2).splitToList(s); List<String> parts = Splitter.on(Patterns.compileDelimitedMatcher("/", "\\")).limit(2).splitToList(s);

View File

@ -0,0 +1,168 @@
/*
* Copyright (c) 2017 Lucko (Luck) <luck@lucko.me>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.core;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.context.ContextSet;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.api.context.MutableContextSet;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
/**
 * A stripped-down, immutable version of {@link Node}, without the methods and cached values
 * used for permission lookups. Intended as an intermediate form when (de)serialising nodes
 * to and from storage.
 *
 * All values are non-null.
 */
@Getter
@ToString
@EqualsAndHashCode
@AllArgsConstructor(staticName = "of")
public final class NodeModel {
    private static final Gson GSON = new Gson();

    /**
     * Captures the state of a live node as a NodeModel.
     * An absent server/world defaults to "global", and permanent nodes get an expiry of 0.
     *
     * @param node the node to copy state from
     * @return a NodeModel mirroring the given node
     */
    public static NodeModel fromNode(Node node) {
        return NodeModel.of(
                node.getPermission(),
                node.getValue(),
                node.getServer().orElse("global"),
                node.getWorld().orElse("global"),
                node.isTemporary() ? node.getExpiryUnixTime() : 0L,
                node.getContexts().makeImmutable()
        );
    }

    /**
     * Reconstructs a NodeModel from its storage representation.
     *
     * @param contexts the contexts as a serialised JSON object string,
     *                 as produced by {@link #serializeContext()}
     * @return the deserialised model
     */
    public static NodeModel deserialize(String permission, boolean value, String server, String world, long expiry, String contexts) {
        JsonObject context = GSON.fromJson(contexts, JsonObject.class);
        // Gson returns null for a null/empty/"null" input string - treat that as
        // "no contexts" rather than throwing a NullPointerException below.
        ImmutableContextSet contextSet = context == null
                ? ImmutableContextSet.empty()
                : deserializeContextSet(context).makeImmutable();
        return of(permission, value, server, world, expiry, contextSet);
    }

    private final String permission;
    private final boolean value;
    private final String server;
    private final String world;
    private final long expiry;
    private final ImmutableContextSet contexts;

    /**
     * Serialises this model's contexts to a JSON string, the inverse of the
     * contexts handling in {@link #deserialize(String, boolean, String, String, long, String)}.
     *
     * @return the contexts as a JSON object string
     */
    public String serializeContext() {
        return GSON.toJson(getContextsAsJson());
    }

    /**
     * Gets this model's contexts as a JSON object.
     *
     * @return the contexts in JSON form
     */
    public JsonObject getContextsAsJson() {
        return serializeContextSet(contexts);
    }

    /**
     * Converts this model back to a live {@link Node}.
     *
     * @return a node with the same state as this model
     */
    public Node toNode() {
        Node.Builder builder = NodeFactory.newBuilder(permission);
        builder.setValue(value);
        builder.setServer(server);
        builder.setWorld(world);
        builder.setExpiry(expiry);
        builder.withExtraContext(contexts);
        return builder.build();
    }

    // "with"-style setters: each returns a new instance, this model is immutable.

    public NodeModel setPermission(String permission) {
        return of(permission, value, server, world, expiry, contexts);
    }

    public NodeModel setValue(boolean value) {
        return of(permission, value, server, world, expiry, contexts);
    }

    public NodeModel setServer(String server) {
        return of(permission, value, server, world, expiry, contexts);
    }

    public NodeModel setWorld(String world) {
        return of(permission, value, server, world, expiry, contexts);
    }

    public NodeModel setExpiry(long expiry) {
        return of(permission, value, server, world, expiry, contexts);
    }

    public NodeModel setContexts(ImmutableContextSet contexts) {
        return of(permission, value, server, world, expiry, contexts);
    }

    /**
     * Serialises a context set to JSON. Single-value keys are written as plain
     * string properties, multi-value keys as arrays. Keys with no values are omitted.
     *
     * @param contextSet the set to serialise
     * @return the JSON representation
     */
    public static JsonObject serializeContextSet(ContextSet contextSet) {
        JsonObject data = new JsonObject();
        Map<String, Collection<String>> map = contextSet.toMultimap().asMap();

        map.forEach((k, v) -> {
            List<String> values = new ArrayList<>(v);
            int size = values.size();

            if (size == 1) {
                data.addProperty(k, values.get(0));
            } else if (size > 1) {
                JsonArray arr = new JsonArray();
                for (String s : values) {
                    arr.add(new JsonPrimitive(s));
                }
                data.add(k, arr);
            }
        });

        return data;
    }

    /**
     * Deserialises a context set from JSON, accepting both the plain-string and
     * array forms produced by {@link #serializeContextSet(ContextSet)}.
     *
     * @param element the JSON to read, must be a JSON object
     * @return the parsed context set
     * @throws IllegalArgumentException if the element is not a JSON object
     */
    public static MutableContextSet deserializeContextSet(JsonElement element) {
        Preconditions.checkArgument(element.isJsonObject(), "element is not a json object: %s", element);
        JsonObject data = element.getAsJsonObject();
        ImmutableSetMultimap.Builder<String, String> map = ImmutableSetMultimap.builder();

        for (Map.Entry<String, JsonElement> e : data.entrySet()) {
            String k = e.getKey();
            JsonElement v = e.getValue();

            if (v.isJsonArray()) {
                JsonArray values = v.getAsJsonArray();
                for (JsonElement value : values) {
                    map.put(k, value.getAsString());
                }
            } else {
                map.put(k, v.getAsString());
            }
        }

        return MutableContextSet.fromMultimap(map.build());
    }
}

View File

@ -37,7 +37,7 @@ public class LogicParser {
try { try {
ScriptEngine engine = Scripting.getScriptEngine(); ScriptEngine engine = Scripting.getScriptEngine();
String expression = generateExpression(s, s1 -> holder.hasPermission(NodeFactory.fromSerialisedNode(s1, true)) == tristate); String expression = generateExpression(s, s1 -> holder.hasPermission(NodeFactory.fromSerializedNode(s1, true)) == tristate);
if (engine == null) { if (engine == null) {
throw new NullPointerException("script engine"); throw new NullPointerException("script engine");
} }

View File

@ -89,11 +89,11 @@ public class Rule {
// The holder meets all of the requirements of this rule. // The holder meets all of the requirements of this rule.
for (String s : toTake) { for (String s : toTake) {
user.unsetPermission(NodeFactory.fromSerialisedNode(s, true)); user.unsetPermission(NodeFactory.fromSerializedNode(s, true));
} }
for (String s : toGive) { for (String s : toGive) {
user.setPermission(NodeFactory.fromSerialisedNode(s, true)); user.setPermission(NodeFactory.fromSerializedNode(s, true));
} }
if (setPrimaryGroup != null) { if (setPrimaryGroup != null) {

View File

@ -23,7 +23,6 @@
package me.lucko.luckperms.common.storage.backing; package me.lucko.luckperms.common.storage.backing;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import com.google.gson.Gson; import com.google.gson.Gson;
import com.google.gson.GsonBuilder; import com.google.gson.GsonBuilder;
@ -34,6 +33,8 @@ import com.google.gson.JsonPrimitive;
import me.lucko.luckperms.api.HeldPermission; import me.lucko.luckperms.api.HeldPermission;
import me.lucko.luckperms.api.Node; import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.common.core.NodeModel;
import me.lucko.luckperms.common.core.PriorityComparator; import me.lucko.luckperms.common.core.PriorityComparator;
import me.lucko.luckperms.common.core.UserIdentifier; import me.lucko.luckperms.common.core.UserIdentifier;
import me.lucko.luckperms.common.core.model.Group; import me.lucko.luckperms.common.core.model.Group;
@ -41,7 +42,6 @@ import me.lucko.luckperms.common.core.model.Track;
import me.lucko.luckperms.common.core.model.User; import me.lucko.luckperms.common.core.model.User;
import me.lucko.luckperms.common.managers.impl.GenericUserManager; import me.lucko.luckperms.common.managers.impl.GenericUserManager;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin; import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.utils.NodeDataHolder;
import me.lucko.luckperms.common.storage.holder.NodeHeldPermission; import me.lucko.luckperms.common.storage.holder.NodeHeldPermission;
import java.io.BufferedReader; import java.io.BufferedReader;
@ -51,7 +51,6 @@ import java.io.IOException;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import java.nio.file.Files; import java.nio.file.Files;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet; import java.util.HashSet;
import java.util.LinkedHashSet; import java.util.LinkedHashSet;
import java.util.List; import java.util.List;
@ -108,8 +107,8 @@ public class JSONBacking extends FlatfileBacking {
String name = object.get("name").getAsString(); String name = object.get("name").getAsString();
user.getPrimaryGroup().setStoredValue(object.get("primaryGroup").getAsString()); user.getPrimaryGroup().setStoredValue(object.get("primaryGroup").getAsString());
Set<NodeDataHolder> data = deserializePermissions(object.get("permissions").getAsJsonArray()); Set<NodeModel> data = deserializePermissions(object.get("permissions").getAsJsonArray());
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet()); Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
user.setNodes(nodes); user.setNodes(nodes);
boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false); boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false);
@ -172,7 +171,7 @@ public class JSONBacking extends FlatfileBacking {
data.addProperty("name", user.getName()); data.addProperty("name", user.getName());
data.addProperty("primaryGroup", user.getPrimaryGroup().getStoredValue()); data.addProperty("primaryGroup", user.getPrimaryGroup().getStoredValue());
Set<NodeDataHolder> nodes = user.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toCollection(LinkedHashSet::new)); Set<NodeModel> nodes = user.getNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
data.add("permissions", serializePermissions(nodes)); data.add("permissions", serializePermissions(nodes));
return writeElementToFile(userFile, data); return writeElementToFile(userFile, data);
@ -191,7 +190,7 @@ public class JSONBacking extends FlatfileBacking {
for (File file : files) { for (File file : files) {
registerFileAction("users", file); registerFileAction("users", file);
Set<NodeDataHolder> nodes = new HashSet<>(); Set<NodeModel> nodes = new HashSet<>();
readObjectFromFile(file, object -> { readObjectFromFile(file, object -> {
nodes.addAll(deserializePermissions(object.get("permissions").getAsJsonArray())); nodes.addAll(deserializePermissions(object.get("permissions").getAsJsonArray()));
return true; return true;
@ -199,7 +198,7 @@ public class JSONBacking extends FlatfileBacking {
boolean shouldDelete = false; boolean shouldDelete = false;
if (nodes.size() == 1) { if (nodes.size() == 1) {
for (NodeDataHolder e : nodes) { for (NodeModel e : nodes) {
// There's only one // There's only one
shouldDelete = e.getPermission().equalsIgnoreCase("group.default") && e.isValue(); shouldDelete = e.getPermission().equalsIgnoreCase("group.default") && e.isValue();
} }
@ -224,14 +223,14 @@ public class JSONBacking extends FlatfileBacking {
registerFileAction("users", file); registerFileAction("users", file);
UUID holder = UUID.fromString(file.getName().substring(0, file.getName().length() - 5)); UUID holder = UUID.fromString(file.getName().substring(0, file.getName().length() - 5));
Set<NodeDataHolder> nodes = new HashSet<>(); Set<NodeModel> nodes = new HashSet<>();
readObjectFromFile(file, object -> { readObjectFromFile(file, object -> {
nodes.addAll(deserializePermissions(object.get("permissions").getAsJsonArray())); nodes.addAll(deserializePermissions(object.get("permissions").getAsJsonArray()));
return true; return true;
}); });
for (NodeDataHolder e : nodes) { for (NodeModel e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) { if (!e.getPermission().equalsIgnoreCase(permission)) {
continue; continue;
} }
@ -255,8 +254,8 @@ public class JSONBacking extends FlatfileBacking {
if (groupFile.exists()) { if (groupFile.exists()) {
return readObjectFromFile(groupFile, object -> { return readObjectFromFile(groupFile, object -> {
Set<NodeDataHolder> data = deserializePermissions(object.get("permissions").getAsJsonArray()); Set<NodeModel> data = deserializePermissions(object.get("permissions").getAsJsonArray());
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet()); Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
group.setNodes(nodes); group.setNodes(nodes);
return true; return true;
}); });
@ -271,7 +270,7 @@ public class JSONBacking extends FlatfileBacking {
JsonObject data = new JsonObject(); JsonObject data = new JsonObject();
data.addProperty("name", group.getName()); data.addProperty("name", group.getName());
Set<NodeDataHolder> nodes = group.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toCollection(LinkedHashSet::new)); Set<NodeModel> nodes = group.getNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
data.add("permissions", serializePermissions(nodes)); data.add("permissions", serializePermissions(nodes));
return writeElementToFile(groupFile, data); return writeElementToFile(groupFile, data);
@ -292,8 +291,8 @@ public class JSONBacking extends FlatfileBacking {
registerFileAction("groups", groupFile); registerFileAction("groups", groupFile);
return groupFile.exists() && readObjectFromFile(groupFile, object -> { return groupFile.exists() && readObjectFromFile(groupFile, object -> {
Set<NodeDataHolder> data = deserializePermissions(object.get("permissions").getAsJsonArray()); Set<NodeModel> data = deserializePermissions(object.get("permissions").getAsJsonArray());
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet()); Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
group.setNodes(nodes); group.setNodes(nodes);
return true; return true;
}); });
@ -322,7 +321,7 @@ public class JSONBacking extends FlatfileBacking {
JsonObject data = new JsonObject(); JsonObject data = new JsonObject();
data.addProperty("name", group.getName()); data.addProperty("name", group.getName());
Set<NodeDataHolder> nodes = group.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toCollection(LinkedHashSet::new)); Set<NodeModel> nodes = group.getNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
data.add("permissions", serializePermissions(nodes)); data.add("permissions", serializePermissions(nodes));
return writeElementToFile(groupFile, data); return writeElementToFile(groupFile, data);
}, false); }, false);
@ -342,13 +341,13 @@ public class JSONBacking extends FlatfileBacking {
registerFileAction("groups", file); registerFileAction("groups", file);
String holder = file.getName().substring(0, file.getName().length() - 5); String holder = file.getName().substring(0, file.getName().length() - 5);
Set<NodeDataHolder> nodes = new HashSet<>(); Set<NodeModel> nodes = new HashSet<>();
readObjectFromFile(file, element -> { readObjectFromFile(file, element -> {
nodes.addAll(deserializePermissions(element.get("permissions").getAsJsonArray())); nodes.addAll(deserializePermissions(element.get("permissions").getAsJsonArray()));
return true; return true;
}); });
for (NodeDataHolder e : nodes) { for (NodeModel e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) { if (!e.getPermission().equalsIgnoreCase(permission)) {
continue; continue;
} }
@ -459,8 +458,8 @@ public class JSONBacking extends FlatfileBacking {
} }
} }
public static Set<NodeDataHolder> deserializePermissions(JsonArray permissionsSection) { public static Set<NodeModel> deserializePermissions(JsonArray permissionsSection) {
Set<NodeDataHolder> nodes = new HashSet<>(); Set<NodeModel> nodes = new HashSet<>();
for (JsonElement ent : permissionsSection) { for (JsonElement ent : permissionsSection) {
if (!ent.isJsonObject()) { if (!ent.isJsonObject()) {
@ -481,7 +480,7 @@ public class JSONBacking extends FlatfileBacking {
String server = "global"; String server = "global";
String world = "global"; String world = "global";
long expiry = 0L; long expiry = 0L;
ImmutableSetMultimap context = ImmutableSetMultimap.of(); ImmutableContextSet context = ImmutableContextSet.empty();
if (attributes.has("value")) { if (attributes.has("value")) {
value = attributes.get("value").getAsBoolean(); value = attributes.get("value").getAsBoolean();
@ -498,33 +497,19 @@ public class JSONBacking extends FlatfileBacking {
if (attributes.has("context") && attributes.get("context").isJsonObject()) { if (attributes.has("context") && attributes.get("context").isJsonObject()) {
JsonObject contexts = attributes.get("context").getAsJsonObject(); JsonObject contexts = attributes.get("context").getAsJsonObject();
ImmutableSetMultimap.Builder<String, String> map = ImmutableSetMultimap.builder(); context = NodeModel.deserializeContextSet(contexts).makeImmutable();
for (Map.Entry<String, JsonElement> e : contexts.entrySet()) {
JsonElement val = e.getValue();
if (val.isJsonArray()) {
JsonArray vals = val.getAsJsonArray();
for (JsonElement element : vals) {
map.put(e.getKey(), element.getAsString());
}
} else {
map.put(e.getKey(), val.getAsString());
}
}
context = map.build();
} }
nodes.add(NodeDataHolder.of(permission, value, server, world, expiry, context)); nodes.add(NodeModel.of(permission, value, server, world, expiry, context));
} }
return nodes; return nodes;
} }
public static JsonArray serializePermissions(Set<NodeDataHolder> nodes) { public static JsonArray serializePermissions(Set<NodeModel> nodes) {
List<JsonObject> data = new ArrayList<>(); List<JsonObject> data = new ArrayList<>();
for (NodeDataHolder node : nodes) { for (NodeModel node : nodes) {
JsonObject attributes = new JsonObject(); JsonObject attributes = new JsonObject();
attributes.addProperty("value", node.isValue()); attributes.addProperty("value", node.isValue());
@ -541,25 +526,7 @@ public class JSONBacking extends FlatfileBacking {
} }
if (!node.getContexts().isEmpty()) { if (!node.getContexts().isEmpty()) {
JsonObject context = new JsonObject(); attributes.add("context", node.getContextsAsJson());
Map<String, Collection<String>> map = node.getContexts().asMap();
for (Map.Entry<String, Collection<String>> e : map.entrySet()) {
List<String> vals = new ArrayList<>(e.getValue());
int size = vals.size();
if (size == 1) {
context.addProperty(e.getKey(), vals.get(0));
} else if (size > 1) {
JsonArray arr = new JsonArray();
for (String s : vals) {
arr.add(new JsonPrimitive(s));
}
context.add(e.getKey(), arr);
}
}
attributes.add("context", context);
} }
JsonObject perm = new JsonObject(); JsonObject perm = new JsonObject();

View File

@ -237,7 +237,7 @@ public class MongoDBBacking extends AbstractBacking {
// User exists, let's load. // User exists, let's load.
Document d = cursor.next(); Document d = cursor.next();
user.setNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream() user.setNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream()
.map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue())) .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue()))
.collect(Collectors.toSet()) .collect(Collectors.toSet())
); );
user.getPrimaryGroup().setStoredValue(d.getString("primaryGroup")); user.getPrimaryGroup().setStoredValue(d.getString("primaryGroup"));
@ -341,7 +341,7 @@ public class MongoDBBacking extends AbstractBacking {
Map<String, Boolean> perms = revert((Map<String, Boolean>) d.get("perms")); Map<String, Boolean> perms = revert((Map<String, Boolean>) d.get("perms"));
for (Map.Entry<String, Boolean> e : perms.entrySet()) { for (Map.Entry<String, Boolean> e : perms.entrySet()) {
Node node = NodeFactory.fromSerialisedNode(e.getKey(), e.getValue()); Node node = NodeFactory.fromSerializedNode(e.getKey(), e.getValue());
if (!node.getPermission().equalsIgnoreCase(permission)) { if (!node.getPermission().equalsIgnoreCase(permission)) {
continue; continue;
} }
@ -369,7 +369,7 @@ public class MongoDBBacking extends AbstractBacking {
// Group exists, let's load. // Group exists, let's load.
Document d = cursor.next(); Document d = cursor.next();
group.setNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream() group.setNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream()
.map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue())) .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue()))
.collect(Collectors.toSet()) .collect(Collectors.toSet())
); );
} else { } else {
@ -396,7 +396,7 @@ public class MongoDBBacking extends AbstractBacking {
Document d = cursor.next(); Document d = cursor.next();
group.setNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream() group.setNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream()
.map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue())) .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue()))
.collect(Collectors.toSet()) .collect(Collectors.toSet())
); );
return true; return true;
@ -481,7 +481,7 @@ public class MongoDBBacking extends AbstractBacking {
Map<String, Boolean> perms = revert((Map<String, Boolean>) d.get("perms")); Map<String, Boolean> perms = revert((Map<String, Boolean>) d.get("perms"));
for (Map.Entry<String, Boolean> e : perms.entrySet()) { for (Map.Entry<String, Boolean> e : perms.entrySet()) {
Node node = NodeFactory.fromSerialisedNode(e.getKey(), e.getValue()); Node node = NodeFactory.fromSerializedNode(e.getKey(), e.getValue());
if (!node.getPermission().equalsIgnoreCase(permission)) { if (!node.getPermission().equalsIgnoreCase(permission)) {
continue; continue;
} }

View File

@ -32,6 +32,7 @@ import com.google.gson.reflect.TypeToken;
import me.lucko.luckperms.api.HeldPermission; import me.lucko.luckperms.api.HeldPermission;
import me.lucko.luckperms.api.LogEntry; import me.lucko.luckperms.api.LogEntry;
import me.lucko.luckperms.api.Node; import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.common.core.NodeModel;
import me.lucko.luckperms.common.core.UserIdentifier; import me.lucko.luckperms.common.core.UserIdentifier;
import me.lucko.luckperms.common.core.model.Group; import me.lucko.luckperms.common.core.model.Group;
import me.lucko.luckperms.common.core.model.Track; import me.lucko.luckperms.common.core.model.Track;
@ -43,7 +44,6 @@ import me.lucko.luckperms.common.managers.impl.GenericUserManager;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin; import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.sqlprovider.SQLProvider; import me.lucko.luckperms.common.storage.backing.sqlprovider.SQLProvider;
import me.lucko.luckperms.common.storage.backing.utils.LegacySQLSchemaMigration; import me.lucko.luckperms.common.storage.backing.utils.LegacySQLSchemaMigration;
import me.lucko.luckperms.common.storage.backing.utils.NodeDataHolder;
import me.lucko.luckperms.common.storage.holder.NodeHeldPermission; import me.lucko.luckperms.common.storage.holder.NodeHeldPermission;
import java.io.BufferedReader; import java.io.BufferedReader;
@ -253,7 +253,7 @@ public class SQLBacking extends AbstractBacking {
User user = plugin.getUserManager().getOrMake(UserIdentifier.of(uuid, username)); User user = plugin.getUserManager().getOrMake(UserIdentifier.of(uuid, username));
user.getIoLock().lock(); user.getIoLock().lock();
try { try {
List<NodeDataHolder> data = new ArrayList<>(); List<NodeModel> data = new ArrayList<>();
AtomicReference<String> primaryGroup = new AtomicReference<>(null); AtomicReference<String> primaryGroup = new AtomicReference<>(null);
AtomicReference<String> userName = new AtomicReference<>(null); AtomicReference<String> userName = new AtomicReference<>(null);
@ -270,7 +270,7 @@ public class SQLBacking extends AbstractBacking {
String world = rs.getString("world"); String world = rs.getString("world");
long expiry = rs.getLong("expiry"); long expiry = rs.getLong("expiry");
String contexts = rs.getString("contexts"); String contexts = rs.getString("contexts");
data.add(NodeDataHolder.of(permission, value, server, world, expiry, contexts)); data.add(NodeModel.deserialize(permission, value, server, world, expiry, contexts));
} }
} }
} }
@ -315,7 +315,7 @@ public class SQLBacking extends AbstractBacking {
// If the user has any data in storage // If the user has any data in storage
if (!data.isEmpty()) { if (!data.isEmpty()) {
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet()); Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
user.setNodes(nodes); user.setNodes(nodes);
// Save back to the store if data was changed // Save back to the store if data was changed
@ -364,7 +364,7 @@ public class SQLBacking extends AbstractBacking {
} }
// Get a snapshot of current data. // Get a snapshot of current data.
Set<NodeDataHolder> remote = new HashSet<>(); Set<NodeModel> remote = new HashSet<>();
try (Connection c = provider.getConnection()) { try (Connection c = provider.getConnection()) {
try (PreparedStatement ps = c.prepareStatement(prefix.apply(USER_PERMISSIONS_SELECT))) { try (PreparedStatement ps = c.prepareStatement(prefix.apply(USER_PERMISSIONS_SELECT))) {
ps.setString(1, user.getUuid().toString()); ps.setString(1, user.getUuid().toString());
@ -377,7 +377,7 @@ public class SQLBacking extends AbstractBacking {
String world = rs.getString("world"); String world = rs.getString("world");
long expiry = rs.getLong("expiry"); long expiry = rs.getLong("expiry");
String contexts = rs.getString("contexts"); String contexts = rs.getString("contexts");
remote.add(NodeDataHolder.of(permission, value, server, world, expiry, contexts)); remote.add(NodeModel.deserialize(permission, value, server, world, expiry, contexts));
} }
} }
} }
@ -385,24 +385,24 @@ public class SQLBacking extends AbstractBacking {
return false; return false;
} }
Set<NodeDataHolder> local = user.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet()); Set<NodeModel> local = user.getNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toSet());
Map.Entry<Set<NodeDataHolder>, Set<NodeDataHolder>> diff = compareSets(local, remote); Map.Entry<Set<NodeModel>, Set<NodeModel>> diff = compareSets(local, remote);
Set<NodeDataHolder> toAdd = diff.getKey(); Set<NodeModel> toAdd = diff.getKey();
Set<NodeDataHolder> toRemove = diff.getValue(); Set<NodeModel> toRemove = diff.getValue();
if (!toRemove.isEmpty()) { if (!toRemove.isEmpty()) {
try (Connection c = provider.getConnection()) { try (Connection c = provider.getConnection()) {
try (PreparedStatement ps = c.prepareStatement(prefix.apply(USER_PERMISSIONS_DELETE_SPECIFIC))) { try (PreparedStatement ps = c.prepareStatement(prefix.apply(USER_PERMISSIONS_DELETE_SPECIFIC))) {
for (NodeDataHolder nd : toRemove) { for (NodeModel nd : toRemove) {
ps.setString(1, user.getUuid().toString()); ps.setString(1, user.getUuid().toString());
ps.setString(2, nd.getPermission()); ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.isValue()); ps.setBoolean(3, nd.isValue());
ps.setString(4, nd.getServer()); ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld()); ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry()); ps.setLong(6, nd.getExpiry());
ps.setString(7, nd.serialiseContext()); ps.setString(7, nd.serializeContext());
ps.addBatch(); ps.addBatch();
} }
ps.executeBatch(); ps.executeBatch();
@ -416,14 +416,14 @@ public class SQLBacking extends AbstractBacking {
if (!toAdd.isEmpty()) { if (!toAdd.isEmpty()) {
try (Connection c = provider.getConnection()) { try (Connection c = provider.getConnection()) {
try (PreparedStatement ps = c.prepareStatement(prefix.apply(USER_PERMISSIONS_INSERT))) { try (PreparedStatement ps = c.prepareStatement(prefix.apply(USER_PERMISSIONS_INSERT))) {
for (NodeDataHolder nd : toAdd) { for (NodeModel nd : toAdd) {
ps.setString(1, user.getUuid().toString()); ps.setString(1, user.getUuid().toString());
ps.setString(2, nd.getPermission()); ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.isValue()); ps.setBoolean(3, nd.isValue());
ps.setString(4, nd.getServer()); ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld()); ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry()); ps.setLong(6, nd.getExpiry());
ps.setString(7, nd.serialiseContext()); ps.setString(7, nd.serializeContext());
ps.addBatch(); ps.addBatch();
} }
ps.executeBatch(); ps.executeBatch();
@ -490,7 +490,7 @@ public class SQLBacking extends AbstractBacking {
long expiry = rs.getLong("expiry"); long expiry = rs.getLong("expiry");
String contexts = rs.getString("contexts"); String contexts = rs.getString("contexts");
NodeDataHolder data = NodeDataHolder.of(permission, value, server, world, expiry, contexts); NodeModel data = NodeModel.deserialize(permission, value, server, world, expiry, contexts);
held.add(NodeHeldPermission.of(holder, data)); held.add(NodeHeldPermission.of(holder, data));
} }
} }
@ -558,7 +558,7 @@ public class SQLBacking extends AbstractBacking {
Group group = plugin.getGroupManager().getOrMake(name); Group group = plugin.getGroupManager().getOrMake(name);
group.getIoLock().lock(); group.getIoLock().lock();
try { try {
List<NodeDataHolder> data = new ArrayList<>(); List<NodeModel> data = new ArrayList<>();
try (Connection c = provider.getConnection()) { try (Connection c = provider.getConnection()) {
try (PreparedStatement ps = c.prepareStatement(prefix.apply(GROUP_PERMISSIONS_SELECT))) { try (PreparedStatement ps = c.prepareStatement(prefix.apply(GROUP_PERMISSIONS_SELECT))) {
@ -572,7 +572,7 @@ public class SQLBacking extends AbstractBacking {
String world = rs.getString("world"); String world = rs.getString("world");
long expiry = rs.getLong("expiry"); long expiry = rs.getLong("expiry");
String contexts = rs.getString("contexts"); String contexts = rs.getString("contexts");
data.add(NodeDataHolder.of(permission, value, server, world, expiry, contexts)); data.add(NodeModel.deserialize(permission, value, server, world, expiry, contexts));
} }
} }
} }
@ -582,7 +582,7 @@ public class SQLBacking extends AbstractBacking {
} }
if (!data.isEmpty()) { if (!data.isEmpty()) {
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet()); Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
group.setNodes(nodes); group.setNodes(nodes);
} else { } else {
group.clearNodes(); group.clearNodes();
@ -646,7 +646,7 @@ public class SQLBacking extends AbstractBacking {
} }
// Get a snapshot of current data // Get a snapshot of current data
Set<NodeDataHolder> remote = new HashSet<>(); Set<NodeModel> remote = new HashSet<>();
try (Connection c = provider.getConnection()) { try (Connection c = provider.getConnection()) {
try (PreparedStatement ps = c.prepareStatement(prefix.apply(GROUP_PERMISSIONS_SELECT))) { try (PreparedStatement ps = c.prepareStatement(prefix.apply(GROUP_PERMISSIONS_SELECT))) {
ps.setString(1, group.getName()); ps.setString(1, group.getName());
@ -659,7 +659,7 @@ public class SQLBacking extends AbstractBacking {
String world = rs.getString("world"); String world = rs.getString("world");
long expiry = rs.getLong("expiry"); long expiry = rs.getLong("expiry");
String contexts = rs.getString("contexts"); String contexts = rs.getString("contexts");
remote.add(NodeDataHolder.of(permission, value, server, world, expiry, contexts)); remote.add(NodeModel.deserialize(permission, value, server, world, expiry, contexts));
} }
} }
} }
@ -668,24 +668,24 @@ public class SQLBacking extends AbstractBacking {
return false; return false;
} }
Set<NodeDataHolder> local = group.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet()); Set<NodeModel> local = group.getNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toSet());
Map.Entry<Set<NodeDataHolder>, Set<NodeDataHolder>> diff = compareSets(local, remote); Map.Entry<Set<NodeModel>, Set<NodeModel>> diff = compareSets(local, remote);
Set<NodeDataHolder> toAdd = diff.getKey(); Set<NodeModel> toAdd = diff.getKey();
Set<NodeDataHolder> toRemove = diff.getValue(); Set<NodeModel> toRemove = diff.getValue();
if (!toRemove.isEmpty()) { if (!toRemove.isEmpty()) {
try (Connection c = provider.getConnection()) { try (Connection c = provider.getConnection()) {
try (PreparedStatement ps = c.prepareStatement(prefix.apply(GROUP_PERMISSIONS_DELETE_SPECIFIC))) { try (PreparedStatement ps = c.prepareStatement(prefix.apply(GROUP_PERMISSIONS_DELETE_SPECIFIC))) {
for (NodeDataHolder nd : toRemove) { for (NodeModel nd : toRemove) {
ps.setString(1, group.getName()); ps.setString(1, group.getName());
ps.setString(2, nd.getPermission()); ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.isValue()); ps.setBoolean(3, nd.isValue());
ps.setString(4, nd.getServer()); ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld()); ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry()); ps.setLong(6, nd.getExpiry());
ps.setString(7, nd.serialiseContext()); ps.setString(7, nd.serializeContext());
ps.addBatch(); ps.addBatch();
} }
ps.executeBatch(); ps.executeBatch();
@ -699,14 +699,14 @@ public class SQLBacking extends AbstractBacking {
if (!toAdd.isEmpty()) { if (!toAdd.isEmpty()) {
try (Connection c = provider.getConnection()) { try (Connection c = provider.getConnection()) {
try (PreparedStatement ps = c.prepareStatement(prefix.apply(GROUP_PERMISSIONS_INSERT))) { try (PreparedStatement ps = c.prepareStatement(prefix.apply(GROUP_PERMISSIONS_INSERT))) {
for (NodeDataHolder nd : toAdd) { for (NodeModel nd : toAdd) {
ps.setString(1, group.getName()); ps.setString(1, group.getName());
ps.setString(2, nd.getPermission()); ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.isValue()); ps.setBoolean(3, nd.isValue());
ps.setString(4, nd.getServer()); ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld()); ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry()); ps.setLong(6, nd.getExpiry());
ps.setString(7, nd.serialiseContext()); ps.setString(7, nd.serializeContext());
ps.addBatch(); ps.addBatch();
} }
ps.executeBatch(); ps.executeBatch();
@ -764,7 +764,7 @@ public class SQLBacking extends AbstractBacking {
long expiry = rs.getLong("expiry"); long expiry = rs.getLong("expiry");
String contexts = rs.getString("contexts"); String contexts = rs.getString("contexts");
NodeDataHolder data = NodeDataHolder.of(permission, value, server, world, expiry, contexts); NodeModel data = NodeModel.deserialize(permission, value, server, world, expiry, contexts);
held.add(NodeHeldPermission.of(holder, data)); held.add(NodeHeldPermission.of(holder, data));
} }
} }
@ -1040,14 +1040,14 @@ public class SQLBacking extends AbstractBacking {
* @param remote the remote set * @param remote the remote set
* @return the entries to add to remote, and the entries to remove from remote * @return the entries to add to remote, and the entries to remove from remote
*/ */
private static Map.Entry<Set<NodeDataHolder>, Set<NodeDataHolder>> compareSets(Set<NodeDataHolder> local, Set<NodeDataHolder> remote) { private static Map.Entry<Set<NodeModel>, Set<NodeModel>> compareSets(Set<NodeModel> local, Set<NodeModel> remote) {
// entries in local but not remote need to be added // entries in local but not remote need to be added
// entries in remote but not local need to be removed // entries in remote but not local need to be removed
Set<NodeDataHolder> toAdd = new HashSet<>(local); Set<NodeModel> toAdd = new HashSet<>(local);
toAdd.removeAll(remote); toAdd.removeAll(remote);
Set<NodeDataHolder> toRemove = new HashSet<>(remote); Set<NodeModel> toRemove = new HashSet<>(remote);
toRemove.removeAll(local); toRemove.removeAll(local);
return Maps.immutableEntry(toAdd, toRemove); return Maps.immutableEntry(toAdd, toRemove);

View File

@ -28,13 +28,13 @@ import com.google.common.collect.Iterables;
import me.lucko.luckperms.api.HeldPermission; import me.lucko.luckperms.api.HeldPermission;
import me.lucko.luckperms.api.Node; import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.common.core.NodeModel;
import me.lucko.luckperms.common.core.UserIdentifier; import me.lucko.luckperms.common.core.UserIdentifier;
import me.lucko.luckperms.common.core.model.Group; import me.lucko.luckperms.common.core.model.Group;
import me.lucko.luckperms.common.core.model.Track; import me.lucko.luckperms.common.core.model.Track;
import me.lucko.luckperms.common.core.model.User; import me.lucko.luckperms.common.core.model.User;
import me.lucko.luckperms.common.managers.impl.GenericUserManager; import me.lucko.luckperms.common.managers.impl.GenericUserManager;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin; import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.utils.NodeDataHolder;
import me.lucko.luckperms.common.storage.holder.NodeHeldPermission; import me.lucko.luckperms.common.storage.holder.NodeHeldPermission;
import org.yaml.snakeyaml.DumperOptions; import org.yaml.snakeyaml.DumperOptions;
@ -109,8 +109,8 @@ public class YAMLBacking extends FlatfileBacking {
String name = (String) values.get("name"); String name = (String) values.get("name");
user.getPrimaryGroup().setStoredValue((String) values.get("primary-group")); user.getPrimaryGroup().setStoredValue((String) values.get("primary-group"));
Set<NodeDataHolder> data = deserializePermissions((List<Object>) values.get("permissions")); Set<NodeModel> data = deserializePermissions((List<Object>) values.get("permissions"));
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet()); Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
user.setNodes(nodes); user.setNodes(nodes);
boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false); boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false);
@ -171,7 +171,7 @@ public class YAMLBacking extends FlatfileBacking {
values.put("name", user.getName()); values.put("name", user.getName());
values.put("primary-group", user.getPrimaryGroup().getStoredValue()); values.put("primary-group", user.getPrimaryGroup().getStoredValue());
Set<NodeDataHolder> data = user.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toCollection(LinkedHashSet::new)); Set<NodeModel> data = user.getNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
values.put("permissions", serializePermissions(data)); values.put("permissions", serializePermissions(data));
return writeMapToFile(userFile, values); return writeMapToFile(userFile, values);
@ -190,7 +190,7 @@ public class YAMLBacking extends FlatfileBacking {
for (File file : files) { for (File file : files) {
registerFileAction("users", file); registerFileAction("users", file);
Set<NodeDataHolder> nodes = new HashSet<>(); Set<NodeModel> nodes = new HashSet<>();
readMapFromFile(file, values -> { readMapFromFile(file, values -> {
nodes.addAll(deserializePermissions((List<Object>) values.get("permissions"))); nodes.addAll(deserializePermissions((List<Object>) values.get("permissions")));
return true; return true;
@ -198,7 +198,7 @@ public class YAMLBacking extends FlatfileBacking {
boolean shouldDelete = false; boolean shouldDelete = false;
if (nodes.size() == 1) { if (nodes.size() == 1) {
for (NodeDataHolder e : nodes) { for (NodeModel e : nodes) {
// There's only one // There's only one
shouldDelete = e.getPermission().equalsIgnoreCase("group.default") && e.isValue(); shouldDelete = e.getPermission().equalsIgnoreCase("group.default") && e.isValue();
} }
@ -223,13 +223,13 @@ public class YAMLBacking extends FlatfileBacking {
registerFileAction("users", file); registerFileAction("users", file);
UUID holder = UUID.fromString(file.getName().substring(0, file.getName().length() - 4)); UUID holder = UUID.fromString(file.getName().substring(0, file.getName().length() - 4));
Set<NodeDataHolder> nodes = new HashSet<>(); Set<NodeModel> nodes = new HashSet<>();
readMapFromFile(file, values -> { readMapFromFile(file, values -> {
nodes.addAll(deserializePermissions((List<Object>) values.get("permissions"))); nodes.addAll(deserializePermissions((List<Object>) values.get("permissions")));
return true; return true;
}); });
for (NodeDataHolder e : nodes) { for (NodeModel e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) { if (!e.getPermission().equalsIgnoreCase(permission)) {
continue; continue;
} }
@ -253,8 +253,8 @@ public class YAMLBacking extends FlatfileBacking {
if (groupFile.exists()) { if (groupFile.exists()) {
return readMapFromFile(groupFile, values -> { return readMapFromFile(groupFile, values -> {
Set<NodeDataHolder> data = deserializePermissions((List<Object>) values.get("permissions")); Set<NodeModel> data = deserializePermissions((List<Object>) values.get("permissions"));
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet()); Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
group.setNodes(nodes); group.setNodes(nodes);
return true; return true;
}); });
@ -268,7 +268,7 @@ public class YAMLBacking extends FlatfileBacking {
Map<String, Object> values = new LinkedHashMap<>(); Map<String, Object> values = new LinkedHashMap<>();
values.put("name", group.getName()); values.put("name", group.getName());
Set<NodeDataHolder> data = group.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toCollection(LinkedHashSet::new)); Set<NodeModel> data = group.getNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
values.put("permissions", serializePermissions(data)); values.put("permissions", serializePermissions(data));
return writeMapToFile(groupFile, values); return writeMapToFile(groupFile, values);
} }
@ -288,8 +288,8 @@ public class YAMLBacking extends FlatfileBacking {
registerFileAction("groups", groupFile); registerFileAction("groups", groupFile);
return groupFile.exists() && readMapFromFile(groupFile, values -> { return groupFile.exists() && readMapFromFile(groupFile, values -> {
Set<NodeDataHolder> data = deserializePermissions((List<Object>) values.get("permissions")); Set<NodeModel> data = deserializePermissions((List<Object>) values.get("permissions"));
Set<Node> nodes = data.stream().map(NodeDataHolder::toNode).collect(Collectors.toSet()); Set<Node> nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet());
group.setNodes(nodes); group.setNodes(nodes);
return true; return true;
}); });
@ -318,7 +318,7 @@ public class YAMLBacking extends FlatfileBacking {
Map<String, Object> values = new LinkedHashMap<>(); Map<String, Object> values = new LinkedHashMap<>();
values.put("name", group.getName()); values.put("name", group.getName());
Set<NodeDataHolder> data = group.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toCollection(LinkedHashSet::new)); Set<NodeModel> data = group.getNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new));
values.put("permissions", serializePermissions(data)); values.put("permissions", serializePermissions(data));
return writeMapToFile(groupFile, values); return writeMapToFile(groupFile, values);
}, false); }, false);
@ -338,13 +338,13 @@ public class YAMLBacking extends FlatfileBacking {
registerFileAction("groups", file); registerFileAction("groups", file);
String holder = file.getName().substring(0, file.getName().length() - 4); String holder = file.getName().substring(0, file.getName().length() - 4);
Set<NodeDataHolder> nodes = new HashSet<>(); Set<NodeModel> nodes = new HashSet<>();
readMapFromFile(file, values -> { readMapFromFile(file, values -> {
nodes.addAll(deserializePermissions((List<Object>) values.get("permissions"))); nodes.addAll(deserializePermissions((List<Object>) values.get("permissions")));
return true; return true;
}); });
for (NodeDataHolder e : nodes) { for (NodeModel e : nodes) {
if (!e.getPermission().equalsIgnoreCase(permission)) { if (!e.getPermission().equalsIgnoreCase(permission)) {
continue; continue;
} }
@ -437,8 +437,8 @@ public class YAMLBacking extends FlatfileBacking {
} }
} }
public static Set<NodeDataHolder> deserializePermissions(List<Object> permissionsSection) { public static Set<NodeModel> deserializePermissions(List<Object> permissionsSection) {
Set<NodeDataHolder> nodes = new HashSet<>(); Set<NodeModel> nodes = new HashSet<>();
for (Object perm : permissionsSection) { for (Object perm : permissionsSection) {
@ -498,17 +498,17 @@ public class YAMLBacking extends FlatfileBacking {
context = map.build(); context = map.build();
} }
nodes.add(NodeDataHolder.of(permission, value, server, world, expiry, context)); nodes.add(NodeModel.of(permission, value, server, world, expiry, context));
} }
} }
return nodes; return nodes;
} }
public static List<Map<String, Object>> serializePermissions(Set<NodeDataHolder> nodes) { public static List<Map<String, Object>> serializePermissions(Set<NodeModel> nodes) {
List<Map<String, Object>> data = new ArrayList<>(); List<Map<String, Object>> data = new ArrayList<>();
for (NodeDataHolder node : nodes) { for (NodeModel node : nodes) {
Map<String, Object> attributes = new LinkedHashMap<>(); Map<String, Object> attributes = new LinkedHashMap<>();
attributes.put("value", node.isValue()); attributes.put("value", node.isValue());

View File

@ -28,6 +28,7 @@ import com.google.gson.JsonElement;
import com.google.gson.JsonObject; import com.google.gson.JsonObject;
import me.lucko.luckperms.common.core.NodeFactory; import me.lucko.luckperms.common.core.NodeFactory;
import me.lucko.luckperms.common.core.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin; import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.JSONBacking; import me.lucko.luckperms.common.storage.backing.JSONBacking;
@ -79,9 +80,9 @@ public class LegacyJSONSchemaMigration implements Runnable {
return true; return true;
}); });
Set<NodeDataHolder> nodes = perms.entrySet().stream() Set<NodeModel> nodes = perms.entrySet().stream()
.map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue())) .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue()))
.map(NodeDataHolder::fromNode) .map(NodeModel::fromNode)
.collect(Collectors.toCollection(LinkedHashSet::new)); .collect(Collectors.toCollection(LinkedHashSet::new));
if (!replacementFile.exists()) { if (!replacementFile.exists()) {
@ -133,9 +134,9 @@ public class LegacyJSONSchemaMigration implements Runnable {
return true; return true;
}); });
Set<NodeDataHolder> nodes = perms.entrySet().stream() Set<NodeModel> nodes = perms.entrySet().stream()
.map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue())) .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue()))
.map(NodeDataHolder::fromNode) .map(NodeModel::fromNode)
.collect(Collectors.toCollection(LinkedHashSet::new)); .collect(Collectors.toCollection(LinkedHashSet::new));
if (!replacementFile.exists()) { if (!replacementFile.exists()) {

View File

@ -28,6 +28,7 @@ import com.google.common.collect.Lists;
import com.google.gson.reflect.TypeToken; import com.google.gson.reflect.TypeToken;
import me.lucko.luckperms.common.core.NodeFactory; import me.lucko.luckperms.common.core.NodeFactory;
import me.lucko.luckperms.common.core.NodeModel;
import me.lucko.luckperms.common.storage.backing.SQLBacking; import me.lucko.luckperms.common.storage.backing.SQLBacking;
import java.lang.reflect.Type; import java.lang.reflect.Type;
@ -147,21 +148,21 @@ public class LegacySQLSchemaMigration implements Runnable {
continue; continue;
} }
Set<NodeDataHolder> nodes = convertedPerms.entrySet().stream() Set<NodeModel> nodes = convertedPerms.entrySet().stream()
.map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue())) .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue()))
.map(NodeDataHolder::fromNode) .map(NodeModel::fromNode)
.collect(Collectors.toSet()); .collect(Collectors.toSet());
try (Connection c = backing.getProvider().getConnection()) { try (Connection c = backing.getProvider().getConnection()) {
try (PreparedStatement ps = c.prepareStatement(backing.getPrefix().apply("INSERT INTO {prefix}user_permissions(uuid, permission, value, server, world, expiry, contexts) VALUES(?, ?, ?, ?, ?, ?, ?)"))) { try (PreparedStatement ps = c.prepareStatement(backing.getPrefix().apply("INSERT INTO {prefix}user_permissions(uuid, permission, value, server, world, expiry, contexts) VALUES(?, ?, ?, ?, ?, ?, ?)"))) {
for (NodeDataHolder nd : nodes) { for (NodeModel nd : nodes) {
ps.setString(1, uuid.toString()); ps.setString(1, uuid.toString());
ps.setString(2, nd.getPermission()); ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.isValue()); ps.setBoolean(3, nd.isValue());
ps.setString(4, nd.getServer()); ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld()); ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry()); ps.setLong(6, nd.getExpiry());
ps.setString(7, nd.serialiseContext()); ps.setString(7, nd.serializeContext());
ps.addBatch(); ps.addBatch();
} }
ps.executeBatch(); ps.executeBatch();
@ -226,21 +227,21 @@ public class LegacySQLSchemaMigration implements Runnable {
continue; continue;
} }
Set<NodeDataHolder> nodes = convertedPerms.entrySet().stream() Set<NodeModel> nodes = convertedPerms.entrySet().stream()
.map(ent -> NodeFactory.fromSerialisedNode(ent.getKey(), ent.getValue())) .map(ent -> NodeFactory.fromSerializedNode(ent.getKey(), ent.getValue()))
.map(NodeDataHolder::fromNode) .map(NodeModel::fromNode)
.collect(Collectors.toSet()); .collect(Collectors.toSet());
try (Connection c = backing.getProvider().getConnection()) { try (Connection c = backing.getProvider().getConnection()) {
try (PreparedStatement ps = c.prepareStatement(backing.getPrefix().apply("INSERT INTO {prefix}group_permissions(name, permission, value, server, world, expiry, contexts) VALUES(?, ?, ?, ?, ?, ?, ?)"))) { try (PreparedStatement ps = c.prepareStatement(backing.getPrefix().apply("INSERT INTO {prefix}group_permissions(name, permission, value, server, world, expiry, contexts) VALUES(?, ?, ?, ?, ?, ?, ?)"))) {
for (NodeDataHolder nd : nodes) { for (NodeModel nd : nodes) {
ps.setString(1, name); ps.setString(1, name);
ps.setString(2, nd.getPermission()); ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.isValue()); ps.setBoolean(3, nd.isValue());
ps.setString(4, nd.getServer()); ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld()); ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry()); ps.setLong(6, nd.getExpiry());
ps.setString(7, nd.serialiseContext()); ps.setString(7, nd.serializeContext());
ps.addBatch(); ps.addBatch();
} }
ps.executeBatch(); ps.executeBatch();

View File

@ -25,6 +25,7 @@ package me.lucko.luckperms.common.storage.backing.utils;
import lombok.RequiredArgsConstructor; import lombok.RequiredArgsConstructor;
import me.lucko.luckperms.common.core.NodeFactory; import me.lucko.luckperms.common.core.NodeFactory;
import me.lucko.luckperms.common.core.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin; import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.backing.YAMLBacking; import me.lucko.luckperms.common.storage.backing.YAMLBacking;
@ -74,9 +75,9 @@ public class LegacyYAMLSchemaMigration implements Runnable {
return true; return true;
}); });
Set<NodeDataHolder> nodes = perms.entrySet().stream() Set<NodeModel> nodes = perms.entrySet().stream()
.map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue())) .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue()))
.map(NodeDataHolder::fromNode) .map(NodeModel::fromNode)
.collect(Collectors.toCollection(LinkedHashSet::new)); .collect(Collectors.toCollection(LinkedHashSet::new));
if (!replacementFile.exists()) { if (!replacementFile.exists()) {
@ -125,9 +126,9 @@ public class LegacyYAMLSchemaMigration implements Runnable {
return true; return true;
}); });
Set<NodeDataHolder> nodes = perms.entrySet().stream() Set<NodeModel> nodes = perms.entrySet().stream()
.map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue())) .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue()))
.map(NodeDataHolder::fromNode) .map(NodeModel::fromNode)
.collect(Collectors.toCollection(LinkedHashSet::new)); .collect(Collectors.toCollection(LinkedHashSet::new));
if (!replacementFile.exists()) { if (!replacementFile.exists()) {

View File

@ -1,124 +0,0 @@
/*
* Copyright (c) 2016 Lucko (Luck) <luck@lucko.me>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.backing.utils;
import lombok.AllArgsConstructor;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSetMultimap;
import com.google.gson.Gson;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.common.core.NodeFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
@Getter
@ToString
@EqualsAndHashCode
@AllArgsConstructor(staticName = "of")
public final class NodeDataHolder {
    private static final Gson GSON = new Gson();

    /**
     * Converts a live {@link Node} into a flat, storage-friendly holder.
     *
     * <p>Absent server/world values are stored as the literal string
     * {@code "global"}, and a permanent node is stored with an expiry of
     * {@code 0}, matching what {@link #toNode()} expects back.</p>
     *
     * @param node the node to convert
     * @return a holder mirroring the node's data
     */
    public static NodeDataHolder fromNode(Node node) {
        return NodeDataHolder.of(
                node.getPermission(),
                node.getValue(),
                node.getServer().orElse("global"),
                node.getWorld().orElse("global"),
                node.isTemporary() ? node.getExpiryUnixTime() : 0L,
                ImmutableSetMultimap.copyOf(node.getContexts().toMultimap())
        );
    }

    /**
     * Creates a holder from raw storage columns, parsing the context
     * multimap from its JSON string form.
     *
     * <p>The {@code contexts} column can legitimately be SQL NULL or empty
     * (e.g. rows written before contexts were recorded); such values are
     * treated as an empty context set rather than causing an NPE.</p>
     *
     * @param permission the permission string
     * @param value      the node's value
     * @param server     the server, or "global"
     * @param world      the world, or "global"
     * @param expiry     unix expiry time, or 0 for permanent
     * @param contexts   JSON-encoded context map, may be null or empty
     * @return a holder for the given data
     */
    public static NodeDataHolder of(String permission, boolean value, String server, String world, long expiry, String contexts) {
        ImmutableSetMultimap.Builder<String, String> map = ImmutableSetMultimap.builder();

        // Guard: rs.getString("contexts") may return null, and Gson yields
        // null for null/blank input — iterating that would throw an NPE.
        if (contexts != null && !contexts.isEmpty()) {
            JsonObject context = GSON.fromJson(contexts, JsonObject.class);
            if (context != null) {
                for (Map.Entry<String, JsonElement> e : context.entrySet()) {
                    JsonElement val = e.getValue();
                    if (val.isJsonArray()) {
                        // multiple values for one key are serialized as a JSON array
                        JsonArray vals = val.getAsJsonArray();
                        for (JsonElement element : vals) {
                            map.put(e.getKey(), element.getAsString());
                        }
                    } else {
                        map.put(e.getKey(), val.getAsString());
                    }
                }
            }
        }

        return new NodeDataHolder(permission, value, server, world, expiry, map.build());
    }

    // Flattened node attributes, mirroring the storage schema columns.
    private final String permission;
    private final boolean value;
    private final String server;
    private final String world;
    // unix expiry time; 0 means the node is permanent
    private final long expiry;
    private final ImmutableSetMultimap<String, String> contexts;

    /**
     * Serializes this holder's context multimap to its JSON string form,
     * the inverse of the parsing performed by
     * {@link #of(String, boolean, String, String, long, String)}.
     *
     * <p>A key with exactly one value is written as a plain string property;
     * a key with several values is written as a JSON array.</p>
     *
     * @return the contexts encoded as a JSON object string
     */
    public String serialiseContext() {
        JsonObject context = new JsonObject();
        ImmutableMap<String, Collection<String>> map = getContexts().asMap();
        map.forEach((key, value) -> {
            List<String> vals = new ArrayList<>(value);
            int size = vals.size();

            if (size == 1) {
                context.addProperty(key, vals.get(0));
            } else if (size > 1) {
                JsonArray arr = new JsonArray();
                for (String s : vals) {
                    arr.add(new JsonPrimitive(s));
                }
                context.add(key, arr);
            }
        });

        return GSON.toJson(context);
    }

    /**
     * Rebuilds a live {@link Node} from this holder's data.
     *
     * @return the constructed node
     */
    public Node toNode() {
        Node.Builder builder = NodeFactory.newBuilder(permission);
        builder.setValue(value);
        builder.setServer(server);
        builder.setWorld(world);
        builder.setExpiry(expiry);

        for (Map.Entry<String, String> e : contexts.entries()) {
            builder.withExtraContext(e);
        }

        return builder.build();
    }
}

View File

@ -30,7 +30,7 @@ import com.google.common.collect.Multimap;
import me.lucko.luckperms.api.HeldPermission; import me.lucko.luckperms.api.HeldPermission;
import me.lucko.luckperms.api.Node; import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.common.storage.backing.utils.NodeDataHolder; import me.lucko.luckperms.common.core.NodeModel;
import java.util.Optional; import java.util.Optional;
import java.util.OptionalLong; import java.util.OptionalLong;
@ -39,8 +39,8 @@ import java.util.OptionalLong;
@EqualsAndHashCode @EqualsAndHashCode
@AllArgsConstructor(staticName = "of") @AllArgsConstructor(staticName = "of")
public final class NodeHeldPermission<T> implements HeldPermission<T> { public final class NodeHeldPermission<T> implements HeldPermission<T> {
public static <T> NodeHeldPermission<T> of(T holder, NodeDataHolder nodeDataHolder) { public static <T> NodeHeldPermission<T> of(T holder, NodeModel nodeModel) {
return of(holder, nodeDataHolder.toNode()); return of(holder, nodeModel.toNode());
} }
private final T holder; private final T holder;

View File

@ -30,18 +30,15 @@ import com.google.common.collect.ImmutableMap;
import com.google.gson.JsonArray; import com.google.gson.JsonArray;
import com.google.gson.JsonElement; import com.google.gson.JsonElement;
import com.google.gson.JsonObject; import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import me.lucko.luckperms.api.context.ContextSet;
import me.lucko.luckperms.api.context.ImmutableContextSet; import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.api.context.MutableContextSet;
import me.lucko.luckperms.common.core.ContextSetComparator; import me.lucko.luckperms.common.core.ContextSetComparator;
import me.lucko.luckperms.common.core.NodeModel;
import me.lucko.luckperms.common.core.PriorityComparator; import me.lucko.luckperms.common.core.PriorityComparator;
import me.lucko.luckperms.sponge.service.calculated.CalculatedSubjectData; import me.lucko.luckperms.sponge.service.calculated.CalculatedSubjectData;
import me.lucko.luckperms.sponge.service.references.SubjectReference; import me.lucko.luckperms.sponge.service.references.SubjectReference;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collection;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
@ -100,7 +97,7 @@ public class SubjectStorageModel {
JsonObject context = section.get("context").getAsJsonObject(); JsonObject context = section.get("context").getAsJsonObject();
JsonObject data = section.get("data").getAsJsonObject(); JsonObject data = section.get("data").getAsJsonObject();
ImmutableContextSet contextSet = contextsFromJson(context); ImmutableContextSet contextSet = NodeModel.deserializeContextSet(context).makeImmutable();
ImmutableMap.Builder<String, Boolean> perms = ImmutableMap.builder(); ImmutableMap.Builder<String, Boolean> perms = ImmutableMap.builder();
for (Map.Entry<String, JsonElement> perm : data.entrySet()) { for (Map.Entry<String, JsonElement> perm : data.entrySet()) {
perms.put(perm.getKey(), perm.getValue().getAsBoolean()); perms.put(perm.getKey(), perm.getValue().getAsBoolean());
@ -123,7 +120,7 @@ public class SubjectStorageModel {
JsonObject context = section.get("context").getAsJsonObject(); JsonObject context = section.get("context").getAsJsonObject();
JsonObject data = section.get("data").getAsJsonObject(); JsonObject data = section.get("data").getAsJsonObject();
ImmutableContextSet contextSet = contextsFromJson(context); ImmutableContextSet contextSet = NodeModel.deserializeContextSet(context).makeImmutable();
ImmutableMap.Builder<String, String> opts = ImmutableMap.builder(); ImmutableMap.Builder<String, String> opts = ImmutableMap.builder();
for (Map.Entry<String, JsonElement> opt : data.entrySet()) { for (Map.Entry<String, JsonElement> opt : data.entrySet()) {
opts.put(opt.getKey(), opt.getValue().getAsString()); opts.put(opt.getKey(), opt.getValue().getAsString());
@ -146,7 +143,7 @@ public class SubjectStorageModel {
JsonObject context = section.get("context").getAsJsonObject(); JsonObject context = section.get("context").getAsJsonObject();
JsonArray data = section.get("data").getAsJsonArray(); JsonArray data = section.get("data").getAsJsonArray();
ImmutableContextSet contextSet = contextsFromJson(context); ImmutableContextSet contextSet = NodeModel.deserializeContextSet(context).makeImmutable();
ImmutableList.Builder<SubjectReference> pars = ImmutableList.builder(); ImmutableList.Builder<SubjectReference> pars = ImmutableList.builder();
for (JsonElement p : data) { for (JsonElement p : data) {
if (!p.isJsonObject()) { if (!p.isJsonObject()) {
@ -182,7 +179,7 @@ public class SubjectStorageModel {
} }
JsonObject section = new JsonObject(); JsonObject section = new JsonObject();
section.add("context", contextsToJson(e.getKey())); section.add("context", NodeModel.serializeContextSet(e.getKey()));
JsonObject data = new JsonObject(); JsonObject data = new JsonObject();
@ -206,7 +203,7 @@ public class SubjectStorageModel {
} }
JsonObject section = new JsonObject(); JsonObject section = new JsonObject();
section.add("context", contextsToJson(e.getKey())); section.add("context", NodeModel.serializeContextSet(e.getKey()));
JsonObject data = new JsonObject(); JsonObject data = new JsonObject();
@ -230,7 +227,7 @@ public class SubjectStorageModel {
} }
JsonObject section = new JsonObject(); JsonObject section = new JsonObject();
section.add("context", contextsToJson(e.getKey())); section.add("context", NodeModel.serializeContextSet(e.getKey()));
JsonArray data = new JsonArray(); JsonArray data = new JsonArray();
for (SubjectReference ref : e.getValue()) { for (SubjectReference ref : e.getValue()) {
@ -253,40 +250,4 @@ public class SubjectStorageModel {
subjectData.replaceOptions(options); subjectData.replaceOptions(options);
subjectData.replaceParents(parents); subjectData.replaceParents(parents);
} }
private static ImmutableContextSet contextsFromJson(JsonObject contexts) {
MutableContextSet ret = MutableContextSet.create();
for (Map.Entry<String, JsonElement> e : contexts.entrySet()) {
String key = e.getKey();
if (e.getValue().isJsonArray()) {
JsonArray values = e.getValue().getAsJsonArray();
for (JsonElement value : values) {
ret.add(key, value.getAsString());
}
} else {
ret.add(key, e.getValue().getAsString());
}
}
return ret.makeImmutable();
}
private static JsonObject contextsToJson(ContextSet contexts) {
JsonObject ret = new JsonObject();
for (Map.Entry<String, Collection<String>> e : contexts.toMultimap().asMap().entrySet()) {
String key = e.getKey();
List<String> values = new ArrayList<>(e.getValue());
if (values.size() == 1) {
ret.addProperty(key, values.get(0));
} else if (values.size() > 1) {
JsonArray arr = new JsonArray();
for (String s : values) {
arr.add(new JsonPrimitive(s));
}
ret.add(key, arr);
}
}
return ret;
}
} }