diff --git a/common/src/main/java/me/lucko/luckperms/common/commands/impl/generic/other/HolderEditor.java b/common/src/main/java/me/lucko/luckperms/common/commands/impl/generic/other/HolderEditor.java index f3a52387..787489e0 100644 --- a/common/src/main/java/me/lucko/luckperms/common/commands/impl/generic/other/HolderEditor.java +++ b/common/src/main/java/me/lucko/luckperms/common/commands/impl/generic/other/HolderEditor.java @@ -39,6 +39,7 @@ import me.lucko.luckperms.common.commands.abstraction.SubCommand; import me.lucko.luckperms.common.commands.sender.Sender; import me.lucko.luckperms.common.config.ConfigKeys; import me.lucko.luckperms.common.constants.CommandPermission; +import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer; import me.lucko.luckperms.common.locale.CommandSpec; import me.lucko.luckperms.common.locale.LocaleManager; import me.lucko.luckperms.common.locale.Message; @@ -176,7 +177,7 @@ public class HolderEditor extends SubCommand { for (NodeModel node : nodes) { JsonObject attributes = new JsonObject(); attributes.addProperty("permission", node.getPermission()); - attributes.addProperty("value", node.isValue()); + attributes.addProperty("value", node.getValue()); if (!node.getServer().equals("global")) { attributes.addProperty("server", node.getServer()); @@ -191,7 +192,7 @@ public class HolderEditor extends SubCommand { } if (!node.getContexts().isEmpty()) { - attributes.add("context", node.getContextsAsJson()); + attributes.add("context", ContextSetJsonSerializer.serializeContextSet(node.getContexts())); } arr.add(attributes); diff --git a/common/src/main/java/me/lucko/luckperms/common/commands/impl/misc/ApplyEditsCommand.java b/common/src/main/java/me/lucko/luckperms/common/commands/impl/misc/ApplyEditsCommand.java index 10d4e896..7df1794d 100644 --- a/common/src/main/java/me/lucko/luckperms/common/commands/impl/misc/ApplyEditsCommand.java +++ b/common/src/main/java/me/lucko/luckperms/common/commands/impl/misc/ApplyEditsCommand.java @@ -40,6 +40,7 @@ import me.lucko.luckperms.common.commands.abstraction.SingleCommand; import me.lucko.luckperms.common.commands.sender.Sender; import me.lucko.luckperms.common.commands.utils.Util; import me.lucko.luckperms.common.constants.CommandPermission; +import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer; import me.lucko.luckperms.common.locale.CommandSpec; import me.lucko.luckperms.common.locale.LocaleManager; import me.lucko.luckperms.common.locale.Message; @@ -191,7 +192,7 @@ public class ApplyEditsCommand extends SingleCommand { if (data.has("context") && data.get("context").isJsonObject()) { JsonObject contexts = data.get("context").getAsJsonObject(); - context = NodeModel.deserializeContextSet(contexts).makeImmutable(); + context = ContextSetJsonSerializer.deserializeContextSet(contexts).makeImmutable(); } nodes.add(NodeModel.of(permission, value, server, world, expiry, context)); diff --git a/common/src/main/java/me/lucko/luckperms/common/config/ContextsFile.java b/common/src/main/java/me/lucko/luckperms/common/config/ContextsFile.java index da9e688b..d4bd288e 100644 --- a/common/src/main/java/me/lucko/luckperms/common/config/ContextsFile.java +++ b/common/src/main/java/me/lucko/luckperms/common/config/ContextsFile.java @@ -34,7 +34,7 @@ import com.google.gson.GsonBuilder; import com.google.gson.JsonObject; import me.lucko.luckperms.api.context.ImmutableContextSet; -import me.lucko.luckperms.common.node.NodeModel; +import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer; import 
java.io.BufferedReader; import java.io.BufferedWriter; @@ -75,16 +75,16 @@ public class ContextsFile { JsonObject data = new Gson().fromJson(reader, JsonObject.class); if (data.has("context")) { - staticContexts = NodeModel.deserializeContextSet(data.get("context").getAsJsonObject()).makeImmutable(); + staticContexts = ContextSetJsonSerializer.deserializeContextSet(data.get("context").getAsJsonObject()).makeImmutable(); save = true; } if (data.has("static-contexts")) { - staticContexts = NodeModel.deserializeContextSet(data.get("static-contexts").getAsJsonObject()).makeImmutable(); + staticContexts = ContextSetJsonSerializer.deserializeContextSet(data.get("static-contexts").getAsJsonObject()).makeImmutable(); } if (data.has("default-contexts")) { - defaultContexts = NodeModel.deserializeContextSet(data.get("default-contexts").getAsJsonObject()).makeImmutable(); + defaultContexts = ContextSetJsonSerializer.deserializeContextSet(data.get("default-contexts").getAsJsonObject()).makeImmutable(); } } catch (IOException e) { @@ -102,8 +102,8 @@ public class ContextsFile { try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) { JsonObject data = new JsonObject(); - data.add("static-contexts", NodeModel.serializeContextSet(staticContexts)); - data.add("default-contexts", NodeModel.serializeContextSet(defaultContexts)); + data.add("static-contexts", ContextSetJsonSerializer.serializeContextSet(staticContexts)); + data.add("default-contexts", ContextSetJsonSerializer.serializeContextSet(defaultContexts)); new GsonBuilder().setPrettyPrinting().create().toJson(data, writer); writer.flush(); diff --git a/common/src/main/java/me/lucko/luckperms/common/contexts/ContextSetConfigurateSerializer.java b/common/src/main/java/me/lucko/luckperms/common/contexts/ContextSetConfigurateSerializer.java new file mode 100644 index 00000000..15206e7f --- /dev/null +++ b/common/src/main/java/me/lucko/luckperms/common/contexts/ContextSetConfigurateSerializer.java @@ -0,0 +1,86 @@ +/* + * This file is part of LuckPerms, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package me.lucko.luckperms.common.contexts; + +import lombok.experimental.UtilityClass; + +import com.google.common.base.Preconditions; + +import me.lucko.luckperms.api.context.ContextSet; +import me.lucko.luckperms.api.context.MutableContextSet; + +import ninja.leaping.configurate.ConfigurationNode; +import ninja.leaping.configurate.SimpleConfigurationNode; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +@UtilityClass +public class ContextSetConfigurateSerializer { + + public static ConfigurationNode serializeContextSet(ContextSet contextSet) { + ConfigurationNode data = SimpleConfigurationNode.root(); + Map> map = contextSet.toMultimap().asMap(); + + map.forEach((k, v) -> { + List values = new ArrayList<>(v); + int size = values.size(); + + if (size == 1) { + data.getNode(k).setValue(values.get(0)); + } else if (size > 1) { + data.getNode(k).setValue(values); + } + }); + + return data; + } + + public static MutableContextSet deserializeContextSet(ConfigurationNode data) { + Preconditions.checkArgument(data.hasMapChildren()); + Map dataMap = data.getChildrenMap(); + + MutableContextSet map = MutableContextSet.create(); + for (Map.Entry e : dataMap.entrySet()) { + String k = e.getKey().toString(); + ConfigurationNode v = e.getValue(); + + if (v.hasListChildren()) { + List values = v.getChildrenList(); + for (ConfigurationNode value : values) { + map.add(k, value.getString()); + } + } else { + map.add(k, v.getString()); + } + } + + return map; + } + +} diff --git a/common/src/main/java/me/lucko/luckperms/common/contexts/ContextSetJsonSerializer.java b/common/src/main/java/me/lucko/luckperms/common/contexts/ContextSetJsonSerializer.java new file mode 100644 index 00000000..0e2ccd76 --- /dev/null +++ b/common/src/main/java/me/lucko/luckperms/common/contexts/ContextSetJsonSerializer.java @@ -0,0 +1,90 @@ +/* + * This file is part of LuckPerms, licensed under the MIT License. + * + * Copyright (c) lucko (Luck) + * Copyright (c) contributors + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. 
+ */ + +package me.lucko.luckperms.common.contexts; + +import lombok.experimental.UtilityClass; + +import com.google.common.base.Preconditions; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonPrimitive; + +import me.lucko.luckperms.api.context.ContextSet; +import me.lucko.luckperms.api.context.MutableContextSet; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Map; + +@UtilityClass +public class ContextSetJsonSerializer { + + public static JsonObject serializeContextSet(ContextSet contextSet) { + JsonObject data = new JsonObject(); + Map> map = contextSet.toMultimap().asMap(); + + map.forEach((k, v) -> { + List values = new ArrayList<>(v); + int size = values.size(); + + if (size == 1) { + data.addProperty(k, values.get(0)); + } else if (size > 1) { + JsonArray arr = new JsonArray(); + for (String s : values) { + arr.add(new JsonPrimitive(s)); + } + data.add(k, arr); + } + }); + + return data; + } + + public static MutableContextSet deserializeContextSet(JsonElement element) { + Preconditions.checkArgument(element.isJsonObject()); + JsonObject data = element.getAsJsonObject(); + + MutableContextSet map = MutableContextSet.create(); + for (Map.Entry e : data.entrySet()) { + String k = e.getKey(); + JsonElement v = e.getValue(); + if (v.isJsonArray()) { + JsonArray values = v.getAsJsonArray(); + for (JsonElement value : values) { + map.add(k, value.getAsString()); + } + } else { + map.add(k, v.getAsString()); + } + } + + return map; + } + +} diff --git a/common/src/main/java/me/lucko/luckperms/common/node/NodeModel.java b/common/src/main/java/me/lucko/luckperms/common/node/NodeModel.java index c2293923..5c194a0f 100644 --- a/common/src/main/java/me/lucko/luckperms/common/node/NodeModel.java +++ b/common/src/main/java/me/lucko/luckperms/common/node/NodeModel.java @@ -25,44 +25,21 @@ package me.lucko.luckperms.common.node; -import lombok.AllArgsConstructor; -import lombok.EqualsAndHashCode; -import lombok.Getter; import lombok.NonNull; -import lombok.ToString; - -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableSetMultimap; -import com.google.gson.Gson; -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import com.google.gson.JsonPrimitive; import me.lucko.luckperms.api.Node; -import me.lucko.luckperms.api.context.ContextSet; import me.lucko.luckperms.api.context.ImmutableContextSet; -import me.lucko.luckperms.api.context.MutableContextSet; - -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Map; /** - * An stripped down version of {@link Node}, without methods and cached values for handling permission lookups. + * An stripped down version of {@link Node}, without methods and cached values + * for handling permission lookups. * * All values are non-null. */ -@Getter -@ToString -@EqualsAndHashCode -@AllArgsConstructor(staticName = "of") public final class NodeModel { - private static final Gson GSON = new Gson(); public static NodeModel fromNode(Node node) { - return NodeModel.of( + NodeModel model = of( node.getPermission(), node.getValuePrimitive(), node.getServer().orElse("global"), @@ -70,42 +47,67 @@ public final class NodeModel { node.isTemporary() ? 
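
For reference, a minimal sketch of how the new ContextSetJsonSerializer is intended to be used, assuming the MutableContextSet factory methods used elsewhere in this diff; the context keys and values are purely illustrative.

    import com.google.gson.JsonObject;
    import me.lucko.luckperms.api.context.ImmutableContextSet;
    import me.lucko.luckperms.api.context.MutableContextSet;
    import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer;

    static void roundTripExample() {
        // one single-valued key and one key with two values
        MutableContextSet contexts = MutableContextSet.create();
        contexts.add("server", "factions");
        contexts.add("world", "nether");
        contexts.add("world", "end");

        // single values become JSON strings, repeated keys become JSON arrays
        JsonObject json = ContextSetJsonSerializer.serializeContextSet(contexts);

        // deserializing restores the same key/value pairs
        ImmutableContextSet copy = ContextSetJsonSerializer.deserializeContextSet(json).makeImmutable();
    }

This mirrors what HolderEditor, ApplyEditsCommand and ContextsFile now do instead of calling the serialisation helpers that previously lived on NodeModel.
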
node.getExpiryUnixTime() : 0L, node.getContexts().makeImmutable() ); + model.node = node; + return model; } - public static NodeModel deserialize(String permission, boolean value, String server, String world, long expiry, String contexts) { - JsonObject context = GSON.fromJson(contexts, JsonObject.class); - return of(permission, value, server, world, expiry, deserializeContextSet(context).makeImmutable()); + public static NodeModel of(String permission, boolean value, String server, String world, long expiry, ImmutableContextSet contexts) { + return new NodeModel(permission, value, server, world, expiry, contexts); } - @NonNull private final String permission; - @NonNull private final boolean value; - @NonNull private final String server; - @NonNull private final String world; - @NonNull private final long expiry; - @NonNull private final ImmutableContextSet contexts; + private Node node = null; - public String serializeContext() { - return GSON.toJson(getContextsAsJson()); + private NodeModel(@NonNull String permission, boolean value, @NonNull String server, @NonNull String world, long expiry, @NonNull ImmutableContextSet contexts) { + this.permission = permission; + this.value = value; + this.server = server; + this.world = world; + this.expiry = expiry; + this.contexts = contexts; } - public JsonObject getContextsAsJson() { - return serializeContextSet(contexts); + public synchronized Node toNode() { + if (node == null) { + Node.Builder builder = NodeFactory.newBuilder(permission); + builder.setValue(value); + builder.setServer(server); + builder.setWorld(world); + builder.setExpiry(expiry); + builder.withExtraContext(contexts); + node = builder.build(); + } + + return node; } - public Node toNode() { - Node.Builder builder = NodeFactory.newBuilder(permission); - builder.setValue(value); - builder.setServer(server); - builder.setWorld(world); - builder.setExpiry(expiry); - builder.withExtraContext(contexts); - return builder.build(); + public String getPermission() { + return this.permission; + } + + public boolean getValue() { + return this.value; + } + + public String getServer() { + return this.server; + } + + public String getWorld() { + return this.world; + } + + public long getExpiry() { + return this.expiry; + } + + public ImmutableContextSet getContexts() { + return this.contexts; } public NodeModel setPermission(String permission) { @@ -132,47 +134,38 @@ public final class NodeModel { return of(permission, value, server, world, expiry, contexts); } - public static JsonObject serializeContextSet(ContextSet contextSet) { - JsonObject data = new JsonObject(); - Map> map = contextSet.toMultimap().asMap(); + public boolean equals(Object o) { + if (o == this) return true; + if (!(o instanceof NodeModel)) return false; + final NodeModel other = (NodeModel) o; - map.forEach((k, v) -> { - List values = new ArrayList<>(v); - int size = values.size(); - - if (size == 1) { - data.addProperty(k, values.get(0)); - } else if (size > 1) { - JsonArray arr = new JsonArray(); - for (String s : values) { - arr.add(new JsonPrimitive(s)); - } - data.add(k, arr); - } - }); - - return data; + return this.getPermission().equals(other.getPermission()) && + this.getValue() == other.getValue() && + this.getServer().equals(other.getServer()) && + this.getWorld().equals(other.getWorld()) && + this.getExpiry() == other.getExpiry() && + this.getContexts().equals(other.getContexts()); } - public static MutableContextSet deserializeContextSet(JsonElement element) { - 
Preconditions.checkArgument(element.isJsonObject()); - JsonObject data = element.getAsJsonObject(); - - ImmutableSetMultimap.Builder map = ImmutableSetMultimap.builder(); - for (Map.Entry e : data.entrySet()) { - String k = e.getKey(); - JsonElement v = e.getValue(); - if (v.isJsonArray()) { - JsonArray values = v.getAsJsonArray(); - for (JsonElement value : values) { - map.put(k, value.getAsString()); - } - } else { - map.put(k, v.getAsString()); - } - } - - return MutableContextSet.fromMultimap(map.build()); + public int hashCode() { + final int PRIME = 59; + int result = 1; + result = result * PRIME + this.getPermission().hashCode(); + result = result * PRIME + Boolean.hashCode(this.getValue()); + result = result * PRIME + this.getServer().hashCode(); + result = result * PRIME + this.getWorld().hashCode(); + result = result * PRIME + Long.hashCode(this.getExpiry()); + result = result * PRIME + this.getContexts().hashCode(); + return result; } + public String toString() { + return "NodeModel(" + + "permission=" + this.getPermission() + ", " + + "value=" + this.getValue() + ", " + + "server=" + this.getServer() + ", " + + "world=" + this.getWorld() + ", " + + "expiry=" + this.getExpiry() + ", " + + "contexts=" + this.getContexts() + ")"; + } } diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/AbstractStorage.java b/common/src/main/java/me/lucko/luckperms/common/storage/AbstractStorage.java index e9ac137c..d8eed50c 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/AbstractStorage.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/AbstractStorage.java @@ -41,7 +41,7 @@ import me.lucko.luckperms.common.model.Group; import me.lucko.luckperms.common.model.Track; import me.lucko.luckperms.common.model.User; import me.lucko.luckperms.common.plugin.LuckPermsPlugin; -import me.lucko.luckperms.common.storage.backing.AbstractBacking; +import me.lucko.luckperms.common.storage.backing.AbstractDao; import me.lucko.luckperms.common.storage.wrappings.BufferedOutputStorage; import me.lucko.luckperms.common.storage.wrappings.PhasedStorage; @@ -53,11 +53,11 @@ import java.util.concurrent.CompletableFuture; import java.util.function.Supplier; /** - * Converts a {@link AbstractBacking} to use {@link CompletableFuture}s + * Converts a {@link AbstractDao} to use {@link CompletableFuture}s */ @RequiredArgsConstructor(access = AccessLevel.PRIVATE) public class AbstractStorage implements Storage { - public static Storage wrap(LuckPermsPlugin plugin, AbstractBacking backing) { + public static Storage create(LuckPermsPlugin plugin, AbstractDao backing) { BufferedOutputStorage bufferedDs = BufferedOutputStorage.wrap(PhasedStorage.wrap(new AbstractStorage(plugin, backing)), 250L); plugin.getScheduler().asyncRepeating(bufferedDs, 2L); return bufferedDs; @@ -66,19 +66,19 @@ public class AbstractStorage implements Storage { private final LuckPermsPlugin plugin; @Delegate(types = Delegated.class) - private final AbstractBacking backing; + private final AbstractDao dao; @Getter private final StorageDelegate delegate; - private AbstractStorage(LuckPermsPlugin plugin, AbstractBacking backing) { + private AbstractStorage(LuckPermsPlugin plugin, AbstractDao dao) { this.plugin = plugin; - this.backing = backing; + this.dao = dao; this.delegate = new StorageDelegate(plugin, this); } private CompletableFuture makeFuture(Supplier supplier) { - return CompletableFuture.supplyAsync(supplier, backing.getPlugin().getScheduler().async()); + return 
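
The rewritten NodeModel above replaces the Lombok-generated boilerplate with explicit members and, in addition, caches the Node built by toNode(). A small sketch of that caching behaviour, assuming NodeFactory.newBuilder works as before; the permission string is illustrative.

    import me.lucko.luckperms.api.Node;
    import me.lucko.luckperms.api.context.ImmutableContextSet;
    import me.lucko.luckperms.common.node.NodeModel;

    static void cachingExample() {
        NodeModel model = NodeModel.of("some.permission", true, "global", "global", 0L, ImmutableContextSet.empty());

        // the first call builds a Node via NodeFactory; later calls return the cached instance
        Node first = model.toNode();
        Node second = model.toNode();
        assert first == second;
    }

NodeModel.fromNode also pre-populates the cache with the Node it was created from, so converting back is free.
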
CompletableFuture.supplyAsync(supplier, dao.getPlugin().getScheduler().async()); } @Override @@ -88,23 +88,23 @@ public class AbstractStorage implements Storage { @Override public CompletableFuture logAction(LogEntry entry) { - return makeFuture(() -> backing.logAction(entry)); + return makeFuture(() -> dao.logAction(entry)); } @Override public CompletableFuture getLog() { - return makeFuture(backing::getLog); + return makeFuture(dao::getLog); } @Override public CompletableFuture applyBulkUpdate(BulkUpdate bulkUpdate) { - return makeFuture(() -> backing.applyBulkUpdate(bulkUpdate)); + return makeFuture(() -> dao.applyBulkUpdate(bulkUpdate)); } @Override public CompletableFuture loadUser(UUID uuid, String username) { return makeFuture(() -> { - if (backing.loadUser(uuid, username)) { + if (dao.loadUser(uuid, username)) { User u = plugin.getUserManager().getIfLoaded(uuid); if (u != null) { plugin.getApiProvider().getEventFactory().handleUserLoad(u); @@ -117,23 +117,23 @@ public class AbstractStorage implements Storage { @Override public CompletableFuture saveUser(User user) { - return makeFuture(() -> backing.saveUser(user)); + return makeFuture(() -> dao.saveUser(user)); } @Override public CompletableFuture> getUniqueUsers() { - return makeFuture(backing::getUniqueUsers); + return makeFuture(dao::getUniqueUsers); } @Override public CompletableFuture>> getUsersWithPermission(String permission) { - return makeFuture(() -> backing.getUsersWithPermission(permission)); + return makeFuture(() -> dao.getUsersWithPermission(permission)); } @Override public CompletableFuture createAndLoadGroup(String name, CreationCause cause) { return makeFuture(() -> { - if (backing.createAndLoadGroup(name)) { + if (dao.createAndLoadGroup(name)) { Group g = plugin.getGroupManager().getIfLoaded(name); if (g != null) { plugin.getApiProvider().getEventFactory().handleGroupCreate(g, cause); @@ -147,7 +147,7 @@ public class AbstractStorage implements Storage { @Override public CompletableFuture loadGroup(String name) { return makeFuture(() -> { - if (backing.loadGroup(name)) { + if (dao.loadGroup(name)) { Group g = plugin.getGroupManager().getIfLoaded(name); if (g != null) { plugin.getApiProvider().getEventFactory().handleGroupLoad(g); @@ -161,7 +161,7 @@ public class AbstractStorage implements Storage { @Override public CompletableFuture loadAllGroups() { return makeFuture(() -> { - if (backing.loadAllGroups()) { + if (dao.loadAllGroups()) { plugin.getApiProvider().getEventFactory().handleGroupLoadAll(); return true; } @@ -171,13 +171,13 @@ public class AbstractStorage implements Storage { @Override public CompletableFuture saveGroup(Group group) { - return makeFuture(() -> backing.saveGroup(group)); + return makeFuture(() -> dao.saveGroup(group)); } @Override public CompletableFuture deleteGroup(Group group, DeletionCause cause) { return makeFuture(() -> { - if (backing.deleteGroup(group)) { + if (dao.deleteGroup(group)) { plugin.getApiProvider().getEventFactory().handleGroupDelete(group, cause); return true; } @@ -187,13 +187,13 @@ public class AbstractStorage implements Storage { @Override public CompletableFuture>> getGroupsWithPermission(String permission) { - return makeFuture(() -> backing.getGroupsWithPermission(permission)); + return makeFuture(() -> dao.getGroupsWithPermission(permission)); } @Override public CompletableFuture createAndLoadTrack(String name, CreationCause cause) { return makeFuture(() -> { - if (backing.createAndLoadTrack(name)) { + if (dao.createAndLoadTrack(name)) { Track t = 
plugin.getTrackManager().getIfLoaded(name); if (t != null) { plugin.getApiProvider().getEventFactory().handleTrackCreate(t, cause); @@ -207,7 +207,7 @@ public class AbstractStorage implements Storage { @Override public CompletableFuture loadTrack(String name) { return makeFuture(() -> { - if (backing.loadTrack(name)) { + if (dao.loadTrack(name)) { Track t = plugin.getTrackManager().getIfLoaded(name); if (t != null) { plugin.getApiProvider().getEventFactory().handleTrackLoad(t); @@ -221,7 +221,7 @@ public class AbstractStorage implements Storage { @Override public CompletableFuture loadAllTracks() { return makeFuture(() -> { - if (backing.loadAllTracks()) { + if (dao.loadAllTracks()) { plugin.getApiProvider().getEventFactory().handleTrackLoadAll(); return true; } @@ -231,13 +231,13 @@ public class AbstractStorage implements Storage { @Override public CompletableFuture saveTrack(Track track) { - return makeFuture(() -> backing.saveTrack(track)); + return makeFuture(() -> dao.saveTrack(track)); } @Override public CompletableFuture deleteTrack(Track track, DeletionCause cause) { return makeFuture(() -> { - if (backing.deleteTrack(track)) { + if (dao.deleteTrack(track)) { plugin.getApiProvider().getEventFactory().handleTrackDelete(track, cause); return true; } @@ -247,17 +247,17 @@ public class AbstractStorage implements Storage { @Override public CompletableFuture saveUUIDData(UUID uuid, String username) { - return makeFuture(() -> backing.saveUUIDData(uuid, username)); + return makeFuture(() -> dao.saveUUIDData(uuid, username)); } @Override public CompletableFuture getUUID(String username) { - return makeFuture(() -> backing.getUUID(username)); + return makeFuture(() -> dao.getUUID(username)); } @Override public CompletableFuture getName(UUID uuid) { - return makeFuture(() -> backing.getName(uuid)); + return makeFuture(() -> dao.getName(uuid)); } private interface Delegated { diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/SplitBacking.java b/common/src/main/java/me/lucko/luckperms/common/storage/SplitStorageDao.java similarity index 91% rename from common/src/main/java/me/lucko/luckperms/common/storage/SplitBacking.java rename to common/src/main/java/me/lucko/luckperms/common/storage/SplitStorageDao.java index 7b7d310e..344e1749 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/SplitBacking.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/SplitStorageDao.java @@ -35,7 +35,7 @@ import me.lucko.luckperms.common.model.Group; import me.lucko.luckperms.common.model.Track; import me.lucko.luckperms.common.model.User; import me.lucko.luckperms.common.plugin.LuckPermsPlugin; -import me.lucko.luckperms.common.storage.backing.AbstractBacking; +import me.lucko.luckperms.common.storage.backing.AbstractDao; import java.util.LinkedHashMap; import java.util.List; @@ -43,11 +43,11 @@ import java.util.Map; import java.util.Set; import java.util.UUID; -public class SplitBacking extends AbstractBacking { - private final Map backing; +public class SplitStorageDao extends AbstractDao { + private final Map backing; private final Map types; - protected SplitBacking(LuckPermsPlugin plugin, Map backing, Map types) { + protected SplitStorageDao(LuckPermsPlugin plugin, Map backing, Map types) { super(plugin, "Split Storage"); this.backing = ImmutableMap.copyOf(backing); this.types = ImmutableMap.copyOf(types); @@ -56,8 +56,8 @@ public class SplitBacking extends AbstractBacking { @Override public void init() { boolean success = true; - 
backing.values().forEach(AbstractBacking::init); - for (AbstractBacking ds : backing.values()) { + backing.values().forEach(AbstractDao::init); + for (AbstractDao ds : backing.values()) { if (!ds.isAcceptingLogins()) { success = false; } @@ -68,14 +68,14 @@ public class SplitBacking extends AbstractBacking { @Override public void shutdown() { - backing.values().forEach(AbstractBacking::shutdown); + backing.values().forEach(AbstractDao::shutdown); } @Override public Map getMeta() { Map ret = new LinkedHashMap<>(); ret.put("Types", types.toString()); - for (AbstractBacking backing : backing.values()) { + for (AbstractDao backing : backing.values()) { ret.putAll(backing.getMeta()); } return ret; diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/StorageFactory.java b/common/src/main/java/me/lucko/luckperms/common/storage/StorageFactory.java index db6cd181..c22a8be6 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/StorageFactory.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/StorageFactory.java @@ -31,16 +31,16 @@ import com.google.common.collect.ImmutableSet; import me.lucko.luckperms.common.config.ConfigKeys; import me.lucko.luckperms.common.plugin.LuckPermsPlugin; -import me.lucko.luckperms.common.storage.backing.AbstractBacking; -import me.lucko.luckperms.common.storage.backing.file.HOCONBacking; -import me.lucko.luckperms.common.storage.backing.file.JSONBacking; -import me.lucko.luckperms.common.storage.backing.file.YAMLBacking; -import me.lucko.luckperms.common.storage.backing.mongodb.MongoDBBacking; -import me.lucko.luckperms.common.storage.backing.sql.SQLBacking; -import me.lucko.luckperms.common.storage.backing.sql.provider.H2Provider; -import me.lucko.luckperms.common.storage.backing.sql.provider.MySQLProvider; -import me.lucko.luckperms.common.storage.backing.sql.provider.PostgreSQLProvider; -import me.lucko.luckperms.common.storage.backing.sql.provider.SQLiteProvider; +import me.lucko.luckperms.common.storage.backing.AbstractDao; +import me.lucko.luckperms.common.storage.backing.file.HoconDao; +import me.lucko.luckperms.common.storage.backing.file.JsonDao; +import me.lucko.luckperms.common.storage.backing.file.YamlDao; +import me.lucko.luckperms.common.storage.backing.mongodb.MongoDao; +import me.lucko.luckperms.common.storage.backing.sql.SqlDao; +import me.lucko.luckperms.common.storage.backing.sql.provider.file.H2ConnectionFactory; +import me.lucko.luckperms.common.storage.backing.sql.provider.file.SQLiteConnectionFactory; +import me.lucko.luckperms.common.storage.backing.sql.provider.remote.MySqlConnectionFactory; +import me.lucko.luckperms.common.storage.backing.sql.provider.remote.PostgreConnectionFactory; import me.lucko.luckperms.common.utils.ImmutableCollectors; import java.io.File; @@ -91,13 +91,13 @@ public class StorageFactory { Set neededTypes = new HashSet<>(); neededTypes.addAll(types.values()); - Map backing = new HashMap<>(); + Map backing = new HashMap<>(); for (String type : neededTypes) { - backing.put(type, makeBacking(StorageType.parse(type), plugin)); + backing.put(type, makeDao(StorageType.parse(type), plugin)); } - storage = AbstractStorage.wrap(plugin, new SplitBacking(plugin, backing, types)); + storage = AbstractStorage.create(plugin, new SplitStorageDao(plugin, backing, types)); } else { String method = plugin.getConfiguration().get(ConfigKeys.STORAGE_METHOD); @@ -115,52 +115,52 @@ public class StorageFactory { } private static Storage makeInstance(StorageType type, LuckPermsPlugin plugin) { - return 
AbstractStorage.wrap(plugin, makeBacking(type, plugin)); + return AbstractStorage.create(plugin, makeDao(type, plugin)); } - private static AbstractBacking makeBacking(StorageType method, LuckPermsPlugin plugin) { + private static AbstractDao makeDao(StorageType method, LuckPermsPlugin plugin) { switch (method) { case MARIADB: - return new SQLBacking(plugin, new MySQLProvider( + return new SqlDao(plugin, new MySqlConnectionFactory( "MariaDB", "org.mariadb.jdbc.MySQLDataSource", plugin.getConfiguration().get(ConfigKeys.DATABASE_VALUES)), plugin.getConfiguration().get(ConfigKeys.SQL_TABLE_PREFIX) ); case MYSQL: - return new SQLBacking(plugin, new MySQLProvider( + return new SqlDao(plugin, new MySqlConnectionFactory( "MySQL", "com.mysql.jdbc.jdbc2.optional.MysqlDataSource", plugin.getConfiguration().get(ConfigKeys.DATABASE_VALUES)), plugin.getConfiguration().get(ConfigKeys.SQL_TABLE_PREFIX) ); case SQLITE: - return new SQLBacking(plugin, new SQLiteProvider( + return new SqlDao(plugin, new SQLiteConnectionFactory( new File(plugin.getDataDirectory(), "luckperms-sqlite.db")), plugin.getConfiguration().get(ConfigKeys.SQL_TABLE_PREFIX) ); case H2: - return new SQLBacking(plugin, new H2Provider( + return new SqlDao(plugin, new H2ConnectionFactory( new File(plugin.getDataDirectory(), "luckperms-h2")), plugin.getConfiguration().get(ConfigKeys.SQL_TABLE_PREFIX) ); case POSTGRESQL: - return new SQLBacking(plugin, new PostgreSQLProvider( + return new SqlDao(plugin, new PostgreConnectionFactory( plugin.getConfiguration().get(ConfigKeys.DATABASE_VALUES)), plugin.getConfiguration().get(ConfigKeys.SQL_TABLE_PREFIX) ); case MONGODB: - return new MongoDBBacking( + return new MongoDao( plugin, plugin.getConfiguration().get(ConfigKeys.DATABASE_VALUES), plugin.getConfiguration().get(ConfigKeys.MONGODB_COLLECTION_PREFIX) ); case YAML: - return new YAMLBacking(plugin, "yaml-storage"); + return new YamlDao(plugin, "yaml-storage"); case HOCON: - return new HOCONBacking(plugin, "hocon-storage"); + return new HoconDao(plugin, "hocon-storage"); default: - return new JSONBacking(plugin, "json-storage"); + return new JsonDao(plugin, "json-storage"); } } } diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/AbstractBacking.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/AbstractDao.java similarity index 98% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/AbstractBacking.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/AbstractDao.java index 33e4b8b6..59b13786 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/AbstractBacking.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/AbstractDao.java @@ -46,7 +46,7 @@ import java.util.Set; import java.util.UUID; @RequiredArgsConstructor(access = AccessLevel.PROTECTED) -public abstract class AbstractBacking { +public abstract class AbstractDao { @Getter protected final LuckPermsPlugin plugin; diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/ConfigurateBacking.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/ConfigurateDao.java similarity index 81% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/file/ConfigurateBacking.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/file/ConfigurateDao.java index 42975520..9f5c6080 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/ConfigurateBacking.java +++ 
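
With the backing classes renamed to DAOs, storage construction now goes through AbstractStorage.create rather than wrap. A minimal sketch of wiring a flatfile DAO into the async Storage wrapper, following the factory code above; the folder name matches the JSON default used there.

    import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
    import me.lucko.luckperms.common.storage.AbstractStorage;
    import me.lucko.luckperms.common.storage.Storage;
    import me.lucko.luckperms.common.storage.backing.AbstractDao;
    import me.lucko.luckperms.common.storage.backing.file.JsonDao;

    static Storage makeJsonStorage(LuckPermsPlugin plugin) {
        // the DAO performs blocking i/o; AbstractStorage.create wraps it with
        // CompletableFuture-based methods plus buffering and phased shutdown
        AbstractDao dao = new JsonDao(plugin, "json-storage");
        return AbstractStorage.create(plugin, dao);
    }
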
b/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/ConfigurateDao.java @@ -27,22 +27,19 @@ package me.lucko.luckperms.common.storage.backing.file; import lombok.Getter; -import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableSetMultimap; import com.google.common.collect.Iterables; import com.google.common.reflect.TypeToken; import me.lucko.luckperms.api.HeldPermission; import me.lucko.luckperms.api.LogEntry; import me.lucko.luckperms.api.Node; -import me.lucko.luckperms.api.context.ContextSet; import me.lucko.luckperms.api.context.ImmutableContextSet; -import me.lucko.luckperms.api.context.MutableContextSet; import me.lucko.luckperms.common.actionlog.Log; import me.lucko.luckperms.common.bulkupdate.BulkUpdate; import me.lucko.luckperms.common.commands.utils.Util; import me.lucko.luckperms.common.constants.Constants; +import me.lucko.luckperms.common.contexts.ContextSetConfigurateSerializer; import me.lucko.luckperms.common.managers.GenericUserManager; import me.lucko.luckperms.common.managers.GroupManager; import me.lucko.luckperms.common.managers.TrackManager; @@ -53,9 +50,9 @@ import me.lucko.luckperms.common.node.NodeHeldPermission; import me.lucko.luckperms.common.node.NodeModel; import me.lucko.luckperms.common.plugin.LuckPermsPlugin; import me.lucko.luckperms.common.references.UserIdentifier; -import me.lucko.luckperms.common.storage.backing.AbstractBacking; -import me.lucko.luckperms.common.storage.backing.legacy.LegacyJSONSchemaMigration; -import me.lucko.luckperms.common.storage.backing.legacy.LegacyYAMLSchemaMigration; +import me.lucko.luckperms.common.storage.backing.AbstractDao; +import me.lucko.luckperms.common.storage.backing.legacy.LegacyJsonMigration; +import me.lucko.luckperms.common.storage.backing.legacy.LegacyYamlMigration; import ninja.leaping.configurate.ConfigurationNode; import ninja.leaping.configurate.SimpleConfigurationNode; @@ -67,6 +64,7 @@ import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.Date; import java.util.HashSet; import java.util.LinkedHashSet; @@ -82,7 +80,7 @@ import java.util.logging.LogRecord; import java.util.logging.Logger; import java.util.stream.Collectors; -public abstract class ConfigurateBacking extends AbstractBacking { +public abstract class ConfigurateDao extends AbstractDao { private static final String LOG_FORMAT = "%s(%s): [%s] %s(%s) --> %s"; private final Logger actionLogger = Logger.getLogger("luckperms_actions"); @@ -100,7 +98,7 @@ public abstract class ConfigurateBacking extends AbstractBacking { private File groupsDirectory; private File tracksDirectory; - protected ConfigurateBacking(LuckPermsPlugin plugin, String name, String fileExtension, String dataFolderName) { + protected ConfigurateDao(LuckPermsPlugin plugin, String name, String fileExtension, String dataFolderName) { super(plugin, name); this.fileExtension = fileExtension; this.dataFolderName = dataFolderName; @@ -210,15 +208,15 @@ public abstract class ConfigurateBacking extends AbstractBacking { plugin.getLog().severe("Starting migration from legacy schema. 
This could take a while...."); plugin.getLog().severe("Please do not stop your server while the migration takes place."); - if (this instanceof YAMLBacking) { + if (this instanceof YamlDao) { try { - new LegacyYAMLSchemaMigration(plugin, (YAMLBacking) this, oldData, data).run(); + new LegacyYamlMigration(plugin, (YamlDao) this, oldData, data).run(); } catch (Exception e) { e.printStackTrace(); } - } else if (this instanceof JSONBacking) { + } else if (this instanceof JsonDao) { try { - new LegacyJSONSchemaMigration(plugin, (JSONBacking) this, oldData, data).run(); + new LegacyJsonMigration(plugin, (JsonDao) this, oldData, data).run(); } catch (Exception e) { e.printStackTrace(); } @@ -361,7 +359,7 @@ public abstract class ConfigurateBacking extends AbstractBacking { reportException("bulk update", e); return false; } - return false; + return true; } @Override @@ -372,7 +370,7 @@ public abstract class ConfigurateBacking extends AbstractBacking { ConfigurationNode object = readFile(StorageLocation.USER, uuid.toString()); if (object != null) { String name = object.getNode("name").getString(); - user.getPrimaryGroup().setStoredValue(object.getNode(this instanceof JSONBacking ? "primaryGroup" : "primary-group").getString()); + user.getPrimaryGroup().setStoredValue(object.getNode(this instanceof JsonDao ? "primaryGroup" : "primary-group").getString()); Set data = readNodes(object); Set nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet()); @@ -412,7 +410,7 @@ public abstract class ConfigurateBacking extends AbstractBacking { ConfigurationNode data = SimpleConfigurationNode.root(); data.getNode("uuid").setValue(user.getUuid().toString()); data.getNode("name").setValue(user.getName().orElse("null")); - data.getNode(this instanceof JSONBacking ? "primaryGroup" : "primary-group").setValue(user.getPrimaryGroup().getStoredValue().orElse("default")); + data.getNode(this instanceof JsonDao ? 
"primaryGroup" : "primary-group").setValue(user.getPrimaryGroup().getStoredValue().orElse("default")); Set nodes = user.getEnduringNodes().values().stream().map(NodeModel::fromNode).collect(Collectors.toCollection(LinkedHashSet::new)); writeNodes(data, nodes); @@ -499,8 +497,11 @@ public abstract class ConfigurateBacking extends AbstractBacking { @Override public boolean loadGroup(String name) { - Group group = plugin.getGroupManager().getOrMake(name); - group.getIoLock().lock(); + Group group = plugin.getGroupManager().getIfLoaded(name); + if (group != null) { + group.getIoLock().lock(); + } + try { ConfigurationNode object = readFile(StorageLocation.GROUP, name); @@ -508,6 +509,11 @@ public abstract class ConfigurateBacking extends AbstractBacking { return false; } + if (group == null) { + group = plugin.getGroupManager().getOrMake(name); + group.getIoLock().lock(); + } + Set data = readNodes(object); Set nodes = data.stream().map(NodeModel::toNode).collect(Collectors.toSet()); group.setEnduringNodes(nodes); @@ -515,7 +521,9 @@ public abstract class ConfigurateBacking extends AbstractBacking { } catch (Exception e) { return reportException(name, e); } finally { - group.getIoLock().unlock(); + if (group != null) { + group.getIoLock().unlock(); + } } return true; } @@ -633,8 +641,11 @@ public abstract class ConfigurateBacking extends AbstractBacking { @Override public boolean loadTrack(String name) { - Track track = plugin.getTrackManager().getOrMake(name); - track.getIoLock().lock(); + Track track = plugin.getTrackManager().getIfLoaded(name); + if (track != null) { + track.getIoLock().lock(); + } + try { ConfigurationNode object = readFile(StorageLocation.TRACK, name); @@ -642,13 +653,20 @@ public abstract class ConfigurateBacking extends AbstractBacking { return false; } + if (track == null) { + track = plugin.getTrackManager().getOrMake(name); + track.getIoLock().lock(); + } + List groups = object.getNode("groups").getList(TypeToken.of(String.class)); track.setGroups(groups); } catch (Exception e) { return reportException(name, e); } finally { - track.getIoLock().unlock(); + if (track != null) { + track.getIoLock().unlock(); + } } return true; } @@ -720,6 +738,45 @@ public abstract class ConfigurateBacking extends AbstractBacking { return uuidCache.lookupUsername(uuid); } + private static Collection readAttributes(ConfigurationNode entry, String permission) { + Map attributes = entry.getChildrenMap(); + + boolean value = true; + String server = "global"; + String world = "global"; + long expiry = 0L; + ImmutableContextSet context = ImmutableContextSet.empty(); + + if (attributes.containsKey("value")) { + value = attributes.get("value").getBoolean(); + } + if (attributes.containsKey("server")) { + server = attributes.get("server").getString(); + } + if (attributes.containsKey("world")) { + world = attributes.get("world").getString(); + } + if (attributes.containsKey("expiry")) { + expiry = attributes.get("expiry").getLong(); + } + + if (attributes.containsKey("context") && attributes.get("context").hasMapChildren()) { + ConfigurationNode contexts = attributes.get("context"); + context = ContextSetConfigurateSerializer.deserializeContextSet(contexts).makeImmutable(); + } + + ConfigurationNode batchAttribute = attributes.get("permissions"); + if (permission.startsWith("luckperms.batch") && batchAttribute != null && batchAttribute.hasListChildren()) { + List nodes = new ArrayList<>(); + for (ConfigurationNode element : batchAttribute.getChildrenList()) { + 
nodes.add(NodeModel.of(element.getString(), value, server, world, expiry, context)); + } + return nodes; + } else { + return Collections.singleton(NodeModel.of(permission, value, server, world, expiry, context)); + } + } + private static Set readNodes(ConfigurationNode data) { Set nodes = new HashSet<>(); @@ -743,129 +800,102 @@ public abstract class ConfigurateBacking extends AbstractBacking { } String permission = entry.getKey().toString(); - Map attributes = entry.getValue().getChildrenMap(); + nodes.addAll(readAttributes(entry.getValue(), permission)); + } + } - boolean value = true; - String server = "global"; - String world = "global"; - long expiry = 0L; - ImmutableContextSet context = ImmutableContextSet.empty(); + if (data.getNode("parents").hasListChildren()) { + List parts = data.getNode("parents").getChildrenList(); - if (attributes.containsKey("value")) { - value = attributes.get("value").getBoolean(); - } - if (attributes.containsKey("server")) { - server = attributes.get("server").getString(); - } - if (attributes.containsKey("world")) { - world = attributes.get("world").getString(); - } - if (attributes.containsKey("expiry")) { - expiry = attributes.get("expiry").getLong(); + for (ConfigurationNode ent : parts) { + String stringValue = ent.getValue(Types::strictAsString); + if (stringValue != null) { + nodes.add(NodeModel.of("group." + stringValue, true, "global", "global", 0L, ImmutableContextSet.empty())); + continue; } - if (attributes.containsKey("context") && attributes.get("context").hasMapChildren()) { - ConfigurationNode contexts = attributes.get("context"); - context = deserializeContextSet(contexts).makeImmutable(); + if (!ent.hasMapChildren()) { + continue; } - final ConfigurationNode batchAttribute = attributes.get("permissions"); - if (permission.startsWith("luckperms.batch") && batchAttribute != null && batchAttribute.hasListChildren()) { - for (ConfigurationNode element : batchAttribute.getChildrenList()) { - nodes.add(NodeModel.of(element.getString(), value, server, world, expiry, context)); - } - } else { - nodes.add(NodeModel.of(permission, value, server, world, expiry, context)); + Map.Entry entry = Iterables.getFirst(ent.getChildrenMap().entrySet(), null); + if (entry == null || !entry.getValue().hasMapChildren()) { + continue; } + + String permission = "group." + entry.getKey().toString(); + nodes.addAll(readAttributes(entry.getValue(), permission)); } } return nodes; } + private static ConfigurationNode writeAttributes(NodeModel node) { + ConfigurationNode attributes = SimpleConfigurationNode.root(); + attributes.getNode("value").setValue(node.getValue()); + + if (!node.getServer().equals("global")) { + attributes.getNode("server").setValue(node.getServer()); + } + + if (!node.getWorld().equals("global")) { + attributes.getNode("world").setValue(node.getWorld()); + } + + if (node.getExpiry() != 0L) { + attributes.getNode("expiry").setValue(node.getExpiry()); + } + + if (!node.getContexts().isEmpty()) { + attributes.getNode("context").setValue(ContextSetConfigurateSerializer.serializeContextSet(node.getContexts())); + } + + return attributes; + } + private static void writeNodes(ConfigurationNode to, Set nodes) { - ConfigurationNode arr = SimpleConfigurationNode.root(); + ConfigurationNode permsSection = SimpleConfigurationNode.root(); + ConfigurationNode parentsSection = SimpleConfigurationNode.root(); for (NodeModel node : nodes) { // just a raw, default node. 
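
The writeNodes changes around this point split output into a "permissions" section and a new "parents" section, using Node#isGroupNode to route plain group memberships. A short sketch of the distinction, built only from methods already used in this diff; the group and permission names are illustrative.

    import me.lucko.luckperms.api.context.ImmutableContextSet;
    import me.lucko.luckperms.common.node.NodeModel;

    static void parentVersusPermission() {
        // a plain group membership: written to the "parents" section as the bare string "admin"
        NodeModel parent = NodeModel.of("group.admin", true, "global", "global", 0L, ImmutableContextSet.empty());

        // an ordinary permission: still written to the "permissions" section
        NodeModel perm = NodeModel.of("essentials.fly", true, "global", "global", 0L, ImmutableContextSet.empty());

        assert parent.toNode().isGroupNode();
        assert !perm.toNode().isGroupNode();
    }

readNodes accepts both the bare-string form and the attribute-map form under "parents", matching the two shapes writeNodes can emit.
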
- boolean single = node.isValue() && + boolean single = node.getValue() && node.getServer().equalsIgnoreCase("global") && node.getWorld().equalsIgnoreCase("global") && node.getExpiry() == 0L && node.getContexts().isEmpty(); + // try to parse out the group + String group = node.toNode().isGroupNode() ? node.toNode().getGroupName() : null; + // just add a string to the list. if (single) { - arr.getAppendedNode().setValue(node.getPermission()); + + if (group != null) { + parentsSection.getAppendedNode().setValue(group); + continue; + } + + permsSection.getAppendedNode().setValue(node.getPermission()); continue; } - ConfigurationNode attributes = SimpleConfigurationNode.root(); - attributes.getNode("value").setValue(node.isValue()); - - if (!node.getServer().equals("global")) { - attributes.getNode("server").setValue(node.getServer()); + if (group != null) { + ConfigurationNode ent = SimpleConfigurationNode.root(); + ent.getNode(group).setValue(writeAttributes(node)); + parentsSection.getAppendedNode().setValue(ent); + continue; } - if (!node.getWorld().equals("global")) { - attributes.getNode("world").setValue(node.getWorld()); - } - - if (node.getExpiry() != 0L) { - attributes.getNode("expiry").setValue(node.getExpiry()); - } - - if (!node.getContexts().isEmpty()) { - attributes.getNode("context").setValue(serializeContextSet(node.getContexts())); - } - - ConfigurationNode perm = SimpleConfigurationNode.root(); - perm.getNode(node.getPermission()).setValue(attributes); - arr.getAppendedNode().setValue(perm); + ConfigurationNode ent = SimpleConfigurationNode.root(); + ent.getNode(node.getPermission()).setValue(writeAttributes(node)); + permsSection.getAppendedNode().setValue(ent); } - to.getNode("permissions").setValue(arr); - } - - private static ConfigurationNode serializeContextSet(ContextSet contextSet) { - ConfigurationNode data = SimpleConfigurationNode.root(); - Map> map = contextSet.toMultimap().asMap(); - - map.forEach((k, v) -> { - List values = new ArrayList<>(v); - int size = values.size(); - - if (size == 1) { - data.getNode(k).setValue(values.get(0)); - } else if (size > 1) { - data.getNode(k).setValue(values); - } - }); - - return data; - } - - private static MutableContextSet deserializeContextSet(ConfigurationNode data) { - Preconditions.checkArgument(data.hasMapChildren()); - Map dataMap = data.getChildrenMap(); - - ImmutableSetMultimap.Builder map = ImmutableSetMultimap.builder(); - for (Map.Entry e : dataMap.entrySet()) { - String k = e.getKey().toString(); - ConfigurationNode v = e.getValue(); - - if (v.hasListChildren()) { - List values = v.getChildrenList(); - for (ConfigurationNode value : values) { - map.put(k, value.getString()); - } - } else { - map.put(k, v.getString()); - } - } - - return MutableContextSet.fromMultimap(map.build()); + to.getNode("permissions").setValue(permsSection); + to.getNode("parents").setValue(parentsSection); } } diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/HOCONBacking.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/HoconDao.java similarity index 95% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/file/HOCONBacking.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/file/HoconDao.java index b8c47395..dc5b661d 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/HOCONBacking.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/HoconDao.java @@ -35,9 +35,9 @@ import 
java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; -public class HOCONBacking extends ConfigurateBacking { +public class HoconDao extends ConfigurateDao { - public HOCONBacking(LuckPermsPlugin plugin, String dataFolderName) { + public HoconDao(LuckPermsPlugin plugin, String dataFolderName) { super(plugin, "HOCON", ".conf", dataFolderName); } diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/JSONBacking.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/JsonDao.java similarity index 95% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/file/JSONBacking.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/file/JsonDao.java index b8d1c95b..514b61d1 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/JSONBacking.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/JsonDao.java @@ -35,9 +35,9 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; -public class JSONBacking extends ConfigurateBacking { +public class JsonDao extends ConfigurateDao { - public JSONBacking(LuckPermsPlugin plugin, String dataFolderName) { + public JsonDao(LuckPermsPlugin plugin, String dataFolderName) { super(plugin, "JSON", ".json", dataFolderName); } diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/YAMLBacking.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/YamlDao.java similarity index 95% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/file/YAMLBacking.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/file/YamlDao.java index b5063fa9..886bf607 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/YAMLBacking.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/file/YamlDao.java @@ -37,9 +37,9 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; -public class YAMLBacking extends ConfigurateBacking { +public class YamlDao extends ConfigurateDao { - public YAMLBacking(LuckPermsPlugin plugin, String dataFolderName) { + public YamlDao(LuckPermsPlugin plugin, String dataFolderName) { super(plugin, "YAML", ".yml", dataFolderName); } diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacyJSONSchemaMigration.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacyJsonMigration.java similarity index 95% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacyJSONSchemaMigration.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacyJsonMigration.java index 1a86d70d..6b26b87b 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacyJSONSchemaMigration.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacyJsonMigration.java @@ -34,10 +34,11 @@ import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonPrimitive; +import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer; import me.lucko.luckperms.common.node.NodeFactory; import me.lucko.luckperms.common.node.NodeModel; import me.lucko.luckperms.common.plugin.LuckPermsPlugin; -import me.lucko.luckperms.common.storage.backing.file.JSONBacking; +import 
me.lucko.luckperms.common.storage.backing.file.JsonDao; import java.io.BufferedReader; import java.io.BufferedWriter; @@ -53,23 +54,21 @@ import java.util.stream.Collectors; @SuppressWarnings("unchecked") @RequiredArgsConstructor -public class LegacyJSONSchemaMigration implements Runnable { +public class LegacyJsonMigration implements Runnable { private final Gson gson = new GsonBuilder().setPrettyPrinting().create(); private final LuckPermsPlugin plugin; - private final JSONBacking backing; + private final JsonDao backing; private final File oldDataFolder; private final File newDataFolder; - private boolean writeElementToFile(File file, JsonElement element) { + private void writeElementToFile(File file, JsonElement element) { try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) { gson.toJson(element, writer); writer.flush(); - return true; } catch (Throwable t) { plugin.getLog().warn("Exception whilst writing to file: " + file.getAbsolutePath()); t.printStackTrace(); - return false; } } @@ -213,7 +212,7 @@ public class LegacyJSONSchemaMigration implements Runnable { for (NodeModel node : nodes) { // just a raw, default node. - boolean single = node.isValue() && + boolean single = node.getValue() && node.getServer().equalsIgnoreCase("global") && node.getWorld().equalsIgnoreCase("global") && node.getExpiry() == 0L && @@ -226,7 +225,7 @@ public class LegacyJSONSchemaMigration implements Runnable { } JsonObject attributes = new JsonObject(); - attributes.addProperty("value", node.isValue()); + attributes.addProperty("value", node.getValue()); if (!node.getServer().equals("global")) { attributes.addProperty("server", node.getServer()); @@ -241,7 +240,7 @@ public class LegacyJSONSchemaMigration implements Runnable { } if (!node.getContexts().isEmpty()) { - attributes.add("context", node.getContextsAsJson()); + attributes.add("context", ContextSetJsonSerializer.serializeContextSet(node.getContexts())); } JsonObject perm = new JsonObject(); diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacySQLSchemaMigration.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacySqlMigration.java similarity index 95% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacySQLSchemaMigration.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacySqlMigration.java index 7603170e..b95f0386 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacySQLSchemaMigration.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacySqlMigration.java @@ -30,9 +30,10 @@ import lombok.RequiredArgsConstructor; import com.google.common.collect.Lists; import com.google.gson.reflect.TypeToken; +import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer; import me.lucko.luckperms.common.node.NodeFactory; import me.lucko.luckperms.common.node.NodeModel; -import me.lucko.luckperms.common.storage.backing.sql.SQLBacking; +import me.lucko.luckperms.common.storage.backing.sql.SqlDao; import java.lang.reflect.Type; import java.sql.Connection; @@ -50,9 +51,9 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; @RequiredArgsConstructor -public class LegacySQLSchemaMigration implements Runnable { +public class LegacySqlMigration implements Runnable { private static final Type NODE_MAP_TYPE = new TypeToken>() {}.getType(); - private final SQLBacking backing; + private 
final SqlDao backing; @Override public void run() { @@ -162,11 +163,11 @@ public class LegacySQLSchemaMigration implements Runnable { for (NodeModel nd : nodes) { ps.setString(1, uuid.toString()); ps.setString(2, nd.getPermission()); - ps.setBoolean(3, nd.isValue()); + ps.setBoolean(3, nd.getValue()); ps.setString(4, nd.getServer()); ps.setString(5, nd.getWorld()); ps.setLong(6, nd.getExpiry()); - ps.setString(7, nd.serializeContext()); + ps.setString(7, backing.getGson().toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts()))); ps.addBatch(); } ps.executeBatch(); @@ -241,11 +242,11 @@ public class LegacySQLSchemaMigration implements Runnable { for (NodeModel nd : nodes) { ps.setString(1, name); ps.setString(2, nd.getPermission()); - ps.setBoolean(3, nd.isValue()); + ps.setBoolean(3, nd.getValue()); ps.setString(4, nd.getServer()); ps.setString(5, nd.getWorld()); ps.setLong(6, nd.getExpiry()); - ps.setString(7, nd.serializeContext()); + ps.setString(7, backing.getGson().toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts()))); ps.addBatch(); } ps.executeBatch(); diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacyYAMLSchemaMigration.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacyYamlMigration.java similarity index 95% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacyYAMLSchemaMigration.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacyYamlMigration.java index bdae1553..efb7dc49 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacyYAMLSchemaMigration.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/legacy/LegacyYamlMigration.java @@ -30,7 +30,7 @@ import lombok.RequiredArgsConstructor; import me.lucko.luckperms.common.node.NodeFactory; import me.lucko.luckperms.common.node.NodeModel; import me.lucko.luckperms.common.plugin.LuckPermsPlugin; -import me.lucko.luckperms.common.storage.backing.file.YAMLBacking; +import me.lucko.luckperms.common.storage.backing.file.YamlDao; import org.yaml.snakeyaml.DumperOptions; import org.yaml.snakeyaml.Yaml; @@ -53,12 +53,14 @@ import java.util.stream.Collectors; @SuppressWarnings("unchecked") @RequiredArgsConstructor -public class LegacyYAMLSchemaMigration implements Runnable { +public class LegacyYamlMigration implements Runnable { private final LuckPermsPlugin plugin; - private final YAMLBacking backing; + private final YamlDao backing; private final File oldDataFolder; private final File newDataFolder; + private final Yaml yaml = getYaml(); + private static Yaml getYaml() { DumperOptions options = new DumperOptions(); options.setAllowUnicode(true); @@ -66,21 +68,19 @@ public class LegacyYAMLSchemaMigration implements Runnable { return new Yaml(options); } - public boolean writeMapToFile(File file, Map values) { + public void writeMapToFile(File file, Map values) { try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) { - getYaml().dump(values, writer); + yaml.dump(values, writer); writer.flush(); - return true; } catch (Throwable t) { plugin.getLog().warn("Exception whilst writing to file: " + file.getAbsolutePath()); t.printStackTrace(); - return false; } } public Map readMapFromFile(File file) { try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) { - return (Map) getYaml().load(reader); + return (Map) yaml.load(reader); } catch 
(Throwable t) { plugin.getLog().warn("Exception whilst reading from file: " + file.getAbsolutePath()); t.printStackTrace(); @@ -211,7 +211,7 @@ public class LegacyYAMLSchemaMigration implements Runnable { for (NodeModel node : nodes) { // just a raw, default node. - boolean single = node.isValue() && + boolean single = node.getValue() && node.getServer().equalsIgnoreCase("global") && node.getWorld().equalsIgnoreCase("global") && node.getExpiry() == 0L && @@ -228,7 +228,7 @@ public class LegacyYAMLSchemaMigration implements Runnable { // create a map of node attributes Map attributes = new LinkedHashMap<>(); - attributes.put("value", node.isValue()); + attributes.put("value", node.getValue()); if (!node.getServer().equals("global")) { attributes.put("server", node.getServer()); diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/mongodb/MongoDBBacking.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/mongodb/MongoDao.java similarity index 66% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/mongodb/MongoDBBacking.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/mongodb/MongoDao.java index 28f9e247..7f42fd09 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/mongodb/MongoDBBacking.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/mongodb/MongoDao.java @@ -53,7 +53,7 @@ import me.lucko.luckperms.common.node.NodeModel; import me.lucko.luckperms.common.plugin.LuckPermsPlugin; import me.lucko.luckperms.common.references.UserIdentifier; import me.lucko.luckperms.common.storage.DatastoreConfiguration; -import me.lucko.luckperms.common.storage.backing.AbstractBacking; +import me.lucko.luckperms.common.storage.backing.AbstractDao; import org.bson.Document; @@ -64,24 +64,14 @@ import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; -import java.util.Optional; +import java.util.Objects; import java.util.Set; import java.util.UUID; -import java.util.concurrent.Callable; import java.util.function.Function; import java.util.stream.Collectors; @SuppressWarnings("unchecked") -public class MongoDBBacking extends AbstractBacking { - - private static T call(Callable c, T def) { - try { - return c.call(); - } catch (Exception e) { - e.printStackTrace(); - return def; - } - } +public class MongoDao extends AbstractDao { private final DatastoreConfiguration configuration; private MongoClient mongoClient; @@ -90,12 +80,18 @@ public class MongoDBBacking extends AbstractBacking { @Getter private final String prefix; - public MongoDBBacking(LuckPermsPlugin plugin, DatastoreConfiguration configuration, String prefix) { + public MongoDao(LuckPermsPlugin plugin, DatastoreConfiguration configuration, String prefix) { super(plugin, "MongoDB"); this.configuration = configuration; this.prefix = prefix; } + private boolean reportException(Exception ex) { + plugin.getLog().warn("Exception thrown whilst performing i/o: "); + ex.printStackTrace(); + return false; + } + @Override public void init() { MongoCredential credential = null; @@ -162,7 +158,7 @@ public class MongoDBBacking extends AbstractBacking { @Override public boolean logAction(LogEntry entry) { - return call(() -> { + try { MongoCollection c = database.getCollection(prefix + "action"); //noinspection deprecation @@ -179,14 +175,16 @@ public class MongoDBBacking extends AbstractBacking { } c.insertOne(doc, new InsertOneOptions()); - return true; - }, false); + } catch 
(Exception e) { + return reportException(e); + } + return true; } @Override public Log getLog() { - return call(() -> { - final Log.Builder log = Log.builder(); + Log.Builder log = Log.builder(); + try { MongoCollection c = database.getCollection(prefix + "action"); try (MongoCursor cursor = c.find().iterator()) { @@ -210,14 +208,16 @@ public class MongoDBBacking extends AbstractBacking { log.add(e); } } - - return log.build(); - }, null); + } catch (Exception e) { + reportException(e); + return null; + } + return log.build(); } @Override public boolean applyBulkUpdate(BulkUpdate bulkUpdate) { - return call(() -> { + try { if (bulkUpdate.getDataType().isIncludingUsers()) { MongoCollection c = database.getCollection(prefix + "users"); @@ -234,20 +234,20 @@ public class MongoDBBacking extends AbstractBacking { nodes.add(NodeModel.fromNode(node)); } - Set results = nodes.stream() - .map(n -> Optional.ofNullable(bulkUpdate.apply(n))) - .filter(Optional::isPresent) - .map(Optional::get) - .map(NodeModel::toNode) + Set results = nodes.stream() + .map(bulkUpdate::apply) + .filter(Objects::nonNull) .collect(Collectors.toSet()); - Document permsDoc = new Document(); - for (Map.Entry e : convert(exportToLegacy(results)).entrySet()) { - permsDoc.append(e.getKey(), e.getValue()); - } + if (!nodes.equals(results)) { + Document permsDoc = new Document(); + for (Map.Entry e : convert(exportToLegacy(results.stream().map(NodeModel::toNode).collect(Collectors.toList()))).entrySet()) { + permsDoc.append(e.getKey(), e.getValue()); + } - d.put("perms", perms); - c.replaceOne(new Document("_id", uuid), d); + d.put("perms", perms); + c.replaceOne(new Document("_id", uuid), d); + } } } } @@ -268,26 +268,28 @@ public class MongoDBBacking extends AbstractBacking { nodes.add(NodeModel.fromNode(node)); } - Set results = nodes.stream() - .map(n -> Optional.ofNullable(bulkUpdate.apply(n))) - .filter(Optional::isPresent) - .map(Optional::get) - .map(NodeModel::toNode) + Set results = nodes.stream() + .map(bulkUpdate::apply) + .filter(Objects::nonNull) .collect(Collectors.toSet()); - Document permsDoc = new Document(); - for (Map.Entry e : convert(exportToLegacy(results)).entrySet()) { - permsDoc.append(e.getKey(), e.getValue()); - } + if (!nodes.equals(results)) { + Document permsDoc = new Document(); + for (Map.Entry e : convert(exportToLegacy(results.stream().map(NodeModel::toNode).collect(Collectors.toList()))).entrySet()) { + permsDoc.append(e.getKey(), e.getValue()); + } - d.put("perms", perms); - c.replaceOne(new Document("_id", holder), d); + d.put("perms", perms); + c.replaceOne(new Document("_id", holder), d); + } } } } - - return true; - }, false); + } catch (Exception e) { + reportException(e); + return false; + } + return true; } @Override @@ -295,80 +297,73 @@ public class MongoDBBacking extends AbstractBacking { User user = plugin.getUserManager().getOrMake(UserIdentifier.of(uuid, username)); user.getIoLock().lock(); try { - return call(() -> { - MongoCollection c = database.getCollection(prefix + "users"); + MongoCollection c = database.getCollection(prefix + "users"); - try (MongoCursor cursor = c.find(new Document("_id", user.getUuid())).iterator()) { - if (cursor.hasNext()) { - // User exists, let's load. 
- Document d = cursor.next(); - user.setEnduringNodes(revert((Map) d.get("perms")).entrySet().stream() - .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue())) - .collect(Collectors.toSet()) - ); - user.getPrimaryGroup().setStoredValue(d.getString("primaryGroup")); - user.setName(name, true); + try (MongoCursor cursor = c.find(new Document("_id", user.getUuid())).iterator()) { + if (cursor.hasNext()) { + // User exists, let's load. + Document d = cursor.next(); + user.setEnduringNodes(revert((Map) d.get("perms")).entrySet().stream() + .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue())) + .collect(Collectors.toSet()) + ); + user.getPrimaryGroup().setStoredValue(d.getString("primaryGroup")); + user.setName(name, true); - boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false); - if (user.getName().isPresent() && (name == null || !user.getName().get().equalsIgnoreCase(name))) { - save = true; - } + boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false); + if (user.getName().isPresent() && (name == null || !user.getName().get().equalsIgnoreCase(name))) { + save = true; + } - if (save) { - c.replaceOne(new Document("_id", user.getUuid()), fromUser(user)); - } - } else { - if (GenericUserManager.shouldSave(user)) { - user.clearNodes(); - user.getPrimaryGroup().setStoredValue(null); - plugin.getUserManager().giveDefaultIfNeeded(user, false); - } + if (save) { + c.replaceOne(new Document("_id", user.getUuid()), fromUser(user)); + } + } else { + if (GenericUserManager.shouldSave(user)) { + user.clearNodes(); + user.getPrimaryGroup().setStoredValue(null); + plugin.getUserManager().giveDefaultIfNeeded(user, false); } } - return true; - }, false); + } + } catch (Exception e) { + return reportException(e); } finally { user.getIoLock().unlock(); user.getRefreshBuffer().requestDirectly(); } + return true; } @Override public boolean saveUser(User user) { - if (!GenericUserManager.shouldSave(user)) { - user.getIoLock().lock(); - try { - return call(() -> { - MongoCollection c = database.getCollection(prefix + "users"); - return c.deleteOne(new Document("_id", user.getUuid())).wasAcknowledged(); - }, false); - } finally { - user.getIoLock().unlock(); - } - } - user.getIoLock().lock(); try { - return call(() -> { + if (!GenericUserManager.shouldSave(user)) { MongoCollection c = database.getCollection(prefix + "users"); - try (MongoCursor cursor = c.find(new Document("_id", user.getUuid())).iterator()) { - if (!cursor.hasNext()) { - c.insertOne(fromUser(user)); - } else { - c.replaceOne(new Document("_id", user.getUuid()), fromUser(user)); - } + return c.deleteOne(new Document("_id", user.getUuid())).wasAcknowledged(); + } + + MongoCollection c = database.getCollection(prefix + "users"); + try (MongoCursor cursor = c.find(new Document("_id", user.getUuid())).iterator()) { + if (!cursor.hasNext()) { + c.insertOne(fromUser(user)); + } else { + c.replaceOne(new Document("_id", user.getUuid()), fromUser(user)); } - return true; - }, false); + } + } catch (Exception e) { + return reportException(e); } finally { user.getIoLock().unlock(); } + return true; } @Override public Set getUniqueUsers() { Set uuids = new HashSet<>(); - boolean success = call(() -> { + try { MongoCollection c = database.getCollection(prefix + "users"); try (MongoCursor cursor = c.find().iterator()) { @@ -377,17 +372,16 @@ public class MongoDBBacking extends AbstractBacking { uuids.add(d.get("_id", UUID.class)); } } - - return true; - }, false); - - return success ? 
uuids : null; + } catch (Exception e) { + return null; + } + return uuids; } @Override public List> getUsersWithPermission(String permission) { ImmutableList.Builder> held = ImmutableList.builder(); - boolean success = call(() -> { + try { MongoCollection c = database.getCollection(prefix + "users"); try (MongoCursor cursor = c.find().iterator()) { @@ -407,10 +401,11 @@ public class MongoDBBacking extends AbstractBacking { } } } - return true; - }, false); - - return success ? held.build() : null; + } catch (Exception e) { + reportException(e); + return null; + } + return held.build(); } @Override @@ -418,26 +413,26 @@ public class MongoDBBacking extends AbstractBacking { Group group = plugin.getGroupManager().getOrMake(name); group.getIoLock().lock(); try { - return call(() -> { - MongoCollection c = database.getCollection(prefix + "groups"); + MongoCollection c = database.getCollection(prefix + "groups"); - try (MongoCursor cursor = c.find(new Document("_id", group.getName())).iterator()) { - if (cursor.hasNext()) { - // Group exists, let's load. - Document d = cursor.next(); - group.setEnduringNodes(revert((Map) d.get("perms")).entrySet().stream() - .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue())) - .collect(Collectors.toSet()) - ); - } else { - c.insertOne(fromGroup(group)); - } + try (MongoCursor cursor = c.find(new Document("_id", group.getName())).iterator()) { + if (cursor.hasNext()) { + // Group exists, let's load. + Document d = cursor.next(); + group.setEnduringNodes(revert((Map) d.get("perms")).entrySet().stream() + .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue())) + .collect(Collectors.toSet()) + ); + } else { + c.insertOne(fromGroup(group)); } - return true; - }, false); + } + } catch (Exception e) { + return reportException(e); } finally { group.getIoLock().unlock(); } + return true; } @Override @@ -445,22 +440,22 @@ public class MongoDBBacking extends AbstractBacking { Group group = plugin.getGroupManager().getOrMake(name); group.getIoLock().lock(); try { - return call(() -> { - MongoCollection c = database.getCollection(prefix + "groups"); + MongoCollection c = database.getCollection(prefix + "groups"); - try (MongoCursor cursor = c.find(new Document("_id", group.getName())).iterator()) { - if (cursor.hasNext()) { - Document d = cursor.next(); + try (MongoCursor cursor = c.find(new Document("_id", group.getName())).iterator()) { + if (cursor.hasNext()) { + Document d = cursor.next(); - group.setEnduringNodes(revert((Map) d.get("perms")).entrySet().stream() - .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue())) - .collect(Collectors.toSet()) - ); - return true; - } - return false; + group.setEnduringNodes(revert((Map) d.get("perms")).entrySet().stream() + .map(e -> NodeFactory.fromSerializedNode(e.getKey(), e.getValue())) + .collect(Collectors.toSet()) + ); + return true; } - }, false); + return false; + } + } catch (Exception e) { + return reportException(e); } finally { group.getIoLock().unlock(); } @@ -469,39 +464,36 @@ public class MongoDBBacking extends AbstractBacking { @Override public boolean loadAllGroups() { List groups = new ArrayList<>(); - boolean success = call(() -> { + try { MongoCollection c = database.getCollection(prefix + "groups"); - boolean b = true; try (MongoCursor cursor = c.find().iterator()) { while (cursor.hasNext()) { String name = cursor.next().getString("_id"); - if (!loadGroup(name)) { - b = false; - } + loadGroup(name); groups.add(name); } } - return b; - }, false); - - if (success) { - 
GroupManager gm = plugin.getGroupManager(); - gm.getAll().values().stream() - .filter(g -> !groups.contains(g.getName())) - .forEach(gm::unload); + } catch (Exception e) { + reportException(e); + return false; } - return success; + + GroupManager gm = plugin.getGroupManager(); + gm.getAll().values().stream() + .filter(g -> !groups.contains(g.getName())) + .forEach(gm::unload); + return true; } @Override public boolean saveGroup(Group group) { group.getIoLock().lock(); try { - return call(() -> { - MongoCollection c = database.getCollection(prefix + "groups"); - return c.replaceOne(new Document("_id", group.getName()), fromGroup(group)).wasAcknowledged(); - }, false); + MongoCollection c = database.getCollection(prefix + "groups"); + return c.replaceOne(new Document("_id", group.getName()), fromGroup(group)).wasAcknowledged(); + } catch (Exception e) { + return reportException(e); } finally { group.getIoLock().unlock(); } @@ -510,24 +502,25 @@ public class MongoDBBacking extends AbstractBacking { @Override public boolean deleteGroup(Group group) { group.getIoLock().lock(); - boolean success; try { - success = call(() -> { - MongoCollection c = database.getCollection(prefix + "groups"); - return c.deleteOne(new Document("_id", group.getName())).wasAcknowledged(); - }, false); + MongoCollection c = database.getCollection(prefix + "groups"); + if (!c.deleteOne(new Document("_id", group.getName())).wasAcknowledged()) { + throw new RuntimeException(); + } + } catch (Exception e) { + return reportException(e); } finally { group.getIoLock().unlock(); } - if (success) plugin.getGroupManager().unload(group); - return success; + plugin.getGroupManager().unload(group); + return true; } @Override public List> getGroupsWithPermission(String permission) { ImmutableList.Builder> held = ImmutableList.builder(); - boolean success = call(() -> { + try { MongoCollection c = database.getCollection(prefix + "groups"); try (MongoCursor cursor = c.find().iterator()) { @@ -547,10 +540,11 @@ public class MongoDBBacking extends AbstractBacking { } } } - return true; - }, false); - - return success ? 
held.build() : null; + } catch (Exception e) { + reportException(e); + return null; + } + return held.build(); } @Override @@ -558,22 +552,22 @@ public class MongoDBBacking extends AbstractBacking { Track track = plugin.getTrackManager().getOrMake(name); track.getIoLock().lock(); try { - return call(() -> { - MongoCollection c = database.getCollection(prefix + "tracks"); + MongoCollection c = database.getCollection(prefix + "tracks"); - try (MongoCursor cursor = c.find(new Document("_id", track.getName())).iterator()) { - if (!cursor.hasNext()) { - c.insertOne(fromTrack(track)); - } else { - Document d = cursor.next(); - track.setGroups((List) d.get("groups")); - } + try (MongoCursor cursor = c.find(new Document("_id", track.getName())).iterator()) { + if (!cursor.hasNext()) { + c.insertOne(fromTrack(track)); + } else { + Document d = cursor.next(); + track.setGroups((List) d.get("groups")); } - return true; - }, false); + } + } catch (Exception e) { + return reportException(e); } finally { track.getIoLock().unlock(); } + return true; } @Override @@ -581,18 +575,18 @@ public class MongoDBBacking extends AbstractBacking { Track track = plugin.getTrackManager().getOrMake(name); track.getIoLock().lock(); try { - return call(() -> { - MongoCollection c = database.getCollection(prefix + "tracks"); + MongoCollection c = database.getCollection(prefix + "tracks"); - try (MongoCursor cursor = c.find(new Document("_id", track.getName())).iterator()) { - if (cursor.hasNext()) { - Document d = cursor.next(); - track.setGroups((List) d.get("groups")); - return true; - } - return false; + try (MongoCursor cursor = c.find(new Document("_id", track.getName())).iterator()) { + if (cursor.hasNext()) { + Document d = cursor.next(); + track.setGroups((List) d.get("groups")); + return true; } - }, false); + return false; + } + } catch (Exception e) { + return reportException(e); } finally { track.getIoLock().unlock(); } @@ -601,39 +595,36 @@ public class MongoDBBacking extends AbstractBacking { @Override public boolean loadAllTracks() { List tracks = new ArrayList<>(); - boolean success = call(() -> { + try { MongoCollection c = database.getCollection(prefix + "tracks"); - boolean b = true; try (MongoCursor cursor = c.find().iterator()) { while (cursor.hasNext()) { String name = cursor.next().getString("_id"); - if (!loadTrack(name)) { - b = false; - } + loadTrack(name); tracks.add(name); } } - return b; - }, false); - - if (success) { - TrackManager tm = plugin.getTrackManager(); - tm.getAll().values().stream() - .filter(t -> !tracks.contains(t.getName())) - .forEach(tm::unload); + } catch (Exception e) { + reportException(e); + return false; } - return success; + + TrackManager tm = plugin.getTrackManager(); + tm.getAll().values().stream() + .filter(t -> !tracks.contains(t.getName())) + .forEach(tm::unload); + return true; } @Override public boolean saveTrack(Track track) { track.getIoLock().lock(); try { - return call(() -> { - MongoCollection c = database.getCollection(prefix + "tracks"); - return c.replaceOne(new Document("_id", track.getName()), fromTrack(track)).wasAcknowledged(); - }, false); + MongoCollection c = database.getCollection(prefix + "tracks"); + return c.replaceOne(new Document("_id", track.getName()), fromTrack(track)).wasAcknowledged(); + } catch (Exception e) { + return reportException(e); } finally { track.getIoLock().unlock(); } @@ -642,23 +633,24 @@ public class MongoDBBacking extends AbstractBacking { @Override public boolean deleteTrack(Track track) { track.getIoLock().lock(); - 
boolean success; try { - success = call(() -> { - MongoCollection c = database.getCollection(prefix + "tracks"); - return c.deleteOne(new Document("_id", track.getName())).wasAcknowledged(); - }, false); + MongoCollection c = database.getCollection(prefix + "tracks"); + if (!c.deleteOne(new Document("_id", track.getName())).wasAcknowledged()) { + throw new RuntimeException(); + } + } catch (Exception e) { + return reportException(e); } finally { track.getIoLock().unlock(); } - if (success) plugin.getTrackManager().unload(track); - return success; + plugin.getTrackManager().unload(track); + return true; } @Override public boolean saveUUIDData(UUID uuid, String username) { - return call(() -> { + try { MongoCollection c = database.getCollection(prefix + "uuid"); try (MongoCursor cursor = c.find(new Document("_id", uuid)).iterator()) { @@ -668,14 +660,15 @@ public class MongoDBBacking extends AbstractBacking { c.insertOne(new Document("_id", uuid).append("name", username.toLowerCase())); } } - - return true; - }, false); + } catch (Exception e) { + return reportException(e); + } + return true; } @Override public UUID getUUID(String username) { - return call(() -> { + try { MongoCollection c = database.getCollection(prefix + "uuid"); try (MongoCursor cursor = c.find(new Document("name", username.toLowerCase())).iterator()) { @@ -684,12 +677,15 @@ public class MongoDBBacking extends AbstractBacking { } } return null; - }, null); + } catch (Exception e) { + reportException(e); + return null; + } } @Override public String getName(UUID uuid) { - return call(() -> { + try { MongoCollection c = database.getCollection(prefix + "uuid"); try (MongoCursor cursor = c.find(new Document("_id", uuid)).iterator()) { @@ -698,7 +694,10 @@ public class MongoDBBacking extends AbstractBacking { } } return null; - }, null); + } catch (Exception e) { + reportException(e); + return null; + } } /* MongoDB does not allow '.' or '$' in key names. 
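Note on the MongoDao hunks above: they all follow the same error-handling shift. The old Callable-based call(task, default) helper is removed, and each storage method now runs its MongoDB calls inside a plain try/catch, funnelling failures through the new reportException(..) method and returning true only on the happy path. A minimal, self-contained sketch of that shape follows; SimpleDao and doWork are illustrative names only, not part of the codebase.

public class SimpleDao {

    // mirrors MongoDao#reportException: log the failure and signal false to the caller
    private boolean reportException(Exception ex) {
        System.err.println("Exception thrown whilst performing i/o:");
        ex.printStackTrace();
        return false;
    }

    public boolean doWork(boolean fail) {
        try {
            if (fail) {
                throw new IllegalStateException("simulated i/o failure");
            }
            // a real dao would perform its MongoDB reads/writes here
        } catch (Exception e) {
            return reportException(e);
        }
        return true;
    }

    public static void main(String[] args) {
        SimpleDao dao = new SimpleDao();
        System.out.println(dao.doWork(false)); // true
        System.out.println(dao.doWork(true));  // false, after the stack trace is printed
    }
}

This keeps the boolean success/failure contract of the AbstractDao methods while dropping the extra lambda allocation and the silent default value returned by the old helper.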
diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/WrappedConnection.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/NonClosableConnection.java similarity index 89% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/WrappedConnection.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/NonClosableConnection.java index 7171825a..7aec1252 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/WrappedConnection.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/NonClosableConnection.java @@ -32,21 +32,17 @@ import java.sql.Connection; import java.sql.SQLException; @AllArgsConstructor -public class WrappedConnection implements Connection { +public class NonClosableConnection implements Connection { @Delegate(excludes = Exclude.class) private Connection delegate; - private final boolean shouldClose; - @Override public void close() throws SQLException { - if (shouldClose) { - delegate.close(); - } + } private interface Exclude { - void close(); + void close() throws SQLException; } } diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/SQLBacking.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/SqlDao.java similarity index 95% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/SQLBacking.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/SqlDao.java index 950e866a..302b89c6 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/SQLBacking.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/SqlDao.java @@ -30,6 +30,7 @@ import lombok.Getter; import com.google.common.collect.ImmutableList; import com.google.common.collect.Maps; import com.google.gson.Gson; +import com.google.gson.JsonObject; import com.google.gson.reflect.TypeToken; import me.lucko.luckperms.api.HeldPermission; @@ -37,6 +38,7 @@ import me.lucko.luckperms.api.LogEntry; import me.lucko.luckperms.api.Node; import me.lucko.luckperms.common.actionlog.Log; import me.lucko.luckperms.common.bulkupdate.BulkUpdate; +import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer; import me.lucko.luckperms.common.managers.GenericUserManager; import me.lucko.luckperms.common.managers.GroupManager; import me.lucko.luckperms.common.managers.TrackManager; @@ -47,9 +49,9 @@ import me.lucko.luckperms.common.node.NodeHeldPermission; import me.lucko.luckperms.common.node.NodeModel; import me.lucko.luckperms.common.plugin.LuckPermsPlugin; import me.lucko.luckperms.common.references.UserIdentifier; -import me.lucko.luckperms.common.storage.backing.AbstractBacking; -import me.lucko.luckperms.common.storage.backing.legacy.LegacySQLSchemaMigration; -import me.lucko.luckperms.common.storage.backing.sql.provider.SQLProvider; +import me.lucko.luckperms.common.storage.backing.AbstractDao; +import me.lucko.luckperms.common.storage.backing.legacy.LegacySqlMigration; +import me.lucko.luckperms.common.storage.backing.sql.provider.AbstractConnectionFactory; import java.io.BufferedReader; import java.io.InputStream; @@ -72,7 +74,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.Function; import java.util.stream.Collectors; -public class SQLBacking extends AbstractBacking { +public class SqlDao extends AbstractDao { private static final Type LIST_STRING_TYPE = new TypeToken>(){}.getType(); private static final 
String USER_PERMISSIONS_SELECT = "SELECT permission, value, server, world, expiry, contexts FROM {prefix}user_permissions WHERE uuid=?"; @@ -118,12 +120,12 @@ public class SQLBacking extends AbstractBacking { private final Gson gson; @Getter - private final SQLProvider provider; + private final AbstractConnectionFactory provider; @Getter private final Function prefix; - public SQLBacking(LuckPermsPlugin plugin, SQLProvider provider, String prefix) { + public SqlDao(LuckPermsPlugin plugin, AbstractConnectionFactory provider, String prefix) { super(plugin, provider.getName()); this.provider = provider; this.prefix = s -> s.replace("{prefix}", prefix); @@ -189,7 +191,7 @@ public class SQLBacking extends AbstractBacking { plugin.getLog().severe("Starting migration from legacy schema. This could take a while...."); plugin.getLog().severe("Please do not stop your server while the migration takes place."); - new LegacySQLSchemaMigration(this).run(); + new LegacySqlMigration(this).run(); } } @@ -322,7 +324,7 @@ public class SQLBacking extends AbstractBacking { String world = rs.getString("world"); long expiry = rs.getLong("expiry"); String contexts = rs.getString("contexts"); - data.add(NodeModel.deserialize(permission, value, server, world, expiry, contexts)); + data.add(deserializeNode(permission, value, server, world, expiry, contexts)); } } } @@ -422,7 +424,7 @@ public class SQLBacking extends AbstractBacking { String world = rs.getString("world"); long expiry = rs.getLong("expiry"); String contexts = rs.getString("contexts"); - remote.add(NodeModel.deserialize(permission, value, server, world, expiry, contexts)); + remote.add(deserializeNode(permission, value, server, world, expiry, contexts)); } } } @@ -443,11 +445,11 @@ public class SQLBacking extends AbstractBacking { for (NodeModel nd : toRemove) { ps.setString(1, user.getUuid().toString()); ps.setString(2, nd.getPermission()); - ps.setBoolean(3, nd.isValue()); + ps.setBoolean(3, nd.getValue()); ps.setString(4, nd.getServer()); ps.setString(5, nd.getWorld()); ps.setLong(6, nd.getExpiry()); - ps.setString(7, nd.serializeContext()); + ps.setString(7, gson.toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts()))); ps.addBatch(); } ps.executeBatch(); @@ -464,11 +466,11 @@ public class SQLBacking extends AbstractBacking { for (NodeModel nd : toAdd) { ps.setString(1, user.getUuid().toString()); ps.setString(2, nd.getPermission()); - ps.setBoolean(3, nd.isValue()); + ps.setBoolean(3, nd.getValue()); ps.setString(4, nd.getServer()); ps.setString(5, nd.getWorld()); ps.setLong(6, nd.getExpiry()); - ps.setString(7, nd.serializeContext()); + ps.setString(7, gson.toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts()))); ps.addBatch(); } ps.executeBatch(); @@ -551,7 +553,7 @@ public class SQLBacking extends AbstractBacking { long expiry = rs.getLong("expiry"); String contexts = rs.getString("contexts"); - NodeModel data = NodeModel.deserialize(permission, value, server, world, expiry, contexts); + NodeModel data = deserializeNode(permission, value, server, world, expiry, contexts); held.add(NodeHeldPermission.of(holder, data)); } } @@ -633,7 +635,7 @@ public class SQLBacking extends AbstractBacking { String world = rs.getString("world"); long expiry = rs.getLong("expiry"); String contexts = rs.getString("contexts"); - data.add(NodeModel.deserialize(permission, value, server, world, expiry, contexts)); + data.add(deserializeNode(permission, value, server, world, expiry, contexts)); } } } @@ -720,7 +722,7 @@ public class 
SQLBacking extends AbstractBacking { String world = rs.getString("world"); long expiry = rs.getLong("expiry"); String contexts = rs.getString("contexts"); - remote.add(NodeModel.deserialize(permission, value, server, world, expiry, contexts)); + remote.add(deserializeNode(permission, value, server, world, expiry, contexts)); } } } @@ -742,11 +744,11 @@ public class SQLBacking extends AbstractBacking { for (NodeModel nd : toRemove) { ps.setString(1, group.getName()); ps.setString(2, nd.getPermission()); - ps.setBoolean(3, nd.isValue()); + ps.setBoolean(3, nd.getValue()); ps.setString(4, nd.getServer()); ps.setString(5, nd.getWorld()); ps.setLong(6, nd.getExpiry()); - ps.setString(7, nd.serializeContext()); + ps.setString(7, gson.toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts()))); ps.addBatch(); } ps.executeBatch(); @@ -763,11 +765,11 @@ public class SQLBacking extends AbstractBacking { for (NodeModel nd : toAdd) { ps.setString(1, group.getName()); ps.setString(2, nd.getPermission()); - ps.setBoolean(3, nd.isValue()); + ps.setBoolean(3, nd.getValue()); ps.setString(4, nd.getServer()); ps.setString(5, nd.getWorld()); ps.setLong(6, nd.getExpiry()); - ps.setString(7, nd.serializeContext()); + ps.setString(7, gson.toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts()))); ps.addBatch(); } ps.executeBatch(); @@ -825,7 +827,7 @@ public class SQLBacking extends AbstractBacking { long expiry = rs.getLong("expiry"); String contexts = rs.getString("contexts"); - NodeModel data = NodeModel.deserialize(permission, value, server, world, expiry, contexts); + NodeModel data = deserializeNode(permission, value, server, world, expiry, contexts); held.add(NodeHeldPermission.of(holder, data)); } } @@ -1113,4 +1115,9 @@ public class SQLBacking extends AbstractBacking { return Maps.immutableEntry(toAdd, toRemove); } + + private NodeModel deserializeNode(String permission, boolean value, String server, String world, long expiry, String contexts) { + JsonObject context = gson.fromJson(contexts, JsonObject.class); + return NodeModel.of(permission, value, server, world, expiry, ContextSetJsonSerializer.deserializeContextSet(context).makeImmutable()); + } } diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/SQLProvider.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/AbstractConnectionFactory.java similarity index 90% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/SQLProvider.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/AbstractConnectionFactory.java index 4555f8d9..062e290c 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/SQLProvider.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/AbstractConnectionFactory.java @@ -28,14 +28,13 @@ package me.lucko.luckperms.common.storage.backing.sql.provider; import lombok.Getter; import lombok.RequiredArgsConstructor; -import me.lucko.luckperms.common.storage.backing.sql.WrappedConnection; - +import java.sql.Connection; import java.sql.SQLException; import java.util.Collections; import java.util.Map; @RequiredArgsConstructor -public abstract class SQLProvider { +public abstract class AbstractConnectionFactory { @Getter private final String name; @@ -48,6 +47,6 @@ public abstract class SQLProvider { return Collections.emptyMap(); } - public abstract WrappedConnection getConnection() throws SQLException; + public 
abstract Connection getConnection() throws SQLException; } diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/FlatfileProvider.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/file/FlatfileConnectionFactory.java similarity index 80% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/FlatfileProvider.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/file/FlatfileConnectionFactory.java index b8aacf50..295423f6 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/FlatfileProvider.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/file/FlatfileConnectionFactory.java @@ -23,9 +23,10 @@ * SOFTWARE. */ -package me.lucko.luckperms.common.storage.backing.sql.provider; +package me.lucko.luckperms.common.storage.backing.sql.provider.file; -import me.lucko.luckperms.common.storage.backing.sql.WrappedConnection; +import me.lucko.luckperms.common.storage.backing.sql.NonClosableConnection; +import me.lucko.luckperms.common.storage.backing.sql.provider.AbstractConnectionFactory; import java.io.File; import java.sql.Connection; @@ -34,14 +35,14 @@ import java.sql.SQLException; import java.text.DecimalFormat; import java.util.concurrent.locks.ReentrantLock; -abstract class FlatfileProvider extends SQLProvider { +abstract class FlatfileConnectionFactory extends AbstractConnectionFactory { protected static final DecimalFormat DF = new DecimalFormat("#.00"); protected final File file; private final ReentrantLock lock = new ReentrantLock(); - private WrappedConnection connection; + private Connection connection; - FlatfileProvider(String name, File file) { + FlatfileConnectionFactory(String name, File file) { super(name); this.file = file; } @@ -62,7 +63,7 @@ abstract class FlatfileProvider extends SQLProvider { } @Override - public WrappedConnection getConnection() throws SQLException { + public Connection getConnection() throws SQLException { lock.lock(); try { if (this.connection == null || this.connection.isClosed()) { @@ -72,7 +73,7 @@ abstract class FlatfileProvider extends SQLProvider { Connection connection = DriverManager.getConnection(getDriverId() + ":" + file.getAbsolutePath()); if (connection != null) { - this.connection = new WrappedConnection(connection, false); + this.connection = new NonClosableConnection(connection); } } @@ -81,7 +82,7 @@ abstract class FlatfileProvider extends SQLProvider { } if (this.connection == null) { - throw new SQLException("Connection is null"); + throw new SQLException("Unable to get a connection."); } return this.connection; diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/H2Provider.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/file/H2ConnectionFactory.java similarity index 87% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/H2Provider.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/file/H2ConnectionFactory.java index 1b3cbfa1..415e79cc 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/H2Provider.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/file/H2ConnectionFactory.java @@ -23,14 +23,14 @@ * SOFTWARE. 
*/ -package me.lucko.luckperms.common.storage.backing.sql.provider; +package me.lucko.luckperms.common.storage.backing.sql.provider.file; import java.io.File; import java.util.LinkedHashMap; import java.util.Map; -public class H2Provider extends FlatfileProvider { - public H2Provider(File file) { +public class H2ConnectionFactory extends FlatfileConnectionFactory { + public H2ConnectionFactory(File file) { super("H2", file); // backwards compat @@ -46,7 +46,8 @@ public class H2Provider extends FlatfileProvider { File databaseFile = new File(super.file.getParent(), "luckperms-h2.mv.db"); if (databaseFile.exists()) { - ret.put("File Size", DF.format(databaseFile.length() / 1048576) + "MB"); + double size = databaseFile.length() / 1048576; + ret.put("File Size", DF.format(size) + "MB"); } else { ret.put("File Size", "0MB"); } diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/SQLiteProvider.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/file/SQLiteConnectionFactory.java similarity index 87% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/SQLiteProvider.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/file/SQLiteConnectionFactory.java index 48c852d2..6e552553 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/SQLiteProvider.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/file/SQLiteConnectionFactory.java @@ -23,14 +23,14 @@ * SOFTWARE. */ -package me.lucko.luckperms.common.storage.backing.sql.provider; +package me.lucko.luckperms.common.storage.backing.sql.provider.file; import java.io.File; import java.util.LinkedHashMap; import java.util.Map; -public class SQLiteProvider extends FlatfileProvider { - public SQLiteProvider(File file) { +public class SQLiteConnectionFactory extends FlatfileConnectionFactory { + public SQLiteConnectionFactory(File file) { super("SQLite", file); // backwards compat @@ -46,7 +46,8 @@ public class SQLiteProvider extends FlatfileProvider { File databaseFile = new File(super.file.getParent(), "luckperms-sqlite.db"); if (databaseFile.exists()) { - ret.put("File Size", DF.format(databaseFile.length() / 1048576) + "MB"); + double size = databaseFile.length() / 1048576; + ret.put("File Size", DF.format(size) + "MB"); } else { ret.put("File Size", "0MB"); } diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/MySQLProvider.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/remote/MySqlConnectionFactory.java similarity index 91% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/MySQLProvider.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/remote/MySqlConnectionFactory.java index 73c8119f..4af0cf19 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/MySQLProvider.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/remote/MySqlConnectionFactory.java @@ -23,13 +23,13 @@ * SOFTWARE. 
*/ -package me.lucko.luckperms.common.storage.backing.sql.provider; +package me.lucko.luckperms.common.storage.backing.sql.provider.remote; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; import me.lucko.luckperms.common.storage.DatastoreConfiguration; -import me.lucko.luckperms.common.storage.backing.sql.WrappedConnection; +import me.lucko.luckperms.common.storage.backing.sql.provider.AbstractConnectionFactory; import java.sql.Connection; import java.sql.SQLException; @@ -38,13 +38,13 @@ import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.TimeUnit; -public class MySQLProvider extends SQLProvider { +public class MySqlConnectionFactory extends AbstractConnectionFactory { private final DatastoreConfiguration configuration; private final String driverClass; private HikariDataSource hikari; - public MySQLProvider(String name, String driverClass, DatastoreConfiguration configuration) { + public MySqlConnectionFactory(String name, String driverClass, DatastoreConfiguration configuration) { super(name); this.configuration = configuration; this.driverClass = driverClass; @@ -143,11 +143,11 @@ public class MySQLProvider extends SQLProvider { } @Override - public WrappedConnection getConnection() throws SQLException { + public Connection getConnection() throws SQLException { Connection connection = hikari.getConnection(); if (connection == null) { - throw new SQLException("Connection is null"); + throw new SQLException("Unable to get a connection from the pool."); } - return new WrappedConnection(connection, true); + return connection; } } diff --git a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/PostgreSQLProvider.java b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/remote/PostgreConnectionFactory.java similarity index 89% rename from common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/PostgreSQLProvider.java rename to common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/remote/PostgreConnectionFactory.java index f22fe4d7..c4b7de8f 100644 --- a/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/PostgreSQLProvider.java +++ b/common/src/main/java/me/lucko/luckperms/common/storage/backing/sql/provider/remote/PostgreConnectionFactory.java @@ -23,13 +23,13 @@ * SOFTWARE. 
*/ -package me.lucko.luckperms.common.storage.backing.sql.provider; +package me.lucko.luckperms.common.storage.backing.sql.provider.remote; import com.zaxxer.hikari.HikariConfig; import com.zaxxer.hikari.HikariDataSource; import me.lucko.luckperms.common.storage.DatastoreConfiguration; -import me.lucko.luckperms.common.storage.backing.sql.WrappedConnection; +import me.lucko.luckperms.common.storage.backing.sql.provider.AbstractConnectionFactory; import java.sql.Connection; import java.sql.SQLException; @@ -38,12 +38,12 @@ import java.util.LinkedHashMap; import java.util.Map; import java.util.concurrent.TimeUnit; -public class PostgreSQLProvider extends SQLProvider { +public class PostgreConnectionFactory extends AbstractConnectionFactory { private final DatastoreConfiguration configuration; private HikariDataSource hikari; - public PostgreSQLProvider(DatastoreConfiguration configuration) { + public PostgreConnectionFactory(DatastoreConfiguration configuration) { super("PostgreSQL"); this.configuration = configuration; } @@ -117,11 +117,11 @@ public class PostgreSQLProvider extends SQLProvider { } @Override - public WrappedConnection getConnection() throws SQLException { + public Connection getConnection() throws SQLException { Connection connection = hikari.getConnection(); if (connection == null) { - throw new SQLException("Connection is null"); + throw new SQLException("Unable to get a connection from the pool."); } - return new WrappedConnection(connection, true); + return connection; } } diff --git a/sponge/src/main/java/me/lucko/luckperms/sponge/service/storage/SubjectStorageModel.java b/sponge/src/main/java/me/lucko/luckperms/sponge/service/storage/SubjectStorageModel.java index fc679b92..f1d03469 100644 --- a/sponge/src/main/java/me/lucko/luckperms/sponge/service/storage/SubjectStorageModel.java +++ b/sponge/src/main/java/me/lucko/luckperms/sponge/service/storage/SubjectStorageModel.java @@ -36,7 +36,7 @@ import com.google.gson.JsonObject; import me.lucko.luckperms.api.context.ImmutableContextSet; import me.lucko.luckperms.common.contexts.ContextSetComparator; -import me.lucko.luckperms.common.node.NodeModel; +import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer; import me.lucko.luckperms.common.node.NodeWithContextComparator; import me.lucko.luckperms.sponge.service.calculated.CalculatedSubjectData; import me.lucko.luckperms.sponge.service.model.LPPermissionService; @@ -107,7 +107,7 @@ public class SubjectStorageModel { JsonObject context = section.get("context").getAsJsonObject(); JsonObject data = section.get("data").getAsJsonObject(); - ImmutableContextSet contextSet = NodeModel.deserializeContextSet(context).makeImmutable(); + ImmutableContextSet contextSet = ContextSetJsonSerializer.deserializeContextSet(context).makeImmutable(); ImmutableMap.Builder perms = ImmutableMap.builder(); for (Map.Entry perm : data.entrySet()) { perms.put(perm.getKey(), perm.getValue().getAsBoolean()); @@ -130,7 +130,7 @@ public class SubjectStorageModel { JsonObject context = section.get("context").getAsJsonObject(); JsonObject data = section.get("data").getAsJsonObject(); - ImmutableContextSet contextSet = NodeModel.deserializeContextSet(context).makeImmutable(); + ImmutableContextSet contextSet = ContextSetJsonSerializer.deserializeContextSet(context).makeImmutable(); ImmutableMap.Builder opts = ImmutableMap.builder(); for (Map.Entry opt : data.entrySet()) { opts.put(opt.getKey(), opt.getValue().getAsString()); @@ -153,7 +153,7 @@ public class SubjectStorageModel { JsonObject context 
= section.get("context").getAsJsonObject(); JsonArray data = section.get("data").getAsJsonArray(); - ImmutableContextSet contextSet = NodeModel.deserializeContextSet(context).makeImmutable(); + ImmutableContextSet contextSet = ContextSetJsonSerializer.deserializeContextSet(context).makeImmutable(); ImmutableList.Builder pars = ImmutableList.builder(); for (JsonElement p : data) { if (!p.isJsonObject()) { @@ -189,7 +189,7 @@ public class SubjectStorageModel { } JsonObject section = new JsonObject(); - section.add("context", NodeModel.serializeContextSet(e.getKey())); + section.add("context", ContextSetJsonSerializer.serializeContextSet(e.getKey())); JsonObject data = new JsonObject(); @@ -213,7 +213,7 @@ public class SubjectStorageModel { } JsonObject section = new JsonObject(); - section.add("context", NodeModel.serializeContextSet(e.getKey())); + section.add("context", ContextSetJsonSerializer.serializeContextSet(e.getKey())); JsonObject data = new JsonObject(); @@ -237,7 +237,7 @@ public class SubjectStorageModel { } JsonObject section = new JsonObject(); - section.add("context", NodeModel.serializeContextSet(e.getKey())); + section.add("context", ContextSetJsonSerializer.serializeContextSet(e.getKey())); JsonArray data = new JsonArray(); for (SubjectReference ref : e.getValue()) {
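Note on the serializer hunks in this diff (HolderEditor, ApplyEditsCommand, ContextsFile, the legacy migrations, SqlDao and SubjectStorageModel above): context (de)serialization is now routed through the new ContextSetJsonSerializer rather than the static helpers previously exposed by NodeModel. The sketch below approximates that round trip with a plain Map<String, String> and Gson, assuming the simple one-value-per-key JSON shape (e.g. {"server": "factions", "world": "nether"}); the real serializer operates on LuckPerms' ContextSet types, and serializeContexts/deserializeContexts here are illustrative names only.

import com.google.gson.JsonObject;

import java.util.LinkedHashMap;
import java.util.Map;

public final class ContextJsonSketch {

    // roughly the shape ContextSetJsonSerializer.serializeContextSet produces for single-valued contexts
    static JsonObject serializeContexts(Map<String, String> contexts) {
        JsonObject out = new JsonObject();
        contexts.forEach(out::addProperty);
        return out;
    }

    // inverse operation, analogous to ContextSetJsonSerializer.deserializeContextSet
    static Map<String, String> deserializeContexts(JsonObject json) {
        Map<String, String> out = new LinkedHashMap<>();
        json.entrySet().forEach(e -> out.put(e.getKey(), e.getValue().getAsString()));
        return out;
    }

    public static void main(String[] args) {
        Map<String, String> contexts = new LinkedHashMap<>();
        contexts.put("server", "factions");
        contexts.put("world", "nether");

        JsonObject json = serializeContexts(contexts);
        System.out.println(json);                      // {"server":"factions","world":"nether"}
        System.out.println(deserializeContexts(json)); // {server=factions, world=nether}
    }
}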