Remove old schema migration code, cleanup a bit

This commit is contained in:
Luck 2018-02-16 13:19:43 +00:00
parent 0b5588a7dd
commit 765f9d2545
No known key found for this signature in database
GPG Key ID: EFA9B3EC5FD90F8B
38 changed files with 582 additions and 1549 deletions

View File

@ -136,6 +136,13 @@
<version>2.6.1</version>
<scope>provided</scope>
</dependency>
<!-- HikariCP -->
<dependency>
<groupId>com.zaxxer</groupId>
<artifactId>HikariCP</artifactId>
<version>2.7.6</version>
<scope>provided</scope>
</dependency>
<!-- Spigot -->
<dependency>

View File

@ -29,7 +29,7 @@ import com.google.common.util.concurrent.ThreadFactoryBuilder;
import me.lucko.luckperms.common.plugin.SchedulerAdapter;
import me.lucko.luckperms.common.plugin.SchedulerTask;
import me.lucko.luckperms.common.utils.SafeIterator;
import me.lucko.luckperms.common.utils.SafeIteration;
import org.bukkit.scheduler.BukkitScheduler;
import org.bukkit.scheduler.BukkitTask;
@ -105,7 +105,7 @@ public class BukkitSchedulerAdapter implements SchedulerAdapter {
@Override
public void shutdown() {
SafeIterator.iterate(this.tasks, SchedulerTask::cancel);
SafeIteration.iterate(this.tasks, SchedulerTask::cancel);
// wait for executor
this.asyncFallback.shutdown();

View File

@ -46,7 +46,7 @@ import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.node.NodeFactory;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.utils.Predicates;
import me.lucko.luckperms.common.utils.SafeIterator;
import me.lucko.luckperms.common.utils.SafeIteration;
import org.bukkit.configuration.ConfigurationSection;
import org.bukkit.configuration.file.YamlConfiguration;
@ -124,14 +124,14 @@ public class MigrationBPermissions extends SubCommand<Object> {
// Migrate one world at a time.
log.log("Starting world migration.");
SafeIterator.iterate(worldManager.getAllWorlds(), world -> {
SafeIteration.iterate(worldManager.getAllWorlds(), world -> {
log.log("Migrating world: " + world.getName());
// Migrate all groups
log.log("Starting group migration in world " + world.getName() + ".");
AtomicInteger groupCount = new AtomicInteger(0);
SafeIterator.iterate(world.getAll(CalculableType.GROUP), group -> {
SafeIteration.iterate(world.getAll(CalculableType.GROUP), group -> {
String groupName = MigrationUtils.standardizeName(group.getName());
if (group.getName().equalsIgnoreCase(world.getDefaultGroup())) {
groupName = NodeFactory.DEFAULT_GROUP_NAME;
@ -153,7 +153,7 @@ public class MigrationBPermissions extends SubCommand<Object> {
// Migrate all users
log.log("Starting user migration in world " + world.getName() + ".");
AtomicInteger userCount = new AtomicInteger(0);
SafeIterator.iterate(world.getAll(CalculableType.USER), user -> {
SafeIteration.iterate(world.getAll(CalculableType.USER), user -> {
// There is no mention of UUIDs in the API. I assume that name = uuid. idk?
UUID uuid = BukkitMigrationUtils.lookupUuid(log, user.getName());
if (uuid == null) {

View File

@ -41,7 +41,7 @@ import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.node.NodeFactory;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.utils.Predicates;
import me.lucko.luckperms.common.utils.SafeIterator;
import me.lucko.luckperms.common.utils.SafeIteration;
import org.anjocaido.groupmanager.GlobalGroups;
import org.anjocaido.groupmanager.GroupManager;
@ -93,7 +93,7 @@ public class MigrationGroupManager extends SubCommand<Object> {
GlobalGroups gg = GroupManager.getGlobalGroups();
AtomicInteger globalGroupCount = new AtomicInteger(0);
SafeIterator.iterate(gg.getGroupList(), g -> {
SafeIteration.iterate(gg.getGroupList(), g -> {
String groupName = MigrationUtils.standardizeName(g.getName());
Group group = plugin.getStorage().createAndLoadGroup(groupName, CreationCause.INTERNAL).join();
@ -120,13 +120,13 @@ public class MigrationGroupManager extends SubCommand<Object> {
// Collect data for all users and groups.
log.log("Collecting user and group data.");
SafeIterator.iterate(worlds, String::toLowerCase, world -> {
SafeIteration.iterate(worlds, String::toLowerCase, world -> {
log.log("Querying world " + world);
WorldDataHolder wdh = wh.getWorldData(world);
AtomicInteger groupWorldCount = new AtomicInteger(0);
SafeIterator.iterate(wdh.getGroupList(), group -> {
SafeIteration.iterate(wdh.getGroupList(), group -> {
String groupName = MigrationUtils.standardizeName(group.getName());
groups.putIfAbsent(groupName, new HashSet<>());
@ -160,7 +160,7 @@ public class MigrationGroupManager extends SubCommand<Object> {
log.log("Migrated " + groupWorldCount.get() + " groups in world " + world);
AtomicInteger userWorldCount = new AtomicInteger(0);
SafeIterator.iterate(wdh.getUserList(), user -> {
SafeIteration.iterate(wdh.getUserList(), user -> {
UUID uuid = BukkitMigrationUtils.lookupUuid(log, user.getUUID());
if (uuid == null) {
return;
@ -210,7 +210,7 @@ public class MigrationGroupManager extends SubCommand<Object> {
log.log("Starting group migration.");
AtomicInteger groupCount = new AtomicInteger(0);
SafeIterator.iterate(groups.entrySet(), e -> {
SafeIteration.iterate(groups.entrySet(), e -> {
Group group = plugin.getStorage().createAndLoadGroup(e.getKey(), CreationCause.INTERNAL).join();
for (Node node : e.getValue()) {
@ -224,7 +224,7 @@ public class MigrationGroupManager extends SubCommand<Object> {
log.log("Starting user migration.");
AtomicInteger userCount = new AtomicInteger(0);
SafeIterator.iterate(users.entrySet(), e -> {
SafeIteration.iterate(users.entrySet(), e -> {
User user = plugin.getStorage().loadUser(e.getKey(), null).join();
for (Node node : e.getValue()) {

View File

@ -42,7 +42,7 @@ import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.node.NodeFactory;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.utils.Predicates;
import me.lucko.luckperms.common.utils.SafeIterator;
import me.lucko.luckperms.common.utils.SafeIteration;
import org.bukkit.Bukkit;
import org.bukkit.configuration.ConfigurationSection;
@ -79,7 +79,7 @@ public class MigrationPermissionsBukkit extends SubCommand<Object> {
ConfigurationSection groupsSection = config.getConfigurationSection("groups");
SafeIterator.iterate(groupsSection.getKeys(false), key -> {
SafeIteration.iterate(groupsSection.getKeys(false), key -> {
final String groupName = MigrationUtils.standardizeName(key);
Group lpGroup = plugin.getStorage().createAndLoadGroup(groupName, CreationCause.INTERNAL).join();
@ -99,7 +99,7 @@ public class MigrationPermissionsBukkit extends SubCommand<Object> {
ConfigurationSection usersSection = config.getConfigurationSection("users");
SafeIterator.iterate(usersSection.getKeys(false), key -> {
SafeIteration.iterate(usersSection.getKeys(false), key -> {
UUID uuid = BukkitMigrationUtils.lookupUuid(log, key);
if (uuid == null) {
return;

View File

@ -43,7 +43,7 @@ import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.node.NodeFactory;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.utils.Predicates;
import me.lucko.luckperms.common.utils.SafeIterator;
import me.lucko.luckperms.common.utils.SafeIteration;
import org.bukkit.Bukkit;
@ -117,7 +117,7 @@ public class MigrationPermissionsEx extends SubCommand<Object> {
log.log("Starting group migration.");
AtomicInteger groupCount = new AtomicInteger(0);
Set<String> ladders = new HashSet<>();
SafeIterator.iterate(manager.getGroupList(), group -> {
SafeIteration.iterate(manager.getGroupList(), group -> {
int groupWeight = maxWeight - group.getRank();
final String groupName = MigrationUtils.standardizeName(group.getName());
@ -161,7 +161,7 @@ public class MigrationPermissionsEx extends SubCommand<Object> {
// Increment the max weight from the group migrations. All user meta should override.
int userWeight = maxWeight + 5;
SafeIterator.iterate(manager.getUsers(), user -> {
SafeIteration.iterate(manager.getUsers(), user -> {
UUID u = BukkitMigrationUtils.lookupUuid(log, user.getIdentifier());
if (u == null) {
return;

View File

@ -31,6 +31,7 @@ import com.github.gustav9797.PowerfulPermsAPI.Permission;
import com.github.gustav9797.PowerfulPermsAPI.PermissionManager;
import com.github.gustav9797.PowerfulPermsAPI.PowerfulPermsPlugin;
import com.google.common.collect.ImmutableSet;
import com.zaxxer.hikari.HikariDataSource;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.event.cause.CreationCause;
@ -47,9 +48,8 @@ import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.node.NodeFactory;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.StorageType;
import me.lucko.luckperms.common.utils.HikariSupplier;
import me.lucko.luckperms.common.utils.Predicates;
import me.lucko.luckperms.common.utils.SafeIterator;
import me.lucko.luckperms.common.utils.SafeIteration;
import org.bukkit.Bukkit;
@ -57,6 +57,7 @@ import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
@ -158,7 +159,7 @@ public class MigrationPowerfulPerms extends SubCommand<Object> {
// Groups first.
log.log("Starting group migration.");
AtomicInteger groupCount = new AtomicInteger(0);
SafeIterator.iterate(groups, g -> {
SafeIteration.iterate(groups, g -> {
maxWeight.set(Math.max(maxWeight.get(), g.getRank()));
String groupName = MigrationUtils.standardizeName(g.getName());
@ -218,7 +219,7 @@ public class MigrationPowerfulPerms extends SubCommand<Object> {
maxWeight.addAndGet(5);
// Migrate all users and their groups
SafeIterator.iterate(uuids, uuid -> {
SafeIteration.iterate(uuids, uuid -> {
// Create a LuckPerms user for the UUID
User user = plugin.getStorage().loadUser(uuid, null).join();
@ -340,4 +341,47 @@ public class MigrationPowerfulPerms extends SubCommand<Object> {
throw new RuntimeException(e);
}
}
/**
 * A simple hikari wrapper.
 *
 * <p>Creates a tiny (2 connection) MySQL pool used only for the duration of a
 * PowerfulPerms data migration, then disposed of via {@link #close()}.</p>
 */
public static final class HikariSupplier implements AutoCloseable {

    // assumes "host:port" form — TODO confirm callers always pass a port,
    // otherwise split(":")[1] in setup() throws ArrayIndexOutOfBoundsException
    private final String address;
    private final String database;
    private final String username;
    private final String password;

    // created lazily in setup(); close()/getConnection() before setup() will NPE
    private HikariDataSource hikari;

    public HikariSupplier(String address, String database, String username, String password) {
        this.address = address;
        this.database = database;
        this.username = username;
        this.password = password;
    }

    /**
     * Initialises the underlying pool.
     *
     * @param poolName a name for the pool, shown in Hikari's logging
     */
    public void setup(String poolName) {
        this.hikari = new HikariDataSource();
        this.hikari.setPoolName(poolName);
        // migration is single-purpose and short-lived; a tiny pool is enough
        this.hikari.setMaximumPoolSize(2);
        // NOTE(review): legacy Connector/J 5.x class name — verify the driver on
        // the classpath still provides com.mysql.jdbc.jdbc2.optional.MysqlDataSource
        this.hikari.setDataSourceClassName("com.mysql.jdbc.jdbc2.optional.MysqlDataSource");
        this.hikari.addDataSourceProperty("serverName", this.address.split(":")[0]);
        this.hikari.addDataSourceProperty("port", this.address.split(":")[1]);
        this.hikari.addDataSourceProperty("databaseName", this.database);
        this.hikari.addDataSourceProperty("user", this.username);
        this.hikari.addDataSourceProperty("password", this.password);
    }

    @Override
    public void close() {
        this.hikari.close();
    }

    /**
     * Borrows a connection from the pool.
     *
     * @return a pooled JDBC connection
     * @throws SQLException if a connection cannot be obtained
     */
    public Connection getConnection() throws SQLException {
        return this.hikari.getConnection();
    }
}
}

View File

@ -43,7 +43,7 @@ import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.node.NodeFactory;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.utils.Predicates;
import me.lucko.luckperms.common.utils.SafeIterator;
import me.lucko.luckperms.common.utils.SafeIteration;
import org.bukkit.Bukkit;
import org.tyrannyofheaven.bukkit.zPermissions.ZPermissionsService;
@ -104,7 +104,7 @@ public class MigrationZPermissions extends SubCommand<Object> {
AtomicInteger groupCount = new AtomicInteger(0);
AtomicInteger maxWeight = new AtomicInteger(0);
SafeIterator.iterate(internalService.getEntities(true), entity -> {
SafeIteration.iterate(internalService.getEntities(true), entity -> {
String groupName = MigrationUtils.standardizeName(entity.getDisplayName());
Group group = plugin.getStorage().createAndLoadGroup(groupName, CreationCause.INTERNAL).join();
@ -138,7 +138,7 @@ public class MigrationZPermissions extends SubCommand<Object> {
// Migrate all tracks
log.log("Starting track migration.");
AtomicInteger trackCount = new AtomicInteger(0);
SafeIterator.iterate(service.getAllTracks(), t -> {
SafeIteration.iterate(service.getAllTracks(), t -> {
String trackName = MigrationUtils.standardizeName(t);
Track track = plugin.getStorage().createAndLoadTrack(trackName, CreationCause.INTERNAL).join();
track.setGroups(service.getTrackGroups(t));
@ -156,7 +156,7 @@ public class MigrationZPermissions extends SubCommand<Object> {
Set<UUID> usersToMigrate = new HashSet<>(userParents.keySet());
usersToMigrate.addAll(service.getAllPlayersUUID());
SafeIterator.iterate(usersToMigrate, u -> {
SafeIteration.iterate(usersToMigrate, u -> {
PermissionEntity entity = internalService.getEntity(null, u, false);
String username = null;

View File

@ -27,7 +27,7 @@ package me.lucko.luckperms.bungee;
import me.lucko.luckperms.common.plugin.SchedulerAdapter;
import me.lucko.luckperms.common.plugin.SchedulerTask;
import me.lucko.luckperms.common.utils.SafeIterator;
import me.lucko.luckperms.common.utils.SafeIteration;
import net.md_5.bungee.api.scheduler.ScheduledTask;
import net.md_5.bungee.api.scheduler.TaskScheduler;
@ -109,7 +109,7 @@ public class BungeeSchedulerAdapter implements SchedulerAdapter {
@Override
public void shutdown() {
SafeIterator.iterate(this.tasks, SchedulerTask::cancel);
SafeIteration.iterate(this.tasks, SchedulerTask::cancel);
}
private static final class BungeeSchedulerTask implements SchedulerTask {

View File

@ -38,7 +38,7 @@ import me.lucko.luckperms.common.model.PermissionHolder;
import me.lucko.luckperms.common.node.NodeFactory;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.utils.Predicates;
import me.lucko.luckperms.common.utils.SafeIterator;
import me.lucko.luckperms.common.utils.SafeIteration;
import net.alpenblock.bungeeperms.BungeePerms;
import net.alpenblock.bungeeperms.Group;
@ -82,7 +82,7 @@ public class MigrationBungeePerms extends SubCommand<Object> {
// Migrate all groups.
log.log("Starting group migration.");
AtomicInteger groupCount = new AtomicInteger(0);
SafeIterator.iterate(groups, g -> {
SafeIteration.iterate(groups, g -> {
int groupWeight = maxWeight - g.getRank();
// Make a LuckPerms group for the one being migrated
@ -104,7 +104,7 @@ public class MigrationBungeePerms extends SubCommand<Object> {
// Increment the max weight from the group migrations. All user meta should override.
int userWeight = maxWeight + 5;
SafeIterator.iterate(bp.getPermissionsManager().getBackEnd().loadUsers(), u -> {
SafeIteration.iterate(bp.getPermissionsManager().getBackEnd().loadUsers(), u -> {
if (u.getUUID() == null) {
log.logErr("Could not parse UUID for user: " + u.getName());
return;

View File

@ -38,11 +38,19 @@ import java.util.UUID;
import java.util.stream.Collectors;
public class Log {
private static Log empty = null;
public static Builder builder() {
return new Builder();
}
public synchronized static Log empty() {
if (empty == null) {
empty = builder().build();
}
return empty;
}
private static SortedMap<Integer, ExtendedLogEntry> getPage(Set<ExtendedLogEntry> set, int pageNo, int entries) {
if (pageNo < 1) {
throw new IllegalArgumentException("pageNo cannot be less than 1: " + pageNo);

View File

@ -40,7 +40,7 @@ import me.lucko.luckperms.common.locale.Message;
import me.lucko.luckperms.common.model.PermissionHolder;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.utils.Predicates;
import me.lucko.luckperms.common.webeditor.WebEditorUtils;
import me.lucko.luckperms.common.webeditor.WebEditor;
import net.kyori.text.Component;
import net.kyori.text.TextComponent;
@ -66,10 +66,10 @@ public class HolderEditor<T extends PermissionHolder> extends SubCommand<T> {
Message.EDITOR_START.send(sender);
// form the payload data
JsonObject payload = WebEditorUtils.formPayload(Collections.singletonList(holder), sender, label, plugin);
JsonObject payload = WebEditor.formPayload(Collections.singletonList(holder), sender, label, plugin);
// upload the payload data to gist
String gistId = WebEditorUtils.postToGist(new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create().toJson(payload));
String gistId = WebEditor.postToGist(new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create().toJson(payload));
if (gistId == null) {
Message.EDITOR_UPLOAD_FAILURE.send(sender);
return CommandResult.STATE_ERROR;

View File

@ -47,7 +47,7 @@ import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.utils.DateUtil;
import me.lucko.luckperms.common.utils.Predicates;
import me.lucko.luckperms.common.webeditor.WebEditorUtils;
import me.lucko.luckperms.common.webeditor.WebEditor;
import java.util.HashSet;
import java.util.List;
@ -69,7 +69,7 @@ public class ApplyEditsCommand extends SingleCommand {
return CommandResult.INVALID_ARGS;
}
JsonObject data = WebEditorUtils.getDataFromGist(code);
JsonObject data = WebEditor.getDataFromGist(code);
if (data == null) {
Message.APPLY_EDITS_UNABLE_TO_READ.send(sender, code);
return CommandResult.FAILURE;
@ -94,7 +94,7 @@ public class ApplyEditsCommand extends SingleCommand {
}
String who = data.get("who").getAsString();
PermissionHolder holder = WebEditorUtils.getHolderFromIdentifier(plugin, sender, who);
PermissionHolder holder = WebEditor.getHolderFromIdentifier(plugin, sender, who);
if (holder == null) {
// the #getHolderFromIdentifier method will send the error message onto the sender
return false;
@ -105,7 +105,7 @@ public class ApplyEditsCommand extends SingleCommand {
return false;
}
Set<NodeModel> nodes = WebEditorUtils.deserializePermissions(data.getAsJsonArray("nodes"));
Set<NodeModel> nodes = WebEditor.deserializePermissions(data.getAsJsonArray("nodes"));
Set<Node> before = new HashSet<>(holder.getEnduringNodes().values());
Set<Node> after = nodes.stream().map(NodeModel::toNode).collect(Collectors.toSet());

View File

@ -41,7 +41,7 @@ import me.lucko.luckperms.common.locale.Message;
import me.lucko.luckperms.common.model.PermissionHolder;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.utils.Predicates;
import me.lucko.luckperms.common.webeditor.WebEditorUtils;
import me.lucko.luckperms.common.webeditor.WebEditor;
import net.kyori.text.Component;
import net.kyori.text.TextComponent;
@ -92,10 +92,10 @@ public class EditorCommand extends SingleCommand {
Message.EDITOR_START.send(sender);
// form the payload data
JsonObject payload = WebEditorUtils.formPayload(holders, sender, label, plugin);
JsonObject payload = WebEditor.formPayload(holders, sender, label, plugin);
// upload the payload data to gist
String gistId = WebEditorUtils.postToGist(new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create().toJson(payload));
String gistId = WebEditor.postToGist(new GsonBuilder().setPrettyPrinting().disableHtmlEscaping().create().toJson(payload));
if (gistId == null) {
Message.EDITOR_UPLOAD_FAILURE.send(sender);
return CommandResult.STATE_ERROR;

View File

@ -0,0 +1,250 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.node;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.Tristate;
import me.lucko.luckperms.api.context.ContextSet;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import javax.annotation.Nonnull;
/**
 * An abstract {@link Node} implementation which forwards every call to a
 * backing instance supplied by the subclass via {@link #delegate()}.
 *
 * <p>Subclasses can decorate a Node with extra attributes (for example a
 * source location or an owning object) without re-implementing the full
 * Node contract themselves.</p>
 */
public abstract class ForwardingNode implements Node {

    /**
     * Gets the backing node all calls are forwarded to.
     *
     * @return the delegate instance
     */
    protected abstract Node delegate();

    @Override
    public int hashCode() {
        // identity is defined entirely by the delegate
        return delegate().hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        // NOTE(review): forwarding equals() makes a wrapper equal to its plain
        // delegate, but the reverse only holds if the delegate's equals also
        // unwraps — confirm this asymmetry is intended by callers.
        return this == obj || delegate().equals(obj);
    }

    @Nonnull
    @Override
    public String getPermission() {
        return delegate().getPermission();
    }

    @Nonnull
    @Override
    public Boolean getValue() {
        return delegate().getValue();
    }

    @Override
    public boolean getValuePrimitive() {
        return delegate().getValuePrimitive();
    }

    @Nonnull
    @Override
    public Tristate getTristate() {
        return delegate().getTristate();
    }

    @Override
    public boolean isNegated() {
        return delegate().isNegated();
    }

    @Override
    public boolean isOverride() {
        return delegate().isOverride();
    }

    @Nonnull
    @Override
    public Optional<String> getServer() {
        return delegate().getServer();
    }

    @Nonnull
    @Override
    public Optional<String> getWorld() {
        return delegate().getWorld();
    }

    @Override
    public boolean isServerSpecific() {
        return delegate().isServerSpecific();
    }

    @Override
    public boolean isWorldSpecific() {
        return delegate().isWorldSpecific();
    }

    @Override
    public boolean appliesGlobally() {
        return delegate().appliesGlobally();
    }

    @Override
    public boolean hasSpecificContext() {
        return delegate().hasSpecificContext();
    }

    @Override
    public boolean shouldApplyWithContext(@Nonnull ContextSet context) {
        return delegate().shouldApplyWithContext(context);
    }

    @Nonnull
    @Override
    public List<String> resolveShorthand() {
        return delegate().resolveShorthand();
    }

    @Override
    public boolean isTemporary() {
        return delegate().isTemporary();
    }

    @Override
    public boolean isPermanent() {
        return delegate().isPermanent();
    }

    @Override
    public long getExpiryUnixTime() throws IllegalStateException {
        return delegate().getExpiryUnixTime();
    }

    @Nonnull
    @Override
    public Date getExpiry() throws IllegalStateException {
        return delegate().getExpiry();
    }

    @Override
    public long getSecondsTilExpiry() throws IllegalStateException {
        return delegate().getSecondsTilExpiry();
    }

    @Override
    public boolean hasExpired() {
        return delegate().hasExpired();
    }

    @Nonnull
    @Override
    public ContextSet getContexts() {
        return delegate().getContexts();
    }

    @Nonnull
    @Override
    public ContextSet getFullContexts() {
        return delegate().getFullContexts();
    }

    @Override
    public boolean isGroupNode() {
        return delegate().isGroupNode();
    }

    @Nonnull
    @Override
    public String getGroupName() throws IllegalStateException {
        return delegate().getGroupName();
    }

    @Override
    public boolean isWildcard() {
        return delegate().isWildcard();
    }

    @Override
    public int getWildcardLevel() throws IllegalStateException {
        return delegate().getWildcardLevel();
    }

    @Override
    public boolean isMeta() {
        return delegate().isMeta();
    }

    @Nonnull
    @Override
    public Map.Entry<String, String> getMeta() throws IllegalStateException {
        return delegate().getMeta();
    }

    @Override
    public boolean isPrefix() {
        return delegate().isPrefix();
    }

    @Nonnull
    @Override
    public Map.Entry<Integer, String> getPrefix() throws IllegalStateException {
        return delegate().getPrefix();
    }

    @Override
    public boolean isSuffix() {
        return delegate().isSuffix();
    }

    @Nonnull
    @Override
    public Map.Entry<Integer, String> getSuffix() throws IllegalStateException {
        return delegate().getSuffix();
    }

    @Override
    public boolean equalsIgnoringValue(@Nonnull Node other) {
        return delegate().equalsIgnoringValue(other);
    }

    @Override
    public boolean almostEquals(@Nonnull Node other) {
        return delegate().almostEquals(other);
    }

    @Override
    public boolean equalsIgnoringValueOrTemp(@Nonnull Node other) {
        return delegate().equalsIgnoringValueOrTemp(other);
    }

    @Override
    public String getKey() {
        return delegate().getKey();
    }

    @Override
    public Boolean setValue(Boolean value) {
        return delegate().setValue(value);
    }
}

View File

@ -27,21 +27,15 @@ package me.lucko.luckperms.common.node;
import me.lucko.luckperms.api.LocalizedNode;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.Tristate;
import me.lucko.luckperms.api.context.ContextSet;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import javax.annotation.Nonnull;
/**
* Holds a Node and where it was inherited from. All calls are passed onto the contained Node instance.
*/
public final class ImmutableLocalizedNode implements LocalizedNode {
public final class ImmutableLocalizedNode extends ForwardingNode implements LocalizedNode {
public static ImmutableLocalizedNode of(Node node, String location) {
Objects.requireNonNull(node, "node");
Objects.requireNonNull(location, "location");
@ -57,13 +51,8 @@ public final class ImmutableLocalizedNode implements LocalizedNode {
}
@Override
public int hashCode() {
return this.node.hashCode();
}
@Override
public boolean equals(Object obj) {
return this == obj || this.node.equals(obj);
protected Node delegate() {
return this.node;
}
@Nonnull
@ -78,204 +67,6 @@ public final class ImmutableLocalizedNode implements LocalizedNode {
return this.location;
}
@Nonnull
@Override
public Optional<String> getServer() {
return this.node.getServer();
}
@Override
public boolean getValuePrimitive() {
return this.node.getValuePrimitive();
}
@Override
public String getKey() {
return this.node.getKey();
}
@Nonnull
@Override
public Map.Entry<String, String> getMeta() throws IllegalStateException {
return this.node.getMeta();
}
@Override
public boolean isServerSpecific() {
return this.node.isServerSpecific();
}
@Nonnull
@Override
public Tristate getTristate() {
return this.node.getTristate();
}
@Override
public boolean hasExpired() {
return this.node.hasExpired();
}
@Override
public boolean isWildcard() {
return this.node.isWildcard();
}
@Override
public boolean equalsIgnoringValueOrTemp(@Nonnull Node other) {
return this.node.equalsIgnoringValueOrTemp(other);
}
@Nonnull
@Override
public List<String> resolveShorthand() {
return this.node.resolveShorthand();
}
@Override
public boolean almostEquals(@Nonnull Node other) {
return this.node.almostEquals(other);
}
@Nonnull
@Override
public ContextSet getFullContexts() {
return this.node.getFullContexts();
}
@Override
public long getSecondsTilExpiry() throws IllegalStateException {
return this.node.getSecondsTilExpiry();
}
@Nonnull
@Override
public String getPermission() {
return this.node.getPermission();
}
@Nonnull
@Override
public Map.Entry<Integer, String> getSuffix() throws IllegalStateException {
return this.node.getSuffix();
}
@Override
public boolean isWorldSpecific() {
return this.node.isWorldSpecific();
}
@Override
public boolean equalsIgnoringValue(@Nonnull Node other) {
return this.node.equalsIgnoringValue(other);
}
@Override
public long getExpiryUnixTime() throws IllegalStateException {
return this.node.getExpiryUnixTime();
}
@Override
public boolean isGroupNode() {
return this.node.isGroupNode();
}
@Override
public Boolean setValue(Boolean value) {
return this.node.setValue(value);
}
@Override
public boolean isPrefix() {
return this.node.isPrefix();
}
@Nonnull
@Override
public String getGroupName() throws IllegalStateException {
return this.node.getGroupName();
}
@Nonnull
@Override
public Date getExpiry() throws IllegalStateException {
return this.node.getExpiry();
}
@Override
public boolean isNegated() {
return this.node.isNegated();
}
@Override
public boolean hasSpecificContext() {
return this.node.hasSpecificContext();
}
@Override
public int getWildcardLevel() throws IllegalStateException {
return this.node.getWildcardLevel();
}
@Override
public boolean isTemporary() {
return this.node.isTemporary();
}
@Nonnull
@Override
public Map.Entry<Integer, String> getPrefix() throws IllegalStateException {
return this.node.getPrefix();
}
@Override
public boolean isMeta() {
return this.node.isMeta();
}
@Override
public boolean isPermanent() {
return this.node.isPermanent();
}
@Nonnull
@Override
public Optional<String> getWorld() {
return this.node.getWorld();
}
@Nonnull
@Override
public Boolean getValue() {
return this.node.getValue();
}
@Override
public boolean isOverride() {
return this.node.isOverride();
}
@Override
public boolean isSuffix() {
return this.node.isSuffix();
}
@Nonnull
@Override
public ContextSet getContexts() {
return this.node.getContexts();
}
@Override
public boolean appliesGlobally() {
return this.node.appliesGlobally();
}
@Override
public boolean shouldApplyWithContext(@Nonnull ContextSet context) {
return this.node.shouldApplyWithContext(context);
}
@Override
public String toString() {
return "ImmutableLocalizedNode(node=" + this.getNode() + ", location=" + this.getLocation() + ")";

View File

@ -26,21 +26,13 @@
package me.lucko.luckperms.common.node;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.Tristate;
import me.lucko.luckperms.api.context.ContextSet;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Optional;
import javax.annotation.Nonnull;
/**
* Holds a Node and plus an owning object. All calls are passed onto the contained Node instance.
*/
public final class ImmutableTransientNode<O> implements Node {
public final class ImmutableTransientNode<O> extends ForwardingNode implements Node {
public static <O> ImmutableTransientNode<O> of(Node node, O owner) {
Objects.requireNonNull(node, "node");
Objects.requireNonNull(owner, "owner");
@ -56,13 +48,8 @@ public final class ImmutableTransientNode<O> implements Node {
}
@Override
public int hashCode() {
return this.node.hashCode();
}
@Override
public boolean equals(Object obj) {
return this == obj || this.node.equals(obj);
protected Node delegate() {
return this.node;
}
public Node getNode() {
@ -73,204 +60,6 @@ public final class ImmutableTransientNode<O> implements Node {
return this.owner;
}
@Nonnull
@Override
public Optional<String> getServer() {
return this.node.getServer();
}
@Override
public boolean getValuePrimitive() {
return this.node.getValuePrimitive();
}
@Override
public String getKey() {
return this.node.getKey();
}
@Nonnull
@Override
public Map.Entry<String, String> getMeta() throws IllegalStateException {
return this.node.getMeta();
}
@Override
public boolean isServerSpecific() {
return this.node.isServerSpecific();
}
@Nonnull
@Override
public Tristate getTristate() {
return this.node.getTristate();
}
@Override
public boolean hasExpired() {
return this.node.hasExpired();
}
@Override
public boolean isWildcard() {
return this.node.isWildcard();
}
@Override
public boolean equalsIgnoringValueOrTemp(@Nonnull Node other) {
return this.node.equalsIgnoringValueOrTemp(other);
}
@Nonnull
@Override
public List<String> resolveShorthand() {
return this.node.resolveShorthand();
}
@Override
public boolean almostEquals(@Nonnull Node other) {
return this.node.almostEquals(other);
}
@Nonnull
@Override
public ContextSet getFullContexts() {
return this.node.getFullContexts();
}
@Override
public long getSecondsTilExpiry() throws IllegalStateException {
return this.node.getSecondsTilExpiry();
}
@Nonnull
@Override
public String getPermission() {
return this.node.getPermission();
}
@Nonnull
@Override
public Map.Entry<Integer, String> getSuffix() throws IllegalStateException {
return this.node.getSuffix();
}
@Override
public boolean isWorldSpecific() {
return this.node.isWorldSpecific();
}
@Override
public boolean equalsIgnoringValue(@Nonnull Node other) {
return this.node.equalsIgnoringValue(other);
}
@Override
public long getExpiryUnixTime() throws IllegalStateException {
return this.node.getExpiryUnixTime();
}
@Override
public boolean isGroupNode() {
return this.node.isGroupNode();
}
@Override
public Boolean setValue(Boolean value) {
return this.node.setValue(value);
}
@Override
public boolean isPrefix() {
return this.node.isPrefix();
}
@Nonnull
@Override
public String getGroupName() throws IllegalStateException {
return this.node.getGroupName();
}
@Nonnull
@Override
public Date getExpiry() throws IllegalStateException {
return this.node.getExpiry();
}
@Override
public boolean isNegated() {
return this.node.isNegated();
}
@Override
public boolean hasSpecificContext() {
return this.node.hasSpecificContext();
}
@Override
public int getWildcardLevel() throws IllegalStateException {
return this.node.getWildcardLevel();
}
@Override
public boolean isTemporary() {
return this.node.isTemporary();
}
@Nonnull
@Override
public Map.Entry<Integer, String> getPrefix() throws IllegalStateException {
return this.node.getPrefix();
}
@Override
public boolean isMeta() {
return this.node.isMeta();
}
@Override
public boolean isPermanent() {
return this.node.isPermanent();
}
@Nonnull
@Override
public Optional<String> getWorld() {
return this.node.getWorld();
}
@Nonnull
@Override
public Boolean getValue() {
return this.node.getValue();
}
@Override
public boolean isOverride() {
return this.node.isOverride();
}
@Override
public boolean isSuffix() {
return this.node.isSuffix();
}
@Nonnull
@Override
public ContextSet getContexts() {
return this.node.getContexts();
}
@Override
public boolean appliesGlobally() {
return this.node.appliesGlobally();
}
@Override
public boolean shouldApplyWithContext(@Nonnull ContextSet context) {
return this.node.shouldApplyWithContext(context);
}
@Override
public String toString() {
return "ImmutableTransientNode(node=" + this.getNode() + ", owner=" + this.getOwner() + ")";

View File

@ -53,13 +53,15 @@ import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
/**
* Converts a {@link AbstractDao} to use {@link CompletableFuture}s
* Implements {@link Storage} using an {@link AbstractDao}.
*/
public class AbstractStorage implements Storage {
public static Storage create(LuckPermsPlugin plugin, AbstractDao backing) {
BufferedOutputStorage bufferedDs = BufferedOutputStorage.wrap(PhasedStorage.wrap(new AbstractStorage(plugin, backing)), 250L);
plugin.getScheduler().asyncRepeating(bufferedDs, 2L);
return bufferedDs;
Storage base = new AbstractStorage(plugin, backing);
Storage phased = PhasedStorage.wrap(base);
BufferedOutputStorage buffered = BufferedOutputStorage.wrap(phased, 250L);
plugin.getScheduler().asyncRepeating(buffered, 2L);
return buffered;
}
private final LuckPermsPlugin plugin;

View File

@ -33,7 +33,6 @@ import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.common.actionlog.Log;
import me.lucko.luckperms.common.bulkupdate.BulkUpdate;
import me.lucko.luckperms.common.commands.CommandManager;
import me.lucko.luckperms.common.contexts.ContextSetConfigurateSerializer;
import me.lucko.luckperms.common.managers.group.GroupManager;
import me.lucko.luckperms.common.managers.track.TrackManager;
@ -46,8 +45,6 @@ import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.references.UserIdentifier;
import me.lucko.luckperms.common.storage.dao.AbstractDao;
import me.lucko.luckperms.common.storage.dao.legacy.LegacyJsonMigration;
import me.lucko.luckperms.common.storage.dao.legacy.LegacyYamlMigration;
import me.lucko.luckperms.common.utils.ImmutableCollectors;
import me.lucko.luckperms.common.utils.Uuids;
@ -64,7 +61,6 @@ import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
@ -73,18 +69,11 @@ import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.logging.FileHandler;
import java.util.logging.Formatter;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import java.util.stream.Collectors;
public abstract class ConfigurateDao extends AbstractDao {
private static final String LOG_FORMAT = "%s(%s): [%s] %s(%s) --> %s";
private final Logger actionLogger = Logger.getLogger("luckperms_actions");
private final FileUuidCache uuidCache = new FileUuidCache();
private final FileActionLogger actionLogger = new FileActionLogger();
private final String fileExtension;
private final String dataFolderName;
@ -169,132 +158,59 @@ public abstract class ConfigurateDao extends AbstractDao {
@Override
public void init() {
try {
setupFiles();
File data = FileUtils.mkdirs(new File(this.plugin.getDataDirectory(), this.dataFolderName));
this.usersDirectory = FileUtils.mkdir(new File(data, "users"));
this.groupsDirectory = FileUtils.mkdir(new File(data, "groups"));
this.tracksDirectory = FileUtils.mkdir(new File(data, "tracks"));
this.uuidDataFile = FileUtils.createNewFile(new File(data, "uuidcache.txt"));
this.actionLogFile = FileUtils.createNewFile(new File(data, "actions.log"));
// Listen for file changes.
this.plugin.getFileWatcher().ifPresent(watcher -> {
watcher.subscribe("user", this.usersDirectory.toPath(), s -> {
if (!s.endsWith(this.fileExtension)) {
return;
}
String user = s.substring(0, s.length() - this.fileExtension.length());
UUID uuid = Uuids.parseNullable(user);
if (uuid == null) {
return;
}
User u = this.plugin.getUserManager().getIfLoaded(uuid);
if (u != null) {
this.plugin.getLog().info("[FileWatcher] Refreshing user " + u.getFriendlyName());
this.plugin.getStorage().loadUser(uuid, null);
}
});
watcher.subscribe("group", this.groupsDirectory.toPath(), s -> {
if (!s.endsWith(this.fileExtension)) {
return;
}
String groupName = s.substring(0, s.length() - this.fileExtension.length());
this.plugin.getLog().info("[FileWatcher] Refreshing group " + groupName);
this.plugin.getUpdateTaskBuffer().request();
});
watcher.subscribe("track", this.tracksDirectory.toPath(), s -> {
if (!s.endsWith(this.fileExtension)) {
return;
}
String trackName = s.substring(0, s.length() - this.fileExtension.length());
this.plugin.getLog().info("[FileWatcher] Refreshing track " + trackName);
this.plugin.getStorage().loadAllTracks();
});
});
} catch (IOException e) {
e.printStackTrace();
return;
}
this.uuidCache.load(this.uuidDataFile);
try {
FileHandler fh = new FileHandler(this.actionLogFile.getAbsolutePath(), 0, 1, true);
fh.setFormatter(new Formatter() {
@Override
public String format(LogRecord record) {
return new Date(record.getMillis()).toString() + ": " + record.getMessage() + "\n";
}
});
this.actionLogger.addHandler(fh);
this.actionLogger.setUseParentHandlers(false);
this.actionLogger.setLevel(Level.ALL);
this.actionLogger.setFilter(record -> true);
} catch (Exception e) {
e.printStackTrace();
}
}
private static void mkdir(File file) throws IOException {
if (file.exists()) {
return;
}
if (!file.mkdir()) {
throw new IOException("Unable to create directory - " + file.getPath());
}
}
private static void mkdirs(File file) throws IOException {
if (file.exists()) {
return;
}
if (!file.mkdirs()) {
throw new IOException("Unable to create directory - " + file.getPath());
}
}
private void setupFiles() throws IOException {
File data = new File(this.plugin.getDataDirectory(), this.dataFolderName);
// Try to perform schema migration
File oldData = new File(this.plugin.getDataDirectory(), "data");
if (!data.exists() && oldData.exists()) {
mkdirs(data);
this.plugin.getLog().severe("===== Legacy Schema Migration =====");
this.plugin.getLog().severe("Starting migration from legacy schema. This could take a while....");
this.plugin.getLog().severe("Please do not stop your server while the migration takes place.");
if (this instanceof YamlDao) {
try {
new LegacyYamlMigration(this.plugin, (YamlDao) this, oldData, data).run();
} catch (Exception e) {
e.printStackTrace();
}
} else if (this instanceof JsonDao) {
try {
new LegacyJsonMigration(this.plugin, (JsonDao) this, oldData, data).run();
} catch (Exception e) {
e.printStackTrace();
}
}
} else {
mkdirs(data);
}
this.usersDirectory = new File(data, "users");
mkdir(this.usersDirectory);
this.groupsDirectory = new File(data, "groups");
mkdir(this.groupsDirectory);
this.tracksDirectory = new File(data, "tracks");
mkdir(this.tracksDirectory);
this.uuidDataFile = new File(data, "uuidcache.txt");
this.uuidDataFile.createNewFile();
this.actionLogFile = new File(data, "actions.log");
this.actionLogFile.createNewFile();
// Listen for file changes.
this.plugin.getFileWatcher().ifPresent(watcher -> {
watcher.subscribe("user", this.usersDirectory.toPath(), s -> {
if (!s.endsWith(this.fileExtension)) {
return;
}
String user = s.substring(0, s.length() - this.fileExtension.length());
UUID uuid = Uuids.parseNullable(user);
if (uuid == null) {
return;
}
User u = this.plugin.getUserManager().getIfLoaded(uuid);
if (u != null) {
this.plugin.getLog().info("[FileWatcher] Refreshing user " + u.getFriendlyName());
this.plugin.getStorage().loadUser(uuid, null);
}
});
watcher.subscribe("group", this.groupsDirectory.toPath(), s -> {
if (!s.endsWith(this.fileExtension)) {
return;
}
String groupName = s.substring(0, s.length() - this.fileExtension.length());
this.plugin.getLog().info("[FileWatcher] Refreshing group " + groupName);
this.plugin.getUpdateTaskBuffer().request();
});
watcher.subscribe("track", this.tracksDirectory.toPath(), s -> {
if (!s.endsWith(this.fileExtension)) {
return;
}
String trackName = s.substring(0, s.length() - this.fileExtension.length());
this.plugin.getLog().info("[FileWatcher] Refreshing track " + trackName);
this.plugin.getStorage().loadAllTracks();
});
});
this.actionLogger.init(this.actionLogFile);
}
@Override
@ -304,20 +220,14 @@ public abstract class ConfigurateDao extends AbstractDao {
@Override
public void logAction(LogEntry entry) {
this.actionLogger.info(String.format(LOG_FORMAT,
(entry.getActor().equals(CommandManager.CONSOLE_UUID) ? "" : entry.getActor() + " "),
entry.getActorName(),
Character.toString(entry.getType().getCode()),
entry.getActed().map(e -> e.toString() + " ").orElse(""),
entry.getActedName(),
entry.getAction())
);
this.actionLogger.logAction(entry);
}
@Override
public Log getLog() {
// Flatfile doesn't support viewing log data from in-game. You can just read the file in a text editor.
return Log.builder().build();
// File based daos don't support viewing log data from in-game.
// You can just read the file in a text editor.
return Log.empty();
}
@Override

View File

@ -0,0 +1,72 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.dao.file;
import me.lucko.luckperms.api.LogEntry;
import me.lucko.luckperms.common.commands.CommandManager;

import java.io.File;
import java.io.IOException;
import java.util.Date;
import java.util.logging.FileHandler;
import java.util.logging.Formatter;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
/**
 * Appends LuckPerms action-log entries to a flat file, using a
 * {@link java.util.logging.Logger} purely as a file appender.
 *
 * <p>Not thread-safe during {@link #init(File)}; callers are expected to
 * initialise once before logging — TODO confirm against DAO init ordering.</p>
 */
public class FileActionLogger {
    // actor-uuid(actor-name): [type-code] acted-uuid(acted-name) --> action
    private static final String LOG_FORMAT = "%s(%s): [%s] %s(%s) --> %s";

    // Dedicated named logger so entries never propagate to the root handlers.
    private final Logger actionLogger = Logger.getLogger("luckperms_actions");

    /**
     * Attaches an appending {@link FileHandler} for the given log file and
     * configures the logger to accept every record.
     *
     * @param file the action log file to append to
     */
    public void init(File file) {
        try {
            // limit=0 (unlimited), count=1, append=true
            FileHandler fh = new FileHandler(file.getAbsolutePath(), 0, 1, true);
            fh.setFormatter(new Formatter() {
                @Override
                public String format(LogRecord record) {
                    // "<date>: <message>" — one entry per line
                    return new Date(record.getMillis()).toString() + ": " + record.getMessage() + "\n";
                }
            });
            this.actionLogger.addHandler(fh);
            this.actionLogger.setUseParentHandlers(false);
            this.actionLogger.setLevel(Level.ALL);
            this.actionLogger.setFilter(record -> true);
        } catch (IOException | SecurityException e) {
            // Narrowed from catch (Exception): these are the failure modes the
            // FileHandler constructor declares. Logging continues without a
            // file sink if setup fails.
            e.printStackTrace();
        }
    }

    /**
     * Formats and records a single log entry.
     *
     * @param entry the entry to record; a console actor is rendered with an
     *              empty uuid prefix
     */
    public void logAction(LogEntry entry) {
        this.actionLogger.info(String.format(LOG_FORMAT,
                (entry.getActor().equals(CommandManager.CONSOLE_UUID) ? "" : entry.getActor() + " "),
                entry.getActorName(),
                Character.toString(entry.getType().getCode()),
                entry.getActed().map(e -> e.toString() + " ").orElse(""),
                entry.getActedName(),
                entry.getAction())
        );
    }
}

View File

@ -0,0 +1,65 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.dao.file;
import java.io.File;
import java.io.IOException;
public final class FileUtils {
public static File mkdir(File file) throws IOException {
if (file.exists()) {
return file;
}
if (!file.mkdir()) {
throw new IOException("Unable to create directory - " + file.getPath());
}
return file;
}
public static File mkdirs(File file) throws IOException {
if (file.exists()) {
return file;
}
if (!file.mkdirs()) {
throw new IOException("Unable to create directory - " + file.getPath());
}
return file;
}
public static File createNewFile(File file) throws IOException {
if (file.exists()) {
return file;
}
if (!file.createNewFile()) {
throw new IOException("Unable to create file - " + file.getPath());
}
return file;
}
private FileUtils() {}
}

View File

@ -36,7 +36,6 @@ import java.nio.file.Files;
import java.nio.file.Path;
public class HoconDao extends ConfigurateDao {
public HoconDao(LuckPermsPlugin plugin, String dataFolderName) {
super(plugin, "HOCON", ".conf", dataFolderName);
}

View File

@ -36,7 +36,6 @@ import java.nio.file.Files;
import java.nio.file.Path;
public class JsonDao extends ConfigurateDao {
public JsonDao(LuckPermsPlugin plugin, String dataFolderName) {
super(plugin, "JSON", ".json", dataFolderName);
}

View File

@ -38,7 +38,6 @@ import java.nio.file.Files;
import java.nio.file.Path;
public class YamlDao extends ConfigurateDao {
public YamlDao(LuckPermsPlugin plugin, String dataFolderName) {
super(plugin, "YAML", ".yml", dataFolderName);
}

View File

@ -1,257 +0,0 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.dao.legacy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonPrimitive;
import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer;
import me.lucko.luckperms.common.node.LegacyNodeFactory;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.dao.file.JsonDao;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * One-shot migration from the legacy flat-file JSON schema (pre-3.0 layout
 * under a "data" folder) to the current per-type JSON layout.
 *
 * <p>Moves the shared files, then rewrites each group and user file from the
 * legacy {@code perms} map into the current {@code permissions} array form,
 * deleting old files as it goes and finally renaming the legacy folder to
 * {@code old-data-backup}. Errors on individual files are printed and
 * skipped so one bad file does not abort the whole migration.</p>
 */
@SuppressWarnings("unchecked")
public class LegacyJsonMigration implements Runnable {
    // pretty printing keeps the produced files human-editable
    private final Gson gson = new GsonBuilder().setPrettyPrinting().create();

    private final LuckPermsPlugin plugin;
    private final JsonDao backing;
    private final File oldDataFolder;
    private final File newDataFolder;

    public LegacyJsonMigration(LuckPermsPlugin plugin, JsonDao backing, File oldDataFolder, File newDataFolder) {
        this.plugin = plugin;
        this.backing = backing;
        this.oldDataFolder = oldDataFolder;
        this.newDataFolder = newDataFolder;
    }

    // Serializes the element to the file as UTF-8 JSON; failures are logged
    // and swallowed (best-effort, consistent with the rest of the migration).
    private void writeElementToFile(File file, JsonElement element) {
        try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
            this.gson.toJson(element, writer);
            writer.flush();
        } catch (Throwable t) {
            this.plugin.getLog().warn("Exception whilst writing to file: " + file.getAbsolutePath());
            t.printStackTrace();
        }
    }

    // Parses the file as a JSON object; returns null on any failure, which
    // callers must tolerate (a null here aborts that single file's migration).
    private JsonObject readObjectFromFile(File file) {
        try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
            return this.gson.fromJson(reader, JsonObject.class);
        } catch (Throwable t) {
            this.plugin.getLog().warn("Exception whilst reading from file: " + file.getAbsolutePath());
            t.printStackTrace();
            return null;
        }
    }

    /**
     * Runs the full migration: shared files, then groups, then users, then
     * renames the legacy folder as a backup. Intended to be invoked once
     * during storage initialisation.
     */
    @Override
    public void run() {
        this.plugin.getLog().warn("Moving existing files to their new location.");
        relocateFile(this.oldDataFolder, this.newDataFolder, "actions.log");
        relocateFile(this.oldDataFolder, this.newDataFolder, "uuidcache.txt");
        relocateFile(this.oldDataFolder, this.newDataFolder, "tracks");

        this.plugin.getLog().warn("Migrating group files");
        File oldGroupsDir = new File(this.oldDataFolder, "groups");
        if (oldGroupsDir.exists() && oldGroupsDir.isDirectory()) {
            File newGroupsDir = new File(this.newDataFolder, "groups");
            newGroupsDir.mkdir();

            // only files with the dao's extension are group data files
            File[] toMigrate = oldGroupsDir.listFiles((dir, name) -> name.endsWith(this.backing.getFileExtension()));
            if (toMigrate != null) {
                for (File oldFile : toMigrate) {
                    try {
                        File replacementFile = new File(newGroupsDir, oldFile.getName());

                        JsonObject values = readObjectFromFile(oldFile);

                        // legacy schema: flat "perms" object of permission -> boolean
                        Map<String, Boolean> perms = new HashMap<>();
                        String name = values.get("name").getAsString();
                        JsonObject permsSection = values.get("perms").getAsJsonObject();
                        for (Map.Entry<String, JsonElement> e : permsSection.entrySet()) {
                            perms.put(e.getKey(), e.getValue().getAsBoolean());
                        }

                        // legacy strings encode server/world/expiry/context inline;
                        // LegacyNodeFactory unpacks them into structured nodes
                        Set<NodeModel> nodes = perms.entrySet().stream()
                                .map(e -> LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue()))
                                .map(NodeModel::fromNode)
                                .collect(Collectors.toCollection(LinkedHashSet::new));

                        if (!replacementFile.exists()) {
                            try {
                                replacementFile.createNewFile();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }

                        JsonObject data = new JsonObject();
                        data.addProperty("name", name);
                        data.add("permissions", serializePermissions(nodes));
                        writeElementToFile(replacementFile, data);

                        // remove the legacy file only after the replacement is written
                        oldFile.delete();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }

        this.plugin.getLog().warn("Migrated group files, now migrating user files.");

        File oldUsersDir = new File(this.oldDataFolder, "users");
        if (oldUsersDir.exists() && oldUsersDir.isDirectory()) {
            File newUsersDir = new File(this.newDataFolder, "users");
            newUsersDir.mkdir();

            File[] toMigrate = oldUsersDir.listFiles((dir, name) -> name.endsWith(this.backing.getFileExtension()));
            if (toMigrate != null) {
                for (File oldFile : toMigrate) {
                    try {
                        File replacementFile = new File(newUsersDir, oldFile.getName());

                        JsonObject values = readObjectFromFile(oldFile);

                        // same shape as groups, plus identity fields
                        Map<String, Boolean> perms = new HashMap<>();
                        String uuid = values.get("uuid").getAsString();
                        String name = values.get("name").getAsString();
                        String primaryGroup = values.get("primaryGroup").getAsString();
                        JsonObject permsSection = values.get("perms").getAsJsonObject();
                        for (Map.Entry<String, JsonElement> e : permsSection.entrySet()) {
                            perms.put(e.getKey(), e.getValue().getAsBoolean());
                        }

                        Set<NodeModel> nodes = perms.entrySet().stream()
                                .map(e -> LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue()))
                                .map(NodeModel::fromNode)
                                .collect(Collectors.toCollection(LinkedHashSet::new));

                        if (!replacementFile.exists()) {
                            try {
                                replacementFile.createNewFile();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }

                        JsonObject data = new JsonObject();
                        data.addProperty("uuid", uuid);
                        data.addProperty("name", name);
                        data.addProperty("primaryGroup", primaryGroup);
                        data.add("permissions", serializePermissions(nodes));
                        writeElementToFile(replacementFile, data);

                        oldFile.delete();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        this.plugin.getLog().warn("Migrated user files.");

        // rename the old data file
        this.oldDataFolder.renameTo(new File(this.oldDataFolder.getParent(), "old-data-backup"));

        this.plugin.getLog().warn("Legacy schema migration complete.");
    }

    // Moves a file/directory from the old folder to the new one if present;
    // failures are printed and ignored.
    private static void relocateFile(File dirFrom, File dirTo, String fileName) {
        File file = new File(dirFrom, fileName);
        if (file.exists()) {
            try {
                Files.move(file.toPath(), new File(dirTo, fileName).toPath());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    // Serializes nodes into the current JSON form: plain "true global" nodes
    // become bare strings, everything else an object keyed by the permission
    // with an attributes object.
    private static JsonArray serializePermissions(Set<NodeModel> nodes) {
        JsonArray arr = new JsonArray();

        for (NodeModel node : nodes) {
            // just a raw, default node.
            boolean single = node.getValue() &&
                    node.getServer().equalsIgnoreCase("global") &&
                    node.getWorld().equalsIgnoreCase("global") &&
                    node.getExpiry() == 0L &&
                    node.getContexts().isEmpty();

            // just add a string to the list.
            if (single) {
                arr.add(new JsonPrimitive(node.getPermission()));
                continue;
            }

            JsonObject attributes = new JsonObject();
            attributes.addProperty("value", node.getValue());

            // non-default attributes only, to keep the output minimal
            if (!node.getServer().equals("global")) {
                attributes.addProperty("server", node.getServer());
            }

            if (!node.getWorld().equals("global")) {
                attributes.addProperty("world", node.getWorld());
            }

            if (node.getExpiry() != 0L) {
                attributes.addProperty("expiry", node.getExpiry());
            }

            if (!node.getContexts().isEmpty()) {
                attributes.add("context", ContextSetJsonSerializer.serializeContextSet(node.getContexts()));
            }

            JsonObject perm = new JsonObject();
            perm.add(node.getPermission(), attributes);
            arr.add(perm);
        }

        return arr;
    }
}

View File

@ -1,285 +0,0 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.dao.legacy;
import com.google.common.collect.Lists;
import com.google.gson.reflect.TypeToken;
import me.lucko.luckperms.common.contexts.ContextSetJsonSerializer;
import me.lucko.luckperms.common.node.LegacyNodeFactory;
import me.lucko.luckperms.common.node.NodeFactory;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.storage.dao.sql.SqlDao;
import java.lang.reflect.Type;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
public class LegacySqlMigration implements Runnable {
private static final Type NODE_MAP_TYPE = new TypeToken<Map<String, Boolean>>() {}.getType();
private final SqlDao backing;
public LegacySqlMigration(SqlDao backing) {
this.backing = backing;
}
@Override
public void run() {
this.backing.getPlugin().getLog().warn("Collecting UUID data from the old tables.");
Map<UUID, String> uuidData = new HashMap<>();
try (Connection c = this.backing.getProvider().getConnection()) {
try (PreparedStatement ps = c.prepareStatement("SELECT uuid, name FROM lp_uuid")) {
try (ResultSet rs = ps.executeQuery()) {
while (rs.next()) {
try {
uuidData.put(UUID.fromString(rs.getString("uuid")), rs.getString("name"));
} catch (IllegalArgumentException e) {
e.printStackTrace();
}
}
}
}
} catch (SQLException e) {
e.printStackTrace();
}
this.backing.getPlugin().getLog().warn("Found " + uuidData.size() + " uuid data entries. Copying to new tables...");
List<Map.Entry<UUID, String>> uuidEntries = new ArrayList<>(uuidData.entrySet());
List<List<Map.Entry<UUID, String>>> partitionedUuidEntries = Lists.partition(uuidEntries, 100);
for (List<Map.Entry<UUID, String>> l : partitionedUuidEntries) {
try (Connection c = this.backing.getProvider().getConnection()) {
try (PreparedStatement ps = c.prepareStatement(this.backing.getPrefix().apply("INSERT INTO {prefix}players VALUES(?, ?, ?)"))) {
for (Map.Entry<UUID, String> e : l) {
ps.setString(1, e.getKey().toString());
ps.setString(2, e.getValue().toLowerCase());
ps.setString(3, NodeFactory.DEFAULT_GROUP_NAME);
ps.addBatch();
}
ps.executeBatch();
}
} catch (SQLException e) {
e.printStackTrace();
}
}
uuidData.clear();
uuidEntries.clear();
partitionedUuidEntries.clear();
this.backing.getPlugin().getLog().warn("Migrated all uuid data.");
this.backing.getPlugin().getLog().warn("Starting user data migration.");
Set<UUID> users = new HashSet<>();
try (Connection c = this.backing.getProvider().getConnection()) {
try (PreparedStatement ps = c.prepareStatement("SELECT uuid FROM lp_users")) {
try (ResultSet rs = ps.executeQuery()) {
while (rs.next()) {
try {
users.add(UUID.fromString(rs.getString("uuid")));
} catch (IllegalArgumentException e) {
e.printStackTrace();
}
}
}
}
} catch (SQLException e) {
e.printStackTrace();
}
this.backing.getPlugin().getLog().warn("Found " + users.size() + " user data entries. Copying to new tables...");
AtomicInteger userCounter = new AtomicInteger(0);
for (UUID uuid : users) {
String permsJson = null;
String primaryGroup = null;
try (Connection c = this.backing.getProvider().getConnection()) {
try (PreparedStatement ps = c.prepareStatement("SELECT primary_group, perms FROM lp_users WHERE uuid=?")) {
ps.setString(1, uuid.toString());
try (ResultSet rs = ps.executeQuery()) {
if (rs.next()) {
permsJson = rs.getString("perms");
primaryGroup = rs.getString("primary_group");
}
}
}
} catch (SQLException e) {
e.printStackTrace();
}
if (permsJson == null || primaryGroup == null) {
new Throwable().printStackTrace();
continue;
}
Map<String, Boolean> convertedPerms = this.backing.getGson().fromJson(permsJson, NODE_MAP_TYPE);
if (convertedPerms == null) {
new Throwable().printStackTrace();
continue;
}
Set<NodeModel> nodes = convertedPerms.entrySet().stream()
.map(e -> LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue()))
.map(NodeModel::fromNode)
.collect(Collectors.toSet());
try (Connection c = this.backing.getProvider().getConnection()) {
try (PreparedStatement ps = c.prepareStatement(this.backing.getPrefix().apply("INSERT INTO {prefix}user_permissions(uuid, permission, value, server, world, expiry, contexts) VALUES(?, ?, ?, ?, ?, ?, ?)"))) {
for (NodeModel nd : nodes) {
ps.setString(1, uuid.toString());
ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.getValue());
ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry());
ps.setString(7, this.backing.getGson().toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts())));
ps.addBatch();
}
ps.executeBatch();
}
} catch (SQLException e) {
e.printStackTrace();
}
if (!primaryGroup.equalsIgnoreCase(NodeFactory.DEFAULT_GROUP_NAME)) {
try (Connection c = this.backing.getProvider().getConnection()) {
try (PreparedStatement ps = c.prepareStatement(this.backing.getPrefix().apply("UPDATE {prefix}players SET primary_group=? WHERE uuid=?"))) {
ps.setString(1, primaryGroup);
ps.setString(2, uuid.toString());
ps.execute();
}
} catch (SQLException e) {
e.printStackTrace();
}
}
int i = userCounter.incrementAndGet();
if (i % 100 == 0) {
this.backing.getPlugin().getLog().warn("Migrated " + i + " users so far...");
}
}
users.clear();
this.backing.getPlugin().getLog().warn("Migrated all user data.");
this.backing.getPlugin().getLog().warn("Starting group data migration.");
Map<String, String> groupData = new HashMap<>();
try (Connection c = this.backing.getProvider().getConnection()) {
try (PreparedStatement ps = c.prepareStatement("SELECT name, perms FROM lp_groups")) {
try (ResultSet rs = ps.executeQuery()) {
while (rs.next()) {
groupData.put(rs.getString("name"), rs.getString("perms"));
}
}
}
} catch (SQLException e) {
e.printStackTrace();
}
this.backing.getPlugin().getLog().warn("Found " + groupData.size() + " group data entries. Copying to new tables...");
for (Map.Entry<String, String> e : groupData.entrySet()) {
String name = e.getKey();
String permsJson = e.getValue();
try (Connection c = this.backing.getProvider().getConnection()) {
try (PreparedStatement ps = c.prepareStatement(this.backing.getPrefix().apply("INSERT INTO {prefix}groups VALUES(?)"))) {
ps.setString(1, name);
ps.execute();
}
} catch (SQLException ex) {
ex.printStackTrace();
}
Map<String, Boolean> convertedPerms = this.backing.getGson().fromJson(permsJson, NODE_MAP_TYPE);
if (convertedPerms == null) {
new Throwable().printStackTrace();
continue;
}
Set<NodeModel> nodes = convertedPerms.entrySet().stream()
.map(ent -> LegacyNodeFactory.fromLegacyString(ent.getKey(), ent.getValue()))
.map(NodeModel::fromNode)
.collect(Collectors.toSet());
try (Connection c = this.backing.getProvider().getConnection()) {
try (PreparedStatement ps = c.prepareStatement(this.backing.getPrefix().apply("INSERT INTO {prefix}group_permissions(name, permission, value, server, world, expiry, contexts) VALUES(?, ?, ?, ?, ?, ?, ?)"))) {
for (NodeModel nd : nodes) {
ps.setString(1, name);
ps.setString(2, nd.getPermission());
ps.setBoolean(3, nd.getValue());
ps.setString(4, nd.getServer());
ps.setString(5, nd.getWorld());
ps.setLong(6, nd.getExpiry());
ps.setString(7, this.backing.getGson().toJson(ContextSetJsonSerializer.serializeContextSet(nd.getContexts())));
ps.addBatch();
}
ps.executeBatch();
}
} catch (SQLException ex) {
ex.printStackTrace();
}
}
groupData.clear();
this.backing.getPlugin().getLog().warn("Migrated all group data.");
this.backing.getPlugin().getLog().warn("Renaming action and track tables.");
try (Connection c = this.backing.getProvider().getConnection()) {
try (PreparedStatement ps = c.prepareStatement(this.backing.getPrefix().apply("DROP TABLE {prefix}actions"))) {
ps.execute();
}
try (PreparedStatement ps = c.prepareStatement(this.backing.getPrefix().apply("ALTER TABLE lp_actions RENAME TO {prefix}actions"))) {
ps.execute();
}
try (PreparedStatement ps = c.prepareStatement(this.backing.getPrefix().apply("DROP TABLE {prefix}tracks"))) {
ps.execute();
}
try (PreparedStatement ps = c.prepareStatement(this.backing.getPrefix().apply("ALTER TABLE lp_tracks RENAME TO {prefix}tracks"))) {
ps.execute();
}
} catch (SQLException ex) {
ex.printStackTrace();
}
this.backing.getPlugin().getLog().warn("Legacy schema migration complete.");
}
}

View File

@ -1,278 +0,0 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.storage.dao.legacy;
import me.lucko.luckperms.common.node.LegacyNodeFactory;
import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.storage.dao.file.YamlDao;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
/**
 * One-shot migration task that converts a legacy flat-file YAML storage layout
 * into the current layout used by {@link YamlDao}.
 *
 * <p>Concretely it: relocates {@code actions.log}, {@code uuidcache.txt} and the
 * {@code tracks} directory into the new data folder; rewrites every group and
 * user file from the legacy {@code perms} string-map format into the new
 * {@code permissions} list format; and finally renames the old data folder to
 * {@code old-data-backup}.</p>
 *
 * <p>Errors are logged and printed but never rethrown — the migration is
 * best-effort and continues past individual failures.</p>
 */
// unchecked: raw YAML parsing yields untyped maps that are cast to the expected shapes
@SuppressWarnings("unchecked")
public class LegacyYamlMigration implements Runnable {
    private final LuckPermsPlugin plugin;
    private final YamlDao backing;
    // root of the old (pre-migration) flat-file storage
    private final File oldDataFolder;
    // root of the new storage layout files are migrated into
    private final File newDataFolder;
    private final Yaml yaml = getYaml();

    /**
     * Builds a SnakeYAML instance configured for unicode-safe, block-style output.
     *
     * @return a configured {@link Yaml} instance
     */
    private static Yaml getYaml() {
        DumperOptions options = new DumperOptions();
        options.setAllowUnicode(true);
        // block style keeps the emitted files human-readable
        options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
        return new Yaml(options);
    }

    public LegacyYamlMigration(LuckPermsPlugin plugin, YamlDao backing, File oldDataFolder, File newDataFolder) {
        this.plugin = plugin;
        this.backing = backing;
        this.oldDataFolder = oldDataFolder;
        this.newDataFolder = newDataFolder;
    }

    /**
     * Dumps the given values to the file as YAML (UTF-8).
     *
     * <p>Failures are logged and printed, not rethrown.</p>
     *
     * @param file the destination file
     * @param values the map to serialize
     */
    public void writeMapToFile(File file, Map<String, Object> values) {
        try (BufferedWriter writer = Files.newBufferedWriter(file.toPath(), StandardCharsets.UTF_8)) {
            this.yaml.dump(values, writer);
            writer.flush();
        } catch (Throwable t) {
            this.plugin.getLog().warn("Exception whilst writing to file: " + file.getAbsolutePath());
            t.printStackTrace();
        }
    }

    /**
     * Reads a YAML document from the file (UTF-8) as a string-keyed map.
     *
     * @param file the file to read
     * @return the parsed map, or {@code null} if reading or parsing failed
     */
    public Map<String, Object> readMapFromFile(File file) {
        try (BufferedReader reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8)) {
            return (Map<String, Object>) this.yaml.load(reader);
        } catch (Throwable t) {
            this.plugin.getLog().warn("Exception whilst reading from file: " + file.getAbsolutePath());
            t.printStackTrace();
            // callers must handle the null return
            return null;
        }
    }

    @Override
    public void run() {
        // step 1: move the files that need no format conversion
        this.plugin.getLog().warn("Moving existing files to their new location.");
        relocateFile(this.oldDataFolder, this.newDataFolder, "actions.log");
        relocateFile(this.oldDataFolder, this.newDataFolder, "uuidcache.txt");
        relocateFile(this.oldDataFolder, this.newDataFolder, "tracks");

        // step 2: rewrite group files into the new "permissions" format
        this.plugin.getLog().warn("Migrating group files");
        File oldGroupsDir = new File(this.oldDataFolder, "groups");
        if (oldGroupsDir.exists() && oldGroupsDir.isDirectory()) {
            File newGroupsDir = new File(this.newDataFolder, "groups");
            newGroupsDir.mkdir();

            // only migrate files matching the dao's storage extension
            File[] toMigrate = oldGroupsDir.listFiles((dir, name) -> name.endsWith(this.backing.getFileExtension()));
            if (toMigrate != null) {
                for (File oldFile : toMigrate) {
                    try {
                        File replacementFile = new File(newGroupsDir, oldFile.getName());

                        // NOTE(review): readMapFromFile may return null on a parse failure;
                        // the resulting NPE is caught by the surrounding catch (Exception).
                        Map<String, Object> data = readMapFromFile(oldFile);

                        String name = (String) data.get("name");
                        Map<String, Boolean> perms = new HashMap<>((Map<String, Boolean>) data.get("perms"));

                        // convert legacy "perm string -> value" entries into NodeModels,
                        // preserving iteration order via a LinkedHashSet
                        Set<NodeModel> nodes = perms.entrySet().stream()
                                .map(e -> LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue()))
                                .map(NodeModel::fromNode)
                                .collect(Collectors.toCollection(LinkedHashSet::new));

                        if (!replacementFile.exists()) {
                            try {
                                replacementFile.createNewFile();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }

                        // write the converted data, then remove the legacy file
                        Map<String, Object> values = new LinkedHashMap<>();
                        values.put("name", name);
                        values.put("permissions", serializePermissions(nodes));
                        writeMapToFile(replacementFile, values);

                        oldFile.delete();
                    } catch (Exception e) {
                        // best-effort: log and continue with the next file
                        e.printStackTrace();
                    }
                }
            }
        }

        // step 3: rewrite user files — same as groups, plus uuid/primary-group fields
        this.plugin.getLog().warn("Migrated group files, now migrating user files.");
        File oldUsersDir = new File(this.oldDataFolder, "users");
        if (oldUsersDir.exists() && oldUsersDir.isDirectory()) {
            File newUsersDir = new File(this.newDataFolder, "users");
            newUsersDir.mkdir();

            File[] toMigrate = oldUsersDir.listFiles((dir, name) -> name.endsWith(this.backing.getFileExtension()));
            if (toMigrate != null) {
                for (File oldFile : toMigrate) {
                    try {
                        File replacementFile = new File(newUsersDir, oldFile.getName());

                        // NOTE(review): same null-return hazard as the group loop above.
                        Map<String, Object> data = readMapFromFile(oldFile);

                        String uuid = (String) data.get("uuid");
                        String name = (String) data.get("name");
                        String primaryGroup = (String) data.get("primary-group");
                        Map<String, Boolean> perms = new HashMap<>((Map<String, Boolean>) data.get("perms"));

                        Set<NodeModel> nodes = perms.entrySet().stream()
                                .map(e -> LegacyNodeFactory.fromLegacyString(e.getKey(), e.getValue()))
                                .map(NodeModel::fromNode)
                                .collect(Collectors.toCollection(LinkedHashSet::new));

                        if (!replacementFile.exists()) {
                            try {
                                replacementFile.createNewFile();
                            } catch (IOException e) {
                                e.printStackTrace();
                            }
                        }

                        Map<String, Object> values = new LinkedHashMap<>();
                        values.put("uuid", uuid);
                        values.put("name", name);
                        values.put("primary-group", primaryGroup);
                        values.put("permissions", serializePermissions(nodes));
                        writeMapToFile(replacementFile, values);

                        oldFile.delete();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                }
            }
        }
        this.plugin.getLog().warn("Migrated user files.");

        // rename the old data file
        this.oldDataFolder.renameTo(new File(this.oldDataFolder.getParent(), "old-data-backup"));

        this.plugin.getLog().warn("Legacy schema migration complete.");
    }

    /**
     * Moves a file (or directory) from one folder to another, if it exists.
     * Failures are printed and otherwise ignored.
     *
     * @param dirFrom the source directory
     * @param dirTo the destination directory
     * @param fileName the name of the entry to move
     */
    private static void relocateFile(File dirFrom, File dirTo, String fileName) {
        File file = new File(dirFrom, fileName);
        if (file.exists()) {
            try {
                Files.move(file.toPath(), new File(dirTo, fileName).toPath());
            } catch (IOException e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Serializes a set of nodes into the list form used in the new YAML files:
     * a plain permission string for simple default nodes, or a single-entry map
     * of {@code permission -> attributes} for nodes carrying extra context.
     *
     * @param nodes the nodes to serialize
     * @return a YAML-friendly list representation
     */
    private static List<Object> serializePermissions(Set<NodeModel> nodes) {
        List<Object> data = new ArrayList<>();

        for (NodeModel node : nodes) {
            // just a raw, default node.
            boolean single = node.getValue() &&
                    node.getServer().equalsIgnoreCase("global") &&
                    node.getWorld().equalsIgnoreCase("global") &&
                    node.getExpiry() == 0L &&
                    node.getContexts().isEmpty();

            // just add a string to the list.
            if (single) {
                data.add(node.getPermission());
                continue;
            }

            // otherwise, this node has some other special context which needs to be saved.
            // we serialize this way so it gets represented nicely in YAML.

            // create a map of node attributes
            Map<String, Object> attributes = new LinkedHashMap<>();
            attributes.put("value", node.getValue());

            // "global" is the default and is omitted from the output
            if (!node.getServer().equals("global")) {
                attributes.put("server", node.getServer());
            }

            if (!node.getWorld().equals("global")) {
                attributes.put("world", node.getWorld());
            }

            // 0 means no expiry
            if (node.getExpiry() != 0L) {
                attributes.put("expiry", node.getExpiry());
            }

            if (!node.getContexts().isEmpty()) {
                // flatten the context multimap: single values as scalars, multiple as lists
                Map<String, Object> context = new HashMap<>();
                Map<String, Collection<String>> map = node.getContexts().toMultimap().asMap();

                for (Map.Entry<String, Collection<String>> e : map.entrySet()) {
                    List<String> vals = new ArrayList<>(e.getValue());
                    int size = vals.size();

                    if (size == 1) {
                        context.put(e.getKey(), vals.get(0));
                    } else if (size > 1) {
                        context.put(e.getKey(), vals);
                    }
                }

                attributes.put("context", context);
            }

            // create a new map to represent this entry in the list
            // the map will only contain one entry. (the permission --> attributes)
            Map<String, Object> perm = new HashMap<>();

            // add the node to the map
            perm.put(node.getPermission(), attributes);

            // add the map to the object list, and continue
            data.add(perm);
        }

        return data;
    }
}

View File

@ -47,7 +47,6 @@ import me.lucko.luckperms.common.node.NodeModel;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.references.UserIdentifier;
import me.lucko.luckperms.common.storage.dao.AbstractDao;
import me.lucko.luckperms.common.storage.dao.legacy.LegacySqlMigration;
import me.lucko.luckperms.common.storage.dao.sql.connection.AbstractConnectionFactory;
import me.lucko.luckperms.common.storage.dao.sql.connection.file.SQLiteConnectionFactory;
import me.lucko.luckperms.common.storage.dao.sql.connection.hikari.PostgreConnectionFactory;
@ -115,11 +114,8 @@ public class SqlDao extends AbstractDao {
private static final String ACTION_INSERT = "INSERT INTO {prefix}actions(time, actor_uuid, actor_name, type, acted_uuid, acted_name, action) VALUES(?, ?, ?, ?, ?, ?, ?)";
private static final String ACTION_SELECT_ALL = "SELECT * FROM {prefix}actions";
private final Gson gson;
private final AbstractConnectionFactory provider;
private final Function<String, String> prefix;
public SqlDao(LuckPermsPlugin plugin, AbstractConnectionFactory provider, String prefix) {
@ -133,10 +129,6 @@ public class SqlDao extends AbstractDao {
return this.gson;
}
public AbstractConnectionFactory getProvider() {
return this.provider;
}
public Function<String, String> getPrefix() {
return this.prefix;
}
@ -193,15 +185,6 @@ public class SqlDao extends AbstractDao {
}
}
}
// Try migration from legacy backing
if (tableExists("lp_users")) {
this.plugin.getLog().severe("===== Legacy Schema Migration =====");
this.plugin.getLog().severe("Starting migration from legacy schema. This could take a while....");
this.plugin.getLog().severe("Please do not stop your server while the migration takes place.");
new LegacySqlMigration(this).run();
}
}
// migrations

View File

@ -25,6 +25,9 @@
package me.lucko.luckperms.common.storage.provider;
/**
* Hook to allow external code to provide a storage dao
*/
public final class StorageProviders {
private static StorageProvider provider = null;

View File

@ -46,6 +46,9 @@ import java.util.Set;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
/**
* A storage wrapping that passes save tasks through a buffer
*/
public class BufferedOutputStorage implements Storage, Runnable {
public static BufferedOutputStorage wrap(Storage storage, long flushTime) {
return new BufferedOutputStorage(storage, flushTime);

View File

@ -49,7 +49,8 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
/**
* A Datastore wrapping that ensures all tasks are completed before {@link Storage#shutdown()} is called.
* A storage wrapping that ensures all tasks are completed before
* {@link Storage#shutdown()} is called.
*/
public class PhasedStorage implements Storage {
public static PhasedStorage wrap(Storage storage) {

View File

@ -1,72 +0,0 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.utils;
import com.zaxxer.hikari.HikariDataSource;
import java.sql.Connection;
import java.sql.SQLException;
/**
* A simple hikari wrapper
*/
public class HikariSupplier implements AutoCloseable {
private final String address;
private final String database;
private final String username;
private final String password;
private HikariDataSource hikari;
public HikariSupplier(String address, String database, String username, String password) {
this.address = address;
this.database = database;
this.username = username;
this.password = password;
}
public void setup(String poolName) {
this.hikari = new HikariDataSource();
this.hikari.setPoolName(poolName);
this.hikari.setMaximumPoolSize(2);
this.hikari.setDataSourceClassName("com.mysql.jdbc.jdbc2.optional.MysqlDataSource");
this.hikari.addDataSourceProperty("serverName", this.address.split(":")[0]);
this.hikari.addDataSourceProperty("port", this.address.split(":")[1]);
this.hikari.addDataSourceProperty("databaseName", this.database);
this.hikari.addDataSourceProperty("user", this.username);
this.hikari.addDataSourceProperty("password", this.password);
}
@Override
public void close() {
this.hikari.close();
}
public Connection getConnection() throws SQLException {
return this.hikari.getConnection();
}
}

View File

@ -28,7 +28,7 @@ package me.lucko.luckperms.common.utils;
import java.util.function.Consumer;
import java.util.function.Function;
public final class SafeIterator {
public final class SafeIteration {
public static <I> void iterate(Iterable<I> iterable, Consumer<I> action) {
for (I i : iterable) {
@ -70,6 +70,6 @@ public final class SafeIterator {
}
}
private SafeIterator() {}
private SafeIteration() {}
}

View File

@ -29,17 +29,17 @@ import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;
/**
* Nashorn provider utility
* Provides a nashorn script engine (lazily)
*/
public final class Scripting {
private static ScriptEngine SCRIPT_ENGINE = null;
private static ScriptEngine engine = null;
// Lazily load
public static synchronized ScriptEngine getScriptEngine() {
if (SCRIPT_ENGINE == null) {
SCRIPT_ENGINE = new ScriptEngineManager(null).getEngineByName("nashorn");
if (engine == null) {
engine = new ScriptEngineManager(null).getEngineByName("nashorn");
}
return SCRIPT_ENGINE;
return engine;
}
private Scripting() {}

View File

@ -63,7 +63,7 @@ import java.util.stream.Stream;
/**
* Utility methods for interacting with the LuckPerms web permission editor.
*/
public final class WebEditorUtils {
public final class WebEditor {
private static final Gson GSON = new Gson();
private static final String FILE_NAME = "luckperms-data.json";
@ -80,7 +80,7 @@ public final class WebEditorUtils {
}
// attach the holders permissions
payload.add("nodes", WebEditorUtils.serializePermissions(holder.getEnduringNodes().values().stream().map(NodeModel::fromNode)));
payload.add("nodes", serializePermissions(holder.getEnduringNodes().values().stream().map(NodeModel::fromNode)));
}
public static JsonObject formPayload(List<PermissionHolder> holders, Sender sender, String cmdLabel, LuckPermsPlugin plugin) {
@ -206,7 +206,7 @@ public final class WebEditorUtils {
}
}
public static JsonArray serializePermissions(Stream<NodeModel> nodes) {
private static JsonArray serializePermissions(Stream<NodeModel> nodes) {
JsonArray arr = new JsonArray();
nodes.forEach(node -> {
JsonObject attributes = new JsonObject();
@ -275,6 +275,6 @@ public final class WebEditorUtils {
return nodes;
}
private WebEditorUtils() {}
private WebEditor() {}
}

View File

@ -27,7 +27,7 @@ package me.lucko.luckperms.sponge;
import me.lucko.luckperms.common.plugin.SchedulerAdapter;
import me.lucko.luckperms.common.plugin.SchedulerTask;
import me.lucko.luckperms.common.utils.SafeIterator;
import me.lucko.luckperms.common.utils.SafeIteration;
import org.spongepowered.api.scheduler.Scheduler;
import org.spongepowered.api.scheduler.Task;
@ -122,7 +122,7 @@ public class SpongeSchedulerAdapter implements SchedulerAdapter {
@Override
public void shutdown() {
SafeIterator.iterate(this.tasks, SchedulerTask::cancel);
SafeIteration.iterate(this.tasks, SchedulerTask::cancel);
}
private static final class SpongeSchedulerTask implements SchedulerTask {

View File

@ -38,7 +38,7 @@ import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.utils.Predicates;
import me.lucko.luckperms.common.utils.SafeIterator;
import me.lucko.luckperms.common.utils.SafeIteration;
import me.lucko.luckperms.common.utils.Uuids;
import me.lucko.luckperms.sponge.LPSpongePlugin;
import me.lucko.luckperms.sponge.service.LuckPermsService;
@ -91,7 +91,7 @@ public class MigrationPermissionManager extends SubCommand<Object> {
// Migrate defaults
log.log("Migrating default subjects.");
SafeIterator.iterate(pmService.getKnownSubjects().values(), collection -> {
SafeIteration.iterate(pmService.getKnownSubjects().values(), collection -> {
migrateSubjectData(
collection.getDefaults().getSubjectData(),
lpService.getCollection("defaults").loadSubject(collection.getIdentifier()).join().sponge().getSubjectData()
@ -102,7 +102,7 @@ public class MigrationPermissionManager extends SubCommand<Object> {
// Migrate groups
log.log("Starting group migration.");
AtomicInteger groupCount = new AtomicInteger(0);
SafeIterator.iterate(pmService.getGroupSubjects().getAllSubjects(), pmGroup -> {
SafeIteration.iterate(pmService.getGroupSubjects().getAllSubjects(), pmGroup -> {
String pmName = MigrationUtils.standardizeName(pmGroup.getIdentifier());
// Make a LuckPerms group for the one being migrated
@ -117,7 +117,7 @@ public class MigrationPermissionManager extends SubCommand<Object> {
// Migrate users
log.log("Starting user migration.");
AtomicInteger userCount = new AtomicInteger(0);
SafeIterator.iterate(pmService.getUserSubjects().getAllSubjects(), pmUser -> {
SafeIteration.iterate(pmService.getUserSubjects().getAllSubjects(), pmUser -> {
UUID uuid = Uuids.parseNullable(pmUser.getIdentifier());
if (uuid == null) {
log.logErr("Could not parse UUID for user: " + pmUser.getIdentifier());

View File

@ -39,7 +39,7 @@ import me.lucko.luckperms.common.model.Track;
import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
import me.lucko.luckperms.common.utils.Predicates;
import me.lucko.luckperms.common.utils.SafeIterator;
import me.lucko.luckperms.common.utils.SafeIteration;
import me.lucko.luckperms.common.utils.Uuids;
import me.lucko.luckperms.sponge.LPSpongePlugin;
import me.lucko.luckperms.sponge.service.LuckPermsService;
@ -88,7 +88,7 @@ public class MigrationPermissionsEx extends SubCommand<Object> {
// Migrate defaults
log.log("Migrating default subjects.");
SafeIterator.iterate(pexService.getKnownSubjects().values(), collection -> {
SafeIteration.iterate(pexService.getKnownSubjects().values(), collection -> {
migrateSubjectData(
collection.getDefaults().getSubjectData(),
lpService.getCollection("defaults").loadSubject(collection.getIdentifier()).join().sponge().getSubjectData()
@ -113,7 +113,7 @@ public class MigrationPermissionsEx extends SubCommand<Object> {
// Migrate groups
log.log("Starting group migration.");
AtomicInteger groupCount = new AtomicInteger(0);
SafeIterator.iterate(pexService.getGroupSubjects().getAllSubjects(), pexGroup -> {
SafeIteration.iterate(pexService.getGroupSubjects().getAllSubjects(), pexGroup -> {
String pexName = MigrationUtils.standardizeName(pexGroup.getIdentifier());
Optional<String> rankString = pexGroup.getOption("rank");
@ -151,7 +151,7 @@ public class MigrationPermissionsEx extends SubCommand<Object> {
// Migrate tracks
log.log("Starting track migration.");
SafeIterator.iterate(tracks.entrySet(), e -> {
SafeIteration.iterate(tracks.entrySet(), e -> {
Track track = plugin.getStorage().createAndLoadTrack(e.getKey(), CreationCause.INTERNAL).join();
for (String groupName : e.getValue().values()) {
Group group = plugin.getGroupManager().getIfLoaded(groupName);
@ -169,7 +169,7 @@ public class MigrationPermissionsEx extends SubCommand<Object> {
// Increment the max weight from the group migrations. All user meta should override.
int userWeight = maxWeight + 5;
SafeIterator.iterate(pexService.getUserSubjects().getAllSubjects(), pexUser -> {
SafeIteration.iterate(pexService.getUserSubjects().getAllSubjects(), pexUser -> {
UUID uuid = Uuids.parseNullable(pexUser.getIdentifier());
if (uuid == null) {
log.logErr("Could not parse UUID for user: " + pexUser.getIdentifier());