Remove usage of the now-redundant ExtractedContexts class, other misc cleanup

Luck 2017-10-15 14:23:51 +01:00
parent 28961b1cfa
commit b26fc69e73
GPG Key ID: EFA9B3EC5FD90F8B
56 changed files with 443 additions and 487 deletions

View File

@ -25,22 +25,46 @@
package me.lucko.luckperms.api;
import com.google.common.base.Preconditions;
import me.lucko.luckperms.api.context.ContextSet;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import javax.annotation.Nonnull;
/**
* Context and options for a permission lookup.
* Encapsulates the options and settings for a permission lookup.
*
* <p>All values are immutable.</p>
* <p>This class is immutable.</p>
*
* @since 2.11
*/
public class Contexts {
/**
* The context key used to denote the subject's server
*/
public static final String SERVER_KEY = "server";
/**
* The context key used to denote the subject's world
*/
public static final String WORLD_KEY = "world";
private static final Contexts GLOBAL = new Contexts(ContextSet.empty(), true, true, true, true, true, false);
/**
* A 'global' or default contexts instance.
*
* Simply passes an empty context set, with all accumulation settings set to true.
*/
private static final Contexts GLOBAL = new Contexts(
ContextSet.empty(),
true,
true,
true,
true,
true,
false
);
/**
* Gets the {@link FullySatisfiedContexts} instance.
@ -69,12 +93,12 @@ public class Contexts {
/**
* The contexts that apply for this lookup
* The keys for servers and worlds are defined as static values.
*/
private final ContextSet context;
private final ImmutableContextSet context;
/**
* The mode to parse defaults on Bukkit
* If the target subject is OP. This is used to parse defaults on Bukkit,
* and is ignored on all other platforms.
*
* @since 2.12
*/
@ -105,18 +129,18 @@ public class Contexts {
*/
private final boolean applyGlobalWorldGroups;
public Contexts(@Nonnull ContextSet context, boolean includeGlobal, boolean includeGlobalWorld, boolean applyGroups, boolean applyGlobalGroups, boolean applyGlobalWorldGroups, boolean op) {
if (context == null) {
throw new NullPointerException("context");
}
// cache hashcode - this class is immutable, and is used as an index in the permission cache.
private final int hashCode;
this.context = context.makeImmutable();
public Contexts(@Nonnull ContextSet context, boolean includeGlobal, boolean includeGlobalWorld, boolean applyGroups, boolean applyGlobalGroups, boolean applyGlobalWorldGroups, boolean op) {
this.context = Preconditions.checkNotNull(context, "context").makeImmutable();
this.includeGlobal = includeGlobal;
this.includeGlobalWorld = includeGlobalWorld;
this.applyGroups = applyGroups;
this.applyGlobalGroups = applyGlobalGroups;
this.applyGlobalWorldGroups = applyGlobalWorldGroups;
this.op = op;
this.hashCode = calculateHashCode();
}
/**
@ -131,7 +155,8 @@ public class Contexts {
}
/**
* Gets if OP defaults should be included
* Gets if the target subject is OP. This is used to parse defaults on Bukkit,
* and is ignored on all other platforms.
*
* @return true if op defaults should be included
*/
@ -198,10 +223,6 @@ public class Contexts {
")";
}
/*
* Ugly auto-generated lombok code
*/
@Override
public boolean equals(Object o) {
if (o == this) return true;
@ -218,8 +239,7 @@ public class Contexts {
this.isApplyGlobalWorldGroups() == other.isApplyGlobalWorldGroups();
}
@Override
public int hashCode() {
private int calculateHashCode() {
final int PRIME = 59;
int result = 1;
final Object contexts = this.getContexts();
@ -233,4 +253,8 @@ public class Contexts {
return result;
}
@Override
public int hashCode() {
return hashCode;
}
}
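
Note: the comment in the hunk above explains that the hash code is cached because Contexts is immutable and used as a key in the permission cache. A minimal sketch of that precomputed-hashCode pattern, using a hypothetical CacheKey class rather than the LuckPerms types:

import java.util.Objects;

public final class CacheKey {
    private final String server;
    private final String world;
    private final int hashCode; // computed once; all fields are final and immutable

    public CacheKey(String server, String world) {
        this.server = Objects.requireNonNull(server, "server");
        this.world = Objects.requireNonNull(world, "world");
        this.hashCode = calculateHashCode();
    }

    private int calculateHashCode() {
        return Objects.hash(server, world);
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) return true;
        if (!(o instanceof CacheKey)) return false;
        CacheKey other = (CacheKey) o;
        return server.equals(other.server) && world.equals(other.world);
    }

    @Override
    public int hashCode() {
        return hashCode; // constant-time key for map lookups, no re-hashing per access
    }
}
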

View File

@ -55,7 +55,7 @@ public enum DataMutateResult {
*/
FAIL(false, RuntimeException::new);
private boolean value;
private final boolean value;
private final Supplier<? extends Exception> exceptionSupplier;
DataMutateResult(boolean value, Supplier<? extends Exception> exceptionSupplier) {
@ -104,6 +104,7 @@ public enum DataMutateResult {
sneakyThrow0(t);
}
@SuppressWarnings("unchecked")
private static <T extends Throwable> void sneakyThrow0(Throwable t) throws T {
throw (T) t;
}
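
Note: the @SuppressWarnings("unchecked") added above sits on the usual "sneaky throw" helper. A self-contained sketch of the idiom for reference (hypothetical class, not part of the API):

public final class SneakyThrow {
    private SneakyThrow() {}

    public static void sneakyThrow(Throwable t) {
        SneakyThrow.<RuntimeException>sneakyThrow0(t);
    }

    @SuppressWarnings("unchecked")
    private static <T extends Throwable> void sneakyThrow0(Throwable t) throws T {
        // generics are erased, so the cast is never checked at runtime and a
        // checked exception escapes without appearing in any throws clause
        throw (T) t;
    }
}
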

View File

@ -48,6 +48,8 @@ import javax.annotation.Nonnull;
* @since 3.3
*/
public final class FullySatisfiedContexts extends Contexts {
// singleton
private static final FullySatisfiedContexts INSTANCE = new FullySatisfiedContexts();
@Nonnull
@ -62,16 +64,18 @@ public final class FullySatisfiedContexts extends Contexts {
@Nonnull
@Override
public String toString() {
return "FullySatisfiedContexts";
return "FullySatisfiedContexts()";
}
@Override
public boolean equals(Object o) {
// this class is a singleton, so we can use object comparison to check equality.
return o == this;
}
@Override
public int hashCode() {
// just use the system hashcode - we need to override the hashcode impl in super
return System.identityHashCode(this);
}
}
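
Note: FullySatisfiedContexts extends the value-based Contexts class, so it has to override equals and hashCode back to identity semantics; otherwise the singleton would compare equal to a plain Contexts carrying the same flags. A small sketch of that shape, with hypothetical names:

// A value-based parent: equals/hashCode compare fields.
class Key {
    final int flag;
    Key(int flag) { this.flag = flag; }
    @Override public boolean equals(Object o) { return o instanceof Key && ((Key) o).flag == flag; }
    @Override public int hashCode() { return flag; }
}

// A singleton marker subclass: force identity comparison again.
final class SpecialKey extends Key {
    static final SpecialKey INSTANCE = new SpecialKey();
    private SpecialKey() { super(0); }
    @Override public boolean equals(Object o) { return o == this; }
    @Override public int hashCode() { return System.identityHashCode(this); }
}
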

View File

@ -426,8 +426,8 @@ public class LogEntry implements Comparable<LogEntry> {
* @param <B> the log builder type
*/
public static abstract class AbstractLogEntryBuilder<T extends LogEntry, B extends AbstractLogEntryBuilder<T, B>> {
private T obj;
private B thisObj;
private final T obj;
private final B thisObj;
public AbstractLogEntryBuilder() {
obj = createEmptyLog();

View File

@ -48,6 +48,7 @@ import javax.annotation.Nullable;
* <p>Any changes made to permission holding objects will be lost unless the
* instance is saved back to the {@link Storage}.</p>
*/
@SuppressWarnings("RedundantThrows")
public interface PermissionHolder {
/**

View File

@ -36,6 +36,7 @@ import javax.annotation.Nullable;
/**
* An ordered chain of {@link Group}s.
*/
@SuppressWarnings("RedundantThrows")
public interface Track {
/**

View File

@ -58,6 +58,9 @@ public final class MetaContexts {
private final MetaStackDefinition prefixStackDefinition;
private final MetaStackDefinition suffixStackDefinition;
// cache hashcode - this class is immutable, and is used as an index in the permission cache.
private final int hashCode;
/**
* Creates a new meta contexts instance
*
@ -69,6 +72,7 @@ public final class MetaContexts {
this.contexts = Preconditions.checkNotNull(contexts, "contexts");
this.prefixStackDefinition = Preconditions.checkNotNull(prefixStackDefinition, "prefixStackDefinition");
this.suffixStackDefinition = Preconditions.checkNotNull(suffixStackDefinition, "suffixStackDefinition");
this.hashCode = calculateHashCode();
}
@Nonnull
@ -106,8 +110,7 @@ public final class MetaContexts {
this.getSuffixStackDefinition().equals(other.getSuffixStackDefinition());
}
@Override
public int hashCode() {
private int calculateHashCode() {
final int PRIME = 59;
int result = 1;
result = result * PRIME + this.getContexts().hashCode();
@ -115,4 +118,9 @@ public final class MetaContexts {
result = result * PRIME + this.getSuffixStackDefinition().hashCode();
return result;
}
@Override
public int hashCode() {
return hashCode;
}
}

View File

@ -59,7 +59,7 @@ public class LPBukkitScheduler implements LuckPermsScheduler {
@Setter
private boolean useBukkitAsync = false;
private Set<BukkitTask> tasks = ConcurrentHashMap.newKeySet();
private final Set<BukkitTask> tasks = ConcurrentHashMap.newKeySet();
public LPBukkitScheduler(LPBukkitPlugin plugin) {
this.plugin = plugin;

View File

@ -55,15 +55,17 @@ public class Injector {
*
* This field is where the permissible is stored on a HumanEntity.
*/
private static Field humanEntityPermissibleField;
private static final Field HUMAN_ENTITY_PERMISSIBLE_FIELD;
/**
* The field where attachments are stored on a permissible base.
*/
private static Field permissibleBaseAttachmentsField;
private static final Field PERMISSIBLE_BASE_ATTACHMENTS_FIELD;
private static Throwable cachedThrowable = null;
static {
Field humanEntityPermissibleField;
Field permissibleBaseAttachmentsField;
try {
// Catch all. If this setup doesn't fully complete without
// exceptions, then the Injector will not work.
@ -85,9 +87,11 @@ public class Injector {
permissibleBaseAttachmentsField.setAccessible(true);
} catch (Throwable t) {
cachedThrowable = t;
t.printStackTrace();
throw new RuntimeException("Injector did not init successfully.", t);
}
HUMAN_ENTITY_PERMISSIBLE_FIELD = humanEntityPermissibleField;
PERMISSIBLE_BASE_ATTACHMENTS_FIELD = permissibleBaseAttachmentsField;
}
/**
@ -99,13 +103,8 @@ public class Injector {
*/
public static void inject(Player player, LPPermissible newPermissible) throws Exception {
// make sure the class inited without errors, otherwise, print a trace
if (cachedThrowable != null) {
throw new RuntimeException("Injector did not init successfully.", cachedThrowable);
}
// get the existing PermissibleBase held by the player
PermissibleBase oldPermissible = (PermissibleBase) humanEntityPermissibleField.get(player);
PermissibleBase oldPermissible = (PermissibleBase) HUMAN_ENTITY_PERMISSIBLE_FIELD.get(player);
// seems we have already injected into this player.
if (oldPermissible instanceof LPPermissible) {
@ -115,7 +114,7 @@ public class Injector {
// Move attachments over from the old permissible
//noinspection unchecked
List<PermissionAttachment> attachments = (List<PermissionAttachment>) permissibleBaseAttachmentsField.get(oldPermissible);
List<PermissionAttachment> attachments = (List<PermissionAttachment>) PERMISSIBLE_BASE_ATTACHMENTS_FIELD.get(oldPermissible);
newPermissible.addAttachments(attachments);
attachments.clear();
@ -128,7 +127,7 @@ public class Injector {
newPermissible.updateSubscriptionsAsync();
// inject the new instance
humanEntityPermissibleField.set(player, newPermissible);
HUMAN_ENTITY_PERMISSIBLE_FIELD.set(player, newPermissible);
// register the injection with the map
INJECTED_PERMISSIBLES.put(player.getUniqueId(), newPermissible);
@ -143,13 +142,9 @@ public class Injector {
* @throws Exception propagates any exceptions which were thrown during uninjection
*/
public static void unInject(Player player, boolean dummy, boolean unsubscribe) throws Exception {
// make sure the class inited without errors, otherwise, print a trace
if (cachedThrowable != null) {
throw new RuntimeException("Injector did not init successfully.", cachedThrowable);
}
// gets the players current permissible.
PermissibleBase permissible = (PermissibleBase) humanEntityPermissibleField.get(player);
PermissibleBase permissible = (PermissibleBase) HUMAN_ENTITY_PERMISSIBLE_FIELD.get(player);
// only uninject if the permissible was a luckperms one.
if (permissible instanceof LPPermissible) {
@ -169,7 +164,7 @@ public class Injector {
// handle the replacement permissible.
if (dummy) {
// just inject a dummy class. this is used when we know the player is about to quit the server.
humanEntityPermissibleField.set(player, new DummyPermissibleBase());
HUMAN_ENTITY_PERMISSIBLE_FIELD.set(player, new DummyPermissibleBase());
} else {
// otherwise, inject the permissible they had when we first injected.
@ -182,11 +177,11 @@ public class Injector {
}
//noinspection unchecked
List<PermissionAttachment> newPbAttachments = (List<PermissionAttachment>) permissibleBaseAttachmentsField.get(newPb);
List<PermissionAttachment> newPbAttachments = (List<PermissionAttachment>) PERMISSIBLE_BASE_ATTACHMENTS_FIELD.get(newPb);
newPbAttachments.addAll(lpAttachments);
lpAttachments.clear();
humanEntityPermissibleField.set(player, newPb);
HUMAN_ENTITY_PERMISSIBLE_FIELD.set(player, newPb);
}
}
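
Note: the Injector change above replaces the cachedThrowable field (previously checked on every inject/uninject call) with final fields assigned in a static initializer that now rethrows on failure. A rough sketch of that shape; the target class and field here are stand-ins, since the real CraftBukkit field names vary by version:

import java.lang.reflect.Field;

final class Target {
    private String secret = "hidden";
}

final class ReflectionHolder {
    static final Field SECRET_FIELD;

    static {
        Field field;
        try {
            // resolve and open the reflective handle exactly once
            field = Target.class.getDeclaredField("secret");
            field.setAccessible(true);
        } catch (Throwable t) {
            // surfaces as an ExceptionInInitializerError for the first caller;
            // subsequent uses of the class fail with NoClassDefFoundError
            throw new RuntimeException("ReflectionHolder did not init successfully.", t);
        }
        SECRET_FIELD = field;
    }

    private ReflectionHolder() {}
}
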

View File

@ -41,7 +41,7 @@ import java.util.concurrent.ConcurrentHashMap;
@RequiredArgsConstructor
public class ChildProcessor implements PermissionProcessor {
private final ChildPermissionProvider provider;
private Map<String, Boolean> childPermissions = new ConcurrentHashMap<>();
private final Map<String, Boolean> childPermissions = new ConcurrentHashMap<>();
@Override
public Tristate hasPermission(String permission) {

View File

@ -32,7 +32,6 @@ import me.lucko.luckperms.api.Contexts;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.caching.MetaData;
import me.lucko.luckperms.common.caching.MetaAccumulator;
import me.lucko.luckperms.common.contexts.ExtractedContexts;
import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.common.model.PermissionHolder;
import me.lucko.luckperms.common.model.User;
@ -284,7 +283,7 @@ public class VaultChatHook extends Chat {
holder.removeIf(type::matches);
// find the max inherited priority & add 10
MetaAccumulator metaAccumulator = holder.accumulateMeta(null, null, ExtractedContexts.generate(perms.createContextForWorldSet(finalWorld)));
MetaAccumulator metaAccumulator = holder.accumulateMeta(null, null, perms.createContextForWorldSet(finalWorld));
int priority = (type == ChatMetaType.PREFIX ? metaAccumulator.getPrefixes() : metaAccumulator.getSuffixes()).keySet().stream()
.mapToInt(e -> e).max().orElse(0) + 10;
@ -355,8 +354,8 @@ public class VaultChatHook extends Chat {
int priority = Integer.MIN_VALUE;
String meta = null;
ExtractedContexts ec = ExtractedContexts.generate(Contexts.of(perms.createContextForWorldLookup(world).getContexts(), perms.isIncludeGlobal(), true, true, true, true, false));
for (Node n : group.getAllNodes(ec)) {
Contexts contexts = Contexts.of(perms.createContextForWorldLookup(world).getContexts(), perms.isIncludeGlobal(), true, true, true, true, false);
for (Node n : group.getAllNodes(contexts)) {
if (!n.getValuePrimitive()) continue;
if (type.shouldIgnore(n)) continue;
if (!n.shouldApplyWithContext(perms.createContextForWorldLookup(world).getContexts())) continue;

View File

@ -37,7 +37,6 @@ import me.lucko.luckperms.api.context.MutableContextSet;
import me.lucko.luckperms.bukkit.LPBukkitPlugin;
import me.lucko.luckperms.common.caching.PermissionCache;
import me.lucko.luckperms.common.config.ConfigKeys;
import me.lucko.luckperms.common.contexts.ExtractedContexts;
import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.common.model.PermissionHolder;
import me.lucko.luckperms.common.model.User;
@ -226,7 +225,7 @@ public class VaultPermissionHook extends Permission {
if (group == null) return false;
// This is a nasty call. Groups aren't cached. :(
Map<String, Boolean> permissions = group.exportNodesAndShorthand(ExtractedContexts.generate(createContextForWorldLookup(world)), true);
Map<String, Boolean> permissions = group.exportNodesAndShorthand(createContextForWorldLookup(world), true);
return permissions.containsKey(permission.toLowerCase()) && permissions.get(permission.toLowerCase());
}

View File

@ -37,8 +37,8 @@ import java.util.concurrent.TimeUnit;
public class LPBungeeScheduler implements LuckPermsScheduler {
private final LPBungeePlugin plugin;
private Executor asyncExecutor;
private Set<ScheduledTask> tasks = ConcurrentHashMap.newKeySet();
private final Executor asyncExecutor;
private final Set<ScheduledTask> tasks = ConcurrentHashMap.newKeySet();
public LPBungeeScheduler(LPBungeePlugin plugin) {
this.plugin = plugin;

View File

@ -151,7 +151,7 @@ public class ApiProvider implements LuckPermsApi {
@Override
public Set<User> getUsers() {
return plugin.getUserManager().getAll().values().stream().map(u -> u.getDelegate()).collect(Collectors.toSet());
return plugin.getUserManager().getAll().values().stream().map(me.lucko.luckperms.common.model.User::getDelegate).collect(Collectors.toSet());
}
@Override
@ -177,7 +177,7 @@ public class ApiProvider implements LuckPermsApi {
@Override
public Set<Group> getGroups() {
return plugin.getGroupManager().getAll().values().stream().map(g -> g.getDelegate()).collect(Collectors.toSet());
return plugin.getGroupManager().getAll().values().stream().map(me.lucko.luckperms.common.model.Group::getDelegate).collect(Collectors.toSet());
}
@Override
@ -198,7 +198,7 @@ public class ApiProvider implements LuckPermsApi {
@Override
public Set<Track> getTracks() {
return plugin.getTrackManager().getAll().values().stream().map(t -> t.getDelegate()).collect(Collectors.toSet());
return plugin.getTrackManager().getAll().values().stream().map(me.lucko.luckperms.common.model.Track::getDelegate).collect(Collectors.toSet());
}
@Override

View File

@ -52,7 +52,7 @@ public class NodeFactoryDelegate implements me.lucko.luckperms.api.NodeFactory {
@Override
public Node.Builder newBuilderFromSerialisedNode(@NonNull String serialisedPermission, boolean value) {
return NodeFactory.builderFromSerializedNode(serialisedPermission, value);
return NodeFactory.builderFromLegacyString(serialisedPermission, value);
}
@Override

View File

@ -41,7 +41,6 @@ import me.lucko.luckperms.api.Tristate;
import me.lucko.luckperms.api.context.ContextSet;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.api.context.MutableContextSet;
import me.lucko.luckperms.common.contexts.ExtractedContexts;
import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.node.MetaType;
import me.lucko.luckperms.common.node.NodeFactory;
@ -108,7 +107,7 @@ public class PermissionHolderDelegate implements PermissionHolder {
@Override
public SortedSet<LocalizedNode> getAllNodes(@NonNull Contexts contexts) {
return new TreeSet<>(handle.resolveInheritancesAlmostEqual(ExtractedContexts.generate(contexts)));
return new TreeSet<>(handle.resolveInheritancesAlmostEqual(contexts));
}
@Override
@ -118,12 +117,12 @@ public class PermissionHolderDelegate implements PermissionHolder {
@Override
public Set<LocalizedNode> getAllNodesFiltered(@NonNull Contexts contexts) {
return new HashSet<>(handle.getAllNodes(ExtractedContexts.generate(contexts)));
return new HashSet<>(handle.getAllNodes(contexts));
}
@Override
public Map<String, Boolean> exportNodes(Contexts contexts, boolean lowerCase) {
return new HashMap<>(handle.exportNodesAndShorthand(ExtractedContexts.generate(contexts), lowerCase));
return new HashMap<>(handle.exportNodesAndShorthand(contexts, lowerCase));
}
@Override
@ -438,7 +437,7 @@ public class PermissionHolderDelegate implements PermissionHolder {
@Override
public List<LocalizedNode> resolveInheritances(Contexts contexts) {
return handle.resolveInheritances(ExtractedContexts.generate(contexts));
return handle.resolveInheritances(contexts);
}
@Override

View File

@ -49,7 +49,7 @@ public abstract class BufferedRequest<T> {
private final Executor executor;
private WeakReference<Processor<T>> processor = null;
private ReentrantLock lock = new ReentrantLock();
private final ReentrantLock lock = new ReentrantLock();
public CompletableFuture<T> request() {
lock.lock();

View File

@ -37,7 +37,6 @@ import me.lucko.luckperms.api.Contexts;
import me.lucko.luckperms.api.caching.MetaContexts;
import me.lucko.luckperms.api.caching.UserData;
import me.lucko.luckperms.common.config.ConfigKeys;
import me.lucko.luckperms.common.contexts.ExtractedContexts;
import me.lucko.luckperms.common.metastacking.SimpleMetaStack;
import me.lucko.luckperms.common.model.User;
import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
@ -89,7 +88,7 @@ public class UserCache implements UserData {
if (contexts == Contexts.allowAll()) {
data.setPermissions(user.exportNodesAndShorthand(true));
} else {
data.setPermissions(user.exportNodesAndShorthand(ExtractedContexts.generate(contexts), true));
data.setPermissions(user.exportNodesAndShorthand(contexts, true));
}
return data;
@ -102,7 +101,7 @@ public class UserCache implements UserData {
if (contexts.getContexts() == Contexts.allowAll()) {
data.loadMeta(user.accumulateMeta(newAccumulator(contexts), null));
} else {
data.loadMeta(user.accumulateMeta(newAccumulator(contexts), null, ExtractedContexts.generate(contexts.getContexts())));
data.loadMeta(user.accumulateMeta(newAccumulator(contexts), null, contexts.getContexts()));
}
return data;
@ -180,7 +179,7 @@ public class UserCache implements UserData {
if (contexts == Contexts.allowAll()) {
oldData.comparePermissions(user.exportNodesAndShorthand(true));
} else {
oldData.comparePermissions(user.exportNodesAndShorthand(ExtractedContexts.generate(contexts), true));
oldData.comparePermissions(user.exportNodesAndShorthand(contexts, true));
}
return oldData;
@ -198,7 +197,7 @@ public class UserCache implements UserData {
if (contexts.getContexts() == Contexts.allowAll()) {
oldData.loadMeta(user.accumulateMeta(newAccumulator(contexts), null));
} else {
oldData.loadMeta(user.accumulateMeta(newAccumulator(contexts), null, ExtractedContexts.generate(contexts.getContexts())));
oldData.loadMeta(user.accumulateMeta(newAccumulator(contexts), null, contexts.getContexts()));
}
return oldData;

View File

@ -27,6 +27,7 @@ package me.lucko.luckperms.common.calculators;
import lombok.RequiredArgsConstructor;
import com.github.benmanes.caffeine.cache.CacheLoader;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;
@ -42,14 +43,13 @@ import java.util.Map;
* Calculates and caches permissions
*/
@RequiredArgsConstructor
public class PermissionCalculator {
public class PermissionCalculator implements CacheLoader<String, Tristate> {
private final LuckPermsPlugin plugin;
private final PermissionCalculatorMetadata metadata;
private final List<PermissionProcessor> processors;
// caches lookup calls.
private final LoadingCache<String, Tristate> lookupCache = Caffeine.newBuilder()
.build(this::lookupPermissionValue);
private final LoadingCache<String, Tristate> lookupCache = Caffeine.newBuilder().build(this);
public void invalidateCache() {
lookupCache.invalidateAll();
@ -71,6 +71,11 @@ public class PermissionCalculator {
return result;
}
@Override
public Tristate load(String s) {
return lookupPermissionValue(s);
}
private Tristate lookupPermissionValue(String permission) {
// offer the permission to the permission vault
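
Note: the PermissionCalculator change above has the class implement Caffeine's CacheLoader itself, so the cache is built with build(this) instead of the previous this::lookupPermissionValue method reference; the behaviour is the same either way. A minimal standalone sketch of the same wiring, with illustrative types rather than the plugin's:

import com.github.benmanes.caffeine.cache.CacheLoader;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;

public class LengthCalculator implements CacheLoader<String, Integer> {

    // on a miss the cache calls load(key), because this object is the loader
    private final LoadingCache<String, Integer> lookupCache = Caffeine.newBuilder().build(this);

    @Override
    public Integer load(String key) {
        return computeLength(key);
    }

    public int lengthOf(String value) {
        return lookupCache.get(value);
    }

    private Integer computeLength(String value) {
        // stand-in for an expensive lookup
        return value.length();
    }

    public static void main(String[] args) {
        LengthCalculator calculator = new LengthCalculator();
        System.out.println(calculator.lengthOf("luckperms")); // 9, computed then cached
        System.out.println(calculator.lengthOf("luckperms")); // 9, served from the cache
    }
}
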

View File

@ -54,7 +54,7 @@ public class SharedMainCommand<T extends PermissionHolder> extends SubCommand<T>
/**
* If this instance of the shared command is targeting a user. Otherwise, it targets a group.
*/
private boolean user;
private final boolean user;
public SharedMainCommand(LocalizedSpec spec, String name, boolean user, List<SharedSubCommand> secondaryCommands) {
super(spec, name, null, Predicates.alwaysFalse());

View File

@ -116,7 +116,7 @@ public abstract class SubCommand<T> extends Command<T, Void> {
if (args.isEmpty() || args.get(0).equals("")) {
return cache.getRootNode().getChildren()
.map(Map::keySet)
.map(s -> s.stream().collect(Collectors.toList()))
.map(s -> (List<String>) new ArrayList<>(s))
.orElse(Collections.emptyList());
}

View File

@ -25,6 +25,7 @@
package me.lucko.luckperms.common.commands.impl.group;
import me.lucko.luckperms.api.LogEntry;
import me.lucko.luckperms.api.event.cause.CreationCause;
import me.lucko.luckperms.common.actionlog.ExtendedLogEntry;
import me.lucko.luckperms.common.commands.CommandResult;
@ -69,7 +70,7 @@ public class CreateGroup extends SingleCommand {
}
Message.CREATE_SUCCESS.send(sender, groupName);
ExtendedLogEntry.build().actor(sender).actedName(groupName).type('G').action("create").build().submit(plugin, sender);
ExtendedLogEntry.build().actor(sender).actedName(groupName).entryType(LogEntry.Type.GROUP).action("create").build().submit(plugin, sender);
return CommandResult.SUCCESS;
}
}

View File

@ -25,6 +25,7 @@
package me.lucko.luckperms.common.commands.impl.group;
import me.lucko.luckperms.api.LogEntry;
import me.lucko.luckperms.api.event.cause.DeletionCause;
import me.lucko.luckperms.common.actionlog.ExtendedLogEntry;
import me.lucko.luckperms.common.commands.CommandResult;
@ -78,7 +79,7 @@ public class DeleteGroup extends SingleCommand {
}
Message.DELETE_SUCCESS.send(sender, group.getDisplayName());
ExtendedLogEntry.build().actor(sender).actedName(groupName).type('G').action("delete").build().submit(plugin, sender);
ExtendedLogEntry.build().actor(sender).actedName(groupName).entryType(LogEntry.Type.GROUP).action("delete").build().submit(plugin, sender);
plugin.getUpdateTaskBuffer().request();
return CommandResult.SUCCESS;
}

View File

@ -141,10 +141,7 @@ public class BulkUpdateCommand extends SingleCommand {
bulkUpdateBuilder.constraint(Constraint.of(field, comparison, expr));
}
String id = "" + ThreadLocalRandom.current().nextInt(9) +
ThreadLocalRandom.current().nextInt(9) +
ThreadLocalRandom.current().nextInt(9) +
ThreadLocalRandom.current().nextInt(9);
String id = String.format("%04d", ThreadLocalRandom.current().nextInt(10000));
BulkUpdate bulkUpdate = bulkUpdateBuilder.build();

View File

@ -44,7 +44,7 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
public class ExportCommand extends SingleCommand {
private AtomicBoolean running = new AtomicBoolean(false);
private final AtomicBoolean running = new AtomicBoolean(false);
public ExportCommand(LocaleManager locale) {
super(CommandSpec.EXPORT.spec(locale), "Export", CommandPermission.EXPORT, Predicates.not(1));

View File

@ -45,7 +45,7 @@ import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
public class ImportCommand extends SingleCommand {
private AtomicBoolean running = new AtomicBoolean(false);
private final AtomicBoolean running = new AtomicBoolean(false);
public ImportCommand(LocaleManager locale) {
super(CommandSpec.IMPORT.spec(locale), "Import", CommandPermission.IMPORT, Predicates.not(1));

View File

@ -25,6 +25,7 @@
package me.lucko.luckperms.common.commands.impl.track;
import me.lucko.luckperms.api.LogEntry;
import me.lucko.luckperms.api.event.cause.CreationCause;
import me.lucko.luckperms.common.actionlog.ExtendedLogEntry;
import me.lucko.luckperms.common.commands.CommandResult;
@ -69,7 +70,7 @@ public class CreateTrack extends SingleCommand {
}
Message.CREATE_SUCCESS.send(sender, trackName);
ExtendedLogEntry.build().actor(sender).actedName(trackName).type('T').action("create").build().submit(plugin, sender);
ExtendedLogEntry.build().actor(sender).actedName(trackName).entryType(LogEntry.Type.TRACK).action("create").build().submit(plugin, sender);
return CommandResult.SUCCESS;
}
}

View File

@ -25,6 +25,7 @@
package me.lucko.luckperms.common.commands.impl.track;
import me.lucko.luckperms.api.LogEntry;
import me.lucko.luckperms.api.event.cause.DeletionCause;
import me.lucko.luckperms.common.actionlog.ExtendedLogEntry;
import me.lucko.luckperms.common.commands.CommandResult;
@ -71,7 +72,7 @@ public class DeleteTrack extends SingleCommand {
}
Message.DELETE_SUCCESS.send(sender, trackName);
ExtendedLogEntry.build().actor(sender).actedName(trackName).type('T').action("delete").build().submit(plugin, sender);
ExtendedLogEntry.build().actor(sender).actedName(trackName).entryType(LogEntry.Type.TRACK).action("delete").build().submit(plugin, sender);
plugin.getUpdateTaskBuffer().request();
return CommandResult.SUCCESS;
}

View File

@ -169,7 +169,7 @@ public enum CommandPermission {
public static final String ROOT = "luckperms.";
private String node;
private final String node;
@Getter
private Type type;

View File

@ -1,76 +0,0 @@
/*
* This file is part of LuckPerms, licensed under the MIT License.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package me.lucko.luckperms.common.contexts;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.ToString;
import me.lucko.luckperms.api.Contexts;
import me.lucko.luckperms.api.context.ContextSet;
import me.lucko.luckperms.api.context.ImmutableContextSet;
@Getter
@EqualsAndHashCode
@ToString
public final class ExtractedContexts {
public static ExtractedContexts generate(Contexts contexts) {
return new ExtractedContexts(contexts);
}
public static ExtractedContexts generate(ContextSet contexts) {
return new ExtractedContexts(contexts);
}
private final Contexts contexts;
private final ImmutableContextSet contextSet;
private String server;
private String world;
private ExtractedContexts(Contexts context) {
this.contexts = context;
this.contextSet = context.getContexts().makeImmutable();
setup(context.getContexts());
}
private ExtractedContexts(ContextSet contexts) {
this.contexts = null;
this.contextSet = contexts.makeImmutable();
setup(contexts);
}
private void setup(ContextSet contexts) {
server = contexts.getAnyValue("server").orElse(null);
world = contexts.getAnyValue("world").orElse(null);
}
public Contexts getContexts() {
if (contexts == null) {
throw new NullPointerException("contexts");
}
return contexts;
}
}

View File

@ -68,7 +68,7 @@ public class LogicParser {
String match = input.substring(i, i2 + 1);
String matchContent = match.substring(1, match.length() - 1);
String matchReplacement = ("" + checker.apply(matchContent)).toLowerCase();
String matchReplacement = (Boolean.toString(checker.apply(matchContent))).toLowerCase();
input = input.replaceFirst(Pattern.quote(match), matchReplacement);
}

View File

@ -438,8 +438,8 @@ public enum Message {
}
@Getter
private String message;
private boolean showPrefix;
private final String message;
private final boolean showPrefix;
Message(String message, boolean showPrefix) {
// rewrite hardcoded placeholders according to their position

View File

@ -73,9 +73,9 @@ public class ProgressLogger {
public void logAllProgress(String msg, int amount) {
if (pluginName == null) {
listeners.forEach(s -> logProgressMessage.send(s, msg.replace("{}", "" + amount)));
listeners.forEach(s -> logProgressMessage.send(s, msg.replace("{}", Integer.toString(amount))));
} else {
listeners.forEach(s -> logProgressMessage.send(s, pluginName, msg.replace("{}", "" + amount)));
listeners.forEach(s -> logProgressMessage.send(s, pluginName, msg.replace("{}", Integer.toString(amount))));
}
}

View File

@ -25,7 +25,6 @@
package me.lucko.luckperms.common.metastacking;
import lombok.EqualsAndHashCode;
import lombok.Getter;
import lombok.NonNull;
import lombok.ToString;
@ -38,7 +37,6 @@ import me.lucko.luckperms.api.metastacking.MetaStackElement;
import java.util.List;
@Getter
@EqualsAndHashCode
@ToString
public final class SimpleMetaStackDefinition implements MetaStackDefinition {
@ -47,10 +45,41 @@ public final class SimpleMetaStackDefinition implements MetaStackDefinition {
private final String middleSpacer;
private final String endSpacer;
// cache hashcode - this class is immutable, and is used as an index in MetaContexts
private final int hashCode;
public SimpleMetaStackDefinition(@NonNull List<MetaStackElement> elements, @NonNull String startSpacer, @NonNull String middleSpacer, @NonNull String endSpacer) {
this.elements = ImmutableList.copyOf(elements);
this.startSpacer = startSpacer;
this.middleSpacer = middleSpacer;
this.endSpacer = endSpacer;
this.hashCode = calculateHashCode();
}
@Override
public boolean equals(Object o) {
if (o == this) return true;
if (!(o instanceof SimpleMetaStackDefinition)) return false;
final SimpleMetaStackDefinition other = (SimpleMetaStackDefinition) o;
return this.getElements().equals(other.getElements()) &&
this.getStartSpacer().equals(other.getStartSpacer()) &&
this.getMiddleSpacer().equals(other.getMiddleSpacer()) &&
this.getEndSpacer().equals(other.getEndSpacer());
}
private int calculateHashCode() {
final int PRIME = 59;
int result = 1;
result = result * PRIME + this.getElements().hashCode();
result = result * PRIME + this.getStartSpacer().hashCode();
result = result * PRIME + this.getMiddleSpacer().hashCode();
result = result * PRIME + this.getEndSpacer().hashCode();
return result;
}
@Override
public int hashCode() {
return hashCode;
}
}

View File

@ -51,7 +51,6 @@ import me.lucko.luckperms.common.caching.MetaAccumulator;
import me.lucko.luckperms.common.caching.handlers.StateListener;
import me.lucko.luckperms.common.config.ConfigKeys;
import me.lucko.luckperms.common.contexts.ContextSetComparator;
import me.lucko.luckperms.common.contexts.ExtractedContexts;
import me.lucko.luckperms.common.node.ImmutableLocalizedNode;
import me.lucko.luckperms.common.node.InheritanceInfo;
import me.lucko.luckperms.common.node.MetaType;
@ -480,7 +479,7 @@ public abstract class PermissionHolder {
public boolean removeIf(Predicate<Node> predicate) {
boolean result;
ImmutableCollection<Node> before = getEnduringNodes().values();;
ImmutableCollection<Node> before = getEnduringNodes().values();
nodesLock.lock();
try {
@ -524,7 +523,7 @@ public abstract class PermissionHolder {
* @param context context to decide if groups should be applied
* @return a set of nodes
*/
public List<LocalizedNode> resolveInheritances(List<LocalizedNode> accumulator, Set<String> excludedGroups, ExtractedContexts context) {
public List<LocalizedNode> resolveInheritances(List<LocalizedNode> accumulator, Set<String> excludedGroups, Contexts context) {
if (accumulator == null) {
accumulator = new ArrayList<>();
}
@ -538,14 +537,12 @@ public abstract class PermissionHolder {
}
// get and add the objects own nodes
List<Node> nodes = filterNodes(context.getContextSet());
List<Node> nodes = filterNodes(context.getContexts());
for (Node node : nodes) {
ImmutableLocalizedNode localizedNode = ImmutableLocalizedNode.of(node, getObjectName());
accumulator.add(localizedNode);
}
Contexts contexts = context.getContexts();
// resolve and process the objects parents
List<Group> resolvedGroups = new ArrayList<>();
Set<String> processedGroups = new HashSet<>();
@ -556,7 +553,7 @@ public abstract class PermissionHolder {
if (!processedGroups.add(groupName) || excludedGroups.contains(groupName) || !n.getValuePrimitive()) continue;
if (!((contexts.isApplyGlobalGroups() || n.isServerSpecific()) && (contexts.isApplyGlobalWorldGroups() || n.isWorldSpecific()))) {
if (!((context.isApplyGlobalGroups() || n.isServerSpecific()) && (context.isApplyGlobalWorldGroups() || n.isWorldSpecific()))) {
continue;
}
@ -576,11 +573,11 @@ public abstract class PermissionHolder {
return accumulator;
}
public List<LocalizedNode> resolveInheritances(ExtractedContexts context) {
public List<LocalizedNode> resolveInheritances(Contexts context) {
return resolveInheritances(null, null, context);
}
public SortedSet<LocalizedNode> resolveInheritancesAlmostEqual(ExtractedContexts contexts) {
public SortedSet<LocalizedNode> resolveInheritancesAlmostEqual(Contexts contexts) {
List<LocalizedNode> nodes = resolveInheritances(new LinkedList<>(), null, contexts);
NodeTools.removeAlmostEqual(nodes.iterator());
SortedSet<LocalizedNode> ret = new TreeSet<>(NodeWithContextComparator.reverse());
@ -588,7 +585,7 @@ public abstract class PermissionHolder {
return ret;
}
public SortedSet<LocalizedNode> resolveInheritancesMergeTemp(ExtractedContexts contexts) {
public SortedSet<LocalizedNode> resolveInheritancesMergeTemp(Contexts contexts) {
List<LocalizedNode> nodes = resolveInheritances(new LinkedList<>(), null, contexts);
NodeTools.removeIgnoreValueOrTemp(nodes.iterator());
SortedSet<LocalizedNode> ret = new TreeSet<>(NodeWithContextComparator.reverse());
@ -668,24 +665,22 @@ public abstract class PermissionHolder {
return ret;
}
public SortedSet<LocalizedNode> getAllNodes(ExtractedContexts context) {
Contexts contexts = context.getContexts();
public SortedSet<LocalizedNode> getAllNodes(Contexts context) {
List<LocalizedNode> entries;
if (contexts.isApplyGroups()) {
if (context.isApplyGroups()) {
entries = resolveInheritances(new LinkedList<>(), null, context);
} else {
entries = new LinkedList<>();
for (Node n : filterNodes(context.getContextSet())) {
for (Node n : filterNodes(context.getContexts())) {
ImmutableLocalizedNode localizedNode = ImmutableLocalizedNode.of(n, getObjectName());
entries.add(localizedNode);
}
}
if (!contexts.isIncludeGlobal()) {
if (!context.isIncludeGlobal()) {
entries.removeIf(n -> !n.isGroupNode() && !n.isServerSpecific());
}
if (!contexts.isApplyGlobalWorldGroups()) {
if (!context.isApplyGlobalWorldGroups()) {
entries.removeIf(n -> !n.isGroupNode() && !n.isWorldSpecific());
}
@ -695,24 +690,22 @@ public abstract class PermissionHolder {
return ret;
}
public Map<String, Boolean> exportNodesAndShorthand(ExtractedContexts context, boolean lowerCase) {
Contexts contexts = context.getContexts();
public Map<String, Boolean> exportNodesAndShorthand(Contexts context, boolean lowerCase) {
List<LocalizedNode> entries;
if (contexts.isApplyGroups()) {
if (context.isApplyGroups()) {
entries = resolveInheritances(new LinkedList<>(), null, context);
} else {
entries = new LinkedList<>();
for (Node n : filterNodes(context.getContextSet())) {
for (Node n : filterNodes(context.getContexts())) {
ImmutableLocalizedNode localizedNode = ImmutableLocalizedNode.of(n, getObjectName());
entries.add(localizedNode);
}
}
if (!contexts.isIncludeGlobal()) {
if (!context.isIncludeGlobal()) {
entries.removeIf(n -> !n.isGroupNode() && !n.isServerSpecific());
}
if (!contexts.isApplyGlobalWorldGroups()) {
if (!context.isApplyGlobalWorldGroups()) {
entries.removeIf(n -> !n.isGroupNode() && !n.isWorldSpecific());
}
@ -755,7 +748,7 @@ public abstract class PermissionHolder {
return ImmutableMap.copyOf(perms);
}
public MetaAccumulator accumulateMeta(MetaAccumulator accumulator, Set<String> excludedGroups, ExtractedContexts context) {
public MetaAccumulator accumulateMeta(MetaAccumulator accumulator, Set<String> excludedGroups, Contexts context) {
if (accumulator == null) {
accumulator = MetaAccumulator.makeFromConfig(plugin);
}
@ -768,16 +761,14 @@ public abstract class PermissionHolder {
excludedGroups.add(getObjectName().toLowerCase());
}
Contexts contexts = context.getContexts();
// get and add the objects own nodes
List<Node> nodes = filterNodes(context.getContextSet());
List<Node> nodes = filterNodes(context.getContexts());
for (Node node : nodes) {
if (!node.getValuePrimitive()) continue;
if (!node.isMeta() && !node.isPrefix() && !node.isSuffix()) continue;
if (!((contexts.isIncludeGlobal() || node.isServerSpecific()) && (contexts.isIncludeGlobalWorld() || node.isWorldSpecific()))) {
if (!((context.isIncludeGlobal() || node.isServerSpecific()) && (context.isIncludeGlobalWorld() || node.isWorldSpecific()))) {
continue;
}
@ -799,7 +790,7 @@ public abstract class PermissionHolder {
if (!processedGroups.add(groupName) || excludedGroups.contains(groupName) || !n.getValuePrimitive()) continue;
if (!((contexts.isApplyGlobalGroups() || n.isServerSpecific()) && (contexts.isApplyGlobalWorldGroups() || n.isWorldSpecific()))) {
if (!((context.isApplyGlobalGroups() || n.isServerSpecific()) && (context.isApplyGlobalWorldGroups() || n.isWorldSpecific()))) {
continue;
}
@ -1049,7 +1040,7 @@ public abstract class PermissionHolder {
return DataMutateResult.ALREADY_HAS;
}
ImmutableCollection<Node> before = getEnduringNodes().values();;
ImmutableCollection<Node> before = getEnduringNodes().values();
nodesLock.lock();
try {

View File

@ -28,11 +28,8 @@ package me.lucko.luckperms.common.node;
import lombok.Getter;
import lombok.ToString;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Maps;
import me.lucko.luckperms.api.MetaUtils;
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.context.ContextSet;
import me.lucko.luckperms.api.context.ImmutableContextSet;
@ -56,11 +53,21 @@ import static com.google.common.base.Preconditions.checkState;
*/
@ToString(of = {"permission", "value", "override", "server", "world", "expireAt", "contexts"})
public final class ImmutableNode implements Node {
/**
* The character which separates each part of a permission node
*/
private static final int NODE_SEPARATOR_CHAR = Character.getNumericValue('.');
private static final String[] PERMISSION_DELIMS = new String[]{"/", "-", "$", "(", ")", "=", ","};
private static final String[] SERVER_WORLD_DELIMS = new String[]{"/", "-"};
private static final Splitter META_SPLITTER = Splitter.on(PatternCache.compileDelimitedMatcher(".", "\\")).limit(2);
/**
* The characters which are delimited when serializing a permission string
*/
private static final String[] PERMISSION_DELIMITERS = new String[]{"/", "-", "$", "(", ")", "=", ","};
/**
* The characters which are delimited when serializing a server or world string
*/
private static final String[] SERVER_WORLD_DELIMITERS = new String[]{"/", "-"};
/*
@ -114,19 +121,11 @@ public final class ImmutableNode implements Node {
private final int hashCode;
private final boolean isGroup;
// all nullable
private String groupName;
private final boolean isWildcard;
private final int wildcardLevel;
private final boolean isMeta;
private Map.Entry<String, String> meta;
private final boolean isPrefix;
private Map.Entry<Integer, String> prefix;
private final boolean isSuffix;
private Map.Entry<Integer, String> suffix;
private final List<String> resolvedShorthand;
@ -145,65 +144,34 @@ public final class ImmutableNode implements Node {
*/
@SuppressWarnings("deprecation")
ImmutableNode(String permission, boolean value, boolean override, long expireAt, String server, String world, ContextSet contexts) {
if (permission == null || permission.equals("")) {
if (permission == null || permission.isEmpty()) {
throw new IllegalArgumentException("Empty permission");
}
// standardize server/world values.
if (server != null) {
server = server.toLowerCase();
}
if (world != null) {
world = world.toLowerCase();
}
if (server != null && (server.equals("global") || server.equals(""))) {
server = null;
}
if (world != null && (world.equals("global") || world.equals(""))) {
world = null;
}
server = standardizeServerWorld(server);
world = standardizeServerWorld(world);
this.permission = NodeFactory.unescapeDelimiters(permission, PERMISSION_DELIMS).intern();
// define core attributes
this.permission = NodeFactory.unescapeDelimiters(permission, PERMISSION_DELIMITERS).intern();
this.value = value;
this.override = override;
this.expireAt = expireAt;
this.server = internString(NodeFactory.unescapeDelimiters(server, SERVER_WORLD_DELIMS));
this.world = internString(NodeFactory.unescapeDelimiters(world, SERVER_WORLD_DELIMS));
this.server = internString(NodeFactory.unescapeDelimiters(server, SERVER_WORLD_DELIMITERS));
this.world = internString(NodeFactory.unescapeDelimiters(world, SERVER_WORLD_DELIMITERS));
this.contexts = contexts == null ? ContextSet.empty() : contexts.makeImmutable();
String lowerCasePermission = this.permission.toLowerCase();
// Setup state
isGroup = lowerCasePermission.startsWith("group.");
if (isGroup) {
groupName = lowerCasePermission.substring("group.".length()).intern();
}
isWildcard = this.permission.endsWith(".*");
wildcardLevel = this.permission.chars().filter(num -> num == NODE_SEPARATOR_CHAR).sum();
isMeta = NodeFactory.isMetaNode(this.permission);
if (isMeta) {
List<String> metaPart = META_SPLITTER.splitToList(this.permission.substring("meta.".length()));
meta = Maps.immutableEntry(MetaUtils.unescapeCharacters(metaPart.get(0)).intern(), MetaUtils.unescapeCharacters(metaPart.get(1)).intern());
}
isPrefix = NodeFactory.isPrefixNode(this.permission);
if (isPrefix) {
List<String> prefixPart = META_SPLITTER.splitToList(this.permission.substring("prefix.".length()));
Integer i = Integer.parseInt(prefixPart.get(0));
prefix = Maps.immutableEntry(i, MetaUtils.unescapeCharacters(prefixPart.get(1)).intern());
}
isSuffix = NodeFactory.isSuffixNode(this.permission);
if (isSuffix) {
List<String> suffixPart = META_SPLITTER.splitToList(this.permission.substring("suffix.".length()));
Integer i = Integer.parseInt(suffixPart.get(0));
suffix = Maps.immutableEntry(i, MetaUtils.unescapeCharacters(suffixPart.get(1)).intern());
}
// define cached state
groupName = NodeFactory.parseGroupNode(this.permission);
wildcardLevel = this.permission.endsWith(".*") ? this.permission.chars().filter(num -> num == NODE_SEPARATOR_CHAR).sum() : -1;
meta = NodeFactory.parseMetaNode(this.permission);
prefix = NodeFactory.parsePrefixNode(this.permission);
suffix = NodeFactory.parseSuffixNode(this.permission);
resolvedShorthand = ImmutableList.copyOf(ShorthandParser.parseShorthand(getPermission()));
optServer = Optional.ofNullable(this.server);
optWorld = Optional.ofNullable(this.world);
// calculate the "full" context set
if (this.server != null || this.world != null) {
MutableContextSet fullContexts = this.contexts.mutableCopy();
if (this.server != null) {
@ -218,8 +186,6 @@ public final class ImmutableNode implements Node {
this.fullContexts = this.contexts;
}
this.optServer = Optional.ofNullable(this.server);
this.optWorld = Optional.ofNullable(this.world);
this.hashCode = calculateHashCode();
}
@ -288,7 +254,7 @@ public final class ImmutableNode implements Node {
@Override
public boolean isGroupNode() {
return isGroup;
return groupName != null;
}
@Override
@ -299,17 +265,18 @@ public final class ImmutableNode implements Node {
@Override
public boolean isWildcard() {
return isWildcard;
return wildcardLevel != -1;
}
@Override
public int getWildcardLevel() {
checkState(isWildcard(), "Node is not a wildcard");
return wildcardLevel;
}
@Override
public boolean isMeta() {
return isMeta;
return meta != null;
}
@Override
@ -320,7 +287,7 @@ public final class ImmutableNode implements Node {
@Override
public boolean isPrefix() {
return isPrefix;
return prefix != null;
}
@Override
@ -331,7 +298,7 @@ public final class ImmutableNode implements Node {
@Override
public boolean isSuffix() {
return isSuffix;
return suffix != null;
}
@Override
@ -446,11 +413,11 @@ public final class ImmutableNode implements Node {
StringBuilder builder = new StringBuilder();
if (server != null) {
builder.append(NodeFactory.escapeDelimiters(server, SERVER_WORLD_DELIMS));
if (world != null) builder.append("-").append(NodeFactory.escapeDelimiters(world, SERVER_WORLD_DELIMS));
builder.append(NodeFactory.escapeDelimiters(server, SERVER_WORLD_DELIMITERS));
if (world != null) builder.append("-").append(NodeFactory.escapeDelimiters(world, SERVER_WORLD_DELIMITERS));
builder.append("/");
} else {
if (world != null) builder.append("global-").append(NodeFactory.escapeDelimiters(world, SERVER_WORLD_DELIMS)).append("/");
if (world != null) builder.append("global-").append(NodeFactory.escapeDelimiters(world, SERVER_WORLD_DELIMITERS)).append("/");
}
if (!contexts.isEmpty()) {
@ -618,4 +585,16 @@ public final class ImmutableNode implements Node {
return s == null ? null : s.intern();
}
private static String standardizeServerWorld(String s) {
if (s != null) {
s = s.toLowerCase();
if (s.equals("global") || s.isEmpty()) {
s = null;
}
}
return s;
}
}
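
Note: the ImmutableNode rewrite above drops the isGroup/isMeta/isPrefix/isSuffix boolean fields and instead stores the parsed value (null when absent), deriving the boolean from it. A compact sketch of that pattern with a hypothetical prefix-style entry; the real parser handles escaped delimiters, this sketch uses a plain split:

import java.util.AbstractMap;
import java.util.Map;

public final class ParsedEntry {

    // null when the raw string is not a "prefix.<weight>.<value>" entry
    private final Map.Entry<Integer, String> prefix;

    public ParsedEntry(String raw) {
        this.prefix = parsePrefix(raw);
    }

    public boolean isPrefix() {
        return prefix != null; // the flag is derived, not stored separately
    }

    public Map.Entry<Integer, String> getPrefix() {
        if (prefix == null) {
            throw new IllegalStateException("not a prefix entry");
        }
        return prefix;
    }

    private static Map.Entry<Integer, String> parsePrefix(String raw) {
        if (!raw.startsWith("prefix.")) {
            return null;
        }
        String[] parts = raw.substring("prefix.".length()).split("\\.", 2);
        if (parts.length != 2) {
            return null;
        }
        try {
            return new AbstractMap.SimpleImmutableEntry<>(Integer.parseInt(parts[0]), parts[1]);
        } catch (NumberFormatException e) {
            return null;
        }
    }
}
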

View File

@ -30,6 +30,7 @@ import lombok.experimental.UtilityClass;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.LoadingCache;
import com.google.common.base.Splitter;
import com.google.common.collect.Maps;
import me.lucko.luckperms.api.ChatMetaType;
import me.lucko.luckperms.api.MetaUtils;
@ -38,23 +39,41 @@ import me.lucko.luckperms.api.context.ContextSet;
import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.common.utils.PatternCache;
import java.util.List;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;
/**
* Utility class to make Node(Builder) instances from serialised strings or existing Nodes
*/
@SuppressWarnings("deprecation")
@UtilityClass
public class NodeFactory {
private static final LoadingCache<String, Node> CACHE = Caffeine.newBuilder()
.build(s -> builderFromSerializedNode(s, true).build());
private static final LoadingCache<String, Node> CACHE_NEGATED = Caffeine.newBuilder()
.build(s -> builderFromSerializedNode(s, false).build());
// used to split prefix/suffix/meta nodes
private static final Splitter META_SPLITTER = Splitter.on(PatternCache.compileDelimitedMatcher(".", "\\")).limit(2);
// legacy node format delimiters
private static final Pattern LEGACY_SERVER_DELIM = PatternCache.compileDelimitedMatcher("/", "\\");
private static final Splitter LEGACY_SERVER_SPLITTER = Splitter.on(LEGACY_SERVER_DELIM).limit(2);
private static final Pattern LEGACY_WORLD_DELIM = PatternCache.compileDelimitedMatcher("-", "\\");
private static final Splitter LEGACY_WORLD_SPLITTER = Splitter.on(LEGACY_WORLD_DELIM).limit(2);
private static final Pattern LEGACY_EXPIRY_DELIM = PatternCache.compileDelimitedMatcher("$", "\\");
private static final Splitter LEGACY_EXPIRY_SPLITTER = Splitter.on(LEGACY_EXPIRY_DELIM).limit(2);
// caches the conversion between legacy node strings --> node instances
private static final LoadingCache<String, Node> LEGACY_SERIALIZATION_CACHE = Caffeine.newBuilder()
.expireAfterAccess(10, TimeUnit.MINUTES)
.build(s -> builderFromLegacyString(s, true).build());
private static final LoadingCache<String, Node> LEGACY_SERIALIZATION_CACHE_NEGATED = Caffeine.newBuilder()
.expireAfterAccess(10, TimeUnit.MINUTES)
.build(s -> builderFromLegacyString(s, false).build());
public static Node fromSerializedNode(String s, Boolean b) {
try {
return b ? CACHE.get(s) : CACHE_NEGATED.get(s);
return b ? LEGACY_SERIALIZATION_CACHE.get(s) : LEGACY_SERIALIZATION_CACHE_NEGATED.get(s);
} catch (Exception e) {
throw new IllegalArgumentException(e);
}
@ -64,44 +83,59 @@ public class NodeFactory {
return new NodeBuilder(s, false);
}
public static Node.Builder builderFromSerializedNode(String s, Boolean b) {
public static Node.Builder builderFromLegacyString(String s, Boolean b) {
// if contains /
if (PatternCache.compileDelimitedMatcher("/", "\\").matcher(s).find()) {
List<String> parts = Splitter.on(PatternCache.compileDelimitedMatcher("/", "\\")).limit(2).splitToList(s);
if (LEGACY_SERVER_DELIM.matcher(s).find()) {
// 0=server(+world) 1=node
Iterator<String> parts = LEGACY_SERVER_SPLITTER.split(s).iterator();
String parts0 = parts.next();
String parts1 = parts.next();
// WORLD SPECIFIC
// if parts[0] contains -
if (PatternCache.compileDelimitedMatcher("-", "\\").matcher(parts.get(0)).find()) {
List<String> serverParts = Splitter.on(PatternCache.compileDelimitedMatcher("-", "\\")).limit(2).splitToList(parts.get(0));
if (LEGACY_WORLD_DELIM.matcher(parts0).find()) {
// 0=server 1=world
Iterator<String> serverParts = LEGACY_WORLD_SPLITTER.split(parts0).iterator();
String serverParts0 = serverParts.next();
String serverParts1 = serverParts.next();
// if parts[1] contains $
if (PatternCache.compileDelimitedMatcher("$", "\\").matcher(parts.get(1)).find()) {
List<String> tempParts = Splitter.on('$').limit(2).splitToList(parts.get(1));
return new NodeBuilder(tempParts.get(0), true).setServer(serverParts.get(0)).setWorld(serverParts.get(1))
.setExpiry(Long.parseLong(tempParts.get(1))).setValue(b);
if (LEGACY_EXPIRY_DELIM.matcher(parts1).find()) {
// 0=node 1=expiry
Iterator<String> tempParts = LEGACY_EXPIRY_SPLITTER.split(parts1).iterator();
String tempParts0 = tempParts.next();
String tempParts1 = tempParts.next();
return new NodeBuilder(tempParts0, true).setServer(serverParts0).setWorld(serverParts1).setExpiry(Long.parseLong(tempParts1)).setValue(b);
} else {
return new NodeBuilder(parts.get(1), true).setServer(serverParts.get(0)).setWorld(serverParts.get(1)).setValue(b);
return new NodeBuilder(parts1, true).setServer(serverParts0).setWorld(serverParts1).setValue(b);
}
} else {
// SERVER BUT NOT WORLD SPECIFIC
// if parts[1] contains $
if (PatternCache.compileDelimitedMatcher("$", "\\").matcher(parts.get(1)).find()) {
List<String> tempParts = Splitter.on(PatternCache.compileDelimitedMatcher("$", "\\")).limit(2).splitToList(parts.get(1));
return new NodeBuilder(tempParts.get(0), true).setServer(parts.get(0)).setExpiry(Long.parseLong(tempParts.get(1))).setValue(b);
if (LEGACY_EXPIRY_DELIM.matcher(parts1).find()) {
// 0=node 1=expiry
Iterator<String> tempParts = LEGACY_EXPIRY_SPLITTER.split(parts1).iterator();
String tempParts0 = tempParts.next();
String tempParts1 = tempParts.next();
return new NodeBuilder(tempParts0, true).setServer(parts0).setExpiry(Long.parseLong(tempParts1)).setValue(b);
} else {
return new NodeBuilder(parts.get(1), true).setServer(parts.get(0)).setValue(b);
return new NodeBuilder(parts1, true).setServer(parts0).setValue(b);
}
}
} else {
// NOT SERVER SPECIFIC
// if s contains $
if (PatternCache.compileDelimitedMatcher("$", "\\").matcher(s).find()) {
List<String> tempParts = Splitter.on(PatternCache.compileDelimitedMatcher("$", "\\")).limit(2).splitToList(s);
return new NodeBuilder(tempParts.get(0), true).setExpiry(Long.parseLong(tempParts.get(1))).setValue(b);
if (LEGACY_EXPIRY_DELIM.matcher(s).find()) {
// 0=node 1=expiry
Iterator<String> tempParts = LEGACY_EXPIRY_SPLITTER.split(s).iterator();
String tempParts0 = tempParts.next();
String tempParts1 = tempParts.next();
return new NodeBuilder(tempParts0, true).setExpiry(Long.parseLong(tempParts1)).setValue(b);
} else {
return new NodeBuilder(s, true).setValue(b);
}
@ -250,40 +284,58 @@ public class NodeFactory {
return s;
}
public static boolean isMetaNode(String s) {
public static String parseGroupNode(String s) {
String lower = s.toLowerCase();
if (!lower.startsWith("group.")) {
return null;
}
return lower.substring("group.".length()).intern();
}
public static Map.Entry<String, String> parseMetaNode(String s) {
if (!s.startsWith("meta.")) {
return false;
return null;
}
String parts = s.substring("meta.".length());
return PatternCache.compileDelimitedMatcher(".", "\\").matcher(parts).find();
Iterator<String> metaParts = META_SPLITTER.split(s.substring("meta.".length())).iterator();
if (!metaParts.hasNext()) return null;
String key = metaParts.next();
if (!metaParts.hasNext()) return null;
String value = metaParts.next();
return Maps.immutableEntry(MetaUtils.unescapeCharacters(key).intern(), MetaUtils.unescapeCharacters(value).intern());
}
private static boolean isChatMetaNode(String type, String s) {
private static Map.Entry<Integer, String> parseChatMetaNode(String type, String s) {
if (!s.startsWith(type + ".")) {
return false;
}
String parts = s.substring((type + ".").length());
if (!PatternCache.compileDelimitedMatcher(".", "\\").matcher(parts).find()) {
return false;
return null;
}
List<String> metaParts = Splitter.on(PatternCache.compileDelimitedMatcher(".", "\\")).limit(2).splitToList(parts);
String priority = metaParts.get(0);
Iterator<String> metaParts = META_SPLITTER.split(s.substring((type + ".").length())).iterator();
if (!metaParts.hasNext()) return null;
String priority = metaParts.next();
if (!metaParts.hasNext()) return null;
String value = metaParts.next();
try {
Integer.parseInt(priority);
return true;
int p = Integer.parseInt(priority);
String v = MetaUtils.unescapeCharacters(value).intern();
return Maps.immutableEntry(p, v);
} catch (NumberFormatException e) {
return false;
return null;
}
}
public static boolean isPrefixNode(String s) {
return isChatMetaNode("prefix", s);
public static Map.Entry<Integer, String> parsePrefixNode(String s) {
return parseChatMetaNode("prefix", s);
}
public static boolean isSuffixNode(String s) {
return isChatMetaNode("suffix", s);
public static Map.Entry<Integer, String> parseSuffixNode(String s) {
return parseChatMetaNode("suffix", s);
}
public static Node make(String node) {
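
Note: the NodeFactory hunk above hoists the delimiter Patterns and Splitters into static constants instead of recompiling them on every call, and walks split results through an Iterator rather than building intermediate lists. A minimal sketch of that approach; the escape-aware regex here is illustrative, the real code obtains its patterns through PatternCache:

import com.google.common.base.Splitter;

import java.util.Iterator;
import java.util.regex.Pattern;

final class LegacyFormat {

    // matches "/" only when it is not preceded by the escape character "\"
    private static final Pattern SERVER_DELIM = Pattern.compile("(?<!\\\\)/");
    private static final Splitter SERVER_SPLITTER = Splitter.on(SERVER_DELIM).limit(2);

    /** Returns the server component of "server/node", or null if there is none. */
    static String serverPart(String legacy) {
        if (!SERVER_DELIM.matcher(legacy).find()) {
            return null;
        }
        Iterator<String> parts = SERVER_SPLITTER.split(legacy).iterator();
        return parts.next(); // 0 = server, 1 = the remaining node string
    }

    private LegacyFormat() {}
}
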

View File

@ -26,7 +26,6 @@
package me.lucko.luckperms.common.primarygroup;
import me.lucko.luckperms.api.Contexts;
import me.lucko.luckperms.common.contexts.ExtractedContexts;
import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.common.model.User;
@ -50,7 +49,7 @@ public class AllParentsByWeightHolder extends CachedPrimaryGroupHolder {
// hack to get a list of groups the holder is inheriting from
Set<String> groupNames = new LinkedHashSet<>();
user.resolveInheritances(new NoopList<>(), groupNames, ExtractedContexts.generate(contexts));
user.resolveInheritances(new NoopList<>(), groupNames, contexts);
List<Group> groups = new ArrayList<>();
for (String groupName : groupNames) {

View File

@ -33,7 +33,7 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;
public class RegexProcessor implements PermissionProcessor {
private Map<Pattern, Boolean> regexPermissions = new ConcurrentHashMap<>();
private final Map<Pattern, Boolean> regexPermissions = new ConcurrentHashMap<>();
@Override
public Tristate hasPermission(String permission) {

View File

@ -33,7 +33,7 @@ import java.util.function.Consumer;
/**
* A reference to a specific {@link PermissionHolder}.
*
* @param <I> the holder type
* @param <S> the holder type
* @param <I> the holder identifier type
*/
public interface HolderReference<S, I> extends Identifiable<I> {

View File

@ -60,7 +60,7 @@ public abstract class FlatfileBacking extends AbstractBacking {
private static final String LOG_FORMAT = "%s(%s): [%s] %s(%s) --> %s";
private final Logger actionLogger = Logger.getLogger("luckperms_actions");
private FileUuidCache uuidCache = new FileUuidCache();
private final FileUuidCache uuidCache = new FileUuidCache();
private final File pluginDir;
@ -218,6 +218,7 @@ public abstract class FlatfileBacking extends AbstractBacking {
@Override
public boolean logAction(LogEntry entry) {
//noinspection deprecation
actionLogger.info(String.format(LOG_FORMAT,
(entry.getActor().equals(Constants.CONSOLE_UUID) ? "" : entry.getActor() + " "),
entry.getActorName(),

View File

@ -165,6 +165,7 @@ public class MongoDBBacking extends AbstractBacking {
return call(() -> {
MongoCollection<Document> c = database.getCollection(prefix + "action");
//noinspection deprecation
Document doc = new Document()
.append("timestamp", entry.getTimestamp())
.append("actor", entry.getActor())
@ -750,6 +751,7 @@ public class MongoDBBacking extends AbstractBacking {
public static Map<String, Boolean> exportToLegacy(Iterable<Node> nodes) {
Map<String, Boolean> m = new HashMap<>();
for (Node node : nodes) {
//noinspection deprecation
m.put(node.toSerializedNode(), node.getValuePrimitive());
}
return m;

View File

@ -221,6 +221,7 @@ public class SQLBacking extends AbstractBacking {
ps.setLong(1, entry.getTimestamp());
ps.setString(2, entry.getActor().toString());
ps.setString(3, entry.getActorName());
//noinspection deprecation
ps.setString(4, Character.toString(entry.getType()));
ps.setString(5, entry.getActed() == null ? "null" : entry.getActed().toString());
ps.setString(6, entry.getActedName());

View File

@ -50,7 +50,7 @@ abstract class FlatfileProvider extends SQLProvider {
protected abstract String getDriverId();
@Override
public void init() throws Exception {
public void init() {
}

View File

@ -51,7 +51,7 @@ public class MySQLProvider extends SQLProvider {
}
@Override
public void init() throws Exception {
public void init() {
HikariConfig config = new HikariConfig();
String address = configuration.getAddress();

View File

@ -49,7 +49,7 @@ public class PostgreSQLProvider extends SQLProvider {
}
@Override
public void init() throws Exception {
public void init() {
HikariConfig config = new HikariConfig();
String address = configuration.getAddress();

View File

@ -40,7 +40,7 @@ public abstract class SQLProvider {
@Getter
private final String name;
public abstract void init() throws Exception;
public abstract void init();
public abstract void shutdown() throws Exception;

View File

@ -79,13 +79,13 @@ public class SubjectCollectionProxy implements SubjectCollection {
@Override
public Map<Subject, Boolean> getAllWithPermission(String s) {
// again, these methods will lazily load subjects.
return (Map) handle.getAllWithPermission(s).thenApply(map -> {
return map.entrySet().stream()
.collect(ImmutableCollectors.toImmutableMap(
e -> new SubjectProxy(service, e.getKey()),
Map.Entry::getValue
));
}).join();
return (Map) handle.getAllWithPermission(s)
.thenApply(map -> map.entrySet().stream()
.collect(ImmutableCollectors.toImmutableMap(
e -> new SubjectProxy(service, e.getKey()),
Map.Entry::getValue
))
).join();
}
@Override

View File

@ -73,13 +73,11 @@ public class SubjectDataProxy implements SubjectData {
@Override
public boolean setPermission(Set<Context> contexts, String permission, Tristate value) {
getHandle().thenCompose(handle -> {
return handle.setPermission(
CompatibilityUtil.convertContexts(contexts),
permission,
CompatibilityUtil.convertTristate(value)
);
});
getHandle().thenCompose(handle -> handle.setPermission(
CompatibilityUtil.convertContexts(contexts),
permission,
CompatibilityUtil.convertTristate(value)
));
return true;
}
@ -97,52 +95,44 @@ public class SubjectDataProxy implements SubjectData {
@Override
public Map<Set<Context>, List<Subject>> getAllParents() {
return (Map) getHandle().thenApply(handle -> {
return handle.getAllParents().entrySet().stream()
.collect(ImmutableCollectors.toImmutableMap(
e -> CompatibilityUtil.convertContexts(e.getKey()),
e -> e.getValue().stream()
.map(s -> new SubjectProxy(service, s))
.collect(ImmutableCollectors.toImmutableList())
)
);
}).join();
return (Map) getHandle().thenApply(handle -> handle.getAllParents().entrySet().stream()
.collect(ImmutableCollectors.toImmutableMap(
e -> CompatibilityUtil.convertContexts(e.getKey()),
e -> e.getValue().stream()
.map(s -> new SubjectProxy(service, s))
.collect(ImmutableCollectors.toImmutableList())
)
)).join();
}
@Override
public List<Subject> getParents(Set<Context> contexts) {
return (List) getHandle().thenApply(handle -> {
return handle.getParents(CompatibilityUtil.convertContexts(contexts)).stream()
.map(s -> new SubjectProxy(service, s))
.collect(ImmutableCollectors.toImmutableList());
}).join();
return (List) getHandle().thenApply(handle -> handle.getParents(CompatibilityUtil.convertContexts(contexts)).stream()
.map(s -> new SubjectProxy(service, s))
.collect(ImmutableCollectors.toImmutableList())).join();
}
@Override
public boolean addParent(Set<Context> contexts, Subject parent) {
getHandle().thenCompose(handle -> {
return handle.addParent(
CompatibilityUtil.convertContexts(contexts),
service.newSubjectReference(
parent.getContainingCollection().getIdentifier(),
parent.getIdentifier()
)
);
});
getHandle().thenCompose(handle -> handle.addParent(
CompatibilityUtil.convertContexts(contexts),
service.newSubjectReference(
parent.getContainingCollection().getIdentifier(),
parent.getIdentifier()
)
));
return true;
}
@Override
public boolean removeParent(Set<Context> contexts, Subject parent) {
getHandle().thenCompose(handle -> {
return handle.removeParent(
CompatibilityUtil.convertContexts(contexts),
service.newSubjectReference(
parent.getContainingCollection().getIdentifier(),
parent.getIdentifier()
)
);
});
getHandle().thenCompose(handle -> handle.removeParent(
CompatibilityUtil.convertContexts(contexts),
service.newSubjectReference(
parent.getContainingCollection().getIdentifier(),
parent.getIdentifier()
)
));
return true;
}
@ -154,19 +144,17 @@ public class SubjectDataProxy implements SubjectData {
@Override
public boolean clearParents(Set<Context> contexts) {
getHandle().thenCompose(handle -> handle.clearParents(CompatibilityUtil.convertContexts(contexts)));
return true;
}
@Override
public Map<Set<Context>, Map<String, String>> getAllOptions() {
return (Map) getHandle().thenApply(handle -> {
return handle.getAllOptions().entrySet().stream()
.collect(ImmutableCollectors.toImmutableMap(
e -> CompatibilityUtil.convertContexts(e.getKey()),
Map.Entry::getValue
));
}).join();
return (Map) getHandle().thenApply(handle -> handle.getAllOptions().entrySet().stream()
.collect(ImmutableCollectors.toImmutableMap(
e -> CompatibilityUtil.convertContexts(e.getKey()),
Map.Entry::getValue
))).join();
}
@Override

View File

@ -78,81 +78,63 @@ public class SubjectProxy implements Subject {
@Override
public boolean hasPermission(Set<Context> contexts, String permission) {
return getHandle().thenApply(handle -> {
return handle.getPermissionValue(CompatibilityUtil.convertContexts(contexts), permission).asBoolean();
}).join();
return getHandle().thenApply(handle -> handle.getPermissionValue(CompatibilityUtil.convertContexts(contexts), permission).asBoolean()).join();
}
@Override
public boolean hasPermission(String permission) {
return getHandle().thenApply(handle -> {
return handle.getPermissionValue(ImmutableContextSet.empty(), permission).asBoolean();
}).join();
return getHandle().thenApply(handle -> handle.getPermissionValue(ImmutableContextSet.empty(), permission).asBoolean()).join();
}
@Override
public Tristate getPermissionValue(Set<Context> contexts, String permission) {
return getHandle().thenApply(handle -> {
return CompatibilityUtil.convertTristate(handle.getPermissionValue(CompatibilityUtil.convertContexts(contexts), permission));
}).join();
return getHandle().thenApply(handle -> CompatibilityUtil.convertTristate(handle.getPermissionValue(CompatibilityUtil.convertContexts(contexts), permission))).join();
}
@Override
public boolean isChildOf(Subject parent) {
return getHandle().thenApply(handle -> {
return handle.isChildOf(
ImmutableContextSet.empty(),
service.newSubjectReference(
parent.getContainingCollection().getIdentifier(),
parent.getIdentifier()
)
);
}).join();
return getHandle().thenApply(handle -> handle.isChildOf(
ImmutableContextSet.empty(),
service.newSubjectReference(
parent.getContainingCollection().getIdentifier(),
parent.getIdentifier()
)
)).join();
}
@Override
public boolean isChildOf(Set<Context> contexts, Subject parent) {
return getHandle().thenApply(handle -> {
return handle.isChildOf(
CompatibilityUtil.convertContexts(contexts),
service.newSubjectReference(
parent.getContainingCollection().getIdentifier(),
parent.getIdentifier()
)
);
}).join();
return getHandle().thenApply(handle -> handle.isChildOf(
CompatibilityUtil.convertContexts(contexts),
service.newSubjectReference(
parent.getContainingCollection().getIdentifier(),
parent.getIdentifier()
)
)).join();
}
@Override
public List<Subject> getParents() {
return (List) getHandle().thenApply(handle -> {
return handle.getParents(ImmutableContextSet.empty()).stream()
.map(s -> new SubjectProxy(service, s))
.collect(ImmutableCollectors.toImmutableList());
}).join();
return (List) getHandle().thenApply(handle -> handle.getParents(ImmutableContextSet.empty()).stream()
.map(s -> new SubjectProxy(service, s))
.collect(ImmutableCollectors.toImmutableList())).join();
}
@Override
public List<Subject> getParents(Set<Context> contexts) {
return (List) getHandle().thenApply(handle -> {
return handle.getParents(CompatibilityUtil.convertContexts(contexts)).stream()
.map(s -> new SubjectProxy(service, s))
.collect(ImmutableCollectors.toImmutableList());
}).join();
return (List) getHandle().thenApply(handle -> handle.getParents(CompatibilityUtil.convertContexts(contexts)).stream()
.map(s -> new SubjectProxy(service, s))
.collect(ImmutableCollectors.toImmutableList())).join();
}
@Override
public Optional<String> getOption(Set<Context> contexts, String key) {
return getHandle().thenApply(handle -> {
return handle.getOption(CompatibilityUtil.convertContexts(contexts), key);
}).join();
return getHandle().thenApply(handle -> handle.getOption(CompatibilityUtil.convertContexts(contexts), key)).join();
}
@Override
public Optional<String> getOption(String key) {
return getHandle().thenApply(handle -> {
return handle.getOption(ImmutableContextSet.empty(), key);
}).join();
return getHandle().thenApply(handle -> handle.getOption(ImmutableContextSet.empty(), key)).join();
}
@Override

View File

@ -59,13 +59,11 @@ public class SubjectDataProxy implements SubjectData {
@Override
public Map<Set<Context>, Map<String, Boolean>> getAllPermissions() {
return (Map) getHandle().thenApply(handle -> {
return handle.getAllPermissions().entrySet().stream()
.collect(ImmutableCollectors.toImmutableMap(
e -> CompatibilityUtil.convertContexts(e.getKey()),
Map.Entry::getValue
));
}).join();
return (Map) getHandle().thenApply(handle -> handle.getAllPermissions().entrySet().stream()
.collect(ImmutableCollectors.toImmutableMap(
e -> CompatibilityUtil.convertContexts(e.getKey()),
Map.Entry::getValue
))).join();
}
@Override
@ -75,13 +73,11 @@ public class SubjectDataProxy implements SubjectData {
@Override
public CompletableFuture<Boolean> setPermission(Set<Context> contexts, String permission, Tristate value) {
return getHandle().thenCompose(handle -> {
return handle.setPermission(
CompatibilityUtil.convertContexts(contexts),
permission,
CompatibilityUtil.convertTristate(value)
);
});
return getHandle().thenCompose(handle -> handle.setPermission(
CompatibilityUtil.convertContexts(contexts),
permission,
CompatibilityUtil.convertTristate(value)
));
}
@Override
@ -96,13 +92,11 @@ public class SubjectDataProxy implements SubjectData {
@Override
public Map<Set<Context>, List<org.spongepowered.api.service.permission.SubjectReference>> getAllParents() {
return (Map) getHandle().thenApply(handle -> {
return handle.getAllParents().entrySet().stream()
.collect(ImmutableCollectors.toImmutableMap(
e -> CompatibilityUtil.convertContexts(e.getKey()),
Map.Entry::getValue
));
}).join();
return (Map) getHandle().thenApply(handle -> handle.getAllParents().entrySet().stream()
.collect(ImmutableCollectors.toImmutableMap(
e -> CompatibilityUtil.convertContexts(e.getKey()),
Map.Entry::getValue
))).join();
}
@Override
@ -132,13 +126,11 @@ public class SubjectDataProxy implements SubjectData {
@Override
public Map<Set<Context>, Map<String, String>> getAllOptions() {
return (Map) getHandle().thenApply(handle -> {
return handle.getAllOptions().entrySet().stream()
.collect(ImmutableCollectors.toImmutableMap(
e -> CompatibilityUtil.convertContexts(e.getKey()),
Map.Entry::getValue
));
}).join();
return (Map) getHandle().thenApply(handle -> handle.getAllOptions().entrySet().stream()
.collect(ImmutableCollectors.toImmutableMap(
e -> CompatibilityUtil.convertContexts(e.getKey()),
Map.Entry::getValue
))).join();
}
@Override

View File

@ -88,65 +88,47 @@ public class SubjectProxy implements Subject {
@Override
public boolean hasPermission(Set<Context> contexts, String permission) {
return getHandle().thenApply(handle -> {
return handle.getPermissionValue(CompatibilityUtil.convertContexts(contexts), permission).asBoolean();
}).join();
return getHandle().thenApply(handle -> handle.getPermissionValue(CompatibilityUtil.convertContexts(contexts), permission).asBoolean()).join();
}
@Override
public boolean hasPermission(String permission) {
return getHandle().thenApply(handle -> {
return handle.getPermissionValue(ImmutableContextSet.empty(), permission).asBoolean();
}).join();
return getHandle().thenApply(handle -> handle.getPermissionValue(ImmutableContextSet.empty(), permission).asBoolean()).join();
}
@Override
public Tristate getPermissionValue(Set<Context> contexts, String permission) {
return getHandle().thenApply(handle -> {
return CompatibilityUtil.convertTristate(handle.getPermissionValue(CompatibilityUtil.convertContexts(contexts), permission));
}).join();
return getHandle().thenApply(handle -> CompatibilityUtil.convertTristate(handle.getPermissionValue(CompatibilityUtil.convertContexts(contexts), permission))).join();
}
@Override
public boolean isChildOf(org.spongepowered.api.service.permission.SubjectReference parent) {
return getHandle().thenApply(handle -> {
return handle.isChildOf(ImmutableContextSet.empty(), SubjectReferenceFactory.obtain(service, parent));
}).join();
return getHandle().thenApply(handle -> handle.isChildOf(ImmutableContextSet.empty(), SubjectReferenceFactory.obtain(service, parent))).join();
}
@Override
public boolean isChildOf(Set<Context> contexts, org.spongepowered.api.service.permission.SubjectReference parent) {
return getHandle().thenApply(handle -> {
return handle.isChildOf(CompatibilityUtil.convertContexts(contexts), SubjectReferenceFactory.obtain(service, parent));
}).join();
return getHandle().thenApply(handle -> handle.isChildOf(CompatibilityUtil.convertContexts(contexts), SubjectReferenceFactory.obtain(service, parent))).join();
}
@Override
public List<org.spongepowered.api.service.permission.SubjectReference> getParents() {
return (List) getHandle().thenApply(handle -> {
return handle.getParents(ImmutableContextSet.empty());
}).join();
return (List) getHandle().thenApply(handle -> handle.getParents(ImmutableContextSet.empty())).join();
}
@Override
public List<org.spongepowered.api.service.permission.SubjectReference> getParents(Set<Context> contexts) {
return (List) getHandle().thenApply(handle -> {
return handle.getParents(CompatibilityUtil.convertContexts(contexts));
}).join();
return (List) getHandle().thenApply(handle -> handle.getParents(CompatibilityUtil.convertContexts(contexts))).join();
}
@Override
public Optional<String> getOption(Set<Context> contexts, String key) {
return getHandle().thenApply(handle -> {
return handle.getOption(CompatibilityUtil.convertContexts(contexts), key);
}).join();
return getHandle().thenApply(handle -> handle.getOption(CompatibilityUtil.convertContexts(contexts), key)).join();
}
@Override
public Optional<String> getOption(String key) {
return getHandle().thenApply(handle -> {
return handle.getOption(ImmutableContextSet.empty(), key);
}).join();
return getHandle().thenApply(handle -> handle.getOption(ImmutableContextSet.empty(), key)).join();
}
@Override

View File

@ -35,7 +35,7 @@ import java.util.concurrent.Executor;
public class LPSpongeScheduler implements LuckPermsScheduler {
private final LPSpongePlugin plugin;
private Set<Task> tasks = ConcurrentHashMap.newKeySet();
private final Set<Task> tasks = ConcurrentHashMap.newKeySet();
public LPSpongeScheduler(LPSpongePlugin plugin) {
this.plugin = plugin;

View File

@ -38,7 +38,6 @@ import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.Tristate;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.common.caching.MetaAccumulator;
import me.lucko.luckperms.common.contexts.ExtractedContexts;
import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.sponge.LPSpongePlugin;
import me.lucko.luckperms.sponge.service.LuckPermsService;
@ -92,7 +91,7 @@ public class SpongeGroup extends Group {
.expireAfterAccess(10, TimeUnit.MINUTES)
.build(contexts -> {
// TODO move this away from NodeTree
Map<String, Boolean> permissions = getParent().getAllNodes(ExtractedContexts.generate(getPlugin().getService().calculateContexts(contexts))).stream()
Map<String, Boolean> permissions = getParent().getAllNodes(getPlugin().getService().calculateContexts(contexts)).stream()
.map(LocalizedNode::getNode)
.collect(Collectors.toMap(Node::getPermission, Node::getValuePrimitive));
@ -102,7 +101,7 @@ public class SpongeGroup extends Group {
private final LoadingCache<ImmutableContextSet, ImmutableList<SubjectReference>> parentCache = Caffeine.newBuilder()
.expireAfterWrite(10, TimeUnit.MINUTES)
.build(contexts -> {
Set<SubjectReference> subjects = getParent().getAllNodes(ExtractedContexts.generate(getPlugin().getService().calculateContexts(contexts))).stream()
Set<SubjectReference> subjects = getParent().getAllNodes(getPlugin().getService().calculateContexts(contexts)).stream()
.map(LocalizedNode::getNode)
.filter(Node::isGroupNode)
.map(Node::getGroupName)
@ -230,16 +229,16 @@ public class SpongeGroup extends Group {
@Override
public ImmutableContextSet getActiveContextSet() {
return plugin.getContextManager().getApplicableContext(this.sponge()).makeImmutable();
return plugin.getContextManager().getApplicableContext(this.sponge());
}
private Optional<String> getChatMeta(ImmutableContextSet contexts, ChatMetaType type) {
MetaAccumulator metaAccumulator = parent.accumulateMeta(null, null, ExtractedContexts.generate(plugin.getService().calculateContexts(contexts)));
MetaAccumulator metaAccumulator = parent.accumulateMeta(null, null, plugin.getService().calculateContexts(contexts));
return Optional.ofNullable(metaAccumulator.getStack(type).toFormattedString());
}
private Optional<String> getMeta(ImmutableContextSet contexts, String key) {
MetaAccumulator metaAccumulator = parent.accumulateMeta(null, null, ExtractedContexts.generate(plugin.getService().calculateContexts(contexts)));
MetaAccumulator metaAccumulator = parent.accumulateMeta(null, null, plugin.getService().calculateContexts(contexts));
ListMultimap<String, String> meta = metaAccumulator.getMeta();
List<String> ret = meta.get(key);
return ret.isEmpty() ? Optional.empty() : Optional.of(ret.get(0));

View File

@ -38,7 +38,6 @@ import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.api.Tristate;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.common.caching.MetaAccumulator;
import me.lucko.luckperms.common.contexts.ExtractedContexts;
import me.lucko.luckperms.common.model.Group;
import me.lucko.luckperms.common.model.PermissionHolder;
import me.lucko.luckperms.common.model.User;
@ -354,7 +353,7 @@ public class LuckPermsSubjectData implements LPSubjectData {
toRemove.forEach(makeUnsetConsumer(enduring));
MetaAccumulator metaAccumulator = holder.accumulateMeta(null, null, ExtractedContexts.generate(service.calculateContexts(context)));
MetaAccumulator metaAccumulator = holder.accumulateMeta(null, null, service.calculateContexts(context));
int priority = metaAccumulator.getChatMeta(type).keySet().stream().mapToInt(e -> e).max().orElse(0);
priority += 10;
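For example (illustrative numbers, not from the commit): if the accumulated chat meta for the type already holds entries at weights 50 and 100, the stream yields a maximum of 100 and the newly set value is written at weight 110, so it takes precedence over the existing entries.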

View File

@ -286,6 +286,6 @@ public class PersistedSubject implements LPSubject {
@Override
public ImmutableContextSet getActiveContextSet() {
return service.getPlugin().getContextManager().getApplicableContext(sponge()).makeImmutable();
return service.getPlugin().getContextManager().getApplicableContext(sponge());
}
}