Rewrite inheritance resolution implementation
parent cb63b321d0
commit 1137e476dd
@@ -25,6 +25,7 @@ package me.lucko.luckperms.api.context;
 import com.google.common.collect.Multimap;

 import java.util.Map;
+import java.util.Optional;
 import java.util.Set;

 /**
@@ -167,6 +168,16 @@ public interface ContextSet {
 */
 Set<String> getValues(String key);

+/**
+ * Returns any value from this set matching the key, if present.
+ *
+ * @param key the key to find values for
+ * @return an optional containing any match
+ */
+default Optional<String> getAnyValue(String key) {
+    return getValues(key).stream().findAny();
+}
+
 /**
 * Check if thr set contains a given key mapped to a given value
 *
@@ -187,6 +198,34 @@ public interface ContextSet {
 */
 boolean hasIgnoreCase(String key, String value);

+/**
+ * Checks to see if all entries in this context set are also included in another set.
+ *
+ * @param other the other set to check
+ * @return true if all entries in this set are also in the other set
+ */
+default boolean isSatisfiedBy(ContextSet other) {
+    if (this.isEmpty()) {
+        // this is empty, so is therefore always satisfied.
+        return true;
+    } else if (other.isEmpty()) {
+        // this set isn't empty, but the other one is
+        return false;
+    } else if (this.size() > other.size()) {
+        // this set has more unique entries than the other set, so there's no way this can be satisfied.
+        return false;
+    } else {
+        // neither are empty, we need to compare the individual entries
+        for (Map.Entry<String, String> pair : toSet()) {
+            if (!other.has(pair.getKey(), pair.getValue())) {
+                return false;
+            }
+        }
+
+        return true;
+    }
+}
+
 /**
 * Check if the set is empty
 *
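The two additions above, getAnyValue and isSatisfiedBy, are default methods built only from the interface's existing accessors (getValues, has, isEmpty, size, toSet). A minimal usage sketch follows; the values are made up for illustration, but ContextSet.fromMap is the factory used elsewhere in this diff:

    // Sketch only: java.util imports omitted, values hypothetical.
    Map<String, String> required = new HashMap<>();
    required.put("server", "hub");

    Map<String, String> active = new HashMap<>();
    active.put("server", "hub");
    active.put("world", "nether");

    ContextSet requiredSet = ContextSet.fromMap(required);
    ContextSet activeSet = ContextSet.fromMap(active);

    Optional<String> any = activeSet.getAnyValue("server");   // Optional.of("hub")
    boolean satisfied = requiredSet.isSatisfiedBy(activeSet); // true: every entry of requiredSet is in activeSet
    boolean reverse = activeSet.isSatisfiedBy(requiredSet);   // false: activeSet has more unique entries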
@@ -28,7 +28,7 @@ import me.lucko.luckperms.api.Contexts;
 import me.lucko.luckperms.api.Node;
 import me.lucko.luckperms.api.caching.MetaData;
 import me.lucko.luckperms.api.context.ContextSet;
-import me.lucko.luckperms.common.caching.MetaHolder;
+import me.lucko.luckperms.common.caching.MetaAccumulator;
 import me.lucko.luckperms.common.core.NodeFactory;
 import me.lucko.luckperms.common.core.model.Group;
 import me.lucko.luckperms.common.core.model.PermissionHolder;
@@ -99,8 +99,8 @@ public class VaultChatHook extends Chat {
 holder.removeIf(n -> prefix ? n.isPrefix() : n.isSuffix());

 // find the max inherited priority & add 10
-MetaHolder metaHolder = holder.accumulateMeta(null, null, ExtractedContexts.generate(perms.createContextForWorld(finalWorld)));
-int priority = (prefix ? metaHolder.getPrefixes() : metaHolder.getSuffixes()).keySet().stream()
+MetaAccumulator metaAccumulator = holder.accumulateMeta(null, null, ExtractedContexts.generate(perms.createContextForWorld(finalWorld)));
+int priority = (prefix ? metaAccumulator.getPrefixes() : metaAccumulator.getSuffixes()).keySet().stream()
 .mapToInt(e -> e).max().orElse(0) + 10;

 Node.Builder chatMetaNode = NodeFactory.makeChatMetaNode(prefix, priority, value);
@@ -158,7 +158,7 @@ public class VaultChatHook extends Chat {

 perms.log("Getting meta: '" + node + "' for group " + group.getName() + " on world " + world + ", server " + perms.getServer());

-for (Node n : group.getPermissions(true)) {
+for (Node n : group.mergePermissionsToList()) {
 if (!n.getValue()) continue;
 if (!n.isMeta()) continue;
 if (!n.shouldApplyOnServer(perms.getServer(), perms.isIncludeGlobal(), false)) continue;
@@ -189,7 +189,7 @@ public class VaultChatHook extends Chat {
 }

 ExtractedContexts ec = ExtractedContexts.generate(new Contexts(ContextSet.fromMap(context), perms.isIncludeGlobal(), true, true, true, true, false));
-for (Node n : group.getAllNodes(null, ec)) {
+for (Node n : group.getAllNodes(ec)) {
 if (!n.getValue()) continue;
 if (prefix ? !n.isPrefix() : !n.isSuffix()) continue;
 if (!n.shouldApplyOnServer(perms.getServer(), perms.isIncludeGlobal(), false)) continue;
@@ -26,7 +26,6 @@ import lombok.Getter;
 import lombok.NonNull;

 import me.lucko.luckperms.api.Contexts;
-import me.lucko.luckperms.api.LocalizedNode;
 import me.lucko.luckperms.api.Node;
 import me.lucko.luckperms.api.Tristate;
 import me.lucko.luckperms.api.caching.PermissionData;
@@ -36,6 +35,7 @@ import me.lucko.luckperms.common.config.ConfigKeys;
 import me.lucko.luckperms.common.core.model.Group;
 import me.lucko.luckperms.common.core.model.PermissionHolder;
 import me.lucko.luckperms.common.core.model.User;
+import me.lucko.luckperms.common.utils.ExtractedContexts;
 import me.lucko.luckperms.exceptions.ObjectAlreadyHasException;
 import me.lucko.luckperms.exceptions.ObjectLacksException;

@@ -197,7 +197,7 @@ public class VaultPermissionHook extends Permission {
 if (group == null) return false;

 // This is a nasty call. Groups aren't cached. :(
-Map<String, Boolean> permissions = group.exportNodes(createContextForWorld(world), true);
+Map<String, Boolean> permissions = group.exportNodes(ExtractedContexts.generate(createContextForWorld(world)), true);
 return permissions.containsKey(permission.toLowerCase()) && permissions.get(permission.toLowerCase());
 }

@@ -234,7 +234,7 @@ public class VaultPermissionHook extends Permission {
 if (user == null) return false;

 String w = world; // screw effectively final
-return user.getNodes().stream()
+return user.getNodes().values().stream()
 .filter(Node::isGroupNode)
 .filter(n -> n.shouldApplyOnServer(getServer(), isIncludeGlobal(), false))
 .filter(n -> n.shouldApplyOnWorld(w, true, false))
@@ -303,7 +303,7 @@ public class VaultPermissionHook extends Permission {
 if (user == null) return new String[0];

 String w = world; // screw effectively final
-return user.getNodes().stream()
+return user.getNodes().values().stream()
 .filter(Node::isGroupNode)
 .filter(n -> n.shouldApplyOnServer(getServer(), isIncludeGlobal(), false))
 .filter(n -> n.shouldApplyOnWorld(w, true, false))
@@ -352,7 +352,7 @@ public class VaultPermissionHook extends Permission {
 }
 } else {
 // we need to check the users permissions only
-for (LocalizedNode node : user.getPermissions(true)) {
+for (Node node : user.mergePermissionsToList()) {
 if (!node.getValue()) continue;
 if (!node.getPermission().toLowerCase().startsWith("vault.primarygroup.")) continue;
 if (!node.shouldApplyOnServer(getServer(), isIncludeGlobal(), false)) continue;
@@ -60,32 +60,32 @@ public class PermissionHolderDelegate implements PermissionHolder {

 @Override
 public SortedSet<? extends Node> getPermissions() {
-return ImmutableSortedSet.copyOfSorted(master.getPermissions(false));
+return ImmutableSortedSet.copyOfSorted(master.mergePermissionsToSortedSet());
 }

 @Override
 public Set<Node> getEnduringPermissions() {
-return ImmutableSet.copyOf(master.getNodes());
+return ImmutableSet.copyOf(master.getNodes().values());
 }

 @Override
 public Set<Node> getTransientPermissions() {
-return ImmutableSet.copyOf(master.getTransientNodes());
+return ImmutableSet.copyOf(master.getTransientNodes().values());
 }

 @Override
 public SortedSet<LocalizedNode> getAllNodes(@NonNull Contexts contexts) {
-return new TreeSet<>(master.getAllNodes(null, ExtractedContexts.generate(contexts)));
+return new TreeSet<>(master.resolveInheritancesAlmostEqual(ExtractedContexts.generate(contexts)));
 }

 @Override
 public Set<LocalizedNode> getAllNodesFiltered(@NonNull Contexts contexts) {
-return new HashSet<>(master.getAllNodesFiltered(ExtractedContexts.generate(contexts)));
+return new HashSet<>(master.getAllNodes(ExtractedContexts.generate(contexts)));
 }

 @Override
 public Map<String, Boolean> exportNodes(Contexts contexts, boolean lowerCase) {
-return new HashMap<>(master.exportNodes(contexts, lowerCase));
+return new HashMap<>(master.exportNodes(ExtractedContexts.generate(contexts), lowerCase));
 }

 @Override
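Nothing changes for API consumers in the hunk above: the delegate keeps its old signatures and simply forwards to the renamed internal resolution methods. Read directly from the diff, the mapping is:

    getPermissions()              -> master.mergePermissionsToSortedSet()
    getEnduringPermissions()      -> master.getNodes().values()
    getTransientPermissions()     -> master.getTransientNodes().values()
    getAllNodes(contexts)         -> master.resolveInheritancesAlmostEqual(ExtractedContexts.generate(contexts))
    getAllNodesFiltered(contexts) -> master.getAllNodes(ExtractedContexts.generate(contexts))
    exportNodes(contexts, lower)  -> master.exportNodes(ExtractedContexts.generate(contexts), lower)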
@@ -41,7 +41,7 @@ import java.util.TreeMap;
 */
 @Getter
 @ToString
-public class MetaHolder {
+public class MetaAccumulator {

 @Getter(AccessLevel.NONE)
 private final Map<String, String> meta;
@@ -52,7 +52,7 @@ public class MetaHolder {
 private final MetaStack prefixStack;
 private final MetaStack suffixStack;

-public MetaHolder(MetaStack prefixStack, MetaStack suffixStack) {
+public MetaAccumulator(MetaStack prefixStack, MetaStack suffixStack) {
 this.meta = new HashMap<>();
 this.prefixes = new TreeMap<>(Comparator.reverseOrder());
 this.suffixes = new TreeMap<>(Comparator.reverseOrder());
@@ -60,7 +60,7 @@ public class MetaHolder {
 this.suffixStack = suffixStack;
 }

-public MetaHolder() {
+public MetaAccumulator() {
 this(NoopMetaStack.INSTANCE, NoopMetaStack.INSTANCE);
 }

@@ -59,7 +59,7 @@ public class MetaCache implements MetaData {
 @Getter
 private MetaStack suffixStack = NoopMetaStack.INSTANCE;

-public void loadMeta(MetaHolder meta) {
+public void loadMeta(MetaAccumulator meta) {
 lock.writeLock().lock();
 try {
 this.meta = ImmutableMap.copyOf(meta.getMeta());
@@ -69,7 +69,7 @@ public class UserCache implements UserData {

 @Override
 public ListenableFuture<PermissionCache> reload(Contexts contexts, PermissionCache oldData) {
-oldData.comparePermissions(user.exportNodes(contexts, true));
+oldData.comparePermissions(user.exportNodes(ExtractedContexts.generate(contexts), true));
 return Futures.immediateFuture(oldData);
 }
 });
@@ -102,7 +102,7 @@ public class UserCache implements UserData {
 @Override
 public PermissionCache calculatePermissions(@NonNull Contexts contexts) {
 PermissionCache data = new PermissionCache(contexts, user, calculatorFactory);
-data.setPermissions(user.exportNodes(contexts, true));
+data.setPermissions(user.exportNodes(ExtractedContexts.generate(contexts), true));
 return data;
 }

@@ -27,24 +27,22 @@ import lombok.RequiredArgsConstructor;
 import com.google.common.collect.HashMultimap;
 import com.google.common.collect.Multimap;

-import me.lucko.luckperms.common.core.model.PermissionHolder;
 import me.lucko.luckperms.common.plugin.LuckPermsPlugin;

 import java.util.HashSet;
 import java.util.Set;
 import java.util.concurrent.locks.ReentrantLock;
-import java.util.function.Consumer;

 /**
 * Manages the cached state of all permission holders
 */
 @RequiredArgsConstructor
 public class CachedStateManager {
-private static final Consumer<PermissionHolder> INVALIDATE_CONSUMER = PermissionHolder::invalidateInheritanceCaches;
+// private static final Consumer<PermissionHolder> INVALIDATE_CONSUMER = PermissionHolder::invalidateInheritanceCaches;

 private final LuckPermsPlugin plugin;

-// Group --> Groups that inherit from that group. (reverse relationship)
+// Group --> Groups/Users that inherit from that group. (reverse relationship)
 private final Multimap<HolderReference, HolderReference> map = HashMultimap.create();
 private final ReentrantLock lock = new ReentrantLock();

@@ -117,6 +115,7 @@ public class CachedStateManager {
 }
 }

+/*
 public void invalidateInheritances(HolderReference holder) {
 Set<HolderReference> toInvalidate = getInheritances(holder);
 invalidateInheritances(plugin, toInvalidate);
@@ -125,5 +124,6 @@ public class CachedStateManager {
 public static void invalidateInheritances(LuckPermsPlugin plugin, Set<HolderReference> references) {
 references.forEach(hr -> hr.apply(plugin, INVALIDATE_CONSUMER));
 }
+*/

 }
@@ -23,6 +23,7 @@
 package me.lucko.luckperms.common.caching.stacking;

 import me.lucko.luckperms.api.LocalizedNode;
+import me.lucko.luckperms.api.Node;
 import me.lucko.luckperms.common.core.model.Track;
 import me.lucko.luckperms.common.plugin.LuckPermsPlugin;

@@ -44,7 +45,7 @@ public interface MetaStackElement {
 * @param node the node to check
 * @return true if the accumulation should return
 */
-static boolean checkMetaType(boolean expectingPrefix, LocalizedNode node) {
+static boolean checkMetaType(boolean expectingPrefix, Node node) {
 if (expectingPrefix) {
 if (!node.isPrefix()) {
 return true;
@@ -60,7 +60,7 @@ public class MetaInfo extends SharedSubCommand {
 Set<LocalizedNode> meta = new HashSet<>();

 // Collect data
-for (LocalizedNode node : holder.getAllNodes(null, ExtractedContexts.generate(Contexts.allowAll()))) {
+for (LocalizedNode node : holder.resolveInheritancesAlmostEqual(ExtractedContexts.generate(Contexts.allowAll()))) {
 if (!node.isSuffix() && !node.isPrefix() && !node.isMeta()) {
 continue;
 }
@@ -66,7 +66,7 @@ public class MetaRemovePrefix extends SharedSubCommand {
 // Handle bulk removal
 if (prefix.equalsIgnoreCase("null")) {
 List<Node> toRemove = new ArrayList<>();
-for (Node node : holder.getNodes()) {
+for (Node node : holder.getNodes().values()) {
 if (!node.isPrefix()) continue;
 if (node.getPrefix().getKey() != priority) continue;
 if (node.isTemporary()) continue;
@@ -66,7 +66,7 @@ public class MetaRemoveSuffix extends SharedSubCommand {
 // Handle bulk removal
 if (suffix.equalsIgnoreCase("null")) {
 List<Node> toRemove = new ArrayList<>();
-for (Node node : holder.getNodes()) {
+for (Node node : holder.getNodes().values()) {
 if (!node.isSuffix()) continue;
 if (node.getSuffix().getKey() != priority) continue;
 if (node.isTemporary()) continue;
@@ -66,7 +66,7 @@ public class MetaRemoveTempPrefix extends SharedSubCommand {
 // Handle bulk removal
 if (prefix.equalsIgnoreCase("null")) {
 List<Node> toRemove = new ArrayList<>();
-for (Node node : holder.getNodes()) {
+for (Node node : holder.getNodes().values()) {
 if (!node.isPrefix()) continue;
 if (node.getPrefix().getKey() != priority) continue;
 if (node.isPermanent()) continue;
@@ -66,7 +66,7 @@ public class MetaRemoveTempSuffix extends SharedSubCommand {
 // Handle bulk removal
 if (suffix.equalsIgnoreCase("null")) {
 List<Node> toRemove = new ArrayList<>();
-for (Node node : holder.getNodes()) {
+for (Node node : holder.getNodes().values()) {
 if (!node.isSuffix()) continue;
 if (node.getSuffix().getKey() != priority) continue;
 if (node.isPermanent()) continue;
@@ -51,7 +51,7 @@ public class HolderShowTracks<T extends PermissionHolder> extends SubCommand<T>
 return CommandResult.LOADING_ERROR;
 }

-Set<Node> nodes = holder.getNodes().stream()
+Set<Node> nodes = holder.getNodes().values().stream()
 .filter(Node::isGroupNode)
 .filter(Node::isPermanent)
 .collect(Collectors.toSet());
@@ -43,8 +43,8 @@ public class ParentInfo extends SharedSubCommand {

 @Override
 public CommandResult execute(LuckPermsPlugin plugin, Sender sender, PermissionHolder holder, List<String> args, String label) throws CommandException {
-Message.LISTPARENTS.send(sender, holder.getFriendlyName(), Util.permGroupsToString(holder.getPermissions(false)));
-Message.LISTPARENTS_TEMP.send(sender, holder.getFriendlyName(), Util.tempGroupsToString(holder.getPermissions(false)));
+Message.LISTPARENTS.send(sender, holder.getFriendlyName(), Util.permGroupsToString(holder.mergePermissionsToSortedSet()));
+Message.LISTPARENTS_TEMP.send(sender, holder.getFriendlyName(), Util.tempGroupsToString(holder.mergePermissionsToSortedSet()));
 return CommandResult.SUCCESS;
 }
 }
@@ -55,11 +55,11 @@ public class PermissionInfo extends SharedSubCommand {
 public CommandResult execute(LuckPermsPlugin plugin, Sender sender, PermissionHolder holder, List<String> args, String label) throws CommandException {
 if (sender.getUuid().equals(Constants.CONSOLE_UUID)) {
 Message.LISTNODES.send(sender, holder.getFriendlyName());
-sender.sendMessage(Util.color(Util.permNodesToStringConsole(holder.getPermissions(false))));
+sender.sendMessage(Util.color(Util.permNodesToStringConsole(holder.mergePermissionsToSortedSet())));
 } else {
 int page = ArgumentUtils.handleIntOrElse(0, args, 1);

-Map.Entry<FancyMessage, String> ent = Util.permNodesToMessage(holder.getPermissions(false), holder, label, page);
+Map.Entry<FancyMessage, String> ent = Util.permNodesToMessage(holder.mergePermissionsToSortedSet(), holder, label, page);
 if (ent.getValue() != null) {
 Message.LISTNODES_WITH_PAGE.send(sender, holder.getFriendlyName(), ent.getValue());
 sender.sendMessage(ent.getKey());
@@ -69,7 +69,7 @@ public class PermissionInfo extends SharedSubCommand {
 }
 }

-Message.LISTNODES_TEMP.send(sender, holder.getFriendlyName(), Util.tempNodesToString(holder.getPermissions(false)));
+Message.LISTNODES_TEMP.send(sender, holder.getFriendlyName(), Util.tempNodesToString(holder.mergePermissionsToSortedSet()));
 return CommandResult.SUCCESS;
 }
 }
@@ -70,7 +70,7 @@ public class GroupBulkChange extends SubCommand<Group> {
 return CommandResult.FAILURE;
 }

-Iterator<Node> iterator = group.getNodes().iterator();
+Iterator<Node> iterator = group.getNodes().values().iterator();
 if (type.equals("world")) {
 while (iterator.hasNext()) {
 Node element = iterator.next();
@@ -61,7 +61,7 @@ public class GroupClone extends SubCommand<Group> {
 return CommandResult.LOADING_ERROR;
 }

-newGroup.setNodes(group.getNodes());
+newGroup.replaceNodes(group.getNodes());

 Message.CLONE_SUCCESS.send(sender, group.getName(), newGroup.getName());
 LogEntry.build().actor(sender).acted(group).action("clone " + newGroup.getName()).build().submit(plugin, sender);
@@ -57,12 +57,12 @@ public class GroupInfo extends SubCommand<Group> {
 group.getMetaNodes().size()
 );

-Set<Node> parents = group.getPermissions(false).stream()
+Set<Node> parents = group.mergePermissions().stream()
 .filter(Node::isGroupNode)
 .filter(Node::isPermanent)
 .collect(Collectors.toSet());

-Set<Node> tempParents = group.getPermissions(false).stream()
+Set<Node> tempParents = group.mergePermissions().stream()
 .filter(Node::isGroupNode)
 .filter(Node::isTemporary)
 .collect(Collectors.toSet());
@@ -75,7 +75,7 @@ public class GroupRename extends SubCommand<Group> {
 return CommandResult.FAILURE;
 }

-newGroup.setNodes(group.getNodes());
+newGroup.replaceNodes(group.getNodes());

 Message.RENAME_SUCCESS.send(sender, group.getName(), newGroup.getName());
 LogEntry.build().actor(sender).acted(group).action("rename " + newGroup.getName()).build().submit(plugin, sender);
@@ -22,7 +22,6 @@

 package me.lucko.luckperms.common.commands.impl.group;

-import me.lucko.luckperms.api.Node;
 import me.lucko.luckperms.common.commands.Arg;
 import me.lucko.luckperms.common.commands.CommandException;
 import me.lucko.luckperms.common.commands.CommandResult;
@@ -35,12 +34,8 @@ import me.lucko.luckperms.common.core.NodeFactory;
 import me.lucko.luckperms.common.core.model.Group;
 import me.lucko.luckperms.common.plugin.LuckPermsPlugin;
 import me.lucko.luckperms.common.utils.Predicates;
-import me.lucko.luckperms.exceptions.ObjectAlreadyHasException;
-import me.lucko.luckperms.exceptions.ObjectLacksException;

 import java.util.List;
-import java.util.Set;
-import java.util.stream.Collectors;

 public class GroupSetWeight extends SubCommand<Group> {
 public GroupSetWeight() {
@@ -53,19 +48,8 @@ public class GroupSetWeight extends SubCommand<Group> {
 public CommandResult execute(LuckPermsPlugin plugin, Sender sender, Group group, List<String> args, String label) throws CommandException {
 int weight = ArgumentUtils.handlePriority(0, args);

-Set<Node> existingWeightNodes = group.getNodes().stream()
-        .filter(n -> n.getPermission().startsWith("weight."))
-        .collect(Collectors.toSet());
-
-existingWeightNodes.forEach(n -> {
-    try {
-        group.unsetPermission(n);
-    } catch (ObjectLacksException ignored) {}
-});
-
-try {
-    group.setPermission(NodeFactory.newBuilder("weight." + weight).build());
-} catch (ObjectAlreadyHasException ignored) {}
+group.removeIf(n -> n.getPermission().startsWith("weight."));
+group.setPermissionUnchecked(NodeFactory.newBuilder("weight." + weight).build());

 save(group, sender, plugin);
 Message.GROUP_SET_WEIGHT.send(sender, weight, group.getDisplayName());
@@ -70,7 +70,7 @@ public class UserBulkChange extends SubCommand<User> {
 return CommandResult.FAILURE;
 }

-Iterator<Node> iterator = user.getNodes().iterator();
+Iterator<Node> iterator = user.getNodes().values().iterator();
 if (type.equals("world")) {
 while (iterator.hasNext()) {
 Node element = iterator.next();
@@ -91,7 +91,7 @@ public class UserDemote extends SubCommand<User> {

 // Load applicable groups
 Set<Node> nodes = new HashSet<>();
-for (Node node : user.getNodes()) {
+for (Node node : user.getNodes().values()) {
 if (!node.isGroupNode()) {
 continue;
 }
@@ -62,12 +62,12 @@ public class UserInfo extends SubCommand<User> {
 user.getMetaNodes().size()
 );

-Set<Node> parents = user.getPermissions(false).stream()
+Set<Node> parents = user.mergePermissions().stream()
 .filter(Node::isGroupNode)
 .filter(Node::isPermanent)
 .collect(Collectors.toSet());

-Set<Node> tempParents = user.getPermissions(false).stream()
+Set<Node> tempParents = user.mergePermissions().stream()
 .filter(Node::isGroupNode)
 .filter(Node::isTemporary)
 .collect(Collectors.toSet());
@@ -91,7 +91,7 @@ public class UserPromote extends SubCommand<User> {

 // Load applicable groups
 Set<Node> nodes = new HashSet<>();
-for (Node node : user.getNodes()) {
+for (Node node : user.getNodes().values()) {
 if (!node.isGroupNode()) {
 continue;
 }
@@ -82,7 +82,7 @@ public class BulkEditGroup extends SubCommand<Storage> {

 Set<Node> toAdd = new HashSet<>();
 Set<Node> toRemove = new HashSet<>();
-Iterator<Node> iterator = user.getNodes().iterator();
+Iterator<Node> iterator = user.getNodes().values().iterator();
 if (type.equals("world")) {
 while (iterator.hasNext()) {
 Node element = iterator.next();
@@ -82,7 +82,7 @@ public class BulkEditPermission extends SubCommand<Storage> {

 Set<Node> toAdd = new HashSet<>();
 Set<Node> toRemove = new HashSet<>();
-Iterator<Node> iterator = user.getNodes().iterator();
+Iterator<Node> iterator = user.getNodes().values().iterator();
 if (type.equals("world")) {
 while (iterator.hasNext()) {
 Node element = iterator.next();
@@ -0,0 +1,115 @@
+/*
+ * Copyright (c) 2016 Lucko (Luck) <luck@lucko.me>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package me.lucko.luckperms.common.core;
+
+import me.lucko.luckperms.api.context.ImmutableContextSet;
+
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.TreeSet;
+
+public class ContextSetComparator implements Comparator<ImmutableContextSet> {
+    private static final Comparator<Map.Entry<String, String>> STRING_ENTRY_COMPARATOR = (o1, o2) -> {
+        int ret = o1.getKey().compareTo(o2.getKey());
+        if (ret != 0) {
+            return ret;
+        }
+
+        return o1.getValue().compareTo(o2.getValue());
+    };
+
+    private static final ContextSetComparator INSTANCE = new ContextSetComparator();
+    public static Comparator<ImmutableContextSet> get() {
+        return INSTANCE;
+    }
+
+    public static Comparator<ImmutableContextSet> reverse() {
+        return INSTANCE.reversed();
+    }
+
+    @Override
+    public int compare(ImmutableContextSet o1, ImmutableContextSet o2) {
+        if (o1.equals(o2)) {
+            return 0;
+        }
+
+        boolean o1ServerSpecific = o1.containsKey("server");
+        boolean o2ServerSpecific = o2.containsKey("server");
+        if (o1ServerSpecific != o2ServerSpecific) {
+            return o1ServerSpecific ? 1 : -1;
+        }
+
+        boolean o1WorldSpecific = o1.containsKey("world");
+        boolean o2WorldSpecific = o2.containsKey("world");
+        if (o1WorldSpecific != o2WorldSpecific) {
+            return o1WorldSpecific ? 1 : -1;
+        }
+
+        int o1Size = o1.size();
+        int o2Size = o2.size();
+        if (o1Size != o2Size) {
+            return o1Size > o2Size ? 1 : -1;
+        }
+
+        // we *have* to maintain transitivity in this comparator. this may be expensive, but it's necessary, as these
+        // values are stored in a treemap.
+
+        // in order to have consistent ordering, we have to compare the content of the context sets by ordering the
+        // elements and then comparing which set is greater.
+        TreeSet<Map.Entry<String, String>> o1Map = new TreeSet<>(STRING_ENTRY_COMPARATOR);
+        TreeSet<Map.Entry<String, String>> o2Map = new TreeSet<>(STRING_ENTRY_COMPARATOR);
+        o1Map.addAll(o1.toMultimap().entries());
+        o2Map.addAll(o2.toMultimap().entries());
+
+        int o1MapSize = o1Map.size();
+        int o2MapSize = o2Map.size();
+        if (o1MapSize != o2MapSize) {
+            return o1MapSize > o2MapSize ? 1 : -1;
+        }
+
+        // size is definitely the same
+        Iterator<Map.Entry<String, String>> it1 = o1Map.iterator();
+        Iterator<Map.Entry<String, String>> it2 = o2Map.iterator();
+
+        while (it1.hasNext()) {
+            Map.Entry<String, String> ent1 = it1.next();
+            Map.Entry<String, String> ent2 = it2.next();
+
+            // compare these values.
+            if (ent1.getKey().equals(ent2.getKey()) && ent1.getValue().equals(ent2.getValue())) {
+                // identical entries. just move on
+                continue;
+            }
+
+            // these values are at the same position in the ordered sets.
+            // if ent1 is "greater" than ent2, then at this first position, o1 has a "greater" entry, and can therefore be considered
+            // a greater set.
+            return STRING_ENTRY_COMPARATOR.compare(ent1, ent2);
+        }
+
+        // shouldn't ever reach this point. ever.
+        return 0;
+    }
+}
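A quick illustration of the ordering the new comparator defines: server-specific sets rank above world-specific ones, which rank above context-free sets, with set size and then entry content as tie-breakers. The sketch below is an assumption-laden example rather than project code; ContextSet.fromMap and makeImmutable are both used elsewhere in this diff:

    // Hypothetical contexts, ordered into a TreeMap as the class comments describe.
    Map<String, String> globalMap = new HashMap<>();   // no context at all
    Map<String, String> worldMap = new HashMap<>();
    worldMap.put("world", "nether");
    Map<String, String> serverMap = new HashMap<>();
    serverMap.put("server", "hub");

    ImmutableContextSet global = ContextSet.fromMap(globalMap).makeImmutable();
    ImmutableContextSet world = ContextSet.fromMap(worldMap).makeImmutable();
    ImmutableContextSet server = ContextSet.fromMap(serverMap).makeImmutable();

    TreeMap<ImmutableContextSet, String> byContext = new TreeMap<>(ContextSetComparator.reverse());
    byContext.put(global, "global nodes");
    byContext.put(world, "world nodes");
    byContext.put(server, "server nodes");

    // With the reversed comparator, iteration visits the most specific set first:
    // "server nodes", then "world nodes", then "global nodes".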
@@ -0,0 +1,64 @@
+/*
+ * Copyright (c) 2016 Lucko (Luck) <luck@lucko.me>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package me.lucko.luckperms.common.core;
+
+import me.lucko.luckperms.api.Node;
+
+import java.util.Comparator;
+
+public class NodeComparator implements Comparator<Node> {
+    private static final NodeComparator INSTANCE = new NodeComparator();
+    public static Comparator<Node> get() {
+        return INSTANCE;
+    }
+
+    public static Comparator<Node> reverse() {
+        return INSTANCE.reversed();
+    }
+
+    @Override
+    public int compare(Node o1, Node o2) {
+        if (o1.equals(o2)) {
+            return 0;
+        }
+
+        if (o1.isTemporary() != o2.isTemporary()) {
+            return o1.isTemporary() ? 1 : -1;
+        }
+
+        if (o1.isWildcard() != o2.isWildcard()) {
+            return o1.isWildcard() ? 1 : -1;
+        }
+
+        if (o1.isTemporary()) {
+            return o1.getSecondsTilExpiry() < o2.getSecondsTilExpiry() ? 1 : -1;
+        }
+
+        if (o1.isWildcard()) {
+            return o1.getWildcardLevel() > o2.getWildcardLevel() ? 1 : -1;
+        }
+
+        return PriorityComparator.get().compareStrings(o1.getPermission(), o2.getPermission()) == 1 ? -1 : 1;
+    }
+
+}
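NodeComparator orders permanent nodes before temporary ones and exact nodes before wildcards, then falls back to PriorityComparator's string comparison. A hedged usage sketch (NodeFactory.newBuilder is used elsewhere in this diff; whether a given string is detected as a wildcard depends on the Node implementation):

    List<Node> nodes = new ArrayList<>();
    nodes.add(NodeFactory.newBuilder("luckperms.*").build());         // assumed to be treated as a wildcard
    nodes.add(NodeFactory.newBuilder("luckperms.user.info").build()); // exact permission

    nodes.sort(NodeComparator.get());     // exact permission sorts before the wildcard
    nodes.sort(NodeComparator.reverse()); // wildcard first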
@@ -102,13 +102,19 @@ public class PriorityComparator implements Comparator<LocalizedNode> {

 public int compareStrings(String o1, String o2) {
 if (o1.equals(o2)) {
-return 1;
+return 0;
 }

 try {
 CollationKey o1c = collationKeyCache.get(o1);
 CollationKey o2c = collationKeyCache.get(o2);
-return o1c.compareTo(o2c) == 1 ? 1 : -1;
+int i = o1c.compareTo(o2c);
+if (i != 0) {
+    return i;
+}
+
+// fallback to standard string comparison
+return o1.compareTo(o2);
 } catch (Exception e) {
 // ignored
 }
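This hunk carries the substantive bug fix: the old compareStrings returned 1 for equal inputs and collapsed every CollationKey result to +1/-1, which breaks the Comparator contract (compare(x, x) must be 0 and sgn(compare(x, y)) must equal -sgn(compare(y, x))), and the sorted TreeSet/TreeMap structures introduced elsewhere in this commit rely on that contract. A minimal, self-contained illustration of the removed behaviour (plain java.util, not project code):

    // Reduced model of the old logic: never returns 0, collapses everything to +/-1.
    Comparator<String> broken = (a, b) -> a.equals(b) ? 1 : (a.compareTo(b) == 1 ? 1 : -1);

    int x = broken.compare("meta.prefix", "meta.prefix"); // 1
    int y = broken.compare("meta.prefix", "meta.prefix"); // 1 again, so sgn(x) != -sgn(y)
    // A TreeSet<String> built on 'broken' treats equal strings as distinct and can fail
    // contains()/remove() checks; returning 0 for equal keys restores the contract.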
File diff suppressed because it is too large
@@ -171,6 +171,5 @@ public class User extends PermissionHolder implements Identifiable<UserIdentifie
 if (cache != null) {
 cache.cleanup();
 }
-forceCleanup();
 }
 }
@@ -115,7 +115,7 @@ public class Exporter implements Runnable {
 AtomicInteger groupCount = new AtomicInteger(0);
 for (Group group : plugin.getGroupManager().getAll().values()) {
 write(writer, "# Export group: " + group.getName());
-for (Node node : group.getNodes()) {
+for (Node node : group.getNodes().values()) {
 write(writer, NodeFactory.nodeAsCommand(node, group.getName(), true));
 }
 write(writer, "");
@@ -217,7 +217,7 @@ public class Exporter implements Runnable {
 output.add("# Export user: " + user.getUuid().toString() + " - " + user.getName());

 boolean inDefault = false;
-for (Node node : user.getNodes()) {
+for (Node node : user.getNodes().values()) {
 if (node.isGroupNode() && node.getGroupName().equalsIgnoreCase("default")) {
 inDefault = true;
 continue;
@@ -83,7 +83,7 @@ public final class EventFactory {
 }

 public void handleGroupDelete(Group group, DeletionCause cause) {
-EventGroupDelete event = new EventGroupDelete(group.getName(), ImmutableSet.copyOf(group.getNodes()), cause);
+EventGroupDelete event = new EventGroupDelete(group.getName(), ImmutableSet.copyOf(group.getNodes().values()), cause);
 fireEvent(event);
 }

@@ -41,7 +41,7 @@ public class GenericUserManager extends AbstractManager<UserIdentifier, User> im
 boolean hasGroup = false;

 if (user.getPrimaryGroup().getStoredValue() != null && !user.getPrimaryGroup().getStoredValue().isEmpty()) {
-for (Node node : user.getPermissions(false)) {
+for (Node node : user.getNodes().values()) {
 if (node.isServerSpecific() || node.isWorldSpecific()) {
 continue;
 }
@@ -78,7 +78,7 @@ public class GenericUserManager extends AbstractManager<UserIdentifier, User> im
 return true;
 }

-for (Node node : user.getNodes()) {
+for (Node node : user.getNodes().values()) {
 // There's only one.
 if (!node.isGroupNode()) {
 return true;
@@ -48,7 +48,7 @@ public class AllParentsByWeightHolder extends StoredHolder {
 return cachedValue;
 }

-cachedValue = user.getAllNodes(null, ExtractedContexts.generate(Contexts.allowAll())).stream()
+cachedValue = user.resolveInheritancesAlmostEqual(ExtractedContexts.generate(Contexts.allowAll())).stream()
 .filter(Node::isGroupNode)
 .filter(Node::getValue)
 .map(n -> Optional.ofNullable(user.getPlugin().getGroupManager().getIfLoaded(n.getGroupName())))
@@ -46,7 +46,7 @@ public class ParentsByWeightHolder extends StoredHolder {
 return cachedValue;
 }

-cachedValue = user.getPermissions(true).stream()
+cachedValue = user.mergePermissionsToList().stream()
 .filter(Node::isGroupNode)
 .filter(Node::getValue)
 .map(n -> Optional.ofNullable(user.getPlugin().getGroupManager().getIfLoaded(n.getGroupName())))
@@ -171,7 +171,7 @@ public class JSONBacking extends FlatfileBacking {
 data.addProperty("name", user.getName());
 data.addProperty("primaryGroup", user.getPrimaryGroup().getStoredValue());

-Set<NodeDataHolder> nodes = user.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
+Set<NodeDataHolder> nodes = user.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
 data.add("permissions", serializePermissions(nodes));

 return writeElementToFile(userFile, data);
@@ -270,7 +270,7 @@ public class JSONBacking extends FlatfileBacking {
 JsonObject data = new JsonObject();
 data.addProperty("name", group.getName());

-Set<NodeDataHolder> nodes = group.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
+Set<NodeDataHolder> nodes = group.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
 data.add("permissions", serializePermissions(nodes));

 return writeElementToFile(groupFile, data);
@@ -321,7 +321,7 @@ public class JSONBacking extends FlatfileBacking {

 JsonObject data = new JsonObject();
 data.addProperty("name", group.getName());
-Set<NodeDataHolder> nodes = group.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
+Set<NodeDataHolder> nodes = group.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
 data.add("permissions", serializePermissions(nodes));
 return writeElementToFile(groupFile, data);
 }, false);
@@ -97,7 +97,7 @@ public class MongoDBBacking extends AbstractBacking {
 .append("primaryGroup", user.getPrimaryGroup().getStoredValue());

 Document perms = new Document();
-for (Map.Entry<String, Boolean> e : convert(exportToLegacy(user.getNodes())).entrySet()) {
+for (Map.Entry<String, Boolean> e : convert(exportToLegacy(user.getNodes().values())).entrySet()) {
 perms.append(e.getKey(), e.getValue());
 }

@@ -109,7 +109,7 @@ public class MongoDBBacking extends AbstractBacking {
 Document main = new Document("_id", group.getName());

 Document perms = new Document();
-for (Map.Entry<String, Boolean> e : convert(exportToLegacy(group.getNodes())).entrySet()) {
+for (Map.Entry<String, Boolean> e : convert(exportToLegacy(group.getNodes().values())).entrySet()) {
 perms.append(e.getKey(), e.getValue());
 }

@@ -237,7 +237,10 @@ public class MongoDBBacking extends AbstractBacking {
 if (cursor.hasNext()) {
 // User exists, let's load.
 Document d = cursor.next();
-user.setNodes(revert((Map<String, Boolean>) d.get("perms")));
+user.setNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream()
+    .map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue()))
+    .collect(Collectors.toSet())
+);
 user.getPrimaryGroup().setStoredValue(d.getString("primaryGroup"));

 boolean save = plugin.getUserManager().giveDefaultIfNeeded(user, false);
@@ -366,7 +369,10 @@ public class MongoDBBacking extends AbstractBacking {
 if (cursor.hasNext()) {
 // Group exists, let's load.
 Document d = cursor.next();
-group.setNodes(revert((Map<String, Boolean>) d.get("perms")));
+group.setNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream()
+    .map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue()))
+    .collect(Collectors.toSet())
+);
 } else {
 c.insertOne(fromGroup(group));
 }
@@ -389,7 +395,11 @@ public class MongoDBBacking extends AbstractBacking {
 try (MongoCursor<Document> cursor = c.find(new Document("_id", group.getName())).iterator()) {
 if (cursor.hasNext()) {
 Document d = cursor.next();
-group.setNodes(revert((Map<String, Boolean>) d.get("perms")));
+group.setNodes(revert((Map<String, Boolean>) d.get("perms")).entrySet().stream()
+    .map(e -> NodeFactory.fromSerialisedNode(e.getKey(), e.getValue()))
+    .collect(Collectors.toSet())
+);
 return true;
 }
 return false;
@@ -385,7 +385,7 @@ public class SQLBacking extends AbstractBacking {
 return false;
 }

-Set<NodeDataHolder> local = user.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
+Set<NodeDataHolder> local = user.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());

 Map.Entry<Set<NodeDataHolder>, Set<NodeDataHolder>> diff = compareSets(local, remote);

@@ -668,7 +668,7 @@ public class SQLBacking extends AbstractBacking {
 return false;
 }

-Set<NodeDataHolder> local = group.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
+Set<NodeDataHolder> local = group.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());

 Map.Entry<Set<NodeDataHolder>, Set<NodeDataHolder>> diff = compareSets(local, remote);

|
|||||||
values.put("name", user.getName());
|
values.put("name", user.getName());
|
||||||
values.put("primary-group", user.getPrimaryGroup().getStoredValue());
|
values.put("primary-group", user.getPrimaryGroup().getStoredValue());
|
||||||
|
|
||||||
Set<NodeDataHolder> data = user.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
|
Set<NodeDataHolder> data = user.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
|
||||||
values.put("permissions", serializePermissions(data));
|
values.put("permissions", serializePermissions(data));
|
||||||
|
|
||||||
return writeMapToFile(userFile, values);
|
return writeMapToFile(userFile, values);
|
||||||
@ -268,7 +268,7 @@ public class YAMLBacking extends FlatfileBacking {

     Map<String, Object> values = new LinkedHashMap<>();
     values.put("name", group.getName());
-    Set<NodeDataHolder> data = group.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
+    Set<NodeDataHolder> data = group.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
     values.put("permissions", serializePermissions(data));
     return writeMapToFile(groupFile, values);
 }
@ -318,7 +318,7 @@ public class YAMLBacking extends FlatfileBacking {

     Map<String, Object> values = new LinkedHashMap<>();
     values.put("name", group.getName());
-    Set<NodeDataHolder> data = group.getNodes().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
+    Set<NodeDataHolder> data = group.getNodes().values().stream().map(NodeDataHolder::fromNode).collect(Collectors.toSet());
     values.put("permissions", serializePermissions(data));
     return writeMapToFile(groupFile, values);
 }, false);
@ -29,7 +29,6 @@ import lombok.ToString;
 import me.lucko.luckperms.api.Contexts;
 import me.lucko.luckperms.api.context.ContextSet;
 import me.lucko.luckperms.api.context.ImmutableContextSet;
-import me.lucko.luckperms.api.context.MutableContextSet;

 @Getter
 @EqualsAndHashCode
@ -43,29 +42,26 @@ public class ExtractedContexts {
         return new ExtractedContexts(contexts);
     }

-    private Contexts contexts;
-    private ImmutableContextSet contextSet;
+    private final Contexts contexts;
+    private final ImmutableContextSet contextSet;
     private String server;
     private String world;

     private ExtractedContexts(Contexts context) {
         this.contexts = context;
+        this.contextSet = context.getContexts().makeImmutable();
         setup(context.getContexts());
     }

     private ExtractedContexts(ContextSet contexts) {
         this.contexts = null;
+        this.contextSet = contexts.makeImmutable();
         setup(contexts);
     }

     private void setup(ContextSet contexts) {
-        MutableContextSet contextSet = MutableContextSet.fromSet(contexts);
-        server = contextSet.getValues("server").stream().findAny().orElse(null);
-        world = contextSet.getValues("world").stream().findAny().orElse(null);
-        contextSet.removeAll("server");
-        contextSet.removeAll("world");
-
-        this.contextSet = contextSet.makeImmutable();
+        server = contexts.getAnyValue("server").orElse(null);
+        world = contexts.getAnyValue("world").orElse(null);
     }

     public Contexts getContexts() {
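The rewritten setup reads server and world through ContextSet#getAnyValue and keeps the full immutable set, where the old code copied into a MutableContextSet and stripped those keys. A hedged side-by-side sketch, using only the API calls visible in this hunk:

// Sketch only - the input set is supplied by the caller; no factory methods assumed.
import me.lucko.luckperms.api.context.ContextSet;
import me.lucko.luckperms.api.context.ImmutableContextSet;
import me.lucko.luckperms.api.context.MutableContextSet;

final class SetupComparison {
    static void compare(ContextSet input) {
        // Old behaviour: copy, read, then strip server/world from the stored set.
        MutableContextSet copy = MutableContextSet.fromSet(input);
        String oldServer = copy.getValues("server").stream().findAny().orElse(null);
        copy.removeAll("server");
        copy.removeAll("world");
        ImmutableContextSet oldStored = copy.makeImmutable(); // server/world removed

        // New behaviour: read directly, keep the whole set intact.
        String newServer = input.getAnyValue("server").orElse(null);
        ImmutableContextSet newStored = input.makeImmutable(); // full set retained
    }
}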
@ -0,0 +1,103 @@
+/*
+ * Copyright (c) 2016 Lucko (Luck) <luck@lucko.me>
+ *
+ * Permission is hereby granted, free of charge, to any person obtaining a copy
+ * of this software and associated documentation files (the "Software"), to deal
+ * in the Software without restriction, including without limitation the rights
+ * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ * copies of the Software, and to permit persons to whom the Software is
+ * furnished to do so, subject to the following conditions:
+ *
+ * The above copyright notice and this permission notice shall be included in all
+ * copies or substantial portions of the Software.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ * SOFTWARE.
+ */
+
+package me.lucko.luckperms.common.utils;
+
+import lombok.experimental.UtilityClass;
+
+import me.lucko.luckperms.api.Node;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+@UtilityClass
+public class NodeTools {
+
+    public static <T extends Node> void removeAlmostEqual(Iterator<T> it) {
+        List<T> alreadyIn = new ArrayList<>();
+
+        iter:
+        while (it.hasNext()) {
+            T next = it.next();
+            for (T n : alreadyIn) {
+                if (next.almostEquals(n)) {
+                    it.remove();
+                    continue iter;
+                }
+            }
+
+            alreadyIn.add(next);
+        }
+    }
+
+    public static <T extends Node> void removeIgnoreValue(Iterator<T> it) {
+        List<T> alreadyIn = new ArrayList<>();
+
+        iter:
+        while (it.hasNext()) {
+            T next = it.next();
+            for (T n : alreadyIn) {
+                if (next.equalsIgnoringValue(n)) {
+                    it.remove();
+                    continue iter;
+                }
+            }
+
+            alreadyIn.add(next);
+        }
+    }
+
+    public static <T extends Node> void removeIgnoreValueOrTemp(Iterator<T> it) {
+        List<T> alreadyIn = new ArrayList<>();
+
+        iter:
+        while (it.hasNext()) {
+            T next = it.next();
+            for (T n : alreadyIn) {
+                if (next.equalsIgnoringValueOrTemp(n)) {
+                    it.remove();
+                    continue iter;
+                }
+            }
+
+            alreadyIn.add(next);
+        }
+    }
+
+    public static <T extends Node> void removeSamePermission(Iterator<T> it) {
+        List<T> alreadyIn = new ArrayList<>();
+
+        iter:
+        while (it.hasNext()) {
+            T next = it.next();
+            for (T n : alreadyIn) {
+                if (next.getPermission().equals(n.getPermission())) {
+                    it.remove();
+                    continue iter;
+                }
+            }
+
+            alreadyIn.add(next);
+        }
+    }
+}
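The new NodeTools helpers deduplicate through the iterator, keeping the first node of each equivalence class and calling Iterator#remove on the rest. A small usage sketch (the resolvedNodes list is a hypothetical input, not something built in this hunk):

// Sketch only - removes later duplicates of the same permission string in place.
import me.lucko.luckperms.api.Node;
import me.lucko.luckperms.common.utils.NodeTools;

import java.util.ArrayList;
import java.util.List;

final class NodeToolsUsage {
    static List<Node> dedupe(List<Node> resolvedNodes) {
        // ArrayList iterators support remove(), so the copy is filtered in place.
        List<Node> copy = new ArrayList<>(resolvedNodes);
        NodeTools.removeSamePermission(copy.iterator());
        // 'copy' now holds at most one node per permission, in original order.
        return copy;
    }
}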
@ -33,7 +33,7 @@ import me.lucko.luckperms.api.LocalizedNode;
 import me.lucko.luckperms.api.Node;
 import me.lucko.luckperms.api.Tristate;
 import me.lucko.luckperms.api.context.ContextSet;
-import me.lucko.luckperms.common.caching.MetaHolder;
+import me.lucko.luckperms.common.caching.MetaAccumulator;
 import me.lucko.luckperms.common.core.model.Group;
 import me.lucko.luckperms.common.utils.ExtractedContexts;
 import me.lucko.luckperms.sponge.LPSpongePlugin;
@ -83,7 +83,7 @@ public class SpongeGroup extends Group {
     @Override
     public NodeTree load(ContextSet contexts) {
         // TODO move this away from NodeTree
-        Map<String, Boolean> permissions = parent.getAllNodesFiltered(ExtractedContexts.generate(plugin.getService().calculateContexts(contexts))).stream()
+        Map<String, Boolean> permissions = parent.getAllNodes(ExtractedContexts.generate(plugin.getService().calculateContexts(contexts))).stream()
                 .map(LocalizedNode::getNode)
                 .collect(Collectors.toMap(Node::getPermission, Node::getValue));

@ -96,7 +96,7 @@ public class SpongeGroup extends Group {
     .build(new CacheLoader<ContextSet, Set<SubjectReference>>() {
         @Override
         public Set<SubjectReference> load(ContextSet contexts) {
-            Set<SubjectReference> subjects = parent.getAllNodesFiltered(ExtractedContexts.generate(plugin.getService().calculateContexts(contexts))).stream()
+            Set<SubjectReference> subjects = parent.getAllNodes(ExtractedContexts.generate(plugin.getService().calculateContexts(contexts))).stream()
                     .map(LocalizedNode::getNode)
                     .filter(Node::isGroupNode)
                     .map(Node::getGroupName)
@ -224,17 +224,17 @@ public class SpongeGroup extends Group {
     }

     private Optional<String> getChatMeta(ContextSet contexts, boolean prefix) {
-        MetaHolder metaHolder = parent.accumulateMeta(null, null, ExtractedContexts.generate(plugin.getService().calculateContexts(contexts)));
+        MetaAccumulator metaAccumulator = parent.accumulateMeta(null, null, ExtractedContexts.generate(plugin.getService().calculateContexts(contexts)));
         if (prefix) {
-            return Optional.ofNullable(metaHolder.getPrefixStack().toFormattedString());
+            return Optional.ofNullable(metaAccumulator.getPrefixStack().toFormattedString());
         } else {
-            return Optional.ofNullable(metaHolder.getSuffixStack().toFormattedString());
+            return Optional.ofNullable(metaAccumulator.getSuffixStack().toFormattedString());
         }
     }

     private Optional<String> getMeta(ContextSet contexts, String key) {
-        MetaHolder metaHolder = parent.accumulateMeta(null, null, ExtractedContexts.generate(plugin.getService().calculateContexts(contexts)));
-        Map<String, String> meta = metaHolder.getMeta();
+        MetaAccumulator metaAccumulator = parent.accumulateMeta(null, null, ExtractedContexts.generate(plugin.getService().calculateContexts(contexts)));
+        Map<String, String> meta = metaAccumulator.getMeta();
         return Optional.ofNullable(meta.get(key));
     }
 }
@ -93,7 +93,7 @@ public class SpongeUser extends User {
         return (now - lastUse) > 600000;
     }

-    private void checkData() {
+    private synchronized void checkData() {
         if (parent.getUserData() == null) {
             plugin.getLog().warn("User " + parent.getName() + " - " + parent.getUuid() + " does not have any data loaded.");
             parent.setupData(false);
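Marking checkData as synchronized stops concurrent callers from both observing missing user data and triggering setupData twice. A minimal generic sketch of the guard (the field and setup call are stand-ins, not the actual SpongeUser internals):

// Sketch of the synchronized lazy-initialisation pattern used above.
final class LazyDataHolder {
    private Object data;

    synchronized void checkData() {
        if (data == null) {
            // Only the first thread runs the expensive setup; later callers block
            // on the monitor and then see the already-initialised field.
            data = expensiveSetup();
        }
    }

    private Object expensiveSetup() {
        return new Object();
    }
}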
@ -33,7 +33,7 @@ import me.lucko.luckperms.api.Node;
 import me.lucko.luckperms.api.Tristate;
 import me.lucko.luckperms.api.context.ContextSet;
 import me.lucko.luckperms.api.context.ImmutableContextSet;
-import me.lucko.luckperms.common.caching.MetaHolder;
+import me.lucko.luckperms.common.caching.MetaAccumulator;
 import me.lucko.luckperms.common.core.NodeBuilder;
 import me.lucko.luckperms.common.core.NodeFactory;
 import me.lucko.luckperms.common.core.model.Group;
@ -75,7 +75,7 @@ public class LuckPermsSubjectData implements LPSubjectData {
     try (Timing ignored = service.getPlugin().getTimings().time(LPTiming.LP_SUBJECT_GET_PERMISSIONS)) {
         Map<ImmutableContextSet, Map<String, Boolean>> perms = new HashMap<>();

-        for (Node n : enduring ? holder.getNodes() : holder.getTransientNodes()) {
+        for (Node n : enduring ? holder.getNodes().values() : holder.getTransientNodes().values()) {
             ContextSet contexts = n.getFullContexts();
             perms.computeIfAbsent(contexts.makeImmutable(), cs -> new HashMap<>()).put(n.getPermission(), n.getValue());
         }
@ -166,7 +166,7 @@ public class LuckPermsSubjectData implements LPSubjectData {
     try (Timing ignored = service.getPlugin().getTimings().time(LPTiming.LP_SUBJECT_GET_PARENTS)) {
         Map<ImmutableContextSet, Set<SubjectReference>> parents = new HashMap<>();

-        for (Node n : enduring ? holder.getNodes() : holder.getTransientNodes()) {
+        for (Node n : enduring ? holder.getNodes().values() : holder.getTransientNodes().values()) {
             if (!n.isGroupNode()) continue;

             ContextSet contexts = n.getFullContexts();
@ -271,7 +271,7 @@ public class LuckPermsSubjectData implements LPSubjectData {
     Map<ImmutableContextSet, Integer> minPrefixPriority = new HashMap<>();
     Map<ImmutableContextSet, Integer> minSuffixPriority = new HashMap<>();

-    for (Node n : enduring ? holder.getNodes() : holder.getTransientNodes()) {
+    for (Node n : enduring ? holder.getNodes().values() : holder.getTransientNodes().values()) {
         if (!n.getValue()) continue;
         if (!n.isMeta() && !n.isPrefix() && !n.isSuffix()) continue;

@ -331,8 +331,8 @@ public class LuckPermsSubjectData implements LPSubjectData {

     toRemove.forEach(makeUnsetConsumer(enduring));

-    MetaHolder metaHolder = holder.accumulateMeta(null, null, ExtractedContexts.generate(service.calculateContexts(context)));
-    int priority = (type.equals("prefix") ? metaHolder.getPrefixes() : metaHolder.getSuffixes()).keySet().stream()
+    MetaAccumulator metaAccumulator = holder.accumulateMeta(null, null, ExtractedContexts.generate(service.calculateContexts(context)));
+    int priority = (type.equals("prefix") ? metaAccumulator.getPrefixes() : metaAccumulator.getSuffixes()).keySet().stream()
             .mapToInt(e -> e).max().orElse(0);
     priority += 10;

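Apart from the accumulator rename, the priority selection above is unchanged: take the highest existing prefix (or suffix) priority and add 10. A tiny worked example with plain collections (the integer-keyed map shape is inferred from the code above):

// Worked example: pick a priority 10 above the current maximum, or 10 if none exist.
import java.util.HashMap;
import java.util.Map;

final class PriorityExample {
    static int nextPriority(Map<Integer, String> existing) {
        int priority = existing.keySet().stream().mapToInt(e -> e).max().orElse(0);
        return priority + 10;
    }

    public static void main(String[] args) {
        Map<Integer, String> prefixes = new HashMap<>();
        prefixes.put(100, "&c[Admin] ");
        prefixes.put(50, "&a[Member] ");
        System.out.println(nextPriority(prefixes)); // prints 110
    }
}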
@ -416,7 +416,7 @@ public class LuckPermsSubjectData implements LPSubjectData {
     }

     private Stream<Node> streamNodes(boolean enduring) {
-        return (enduring ? holder.getNodes() : holder.getTransientNodes()).stream();
+        return (enduring ? holder.getNodes() : holder.getTransientNodes()).values().stream();
     }

     private Consumer<Node> makeUnsetConsumer(boolean enduring) {