Remove CFI redirect and other minor changes.

This commit is contained in:
MattBDev 2020-02-21 14:56:21 -05:00
parent 4695c008b4
commit 62cb04b8ca
7 changed files with 74 additions and 87 deletions

View File

@ -39,6 +39,8 @@ import java.util.UUID;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import java.util.concurrent.Future; import java.util.concurrent.Future;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import net.minecraft.server.v1_14_R1.BiomeBase; import net.minecraft.server.v1_14_R1.BiomeBase;
import net.minecraft.server.v1_14_R1.BlockPosition; import net.minecraft.server.v1_14_R1.BlockPosition;
@ -183,15 +185,13 @@ public class BukkitGetBlocks_1_14 extends CharGetBlocks {
@NotNull @NotNull
@Override @Override
public Iterator<CompoundTag> iterator() { public Iterator<CompoundTag> iterator() {
Iterable<CompoundTag> result = Iterables.transform(Iterables.concat(slices), new com.google.common.base.Function<Entity, CompoundTag>() { Iterable<CompoundTag> result = StreamSupport
@Nullable .stream(Iterables.concat(slices).spliterator(), false).map(input -> {
@Override BukkitImplAdapter adapter = WorldEditPlugin.getInstance()
public CompoundTag apply(@Nullable Entity input) { .getBukkitImplAdapter();
BukkitImplAdapter adapter = WorldEditPlugin.getInstance().getBukkitImplAdapter();
NBTTagCompound tag = new NBTTagCompound(); NBTTagCompound tag = new NBTTagCompound();
return (CompoundTag) adapter.toNative(input.save(tag)); return (CompoundTag) adapter.toNative(input.save(tag));
} }).collect(Collectors.toList());
});
return result.iterator(); return result.iterator();
} }
}; };

View File

@ -39,6 +39,8 @@ import java.util.UUID;
import java.util.concurrent.Callable; import java.util.concurrent.Callable;
import java.util.concurrent.Future; import java.util.concurrent.Future;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
import javax.annotation.Nullable; import javax.annotation.Nullable;
import net.minecraft.server.v1_15_R1.BiomeBase; import net.minecraft.server.v1_15_R1.BiomeBase;
import net.minecraft.server.v1_15_R1.BiomeStorage; import net.minecraft.server.v1_15_R1.BiomeStorage;
@ -191,15 +193,13 @@ public class BukkitGetBlocks_1_15 extends CharGetBlocks {
@NotNull @NotNull
@Override @Override
public Iterator<CompoundTag> iterator() { public Iterator<CompoundTag> iterator() {
Iterable<CompoundTag> result = Iterables.transform(Iterables.concat(slices), new com.google.common.base.Function<Entity, CompoundTag>() { Iterable<CompoundTag> result = StreamSupport
@Nullable .stream(Iterables.concat(slices).spliterator(), false).map(input -> {
@Override BukkitImplAdapter adapter = WorldEditPlugin.getInstance()
public CompoundTag apply(@Nullable Entity input) { .getBukkitImplAdapter();
BukkitImplAdapter adapter = WorldEditPlugin.getInstance().getBukkitImplAdapter();
NBTTagCompound tag = new NBTTagCompound(); NBTTagCompound tag = new NBTTagCompound();
return (CompoundTag) adapter.toNative(input.save(tag)); return (CompoundTag) adapter.toNative(input.save(tag));
} }).collect(Collectors.toList());
});
return result.iterator(); return result.iterator();
} }
}; };

View File

@ -4,6 +4,7 @@ import com.boydti.fawe.bukkit.wrapper.AsyncBlock;
import com.boydti.fawe.bukkit.wrapper.AsyncWorld; import com.boydti.fawe.bukkit.wrapper.AsyncWorld;
import com.boydti.fawe.regions.FaweMask; import com.boydti.fawe.regions.FaweMask;
import com.sk89q.worldedit.bukkit.BukkitAdapter; import com.sk89q.worldedit.bukkit.BukkitAdapter;
import com.sk89q.worldedit.bukkit.BukkitPlayer;
import com.sk89q.worldedit.entity.Player; import com.sk89q.worldedit.entity.Player;
import com.sk89q.worldedit.math.BlockVector3; import com.sk89q.worldedit.math.BlockVector3;
import com.sk89q.worldedit.regions.CuboidRegion; import com.sk89q.worldedit.regions.CuboidRegion;
@ -54,7 +55,7 @@ public class FreeBuildRegion extends BukkitMaskManager {
BlockVector3 pos2 = BlockVector3.ZERO; BlockVector3 pos2 = BlockVector3.ZERO;
AsyncBlock block = new AsyncBlock(asyncWorld, 0, 0, 0); AsyncBlock block = new AsyncBlock(asyncWorld, 0, 0, 0);
BlockBreakEvent event = new BlockBreakEvent(block, BukkitAdapter.adapt(player)); BlockBreakEvent event = new BlockBreakEvent(block, ((BukkitPlayer) player).getPlayer());
return new FaweMask(pos1, pos2) { return new FaweMask(pos1, pos2) {

View File

@ -1,50 +0,0 @@
package com.boydti.fawe.bukkit.regions.plotsquared;
import com.github.intellectualsites.plotsquared.commands.Command;
import com.github.intellectualsites.plotsquared.commands.CommandDeclaration;
import com.github.intellectualsites.plotsquared.plot.commands.CommandCategory;
import com.github.intellectualsites.plotsquared.plot.commands.MainCommand;
import com.github.intellectualsites.plotsquared.plot.commands.RequiredType;
import com.github.intellectualsites.plotsquared.plot.config.Captions;
import com.github.intellectualsites.plotsquared.plot.object.Plot;
import com.github.intellectualsites.plotsquared.plot.object.PlotArea;
import com.github.intellectualsites.plotsquared.plot.object.PlotPlayer;
import com.github.intellectualsites.plotsquared.plot.object.RunnableVal2;
import com.github.intellectualsites.plotsquared.plot.object.RunnableVal3;
import com.github.intellectualsites.plotsquared.plot.object.worlds.SinglePlotArea;
import com.sk89q.worldedit.WorldEdit;
import java.util.concurrent.CompletableFuture;
// PlotSquared command stub that redirects users of the old "/plots cfi"
// (create-from-image) command to the WorldEdit-side "//cfi" command.
// This class is the one being deleted by this commit ("Remove CFI redirect").
@CommandDeclaration(
command = "cfi",
permission = "plots.createfromimage",
aliases = {"createfromheightmap", "createfromimage", "cfhm"},
category = CommandCategory.APPEARANCE,
requiredType = RequiredType.NONE,
description = "Generate a world from an image heightmap: [More info](https://goo.gl/friFbV)",
usage = "/plots cfi [url or dimensions]"
)
public class CFIRedirect extends Command {
// WorldEdit singleton; assigned in the constructor but never read in this
// class — presumably a leftover from the full CFI implementation.
private final WorldEdit we;
public CFIRedirect() {
// Registers this command under PlotSquared's main command tree; the boolean
// flag's meaning comes from Command's constructor — TODO confirm semantics.
super(MainCommand.getInstance(), true);
this.we = WorldEdit.getInstance();
}
@Override
public CompletableFuture<Boolean> execute(final PlotPlayer player, String[] args, RunnableVal3<Command, Runnable, Runnable> confirm, RunnableVal2<Command, CommandResult> whenDone) throws CommandException {
// Require at least one argument; otherwise report the usage string.
checkTrue(args.length >= 1, Captions.COMMAND_SYNTAX, getUsage());
// Caller must be standing inside a plot.
final Plot plot = check(player.getCurrentPlot(), Captions.NOT_IN_PLOT);
// NOTE(review): NOW_OWNER reads like a success/assignment caption; an
// ownership-check failure probably wants a "not the owner" style caption
// (e.g. NO_PLOT_PERMS) — confirm against PlotSquared's Captions enum.
checkTrue(plot.isOwner(player.getUUID()), Captions.NOW_OWNER);
// Refuse to run while another task is active on this plot.
checkTrue(plot.getRunning() == 0, Captions.WAIT_FOR_TIMER);
final PlotArea area = plot.getArea();
if (area instanceof SinglePlotArea) {
// Single-plot-area worlds: point the player at the replacement command.
player.sendMessage("The command has been changed to: //cfi");
} else {
// CFI needs the single-plot "worlds" component; bail out otherwise.
player.sendMessage("Must have the `worlds` component enabled in the PlotSquared config.yml");
return CompletableFuture.completedFuture(false);
}
return CompletableFuture.completedFuture(true);
}
}

View File

@ -13,6 +13,7 @@ import java.util.WeakHashMap;
import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantLock;
import org.jetbrains.annotations.NotNull;
/** /**
@ -83,13 +84,12 @@ public class SoftHashMap<K, V> implements Map<K, V> {
* elements retained after a GC due to the strong references. * elements retained after a GC due to the strong references.
* <p/> * <p/>
* Note that in highly concurrent environments the exact total number of strong references may differ slightly * Note that in highly concurrent environments the exact total number of strong references may differ slightly
* from the actual <code>retentionSize</code> value. This number is intended to be a best-effort retention low * from the actual {@code retentionSize} value. This number is intended to be a best-effort retention low
* water mark. * water mark.
* *
* @param retentionSize the total number of most recent entries in the map that will be strongly referenced * @param retentionSize the total number of most recent entries in the map that will be strongly referenced
* (retained), preventing them from being eagerly garbage collected by the JVM. * (retained), preventing them from being eagerly garbage collected by the JVM.
*/ */
@SuppressWarnings({"unchecked"})
public SoftHashMap(int retentionSize) { public SoftHashMap(int retentionSize) {
super(); super();
RETENTION_SIZE = Math.max(0, retentionSize); RETENTION_SIZE = Math.max(0, retentionSize);
@ -120,7 +120,7 @@ public class SoftHashMap<K, V> implements Map<K, V> {
* elements retained after a GC due to the strong references. * elements retained after a GC due to the strong references.
* <p/> * <p/>
* Note that in highly concurrent environments the exact total number of strong references may differ slightly * Note that in highly concurrent environments the exact total number of strong references may differ slightly
* from the actual <code>retentionSize</code> value. This number is intended to be a best-effort retention low * from the actual {@code retentionSize} value. This number is intended to be a best-effort retention low
* water mark. * water mark.
* *
* @param source the backing map to populate this {@code SoftHashMap} * @param source the backing map to populate this {@code SoftHashMap}
@ -132,6 +132,7 @@ public class SoftHashMap<K, V> implements Map<K, V> {
putAll(source); putAll(source);
} }
@Override
public V get(Object key) { public V get(Object key) {
processQueue(); processQueue();
@ -185,24 +186,28 @@ public class SoftHashMap<K, V> implements Map<K, V> {
} }
} }
@Override
public boolean isEmpty() { public boolean isEmpty() {
processQueue(); processQueue();
return map.isEmpty(); return map.isEmpty();
} }
@Override
public boolean containsKey(Object key) { public boolean containsKey(Object key) {
processQueue(); processQueue();
return map.containsKey(key); return map.containsKey(key);
} }
@Override
public boolean containsValue(Object value) { public boolean containsValue(Object value) {
processQueue(); processQueue();
Collection values = values(); Collection<?> values = values();
return values != null && values.contains(value); return values.contains(value);
} }
public void putAll(Map<? extends K, ? extends V> m) { @Override
if (m == null || m.isEmpty()) { public void putAll(@NotNull Map<? extends K, ? extends V> m) {
if (m.isEmpty()) {
processQueue(); processQueue();
return; return;
} }
@ -211,17 +216,21 @@ public class SoftHashMap<K, V> implements Map<K, V> {
} }
} }
@Override
@NotNull
public Set<K> keySet() { public Set<K> keySet() {
processQueue(); processQueue();
return map.keySet(); return map.keySet();
} }
@Override
@NotNull
public Collection<V> values() { public Collection<V> values() {
processQueue(); processQueue();
Collection<K> keys = map.keySet(); Collection<K> keys = map.keySet();
if (keys.isEmpty()) { if (keys.isEmpty()) {
//noinspection unchecked //noinspection unchecked
return Collections.EMPTY_SET; return Collections.emptySet();
} }
Collection<V> values = new ArrayList<>(keys.size()); Collection<V> values = new ArrayList<>(keys.size());
for (K key : keys) { for (K key : keys) {
@ -236,6 +245,7 @@ public class SoftHashMap<K, V> implements Map<K, V> {
/** /**
* Creates a new entry, but wraps the value in a SoftValue instance to enable auto garbage collection. * Creates a new entry, but wraps the value in a SoftValue instance to enable auto garbage collection.
*/ */
@Override
public V put(K key, V value) { public V put(K key, V value) {
processQueue(); // throw out garbage collected values first processQueue(); // throw out garbage collected values first
SoftValue<V, K> sv = new SoftValue<>(value, key, queue); SoftValue<V, K> sv = new SoftValue<>(value, key, queue);
@ -244,12 +254,14 @@ public class SoftHashMap<K, V> implements Map<K, V> {
return previous != null ? previous.get() : null; return previous != null ? previous.get() : null;
} }
@Override
public V remove(Object key) { public V remove(Object key) {
processQueue(); // throw out garbage collected values first processQueue(); // throw out garbage collected values first
SoftValue<V, K> raw = map.remove(key); SoftValue<V, K> raw = map.remove(key);
return raw != null ? raw.get() : null; return raw != null ? raw.get() : null;
} }
@Override
public void clear() { public void clear() {
strongReferencesLock.lock(); strongReferencesLock.lock();
try { try {
@ -261,17 +273,20 @@ public class SoftHashMap<K, V> implements Map<K, V> {
map.clear(); map.clear();
} }
@Override
public int size() { public int size() {
processQueue(); // throw out garbage collected values first processQueue(); // throw out garbage collected values first
return map.size(); return map.size();
} }
@Override
@NotNull
public Set<Map.Entry<K, V>> entrySet() { public Set<Map.Entry<K, V>> entrySet() {
processQueue(); // throw out garbage collected values first processQueue(); // throw out garbage collected values first
Collection<K> keys = map.keySet(); Collection<K> keys = map.keySet();
if (keys.isEmpty()) { if (keys.isEmpty()) {
//noinspection unchecked //noinspection unchecked
return Collections.EMPTY_SET; return Collections.emptySet();
} }
Map<K, V> kvPairs = new HashMap<>(keys.size()); Map<K, V> kvPairs = new HashMap<>(keys.size());

View File

@ -80,7 +80,7 @@ public class ReflectionUtils {
blankField(enumClass, "enumConstants"); // IBM JDK blankField(enumClass, "enumConstants"); // IBM JDK
} }
private static Class<?> UNMODIFIABLE_MAP = Collections.unmodifiableMap(Collections.EMPTY_MAP).getClass(); private static Class<?> UNMODIFIABLE_MAP = Collections.unmodifiableMap(Collections.emptyMap()).getClass();
public static <T, V> Map<T, V> getMap(Map<T, V> map) { public static <T, V> Map<T, V> getMap(Map<T, V> map) {
try { try {

View File

@ -12,6 +12,8 @@ package net.jpountz.lz4;
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*
* This file has been modified for use in the FAWE project.
*/ */
import java.io.FilterOutputStream; import java.io.FilterOutputStream;
@ -21,10 +23,10 @@ import java.util.zip.Checksum;
import net.jpountz.util.SafeUtils; import net.jpountz.util.SafeUtils;
/** /**
* Streaming LZ4. * Streaming LZ4 (not compatible with the LZ4 Frame format).
* <p>
* This class compresses data into fixed-size blocks of compressed data. * This class compresses data into fixed-size blocks of compressed data.
* * This class uses its own format and is not compatible with the LZ4 Frame format.
* @see LZ4BlockInputStream * @see LZ4BlockInputStream
*/ */
public final class LZ4BlockOutputStream extends FilterOutputStream { public final class LZ4BlockOutputStream extends FilterOutputStream {
@ -73,13 +75,13 @@ public final class LZ4BlockOutputStream extends FilterOutputStream {
private int o; private int o;
/** /**
* Create a new {@link OutputStream} with configurable block size. Large * Creates a new {@link OutputStream} with configurable block size. Large
* blocks require more memory at compression and decompression time but * blocks require more memory at compression and decompression time but
* should improve the compression ratio. * should improve the compression ratio.
* *
* @param out the {@link OutputStream} to feed * @param out the {@link OutputStream} to feed
* @param blockSize the maximum number of bytes to try to compress at once, * @param blockSize the maximum number of bytes to try to compress at once,
* must be >= 64 and <= 32 M * must be &gt;= 64 and &lt;= 32 M
* @param compressor the {@link LZ4Compressor} instance to use to compress * @param compressor the {@link LZ4Compressor} instance to use to compress
* data * data
* @param checksum the {@link Checksum} instance to use to check data for * @param checksum the {@link Checksum} instance to use to check data for
@ -101,14 +103,29 @@ public final class LZ4BlockOutputStream extends FilterOutputStream {
System.arraycopy(MAGIC, 0, compressedBuffer, 0, MAGIC_LENGTH); System.arraycopy(MAGIC, 0, compressedBuffer, 0, MAGIC_LENGTH);
} }
/**
* Creates a new instance which checks stream integrity and doesn't sync flush.
*
* @param out the {@link OutputStream} to feed
* @param blockSize the maximum number of bytes to try to compress at once,
* must be &gt;= 64 and &lt;= 32 M
* @param compressor the {@link LZ4Compressor} instance to use to compress
* data
*
* @see #LZ4BlockOutputStream(OutputStream, int, LZ4Compressor, Checksum, boolean)
*/
public LZ4BlockOutputStream(OutputStream out, int blockSize, LZ4Compressor compressor) { public LZ4BlockOutputStream(OutputStream out, int blockSize, LZ4Compressor compressor) {
this(out, blockSize, compressor, null, false); this(out, blockSize, compressor, null, false);
} }
/** /**
* Create a new instance which compresses with the standard LZ4 compression * Creates a new instance which compresses with the standard LZ4 compression
* algorithm. * algorithm.
* *
* @param out the {@link OutputStream} to feed
* @param blockSize the maximum number of bytes to try to compress at once,
* must be &gt;= 64 and &lt;= 32 M
*
* @see #LZ4BlockOutputStream(OutputStream, int, LZ4Compressor) * @see #LZ4BlockOutputStream(OutputStream, int, LZ4Compressor)
* @see LZ4Factory#fastCompressor() * @see LZ4Factory#fastCompressor()
*/ */
@ -117,7 +134,9 @@ public final class LZ4BlockOutputStream extends FilterOutputStream {
} }
/** /**
* Create a new instance which compresses into blocks of 64 KB. * Creates a new instance which compresses into blocks of 64 KB.
*
* @param out the {@link OutputStream} to feed
* *
* @see #LZ4BlockOutputStream(OutputStream, int) * @see #LZ4BlockOutputStream(OutputStream, int)
*/ */
@ -206,8 +225,8 @@ public final class LZ4BlockOutputStream extends FilterOutputStream {
} }
/** /**
* Flush this compressed {@link OutputStream}. * Flushes this compressed {@link OutputStream}.
* <p> *
* If the stream has been created with <code>syncFlush=true</code>, pending * If the stream has been created with <code>syncFlush=true</code>, pending
* data will be compressed and appended to the underlying {@link OutputStream} * data will be compressed and appended to the underlying {@link OutputStream}
* before calling {@link OutputStream#flush()} on the underlying stream. * before calling {@link OutputStream#flush()} on the underlying stream.
@ -228,6 +247,8 @@ public final class LZ4BlockOutputStream extends FilterOutputStream {
/** /**
* Same as {@link #close()} except that it doesn't close the underlying stream. * Same as {@link #close()} except that it doesn't close the underlying stream.
* This can be useful if you want to keep on using the underlying stream. * This can be useful if you want to keep on using the underlying stream.
*
* @throws IOException if an I/O error occurs.
*/ */
public void finish() throws IOException { public void finish() throws IOException {
ensureNotFinished(); ensureNotFinished();