fix: Improve edit processing (#2247)

Jordan 2023-06-01 15:11:16 +01:00 committed by GitHub
parent 7ce17e5834
commit 228e84e555
9 changed files with 638 additions and 23 deletions

@@ -339,9 +339,10 @@ public class Fawe {
                 Settings.settings().QUEUE.TARGET_SIZE,
                 Settings.settings().QUEUE.PARALLEL_THREADS
             );
-        if (Settings.settings().QUEUE.TARGET_SIZE < 2 * Settings.settings().QUEUE.PARALLEL_THREADS) {
+        if (Settings.settings().QUEUE.TARGET_SIZE < 4 * Settings.settings().QUEUE.PARALLEL_THREADS) {
             LOGGER.error(
-                    "queue.target_size is {}, and queue.parallel_threads is {}. It is HIGHLY recommended that queue" + ".target_size be at least twice queue.parallel_threads or higher.",
+                    "queue.target_size is {}, and queue.parallel_threads is {}. It is HIGHLY recommended that queue" +
+                            ".target_size be at least four times queue.parallel_threads or greater.",
                     Settings.settings().QUEUE.TARGET_SIZE,
                     Settings.settings().QUEUE.PARALLEL_THREADS
             );

@@ -520,10 +520,10 @@ public class Settings extends Config {
             " - A smaller value will reduce memory usage",
             " - A value too small may break some operations (deform?)",
             " - Values smaller than the configurated parallel-threads are not accepted",
-            " - It is recommended this option be at least 2x greater than parallel-threads"
+            " - It is recommended this option be at least 4x greater than parallel-threads"
     })
-    public int TARGET_SIZE = 64;
+    public int TARGET_SIZE = 8 * Runtime.getRuntime().availableProcessors();
     @Comment({
             "Force FAWE to start placing chunks regardless of whether an edit is finished processing",
             " - A larger value will use slightly less CPU time",

@@ -9,7 +9,20 @@ import java.util.Map;
 public class BlockVector3ChunkMap<T> implements IAdaptedMap<BlockVector3, T, Integer, T> {

-    private final Int2ObjectArrayMap<T> map = new Int2ObjectArrayMap<>();
+    private final Int2ObjectArrayMap<T> map;
+
+    public BlockVector3ChunkMap() {
+        map = new Int2ObjectArrayMap<>();
+    }
+
+    /**
+     * Create a new instance that is a copy of an existing map
+     *
+     * @param map existing map to copy
+     */
+    public BlockVector3ChunkMap(BlockVector3ChunkMap<T> map) {
+        this.map = new Int2ObjectArrayMap<>(map.getParent());
+    }

     @Override
     public Map<Integer, T> getParent() {
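A minimal usage sketch of the new copy constructor (not part of the commit). It relies only on methods visible in this diff (put, get, getParent); the point is that the copy gets its own backing Int2ObjectArrayMap, so later writes to the original are not visible in the snapshot.

import com.fastasyncworldedit.core.math.BlockVector3ChunkMap;

// Sketch only: the copy constructor produces an independent map.
public class ChunkMapCopySketch {

    public static void main(String[] args) {
        BlockVector3ChunkMap<String> original = new BlockVector3ChunkMap<>();
        original.put(1, 64, 2, "chest");

        // The copy is backed by a new Int2ObjectArrayMap built from getParent()
        BlockVector3ChunkMap<String> snapshot = new BlockVector3ChunkMap<>(original);

        original.put(3, 70, 4, "furnace"); // mutate the original after copying

        System.out.println(snapshot.get(1, 64, 2)); // "chest" - present in the snapshot
        System.out.println(snapshot.get(3, 70, 4)); // null    - later write not leaked
    }
}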

@@ -8,6 +8,7 @@ import com.sk89q.worldedit.math.BlockVector3;
 import com.sk89q.worldedit.world.biome.BiomeType;
 import com.sk89q.worldedit.world.block.BlockStateHolder;

+import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
 import java.util.EnumMap;
 import java.util.Map;

@@ -115,4 +116,15 @@ public interface IChunkSet extends IBlocks, OutputExtent {
      */
     boolean hasBiomes(int layer);

+    /**
+     * Create an entirely distinct copy of this SET instance. All mutable data must be copied to sufficiently prevent leakage
+     * between the copy and the original.
+     *
+     * @return distinct new {@link IChunkSet instance}
+     */
+    @Nonnull
+    default IChunkSet createCopy() {
+        return this;
+    }
+
 }
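To make the contract above concrete, here is a minimal sketch in plain Java (no FAWE types, not part of the commit) of the same pattern: the interface ships a default createCopy() that returns the instance itself, and mutable implementations override it with a genuine deep copy.

import java.util.HashSet;
import java.util.Set;

// Sketch only: the default returns `this`; mutable implementations deep-copy.
interface Snapshotable {

    default Snapshotable createCopy() {
        return this; // acceptable for immutable or empty implementations
    }
}

final class MutableNames implements Snapshotable {

    private final Set<String> names = new HashSet<>();

    void add(String name) {
        names.add(name);
    }

    @Override
    public Snapshotable createCopy() {
        MutableNames copy = new MutableNames();
        copy.names.addAll(names); // copy mutable state so the snapshot cannot leak changes
        return copy;
    }
}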

@@ -53,7 +53,6 @@ public abstract class QueueHandler implements Trimable, Runnable {
      */
     private long last;
     private long allocate = 50;
-    private double targetTPS = 18;

     public QueueHandler() {
         TaskManager.taskManager().repeat(this, 1);

@@ -87,7 +86,7 @@ public abstract class QueueHandler implements Trimable, Runnable {
     private long getAllocate() {
         long now = System.currentTimeMillis();
-        targetTPS = 18 - Math.max(Settings.settings().QUEUE.EXTRA_TIME_MS * 0.05, 0);
+        double targetTPS = 18 - Math.max(Settings.settings().QUEUE.EXTRA_TIME_MS * 0.05, 0);
         long diff = 50 + this.last - (this.last = now);
         long absDiff = Math.abs(diff);
         if (diff == 0) {
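A small worked example (not from the commit) of the expression now computed locally in getAllocate(): the target TPS is 18 minus 5% of queue.extra-time-ms, and negative settings cannot push the target above 18.

// Sketch only: reproduces the targetTPS arithmetic from getAllocate() above.
public class TargetTpsSketch {

    static double targetTps(long extraTimeMs) {
        return 18 - Math.max(extraTimeMs * 0.05, 0);
    }

    public static void main(String[] args) {
        System.out.println(targetTps(0));   // 18.0 - default extra-time-ms
        System.out.println(targetTps(50));  // 15.5 - tolerate dipping further below 20 TPS
        System.out.println(targetTps(-20)); // 18.0 - negative values are clamped out
    }
}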

@@ -275,8 +275,8 @@ public class SingleThreadQueueExtent extends ExtentBatchProcessorHolder implemen
      * Get a new IChunk from either the pool, or create a new one<br> + Initialize it at the
      * coordinates
      *
-     * @param chunkX
-     * @param chunkZ
+     * @param chunkX X chunk coordinate
+     * @param chunkZ Z chunk coordinate
      * @return IChunk
      */
     private ChunkHolder poolOrCreate(int chunkX, int chunkZ) {

@@ -309,19 +309,11 @@ public class SingleThreadQueueExtent extends ExtentBatchProcessorHolder implemen
         // If queueing is enabled AND either of the following
         // - memory is low & queue size > num threads + 8
         // - queue size > target size and primary queue has less than num threads submissions
-        if (enabledQueue && ((lowMem && size > Settings.settings().QUEUE.PARALLEL_THREADS + 8) || (size > Settings.settings().QUEUE.TARGET_SIZE && Fawe
-                .instance()
-                .getQueueHandler()
-                .isUnderutilized()))) {
+        int targetSize = lowMem ? Settings.settings().QUEUE.PARALLEL_THREADS + 8 : Settings.settings().QUEUE.TARGET_SIZE;
+        if (enabledQueue && size > targetSize && (lowMem || Fawe.instance().getQueueHandler().isUnderutilized())) {
             chunk = chunks.removeFirst();
             final Future future = submitUnchecked(chunk);
             if (future != null && !future.isDone()) {
-                final int targetSize;
-                if (lowMem) {
-                    targetSize = Settings.settings().QUEUE.PARALLEL_THREADS + 8;
-                } else {
-                    targetSize = Settings.settings().QUEUE.TARGET_SIZE;
-                }
                 pollSubmissions(targetSize, lowMem);
                 submissions.add(future);
             }
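The rewritten condition is equivalent to the old one, just with the threshold hoisted out: under low memory the threshold is parallel threads + 8 and the underutilization check is skipped; otherwise the threshold is the configured target size and the handler must also be underutilized. A plain-Java restatement (illustrative, not part of the commit):

// Sketch only: the simplified submission check with its two regimes.
public class SubmitCheckSketch {

    static boolean shouldSubmit(boolean enabledQueue, boolean lowMem, boolean underutilized,
                                int size, int parallelThreads, int configuredTargetSize) {
        int targetSize = lowMem ? parallelThreads + 8 : configuredTargetSize;
        return enabledQueue && size > targetSize && (lowMem || underutilized);
    }

    public static void main(String[] args) {
        // Low memory: submit as soon as more than parallelThreads + 8 chunks are queued.
        System.out.println(shouldSubmit(true, true, false, 17, 8, 64));   // true
        // Normal memory: needs size above target size AND an underutilized handler.
        System.out.println(shouldSubmit(true, false, false, 100, 8, 64)); // false
        System.out.println(shouldSubmit(true, false, true, 100, 8, 64));  // true
    }
}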

@@ -20,7 +20,6 @@ import java.util.HashSet;
 import java.util.Map;
 import java.util.Set;
 import java.util.UUID;
-import java.util.stream.IntStream;

 public class CharSetBlocks extends CharBlocks implements IChunkSet {

@@ -306,8 +305,12 @@ public class CharSetBlocks extends CharBlocks implements IChunkSet {
                 || (heightMaps != null && !heightMaps.isEmpty())) {
             return false;
         }
-        //noinspection SimplifyStreamApiCallChains - this is faster than using #noneMatch
-        return !IntStream.range(minSectionPosition, maxSectionPosition + 1).anyMatch(this::hasSection);
+        for (int i = minSectionPosition; i <= maxSectionPosition; i++) {
+            if (hasSection(i)) {
+                return false;
+            }
+        }
+        return true;
     }

     @Override
@@ -316,6 +319,9 @@ public class CharSetBlocks extends CharBlocks implements IChunkSet {
         tiles = null;
         entities = null;
         entityRemoves = null;
+        light = null;
+        skyLight = null;
+        heightMaps = null;
         super.reset();
         return null;
     }

@@ -329,6 +335,62 @@ public class CharSetBlocks extends CharBlocks implements IChunkSet {
         return biomes != null && biomes[layer] != null;
     }

+    @Override
+    public ThreadUnsafeCharBlocks createCopy() {
+        char[][] blocksCopy = new char[sectionCount][];
+        for (int i = 0; i < sectionCount; i++) {
+            if (blocks[i] != null) {
+                blocksCopy[i] = new char[FaweCache.INSTANCE.BLOCKS_PER_LAYER];
+                System.arraycopy(blocks[i], 0, blocksCopy[i], 0, FaweCache.INSTANCE.BLOCKS_PER_LAYER);
+            }
+        }
+        BiomeType[][] biomesCopy;
+        if (biomes == null) {
+            biomesCopy = null;
+        } else {
+            biomesCopy = new BiomeType[sectionCount][];
+            for (int i = 0; i < sectionCount; i++) {
+                if (biomes[i] != null) {
+                    biomesCopy[i] = new BiomeType[biomes[i].length];
+                    System.arraycopy(biomes[i], 0, biomesCopy[i], 0, biomes[i].length);
+                }
+            }
+        }
+        char[][] lightCopy = createLightCopy(light, sectionCount);
+        char[][] skyLightCopy = createLightCopy(skyLight, sectionCount);
+        return new ThreadUnsafeCharBlocks(
+                blocksCopy,
+                minSectionPosition,
+                maxSectionPosition,
+                biomesCopy,
+                sectionCount,
+                lightCopy,
+                skyLightCopy,
+                tiles != null ? new BlockVector3ChunkMap<>(tiles) : null,
+                entities != null ? new HashSet<>(entities) : null,
+                entityRemoves != null ? new HashSet<>(entityRemoves) : null,
+                heightMaps != null ? new EnumMap<>(heightMaps) : null,
+                defaultOrdinal(),
+                fastMode,
+                bitMask
+        );
+    }
+
+    static char[][] createLightCopy(char[][] lightArr, int sectionCount) {
+        if (lightArr == null) {
+            return null;
+        } else {
+            char[][] lightCopy = new char[sectionCount][];
+            for (int i = 0; i < sectionCount; i++) {
+                if (lightArr[i] != null) {
+                    lightCopy[i] = new char[lightArr[i].length];
+                    System.arraycopy(lightArr[i], 0, lightCopy[i], 0, lightArr[i].length);
+                }
+            }
+            return lightCopy;
+        }
+    }
+
     @Override
     public char[] load(final int layer) {
         updateSectionIndexRange(layer);
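Why the new createCopy() and createLightCopy() clone every non-null layer individually rather than cloning the outer array: a shallow clone of a char[][] still shares the inner sections, so writes from the continuing edit would leak into the snapshot. A small standalone demonstration (not part of the commit):

import java.util.Arrays;

// Sketch only: shallow vs. per-layer deep copy of a jagged char[][].
public class JaggedCopySketch {

    public static void main(String[] args) {
        char[][] original = new char[2][];
        original[0] = new char[]{'a', 'b'};

        char[][] shallow = original.clone();            // shares the inner arrays
        char[][] deep = new char[original.length][];
        for (int i = 0; i < original.length; i++) {
            if (original[i] != null) {
                deep[i] = Arrays.copyOf(original[i], original[i].length);
            }
        }

        original[0][0] = 'z';                           // later write by the edit
        System.out.println(shallow[0][0]);              // 'z' - leaked into the shallow copy
        System.out.println(deep[0][0]);                 // 'a' - deep copy stays isolated
    }
}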

@@ -0,0 +1,536 @@
package com.fastasyncworldedit.core.queue.implementation.blocks;
import com.fastasyncworldedit.core.Fawe;
import com.fastasyncworldedit.core.FaweCache;
import com.fastasyncworldedit.core.extent.processor.heightmap.HeightMapType;
import com.fastasyncworldedit.core.math.BlockVector3ChunkMap;
import com.fastasyncworldedit.core.queue.IBlocks;
import com.fastasyncworldedit.core.queue.IChunkSet;
import com.sk89q.jnbt.CompoundTag;
import com.sk89q.worldedit.internal.util.LogManagerCompat;
import com.sk89q.worldedit.math.BlockVector3;
import com.sk89q.worldedit.world.biome.BiomeType;
import com.sk89q.worldedit.world.block.BlockState;
import com.sk89q.worldedit.world.block.BlockStateHolder;
import com.sk89q.worldedit.world.block.BlockTypesCache;
import org.apache.logging.log4j.Logger;
import org.jetbrains.annotations.Nullable;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
/**
* Equivalent to {@link CharSetBlocks} without any attempt to make thread-safe for improved performance.
* This is currently only used as a "copy" of {@link CharSetBlocks} to provide to
* {@link com.fastasyncworldedit.core.queue.IBatchProcessor} instances for processing without overlapping the continuing edit.
*
* @since TODO
*/
public class ThreadUnsafeCharBlocks implements IChunkSet, IBlocks {
private static final Logger LOGGER = LogManagerCompat.getLogger();
private final char defaultOrdinal;
private char[][] blocks;
private int minSectionPosition;
private int maxSectionPosition;
private int sectionCount;
private BiomeType[][] biomes;
private char[][] light;
private char[][] skyLight;
private BlockVector3ChunkMap<CompoundTag> tiles;
private HashSet<CompoundTag> entities;
private HashSet<UUID> entityRemoves;
private Map<HeightMapType, int[]> heightMaps;
private boolean fastMode;
private int bitMask;
/**
* New instance given the data stored in a {@link CharSetBlocks} instance.
*
* @since TODO
*/
ThreadUnsafeCharBlocks(
char[][] blocks,
int minSectionPosition,
int maxSectionPosition,
BiomeType[][] biomes,
int sectionCount,
char[][] light,
char[][] skyLight,
BlockVector3ChunkMap<CompoundTag> tiles,
HashSet<CompoundTag> entities,
HashSet<UUID> entityRemoves,
Map<HeightMapType, int[]> heightMaps,
char defaultOrdinal,
boolean fastMode,
int bitMask
) {
this.blocks = blocks;
this.minSectionPosition = minSectionPosition;
this.maxSectionPosition = maxSectionPosition;
this.biomes = biomes;
this.sectionCount = sectionCount;
this.light = light;
this.skyLight = skyLight;
this.tiles = tiles;
this.entities = entities;
this.entityRemoves = entityRemoves;
this.heightMaps = heightMaps;
this.defaultOrdinal = defaultOrdinal;
this.fastMode = fastMode;
this.bitMask = bitMask;
}
@Override
public boolean hasSection(int layer) {
layer -= minSectionPosition;
return layer >= 0 && layer < blocks.length && blocks[layer] != null && blocks[layer].length == FaweCache.INSTANCE.BLOCKS_PER_LAYER;
}
@Override
public char[] load(int layer) {
updateSectionIndexRange(layer);
layer -= minSectionPosition;
return blocks[layer];
}
@Nullable
@Override
public char[] loadIfPresent(int layer) {
layer -= minSectionPosition;
return blocks[layer];
}
@Override
public Map<BlockVector3, CompoundTag> getTiles() {
return tiles == null ? Collections.emptyMap() : tiles;
}
@Override
public CompoundTag getTile(int x, int y, int z) {
return tiles.get(x, y, z);
}
@Override
public Set<CompoundTag> getEntities() {
return entities == null ? Collections.emptySet() : entities;
}
@Override
public Map<HeightMapType, int[]> getHeightMaps() {
return heightMaps == null ? new HashMap<>() : heightMaps;
}
@Override
public void removeSectionLighting(int layer, boolean sky) {
updateSectionIndexRange(layer);
layer -= minSectionPosition;
if (light == null) {
light = new char[sectionCount][];
}
if (light[layer] == null) {
light[layer] = new char[4096];
}
Arrays.fill(light[layer], (char) 0);
if (sky) {
if (skyLight == null) {
skyLight = new char[sectionCount][];
}
if (skyLight[layer] == null) {
skyLight[layer] = new char[4096];
}
Arrays.fill(skyLight[layer], (char) 0);
}
}
@Override
public boolean trim(boolean aggressive, int layer) {
return false;
}
@Override
public int getSectionCount() {
return sectionCount;
}
@Override
public int getMaxSectionPosition() {
return maxSectionPosition;
}
@Override
public int getMinSectionPosition() {
return minSectionPosition;
}
public char get(int x, int y, int z) {
int layer = (y >> 4);
if (!hasSection(layer)) {
return defaultOrdinal;
}
final int index = (y & 15) << 8 | z << 4 | x;
return blocks[layer - minSectionPosition][index];
}
@Override
public BiomeType getBiomeType(int x, int y, int z) {
int layer;
if (biomes == null || (y >> 4) < minSectionPosition || (y >> 4) > maxSectionPosition) {
return null;
} else if (biomes[(layer = (y >> 4) - minSectionPosition)] == null) {
return null;
}
return biomes[layer][(y & 15) >> 2 | (z >> 2) << 2 | x >> 2];
}
@Override
public BlockState getBlock(int x, int y, int z) {
return BlockTypesCache.states[get(x, y, z)];
}
@Override
public boolean setBiome(int x, int y, int z, BiomeType biome) {
updateSectionIndexRange(y >> 4);
int layer = (y >> 4) - minSectionPosition;
if (biomes == null) {
biomes = new BiomeType[sectionCount][];
biomes[layer] = new BiomeType[64];
} else if (biomes[layer] == null) {
biomes[layer] = new BiomeType[64];
}
biomes[layer][(y & 12) << 2 | (z & 12) | (x & 12) >> 2] = biome;
return true;
}
@Override
public boolean setBiome(BlockVector3 position, BiomeType biome) {
return setBiome(position.getX(), position.getY(), position.getZ(), biome);
}
public void set(int x, int y, int z, char value) {
final int layer = y >> 4;
final int index = (y & 15) << 8 | z << 4 | x;
try {
blocks[layer][index] = value;
} catch (ArrayIndexOutOfBoundsException exception) {
LOGGER.error("Tried setting block at coordinates (" + x + "," + y + "," + z + ")");
assert Fawe.platform() != null;
LOGGER.error("Layer variable was = {}", layer, exception);
}
}
@Override
public <T extends BlockStateHolder<T>> boolean setBlock(int x, int y, int z, T holder) {
updateSectionIndexRange(y >> 4);
set(x, y, z, holder.getOrdinalChar());
holder.applyTileEntity(this, x, y, z);
return true;
}
@Override
public void setBlocks(int layer, char[] data) {
updateSectionIndexRange(layer);
layer -= minSectionPosition;
this.blocks[layer] = data;
}
@Override
public boolean isEmpty() {
if (biomes != null
|| light != null
|| skyLight != null
|| (entities != null && !entities.isEmpty())
|| (tiles != null && !tiles.isEmpty())
|| (entityRemoves != null && !entityRemoves.isEmpty())
|| (heightMaps != null && !heightMaps.isEmpty())) {
return false;
}
for (int i = minSectionPosition; i <= maxSectionPosition; i++) {
if (hasSection(i)) {
return false;
}
}
return true;
}
@Override
public boolean setTile(int x, int y, int z, CompoundTag tile) {
updateSectionIndexRange(y >> 4);
if (tiles == null) {
tiles = new BlockVector3ChunkMap<>();
}
tiles.put(x, y, z, tile);
return true;
}
@Override
public void setBlockLight(int x, int y, int z, int value) {
updateSectionIndexRange(y >> 4);
if (light == null) {
light = new char[sectionCount][];
}
final int layer = (y >> 4) - minSectionPosition;
if (light[layer] == null) {
char[] c = new char[4096];
Arrays.fill(c, (char) 16);
light[layer] = c;
}
final int index = (y & 15) << 8 | (z & 15) << 4 | (x & 15);
light[layer][index] = (char) value;
}
@Override
public void setSkyLight(int x, int y, int z, int value) {
updateSectionIndexRange(y >> 4);
if (skyLight == null) {
skyLight = new char[sectionCount][];
}
final int layer = (y >> 4) - minSectionPosition;
if (skyLight[layer] == null) {
char[] c = new char[4096];
Arrays.fill(c, (char) 16);
skyLight[layer] = c;
}
final int index = (y & 15) << 8 | (z & 15) << 4 | (x & 15);
skyLight[layer][index] = (char) value;
}
@Override
public void setHeightMap(HeightMapType type, int[] heightMap) {
if (heightMaps == null) {
heightMaps = new EnumMap<>(HeightMapType.class);
}
heightMaps.put(type, heightMap);
}
@Override
public void setLightLayer(int layer, char[] toSet) {
updateSectionIndexRange(layer);
if (light == null) {
light = new char[sectionCount][];
}
layer -= minSectionPosition;
light[layer] = toSet;
}
@Override
public void setSkyLightLayer(int layer, char[] toSet) {
updateSectionIndexRange(layer);
if (skyLight == null) {
skyLight = new char[sectionCount][];
}
layer -= minSectionPosition;
skyLight[layer] = toSet;
}
@Override
public void setFullBright(int layer) {
updateSectionIndexRange(layer);
layer -= minSectionPosition;
if (light == null) {
light = new char[sectionCount][];
}
if (light[layer] == null) {
light[layer] = new char[4096];
}
if (skyLight == null) {
skyLight = new char[sectionCount][];
}
if (skyLight[layer] == null) {
skyLight[layer] = new char[4096];
}
Arrays.fill(light[layer], (char) 15);
Arrays.fill(skyLight[layer], (char) 15);
}
@Override
public void setEntity(CompoundTag tag) {
if (entities == null) {
entities = new HashSet<>();
}
entities.add(tag);
}
@Override
public void removeEntity(UUID uuid) {
if (entityRemoves == null) {
entityRemoves = new HashSet<>();
}
entityRemoves.add(uuid);
}
@Override
public void setFastMode(boolean fastMode) {
this.fastMode = fastMode;
}
@Override
public boolean isFastMode() {
return fastMode;
}
@Override
public void setBitMask(int bitMask) {
this.bitMask = bitMask;
}
@Override
public int getBitMask() {
return bitMask;
}
@Override
public Set<UUID> getEntityRemoves() {
return entityRemoves == null ? Collections.emptySet() : entityRemoves;
}
@Override
public BiomeType[][] getBiomes() {
return biomes;
}
@Override
public boolean hasBiomes() {
return IChunkSet.super.hasBiomes();
}
@Override
public char[][] getLight() {
return light;
}
@Override
public char[][] getSkyLight() {
return skyLight;
}
@Override
public boolean hasLight() {
return IChunkSet.super.hasLight();
}
@Override
public IChunkSet reset() {
blocks = new char[sectionCount][];
biomes = new BiomeType[sectionCount][];
light = new char[sectionCount][];
skyLight = new char[sectionCount][];
tiles.clear();
entities.clear();
entityRemoves.clear();
heightMaps.clear();
return this;
}
@Override
public boolean hasBiomes(int layer) {
layer -= minSectionPosition;
return layer >= 0 && layer < biomes.length && biomes[layer] != null && biomes[layer].length > 0;
}
@Override
public IChunkSet createCopy() {
char[][] blocksCopy = new char[sectionCount][];
for (int i = 0; i < sectionCount; i++) {
if (blocks[i] != null) {
blocksCopy[i] = new char[FaweCache.INSTANCE.BLOCKS_PER_LAYER];
System.arraycopy(blocks[i], 0, blocksCopy[i], 0, FaweCache.INSTANCE.BLOCKS_PER_LAYER);
}
}
BiomeType[][] biomesCopy;
if (biomes == null) {
biomesCopy = null;
} else {
biomesCopy = new BiomeType[sectionCount][];
for (int i = 0; i < sectionCount; i++) {
if (biomes[i] != null) {
biomesCopy[i] = new BiomeType[biomes[i].length];
System.arraycopy(biomes[i], 0, biomesCopy[i], 0, biomes[i].length);
}
}
}
char[][] lightCopy = CharSetBlocks.createLightCopy(light, sectionCount);
char[][] skyLightCopy = CharSetBlocks.createLightCopy(skyLight, sectionCount);
return new ThreadUnsafeCharBlocks(
blocksCopy,
minSectionPosition,
maxSectionPosition,
biomesCopy,
sectionCount,
lightCopy,
skyLightCopy,
tiles != null ? new BlockVector3ChunkMap<>(tiles) : null,
entities != null ? new HashSet<>(entities) : null,
entityRemoves != null ? new HashSet<>(entityRemoves) : null,
heightMaps != null ? new HashMap<>(heightMaps) : null,
defaultOrdinal,
fastMode,
bitMask
);
}
@Override
public boolean trim(boolean aggressive) {
return false;
}
// Checks and updates the various section arrays against the new layer index
private void updateSectionIndexRange(int layer) {
if (layer >= minSectionPosition && layer <= maxSectionPosition) {
return;
}
if (layer < minSectionPosition) {
int diff = minSectionPosition - layer;
sectionCount += diff;
char[][] tmpBlocks = new char[sectionCount][];
System.arraycopy(blocks, 0, tmpBlocks, diff, blocks.length);
blocks = tmpBlocks;
minSectionPosition = layer;
if (biomes != null) {
BiomeType[][] tmpBiomes = new BiomeType[sectionCount][64];
System.arraycopy(biomes, 0, tmpBiomes, diff, biomes.length);
biomes = tmpBiomes;
}
if (light != null) {
char[][] tmplight = new char[sectionCount][];
System.arraycopy(light, 0, tmplight, diff, light.length);
light = tmplight;
}
if (skyLight != null) {
char[][] tmplight = new char[sectionCount][];
System.arraycopy(skyLight, 0, tmplight, diff, skyLight.length);
skyLight = tmplight;
}
} else {
int diff = layer - maxSectionPosition;
sectionCount += diff;
char[][] tmpBlocks = new char[sectionCount][];
System.arraycopy(blocks, 0, tmpBlocks, 0, blocks.length);
blocks = tmpBlocks;
maxSectionPosition = layer;
if (biomes != null) {
BiomeType[][] tmpBiomes = new BiomeType[sectionCount][64];
System.arraycopy(biomes, 0, tmpBiomes, 0, biomes.length);
biomes = tmpBiomes;
}
if (light != null) {
char[][] tmplight = new char[sectionCount][];
System.arraycopy(light, 0, tmplight, 0, light.length);
light = tmplight;
}
if (skyLight != null) {
char[][] tmplight = new char[sectionCount][];
System.arraycopy(skyLight, 0, tmplight, 0, skyLight.length);
skyLight = tmplight;
}
}
}
}
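The updateSectionIndexRange(...) helper at the end of the class grows the per-section arrays whenever a layer outside the current range is touched. A worked example in plain Java (illustrative, with made-up layer numbers) of the downward-growth branch: the arrays grow by the difference and the existing sections shift up by that offset.

// Sketch only: the downward-growth arithmetic used by updateSectionIndexRange.
public class SectionGrowthSketch {

    public static void main(String[] args) {
        int minSectionPosition = -4;
        int sectionCount = 24;                      // sections -4..19
        char[][] blocks = new char[sectionCount][];
        blocks[0] = new char[4096];                 // existing data at layer -4

        int layer = -6;                             // touched layer below the minimum
        int diff = minSectionPosition - layer;      // 2 new layers needed
        sectionCount += diff;                       // 26
        char[][] grown = new char[sectionCount][];
        System.arraycopy(blocks, 0, grown, diff, blocks.length);
        minSectionPosition = layer;

        System.out.println(grown[0] == null);       // true  - new empty layers -6 and -5
        System.out.println(grown[2] != null);       // true  - old layer -4 now at index 2
        System.out.println(minSectionPosition);     // -6
    }
}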

@@ -1044,7 +1044,7 @@ public class ChunkHolder<T extends Future<T>> implements IQueueChunk<T> {
         if (chunkSet != null && !chunkSet.isEmpty()) {
             chunkSet.setBitMask(bitMask);
             try {
-                return this.call(chunkSet, () -> {
+                return this.call(chunkSet.createCopy(), () -> {
                     this.delegate = NULL;
                     chunkSet = null;
                     calledLock.unlock(stamp);