@@ -339,9 +339,10 @@ public void setupConfigs() {
Settings.settings().QUEUE.TARGET_SIZE,
Settings.settings().QUEUE.PARALLEL_THREADS
);
- if (Settings.settings().QUEUE.TARGET_SIZE < 2 * Settings.settings().QUEUE.PARALLEL_THREADS) {
+ if (Settings.settings().QUEUE.TARGET_SIZE < 4 * Settings.settings().QUEUE.PARALLEL_THREADS) {
LOGGER.error(
"queue.target_size is {}, and queue.parallel_threads is {}. It is HIGHLY recommended that queue" + ".target_size be at least twice queue.parallel_threads or higher.",
"queue.target_size is {}, and queue.parallel_threads is {}. It is HIGHLY recommended that queue" +
".target_size be at least four times queue.parallel_threads or greater.",
Settings.settings().QUEUE.TARGET_SIZE,
Settings.settings().QUEUE.PARALLEL_THREADS
);
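A quick sanity check of the tightened threshold: with queue.parallel_threads = 8, the warning now fires for any queue.target_size below 32, where the old 2x rule only required 16. A minimal standalone sketch of the same comparison (the two values are hypothetical locals standing in for the real Settings lookups):

```java
// Minimal sketch of the tightened check; values are illustrative, not the real defaults.
int parallelThreads = 8;  // stands in for Settings.settings().QUEUE.PARALLEL_THREADS
int targetSize = 24;      // stands in for Settings.settings().QUEUE.TARGET_SIZE

if (targetSize < 4 * parallelThreads) {
    // Fires: 24 < 32. Under the old 2x rule (24 < 16) it would have stayed silent.
    System.err.printf("target_size %d is below the recommended minimum %d%n",
            targetSize, 4 * parallelThreads);
}
```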
@@ -520,10 +520,10 @@ public static class QUEUE {
" - A smaller value will reduce memory usage",
" - A value too small may break some operations (deform?)",
" - Values smaller than the configurated parallel-threads are not accepted",
" - It is recommended this option be at least 2x greater than parallel-threads"
" - It is recommended this option be at least 4x greater than parallel-threads"

})
- public int TARGET_SIZE = 64;
+ public int TARGET_SIZE = 8 * Runtime.getRuntime().availableProcessors();
@Comment({
"Force FAWE to start placing chunks regardless of whether an edit is finished processing",
" - A larger value will use slightly less CPU time",
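The new default ties TARGET_SIZE to the machine rather than a flat 64, which pairs with the 4x rule above. Assuming PARALLEL_THREADS also defaults to the processor count (an assumption here, not something this diff shows), the 8x-cores default always clears the recommended minimum with a 2x margin:

```java
// Sketch of how the new default relates to the 4x recommendation.
// ASSUMPTION: parallelThreads defaults to availableProcessors(); not shown in this diff.
int cores = Runtime.getRuntime().availableProcessors();
int parallelThreads = cores;   // assumed default
int targetSize = 8 * cores;    // mirrors the new TARGET_SIZE default

// Holds for any core count: 8 * cores >= 4 * cores.
assert targetSize >= 4 * parallelThreads;
System.out.printf("cores=%d -> target_size=%d (recommended minimum %d)%n",
        cores, targetSize, 4 * parallelThreads);
```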
@@ -9,7 +9,20 @@

public class BlockVector3ChunkMap<T> implements IAdaptedMap<BlockVector3, T, Integer, T> {

- private final Int2ObjectArrayMap<T> map = new Int2ObjectArrayMap<>();
+ private final Int2ObjectArrayMap<T> map;
+
+ public BlockVector3ChunkMap() {
+     map = new Int2ObjectArrayMap<>();
+ }
+
+ /**
+  * Create a new instance that is a copy of an existing map
+  *
+  * @param map existing map to copy
+  */
+ public BlockVector3ChunkMap(BlockVector3ChunkMap<T> map) {
+     this.map = new Int2ObjectArrayMap<>(map.getParent());
+ }

@Override
public Map<Integer, T> getParent() {
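A short usage sketch of the new copy constructor. Since it routes the existing backing map through fastutil's Int2ObjectArrayMap copy constructor, the copy owns its own entry storage and later writes do not leak back into the original (the String value type and coordinates below are purely illustrative):

```java
// Hypothetical usage; assumes the usual Map-style API inherited via IAdaptedMap.
BlockVector3ChunkMap<String> original = new BlockVector3ChunkMap<>();
original.put(BlockVector3.at(3, 64, 7), "tile-a");

BlockVector3ChunkMap<String> copy = new BlockVector3ChunkMap<>(original);
copy.put(BlockVector3.at(5, 70, 9), "tile-b");

assert original.size() == 1;  // unaffected by the write to the copy
assert copy.size() == 2;
```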
@@ -8,6 +8,7 @@
import com.sk89q.worldedit.world.biome.BiomeType;
import com.sk89q.worldedit.world.block.BlockStateHolder;

+ import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import java.util.EnumMap;
import java.util.Map;
@@ -115,4 +116,15 @@ default Operation commit() {
*/
boolean hasBiomes(int layer);

+ /**
+  * Create an entirely distinct copy of this SET instance. All mutable data must be copied to prevent any leakage
+  * between the copy and the original.
+  *
+  * @return distinct new {@link IChunkSet} instance
+  */
+ @Nonnull
+ default IChunkSet createCopy() {
+     return this;
+ }
+
}
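For callers the contract is the important part: whatever createCopy returns must stay stable while the original keeps mutating. The default of returning this is only sound for implementations with no mutable state; anything mutable has to override it, as CharSetBlocks does further down. A hedged caller-side sketch (processAsync is a hypothetical method, not part of this API):

```java
// Hypothetical caller: snapshot the set before it is handed off and reused.
IChunkSet snapshot = chunkSet.createCopy();
processAsync(snapshot);  // hypothetical async consumer of the snapshot
chunkSet.reset();        // must not disturb the snapshot if createCopy honours its contract
```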
@@ -53,7 +53,6 @@ public abstract class QueueHandler implements Trimable, Runnable {
*/
private long last;
private long allocate = 50;
- private double targetTPS = 18;

public QueueHandler() {
TaskManager.taskManager().repeat(this, 1);
@@ -87,7 +86,7 @@ public boolean isUnderutilized() {

private long getAllocate() {
long now = System.currentTimeMillis();
- targetTPS = 18 - Math.max(Settings.settings().QUEUE.EXTRA_TIME_MS * 0.05, 0);
+ double targetTPS = 18 - Math.max(Settings.settings().QUEUE.EXTRA_TIME_MS * 0.05, 0);
long diff = 50 + this.last - (this.last = now);
long absDiff = Math.abs(diff);
if (diff == 0) {
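Demoting targetTPS to a local is safe because it is recomputed from settings on every getAllocate() call; keeping it as a field only cached a stale value between ticks. The formula itself rewards a worked example, since EXTRA_TIME_MS trades target TPS for edit throughput:

```java
// Worked examples of targetTPS = 18 - max(EXTRA_TIME_MS * 0.05, 0); values illustrative.
double atZero    = 18 - Math.max(0 * 0.05, 0);    // EXTRA_TIME_MS = 0   -> 18.0
double atFifty   = 18 - Math.max(50 * 0.05, 0);   // EXTRA_TIME_MS = 50  -> 15.5
double atHundred = 18 - Math.max(100 * 0.05, 0);  // EXTRA_TIME_MS = 100 -> 13.0
// The max(..., 0) clamp means a negative EXTRA_TIME_MS never raises the target above 18.
```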
@@ -275,8 +275,8 @@ public synchronized boolean trim(boolean aggressive) {
* Get a new IChunk from either the pool, or create a new one<br> + Initialize it at the
* coordinates
*
- @param chunkX
- @param chunkZ
+ @param chunkX X chunk coordinate
+ @param chunkZ Z chunk coordinate
* @return IChunk
*/
private ChunkHolder poolOrCreate(int chunkX, int chunkZ) {
@@ -309,19 +309,11 @@ public final IQueueChunk getOrCreateChunk(int x, int z) {
// If queueing is enabled AND either of the following
// - memory is low & queue size > num threads + 8
// - queue size > target size and primary queue has less than num threads submissions
- if (enabledQueue && ((lowMem && size > Settings.settings().QUEUE.PARALLEL_THREADS + 8) || (size > Settings.settings().QUEUE.TARGET_SIZE && Fawe
-         .instance()
-         .getQueueHandler()
-         .isUnderutilized()))) {
+ int targetSize = lowMem ? Settings.settings().QUEUE.PARALLEL_THREADS + 8 : Settings.settings().QUEUE.TARGET_SIZE;
+ if (enabledQueue && size > targetSize && (lowMem || Fawe.instance().getQueueHandler().isUnderutilized())) {
chunk = chunks.removeFirst();
final Future future = submitUnchecked(chunk);
if (future != null && !future.isDone()) {
- final int targetSize;
- if (lowMem) {
-     targetSize = Settings.settings().QUEUE.PARALLEL_THREADS + 8;
- } else {
-     targetSize = Settings.settings().QUEUE.TARGET_SIZE;
- }
pollSubmissions(targetSize, lowMem);
submissions.add(future);
}
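The refactor picks the threshold once up front, so the submission test collapses to a single comparison and the duplicated targetSize selection inside the branch disappears. A minimal sketch of the new shape with hypothetical stand-in values:

```java
// Sketch of the simplified condition; all values are illustrative stand-ins.
boolean enabledQueue = true, lowMem = false, underutilized = true;
int size = 80, parallelThreads = 8, configuredTargetSize = 64;

int targetSize = lowMem ? parallelThreads + 8 : configuredTargetSize;
if (enabledQueue && size > targetSize && (lowMem || underutilized)) {
    // submit the oldest chunk, then poll submissions down toward targetSize
}
```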
@@ -20,7 +20,6 @@
import java.util.Map;
import java.util.Set;
import java.util.UUID;
- import java.util.stream.IntStream;

public class CharSetBlocks extends CharBlocks implements IChunkSet {

@@ -306,8 +305,12 @@ public boolean isEmpty() {
|| (heightMaps != null && !heightMaps.isEmpty())) {
return false;
}
- //noinspection SimplifyStreamApiCallChains - this is faster than using #noneMatch
- return !IntStream.range(minSectionPosition, maxSectionPosition + 1).anyMatch(this::hasSection);
+ for (int i = minSectionPosition; i <= maxSectionPosition; i++) {
+     if (hasSection(i)) {
+         return false;
+     }
+ }
+ return true;
}

@Override
@@ -316,6 +319,9 @@ public IChunkSet reset() {
tiles = null;
entities = null;
entityRemoves = null;
+ light = null;
+ skyLight = null;
+ heightMaps = null;
super.reset();
return null;
}
@@ -329,6 +335,62 @@ public boolean hasBiomes(int layer) {
return biomes != null && biomes[layer] != null;
}

+ @Override
+ public ThreadUnsafeCharBlocks createCopy() {
+     char[][] blocksCopy = new char[sectionCount][];
+     for (int i = 0; i < sectionCount; i++) {
+         if (blocks[i] != null) {
+             blocksCopy[i] = new char[FaweCache.INSTANCE.BLOCKS_PER_LAYER];
+             System.arraycopy(blocks[i], 0, blocksCopy[i], 0, FaweCache.INSTANCE.BLOCKS_PER_LAYER);
+         }
+     }
+     BiomeType[][] biomesCopy;
+     if (biomes == null) {
+         biomesCopy = null;
+     } else {
+         biomesCopy = new BiomeType[sectionCount][];
+         for (int i = 0; i < sectionCount; i++) {
+             if (biomes[i] != null) {
+                 biomesCopy[i] = new BiomeType[biomes[i].length];
+                 System.arraycopy(biomes[i], 0, biomesCopy[i], 0, biomes[i].length);
+             }
+         }
+     }
+     char[][] lightCopy = createLightCopy(light, sectionCount);
+     char[][] skyLightCopy = createLightCopy(skyLight, sectionCount);
+     return new ThreadUnsafeCharBlocks(
+             blocksCopy,
+             minSectionPosition,
+             maxSectionPosition,
+             biomesCopy,
+             sectionCount,
+             lightCopy,
+             skyLightCopy,
+             tiles != null ? new BlockVector3ChunkMap<>(tiles) : null,
+             entities != null ? new HashSet<>(entities) : null,
+             entityRemoves != null ? new HashSet<>(entityRemoves) : null,
+             heightMaps != null ? new EnumMap<>(heightMaps) : null,
+             defaultOrdinal(),
+             fastMode,
+             bitMask
+     );
+ }
+
+ static char[][] createLightCopy(char[][] lightArr, int sectionCount) {
+     if (lightArr == null) {
+         return null;
+     } else {
+         char[][] lightCopy = new char[sectionCount][];
+         for (int i = 0; i < sectionCount; i++) {
+             if (lightArr[i] != null) {
+                 lightCopy[i] = new char[lightArr[i].length];
+                 System.arraycopy(lightArr[i], 0, lightCopy[i], 0, lightArr[i].length);
+             }
+         }
+         return lightCopy;
+     }
+ }
+
@Override
public char[] load(final int layer) {
updateSectionIndexRange(layer);
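The point of all the System.arraycopy calls above is isolation: every non-null section array is cloned, so mutating the original after the copy cannot bleed into the snapshot. A standalone sketch of that property using bare char[][] arrays (not the real classes):

```java
// Standalone illustration of the per-section deep copy used by createCopy/createLightCopy.
char[][] light = new char[4][];
light[1] = new char[2048];
light[1][0] = 15;

char[][] copy = new char[light.length][];
for (int i = 0; i < light.length; i++) {
    if (light[i] != null) {
        copy[i] = new char[light[i].length];
        System.arraycopy(light[i], 0, copy[i], 0, light[i].length);
    }
}

light[1][0] = 0;          // mutate the original...
assert copy[1][0] == 15;  // ...the snapshot is unaffected
assert copy[0] == null;   // null sections stay null
```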