
Commit

Sync chunk system changes from Paper
Additionally, slightly increase parallelism of ticket level propagation
Spottedleaf committed Jun 5, 2024
1 parent b6a8fed commit 23f201e
Showing 6 changed files with 105 additions and 148 deletions.
@@ -107,4 +107,9 @@ public final boolean removeIf(final Predicate<? super T> filter) {

return ret;
}

@Override
public Object[] moonrise$copyBackingArray() {
return this.contents.clone();
}
}
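
The new moonrise$copyBackingArray() accessor exposes a clone of the set's backing array so that debug code can walk a stable snapshot instead of the live, concurrently mutated structure (see the getDebugJson() hunk further down). Below is a minimal, hypothetical sketch of that pattern; SnapshotList and its methods are invented for illustration and are not the real SortedArraySet mixin.

    import java.util.function.Consumer;

    // Hypothetical container mirroring the idea behind moonrise$copyBackingArray(): expose a
    // clone of the backing array so debug code can walk a stable snapshot instead of the live,
    // possibly concurrently mutated structure.
    final class SnapshotList<T> {
        private Object[] contents = new Object[8];
        private int size;

        synchronized void add(final T value) {
            if (this.size == this.contents.length) {
                final Object[] grown = new Object[this.contents.length * 2];
                System.arraycopy(this.contents, 0, grown, 0, this.size);
                this.contents = grown;
            }
            this.contents[this.size++] = value;
        }

        // analogous to moonrise$copyBackingArray(): an O(n) copy whose trailing slots may be null
        Object[] copyBackingArray() {
            return this.contents.clone();
        }

        // best-effort iteration over the snapshot, as in the getDebugJson() change below
        @SuppressWarnings("unchecked")
        void forEachSnapshot(final Consumer<? super T> action) {
            for (final Object raw : this.copyBackingArray()) {
                if (raw == null) {
                    continue; // unused capacity, or a slot cleared by a concurrent writer
                }
                action.accept((T) raw);
            }
        }
    }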
@@ -824,34 +824,6 @@ public static Cancellable loadDataAsync(final ServerLevel world, final int chunk
return thread.loadDataAsyncInternal(world, chunkX, chunkZ, type, onComplete, intendingToBlock, priority);
}

private static Boolean doesRegionFileExist(final int chunkX, final int chunkZ, final boolean intendingToBlock,
final ChunkDataController taskController) {
final ChunkPos chunkPos = new ChunkPos(chunkX, chunkZ);
if (intendingToBlock) {
return taskController.computeForRegionFile(chunkX, chunkZ, true, (final RegionFile file) -> {
if (file == null) { // null if no regionfile exists
return Boolean.FALSE;
}

return file.hasChunk(chunkPos) ? Boolean.TRUE : Boolean.FALSE;
});
} else {
// first check if the region file for sure does not exist
if (taskController.doesRegionFileNotExist(chunkX, chunkZ)) {
return Boolean.FALSE;
} // else: it either exists or is not known, fall back to checking the loaded region file

return taskController.computeForRegionFileIfLoaded(chunkX, chunkZ, (final RegionFile file) -> {
if (file == null) { // null if not loaded
// not sure at this point, let the I/O thread figure it out
return Boolean.TRUE;
}

return file.hasChunk(chunkPos) ? Boolean.TRUE : Boolean.FALSE;
});
}
}

Cancellable loadDataAsyncInternal(final ServerLevel world, final int chunkX, final int chunkZ,
final RegionFileType type, final BiConsumer<CompoundTag, Throwable> onComplete,
final boolean intendingToBlock, final Priority priority) {
@@ -864,20 +836,6 @@ Cancellable loadDataAsyncInternal(final ServerLevel world, final int chunkX, fin
if (running == null) {
// not scheduled

if (callbackInfo.regionFileCalculation == null) {
// caller will compute this outside of compute(), to avoid holding the bin lock
callbackInfo.needsRegionFileTest = true;
return null;
}

if (callbackInfo.regionFileCalculation == Boolean.FALSE) {
// not on disk
callbackInfo.data = null;
callbackInfo.throwable = null;
callbackInfo.completeNow = true;
return null;
}

// set up task
final ChunkDataTask newTask = new ChunkDataTask(
world, chunkX, chunkZ, taskController, RegionFileIOThread.this, priority
@@ -908,17 +866,7 @@ Cancellable loadDataAsyncInternal(final ServerLevel world, final int chunkX, fin
return running;
};

ChunkDataTask curr = taskController.tasks.get(key);
if (curr == null) {
callbackInfo.regionFileCalculation = doesRegionFileExist(chunkX, chunkZ, intendingToBlock, taskController);
}
ChunkDataTask ret = taskController.tasks.compute(key, compute);
if (callbackInfo.needsRegionFileTest) {
// curr isn't null but when we went into compute() it was
callbackInfo.regionFileCalculation = doesRegionFileExist(chunkX, chunkZ, intendingToBlock, taskController);
// now it should be fine
ret = taskController.tasks.compute(key, compute);
}
final ChunkDataTask ret = taskController.tasks.compute(key, compute);

// needs to be scheduled
if (callbackInfo.tasksNeedsScheduling) {
@@ -975,8 +923,6 @@ private static final class ImmediateCallbackCompletion {
public Throwable throwable;
public boolean completeNow;
public boolean tasksNeedsScheduling;
public boolean needsRegionFileTest;
public Boolean regionFileCalculation;

}

@@ -1043,7 +989,7 @@ private static final class InProgressRead {

private CompoundTag value;
private Throwable throwable;
private MultiThreadedQueue<BiConsumer<CompoundTag, Throwable>> callbacks = new MultiThreadedQueue<>();
private final MultiThreadedQueue<BiConsumer<CompoundTag, Throwable>> callbacks = new MultiThreadedQueue<>();

public boolean hasNoWaiters() {
return this.callbacks.isEmpty();
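
The hunks above remove the doesRegionFileExist() pre-check and the two-pass compute() dance; scheduling now goes through a single atomic tasks.compute() call, and the on-disk existence check is left to the I/O task itself. The sketch below is not from this commit: LoadScheduler, LoadTask and scheduleNow() are invented stand-ins for ChunkDataTask and the region file I/O queue, showing only the schedule-or-attach idiom that a single compute() relies on.

    import java.util.Queue;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.ConcurrentLinkedQueue;
    import java.util.function.BiConsumer;

    // Sketch of the schedule-or-attach idiom behind a single compute() call.
    // LoadTask, its callback queue and scheduleNow() are stand-ins, not the real ChunkDataTask API.
    final class LoadScheduler<K, V> {
        static final class LoadTask<V> {
            final Queue<BiConsumer<V, Throwable>> callbacks = new ConcurrentLinkedQueue<>();
        }

        private final ConcurrentHashMap<K, LoadTask<V>> tasks = new ConcurrentHashMap<>();

        public void load(final K key, final BiConsumer<V, Throwable> onComplete) {
            final boolean[] needsScheduling = new boolean[1];

            final LoadTask<V> task = this.tasks.compute(key, (final K k, LoadTask<V> running) -> {
                if (running == null) {
                    // nothing in flight: install a new task; checking whether the data exists on
                    // disk is deferred to the task itself rather than done under the map bin lock
                    running = new LoadTask<>();
                    needsScheduling[0] = true;
                }
                running.callbacks.add(onComplete);
                return running;
            });

            if (needsScheduling[0]) {
                this.scheduleNow(key, task);
            }
        }

        private void scheduleNow(final K key, final LoadTask<V> task) {
            // placeholder: the real code queues a ChunkDataTask onto the region file I/O executor
        }
    }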
@@ -48,6 +48,7 @@
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Objects;
import java.util.PrimitiveIterator;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
@@ -1361,40 +1362,13 @@ public JsonObject getDebugJson() {
holders.add(holder.getDebugJson());
}

/* TODO
final JsonArray removeTickToChunkExpireTicketCount = new JsonArray();
ret.add("remove_tick_to_chunk_expire_ticket_count", removeTickToChunkExpireTicketCount);
for (final Long2ObjectMap.Entry<Long2IntOpenHashMap> tickEntry : this.removeTickToChunkExpireTicketCount.long2ObjectEntrySet()) {
final long tick = tickEntry.getLongKey();
final Long2IntOpenHashMap coordinateToCount = tickEntry.getValue();
final JsonObject tickJson = new JsonObject();
removeTickToChunkExpireTicketCount.add(tickJson);
tickJson.addProperty("tick", Long.valueOf(tick));
final JsonArray tickEntries = new JsonArray();
tickJson.add("entries", tickEntries);
for (final Long2IntMap.Entry entry : coordinateToCount.long2IntEntrySet()) {
final long coordinate = entry.getLongKey();
final int count = entry.getIntValue();
final JsonObject entryJson = new JsonObject();
tickEntries.add(entryJson);
entryJson.addProperty("chunkX", Long.valueOf(CoordinateUtils.getChunkX(coordinate)));
entryJson.addProperty("chunkZ", Long.valueOf(CoordinateUtils.getChunkZ(coordinate)));
entryJson.addProperty("count", Integer.valueOf(count));
}
}
final JsonArray allTicketsJson = new JsonArray();
ret.add("tickets", allTicketsJson);

for (final Long2ObjectMap.Entry<SortedArraySet<Ticket<?>>> coordinateTickets : this.tickets.long2ObjectEntrySet()) {
final long coordinate = coordinateTickets.getLongKey();
for (final Iterator<ConcurrentLong2ReferenceChainedHashTable.TableEntry<SortedArraySet<Ticket<?>>>> iterator = this.tickets.entryIterator();
iterator.hasNext();) {
final ConcurrentLong2ReferenceChainedHashTable.TableEntry<SortedArraySet<Ticket<?>>> coordinateTickets = iterator.next();
final long coordinate = coordinateTickets.getKey();
final SortedArraySet<Ticket<?>> tickets = coordinateTickets.getValue();

final JsonObject coordinateJson = new JsonObject();
@@ -1406,17 +1380,24 @@ public JsonObject getDebugJson() {
final JsonArray ticketsSerialized = new JsonArray();
coordinateJson.add("tickets", ticketsSerialized);

for (final Ticket<?> ticket : tickets) {
// note: by using a copy of the backing array, we can avoid explicit exceptions we may trip when iterating
// directly over the set using the iterator
// however, it also means we need to null-check the values, and there is a possibility that we _miss_ an
// entry OR iterate over an entry multiple times
for (final Object ticketUncasted : ((ChunkSystemSortedArraySet<Ticket<?>>)tickets).moonrise$copyBackingArray()) {
if (ticketUncasted == null) {
continue;
}
final Ticket<?> ticket = (Ticket<?>)ticketUncasted;
final JsonObject ticketSerialized = new JsonObject();
ticketsSerialized.add(ticketSerialized);

ticketSerialized.addProperty("type", ticket.getType().toString());
ticketSerialized.addProperty("level", Integer.valueOf(ticket.getTicketLevel()));
ticketSerialized.addProperty("identifier", Objects.toString(ticket.key));
ticketSerialized.addProperty("remove_tick", Long.valueOf(ticket.removalTick));
ticketSerialized.addProperty("remove_tick", Long.valueOf(((ChunkSystemTicket<?>)(Object)ticket).moonrise$getRemoveDelay()));
}
}
*/

return ret;
}
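
The rewritten getDebugJson() loop iterates the concurrent ticket table through its weakly consistent entryIterator() and walks each ticket set over a copied backing array, trading exactness (entries may be missed or repeated, null slots must be skipped) for never throwing during a live dump. A rough stand-in for that shape follows; DebugTicket and the map layout are fabricated, only the Gson calls mirror the real code.

    import com.google.gson.JsonArray;
    import com.google.gson.JsonObject;
    import java.util.Objects;
    import java.util.concurrent.ConcurrentHashMap;

    // Rough stand-in for the rewritten debug dump: a weakly consistent walk of a concurrent
    // coordinate -> tickets map, iterating each ticket set over a copied backing array.
    final class TicketDebugDump {
        record DebugTicket(String type, int level, Object identifier, long removeDelay) {}

        static JsonArray dump(final ConcurrentHashMap<Long, Object[]> ticketsByCoordinate) {
            final JsonArray all = new JsonArray();
            for (final var entry : ticketsByCoordinate.entrySet()) { // weakly consistent, never throws
                final JsonObject coordinateJson = new JsonObject();
                all.add(coordinateJson);
                coordinateJson.addProperty("coordinate", entry.getKey());

                final JsonArray serialized = new JsonArray();
                coordinateJson.add("tickets", serialized);

                for (final Object raw : entry.getValue().clone()) { // snapshot of the backing array
                    if (raw == null) {
                        continue; // unused slot in the copied array
                    }
                    final DebugTicket ticket = (DebugTicket) raw;
                    final JsonObject ticketJson = new JsonObject();
                    serialized.add(ticketJson);
                    ticketJson.addProperty("type", ticket.type());
                    ticketJson.addProperty("level", ticket.level());
                    ticketJson.addProperty("identifier", Objects.toString(ticket.identifier()));
                    ticketJson.addProperty("remove_tick", ticket.removeDelay());
                }
            }
            return all;
        }
    }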
@@ -212,21 +212,17 @@ public static int getTicketLevel(final ChunkStatus status) {
}

private static final int[] ACCESS_RADIUS_TABLE = new int[ChunkStatus.getStatusList().size()];
private static final int[] MAX_ACCESS_RADIUS_TABLE = new int[ACCESS_RADIUS_TABLE.length];
static {
Arrays.fill(ACCESS_RADIUS_TABLE, -1);
}

private static int getAccessRadius0(final ChunkStatus genStatus) {
if (genStatus == ChunkStatus.EMPTY) {
return 0;
}

final int radius = Math.max(((ChunkSystemChunkStatus)genStatus).moonrise$getLoadRadius(), genStatus.getRange());
final int radius = genStatus.getRange();
int maxRange = radius;

for (int dist = 1; dist <= radius; ++dist) {
final ChunkStatus requiredNeighbourStatus = ChunkStatus.getStatusAroundFullChunk(ChunkStatus.getDistance(genStatus) + dist);
for (int dist = 0; dist <= radius; ++dist) {
final ChunkStatus requiredNeighbourStatus = dist == 0 ? genStatus.getParent() : ChunkStatus.getStatusAroundFullChunk(ChunkStatus.getDistance(genStatus) + dist);
final int rad = ACCESS_RADIUS_TABLE[requiredNeighbourStatus.getIndex()];
if (rad == -1) {
throw new IllegalStateException();
@@ -238,22 +234,18 @@ private static int getAccessRadius0(final ChunkStatus genStatus) {
return maxRange;
}

private static int maxAccessRadius;
private static final int MAX_ACCESS_RADIUS;

static {
final List<ChunkStatus> statuses = ChunkStatus.getStatusList();
for (int i = 0, len = statuses.size(); i < len; ++i) {
ACCESS_RADIUS_TABLE[i] = getAccessRadius0(statuses.get(i));
}
int max = 0;
for (int i = 0, len = statuses.size(); i < len; ++i) {
MAX_ACCESS_RADIUS_TABLE[i] = max = Math.max(ACCESS_RADIUS_TABLE[i], max);
}
maxAccessRadius = max;
MAX_ACCESS_RADIUS = ACCESS_RADIUS_TABLE[ACCESS_RADIUS_TABLE.length - 1];
}

public static int getMaxAccessRadius() {
return maxAccessRadius;
return MAX_ACCESS_RADIUS;
}

public static int getAccessRadius(final ChunkStatus genStatus) {
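
This last hunk replaces the prefix-maximum MAX_ACCESS_RADIUS_TABLE with a single MAX_ACCESS_RADIUS read from the final table entry, which is valid when the access-radius table is non-decreasing along the status chain (every status consults its parent at dist == 0). Below is a small self-contained demonstration under that assumption; the ranges and the neighbour model are made up, not the real ChunkStatus data.

    import java.util.Arrays;

    // Demonstrates, with made-up ranges, that a table built this way is non-decreasing, so the
    // last entry equals the running maximum the old MAX_ACCESS_RADIUS_TABLE code computed.
    final class MaxAccessRadiusDemo {
        public static void main(final String[] args) {
            final int[] range = {0, 0, 8, 0, 1, 0, 0, 1}; // fabricated per-status generation ranges
            final int[] table = new int[range.length];
            Arrays.fill(table, -1);

            table[0] = 0; // EMPTY requires nothing around it
            for (int i = 1; i < range.length; ++i) {
                int max = range[i];
                // dist == 0: the parent status is required in the same chunk
                max = Math.max(max, table[i - 1]);
                // dist >= 1: neighbours at distance dist must hold an earlier status; as a
                // simplification, the parent status stands in for those neighbour requirements
                for (int dist = 1; dist <= range[i]; ++dist) {
                    max = Math.max(max, dist + table[i - 1]);
                }
                table[i] = max;
            }

            final int lastEntry = table[table.length - 1];
            final int runningMax = Arrays.stream(table).max().getAsInt();
            // the commit assumes lastEntry == runningMax and drops the prefix-maximum table
            System.out.println(Arrays.toString(table) + " last=" + lastEntry + " max=" + runningMax);
        }
    }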
