Heavily reduce memory consumption of AnvilLoader (#1005)

* (Heavily) Reduce memory consumption of AnvilLoader
/!\ requires building and publishing Hephaistos master to the local Maven repository for now

* Cache biome mapping per chunk instead of per section

* Starting work on integration tests

* [AnvilLoader] Address issue #423

* [AnvilLoader] Use new Hephaistos saving API

* Use the Maven Central version of Hephaistos
Xavier Niochaut 2022-08-05 21:05:23 +02:00 committed by GitHub
parent af9dcf258a
commit f62d4e47fc
24 changed files with 510 additions and 112 deletions
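
The bullet points above describe two load-path changes that the sketch below illustrates in isolation: each section's block palette is resolved once and every block then becomes an array lookup into the converted palette, and the biome-name cache lives per chunk instead of per section. This is a plain-Java illustration with stand-in types (BlockLike, the "resolved:" mapping), not the actual Minestom or Hephaistos API; the real implementation is in the AnvilLoader diff below.

```java
import java.util.HashMap;
import java.util.Map;

// Standalone sketch of the two load-path ideas: convert the block palette once per section,
// and share one biome lookup cache across all sections of a chunk. BlockLike and the
// "resolved:" mapping are illustrative stand-ins, not Minestom or Hephaistos types.
final class AnvilLoadSketch {
    record BlockLike(String name) {}

    static BlockLike[] decodeSection(String[] palette, int[] blockStateIndices) {
        BlockLike[] converted = new BlockLike[palette.length];
        for (int i = 0; i < palette.length; i++) {
            converted[i] = new BlockLike(palette[i]); // costly name -> block resolution, once per palette entry
        }
        BlockLike[] blocks = new BlockLike[blockStateIndices.length];
        for (int i = 0; i < blockStateIndices.length; i++) {
            blocks[i] = converted[blockStateIndices[i]]; // per-block cost is a single array lookup
        }
        return blocks;
    }

    public static void main(String[] args) {
        Map<String, String> biomeCache = new HashMap<>(); // one cache per chunk, shared by every section
        for (int sectionY = -4; sectionY < 20; sectionY++) {
            String biome = biomeCache.computeIfAbsent("minecraft:plains", name -> "resolved:" + name);
            int[] indices = new int[16 * 16 * 16]; // 4096 blocks, all pointing at palette entry 0 here
            BlockLike[] blocks = decodeSection(new String[]{"minecraft:air", "minecraft:stone"}, indices);
            System.out.println(sectionY + ": " + biome + ", " + blocks[0].name());
        }
    }
}
```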


@@ -1,13 +1,19 @@
package net.minestom.demo;
import net.minestom.demo.commands.GamemodeCommand;
import net.minestom.demo.commands.SaveCommand;
import net.minestom.server.MinecraftServer;
import net.minestom.server.coordinate.Pos;
import net.minestom.server.entity.Player;
import net.minestom.server.event.GlobalEventHandler;
import net.minestom.server.event.player.PlayerLoginEvent;
import net.minestom.server.instance.AnvilLoader;
import net.minestom.server.instance.InstanceContainer;
import net.minestom.server.instance.InstanceManager;
import net.minestom.server.instance.block.Block;
import net.minestom.server.utils.NamespaceID;
import net.minestom.server.world.biomes.Biome;
import net.minestom.server.world.biomes.BiomeManager;
public class MainDemo {
@@ -15,6 +21,9 @@ public class MainDemo {
// Initialization
MinecraftServer minecraftServer = MinecraftServer.init();
MinecraftServer.getCommandManager().register(new GamemodeCommand());
MinecraftServer.getCommandManager().register(new SaveCommand());
InstanceManager instanceManager = MinecraftServer.getInstanceManager();
// Create the instance
InstanceContainer instanceContainer = instanceManager.createInstanceContainer();


@@ -8,7 +8,7 @@ kotlin = "1.6.20"
hydrazine = "1.7.2"
dependencyGetter = "v1.0.1"
minestomData = "3e211f3953"
hephaistos = "2.4.8"
hephaistos = "2.5.0"
jetbrainsAnnotations = "23.0.0"
# Terminal / Logging


@@ -1,5 +1,8 @@
package net.minestom.server.instance;
import it.unimi.dsi.fastutil.ints.Int2ObjectArrayMap;
import it.unimi.dsi.fastutil.ints.Int2ObjectMap;
import it.unimi.dsi.fastutil.ints.IntIntImmutablePair;
import net.minestom.server.MinecraftServer;
import net.minestom.server.instance.block.Block;
import net.minestom.server.instance.block.BlockHandler;
@@ -9,6 +12,11 @@ import net.minestom.server.world.biomes.Biome;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jglrxavpok.hephaistos.mca.*;
import org.jglrxavpok.hephaistos.mca.readers.ChunkReader;
import org.jglrxavpok.hephaistos.mca.readers.ChunkSectionReader;
import org.jglrxavpok.hephaistos.mca.readers.SectionBiomeInformation;
import org.jglrxavpok.hephaistos.mca.writer.ChunkSectionWriter;
import org.jglrxavpok.hephaistos.mca.writer.ChunkWriter;
import org.jglrxavpok.hephaistos.nbt.*;
import org.jglrxavpok.hephaistos.nbt.mutable.MutableNBTCompound;
import org.slf4j.Logger;
@@ -33,6 +41,21 @@ public class AnvilLoader implements IChunkLoader {
private final Path levelPath;
private final Path regionPath;
private static class RegionCache extends ConcurrentHashMap<IntIntImmutablePair, Set<IntIntImmutablePair>> {}
/**
* Represents the chunks currently loaded per region. Used to determine when a region file can be unloaded.
*/
private final RegionCache perRegionLoadedChunks = new RegionCache();
// thread local to avoid contention issues with locks
private final ThreadLocal<Int2ObjectMap<BlockState>> blockStateId2ObjectCacheTLS = new ThreadLocal<>() {
@Override
protected Int2ObjectMap<BlockState> initialValue() {
return new Int2ObjectArrayMap<>();
}
};
public AnvilLoader(@NotNull Path path) {
this.path = path;
this.levelPath = path.resolve("level.dat");
@@ -76,61 +99,45 @@ public class AnvilLoader implements IChunkLoader {
final RegionFile mcaFile = getMCAFile(instance, chunkX, chunkZ);
if (mcaFile == null)
return CompletableFuture.completedFuture(null);
final ChunkColumn fileChunk = mcaFile.getChunk(chunkX, chunkZ);
if (fileChunk == null)
final NBTCompound chunkData = mcaFile.getChunkData(chunkX, chunkZ);
if (chunkData == null)
return CompletableFuture.completedFuture(null);
final ChunkReader chunkReader = new ChunkReader(chunkData);
Chunk chunk = new DynamicChunk(instance, chunkX, chunkZ);
if(fileChunk.getMinY() < instance.getDimensionType().getMinY()) {
throw new AnvilException(
String.format("Trying to load chunk with minY = %d, but instance dimension type (%s) has a minY of %d",
fileChunk.getMinY(),
instance.getDimensionType().getName().asString(),
instance.getDimensionType().getMinY()
));
}
if(fileChunk.getMaxY() > instance.getDimensionType().getMaxY()) {
throw new AnvilException(
String.format("Trying to load chunk with maxY = %d, but instance dimension type (%s) has a maxY of %d",
fileChunk.getMaxY(),
instance.getDimensionType().getName().asString(),
instance.getDimensionType().getMaxY()
));
}
// TODO: Parallelize block, block entities and biome loading
if (fileChunk.getGenerationStatus().compareTo(ChunkColumn.GenerationStatus.Biomes) > 0) {
HashMap<String, Biome> biomeCache = new HashMap<>();
for (ChunkSection section : fileChunk.getSections().values()) {
if (section.getEmpty()) continue;
for (int y = 0; y < Chunk.CHUNK_SECTION_SIZE; y++) {
for (int z = 0; z < Chunk.CHUNK_SIZE_Z; z++) {
for (int x = 0; x < Chunk.CHUNK_SIZE_X; x++) {
int finalX = fileChunk.getX() * Chunk.CHUNK_SIZE_X + x;
int finalZ = fileChunk.getZ() * Chunk.CHUNK_SIZE_Z + z;
int finalY = section.getY() * Chunk.CHUNK_SECTION_SIZE + y;
String biomeName = section.getBiome(x, y, z);
Biome biome = biomeCache.computeIfAbsent(biomeName, n ->
Objects.requireNonNullElse(MinecraftServer.getBiomeManager().getByName(NamespaceID.from(n)), BIOME));
chunk.setBiome(finalX, finalY, finalZ, biome);
}
}
}
synchronized (chunk) {
var yRange = chunkReader.getYRange();
if(yRange.getStart() < instance.getDimensionType().getMinY()) {
throw new AnvilException(
String.format("Trying to load chunk with minY = %d, but instance dimension type (%s) has a minY of %d",
yRange.getStart(),
instance.getDimensionType().getName().asString(),
instance.getDimensionType().getMinY()
));
}
if(yRange.getEndInclusive() > instance.getDimensionType().getMaxY()) {
throw new AnvilException(
String.format("Trying to load chunk with maxY = %d, but instance dimension type (%s) has a maxY of %d",
yRange.getEndInclusive(),
instance.getDimensionType().getName().asString(),
instance.getDimensionType().getMaxY()
));
}
// TODO: Parallelize block, block entities and biome loading
// Blocks + Biomes
loadSections(chunk, chunkReader);
// Block entities
loadBlockEntities(chunk, chunkReader);
}
// Blocks
loadBlocks(chunk, fileChunk);
loadTileEntities(chunk, fileChunk);
// Lights
for (int sectionY = chunk.getMinSection(); sectionY < chunk.getMaxSection(); sectionY++) {
var section = chunk.getSection(sectionY);
var chunkSection = fileChunk.getSection((byte) sectionY);
section.setSkyLight(chunkSection.getSkyLights());
section.setBlockLight(chunkSection.getBlockLights());
}
mcaFile.forget(fileChunk);
synchronized (perRegionLoadedChunks) {
int regionX = CoordinatesKt.chunkToRegion(chunkX);
int regionZ = CoordinatesKt.chunkToRegion(chunkZ);
var chunks = perRegionLoadedChunks.computeIfAbsent(new IntIntImmutablePair(regionX, regionZ), r -> new HashSet<>()); // region cache may have been removed on another thread due to unloadChunk
chunks.add(new IntIntImmutablePair(chunkX, chunkZ));
};
return CompletableFuture.completedFuture(chunk);
}
@@ -143,6 +150,10 @@ public class AnvilLoader implements IChunkLoader {
if (!Files.exists(regionPath)) {
return null;
}
synchronized (perRegionLoadedChunks) {
Set<IntIntImmutablePair> previousVersion = perRegionLoadedChunks.put(new IntIntImmutablePair(regionX, regionZ), new HashSet<>());
assert previousVersion == null : "The AnvilLoader cache should not already have data for this region.";
};
return new RegionFile(new RandomAccessFile(regionPath.toFile(), "rw"), regionX, regionZ, instance.getDimensionType().getMinY(), instance.getDimensionType().getMaxY()-1);
} catch (IOException | AnvilException e) {
MinecraftServer.getExceptionManager().handleException(e);
@@ -151,28 +162,111 @@ public class AnvilLoader implements IChunkLoader {
});
}
private void loadBlocks(Chunk chunk, ChunkColumn fileChunk) {
for (var section : fileChunk.getSections().values()) {
if (section.getEmpty()) continue;
final int yOffset = Chunk.CHUNK_SECTION_SIZE * section.getY();
for (int x = 0; x < Chunk.CHUNK_SECTION_SIZE; x++) {
for (int z = 0; z < Chunk.CHUNK_SECTION_SIZE; z++) {
for (int y = 0; y < Chunk.CHUNK_SECTION_SIZE; y++) {
try {
final BlockState blockState = section.get(x, y, z);
final String blockName = blockState.getName();
if (blockName.equals("minecraft:air")) continue;
Block block = Objects.requireNonNull(Block.fromNamespaceId(blockName));
// Properties
final Map<String, String> properties = blockState.getProperties();
if (!properties.isEmpty()) block = block.withProperties(properties);
// Handler
final BlockHandler handler = MinecraftServer.getBlockManager().getHandler(block.name());
if (handler != null) block = block.withHandler(handler);
private void loadSections(Chunk chunk, ChunkReader chunkReader) {
final HashMap<String, Biome> biomeCache = new HashMap<>();
for (var sectionNBT : chunkReader.getSections()) {
ChunkSectionReader sectionReader = new ChunkSectionReader(chunkReader.getMinecraftVersion(), sectionNBT);
Section section = chunk.getSection(sectionReader.getY());
chunk.setBlock(x, y + yOffset, z, block);
} catch (Exception e) {
MinecraftServer.getExceptionManager().handleException(e);
if(sectionReader.getSkyLight() != null) {
section.setSkyLight(sectionReader.getSkyLight().copyArray());
}
if(sectionReader.getBlockLight() != null) {
section.setBlockLight(sectionReader.getBlockLight().copyArray());
}
if (sectionReader.isSectionEmpty()) continue;
final int sectionY = sectionReader.getY();
final int yOffset = Chunk.CHUNK_SECTION_SIZE * sectionY;
// Biomes
if(chunkReader.getGenerationStatus().compareTo(ChunkColumn.GenerationStatus.Biomes) > 0) {
SectionBiomeInformation sectionBiomeInformation = chunkReader.readSectionBiomes(sectionReader);
if(sectionBiomeInformation != null && sectionBiomeInformation.hasBiomeInformation()) {
if(sectionBiomeInformation.isFilledWithSingleBiome()) {
for (int y = 0; y < Chunk.CHUNK_SECTION_SIZE; y++) {
for (int z = 0; z < Chunk.CHUNK_SIZE_Z; z++) {
for (int x = 0; x < Chunk.CHUNK_SIZE_X; x++) {
int finalX = chunk.chunkX * Chunk.CHUNK_SIZE_X + x;
int finalZ = chunk.chunkZ * Chunk.CHUNK_SIZE_Z + z;
int finalY = sectionY * Chunk.CHUNK_SECTION_SIZE + y;
String biomeName = sectionBiomeInformation.getBaseBiome();
Biome biome = biomeCache.computeIfAbsent(biomeName, n ->
Objects.requireNonNullElse(MinecraftServer.getBiomeManager().getByName(NamespaceID.from(n)), BIOME));
chunk.setBiome(finalX, finalY, finalZ, biome);
}
}
}
} else {
for (int y = 0; y < Chunk.CHUNK_SECTION_SIZE; y++) {
for (int z = 0; z < Chunk.CHUNK_SIZE_Z; z++) {
for (int x = 0; x < Chunk.CHUNK_SIZE_X; x++) {
int finalX = chunk.chunkX * Chunk.CHUNK_SIZE_X + x;
int finalZ = chunk.chunkZ * Chunk.CHUNK_SIZE_Z + z;
int finalY = sectionY * Chunk.CHUNK_SECTION_SIZE + y;
int index = x/4 + (z/4) * 4 + (y/4) * 16;
String biomeName = sectionBiomeInformation.getBiomes()[index];
Biome biome = biomeCache.computeIfAbsent(biomeName, n ->
Objects.requireNonNullElse(MinecraftServer.getBiomeManager().getByName(NamespaceID.from(n)), BIOME));
chunk.setBiome(finalX, finalY, finalZ, biome);
}
}
}
}
}
}
// Blocks
final NBTList<NBTCompound> blockPalette = sectionReader.getBlockPalette();
if(blockPalette != null) {
int[] blockStateIndices = sectionReader.getUncompressedBlockStateIDs();
Block[] convertedPalette = new Block[blockPalette.getSize()];
for (int i = 0; i < convertedPalette.length; i++) {
final NBTCompound paletteEntry = blockPalette.get(i);
String blockName = Objects.requireNonNull(paletteEntry.getString("Name"));
if (blockName.equals("minecraft:air")) {
convertedPalette[i] = Block.AIR;
} else {
Block block = Objects.requireNonNull(Block.fromNamespaceId(blockName));
// Properties
final Map<String, String> properties = new HashMap<>();
NBTCompound propertiesNBT = paletteEntry.getCompound("Properties");
if (propertiesNBT != null) {
for (var property : propertiesNBT) {
if (property.getValue().getID() != NBTType.TAG_String) {
LOGGER.warn("Fail to parse block state properties {}, expected a TAG_String for {}, but contents were {}",
propertiesNBT,
property.getKey(),
property.getValue().toSNBT());
} else {
properties.put(property.getKey(), ((NBTString) property.getValue()).getValue());
}
}
}
if (!properties.isEmpty()) block = block.withProperties(properties);
// Handler
final BlockHandler handler = MinecraftServer.getBlockManager().getHandler(block.name());
if (handler != null) block = block.withHandler(handler);
convertedPalette[i] = block;
}
}
for (int y = 0; y < Chunk.CHUNK_SECTION_SIZE; y++) {
for (int z = 0; z < Chunk.CHUNK_SECTION_SIZE; z++) {
for (int x = 0; x < Chunk.CHUNK_SECTION_SIZE; x++) {
try {
int blockIndex = y * Chunk.CHUNK_SECTION_SIZE * Chunk.CHUNK_SECTION_SIZE + z * Chunk.CHUNK_SECTION_SIZE + x;
int paletteIndex = blockStateIndices[blockIndex];
Block block = convertedPalette[paletteIndex];
chunk.setBlock(x, y + yOffset, z, block);
} catch (Exception e) {
MinecraftServer.getExceptionManager().handleException(e);
}
}
}
}
@@ -180,8 +274,8 @@ public class AnvilLoader implements IChunkLoader {
}
}
private void loadTileEntities(Chunk loadedChunk, ChunkColumn fileChunk) {
for (NBTCompound te : fileChunk.getTileEntities()) {
private void loadBlockEntities(Chunk loadedChunk, ChunkReader chunkReader) {
for (NBTCompound te : chunkReader.getBlockEntities()) {
final var x = te.getInt("x");
final var y = te.getInt("y");
final var z = te.getInt("z");
@@ -253,19 +347,11 @@ public class AnvilLoader implements IChunkLoader {
}
}
}
ChunkColumn column;
try {
column = mcaFile.getOrCreateChunk(chunkX, chunkZ);
} catch (AnvilException | IOException e) {
LOGGER.error("Failed to save chunk " + chunkX + ", " + chunkZ, e);
MinecraftServer.getExceptionManager().handleException(e);
return AsyncUtils.VOID_FUTURE;
}
save(chunk, column);
ChunkWriter writer = new ChunkWriter(SupportedVersion.Companion.getLatest());
save(chunk, writer);
try {
LOGGER.debug("Attempt saving at {} {}", chunk.getChunkX(), chunk.getChunkZ());
mcaFile.writeColumn(column);
mcaFile.forget(column);
mcaFile.writeColumnData(writer.toNBT(), chunk.getChunkX(), chunk.getChunkZ());
} catch (IOException e) {
LOGGER.error("Failed to save chunk " + chunkX + ", " + chunkZ, e);
MinecraftServer.getExceptionManager().handleException(e);
@@ -274,39 +360,112 @@ public class AnvilLoader implements IChunkLoader {
return AsyncUtils.VOID_FUTURE;
}
private void save(Chunk chunk, ChunkColumn chunkColumn) {
chunkColumn.changeVersion(SupportedVersion.Companion.getLatest());
chunkColumn.setYRange(chunk.getMinSection()*16, chunk.getMaxSection()*16-1);
List<NBTCompound> tileEntities = new ArrayList<>();
chunkColumn.setGenerationStatus(ChunkColumn.GenerationStatus.Full);
for (int x = 0; x < Chunk.CHUNK_SIZE_X; x++) {
for (int z = 0; z < Chunk.CHUNK_SIZE_Z; z++) {
for (int y = chunkColumn.getMinY(); y < chunkColumn.getMaxY(); y++) {
final Block block = chunk.getBlock(x, y, z);
// Block
chunkColumn.setBlockState(x, y, z, new BlockState(block.name(), block.properties()));
chunkColumn.setBiome(x, y, z, chunk.getBiome(x, y, z).name().asString());
private BlockState getBlockState(final Block block) {
return blockStateId2ObjectCacheTLS.get().computeIfAbsent(block.stateId(), _unused -> new BlockState(block.name(), block.properties()));
}
// Tile entity
final BlockHandler handler = block.handler();
var originalNBT = block.nbt();
if (originalNBT != null || handler != null) {
MutableNBTCompound nbt = originalNBT != null ?
originalNBT.toMutableCompound() : new MutableNBTCompound();
private void save(Chunk chunk, ChunkWriter chunkWriter) {
final int minY = chunk.getMinSection()*Chunk.CHUNK_SECTION_SIZE;
final int maxY = chunk.getMaxSection()*Chunk.CHUNK_SECTION_SIZE -1;
chunkWriter.setYPos(minY);
List<NBTCompound> blockEntities = new ArrayList<>();
chunkWriter.setStatus(ChunkColumn.GenerationStatus.Full);
if (handler != null) {
nbt.setString("id", handler.getNamespaceId().asString());
List<NBTCompound> sectionData = new ArrayList<>((maxY - minY + 1) / Chunk.CHUNK_SECTION_SIZE);
int[] palettedBiomes = new int[ChunkSection.Companion.getBiomeArraySize()];
int[] palettedBlockStates = new int[Chunk.CHUNK_SIZE_X * Chunk.CHUNK_SECTION_SIZE * Chunk.CHUNK_SIZE_Z];
for (int sectionY = chunk.getMinSection(); sectionY < chunk.getMaxSection(); sectionY++) {
ChunkSectionWriter sectionWriter = new ChunkSectionWriter(SupportedVersion.Companion.getLatest(), (byte)sectionY);
Section section = chunk.getSection(sectionY);
sectionWriter.setSkyLights(section.getSkyLight());
sectionWriter.setBlockLights(section.getBlockLight());
BiomePalette biomePalette = new BiomePalette();
BlockPalette blockPalette = new BlockPalette();
for (int sectionLocalY = 0; sectionLocalY < Chunk.CHUNK_SECTION_SIZE; sectionLocalY++) {
for (int z = 0; z < Chunk.CHUNK_SIZE_Z; z++) {
for (int x = 0; x < Chunk.CHUNK_SIZE_X; x++) {
final int y = sectionLocalY + sectionY * Chunk.CHUNK_SECTION_SIZE;
int blockIndex = x + sectionLocalY * 16 * 16 + z * 16;
final Block block = chunk.getBlock(x, y, z);
final BlockState hephaistosBlockState = getBlockState(block);
blockPalette.increaseReference(hephaistosBlockState);
palettedBlockStates[blockIndex] = blockPalette.getPaletteIndex(hephaistosBlockState);
// biomes are stored per 4x4x4 volume, avoid unnecessary work
if(x % 4 == 0 && sectionLocalY % 4 == 0 && z % 4 == 0) {
int biomeIndex = (x/4) + (sectionLocalY/4) * 4 * 4 + (z/4) * 4;
final Biome biome = chunk.getBiome(x, y, z);
final String biomeName = biome.name().asString();
biomePalette.increaseReference(biomeName);
palettedBiomes[biomeIndex] = biomePalette.getPaletteIndex(biomeName);
}
// Block entities
final BlockHandler handler = block.handler();
var originalNBT = block.nbt();
if (originalNBT != null || handler != null) {
MutableNBTCompound nbt = originalNBT != null ?
originalNBT.toMutableCompound() : new MutableNBTCompound();
if (handler != null) {
nbt.setString("id", handler.getNamespaceId().asString());
}
nbt.setInt("x", x + Chunk.CHUNK_SIZE_X * chunk.getChunkX());
nbt.setInt("y", y);
nbt.setInt("z", z + Chunk.CHUNK_SIZE_Z * chunk.getChunkZ());
nbt.setByte("keepPacked", (byte) 0);
blockEntities.add(nbt.toCompound());
}
nbt.setInt("x", x + Chunk.CHUNK_SIZE_X * chunk.getChunkX());
nbt.setInt("y", y);
nbt.setInt("z", z + Chunk.CHUNK_SIZE_Z * chunk.getChunkZ());
nbt.setByte("keepPacked", (byte) 0);
tileEntities.add(nbt.toCompound());
}
}
}
sectionWriter.setPalettedBiomes(biomePalette, palettedBiomes);
sectionWriter.setPalettedBlockStates(blockPalette, palettedBlockStates);
sectionData.add(sectionWriter.toNBT());
}
chunkColumn.setTileEntities(NBT.List(NBTType.TAG_Compound, tileEntities));
chunkWriter.setSectionsData(NBT.List(NBTType.TAG_Compound, sectionData));
chunkWriter.setBlockEntityData(NBT.List(NBTType.TAG_Compound, blockEntities));
}
/**
* Unload a given chunk. Also unloads a region when no chunk from that region is loaded.
* @param chunk the chunk to unload
*/
@Override
public void unloadChunk(Chunk chunk) {
final int regionX = CoordinatesKt.chunkToRegion(chunk.chunkX);
final int regionZ = CoordinatesKt.chunkToRegion(chunk.chunkZ);
final IntIntImmutablePair regionKey = new IntIntImmutablePair(regionX, regionZ);
synchronized (perRegionLoadedChunks) {
Set<IntIntImmutablePair> chunks = perRegionLoadedChunks.get(regionKey);
if(chunks != null) { // if null, trying to unload a chunk from a region that was not created by the AnvilLoader
// don't check return value, trying to unload a chunk not created by the AnvilLoader is valid
chunks.remove(new IntIntImmutablePair(chunk.chunkX, chunk.chunkZ));
if(chunks.isEmpty()) {
perRegionLoadedChunks.remove(regionKey);
RegionFile regionFile = alreadyLoaded.remove(RegionFile.Companion.createFileName(regionX, regionZ));
if(regionFile != null) {
try {
regionFile.close();
} catch (IOException e) {
MinecraftServer.getExceptionManager().handleException(e);
}
}
}
}
};
}
@Override
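
A note on the biome handling in save() above: the Anvil format palettes biomes per 4x4x4 cell, so a 16x16x16 section carries only 4 * 4 * 4 = 64 biome entries, and the biomeIndex expression maps a block position to its cell. A quick, self-contained check of that arithmetic (hypothetical coordinates, same formula as in the diff):

```java
public final class BiomeIndexCheck {
    public static void main(String[] args) {
        // Anvil stores biomes per 4x4x4 cell: one 16x16x16 section holds 4 * 4 * 4 = 64 entries.
        int x = 5, sectionLocalY = 9, z = 14; // block position inside the section
        int biomeIndex = (x / 4) + (sectionLocalY / 4) * 4 * 4 + (z / 4) * 4;
        System.out.println(biomeIndex); // cell (1, 2, 3) -> 1 + 2*16 + 3*4 = 45
    }
}
```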


@@ -101,4 +101,13 @@ public interface IChunkLoader {
default boolean supportsParallelLoading() {
return false;
}
/**
* Called when a chunk is unloaded, so that this chunk loader can unload any resource it is holding.
* Note: Minestom currently has no way to determine whether the chunk comes from this loader, so you may get
* unload requests for chunks not created by the loader.
*
* @param chunk the chunk to unload
*/
default void unloadChunk(Chunk chunk) {}
}
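
The new IChunkLoader#unloadChunk default hook lets a loader release whatever it holds for a chunk; AnvilLoader uses it above to close a region file once the last chunk of that region is unloaded. A minimal sketch of building on the hook (it assumes only the AnvilLoader(Path) constructor and the unloadChunk(Chunk) signature visible in this diff; the counter itself is illustrative):

```java
import java.nio.file.Path;
import java.util.concurrent.atomic.AtomicInteger;

import net.minestom.server.instance.AnvilLoader;
import net.minestom.server.instance.Chunk;

// Sketch only: layers a counter on top of the unload behaviour added in this commit.
final class CountingAnvilLoader extends AnvilLoader {
    private final AtomicInteger unloaded = new AtomicInteger();

    CountingAnvilLoader(Path worldFolder) {
        super(worldFolder);
    }

    @Override
    public void unloadChunk(Chunk chunk) {
        super.unloadChunk(chunk); // AnvilLoader closes the region file once its last loaded chunk is gone
        unloaded.incrementAndGet();
    }

    int unloadedChunks() {
        return unloaded.get();
    }
}
```

Such a loader would plug in wherever an IChunkLoader is accepted, for example through the createFlatInstance(IChunkLoader) helper added to the test Env below.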


@@ -228,6 +228,9 @@ public class InstanceContainer extends Instance {
// Clear cache
this.chunks.remove(getChunkIndex(chunkX, chunkZ));
chunk.unload();
if(chunkLoader != null) {
chunkLoader.unloadChunk(chunk);
}
var dispatcher = MinecraftServer.process().dispatcher();
dispatcher.deletePartition(chunk);
}


@@ -5,6 +5,7 @@ import net.minestom.server.coordinate.Pos;
import net.minestom.server.entity.Player;
import net.minestom.server.event.Event;
import net.minestom.server.event.EventFilter;
import net.minestom.server.instance.IChunkLoader;
import net.minestom.server.instance.Instance;
import net.minestom.server.instance.block.Block;
import org.jetbrains.annotations.NotNull;
@@ -43,8 +44,16 @@ public interface Env {
}
default @NotNull Instance createFlatInstance() {
var instance = process().instance().createInstanceContainer();
return createFlatInstance(null);
}
default @NotNull Instance createFlatInstance(IChunkLoader chunkLoader) {
var instance = process().instance().createInstanceContainer(chunkLoader);
instance.setGenerator(unit -> unit.modifier().fillHeight(0, 40, Block.STONE));
return instance;
}
default void destroyInstance(Instance instance) {
process().instance().unregisterInstance(instance);
}
}


@@ -0,0 +1,209 @@
package net.minestom.server.instance;
import net.minestom.server.api.Env;
import net.minestom.server.api.EnvTest;
import net.minestom.server.instance.block.Block;
import net.minestom.server.utils.NamespaceID;
import net.minestom.server.utils.binary.BinaryWriter;
import net.minestom.server.world.biomes.Biome;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.nio.file.*;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.function.Consumer;
import static org.junit.jupiter.api.Assertions.assertEquals;
@EnvTest
public class AnvilLoaderIntegrationTest {
private static final Path testRoot = Path.of("src", "test", "resources", "net", "minestom", "server", "instance", "anvil_loader");
private static final Path worldFolder = Path.of("integration_test_world");
@BeforeAll
public static void prepareTest() throws IOException {
// https://stackoverflow.com/a/60621544
Files.walkFileTree(testRoot, new SimpleFileVisitor<>() {
@Override
public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
throws IOException {
Files.createDirectories(worldFolder.resolve(testRoot.relativize(dir)));
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
throws IOException {
Files.copy(file, worldFolder.resolve(testRoot.relativize(file)), StandardCopyOption.REPLACE_EXISTING);
return FileVisitResult.CONTINUE;
}
});
}
@Test
public void loadHouse(Env env) {
// load a world that contains only a basic house and make sure it is loaded properly
AnvilLoader chunkLoader = new AnvilLoader(worldFolder) {
// Force loads inside current thread
@Override
public boolean supportsParallelLoading() {
return false;
}
@Override
public boolean supportsParallelSaving() {
return false;
}
};
Instance instance = env.createFlatInstance(chunkLoader);
Consumer<Chunk> checkChunk = chunk -> {
synchronized (chunk) {
assertEquals(-4, chunk.getMinSection());
assertEquals(20, chunk.getMaxSection());
// TODO: skylight
// TODO: block light
for (int y = 0; y < 16; y++) {
for (int x = 0; x < 16; x++) {
for (int z = 0; z < 16; z++) {
Biome b = chunk.getBiome(x, y, z);
assertEquals(NamespaceID.from("minecraft:plains"), b.name());
}
}
}
}
};
for (int x = -2; x < 2; x++) {
for (int z = -2; z < 2; z++) {
checkChunk.accept(instance.loadChunk(x, z).join()); // this is a test so we don't care too much about waiting for each chunk
}
}
// wooden house with nylium ground. Open the world in MC to check it out
// center of world
assertEquals(Block.BEDROCK, instance.getBlock(0, 0, 0));
// nylium stripes in front and back of house
for (int z = -4; z <= 0; z++) {
assertEquals(Block.WARPED_NYLIUM, instance.getBlock(4, 0, z));
assertEquals(Block.WARPED_NYLIUM, instance.getBlock(-3, 0, z));
assertEquals(Block.WARPED_NYLIUM, instance.getBlock(-4, 0, z));
}
// side walls
for (int x = -2; x <= 3; x++) {
if(x != 0) { // bedrock block at center
assertEquals(Block.NETHERRACK, instance.getBlock(x, 0, 0));
}
assertEquals(Block.NETHERRACK, instance.getBlock(x, 0, -4));
assertEquals(Block.OAK_PLANKS, instance.getBlock(x, 1, 0));
assertEquals(Block.OAK_PLANKS, instance.getBlock(x, 1, -4));
assertEquals(Block.OAK_PLANKS, instance.getBlock(x, 2, 0));
assertEquals(Block.OAK_PLANKS, instance.getBlock(x, 2, -4));
}
// back wall
for (int z = -4; z <= 0; z++) {
assertEquals(Block.NETHERRACK, instance.getBlock(-2, 0, z));
assertEquals(Block.OAK_PLANKS, instance.getBlock(-2, 1, z));
assertEquals(Block.OAK_PLANKS, instance.getBlock(-2, 2, z));
}
// door
Block baseDoor = Block.ACACIA_DOOR
.withProperty("facing", "west")
.withProperty("hinge", "left")
.withProperty("open", "false")
.withProperty("powered", "false")
;
Block bottomDoorPart = baseDoor.withProperty("half", "lower");
Block topDoorPart = baseDoor.withProperty("half", "upper");
assertEquals(bottomDoorPart, instance.getBlock(3, 1, -3));
assertEquals(topDoorPart, instance.getBlock(3, 2, -3));
// light blocks
Block endRod = Block.END_ROD.withProperty("facing", "up");
assertEquals(endRod, instance.getBlock(-1, 1, -1));
assertEquals(Block.TORCH, instance.getBlock(-1, 2, -1));
// flower pot
assertEquals(Block.OAK_PLANKS, instance.getBlock(-1, 1, -3));
assertEquals(Block.POTTED_POPPY, instance.getBlock(-1, 2, -3));
env.destroyInstance(instance);
}
@Test
public void loadAndSaveChunk(Env env) throws InterruptedException {
Instance instance = env.createFlatInstance(new AnvilLoader(worldFolder) {
// Force loads inside current thread
@Override
public boolean supportsParallelLoading() {
return false;
}
@Override
public boolean supportsParallelSaving() {
return false;
}
});
Chunk originalChunk = instance.loadChunk(0,0).join();
synchronized (originalChunk) {
instance.saveChunkToStorage(originalChunk);
instance.unloadChunk(originalChunk);
while(originalChunk.isLoaded()) {
Thread.sleep(1);
}
}
Chunk reloadedChunk = instance.loadChunk(0,0).join();
for(int section = reloadedChunk.getMinSection(); section < reloadedChunk.getMaxSection(); section++) {
Section originalSection = originalChunk.getSection(section);
Section reloadedSection = reloadedChunk.getSection(section);
// easiest equality check to write is a memory compare on written output
BinaryWriter originalWriter = new BinaryWriter();
BinaryWriter reloadedWriter = new BinaryWriter();
originalSection.write(originalWriter);
reloadedSection.write(reloadedWriter);
Assertions.assertArrayEquals(originalWriter.toByteArray(), reloadedWriter.toByteArray());
}
env.destroyInstance(instance);
}
@AfterAll
public static void cleanupTest() throws IOException {
Files.walkFileTree(worldFolder, new SimpleFileVisitor<>() {
@Override
public FileVisitResult postVisitDirectory(Path dir, IOException e)
throws IOException {
Files.delete(dir);
return FileVisitResult.CONTINUE;
}
@Override
public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
throws IOException {
Files.delete(file);
return FileVisitResult.CONTINUE;
}
});
}
}