Apply lighting workaround to 1.16.5+

This commit is contained in:
Mike Primm 2021-12-16 12:32:10 -06:00
parent eff72aeef0
commit 69a6bb2a2a
7 changed files with 24 additions and 155 deletions

View File

@@ -910,12 +910,16 @@ public abstract class GenericMapChunkCache extends MapChunkCache {
}
if (nbt == null) return null;
String status = nbt.getString("Status");
boolean hasLight = false;
int version = nbt.getInt("DataVersion");
boolean hasLitState = false;
if (status != null) {
for (int i = 0; i < litStates.length; i++) {
if (status.equals(litStates[i])) hasLight = true;
if (status.equals(litStates[i])) { hasLitState = true; }
}
}
boolean hasLight = hasLitState; // Assume good light in a lit state
// Start generic chunk builder
GenericChunk.Builder bld = new GenericChunk.Builder(dw.minY, dw.worldheight);
int x = nbt.getInt("xPos");
@@ -958,7 +962,7 @@ public abstract class GenericMapChunkCache extends MapChunkCache {
// Prescan sections to see if lit
for (int i = 0; i < sect.size(); i++) {
GenericNBTCompound sec = sect.getCompound(i);
if (sec.contains("BlockLight") || sec.contains("SkyLight")) {
if (sec.contains("SkyLight")) { // Only consider skylight for now, since that is what we generate if needed
hasLight = true;
}
}
@@ -1074,9 +1078,6 @@ public abstract class GenericMapChunkCache extends MapChunkCache {
}
if (sec.contains("SkyLight")) {
sbld.skyLight(sec.getByteArray("SkyLight"));
}
else if (!hasLight) {
sbld.singleSkyLight(15);
}
// If section biome palette
if (sec.contains("biomes")) {
@@ -1110,8 +1111,13 @@ public abstract class GenericMapChunkCache extends MapChunkCache {
bld.addSection(secnum, sbld.build());
sbld.reset();
}
// If pre 1.17, assume unlit state means bad light
if ((version < 2724) && (!hasLitState)) {
hasLight = false;
}
// If no light, do simple generate
if (!hasLight) {
Log.info(String.format("generateSky(%d,%d)", x, z));
bld.generateSky();
}
return bld.build();

View File

@@ -25,20 +25,6 @@ public class MapChunkCache116_4 extends GenericMapChunkCache {
super(cc);
init();
}
private boolean isLitChunk(NBTTagCompound nbt) {
if ((nbt != null) && nbt.hasKey("Level")) {
nbt = nbt.getCompound("Level");
}
if (nbt != null) {
String stat = nbt.getString("Status");
ChunkStatus cs = ChunkStatus.a(stat);
if ((stat != null) && cs.b(ChunkStatus.LIGHT)) { // ChunkStatus.LIGHT
return true;
}
}
return false;
}
// Load generic chunk from existing and already loaded chunk
protected GenericChunk getLoadedChunk(DynmapChunk chunk) {
@@ -50,9 +36,6 @@ public class MapChunkCache116_4 extends GenericMapChunkCache {
if ((c != null) && c.loaded) {
nbt = ChunkRegionLoader.saveChunk(cw.getHandle(), c);
}
if (!isLitChunk(nbt)) {
nbt = null;
}
if (nbt != null) {
gc = parseChunkFromNBT(new NBT.NBTCompound(nbt));
}
@@ -70,9 +53,6 @@ public class MapChunkCache116_4 extends GenericMapChunkCache {
nbt = cw.getHandle().getChunkProvider().playerChunkMap.read(cc);
} catch (IOException iox) {
}
if (!isLitChunk(nbt)) {
nbt = null;
}
if (nbt != null) {
gc = parseChunkFromNBT(new NBT.NBTCompound(nbt));
}

View File

@@ -29,20 +29,6 @@ public class MapChunkCache117 extends GenericMapChunkCache {
super(cc);
init();
}
private boolean isLitChunk(NBTTagCompound nbt) {
if ((nbt != null) && nbt.hasKey("Level")) {
nbt = nbt.getCompound("Level");
}
if (nbt != null) {
String stat = nbt.getString("Status");
ChunkStatus cs = ChunkStatus.a(stat);
if ((stat != null) && cs.b(ChunkStatus.l)) { // ChunkStatus.LIGHT
return true;
}
}
return false;
}
// Load generic chunk from existing and already loaded chunk
protected GenericChunk getLoadedChunk(DynmapChunk chunk) {
@@ -54,9 +40,6 @@ public class MapChunkCache117 extends GenericMapChunkCache {
if ((c != null) && c.h) { // c.loaded
nbt = ChunkRegionLoader.saveChunk(cw.getHandle(), c);
}
if (!isLitChunk(nbt)) {
nbt = null;
}
if (nbt != null) {
gc = parseChunkFromNBT(new NBT.NBTCompound(nbt));
}
@@ -74,9 +57,6 @@ public class MapChunkCache117 extends GenericMapChunkCache {
nbt = cw.getHandle().getChunkProvider().a.read(cc); // playerChunkMap
} catch (IOException iox) {
}
if (!isLitChunk(nbt)) {
nbt = null;
}
if (nbt != null) {
gc = parseChunkFromNBT(new NBT.NBTCompound(nbt));
}

View File

@@ -50,31 +50,13 @@ public class FabricMapChunkCache extends GenericMapChunkCache {
ThreadedAnvilChunkStorage acl = cps.threadedAnvilChunkStorage;
ChunkPos coord = new ChunkPos(x, z);
CompoundTag rslt = acl.getNbt(coord);
if (!isLitChunk(rslt)) {
rslt = null;
}
return rslt;
return acl.getNbt(coord);
} catch (Exception exc) {
Log.severe(String.format("Error reading chunk: %s,%d,%d", dw.getName(), x, z), exc);
return null;
}
}
private boolean isLitChunk(CompoundTag nbt) {
if ((nbt != null) && nbt.contains("Level")) {
nbt = nbt.getCompound("Level");
}
if (nbt != null) {
String stat = nbt.getString("Status");
ChunkStatus cs = ChunkStatus.byId(stat);
if ((stat != null) && cs.isAtLeast(ChunkStatus.LIGHT)) { // ChunkStatus.LIGHT
return true;
}
}
return false;
}
// Load generic chunk from existing and already loaded chunk
protected GenericChunk getLoadedChunk(DynmapChunk chunk) {
GenericChunk gc = null;
@@ -86,7 +68,7 @@ public class FabricMapChunkCache extends GenericMapChunkCache {
// TODO: find out why this is happening and why it only seems to happen since 1.16.2
Log.severe("ChunkSerializer.serialize threw a NullPointerException", e);
}
if (isLitChunk(nbt)) {
if (nbt != null) {
gc = parseChunkFromNBT(new NBT.NBTCompound(nbt));
}
}
@@ -98,7 +80,7 @@ public class FabricMapChunkCache extends GenericMapChunkCache {
GenericChunk gc = null;
CompoundTag nbt = readChunk(chunk.x, chunk.z);
// If read was good
if (isLitChunk(nbt)) {
if (nbt != null) {
gc = parseChunkFromNBT(new NBT.NBTCompound(nbt));
}
return gc;

View File

@@ -48,39 +48,13 @@ public class FabricMapChunkCache extends GenericMapChunkCache {
ThreadedAnvilChunkStorage acl = cps.threadedAnvilChunkStorage;
ChunkPos coord = new ChunkPos(x, z);
NbtCompound rslt = acl.getNbt(coord);
if (rslt != null) {
// Don't load uncooked chunks
String stat = rslt.getString("Status");
ChunkStatus cs = ChunkStatus.byId(stat);
if ((stat == null) ||
// Needs to be at least lighted
(!cs.isAtLeast(ChunkStatus.LIGHT))) {
rslt = null;
}
}
//Log.info(String.format("loadChunk(%d,%d)=%s", x, z, (rslt != null) ? rslt.toString() : "null"));
return rslt;
return acl.getNbt(coord);
} catch (Exception exc) {
Log.severe(String.format("Error reading chunk: %s,%d,%d", dw.getName(), x, z), exc);
return null;
}
}
private boolean isLitChunk(NbtCompound nbt) {
if ((nbt != null) && nbt.contains("Level")) {
nbt = nbt.getCompound("Level");
}
if (nbt != null) {
String stat = nbt.getString("Status");
ChunkStatus cs = ChunkStatus.byId(stat);
if ((stat != null) && cs.isAtLeast(ChunkStatus.LIGHT)) { // ChunkStatus.LIGHT
return true;
}
}
return false;
}
// Load generic chunk from existing and already loaded chunk
protected GenericChunk getLoadedChunk(DynmapChunk chunk) {
GenericChunk gc = null;
@@ -92,7 +66,7 @@ public class FabricMapChunkCache extends GenericMapChunkCache {
// TODO: find out why this is happening and why it only seems to happen since 1.16.2
Log.severe("ChunkSerializer.serialize threw a NullPointerException", e);
}
if (isLitChunk(nbt)) {
if (nbt != null) {
gc = parseChunkFromNBT(new NBT.NBTCompound(nbt));
}
}
@@ -104,7 +78,7 @@ public class FabricMapChunkCache extends GenericMapChunkCache {
GenericChunk gc = null;
NbtCompound nbt = readChunk(chunk.x, chunk.z);
// If read was good
if (isLitChunk(nbt)) {
if (nbt != null) {
gc = parseChunkFromNBT(new NBT.NBTCompound(nbt));
}
return gc;

View File

@@ -31,27 +31,13 @@ public class ForgeMapChunkCache extends GenericMapChunkCache {
init();
}
private boolean isLitChunk(CompoundNBT nbt) {
if ((nbt != null) && nbt.contains("Level")) {
nbt = nbt.getCompound("Level");
}
if (nbt != null) {
String stat = nbt.getString("Status");
ChunkStatus cs = ChunkStatus.byName(stat);
if ((stat != null) && cs.isAtLeast(ChunkStatus.LIGHT)) { // ChunkStatus.LIGHT
return true;
}
}
return false;
}
// Load generic chunk from existing and already loaded chunk
protected GenericChunk getLoadedChunk(DynmapChunk chunk) {
GenericChunk gc = null;
IChunk ch = cps.getChunk(chunk.x, chunk.z, ChunkStatus.FULL, false);
if (ch != null) {
CompoundNBT nbt = ChunkSerializer.write(w, ch);
if (isLitChunk(nbt)) {
if (nbt != null) {
gc = parseChunkFromNBT(new NBT.NBTCompound(nbt));
}
}
@@ -62,7 +48,7 @@ public class ForgeMapChunkCache extends GenericMapChunkCache {
GenericChunk gc = null;
CompoundNBT nbt = readChunk(chunk.x, chunk.z);
// If read was good
if (isLitChunk(nbt)) {
if (nbt != null) {
gc = parseChunkFromNBT(new NBT.NBTCompound(nbt));
}
return gc;
@@ -86,16 +72,7 @@ public class ForgeMapChunkCache extends GenericMapChunkCache {
private CompoundNBT readChunk(int x, int z) {
try {
CompoundNBT rslt = cps.chunkManager.readChunk(new ChunkPos(x, z));
if (rslt != null) {
if (rslt.contains("Level")) {
rslt = rslt.getCompound("Level");
}
}
if (!isLitChunk(rslt)) {
rslt = null;
}
return rslt;
return cps.chunkManager.readChunk(new ChunkPos(x, z));
} catch (Exception exc) {
Log.severe(String.format("Error reading chunk: %s,%d,%d", dw.getName(), x, z), exc);
return null;

View File

@@ -31,27 +31,13 @@ public class ForgeMapChunkCache extends GenericMapChunkCache {
init();
}
private boolean isLitChunk(CompoundTag nbt) {
if ((nbt != null) && nbt.contains("Level")) {
nbt = nbt.getCompound("Level");
}
if (nbt != null) {
String stat = nbt.getString("Status");
ChunkStatus cs = ChunkStatus.byName(stat);
if ((stat != null) && cs.isOrAfter(ChunkStatus.LIGHT)) { // ChunkStatus.LIGHT
return true;
}
}
return false;
}
// Load generic chunk from existing and already loaded chunk
protected GenericChunk getLoadedChunk(DynmapChunk chunk) {
GenericChunk gc = null;
ChunkAccess ch = cps.getChunk(chunk.x, chunk.z, ChunkStatus.FULL, false);
if (ch != null) {
CompoundTag nbt = ChunkSerializer.write(w, ch);
if (isLitChunk(nbt)) {
if (nbt != null) {
gc = parseChunkFromNBT(new NBT.NBTCompound(nbt));
}
}
@@ -62,7 +48,7 @@ public class ForgeMapChunkCache extends GenericMapChunkCache {
GenericChunk gc = null;
CompoundTag nbt = readChunk(chunk.x, chunk.z);
// If read was good
if (isLitChunk(nbt)) {
if (nbt != null) {
gc = parseChunkFromNBT(new NBT.NBTCompound(nbt));
}
return gc;
@@ -79,23 +65,7 @@ public class ForgeMapChunkCache extends GenericMapChunkCache {
private CompoundTag readChunk(int x, int z) {
try {
CompoundTag rslt = cps.chunkMap.readChunk(new ChunkPos(x, z));
if (rslt != null) {
if (rslt.contains("Level")) {
rslt = rslt.getCompound("Level");
}
// Don't load uncooked chunks
String stat = rslt.getString("Status");
ChunkStatus cs = ChunkStatus.byName(stat);
if ((stat == null) ||
// Needs to be at least lighted
(!cs.isOrAfter(ChunkStatus.LIGHT))) {
rslt = null;
}
}
// Log.info(String.format("loadChunk(%d,%d)=%s", x, z, (rslt != null) ?
// rslt.toString() : "null"));
return rslt;
return cps.chunkMap.readChunk(new ChunkPos(x, z));
} catch (Exception exc) {
Log.severe(String.format("Error reading chunk: %s,%d,%d", dw.getName(), x, z), exc);
return null;