decompressor) {
+ this.id = (byte) id;
+ this.compressor = compressor;
+ this.decompressor = decompressor;
+ }
+
+ public byte getID() {
+ return id;
+ }
+
+ public OutputStream compress(OutputStream out) throws IOException {
+ return compressor.accept(out);
+ }
+
+ public InputStream decompress(InputStream in) throws IOException {
+ return decompressor.accept(in);
+ }
+
+ /**
+ * Finishes writing compressed data to the output stream without closing it.
+ * @exception IOException if an I/O error has occurred
+ */
+ public void finish(OutputStream out) throws IOException {
+ if (out instanceof DeflaterOutputStream) {
+ ((DeflaterOutputStream) out).finish();
+ }
+ }
+
+ public static CompressionType getFromID(byte id) {
+ for (CompressionType c : CompressionType.values()) {
+ if (c.id == id) {
+ return c;
+ }
+ }
+ return null;
+ }
+
+ /**
+ * Makes an excellent guess (for minecraft nbt data) at the {@link CompressionType} used by looking for each
+ * compression type's respective magic-header values.
+ *
+ *
+ * There is virtually NO RISK of this detection strategy producing a wrong result if the input
+ * bytes actually represent binary nbt data. This is because the first (uncompressed) byte is always a tag ID
+ * and the max tag id is well under 0x1f currently. Additionally, all bin nbt is stored as a {@link NamedTag}
+ * which is encoded in in-fix order [tag-id, named-tag name, tag-data] where the tag's id is put before the
+ * named-tag's name and this name is stored using UTF which always uses at least 2 bytes for the string length.
+ * This means that the tags name string would need to be extremely long (at least 35,592 bytes) before it could
+ * possibly match either gzip or zlib low magic header bytes, and we would need a valid tag-id of 0x1f to exist.
+ *
+ * WARNING: if this enum, and method, is used for non-nbt data it becomes possible, but probably unlikely,
+ * for this method to make a mistake and choose the wrong {@link CompressionType}!
+ */
+ public static CompressionType detect(byte[] bytes) {
+ if (bytes != null && bytes.length >= 2) {
+ if (bytes[0] == (byte) 0x1f && bytes[1] == (byte) 0x8b) {
+ return GZIP;
+ }
+ if (bytes[0] == (byte) 0x78 && bytes[1] == (byte) 0x9c) {
+ return ZLIB;
+ }
+ }
+ return NONE;
+ }
+}
diff --git a/src/main/java/net/querz/io/Deserializer.java b/src/main/java/io/github/ensgijs/nbt/io/Deserializer.java
similarity index 96%
rename from src/main/java/net/querz/io/Deserializer.java
rename to src/main/java/io/github/ensgijs/nbt/io/Deserializer.java
index 1849fe9d..82acb10a 100644
--- a/src/main/java/net/querz/io/Deserializer.java
+++ b/src/main/java/io/github/ensgijs/nbt/io/Deserializer.java
@@ -1,4 +1,4 @@
-package net.querz.io;
+package io.github.ensgijs.nbt.io;
import java.io.BufferedInputStream;
import java.io.ByteArrayInputStream;
@@ -37,6 +37,4 @@ default T fromURL(URL url) throws IOException {
return fromStream(stream);
}
}
-
-
}
diff --git a/src/main/java/net/querz/io/ExceptionBiFunction.java b/src/main/java/io/github/ensgijs/nbt/io/ExceptionBiFunction.java
similarity index 78%
rename from src/main/java/net/querz/io/ExceptionBiFunction.java
rename to src/main/java/io/github/ensgijs/nbt/io/ExceptionBiFunction.java
index c34dba72..120915f3 100644
--- a/src/main/java/net/querz/io/ExceptionBiFunction.java
+++ b/src/main/java/io/github/ensgijs/nbt/io/ExceptionBiFunction.java
@@ -1,4 +1,4 @@
-package net.querz.io;
+package io.github.ensgijs.nbt.io;
@FunctionalInterface
public interface ExceptionBiFunction {
diff --git a/src/main/java/net/querz/io/ExceptionTriConsumer.java b/src/main/java/io/github/ensgijs/nbt/io/ExceptionTriConsumer.java
similarity index 79%
rename from src/main/java/net/querz/io/ExceptionTriConsumer.java
rename to src/main/java/io/github/ensgijs/nbt/io/ExceptionTriConsumer.java
index d49ccc90..4d5a2b7a 100644
--- a/src/main/java/net/querz/io/ExceptionTriConsumer.java
+++ b/src/main/java/io/github/ensgijs/nbt/io/ExceptionTriConsumer.java
@@ -1,4 +1,4 @@
-package net.querz.io;
+package io.github.ensgijs.nbt.io;
@FunctionalInterface
public interface ExceptionTriConsumer {
diff --git a/src/main/java/net/querz/nbt/io/LittleEndianNBTInputStream.java b/src/main/java/io/github/ensgijs/nbt/io/LittleEndianNbtInputStream.java
similarity index 72%
rename from src/main/java/net/querz/nbt/io/LittleEndianNBTInputStream.java
rename to src/main/java/io/github/ensgijs/nbt/io/LittleEndianNbtInputStream.java
index 7c7d8ed2..a53bebb0 100644
--- a/src/main/java/net/querz/nbt/io/LittleEndianNBTInputStream.java
+++ b/src/main/java/io/github/ensgijs/nbt/io/LittleEndianNbtInputStream.java
@@ -1,21 +1,19 @@
-package net.querz.nbt.io;
-
-import net.querz.io.ExceptionBiFunction;
-import net.querz.io.MaxDepthIO;
-import net.querz.nbt.tag.ByteArrayTag;
-import net.querz.nbt.tag.ByteTag;
-import net.querz.nbt.tag.CompoundTag;
-import net.querz.nbt.tag.DoubleTag;
-import net.querz.nbt.tag.EndTag;
-import net.querz.nbt.tag.FloatTag;
-import net.querz.nbt.tag.IntArrayTag;
-import net.querz.nbt.tag.IntTag;
-import net.querz.nbt.tag.ListTag;
-import net.querz.nbt.tag.LongArrayTag;
-import net.querz.nbt.tag.LongTag;
-import net.querz.nbt.tag.ShortTag;
-import net.querz.nbt.tag.StringTag;
-import net.querz.nbt.tag.Tag;
+package io.github.ensgijs.nbt.io;
+
+import io.github.ensgijs.nbt.tag.ByteArrayTag;
+import io.github.ensgijs.nbt.tag.ByteTag;
+import io.github.ensgijs.nbt.tag.CompoundTag;
+import io.github.ensgijs.nbt.tag.DoubleTag;
+import io.github.ensgijs.nbt.tag.EndTag;
+import io.github.ensgijs.nbt.tag.FloatTag;
+import io.github.ensgijs.nbt.tag.IntArrayTag;
+import io.github.ensgijs.nbt.tag.IntTag;
+import io.github.ensgijs.nbt.tag.ListTag;
+import io.github.ensgijs.nbt.tag.LongArrayTag;
+import io.github.ensgijs.nbt.tag.LongTag;
+import io.github.ensgijs.nbt.tag.ShortTag;
+import io.github.ensgijs.nbt.tag.StringTag;
+import io.github.ensgijs.nbt.tag.Tag;
import java.io.Closeable;
import java.io.DataInput;
import java.io.DataInputStream;
@@ -25,11 +23,12 @@
import java.util.HashMap;
import java.util.Map;
-public class LittleEndianNBTInputStream implements DataInput, NBTInput, MaxDepthIO, Closeable {
+/** Use for Minecraft Bedrock edition data. */
+public class LittleEndianNbtInputStream implements DataInput, NbtInput, MaxDepthIO, Closeable {
private final DataInputStream input;
- private static Map, IOException>> readers = new HashMap<>();
+ private static Map, IOException>> readers = new HashMap<>();
private static Map> idClassMapping = new HashMap<>();
static {
@@ -42,22 +41,22 @@ public class LittleEndianNBTInputStream implements DataInput, NBTInput, MaxDepth
put(DoubleTag.ID, (i, d) -> readDouble(i), DoubleTag.class);
put(ByteArrayTag.ID, (i, d) -> readByteArray(i), ByteArrayTag.class);
put(StringTag.ID, (i, d) -> readString(i), StringTag.class);
- put(ListTag.ID, LittleEndianNBTInputStream::readListTag, ListTag.class);
- put(CompoundTag.ID, LittleEndianNBTInputStream::readCompound, CompoundTag.class);
+ put(ListTag.ID, LittleEndianNbtInputStream::readListTag, ListTag.class);
+ put(CompoundTag.ID, LittleEndianNbtInputStream::readCompound, CompoundTag.class);
put(IntArrayTag.ID, (i, d) -> readIntArray(i), IntArrayTag.class);
put(LongArrayTag.ID, (i, d) -> readLongArray(i), LongArrayTag.class);
}
- private static void put(byte id, ExceptionBiFunction, IOException> reader, Class> clazz) {
+ private static void put(byte id, ExceptionBiFunction, IOException> reader, Class> clazz) {
readers.put(id, reader);
idClassMapping.put(id, clazz);
}
- public LittleEndianNBTInputStream(InputStream in) {
+ public LittleEndianNbtInputStream(InputStream in) {
input = new DataInputStream(in);
}
- public LittleEndianNBTInputStream(DataInputStream in) {
+ public LittleEndianNbtInputStream(DataInputStream in) {
input = in;
}
@@ -72,48 +71,48 @@ public Tag> readRawTag(int maxDepth) throws IOException {
}
private Tag> readTag(byte type, int maxDepth) throws IOException {
- ExceptionBiFunction, IOException> f;
+ ExceptionBiFunction, IOException> f;
if ((f = readers.get(type)) == null) {
throw new IOException("invalid tag id \"" + type + "\"");
}
return f.accept(this, maxDepth);
}
- private static ByteTag readByte(LittleEndianNBTInputStream in) throws IOException {
+ private static ByteTag readByte(LittleEndianNbtInputStream in) throws IOException {
return new ByteTag(in.readByte());
}
- private static ShortTag readShort(LittleEndianNBTInputStream in) throws IOException {
+ private static ShortTag readShort(LittleEndianNbtInputStream in) throws IOException {
return new ShortTag(in.readShort());
}
- private static IntTag readInt(LittleEndianNBTInputStream in) throws IOException {
+ private static IntTag readInt(LittleEndianNbtInputStream in) throws IOException {
return new IntTag(in.readInt());
}
- private static LongTag readLong(LittleEndianNBTInputStream in) throws IOException {
+ private static LongTag readLong(LittleEndianNbtInputStream in) throws IOException {
return new LongTag(in.readLong());
}
- private static FloatTag readFloat(LittleEndianNBTInputStream in) throws IOException {
+ private static FloatTag readFloat(LittleEndianNbtInputStream in) throws IOException {
return new FloatTag(in.readFloat());
}
- private static DoubleTag readDouble(LittleEndianNBTInputStream in) throws IOException {
+ private static DoubleTag readDouble(LittleEndianNbtInputStream in) throws IOException {
return new DoubleTag(in.readDouble());
}
- private static StringTag readString(LittleEndianNBTInputStream in) throws IOException {
+ private static StringTag readString(LittleEndianNbtInputStream in) throws IOException {
return new StringTag(in.readUTF());
}
- private static ByteArrayTag readByteArray(LittleEndianNBTInputStream in) throws IOException {
+ private static ByteArrayTag readByteArray(LittleEndianNbtInputStream in) throws IOException {
ByteArrayTag bat = new ByteArrayTag(new byte[in.readInt()]);
in.readFully(bat.getValue());
return bat;
}
- private static IntArrayTag readIntArray(LittleEndianNBTInputStream in) throws IOException {
+ private static IntArrayTag readIntArray(LittleEndianNbtInputStream in) throws IOException {
int l = in.readInt();
int[] data = new int[l];
IntArrayTag iat = new IntArrayTag(data);
@@ -123,7 +122,7 @@ private static IntArrayTag readIntArray(LittleEndianNBTInputStream in) throws IO
return iat;
}
- private static LongArrayTag readLongArray(LittleEndianNBTInputStream in) throws IOException {
+ private static LongArrayTag readLongArray(LittleEndianNbtInputStream in) throws IOException {
int l = in.readInt();
long[] data = new long[l];
LongArrayTag iat = new LongArrayTag(data);
@@ -133,7 +132,7 @@ private static LongArrayTag readLongArray(LittleEndianNBTInputStream in) throws
return iat;
}
- private static ListTag> readListTag(LittleEndianNBTInputStream in, int maxDepth) throws IOException {
+ private static ListTag> readListTag(LittleEndianNbtInputStream in, int maxDepth) throws IOException {
byte listType = in.readByte();
ListTag> list = ListTag.createUnchecked(idClassMapping.get(listType));
int length = in.readInt();
@@ -146,7 +145,7 @@ private static ListTag> readListTag(LittleEndianNBTInputStream in, int maxDept
return list;
}
- private static CompoundTag readCompound(LittleEndianNBTInputStream in, int maxDepth) throws IOException {
+ private static CompoundTag readCompound(LittleEndianNbtInputStream in, int maxDepth) throws IOException {
CompoundTag comp = new CompoundTag();
for (int id = in.readByte() & 0xFF; id != 0; id = in.readByte() & 0xFF) {
String key = in.readUTF();
diff --git a/src/main/java/net/querz/nbt/io/LittleEndianNBTOutputStream.java b/src/main/java/io/github/ensgijs/nbt/io/LittleEndianNbtOutputStream.java
similarity index 59%
rename from src/main/java/net/querz/nbt/io/LittleEndianNBTOutputStream.java
rename to src/main/java/io/github/ensgijs/nbt/io/LittleEndianNbtOutputStream.java
index 9cda01b2..3aa756b3 100644
--- a/src/main/java/net/querz/nbt/io/LittleEndianNBTOutputStream.java
+++ b/src/main/java/io/github/ensgijs/nbt/io/LittleEndianNbtOutputStream.java
@@ -1,21 +1,19 @@
-package net.querz.nbt.io;
-
-import net.querz.io.ExceptionTriConsumer;
-import net.querz.io.MaxDepthIO;
-import net.querz.nbt.tag.ByteArrayTag;
-import net.querz.nbt.tag.ByteTag;
-import net.querz.nbt.tag.CompoundTag;
-import net.querz.nbt.tag.DoubleTag;
-import net.querz.nbt.tag.EndTag;
-import net.querz.nbt.tag.FloatTag;
-import net.querz.nbt.tag.IntArrayTag;
-import net.querz.nbt.tag.IntTag;
-import net.querz.nbt.tag.ListTag;
-import net.querz.nbt.tag.LongArrayTag;
-import net.querz.nbt.tag.LongTag;
-import net.querz.nbt.tag.ShortTag;
-import net.querz.nbt.tag.StringTag;
-import net.querz.nbt.tag.Tag;
+package io.github.ensgijs.nbt.io;
+
+import io.github.ensgijs.nbt.tag.ByteArrayTag;
+import io.github.ensgijs.nbt.tag.ByteTag;
+import io.github.ensgijs.nbt.tag.CompoundTag;
+import io.github.ensgijs.nbt.tag.DoubleTag;
+import io.github.ensgijs.nbt.tag.EndTag;
+import io.github.ensgijs.nbt.tag.FloatTag;
+import io.github.ensgijs.nbt.tag.IntArrayTag;
+import io.github.ensgijs.nbt.tag.IntTag;
+import io.github.ensgijs.nbt.tag.ListTag;
+import io.github.ensgijs.nbt.tag.LongArrayTag;
+import io.github.ensgijs.nbt.tag.LongTag;
+import io.github.ensgijs.nbt.tag.ShortTag;
+import io.github.ensgijs.nbt.tag.StringTag;
+import io.github.ensgijs.nbt.tag.Tag;
import java.io.Closeable;
import java.io.DataOutput;
import java.io.DataOutputStream;
@@ -25,12 +23,15 @@
import java.util.HashMap;
import java.util.Map;
-public class LittleEndianNBTOutputStream implements DataOutput, NBTOutput, MaxDepthIO, Closeable {
+/** Use for Minecraft Bedrock edition data. */
+public class LittleEndianNbtOutputStream implements DataOutput, NbtOutput, MaxDepthIO, Closeable {
private final DataOutputStream output;
- private static Map, Integer, IOException>> writers = new HashMap<>();
- private static Map, Byte> classIdMapping = new HashMap<>();
+ private static final Map, Byte> CLASS_ID_MAPPINGS = new HashMap<>();
+ private static final Map, Integer, IOException>> DEFAULT_WRITERS = new HashMap<>();
+ private final Map, Integer, IOException>> writers;
+
static {
put(EndTag.ID, (o, t, d) -> {}, EndTag.class);
@@ -42,23 +43,23 @@ public class LittleEndianNBTOutputStream implements DataOutput, NBTOutput, MaxDe
put(DoubleTag.ID, (o, t, d) -> writeDouble(o, t), DoubleTag.class);
put(ByteArrayTag.ID, (o, t, d) -> writeByteArray(o, t), ByteArrayTag.class);
put(StringTag.ID, (o, t, d) -> writeString(o, t), StringTag.class);
- put(ListTag.ID, LittleEndianNBTOutputStream::writeList, ListTag.class);
- put(CompoundTag.ID, LittleEndianNBTOutputStream::writeCompound, CompoundTag.class);
+ put(ListTag.ID, LittleEndianNbtOutputStream::writeList, ListTag.class);
+ put(CompoundTag.ID, LittleEndianNbtOutputStream::writeCompound, CompoundTag.class);
put(IntArrayTag.ID, (o, t, d) -> writeIntArray(o, t), IntArrayTag.class);
put(LongArrayTag.ID, (o, t, d) -> writeLongArray(o, t), LongArrayTag.class);
}
- private static void put(byte id, ExceptionTriConsumer, Integer, IOException> f, Class> clazz) {
- writers.put(id, f);
- classIdMapping.put(clazz, id);
+ private static void put(byte id, ExceptionTriConsumer, Integer, IOException> f, Class> clazz) {
+ DEFAULT_WRITERS.put(id, f);
+ CLASS_ID_MAPPINGS.put(clazz, id);
}
- public LittleEndianNBTOutputStream(OutputStream out) {
+ public LittleEndianNbtOutputStream(OutputStream out, boolean sortCompoundTagEntries) {
output = new DataOutputStream(out);
- }
-
- public LittleEndianNBTOutputStream(DataOutputStream out) {
- output = out;
+ writers = new HashMap<>(DEFAULT_WRITERS);
+ if (sortCompoundTagEntries) {
+ writers.put(CompoundTag.ID, LittleEndianNbtOutputStream::writeCompoundSortedKeys);
+ }
}
public void writeTag(NamedTag tag, int maxDepth) throws IOException {
@@ -78,7 +79,7 @@ public void writeTag(Tag> tag, int maxDepth) throws IOException {
}
public void writeRawTag(Tag> tag, int maxDepth) throws IOException {
- ExceptionTriConsumer, Integer, IOException> f;
+ ExceptionTriConsumer, Integer, IOException> f;
if ((f = writers.get(tag.getID())) == null) {
throw new IOException("invalid tag \"" + tag.getID() + "\"");
}
@@ -86,61 +87,61 @@ public void writeRawTag(Tag> tag, int maxDepth) throws IOException {
}
static byte idFromClass(Class> clazz) {
- Byte id = classIdMapping.get(clazz);
+ Byte id = CLASS_ID_MAPPINGS.get(clazz);
if (id == null) {
throw new IllegalArgumentException("unknown Tag class " + clazz.getName());
}
return id;
}
- private static void writeByte(LittleEndianNBTOutputStream out, Tag> tag) throws IOException {
+ private static void writeByte(LittleEndianNbtOutputStream out, Tag> tag) throws IOException {
out.writeByte(((ByteTag) tag).asByte());
}
- private static void writeShort(LittleEndianNBTOutputStream out, Tag> tag) throws IOException {
+ private static void writeShort(LittleEndianNbtOutputStream out, Tag> tag) throws IOException {
out.writeShort(((ShortTag) tag).asShort());
}
- private static void writeInt(LittleEndianNBTOutputStream out, Tag> tag) throws IOException {
+ private static void writeInt(LittleEndianNbtOutputStream out, Tag> tag) throws IOException {
out.writeInt(((IntTag) tag).asInt());
}
- private static void writeLong(LittleEndianNBTOutputStream out, Tag> tag) throws IOException {
+ private static void writeLong(LittleEndianNbtOutputStream out, Tag> tag) throws IOException {
out.writeLong(((LongTag) tag).asLong());
}
- private static void writeFloat(LittleEndianNBTOutputStream out, Tag> tag) throws IOException {
+ private static void writeFloat(LittleEndianNbtOutputStream out, Tag> tag) throws IOException {
out.writeFloat(((FloatTag) tag).asFloat());
}
- private static void writeDouble(LittleEndianNBTOutputStream out, Tag> tag) throws IOException {
+ private static void writeDouble(LittleEndianNbtOutputStream out, Tag> tag) throws IOException {
out.writeDouble(((DoubleTag) tag).asDouble());
}
- private static void writeString(LittleEndianNBTOutputStream out, Tag> tag) throws IOException {
+ private static void writeString(LittleEndianNbtOutputStream out, Tag> tag) throws IOException {
out.writeUTF(((StringTag) tag).getValue());
}
- private static void writeByteArray(LittleEndianNBTOutputStream out, Tag> tag) throws IOException {
+ private static void writeByteArray(LittleEndianNbtOutputStream out, Tag> tag) throws IOException {
out.writeInt(((ByteArrayTag) tag).length());
out.write(((ByteArrayTag) tag).getValue());
}
- private static void writeIntArray(LittleEndianNBTOutputStream out, Tag> tag) throws IOException {
+ private static void writeIntArray(LittleEndianNbtOutputStream out, Tag> tag) throws IOException {
out.writeInt(((IntArrayTag) tag).length());
for (int i : ((IntArrayTag) tag).getValue()) {
out.writeInt(i);
}
}
- private static void writeLongArray(LittleEndianNBTOutputStream out, Tag> tag) throws IOException {
+ private static void writeLongArray(LittleEndianNbtOutputStream out, Tag> tag) throws IOException {
out.writeInt(((LongArrayTag) tag).length());
for (long l : ((LongArrayTag) tag).getValue()) {
out.writeLong(l);
}
}
- private static void writeList(LittleEndianNBTOutputStream out, Tag> tag, int maxDepth) throws IOException {
+ private static void writeList(LittleEndianNbtOutputStream out, Tag> tag, int maxDepth) throws IOException {
out.writeByte(idFromClass(((ListTag>) tag).getTypeClass()));
out.writeInt(((ListTag>) tag).size());
for (Tag> t : ((ListTag>) tag)) {
@@ -148,15 +149,37 @@ private static void writeList(LittleEndianNBTOutputStream out, Tag> tag, int m
}
}
- private static void writeCompound(LittleEndianNBTOutputStream out, Tag> tag, int maxDepth) throws IOException {
- for (Map.Entry> entry : (CompoundTag) tag) {
- if (entry.getValue().getID() == 0) {
+ private static void writeCompound(LittleEndianNbtOutputStream out, Tag> tag, int maxDepth) throws IOException {
+ for (NamedTag entry : (CompoundTag) tag) {
+ if (entry.getTag().getID() == 0) {
throw new IOException("end tag not allowed");
}
- out.writeByte(entry.getValue().getID());
+ out.writeByte(entry.getTag().getID());
+ out.writeUTF(entry.getName());
+ out.writeRawTag(entry.getTag(), out.decrementMaxDepth(maxDepth));
+ }
+ out.writeByte(0);
+ }
+
+ /**
+ * This is useful for creating repeatable binary nbt data, making it possible to compare the bnbt
+ * directly without having to parse and compare the tag data itself.
+ */
+ private static void writeCompoundSortedKeys(LittleEndianNbtOutputStream out, Tag> tag, int maxDepth) throws IOException {
+ var iter = ((CompoundTag) tag).entrySet().stream()
+ .sorted(Map.Entry.comparingByKey())
+ .iterator();
+ while (iter.hasNext()) {
+ var entry = iter.next();
+ var entryTag = entry.getValue();
+ if (entryTag.getID() == 0) {
+ throw new IOException("end tag not allowed");
+ }
+ out.writeByte(entryTag.getID());
out.writeUTF(entry.getKey());
- out.writeRawTag(entry.getValue(), out.decrementMaxDepth(maxDepth));
+ out.writeRawTag(entryTag, out.decrementMaxDepth(maxDepth));
}
+
out.writeByte(0);
}
diff --git a/src/main/java/net/querz/io/MaxDepthIO.java b/src/main/java/io/github/ensgijs/nbt/io/MaxDepthIO.java
similarity index 90%
rename from src/main/java/net/querz/io/MaxDepthIO.java
rename to src/main/java/io/github/ensgijs/nbt/io/MaxDepthIO.java
index 0a5fc3e7..70cf438a 100644
--- a/src/main/java/net/querz/io/MaxDepthIO.java
+++ b/src/main/java/io/github/ensgijs/nbt/io/MaxDepthIO.java
@@ -1,4 +1,4 @@
-package net.querz.io;
+package io.github.ensgijs.nbt.io;
public interface MaxDepthIO {
diff --git a/src/main/java/net/querz/io/MaxDepthReachedException.java b/src/main/java/io/github/ensgijs/nbt/io/MaxDepthReachedException.java
similarity index 88%
rename from src/main/java/net/querz/io/MaxDepthReachedException.java
rename to src/main/java/io/github/ensgijs/nbt/io/MaxDepthReachedException.java
index eb903228..5a5d3bd6 100644
--- a/src/main/java/net/querz/io/MaxDepthReachedException.java
+++ b/src/main/java/io/github/ensgijs/nbt/io/MaxDepthReachedException.java
@@ -1,4 +1,4 @@
-package net.querz.io;
+package io.github.ensgijs.nbt.io;
/**
* Exception indicating that the maximum (de-)serialization depth has been reached.
diff --git a/src/main/java/io/github/ensgijs/nbt/io/NamedTag.java b/src/main/java/io/github/ensgijs/nbt/io/NamedTag.java
new file mode 100644
index 00000000..2062780e
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/io/NamedTag.java
@@ -0,0 +1,131 @@
+package io.github.ensgijs.nbt.io;
+
+import io.github.ensgijs.nbt.tag.Tag;
+import io.github.ensgijs.nbt.util.ArgValidator;
+
+import java.util.Locale;
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.Predicate;
+import java.util.regex.Pattern;
+
+public class NamedTag implements Cloneable, Comparable {
+ private static final Pattern TAG_NAME_NON_QUOTE_PATTERN = Pattern.compile("^[a-zA-Z0-9_+\\-]+$");
+ private static final Predicate IS_INTEGER_STRING = Pattern.compile("^(?:\\+|-)?\\d+$").asPredicate();
+
+ private String name;
+ private Tag> tag;
+
+ protected NamedTag() {}
+
+ /**
+ * Copy constructor. Performs a deep copy of the given other NamedTag (calls {@code other.getTag().clone()}).
+ * Other's name may be null but the tag may not be.
+ */
+ public NamedTag(NamedTag other) {
+ ArgValidator.requireValue(other, "other");
+ ArgValidator.requireValue(other.tag, "other.tag");
+ this.name = other.name;
+ this.tag = other.getTag().clone();
+ }
+
+ /**
+ * Creates a NamedTag that references the given tag.
+ * @param name nullable name
+ * @param tag non-null tag
+ */
+ public NamedTag(String name, Tag> tag) {
+ ArgValidator.requireValue(tag, "tag");
+ this.name = name;
+ this.tag = tag;
+ }
+
+ public NamedTag(Map.Entry> entry) {
+ this(entry.getKey(), Objects.requireNonNull(entry.getValue()));
+ }
+
+ /** nullable */
+ public void setName(String name) {
+ this.name = name;
+ }
+
+ /** must be non-null */
+ public void setTag(Tag> tag) {
+ ArgValidator.requireValue(tag, "tag");
+ this.tag = tag;
+ }
+
+ /** nullable */
+ public String getName() {
+ return name;
+ }
+
+ /** non-null */
+ public Tag> getTag() {
+ return tag;
+ }
+
+ /** non-null */
+ @SuppressWarnings("unchecked")
+ public > T getTagAutoCast() {
+ return (T) getTag();
+ }
+
+ /**
+ * Wraps the name in quotes if it contains anything other than ascii letters (a-z), numbers, underscore, plus, or dash.
+ * If name is null, then null is returned.
+ */
+ public String getEscapedName() {
+ return escapeName(getName());
+ }
+
+ public static String escapeName(String name) {
+ if (name != null && !TAG_NAME_NON_QUOTE_PATTERN.matcher(name).matches()) {
+ StringBuilder sb = new StringBuilder();
+ sb.append('"');
+ for (int i = 0; i < name.length(); i++) {
+ char c = name.charAt(i);
+ if (c == '\\' || c == '"') {
+ sb.append('\\');
+ }
+ sb.append(c);
+ }
+ sb.append('"');
+ return sb.toString();
+ }
+ return name;
+ }
+
+
+ @Override
+ public boolean equals(Object o) {
+ if (!(o instanceof NamedTag)) return false;
+ NamedTag other = (NamedTag) o;
+ return Objects.equals(this.getName(), other.getName()) &&
+ Objects.equals(this.getTag(), other.getTag());
+ }
+
+ public static int compare(NamedTag o1, NamedTag o2) {
+ if (o1 == o2) return 0;
+ if (o1 == null) return -1;
+ if (o2 == null) return 1;
+ String n1Lower = o1.getName().toLowerCase(Locale.ENGLISH);
+ String n2Lower = o2.getName().toLowerCase(Locale.ENGLISH);
+ if (IS_INTEGER_STRING.test(n1Lower) && IS_INTEGER_STRING.test(n2Lower)) {
+ return Long.compare(Long.parseLong(n1Lower), Long.parseLong(n2Lower));
+ } else {
+ int result = n1Lower.compareTo(n2Lower);
+ return result != 0 ? result : o1.getName().compareTo(o2.getName());
+ }
+ }
+
+ @Override
+ public NamedTag clone() {
+ return new NamedTag(this);
+ }
+
+ @Override
+ public int compareTo(NamedTag o) {
+ return compare(this, o);
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/io/NbtInput.java b/src/main/java/io/github/ensgijs/nbt/io/NbtInput.java
new file mode 100644
index 00000000..a3f3db72
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/io/NbtInput.java
@@ -0,0 +1,13 @@
+package io.github.ensgijs.nbt.io;
+
+import io.github.ensgijs.nbt.tag.Tag;
+
+import java.io.IOException;
+
+/** If there is no content to parse (aka empty file) then null should be returned. */
+public interface NbtInput {
+
+ NamedTag readTag(int maxDepth) throws IOException;
+
+ Tag> readRawTag(int maxDepth) throws IOException;
+}
diff --git a/src/main/java/net/querz/nbt/io/NBTOutput.java b/src/main/java/io/github/ensgijs/nbt/io/NbtOutput.java
similarity index 65%
rename from src/main/java/net/querz/nbt/io/NBTOutput.java
rename to src/main/java/io/github/ensgijs/nbt/io/NbtOutput.java
index 39f6d688..4f30c32e 100644
--- a/src/main/java/net/querz/nbt/io/NBTOutput.java
+++ b/src/main/java/io/github/ensgijs/nbt/io/NbtOutput.java
@@ -1,9 +1,10 @@
-package net.querz.nbt.io;
+package io.github.ensgijs.nbt.io;
+
+import io.github.ensgijs.nbt.tag.Tag;
-import net.querz.nbt.tag.Tag;
import java.io.IOException;
-public interface NBTOutput {
+public interface NbtOutput {
void writeTag(NamedTag tag, int maxDepth) throws IOException;
diff --git a/src/main/java/net/querz/nbt/io/ParseException.java b/src/main/java/io/github/ensgijs/nbt/io/ParseException.java
similarity index 75%
rename from src/main/java/net/querz/nbt/io/ParseException.java
rename to src/main/java/io/github/ensgijs/nbt/io/ParseException.java
index c62e0610..d7370f73 100644
--- a/src/main/java/net/querz/nbt/io/ParseException.java
+++ b/src/main/java/io/github/ensgijs/nbt/io/ParseException.java
@@ -1,4 +1,4 @@
-package net.querz.nbt.io;
+package io.github.ensgijs.nbt.io;
import java.io.IOException;
@@ -22,4 +22,10 @@ private static String formatError(String value, int index) {
builder.append("<--[HERE]");
return builder.toString();
}
+
+ public static class SilentParseException extends RuntimeException {
+ public SilentParseException(ParseException cause) {
+ super(cause);
+ }
+ }
}
diff --git a/src/main/java/io/github/ensgijs/nbt/io/PositionTrackingInputStream.java b/src/main/java/io/github/ensgijs/nbt/io/PositionTrackingInputStream.java
new file mode 100644
index 00000000..21f42378
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/io/PositionTrackingInputStream.java
@@ -0,0 +1,120 @@
+package io.github.ensgijs.nbt.io;
+
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+
+/**
+ * Wraps an InputStream to track the read position and to allow you to skip ahead to a known position.
+ * Note: position 0 isn't necessarily the first byte in the stream - it's the first byte that will be read
+ * after this object is created.
+ */
+public class PositionTrackingInputStream extends InputStream {
+ private long pos = 0;
+ private long markedPos = -1;
+ private final InputStream stream;
+ private long softEof = 0;
+
+ public PositionTrackingInputStream(InputStream in) {
+ stream = in;
+ }
+
+ public long pos() {
+ return pos;
+ }
+
+ /**
+ * Soft EOF will prevent byte[] reads that cross this position. Such reads won't fail, they'll simply only be
+ * filled up to softEof - 1. Set to a non-positive value to disable completely (default).
+ */
+ public void setSoftEof(long softEof) {
+ this.softEof = softEof;
+ }
+
+ @Override
+ public int read() throws IOException {
+ int ret = stream.read();
+ if (ret >= 0) pos++;
+ return ret;
+ }
+
+ @Override
+ public int read(byte[] b) throws IOException {
+ int len = b.length;
+ if (pos < softEof && (pos + len) > softEof) {
+ len = (int) (softEof - pos);
+ }
+ int ret = stream.read(b, 0, len);
+ if (ret > 0) pos += ret;
+ return ret;
+ }
+
+ @Override
+ public int read(byte[] b, int off, int len) throws IOException {
+ if (pos < softEof && (pos + len) >= softEof) {
+ len = (int) (softEof - pos);
+ }
+ int ret = stream.read(b, off, len);
+ if (ret > 0) pos += ret;
+ return ret;
+ }
+
+ @Override
+ public long skip(long n) throws IOException {
+ long ret = stream.skip(n);
+ if (ret > 0) pos += ret;
+ return ret;
+ }
+
+ @Override
+ public int available() throws IOException {
+ return stream.available();
+ }
+
+ @Override
+ public void close() throws IOException {
+ stream.close();
+ }
+
+ /**
+ * Sets soft EOF automatically (pos() + readLimit).
+ * @see #setSoftEof(long)
+ */
+ @Override
+ public synchronized void mark(int readlimit) {
+ if (super.markSupported()) {
+ markedPos = pos;
+ }
+ stream.mark(readlimit);
+ setSoftEof(pos + readlimit);
+ }
+
+ @Override
+ public synchronized void reset() throws IOException {
+ stream.reset();
+ if (markedPos < 0) throw new IOException("mark position is unknown!");
+ pos = markedPos;
+ }
+
+ @Override
+ public boolean markSupported() {
+ return stream.markSupported();
+ }
+
+ public void skipTo(long pos) throws IOException {
+ if (pos < this.pos)
+ throw new IOException(
+ "cannot skip backwards from 0x" + Long.toString(this.pos, 16) + " (" + this.pos +
+ ") to 0x" + Long.toString(pos, 16)+ " (" + pos + ")");
+ final long originalPos = this.pos;
+ while (this.pos < pos) {
+ if (skip(pos - this.pos) <= 0) {
+ throw new EOFException(String.format(
+ "Asked to skip from %,d to %,d (%,d bytes) but only skipped %,d bytes; new pos = %,d; soft EOF = %,d",
+ originalPos, pos, pos - originalPos, this.pos - originalPos, this.pos, softEof));
+ }
+ }
+ if (pos != this.pos)
+ throw new IllegalStateException();
+ }
+}
diff --git a/src/main/java/net/querz/io/Serializer.java b/src/main/java/io/github/ensgijs/nbt/io/Serializer.java
similarity index 84%
rename from src/main/java/net/querz/io/Serializer.java
rename to src/main/java/io/github/ensgijs/nbt/io/Serializer.java
index a6c9377a..7aa9cb0d 100644
--- a/src/main/java/net/querz/io/Serializer.java
+++ b/src/main/java/io/github/ensgijs/nbt/io/Serializer.java
@@ -1,4 +1,4 @@
-package net.querz.io;
+package io.github.ensgijs.nbt.io;
import java.io.BufferedOutputStream;
import java.io.ByteArrayOutputStream;
@@ -23,4 +23,7 @@ default byte[] toBytes(T object) throws IOException {
bos.close();
return bos.toByteArray();
}
+
+ boolean getSortCompoundTagEntries();
+ void setSortCompoundTagEntries(boolean sorted);
}
diff --git a/src/main/java/io/github/ensgijs/nbt/io/SilentIOException.java b/src/main/java/io/github/ensgijs/nbt/io/SilentIOException.java
new file mode 100644
index 00000000..17a6bc8d
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/io/SilentIOException.java
@@ -0,0 +1,25 @@
+package io.github.ensgijs.nbt.io;
+
+import java.io.IOException;
+
+/**
+ * Used to wrap/throw IOExceptions in contexts where checked exceptions cannot be used,
+ * such as when implementing Iterator.next()
+ */
+public class SilentIOException extends RuntimeException {
+ public SilentIOException() {
+ super();
+ }
+
+ public SilentIOException(String message) {
+ super(message);
+ }
+
+ public SilentIOException(String message, IOException cause) {
+ super(message, cause);
+ }
+
+ public SilentIOException(IOException cause) {
+ super(cause);
+ }
+}
diff --git a/src/main/java/net/querz/nbt/io/StringPointer.java b/src/main/java/io/github/ensgijs/nbt/io/StringPointer.java
similarity index 88%
rename from src/main/java/net/querz/nbt/io/StringPointer.java
rename to src/main/java/io/github/ensgijs/nbt/io/StringPointer.java
index fe37b20d..4a351f1b 100644
--- a/src/main/java/net/querz/nbt/io/StringPointer.java
+++ b/src/main/java/io/github/ensgijs/nbt/io/StringPointer.java
@@ -1,4 +1,4 @@
-package net.querz.nbt.io;
+package io.github.ensgijs.nbt.io;
public class StringPointer {
@@ -7,6 +7,20 @@ public class StringPointer {
public StringPointer(String value) {
this.value = value;
+ skipUtf8Bom();
+ }
+
+ public void reset() {
+ index = 0;
+ skipUtf8Bom();
+ }
+
+ /** Skips the UTF8 BOM (byte order mark) if the current index is 0, else does nothing.*/
+ private void skipUtf8Bom() {
+ if (index != 0) return;
+ if (hasNext() && next() != '\uFEFF') {
+ index = 0;
+ }
}
public int getIndex() {
diff --git a/src/main/java/io/github/ensgijs/nbt/io/TextNbtDeserializer.java b/src/main/java/io/github/ensgijs/nbt/io/TextNbtDeserializer.java
new file mode 100644
index 00000000..9e9d2c49
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/io/TextNbtDeserializer.java
@@ -0,0 +1,47 @@
+package io.github.ensgijs.nbt.io;
+
+import io.github.ensgijs.nbt.tag.Tag;
+
+import java.io.*;
+import java.nio.charset.StandardCharsets;
+import java.util.stream.Collectors;
+
+public class TextNbtDeserializer implements Deserializer<NamedTag> {
+
+ public NamedTag fromReader(Reader reader, int maxDepth) throws IOException {
+ BufferedReader bufferedReader;
+ if (reader instanceof BufferedReader) {
+ bufferedReader = (BufferedReader) reader;
+ } else {
+ bufferedReader = new BufferedReader(reader);
+ }
+ return new TextNbtParser(bufferedReader.lines().collect(Collectors.joining())).readTag(maxDepth);
+ }
+
+ public NamedTag fromReader(Reader reader) throws IOException {
+ return fromReader(reader, Tag.DEFAULT_MAX_DEPTH);
+ }
+
+ public NamedTag fromString(String s) throws IOException {
+ return fromReader(new StringReader(s));
+ }
+
+ @Override
+ public NamedTag fromStream(InputStream stream) throws IOException {
+ try (Reader reader = new InputStreamReader(stream, StandardCharsets.UTF_8)) {
+ return fromReader(reader);
+ }
+ }
+
+ @Override
+ public NamedTag fromFile(File file) throws IOException {
+ try (Reader reader = new FileReader(file)) {
+ return fromReader(reader);
+ }
+ }
+
+ @Override
+ public NamedTag fromBytes(byte[] data) throws IOException {
+ return fromReader(new StringReader(new String(data)));
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/io/TextNbtHelpers.java b/src/main/java/io/github/ensgijs/nbt/io/TextNbtHelpers.java
new file mode 100644
index 00000000..011a4643
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/io/TextNbtHelpers.java
@@ -0,0 +1,374 @@
+package io.github.ensgijs.nbt.io;
+
+import io.github.ensgijs.nbt.tag.Tag;
+import io.github.ensgijs.nbt.util.ArgValidator;
+import io.github.ensgijs.nbt.util.JsonPrettyPrinter;
+
+import java.io.*;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.Locale;
+import java.util.zip.GZIPInputStream;
+import java.util.zip.GZIPOutputStream;
+
+/**
+ * Utilities for converting {@link Tag}'s and {@link NamedTag}'s to and from string NBT text data and files.
+ * NOTE: {@link #readTextNbtFile(File)}, and its variants, can read both uncompressed (plain text) and GZ
+ * compressed files (usually ending in .gz file extension - but the extension itself is not evaluated, instead
+ * the gzip magic number/bom is looked for).
+ * NOTE: {@link #writeTextNbtFile(File, Tag)}, and its variants, can write both uncompressed (plain text) and GZ
+ * compressed files. If the given file name ends in the '.gz' extension it will be written as a compressed file,
+ * otherwise it will be written as plain text.
+ */
+public final class TextNbtHelpers {
+ private TextNbtHelpers() {}
+
+ //
+ public static String toTextNbt(NamedTag namedTag, boolean prettyPrint, boolean sortCompoundTagEntries) {
+ String snbt = new TextNbtSerializer(sortCompoundTagEntries).toString(namedTag);
+ return !prettyPrint ? snbt : JsonPrettyPrinter.prettyPrintJson(snbt);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static String toTextNbt(NamedTag namedTag, boolean prettyPrint) {
+ return toTextNbt(namedTag, prettyPrint, true);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static String toTextNbtUnsorted(NamedTag namedTag, boolean prettyPrint) {
+ return toTextNbt(namedTag, prettyPrint, false);
+ }
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static String toTextNbt(NamedTag namedTag) {
+ return toTextNbt(namedTag, true, true);
+ }
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static String toTextNbtUnsorted(NamedTag namedTag) {
+ return toTextNbt(namedTag, true, false);
+ }
+
+ public static String toTextNbt(Tag<?> tag, boolean prettyPrint, boolean sortCompoundTagEntries) {
+ return toTextNbt(new NamedTag(null, tag), prettyPrint, sortCompoundTagEntries);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static String toTextNbt(Tag<?> tag, boolean prettyPrint) {
+ return toTextNbt(new NamedTag(null, tag), prettyPrint, true);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static String toTextNbtUnsorted(Tag<?> tag, boolean prettyPrint) {
+ return toTextNbt(new NamedTag(null, tag), prettyPrint, false);
+ }
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static String toTextNbt(Tag<?> tag) {
+ return toTextNbt(new NamedTag(null, tag), true, true);
+ }
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static String toTextNbtUnsorted(Tag<?> tag) {
+ return toTextNbt(new NamedTag(null, tag), true, false);
+ }
+
+ public static NamedTag fromTextNbt(String string) throws IOException {
+ return new TextNbtDeserializer().fromString(string);
+ }
+ //
+
+
+ private static Path writeTextNbtFile0(Path filePath, Object tag, boolean prettyPrint, boolean sortCompoundTagEntries) throws IOException {
+ if (!filePath.getParent().toFile().exists()) {
+ ArgValidator.check(filePath.getParent().toFile().mkdirs(),
+ "Failed to create parent directory for " + filePath.toAbsolutePath());
+ }
+ byte[] data;
+ if (tag instanceof NamedTag) {
+ data = toTextNbt((NamedTag) tag, prettyPrint, sortCompoundTagEntries).getBytes(StandardCharsets.UTF_8);
+ } else {
+ data = toTextNbt((Tag<?>) tag, prettyPrint, sortCompoundTagEntries).getBytes(StandardCharsets.UTF_8);
+ }
+ if (!filePath.getFileName().toString().toLowerCase(Locale.ENGLISH).endsWith(".gz")) {
+ Files.write(filePath, data);
+ } else {
+ try (GZIPOutputStream gzOut = new GZIPOutputStream(new FileOutputStream(filePath.toFile()))) {
+ gzOut.write(data);
+ }
+ }
+ return filePath;
+ }
+
+ //
+ public static Path writeTextNbtFile(Path filePath, Tag<?> tag, boolean prettyPrint, boolean sortCompoundTagEntries) throws IOException {
+ return writeTextNbtFile0(filePath, tag, prettyPrint, sortCompoundTagEntries);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFile(Path filePath, Tag<?> tag, boolean prettyPrint) throws IOException {
+ return writeTextNbtFile(filePath, tag, prettyPrint, true);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFile(File file, Tag<?> tag, boolean prettyPrint) throws IOException {
+ return writeTextNbtFile(file.toPath(), tag, prettyPrint, true);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFile(String file, Tag<?> tag, boolean prettyPrint) throws IOException {
+ return writeTextNbtFile(Paths.get(file), tag, prettyPrint, true);
+ }
+
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFile(Path filePath, Tag<?> tag) throws IOException {
+ return writeTextNbtFile(filePath, tag, true, true);
+ }
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFile(File file, Tag<?> tag) throws IOException {
+ return writeTextNbtFile(file.toPath(), tag, true, true);
+ }
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFile(String file, Tag<?> tag) throws IOException {
+ return writeTextNbtFile(Paths.get(file), tag, true, true);
+ }
+
+
+ /**
+ * defaults to sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFileUnsorted(Path filePath, Tag<?> tag, boolean prettyPrint) throws IOException {
+ return writeTextNbtFile(filePath, tag, prettyPrint, false);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFileUnsorted(File file, Tag<?> tag, boolean prettyPrint) throws IOException {
+ return writeTextNbtFile(file.toPath(), tag, prettyPrint, false);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFileUnsorted(String file, Tag<?> tag, boolean prettyPrint) throws IOException {
+ return writeTextNbtFile(Paths.get(file), tag, prettyPrint, false);
+ }
+
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFileUnsorted(Path filePath, Tag<?> tag) throws IOException {
+ return writeTextNbtFile(filePath, tag, true, false);
+ }
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFileUnsorted(File file, Tag<?> tag) throws IOException {
+ return writeTextNbtFile(file.toPath(), tag, true, false);
+ }
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFileUnsorted(String file, Tag<?> tag) throws IOException {
+ return writeTextNbtFile(Paths.get(file), tag, true, false);
+ }
+ //
+
+
+ //
+ public static Path writeTextNbtFile(Path filePath, NamedTag tag, boolean prettyPrint, boolean sortCompoundTagEntries) throws IOException {
+ return writeTextNbtFile0(filePath, tag, prettyPrint, sortCompoundTagEntries);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFile(Path filePath, NamedTag tag, boolean prettyPrint) throws IOException {
+ return writeTextNbtFile(filePath, tag, prettyPrint, true);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFile(File file, NamedTag tag, boolean prettyPrint) throws IOException {
+ return writeTextNbtFile(file.toPath(), tag, prettyPrint, true);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFile(String file, NamedTag tag, boolean prettyPrint) throws IOException {
+ return writeTextNbtFile(Paths.get(file), tag, prettyPrint, true);
+ }
+
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFile(Path filePath, NamedTag tag) throws IOException {
+ return writeTextNbtFile(filePath, tag, true, true);
+ }
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFile(File file, NamedTag tag) throws IOException {
+ return writeTextNbtFile(file.toPath(), tag, true, true);
+ }
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=true
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFile(String file, NamedTag tag) throws IOException {
+ return writeTextNbtFile(Paths.get(file), tag, true, true);
+ }
+
+
+ /**
+ * defaults to sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFileUnsorted(Path filePath, NamedTag tag, boolean prettyPrint) throws IOException {
+ return writeTextNbtFile(filePath, tag, prettyPrint, false);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFileUnsorted(File file, NamedTag tag, boolean prettyPrint) throws IOException {
+ return writeTextNbtFile(file.toPath(), tag, prettyPrint, false);
+ }
+
+ /**
+ * defaults to sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFileUnsorted(String file, NamedTag tag, boolean prettyPrint) throws IOException {
+ return writeTextNbtFile(Paths.get(file), tag, prettyPrint, false);
+ }
+
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFileUnsorted(Path filePath, NamedTag tag) throws IOException {
+ return writeTextNbtFile(filePath, tag, true, false);
+ }
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFileUnsorted(File file, NamedTag tag) throws IOException {
+ return writeTextNbtFile(file.toPath(), tag, true, false);
+ }
+
+ /**
+ * defaults to prettyPrint=true, sortCompoundTagEntries=false
+ * @see JsonPrettyPrinter#DEFAULT_SINGLE_LINE_MAX_LENGTH
+ */
+ public static Path writeTextNbtFileUnsorted(String file, NamedTag tag) throws IOException {
+ return writeTextNbtFile(Paths.get(file), tag, true, false);
+ }
+ //
+
+ //
+ public static NamedTag readTextNbt(InputStream is) throws IOException {
+ try (DataInputStream dis = new DataInputStream(detectDecompression(is))) {
+ return new TextNbtDeserializer().fromStream(dis);
+ }
+ }
+ /** The given file can be either plain text (uncompressed) or gz compressed. */
+ public static NamedTag readTextNbtFile(File file) throws IOException {
+ return readTextNbt(new FileInputStream(file));
+ }
+
+ /** The given file can be either plain text (uncompressed) or gz compressed. */
+ public static NamedTag readTextNbtFile(String file) throws IOException {
+ return readTextNbtFile(new File(file));
+ }
+
+ /** The given file can be either plain text (uncompressed) or gz compressed. */
+ public static NamedTag readTextNbtFile(Path path) throws IOException {
+ return readTextNbtFile(path.toFile());
+ }
+ //
+
+ static InputStream detectDecompression(InputStream is) throws IOException {
+ PushbackInputStream pbis = new PushbackInputStream(is, 2);
+ int b0 = pbis.read();
+ int b1 = pbis.read();
+ int signature = (b0 & 0xFF) | (b1 << 8);
+ if (b1 >= 0) pbis.unread(b1);
+ if (b0 >= 0) pbis.unread(b0);
+ if (signature == GZIPInputStream.GZIP_MAGIC) {
+ return new GZIPInputStream(pbis);
+ }
+ return pbis;
+ }
+}
diff --git a/src/main/java/net/querz/nbt/io/SNBTParser.java b/src/main/java/io/github/ensgijs/nbt/io/TextNbtParser.java
similarity index 71%
rename from src/main/java/net/querz/nbt/io/SNBTParser.java
rename to src/main/java/io/github/ensgijs/nbt/io/TextNbtParser.java
index 7d8d2dae..54ebee4f 100644
--- a/src/main/java/net/querz/nbt/io/SNBTParser.java
+++ b/src/main/java/io/github/ensgijs/nbt/io/TextNbtParser.java
@@ -1,26 +1,27 @@
-package net.querz.nbt.io;
-
-import net.querz.io.MaxDepthIO;
-import net.querz.nbt.tag.ArrayTag;
-import net.querz.nbt.tag.ByteArrayTag;
-import net.querz.nbt.tag.ByteTag;
-import net.querz.nbt.tag.CompoundTag;
-import net.querz.nbt.tag.DoubleTag;
-import net.querz.nbt.tag.EndTag;
-import net.querz.nbt.tag.FloatTag;
-import net.querz.nbt.tag.IntArrayTag;
-import net.querz.nbt.tag.IntTag;
-import net.querz.nbt.tag.ListTag;
-import net.querz.nbt.tag.LongArrayTag;
-import net.querz.nbt.tag.LongTag;
-import net.querz.nbt.tag.ShortTag;
-import net.querz.nbt.tag.StringTag;
-import net.querz.nbt.tag.Tag;
+package io.github.ensgijs.nbt.io;
+
+import io.github.ensgijs.nbt.tag.ArrayTag;
+import io.github.ensgijs.nbt.tag.ByteArrayTag;
+import io.github.ensgijs.nbt.tag.ByteTag;
+import io.github.ensgijs.nbt.tag.CompoundTag;
+import io.github.ensgijs.nbt.tag.DoubleTag;
+import io.github.ensgijs.nbt.tag.EndTag;
+import io.github.ensgijs.nbt.tag.FloatTag;
+import io.github.ensgijs.nbt.tag.IntArrayTag;
+import io.github.ensgijs.nbt.tag.IntTag;
+import io.github.ensgijs.nbt.tag.ListTag;
+import io.github.ensgijs.nbt.tag.LongArrayTag;
+import io.github.ensgijs.nbt.tag.LongTag;
+import io.github.ensgijs.nbt.tag.ShortTag;
+import io.github.ensgijs.nbt.tag.StringTag;
+import io.github.ensgijs.nbt.tag.Tag;
+
+import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Pattern;
-public final class SNBTParser implements MaxDepthIO {
+public final class TextNbtParser implements MaxDepthIO, NbtInput {
private static final Pattern
FLOAT_LITERAL_PATTERN = Pattern.compile("^[-+]?(?:\\d+\\.?|\\d*\\.\\d+)(?:e[-+]?\\d+)?f$", Pattern.CASE_INSENSITIVE),
@@ -34,16 +35,51 @@ public final class SNBTParser implements MaxDepthIO {
private StringPointer ptr;
- public SNBTParser(String string) {
+ public TextNbtParser(String string) {
this.ptr = new StringPointer(string);
}
+ @Override
+ public NamedTag readTag(int maxDepth) throws IOException {
+ ptr.reset();
+ ptr.skipWhitespace();
+ if (!ptr.hasNext()) return null;
+ String name = ptr.currentChar() == '"' ? ptr.parseQuotedString() : ptr.parseSimpleString();
+ // note to future self: if you're ever compelled to set NamedTag's name to null if it's empty
+ // consider changing TextNbtWriter#writeAnything(NamedTag, int)'s behavior to match
+ ptr.skipWhitespace();
+ if (ptr.hasNext() && ptr.next() ==':') {
+ ptr.skipWhitespace();
+ if (!ptr.hasNext()) {
+ throw ptr.parseException("unexpected end of input - no value after name:");
+ }
+ return new NamedTag(name, parseAnything(maxDepth));
+ }
+ return new NamedTag(null, readRawTag(maxDepth));
+ }
+
+ @Override
+ public Tag<?> readRawTag(int maxDepth) throws IOException {
+ ptr.reset();
+ ptr.skipWhitespace();
+ if (!ptr.hasNext()) return null;
+ return parseAnything(maxDepth);
+ }
+
+ /**
+ *
+ * @param maxDepth
+ * @param lenient allows trailing content to follow the text nbt data - this could be useful if multiple
+ * text nbt's are present without a ListTag being used.
+ * @return
+ * @throws ParseException
+ */
public Tag> parse(int maxDepth, boolean lenient) throws ParseException {
Tag> tag = parseAnything(maxDepth);
if (!lenient) {
ptr.skipWhitespace();
if (ptr.hasNext()) {
- throw ptr.parseException("invalid characters after end of snbt");
+ throw ptr.parseException("invalid characters after end of text nbt");
}
}
return tag;
@@ -57,6 +93,21 @@ public Tag> parse() throws ParseException {
return parse(Tag.DEFAULT_MAX_DEPTH, false);
}
+ /**
+ * Useful for parsing a text nbt tag used in code - generally {@link #parse()}, or one of its overloads,
+ * should be used for all other situations.
+ * Traps and rethrows any checked {@link ParseException}'s as a runtime
+ * {@link ParseException.SilentParseException}.
+ */
+ @SuppressWarnings("unchecked")
+ public static <T extends Tag<?>> T parseInline(String nbtText) throws ParseException.SilentParseException {
+ try {
+ return (T) new TextNbtParser(nbtText).parse();
+ } catch (ParseException ex) {
+ throw new ParseException.SilentParseException(ex);
+ }
+ }
+
public int getReadChars() {
return ptr.getIndex() + 1;
}
@@ -82,7 +133,7 @@ private Tag> parseStringOrLiteral() throws ParseException {
}
String s = ptr.parseSimpleString();
if (s.isEmpty()) {
- throw new ParseException("expected non empty value");
+ throw ptr.parseException("expected non empty value");
}
if (FLOAT_LITERAL_PATTERN.matcher(s).matches()) {
return new FloatTag(Float.parseFloat(s.substring(0, s.length() - 1)));
@@ -132,7 +183,7 @@ private CompoundTag parseCompoundTag(int maxDepth) throws ParseException {
ptr.skipWhitespace();
String key = ptr.currentChar() == '"' ? ptr.parseQuotedString() : ptr.parseSimpleString();
if (key.isEmpty()) {
- throw new ParseException("empty keys are not allowed");
+ throw ptr.parseException("empty keys are not allowed");
}
ptr.expectChar(':');
@@ -178,7 +229,7 @@ private ArrayTag> parseNumArray() throws ParseException {
case 'L':
return parseLongArrayTag();
}
- throw new ParseException("invalid array type '" + arrayType + "'");
+ throw ptr.parseException("invalid array type '" + arrayType + "'");
}
private ByteArrayTag parseByteArrayTag() throws ParseException {
diff --git a/src/main/java/io/github/ensgijs/nbt/io/TextNbtSerializer.java b/src/main/java/io/github/ensgijs/nbt/io/TextNbtSerializer.java
new file mode 100644
index 00000000..3f972cd1
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/io/TextNbtSerializer.java
@@ -0,0 +1,61 @@
+package io.github.ensgijs.nbt.io;
+
+import io.github.ensgijs.nbt.tag.Tag;
+
+import java.io.*;
+
+public class TextNbtSerializer implements Serializer<NamedTag> {
+ private boolean sortCompoundTagEntries;
+
+ public TextNbtSerializer(boolean sortCompoundTagEntries) {
+ this.sortCompoundTagEntries = sortCompoundTagEntries;
+ }
+
+ public void toWriter(NamedTag tag, Writer writer) throws IOException {
+ TextNbtWriter.write(tag, writer, sortCompoundTagEntries, Tag.DEFAULT_MAX_DEPTH);
+ }
+
+ public void toWriter(NamedTag tag, Writer writer, int maxDepth) throws IOException {
+ TextNbtWriter.write(tag, writer, sortCompoundTagEntries, maxDepth);
+ }
+
+ public String toString(NamedTag object) {
+ return toString(object, Tag.DEFAULT_MAX_DEPTH);
+ }
+
+ public String toString(NamedTag object, int maxDepth) {
+ Writer writer = new StringWriter();
+ try {
+ toWriter(object, writer, maxDepth);
+ writer.flush();
+ } catch (IOException ex) {
+ // this case should (probably) never happen so just wrap and toss if it ever does
+ throw new RuntimeException(ex);
+ }
+ return writer.toString();
+ }
+
+ @Override
+ public void toStream(NamedTag object, OutputStream stream) throws IOException {
+ Writer writer = new OutputStreamWriter(stream);
+ toWriter(object, writer);
+ writer.flush();
+ }
+
+ @Override
+ public void toFile(NamedTag object, File file) throws IOException {
+ try (Writer writer = new FileWriter(file)) {
+ toWriter(object, writer);
+ }
+ }
+
+ @Override
+ public boolean getSortCompoundTagEntries() {
+ return sortCompoundTagEntries;
+ }
+
+ @Override
+ public void setSortCompoundTagEntries(boolean sorted) {
+ sortCompoundTagEntries = sorted;
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/io/TextNbtWriter.java b/src/main/java/io/github/ensgijs/nbt/io/TextNbtWriter.java
new file mode 100644
index 00000000..85ebe6c9
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/io/TextNbtWriter.java
@@ -0,0 +1,133 @@
+package io.github.ensgijs.nbt.io;
+
+import io.github.ensgijs.nbt.tag.ByteArrayTag;
+import io.github.ensgijs.nbt.tag.ByteTag;
+import io.github.ensgijs.nbt.tag.CompoundTag;
+import io.github.ensgijs.nbt.tag.DoubleTag;
+import io.github.ensgijs.nbt.tag.EndTag;
+import io.github.ensgijs.nbt.tag.FloatTag;
+import io.github.ensgijs.nbt.tag.IntArrayTag;
+import io.github.ensgijs.nbt.tag.IntTag;
+import io.github.ensgijs.nbt.tag.ListTag;
+import io.github.ensgijs.nbt.tag.LongArrayTag;
+import io.github.ensgijs.nbt.tag.LongTag;
+import io.github.ensgijs.nbt.tag.ShortTag;
+import io.github.ensgijs.nbt.tag.StringTag;
+import io.github.ensgijs.nbt.tag.Tag;
+import java.io.IOException;
+import java.io.Writer;
+import java.lang.reflect.Array;
+import java.util.Iterator;
+
+/**
+ * TextNbtWriter creates a text NBT String.
+ */
+public final class TextNbtWriter implements MaxDepthIO {
+
+ private Writer writer;
+
+ private TextNbtWriter(Writer writer) {
+ this.writer = writer;
+ }
+
+ public static void write(NamedTag tag, Writer writer, boolean sortCompoundTagEntries, int maxDepth) throws IOException {
+ new TextNbtWriter(writer).writeAnything(tag, sortCompoundTagEntries, maxDepth);
+ }
+
+ public static void write(NamedTag tag, Writer writer, int maxDepth) throws IOException {
+ new TextNbtWriter(writer).writeAnything(tag, false, maxDepth);
+ }
+
+ public static void write(NamedTag tag, Writer writer) throws IOException {
+ write(tag, writer, Tag.DEFAULT_MAX_DEPTH);
+ }
+
+ public static void write(Tag<?> tag, Writer writer, int maxDepth) throws IOException {
+ new TextNbtWriter(writer).writeAnything(tag, false, maxDepth);
+ }
+
+ public static void write(Tag<?> tag, Writer writer) throws IOException {
+ write(tag, writer, Tag.DEFAULT_MAX_DEPTH);
+ }
+
+ private void writeAnything(NamedTag tag, boolean sortCompoundTagEntries, int maxDepth) throws IOException {
+ // note to future self: if you're ever compelled not to write an empty name be sure to
+ // consider what that means for TextNbtParser#readTag(int)
+ if (tag.getName() != null) {
+ writer.write(tag.getEscapedName());
+ writer.write(':');
+ }
+ writeAnything(tag.getTag(), sortCompoundTagEntries, maxDepth);
+ }
+
+ private void writeAnything(Tag<?> tag, boolean sortCompoundTagEntries, int maxDepth) throws IOException {
+ switch (tag.getID()) {
+ case EndTag.ID:
+ //do nothing
+ break;
+ case ByteTag.ID:
+ writer.append(Byte.toString(((ByteTag) tag).asByte())).write('b');
+ break;
+ case ShortTag.ID:
+ writer.append(Short.toString(((ShortTag) tag).asShort())).write('s');
+ break;
+ case IntTag.ID:
+ writer.write(Integer.toString(((IntTag) tag).asInt()));
+ break;
+ case LongTag.ID:
+ writer.append(Long.toString(((LongTag) tag).asLong())).write('l');
+ break;
+ case FloatTag.ID:
+ writer.append(Float.toString(((FloatTag) tag).asFloat())).write('f');
+ break;
+ case DoubleTag.ID:
+ writer.append(Double.toString(((DoubleTag) tag).asDouble())).write('d');
+ break;
+ case ByteArrayTag.ID:
+ writeArray(((ByteArrayTag) tag).getValue(), ((ByteArrayTag) tag).length(), "B");
+ break;
+ case StringTag.ID:
+ writer.write(StringTag.escapeString(((StringTag) tag).getValue(), true));
+ break;
+ case ListTag.ID:
+ writer.write('[');
+ for (int i = 0; i < ((ListTag<?>) tag).size(); i++) {
+ writer.write(i == 0 ? "" : ",");
+ writeAnything(((ListTag<?>) tag).get(i), sortCompoundTagEntries, decrementMaxDepth(maxDepth));
+ }
+ writer.write(']');
+ break;
+ case CompoundTag.ID:
+ writer.write('{');
+ boolean first = true;
+ Iterator<NamedTag> iter;
+ if (sortCompoundTagEntries) iter = ((CompoundTag) tag).stream().sorted(NamedTag::compare).iterator();
+ else iter = ((CompoundTag) tag).iterator();
+ while (iter.hasNext()) {
+ NamedTag entry = iter.next();
+ writer.write(first ? "" : ",");
+ writer.append(NamedTag.escapeName(entry.getName())).write(':');
+ writeAnything(entry.getTag(), sortCompoundTagEntries, decrementMaxDepth(maxDepth));
+ first = false;
+ }
+ writer.write('}');
+ break;
+ case IntArrayTag.ID:
+ writeArray(((IntArrayTag) tag).getValue(), ((IntArrayTag) tag).length(), "I");
+ break;
+ case LongArrayTag.ID:
+ writeArray(((LongArrayTag) tag).getValue(), ((LongArrayTag) tag).length(), "L");
+ break;
+ default:
+ throw new IOException("unknown tag with id \"" + tag.getID() + "\"");
+ }
+ }
+
+ private void writeArray(Object array, int length, String prefix) throws IOException {
+ writer.append('[').append(prefix).write(';');
+ for (int i = 0; i < length; i++) {
+ writer.append(i == 0 ? "" : ",").write(Array.get(array, i).toString());
+ }
+ writer.write(']');
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/ChunkBase.java b/src/main/java/io/github/ensgijs/nbt/mca/ChunkBase.java
new file mode 100644
index 00000000..ed25a330
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/ChunkBase.java
@@ -0,0 +1,478 @@
+package io.github.ensgijs.nbt.mca;
+
+import io.github.ensgijs.nbt.io.BinaryNbtDeserializer;
+import io.github.ensgijs.nbt.io.BinaryNbtSerializer;
+import io.github.ensgijs.nbt.io.CompressionType;
+import io.github.ensgijs.nbt.io.NamedTag;
+import io.github.ensgijs.nbt.mca.io.LoadFlags;
+import io.github.ensgijs.nbt.mca.io.MoveChunkFlags;
+import io.github.ensgijs.nbt.mca.util.*;
+import io.github.ensgijs.nbt.tag.CompoundTag;
+import io.github.ensgijs.nbt.tag.Tag;
+import io.github.ensgijs.nbt.util.ObservedCompoundTag;
+import io.github.ensgijs.nbt.query.NbtPath;
+
+import java.io.*;
+import java.util.Objects;
+import java.util.Set;
+import java.util.function.Function;
+
+/**
+ * Abstraction for the base of all chunk types. Not all chunks types are sectioned, that layer comes further up
+ * the hierarchy.
+ *
+ * Cautionary note to implementors - DO NOT USE INLINE MEMBER INITIALIZATION IN YOUR CLASSES
+ * Define all member initialization in {@link #initMembers()} or be very confused!
+ * Due to how Java initializes objects, this base class will call {@link #initReferences(long)} before any inline
+ * member initialization has occurred. The symptom of using inline member initialization is that you will get
+ * very confusing {@link NullPointerException}'s from within {@link #initReferences(long)} for members which
+ * are accessed by your {@link #initReferences(long)} implementation that you have defined inline initializers for
+ * because those initializers will not run until AFTER {@link #initReferences(long)} returns.
+ *
+ * It is however "safe" to use inline member initialization for any members which are not accessed from within
+ * {@link #initReferences(long)} - but unless you really fully understand the warning above and its full
+ * ramifications just don't do it.
+ *
+ * @see SectionedChunkBase
+ */
+public abstract class ChunkBase implements VersionedDataContainer, TagWrapper, TracksUnreadDataTags {
+
+ public static final int NO_CHUNK_COORD_SENTINEL = Integer.MIN_VALUE;
+
+ protected final long originalLoadFlags;
+ protected int dataVersion;
+ protected int chunkX = NO_CHUNK_COORD_SENTINEL;
+ protected int chunkZ = NO_CHUNK_COORD_SENTINEL;
+ // TODO: this partial state thing is questionable - evaluate if the semantics are valid or broken
+ protected boolean partial;
+ protected boolean raw;
+ protected int lastMCAUpdate;
+ /** Should be treated as effectively read-only by child classes until after {@link #initReferences}
+ * invocation has returned. */
+ protected CompoundTag data;
+ protected Set unreadDataTagKeys;
+
+ /**
+ * {@inheritDoc}
+ */
+ public Set getUnreadDataTagKeys() {
+ return unreadDataTagKeys;
+ }
+
+ /**
+ * {@inheritDoc}
+ * @return NotNull - if LoadFlags specified {@link LoadFlags#RAW} then the raw data is returned - else a new
+ * CompoundTag populated, by reference, with values that were not read during {@link #initReferences(long)}.
+ */
+ public CompoundTag getUnreadDataTags() {
+ if (raw) return data;
+ CompoundTag unread = new CompoundTag(unreadDataTagKeys.size());
+ data.forEach((k, v) -> {
+ if (unreadDataTagKeys.contains(k)) {
+ unread.put(k, v);
+ }
+ });
+ return unread;
+ }
+
+ /**
+ * Due to how Java initializes objects and how this class hierarchy is setup it is ill-advised to use inline member
+ * initialization because {@link #initReferences(long)} will be called before members are initialized which WILL
+ * result in very confusing {@link NullPointerException}'s being thrown from within {@link #initReferences(long)}.
+ * This is not a problem that can be solved by moving initialization into your constructors, because you must call
+ * the super constructor as the first line of your child constructor!
+ * So, to get around this hurdle, perform all member initialization you would normally inline in your
+ * class def, within this method instead. Implementers should never need to call this method themselves
+ * as ChunkBase will always call it, even from the default constructor. Remember to call {@code super();}
+ * from your default constructors to maintain this behavior.
+ */
+ protected void initMembers() { }
+
+ protected ChunkBase(int dataVersion) {
+ this.dataVersion = dataVersion;
+ this.originalLoadFlags = LoadFlags.LOAD_ALL_DATA;
+ this.lastMCAUpdate = (int)(System.currentTimeMillis() / 1000);
+ initMembers();
+ }
+
+ /**
+ * Create a new chunk based on raw base data from a region file.
+ * @param data The raw base data to be used.
+ */
+ public ChunkBase(CompoundTag data) {
+ this(data, LoadFlags.LOAD_ALL_DATA);
+ }
+
+ /**
+ * Create a new chunk based on raw base data from a region file.
+ * @param data The raw base data to be used.
+ * @param loadFlags Union of {@link LoadFlags} to process.
+ */
+ public ChunkBase(CompoundTag data, long loadFlags) {
+ this.data = data;
+ this.originalLoadFlags = loadFlags;
+ initMembers();
+ initReferences0(loadFlags);
+ }
+
+ private void initReferences0(long loadFlags) {
+ Objects.requireNonNull(data, "data cannot be null");
+ if ((loadFlags & LoadFlags.RAW) != 0) {
+ dataVersion = data.getInt("DataVersion");
+ raw = true;
+ } else {
+ final ObservedCompoundTag observedData = new ObservedCompoundTag(data);
+ dataVersion = observedData.getInt("DataVersion");
+ if (dataVersion == 0) {
+ throw new IllegalArgumentException("data does not contain \"DataVersion\" tag");
+ }
+
+ data = observedData;
+ initReferences(loadFlags);
+ if (data != observedData) {
+ throw new IllegalStateException("this.data was replaced during initReferences execution - this breaks unreadDataTagKeys behavior!");
+ }
+ unreadDataTagKeys = observedData.unreadKeys();
+
+ if ((loadFlags & LoadFlags.RELEASE_CHUNK_DATA_TAG) != 0) {
+ data = null;
+ // this is questionable... maybe if we also check that data version is within the known bounds too
+ // (to count it as non-partial) we could be reasonably confidant that the saved chunk would at least
+ // have a vanilla level of data.
+ if ((loadFlags & LoadFlags.LOAD_ALL_DATA) != LoadFlags.LOAD_ALL_DATA) partial = true;
+ } else {
+ // stop observing the data tag
+ data = observedData.wrappedTag();
+ }
+ }
+ }
+
+ /**
+ * Child classes should not call this method directly, it will be called for them.
+ * Raw and partial data handling is taken care of, this method will not be called if {@code loadFlags} is
+ * {@link LoadFlags#RAW}.
+ */
+ protected abstract void initReferences(final long loadFlags);
+
+ /**
+ * @return one of: region, entities, poi
+ */
+ public abstract String getMcaType();
+
+ /**
+ * {@inheritDoc}
+ */
+ public int getDataVersion() {
+ return dataVersion;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public void setDataVersion(int dataVersion) {
+ this.dataVersion = Math.max(0, dataVersion);
+ }
+
+ /**
+ * Gets this chunk's chunk-x coordinate. Returns {@link #NO_CHUNK_COORD_SENTINEL} if not supported or unknown.
+ * @see #moveChunk(int, int, long, boolean)
+ */
+ public int getChunkX() {
+ return chunkX;
+ }
+
+ /**
+ * Gets this chunk's chunk-z coordinate. Returns {@link #NO_CHUNK_COORD_SENTINEL} if not supported or unknown.
+ * @see #moveChunk(int, int, long, boolean)
+ */
+ public int getChunkZ() {
+ return chunkZ;
+ }
+
+ /**
+ * Gets this chunk's chunk-xz coordinates. Returns x = z = {@link #NO_CHUNK_COORD_SENTINEL} if not supported or unknown.
+ * @see #moveChunk(int, int, long, boolean)
+ */
+ public IntPointXZ getChunkXZ() {
+ return new IntPointXZ(getChunkX(), getChunkZ());
+ }
+
+ /**
+ * Indicates if this chunk implementation supports calling {@link #moveChunk(int, int, long, boolean)}.
+ * @return false if {@link #moveChunk(int, int, long, boolean)} is not implemented (calling it will always throw).
+ */
+ public abstract boolean moveChunkImplemented();
+
+ /**
+ * Indicates if the current chunk can be be moved with confidence or not. If this function returns false
+ * and {@link #moveChunkImplemented()} returns true then you must use {@code moveChunk(x, z, true)} to attempt
+ * a best effort move.
+ */
+ public abstract boolean moveChunkHasFullVersionSupport();
+
+ /**
+ * Attempts to update all tags that use absolute positions within this chunk.
+ * Call {@link #moveChunkImplemented()} to check if this implementation supports chunk relocation. Also
+ * check the result of {@link #moveChunkHasFullVersionSupport()} to get an idea of the level of support
+ * this implementation has for the current chunk.
+ *
If {@code force = true} the result of calling this function cannot be guaranteed to be complete and
+ * may still throw {@link UnsupportedOperationException}.
+ * @param newChunkX new absolute chunk-x
+ * @param newChunkZ new absolute chunk-z
+ * @param moveChunkFlags {@link MoveChunkFlags} OR'd together to control move chunk behavior.
+ * @param force true to ignore the guidance of {@link #moveChunkHasFullVersionSupport()} and make a best effort
+ * anyway.
+ * @return true if any data was changed as a result of this call
+ * @throws UnsupportedOperationException thrown if this chunk implementation doest support moves, or moves
+ * for this chunks version (possibly even if force = true).
+ */
+ public abstract boolean moveChunk(int newChunkX, int newChunkZ, long moveChunkFlags, boolean force);
+
+ /**
+ * Calls {@code moveChunk(newChunkX, newChunkZ, moveChunkFlags, false);}
+ * @see #moveChunk(int, int, long, boolean)
+ */
+ public boolean moveChunk(int chunkX, int chunkZ, long moveChunkFlags) {
+ return moveChunk(chunkX, chunkZ, moveChunkFlags, false);
+ }
+
+ /**
+ * Serializes this chunk to a DataOutput sink.
+ * @param sink The DataOutput to be written to.
+ * @param xPos The x-coordinate of the chunk.
+ * @param zPos The z-coordinate of the chunk.
+ * @param compressionType Chunk compression strategy to use.
+ * @param writeByteLengthPrefixInt when true the first thing written to the sink will be the total bytes written
+ * (a value equal to 4 less than the return value).
+ * @return The amount of bytes written to the DataOutput.
+ * @throws UnsupportedOperationException When something went wrong during writing.
+ * @throws IOException When something went wrong during writing.
+ */
+ public int serialize(DataOutput sink, int xPos, int zPos, CompressionType compressionType, boolean writeByteLengthPrefixInt) throws IOException {
+ if (partial) {
+ throw new UnsupportedOperationException("Partially loaded chunks cannot be serialized");
+ }
+ ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
+ new BinaryNbtSerializer(compressionType).toStream(new NamedTag(null, updateHandle(xPos, zPos)), baos);
+// try (BufferedOutputStream nbtOut = new BufferedOutputStream(compressionType.compress(baos))) {
+// new BinaryNbtSerializer(false).toStream(new NamedTag(null, updateHandle(xPos, zPos)), nbtOut);
+// }
+ byte[] rawData = baos.toByteArray();
+ if (writeByteLengthPrefixInt)
+ sink.writeInt(rawData.length + 1); // including the byte to store the compression type
+ sink.writeByte(compressionType.getID());
+ sink.write(rawData);
+ return rawData.length + (writeByteLengthPrefixInt ? 5 : 1);
+ }
+
+ /**
+ * Reads chunk data from a RandomAccessFile. The RandomAccessFile must already be at the correct position.
+ *
It is expected that the byte size int has already been read and the next byte indicates the compression
+ * used. Essentially this method is symmetrical to {@link #serialize(DataOutput, int, int, CompressionType, boolean)}
+ * when passing writeByteLengthPrefixInt=false
+ * @param raf The RandomAccessFile to read the chunk data from.
+ * @param loadFlags A logical or of {@link LoadFlags} constants indicating what data should be loaded
+ * @param lastMCAUpdateTimestamp Last mca update timestamp - epoch seconds. If LT0 the current system timestamp will be used.
+ * @param chunkAbsXHint The absolute chunk x-coord which should be used if the nbt data doesn't contain this information.
+ * @param chunkAbsZHint The absolute chunk z-coord which should be used if the nbt data doesn't contain this information.
+ * @throws IOException When something went wrong during reading.
+ */
+ public void deserialize(RandomAccessFile raf, long loadFlags, int lastMCAUpdateTimestamp, int chunkAbsXHint, int chunkAbsZHint) throws IOException {
+ deserialize(new FileInputStream(raf.getFD()), loadFlags, lastMCAUpdateTimestamp, chunkAbsXHint, chunkAbsZHint);
+ }
+
+ /**
+ * Reads chunk data from an InputStream. The InputStream must already be at the correct position.
+ * It is expected that the byte size int has already been read and the next byte indicates the compression
+ * used. Essentially this method is symmetrical to {@link #serialize(DataOutput, int, int, CompressionType, boolean)}
+ * when passing writeByteLengthPrefixInt=false
+ * @param inputStream The stream to read the chunk data from.
+ * @param loadFlags A logical or of {@link LoadFlags} constants indicating what data should be loaded
+ * @param lastMCAUpdateTimestamp Last mca update timestamp - epoch seconds. If LT0 the current system timestamp will be used.
+ * @param chunkAbsXHint The absolute chunk x-coord which should be used if the nbt data doesn't contain this information.
+ * @param chunkAbsZHint The absolute chunk z-coord which should be used if the nbt data doesn't contain this information.
+ * @throws IOException When something went wrong during reading.
+ */
+ public void deserialize(InputStream inputStream, long loadFlags, int lastMCAUpdateTimestamp, int chunkAbsXHint, int chunkAbsZHint) throws IOException {
+ int compressionTypeByte = inputStream.read();
+ if (compressionTypeByte < 0)
+ throw new EOFException();
+ CompressionType compressionType = CompressionType.getFromID((byte) compressionTypeByte);
+ if (compressionType == null) {
+ throw new IOException("invalid compression type " + compressionTypeByte);
+ }
+ NamedTag tag = new BinaryNbtDeserializer(compressionType).fromStream(inputStream);
+ if (tag != null && tag.getTag() instanceof CompoundTag) {
+ data = (CompoundTag) tag.getTag();
+ this.lastMCAUpdate = lastMCAUpdateTimestamp >= 0 ? lastMCAUpdateTimestamp : (int)(System.currentTimeMillis() / 1000);
+ this.chunkX = chunkAbsXHint;
+ this.chunkZ = chunkAbsZHint;
+ initReferences0(loadFlags);
+ } else {
+ throw new IOException("invalid data tag: " + (tag == null ? "null" : tag.getClass().getName()));
+ }
+ }
+
+ /**
+ * @return The timestamp when this region file was last updated in seconds since 1970-01-01.
+ */
+ public int getLastMCAUpdate() {
+ return lastMCAUpdate;
+ }
+
+ /**
+ * Sets the timestamp when this region file was last updated in seconds since 1970-01-01.
+ * @param lastMCAUpdate The time in seconds since 1970-01-01.
+ */
+ public void setLastMCAUpdate(int lastMCAUpdate) {
+ checkRaw();
+ this.lastMCAUpdate = lastMCAUpdate;
+ }
+
+ /**
+ * @throws UnsupportedOperationException thrown if raw is true
+ */
+ protected void checkRaw() {
+ if (raw) {
+ throw new UnsupportedOperationException("Cannot use helpers for this field when working with raw data");
+ }
+ }
+
+ protected void checkPartial() {
+ if (data == null) {
+ throw new UnsupportedOperationException("Chunk was only partially loaded due to LoadFlags used");
+ }
+ }
+
+ protected void checkChunkXZ() {
+ if (chunkX == NO_CHUNK_COORD_SENTINEL || chunkZ == NO_CHUNK_COORD_SENTINEL) {
+ throw new UnsupportedOperationException("This chunk doesn't know its XZ location");
+ }
+ }
+
+ /**
+ * Provides a reference to the full chunk data.
+ * @return The full chunk data or null if there is none, e.g. when this chunk has only been loaded partially.
+ */
+ public CompoundTag getHandle() {
+ return data;
+ }
+
+ public CompoundTag updateHandle() {
+ if (data == null) {
+ throw new UnsupportedOperationException(
+ "Cannot updateHandle() because data tag is null. This is probably because "+
+ "the LoadFlag RELEASE_CHUNK_DATA_TAG was specified");
+ }
+ if (!raw) {
+ data.putInt("DataVersion", dataVersion);
+ }
+ return data;
+ }
+
+ // Note: Not all chunk formats store xz in their NBT, but {@link McaFileBase} will call this update method
+ // to give them the chance to record them.
+ public CompoundTag updateHandle(int xPos, int zPos) {
+ return updateHandle();
+ }
+
+
+ /**
+ * @param vaPath version aware nbt path
+ * @param Return Type
+ * @return tag value, or null if there is none, or if the given vaPath doesn't support the current version
+ */
+ protected > R getTag(VersionAware vaPath) {
+ NbtPath path = vaPath.get(dataVersion);
+ if (path == null) return null; // not supported by this version
+ return path.getTag(data);
+ }
+
+ /**
+ * Simple but powerful helper - example usage
+ * {@code long myLong = getTagValue(vaPath, LongTag::asLong, 0L);}
+ * @param vaPath version aware nbt path
+ * @param evaluator value provider, given the tag (iff not null)
+ * @param defaultValue value to return if the tag specified by vaPath does not exist
+ * @param Tag Type
+ * @param Return Type
+ * @return result of calling evaluator, or defaultValue if the tag didn't exist
+ */
+ protected , R> R getTagValue(VersionAware vaPath, Function evaluator, R defaultValue) {
+ TT tag = getTag(vaPath);
+ return tag != null ? evaluator.apply(tag) : defaultValue;
+ }
+
+ /**
+ * @param vaPath version aware nbt path
+ * @param evaluator value provider, given the tag (iff not null)
+ * @param Tag Type
+ * @param Return Type
+ * @return result of calling evaluator, or NULL if the tag didn't exist
+ */
+ protected , R> R getTagValue(VersionAware vaPath, Function evaluator) {
+ return getTagValue(vaPath, evaluator, null);
+ }
+
+ /**
+ * Sets the given tag, or removes it if null. If tag is not null, parent CompoundTags will be created as-needed.
+ * If the given vaPath does not support the current data version, then NO ACTION is performed.
+ * @param vaPath version aware nbt path
+ * @param tag tag value to set - if null then the value is REMOVED
+ */
+ protected void setTag(VersionAware vaPath, Tag> tag) {
+ NbtPath path = vaPath.get(dataVersion);
+ if (path == null) return; // not supported by this version
+ path.putTag(data, tag, tag != null);
+ }
+
+ /**
+ * Sets the given tag (if it's not null). Creates parent CompoundTags as-needed.
+ * If the given vaPath does not support the current data version, then NO ACTION is performed.
+ * @param vaPath version aware nbt path
+ * @param tag tag value to set - nothing happens if this value is null
+ */
+ protected void setTagIfNotNull(VersionAware vaPath, Tag> tag) {
+ if (tag != null) {
+ setTag(vaPath, tag);
+ }
+ }
+
+ /**
+ * @return Index of this chunk in its owning region file or -1 if either chunk X or Z is
+ * {@link #NO_CHUNK_COORD_SENTINEL}.
+ */
+ public int getIndex() {
+ if (getChunkX() != NO_CHUNK_COORD_SENTINEL && getChunkZ() != NO_CHUNK_COORD_SENTINEL) {
+ return McaFileBase.getChunkIndex(getChunkX(), getChunkZ());
+ }
+ return -1;
+ }
+
+ /**
+ * Gets the region file X coord which this chunk should belong to given its current {@link #getChunkX()}.
+ * Returns {@link #NO_CHUNK_COORD_SENTINEL} if {@link #getChunkX()} returns {@link #NO_CHUNK_COORD_SENTINEL}.
+ */
+ public int getRegionX() {
+ int x = getChunkX();
+ return x != NO_CHUNK_COORD_SENTINEL ? x >> 5 : NO_CHUNK_COORD_SENTINEL;
+ }
+
+ /**
+ * Gets the region file Z coord which this chunk should belong to given its current {@link #getChunkZ()}.
+ * Returns {@link #NO_CHUNK_COORD_SENTINEL} if {@link #getChunkX()} returns {@link #NO_CHUNK_COORD_SENTINEL}.
+ */
+ public int getRegionZ() {
+ int z = getChunkZ();
+ return z != NO_CHUNK_COORD_SENTINEL ? z >> 5 : NO_CHUNK_COORD_SENTINEL;
+ }
+
+ /**
+ * Gets the region file XZ coord which this chunk should belong to given its current {@link #getChunkXZ()}.
+ * Returns XZ({@link #NO_CHUNK_COORD_SENTINEL}, {@link #NO_CHUNK_COORD_SENTINEL}) if {@link #getChunkXZ()} returns
+ * XZ({@link #NO_CHUNK_COORD_SENTINEL}, {@link #NO_CHUNK_COORD_SENTINEL}).
+ */
+ public IntPointXZ getRegionXZ() {
+ return new IntPointXZ(getRegionX(), getRegionZ());
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/DataVersion.java b/src/main/java/io/github/ensgijs/nbt/mca/DataVersion.java
new file mode 100644
index 00000000..99e9adc4
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/DataVersion.java
@@ -0,0 +1,1161 @@
+package io.github.ensgijs.nbt.mca;
+
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.Locale;
+
+// source: version.json file, found in the root directory of the client and server jars
+// table of versions can also be found on https://minecraft.fandom.com/wiki/Data_version#List_of_data_versions
+// google sheet to help generate enum values https://docs.google.com/spreadsheets/d/1VVGUPe9sfsd3rsFYcBGDnt1bTifBh3dUkKV9vQvNWQY
+// - paste rows from the fandom table into the sheet and sort ascending by data version (if you don't sort it the mc version WILL BE WRONG!)
+//
+// As the wiki has been lacking in freshness lately the test DataVersionTest#testFetchMissingDataVersionInformation
+// will help keep this enum updated for all official builds - which may exclude experimental builds, but it sure
+// beats having to farm the data by hand.
+
+/**
+ * List of MC versions and MCA data versions back to 1.9.0
+ *
+ * TODO: weekly builds don't really fit with having a version but it's annoying to not have a version too - what to do?
+ *
+ */
+public enum DataVersion {
+ // TODO: document change history by digging through net.minecraft.util.datafix.DataConverterRegistry
+ // Kept in ASC order (unit test enforced)
+ UNKNOWN(0, 0, 0),
+
+ JAVA_1_9_15W32A(100, 9, 0, "15w32a"),
+ JAVA_1_9_15W32B(103, 9, 0, "15w32b"),
+ JAVA_1_9_15W32C(104, 9, 0, "15w32c"),
+ JAVA_1_9_15W33B(111, 9, 0, "15w33b"), // A and B have the same data version
+ JAVA_1_9_15W33C(112, 9, 0, "15w33c"),
+ JAVA_1_9_15W34A(114, 9, 0, "15w34a"),
+ JAVA_1_9_15W34B(115, 9, 0, "15w34b"),
+ JAVA_1_9_15W34C(116, 9, 0, "15w34c"),
+ JAVA_1_9_15W34D(117, 9, 0, "15w34d"),
+ JAVA_1_9_15W35A(118, 9, 0, "15w35a"),
+ JAVA_1_9_15W35B(119, 9, 0, "15w35b"),
+ JAVA_1_9_15W35C(120, 9, 0, "15w35c"),
+ JAVA_1_9_15W35D(121, 9, 0, "15w35d"),
+ JAVA_1_9_15W35E(122, 9, 0, "15w35e"),
+ JAVA_1_9_15W36A(123, 9, 0, "15w36a"),
+ JAVA_1_9_15W36B(124, 9, 0, "15w36b"),
+ JAVA_1_9_15W36C(125, 9, 0, "15w36c"),
+ JAVA_1_9_15W36D(126, 9, 0, "15w36d"),
+ JAVA_1_9_15W37A(127, 9, 0, "15w37a"),
+ JAVA_1_9_15W38A(128, 9, 0, "15w38a"),
+ JAVA_1_9_15W38B(129, 9, 0, "15w38b"),
+ JAVA_1_9_15W39A(130, 9, 0, "15w39a"),
+ JAVA_1_9_15W39B(131, 9, 0, "15w39b"),
+ JAVA_1_9_15W39C(132, 9, 0, "15w39c"),
+ JAVA_1_9_15W40A(133, 9, 0, "15w40a"),
+ JAVA_1_9_15W40B(134, 9, 0, "15w40b"),
+ JAVA_1_9_15W41A(136, 9, 0, "15w41a"),
+ JAVA_1_9_15W41B(137, 9, 0, "15w41b"),
+ JAVA_1_9_15W42A(138, 9, 0, "15w42a"),
+ JAVA_1_9_15W43A(139, 9, 0, "15w43a"),
+ JAVA_1_9_15W43B(140, 9, 0, "15w43b"),
+ JAVA_1_9_15W43C(141, 9, 0, "15w43c"),
+ JAVA_1_9_15W44A(142, 9, 0, "15w44a"),
+ JAVA_1_9_15W44B(143, 9, 0, "15w44b"),
+ JAVA_1_9_15W45A(145, 9, 0, "15w45a"),
+ JAVA_1_9_15W46A(146, 9, 0, "15w46a"),
+ JAVA_1_9_15W47A(148, 9, 0, "15w47a"),
+ JAVA_1_9_15W47B(149, 9, 0, "15w47b"),
+ JAVA_1_9_15W47C(150, 9, 0, "15w47c"),
+ JAVA_1_9_15W49A(151, 9, 0, "15w49a"),
+ JAVA_1_9_15W49B(152, 9, 0, "15w49b"),
+ JAVA_1_9_15W50A(153, 9, 0, "15w50a"),
+ JAVA_1_9_15W51A(154, 9, 0, "15w51a"),
+ JAVA_1_9_15W51B(155, 9, 0, "15w51b"),
+ JAVA_1_9_16W02A(156, 9, 0, "16w02a"),
+ JAVA_1_9_16W03A(157, 9, 0, "16w03a"),
+ JAVA_1_9_16W04A(158, 9, 0, "16w04a"),
+ JAVA_1_9_16W05A(159, 9, 0, "16w05a"),
+ JAVA_1_9_16W05B(160, 9, 0, "16w05b"),
+ JAVA_1_9_16W06A(161, 9, 0, "16w06a"),
+ JAVA_1_9_16W07A(162, 9, 0, "16w07a"),
+ JAVA_1_9_16W07B(163, 9, 0, "16w07b"),
+ JAVA_1_9_PRE1(164, 9, 0, "PRE1"),
+ JAVA_1_9_PRE2(165, 9, 0, "PRE2"),
+ JAVA_1_9_PRE3(167, 9, 0, "PRE3"),
+ JAVA_1_9_PRE4(168, 9, 0, "PRE4"),
+ JAVA_1_9_0(169, 9, 0),
+ JAVA_1_9_1_PRE1(170, 9, 1, "PRE1"),
+ JAVA_1_9_1_PRE2(171, 9, 1, "PRE2"),
+ JAVA_1_9_1_PRE3(172, 9, 1, "PRE3"),
+ JAVA_1_9_1(175, 9, 1),
+ JAVA_1_9_2(176, 9, 2),
+ JAVA_1_9_3_16W14A(177, 9, 3, "16w14a"),
+ JAVA_1_9_3_16W15A(178, 9, 3, "16w15a"),
+ JAVA_1_9_3_16W15B(179, 9, 3, "16w15b"),
+ JAVA_1_9_3_PRE1(180, 9, 3, "PRE1"),
+ JAVA_1_9_3_PRE2(181, 9, 3, "PRE2"),
+ JAVA_1_9_3_PRE3(182, 9, 3, "PRE3"),
+ JAVA_1_9_3(183, 9, 3),
+ JAVA_1_9_4(184, 9, 4),
+ JAVA_1_10_16W20A(501, 10, 0, "16w20a"),
+ JAVA_1_10_16W21A(503, 10, 0, "16w21a"),
+ JAVA_1_10_16W21B(504, 10, 0, "16w21b"),
+ JAVA_1_10_PRE1(506, 10, 0, "PRE1"),
+ JAVA_1_10_PRE2(507, 10, 0, "PRE2"),
+ JAVA_1_10_0(510, 10, 0),
+ JAVA_1_10_1(511, 10, 1),
+ JAVA_1_10_2(512, 10, 2),
+ JAVA_1_11_16W32A(800, 11, 0, "16w32a"),
+ JAVA_1_11_16W32B(801, 11, 0, "16w32b"),
+ JAVA_1_11_16W33A(802, 11, 0, "16w33a"),
+ JAVA_1_11_16W35A(803, 11, 0, "16w35a"),
+ JAVA_1_11_16W36A(805, 11, 0, "16w36a"),
+ JAVA_1_11_16W38A(807, 11, 0, "16w38a"),
+ JAVA_1_11_16W39A(809, 11, 0, "16w39a"),
+ JAVA_1_11_16W39B(811, 11, 0, "16w39b"),
+ JAVA_1_11_16W39C(812, 11, 0, "16w39c"),
+ JAVA_1_11_16W40A(813, 11, 0, "16w40a"),
+ JAVA_1_11_16W41A(814, 11, 0, "16w41a"),
+ JAVA_1_11_16W42A(815, 11, 0, "16w42a"),
+ JAVA_1_11_16W43A(816, 11, 0, "16w43a"),
+ JAVA_1_11_16W44A(817, 11, 0, "16w44a"),
+ JAVA_1_11_PRE1(818, 11, 0, "PRE1"),
+ JAVA_1_11_0(819, 11, 0),
+ JAVA_1_11_1_16W50A(920, 11, 1, "16w50a"),
+ JAVA_1_11_1(921, 11, 1),
+ JAVA_1_11_2(922, 11, 2),
+ JAVA_1_12_17W06A(1022, 12, 0, "17w06a"),
+ JAVA_1_12_17W13A(1122, 12, 0, "17w13a"),
+ JAVA_1_12_17W13B(1123, 12, 0, "17w13b"),
+ JAVA_1_12_17W14A(1124, 12, 0, "17w14a"),
+ JAVA_1_12_17W15A(1125, 12, 0, "17w15a"),
+ JAVA_1_12_17W16A(1126, 12, 0, "17w16a"),
+ JAVA_1_12_17W16B(1127, 12, 0, "17w16b"),
+ JAVA_1_12_17W17A(1128, 12, 0, "17w17a"),
+ JAVA_1_12_17W17B(1129, 12, 0, "17w17b"),
+ JAVA_1_12_17W18A(1130, 12, 0, "17w18a"),
+ JAVA_1_12_17W18B(1131, 12, 0, "17w18b"),
+ JAVA_1_12_PRE1(1132, 12, 0, "PRE1"),
+ JAVA_1_12_PRE2(1133, 12, 0, "PRE2"),
+ JAVA_1_12_PRE3(1134, 12, 0, "PRE3"),
+ JAVA_1_12_PRE4(1135, 12, 0, "PRE4"),
+ JAVA_1_12_PRE5(1136, 12, 0, "PRE5"),
+ JAVA_1_12_PRE6(1137, 12, 0, "PRE6"),
+ JAVA_1_12_PRE7(1138, 12, 0, "PRE7"),
+ JAVA_1_12_0(1139, 12, 0),
+ JAVA_1_12_1_17W31A(1239, 12, 1, "17w31a"),
+ JAVA_1_12_1_PRE1(1240, 12, 1, "PRE1"),
+ JAVA_1_12_1(1241, 12, 1),
+ JAVA_1_12_2_PRE1(1341, 12, 2, "PRE1"),
+ JAVA_1_12_2_PRE2(1342, 12, 2, "PRE2"),
+ JAVA_1_12_2(1343, 12, 2),
+ JAVA_1_13_17W43A(1444, 13, 0, "17w43a"),
+ JAVA_1_13_17W43B(1445, 13, 0, "17w43b"),
+ JAVA_1_13_17W45A(1447, 13, 0, "17w45a"),
+ JAVA_1_13_17W45B(1448, 13, 0, "17w45b"),
+ JAVA_1_13_17W46A(1449, 13, 0, "17w46a"),
+ /** "Blocks" and "Data" were replaced with block palette */
+ JAVA_1_13_17W47A(1451, 13, 0, "17w47a"),
+ JAVA_1_13_17W47B(1452, 13, 0, "17w47b"),
+ JAVA_1_13_17W48A(1453, 13, 0, "17w48a"),
+ JAVA_1_13_17W49A(1454, 13, 0, "17w49a"),
+ JAVA_1_13_17W49B(1455, 13, 0, "17w49b"),
+ JAVA_1_13_17W50A(1457, 13, 0, "17w50a"),
+ JAVA_1_13_18W01A(1459, 13, 0, "18w01a"),
+ JAVA_1_13_18W02A(1461, 13, 0, "18w02a"),
+ JAVA_1_13_18W03A(1462, 13, 0, "18w03a"),
+ JAVA_1_13_18W03B(1463, 13, 0, "18w03b"),
+ JAVA_1_13_18W05A(1464, 13, 0, "18w05a"),
+ /**
+ * Biome data now stored in IntArrayTag instead of ByteArrayTag (still 2D using only 256 entries).
+ * Tags Removed
+ *
+ * - region: Level.Biomes <ByteArrayTag> (type changed)
+ * - region: Level.HeightMap <IntArrayTag>
+ * - region: Level.LightPopulated <ByteTag>
+ * - region: Level.TerrainPopulated <ByteTag> (replaced by Status string)
+ *
+ * Tags Added
+ *
+ * - region: Level.Biomes <IntArrayTag>
+ * - region: Level.Heightmaps <CompoundTag>
+ * - region: Level.Heightmaps.LIGHT <LongArrayTag>
+ * - region: Level.Heightmaps.LIQUID <LongArrayTag>
+ * - region: Level.Heightmaps.RAIN <LongArrayTag>
+ * - region: Level.Heightmaps.SOLID <LongArrayTag>
+ * - region: Level.Lights <ListTag<ListTag<ShortTag>>>
+ * - region: Level.PostProcessing <ListTag<ListTag<ShortTag>>>
+ * - region: Level.Status <StringTag>
+ * - region: Level.Structures <CompoundTag>
+ * - region: Level.Structures.References <CompoundTag>
+ * <p>Keys are the name of a structure type such as "Desert_Pyramid".
+ * <p>Values are <LongArrayTag> which are packed chunk coordinates where Z is packed in the high 32 bits and X is in the low 32 bits.
+ * - region: Level.Structures.Starts <CompoundTag>
+ * <p>Keys are the name of a structure type such as "Desert_Pyramid".
+ * <p>Values are <CompoundTag> defining structure bounds and generation information.
+ * - region: Level.ToBeTicked <ListTag<ListTag<ShortTag>>>
+ *
+ */
+ JAVA_1_13_18W06A(1466, 13, 0, "18w06a"),
+ JAVA_1_13_18W07A(1467, 13, 0, "18w07a"),
+ JAVA_1_13_18W07B(1468, 13, 0, "18w07b"),
+ JAVA_1_13_18W07C(1469, 13, 0, "18w07c"),
+ JAVA_1_13_18W08A(1470, 13, 0, "18w08a"),
+ JAVA_1_13_18W08B(1471, 13, 0, "18w08b"),
+ JAVA_1_13_18W09A(1472, 13, 0, "18w09a"),
+ JAVA_1_13_18W10A(1473, 13, 0, "18w10a"),
+ JAVA_1_13_18W10B(1474, 13, 0, "18w10b"),
+ JAVA_1_13_18W10C(1476, 13, 0, "18w10c"),
+ JAVA_1_13_18W10D(1477, 13, 0, "18w10d"),
+ JAVA_1_13_18W11A(1478, 13, 0, "18w11a"),
+ JAVA_1_13_18W14A(1479, 13, 0, "18w14a"),
+ JAVA_1_13_18W14B(1481, 13, 0, "18w14b"),
+ JAVA_1_13_18W15A(1482, 13, 0, "18w15a"),
+ /**
+ * Tags Added
+ *
+ * - region: Level.LiquidTicks <ListTag<CompoundTag>>
+ * - region: Level.LiquidTicks[].i <StringTag>
+ * - region: Level.LiquidTicks[].p <IntTag>
+ * - region: Level.LiquidTicks[].t <IntTag>
+ * - region: Level.LiquidTicks[].x <IntTag>
+ * - region: Level.LiquidTicks[].y <IntTag>
+ * - region: Level.LiquidTicks[].z <IntTag>
+ * - region: Level.LiquidsToBeTicked <ListTag<ListTag<ShortTag>>>
+ *
+ */
+ JAVA_1_13_18W16A(1483, 13, 0, "18w16a"),
+
+ /**
+ *
+ * Tags Removed
+ *
+ * - region: Level.Heightmaps.LIGHT <LongArrayTag>
+ * - region: Level.Heightmaps.LIQUID <LongArrayTag>
+ * - region: Level.Heightmaps.RAIN <LongArrayTag>
+ * - region: Level.Heightmaps.SOLID <LongArrayTag>
+ *
+ * Tags Added
+ *
+ * - region: Level.CarvingMasks <CompoundTag>
+ * - region: Level.CarvingMasks.AIR <ByteArrayTag>
+ * - region: Level.CarvingMasks.LIQUID <ByteArrayTag>
+ * - region: Level.Heightmaps.LIGHT_BLOCKING <LongArrayTag>
+ * - region: Level.Heightmaps.MOTION_BLOCKING <LongArrayTag>
+ * - region: Level.Heightmaps.MOTION_BLOCKING_NO_LEAVES <LongArrayTag>
+ * - region: Level.Heightmaps.OCEAN_FLOOR <LongArrayTag>
+ * - region: Level.Heightmaps.OCEAN_FLOOR_WG <LongArrayTag>
+ * - region: Level.Heightmaps.WORLD_SURFACE_WG <LongArrayTag>
+ *
+ */
+ JAVA_1_13_18W19A(1484, 13, 0, "18w19a"),
+ JAVA_1_13_18W19B(1485, 13, 0, "18w19b"),
+ JAVA_1_13_18W20A(1489, 13, 0, "18w20a"),
+ JAVA_1_13_18W20B(1491, 13, 0, "18w20b"),
+ /** Believe this to be the end of the Level.hasLegacyStructureData tag */
+ JAVA_1_13_18W20C(1493, 13, 0, "18w20c"),
+ JAVA_1_13_18W21A(1495, 13, 0, "18w21a"),
+ JAVA_1_13_18W21B(1496, 13, 0, "18w21b"),
+ JAVA_1_13_18W22A(1497, 13, 0, "18w22a"),
+ JAVA_1_13_18W22B(1498, 13, 0, "18w22b"),
+ JAVA_1_13_18W22C(1499, 13, 0, "18w22c"),
+ JAVA_1_13_PRE1(1501, 13, 0, "PRE1"),
+ JAVA_1_13_PRE2(1502, 13, 0, "PRE2"),
+ /**
+ * Tags Added
+ *
+ * - region: Level.Heightmaps.WORLD_SURFACE <LongArrayTag>
+ *
+ */
+ JAVA_1_13_PRE3(1503, 13, 0, "PRE3"),
+ JAVA_1_13_PRE4(1504, 13, 0, "PRE4"),
+ // 1506 -- legacy biome id mapping changed
+ JAVA_1_13_PRE5(1511, 13, 0, "PRE5"),
+ JAVA_1_13_PRE6(1512, 13, 0, "PRE6"),
+ JAVA_1_13_PRE7(1513, 13, 0, "PRE7"),
+ JAVA_1_13_PRE8(1516, 13, 0, "PRE8"),
+ JAVA_1_13_PRE9(1517, 13, 0, "PRE9"),
+ JAVA_1_13_PRE10(1518, 13, 0, "PRE10"),
+ JAVA_1_13_0(1519, 13, 0),
+ JAVA_1_13_1_18W30A(1620, 13, 1, "18w30a"),
+ JAVA_1_13_1_18W30B(1621, 13, 1, "18w30b"),
+ JAVA_1_13_1_18W31A(1622, 13, 1, "18w31a"),
+ JAVA_1_13_1_18W32A(1623, 13, 1, "18w32a"),
+ JAVA_1_13_1_18W33A(1625, 13, 1, "18w33a"),
+ JAVA_1_13_1_PRE1(1626, 13, 1, "PRE1"),
+ JAVA_1_13_1_PRE2(1627, 13, 1, "PRE2"),
+ JAVA_1_13_1(1628, 13, 1),
+ JAVA_1_13_2_PRE1(1629, 13, 2, "PRE1"),
+ JAVA_1_13_2_PRE2(1630, 13, 2, "PRE2"),
+ JAVA_1_13_2(1631, 13, 2),
+ JAVA_1_14_18W43A(1901, 14, 0, "18w43a"),
+ JAVA_1_14_18W43B(1902, 14, 0, "18w43b"),
+ JAVA_1_14_18W43C(1903, 14, 0, "18w43c"),
+ /**
+ * Tags Added
+ *
+ * - region: Level.NoiseMask <ByteArrayTag>
+ *
+ */
+ JAVA_1_14_18W44A(1907, 14, 0, "18w44a"),
+ JAVA_1_14_18W45A(1908, 14, 0, "18w45a"),
+ /**
+ * Tags Removed
+ *
+ * - region: Level.NoiseMask <ByteArrayTag>
+ *
+ */
+ JAVA_1_14_18W46A(1910, 14, 0, "18w46a"),
+ JAVA_1_14_18W47A(1912, 14, 0, "18w47a"),
+ JAVA_1_14_18W47B(1913, 14, 0, "18w47b"),
+ JAVA_1_14_18W48A(1914, 14, 0, "18w48a"),
+ JAVA_1_14_18W48B(1915, 14, 0, "18w48b"),
+ JAVA_1_14_18W49A(1916, 14, 0, "18w49a"),
+
+ /**
+ * FIRST SEEN (may have been added before this version). Villagers gain professions?
+ *
+ * - region: Level.Entities[].VillagerData.level <IntTag>
+ * - region: Level.Entities[].VillagerData.profession <StringTag>
+ * - region: Level.Entities[].VillagerData.type <StringTag>
+ *
+ */
+ JAVA_1_14_18W50A(1919, 14, 0, "18w50a"),
+ /**
+ * Tags Added
+ *
+ * - region: Level.isLightOn <ByteTag>
+ *
+ */
+ JAVA_1_14_19W02A(1921, 14, 0, "19w02a"),
+ JAVA_1_14_19W03A(1922, 14, 0, "19w03a"),
+ JAVA_1_14_19W03B(1923, 14, 0, "19w03b"),
+ JAVA_1_14_19W03C(1924, 14, 0, "19w03c"),
+ JAVA_1_14_19W04A(1926, 14, 0, "19w04a"),
+ JAVA_1_14_19W04B(1927, 14, 0, "19w04b"),
+ JAVA_1_14_19W05A(1930, 14, 0, "19w05a"),
+ JAVA_1_14_19W06A(1931, 14, 0, "19w06a"),
+ JAVA_1_14_19W07A(1932, 14, 0, "19w07a"),
+ JAVA_1_14_19W08A(1933, 14, 0, "19w08a"),
+ JAVA_1_14_19W08B(1934, 14, 0, "19w08b"),
+ JAVA_1_14_19W09A(1935, 14, 0, "19w09a"),
+ /**
+ * /poi/r.X.Z.mca files introduced with a premature nbt structure. POI files not supported by this library until
+ * {@link #JAVA_1_14_PRE1}. Note this poi format did not include a DataVersion.
+ * Temporary POI Structure
+ *
+ * - poi: # <ListTag<CompoundTag>> - the keys <#> are a number literal indicating the chunk section Y
+ * - poi: #[].free_tickets <IntTag>
+ * - poi: #[].pos <IntArrayTag>
+ * - poi: #[].type <StringTag>
+ *
+ * Villagers got brains ({@code Entities[].Brain}) in the region file data.
+ */
+ JAVA_1_14_19W11A(1937, 14, 0, "19w11a"),
+ JAVA_1_14_19W11B(1938, 14, 0, "19w11b"),
+ JAVA_1_14_19W12A(1940, 14, 0, "19w12a"),
+ JAVA_1_14_19W12B(1941, 14, 0, "19w12b"),
+ JAVA_1_14_19W13A(1942, 14, 0, "19w13a"),
+ JAVA_1_14_19W13B(1943, 14, 0, "19w13b"),
+ JAVA_1_14_19W14A(1944, 14, 0, "19w14a"),
+ JAVA_1_14_19W14B(1945, 14, 0, "19w14b"),
+ /**
+ * POI tag structure changed. Begin this library's support of POI files.
+ * Final POI Structure
+ *
+ * - poi: DataVersion <IntTag>
+ * - poi: Sections <CompoundTag>
+ * - poi: Sections.# <CompoundTag> - the keys <#> are a number literal indicating the chunk section Y
+ * - poi: Sections.#.Records <ListTag<CompoundTag>>
+ * - poi: Sections.#.Records[].free_tickets <IntTag>
+ * - poi: Sections.#.Records[].pos <IntArrayTag>
+ * - poi: Sections.#.Records[].type <StringTag>
+ * - poi: Sections.#.Valid <ByteTag> (boolean)
+ *
+ */
+ JAVA_1_14_PRE1(1947, 14, 0, "PRE1"),
+ JAVA_1_14_PRE2(1948, 14, 0, "PRE2"),
+ /**
+ * Tags Removed
+ *
+ * - region: Level.CarvingMasks.AIR <ByteArrayTag>
+ * - region: Level.CarvingMasks.LIQUID <ByteArrayTag>
+ * - region: Level.LiquidsToBeTicked <ListTag<ListTag<ShortTag>>> - NOTE: JAVA_1_18_21W43A change notes make reference to this tag so IDK
+ * - region: Level.ToBeTicked <ListTag<ListTag<ShortTag>>> - NOTE: JAVA_1_18_21W43A change notes make reference to this tag so IDK
+ *
+ */
+ JAVA_1_14_PRE3(1949, 14, 0, "PRE3"),
+ JAVA_1_14_PRE4(1950, 14, 0, "PRE4"),
+ JAVA_1_14_PRE5(1951, 14, 0, "PRE5"),
+ JAVA_1_14_0(1952, 14, 0),
+ JAVA_1_14_1_PRE1(1955, 14, 1, "PRE1"),
+ JAVA_1_14_1_PRE2(1956, 14, 1, "PRE2"),
+ JAVA_1_14_1(1957, 14, 1),
+ JAVA_1_14_2_PRE1(1958, 14, 2, "PRE1"),
+ JAVA_1_14_2_PRE2(1959, 14, 2, "PRE2"),
+ JAVA_1_14_2_PRE3(1960, 14, 2, "PRE3"),
+ JAVA_1_14_2_PRE4(1962, 14, 2, "PRE4"),
+ JAVA_1_14_2(1963, 14, 2),
+ JAVA_1_14_3_PRE1(1964, 14, 3, "PRE1"),
+ JAVA_1_14_3_PRE2(1965, 14, 3, "PRE2"),
+ JAVA_1_14_3_PRE3(1966, 14, 3, "PRE3"),
+ JAVA_1_14_3_PRE4(1967, 14, 3, "PRE4"),
+ JAVA_1_14_3(1968, 14, 3),
+ JAVA_1_14_4_PRE1(1969, 14, 4, "PRE1"),
+ JAVA_1_14_4_PRE2(1970, 14, 4, "PRE2"),
+ JAVA_1_14_4_PRE3(1971, 14, 4, "PRE3"),
+ JAVA_1_14_4_PRE4(1972, 14, 4, "PRE4"),
+ JAVA_1_14_4_PRE5(1973, 14, 4, "PRE5"),
+ JAVA_1_14_4_PRE6(1974, 14, 4, "PRE6"),
+ JAVA_1_14_4_PRE7(1975, 14, 4, "PRE7"),
+ /** First version where Mojang published jar deobfuscation mappings. */
+ JAVA_1_14_4(1976, 14, 4),
+// JAVA_1_14_3_CT1(2067, 14, 3, "CT1"),
+// JAVA_1_15_CT2(2068, 15, 0, "CT2"),
+// JAVA_1_15_CT3(2069, 15, 0, "CT3"),
+ /** Bees introduced. */
+ JAVA_1_15_19W34A(2200, 15, 0, "19w34a"),
+ JAVA_1_15_19W35A(2201, 15, 0, "19w35a"),
+ /**
+ * 3D Biomes added. Biomes array in the Level tag for each chunk changed
+ * to contain 1024 integers instead of 256 see {@link TerrainChunk}
+ */
+ JAVA_1_15_19W36A(2203, 15, 0, "19w36a"),
+ JAVA_1_15_19W37A(2204, 15, 0, "19w37a"),
+ JAVA_1_15_19W38A(2205, 15, 0, "19w38a"),
+ JAVA_1_15_19W38B(2206, 15, 0, "19w38b"),
+ JAVA_1_15_19W39A(2207, 15, 0, "19w39a"),
+ JAVA_1_15_19W40A(2208, 15, 0, "19w40a"),
+ JAVA_1_15_19W41A(2210, 15, 0, "19w41a"),
+ JAVA_1_15_19W42A(2212, 15, 0, "19w42a"),
+ JAVA_1_15_19W44A(2213, 15, 0, "19w44a"),
+ JAVA_1_15_19W45A(2214, 15, 0, "19w45a"),
+ JAVA_1_15_19W45B(2215, 15, 0, "19w45b"),
+ JAVA_1_15_19W46A(2216, 15, 0, "19w46a"),
+ JAVA_1_15_19W46B(2217, 15, 0, "19w46b"),
+ JAVA_1_15_PRE1(2218, 15, 0, "PRE1"),
+ JAVA_1_15_PRE2(2219, 15, 0, "PRE2"),
+ JAVA_1_15_PRE3(2220, 15, 0, "PRE3"),
+ JAVA_1_15_PRE4(2221, 15, 0, "PRE4"),
+ JAVA_1_15_PRE5(2222, 15, 0, "PRE5"),
+ JAVA_1_15_PRE6(2223, 15, 0, "PRE6"),
+ JAVA_1_15_PRE7(2224, 15, 0, "PRE7"),
+ JAVA_1_15_0(2225, 15, 0),
+ JAVA_1_15_1_PRE1(2226, 15, 1, "PRE1"),
+ JAVA_1_15_1(2227, 15, 1),
+ JAVA_1_15_2_PRE1(2228, 15, 2, "PRE1"),
+ JAVA_1_15_2_PRE2(2229, 15, 2, "PRE2"),
+ JAVA_1_15_2(2230, 15, 2),
+// JAVA_1_16_CT4(2320, 16, 0, "CT4"),
+// JAVA_1_16_CT5(2321, 16, 0, "CT5"),
+ JAVA_1_16_20W06A(2504, 16, 0, "20w06a"),
+ JAVA_1_16_20W07A(2506, 16, 0, "20w07a"),
+ JAVA_1_16_20W08A(2507, 16, 0, "20w08a"),
+ JAVA_1_16_20W09A(2510, 16, 0, "20w09a"),
+ JAVA_1_16_20W10A(2512, 16, 0, "20w10a"),
+ JAVA_1_16_20W11A(2513, 16, 0, "20w11a"),
+ /**
+ * Entity UUID data storage changed.
+ *
+ * Tags Removed
+ *
+ * - region: Level.Entities[].Attributes[].Modifiers[].UUIDLeast <LongTag>
+ * - region: Level.Entities[].Attributes[].Modifiers[].UUIDMost <LongTag>
+ * - region: Level.Entities[].UUIDLeast <LongTag>
+ * - region: Level.Entities[].UUIDMost <LongTag>
+ *
+ * Tags Added
+ *
+ * - region: Level.Entities[].Attributes[].Modifiers[].UUID <IntArrayTag[4]>
+ * - region: Level.Entities[].UUID <IntArrayTag>
+ *
+ */
+ JAVA_1_16_20W12A(2515, 16, 0, "20w12a"),
+ JAVA_1_16_20W13A(2520, 16, 0, "20w13a"),
+ JAVA_1_16_20W13B(2521, 16, 0, "20w13b"),
+ JAVA_1_16_20W14A(2524, 16, 0, "20w14a"),
+ JAVA_1_16_20W15A(2525, 16, 0, "20w15a"),
+ JAVA_1_16_20W16A(2526, 16, 0, "20w16a"),
+ /** Block palette packing changed in this version - see {@link TerrainSection} */
+ JAVA_1_16_20W17A(2529, 16, 0, "20w17a"),
+ JAVA_1_16_20W18A(2532, 16, 0, "20w18a"),
+ JAVA_1_16_20W19A(2534, 16, 0, "20w19a"),
+ /** The server.jar build of this version was DOA with a null pointer exception on initialization. */
+ JAVA_1_16_20W20A(2536, 16, 0, "20w20a"),
+ JAVA_1_16_20W20B(2537, 16, 0, "20w20b"),
+ /**
+ * Structure name format changed from Caps_Snake_Case to lower_snake_case.
+ * Example: Level.Structures.References.Desert_Pyramid became Level.Structures.References.desert_pyramid
+ * Example: Level.Structures.Starts.Desert_Pyramid became Level.Structures.Starts.desert_pyramid
+ *
+ */
+ JAVA_1_16_20W21A(2554, 16, 0, "20w21a"),
+ JAVA_1_16_20W22A(2555, 16, 0, "20w22a"),
+ /**
+ * Tags Removed
+ *
+ * - region: Level.Entities[].Angry <ByteTag>
+ * - region: Level.TileEntities[].Bees[].EntityData.Anger <IntTag>
+ *
+ * Tags Added
+ *
+ * - region: Level.Entities[].AngerTime <IntTag>
+ * - region: Level.TileEntities[].Bees[].EntityData.AngerTime <IntTag>
+ *
+ */
+ JAVA_1_16_PRE1(2556, 16, 0, "PRE1"),
+ JAVA_1_16_PRE2(2557, 16, 0, "PRE2"),
+ JAVA_1_16_PRE3(2559, 16, 0, "PRE3"),
+ JAVA_1_16_PRE4(2560, 16, 0, "PRE4"),
+ /**
+ * FIRST SEEN (may have been added prior to this version)
+ *
+ * - region: Level.Entities[].AngryAt <IntArrayTag>
+ *
+ */
+ JAVA_1_16_PRE5(2561, 16, 0, "PRE5"),
+ JAVA_1_16_PRE6(2562, 16, 0, "PRE6"),
+ JAVA_1_16_PRE7(2563, 16, 0, "PRE7"),
+ JAVA_1_16_PRE8(2564, 16, 0, "PRE8"),
+ JAVA_1_16_RC1(2565, 16, 0, "RC1"),
+ JAVA_1_16_0(2566, 16, 0),
+ JAVA_1_16_1(2567, 16, 1),
+ JAVA_1_16_2_20W27A(2569, 16, 2, "20w27a"),
+ JAVA_1_16_2_20W28A(2570, 16, 2, "20w28a"),
+ JAVA_1_16_2_20W29A(2571, 16, 2, "20w29a"),
+ JAVA_1_16_2_20W30A(2572, 16, 2, "20w30a"),
+ JAVA_1_16_2_PRE1(2573, 16, 2, "PRE1"),
+ JAVA_1_16_2_PRE2(2574, 16, 2, "PRE2"),
+ JAVA_1_16_2_PRE3(2575, 16, 2, "PRE3"),
+ JAVA_1_16_2_RC1(2576, 16, 2, "RC1"),
+ JAVA_1_16_2_RC2(2577, 16, 2, "RC2"),
+ JAVA_1_16_2(2578, 16, 2),
+ JAVA_1_16_3_RC1(2579, 16, 3, "RC1"),
+ JAVA_1_16_3(2580, 16, 3),
+ JAVA_1_16_4_PRE1(2581, 16, 4, "PRE1"),
+ JAVA_1_16_4_PRE2(2582, 16, 4, "PRE2"),
+ JAVA_1_16_4_RC1(2583, 16, 4, "RC1"),
+ JAVA_1_16_4(2584, 16, 4),
+ JAVA_1_16_5_RC1(2585, 16, 5, "RC1"),
+ JAVA_1_16_5(2586, 16, 5),
+ /**
+ * /entities/r.X.Z.mca files introduced.
+ * Entities no longer inside region/r.X.Z.mca - except in un-migrated chunks AND (allegedly) during some phases of
+ * chunk generation.
+ * https://www.minecraft.net/en-us/article/minecraft-snapshot-20w45a
+ */
+ JAVA_1_17_20W45A(2681, 17, 0, "20w45a"),
+ JAVA_1_17_20W46A(2682, 17, 0, "20w46a"),
+ JAVA_1_17_20W48A(2683, 17, 0, "20w48a"),
+ JAVA_1_17_20W49A(2685, 17, 0, "20w49a"),
+ JAVA_1_17_20W51A(2687, 17, 0, "20w51a"),
+ JAVA_1_17_21W03A(2689, 17, 0, "21w03a"),
+ JAVA_1_17_21W05A(2690, 17, 0, "21w05a"),
+ JAVA_1_17_21W05B(2692, 17, 0, "21w05b"),
+ JAVA_1_17_21W06A(2694, 17, 0, "21w06a"),
+ JAVA_1_17_21W07A(2695, 17, 0, "21w07a"),
+ JAVA_1_17_21W08A(2697, 17, 0, "21w08a"),
+ JAVA_1_17_21W08B(2698, 17, 0, "21w08b"),
+ JAVA_1_17_21W10A(2699, 17, 0, "21w10a"),
+// JAVA_1_17_CT6(2701, 17, 0, "CT6"),
+// JAVA_1_17_CT7(2702, 17, 0, "CT7"),
+ JAVA_1_17_21W11A(2703, 17, 0, "21w11a"),
+// JAVA_1_17_CT7B(2703, 17, 0, "CT7b"), -- ambiguous data version
+// JAVA_1_17_CT7C(2704, 17, 0, "CT7c"),
+ JAVA_1_17_21W13A(2705, 17, 0, "21w13a"),
+// JAVA_1_17_CT8(2705, 17, 0, "CT8"), -- ambiguous data version
+ JAVA_1_17_21W14A(2706, 17, 0, "21w14a"),
+// JAVA_1_17_CT8B(2706, 17, 0, "CT8b"), -- ambiguous data version
+// JAVA_1_17_CT8C(2707, 17, 0, "CT8c"),
+ JAVA_1_17_21W15A(2709, 17, 0, "21w15a"),
+ JAVA_1_17_21W16A(2711, 17, 0, "21w16a"),
+ JAVA_1_17_21W17A(2712, 17, 0, "21w17a"),
+ JAVA_1_17_21W18A(2713, 17, 0, "21w18a"),
+ JAVA_1_17_21W19A(2714, 17, 0, "21w19a"),
+ JAVA_1_17_21W20A(2715, 17, 0, "21w20a"),
+ JAVA_1_17_PRE1(2716, 17, 0, "PRE1"),
+ JAVA_1_17_PRE2(2718, 17, 0, "PRE2"),
+ JAVA_1_17_PRE3(2719, 17, 0, "PRE3"),
+ JAVA_1_17_PRE4(2720, 17, 0, "PRE4"),
+ JAVA_1_17_PRE5(2721, 17, 0, "PRE5"),
+ JAVA_1_17_RC1(2722, 17, 0, "RC1"),
+ JAVA_1_17_RC2(2723, 17, 0, "RC2"),
+ JAVA_1_17_0(2724, 17, 0),
+ JAVA_1_17_1_PRE1(2725, 17, 1, "PRE1"),
+ JAVA_1_17_1_PRE2(2726, 17, 1, "PRE2"),
+ JAVA_1_17_1_PRE3(2727, 17, 1, "PRE3"),
+ JAVA_1_17_1_RC1(2728, 17, 1, "RC1"),
+ JAVA_1_17_1_RC2(2729, 17, 1, "RC2"),
+ JAVA_1_17_1(2730, 17, 1),
+// JAVA_1_18_XS1(2825, 18, 0, "XS1"),
+// JAVA_1_18_XS2(2826, 18, 0, "XS2"),
+// JAVA_1_18_XS3(2827, 18, 0, "XS3"),
+// JAVA_1_18_XS4(2828, 18, 0, "XS4"),
+// JAVA_1_18_XS5(2829, 18, 0, "XS5"),
+// JAVA_1_18_XS6(2830, 18, 0, "XS6"),
+// JAVA_1_18_XS7(2831, 18, 0, "XS7"),
+ /**
+ * article 21w39a
+ * (yes, they didn't document these changes until a later weekly snapshot).
+ *
+ * - Level.Sections[].BlockStates & Level.Sections[].Palette have moved to a container structure in Level.Sections[].block_states
+ *
+ * - Level.Biomes are now paletted and live in a similar container structure in Level.Sections[].biomes
+ *
+ * Tags Removed
+ *
+ * - region: Level.Biomes <IntArrayTag>
+ * - region: Level.Sections[].BlockStates <LongArrayTag>
+ * - region: Level.Sections[].Palette <ListTag<CompoundTag>>
+ *
+ * Tags Added
+ *
+ * - region: Level.Sections[].biomes <CompoundTag>
+ * - region: Level.Sections[].biomes.data <LongArrayTag>
+ * - region: Level.Sections[].biomes.palette <ListTag<StringTag>>
+ * - region: Level.Sections[].block_states <CompoundTag>
+ * - region: Level.Sections[].block_states.data <LongArrayTag>
+ * - region: Level.Sections[].block_states.palette <ListTag<CompoundTag>>
+ *
+ * About the New Biome Palette
+ * - Consists of 64 entries, representing 4×4×4 biome regions in the chunk section.
+ * - When `palette` contains a single entry `data` will be omitted and the full chunk section is composed of a single biome.
+ */
+ // 2832 -- exact point of above noted changes
+ JAVA_1_18_21W37A(2834, 18, 0, "21w37a"),
+ JAVA_1_18_21W38A(2835, 18, 0, "21w38a"),
+ JAVA_1_18_21W39A(2836, 18, 0, "21w39a"),
+ JAVA_1_18_21W40A(2838, 18, 0, "21w40a"),
+ JAVA_1_18_21W41A(2839, 18, 0, "21w41a"),
+ JAVA_1_18_21W42A(2840, 18, 0, "21w42a"),
+ /**
+ * https://www.minecraft.net/en-us/article/minecraft-snapshot-21w43a
+ *
+ * - Removed chunk’s Level and moved everything it contained up
+ *
+ * - Chunk’s Level.Entities has moved to entities -- entities are stored in the terrain region file during chunk generation
+ *
+ *   It actually appears this tag may have been removed entirely from region mca files until {@link #JAVA_1_18_2_22W03A}
+ *
+ *   Note: Hilariously, the name remains capitalized in entities mca files.
+ * - Chunk’s Level.TileEntities has moved to block_entities
+ *
+ * - Chunk’s Level.TileTicks and Level.ToBeTicked have moved to block_ticks
+ *
+ * - Chunk’s Level.LiquidTicks and Level.LiquidsToBeTicked have moved to fluid_ticks
+ *
+ * - Chunk’s Level.Sections has moved to sections
+ *
+ * - Chunk’s Level.Structures has moved to structures
+ *
+ * - Chunk’s Level.Structures.Starts has moved to structures.starts
+ *
+ * - Chunk’s Level.Sections[].BlockStates and Level.Sections[].Palette have moved to a container structure in sections[].block_states
+ *
+ * - Added yPos the minimum section y position in the chunk
+ *
+ * - Added below_zero_retrogen containing data to support below zero generation
+ *
+ * - Added blending_data containing data to support blending new world generation with existing chunks
+ *
+ */
+ JAVA_1_18_21W43A(2844, 18, 0, "21w43a"),
+ JAVA_1_18_21W44A(2845, 18, 0, "21w44a"),
+ JAVA_1_18_PRE1(2847, 18, 0, "PRE1"),
+ JAVA_1_18_PRE2(2848, 18, 0, "PRE2"),
+ JAVA_1_18_PRE3(2849, 18, 0, "PRE3"),
+ JAVA_1_18_PRE4(2850, 18, 0, "PRE4"),
+ JAVA_1_18_PRE5(2851, 18, 0, "PRE5"),
+ JAVA_1_18_PRE6(2853, 18, 0, "PRE6"),
+ JAVA_1_18_PRE7(2854, 18, 0, "PRE7"),
+ JAVA_1_18_PRE8(2855, 18, 0, "PRE8"),
+ JAVA_1_18_RC1(2856, 18, 0, "RC1"),
+ JAVA_1_18_RC2(2857, 18, 0, "RC2"),
+ JAVA_1_18_RC3(2858, 18, 0, "RC3"),
+ JAVA_1_18_RC4(2859, 18, 0, "RC4"),
+ JAVA_1_18_0(2860, 18, 0),
+ JAVA_1_18_1_PRE1(2861, 18, 1, "PRE1"),
+ JAVA_1_18_1_RC1(2862, 18, 1, "RC1"),
+ JAVA_1_18_1_RC2(2863, 18, 1, "RC2"),
+ JAVA_1_18_1_RC3(2864, 18, 1, "RC3"),
+ JAVA_1_18_1(2865, 18, 1),
+ /**
+ * article 21w39a (This change was
+ * noted on an earlier snapshot but didn't make it into the codebase until this one!)
+ *
+ * - Level.CarvingMasks[] is now CompoundTag containing <LongArrayTag>
+ * instead of CompoundTag containing <ByteArrayTag>.
+ *
+ * This version is also the first time the mca scan data shows the `entities` tag being present in region chunks
+ * again (probably during some stage(s) of world generation). I find it unlikely that the scanned mca versions
+ * between {@link #JAVA_1_18_21W43A} and this one just happen to not have any entities in the right state to be
+ * stored in the region mca file - that was 20 * 25 world spawns generated and scanned between these 2 versions!
+ */
+ JAVA_1_18_2_22W03A(2966, 18, 2, "22w03a"),
+ JAVA_1_18_2_22W05A(2967, 18, 2, "22w05a"),
+ JAVA_1_18_2_22W06A(2968, 18, 2, "22w06a"),
+ /**
+ * `structures.References.*` and `structures.starts.*` entry name format changed to include the "minecraft:" prefix.
+ * Ex. old: "buried_treasure", new: "minecraft:buried_treasure"
+ */
+ JAVA_1_18_2_22W07A(2969, 18, 2, "22w07a"),
+ JAVA_1_18_2_PRE1(2971, 18, 2, "PRE1"),
+ JAVA_1_18_2_PRE2(2972, 18, 2, "PRE2"),
+ JAVA_1_18_2_PRE3(2973, 18, 2, "PRE3"),
+ JAVA_1_18_2_RC1(2974, 18, 2, "RC1"),
+ JAVA_1_18_2(2975, 18, 2),
+// JAVA_1_19_XS1(3066, 19, 0, "XS1"),
+ JAVA_1_19_22W11A(3080, 19, 0, "22w11a"),
+ JAVA_1_19_22W12A(3082, 19, 0, "22w12a"),
+ JAVA_1_19_22W13A(3085, 19, 0, "22w13a"),
+ JAVA_1_19_22W14A(3088, 19, 0, "22w14a"),
+ JAVA_1_19_22W15A(3089, 19, 0, "22w15a"),
+ JAVA_1_19_22W16A(3091, 19, 0, "22w16a"),
+ JAVA_1_19_22W16B(3092, 19, 0, "22w16b"),
+ JAVA_1_19_22W17A(3093, 19, 0, "22w17a"),
+ JAVA_1_19_22W18A(3095, 19, 0, "22w18a"),
+ JAVA_1_19_22W19A(3096, 19, 0, "22w19a"),
+ JAVA_1_19_PRE1(3098, 19, 0, "PRE1"),
+ JAVA_1_19_PRE2(3099, 19, 0, "PRE2"),
+ JAVA_1_19_PRE3(3100, 19, 0, "PRE3"),
+ JAVA_1_19_PRE4(3101, 19, 0, "PRE4"),
+ JAVA_1_19_PRE5(3102, 19, 0, "PRE5"),
+ JAVA_1_19_RC1(3103, 19, 0, "RC1"),
+ JAVA_1_19_RC2(3104, 19, 0, "RC2"),
+ JAVA_1_19_0(3105, 19, 0),
+ JAVA_1_19_1_22W24A(3106, 19, 1, "22w24a"),
+ JAVA_1_19_1_PRE1(3107, 19, 1, "PRE1"),
+ JAVA_1_19_1_RC1(3109, 19, 1, "RC1"),
+ JAVA_1_19_1_PRE2(3110, 19, 1, "PRE2"),
+ JAVA_1_19_1_PRE3(3111, 19, 1, "PRE3"),
+ JAVA_1_19_1_PRE4(3112, 19, 1, "PRE4"),
+ JAVA_1_19_1_PRE5(3113, 19, 1, "PRE5"),
+ JAVA_1_19_1_PRE6(3114, 19, 1, "PRE6"),
+ JAVA_1_19_1_RC2(3115, 19, 1, "RC2"),
+ JAVA_1_19_1_RC3(3116, 19, 1, "RC3"),
+ JAVA_1_19_1(3117, 19, 1),
+ JAVA_1_19_2_RC1(3118, 19, 2, "RC1"),
+ JAVA_1_19_2_RC2(3119, 19, 2, "RC2"),
+ JAVA_1_19_2(3120, 19, 2),
+ JAVA_1_19_3_22W42A(3205, 19, 3, "22w42a"),
+ JAVA_1_19_3_22W43A(3206, 19, 3, "22w43a"),
+ /** {@code Entities[].listener.selector} appears for the first time. */
+ JAVA_1_19_3_22W44A(3207, 19, 3, "22w44a"),
+ JAVA_1_19_3_22W45A(3208, 19, 3, "22w45a"),
+ JAVA_1_19_3_22W46A(3210, 19, 3, "22w46a"),
+ JAVA_1_19_3_PRE1(3211, 19, 3, "PRE1"),
+ JAVA_1_19_3_PRE2(3212, 19, 3, "PRE2"),
+ JAVA_1_19_3_PRE3(3213, 19, 3, "PRE3"),
+ JAVA_1_19_3_RC1(3215, 19, 3, "RC1"),
+ JAVA_1_19_3_RC2(3216, 19, 3, "RC2"),
+ JAVA_1_19_3_RC3(3217, 19, 3, "RC3"),
+ JAVA_1_19_3(3218, 19, 3),
+ JAVA_1_19_4_23W03A(3320, 19, 4, "23w03a"),
+ JAVA_1_19_4_23W04A(3321, 19, 4, "23w04a"),
+ JAVA_1_19_4_23W05A(3323, 19, 4, "23w05a"),
+ JAVA_1_19_4_23W06A(3326, 19, 4, "23w06a"),
+ JAVA_1_19_4_23W07A(3329, 19, 4, "23w07a"),
+ JAVA_1_19_4_PRE1(3330, 19, 4, "PRE1"),
+ JAVA_1_19_4_PRE2(3331, 19, 4, "PRE2"),
+ JAVA_1_19_4_PRE3(3332, 19, 4, "PRE3"),
+ JAVA_1_19_4_PRE4(3333, 19, 4, "PRE4"),
+ JAVA_1_19_4_RC1(3334, 19, 4, "RC1"),
+ JAVA_1_19_4_RC2(3335, 19, 4, "RC2"),
+ JAVA_1_19_4_RC3(3336, 19, 4, "RC3"),
+ JAVA_1_19_4(3337, 19, 4),
+ JAVA_1_20_23W12A(3442, 20, 0, "23w12a"),
+ JAVA_1_20_23W13A(3443, 20, 0, "23w13a"),
+ JAVA_1_20_23W14A(3445, 20, 0, "23w14a"),
+ JAVA_1_20_23W16A(3449, 20, 0, "23w16a"),
+ JAVA_1_20_23W17A(3452, 20, 0, "23w17a"),
+ JAVA_1_20_23W18A(3453, 20, 0, "23w18a"),
+ JAVA_1_20_PRE1(3454, 20, 0, "PRE1"),
+ JAVA_1_20_PRE2(3455, 20, 0, "PRE2"),
+ JAVA_1_20_PRE3(3456, 20, 0, "PRE3"),
+ JAVA_1_20_PRE4(3457, 20, 0, "PRE4"),
+ JAVA_1_20_PRE5(3458, 20, 0, "PRE5"),
+ JAVA_1_20_PRE6(3460, 20, 0, "PRE6"),
+ JAVA_1_20_PRE7(3461, 20, 0, "PRE7"),
+ JAVA_1_20_RC1(3462, 20, 0, "RC1"),
+ JAVA_1_20_0(3463, 20, 0),
+ JAVA_1_20_1_RC1(3464, 20, 1, "RC1"),
+ JAVA_1_20_1(3465, 20, 1),
+ JAVA_1_20_2_23W31A(3567, 20, 2, "23w31a"),
+ JAVA_1_20_2_23W32A(3569, 20, 2, "23w32a"),
+ JAVA_1_20_2_23W33A(3570, 20, 2, "23w33a"),
+ JAVA_1_20_2_23W35A(3571, 20, 2, "23w35a"),
+ JAVA_1_20_2_PRE1(3572, 20, 2, "PRE1"),
+ JAVA_1_20_2_PRE2(3573, 20, 2, "PRE2"),
+ JAVA_1_20_2_PRE3(3574, 20, 2, "PRE3"),
+ JAVA_1_20_2_PRE4(3575, 20, 2, "PRE4"),
+ JAVA_1_20_2_RC1(3576, 20, 2, "RC1"),
+ JAVA_1_20_2_RC2(3577, 20, 2, "RC2"),
+ JAVA_1_20_2(3578, 20, 2),
+ JAVA_1_20_3_23W40A(3679, 20, 3, "23w40a"),
+ JAVA_1_20_3_23W41A(3681, 20, 3, "23w41a"),
+ JAVA_1_20_3_23W42A(3684, 20, 3, "23w42a"),
+ JAVA_1_20_3_23W43A(3686, 20, 3, "23w43a"),
+ JAVA_1_20_3_23W43B(3687, 20, 3, "23w43b"),
+ JAVA_1_20_3_23W44A(3688, 20, 3, "23w44a"),
+ JAVA_1_20_3_23W45A(3690, 20, 3, "23w45a"),
+ JAVA_1_20_3_23W46A(3691, 20, 3, "23w46a"),
+ JAVA_1_20_3_PRE1(3693, 20, 3, "PRE1"),
+ JAVA_1_20_3_PRE2(3694, 20, 3, "PRE2"),
+ JAVA_1_20_3_PRE3(3695, 20, 3, "PRE3"),
+ JAVA_1_20_3_PRE4(3696, 20, 3, "PRE4"),
+ JAVA_1_20_3_RC1(3697, 20, 3, "RC1"),
+ JAVA_1_20_3(3698, 20, 3),
+ JAVA_1_20_4_RC1(3699, 20, 4, "RC1"),
+ JAVA_1_20_4(3700, 20, 4),
+ JAVA_1_20_5_23W51A(3801, 20, 5, "23w51a"),
+ JAVA_1_20_5_23W51B(3802, 20, 5, "23w51b"),
+ JAVA_1_20_5_24W03A(3804, 20, 5, "24w03a"),
+ JAVA_1_20_5_24W03B(3805, 20, 5, "24w03b"),
+ JAVA_1_20_5_24W04A(3806, 20, 5, "24w04a"),
+ JAVA_1_20_5_24W05A(3809, 20, 5, "24w05a"),
+ JAVA_1_20_5_24W05B(3811, 20, 5, "24w05b"),
+ JAVA_1_20_5_24W06A(3815, 20, 5, "24w06a"),
+ JAVA_1_20_5_24W07A(3817, 20, 5, "24w07a"),
+ JAVA_1_20_5_24W09A(3819, 20, 5, "24w09a"),
+ JAVA_1_20_5_24W10A(3821, 20, 5, "24w10a"),
+ JAVA_1_20_5_24W11A(3823, 20, 5, "24w11a"),
+ JAVA_1_20_5_24W12A(3824, 20, 5, "24w12a"),
+ JAVA_1_20_5_24W13A(3826, 20, 5, "24w13a"),
+ JAVA_1_20_5_24W14A(3827, 20, 5, "24w14a"),
+ JAVA_1_20_5_PRE1(3829, 20, 5, "PRE1"),
+ JAVA_1_20_5_PRE2(3830, 20, 5, "PRE2"),
+ JAVA_1_20_5_PRE3(3831, 20, 5, "PRE3"),
+ JAVA_1_20_5_PRE4(3832, 20, 5, "PRE4"),
+ JAVA_1_20_5_RC1(3834, 20, 5, "RC1"),
+ JAVA_1_20_5_RC2(3835, 20, 5, "RC2"),
+ JAVA_1_20_5_RC3(3836, 20, 5, "RC3"),
+ JAVA_1_20_5(3837, 20, 5),
+ JAVA_1_20_6_RC1(3838, 20, 6, "RC1"),
+ JAVA_1_20_6(3839, 20, 6),
+ JAVA_1_21_24W18A(3940, 21, 0, "24w18a"),
+ JAVA_1_21_24W19A(3941, 21, 0, "24w19a"),
+ JAVA_1_21_24W19B(3942, 21, 0, "24w19b"),
+ JAVA_1_21_24W20A(3944, 21, 0, "24w20a"),
+ JAVA_1_21_24W21A(3946, 21, 0, "24w21a"),
+ JAVA_1_21_24W21B(3947, 21, 0, "24w21b"),
+ JAVA_1_21_PRE1(3948, 21, 0, "PRE1"),
+ JAVA_1_21_PRE2(3949, 21, 0, "PRE2"),
+ JAVA_1_21_PRE3(3950, 21, 0, "PRE3"),
+ JAVA_1_21_PRE4(3951, 21, 0, "PRE4"),
+ JAVA_1_21_RC1(3952, 21, 0, "RC1"),
+ JAVA_1_21_0(3953, 21, 0),
+ JAVA_1_21_1_RC1(3954, 21, 1, "RC1"),
+ JAVA_1_21_1(3955, 21, 1),
+ JAVA_1_21_2_24W33A(4058, 21, 2, "24w33a"),
+ JAVA_1_21_2_24W34A(4060, 21, 2, "24w34a"),
+ JAVA_1_21_2_24W35A(4062, 21, 2, "24w35a"),
+ JAVA_1_21_2_24W36A(4063, 21, 2, "24w36a"),
+ JAVA_1_21_2_24W37A(4065, 21, 2, "24w37a"),
+ JAVA_1_21_2_24W38A(4066, 21, 2, "24w38a"),
+ JAVA_1_21_2_24W39A(4069, 21, 2, "24w39a"),
+ JAVA_1_21_2_24W40A(4072, 21, 2, "24w40a"),
+ JAVA_1_21_2_PRE1(4073, 21, 2, "PRE1"),
+ JAVA_1_21_2_PRE2(4074, 21, 2, "PRE2"),
+ JAVA_1_21_2_PRE3(4075, 21, 2, "PRE3"),
+ JAVA_1_21_2_PRE4(4076, 21, 2, "PRE4"),
+ JAVA_1_21_2_PRE5(4077, 21, 2, "PRE5"),
+ JAVA_1_21_2_RC1(4078, 21, 2, "RC1"),
+ JAVA_1_21_2_RC2(4079, 21, 2, "RC2"),
+ JAVA_1_21_2(4080, 21, 2),
+ JAVA_1_21_3(4082, 21, 3),
+ JAVA_1_21_4_24W44A(4174, 21, 4, "24w44a"),
+ JAVA_1_21_4_24W45A(4177, 21, 4, "24w45a"),
+ JAVA_1_21_4_24W46A(4178, 21, 4, "24w46a"),
+ JAVA_1_21_4_PRE1(4179, 21, 4, "PRE1"),
+ JAVA_1_21_4_PRE2(4182, 21, 4, "PRE2"),
+ JAVA_1_21_4_PRE3(4183, 21, 4, "PRE3"),
+ JAVA_1_21_4_RC1(4184, 21, 4, "RC1"),
+ JAVA_1_21_4_RC2(4186, 21, 4, "RC2"),
+ JAVA_1_21_4_RC3(4188, 21, 4, "RC3"),
+ JAVA_1_21_4(4189, 21, 4),
+ JAVA_1_21_5_25W02A(4298, 21, 5, "25w02a"),
+ JAVA_1_21_5_25W03A(4304, 21, 5, "25w03a"),
+ JAVA_1_21_5_25W04A(4308, 21, 5, "25w04a"),
+ JAVA_1_21_5_25W05A(4310, 21, 5, "25w05a"),
+ JAVA_1_21_5_25W06A(4313, 21, 5, "25w06a"),
+ JAVA_1_21_5_25W07A(4315, 21, 5, "25w07a"),
+ JAVA_1_21_5_25W08A(4316, 21, 5, "25w08a"),
+ JAVA_1_21_5_25W09A(4317, 21, 5, "25w09a"),
+ JAVA_1_21_5_25W09B(4318, 21, 5, "25w09b"),
+ JAVA_1_21_5_25W10A(4319, 21, 5, "25w10a"),
+ JAVA_1_21_5_PRE1(4320, 21, 5, "PRE1"),
+ JAVA_1_21_5_PRE2(4321, 21, 5, "PRE2"),
+ JAVA_1_21_5_PRE3(4322, 21, 5, "PRE3"),
+ JAVA_1_21_5_RC1(4323, 21, 5, "RC1"),
+ JAVA_1_21_5_RC2(4324, 21, 5, "RC2"),
+ JAVA_1_21_5(4325, 21, 5),
+ JAVA_1_21_6_25W15A(4422, 21, 6, "25w15a"),
+ JAVA_1_21_6_25W16A(4423, 21, 6, "25w16a"),
+ JAVA_1_21_6_25W17A(4425, 21, 6, "25w17a"),
+ JAVA_1_21_6_25W18A(4426, 21, 6, "25w18a"),
+ JAVA_1_21_6_25W19A(4427, 21, 6, "25w19a"),
+ JAVA_1_21_6_25W20A(4428, 21, 6, "25w20a"),
+ JAVA_1_21_6_25W21A(4429, 21, 6, "25w21a"),
+ JAVA_1_21_6_PRE1(4430, 21, 6, "PRE1"),
+ JAVA_1_21_6_PRE2(4431, 21, 6, "PRE2"),
+ JAVA_1_21_6_PRE3(4432, 21, 6, "PRE3"),
+ JAVA_1_21_6_PRE4(4433, 21, 6, "PRE4"),
+ JAVA_1_21_6_RC1(4434, 21, 6, "RC1"),
+ JAVA_1_21_6(4435, 21, 6),
+ JAVA_1_21_7_RC1(4436, 21, 7, "RC1"),
+ JAVA_1_21_7_RC2(4437, 21, 7, "RC2"),
+ JAVA_1_21_7(4438, 21, 7),
+ JAVA_1_21_8_RC1(4439, 21, 8, "RC1"),
+ JAVA_1_21_8(4440, 21, 8),
+ JAVA_1_21_9_25W31A(4534, 21, 9, "25w31a"),
+ JAVA_1_21_9_25W32A(4536, 21, 9, "25w32a"),
+ JAVA_1_21_9_25W33A(4538, 21, 9, "25w33a"),
+ JAVA_1_21_9_25W34A(4539, 21, 9, "25w34a"),
+ JAVA_1_21_9_25W34B(4540, 21, 9, "25w34b"),
+ JAVA_1_21_9_25W35A(4542, 21, 9, "25w35a"),
+ JAVA_1_21_9_25W36A(4545, 21, 9, "25w36a"),
+ JAVA_1_21_9_25W36B(4546, 21, 9, "25w36b"),
+ JAVA_1_21_9_25W37A(4547, 21, 9, "25w37a"),
+ JAVA_1_21_9_PRE1(4549, 21, 9, "PRE1"),
+ JAVA_1_21_9_PRE2(4550, 21, 9, "PRE2"),
+ JAVA_1_21_9_PRE3(4551, 21, 9, "PRE3"),
+ JAVA_1_21_9_PRE4(4552, 21, 9, "PRE4"),
+ JAVA_1_21_9_RC1(4553, 21, 9, "RC1"),
+ JAVA_1_21_9(4554, 21, 9),
+ JAVA_1_21_10_RC1(4555, 21, 10, "RC1"),
+ JAVA_1_21_10(4556, 21, 10),
+ JAVA_1_21_11_25W41A(4653, 21, 11, "25w41a"),
+ JAVA_1_21_11_25W42A(4654, 21, 11, "25w42a"),
+ JAVA_1_21_11_25W43A(4655, 21, 11, "25w43a"),
+ JAVA_1_21_11_25W44A(4659, 21, 11, "25w44a"),
+ JAVA_1_21_11_25W45A(4660, 21, 11, "25w45a"),
+ JAVA_1_21_11_25W46A(4662, 21, 11, "25w46a"),
+ JAVA_1_21_11_PRE1(4663, 21, 11, "PRE1"),
+ JAVA_1_21_11_PRE2(4664, 21, 11, "PRE2"),
+ JAVA_1_21_11_PRE3(4665, 21, 11, "PRE3"),
+ JAVA_1_21_11_PRE4(4666, 21, 11, "PRE4"),
+ JAVA_1_21_11_PRE5(4667, 21, 11, "PRE5"),
+ JAVA_1_21_11_RC1(4668, 21, 11, "RC1"),
+ JAVA_1_21_11_RC2(4669, 21, 11, "RC2"),
+ JAVA_1_21_11_RC3(4670, 21, 11, "RC3"),
+ JAVA_1_21_11(4671, 21, 11),;
+
+ private static final int[] ids;
+ private static final DataVersion latestFullReleaseVersion;
+ private final int id;
+ private final int minor;
+ private final int patch;
+ private final boolean isFullRelease;
+ private final boolean isWeeklyRelease;
+ private final String buildDescription;
+ private final String str;
+ private final String simpleStr;
+
+ static {
+ // enum is maintained in order with a unit test to enforce the convention - so no need to sort
+ ids = Arrays.stream(values()).mapToInt(DataVersion::id).toArray();
+ // Highest-id full release wins; Optional.get() is safe so long as at least one full-release constant is defined.
+ latestFullReleaseVersion = Arrays.stream(values())
+ .sorted(Comparator.reverseOrder())
+ .filter(DataVersion::isFullRelease)
+ .findFirst().get();
+ }
+
+ /** Defines a full release version (no build description). */
+ DataVersion(int id, int minor, int patch) {
+ this(id, minor, patch, null);
+ }
+
+ /**
+ * @param id data version
+ * @param minor minor version
+ * @param patch patch number, LT0 to indicate this data version is not a full release version
+ * @param buildDescription Suggested convention (unit test enforced):
+ * - NULL (given value ignored) for full release
+ * - CT# for combat tests (e.g. CT6, CT6b)
+ * - XS# for experimental snapshots(e.g. XS1, XS2)
+ * - YYwWWz for weekly builds (e.g. 21w37a, 21w37b)
+ * - PRE# for pre-releases (e.g. PRE1, PRE2)
+ * - RC# for release candidates (e.g. RC1, RC2)
+ */
+ DataVersion(int id, int minor, int patch, String buildDescription) {
+ // A null (or explicit "FINAL") description marks a full release.
+ this.isFullRelease = buildDescription == null || "FINAL".equalsIgnoreCase(buildDescription);
+ if (!isFullRelease && buildDescription.isEmpty())
+ throw new IllegalArgumentException("buildDescription required for non-full releases");
+ // Weekly builds follow the YYwWWz convention, so 'w' is always the third character (e.g. 21w37a).
+ this.isWeeklyRelease = buildDescription != null && buildDescription.length() >= 5 && buildDescription.charAt(2) == 'w';
+ this.id = id;
+ this.minor = minor;
+ this.patch = patch;
+ this.buildDescription = isFullRelease ? "FINAL" : buildDescription;
+ // str format: "<id> (1.<minor>[.<patch>][ <buildDescription>])"; constants with minor == 0 fall back to the enum name.
+ if (minor > 0) {
+ StringBuilder sb = new StringBuilder();
+ sb.append(id).append(" (1.").append(minor);
+ if (patch > 0) sb.append('.').append(patch);
+ if (!isFullRelease) sb.append(' ').append(buildDescription);
+ this.str = sb.append(')').toString();
+ } else {
+ this.str = name();
+ }
+
+ // simpleStr format: the raw weekly name (e.g. "21w37a"), otherwise "1.<minor>[.<patch>][-<builddescription>]".
+ // Lower-cased here so that find(String) can match case-insensitively.
+ StringBuilder simpleStrBuilder = new StringBuilder();
+ if (isWeeklyRelease) {
+ simpleStrBuilder.append(buildDescription);
+ } else {
+ simpleStrBuilder.append("1.").append(minor);
+ if (patch != 0) {
+ simpleStrBuilder.append('.').append(patch);
+ }
+ if (buildDescription != null) {
+ simpleStrBuilder.append('-').append(buildDescription.toLowerCase(Locale.ENGLISH));
+ }
+ }
+ simpleStr = simpleStrBuilder.toString();
+ }
+
+ /** The Minecraft data version number this constant represents (e.g. 2860 for 1.18.0). */
+ public int id() {
+ return id;
+ }
+
+ /**
+ * Version format: major.minor.patch
+ * Always 1 — every version defined by this enum is a Minecraft Java 1.x release.
+ */
+ public int major() {
+ return 1;
+ }
+
+ /**
+ * Version format: major.minor.patch
+ * E.g. the 18 of 1.18.2.
+ */
+ public int minor() {
+ return minor;
+ }
+
+ /**
+ * Version format: major.minor.patch
+ * E.g. the 2 of 1.18.2; 0 for the initial release of a minor version.
+ */
+ public int patch() {
+ return patch;
+ }
+
+ /**
+ * True for full release.
+ * False for all other builds (e.g. weeklies, experimental, pre-releases, and release-candidates).
+ */
+ public boolean isFullRelease() {
+ return isFullRelease;
+ }
+
+ /** True for weekly snapshot builds whose description follows the YYwWWz convention (e.g. 21w37a). */
+ public boolean isWeeklyRelease() {
+ return isWeeklyRelease;
+ }
+
+ /**
+ * Description of the minecraft build which this {@link DataVersion} refers to.
+ * You'll find {@link #toString()} to be more useful in general.
+ * Convention used:
+ * - "FINAL" for full release
+ * - YYwWWz for weekly builds (e.g. 21w37a, 21w37b)
+ * - CT# for combat tests (e.g. CT6, CT6b)
+ * - XS# for experimental snapshots(e.g. XS1, XS2)
+ * - PRE# for pre-releases (e.g. PRE1, PRE2)
+ * - RC# for release candidates (e.g. RC1, RC2)
+ */
+ public String getBuildDescription() {
+ return buildDescription;
+ }
+
+ /**
+ * TRUE as of JAVA_1_14_PRE1
+ * Indicates if point of interest .mca files exist. E.g. 'poi/r.0.0.mca'
+ * Technically poi files were introduced with {@link #JAVA_1_14_19W11A} but the nbt structure was quickly
+ * changed and this 3 week span of weekly versions isn't worth the hassle of supporting.
+ * @return true if this version is {@link #JAVA_1_14_PRE1} or later
+ * @since {@link #JAVA_1_14_PRE1}
+ */
+ public boolean hasPoiMca() {
+ return this.id >= JAVA_1_14_PRE1.id;
+ }
+
+ /**
+ * TRUE as of 1.17
+ * Entities were pulled out of terrain 'region/r.X.Z.mca' files into their own .mca files. E.g. 'entities/r.0.0.mca'
+ * @return true if this version is {@link #JAVA_1_17_20W45A} or later
+ * @since {@link #JAVA_1_17_20W45A}
+ */
+ public boolean hasEntitiesMca() {
+ return this.id >= JAVA_1_17_20W45A.id;
+ }
+
+ /**
+ * Finds the best-matching known version: an exact id match when one exists, otherwise
+ * the greatest known version whose id is less than the one given.
+ * @param dataVersion raw data version number
+ * @return best matching constant; UNKNOWN if dataVersion precedes all known versions
+ */
+ public static DataVersion bestFor(int dataVersion) {
+ int found = Arrays.binarySearch(ids, dataVersion);
+ if (found < 0) {
+ // binarySearch returned -(insertionPoint) - 1; convert it to insertionPoint - 1,
+ // i.e. the index of the greatest id strictly less than dataVersion.
+ found = (found + 2) * -1;
+ if (found < 0) return UNKNOWN;
+ }
+ return values()[found];
+ }
+
+ /**
+ * @param simpleVersionStr such as "1.12", "21w13a", "1.19.1-pre3"
+ * @return exact match or null
+ * Note: comparison is case-insensitive (the sought string is lower-cased to match {@link #toSimpleString()}).
+ */
+ public static DataVersion find(String simpleVersionStr) {
+ final String seeking = simpleVersionStr.toLowerCase(Locale.ENGLISH);
+ return Arrays.stream(values()).filter(v -> v.simpleStr.equals(seeking)).findFirst().orElse(null);
+ }
+
+ /**
+ * @return The previous known data version or null if there is none.
+ * Relies on enum declaration order matching ascending data version order (unit test enforced).
+ */
+ public DataVersion previous() {
+ if (this.ordinal() > 0)
+ return values()[this.ordinal() - 1];
+ else
+ return null;
+ }
+
+ /**
+ * @return The next known data version or null if there is none.
+ * Relies on enum declaration order matching ascending data version order (unit test enforced).
+ */
+ public DataVersion next() {
+ if (this.ordinal() < ids.length - 1)
+ return values()[this.ordinal() + 1];
+ else
+ return null;
+ }
+
+ /**
+ * @return The latest full release (non-weekly, non pre-release, etc) version defined.
+ * Computed once in the static initializer.
+ */
+ public static DataVersion latest() {
+ return latestFullReleaseVersion;
+ }
+
+ /** E.g. "2860 (1.18)" or "2856 (1.18 RC1)"; falls back to the enum name for constants with minor == 0. */
+ @Override
+ public String toString() {
+ return str;
+ }
+
+ /** E.g. "1.18", "21w37a", "1.18-pre1" — the format accepted by {@link #find(String)}. */
+ public String toSimpleString() {
+ return simpleStr;
+ }
+
+ /**
+ * Indicates if this version would be crossed by the transition between versionA and versionB.
+ * This is useful for determining if a data upgrade or downgrade would be required to support
+ * changing from versionA to versionB. The order of A and B don't matter.
+ *
+ * When using this function, call it on the data version in which a change exists. For
+ * example if you need to know if changing from A to B would require changing to/from 3D
+ * biomes then use {@code JAVA_1_15_19W36A.isCrossedByTransition(A, B)} as
+ * {@link #JAVA_1_15_19W36A} is the version which added 3D biomes.
+ *
+ * In short, if this function returns true then the act of changing data versions from A
+ * to B can be said to "cross" this version which is an indication that such a change should
+ * either be considered illegal or that upgrade/downgrade action is required.
+ *
+ * @param versionA older or newer data version than B
+ * @param versionB older or newer data version than A
+ * @return true if chaining from version A to version B, or form B to A, would result in
+ * crossing this version. This version is considered to be crossed if {@code A != B} and
+ * {@code min(A, B) < this.id <= max(A, B)}
+ * @see #throwUnsupportedVersionChangeIfCrossed(int, int)
+ */
+ public boolean isCrossedByTransition(int versionA, int versionB) {
+ // no transition at all when both versions are the same
+ if (versionA == versionB) return false;
+ // normalize the arguments so the check is always min < id <= max
+ if (versionA < versionB) {
+ return versionA < id && id <= versionB;
+ } else {
+ return versionB < id && id <= versionA;
+ }
+ }
+
+ /**
+ * Throws {@link UnsupportedVersionChangeException} if {@link #isCrossedByTransition(int, int)}
+ * were to return true for the given arguments.
+ * @param versionA older or newer data version than B
+ * @param versionB older or newer data version than A
+ * @throws UnsupportedVersionChangeException if the A-to-B transition crosses this version
+ */
+ public void throwUnsupportedVersionChangeIfCrossed(int versionA, int versionB) {
+ if (isCrossedByTransition(versionA, versionB)) {
+ throw new UnsupportedVersionChangeException(this, versionA, versionB);
+ }
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/EntitiesChunk.java b/src/main/java/io/github/ensgijs/nbt/mca/EntitiesChunk.java
new file mode 100644
index 00000000..7c83a6ee
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/EntitiesChunk.java
@@ -0,0 +1,32 @@
+package io.github.ensgijs.nbt.mca;
+
+import io.github.ensgijs.nbt.mca.io.McaFileHelpers;
+import io.github.ensgijs.nbt.tag.CompoundTag;
+import io.github.ensgijs.nbt.mca.entities.Entity;
+import io.github.ensgijs.nbt.mca.entities.EntityFactory;
+
+/**
+ * Thin default implementation of {@link EntitiesChunkBase}.
+ *
+ * @see EntitiesChunkBase
+ * @see EntityFactory
+ * @see McaFileHelpers#MCA_CREATORS
+ */
+public class EntitiesChunk extends EntitiesChunkBase {
+
+ /** Creates an empty entities chunk bound to the given data version. */
+ protected EntitiesChunk(int dataVersion) {
+ super(dataVersion);
+ }
+
+ /** Deserializes an entities chunk from the given NBT tag. */
+ public EntitiesChunk(CompoundTag data) {
+ super(data);
+ }
+
+ /** Deserializes an entities chunk from the given NBT tag, honoring the given load flags. */
+ public EntitiesChunk(CompoundTag data, long loadFlags) {
+ super(data, loadFlags);
+ }
+
+ /** Creates an empty entities chunk at {@code DataVersion.latest()}. */
+ public EntitiesChunk() {
+ super(DataVersion.latest().id());
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/EntitiesChunkBase.java b/src/main/java/io/github/ensgijs/nbt/mca/EntitiesChunkBase.java
new file mode 100644
index 00000000..9c518818
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/EntitiesChunkBase.java
@@ -0,0 +1,412 @@
+package io.github.ensgijs.nbt.mca;
+
+import io.github.ensgijs.nbt.io.NamedTag;
+import io.github.ensgijs.nbt.mca.io.LoadFlags;
+import io.github.ensgijs.nbt.mca.io.McaFileHelpers;
+import io.github.ensgijs.nbt.mca.io.MoveChunkFlags;
+import io.github.ensgijs.nbt.query.NbtPath;
+import io.github.ensgijs.nbt.tag.CompoundTag;
+import io.github.ensgijs.nbt.tag.DoubleTag;
+import io.github.ensgijs.nbt.tag.IntArrayTag;
+import io.github.ensgijs.nbt.tag.ListTag;
+import io.github.ensgijs.nbt.mca.entities.Entity;
+import io.github.ensgijs.nbt.mca.entities.EntityFactory;
+import io.github.ensgijs.nbt.mca.entities.EntityUtil;
+import io.github.ensgijs.nbt.mca.util.RegionBoundingRectangle;
+import io.github.ensgijs.nbt.util.ArgValidator;
+import io.github.ensgijs.nbt.mca.util.ChunkBoundingRectangle;
+import io.github.ensgijs.nbt.mca.util.VersionAware;
+
+import java.util.*;
+import java.util.function.Consumer;
+import java.util.stream.Stream;
+
+/**
+ * Provides all the basic functionality necessary for this type of chunk with abstraction hooks
+ * making it easy to extend this class and modify the factory behavior of {@link McaFileHelpers} to create
+ * instances of your custom class.
+ *
+ * @see EntitiesChunk
+ * @see EntityFactory
+ * @see McaFileHelpers#MCA_CREATORS
+ */
+public abstract class EntitiesChunkBase extends ChunkBase implements Iterable {
+ // Private to keep child classes clean (and well behaved) - child classes should access this via getEntities()
+ // Not populated until getEntities() is called.
+ private List entities;
+ // Not populated if loaded in RAW mode or if load flags did not include ENTITIES
+ protected ListTag entitiesTag;
+
+ protected static final VersionAware POSITION_PATH = new VersionAware()
+ .register(DataVersion.JAVA_1_17_20W45A.id(), NbtPath.of("Position"));
+
+ protected static final VersionAware ENTITIES_PATH = new VersionAware()
+ .register(DataVersion.JAVA_1_17_20W45A.id(), NbtPath.of("Entities"));
+
+ /** relative to ENTITIES_PATH[] */
+ protected static final VersionAware ENTITIES_BRAIN_MEMORIES_PATH = new VersionAware()
+ .register(0, NbtPath.of("Brain.memories"));
+
+ protected EntitiesChunkBase(int dataVersion) {
+ super(dataVersion);
+ }
+
+ public EntitiesChunkBase(CompoundTag data) {
+ super(data);
+ }
+
+ public EntitiesChunkBase(CompoundTag data, long loadFlags) {
+ super(data, loadFlags);
+ }
+
+ @Override
+ protected void initReferences(long loadFlags) {
+ // remember: this isn't called when loaded in RAW mode, see base class
+ if (dataVersion < DataVersion.JAVA_1_17_20W45A.id()) {
+ throw new UnsupportedOperationException(
+ "This class can only be used to read entities mca files introduced in JAVA_1_17_20W45A");
+ }
+
+ int[] posXZ = getTagValue(POSITION_PATH, IntArrayTag::getValue);
+ if (posXZ == null || posXZ.length != 2) {
+ throw new IllegalArgumentException("POSITION tag missing or invalid");
+ }
+ chunkX = posXZ[0];
+ chunkZ = posXZ[1];
+
+ if ((loadFlags & LoadFlags.ENTITIES) > 0) {
+ entitiesTag = getTag(ENTITIES_PATH);
+ ArgValidator.check(entitiesTag != null, "ENTITIES tag not found");
+ // Don't call initEntities() here, let getEntities do this lazily to keep things lean
+ }
+ }
+
+ /**
+ * Called to initialize entity wrappers - implementers should respect the {@code raw} setting and DO NOTHING
+ * if called when raw is set.
+ */
+ protected void initEntities() {
+ if (raw) return;
+ if (entitiesTag == null) {
+ // This branch should not be reachable in any typical usage scenario. The only way
+ // this state should happen is if there is a bug in the implementers of this class.
+ throw new IllegalStateException("Entities nbt tag was not loaded for this chunk");
+ }
+ entities = EntityFactory.fromListTag(entitiesTag, dataVersion);
+ }
+
+ /** {@inheritDoc} */
+ public String getMcaType() {
+ return "entities";
+ }
+
+ /**
+ * Gets the list of entity object instances representing all entities in this chunk.
+ * This list is lazy-instantiated to avoid the memory and compute costs of populating it if it's unused.
+ * In other words, calling this for the first time will be slower than making successive calls.
+ * If performance is everything for you, but you would still like to work with higher level objects
+ * than nbt tags, you can use {@link #getEntitiesTag()}, find an entity record you want to manipulate
+ * and use {@link EntityFactory#create(CompoundTag, int)} to get an entity instance then call
+ * {@link Entity#updateHandle()} to apply your changes all the way back to the entities tag held
+ * for this chunk.
+ */
+ public List getEntities() {
+ checkRaw();
+ if (entities == null) initEntities();
+ return entities;
+ }
+
+ /**
+ * Gets an indication of if the result of {@link #getEntities()} has been computed, or if calling it
+ * will trigger lazy instantiation.
+ *
+ * @return true if the result of {@link #getEntities()} is already computed; false if calling {@link #getEntities()}
+ * will trigger creation of wrapper objects.
+ */
+ public boolean areWrappedEntitiesGenerated() {
+ return entities != null;
+ }
+
+ /**
+ * Sets the entities in this chunk. You should probably follow this call with a call to
+ * {@link #fixEntityLocations(long)} unless you are sure all of the given entities are already
+ * within the chunks bounds.
+ * Does not trigger a handle update. The result of calling {@link #getEntitiesTag()}
+ * will not change until {@link #updateHandle()} has been called.
+ * @param entities Entities to set, not null, may be empty.
+ * @throws UnsupportedOperationException if loaded in raw mode
+ * @see #clearEntities()
+ */
+ public void setEntities(List entities) {
+ checkRaw();
+ ArgValidator.requireValue(entities);
+ this.entities = entities;
+ }
+
+ /**
+ * Gets the entities nbt tag by reference.
+ * Result may be null if chunk was loaded with a LoadFlags that excluded Entities.
+ * If you have called {@link #setEntities(List)} you will need to call {@link #updateHandle()} for the
+ * result of this method to be updated.
+ * @throws UnsupportedOperationException if loaded in raw mode
+ */
+ public ListTag getEntitiesTag() {
+ checkRaw();
+ return entitiesTag;
+ }
+
+ /**
+ * Sets the entities tag and causes the next call to {@link #getEntities()} to recreate wrapped entities.
+ * The given tag is also set as the entities tag in the underlying CompoundTag handle in the version appropriate
+ * location. I.e. calling this method or modifying the tag passed after calling this method will affect the
+ * value returned by {@link #getHandle()}.
+ * Raw mode behavior: supported!
+ * Sets the given tag in the held nbt data handle in its version correct place. Does not make calling
+ * {@link #getEntitiesTag()} or {@link #getEntities()} legal for chunks loaded in raw mode.
+ *
+ *
+ * @param entitiesTag Not null. If you want to clear the entities tag use {@link #clearEntities()} instead or if
+ * operating in raw mode you can pass a new empty tag to take advantage of this classes
+ * version awareness to place the tag in the correct location within the nbt data tag
+ * as returned by {@link #getHandle()} and {@link #updateHandle()}
+ */
+ public void setEntitiesTag(ListTag entitiesTag) {
+ checkPartial();
+ ArgValidator.requireValue(entitiesTag);
+ setEntitiesTagInternal(entitiesTag);
+ }
+
+ protected void setEntitiesTagInternal(ListTag entitiesTag) {
+ if (data != null) { // only sync the data tag if we have it - data will be null if chunk was partially loaded
+ setTag(ENTITIES_PATH, entitiesTag);
+ }
+ if (!raw) {
+ this.entitiesTag = entitiesTag;
+ }
+ // respect lazy loading and cause the next call to getEntities() to rebuild the wrapped entities
+ entities = null;
+ }
+
+ /**
+ * Clears the entities known to this chunk. If you have previously retrieved the list of entities from
+ * {@link #getEntities()} that list is unaffected by this call.
+ * Likewise a new entities tag is also created and any result previously returned from {@link #getEntitiesTag()}
+ * is also unaffected by this call.
+ * @throws UnsupportedOperationException if loaded in raw mode
+ */
+ public void clearEntities() {
+ checkRaw();
+ setEntitiesTagInternal(new ListTag<>(CompoundTag.class));
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public boolean moveChunkImplemented() {
+ return entities != null || entitiesTag != null || ENTITIES_PATH.get(dataVersion).exists(data);
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public boolean moveChunkHasFullVersionSupport() {
+ return moveChunkImplemented();
+ }
+
+ /**
+ * Sets this chunks absolute XZ and calls {@link #fixEntityLocations(long)} returning its result.
+ * Moving while in RAW mode is supported.
+ * @param newChunkX new absolute chunk-x
+ * @param newChunkZ new absolute chunk-z
+ * @param moveChunkFlags {@link MoveChunkFlags} OR'd together to control move chunk behavior.
+ * @param force unused
+ * @return true if any data was changed as a result of this call
+ * @throws UnsupportedOperationException if the data required to move this chunk is missing
+ */
+ @Override
+ public boolean moveChunk(int newChunkX, int newChunkZ, long moveChunkFlags, boolean force) {
+ if (!moveChunkImplemented())
+ throw new UnsupportedOperationException("Missing the data required to move this chunk!");
+ if (!RegionBoundingRectangle.MAX_WORLD_BORDER_BOUNDS.containsChunk(newChunkX, newChunkZ)) {
+ throw new IllegalArgumentException("Chunk XZ must be within the maximum world bounds.");
+ }
+ if (this.chunkX == newChunkX && this.chunkZ == newChunkZ) return false;
+ this.chunkX = newChunkX;
+ this.chunkZ = newChunkZ;
+ if (raw) {
+ setTag(POSITION_PATH, new IntArrayTag(newChunkX, newChunkZ));
+ }
+ if (fixEntityLocations(moveChunkFlags)) {
+ if ((moveChunkFlags & MoveChunkFlags.AUTOMATICALLY_UPDATE_HANDLE) > 0) {
+ updateHandle();
+ }
+ }
+ return true;
+ }
+
+ /**
+ * Scans all entities and moves any which are outside this chunks bounds into it preserving their
+ * relative location from their source chunk.
+ * Fixing entity locations while in RAW mode is supported.
+ * @return true if any entity locations were changed; false if no changes were made.
+ * @throws UnsupportedOperationException if the data required to relocate entities is missing
+ */
+ public boolean fixEntityLocations(long moveChunkFlags) {
+ if (!moveChunkImplemented())
+ throw new UnsupportedOperationException("Missing the data required to move this chunk!");
+ if (this.chunkX == NO_CHUNK_COORD_SENTINEL || this.chunkZ == NO_CHUNK_COORD_SENTINEL) {
+ throw new IllegalStateException("Chunk XZ not known");
+ }
+ boolean changed = false;
+ if (entities != null) {
+ final NbtPath brainMemoriesPath = ENTITIES_BRAIN_MEMORIES_PATH.get(dataVersion);
+ final NbtPath memoryPosPath = NbtPath.of("value.pos");
+ final ChunkBoundingRectangle cbr = new ChunkBoundingRectangle(chunkX, chunkZ);
+ for (ET entity : entities) {
+ if (!cbr.containsBlock(entity.getX(), entity.getZ())) {
+ entity.setX(cbr.relocateX(entity.getX()));
+ entity.setZ(cbr.relocateZ(entity.getZ()));
+ if ((moveChunkFlags & MoveChunkFlags.RANDOMIZE_ENTITY_UUID) > 0) {
+ entity.setUuid(UUID.randomUUID());
+ }
+ changed = true;
+ }
+ if (brainMemoriesPath.exists(entity.getHandle())) {
+ CompoundTag memoriesTag = brainMemoriesPath.getTag(entity.getHandle());
+ for (NamedTag entry : memoriesTag) {
+ int[] pos = memoryPosPath.getIntArray(entry.getTag());
+ if (pos != null && !cbr.containsBlock(pos[0], pos[2])) {
+ // TODO: dimension is also in this data
+ pos[0] = cbr.relocateX(pos[0]);
+ pos[2] = cbr.relocateZ(pos[2]);
+ changed = true;
+ }
+ }
+ }
+ }
+ } else if (entitiesTag != null) {
+ changed = fixEntityLocations(dataVersion, moveChunkFlags, entitiesTag, new ChunkBoundingRectangle(chunkX, chunkZ));
+ } else if (raw) {
+ ListTag tag = getTag(ENTITIES_PATH);
+ if (tag == null)
+ throw new UnsupportedOperationException("Missing the data required to move this chunk! Didn't find '" +
+ ENTITIES_PATH.get(dataVersion) +
+ "' tag while in RAW mode.");
+ changed = fixEntityLocations(dataVersion, moveChunkFlags, tag, new ChunkBoundingRectangle(chunkX, chunkZ));
+ }
+ return changed;
+ }
+
+ /**
+ * Relocates any entity (and any brain memory position) that falls outside {@code cbr} into it,
+ * preserving each entity's relative position within its source chunk. Recurses into passenger
+ * lists so riders keep sharing their mount's location.
+ * @param dataVersion data version of the entity nbt; used to resolve version-aware nbt paths
+ * @param moveChunkFlags {@link MoveChunkFlags} OR'd together to control move behavior
+ * @param entityTags entity compound tags to fix; may be null or empty (no-op)
+ * @param cbr chunk bounds the entities should be moved into
+ * @return true if any tag data was modified
+ */
+ static boolean fixEntityLocations(int dataVersion, long moveChunkFlags, ListTag entityTags, ChunkBoundingRectangle cbr) {
+ if (entityTags == null || entityTags.isEmpty()) {
+ return false;
+ }
+ boolean changed = false;
+ final NbtPath brainMemoriesPath = ENTITIES_BRAIN_MEMORIES_PATH.get(dataVersion);
+ final NbtPath memoryPosPath = NbtPath.of("value.pos");
+ for (CompoundTag entityTag : entityTags) {
+ ListTag posTag = entityTag.getListTag("Pos").asDoubleTagList();
+ double x = posTag.get(0).asDouble();
+ double z = posTag.get(2).asDouble();
+ if (!cbr.containsBlock(x, z)) {
+ posTag.set(0, new DoubleTag(cbr.relocateX(x)));
+ posTag.set(2, new DoubleTag(cbr.relocateZ(z)));
+ if ((moveChunkFlags & MoveChunkFlags.RANDOMIZE_ENTITY_UUID) > 0) {
+ EntityUtil.setUuid(dataVersion, entityTag, UUID.randomUUID());
+ }
+ changed = true;
+ }
+
+ // Villager-style brain memories carry absolute block positions that must move with the entity.
+ if (brainMemoriesPath.exists(entityTag)) {
+ CompoundTag memoriesTag = brainMemoriesPath.getTag(entityTag);
+ for (NamedTag entry : memoriesTag) {
+ int[] pos = memoryPosPath.getIntArray(entry.getTag());
+ if (pos != null && !cbr.containsBlock(pos[0], pos[2])) {
+ // TODO: dimension is also in this data
+ pos[0] = cbr.relocateX(pos[0]);
+ pos[2] = cbr.relocateZ(pos[2]);
+ changed = true;
+ }
+ }
+ }
+
+ // This is correct even for boats visually straddling a chunk border, the passengers share the boat
+ // location and the order of the passengers apparently controls their visual offset in game.
+ // Example (trimmed down) F3+I capture of such a boat:
+ // /summon minecraft:boat -1002.50 63.00 -672.01 {Type:"acacia",
+ // Passengers:[
+ // {id:"minecraft:cow",Pos:[-1002.5d,63.04d,-672.01d]},
+ // {id:"minecraft:pig",Pos:[-1002.5d,63.04d,-672.01d]}
+ // ],Rotation:[-180.0f,0.0f]}
+ if (entityTag.containsKey("Passengers")) {
+ changed |= fixEntityLocations(dataVersion, moveChunkFlags, entityTag.getListTag("Passengers").asCompoundTagList(), cbr);
+ }
+ }
+ return changed;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public Iterator iterator() {
+ return getEntities().iterator();
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public void forEach(Consumer super ET> action) {
+ getEntities().forEach(action);
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public Spliterator spliterator() {
+ return getEntities().spliterator();
+ }
+
+ /** {@inheritDoc} */
+ public Stream stream() {
+ return getEntities().stream();
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public void setDataVersion(int dataVersion) {
+ DataVersion.JAVA_1_17_20W45A.throwUnsupportedVersionChangeIfCrossed(this.dataVersion, dataVersion);
+ super.setDataVersion(dataVersion);
+ }
+
+ /**
+ * Synchronizes the held nbt data handle with this chunk's current state (position tag and,
+ * if wrapped entities exist, the entities tag) and returns it.
+ * @return the updated data handle; for raw chunks the handle is returned unmodified.
+ */
+ @Override
+ public CompoundTag updateHandle() {
+ checkPartial();
+ if (!raw) {
+ super.updateHandle();
+ if (chunkX != NO_CHUNK_COORD_SENTINEL && chunkZ != NO_CHUNK_COORD_SENTINEL) {
+ setTag(POSITION_PATH, new IntArrayTag(chunkX, chunkZ));
+ }
+
+ // if getEntities() was never called then don't rebuild entitiesTag
+ if (entities != null) {
+ // entitiesTag may be null when this chunk was created from scratch (or loaded without
+ // the ENTITIES LoadFlag) and the user supplied entities via setEntities() - which
+ // intentionally does not touch entitiesTag. Create the tag on demand instead of NPE'ing.
+ if (entitiesTag == null) {
+ entitiesTag = new ListTag<>(CompoundTag.class);
+ } else {
+ entitiesTag.clear();
+ }
+ for (ET entity : entities) {
+ entitiesTag.add(entity.updateHandle());
+ }
+ }
+ setTagIfNotNull(ENTITIES_PATH, entitiesTag);
+ }
+ return data;
+ }
+
+ @Override
+ public CompoundTag updateHandle(int xPos, int zPos) {
+ if (!raw) {
+ if (chunkX == NO_CHUNK_COORD_SENTINEL) chunkX = xPos;
+ if (chunkZ == NO_CHUNK_COORD_SENTINEL) chunkZ = zPos;
+ ArgValidator.check(xPos == chunkX && zPos == chunkZ,
+ "Attempted to write chunk with incorrect chunk XZ. Chunk must be moved with moveChunk(..) first.");
+ updateHandle();
+ }
+ return data;
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/McaEntitiesFile.java b/src/main/java/io/github/ensgijs/nbt/mca/McaEntitiesFile.java
new file mode 100644
index 00000000..5f204548
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/McaEntitiesFile.java
@@ -0,0 +1,31 @@
+package io.github.ensgijs.nbt.mca;
+
+/**
+ * Represents an Entities data mca file (one that lives in the /entities folder). Entity mca files were added in 1.17
+ * but this class can be used to read older /region/*.mca files as well - for an example of this see
+ * EntitiesMCAFileTest testLoadingOldRegionMcaAsEntityMca
+ */
+public class McaEntitiesFile extends McaFileBase {
+ /**
+ * @param regionX region x coordinate, usually taken from the file name {@code r.x.z.mca}
+ * @param regionZ region z coordinate, usually taken from the file name {@code r.x.z.mca}
+ */
+ public McaEntitiesFile(int regionX, int regionZ) {
+ super(regionX, regionZ);
+ }
+
+ /** Specifies the data version used when creating new chunks (not loading from disk). */
+ public McaEntitiesFile(int regionX, int regionZ, int defaultDataVersion) {
+ super(regionX, regionZ, defaultDataVersion);
+ }
+
+ /** Specifies the data version used when creating new chunks (not loading from disk). */
+ public McaEntitiesFile(int regionX, int regionZ, DataVersion defaultDataVersion) {
+ super(regionX, regionZ, defaultDataVersion);
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public Class chunkClass() {
+ return EntitiesChunk.class;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public EntitiesChunk createChunk() {
+ EntitiesChunk chunk = new EntitiesChunk(getDefaultChunkDataVersion());
+ return chunk;
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/McaFileBase.java b/src/main/java/io/github/ensgijs/nbt/mca/McaFileBase.java
new file mode 100644
index 00000000..f83bfdc4
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/McaFileBase.java
@@ -0,0 +1,506 @@
+package io.github.ensgijs.nbt.mca;
+
+
+import io.github.ensgijs.nbt.io.CompressionType;
+import io.github.ensgijs.nbt.mca.io.LoadFlags;
+import io.github.ensgijs.nbt.mca.io.McaFileHelpers;
+import io.github.ensgijs.nbt.mca.util.ChunkIterator;
+import io.github.ensgijs.nbt.util.ArgValidator;
+import io.github.ensgijs.nbt.mca.util.IntPointXZ;
+
+import java.io.IOException;
+import java.io.RandomAccessFile;
+import java.lang.reflect.Array;
+import java.util.NoSuchElementException;
+import java.util.Objects;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+
+/**
+ * An abstract representation of an mca (aka "region") file.
+ */
+public abstract class McaFileBase implements Iterable {
+
+ protected int regionX, regionZ;
+ protected T[] chunks;
+ protected int minDataVersion;
+ protected int maxDataVersion;
+ protected int defaultDataVersion = DataVersion.latest().id(); // data version to use when creating new chunks
+
+ /**
+ * MCA file represents a world save file used by Minecraft to store world
+ * data on the hard drive.
+ * This constructor needs the x- and z-coordinates of the stored region,
+ * which can usually be taken from the file name {@code r.x.z.mca}
+ *
+ * Use this constructor when you plan to {@code deserialize(..)} an MCA file.
+ * If you are creating an MCA file from scratch prefer {@link #McaFileBase(int, int, int)}.
+ * @param regionX The x-coordinate of this mca file in region coordinates.
+ * @param regionZ The z-coordinate of this mca file in region coordinates.
+ */
+ public McaFileBase(int regionX, int regionZ) {
+ this.regionX = regionX;
+ this.regionZ = regionZ;
+ }
+
+ /**
+ * Use this constructor to specify a default data version when creating MCA files without loading
+ * from disk.
+ *
+ * @param regionX The x-coordinate of this mca file in region coordinates.
+ * @param regionZ The z-coordinate of this mca file in region coordinates.
+ * @param defaultDataVersion Data version which will be used when creating new chunks.
+ */
+ public McaFileBase(int regionX, int regionZ, int defaultDataVersion) {
+ this.regionX = regionX;
+ this.regionZ = regionZ;
+ this.defaultDataVersion = defaultDataVersion;
+ this.minDataVersion = defaultDataVersion;
+ this.maxDataVersion = defaultDataVersion;
+ }
+
+ /**
+ * Use this constructor to specify a default data version when creating MCA files without loading
+ * from disk.
+ *
+ * @param regionX The x-coordinate of this mca file in region coordinates.
+ * @param regionZ The z-coordinate of this mca file in region coordinates.
+ * @param defaultDataVersion Data version which will be used when creating new chunks.
+ */
+ public McaFileBase(int regionX, int regionZ, DataVersion defaultDataVersion) {
+ this(regionX, regionZ, defaultDataVersion.id());
+ }
+
+ /**
+ * Gets the count of non-null chunks.
+ */
+ public int count() {
+ return (int) stream().filter(Objects::nonNull).count();
+ }
+
+ /**
+ * Get the minimum data version found in the loaded chunk data.
+ */
+ public int getMinChunkDataVersion() {
+ return minDataVersion;
+ }
+
+ /**
+ * Get maximum data version of found in loaded chunk data
+ */
+ public int getMaxChunkDataVersion() {
+ return maxDataVersion;
+ }
+
+ /**
+ * Get chunk version which will be used when automatically creating new chunks
+ * and for chunks created by {@link #createChunk()}.
+ */
+ public int getDefaultChunkDataVersion() {
+ return defaultDataVersion;
+ }
+
+ public DataVersion getDefaultChunkDataVersionEnum() {
+ return DataVersion.bestFor(defaultDataVersion);
+ }
+
+ /**
+ * Set chunk version which will be used when automatically creating new chunks
+ * and for chunks created by {@link #createChunk()}.
+ */
+ public void setDefaultChunkDataVersion(int defaultDataVersion) {
+ this.defaultDataVersion = defaultDataVersion;
+ }
+
+ public void setDefaultChunkDataVersion(DataVersion defaultDataVersion) {
+ this.defaultDataVersion = defaultDataVersion.id();
+ }
+
+ /**
+ * @return The x-value currently set for this mca file in region coordinates.
+ * @see #moveRegion(int, int, long, boolean)
+ */
+ public int getRegionX() {
+ return regionX;
+ }
+
+ /**
+ * @return The z-value currently set for this mca file in region coordinates.
+ * @see #moveRegion(int, int, long, boolean)
+ */
+ public int getRegionZ() {
+ return regionZ;
+ }
+
+ /**
+ * Returns result of calling {@link McaFileHelpers#createNameFromRegionLocation(int, int)}
+ * with current region coordinate values.
+ * @return A mca filename in the format "r.{regionX}.{regionZ}.mca"
+ */
+ public String createRegionName() {
+ return McaFileHelpers.createNameFromRegionLocation(regionX, regionZ);
+ }
+
+ /**
+ * @return type of chunk this MCA File holds
+ */
+ public abstract Class chunkClass();
+
+ /**
+ * Creates a new chunk properly initialized to be compatible with this MCA file. At a minimum the new
+ * chunk will have an appropriate data version set.
+ */
+ public abstract T createChunk();
+
+ /**
+ * Called to deserialize a Chunk. Caller will have set the position of {@code raf} to start reading.
+ * @param raf The {@code RandomAccessFile} to read from.
+ * @param loadFlags A logical or of {@link LoadFlags} constants indicating what data should be loaded
+ * @param timestamp The timestamp when this chunk was last updated as a UNIX timestamp.
+ * @param chunkAbsXZ Absolute chunk XZ coord as calculated from region location and chunk index.
+ * @return Deserialized chunk.
+ * @throws IOException if something went wrong during deserialization.
+ */
+ protected T deserializeChunk(RandomAccessFile raf, long loadFlags, int timestamp, IntPointXZ chunkAbsXZ) throws IOException {
+ T chunk = createChunk();
+ chunk.deserialize(raf, loadFlags, timestamp, chunkAbsXZ.getX(), chunkAbsXZ.getZ());
+ // I'm going to leave this as an "idea" for now
+// if (!chunkAbsXZ.equals(chunk.getChunkX(), chunk.getChunkZ())) {
+// // this would be a good place for a logger warning
+// if (chunk.moveChunkImplemented() && chunk.moveChunkHasFullVersionSupport()) {
+// chunk.moveChunk(chunkAbsXZ.getX(), chunkAbsXZ.getZ());
+// }
+// }
+ return chunk;
+ }
+
+ /**
+ * Reads an .mca file from a {@code RandomAccessFile} into this object.
+ * This method does not perform any cleanups on the data.
+ * @param raf The {@code RandomAccessFile} to read from.
+ * @throws IOException If something went wrong during deserialization.
+ */
+ public void deserialize(RandomAccessFile raf) throws IOException {
+ deserialize(raf, LoadFlags.LOAD_ALL_DATA);
+ }
+
+ /**
+ * Reads an .mca file from a {@code RandomAccessFile} into this object.
+ * This method does not perform any cleanups on the data.
+ * @param raf The {@code RandomAccessFile} to read from.
+ * @param loadFlags A logical or of {@link LoadFlags} constants indicating what data should be loaded
+ * @throws IOException If something went wrong during deserialization.
+ */
+ @SuppressWarnings("unchecked")
+ public void deserialize(RandomAccessFile raf, long loadFlags) throws IOException {
+ chunks = (T[]) Array.newInstance(chunkClass(), 1024);
+ minDataVersion = Integer.MAX_VALUE;
+ maxDataVersion = Integer.MIN_VALUE;
+ final IntPointXZ chunkOffsetXZ = new IntPointXZ(regionX * 32, regionZ * 32);
+ for (int i = 0; i < 1024; i++) {
+ // Location information for a chunk consists of four bytes split into two fields:
+ // the first three bytes are a (big-endian) offset in 4KiB sectors from the start of the file,
+ // and a remaining byte that gives the length of the chunk (also in 4KiB sectors, rounded up).
+ // Chunks are always less than 1MiB in size. If a chunk isn't present in the region file
+ // (e.g. because it hasn't been generated or migrated yet), both fields are zero.
+ raf.seek(i * 4);
+ int offset = raf.read() << 16;
+ offset |= (raf.read() & 0xFF) << 8;
+ offset |= raf.read() & 0xFF;
+ if (raf.readByte() == 0) {
+ continue;
+ }
+ raf.seek(4096 + (i * 4));
+ int timestamp = raf.readInt();
+ raf.seek(4096L * offset + 4); //+4: skip data size
+ T chunk = deserializeChunk(raf, loadFlags, timestamp,
+ getRelativeChunkXZ(i).add(chunkOffsetXZ));
+ chunks[i] = chunk;
+ if (chunk != null && chunk.hasDataVersion()) {
+ if (chunk.getDataVersion() < minDataVersion) {
+ minDataVersion = chunk.getDataVersion();
+ }
+ if (chunk.getDataVersion() > maxDataVersion) {
+ maxDataVersion = chunk.getDataVersion();
+ }
+ }
+ }
+ maxDataVersion = Math.max(maxDataVersion, 0);
+ minDataVersion = Math.min(minDataVersion, maxDataVersion);
+ defaultDataVersion = maxDataVersion;
+ }
+
+ /**
+ * Calls {@link McaFileBase#serialize(RandomAccessFile, CompressionType, boolean)} with ZLIB chunk compression and
+ * without updating any timestamps.
+ * @see McaFileBase#serialize(RandomAccessFile, CompressionType, boolean)
+ * @param raf The {@code RandomAccessFile} to write to.
+ * @return The amount of chunks written to the file.
+ * @throws IOException If something went wrong during serialization.
+ */
+ public int serialize(RandomAccessFile raf) throws IOException {
+ return serialize(raf, CompressionType.ZLIB, false);
+ }
+
+ /**
+ * Calls {@link McaFileBase#serialize(RandomAccessFile, CompressionType, boolean)} without updating any timestamps.
+ * @see McaFileBase#serialize(RandomAccessFile, CompressionType, boolean)
+ * @param raf The {@code RandomAccessFile} to write to.
+ * @return The amount of chunks written to the file.
+ * @throws IOException If something went wrong during serialization.
+ */
+ public int serialize(RandomAccessFile raf, CompressionType chunkCompressionType) throws IOException {
+ return serialize(raf, chunkCompressionType, false);
+ }
+
+ /**
+ * Serializes this object to an .mca file.
+ * This method does not perform any cleanups on the data.
+ * @param raf The {@code RandomAccessFile} to write to.
+ * @param changeLastUpdate Whether it should update all timestamps that show
+ * when this file was last updated.
+ * @return The amount of chunks written to the file.
+ * @throws IOException If something went wrong during serialization.
+ */
+ public int serialize(RandomAccessFile raf, CompressionType chunkCompressionType, boolean changeLastUpdate) throws IOException {
+ ArgValidator.requireValue(raf, "raf");
+ int globalOffset = 2;
+ int lastWritten = 0;
+ int timestamp = (int) (System.currentTimeMillis() / 1000L);
+ int chunksWritten = 0;
+ int chunkXOffset = McaFileHelpers.regionToChunk(regionX);
+ int chunkZOffset = McaFileHelpers.regionToChunk(regionZ);
+
+ // ensure that the mca header tables always exist
+ raf.seek(0x2000 - 4);
+ raf.writeInt(0);
+
+ if (chunks == null) {
+ return 0;
+ }
+
+ for (int cz = 0; cz < 32; cz++) {
+ for (int cx = 0; cx < 32; cx++) {
+ int index = getChunkIndex(cx, cz);
+ T chunk = chunks[index];
+ if (chunk == null) {
+ continue;
+ }
+ raf.seek(4096L * globalOffset);
+ lastWritten = chunk.serialize(raf, chunkXOffset + cx, chunkZOffset + cz, chunkCompressionType, true);
+
+ chunksWritten++;
+
+ // compute the count of 4kb sectors the chunk data occupies
+ int sectors = (lastWritten >> 12) + (lastWritten % 4096 == 0 ? 0 : 1);
+
+ raf.seek(index * 4L);
+ raf.writeByte(globalOffset >>> 16);
+ raf.writeByte(globalOffset >> 8 & 0xFF);
+ raf.writeByte(globalOffset & 0xFF);
+ raf.writeByte(sectors);
+
+ // write timestamp
+ raf.seek(index * 4L + 4096);
+ raf.writeInt(changeLastUpdate ? timestamp : chunk.getLastMCAUpdate());
+
+ globalOffset += sectors;
+ }
+ }
+
+ // padding
+ if (lastWritten % 4096 != 0) {
+ raf.seek(globalOffset * 4096L - 1);
+ raf.write(0);
+ }
+ return chunksWritten;
+ }
+
+ /**
+ * Set a specific Chunk at a specific index. The index must be in range of 0 - 1023.
+ * Take care as the given chunk is NOT copied by this call.
+ * @param index The index of the Chunk.
+ * @param chunk The Chunk to be set.
+ * @throws IndexOutOfBoundsException If index is not in the range.
+ */
+ @SuppressWarnings("unchecked")
+ public void setChunk(int index, T chunk) {
+ checkIndex(index);
+ if (chunks == null) {
+ chunks = (T[]) Array.newInstance(chunkClass(), 1024);
+ }
+ // TODO: figure out how best to sync chunk abs xz
+// getRelativeChunkXZ(index).add(regionX * 32, regionZ * 32);
+ chunks[index] = chunk;
+ }
+
+ /**
+ * Set a specific Chunk at a specific chunk location.
+ * The x- and z-value can be absolute chunk coordinates or they can be relative to the region origin.
+ * @param chunkX The x-coordinate of the Chunk.
+ * @param chunkZ The z-coordinate of the Chunk.
+ * @param chunk The chunk to be set.
+ *
+ */
+ public void setChunk(int chunkX, int chunkZ, T chunk) {
+ setChunk(getChunkIndex(chunkX, chunkZ), chunk);
+ }
+
+ /**
+ * Returns the chunk data of a chunk at a specific index in this file.
+ * @param index The index of the chunk in this file.
+ * @return The chunk data.
+ */
+ public T getChunk(int index) {
+ checkIndex(index);
+ if (chunks == null) {
+ return null;
+ }
+ return chunks[index];
+ }
+
+ /**
+ * Returns the chunk data of a chunk in this file.
+ * @param chunkX The x-coordinate of the chunk.
+ * @param chunkZ The z-coordinate of the chunk.
+ * @return The chunk data.
+ */
+ public T getChunk(int chunkX, int chunkZ) {
+ return getChunk(getChunkIndex(chunkX, chunkZ));
+ }
+
+ /**
+ * Removes the chunk at the given index (sets it to null) and returns the previous value.
+ * @param index chunk index [0..1024)
+ * @return chunk which was removed, or null if there was none.
+ */
+ public T removeChunk(int index) {
+ T chunk = chunks[index];
+ chunks[index] = null;
+ return chunk;
+ }
+
+ /**
+ * Removes the chunk at the given xz (sets it to null) and returns the previous value.
+ * Works with absolute and relative coordinates.
+ * @param chunkX chunk x
+ * @param chunkZ chunk z
+ * @return chunk which was removed, or null if there was none.
+ */
+ public T removeChunk(int chunkX, int chunkZ) {
+ return removeChunk(getChunkIndex(chunkX, chunkZ));
+ }
+
+ /**
+ * Calculates the index of a chunk from its x and z-coordinates in this region.
+ * This works with absolute and relative coordinates.
+ * @param chunkX The x-coordinate of the chunk.
+ * @param chunkZ The z-coordinate of the chunk.
+ * @return The index of this chunk or -1 if either chunkX or chunkZ were {@link ChunkBase#NO_CHUNK_COORD_SENTINEL}.
+ */
+ public static int getChunkIndex(int chunkX, int chunkZ) {
+ if (chunkX != ChunkBase.NO_CHUNK_COORD_SENTINEL && chunkZ != ChunkBase.NO_CHUNK_COORD_SENTINEL) {
+ return ((chunkZ & 0x1F) << 5) | (chunkX & 0x1F);
+ }
+ return -1;
+ }
+
+ /**
+ * Calculates the relative x z of a chunk within the current region given an index.
+ *
+ * @param index index of chunk in range [0..1024)
+ * @return x z location of the chunk in region relative coordinates where x and z each range [0..32)
+ */
+ public static IntPointXZ getRelativeChunkXZ(int index) {
+ checkIndex(index);
+ return new IntPointXZ(index & 0x1F, index >> 5);
+ }
+
+ protected static void checkIndex(int index) {
+ if (index < 0 || index > 1023) {
+ throw new IndexOutOfBoundsException();
+ }
+ }
+
+ protected T createChunkIfMissing(int blockX, int blockZ) {
+ int chunkX = McaFileHelpers.blockToChunk(blockX), chunkZ = McaFileHelpers.blockToChunk(blockZ);
+ T chunk = getChunk(chunkX, chunkZ);
+ if (chunk == null) {
+ chunk = createChunk();
+ setChunk(getChunkIndex(chunkX, chunkZ), chunk);
+ }
+ return chunk;
+ }
+
+ public boolean moveRegion(int newRegionX, int newRegionZ, long moveChunkFlags, boolean force) {
+ // Testing note: don't forget that updateHandle() needs to be called to see the results of this move!
+ boolean changed = false;
+ IntPointXZ newRegionMinChunkXZ = new IntPointXZ(newRegionX, newRegionZ).transformRegionToChunk();
+ ChunkIterator iter = this.iterator();
+ while (iter.hasNext()) {
+ T chunk = iter.next();
+ if (chunk != null) {
+ IntPointXZ newChunkXZ = iter.currentXZ().add(newRegionMinChunkXZ);
+ changed |= chunk.moveChunk(newChunkXZ.getX(), newChunkXZ.getZ(), moveChunkFlags, force);
+ }
+ }
+ this.regionX = newRegionX;
+ this.regionZ = newRegionZ;
+ return changed;
+ }
+
+ @Override
+ public ChunkIterator iterator() {
+ return new ChunkIteratorImpl<>(this);
+ }
+
+ public Stream stream() {
+ return StreamSupport.stream(spliterator(), false);
+ }
+
+ protected static class ChunkIteratorImpl implements ChunkIterator {
+ private final McaFileBase owner;
+ private int currentIndex;
+
+ public ChunkIteratorImpl(McaFileBase owner) {
+ this.owner = owner;
+ currentIndex = -1;
+ }
+
+ @Override
+ public boolean hasNext() {
+ return currentIndex < 1023;
+ }
+
+ @Override
+ public I next() {
+ if (!hasNext()) throw new NoSuchElementException();
+ return owner.getChunk(++currentIndex);
+ }
+
+ @Override
+ public void remove() {
+ owner.setChunk(currentIndex, null);
+ }
+
+ @Override
+ public void set(I chunk) {
+ owner.setChunk(currentIndex, chunk);
+ }
+
+ @Override
+ public int currentIndex() {
+ return currentIndex;
+ }
+
+ @Override
+ public int currentAbsoluteX() {
+ return currentX() + owner.getRegionX() * 32;
+ }
+
+ @Override
+ public int currentAbsoluteZ() {
+ return currentZ() + owner.getRegionZ() * 32;
+ }
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/McaPoiFile.java b/src/main/java/io/github/ensgijs/nbt/mca/McaPoiFile.java
new file mode 100644
index 00000000..66f6bf6f
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/McaPoiFile.java
@@ -0,0 +1,33 @@
+package io.github.ensgijs.nbt.mca;
+
+/**
+ * POI files are best thought of as an INDEX the game uses to be able to quickly locate certain blocks.
+ * However, the names of the indexed locations are not necessarily block types but often a description of usage
+ * and one poi type may map to multiple block types (e.g. poi of 'minecraft:home' maps to any of the bed blocks).
+ *
+ * See {@link PoiRecord} for more information and for a list of POI types and how they map to blocks.
+ */
+public class McaPoiFile extends McaFileBase<PoiChunk> {
+ public McaPoiFile(int regionX, int regionZ) {
+ super(regionX, regionZ);
+ }
+
+ public McaPoiFile(int regionX, int regionZ, int defaultDataVersion) {
+ super(regionX, regionZ, defaultDataVersion);
+ }
+
+ public McaPoiFile(int regionX, int regionZ, DataVersion defaultDataVersion) {
+ super(regionX, regionZ, defaultDataVersion);
+ }
+
+ @Override
+ public Class<PoiChunk> chunkClass() {
+ return PoiChunk.class;
+ }
+
+ @Override
+ public PoiChunk createChunk() {
+ PoiChunk chunk = new PoiChunk(getDefaultChunkDataVersion());
+ return chunk;
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/McaRegionFile.java b/src/main/java/io/github/ensgijs/nbt/mca/McaRegionFile.java
new file mode 100644
index 00000000..4f7abe09
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/McaRegionFile.java
@@ -0,0 +1,138 @@
+package io.github.ensgijs.nbt.mca;
+
+import io.github.ensgijs.nbt.mca.io.McaFileHelpers;
+
+/**
+ * Represents a Terrain data mca file (one that lives in the /region folder).
+ * Prior to MC 1.14 /region/*.mca files were the only ones that existed, 1.14 introduced /poi/*.mca
+ * and 1.17 added /entities/*.mca - this class (currently) supports both legacy region files (that contain
+ * entity data) as well as modern ones that do not.
+ */
+public class McaRegionFile extends McaFileBase<TerrainChunk> implements Iterable<TerrainChunk> {
+ /**
+ * The default chunk data version used when no custom version is supplied.
+ * Deprecated: use {@code DataVersion.latest().id()} instead.
+ */
+ @Deprecated
+ public static final int DEFAULT_DATA_VERSION = DataVersion.latest().id();
+
+ /**
+ * {@inheritDoc}
+ */
+ public McaRegionFile(int regionX, int regionZ) {
+ super(regionX, regionZ);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public McaRegionFile(int regionX, int regionZ, int defaultDataVersion) {
+ super(regionX, regionZ, defaultDataVersion);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public McaRegionFile(int regionX, int regionZ, DataVersion defaultDataVersion) {
+ super(regionX, regionZ, defaultDataVersion);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public Class<TerrainChunk> chunkClass() {
+ return TerrainChunk.class;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public TerrainChunk createChunk() {
+ return TerrainChunk.newChunk(getDefaultChunkDataVersion());
+ }
+
+ /**
+ * @deprecated Use {@link #setBiomeAt(int, int, int, int)} instead
+ */
+ @Deprecated
+ public void setBiomeAt(int blockX, int blockZ, int biomeID) {
+ createChunkIfMissing(blockX, blockZ).setLegacyBiomeAt(blockX, blockZ, biomeID);
+ }
+
+ public void setBiomeAt(int blockX, int blockY, int blockZ, int biomeID) {
+ createChunkIfMissing(blockX, blockZ).setLegacyBiomeAt(blockX, blockY, blockZ, biomeID);
+ }
+
+ /**
+ * @deprecated Use {@link #getBiomeAt(int, int, int)} instead
+ */
+ @Deprecated
+ public int getBiomeAt(int blockX, int blockZ) {
+ int chunkX = McaFileHelpers.blockToChunk(blockX), chunkZ = McaFileHelpers.blockToChunk(blockZ);
+ TerrainChunk chunk = getChunk(getChunkIndex(chunkX, chunkZ));
+ if (chunk == null) {
+ return -1;
+ }
+ return chunk.getLegacyBiomeAt(blockX, blockZ);
+ }
+
+ /**
+ * Fetches the biome id at a specific block.
+ * @param blockX The x-coordinate of the block.
+ * @param blockY The y-coordinate of the block.
+ * @param blockZ The z-coordinate of the block.
+ * @return The biome id if the chunk exists and the chunk has biomes, otherwise -1.
+ * @deprecated unsupported after JAVA_1_18_21W38A
+ */
+ public int getBiomeAt(int blockX, int blockY, int blockZ) {
+ int chunkX = McaFileHelpers.blockToChunk(blockX), chunkZ = McaFileHelpers.blockToChunk(blockZ);
+ TerrainChunk chunk = getChunk(getChunkIndex(chunkX, chunkZ));
+ if (chunk == null) {
+ return -1;
+ }
+ return chunk.getLegacyBiomeAt(blockX,blockY, blockZ);
+ }
+
+// /**
+// * Set a block state at a specific block location.
+// * The block coordinates can be absolute coordinates or they can be relative to the region.
+// * @param blockX The x-coordinate of the block.
+// * @param blockY The y-coordinate of the block.
+// * @param blockZ The z-coordinate of the block.
+// * @param state The block state to be set.
+// * @param cleanup Whether the Palette and the BLockStates should be recalculated after adding the block state.
+// */
+// public void setBlockStateAt(int blockX, int blockY, int blockZ, CompoundTag state, boolean cleanup) {
+// createChunkIfMissing(blockX, blockZ).setBlockStateAt(blockX, blockY, blockZ, state, cleanup);
+// }
+//
+// /**
+// * Fetches a block state at a specific block location.
+// * The block coordinates can be absolute coordinates or they can be relative to the region.
+// * @param blockX The x-coordinate of the block.
+// * @param blockY The y-coordinate of the block.
+// * @param blockZ The z-coordinate of the block.
+// * @return The block state or null if the chunk or the section do not exist.
+// */
+// public CompoundTag getBlockStateAt(int blockX, int blockY, int blockZ) {
+// int chunkX = McaFileHelpers.blockToChunk(blockX), chunkZ = McaFileHelpers.blockToChunk(blockZ);
+// TerrainChunk chunk = getChunk(chunkX, chunkZ);
+// if (chunk == null) {
+// return null;
+// }
+// return chunk.getBlockStateAt(blockX, blockY, blockZ);
+// }
+//
+// /**
+// * Recalculates the Palette and the BlockStates of all chunks and sections of this region.
+// */
+// public void cleanupPalettesAndBlockStates() {
+// for (TerrainChunk chunk : chunks) {
+// if (chunk != null) {
+// chunk.cleanupPalettesAndBlockStates();
+// }
+// }
+// }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/PoiChunk.java b/src/main/java/io/github/ensgijs/nbt/mca/PoiChunk.java
new file mode 100644
index 00000000..bcf3538f
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/PoiChunk.java
@@ -0,0 +1,28 @@
+package io.github.ensgijs.nbt.mca;
+
+import io.github.ensgijs.nbt.tag.CompoundTag;
+
+public class PoiChunk extends PoiChunkBase<PoiRecord> {
+
+ protected PoiChunk(int dataVersion) {
+ super(dataVersion);
+ }
+
+ public PoiChunk() {
+ super(DataVersion.latest().id());
+ }
+
+ public PoiChunk(CompoundTag data) {
+ super(data);
+ }
+
+ public PoiChunk(CompoundTag data, long loadFlags) {
+ super(data, loadFlags);
+ }
+
+ @Override
+ protected PoiRecord createPoiRecord(CompoundTag recordTag) {
+ return new PoiRecord(recordTag);
+ }
+
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/PoiChunkBase.java b/src/main/java/io/github/ensgijs/nbt/mca/PoiChunkBase.java
new file mode 100644
index 00000000..74e1bc19
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/PoiChunkBase.java
@@ -0,0 +1,430 @@
+package io.github.ensgijs.nbt.mca;
+
+import io.github.ensgijs.nbt.io.NamedTag;
+import io.github.ensgijs.nbt.mca.io.LoadFlags;
+import io.github.ensgijs.nbt.mca.io.McaFileHelpers;
+import io.github.ensgijs.nbt.mca.io.MoveChunkFlags;
+import io.github.ensgijs.nbt.tag.CompoundTag;
+import io.github.ensgijs.nbt.tag.IntArrayTag;
+import io.github.ensgijs.nbt.tag.ListTag;
+import io.github.ensgijs.nbt.mca.util.ChunkBoundingRectangle;
+import io.github.ensgijs.nbt.mca.util.RegionBoundingRectangle;
+
+import java.util.*;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+/**
+ * Provides all the basic functionality necessary for this type of chunk with abstraction hooks
+ * making it easy to extend this class and modify the factory behavior of {@link McaFileHelpers} to create
+ * instances of your custom class.
+ */
+public abstract class PoiChunkBase<T extends PoiRecord> extends ChunkBase implements Collection<T> {
+ // private to preserve the ability to change how records are stored to optimize lookups later
+ private List<T> records;
+
+ // Valid: True (1) when created by the game, however, if the decoding of POI NBT (from the region file) data fails,
+ // and the game then saves the region file again, it might save false (0). This key is internally set to true when
+ // the POI section is refreshed, and a refresh always happens when the chunk section (with terrain data) at the
+ // same coordinates is decoded. To sum up, it is very unlikely to get false.
+ protected Map<Integer, Boolean> poiSectionValidity;
+
+ @Override
+ protected void initMembers() {
+ records = null;
+ poiSectionValidity = new HashMap<>();
+ }
+
+ protected PoiChunkBase(int dataVersion) {
+ super(dataVersion);
+ records = new ArrayList<>();
+ }
+
+ public PoiChunkBase(CompoundTag data) {
+ super(data);
+ }
+
+ public PoiChunkBase(CompoundTag data, long loadFlags) {
+ super(data, loadFlags);
+ }
+
+ @Override
+ protected void initReferences(long loadFlags) {
+ if ((loadFlags & LoadFlags.POI_RECORDS) != 0) {
+ records = new ArrayList<>();
+ CompoundTag sectionsTag = data.getCompoundTag("Sections");
+ if (sectionsTag == null) {
+ throw new IllegalArgumentException("Sections tag not found!");
+ }
+ for (NamedTag sectionTag : sectionsTag) {
+ int sectionY = Integer.parseInt(sectionTag.getName());
+ boolean valid = ((CompoundTag) sectionTag.getTag()).getBoolean("Valid", true);
+ poiSectionValidity.put(sectionY, valid);
+ ListTag<CompoundTag> recordTags = ((CompoundTag) sectionTag.getTag()).getListTagAutoCast("Records");
+ if (recordTags != null) {
+ for (CompoundTag recordTag : recordTags) {
+ T record = createPoiRecord(recordTag);
+ if (sectionY != record.getSectionY()) {
+ poiSectionValidity.put(sectionY, false);
+ }
+ records.add(record);
+ }
+ }
+ }
+ }
+ }
+
+ /** {@inheritDoc} */
+ public String getMcaType() {
+ return "poi";
+ }
+
+ @Override
+ public boolean moveChunkImplemented() {
+ return records != null || data != null;
+ }
+
+ @Override
+ public boolean moveChunkHasFullVersionSupport() {
+ return records != null || data != null;
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ public boolean moveChunk(int newChunkX, int newChunkZ, long moveChunkFlags, boolean force) {
+ if (!moveChunkImplemented())
+ throw new UnsupportedOperationException("Missing the data required to move this chunk!");
+ if (!RegionBoundingRectangle.MAX_WORLD_BORDER_BOUNDS.containsChunk(chunkX, chunkZ)) {
+ throw new IllegalArgumentException("Chunk XZ must be within the maximum world bounds.");
+ }
+ // remember poi chunk nbt doesn't contain XZ location
+ this.chunkX = newChunkX;
+ this.chunkZ = newChunkZ;
+ if (fixPoiLocations(moveChunkFlags)) {
+ if ((moveChunkFlags & MoveChunkFlags.AUTOMATICALLY_UPDATE_HANDLE) > 0) {
+ updateHandle();
+ }
+ return true;
+ }
+ return false;
+ }
+
+ public boolean fixPoiLocations(long moveChunkFlags) {
+ if (!moveChunkImplemented())
+ throw new UnsupportedOperationException("Missing the data required to move this chunk!");
+ if (this.chunkX == NO_CHUNK_COORD_SENTINEL || this.chunkZ == NO_CHUNK_COORD_SENTINEL) {
+ throw new IllegalStateException("Chunk XZ not known");
+ }
+ boolean changed = false;
+ final ChunkBoundingRectangle cbr = new ChunkBoundingRectangle(chunkX, chunkZ);
+ if (!raw && records != null) {
+ for (T entity : records) {
+ if (!cbr.containsBlock(entity.getX(), entity.getZ())) {
+ entity.setX(cbr.relocateX(entity.getX()));
+ entity.setZ(cbr.relocateZ(entity.getZ()));
+ changed = true;
+ }
+ }
+ } else { // fix raw data
+ if (data == null) {
+ throw new UnsupportedOperationException(
+ "Cannot fix POI locations when RELEASE_CHUNK_DATA_TAG was set and POI_RECORDS was not set.");
+ }
+ CompoundTag sectionsTag = data.getCompoundTag("Sections");
+ if (sectionsTag == null) {
+ throw new IllegalArgumentException("Sections tag not found!");
+ }
+ for (NamedTag sectionTag : sectionsTag) {
+ ListTag<CompoundTag> recordTags = ((CompoundTag) sectionTag.getTag()).getListTagAutoCast("Records");
+ if (recordTags != null) {
+ for (CompoundTag recordTag : recordTags) {
+ IntArrayTag posTag = recordTag.getIntArrayTag("pos");
+ int[] pos = posTag.getValue(); // by ref
+ int x = pos[0];
+ int z = pos[2];
+ if (!cbr.containsBlock(x, z)) {
+ pos[0] = cbr.relocateX(x);
+ pos[2] = cbr.relocateZ(z);
+ changed = true;
+ // Don't need to call recordTag.getIntArrayTag("Pos").setValue(pos);
+ }
+ }
+ }
+ }
+ }
+ return changed;
+ }
+
+ /**
+ * Called from {@link #initReferences(long)}. Exists to provide a hook for custom implementations to override to
+ * add support for modded poi's, etc. without having to implement {@link #initReferences(long)} logic fully.
+ */
+ protected abstract T createPoiRecord(CompoundTag recordTag);
+
+ @Override
+ public boolean add(T record) {
+ if (record == null) {
+ throw new IllegalArgumentException("record must not be null");
+ }
+ return records.add(record);
+ }
+
+ /**
+ * Gets the first poi record found with the exact xyz given
+ * @param x world block x
+ * @param y world block y
+ * @param z world block z
+ * @return poi record if found, otherwise null
+ */
+ public T getFirst(final int x, final int y, final int z) {
+ return records.stream().filter(r -> r.matches(x, y, z)).findFirst().orElse(null);
+ }
+
+ /**
+ * Gets a shallow COPY of the set of poi records in this chunk.
+ * Modifications to the list will have no effect on this chunk, but modifying items in that list will.
+ * However, you can {@link #getAll()} modify the returned list, then call {@link #set(Collection)}
+ * with your modified list to update the records in this chunk.
+ */
+ public List<T> getAll() {
+ // don't return actual records list, retain the freedom to make it something other than a list for
+ // optimizations later!
+ return new ArrayList<>(records);
+ }
+
+ /**
+ * Gets all poi record found with the exact xyz given. Really there should be only one - but nothing
+ * is stopping you from messing it up.
+ * @param x world block x
+ * @param y world block y
+ * @param z world block z
+ * @return new list of poi records at the given xyz
+ */
+ public List<T> getAll(final int x, final int y, final int z) {
+ return records.stream().filter(r -> r.matches(x, y, z)).collect(Collectors.toList());
+ }
+
+ /**
+ * Gets all poi records of the given type
+ * @param poiType poi type
+ * @return new list of poi records matching the given poi type
+ */
+ public List<T> getAll(final String poiType) {
+ List<T> list = records.stream().filter(r -> r.matches(poiType)).collect(Collectors.toList());
+ return list;
+ }
+
+ /**
+ * Removes the given record from this poi chunk both by reference and by equality.
+ * @param record record to remove
+ * @return true if any record was removed
+ */
+ @Override
+ public boolean remove(Object record) {
+ if (!(record instanceof PoiRecord)) return false;
+ return records.removeIf(r -> r == record || r.equals(record));
+ }
+
+ @Override
+ public boolean removeAll(Collection<?> c) {
+ return records.removeAll(c);
+ }
+
+ /**
+ * Removes all records at the given xyz.
+ * @param x world block x
+ * @param y world block y
+ * @param z world block z
+ * @return True if any records were removed
+ */
+ public boolean removeAll(final int x, final int y, final int z) {
+ return records.removeIf(r -> r.matches(x, y, z));
+ }
+
+ /**
+ * Removes all PoiRecords with the given type.
+ * @param poiType poi type to remove
+ * @return true if any records were removed
+ */
+ public boolean removeAll(final String poiType) {
+ if (poiType == null || poiType.isEmpty()) {
+ return false;
+ }
+ return records.removeIf(r -> r.matches(poiType));
+ }
+
+ /**
+ * Removes the FIRST PoiRecord at the given xyz.
+ * @param x world block x
+ * @param y world block y
+ * @param z world block z
+ * @return Removed PoiRecord or null if no such record
+ */
+ public T removeFirst(final int x, final int y, final int z) {
+ Iterator<T> iter = records.iterator();
+ while (iter.hasNext()) {
+ T record = iter.next();
+ if (record.matches(x, y, z)) {
+ iter.remove();
+ return record;
+ }
+ }
+ return null;
+ }
+
+ @Override
+ public boolean containsAll(Collection<?> c) {
+ return records.containsAll(c);
+ }
+
+ @Override
+ public boolean addAll(Collection<? extends T> c) {
+ boolean changed = false;
+ for (T r : c) {
+ if (r != null) {
+ records.add(r);
+ changed = true;
+ }
+ }
+ return changed;
+ }
+
+ @Override
+ public boolean retainAll(Collection<?> c) {
+ return records.retainAll(c);
+ }
+
+ /**
+ * Removes all poi record data from this chunk. This WILL NOT provide any signal to Minecraft that the
+ * poi records for this chunk should be recalculated. Calling this function is only the correct action
+ * if you have removed all poi blocks from the chunk or if you plan to rebuild the poi records.
+ * Also resets all poi chunk section validity flags to indicate "is valid = true".
+ */
+ @Override
+ public void clear() {
+ records.clear();
+ poiSectionValidity.clear();
+ }
+
+ @Override
+ public int size() {
+ return records.size();
+ }
+
+ @Override
+ public boolean isEmpty() {
+ return records.isEmpty();
+ }
+
+ @Override
+ public boolean contains(Object o) {
+ return records.contains(o);
+ }
+
+ @Override
+ public Iterator<T> iterator() {
+ return records.iterator();
+ }
+
+ /**
+ * Provides an iterator over poi records with the given type. This is a convenience function and does not provide
+ * any real optimization v.s. iterating over all elements.
+ * @param poiType poi type, if null or empty an empty iterator is returned
+ * @return Never null, but may be empty. Does not support {@link Iterator#remove()}
+ */
+ public Iterator<T> iterator(final String poiType) {
+ if (poiType == null || poiType.isEmpty()) {
+ return Collections.emptyIterator();
+ }
+ return records.stream().filter(r -> r.matches(poiType)).iterator();
+ }
+
+ public Stream<T> stream() {
+ return records.stream();
+ }
+
+ @Override
+ public Object[] toArray() {
+ return records.toArray();
+ }
+
+ @Override
+ public <T1> T1[] toArray(T1[] a) {
+ return records.toArray(a);
+ }
+
+ /**
+ * Clears the poi records from this chunk by first calling {@link #clear()}, then repopulates them by
+ * taking a shallow copy from the given collection. If the collection is null the effect of this
+ * function is the same as {@link #clear()}.
+ * @param c collection to shallow copy poi records from, any null entries will be ignored.
+ */
+ public void set(Collection<T> c) {
+ clear();
+ if (c != null) {
+ addAll(c);
+ }
+ }
+
+ /**
+ * Marks the given subchunk invalid so that Minecraft will recompute POI for it when loaded.
+ * @param sectionY subchunk section-y to invalidate
+ */
+ public void invalidateSection(int sectionY) {
+ if (sectionY < Byte.MIN_VALUE || sectionY > Byte.MAX_VALUE)
+ throw new IllegalArgumentException("sectionY must be in range [-128..127]");
+ poiSectionValidity.put(sectionY, false);
+ }
+
+ /**
+ * Checks if the given section has been marked invalid either by calling {@link #invalidateSection(int)} or if
+ * it was already invalidated in the poi mca file.
+ */
+ public boolean isPoiSectionValid(int sectionY) {
+ if (sectionY < Byte.MIN_VALUE || sectionY > Byte.MAX_VALUE)
+ throw new IllegalArgumentException("sectionY must be in range [-128..127]");
+ return poiSectionValidity.getOrDefault(sectionY, true);
+ }
+
+ /**
+ * Checks if the given poi record resides in a section that has been marked invalid either by calling
+ * {@link #invalidateSection(int)} or was already invalidated in the poi mca file.
+ */
+ public boolean isPoiSectionValid(PoiRecord record) {
+ return record == null || poiSectionValidity.getOrDefault(record.getSectionY(), true);
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ @Override
+ public CompoundTag updateHandle() {
+ if (raw) return data;
+ super.updateHandle();
+ Map<Integer, List<T>> sectionedLists = records.stream().collect(Collectors.groupingBy(PoiRecord::getSectionY));
+ // ensure that all invalidated sections are in sectionedLists so we can just do one processing pass
+ for (int sectionY : poiSectionValidity.keySet()) {
+ if (!sectionedLists.containsKey(sectionY)) {
+ sectionedLists.put(sectionY, Collections.emptyList());
+ }
+ }
+
+ CompoundTag sectionContainerTag = new CompoundTag(sectionedLists.size());
+ data.put("Sections", sectionContainerTag);
+ for (Map.Entry<Integer, List<T>> entry : sectionedLists.entrySet()) {
+ CompoundTag sectionTag = new CompoundTag();
+ List<T> sectionRecords = entry.getValue();
+ boolean isValid = poiSectionValidity.getOrDefault(entry.getKey(), true);
+ if (!isValid || !sectionRecords.isEmpty()) {
+ sectionContainerTag.put(Integer.toString(entry.getKey()), sectionTag);
+ ListTag<CompoundTag> recordsTag = new ListTag<>(CompoundTag.class, sectionRecords.size());
+ sectionTag.putBoolean("Valid", isValid);
+ sectionTag.put("Records", recordsTag);
+ for (PoiRecord record : sectionRecords) {
+ recordsTag.add(record.updateHandle());
+ }
+ }
+ }
+ return data;
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/PoiRecord.java b/src/main/java/io/github/ensgijs/nbt/mca/PoiRecord.java
new file mode 100644
index 00000000..345a6d04
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/PoiRecord.java
@@ -0,0 +1,330 @@
+package io.github.ensgijs.nbt.mca;
+
+import io.github.ensgijs.nbt.tag.CompoundTag;
+import io.github.ensgijs.nbt.mca.util.TagWrapper;
+
+import java.util.Objects;
+
+/**
+ *
+ * In summary, if you have changed the block at a POI location, or altered the blocks in a {@link TerrainChunk}
+ * in such a way that may have added or removed POI blocks you have a few options (MC 1.14+)
+ *
+ * - calculate an accurate new poi state yourself by removing and adding poi records on the {@link PoiChunk},
+ * and to be truly accurate you must also modify villager "brains", but they will figure things out when
+ * they try to interact with their poi's and find them of the wrong type.
+ * - invalidate the poi sub-chunk within which you have made alterations with {@link PoiChunk#invalidateSection(int)}
+ * - remove the poi chunk from the poi file with {@link McaPoiFile#removeChunk(int)} or {@link McaPoiFile#removeChunk(int, int)}
+ * - delete the entire poi mca file
+ *
+ * All of the options, other than calculating poi state yourself, will trigger Minecraft to re-calculate poi records
+ * without causing errant behavior. The worst thing you can do is to do nothing - Minecraft will eventually notice
+ * but it may cause "strange behavior" and various WTF's until the game sorts itself out.
+ * About this class
+ * A record as found in POI MCA files (points of interest). Hashable and equatable, but does not consider
+ * {@code freeTickets} in those operations as that field is largely MC internal state. POI mca files were added in
+ * MC 1.14 to improve villager performance and only contained locations of blocks villagers interacted with. Over time
+ * POI mca has evolved to include locations of other block types to optimize game performance - such as improving
+ * nether portal lag by storing portal block locations in the poi files so the game doesn't need to scan every block
+ * in every chunk until it finds a destination portal.
+ *
+ * At time of writing, 1.14 to 1.17.1, this class exposes all poi record fields. For now, there is no support for
+ * reading or storing extra fields which this class does not wrap.
+ * POI types As of 1.17
+ *
+ * - minecraft:unemployed - does not map to a block type
+ * - minecraft:armorer - block: blast_furnace
+ * - minecraft:butcher - block: smoker
+ * - minecraft:cartographer - block: cartography_table
+ * - minecraft:cleric - block: brewing_stand
+ * - minecraft:farmer - block: composter
+ * - minecraft:fisherman - block: barrel
+ * - minecraft:fletcher - block: fletching_table
+ * - minecraft:leatherworker - block: any cauldron block
+ * - minecraft:librarian - block: lectern
+ * - minecraft:mason - block: stonecutter
+ * - minecraft:nitwit - does not map to a block type
+ * - minecraft:shepherd - block: loom
+ * - minecraft:toolsmith - block: smithing_table
+ * - minecraft:weaponsmith - block: grindstone
+ * - minecraft:home - block: any bed
+ * - minecraft:meeting - block: bell
+ * - minecraft:beehive - block: beehive
+ * - minecraft:bee_nest - block: bee_nest
+ * - minecraft:nether_portal - block: nether_portal
+ * - minecraft:lodestone - block: lodestone
+ * - minecraft:lightning_rod - block: lightning_rod
+ *
+ *
+ *
+ * What are "Tickets"?
+ *
+ * Tickets are only used for blocks/poi's (points of interest) which villagers interact with. Internally
+ * Minecraft specifies a max tickets for each such poi type. This is the maximum number of villagers which
+ * can "take a ticket" (aka be using that poi at the same time; aka max number of villagers which
+ * can claim that poi and store it in their "brain"). For all villager eligible poi's that limit
+ * is one (1), with the single exception being minecraft:meeting (block minecraft:bell) which has a
+ * limit of 32.
+ *
+ * Poi entries which are not for villager interaction such as beehives, nether portals,
+ * lighting rods, etc., have a max ticket count of zero (0).
+ *
+ * A truly valid POI Record is one that satisfies all of the following conditions
+ *
+ * - the block at the poi location is appropriate for the poi type
+ * - free tickets is never GT max tickets for that poi type
+ * - {@link #getFreeTickets()} equals the count of all villagers who have stored the poi location in their
+ * "brain" subtracted from the max tickets for that poi type
+ *
+ */
+public class PoiRecord implements TagWrapper, Comparable<PoiRecord> {
+ protected String type;
+ protected int freeTickets;
+ protected int x;
+ protected int y;
+ protected int z;
+
+ public PoiRecord() { }
+
+ /**
+ * copy constructor
+ */
+ public PoiRecord(PoiRecord other) {
+ this.type = other.type;
+ this.freeTickets = other.freeTickets;
+ this.x = other.x;
+ this.y = other.y;
+ this.z = other.z;
+ }
+
+ public PoiRecord(CompoundTag data) {
+ this.freeTickets = data.getInt("free_tickets");
+ this.type = data.getString("type");
+ int[] pos = data.getIntArray("pos");
+ this.x = pos[0];
+ this.y = pos[1];
+ this.z = pos[2];
+ }
+
+ /**
+ * Defaults free tickets to result of passing the given type to {@link #maxFreeTickets(String)}
+ * @param x world block x
+ * @param y world block y - must be a within the absolute maximum limit of blocks
+ * theoretically supportable by chunk sections [-2048..2048)
+ * @param z world block z
+ * @param type required, poi type name
+ */
+ public PoiRecord(int x, int y, int z, String type) {
+ this(x, y, z, type, maxFreeTickets(type));
+ }
+
+ /**
+ * @param x world block x
+ * @param y world block y - must be a within the absolute maximum limit of blocks
+ * theoretically supportable by chunk sections [-2048..2048)
+ * @param z world block z
+ * @param type required, poi type name
+ * @param freeTickets must be GT 0
+ */
+ public PoiRecord(int x, int y, int z, String type, int freeTickets) {
+ this.type = validateType(type);
+ this.freeTickets = validateFreeTickets(freeTickets);
+ this.y = validateY(y);
+ this.x = x;
+ this.z = z;
+ }
+
+ private String validateType(String type) {
+ if (type == null || type.isEmpty()) {
+ throw new IllegalArgumentException("poi type must not be null or empty");
+ }
+ return type;
+ }
+
+ private int validateFreeTickets(int freeTickets) {
+ if (freeTickets < 0) {
+ throw new IllegalArgumentException("freeTickets must be GE 0");
+ }
+ return freeTickets;
+ }
+
+ private int validateY(int y) {
+ if (y < Byte.MIN_VALUE * 16 || y > Byte.MAX_VALUE * 16 + 15) {
+ throw new IndexOutOfBoundsException(String.format(
+ "Given Y value %d is out of range for any legal block. Y must be in range [%d..%d]",
+ y, Byte.MIN_VALUE * 16, Byte.MAX_VALUE * 16 + 15));
+ }
+ return y;
+ }
+
+ /**
+ * Returns a {@link CompoundTag} representing this record.
+ * The tag returned is newly created and not a reference to a tag held by any other object. This is a different
+ * behavior than most other {@code getHandle()} implementations.
+ */
+ @Override
+ public CompoundTag updateHandle() {
+ CompoundTag data = new CompoundTag();
+ data.putInt("free_tickets", freeTickets);
+ data.putString("type", type);
+ data.putIntArray("pos", new int[] {x, y, z});
+ return data;
+ }
+
+ /**
+ * Returns a {@link CompoundTag} representing this record.
+ * The tag returned is newly created and not a reference to a tag held by any other object. This is a different
+ * behavior than most other {@code getHandle()} implementations.
+ * @return data handle, never null
+ */
+ @Override
+ public CompoundTag getHandle() {
+ return updateHandle();
+ }
+
+ /**
+ * See class doc {@link PoiRecord}
+ */
+ public int getFreeTickets() {
+ return freeTickets;
+ }
+
+ /**
+ * See class doc {@link PoiRecord}
+ */
+ public PoiRecord setFreeTickets(int freeTickets) {
+ this.freeTickets = validateFreeTickets(freeTickets);
+ return this;
+ }
+
+ /**
+ * Sets freeTickets to the default max free tickets for this poi type.
+ * see class doc {@link PoiRecord}
+ */
+ public PoiRecord resetFreeTickets() {
+ this.freeTickets = maxFreeTickets(this.type);
+ return this;
+ }
+
+ /** Type of the point, for example: minecraft:home, minecraft:meeting, minecraft:butcher, minecraft:nether_portal */
+ public String getType() {
+ return type;
+ }
+
+ /** Type of the point, for example: minecraft:home, minecraft:meeting, minecraft:butcher, minecraft:nether_portal */
+ public PoiRecord setType(String type) {
+ this.type = validateType(type);
+ return this;
+ }
+
+ /** world x location */
+ public int getX() {
+ return x;
+ }
+
+ /** world x location */
+ public PoiRecord setX(int x) {
+ this.x = x;
+ return this;
+ }
+
+ /** world y location */
+ public int getY() {
+ return y;
+ }
+
+ /**
+ * @param y must be a within the absolute maximum limit of blocks
+ * theoretically supportable by chunk sections [-2048..2048)
+ */
+ public PoiRecord setY(int y) {
+ this.y = validateY(y);
+ return this;
+ }
+
+ /** world z location */
+ public int getZ() {
+ return z;
+ }
+
+ /** world z location */
+ public PoiRecord setZ(int z) {
+ this.z = z;
+ return this;
+ }
+
+ /**
+ * Sets XYZ
+ * @param x world block x
+ * @param y world block y
+ * @param z world block z
+ * @return self
+ */
+ public PoiRecord setYXZ(int x, int y, int z) {
+ this.x = x;
+ this.y = y;
+ this.z = z;
+ return this;
+ }
+
+ @Override
+ public int hashCode() {
+ return Objects.hash(type, x, y, z);
+ }
+
+ @Override
+ public boolean equals(Object other) {
+ if (!(other instanceof PoiRecord)) return false;
+ PoiRecord that = (PoiRecord) other;
+ return this.y == that.y && this.x == that.x && this.z == that.z && Objects.equals(this.type, that.type);
+ }
+
+ @Override
+ public int compareTo(PoiRecord other) {
+ if (other == null) {
+ return -1;
+ }
+ return Integer.compare(this.y, other.y);
+ }
+
+ public boolean matches(int x, int y, int z) {
+ return this.y == y && this.x == x && this.z == z;
+ }
+
+ public boolean matches(String type) {
+ return this.type.equals(type);
+ }
+
+ public int getSectionY() {
+ return this.y >> 4;
+ }
+
+ /**
+ * Gets the default max free tickets for the given poi type.
+ * @param poiType poi type - NOT block type
+ * @return default (vanilla) max free tickets for the given type.
+ */
+ public static int maxFreeTickets(String poiType) {
+ switch (poiType) {
+ case "minecraft:unemployed":
+ case "minecraft:armorer":
+ case "minecraft:butcher":
+ case "minecraft:cartographer":
+ case "minecraft:cleric":
+ case "minecraft:farmer":
+ case "minecraft:fisherman":
+ case "minecraft:fletcher":
+ case "minecraft:leatherworker":
+ case "minecraft:librarian":
+ case "minecraft:mason":
+ case "minecraft:nitwit":
+ case "minecraft:shepherd":
+ case "minecraft:toolsmith":
+ case "minecraft:weaponsmith":
+ case "minecraft:home":
+ return 1;
+ case "minecraft:meeting":
+ return 32;
+ }
+ return 0;
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/SectionBase.java b/src/main/java/io/github/ensgijs/nbt/mca/SectionBase.java
new file mode 100644
index 00000000..98bbcc05
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/SectionBase.java
@@ -0,0 +1,186 @@
+package io.github.ensgijs.nbt.mca;
+
+import io.github.ensgijs.nbt.tag.CompoundTag;
+import io.github.ensgijs.nbt.util.ObservedCompoundTag;
+import io.github.ensgijs.nbt.mca.io.LoadFlags;
+import io.github.ensgijs.nbt.mca.util.TagWrapper;
+import io.github.ensgijs.nbt.mca.util.TracksUnreadDataTags;
+
+import java.util.*;
+
+import static io.github.ensgijs.nbt.mca.io.LoadFlags.RAW;
+import static io.github.ensgijs.nbt.mca.io.LoadFlags.RELEASE_CHUNK_DATA_TAG;
+
+/**
+ * Sections can be thought of as "sub-chunks" which are 16x16x16 block cubes
+ * stacked atop each other to create a "chunk".
+ */
+public abstract class SectionBase> implements Comparable, TagWrapper, TracksUnreadDataTags {
+ /** Used to indicate an unset section Y value. */
+ public static final int NO_SECTION_Y_SENTINEL = Integer.MIN_VALUE;
+ /** for internal use only - modify with extreme care and precision - must be kept in sync with chunk data version */
+ protected int dataVersion;
+ private boolean raw;
+ protected CompoundTag data;
+ protected Set unreadDataTagKeys;
+ /**
+ * The height of the bottom of this section relative to Y0 as a section-y value, each 1 section-y is
+ * equal to 16 blocks.
+ * AKA: "height"
+ */
+ protected int sectionY = NO_SECTION_Y_SENTINEL;
+
+ /**
+ * {@inheritDoc}
+ */
+ public Set getUnreadDataTagKeys() {
+ return unreadDataTagKeys;
+ }
+
+ /**
+ * {@inheritDoc}
+ * @return NotNull - if LoadFlags specified {@link LoadFlags#RAW} then the raw data is returned - else a new
+ * CompoundTag populated, by reference, with values that were not read during {@link #initReferences(long)}.
+ */
+ public CompoundTag getUnreadDataTags() {
+ if (raw) return data;
+ CompoundTag unread = new CompoundTag(unreadDataTagKeys.size());
+ data.forEach((k, v) -> {
+ if (unreadDataTagKeys.contains(k)) {
+ unread.put(k, v);
+ }
+ });
+ return unread;
+ }
+
+ /**
+ * Due to how Java initializes objects and how this class hierarchy is setup it is ill-advised to use inline member
+ * initialization because {@link #initReferences(long)} will be called before members are initialized which WILL
+ * result in very confusing {@link NullPointerException}'s being thrown from within {@link #initReferences(long)}.
+ * This is not a problem that can be solved by moving initialization into your constructors, because you must call
+ * the super constructor as the first line of your child constructor!
+ * So, to get around this hurdle, perform all member initialization you would normally inline in your
+ * class def, within this method instead. Implementers should never need to call this method themselves
+ * as ChunkBase will always call it, even from the default constructor. Remember to call {@code super();}
+ * from your default constructors to maintain this behavior.
+ */
+ protected void initMembers() { }
+
+ protected SectionBase(int dataVersion) {
+ data = new CompoundTag();
+ this.dataVersion = dataVersion;
+ initMembers();
+ }
+
+ protected SectionBase(CompoundTag sectionRoot, int dataVersion, long loadFlags) {
+ Objects.requireNonNull(sectionRoot, "sectionRoot must not be null");
+ this.data = sectionRoot;
+ this.dataVersion = dataVersion;
+ initMembers();
+ initReferences0(loadFlags);
+ }
+
+ /**
+ * Internal bootstrap: dispatches to {@link #initReferences(long)} unless RAW loading was
+ * requested, and records which data tags the subclass did not read.
+ */
+ private void initReferences0(long loadFlags) {
+ Objects.requireNonNull(data, "data cannot be null");
+ // Note that if RAW was specified in the loadFlags section data will not be loaded by chunk loading.
+ // However, the user may decide to not use chunk loading and create an instance of this class directly,
+ // so we should honor it anyway.
+ if ((loadFlags & RAW) != 0) {
+ raw = true;
+ } else {
+ // Temporarily wrap `data` so every key read during initReferences() is recorded;
+ // the keys that were never touched become unreadDataTagKeys.
+ final ObservedCompoundTag observedData = new ObservedCompoundTag(data);
+ data = observedData;
+ initReferences(loadFlags);
+ // Subclasses must read through the observed wrapper; replacing `data` would silently
+ // lose the read-tracking, so fail loudly instead.
+ if (data != observedData) {
+ throw new IllegalStateException("this.data was replaced during initReferences execution - this breaks unreadDataTagKeys behavior!");
+ }
+ unreadDataTagKeys = observedData.unreadKeys();
+
+ if ((loadFlags & RELEASE_CHUNK_DATA_TAG) != 0) {
+ data = new CompoundTag();
+ } else {
+ // stop observing the data tag
+ data = observedData.wrappedTag();
+ }
+ }
+ }
+
+ /**
+ * Child classes should not call this method directly, it will be called for them.
+ * Raw and partial data handling is taken care of, this method will not be called if {@code loadFlags}
+ * contains {@link LoadFlags#RAW}.
+ */
+ protected abstract void initReferences(final long loadFlags);
+
+ /** Section data version must be kept in sync with chunk data version. Use with extreme care! */
+ protected void syncDataVersion(int newDataVersion) {
+ if (newDataVersion > 0) {
+ this.dataVersion = newDataVersion;
+ } else {
+ throw new IllegalArgumentException("Invalid data version - must be GT 0");
+ }
+ }
+
+ @Override
+ public int compareTo(T o) {
+ // Sections sort bottom-up by section-y; a null argument sorts after this section.
+ return (o == null) ? -1 : Integer.compare(sectionY, o.sectionY);
+ }
+
+ /**
+ * Checks whether the data of this Section is empty.
+ * @return true if empty
+ */
+ public boolean isEmpty() {
+ return data.isEmpty();
+ }
+
+ /**
+ * Gets the height of the bottom of this section relative to Y0 as a section-y value, each 1 section-y is equal to
+ * 16 blocks.
+ * This library (as a whole) will attempt to keep the value returned by this function in sync with the actual
+ * location it has been placed within its chunk.
+ * The value returned may be unreliable if this section is placed in multiple chunks at different heights
+ * or if this section is an instance of {@link TerrainSection} and user code calls {@link TerrainSection#setHeight(int)}
+ * on a section which is referenced by any chunk.
+ * Prefer using {@link TerrainChunk#getSectionY(SectionBase)} which will always be accurate within the context of the
+ * chunk.
+ * @return The Y value of this section.
+ */
+ public int getSectionY() {
+ return sectionY;
+ }
+
+ protected void syncHeight(int height) {
+ this.sectionY = height;
+ }
+
+ /**
+ * Validates a section-y value: it must have been set (not {@link #NO_SECTION_Y_SENTINEL})
+ * and must fit in a signed byte.
+ * @param y section-y value to validate
+ * @throws IndexOutOfBoundsException if y is unset or outside [-128..127]
+ */
+ protected void checkY(int y) {
+ if (y == NO_SECTION_Y_SENTINEL) {
+ throw new IndexOutOfBoundsException("section Y (aka 'height') not set");
+ }
+ // was bitwise `|` -- use short-circuit `||`, the conventional operator for
+ // boolean conditions (same result here, clearer intent)
+ if (y < Byte.MIN_VALUE || y > Byte.MAX_VALUE) {
+ throw new IndexOutOfBoundsException("section Y (aka 'height') must be in range of BYTE [-128..127] was: " + y);
+ }
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public CompoundTag getHandle() {
+ return data;
+ }
+
+ /**
+ * {@inheritDoc}
+ */
+ public CompoundTag updateHandle() {
+ if (data != null) {
+ return data;
+ }
+ throw new UnsupportedOperationException(
+ "Cannot updateHandle() because data tag is null. This is probably because "+
+ "the LoadFlag RELEASE_CHUNK_DATA_TAG was specified");
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/SectionedChunkBase.java b/src/main/java/io/github/ensgijs/nbt/mca/SectionedChunkBase.java
new file mode 100644
index 00000000..de1bbcd2
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/SectionedChunkBase.java
@@ -0,0 +1,256 @@
+package io.github.ensgijs.nbt.mca;
+
+import io.github.ensgijs.nbt.tag.CompoundTag;
+import io.github.ensgijs.nbt.mca.util.SectionIterator;
+
+import java.util.*;
+import java.util.stream.Stream;
+import java.util.stream.StreamSupport;
+
+/**
+ * Abstraction for the base of all chunk types which represent chunks composed of sub-chunks {@link SectionBase}.
+ * @param Concrete type of section.
+ */
+public abstract class SectionedChunkBase> extends ChunkBase implements Iterable {
+ private final TreeMap sections = new TreeMap<>();
+ private final Map sectionHeightLookup = new HashMap<>();
+
+ protected SectionedChunkBase(int dataVersion) {
+ super(dataVersion);
+ }
+
+ /**
+ * Create a new chunk based on raw base data from a region file.
+ * @param data The raw base data to be used.
+ */
+ public SectionedChunkBase(CompoundTag data) {
+ super(data);
+ }
+
+ public SectionedChunkBase(CompoundTag data, long loadFlags) {
+ super(data, loadFlags);
+ }
+
+ public boolean hasSections() {
+ return !sections.isEmpty();
+ }
+
+ public boolean containsSection(int sectionY) {
+ return sections.containsKey(sectionY);
+ }
+
+ public boolean containsSection(T section) {
+ return sectionHeightLookup.containsKey(section);
+ }
+
+ /**
+ * Sets the section at the specified section-y and synchronizes section-y by calling
+ * {@code section.setHeight(sectionY);}.
+ * @param sectionY Section-y to place the section at. It is the developers responsibility to ensure this value is
+ * reasonable. Remember that sections are 16x16x16 cubes and that 1 section-y equals 16 block-y's.
+ * @param section Section to set, may be null to remove the section.
+ * @param moveAllowed If false, and the given section is already present in this chunk {@link IllegalArgumentException}
+ * is thrown. If true, and the given section is already present in this chunk its former
+ * section-y location is set {@code null} and the section is updated to live at the
+ * specified section-y.
+ * @return The previous section at section-y, or null if there was none or if the given section was already
+ * present at sectionY.
+ * @throws IllegalArgumentException Thrown when the given section is already present in this chunk
+ * and {@code moveAllowed} is false.
+ */
+ public T putSection(int sectionY, T section, boolean moveAllowed) throws IllegalArgumentException {
+ checkRaw();
+ if (sectionY < Byte.MIN_VALUE || sectionY > Byte.MAX_VALUE) {
+ throw new IllegalArgumentException(
+ "sectionY must be in the range of a BYTE [-128..127], given value " + sectionY);
+ }
+ // `sections` (y -> section) and `sectionHeightLookup` (section -> y) are kept as
+ // mirror images of each other; every mutation below updates both.
+ if (section != null) {
+ // add-or-move: if the section already lives in this chunk, detach it first
+ if (sectionHeightLookup.containsKey(section)) {
+ final int oldY = sectionHeightLookup.getOrDefault(section, SectionBase.NO_SECTION_Y_SENTINEL);
+ if (sectionY == oldY) return null;
+ if (!moveAllowed) {
+ throw new IllegalArgumentException(
+ String.format("cannot place section at %d, it's already at %d", sectionY, oldY));
+ }
+ final T oldSection = sections.remove(oldY);
+ sectionHeightLookup.remove(oldSection);
+ assert(oldSection == section);
+ assert(sections.size() == sectionHeightLookup.size());
+ }
+ section.syncHeight(sectionY);
+ sectionHeightLookup.put(section, sectionY);
+ final T oldSection = sections.put(sectionY, section);
+ // evict the displaced occupant from the reverse lookup too
+ if (oldSection != null) sectionHeightLookup.remove(oldSection);
+ assert(sections.size() == sectionHeightLookup.size());
+ return oldSection;
+ } else {
+ // null section means: remove whatever currently occupies sectionY
+ final T oldSection = sections.remove(sectionY);
+ sectionHeightLookup.remove(oldSection);
+ assert(sections.size() == sectionHeightLookup.size());
+ return oldSection;
+ }
+ }
+
+ /**
+ * Sets the section at the specified section-y and synchronizes section-y by calling
+ * {@code section.setHeight(sectionY);}.
+ * @param sectionY Section-y to place the section at. It is the developers responsibility to ensure this value is
+ * reasonable. Remember that sections are 16x16x16 cubes and that 1 section-y equals 16 block-y's.
+ * @param section Section to set, may be null to remove the section.
+ * @return The previous section at section-y, or null if there was none or if the given section was already
+ * present at sectionY.
+ * @throws IllegalArgumentException Thrown when the given section is already present in this chunk.
+ * Call {@code putSection(sectionY, section, true)} to not throw this error and to move the section instead.
+ */
+ public T putSection(int sectionY, T section) {
+ return putSection(sectionY, section, false);
+ }
+
+ /**
+ * Fetches the section at the specified section-y and synchronizes section-y by calling
+ * {@code section.setHeight(sectionY);} before returning it.
+ * @param sectionY The y-coordinate of the section in this chunk. One section y is equal to 16 world y's
+ * @return The Section, or null if no section exists at the given section-y.
+ */
+ public T getSection(int sectionY) {
+ final T found = sections.get(sectionY);
+ if (found == null) {
+ return null;
+ }
+ // keep the section's cached y in agreement with where this chunk actually holds it
+ found.syncHeight(sectionY);
+ return found;
+ }
+
+ /**
+ * Alias for {@link #putSection(int, SectionBase)}
+ * Sets a section at a given section y-coordinate.
+ * @param sectionY The y-coordinate of the section in this chunk. One section y is equal to 16 world y's
+ * @param section The section to be set. May be null to remove the section.
+ * @return the previous value associated with {@code sectionY}, or null if there was no section at {@code sectionY}
+ * or if the section was already at that y.
+ * @throws IllegalArgumentException Thrown if adding the given section would result in that section instance occurring
+ * multiple times in this chunk. Use {@link #putSection} as an alternative to allow moving the section, otherwise
+ * it is the developers responsibility to first remove the section from this chunk
+ * ({@code setSection(sectionY, null);}) before placing it at a new section-y.
+ */
+ public T setSection(int sectionY, T section) {
+ return putSection(sectionY, section, false);
+ }
+
+ /**
+ * Looks up the section-y for the given section. This is a safer alternative to using
+ * {@link SectionBase#getSectionY()} as it will always be accurate within the context of this chunk.
+ * @param section section to lookup the section-y for.
+ * @return section-y; may be negative for worlds with a min build height below zero. If the given section is
+ * {@code null} or is not found in this chunk then {@link SectionBase#NO_SECTION_Y_SENTINEL} is returned.
+ */
+ public int getSectionY(T section) {
+ if (section == null) return SectionBase.NO_SECTION_Y_SENTINEL;
+ int y = sectionHeightLookup.getOrDefault(section, SectionBase.NO_SECTION_Y_SENTINEL);
+ section.syncHeight(y);
+ return y;
+ }
+
+ /**
+ * Gets the minimum section y-coordinate.
+ *
+ * NOTE: fully generated terrain chunks MAY have a dummy section -1 below the world, the returned value
+ * WILL be this value - {@link TerrainSection} will exist for this Y but it will be completely empty of the
+ * standard tags you would expect to see (blocks, biomes, etc).
+ * @return The y of the lowest populated section in the chunk or {@link SectionBase#NO_SECTION_Y_SENTINEL} if there is none.
+ * @see #getSectionY(SectionBase)
+ */
+ public int getMinSectionY() {
+ // `sections` is a TreeMap sorted by section-y, so its first key is the lowest.
+ return sections.isEmpty() ? SectionBase.NO_SECTION_Y_SENTINEL : sections.firstKey();
+ }
+
+ /**
+ * Gets the maximum section y-coordinate.
+ * @return The y of the highest populated section in the chunk or {@link SectionBase#NO_SECTION_Y_SENTINEL} if there is none.
+ */
+ public int getMaxSectionY() {
+ // last key of the sorted TreeMap is the highest populated section-y
+ return sections.isEmpty() ? SectionBase.NO_SECTION_Y_SENTINEL : sections.lastKey();
+ }
+
+ /** min block Y, inclusive */
+ public int getWorldMinBlockY() {
+ return getMinSectionY() * 16;
+ }
+
+ /** max block Y, inclusive */
+ public int getWorldMaxBlockY() {
+ return getMaxSectionY() * 16 + 15;
+ }
+
+ /***
+ * Creates a new section and places it in this chunk at the specified section-y UNLESS
+ * the given sectionY is {@link SectionBase#NO_SECTION_Y_SENTINEL} in which case the new
+ * section is not added to this chunk.
+ * @param sectionY section y
+ * @return new section
+ * @throws IllegalArgumentException thrown if the specified y already has a section - basically thrown if
+ * {@link #containsSection(int)} would return true.
+ */
+ public abstract T createSection(int sectionY) throws IllegalArgumentException;
+
+ /**
+ * Sections provided by {@link Iterator#next()} are guaranteed to have correct values returned from
+ * calls to {@link SectionBase#getSectionY()}. Also note that the iterator itself can be queried via
+ * {@link SectionIterator#sectionY()} for the true section-y without calling a deprecated method.
+ * @return Section iterator. Supports {@link Iterator#remove()}.
+ */
+ @Override
+ public SectionIterator iterator() {
+ return new SectionIteratorImpl();
+ }
+
+ public Stream stream() {
+ return StreamSupport.stream(spliterator(), false);
+ }
+
+ /**
+ * Iterates this chunk's sections in ascending section-y order (backed by the sorted
+ * TreeMap), keeping each returned section's cached height in sync and exposing the
+ * current section-y and its block-y bounds.
+ */
+ protected class SectionIteratorImpl implements SectionIterator {
+ // iterator over the chunk's (section-y -> section) entries, ascending by y
+ private final Iterator> iter;
+ // entry most recently returned by next(); backs sectionY() and remove()
+ private Map.Entry current;
+
+ public SectionIteratorImpl() {
+ iter = sections.entrySet().iterator();
+ }
+
+ @Override
+ public boolean hasNext() {
+ return iter.hasNext();
+ }
+
+ @Override
+ public T next() {
+ current = iter.next();
+ // keep the section's cached y in agreement with its position in this chunk
+ current.getValue().syncHeight(current.getKey());
+ return current.getValue();
+ }
+
+ @Override
+ public void remove() {
+ // keep the reverse (section -> y) lookup in sync with the map removal
+ sectionHeightLookup.remove(current.getValue());
+ iter.remove();
+ // NOTE(review): `current` is left pointing at the removed entry, so sectionY()
+ // still reports the removed key until next() is called -- confirm intended.
+ }
+
+ @Override
+ public int sectionY() {
+ return current.getKey();
+ }
+
+ @Override
+ public int sectionBlockMinY() {
+ // 1 section-y == 16 block-y's; min block y of the current section
+ return sectionY() * 16;
+ }
+
+ @Override
+ public int sectionBlockMaxY() {
+ // inclusive top block y of the current 16-block-tall section
+ return sectionY() * 16 + 15;
+ }
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/TerrainChunk.java b/src/main/java/io/github/ensgijs/nbt/mca/TerrainChunk.java
new file mode 100644
index 00000000..d9398f9c
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/TerrainChunk.java
@@ -0,0 +1,82 @@
+package io.github.ensgijs.nbt.mca;
+
+import io.github.ensgijs.nbt.tag.CompoundTag;
+
+/**
+ * Represents a TERRAIN data mca chunk (from mca files that come from the /region save folder).
+ * Terrain chunks are composed of a set of {@link TerrainSection} where any empty/null
+ * section is filled with air blocks by the game. When altering existing chunks for MC 1.14+, be sure to have read and
+ * understood the documentation on {@link PoiRecord} to avoid problems with villagers, nether portal linking,
+ * lodestones, bees, and probably more as Minecraft continues to evolve.
+ *
+ * It is my (Ross / Ens) hope that in the future this class can be repurposed to serve as an abstraction
+ * layer over all the various chunk types (terrain, poi, entity - at the time of writing) and that it
+ * can take care of keeping them all in sync. But I've already put a lot of time into this library and need
+ * to return to other things so for now that goal must remain unrealized.
+ */
+public class TerrainChunk extends TerrainChunkBase {
+ /**
+ * The default chunk data version used when no custom version is supplied.
+ * @deprecated Use {@code DataVersion.latest().id()} instead.
+ */
+ @Deprecated
+ public static final int DEFAULT_DATA_VERSION = DataVersion.latest().id();
+
+ protected TerrainChunk(int dataVersion) {
+ super(dataVersion);
+ }
+
+ public TerrainChunk(CompoundTag data) {
+ super(data);
+ }
+
+ public TerrainChunk(CompoundTag data, long loadFlags) {
+ super(data, loadFlags);
+ }
+
+ public TerrainChunk() {
+ super(DataVersion.latest().id());
+ data = new CompoundTag();
+ }
+
+ @Override
+ protected TerrainSection createSection(CompoundTag section, int dataVersion, long loadFlags) {
+ return new TerrainSection(section, dataVersion, loadFlags);
+ }
+
+ /**
+ * {@inheritDoc}
+ * Creates an empty {@link TerrainSection} at this chunk's data version. When {@code sectionY}
+ * is {@link SectionBase#NO_SECTION_Y_SENTINEL} the new section is returned detached, i.e. it
+ * is NOT added to this chunk.
+ */
+ @Override
+ public TerrainSection createSection(int sectionY) throws IllegalArgumentException {
+ if (containsSection(sectionY)) throw new IllegalArgumentException("section already exists at section-y " + sectionY);
+ TerrainSection section = new TerrainSection(dataVersion);
+ if (sectionY != SectionBase.NO_SECTION_Y_SENTINEL) {
+ putSection(sectionY, section); // sets section height & validates range
+ }
+ return section;
+ }
+
+ public TerrainSection createSection() {
+ return createSection(SectionBase.NO_SECTION_Y_SENTINEL);
+ }
+
+ /**
+ * @deprecated Dangerous - assumes the latest full release data version defined by {@link DataVersion}
+ * prefer using {@link McaFileBase#createChunk()} or {@link McaFileBase#createChunkIfMissing(int, int)}.
+ */
+ @Deprecated
+ public static TerrainChunk newChunk() {
+ return newChunk(DataVersion.latest().id());
+ }
+
+ /**
+ * Creates a new chunk at the given data version with an empty data tag and
+ * status "mobs_spawned".
+ * @param dataVersion data version the new chunk should use
+ * @return new chunk instance
+ */
+ public static TerrainChunk newChunk(int dataVersion) {
+ TerrainChunk c = new TerrainChunk(dataVersion);
+ c.data = new CompoundTag();
+ // pre-1.18 (21w39a) chunk nbt nests everything under a "Level" tag
+ if (dataVersion < DataVersion.JAVA_1_18_21W39A.id()) {
+ c.data.put("Level", new CompoundTag());
+ }
+ c.status = "mobs_spawned";
+ return c;
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/TerrainChunkBase.java b/src/main/java/io/github/ensgijs/nbt/mca/TerrainChunkBase.java
new file mode 100644
index 00000000..57573723
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/TerrainChunkBase.java
@@ -0,0 +1,1330 @@
+package io.github.ensgijs.nbt.mca;
+
+import io.github.ensgijs.nbt.io.NamedTag;
+import io.github.ensgijs.nbt.mca.util.*;
+import io.github.ensgijs.nbt.tag.*;
+import io.github.ensgijs.nbt.mca.io.MoveChunkFlags;
+import io.github.ensgijs.nbt.query.NbtPath;
+import io.github.ensgijs.nbt.util.ArgValidator;
+
+import java.util.*;
+
+import static io.github.ensgijs.nbt.mca.DataVersion.*;
+import static io.github.ensgijs.nbt.mca.io.LoadFlags.*;
+import static io.github.ensgijs.nbt.mca.io.MoveChunkFlags.*;
+
+/**
+ * Represents a Terrain data mca chunk. Terrain chunks are composed of a set of {@link TerrainSection} where any empty/null
+ * section is filled with air blocks by the game. When altering existing chunks for MC 1.14+, be sure to have read and
+ * understood the documentation on {@link PoiRecord} to avoid problems with villagers, nether portal linking,
+ * lodestones, bees, and probably more as Minecraft continues to evolve.
+ */
+public abstract class TerrainChunkBase extends SectionedChunkBase {
+
+ protected long lastUpdateTick;
+ /** Tick when the chunk was last saved. */
+ public static final VersionAware LAST_UPDATE_TICK_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.LastUpdate"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("LastUpdate"));
+
+ protected long inhabitedTimeTicks;
+ /** Cumulative amount of time players have spent in this chunk in ticks. */
+ public static final VersionAware INHABITED_TIME_TICKS_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.InhabitedTime"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("InhabitedTime"));
+
+ protected int[] legacyBiomes;
+ /**
+ * Only populated for data versions < JAVA_1_18_21W39A. For later data versions use
+ * {@link PalettizedCuboid} and load biomes from {@link TerrainSectionBase#getBiomes()}.
+ * @see minecraft.fandom.com/wiki/Biome/IDs_before_1.13
+ * @see minecraft.fandom.com/wiki/Biome/ID
+ */
+ public static final VersionAware LEGACY_BIOMES_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.Biomes")) // ByteArrayTag
+ .register(JAVA_1_13_18W06A.id(), NbtPath.of("Level.Biomes")) // IntArrayTag
+ .register(JAVA_1_18_21W37A.id(), null); // biomes are now paletted and live in a similar container structure in sections[].biomes
+
+ protected IntArrayTag legacyHeightMap;
+ public static final VersionAware LEGACY_HEIGHT_MAP_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.HeightMap"))
+ .register(JAVA_1_13_18W06A.id(), null);
+
+ protected CompoundTag heightMaps;
+ /**
+ * {@link CompoundTag} mapping various heightmap names to 256 (16x16) values, long[] packed,
+ * min bits per value of 9. Heightmap values are "number of blocks above bottom of world", this is not
+ * the same as block Y position. To compute the block Y value use {@code highestBlockY =
+ * (chunk.yPos * 16) - 1 + heightmap_entry_value}.
+ *
+ * - MOTION_BLOCKING
+ * - MOTION_BLOCKING_NO_LEAVES
+ * - OCEAN_FLOOR
+ * - OCEAN_FLOOR_WG
+ * - WORLD_SURFACE
+ * - WORLD_SURFACE_WG
+ *
+ * @since {@link DataVersion#JAVA_1_13_18W06A}
+ * @see LongArrayTagPackedIntegers
+ */
+ public static final VersionAware HEIGHT_MAPS_PATH = new VersionAware()
+ .register(JAVA_1_13_18W06A.id(), NbtPath.of("Level.Heightmaps"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("Heightmaps"));
+
+ protected CompoundTag carvingMasks;
+ public static final VersionAware CARVING_MASKS_PATH = new VersionAware()
+ .register(JAVA_1_13_18W19A.id(), NbtPath.of("Level.CarvingMasks")) // CompoundTag containing named ByteArrayTag's
+ .register(JAVA_1_18_2_22W03A.id(), NbtPath.of("Level.CarvingMasks")) // CompoundTag containing named LongArrayTag's
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("CarvingMasks")); // CompoundTag containing named LongArrayTag's
+
+ protected ListTag entities; // usage changed for chunk versions >= 2724 (1.17) after which entities are only stored in terrain chunks during world generation.
+ public static final VersionAware ENTITIES_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.Entities"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("entities"));
+
+ protected ListTag tileEntities;
+ public static final VersionAware TILE_ENTITIES_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.TileEntities"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("block_entities"));
+
+ protected ListTag tileTicks;
+ public static final VersionAware TILE_TICKS_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.TileTicks"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("block_ticks"));
+
+ protected ListTag> toBeTicked;
+ public static final VersionAware TO_BE_TICKED_PATH = new VersionAware()
+ .register(JAVA_1_13_18W06A.id(), NbtPath.of("Level.ToBeTicked"))
+ .register(JAVA_1_18_21W43A.id(), null); // unsure when this was removed - but notes on JAVA_1_18_21W43A say it was also "moved to block_ticks" - but the mca scans last saw it in JAVA_1_14_PRE2
+
+ protected ListTag liquidTicks;
+ public static final VersionAware LIQUID_TICKS_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.LiquidTicks"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("fluid_ticks"));
+
+ protected ListTag> liquidsToBeTicked;
+ public static final VersionAware LIQUIDS_TO_BE_TICKED_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.LiquidsToBeTicked"))
+ .register(JAVA_1_18_21W43A.id(), null); // unsure when this was removed - but notes on JAVA_1_18_21W43A say it was also "moved to block_ticks" - but the mca scans last saw it in JAVA_1_14_PRE2
+
+ protected ListTag> lights;
+ public static final VersionAware LIGHTS_PATH = new VersionAware()
+ .register(JAVA_1_13_18W06A.id(), NbtPath.of("Level.Lights"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("Lights"));
+
+ protected ListTag> postProcessing;
+ public static final VersionAware POST_PROCESSING_PATH = new VersionAware()
+ .register(JAVA_1_13_18W06A.id(), NbtPath.of("Level.PostProcessing"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("PostProcessing"));
+
+ protected String status;
+ public static final VersionAware STATUS_PATH = new VersionAware()
+ .register(JAVA_1_13_18W06A.id(), NbtPath.of("Level.Status"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("Status"));
+
+ protected CompoundTag structures;
+ public static final VersionAware STRUCTURES_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.Structures"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("structures"));
+ /** Relative to {@link #STRUCTURES_PATH} */
+ public static final VersionAware STRUCTURES_STARTS_PATH = new VersionAware()
+ .register(0, NbtPath.of("Starts"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("starts"));
+ /** Relative to {@link #STRUCTURES_PATH} */
+ public static final VersionAware STRUCTURES_REFERENCES_PATH = new VersionAware()
+ .register(0, NbtPath.of("References"));
+
+ public static final VersionAware IS_LIGHT_POPULATED_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.LightPopulated"))
+ .register(JAVA_1_13_18W06A.id(), null); // probably replaced by Level.Status progression
+
+ protected Boolean isLightOn;
+ public static final VersionAware IS_LIGHT_ON_PATH = new VersionAware()
+ .register(JAVA_1_14_19W02A.id(), NbtPath.of("Level.isLightOn"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("isLightOn"));
+
+ protected Boolean isTerrainPopulated;
+ public static final VersionAware TERRAIN_POPULATED_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.TerrainPopulated"))
+ .register(JAVA_1_13_18W06A.id(), null); // replaced by Level.Status progression
+
+ protected Boolean hasLegacyStructureData;
+ public static final VersionAware HAS_LEGACY_STRUCTURE_DATA_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.hasLegacyStructureData"))
+ .register(JAVA_1_13_18W20C.id(), null); // might not be exactly correct
+
+ protected CompoundTag upgradeData;
+ public static final VersionAware UPGRADE_DATA_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.UpgradeData"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("UpgradeData"));
+
+ public static final VersionAware SECTIONS_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.Sections"))
+ .register(JAVA_1_18_21W37A.id(), NbtPath.of("sections"));
+
+ public static final VersionAware X_POS_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.xPos"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("xPos"));
+
+ public static final VersionAware Z_POS_PATH = new VersionAware()
+ .register(0, NbtPath.of("Level.zPos"))
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("zPos"));
+
+ /**
+ * Represents world bottom - note there may exist a dummy chunk -1 below this depending on MC flavor and current chunk state.
+ * @since {@link DataVersion#JAVA_1_18_21W43A}
+ */
+ protected int yPos = NO_CHUNK_COORD_SENTINEL;
+ public static final VersionAware Y_POS_PATH = new VersionAware()
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("yPos"));
+ public static final VersionAware DEFAULT_WORLD_BOTTOM_Y_POS = new VersionAware()
+ .register(0, 0)
+ .register(JAVA_1_18_21W43A.id(), -4); // TODO: IDK when exactly they actually enabled deep worlds
+
+ /** @since {@link DataVersion#JAVA_1_18_21W43A} */
+ protected CompoundTag belowZeroRetrogen;
+ public static final VersionAware BELOW_ZERO_RETROGEN_PATH = new VersionAware()
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("below_zero_retrogen"));
+
+ /** @since {@link DataVersion#JAVA_1_18_21W43A} */
+ protected CompoundTag blendingData;
+ public static final VersionAware BLENDING_DATA_PATH = new VersionAware()
+ .register(JAVA_1_18_21W43A.id(), NbtPath.of("blending_data"));
+
+
+ protected TerrainChunkBase(int dataVersion) {
+ super(dataVersion);
+ }
+
+ /**
+ * Create a new chunk based on raw base data from a Terrain region file.
+ * @param data The raw base data to be used.
+ */
+ public TerrainChunkBase(CompoundTag data) {
+ super(data);
+ }
+
+ public TerrainChunkBase(CompoundTag data, long loadFlags) {
+ super(data, loadFlags);
+ }
+
+ @Override
+ protected void initMembers() {
+ // give this a reasonable default
+ yPos = DEFAULT_WORLD_BOTTOM_Y_POS.get(dataVersion);
+ }
+
+ @Override
+ protected void initReferences(final long loadFlags) {
+ // Pre-1.18 (21w39a) chunk nbt nests everything under "Level"; without it the
+ // version-aware paths below cannot resolve, so fail fast.
+ if (dataVersion < JAVA_1_18_21W39A.id()) {
+ if (data.getCompoundTag("Level") == null) {
+ throw new IllegalArgumentException("data does not contain \"Level\" tag");
+ }
+ }
+
+ inhabitedTimeTicks = getTagValue(INHABITED_TIME_TICKS_PATH, LongTag::asLong, 0L);
+ lastUpdateTick = getTagValue(LAST_UPDATE_TICK_PATH, LongTag::asLong, 0L);
+ // legacy (chunk-level) biomes: int[] from 18w06a onward, byte[] before that
+ if (dataVersion < JAVA_1_18_21W37A.id() && (loadFlags & BIOMES) != 0) {
+ if (dataVersion >= DataVersion.JAVA_1_13_18W06A.id()) {
+ legacyBiomes = getTagValue(LEGACY_BIOMES_PATH, IntArrayTag::getValue);
+ } else {
+ byte[] byteBiomes = getTagValue(LEGACY_BIOMES_PATH, ByteArrayTag::getValue);
+ legacyBiomes = new int[byteBiomes.length];
+ for (int i = 0; i < legacyBiomes.length; i++) {
+ legacyBiomes[i] = byteBiomes[i];
+ }
+ }
+ // normalize "present but empty" to "absent"
+ if (legacyBiomes != null && legacyBiomes.length == 0) legacyBiomes = null;
+ } // palette biomes are stored at the section, not chunk, level.
+
+ // each optional payload is only deserialized when its LoadFlags bit is set
+ if ((loadFlags & HEIGHTMAPS) != 0) {
+ legacyHeightMap = getTag(LEGACY_HEIGHT_MAP_PATH);
+ heightMaps = getTag(HEIGHT_MAPS_PATH);
+ }
+ if ((loadFlags & CARVING_MASKS) != 0) {
+ carvingMasks = getTag(CARVING_MASKS_PATH);
+ }
+ if ((loadFlags & ENTITIES) != 0) {
+ entities = getTag(ENTITIES_PATH);
+ }
+ if ((loadFlags & TILE_ENTITIES) != 0) {
+ tileEntities = getTag(TILE_ENTITIES_PATH);
+ }
+ if ((loadFlags & TILE_TICKS) != 0) {
+ tileTicks = getTag(TILE_TICKS_PATH);
+ }
+ if ((loadFlags & TO_BE_TICKED) != 0) {
+ toBeTicked = getTag(TO_BE_TICKED_PATH);
+ }
+ if ((loadFlags & LIGHTS) != 0) {
+ lights = getTag(LIGHTS_PATH);
+ }
+ if ((loadFlags & LIQUID_TICKS) != 0) {
+ liquidTicks = getTag(LIQUID_TICKS_PATH);
+ }
+ if ((loadFlags & LIQUIDS_TO_BE_TICKED) != 0) {
+ liquidsToBeTicked = getTag(LIQUIDS_TO_BE_TICKED_PATH);
+ }
+ if ((loadFlags & POST_PROCESSING) != 0) {
+ postProcessing = getTag(POST_PROCESSING_PATH);
+ }
+
+ status = getTagValue(STATUS_PATH, StringTag::getValue);
+ isLightOn = getTagValue(IS_LIGHT_ON_PATH, ByteTag::asBoolean);
+ isTerrainPopulated = getTagValue(TERRAIN_POPULATED_PATH, ByteTag::asBoolean);
+
+ // TODO: add load flag for this
+ upgradeData = getTag(UPGRADE_DATA_PATH);
+
+ if ((loadFlags & STRUCTURES) != 0) {
+ structures = getTag(STRUCTURES_PATH);
+ hasLegacyStructureData = getTagValue(HAS_LEGACY_STRUCTURE_DATA_PATH, ByteTag::asBoolean);
+ }
+
+ // chunkXZ may be pre-populated with a solid guess so don't overwrite that guess if we don't have values.
+ if (X_POS_PATH.get(dataVersion).exists(data)) {
+ chunkX = getTagValue(X_POS_PATH, t -> ((NumberTag>)t).asInt());
+ }
+ if (Z_POS_PATH.get(dataVersion).exists(data)) {
+ chunkZ = getTagValue(Z_POS_PATH, t -> ((NumberTag>)t).asInt());
+ }
+
+ yPos = getTagValue(Y_POS_PATH, t -> ((NumberTag>)t).asInt(), DEFAULT_WORLD_BOTTOM_Y_POS.get(dataVersion));
+
+ // sections are also needed for biomes from 21w37a onward (biomes moved into sections)
+ boolean loadSections = ((loadFlags & (BLOCK_LIGHTS|BLOCK_STATES|SKY_LIGHT)) != 0)
+ || (dataVersion >= JAVA_1_18_21W37A.id() && ((loadFlags & BIOMES) != 0));
+ if (loadSections) {
+ try {
+ ListTag sections = getTag(SECTIONS_PATH);
+ if (sections != null) {
+ for (CompoundTag section : sections) {
+ T newSection = createSection(section, dataVersion, loadFlags);
+ putSection(newSection.getSectionY(), newSection, false);
+ }
+ }
+ } catch (Exception ex) {
+ // wrap with chunk coordinates so a corrupt section can be located in the mca file
+ throw new RuntimeException("Chunk " + getChunkX() + " " + getChunkZ() + "\n" + ex.getMessage(), ex);
+ }
+ }
+ if ((loadFlags & WORLD_UPGRADE_HINTS) != 0) {
+ belowZeroRetrogen = getTag(BELOW_ZERO_RETROGEN_PATH);
+ blendingData = getTag(BLENDING_DATA_PATH);
+ }
+ }
+
/** Deserializes one chunk section; subclasses bind the concrete section type {@code T}. */
protected abstract T createSection(CompoundTag section, int dataVersion, long loadFlags);
+
+ /** {@inheritDoc} */
+ public String getMcaType() {
+ return "region";
+ }
+
/**
 * May only be used for data versions LT 2203 which includes all of 1.14
 * and up until 19w35a (a 1.15 weekly snapshot).
 * Note: 2D biomes have a resolution of 1x256x1 blocks.
 * @return The biome id, or -1 if the 256-entry biome array is absent or malformed.
 * @deprecated unsupported after {@link DataVersion#JAVA_1_15_19W35A} use {@link #getLegacyBiomeAt(int, int, int)} instead for 1.15 and beyond
 */
@Deprecated
public int getLegacyBiomeAt(int blockX, int blockZ) {
    if (dataVersion > JAVA_1_15_19W35A.id())
        throw new VersionLacksSupportException(dataVersion, null, JAVA_1_15_19W35A,
                "cannot get biome using Chunk#getBiomeAt(int,int) from biome data with DataVersion of 2203 or higher (1.15+), use Chunk#getBiomeAt(int,int,int) instead");
    if (legacyBiomes == null || legacyBiomes.length != 256) {
        return -1;
    }
    return legacyBiomes[getLegacy2dBiomeIndex(blockX, blockZ)];
}
+
+ /**
+ * Fetches a biome id at a specific block in this chunk.
+ * The coordinates can be absolute coordinates or relative to the region or chunk.
+ * Note: 3D biomes have a resolution of 4x4x4 blocks.
+ * @param blockX The x-coordinate of the block.
+ * @param blockY The y-coordinate of the block.
+ * @param blockZ The z-coordinate of the block.
+ * @return The biome id or -1 if the biomes are not correctly initialized.
+ * @deprecated unsupported after {@link DataVersion#JAVA_1_17_1}
+ */
+ public int getLegacyBiomeAt(int blockX, int blockY, int blockZ) {
+ if (dataVersion > JAVA_1_17_1.id())
+ throw new VersionLacksSupportException(dataVersion, null, JAVA_1_17_1, "legacy biomes");
+ if (dataVersion >= JAVA_1_15_19W36A.id()) { // 3D biomes
+ if (legacyBiomes == null || legacyBiomes.length != 1024) {
+ return -1;
+ }
+ int biomeX = (blockX & 0xF) >> 2;
+ int biomeY = (blockY & 0xF) >> 2;
+ int biomeZ = (blockZ & 0xF) >> 2;
+
+ return legacyBiomes[getLegacy3dBiomeIndex(biomeX, biomeY, biomeZ)];
+ } else { // 2D biomes
+ return getLegacyBiomeAt(blockX, blockZ);
+ }
+ }
+
/**
 * Sets the biome id for a whole block column. For 2D biome versions (before 19w36a) this
 * writes one 1x256x1 entry; for 3D biome versions (19w36a through 1.17.1) it fills the
 * entire 4x4 column (all 64 y-slices) with the given biome.
 * Note: 2D biomes have a resolution of 1x256x1 blocks.
 * Lazily allocates (and fills with -1) the biome array if it is absent or has the wrong length.
 * @deprecated unsupported after {@link DataVersion#JAVA_1_17_1}
 * @see #setLegacyBiomeAt(int, int, int, int)
 */
@Deprecated
public void setLegacyBiomeAt(int blockX, int blockZ, int biomeID) {
    checkRaw();
    if (dataVersion > JAVA_1_17_1.id())
        throw new VersionLacksSupportException(dataVersion, null, JAVA_1_17_1, "2D legacy biomes");
    if (dataVersion < JAVA_1_15_19W36A.id()) { // 2D biomes
        if (legacyBiomes == null || legacyBiomes.length != 256) {
            legacyBiomes = new int[256];
            Arrays.fill(legacyBiomes, -1);
        }
        legacyBiomes[getLegacy2dBiomeIndex(blockX, blockZ)] = biomeID;
    } else { // 3D biomes
        if (legacyBiomes == null || legacyBiomes.length != 1024) {
            legacyBiomes = new int[1024];
            Arrays.fill(legacyBiomes, -1);
        }

        int biomeX = (blockX & 0xF) >> 2;
        int biomeZ = (blockZ & 0xF) >> 2;

        // Fill every 4-block-tall slice of the column (64 slices for y 0..255).
        for (int y = 0; y < 64; y++) {
            legacyBiomes[getLegacy3dBiomeIndex(biomeX, y, biomeZ)] = biomeID;
        }
    }
}
+
+ /**
+ * Note: 3D biomes have a resolution of 4x4x4 blocks.
+ * @since {@link DataVersion#JAVA_1_15_19W36A}
+ * @deprecated unsupported after {@link DataVersion#JAVA_1_17_1}
+ * @see #setLegacyBiomeAt(int, int, int, int)
+ */
+ @Deprecated
+ public void setLegacyBiomeAt(int blockX, int blockY, int blockZ, int biomeID) {
+ if (dataVersion < JAVA_1_15_19W36A.id() || dataVersion >= JAVA_1_18_21W37A.id())
+ throw new VersionLacksSupportException(dataVersion, JAVA_1_15_19W36A, JAVA_1_18_21W37A.previous(), "3D legacy biomes");
+ if (legacyBiomes == null || legacyBiomes.length != 1024) {
+ legacyBiomes = new int[1024];
+ Arrays.fill(legacyBiomes, -1);
+ }
+
+ int biomeX = (blockX & 0x0F) >> 2;
+ int biomeY = blockY >> 2;
+ int biomeZ = (blockZ & 0x0F) >> 2;
+ legacyBiomes[getLegacy3dBiomeIndex(biomeX, biomeY, biomeZ)] = biomeID;
+ }
+
+ protected int getLegacy2dBiomeIndex(int blockX, int blockZ) {
+ return (blockZ & 0xF) * 16 + (blockX & 0xF);
+ }
+ protected int getLegacy3dBiomeIndex(int biomeX, int biomeY, int biomeZ) {
+ return biomeY * 16 + biomeZ * 4 + biomeX;
+ }
+
/**
 * {@inheritDoc}
 */
@Override
public void setDataVersion(int dataVersion) {
    super.setDataVersion(dataVersion);
    // Keep every loaded section's data version in sync with the chunk's.
    for (T section : this) {
        if (section != null) {
            section.syncDataVersion(dataVersion);
        }
    }
}
+
/**
 * @return The generation status of this chunk.
 */
public String getStatus() {
    // Fall back to reading the tag directly when the field wasn't populated
    // (e.g. raw / partially loaded chunks).
    return status != null ? status : getTagValue(STATUS_PATH, StringTag::getValue);
}

/**
 * Sets the generation status of this chunk.
 * @param status The generation status of this chunk.
 */
public void setStatus(String status) {
    checkRaw();
    this.status = status;
}
+
/** Value of the isLightOn tag; presumably whether lighting has been computed — TODO confirm. May be null if absent. */
public Boolean getLightOn() {
    return isLightOn;
}

/** Sets the isLightOn flag. NOTE(review): does not call checkRaw() like most setters — confirm intentional. */
public void setLightOn(Boolean lightOn) {
    isLightOn = lightOn;
}

/** Value of the TerrainPopulated tag (pre-1.13 generation flag — TODO confirm). May be null if absent. */
public Boolean getTerrainPopulated() {
    return isTerrainPopulated;
}

/** Sets the TerrainPopulated flag. NOTE(review): no checkRaw() — confirm intentional. */
public void setTerrainPopulated(Boolean terrainPopulated) {
    isTerrainPopulated = terrainPopulated;
}

/** Value of the hasLegacyStructureData tag. May be null if absent. */
public Boolean getHasLegacyStructureData() {
    return hasLegacyStructureData;
}

/** Sets the hasLegacyStructureData flag. NOTE(review): no checkRaw() — confirm intentional. */
public void setHasLegacyStructureData(Boolean hasLegacyStructureData) {
    this.hasLegacyStructureData = hasLegacyStructureData;
}

/** The UpgradeData tag (by reference). May be null if absent. */
public CompoundTag getUpgradeData() {
    return upgradeData;
}

/** Sets the UpgradeData tag. NOTE(review): no checkRaw() — confirm intentional. */
public void setUpgradeData(CompoundTag upgradeData) {
    this.upgradeData = upgradeData;
}
+
// NOTE(review): orphaned comment — appears to describe the section-level BlockLight tag, not
// anything in this class: "2048 bytes recording the amount of block-emitted light in each block.
// Makes load times faster compared to recomputing at load time. 4 bits per block."


/** Tick when the chunk was last saved. */
public long getLastUpdateTick() {
    return lastUpdateTick;
}

/** Sets the tick when the chunk was last saved. NOTE(review): no checkRaw() — confirm intentional. */
public void setLastUpdateTick(long lastUpdateTick) {
    this.lastUpdateTick = lastUpdateTick;
}
+
/**
 * @return The cumulative amount of time players have spent in this chunk in ticks.
 */
public long getInhabitedTimeTicks() {
    return inhabitedTimeTicks;
}

/**
 * Sets the cumulative amount of time players have spent in this chunk in ticks.
 * @param inhabitedTimeTicks The time in ticks.
 */
public void setInhabitedTimeTicks(long inhabitedTimeTicks) {
    checkRaw();
    this.inhabitedTimeTicks = inhabitedTimeTicks;
}
+
/**
 * @return The biome ID array for this chunk, BY REFERENCE (mutations affect this chunk);
 *         null if not loaded/present. 256 entries for 2D biome versions, 1024 for 3D.
 */
public int[] getLegacyBiomes() {
    return legacyBiomes;
}
+
+ /**
+ * Sets the biome IDs for this chunk.
+ * Note: 2D biomes have a resolution of 1x256x1 blocks.
+ * Note: 3D biomes have a resolution of 4x4x4 blocks.
+ * @param legacyBiomes The biome ID matrix of this chunk. Must have a length of {@code 1024} for 1.15+ or {@code 256}
+ * for prior versions.
+ * @throws IllegalArgumentException When the biome matrix is {@code null} or does not have a version appropriate length.
+ */
+ public void setLegacyBiomes(int[] legacyBiomes) {
+ checkRaw();
+ if (dataVersion >= JAVA_1_17_1.id())
+ throw new VersionLacksSupportException(dataVersion, null, JAVA_1_17_1, "2D/3D legacy biomes");
+ if (legacyBiomes != null) {
+ final int requiredSize = dataVersion >= JAVA_1_15_19W36A.id() ? 1024 : 256;
+ if (legacyBiomes.length != requiredSize) {
+ throw new IllegalArgumentException("biomes array must have a length of " + requiredSize);
+ }
+ }
+ this.legacyBiomes = legacyBiomes;
+ }
+
/** {@inheritDoc} */
@Override
public int getWorldMinBlockY() {
    // getChunkY() is the world-bottom SECTION y; 16 blocks per section.
    return getChunkY() * 16;
}

/**
 * @return The height maps of this chunk.
 */
public CompoundTag getHeightMaps() {
    return heightMaps;
}

/**
 * Sets the height maps of this chunk.
 * @param heightMaps The height maps.
 */
public void setHeightMaps(CompoundTag heightMaps) {
    checkRaw();
    this.heightMaps = heightMaps;
}
+
/**
 * 256 (16x16) values. Values are shifted to read as block-y value. A value of {@link #getWorldMinBlockY()} - 1
 * indicates no block present (void).
 * @param name typically one of
 *
 * - MOTION_BLOCKING
 * - MOTION_BLOCKING_NO_LEAVES
 * - OCEAN_FLOOR
 * - OCEAN_FLOOR_WG
 * - WORLD_SURFACE
 * - WORLD_SURFACE_WG
 *
 * @return {@link LongArrayTagPackedIntegers} configured to yield block Y values; null if the
 *         chunk has no height maps or none by the given name.
 * @since {@link DataVersion#JAVA_1_13_18W06A}
 */
public LongArrayTagPackedIntegers getHeightMap(String name) {
    if (getHeightMaps() == null)
        return null;
    var hm = getHeightMaps().getLongArrayTag(name);
    if (hm == null)
        return null;
    // minY - 1 is the "void" sentinel described above; it is also the packed-value offset.
    final int minY = getWorldMinBlockY() - 1;
    final int maxY = getWorldMaxBlockY();
    return LongArrayTagPackedIntegers.builder()
            .dataVersion(dataVersion)
            // vanilla height maps use at least 9 bits per value
            .minBitsPerValue(Math.max(9, LongArrayTagPackedIntegers.calculateBitsRequired(maxY - minY)))
            .valueOffset(minY)
            .length(256)
            .build(hm);
}
+
/** The pre-1.13 HeightMap tag (by reference). May be null. */
public IntArrayTag getLegacyHeightMap() {
    return legacyHeightMap;
}

/** Sets the pre-1.13 HeightMap tag. NOTE(review): no checkRaw() — confirm intentional. */
public void setLegacyHeightMap(IntArrayTag legacyHeightMap) {
    this.legacyHeightMap = legacyHeightMap;
}
+
/**
 * Returns a copy of the palette value at the specified position in this chunk.
 * Modifying the returned value can be done safely, it will have no effect on this chunk.
 * To avoid the overhead of making a copy use {@link #getBiomeAtByRef(int, int, int)} instead.
 *
 * Never throws IndexOutOfBoundsException. XYZ are always wrapped into bounds.
 * @return the element at the specified position in this chunk or NULL if Y is above/below build height.
 * @since {@link DataVersion#JAVA_1_18_21W37A}
 */
public StringTag getBiomeAt(int x, int y, int z) {
    checkRaw();
    if (dataVersion < JAVA_1_18_21W37A.id())
        throw new VersionLacksSupportException(dataVersion, JAVA_1_18_21W37A, null, "3D palette biomes");
    // NOTE(review): y / 16 truncates toward zero — for negative world Y (1.18+) y in [-15,-1]
    // selects section 0 rather than -1; confirm whether floor division (y >> 4) was intended.
    var section = getSection(y / 16);
    if (section == null) return null;
    // biome cells are 4x4x4 within the section
    return section.getBiomes().get((x & 0xF) / 4, (y & 0xF) / 4, (z & 0xF) / 4);
}
+
/**
 * Returns the palette value at the specified position in this chunk.
 * WARNING if the returned value is modified it modifies every value which references the same palette
 * entry within the same chunk section!
 *
 * Never throws IndexOutOfBoundsException. XYZ are always wrapped into bounds.
 * @return the element at the specified position in this chunk or NULL if Y is above/below build height.
 * @since {@link DataVersion#JAVA_1_18_21W37A}
 */
public StringTag getBiomeAtByRef(int x, int y, int z) {
    checkRaw();
    if (dataVersion < JAVA_1_18_21W37A.id())
        throw new VersionLacksSupportException(dataVersion, JAVA_1_18_21W37A, null, "3D palette biomes");
    // NOTE(review): y / 16 truncates toward zero — negative y yields section 0, not -1; confirm intended.
    var section = getSection(y / 16);
    if (section == null) return null;
    return section.getBiomes().getByRef((x & 0xF) / 4, (y & 0xF) / 4, (z & 0xF) / 4);
}
+
/**
 * Replaces the element at the specified position in this chunk with
 * the specified element.
 *
 * Never throws IndexOutOfBoundsException. XYZ are always wrapped into bounds.
 * @return true if the section existed and the biome was set (true even if the value was unchanged)
 * @since {@link DataVersion#JAVA_1_18_21W37A}
 */
public boolean setBiomeAt(int x, int y, int z, StringTag tag) {
    checkRaw();
    if (dataVersion < JAVA_1_18_21W37A.id())
        throw new VersionLacksSupportException(dataVersion, JAVA_1_18_21W37A, null, "3D palette biomes");
    // NOTE(review): y / 16 truncates toward zero — negative y yields section 0, not -1; confirm intended.
    var section = getSection(y / 16);
    if (section != null) {
        section.getBiomes().set((x & 0xF) / 4, (y & 0xF) / 4, (z & 0xF) / 4, tag);
        return true;
    }
    return false;
}
+
/**
 * Returns a copy of the block palette value at the specified position in this chunk.
 * Modifying the returned value can be done safely, it will have no effect on this chunk.
 * To avoid the overhead of making a copy use {@link #getBlockAtByRef(int, int, int)} instead.
 *
 * Never throws IndexOutOfBoundsException. XYZ are always wrapped into bounds.
 * @return the element at the specified position in this chunk or NULL if Y is above/below build height.
 * @since {@link DataVersion#JAVA_1_13_17W47A}
 * @see BlockStateTag
 */
public CompoundTag getBlockAt(int x, int y, int z) {
    checkRaw();
    if (dataVersion < JAVA_1_13_17W47A.id())
        throw new VersionLacksSupportException(dataVersion, JAVA_1_13_17W47A, null, "block palettes");
    // NOTE(review): y / 16 truncates toward zero — negative y yields section 0, not -1; confirm intended.
    var section = getSection(y / 16);
    if (section == null) return null;
    // a section may exist without block state data; treat that as "no block"
    var bs = section.getBlockStates();
    return bs != null ? bs.get(x & 0xF, y & 0xF, z & 0xF) : null;
}
+
/**
 * Returns the block palette value at the specified position in this chunk.
 * WARNING if the returned value is modified it modifies every value which references the same palette
 * entry within the same chunk section!
 *
 * Never throws IndexOutOfBoundsException. XYZ are always wrapped into bounds.
 * @return the element at the specified position in this chunk or NULL if Y is above/below build height.
 * @since {@link DataVersion#JAVA_1_13_17W47A}
 * @see BlockStateTag
 */
public CompoundTag getBlockAtByRef(int x, int y, int z) {
    checkRaw();
    if (dataVersion < JAVA_1_13_17W47A.id())
        throw new VersionLacksSupportException(dataVersion, JAVA_1_13_17W47A, null, "block palettes");
    // NOTE(review): y / 16 truncates toward zero — negative y yields section 0, not -1; confirm intended.
    var section = getSection(y / 16);
    if (section == null) return null;
    var bs = section.getBlockStates();
    return bs != null ? bs.getByRef(x & 0xF, y & 0xF, z & 0xF) : null;
}
+
+ /** nullable */
+ public String getBlockNameAt(int x, int y, int z) {
+ CompoundTag blockTag = getBlockAtByRef(x, y, z);
+ return blockTag != null ? blockTag.getString("Name") : null;
+ }
+
+ /**
+ * Sets the block at the specified location to be defined by tag.
+ *
+ * Never throws IndexOutOfBoundsException. XYZ are always wrapped into bounds.
+ * @param tag block palette tag, must contain a 'Name' StringTag
+ * @return true if the section existed and the block was set (true even if the value was unchanged)
+ * @since {@link DataVersion#JAVA_1_13_17W47A}
+ * @see BlockStateTag
+ */
+ public boolean setBlockAt(int x, int y, int z, CompoundTag tag) {
+ checkRaw();
+ if (dataVersion < JAVA_1_13_17W47A.id())
+ throw new VersionLacksSupportException(dataVersion, JAVA_1_13_17W47A, null, "block palettes");
+ ArgValidator.check(tag.containsKey("Name", StringTag.class), "block palette tag must contain a 'Name' StringTag");
+ var section = getSection(y / 16);
+ if (section != null) {
+ section.getBlockStates().set(x & 0xF, y & 0xF, z & 0xF, tag);
+ return true;
+ }
+ return false;
+ }
+
/**
 * @return The carving masks of this chunk (by reference). May be null.
 */
public CompoundTag getCarvingMasks() {
    return carvingMasks;
}

/**
 * Sets the carving masks of this chunk.
 * @param carvingMasks The carving masks.
 */
public void setCarvingMasks(CompoundTag carvingMasks) {
    checkRaw();
    this.carvingMasks = carvingMasks;
}

/**
 * @return The entities of this chunk (by reference). May be null.
 */
public ListTag getEntities() {
    return entities;
}

/**
 * Sets the entities of this chunk.
 * @param entities The entities.
 */
public void setEntities(ListTag entities) {
    checkRaw();
    this.entities = entities;
}

/**
 * @return The tile entities of this chunk (by reference). May be null.
 */
public ListTag getTileEntities() {
    return tileEntities;
}

/**
 * Sets the tile entities of this chunk.
 * @param tileEntities The tile entities of this chunk.
 */
public void setTileEntities(ListTag tileEntities) {
    checkRaw();
    this.tileEntities = tileEntities;
}

/**
 * @return The tile ticks of this chunk (by reference). May be null.
 */
public ListTag getTileTicks() {
    return tileTicks;
}

/**
 * Sets the tile ticks of this chunk.
 * @param tileTicks The tile ticks.
 */
public void setTileTicks(ListTag tileTicks) {
    checkRaw();
    this.tileTicks = tileTicks;
}

/**
 * @return The liquid ticks of this chunk (by reference). May be null.
 */
public ListTag getLiquidTicks() {
    return liquidTicks;
}

/**
 * Sets the liquid ticks of this chunk.
 * @param liquidTicks The liquid ticks.
 */
public void setLiquidTicks(ListTag liquidTicks) {
    checkRaw();
    this.liquidTicks = liquidTicks;
}
+
// NOTE(review): the generic type parameters on these accessors appear garbled in transit
// ("ListTag>") — restore the element types from VCS history.
/**
 * @return The light sources in this chunk (by reference). May be null.
 */
public ListTag> getLights() {
    return lights;
}

/**
 * Sets the light sources in this chunk.
 * @param lights The light sources.
 */
public void setLights(ListTag> lights) {
    checkRaw();
    this.lights = lights;
}

/**
 * @return The liquids to be ticked in this chunk (by reference). May be null.
 */
public ListTag> getLiquidsToBeTicked() {
    return liquidsToBeTicked;
}

/**
 * Sets the liquids to be ticked in this chunk.
 * @param liquidsToBeTicked The liquids to be ticked.
 */
public void setLiquidsToBeTicked(ListTag> liquidsToBeTicked) {
    checkRaw();
    this.liquidsToBeTicked = liquidsToBeTicked;
}

/**
 * @return Stuff to be ticked in this chunk (by reference). May be null.
 */
public ListTag> getToBeTicked() {
    return toBeTicked;
}

/**
 * Sets stuff to be ticked in this chunk.
 * @param toBeTicked The stuff to be ticked.
 */
public void setToBeTicked(ListTag> toBeTicked) {
    checkRaw();
    this.toBeTicked = toBeTicked;
}

/**
 * @return Things that are in post processing in this chunk (by reference). May be null.
 */
public ListTag> getPostProcessing() {
    return postProcessing;
}

/**
 * Sets things to be post processed in this chunk.
 * @param postProcessing The things to be post processed.
 */
public void setPostProcessing(ListTag> postProcessing) {
    checkRaw();
    this.postProcessing = postProcessing;
}

/**
 * @return Data about structures in this chunk (by reference). May be null.
 */
public CompoundTag getStructures() {
    return structures;
}

/**
 * Sets data about structures in this chunk.
 * @param structures The data about structures.
 */
public void setStructures(CompoundTag structures) {
    checkRaw();
    this.structures = structures;
}
+
/**
 * Gets the world-bottom section y in the chunk.
 */
public int getChunkY() {
    if (yPos != NO_CHUNK_COORD_SENTINEL) return yPos;
    // yPos tag absent (e.g. pre-1.18 data) — fall back to the version-appropriate default.
    return DEFAULT_WORLD_BOTTOM_Y_POS.get(dataVersion);
}
+
/**
 * The below_zero_retrogen tag (by reference); world-upgrade hint data. May be null.
 * @since {@link DataVersion#JAVA_1_18_21W43A}
 */
public CompoundTag getBelowZeroRetrogen() {
    return belowZeroRetrogen;
}

/**
 * The blending_data tag (by reference); world-upgrade hint data. May be null.
 * @since {@link DataVersion#JAVA_1_18_21W43A}
 */
public CompoundTag getBlendingData() {
    return blendingData;
}
+
+
/** {@inheritDoc} */
@Override
public boolean moveChunkImplemented() {
    // Moving requires known chunk XZ plus either the raw data tag or all of the
    // location-bearing collections (structures, tile entities, tile/liquid ticks).
    return raw || ((this.chunkX != NO_CHUNK_COORD_SENTINEL && this.chunkZ != NO_CHUNK_COORD_SENTINEL) &&
            (data != null || (getStructures() != null && getTileEntities() != null && getTileTicks() != null && getLiquidTicks() != null)));
}

/** {@inheritDoc} */
@Override
public boolean moveChunkHasFullVersionSupport() {
    // TODO: Only strongly validated at 1.20.4 - but I believe all versions are supported but should validate
    return true;
}
+
+ // For RAW support
+ private ListTag tagOrFetch(ListTag tag, VersionAware path) {
+ if (tag != null) return tag;
+ return getTag(path);
+ }
+ private CompoundTag tagOrFetch(CompoundTag tag, VersionAware path) {
+ if (tag != null) return tag;
+ return getTag(path);
+ }
+
/** {@inheritDoc} */
@SuppressWarnings("unchecked")
@Override
public boolean moveChunk(int newChunkX, int newChunkZ, long moveChunkFlags, boolean force) {
    if (!moveChunkImplemented())
        throw new UnsupportedOperationException("Missing the data required to move this chunk!");
    // NOTE(review): this validates the CURRENT chunkX/chunkZ, not newChunkX/newChunkZ —
    // confirm whether the destination coordinates were meant to be validated instead.
    if (!RegionBoundingRectangle.MAX_WORLD_BORDER_BOUNDS.containsChunk(chunkX, chunkZ)) {
        throw new IllegalArgumentException("Chunk XZ must be within the maximum world bounds.");
    }
    if (this.chunkX == newChunkX && this.chunkZ == newChunkZ) return false;

    IntPointXZ chunkDeltaXZ;
    if (raw) {
        // read the old data values so we can compute deltaXZ
        this.chunkX = getTagValue(X_POS_PATH, IntTag::asInt);
        this.chunkZ = getTagValue(Z_POS_PATH, IntTag::asInt);
        setTag(X_POS_PATH, new IntTag(newChunkX));
        setTag(Z_POS_PATH, new IntTag(newChunkZ));
    }
    chunkDeltaXZ = new IntPointXZ(newChunkX - this.chunkX, newChunkZ - this.chunkZ);
    this.chunkX = newChunkX;
    this.chunkZ = newChunkZ;

    // Relocate every location-bearing record into the destination chunk's bounds.
    boolean changed = false;
    ChunkBoundingRectangle cbr = new ChunkBoundingRectangle(newChunkX, newChunkZ);
    changed |= fixTileLocations(moveChunkFlags, cbr, tagOrFetch(getTileEntities(), TILE_ENTITIES_PATH));
    changed |= fixTileLocations(moveChunkFlags, cbr, tagOrFetch(getTileTicks(), TILE_TICKS_PATH));
    changed |= fixTileLocations(moveChunkFlags, cbr, tagOrFetch(getLiquidTicks(), LIQUID_TICKS_PATH));
    changed |= moveStructures(moveChunkFlags, tagOrFetch(getStructures(), STRUCTURES_PATH), chunkDeltaXZ);

    // UpgradeData holds lists of tile-like records keyed by unknown names; fix any
    // CompoundTag lists found within (or discard entirely when flagged).
    CompoundTag upgradeTag = tagOrFetch(getUpgradeData(), UPGRADE_DATA_PATH);
    if (upgradeTag != null && !upgradeTag.isEmpty()) {
        if ((moveChunkFlags & DISCARD_UPGRADE_DATA) > 0) {
            upgradeTag.clear();
            changed = true;
        } else {
            for (NamedTag entry : upgradeTag) {
                if (entry.getTag() instanceof ListTag && ((ListTag>) entry.getTag()).getTypeClass().equals(CompoundTag.class)) {
                    changed |= fixTileLocations(moveChunkFlags, cbr, (ListTag) entry.getTag());
                }
            }
        }
    }
    changed |= fixEntitiesLocations(moveChunkFlags, cbr, tagOrFetch(getEntities(), ENTITIES_PATH));

    if (changed) {
        if ((moveChunkFlags & MoveChunkFlags.AUTOMATICALLY_UPDATE_HANDLE) > 0) {
            updateHandle();
        }
        return true;
    }
    return false;

}
+
/** Delegates to the shared entity relocation logic; returns true if any entity position changed. */
protected boolean fixEntitiesLocations(long moveChunkFlags, ChunkBoundingRectangle cbr, ListTag entitiesTagList) {
    return EntitiesChunkBase.fixEntityLocations(dataVersion, moveChunkFlags, entitiesTagList, cbr);
}
+
+ protected boolean fixTileLocations(long moveChunkFlags, ChunkBoundingRectangle cbr, ListTag tagList) {
+ boolean changed = false;
+ if (tagList == null) {
+ return false;
+ }
+ for (CompoundTag tag : tagList) {
+ int x = tag.getInt("x");
+ int z = tag.getInt("z");
+ if (!cbr.containsBlock(x, z)) {
+ changed = true;
+ tag.putInt("x", cbr.relocateX(x));
+ tag.putInt("z", cbr.relocateZ(z));
+ }
+ }
+ return changed;
+ }
+
+ private static final long REMOVE_SENTINEL = 0x8FFFFFFF_8FFFFFFFL;
+ protected boolean moveStructures(long moveChunkFlags, CompoundTag structuresTag, IntPointXZ chunkDeltaXZ) {
+ final CompoundTag references = STRUCTURES_REFERENCES_PATH.get(dataVersion).get(structuresTag);
+ final CompoundTag starts = STRUCTURES_STARTS_PATH.get(dataVersion).get(structuresTag);
+ boolean changed = false;
+
+ // Discard structures if directed to do so.
+ if ((moveChunkFlags & DISCARD_STRUCTURE_DATA) > 0) {
+ if (references != null && !references.isEmpty()) {
+ references.clear();
+ changed = true;
+ }
+ if (starts != null && !starts.isEmpty()) {
+ starts.clear();
+ changed = true;
+ }
+ return changed;
+ }
+
+ // Establish regional bounds iff we are to discard out of region zones
+ final ChunkBoundingRectangle clippingRect;
+ if ((moveChunkFlags & DISCARD_STRUCTURE_REFERENCES_OUTSIDE_REGION) > 0) {
+ clippingRect = RegionBoundingRectangle.forChunk(chunkX, chunkZ);
+ } else {
+ clippingRect = null;
+ }
+
+ // Fix structure reference locations (XZ packed into a long)
+ if (references != null && !references.isEmpty()) {
+ for (Tag> tag : references.values()) {
+ boolean haveRemovals = false;
+ long[] longs = ((LongArrayTag) tag).getValue();
+ for (int i = 0; i < longs.length; i++) {
+ IntPointXZ newXZ = IntPointXZ.unpack(longs[i]).add(chunkDeltaXZ);
+ if (clippingRect != null && !clippingRect.containsChunk(newXZ)) {
+ longs[i] = REMOVE_SENTINEL;
+ haveRemovals = true;
+ } else {
+ longs[i] = IntPointXZ.pack(newXZ);
+ }
+ }
+ if (haveRemovals) {
+ ((LongArrayTag) tag).setValue(
+ Arrays.stream(longs)
+ .filter(l -> l != REMOVE_SENTINEL)
+ .toArray()
+ );
+ }
+ }
+ changed = true;
+ }
+
+ // Iterate and fix structure 'starts' - starts define the area a structure does, or will, occupy
+ // and defines what exists in each, what I'll call, zone of that structure.
+ if (starts != null && !starts.isEmpty()) {
+ IntPointXZ blockDeltaXZ = chunkDeltaXZ.transformChunkToBlock();
+ for (NamedTag startsEntry : starts) {
+ moveStructureStart((CompoundTag) startsEntry.getTag(), chunkDeltaXZ, blockDeltaXZ, clippingRect);
+ }
+ }
+ return changed;
+ }
+
/**
 * NOTE: The given boundsTag tag will be emptied (have a new length of zero) if the move results in an
 * out-of-bounds BB (rbr must be non-null for this to happen). In such a case true is always returned.
 * So, if true is returned the caller MUST check the length of the boundsTag and take appropriate action!
 * NOTE(review): assumes a 6-entry [minX,minY,minZ,maxX,maxY,maxZ] array when the delta is
 * non-zero — an already-emptied tag passed back in would throw AIOOBE; confirm callers never do.
 * @return true if bounds changed
 */
protected static boolean moveBoundingBox(IntArrayTag boundsTag, IntPointXZ blockDeltaXZ, ChunkBoundingRectangle clippingRect) {
    boolean changed = false;
    if (boundsTag != null) {
        int[] bounds = boundsTag.getValue();
        if (!blockDeltaXZ.isZero()) {
            // shift min XZ (indexes 0,2) and max XZ (indexes 3,5); Y (1,4) is untouched
            bounds[0] = bounds[0] + blockDeltaXZ.getX();
            bounds[2] = bounds[2] + blockDeltaXZ.getZ();
            bounds[3] = bounds[3] + blockDeltaXZ.getX();
            bounds[5] = bounds[5] + blockDeltaXZ.getZ();
            changed = true;
        }
        if (clippingRect != null && !clippingRect.constrain(bounds)) {
            // completely outside the clipping rect — signal removal via an empty array
            boundsTag.setValue(new int[0]);
            return true;
        }
    }
    return changed;
}
+
/**
 * Moves a single structure start record: shifts its bounding boxes, chunk/block anchor
 * coordinates, child pieces, entrances, and jigsaw junctions by the given deltas, removing
 * anything that falls outside the optional clipping rectangle.
 * @return true if anything in the record was modified.
 * @see <a href="https://minecraft.wiki/w/Chunk_format">wiki Chunk_format</a>
 */
@SuppressWarnings("unchecked")
protected boolean moveStructureStart(CompoundTag startsTag, IntPointXZ chunkDeltaXZ, IntPointXZ blockDeltaXZ, ChunkBoundingRectangle clippingRect) {
    if ("INVALID".equals(startsTag.getString("id"))) return false;
    boolean changed = false;

    // If the overall bounding box is invalid then discard and invalidate the entire structure.
    // I don't see how this scenario is possible in practice for well formatted chunks.
    // FYI the BB tag doesn't exist for all structures at this level.
    IntArrayTag startsBbTag = startsTag.getIntArrayTag("BB");
    if (moveBoundingBox(startsBbTag, blockDeltaXZ, clippingRect)) {
        if (startsBbTag.length() == 0) {
            startsTag.clear();
            startsTag.putString("id", "INVALID");
            return true;
        }
        changed = true;
    }

    if (startsTag.containsKey("ChunkX") && chunkDeltaXZ.getX() != 0) {
        startsTag.putInt("ChunkX", chunkDeltaXZ.getX() + startsTag.getInt("ChunkX"));
        changed = true;
    }
    if (startsTag.containsKey("ChunkZ") && chunkDeltaXZ.getZ() != 0) {
        startsTag.putInt("ChunkZ", chunkDeltaXZ.getZ() + startsTag.getInt("ChunkZ"));
        changed = true;
    }

    // List of chunks that have had their piece of the structure created.
    // Unsure when this tag shows up - maybe during generation - maybe only for specific structures.
    // NOTE(review): entries are adjusted by the BLOCK delta and clipped with containsBlock —
    // verify these X/Z are block coords and not chunk coords.
    if (startsTag.containsKey("Processed")) {
        ListTag processedListTag = startsTag.getCompoundList("Processed");
        Iterator processedIter = processedListTag.iterator();
        while (processedIter.hasNext()) {
            CompoundTag processedTag = processedIter.next();

            if (processedTag.containsKey("X") && blockDeltaXZ.getX() != 0) {
                processedTag.putInt("X", blockDeltaXZ.getX() + processedTag.getInt("X"));
                changed = true;
            }
            if (processedTag.containsKey("Z") && blockDeltaXZ.getZ() != 0) {
                processedTag.putInt("Z", blockDeltaXZ.getZ() + processedTag.getInt("Z"));
                changed = true;
            }
            if (clippingRect != null && !clippingRect.containsBlock(processedTag.getInt("X"), processedTag.getInt("Z"))) {
                processedIter.remove();
                changed = true;
            }
        }
    }

    if (startsTag.containsKey("Children")) {
        ListTag childrenListTag = startsTag.getCompoundList("Children");
        Iterator childIter = childrenListTag.iterator();
        while (childIter.hasNext()) {
            CompoundTag childTag = childIter.next();
            // bounding box of structure part - note some block geometry may overhang these bounds (like roofs)
            IntArrayTag childBbTag = childTag.getIntArrayTag("BB");
            if (moveBoundingBox(childBbTag, blockDeltaXZ, clippingRect)) {
                changed = true;
                if (childBbTag.length() == 0) {
                    // piece clipped entirely out of bounds — drop it
                    childIter.remove();
                    continue;
                }
            }

            // List of entrances/exits from the room. Probably for structure generation to know
            // where additional structure parts can be placed to continue to grow the structure.
            if (childTag.containsKey("Entrances")) {
                ListTag entrancesListTag = childTag.getListTagAutoCast("Entrances");
                Iterator entranceIter = entrancesListTag.iterator();
                while (entranceIter.hasNext()) {
                    IntArrayTag entranceBbTag = entranceIter.next();
                    if (moveBoundingBox(entranceBbTag, blockDeltaXZ, clippingRect)) {
                        changed = true;
                        if (entranceBbTag.length() == 0) {
                            entranceIter.remove();
                        }
                    }
                }
            }

            // coordinate origin of structure part
            if (childTag.containsKey("PosX") && blockDeltaXZ.getX() != 0) {
                childTag.putInt("PosX", blockDeltaXZ.getX() + childTag.getInt("PosX"));
                changed = true;
            }
            if (childTag.containsKey("PosZ") && blockDeltaXZ.getZ() != 0) {
                childTag.putInt("PosZ", blockDeltaXZ.getZ() + childTag.getInt("PosZ"));
                changed = true;
            }

            // coordinate origin of ocean ruin or shipwreck
            if (childTag.containsKey("TPX") && blockDeltaXZ.getX() != 0) {
                childTag.putInt("TPX", blockDeltaXZ.getX() + childTag.getInt("TPX"));
                changed = true;
            }
            if (childTag.containsKey("TPZ") && blockDeltaXZ.getZ() != 0) {
                childTag.putInt("TPZ", blockDeltaXZ.getZ() + childTag.getInt("TPZ"));
                changed = true;
            }

            // Anything using jigsaw blocks has a 'junctions' record
            if (childTag.containsKey("junctions")) {
                ListTag junctionsListTag = childTag.getCompoundList("junctions");
                Iterator junctionIter = junctionsListTag.iterator();
                while (junctionIter.hasNext()) {
                    CompoundTag junctionTag = junctionIter.next();
                    if (blockDeltaXZ.getX() != 0) {
                        junctionTag.putInt("source_x", blockDeltaXZ.getX() + junctionTag.getInt("source_x"));
                        changed = true;
                    }
                    if (blockDeltaXZ.getZ() != 0) {
                        junctionTag.putInt("source_z", blockDeltaXZ.getZ() + junctionTag.getInt("source_z"));
                        changed = true;
                    }
                    if (clippingRect != null && !clippingRect.containsBlock(junctionTag.getInt("source_x"), junctionTag.getInt("source_z"))) {
                        junctionIter.remove();
                        changed = true;
                    }
                }
                // TODO: unsure how to behave if the junctions list is emptied - maybe remove the child?
                // Scenario to look for: village/pillager outpost that abuts a region bound and has a junction
                // crossing that bound where on one side there's a leaf node BB that hangs over the bound
                // and only has junctions into one of the two regions.
            }
        }
        // TODO: unsure if this logic is needed, or if there are structures which have no Children, so leaving it out for now
//            if (childrenListTag.isEmpty() && !startsTag.containsKey("BB")) {
//                startsTag.clear();
//                startsTag.putString("id", "INVALID");
//                return true;
//            }
    }

    return changed;
}
+
/**
 * {@inheritDoc}
 */
@Override
public CompoundTag updateHandle() {
    // In raw mode the data tag IS the source of truth — nothing to sync.
    if (raw) {
        return data;
    }
    this.data = super.updateHandle();
    setTag(LAST_UPDATE_TICK_PATH, new LongTag(lastUpdateTick));
    setTag(INHABITED_TIME_TICKS_PATH, new LongTag(inhabitedTimeTicks));
    if (legacyBiomes != null && dataVersion < JAVA_1_18_21W37A.id()) {
        final int requiredSize = dataVersion <= 0 || dataVersion >= JAVA_1_15_19W36A.id() ? 1024 : 256;
        // NOTE(review): the message says "bytes" but requiredSize counts array entries (ints).
        if (legacyBiomes.length != requiredSize)
            throw new IllegalStateException(
                    String.format("Biomes array must be %d bytes for version %d, array size is %d",
                            requiredSize, dataVersion, legacyBiomes.length));

        // 1.13 18w06a+ stores biomes as int[]; older versions as byte[]
        if (dataVersion >= DataVersion.JAVA_1_13_18W06A.id()) {
            setTag(LEGACY_BIOMES_PATH, new IntArrayTag(legacyBiomes));
        } else {
            byte[] byteBiomes = new byte[legacyBiomes.length];
            for (int i = 0; i < legacyBiomes.length; i++) {
                byteBiomes[i] = (byte) legacyBiomes[i];
            }
            setTag(LEGACY_BIOMES_PATH, new ByteArrayTag(byteBiomes));
        }
    }
    setTagIfNotNull(LEGACY_HEIGHT_MAP_PATH, legacyHeightMap);
    setTagIfNotNull(HEIGHT_MAPS_PATH, heightMaps);
    setTagIfNotNull(CARVING_MASKS_PATH, carvingMasks);
    setTagIfNotNull(ENTITIES_PATH, entities);
    setTagIfNotNull(TILE_ENTITIES_PATH, tileEntities);
    setTagIfNotNull(TILE_TICKS_PATH, tileTicks);
    setTagIfNotNull(LIQUID_TICKS_PATH, liquidTicks);
    setTagIfNotNull(LIGHTS_PATH, lights);
    setTagIfNotNull(LIQUIDS_TO_BE_TICKED_PATH, liquidsToBeTicked);
    setTagIfNotNull(TO_BE_TICKED_PATH, toBeTicked);
    setTagIfNotNull(POST_PROCESSING_PATH, postProcessing);
    if (status != null) setTag(STATUS_PATH, new StringTag(status));
    if (isLightOn != null) setTag(IS_LIGHT_ON_PATH, new ByteTag(isLightOn));
    if (isTerrainPopulated != null) setTag(TERRAIN_POPULATED_PATH, new ByteTag(isTerrainPopulated));
    setTagIfNotNull(STRUCTURES_PATH, structures);
    if (hasLegacyStructureData != null) setTag(HAS_LEGACY_STRUCTURE_DATA_PATH, new ByteTag(hasLegacyStructureData));

    // TODO: This logic does not respect original load flags! However, this is a long standing bug so
    //   simply "fixing" it may break consumers... I no longer care about existing consumers and
    //   need to figure out what that "fix" I was referring to was -.-
    ListTag sections = new ListTag<>(CompoundTag.class);
    for (T section : this) {
        if (section != null) {
            sections.add(section.updateHandle()); // contract of iterator assures correctness of "height" aka section-y
        }
    }
    setTag(SECTIONS_PATH, sections);

    setTag(X_POS_PATH, new IntTag(getChunkX()));
    setTag(Z_POS_PATH, new IntTag(getChunkZ()));
    // yPos and the world-upgrade hint tags only exist from 1.18 21w43a onward
    if (dataVersion >= JAVA_1_18_21W43A.id()) {
        setTag(Y_POS_PATH, new IntTag(getChunkY()));
        setTagIfNotNull(BELOW_ZERO_RETROGEN_PATH, belowZeroRetrogen);
        setTagIfNotNull(BLENDING_DATA_PATH, blendingData);
    }
    return data;
}
+
/**
 * Updates the handle and overrides the stored chunk XZ with the given values
 * (pass NO_CHUNK_COORD_SENTINEL to keep the current coordinate).
 */
@Override
public CompoundTag updateHandle(int xPos, int zPos) {
    if (raw) {
        return data;
    }
    // TODO: moveChunk or die if given xPos != chunkX and same for z?
    updateHandle();
    if (xPos != NO_CHUNK_COORD_SENTINEL)
        setTag(X_POS_PATH, new IntTag(chunkX = xPos));
    // Y_POS_PATH is set in updateHandle() - was added in 1.18
    if (zPos != NO_CHUNK_COORD_SENTINEL)
        setTag(Z_POS_PATH, new IntTag(chunkZ = zPos));
    return data;
}
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/TerrainMCAFileBase.java b/src/main/java/io/github/ensgijs/nbt/mca/TerrainMCAFileBase.java
new file mode 100644
index 00000000..695b9622
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/TerrainMCAFileBase.java
@@ -0,0 +1,135 @@
+//package net.rossquerz.mca;
+//
+//import CompoundTag;
+//
+///**
+// * Represents a TERRAIN data mca file (those that live in the /region folder).
+// * TODO: Unused - was my intention to factor out McaRegionFile terrain stuff to here and make McaRegionFile a version abstraction layer? And why is this called "base"?
+// */
+//public class TerrainMCAFileBase extends McaFileBase implements Iterable {
+// /**
+// * The default chunk data version used when no custom version is supplied.
+// * Deprecated: use {@code DataVersion.latest().id()} instead.
+// */
+// @Deprecated
+// public static final int DEFAULT_DATA_VERSION = DataVersion.latest().id();
+//
+// /**
+// * {@inheritDoc}
+// */
+// public TerrainMCAFileBase(int regionX, int regionZ) {
+// super(regionX, regionZ);
+// }
+//
+// /**
+// * {@inheritDoc}
+// */
+// public TerrainMCAFileBase(int regionX, int regionZ, int defaultDataVersion) {
+// super(regionX, regionZ, defaultDataVersion);
+// }
+//
+// /**
+// * {@inheritDoc}
+// */
+// public TerrainMCAFileBase(int regionX, int regionZ, DataVersion defaultDataVersion) {
+// super(regionX, regionZ, defaultDataVersion);
+// }
+//
+// /**
+// * {@inheritDoc}
+// */
+// @Override
+// public Class chunkClass() {
+// return TerrainChunk.class;
+// }
+//
+// /**
+// * {@inheritDoc}
+// */
+// @Override
+// public TerrainChunk createChunk() {
+// return TerrainChunk.newChunk(defaultDataVersion);
+// }
+//
+// /**
+// * @deprecated Use {@link #setBiomeAt(int, int, int, int)} instead
+// */
+// @Deprecated
+// public void setBiomeAt(int blockX, int blockZ, int biomeID) {
+// createChunkIfMissing(blockX, blockZ).setBiomeAt(blockX, blockZ, biomeID);
+// }
+//
+// public void setBiomeAt(int blockX, int blockY, int blockZ, int biomeID) {
+// createChunkIfMissing(blockX, blockZ).setBiomeAt(blockX, blockY, blockZ, biomeID);
+// }
+//
+// /**
+// * @deprecated Use {@link #getBiomeAt(int, int, int)} instead
+// */
+// @Deprecated
+// public int getBiomeAt(int blockX, int blockZ) {
+// int chunkX = McaFileHelpers.blockToChunk(blockX), chunkZ = McaFileHelpers.blockToChunk(blockZ);
+// TerrainChunk chunk = getChunk(getChunkIndex(chunkX, chunkZ));
+// if (chunk == null) {
+// return -1;
+// }
+// return chunk.getBiomeAt(blockX, blockZ);
+// }
+//
+// /**
+// * Fetches the biome id at a specific block.
+// * @param blockX The x-coordinate of the block.
+// * @param blockY The y-coordinate of the block.
+// * @param blockZ The z-coordinate of the block.
+// * @return The biome id if the chunk exists and the chunk has biomes, otherwise -1.
+// */
+// public int getBiomeAt(int blockX, int blockY, int blockZ) {
+// int chunkX = McaFileHelpers.blockToChunk(blockX), chunkZ = McaFileHelpers.blockToChunk(blockZ);
+// TerrainChunk chunk = getChunk(getChunkIndex(chunkX, chunkZ));
+// if (chunk == null) {
+// return -1;
+// }
+// return chunk.getBiomeAt(blockX,blockY, blockZ);
+// }
+//
+//// /**
+//// * Set a block state at a specific block location.
+//// * The block coordinates can be absolute coordinates or they can be relative to the region.
+//// * @param blockX The x-coordinate of the block.
+//// * @param blockY The y-coordinate of the block.
+//// * @param blockZ The z-coordinate of the block.
+//// * @param state The block state to be set.
+//// * @param cleanup Whether the Palette and the BLockStates should be recalculated after adding the block state.
+//// */
+//// public void setBlockStateAt(int blockX, int blockY, int blockZ, CompoundTag state, boolean cleanup) {
+//// createChunkIfMissing(blockX, blockZ).setBlockStateAt(blockX, blockY, blockZ, state, cleanup);
+//// }
+////
+//// /**
+//// * Fetches a block state at a specific block location.
+//// * The block coordinates can be absolute coordinates or they can be relative to the region.
+//// * @param blockX The x-coordinate of the block.
+//// * @param blockY The y-coordinate of the block.
+//// * @param blockZ The z-coordinate of the block.
+//// * @return The block state or null if the chunk or the section do not exist.
+//// */
+//// public CompoundTag getBlockStateAt(int blockX, int blockY, int blockZ) {
+//// int chunkX = McaFileHelpers.blockToChunk(blockX), chunkZ = McaFileHelpers.blockToChunk(blockZ);
+//// TerrainChunk chunk = getChunk(chunkX, chunkZ);
+//// if (chunk == null) {
+//// return null;
+//// }
+//// return chunk.getBlockStateAt(blockX, blockY, blockZ);
+//// }
+//
+//// /**
+//// * Recalculates the Palette and the BlockStates of all chunks and sections of this region.
+//// */
+//// public void cleanupPalettesAndBlockStates() {
+//// for (TerrainChunk chunk : chunks) {
+//// if (chunk != null) {
+//// chunk.cleanupPalettesAndBlockStates();
+//// }
+//// }
+//// }
+//}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/TerrainSection.java b/src/main/java/io/github/ensgijs/nbt/mca/TerrainSection.java
new file mode 100644
index 00000000..77a9a0c9
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/TerrainSection.java
@@ -0,0 +1,65 @@
+package io.github.ensgijs.nbt.mca;
+
+import io.github.ensgijs.nbt.tag.CompoundTag;
+
+/**
+ * Represents a Terrain data chunk section. See notes on {@link TerrainChunk} for possible
+ * future repurposing ideas.
+ */
+public class TerrainSection extends TerrainSectionBase {
+
+ /** Loads a section from the given tag, reading all data. */
+ public TerrainSection(CompoundTag sectionRoot, int dataVersion) {
+ super(sectionRoot, dataVersion);
+ }
+
+ /** Loads a section from the given tag, reading only the data selected by {@code loadFlags}. */
+ public TerrainSection(CompoundTag sectionRoot, int dataVersion, long loadFlags) {
+ super(sectionRoot, dataVersion, loadFlags);
+ }
+
+ /** Creates an empty section appropriate for the given data version. */
+ public TerrainSection(int dataVersion) {
+ super(dataVersion);
+ }
+
+ /**
+ * @return An empty Section initialized using the latest full release data version.
+ * @deprecated Dangerous - prefer using {@link TerrainChunk#createSection(int)} or using the
+ * {@link #TerrainSection(int)} constructor instead.
+ */
+ @Deprecated
+ public static TerrainSection newSection() {
+ return new TerrainSection(DataVersion.latest().id());
+ }
+
+ /**
+ * This method should only be used for building sections prior to adding to a chunk where you want to use this
+ * section's height property for the convenience of not having to track the value separately.
+ *
+ * @deprecated To set section height (aka section-y) use
+ * {@code chunk.putSection(int, SectionBase, boolean)} instead of this function. Setting the section height
+ * by calling this function WILL NOT have any effect upon the section's height in the Chunk or MCA data when
+ * serialized.
+ */
+ @Deprecated
+ public void setHeight(int height) {
+ syncHeight(height);
+ }
+
+ /**
+ * Updates the raw CompoundTag that this Section is based on.
+ *
+ * @param y The Y-value of this Section to include in the returned tag.
+ * DOES NOT update this section's height value permanently.
+ * @return A reference to the raw CompoundTag this Section is based on
+ * @deprecated The holding chunk is the authority on this section's y / height and takes care of all updates to it.
+ */
+ @Deprecated
+ public CompoundTag updateHandle(int y) {
+ final int oldY = sectionY;
+ try {
+ // temporarily swap sectionY so updateHandle() serializes the requested y, then restore it
+ sectionY = y;
+ return updateHandle();
+ } finally {
+ sectionY = oldY;
+ }
+ }
+}
diff --git a/src/main/java/io/github/ensgijs/nbt/mca/TerrainSectionBase.java b/src/main/java/io/github/ensgijs/nbt/mca/TerrainSectionBase.java
new file mode 100644
index 00000000..e7c2999d
--- /dev/null
+++ b/src/main/java/io/github/ensgijs/nbt/mca/TerrainSectionBase.java
@@ -0,0 +1,288 @@
+package io.github.ensgijs.nbt.mca;
+
+import io.github.ensgijs.nbt.io.TextNbtParser;
+import io.github.ensgijs.nbt.tag.*;
+import io.github.ensgijs.nbt.mca.util.PalettizedCuboid;
+
+import static io.github.ensgijs.nbt.mca.DataVersion.*;
+import static io.github.ensgijs.nbt.mca.io.LoadFlags.*;
+
+/**
+ * Provides the base for all terrain section classes.
+ */
+public abstract class TerrainSectionBase extends SectionBase {
+ protected static final CompoundTag AIR_PALETTE_TAG = TextNbtParser.parseInline("{Name: \"minecraft:air\"}");
+ /** Use with care! Be sure to clone this value when used or really bad bugs are going to happen. */
+ // (sic: "SATES" is a typo in the field name - kept as-is; renaming would need to touch all usages)
+ protected static final CompoundTag DEFAULT_BLOCK_SATES_TAG = new PalettizedCuboid<>(16, AIR_PALETTE_TAG).toCompoundTag();
+ /** Use with care! Be sure to clone this value when used or really bad bugs are going to happen. */
+ protected static final CompoundTag DEFAULT_BIOMES_TAG = new PalettizedCuboid<>(4, new StringTag("minecraft:plains")).toCompoundTag();
+
+ /** Only populated for MC version < 1.13 - 4096 (16^3) block id's */
+ protected byte[] legacyBlockIds;
+ /** Only populated for MC version < 1.13 - 4096 (16^3) block data values (presumably nibble-packed into 2048 bytes - TODO confirm) */
+ protected byte[] legacyBlockDataValues;
+
+ /**
+ * Only populated for MC version >= JAVA_1_13_17W47A; note bit packing changed in JAVA_1_16_20W17A
+ * @see PalettizedCuboid
+ * @since {@link DataVersion#JAVA_1_13_17W47A}
+ */
+ protected PalettizedCuboid blockStates;
+ /**
+ * Only populated for MC version >= JAVA_1_18_21W37A (~ 1.18 pre1)
+ * @see PalettizedCuboid
+ * @since {@link DataVersion#JAVA_1_18_21W37A}
+ */
+ protected PalettizedCuboid biomes;
+
+ // 2048 bytes when present - the length enforced by setBlockLight/setSkyLight below
+ protected byte[] blockLight;
+ protected byte[] skyLight;
+
+ /** Allocates a zeroed 2048 byte block light buffer (the length {@link #setBlockLight(byte[])} requires). */
+ public static byte[] createBlockLightBuffer() {
+ return new byte[2048];
+ }
+
+ /** Allocates a zeroed 2048 byte sky light buffer (the length {@link #setSkyLight(byte[])} requires). */
+ public static byte[] createSkyLightBuffer() {
+ return new byte[2048];
+ }
+
+ /** Loads a section from the given tag, reading all data. */
+ public TerrainSectionBase(CompoundTag sectionRoot, int dataVersion) {
+ this(sectionRoot, dataVersion, LOAD_ALL_DATA);
+ }
+
+ /** Loads a section from the given tag, reading only the data selected by {@code loadFlags}. */
+ public TerrainSectionBase(CompoundTag sectionRoot, int dataVersion, long loadFlags) {
+ super(sectionRoot, dataVersion, loadFlags);
+ }
+
+ /**
+  * Populates this section's fields from the backing {@code data} tag, honoring the given load flags.
+  * Handles three historical layouts: pre-1.13 fixed-id byte arrays, 1.13..1.18 "Palette"/"BlockStates",
+  * and the modern 1.18+ "block_states"/"biomes" palette containers.
+  */
+ protected void initReferences(final long loadFlags) {
+ sectionY = data.getNumber("Y").byteValue();
+ if ((loadFlags & BIOMES) != 0) {
+ // Prior to JAVA_1_18_21W37A biomes were stored at the chunk level in a ByteArrayTag and used fixed ID's
+ // Currently they are stored in a palette object at the section level
+ if (dataVersion >= JAVA_1_18_21W37A.id()) {
+ biomes = PalettizedCuboid.fromCompoundTag(data.getCompoundTag("biomes"), 4, dataVersion);
+ }
+ }
+ if ((loadFlags & BLOCK_LIGHTS) != 0) {
+ ByteArrayTag blockLight = data.getByteArrayTag("BlockLight");
+ if (blockLight != null) this.blockLight = blockLight.getValue();
+ }
+ if ((loadFlags & BLOCK_STATES) != 0) {
+ // Block palettes were added in 1.13 - prior to this the "Blocks" ByteArrayTag was used with fixed id's
+ // In JAVA_1_16_20W17A palette data bit packing scheme changed
+ // In JAVA_1_18_21W37A the section tag structure changed significantly and 'BlockStates' and 'Palette' were moved inside 'block_states' and renamed.
+ if (dataVersion < JAVA_1_13_17W47A.id()) {
+ ByteArrayTag legacyBlockIds = data.getByteArrayTag("Blocks");
+ if (legacyBlockIds != null) this.legacyBlockIds = legacyBlockIds.getValue();
+ ByteArrayTag legacyBlockDataValues = data.getByteArrayTag("Data");
+ if (legacyBlockDataValues != null) this.legacyBlockDataValues = legacyBlockDataValues.getValue();
+ } else if (dataVersion <= JAVA_1_18_21W37A.id()) {
+ // NOTE(review): the biomes branch above treats dataVersion == JAVA_1_18_21W37A as the NEW layout,
+ // while this '<=' treats the same version as the LEGACY layout - one of the two boundaries looks
+ // off by one; confirm which layout 21W37A actually wrote before changing either comparison.
+ if (data.containsKey("Palette")) {
+ ListTag palette = data.getListTag("Palette").asCompoundTagList();
+ LongArrayTag blockStatesTag = data.getLongArrayTag("BlockStates"); // may be null
+ // up-convert to the modern block_states structure to simplify handling
+ CompoundTag paletteContainerTag = new CompoundTag(2);
+ paletteContainerTag.put("palette", palette);
+ if (blockStatesTag != null && blockStatesTag.length() > 0)
+ paletteContainerTag.put("data", blockStatesTag);
+ this.blockStates = PalettizedCuboid.fromCompoundTag(paletteContainerTag, 16, dataVersion);
+ }
+ } else {
+ blockStates = PalettizedCuboid.fromCompoundTag(data.getCompoundTag("block_states"), 16, dataVersion);
+ }
+ }
+ if ((loadFlags & SKY_LIGHT) != 0) {
+ ByteArrayTag skyLight = data.getByteArrayTag("SkyLight");
+ if (skyLight != null) this.skyLight = skyLight.getValue();
+ }
+ }
+
+ /**
+  * Creates an empty section initialized for the given data version: zeroed light buffers plus either
+  * modern palettized containers (1.13+ block states, 1.18+ biomes) or the legacy fixed-id byte arrays.
+  * @param dataVersion data version the new section should target
+  */
+ public TerrainSectionBase(int dataVersion) {
+ super(dataVersion);
+ blockLight = createBlockLightBuffer();
+ skyLight = createSkyLightBuffer();
+
+ if (dataVersion >= JAVA_1_13_17W47A.id()) {
+ // blockStatesTag normalized to 1.18+
+ blockStates = PalettizedCuboid.fromCompoundTag(DEFAULT_BLOCK_SATES_TAG.clone(), 16, dataVersion);
+ } else {
+ // "Blocks" stores one byte per block (16^3 = 4096) while "Data" packs two 4-bit values per byte (2048).
+ // Previously allocated as new byte[2048], which under-sized the block id array (see field doc:
+ // "4096 (16^3) block id's").
+ legacyBlockIds = new byte[4096];
+ legacyBlockDataValues = new byte[2048];
+ }
+ if (dataVersion >= JAVA_1_18_21W37A.id()) {
+ biomes = PalettizedCuboid.fromCompoundTag(DEFAULT_BIOMES_TAG.clone(), 4, dataVersion);
+ }
+ }
+
+ /** {@inheritDoc} */
+ @Override
+ protected void syncDataVersion(int newDataVersion) {
+ super.syncDataVersion(newDataVersion);
+ // keep the palettized containers' serialization format in sync with the new version
+ if (blockStates != null) blockStates.setDataVersion(newDataVersion);
+ if (biomes != null) biomes.setDataVersion(newDataVersion);
+ }
+
+ /**
+  * @return the 2048 byte block light buffer of this Section, or null when absent
+  */
+ public byte[] getBlockLight() {
+ return blockLight;
+ }
+
+ /**
+  * Replaces the block light buffer of this section; a null value clears it.
+  * @param blockLight buffer of exactly 2048 bytes, or null
+  * @throws IllegalArgumentException if non-null and not exactly 2048 bytes long
+  */
+ public void setBlockLight(byte[] blockLight) {
+ if (blockLight == null || blockLight.length == 2048) {
+ this.blockLight = blockLight;
+ } else {
+ throw new IllegalArgumentException("BlockLight array must have a length of 2048");
+ }
+ }
+
+ /**
+  * @return the 2048 byte sky light buffer of this Section, or null when absent
+  */
+ public byte[] getSkyLight() {
+ return skyLight;
+ }
+
+ /**
+  * Replaces the sky light buffer of this section; a null value clears it.
+  * @param skyLight buffer of exactly 2048 bytes, or null
+  * @throws IllegalArgumentException if non-null and not exactly 2048 bytes long
+  */
+ public void setSkyLight(byte[] skyLight) {
+ if (skyLight == null || skyLight.length == 2048) {
+ this.skyLight = skyLight;
+ } else {
+ throw new IllegalArgumentException("SkyLight array must have a length of 2048");
+ }
+ }
+
+ /** Only populated for MC version < 1.13 - 4096 (16^3) block data values */
+ public byte[] getLegacyBlockDataValues() {
+ return legacyBlockDataValues;
+ }
+
+ /**
+  * Sets the legacy (pre-1.13) block data values array.
+  * NOTE(review): unlike setBlockLight/setSkyLight, the array length is not validated here - confirm
+  * whether a length check (presumably 2048) was intentionally omitted.
+  * @throws VersionLacksSupportException if this section's data version is >= JAVA_1_13_17W47A
+  * @return this (for chaining)
+  */
+ public TerrainSectionBase setLegacyBlockDataValues(byte[] legacyBlockDataValues) {
+ if (dataVersion >= JAVA_1_13_17W47A.id()) {
+ throw new VersionLacksSupportException(dataVersion, null, JAVA_1_13_17W47A.previous(), "legacyBlockDataValues");
+ }
+ this.legacyBlockDataValues = legacyBlockDataValues;
+ return this;
+ }
+
+ /**
+  * @return the palettized block states container, or null for pre-1.13 sections (see legacyBlockIds)
+  * @since {@link DataVersion#JAVA_1_13_17W47A}
+  */
+ public PalettizedCuboid getBlockStates() {
+ return blockStates;
+ }
+
+ /**
+ * @since {@link DataVersion#JAVA_1_13_17W47A}
+ */
+ public TerrainSectionBase setBlockStates(PalettizedCuboid