diff --git a/Allay-API/build.gradle.kts b/Allay-API/build.gradle.kts index 965449345..87a4d2bda 100644 --- a/Allay-API/build.gradle.kts +++ b/Allay-API/build.gradle.kts @@ -23,5 +23,11 @@ dependencies { api(libs.commonsio) api(libs.joml) api(libs.joml.primitives) - compileOnly(libs.libdeflate) + implementation(libs.libdeflate) +} + +tasks.processResources { + // input directory + from("${rootProject.projectDir}/Data") + include("mappings/") } \ No newline at end of file diff --git a/Allay-API/src/main/java/cn/allay/api/utils/VanillaBiomeIdUtils.java b/Allay-API/src/main/java/cn/allay/api/utils/VanillaBiomeIdUtils.java new file mode 100644 index 000000000..ab1e807f8 --- /dev/null +++ b/Allay-API/src/main/java/cn/allay/api/utils/VanillaBiomeIdUtils.java @@ -0,0 +1,37 @@ +package cn.allay.api.utils; + +import cn.allay.api.data.VanillaBiomeId; +import cn.allay.api.identifier.Identifier; +import it.unimi.dsi.fastutil.ints.Int2ObjectOpenHashMap; +import lombok.experimental.UtilityClass; +import org.jetbrains.annotations.Nullable; + +import java.util.HashMap; + +/** + * Allay Project 8/8/2023 + * + * @author Cool_Loong + */ +@UtilityClass +public class VanillaBiomeIdUtils { + private final static Int2ObjectOpenHashMap<VanillaBiomeId> MAP1 = new Int2ObjectOpenHashMap<>(); + private final static HashMap<Identifier, VanillaBiomeId> MAP2 = new HashMap<>(); + + static { + for (var v : VanillaBiomeId.values()) { + MAP1.put(v.getId(), v); + MAP2.put(v.getIdentifier(), v); + } + } + + @Nullable + public VanillaBiomeId fromId(int id) { + return MAP1.get(id); + } + + @Nullable + public VanillaBiomeId fromIdentifier(Identifier identifier) { + return MAP2.get(identifier); + } +} diff --git a/Allay-API/src/main/java/cn/allay/api/world/palette/Palette.java b/Allay-API/src/main/java/cn/allay/api/world/palette/Palette.java index 0bb06ca73..14f599ef7 100644 --- a/Allay-API/src/main/java/cn/allay/api/world/palette/Palette.java +++ b/Allay-API/src/main/java/cn/allay/api/world/palette/Palette.java @@ -6,12 +6,12 @@ import io.netty.buffer.ByteBuf; import io.netty.buffer.ByteBufInputStream; import io.netty.buffer.ByteBufOutputStream; -import it.unimi.dsi.fastutil.objects.ReferenceArrayList; import lombok.EqualsAndHashCode; import org.cloudburstmc.nbt.*; import org.cloudburstmc.protocol.common.util.VarInts; import java.io.IOException; +import java.util.ArrayList; import java.util.List; import java.util.function.Function; @@ -32,7 +32,7 @@ public Palette(V first) { public Palette(V first, BitArrayVersion version) { this.bitArray = version.createArray(Chunk.SECTION_SIZE); - this.palette = new ReferenceArrayList<>(16); + this.palette = new ArrayList<>(16); this.palette.add(first); } diff --git a/Allay-Server/src/main/java/cn/allay/server/world/storage/anvil/AnvilRegionFile.java b/Allay-API/src/main/java/cn/allay/api/world/storage/AnvilRegionFile.java similarity index 73% rename from Allay-Server/src/main/java/cn/allay/server/world/storage/anvil/AnvilRegionFile.java rename to Allay-API/src/main/java/cn/allay/api/world/storage/AnvilRegionFile.java index a337f8ea4..8e8e4222b 100644 --- a/Allay-Server/src/main/java/cn/allay/server/world/storage/anvil/AnvilRegionFile.java +++ b/Allay-API/src/main/java/cn/allay/api/world/storage/AnvilRegionFile.java @@ -1,16 +1,19 @@ -package cn.allay.server.world.storage.anvil; +package cn.allay.api.world.storage; import cn.allay.api.zlib.CompressionType; import cn.allay.api.zlib.ZlibProviderType; -import io.netty.buffer.ByteBuf; -import io.netty.buffer.ByteBufAllocator; import it.unimi.dsi.fastutil.ints.IntArrayList; import 
org.cloudburstmc.nbt.NBTInputStream; import org.cloudburstmc.nbt.NBTOutputStream; import org.cloudburstmc.nbt.NbtMap; import org.cloudburstmc.nbt.NbtUtils; +import org.jetbrains.annotations.NotNull; +import org.jetbrains.annotations.Range; -import java.io.*; +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.EOFException; +import java.io.IOException; import java.nio.ByteBuffer; import java.nio.IntBuffer; import java.nio.channels.FileChannel; @@ -49,7 +52,8 @@ public AnvilRegionFile(Path region, int regionX, int regionZ) throws IOException //Set the pointer in the file header channel.position(0); - if (this.channel.size() < HEADER_LENGTH) { // new file, fill in data + // new file, fill in data + if (this.channel.size() < HEADER_LENGTH) { // If the file is empty, initialize the 8K Byte data if (channel.size() == 0) { channel.write(new ByteBuffer[]{EMPTY_SECTOR.duplicate(), EMPTY_SECTOR.duplicate()}); @@ -65,11 +69,10 @@ public AnvilRegionFile(Path region, int regionX, int regionZ) throws IOException usedSectors.set(0, 2); // read chunk locations + channel.position(0); ByteBuffer locations = ByteBuffer.allocate(SECTOR_SIZE * 2);//8K Bytes - while (locations.hasRemaining()) { - if (channel.read(locations) == -1) { - throw new EOFException(); - } + if (channel.read(locations) == -1) { + throw new EOFException(); } // `locations` buffer to complete the preparation for reading locations.flip(); @@ -78,12 +81,13 @@ public AnvilRegionFile(Path region, int regionX, int regionZ) throws IOException for (int i = 0; i < MAX_ENTRY_COUNT; i++) { int loc = ints.get(); this.locations.add(loc); + // mark already allocated sectors as taken. // loc 0 means the chunk is *not* stored in the file int index = sectorIndex(loc); int count = sectorCount(loc); - if (loc != 0 && index >= 0 && index + count <= availableSectors) { - usedSectors.set(index, index + count + 1); + if (loc != 0 && index + count <= availableSectors) { + usedSectors.set(index, index + count); } } for (int i = 0; i < MAX_ENTRY_COUNT; i++) { @@ -91,34 +95,32 @@ public AnvilRegionFile(Path region, int regionX, int regionZ) throws IOException } } - public synchronized NbtMap readChunkData(int chunkX, int chunkZ) throws IOException { + @NotNull + public synchronized NbtMap readChunkData(@Range(from = 0, to = 31) int chunkX, @Range(from = 0, to = 31) int chunkZ) throws IOException { int loc = this.locations.getInt(index(chunkX, chunkZ)); if (loc == 0) { return NbtMap.EMPTY; } - int fileOffset = sectorIndex(loc) * SECTOR_SIZE; + long fileOffset = (long) sectorIndex(loc) * SECTOR_SIZE; int sectorCount = sectorCount(loc); // Seek to the sector position in file channel. 
this.channel.position(fileOffset); // The number of bytes occupied by the chunk is allocated according to the number of sectors int bytes = sectorCount * SECTOR_SIZE; - ByteBuf buffer = ByteBufAllocator.DEFAULT.ioBuffer(bytes); + ByteBuffer buffer = ByteBuffer.allocate(bytes); // Read data from the file channel to the buffer - while (buffer.writerIndex() < bytes) { - int written = this.channel.read(buffer.internalNioBuffer(buffer.writerIndex(), buffer.writableBytes())); - if (written == -1) { - throw new EOFException(); - } - buffer.writerIndex(buffer.writerIndex() + written); + int written = this.channel.read(buffer); + if (written == -1) { + throw new EOFException(); } - // Read the chunk data length bits - int length = buffer.readInt(); - ByteBuf chunk = buffer.readSlice(length); + buffer.flip(); // The first byte represent the compression type, and the remain of the data is raw chunk data - byte compressionType = chunk.readByte(); - byte[] input = new byte[chunk.readableBytes()]; + // Read the chunk data length + int length = buffer.getInt(); + byte compressionType = buffer.get(); + byte[] input = new byte[length]; + buffer.get(input); byte[] output; - chunk.readBytes(input); output = switch (compressionType) { case GZIP_COMPRESSION -> ZlibProviderType.LibDeflateThreadLocal.of(CompressionType.GZIP, 6).inflate(input, CHUNK_SIZE_LIMIT); @@ -126,19 +128,27 @@ public synchronized NbtMap readChunkData(int chunkX, int chunkZ) throws IOExcept ZlibProviderType.LibDeflateThreadLocal.of(CompressionType.ZLIB, 6).inflate(input, CHUNK_SIZE_LIMIT); default -> throw new IllegalArgumentException("Unknown compression type: " + compressionType); }; - NBTInputStream reader = NbtUtils.createReader(new BufferedInputStream(new ByteArrayInputStream(output))); + NBTInputStream reader = NbtUtils.createReader(new ByteArrayInputStream(output)); return (NbtMap) reader.readTag(); } - public synchronized void writeChunk(int chunkX, int chunkZ, NbtMap chunkData) throws IOException { + /** + * Write chunk. 
+ * + * @param chunkX the chunk x + * @param chunkZ the chunk z + * @param chunkData the chunk data + * @throws IOException the io exception + */ + public synchronized void writeChunk(@Range(from = 0, to = 31) int chunkX, @Range(from = 0, to = 31) int chunkZ, NbtMap chunkData) throws IOException { // Convert chunk data to byte stream and compress - ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); - NBTOutputStream writer = NbtUtils.createWriter(new BufferedOutputStream(byteArrayOutputStream)); + ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(CHUNK_SIZE_LIMIT); + NBTOutputStream writer = NbtUtils.createWriter(byteArrayOutputStream); writer.writeTag(chunkData); byte[] deflateData = ZlibProviderType.LibDeflateThreadLocal.of(CompressionType.ZLIB, 6).deflate(byteArrayOutputStream.toByteArray()); writer.close(); // Calculate the number of sector needed for the chunk - int sectorCount = (deflateData.length + CHUNK_HEADER_LENGTH) / SECTOR_SIZE; + int sectorCount = (int) Math.ceil((double) (deflateData.length + CHUNK_HEADER_LENGTH) / SECTOR_SIZE); // The maximum sector count of a chunk can use 256 sector, that is to say, the maximum size of a chunk is 256 * 4096 = 1M if (sectorCount >= SECTOR_COUNT_PER1M) { throw new IllegalArgumentException("Writing this chunk would take too many sectors (limit is 255, but " + sectorCount + " is needed)"); @@ -159,11 +169,12 @@ public synchronized void writeChunk(int chunkX, int chunkZ, NbtMap chunkData) th if (sectorStartCount == -1) { var eof = channel.size(); position = eof; - sectorStartCount = (int) eof / SECTOR_SIZE; + sectorStartCount = (int) (eof / SECTOR_SIZE); // fill up sectors + ByteBuffer byteBuffer = ByteBuffer.allocateDirect(SECTOR_SIZE); for (int i = 0; i < sectorCount; i++) { channel.position(eof + i * SECTOR_SIZE); - channel.write(ByteBuffer.allocate(SECTOR_SIZE)); + channel.write(byteBuffer); } appendToEnd = true; } else { @@ -187,19 +198,20 @@ public synchronized void writeChunk(int chunkX, int chunkZ, NbtMap chunkData) th } // Update and write locations and timestamps - locations.set(index, buildLocation(sectorStartCount, sectorCount)); - timestamps.set(index, Long.valueOf(System.currentTimeMillis()).intValue()); - ByteBuffer location = ByteBuffer.allocateDirect(4);//int - header.putInt(locations.getInt(index)); - header.flip(); - ByteBuffer timestamp = ByteBuffer.allocateDirect(4);//int - header.putInt(timestamps.getInt(index)); - header.flip(); - channel.write(location, index * 4L); - channel.write(timestamp, index * 4L + 4096); - + int loc = buildLocation(sectorStartCount, sectorCount); + int time = Long.valueOf(System.currentTimeMillis()).intValue(); + locations.set(index, loc); + timestamps.set(index, time); + ByteBuffer location = ByteBuffer.allocate(4);//int + location.putInt(loc); + location.flip(); + ByteBuffer timestamp = ByteBuffer.allocate(4);//int + timestamp.putInt(time); + timestamp.flip(); + System.out.println(channel.write(location, index * 4L)); + System.out.println(channel.write(timestamp, index * 4L + 4096)); // the data has been written, now free previous storage - usedSectors.set(previousSectorStart, previousSectorStart + previousSectorCount + 1, false); + usedSectors.set(previousSectorStart, previousSectorStart + previousSectorCount, false); } /** @@ -208,9 +220,12 @@ public synchronized void writeChunk(int chunkX, int chunkZ, NbtMap chunkData) th */ private int findAvailableSectors(int sectorCount) { for (int start = 0; start < usedSectors.size() - sectorCount; start++) 
{ - boolean found = false; + boolean found = true; for (int i = 0; i < sectorCount; i++) { - found = !usedSectors.get(i + start); + if (usedSectors.get(i + start)) { + found = false; + break; + } } if (found) { return start; @@ -221,9 +236,9 @@ private int findAvailableSectors(int sectorCount) { private void alignment4K() throws IOException { // file is not a multiple of 4kib, add padding - long missingPadding = channel.size() % SECTOR_SIZE; + int missingPadding = (int) (channel.size() % SECTOR_SIZE); if (missingPadding > 0) { - channel.write(ByteBuffer.allocate((int) (SECTOR_SIZE - missingPadding))); + channel.write(ByteBuffer.allocate(SECTOR_SIZE - missingPadding)); } } diff --git a/Allay-API/src/main/java/cn/allay/api/zlib/JavaZibThreadLocal.java b/Allay-API/src/main/java/cn/allay/api/zlib/JavaZibThreadLocal.java new file mode 100644 index 000000000..443926b95 --- /dev/null +++ b/Allay-API/src/main/java/cn/allay/api/zlib/JavaZibThreadLocal.java @@ -0,0 +1,101 @@ +package cn.allay.api.zlib; + +import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream; + +import java.io.IOException; +import java.util.zip.DataFormatException; +import java.util.zip.Deflater; +import java.util.zip.Inflater; + +/** + * Allay Project 2023/6/6 + * + * @author Cool_Loong + */ +public final class JavaZibThreadLocal implements ZlibProvider { + private static final ThreadLocal<FastByteArrayOutputStream> FBAO = ThreadLocal.withInitial(() -> new FastByteArrayOutputStream(1024)); + private static final ThreadLocal<byte[]> BUFFER = ThreadLocal.withInitial(() -> new byte[8192]); + private int level; + private CompressionType type; + private final ThreadLocal<Inflater> INFLATER = ThreadLocal.withInitial(Inflater::new); + private final ThreadLocal<Deflater> DEFLATER = ThreadLocal.withInitial(() -> new Deflater(level)); + + JavaZibThreadLocal(CompressionType type, int level) { + this.type = type; + this.level = level; + } + + @Override + public void setCompressionType(CompressionType type) { + this.type = type; + } + + @Override + public void setCompressionLevel(int level) { + this.level = level; + } + + @Override + public byte[] deflate(byte[] data) throws IOException { + try (var bos = FBAO.get()) { + if (type == CompressionType.GZIP) { + throw new UnsupportedOperationException(this.getClass().getSimpleName() + " dont support GZIP"); + } else { + Deflater deflater = DEFLATER.get(); + try { + deflater.reset(); + deflater.setInput(data); + deflater.finish(); + bos.reset(); + byte[] buffer = BUFFER.get(); + int length = 0; + while (!deflater.finished()) { + int i = deflater.deflate(buffer); + bos.write(buffer, 0, i); + length += i; + } + byte[] output = new byte[length]; + System.arraycopy(bos.array, 0, output, 0, length); + return output; + } finally { + deflater.reset(); + } + } + } + } + + @Override + public byte[] inflate(byte[] data, int maxSize) throws IOException { + try (var bos = FBAO.get()) { + if (type == CompressionType.GZIP) { + throw new UnsupportedOperationException(this.getClass().getSimpleName() + " dont support GZIP"); + } else { + Inflater inflater = INFLATER.get(); + try { + inflater.reset(); + inflater.setInput(data); + bos.reset(); + byte[] buffer = BUFFER.get(); + try { + int length = 0; + while (!inflater.finished()) { + int i = inflater.inflate(buffer); + length += i; + if (maxSize > 0 && length > maxSize) { + throw new IOException("Inflated data exceeds maximum size"); + } + bos.write(buffer, 0, i); + } + byte[] output = new byte[length]; + System.arraycopy(bos.array, 0, output, 0, length); + return output; + } catch (DataFormatException e) { + throw 
new IOException("Unable to inflate zlib stream", e); + } + } finally { + inflater.end(); + } + } + } + } +} diff --git a/Allay-API/src/main/java/cn/allay/api/zlib/LibDeflateThreadLocal.java b/Allay-API/src/main/java/cn/allay/api/zlib/LibDeflateThreadLocal.java index 638c7a5f9..09771e72d 100644 --- a/Allay-API/src/main/java/cn/allay/api/zlib/LibDeflateThreadLocal.java +++ b/Allay-API/src/main/java/cn/allay/api/zlib/LibDeflateThreadLocal.java @@ -1,5 +1,6 @@ package cn.allay.api.zlib; + import cn.powernukkitx.libdeflate.LibdeflateCompressor; import cn.powernukkitx.libdeflate.LibdeflateDecompressor; @@ -36,7 +37,8 @@ public void setCompressionLevel(int level) { public byte[] deflate(byte[] data) throws IOException { try (LibdeflateCompressor deflater = DEFLATER.get()) { var t = type == CompressionType.ZLIB ? cn.powernukkitx.libdeflate.CompressionType.ZLIB : cn.powernukkitx.libdeflate.CompressionType.GZIP; - byte[] buffer = deflater.getCompressBound(data.length, t) < 8192 ? new byte[8192] : new byte[data.length]; + int compressUpperBound = (int) deflater.getCompressBound(data.length, t); + byte[] buffer = new byte[compressUpperBound]; int compressedSize = deflater.compress(data, buffer, t); byte[] output = new byte[compressedSize]; System.arraycopy(buffer, 0, output, 0, compressedSize); @@ -53,8 +55,8 @@ public byte[] inflate(byte[] data, int maxSize) throws IOException { try (LibdeflateDecompressor pnxInflater = PNX_INFLATER.get()) { byte[] buffer = new byte[maxSize]; try { - var result = pnxInflater.decompressUnknownSize(data, 0, data.length, buffer, 0, buffer.length, t); - if (maxSize > 0 && result >= maxSize) { + var result = pnxInflater.decompressUnknownSize(data, 0, data.length, buffer, 0, maxSize, t); + if (maxSize > 0 && result > maxSize) { throw new IOException("Inflated data exceeds maximum size"); } byte[] output = new byte[(int) result]; diff --git a/Allay-API/src/main/java/cn/allay/api/zlib/ZlibProviderType.java b/Allay-API/src/main/java/cn/allay/api/zlib/ZlibProviderType.java index 8e02dc63d..45a5e7808 100644 --- a/Allay-API/src/main/java/cn/allay/api/zlib/ZlibProviderType.java +++ b/Allay-API/src/main/java/cn/allay/api/zlib/ZlibProviderType.java @@ -15,7 +15,7 @@ public ZlibProvider of(CompressionType type, int level) { return new LibDeflateThreadLocal(type, level); } default -> { - return new ZlibThreadLocal(type, level); + return new JavaZibThreadLocal(type, level); } } } diff --git a/Allay-API/src/main/java/cn/allay/api/zlib/ZlibThreadLocal.java b/Allay-API/src/main/java/cn/allay/api/zlib/ZlibThreadLocal.java deleted file mode 100644 index 6392f71a2..000000000 --- a/Allay-API/src/main/java/cn/allay/api/zlib/ZlibThreadLocal.java +++ /dev/null @@ -1,84 +0,0 @@ -package cn.allay.api.zlib; - -import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream; - -import java.io.IOException; -import java.util.zip.DataFormatException; -import java.util.zip.Deflater; -import java.util.zip.Inflater; - -/** - * Allay Project 2023/6/6 - * - * @author Cool_Loong - */ -public final class ZlibThreadLocal implements ZlibProvider { - private static final ThreadLocal FBAO = ThreadLocal.withInitial(() -> new FastByteArrayOutputStream(1024)); - private static final ThreadLocal BUFFER = ThreadLocal.withInitial(() -> new byte[8192]); - private int level; - private CompressionType type; - private final ThreadLocal INFLATER = ThreadLocal.withInitial(() -> new Inflater(type == CompressionType.GZIP)); - private final ThreadLocal DEFLATER = ThreadLocal.withInitial(() -> new Deflater(level, type == 
CompressionType.GZIP)); - - ZlibThreadLocal(CompressionType type, int level) { - this.type = type; - this.level = level; - } - - @Override - public void setCompressionType(CompressionType type) { - this.type = type; - } - - @Override - public void setCompressionLevel(int level) { - this.level = level; - } - - @Override - public byte[] deflate(byte[] data) throws IOException { - Deflater deflater = DEFLATER.get(); - FastByteArrayOutputStream bos = FBAO.get(); - try { - deflater.reset(); - deflater.setInput(data); - deflater.finish(); - bos.reset(); - byte[] buffer = BUFFER.get(); - while (!deflater.finished()) { - int i = deflater.deflate(buffer); - bos.write(buffer, 0, i); - } - } finally { - deflater.reset(); - } - return bos.array; - } - - @Override - public byte[] inflate(byte[] data, int maxSize) throws IOException { - Inflater inflater = INFLATER.get(); - try (FastByteArrayOutputStream bos = FBAO.get()) { - inflater.reset(); - inflater.setInput(data); - bos.reset(); - byte[] buffer = BUFFER.get(); - try { - int length = 0; - while (!inflater.finished()) { - int i = inflater.inflate(buffer); - length += i; - if (maxSize > 0 && length >= maxSize) { - throw new IOException("Inflated data exceeds maximum size"); - } - bos.write(buffer, 0, i); - } - return bos.array; - } catch (DataFormatException e) { - throw new IOException("Unable to inflate zlib stream", e); - } - } finally { - inflater.end(); - } - } -} diff --git a/Allay-Server/build.gradle.kts b/Allay-Server/build.gradle.kts index 703989e38..412141208 100644 --- a/Allay-Server/build.gradle.kts +++ b/Allay-Server/build.gradle.kts @@ -28,6 +28,7 @@ tasks.processResources { from("${rootProject.projectDir}/Data") // exclude unpacked folder and block palette.nbt exclude("**/unpacked/**") + exclude("**/mappings/**") } //disable ,please use `java -jar` to start directly diff --git a/Allay-Server/src/main/java/cn/allay/server/world/storage/anvil/AnvilWorldStorage.java b/Allay-Server/src/main/java/cn/allay/server/world/storage/anvil/AnvilWorldStorage.java index f32e19679..72581b074 100644 --- a/Allay-Server/src/main/java/cn/allay/server/world/storage/anvil/AnvilWorldStorage.java +++ b/Allay-Server/src/main/java/cn/allay/server/world/storage/anvil/AnvilWorldStorage.java @@ -6,6 +6,7 @@ import cn.allay.api.world.chunk.Chunk; import cn.allay.api.world.gamerule.GameRule; import cn.allay.api.world.gamerule.GameRules; +import cn.allay.api.world.storage.AnvilRegionFile; import cn.allay.api.world.storage.NativeFileWorldStorage; import cn.allay.api.world.storage.WorldStorageException; import it.unimi.dsi.fastutil.longs.Long2ObjectMap; diff --git a/Allay-Server/src/test/java/cn/allay/api/zlib/ZlibTest.java b/Allay-Server/src/test/java/cn/allay/api/zlib/ZlibTest.java new file mode 100644 index 000000000..d94b52d69 --- /dev/null +++ b/Allay-Server/src/test/java/cn/allay/api/zlib/ZlibTest.java @@ -0,0 +1,81 @@ +package cn.allay.api.zlib; + +import org.junit.jupiter.api.Assertions; +import org.junit.jupiter.api.Test; + +import java.io.IOException; + +/** + * Allay Project 8/9/2023 + * + * @author Cool_Loong + */ +public class ZlibTest { + final ZlibProvider JAVA_ZLIB = ZlibProviderType.JavaZlibThreadLocal.of(CompressionType.ZLIB, 6); + final ZlibProvider LIB_DEFLATE_ZLIB = ZlibProviderType.LibDeflateThreadLocal.of(CompressionType.ZLIB, 6); + final ZlibProvider LIB_DEFLATE_GZIP = ZlibProviderType.LibDeflateThreadLocal.of(CompressionType.GZIP, 6); + + final byte[] TEST_DATA = new byte[]{122, 1, -23, 34, 123, 35, 65, 78, 91, 51, -12, 32, -4, 5, -65, -123, 
12, 32, 45, 94, 123}; + final byte[] HELLO_WORLD_ZLIB = new byte[]{120, -100, -13, 72, -51, -55, -55, 87, 8, -49, 47, -54, 73, 81, 4, 0, 28, 73, 4, 62}; + final byte[] HELLO_WORLD_GZIP = new byte[]{31, -117, 8, 0, 0, 0, 0, 0, 0, -1, 1, 12, 0, -13, -1, 72, 101, 108, 108, 111, 32, 87, 111, 114, 108, 100, 33, -93, 28, 41, 28, 12, 0, 0, 0}; + + @Test + void testCompressZlib() { + try { + byte[] deflate = JAVA_ZLIB.deflate(TEST_DATA); + byte[] inflate = JAVA_ZLIB.inflate(deflate, TEST_DATA.length); + Assertions.assertArrayEquals(TEST_DATA, inflate); + } catch (IOException e) { + throw new RuntimeException(e); + } + + try { + byte[] deflate = LIB_DEFLATE_ZLIB.deflate(TEST_DATA); + byte[] inflate = LIB_DEFLATE_ZLIB.inflate(deflate, TEST_DATA.length); + Assertions.assertArrayEquals(TEST_DATA, inflate); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Test + void testDecompressZlib() { + try { + byte[] inflate = JAVA_ZLIB.inflate(HELLO_WORLD_ZLIB, HELLO_WORLD_ZLIB.length); + String s = new String(inflate); + Assertions.assertEquals("Hello World!", s); + } catch (IOException e) { + throw new RuntimeException(e); + } + + try { + byte[] inflate = LIB_DEFLATE_ZLIB.inflate(HELLO_WORLD_ZLIB, HELLO_WORLD_ZLIB.length); + String s = new String(inflate); + Assertions.assertEquals("Hello World!", s); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Test + void testCompressGZip() { + try { + byte[] deflate = LIB_DEFLATE_GZIP.deflate(TEST_DATA); + byte[] inflate = LIB_DEFLATE_GZIP.inflate(deflate, TEST_DATA.length); + Assertions.assertArrayEquals(TEST_DATA, inflate); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + @Test + void testDecompressGZip() { + try { + byte[] inflate = LIB_DEFLATE_GZIP.inflate(HELLO_WORLD_GZIP, HELLO_WORLD_GZIP.length); + String s = new String(inflate); + Assertions.assertEquals("Hello World!", s); + } catch (IOException e) { + throw new RuntimeException(e); + } + } +} diff --git a/Allay-Server/src/test/java/cn/allay/server/world/AnvilRegionFileTest.java b/Allay-Server/src/test/java/cn/allay/server/world/AnvilRegionFileTest.java index e6c8fed88..513d4308d 100644 --- a/Allay-Server/src/test/java/cn/allay/server/world/AnvilRegionFileTest.java +++ b/Allay-Server/src/test/java/cn/allay/server/world/AnvilRegionFileTest.java @@ -1,6 +1,6 @@ package cn.allay.server.world; -import cn.allay.server.world.storage.anvil.AnvilRegionFile; +import cn.allay.api.world.storage.AnvilRegionFile; import org.cloudburstmc.nbt.NbtMap; import org.junit.jupiter.api.Test; @@ -21,7 +21,11 @@ void loadMCA() throws IOException { NbtMap nbtMap = anvilRegionFile.readChunkData(0, 0); System.out.println(nbtMap.toSNBT(4)); } + @Test - void writeMCA() throws IOException { + void loadMCA1() throws IOException { + AnvilRegionFile anvilRegionFile = new AnvilRegionFile(regionPath, -1, -1); + NbtMap nbtMap = anvilRegionFile.readChunkData(31, 31); + System.out.println(nbtMap.toSNBT(4)); } } diff --git a/Allay-Server/src/test/resources/allayworld/region/r.-1.-1.mca b/Allay-Server/src/test/resources/allayworld/region/r.-1.-1.mca new file mode 100644 index 000000000..692ee10e7 Binary files /dev/null and b/Allay-Server/src/test/resources/allayworld/region/r.-1.-1.mca differ diff --git a/Allay-WorldConvert/build.gradle.kts b/Allay-WorldConvert/build.gradle.kts index a6c45fd87..6c3cdd7cd 100644 --- a/Allay-WorldConvert/build.gradle.kts +++ b/Allay-WorldConvert/build.gradle.kts @@ -39,6 +39,12 @@ tasks.startShadowScripts { enabled = false } +tasks.distTar { 
enabled = false } +tasks.distZip { enabled = false } +tasks.shadowDistTar { enabled = false } +tasks.shadowDistZip { enabled = false } + + tasks.shadowJar { transform(Log4j2PluginsCacheFileTransformer()) } \ No newline at end of file diff --git a/Allay-WorldConvert/src/main/java/cn/allay/worldconvert/ConvertFactory.java b/Allay-WorldConvert/src/main/java/cn/allay/worldconvert/ConvertFactory.java index 592276214..87a64df5b 100644 --- a/Allay-WorldConvert/src/main/java/cn/allay/worldconvert/ConvertFactory.java +++ b/Allay-WorldConvert/src/main/java/cn/allay/worldconvert/ConvertFactory.java @@ -1,17 +1,17 @@ package cn.allay.worldconvert; +import cn.allay.api.world.storage.AnvilRegionFile; import cn.allay.worldconvert.tasks.VanillaRegionConvertTask; import lombok.extern.slf4j.Slf4j; -import org.apache.commons.io.FileUtils; import org.jglrxavpok.hephaistos.mca.AnvilException; import org.jglrxavpok.hephaistos.mca.RegionFile; -import java.io.File; import java.io.IOException; import java.io.RandomAccessFile; import java.nio.file.Path; import java.util.Objects; import java.util.concurrent.ForkJoinPool; +import java.util.concurrent.TimeUnit; @Slf4j public class ConvertFactory { @@ -28,21 +28,19 @@ public static void of(Path input, Path output, Dimension dimension) { log.error("The region folder does not exist!"); WorldConverter.close(1); } - try { - File target = output.resolve("region").toFile(); - FileUtils.copyDirectory(files, target); - for (var file : Objects.requireNonNull(target.listFiles())) { - try { - String name = file.getName(); - String[] split = name.split("\\."); - RegionFile region = new RegionFile(new RandomAccessFile(file, "rw"), Integer.parseInt(split[1]), Integer.parseInt(split[2]), dimension.getDimensionInfo().minHeight(), dimension.getDimensionInfo().maxHeight()); - THREAD_POOL.submit(new VanillaRegionConvertTask(region, 1023)); - } catch (AnvilException | IOException e) { - throw new RuntimeException(e); - } + for (var f : Objects.requireNonNull(files.listFiles())) { + Path p = output.resolve("region"); + try { + String name = f.getName(); + String[] split = name.split("\\."); + RegionFile srcRegion = new RegionFile(new RandomAccessFile(f, "r"), Integer.parseInt(split[1]), Integer.parseInt(split[2]), dimension.getDimensionInfo().minHeight(), dimension.getDimensionInfo().maxHeight()); + AnvilRegionFile targetRegion = new AnvilRegionFile(p, Integer.parseInt(split[1]), Integer.parseInt(split[2])); + THREAD_POOL.execute(new VanillaRegionConvertTask(srcRegion, targetRegion)); + break; + } catch (AnvilException | IOException e) { + throw new RuntimeException(e); } - } catch (IOException e) { - throw new RuntimeException(e); } + THREAD_POOL.awaitQuiescence(1, TimeUnit.DAYS); } } diff --git a/Allay-WorldConvert/src/main/java/cn/allay/worldconvert/tasks/VanillaRegionConvertTask.java b/Allay-WorldConvert/src/main/java/cn/allay/worldconvert/tasks/VanillaRegionConvertTask.java index 159f75cd4..1b52b23d5 100644 --- a/Allay-WorldConvert/src/main/java/cn/allay/worldconvert/tasks/VanillaRegionConvertTask.java +++ b/Allay-WorldConvert/src/main/java/cn/allay/worldconvert/tasks/VanillaRegionConvertTask.java @@ -1,17 +1,17 @@ package cn.allay.worldconvert.tasks; import cn.allay.api.data.VanillaBiomeId; -import cn.allay.api.data.VanillaBlockTypes; import cn.allay.api.identifier.Identifier; import cn.allay.api.mapping.Mapping; +import cn.allay.api.utils.VanillaBiomeIdUtils; import cn.allay.api.world.chunk.Chunk; import cn.allay.api.world.palette.Palette; +import 
cn.allay.api.world.storage.AnvilRegionFile; import cn.allay.worldconvert.utils.MappingUtils; import org.cloudburstmc.nbt.NbtMap; import org.cloudburstmc.nbt.NbtMapBuilder; +import org.cloudburstmc.nbt.NbtType; import org.jglrxavpok.hephaistos.mca.*; -import org.jglrxavpok.hephaistos.nbt.NBT; -import org.jglrxavpok.hephaistos.nbt.NBTCompound; import java.io.IOException; import java.util.ArrayList; @@ -25,77 +25,83 @@ * @author Cool_Loong */ public class VanillaRegionConvertTask extends RecursiveAction { - final RegionFile regionFile; - int index; - int x; - int z; + private final static Integer AIR_BLOCK_STATE_HASH = -604749536; + final RegionFile srcRegion; + final AnvilRegionFile targetRegion; - public VanillaRegionConvertTask(RegionFile regionFile, int index) { - this.regionFile = regionFile; - this.index = index; - this.x = index >>> 5; - this.z = index & 0x0000001F; + public VanillaRegionConvertTask(RegionFile srcRegion, AnvilRegionFile targetRegion) { - this.srcRegion = srcRegion; + this.srcRegion = srcRegion; + this.targetRegion = targetRegion; } @Override protected void compute() { - if (index > 0) { - if (index == 1) { - invokeAll(new VanillaRegionConvertTask(regionFile, --index)); - } else { - invokeAll(new VanillaRegionConvertTask(regionFile, --index), new VanillaRegionConvertTask(regionFile, --index)); - } - } - try { - ChunkColumn chunk = regionFile.getChunk(x, z); - if (chunk != null && chunk.getGenerationStatus() == ChunkColumn.GenerationStatus.Full) { - NBTCompound compound = NBT.Compound(builder -> { - builder.setString("Status", chunk.getGenerationStatus().name()) - .setInt("xPos", chunk.getX()) - .setInt("zPos", chunk.getZ()) - .set("Heightmaps", NBT.Compound(builder2 -> builder2.setLongArray("WORLD_SURFACE", chunk.getWorldSurfaceHeightMap().compact().copyArray()))); - List sections = new ArrayList<>(); - for (int i = (regionFile.getMinY() << 4); i < (regionFile.getMaxY() << 4); i++) { - NbtMapBuilder sectionNBT = NbtMap.builder(); - ChunkSection section = chunk.getSection((byte) i); - Palette<Integer> blockStatesPalette = new Palette<>(VanillaBlockTypes.AIR_TYPE.getDefaultState().blockStateHash()); - Palette<Identifier> biomePalette = new Palette<>(VanillaBiomeId.PLAINS.getIdentifier()); - for (int x = 0; x < 16; ++x) { - for (int y = 0; y < 16; ++y) { - for (int z = 0; z < 16; ++z) { - BlockState blockState; - try { - blockState = section.get(x, y, z); - } catch (AnvilException e) { - throw new RuntimeException(e); + for (int i = 0; i < 32; i++) { + for (int j = 0; j < 32; j++) { + int cx = i + (srcRegion.getRegionX() * 32), cz = j + (srcRegion.getRegionZ() * 32); + ChunkColumn chunk; + try { + chunk = srcRegion.getChunk(cx, cz); + } catch (AnvilException | IOException e) { + throw new RuntimeException(e); + } + if (chunk != null && chunk.getGenerationStatus() == ChunkColumn.GenerationStatus.Full) { + NbtMapBuilder builder = NbtMap.builder(); + try { + builder.putString("Status", chunk.getGenerationStatus().name()) + .putInt("xPos", chunk.getX()) + .putInt("zPos", chunk.getZ()) + .put("Heightmaps", NbtMap.builder().putLongArray("WORLD_SURFACE", chunk.getWorldSurfaceHeightMap().compact().copyArray()).build()); + List<NbtMap> sections = new ArrayList<>(); + for (int sectionY = (srcRegion.getMinY() << 4); sectionY < (srcRegion.getMaxY() << 4); sectionY++) { + NbtMapBuilder sectionNBT = NbtMap.builder(); + ChunkSection section = chunk.getSection((byte) sectionY); + if (section.getEmpty()) break; + Palette<Integer> blockStatesPalette = new Palette<>(AIR_BLOCK_STATE_HASH); + Palette<Identifier> biomePalette = new 
Palette<>(VanillaBiomeId.PLAINS.getIdentifier()); + for (int x = 0; x < 16; ++x) { + for (int y = 0; y < 16; ++y) { + for (int z = 0; z < 16; ++z) { + BlockState blockState; + try { + blockState = section.get(x, y, z); + } catch (AnvilException e) { + throw new RuntimeException(e); + } + Integer beBlockStateHash = Mapping.getBeBlockStateHash(MappingUtils.convertBlockState(blockState)); + Integer beBiomeId = Mapping.getBeBiomeId(section.getBiome(x, y, z)); + Identifier biome = beBiomeId == null ? VanillaBiomeId.values()[1].getIdentifier() : VanillaBiomeIdUtils.fromId(beBiomeId).getIdentifier(); + int blockStateHash = beBlockStateHash == null ? AIR_BLOCK_STATE_HASH : beBlockStateHash; + int index = Chunk.index(x, y, z); + biomePalette.set(index, biome); + blockStatesPalette.set(index, blockStateHash); } - Integer beBlockStateHash = Mapping.getBeBlockStateHash(MappingUtils.convertBlockState(blockState)); - Integer beBiomeId = Mapping.getBeBiomeId(section.getBiome(x, y, z)); - Identifier biome = beBiomeId == null ? VanillaBiomeId.values()[1].getIdentifier() : VanillaBiomeId.values()[beBiomeId].getIdentifier(); - int blockStateHash = beBlockStateHash == null ? VanillaBlockTypes.AIR_TYPE.getDefaultState().blockStateHash() : beBlockStateHash; - int index = Chunk.index(x, y, z); - biomePalette.set(index, biome); - blockStatesPalette.set(index, blockStateHash); } } + sectionNBT.putCompound("block_states", blockStatesPalette.toNBT(Function.identity())) + .putCompound("biomes", biomePalette.toNBT(Identifier::toString)).build(); + byte[] blockLights = section.getBlockLights(); + if (blockLights.length == 2048) { + sectionNBT.putByteArray("BlockLight", blockLights); + } + byte[] skyLights = section.getSkyLights(); + if (skyLights.length == 2048) { + sectionNBT.putByteArray("SkyLight", skyLights); + } + sections.add(sectionNBT.build()); } - sectionNBT.putCompound("block_states", blockStatesPalette.toNBT(Function.identity())) - .putCompound("biomes", biomePalette.toNBT(Identifier::toString)).build(); - byte[] blockLights = section.getBlockLights(); - if (blockLights.length == 2048) { - sectionNBT.putByteArray("BlockLight", blockLights); - } - byte[] skyLights = section.getSkyLights(); - if (skyLights.length == 2048) { - sectionNBT.putByteArray("SkyLight", skyLights); - } - sections.add(MappingUtils.convertNBT(sectionNBT.build())); + builder.putList("sections", NbtType.COMPOUND, sections); + } catch (Exception e) { + e.printStackTrace(); + } + NbtMap build = builder.build(); + try { + targetRegion.writeChunk(i, j, build); + } catch (IOException e) { + throw new RuntimeException(e); } - }); - regionFile.writeColumnData(compound, x, z); + } } - } catch (AnvilException | IOException e) { - throw new RuntimeException(e); } } } diff --git a/Allay-WorldConvert/src/main/resources/level.dat b/Allay-WorldConvert/src/main/resources/level.dat deleted file mode 100644 index be80bec49..000000000 Binary files a/Allay-WorldConvert/src/main/resources/level.dat and /dev/null differ diff --git a/build.gradle.kts b/build.gradle.kts index 9ede7bbc1..63924acdd 100644 --- a/build.gradle.kts +++ b/build.gradle.kts @@ -90,9 +90,10 @@ subprojects { tasks.withType { options.encoding = "UTF-8" + val javadocOptions = options as CoreJavadocOptions + javadocOptions.addStringOption("source", "20") + javadocOptions.addBooleanOption("-enable-preview", true) //Suppress some meaningless warnings - options { - (this as CoreJavadocOptions).addStringOption("Xdoclint:none", "-quiet") - } + javadocOptions.addStringOption("Xdoclint:none", 
"-quiet") } }