upgrade regions format version to 3

MihailRis 2024-09-06 12:25:52 +03:00
parent 69b90f53c3
commit e30c1b3c03
15 changed files with 138 additions and 149 deletions

View File

@ -1,6 +1,7 @@
# Menu
menu.missing-content=Missing Content!
world.convert-request=Content indices have changed! Convert world files?
world.upgrade-request=World format is outdated! Convert world files?
pack.remove-confirm=Do you want to erase all pack(s) content from the world forever?
error.pack-not-found=Could not find pack
error.dependency-not-found=Dependency pack is not found

View File

@ -46,6 +46,7 @@ world.generators.default=Обычный
world.generators.flat=Плоский
world.Create World=Создать Мир
world.convert-request=Есть изменения в индексах! Конвертировать мир?
world.upgrade-request=Формат мира устарел! Конвертировать мир?
world.delete-confirm=Удалить мир безвозвратно?
# Настройки

View File

@ -82,7 +82,12 @@ std::unique_ptr<ubyte[]> compression::decompress(
}
case Method::EXTRLE16: {
auto decompressed = std::make_unique<ubyte[]>(dstlen);
extrle::decode16(src, srclen, decompressed.get());
size_t decoded = extrle::decode16(src, srclen, decompressed.get());
if (decoded != dstlen) {
throw std::runtime_error(
"expected decompressed size " + std::to_string(dstlen) +
" got " + std::to_string(decoded));
}
return decompressed;
}
case Method::GZIP: {

View File

@ -17,7 +17,7 @@ inline constexpr bool ENGINE_DEBUG_BUILD = true;
inline const std::string ENGINE_VERSION_STRING = "0.23";
/// @brief world regions format version
inline constexpr uint REGION_FORMAT_VERSION = 2;
inline constexpr uint REGION_FORMAT_VERSION = 3;
/// @brief max simultaneously open world region files
inline constexpr uint MAX_OPEN_REGION_FILES = 32;

View File

@ -55,7 +55,9 @@ std::shared_ptr<ContentReport> ContentReport::create(
report->items.setup(itemlist.get(), content->items);
report->buildIssues();
if (report->hasContentReorder() || report->hasMissingContent()) {
if (report->isUpgradeRequired() ||
report->hasContentReorder() ||
report->hasMissingContent()) {
return report;
} else {
return nullptr;
@ -79,6 +81,16 @@ static void build_issues(
void ContentReport::buildIssues() {
build_issues(issues, blocks);
build_issues(issues, items);
if (regionsVersion < REGION_FORMAT_VERSION) {
for (int layer = REGION_LAYER_VOXELS;
layer < REGION_LAYERS_COUNT;
layer++) {
ContentIssue issue {ContentIssueType::REGION_FORMAT_UPDATE};
issue.regionLayer = static_cast<RegionLayerIndex>(layer);
issues.push_back(issue);
}
}
}
const std::vector<ContentIssue>& ContentReport::getIssues() const {
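
For reference, `isUpgradeRequired()` used above is presumably just a comparison of the report's stored region format version against the engine constant. A minimal sketch, assuming the `regionsVersion` member seen in `buildIssues()`:

```cpp
// Sketch only: assumes ContentReport stores the world's saved region format
// version in `regionsVersion` (as used in buildIssues() above) and compares
// it to REGION_FORMAT_VERSION from the engine constants.
bool ContentReport::isUpgradeRequired() const {
    return regionsVersion < REGION_FORMAT_VERSION;
}
```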

View File

@ -13,14 +13,14 @@ static fs::path get_region_filename(int x, int z) {
/// @brief Read missing chunks data (null pointers) from region file
static void fetch_chunks(WorldRegion* region, int x, int z, regfile* file) {
auto* chunks = region->getChunks();
uint32_t* sizes = region->getSizes();
auto sizes = region->getSizes();
for (size_t i = 0; i < REGION_CHUNKS_COUNT; i++) {
int chunk_x = (i % REGION_SIZE) + x * REGION_SIZE;
int chunk_z = (i / REGION_SIZE) + z * REGION_SIZE;
if (chunks[i] == nullptr) {
chunks[i] =
RegionsLayer::readChunkData(chunk_x, chunk_z, sizes[i], file);
chunks[i] = RegionsLayer::readChunkData(
chunk_x, chunk_z, sizes[i][0], sizes[i][1], file);
}
}
}
@ -44,23 +44,26 @@ regfile::regfile(fs::path filename) : file(std::move(filename)) {
}
}
std::unique_ptr<ubyte[]> regfile::read(int index, uint32_t& length) {
std::unique_ptr<ubyte[]> regfile::read(int index, uint32_t& size, uint32_t& srcSize) {
size_t file_size = file.length();
size_t table_offset = file_size - REGION_CHUNKS_COUNT * 4;
uint32_t offset;
uint32_t buff32;
file.seekg(table_offset + index * 4);
file.read(reinterpret_cast<char*>(&offset), 4);
offset = dataio::read_int32_big(reinterpret_cast<const ubyte*>(&offset), 0);
file.read(reinterpret_cast<char*>(&buff32), 4);
uint32_t offset = dataio::le2h(buff32);
if (offset == 0) {
return nullptr;
}
file.seekg(offset);
file.read(reinterpret_cast<char*>(&offset), 4);
length = dataio::read_int32_big(reinterpret_cast<const ubyte*>(&offset), 0);
auto data = std::make_unique<ubyte[]>(length);
file.read(reinterpret_cast<char*>(data.get()), length);
file.read(reinterpret_cast<char*>(&buff32), 4);
size = dataio::le2h(buff32);
file.read(reinterpret_cast<char*>(&buff32), 4);
srcSize = dataio::le2h(buff32);
auto data = std::make_unique<ubyte[]>(size);
file.read(reinterpret_cast<char*>(data.get()), size);
return data;
}
@ -150,7 +153,7 @@ WorldRegion* RegionsLayer::getOrCreateRegion(int x, int z) {
return region;
}
ubyte* RegionsLayer::getData(int x, int z, uint32_t& size) {
ubyte* RegionsLayer::getData(int x, int z, uint32_t& size, uint32_t& srcSize) {
int regionX, regionZ, localX, localZ;
calc_reg_coords(x, z, regionX, regionZ, localX, localZ);
@ -159,15 +162,17 @@ ubyte* RegionsLayer::getData(int x, int z, uint32_t& size) {
if (data == nullptr) {
auto regfile = getRegFile({regionX, regionZ});
if (regfile != nullptr) {
auto dataptr = readChunkData(x, z, size, regfile.get());
auto dataptr = readChunkData(x, z, size, srcSize, regfile.get());
if (dataptr) {
data = dataptr.get();
region->put(localX, localZ, std::move(dataptr), size);
region->put(localX, localZ, std::move(dataptr), size, srcSize);
}
}
}
if (data != nullptr) {
size = region->getChunkDataSize(localX, localZ);
auto sizevec = region->getChunkDataSize(localX, localZ);
size = sizevec[0];
srcSize = sizevec[1];
return data;
}
return nullptr;
@ -187,47 +192,50 @@ void RegionsLayer::writeRegion(int x, int z, WorldRegion* entry) {
char header[REGION_HEADER_SIZE] = REGION_FORMAT_MAGIC;
header[8] = REGION_FORMAT_VERSION;
header[9] = 0; // flags
header[9] = static_cast<ubyte>(compression); // FIXME
std::ofstream file(filename, std::ios::out | std::ios::binary);
file.write(header, REGION_HEADER_SIZE);
size_t offset = REGION_HEADER_SIZE;
char intbuf[4] {};
uint32_t intbuf;
uint offsets[REGION_CHUNKS_COUNT] {};
auto* region = entry->getChunks();
uint32_t* sizes = entry->getSizes();
auto region = entry->getChunks();
auto sizes = entry->getSizes();
for (size_t i = 0; i < REGION_CHUNKS_COUNT; i++) {
ubyte* chunk = region[i].get();
if (chunk == nullptr) {
offsets[i] = 0;
} else {
offsets[i] = offset;
size_t compressedSize = sizes[i];
dataio::write_int32_big(
compressedSize, reinterpret_cast<ubyte*>(intbuf), 0
);
offset += 4 + compressedSize;
file.write(intbuf, 4);
file.write(reinterpret_cast<const char*>(chunk), compressedSize);
continue;
}
offsets[i] = offset;
auto sizevec = sizes[i];
uint32_t compressedSize = sizevec[0];
uint32_t srcSize = sizevec[1];
intbuf = dataio::h2le(compressedSize);
file.write(reinterpret_cast<const char*>(&intbuf), 4);
offset += 4;
intbuf = dataio::h2le(srcSize);
file.write(reinterpret_cast<const char*>(&intbuf), 4);
offset += 4;
file.write(reinterpret_cast<const char*>(chunk), compressedSize);
offset += compressedSize;
}
for (size_t i = 0; i < REGION_CHUNKS_COUNT; i++) {
dataio::write_int32_big(
offsets[i], reinterpret_cast<ubyte*>(intbuf), 0
);
file.write(intbuf, 4);
intbuf = dataio::h2le(offsets[i]);
file.write(reinterpret_cast<const char*>(&intbuf), 4);
}
}
std::unique_ptr<ubyte[]> RegionsLayer::readChunkData(
int x, int z, uint32_t& length, regfile* rfile
int x, int z, uint32_t& size, uint32_t& srcSize, regfile* rfile
) {
int regionX, regionZ, localX, localZ;
calc_reg_coords(x, z, regionX, regionZ, localX, localZ);
int chunkIndex = localZ * REGION_SIZE + localX;
return rfile->read(chunkIndex, length);
return rfile->read(chunkIndex, size, srcSize);
}
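
Taken together, `regfile::read` and `RegionsLayer::writeRegion` define the version 3 on-disk layout: a 10-byte header, then per-chunk entries of `[compressed size u32 LE][source size u32 LE][data]`, and a 1024-entry little-endian offset table at the end of the file, where offset 0 means the chunk is absent. A standalone sketch of reading one entry, mirroring `regfile::read` above (helper names such as `le32` are illustrative, standing in for `dataio::le2h`):

```cpp
#include <cstdint>
#include <fstream>
#include <vector>

struct ChunkEntry {
    uint32_t size;              // compressed byte count stored on disk
    uint32_t srcSize;           // decompressed (source) byte count, new in format 3
    std::vector<uint8_t> data;  // compressed payload
};

// Hypothetical helper standing in for dataio::le2h.
static uint32_t le32(const uint8_t* p) {
    return p[0] | (p[1] << 8) | (p[2] << 16) | (uint32_t(p[3]) << 24);
}

// Reads one format-3 chunk entry; returns false if the chunk is not present.
bool readChunkEntrySketch(std::ifstream& file, size_t fileSize,
                          size_t chunkIndex, size_t chunksCount,
                          ChunkEntry& out) {
    uint8_t buf[4];
    // the offset table (one little-endian uint32 per chunk) sits at the end of the file
    file.seekg(fileSize - chunksCount * 4 + chunkIndex * 4);
    file.read(reinterpret_cast<char*>(buf), 4);
    uint32_t offset = le32(buf);
    if (offset == 0) {
        return false;
    }
    file.seekg(offset);
    file.read(reinterpret_cast<char*>(buf), 4);
    out.size = le32(buf);       // compressed size
    file.read(reinterpret_cast<char*>(buf), 4);
    out.srcSize = le32(buf);    // source size (the format 3 addition)
    out.data.resize(out.size);
    file.read(reinterpret_cast<char*>(out.data.data()), out.size);
    return true;
}
```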

View File

@ -167,7 +167,7 @@ void WorldConverter::upgradeRegion(
void WorldConverter::convertVoxels(const fs::path& file, int x, int z) const {
logger.info() << "converting voxels region " << x << "_" << z;
wfile->getRegions().processRegion(x, z, REGION_LAYER_VOXELS, CHUNK_DATA_LEN,
wfile->getRegions().processRegion(x, z, REGION_LAYER_VOXELS,
[=](std::unique_ptr<ubyte[]> data, uint32_t*) {
Chunk::convert(data.get(), report.get());
return data;
@ -238,8 +238,13 @@ bool WorldConverter::isActive() const {
}
void WorldConverter::write() {
logger.info() << "writing world";
wfile->write(nullptr, upgradeMode ? nullptr : content);
if (upgradeMode) {
logger.info() << "refreshing version";
wfile->patchIndicesVersion("region-version", REGION_FORMAT_VERSION);
} else {
logger.info() << "writing world";
wfile->write(nullptr, content);
}
}
void WorldConverter::waitForEnd() {

View File

@ -172,6 +172,17 @@ bool WorldFiles::readResourcesData(const Content* content) {
return true;
}
void WorldFiles::patchIndicesVersion(const std::string& field, uint version) {
fs::path file = getIndicesFile();
if (!fs::is_regular_file(file)) {
logger.error() << file.filename().u8string() << " does not exist";
return;
}
auto root = files::read_json(file);
root->put(field, version);
files::write_json(file, root.get(), true);
}
static void erase_pack_indices(dynamic::Map* root, const std::string& id) {
auto prefix = id + ":";
auto blocks = root->list("blocks");

View File

@ -51,6 +51,8 @@ public:
std::optional<WorldInfo> readWorldInfo();
bool readResourcesData(const Content* content);
void patchIndicesVersion(const std::string& field, uint version);
/// @brief Write all unsaved data to world files
/// @param world target world
/// @param content world content

View File

@ -14,7 +14,7 @@ WorldRegion::WorldRegion()
: chunksData(
std::make_unique<std::unique_ptr<ubyte[]>[]>(REGION_CHUNKS_COUNT)
),
sizes(std::make_unique<uint32_t[]>(REGION_CHUNKS_COUNT)) {
sizes(std::make_unique<glm::u32vec2[]>(REGION_CHUNKS_COUNT)) {
}
WorldRegion::~WorldRegion() = default;
@ -30,23 +30,23 @@ std::unique_ptr<ubyte[]>* WorldRegion::getChunks() const {
return chunksData.get();
}
uint32_t* WorldRegion::getSizes() const {
glm::u32vec2* WorldRegion::getSizes() const {
return sizes.get();
}
void WorldRegion::put(
uint x, uint z, std::unique_ptr<ubyte[]> data, uint32_t size
uint x, uint z, std::unique_ptr<ubyte[]> data, uint32_t size, uint32_t srcSize
) {
size_t chunk_index = z * REGION_SIZE + x;
chunksData[chunk_index] = std::move(data);
sizes[chunk_index] = size;
sizes[chunk_index] = glm::u32vec2(size, srcSize);
}
ubyte* WorldRegion::getChunkData(uint x, uint z) {
return chunksData[z * REGION_SIZE + x].get();
}
uint WorldRegion::getChunkDataSize(uint x, uint z) {
glm::u32vec2 WorldRegion::getChunkDataSize(uint x, uint z) {
return sizes[z * REGION_SIZE + x];
}
@ -56,7 +56,7 @@ WorldRegions::WorldRegions(const fs::path& directory) : directory(directory) {
}
auto& voxels = layers[REGION_LAYER_VOXELS];
voxels.folder = directory / fs::path("regions");
voxels.compression = compression::Method::EXTRLE8;
voxels.compression = compression::Method::EXTRLE16;
auto& lights = layers[REGION_LAYER_LIGHTS];
lights.folder = directory / fs::path("lights");
@ -85,8 +85,9 @@ void WorldRegions::put(
int z,
RegionLayerIndex layerid,
std::unique_ptr<ubyte[]> data,
size_t size
size_t srcSize
) {
size_t size = srcSize;
auto& layer = layers[layerid];
if (layer.compression != compression::Method::NONE) {
data = compression::compress(
@ -97,7 +98,7 @@ void WorldRegions::put(
WorldRegion* region = layer.getOrCreateRegion(regionX, regionZ);
region->setUnsaved(true);
region->put(localX, localZ, std::move(data), size);
region->put(localX, localZ, std::move(data), size, srcSize);
}
static std::unique_ptr<ubyte[]> write_inventories(
@ -188,30 +189,35 @@ void WorldRegions::put(Chunk* chunk, std::vector<ubyte> entitiesData) {
std::unique_ptr<ubyte[]> WorldRegions::getVoxels(int x, int z) {
uint32_t size;
uint32_t srcSize;
auto& layer = layers[REGION_LAYER_VOXELS];
auto* data = layer.getData(x, z, size);
auto* data = layer.getData(x, z, size, srcSize);
if (data == nullptr) {
return nullptr;
}
return compression::decompress(data, size, CHUNK_DATA_LEN, layer.compression);
assert(srcSize == CHUNK_DATA_LEN);
return compression::decompress(data, size, srcSize, layer.compression);
}
std::unique_ptr<light_t[]> WorldRegions::getLights(int x, int z) {
uint32_t size;
uint32_t srcSize;
auto& layer = layers[REGION_LAYER_LIGHTS];
auto* bytes = layer.getData(x, z, size);
auto* bytes = layer.getData(x, z, size, srcSize);
if (bytes == nullptr) {
return nullptr;
}
auto data = compression::decompress(
bytes, size, LIGHTMAP_DATA_LEN, layer.compression
bytes, size, srcSize, layer.compression
);
assert(srcSize == LIGHTMAP_DATA_LEN);
return Lightmap::decode(data.get());
}
chunk_inventories_map WorldRegions::fetchInventories(int x, int z) {
uint32_t bytesSize;
auto bytes = layers[REGION_LAYER_INVENTORIES].getData(x, z, bytesSize);
uint32_t srcSize;
auto bytes = layers[REGION_LAYER_INVENTORIES].getData(x, z, bytesSize, srcSize);
if (bytes == nullptr) {
return {};
}
@ -221,7 +227,7 @@ chunk_inventories_map WorldRegions::fetchInventories(int x, int z) {
void WorldRegions::processInventories(
int x, int z, const inventoryproc& func
) {
processRegion(x, z, REGION_LAYER_INVENTORIES, 0,
processRegion(x, z, REGION_LAYER_INVENTORIES,
[=](std::unique_ptr<ubyte[]> data, uint32_t* size) {
auto inventories = load_inventories(data.get(), *size);
for (const auto& [_, inventory] : inventories) {
@ -236,7 +242,8 @@ dynamic::Map_sptr WorldRegions::fetchEntities(int x, int z) {
return nullptr;
}
uint32_t bytesSize;
const ubyte* data = layers[REGION_LAYER_ENTITIES].getData(x, z, bytesSize);
uint32_t srcSize;
const ubyte* data = layers[REGION_LAYER_ENTITIES].getData(x, z, bytesSize, srcSize);
if (data == nullptr) {
return nullptr;
}
@ -248,7 +255,7 @@ dynamic::Map_sptr WorldRegions::fetchEntities(int x, int z) {
}
void WorldRegions::processRegion(
int x, int z, RegionLayerIndex layerid, uint32_t dataLen, const regionproc& func
int x, int z, RegionLayerIndex layerid, const regionproc& func
) {
auto& layer = layers[layerid];
if (layer.getRegion(x, z)) {
@ -263,24 +270,21 @@ void WorldRegions::processRegion(
int gx = cx + x * REGION_SIZE;
int gz = cz + z * REGION_SIZE;
uint32_t length;
uint32_t srcSize;
auto data =
RegionsLayer::readChunkData(gx, gz, length, regfile.get());
RegionsLayer::readChunkData(gx, gz, length, srcSize, regfile.get());
if (data == nullptr) {
continue;
}
uint32_t totalLength = dataLen;
if (layer.compression != compression::Method::NONE) {
if (dataLen == 0) {
throw std::invalid_argument("invalid data length");
}
data = compression::decompress(
data.get(), length, dataLen, layer.compression
data.get(), length, srcSize, layer.compression
);
} else {
totalLength = length;
srcSize = length;
}
if (auto writeData = func(std::move(data), &totalLength)) {
put(gx, gz, layerid, std::move(writeData), totalLength);
if (auto writeData = func(std::move(data), &srcSize)) {
put(gx, gz, layerid, std::move(writeData), srcSize);
}
}
}

View File

@ -38,21 +38,21 @@ public:
class WorldRegion {
std::unique_ptr<std::unique_ptr<ubyte[]>[]> chunksData;
std::unique_ptr<uint32_t[]> sizes;
std::unique_ptr<glm::u32vec2[]> sizes;
bool unsaved = false;
public:
WorldRegion();
~WorldRegion();
void put(uint x, uint z, std::unique_ptr<ubyte[]> data, uint32_t size);
void put(uint x, uint z, std::unique_ptr<ubyte[]> data, uint32_t size, uint32_t srcSize);
ubyte* getChunkData(uint x, uint z);
uint getChunkDataSize(uint x, uint z);
glm::u32vec2 getChunkDataSize(uint x, uint z);
void setUnsaved(bool unsaved);
bool isUnsaved() const;
std::unique_ptr<ubyte[]>* getChunks() const;
uint32_t* getSizes() const;
glm::u32vec2* getSizes() const;
};
struct regfile {
@ -63,7 +63,7 @@ struct regfile {
regfile(fs::path filename);
regfile(const regfile&) = delete;
std::unique_ptr<ubyte[]> read(int index, uint32_t& length);
std::unique_ptr<ubyte[]> read(int index, uint32_t& size, uint32_t& srcSize);
};
using regionsmap = std::unordered_map<glm::ivec2, std::unique_ptr<WorldRegion>>;
@ -152,9 +152,10 @@ struct RegionsLayer {
/// @brief Get chunk data. Read from file if not loaded yet.
/// @param x chunk x coord
/// @param z chunk z coord
/// @param size [out] chunk data length
/// @param size [out] compressed chunk data length
/// @param srcSize [out] source chunk data length
/// @return nullptr if no saved chunk data found
[[nodiscard]] ubyte* getData(int x, int z, uint32_t& size);
[[nodiscard]] ubyte* getData(int x, int z, uint32_t& size, uint32_t& srcSize);
/// @brief Write or rewrite region file
/// @param x region X
@ -167,11 +168,12 @@ struct RegionsLayer {
/// @brief Read chunk data from region file
/// @param x chunk x coord
/// @param z chunk z coord
/// @param length [out] chunk data length
/// @param size [out] compressed chunk data length
/// @param srcSize [out] source chunk data length
/// @param rfile region file
/// @return nullptr if chunk is not present in region file
[[nodiscard]] static std::unique_ptr<ubyte[]> readChunkData(
int x, int z, uint32_t& length, regfile* rfile
int x, int z, uint32_t& size, uint32_t& srcSize, regfile* rfile
);
};
@ -229,7 +231,7 @@ public:
/// @param layerid regions layer index
/// @param func processing callback
void processRegion(
int x, int z, RegionLayerIndex layerid, uint32_t dataLen, const regionproc& func);
int x, int z, RegionLayerIndex layerid, const regionproc& func);
void processInventories(
int x, int z, const inventoryproc& func);
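
The two out-parameters documented above flow straight into `compression::decompress`; a hypothetical call site (assuming `layer`, `x` and `z` are in scope, mirroring `getVoxels()` in WorldRegions.cpp from this commit):

```cpp
uint32_t size = 0;     // compressed length as stored in the region file
uint32_t srcSize = 0;  // decompressed (source) length, new in format 3
if (ubyte* data = layer.getData(x, z, size, srcSize)) {
    // decompress to exactly srcSize bytes instead of a hard-coded constant
    auto raw = compression::decompress(data, size, srcSize, layer.compression);
    // ... use raw.get(), srcSize bytes ...
}
```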

View File

@ -12,6 +12,7 @@
static inline size_t VOXELS_DATA_SIZE_V1 = CHUNK_VOL * 4;
static inline size_t VOXELS_DATA_SIZE_V2 = CHUNK_VOL * 4;
#include <iostream>
static util::Buffer<ubyte> convert_voxels_1to2(const ubyte* buffer, uint32_t size) {
auto data = compression::decompress(
buffer, size, VOXELS_DATA_SIZE_V1, compression::Method::EXTRLE8);
@ -30,18 +31,20 @@ static util::Buffer<ubyte> convert_voxels_1to2(const ubyte* buffer, uint32_t siz
(static_cast<blockid_t>(bid1) << 8) | static_cast<blockid_t>(bid2);
dst[CHUNK_VOL + i] = (
(static_cast<blockstate_t>(bst1) << 8) |
static_cast<blockstate_t>(bst2)
static_cast<blockstate_t>(bst2)
);
}
size_t outLen;
auto compressed = compression::compress(
data.get(), VOXELS_DATA_SIZE_V2, outLen, compression::Method::EXTRLE16);
dstBuffer.data(), VOXELS_DATA_SIZE_V2, outLen, compression::Method::EXTRLE16);
return util::Buffer<ubyte>(std::move(compressed), outLen);
}
#include "util/timeutil.hpp"
util::Buffer<ubyte> compatibility::convert_region_2to3(
const util::Buffer<ubyte>& src, RegionLayerIndex layer
) {
timeutil::ScopeLogTimer log(555);
const size_t REGION_CHUNKS = 1024;
const size_t HEADER_SIZE = 10;
const size_t OFFSET_TABLE_SIZE = REGION_CHUNKS * sizeof(uint32_t);

View File

@ -73,7 +73,8 @@ void show_convert_request(
) {
guiutil::confirm(
engine->getGUI(),
langs::get(L"world.convert-request"),
langs::get(report->isUpgradeRequired() ?
L"world.upgrade-request" : L"world.convert-request"),
[=]() {
auto converter =
create_converter(engine, worldFiles, content, report, postRunnable);

View File

@ -77,33 +77,6 @@ std::unique_ptr<Chunk> Chunk::clone() const {
return other;
}
/**
Current chunk format:
- byte-order: big-endian
- [don't panic!] first and second bytes are separated for RLE efficiency
```cpp
uint8_t voxel_id_first_byte[CHUNK_VOL];
uint8_t voxel_id_second_byte[CHUNK_VOL];
uint8_t voxel_states_first_byte[CHUNK_VOL];
uint8_t voxel_states_second_byte[CHUNK_VOL];
```
Total size: (CHUNK_VOL * 4) bytes
*/
std::unique_ptr<ubyte[]> Chunk::encode() const {
auto buffer = std::make_unique<ubyte[]>(CHUNK_DATA_LEN);
for (uint i = 0; i < CHUNK_VOL; i++) {
buffer[i] = voxels[i].id >> 8;
buffer[CHUNK_VOL + i] = voxels[i].id & 0xFF;
blockstate_t state = blockstate2int(voxels[i].state);
buffer[CHUNK_VOL * 2 + i] = state >> 8;
buffer[CHUNK_VOL * 3 + i] = state & 0xFF;
}
return buffer;
}
/**
Current chunk format:
- byte-order: little-endian
@ -115,7 +88,7 @@ std::unique_ptr<ubyte[]> Chunk::encode() const {
Total size: (CHUNK_VOL * 4) bytes
*/
std::unique_ptr<ubyte[]> Chunk::encodeV2() const {
std::unique_ptr<ubyte[]> Chunk::encode() const {
auto buffer = std::make_unique<ubyte[]>(CHUNK_DATA_LEN);
auto dst = reinterpret_cast<uint16_t*>(buffer.get());
for (uint i = 0; i < CHUNK_VOL; i++) {
@ -126,26 +99,6 @@ std::unique_ptr<ubyte[]> Chunk::encodeV2() const {
}
bool Chunk::decode(const ubyte* data) {
for (uint i = 0; i < CHUNK_VOL; i++) {
voxel& vox = voxels[i];
ubyte bid1 = data[i];
ubyte bid2 = data[CHUNK_VOL + i];
ubyte bst1 = data[CHUNK_VOL * 2 + i];
ubyte bst2 = data[CHUNK_VOL * 3 + i];
vox.id =
(static_cast<blockid_t>(bid1) << 8) | static_cast<blockid_t>(bid2);
vox.state = int2blockstate(
(static_cast<blockstate_t>(bst1) << 8) |
static_cast<blockstate_t>(bst2)
);
}
return true;
}
bool Chunk::decodeV2(const ubyte* data) {
auto src = reinterpret_cast<const uint16_t*>(data);
for (uint i = 0; i < CHUNK_VOL; i++) {
voxel& vox = voxels[i];
@ -157,17 +110,6 @@ bool Chunk::decodeV2(const ubyte* data) {
}
void Chunk::convert(ubyte* data, const ContentReport* report) {
for (uint i = 0; i < CHUNK_VOL; i++) {
blockid_t id =
((static_cast<blockid_t>(data[i]) << 8) |
static_cast<blockid_t>(data[CHUNK_VOL + i]));
blockid_t replacement = report->blocks.getId(id);
data[i] = replacement >> 8;
data[CHUNK_VOL + i] = replacement & 0xFF;
}
}
void Chunk::convertV2(ubyte* data, const ContentReport* report) {
auto buffer = reinterpret_cast<uint16_t*>(data);
for (uint i = 0; i < CHUNK_VOL; i++) {
blockid_t id = dataio::le2h(buffer[i]);

View File

@ -66,20 +66,12 @@ public:
flags.unsaved = true;
}
/// @brief Encode chunk to bytes array of size CHUNK_DATA_LEN
/// @see /doc/specs/outdated/region_voxels_chunk_spec_v1.md
std::unique_ptr<ubyte[]> encode() const;
/// @brief Encode chunk to bytes array of size CHUNK_DATA_LEN
/// @see /doc/specs/region_voxels_chunk_spec.md
std::unique_ptr<ubyte[]> encodeV2() const;
std::unique_ptr<ubyte[]> encode() const;
/// @return true if all is fine
bool decode(const ubyte* data);
/// @return true if all is fine
bool decodeV2(const ubyte* data);
static void convert(ubyte* data, const ContentReport* report);
static void convertV2(ubyte* data, const ContentReport* report);
};