world refactor

This commit is contained in:
Tuomas Katajisto 2026-03-24 18:36:48 +02:00
parent 01eaff1c0f
commit 48555e1b00
5 changed files with 508 additions and 89 deletions

View File

@ -13,6 +13,7 @@ Pool :: #import "Pool";
// Kinds of async file fetches processed by the asset manager's fetch queue.
Fetch_Type :: enum {
PACK;
WORLD;
// chunks.bin payload; always enqueued as the follow-up to a WORLD json fetch.
WORLD_CHUNKS;
RDM_ATLAS;
RDM_LOOKUP;
}
@ -29,6 +30,8 @@ Fetch_Request :: struct {
chunk_key : Chunk_Key;
// Atlas GPU image held between RDM_ATLAS and its paired RDM_LOOKUP fetch.
rdm_pending_atlas : sg_image;
// Heap copy of world.json carried between WORLD and WORLD_CHUNKS fetches.
world_json_data : []u8;
}
Asset_Manager :: struct {
@ -52,12 +55,14 @@ RDM_LOOKUP_MAX_BYTES :: 512 * 512 * 4 * 4 + size_of(RDM_File_Header);
buf : []u8;
world_buf : []u8;
world_chunks_buf : []u8;
rdm_atlas_buf : []u8;
rdm_lookup_buf : []u8;
buffer_for_fetch :: (type: Fetch_Type) -> (*u8, u64) {
if type == .PACK return buf.data, xx buf.count;
if type == .WORLD return world_buf.data, xx world_buf.count;
if type == .WORLD_CHUNKS return world_chunks_buf.data, xx world_chunks_buf.count;
if type == .RDM_ATLAS return rdm_atlas_buf.data, xx rdm_atlas_buf.count;
if type == .RDM_LOOKUP return rdm_lookup_buf.data, xx rdm_lookup_buf.count;
return null, 0;
@ -88,13 +93,62 @@ fetch_callback :: (res: *sfetch_response_t) #c_call {
case .WORLD;
if res.failed {
if ends_with(req.path, "world.json") {
fallback_req : Fetch_Request;
fallback_req.type = .WORLD;
fallback_req.world_name = req.world_name;
fallback_req.path = sprint("./game/resources/worlds/%/index.world", req.world_name);
fallback_req.should_block = true;
array_add(*g_asset_manager.fetch_queue, fallback_req);
return;
}
log_error("Failed to load world '%'", req.world_name);
return;
}
data: []u8;
data.data = res.data.ptr;
data.count = res.data.size.(s64);
is_json := data.count > 0 && data[0] == #char "{";
if is_json {
json_copy := NewArray(data.count, u8, false);
memcpy(json_copy.data, data.data, data.count);
chunks_req : Fetch_Request;
chunks_req.type = .WORLD_CHUNKS;
chunks_req.world_name = req.world_name;
chunks_req.path = sprint("./game/resources/worlds/%/chunks.bin", req.world_name);
chunks_req.should_block = true;
chunks_req.world_json_data = json_copy;
array_add(*g_asset_manager.fetch_queue, chunks_req);
} else {
world, ok := load_world_from_data(data);
if ok {
set_loaded_world(world);
rdm_loader_enqueue_world(*get_current_world().world);
log_info("Loaded world (legacy): %", world.name);
} else {
log_error("Failed to parse world '%'", req.world_name);
}
}
case .WORLD_CHUNKS;
json_data := req.world_json_data;
defer free(json_data.data);
if res.failed {
log_error("Failed to load chunks.bin for world '%'", req.world_name);
return;
}
json_str: string;
json_str.data = json_data.data;
json_str.count = json_data.count;
chunk_bin: []u8;
chunk_bin.data = res.data.ptr;
chunk_bin.count = res.data.size.(s64);
world, ok := load_world_from_json(json_str, chunk_bin);
if ok {
set_loaded_world(world);
rdm_loader_enqueue_world(*get_current_world().world);
@ -132,12 +186,18 @@ fetch_callback :: (res: *sfetch_response_t) #c_call {
sample_count = 1,
data = atlas_imgdata,
};
// Enqueue the lookup with the atlas image embedded in the request.
lookup_path: string;
chunk_ptr := table_find_pointer(*curworld.world.chunks, req.chunk_key);
if chunk_ptr != null && chunk_ptr.rdm_lookup_path.count > 0 {
lookup_path = sprint("./game/resources/worlds/%/%", req.world_name, chunk_ptr.rdm_lookup_path);
} else {
lookup_path = rdm_chunk_filename(req.world_name, req.chunk_key, "rdm_lookup");
}
lookup_req : Fetch_Request;
lookup_req.type = .RDM_LOOKUP;
lookup_req.world_name = req.world_name;
lookup_req.chunk_key = req.chunk_key;
lookup_req.path = rdm_chunk_filename(req.world_name, req.chunk_key, "rdm_lookup");
lookup_req.path = lookup_path;
lookup_req.rdm_pending_atlas = sg_make_image(*atlas_desc);
array_add(*g_asset_manager.fetch_queue, lookup_req);
@ -327,6 +387,7 @@ free_resources_from_pack :: (pack: *Loaded_Pack) {
// Allocates the reusable per-fetch-type receive buffers (see buffer_for_fetch).
// Buffers are uninitialized (NewArray ..., false) since fetches overwrite them.
asset_manager_init :: () {
buf = NewArray(MAX_FILE_SIZE, u8, false);
world_buf = NewArray(MAX_FILE_SIZE, u8, false);
world_chunks_buf = NewArray(MAX_FILE_SIZE, u8, false);
rdm_atlas_buf = NewArray(RDM_ATLAS_MAX_BYTES, u8, false);
rdm_lookup_buf = NewArray(RDM_LOOKUP_MAX_BYTES, u8, false);
}
@ -386,7 +447,7 @@ load_world :: (name: string) {
req : Fetch_Request;
req.type = .WORLD;
req.world_name = sprint("%", name);
req.path = sprint("./game/resources/worlds/%/index.world", name);
req.path = sprint("./game/resources/worlds/%/world.json", name);
req.should_block = true;
array_add(*g_asset_manager.fetch_queue, req);
}

View File

@ -30,7 +30,11 @@ rdm_loader_enqueue_world :: (world: *World) {
req.type = .RDM_ATLAS;
req.world_name = world.name;
req.chunk_key = chunk.coord;
if chunk.rdm_atlas_path.count > 0 {
req.path = sprint("./game/resources/worlds/%/%", world.name, chunk.rdm_atlas_path);
} else {
req.path = rdm_chunk_filename(world.name, chunk.coord, "rdm_atlas");
}
array_add(*g_asset_manager.fetch_queue, req);
}
}

View File

@ -17,12 +17,10 @@ _emit :: (level: Log_Level, message: string) {
else ifx level == .ERROR then "[ERROR] "
else "[INFO] ";
old_alloc := context.allocator;
context.allocator = default_context.allocator;
push_allocator(default_context.allocator);
line := copy_string(tprint("%1%2", prefix, message));
print("%\n", line);
console_add_output_line(line);
context.allocator = old_alloc;
}
#scope_export

View File

@ -58,8 +58,221 @@ test_chunk_coord_values :: () {
end_suite(s);
}
// Builds a small deterministic world fixture for the tests below:
// two chunks (one at the origin, one at a negative coordinate),
// one particle emitter, and one editor note.
make_test_world :: () -> World {
    w: World;
    w.name = "test_world";
    w.conf.skyBase = .{0.1, 0.2, 0.3};
    w.conf.sunIntensity = 5.0;
    w.conf.hasClouds = 0;
    w.conf.planeHeight = 2.5;

    // Origin chunk: a single group with two instances.
    stone_group: Chunk_Trile_Group;
    stone_group.trile_name = "stone";
    array_add(*stone_group.instances, Trile_Instance.{x = 1, y = 2, z = 3, orientation = 5});
    array_add(*stone_group.instances, Trile_Instance.{x = 10, y = 20, z = 30, orientation = 12});

    origin_chunk: Chunk;
    origin_chunk.coord = .{x = 0, y = 0, z = 0};
    array_add(*origin_chunk.groups, stone_group);
    table_set(*w.chunks, origin_chunk.coord, origin_chunk);

    // Negative-coordinate chunk: two groups.
    grass_group: Chunk_Trile_Group;
    grass_group.trile_name = "grass";
    array_add(*grass_group.instances, Trile_Instance.{x = 0, y = 0, z = 0, orientation = 0});

    dirt_group: Chunk_Trile_Group;
    dirt_group.trile_name = "dirt";
    array_add(*dirt_group.instances, Trile_Instance.{x = 5, y = 5, z = 5, orientation = 3});

    neg_chunk: Chunk;
    neg_chunk.coord = .{x = -1, y = 0, z = 2};
    array_add(*neg_chunk.groups, grass_group);
    array_add(*neg_chunk.groups, dirt_group);
    table_set(*w.chunks, neg_chunk.coord, neg_chunk);

    fire_emitter: Particle_Emitter_Instance;
    fire_emitter.definition_name = "fire";
    fire_emitter.position = .{10.5, 0.0, 5.5};
    fire_emitter.active = true;
    array_add(*w.emitter_instances, fire_emitter);

    spawn_note: Editor_Note;
    spawn_note.text = "spawn point";
    spawn_note.position = .{x = 0, y = 0, z = 0};
    array_add(*w.notes, spawn_note);

    return w;
}
// Round-trips the fixture world through save_world / load_world_from_json
// and verifies config values, chunk/group/instance contents, emitters,
// and notes all survive the JSON + chunks.bin serialization.
test_world_save_load_roundtrip :: () {
    suite := begin_suite("world JSON save/load roundtrip");

    src := make_test_world();
    json_str, bin_data := save_world(*src);

    // View the returned binary string as a byte slice for the loader.
    bin_bytes: []u8;
    bin_bytes.data  = bin_data.data;
    bin_bytes.count = bin_data.count;

    result, ok := load_world_from_json(json_str, bin_bytes);
    check(*suite, "load succeeds", ok);
    check(*suite, "name matches", result.name == "test_world");
    check(*suite, "skyBase.x", result.conf.skyBase.x == 0.1);
    check(*suite, "skyBase.y", result.conf.skyBase.y == 0.2);
    check(*suite, "skyBase.z", result.conf.skyBase.z == 0.3);
    check(*suite, "sunIntensity", result.conf.sunIntensity == 5.0);
    check(*suite, "hasClouds", result.conf.hasClouds == 0);
    check(*suite, "planeHeight", result.conf.planeHeight == 2.5);

    chunk0 := table_find_pointer(*result.chunks, Chunk_Key.{x=0, y=0, z=0});
    check(*suite, "chunk (0,0,0) exists", chunk0 != null);
    if chunk0 {
        check(*suite, "chunk0 has 1 group", chunk0.groups.count == 1);
        if chunk0.groups.count >= 1 {
            check(*suite, "chunk0 group name", chunk0.groups[0].trile_name == "stone");
            check(*suite, "chunk0 group has 2 instances", chunk0.groups[0].instances.count == 2);
            if chunk0.groups[0].instances.count >= 2 {
                inst := chunk0.groups[0].instances[0];
                check(*suite, "inst0 pos", inst.x == 1 && inst.y == 2 && inst.z == 3);
                check(*suite, "inst0 orient", inst.orientation == 5);
            }
        }
    }

    chunk_neg := table_find_pointer(*result.chunks, Chunk_Key.{x=-1, y=0, z=2});
    check(*suite, "chunk (-1,0,2) exists", chunk_neg != null);
    if chunk_neg {
        check(*suite, "chunk_neg has 2 groups", chunk_neg.groups.count == 2);
    }

    check(*suite, "1 emitter", result.emitter_instances.count == 1);
    if result.emitter_instances.count >= 1 {
        check(*suite, "emitter name", result.emitter_instances[0].definition_name == "fire");
        check(*suite, "emitter pos.x", result.emitter_instances[0].position.x == 10.5);
    }

    check(*suite, "1 note", result.notes.count == 1);
    if result.notes.count >= 1 {
        check(*suite, "note text", result.notes[0].text == "spawn point");
        check(*suite, "note pos", result.notes[0].position == Chunk_Key.{x=0, y=0, z=0});
    }

    end_suite(suite);
}
// Parses the saved world JSON directly (bypassing the loader) and validates
// the chunk table: every (offset, size) pair stays inside chunks.bin and
// the two entries do not overlap each other.
test_world_json_chunk_offsets :: () {
    suite := begin_suite("world JSON chunk offsets");

    src := make_test_world();
    json_str, bin_data := save_world(*src);

    Jaison :: #import "Jaison";
    parsed, wj := Jaison.json_parse_string(json_str, World_Json);
    check(*suite, "JSON parses", parsed);
    check(*suite, "2 chunk entries", wj.chunks.count == 2);

    if wj.chunks.count == 2 {
        for jc: wj.chunks {
            in_bounds := cast(s64)(jc.offset + jc.size) <= bin_data.count;
            check(*suite, tprint("chunk %: offset+size <= bin", it_index), in_bounds);
        }
        a := wj.chunks[0];
        b := wj.chunks[1];
        disjoint := (a.offset + a.size <= b.offset) || (b.offset + b.size <= a.offset);
        check(*suite, "chunks don't overlap", disjoint);
    }

    end_suite(suite);
}
// Verifies load_world_from_data() against a hand-built legacy (version 3)
// binary — specifically that the loader's read cursor is repositioned past
// the chunk data region so the trailing emitter and note sections are parsed.
test_legacy_load_cursor_fix :: () {
s := begin_suite("legacy binary cursor fix");
world := make_test_world();
// Header: magic, version (3), name length + bytes, fixed-size config blob.
builder: String_Builder;
write_value(*builder, WORLD_MAGIC);
write_value(*builder, cast(u16) 3);
name_len := cast(u16) world.name.count;
write_value(*builder, name_len);
append(*builder, world.name);
conf_bin := world_config_to_binary(*world.conf);
write_value(*builder, conf_bin);
// First pass: serialize each non-empty chunk body into temp storage so the
// chunk-table sizes/offsets can be computed before the table is written.
Chunk_Data_Entry :: struct { coord: Chunk_Key; data: string; }
chunk_entries: [..]Chunk_Data_Entry;
chunk_entries.allocator = temp;
num_chunks: u32 = 0;
for chunk: world.chunks {
if chunk.groups.count == 0 then continue;
num_chunks += 1;
cb: String_Builder;
cb.allocator = temp;
// Chunk body: u16 group count, then per group: u16 name length, name bytes,
// u16 instance count, raw Trile_Instance records.
write_value(*cb, cast(u16) chunk.groups.count);
for group: chunk.groups {
write_value(*cb, cast(u16) group.trile_name.count);
append(*cb, group.trile_name);
write_value(*cb, cast(u16) group.instances.count);
for inst: group.instances { write_value(*cb, inst); }
}
array_add(*chunk_entries, .{coord = chunk.coord, data = builder_to_string(*cb,, temp)});
}
write_value(*builder, num_chunks);
// Chunk table: offsets are absolute (from file start), so data begins at
// current header size + table size. Entry = 3 x s32 coord + 2 x u32 off/size.
current_header_size := builder_string_length(*builder);
chunk_table_entry_size : s64 = size_of(s32)*3 + size_of(u32)*2;
chunk_table_size := cast(s64) num_chunks * chunk_table_entry_size;
data_start := current_header_size + chunk_table_size;
running_offset := cast(u32) data_start;
for entry: chunk_entries {
write_value(*builder, entry.coord.x);
write_value(*builder, entry.coord.y);
write_value(*builder, entry.coord.z);
write_value(*builder, running_offset);
write_value(*builder, cast(u32) entry.data.count);
running_offset += cast(u32) entry.data.count;
}
// Chunk data region, then the emitter/note sections that the loader can only
// reach if it skips the chunk data correctly.
for entry: chunk_entries { append(*builder, entry.data); }
write_value(*builder, cast(u16) world.emitter_instances.count);
for inst: world.emitter_instances {
write_value(*builder, cast(u16) inst.definition_name.count);
append(*builder, inst.definition_name);
write_value(*builder, inst.position.x);
write_value(*builder, inst.position.y);
write_value(*builder, inst.position.z);
}
write_value(*builder, cast(u16) world.notes.count);
for note: world.notes {
write_value(*builder, cast(u16) note.text.count);
append(*builder, note.text);
write_value(*builder, note.position.x);
write_value(*builder, note.position.y);
write_value(*builder, note.position.z);
}
// Load the hand-built binary back and check the post-chunk sections arrived.
binary := builder_to_string(*builder);
data: []u8;
data.data = binary.data;
data.count = binary.count;
loaded, ok := load_world_from_data(data);
check(*s, "legacy load succeeds", ok);
check(*s, "legacy emitter count", loaded.emitter_instances.count == 1);
if loaded.emitter_instances.count >= 1 {
check(*s, "legacy emitter name", loaded.emitter_instances[0].definition_name == "fire");
}
check(*s, "legacy note count", loaded.notes.count == 1);
if loaded.notes.count >= 1 {
check(*s, "legacy note text", loaded.notes[0].text == "spawn point");
}
end_suite(s);
}
// Build-time test driver: #run executes every suite during compilation.
#run {
test_floor_div_mod();
test_coord_roundtrip();
test_chunk_coord_values();
test_world_save_load_roundtrip();
test_world_json_chunk_offsets();
test_legacy_load_cursor_fix();
}

View File

@ -55,6 +55,8 @@ Chunk :: struct {
rdm_atlas: sg_image;
rdm_lookup: sg_image;
rdm_valid: bool;
rdm_atlas_path: string;
rdm_lookup_path: string;
#if !FLAG_RELEASE_BUILD {
rdm_lookup_cpu: []float;
rdm_lookup_w: s32;
@ -188,6 +190,52 @@ get_current_world :: () -> *Current_World {
WORLD_MAGIC :: u32.[0x4C575254][0]; // "TRWL" as little-endian u32
WORLD_VERSION :: cast(u16) 3;
// Top-level schema of world.json (written by save_world). The chunk payloads
// themselves live in the sibling chunks.bin file; each entry in `chunks`
// records its byte slice of that file.
World_Json :: struct {
version : s32;
name : string;
config : World_Json_Config;
chunks : [..]World_Json_Chunk;
emitters : [..]World_Json_Emitter;
notes : [..]World_Json_Note;
}
// JSON mirror of World_Config: vector fields flattened to plain [3]float
// (via .component) and flags kept as s32. Converted both ways by
// world_config_to_json / world_config_from_json.
World_Json_Config :: struct {
skyBase : [3]float;
skyTop : [3]float;
sunDisk : [3]float;
horizonHalo : [3]float;
sunHalo : [3]float;
sunLightColor : [3]float;
sunPosition : [3]float;
sunIntensity : float;
skyIntensity : float;
hasClouds : s32;
planeHeight : float;
animatePlaneHeight : s32;
waterColor : [3]float;
deepColor : [3]float;
}
// One chunk-table entry: chunk coordinate, its [offset, offset+size) slice
// of chunks.bin, and the file names of its RDM atlas/lookup resources.
World_Json_Chunk :: struct {
x : s32;
y : s32;
z : s32;
offset : s32;
size : s32;
rdm_atlas : string;
rdm_lookup : string;
}
// A placed particle emitter: definition name plus world-space position.
World_Json_Emitter :: struct {
definition_name : string;
position : [3]float;
}
// An editor annotation; position is integer and stored as a Chunk_Key at
// runtime (see load_world_from_json).
World_Json_Note :: struct {
text : string;
position : [3]s32;
}
// World_Config serialized as a fixed-size binary blob.
// We serialize it field-by-field to avoid padding issues.
World_Config_Binary :: struct {
@ -282,42 +330,65 @@ sworld :: () {
name := current_world.world.name;
dir := tprint("./game/resources/worlds/%", name);
file.make_directory_if_it_does_not_exist(dir, recursive = true);
data := save_world(*current_world.world);
file.write_entire_file(tprint("%/index.world", dir), data);
log_info("Saved world '%' (% bytes)", name, data.count);
json_data, bin_data := save_world(*current_world.world);
file.write_entire_file(tprint("%/world.json", dir), json_data);
file.write_entire_file(tprint("%/chunks.bin", dir), bin_data);
log_info("Saved world '%' (json=% bytes, bin=% bytes)", name, json_data.count, bin_data.count);
}
} @Command
save_world :: (world: *World) -> string {
builder: String_Builder;
// Header
write_value(*builder, WORLD_MAGIC);
write_value(*builder, WORLD_VERSION);
name_len := cast(u16) world.name.count;
write_value(*builder, name_len);
append(*builder, world.name);
// World config
conf_bin := world_config_to_binary(*world.conf);
write_value(*builder, conf_bin);
// Count non-empty chunks
num_chunks: u32 = 0;
for world.chunks {
if it.groups.count > 0 then num_chunks += 1;
// Converts the runtime World_Config into its JSON mirror, flattening each
// vector field into a plain [3]float through .component.
world_config_to_json :: (conf: *World_Config) -> World_Json_Config {
    out: World_Json_Config;
    out.skyBase            = conf.skyBase.component;
    out.skyTop             = conf.skyTop.component;
    out.sunDisk            = conf.sunDisk.component;
    out.horizonHalo        = conf.horizonHalo.component;
    out.sunHalo            = conf.sunHalo.component;
    out.sunLightColor      = conf.sunLightColor.component;
    out.sunPosition        = conf.sunPosition.component;
    out.sunIntensity       = conf.sunIntensity;
    out.skyIntensity       = conf.skyIntensity;
    out.hasClouds          = conf.hasClouds;
    out.planeHeight        = conf.planeHeight;
    out.animatePlaneHeight = conf.animatePlaneHeight;
    out.waterColor         = conf.waterColor.component;
    out.deepColor          = conf.deepColor.component;
    return out;
}
write_value(*builder, num_chunks);
// We need to write chunk table, then chunk data.
// First pass: serialize all chunk data to get sizes.
Chunk_Data_Entry :: struct {
// Inverse of world_config_to_json: rebuilds a runtime World_Config from its
// JSON mirror, writing each [3]float back through .component.
world_config_from_json :: (jc: *World_Json_Config) -> World_Config {
    result: World_Config;
    result.skyBase.component       = jc.skyBase;
    result.skyTop.component        = jc.skyTop;
    result.sunDisk.component       = jc.sunDisk;
    result.horizonHalo.component   = jc.horizonHalo;
    result.sunHalo.component       = jc.sunHalo;
    result.sunLightColor.component = jc.sunLightColor;
    result.sunPosition.component   = jc.sunPosition;
    result.sunIntensity            = jc.sunIntensity;
    result.skyIntensity            = jc.skyIntensity;
    result.hasClouds               = jc.hasClouds;
    result.planeHeight             = jc.planeHeight;
    result.animatePlaneHeight      = jc.animatePlaneHeight;
    result.waterColor.component    = jc.waterColor;
    result.deepColor.component     = jc.deepColor;
    return result;
}
save_world :: (world: *World) -> (json: string, chunks_bin: string) {
bin_builder: String_Builder;
Chunk_Save_Entry :: struct {
coord: Chunk_Key;
data: string;
offset: s32;
size: s32;
rdm_atlas_path: string;
rdm_lookup_path: string;
}
chunk_entries: [..]Chunk_Data_Entry;
chunk_entries: [..]Chunk_Save_Entry;
chunk_entries.allocator = temp;
running_offset: s32 = 0;
for chunk: world.chunks {
if chunk.groups.count == 0 then continue;
chunk_builder: String_Builder;
@ -336,59 +407,123 @@ save_world :: (world: *World) -> string {
write_value(*chunk_builder, inst);
}
}
array_add(*chunk_entries, .{coord = chunk.coord, data = builder_to_string(*chunk_builder,, temp)});
chunk_data := builder_to_string(*chunk_builder,, temp);
data_size := cast(s32) chunk_data.count;
atlas_path := chunk.rdm_atlas_path;
lookup_path := chunk.rdm_lookup_path;
if !atlas_path.count {
atlas_path = tprint("%_%_%.rdm_atlas", chunk.coord.x, chunk.coord.y, chunk.coord.z);
lookup_path = tprint("%_%_%.rdm_lookup", chunk.coord.x, chunk.coord.y, chunk.coord.z);
}
// Calculate data offsets.
// Chunk table starts right after what we've written so far.
// header_size = current builder length + chunk_table_size
current_header_size := builder_string_length(*builder);
chunk_table_entry_size : s64 = size_of(s32)*3 + size_of(u32)*2; // chunk_x, chunk_y, chunk_z, data_offset, data_size
chunk_table_size := cast(s64) num_chunks * chunk_table_entry_size;
data_start := current_header_size + chunk_table_size;
// Write chunk table
running_offset := cast(u32) data_start;
for entry: chunk_entries {
write_value(*builder, entry.coord.x);
write_value(*builder, entry.coord.y);
write_value(*builder, entry.coord.z);
write_value(*builder, running_offset);
data_size := cast(u32) entry.data.count;
write_value(*builder, data_size);
array_add(*chunk_entries, .{
coord = chunk.coord,
offset = running_offset,
size = data_size,
rdm_atlas_path = atlas_path,
rdm_lookup_path = lookup_path,
});
append(*bin_builder, chunk_data);
running_offset += data_size;
}
// Write chunk data
wj: World_Json;
wj.version = 4;
wj.name = world.name;
wj.config = world_config_to_json(*world.conf);
for entry: chunk_entries {
append(*builder, entry.data);
jc: World_Json_Chunk;
jc.x = entry.coord.x;
jc.y = entry.coord.y;
jc.z = entry.coord.z;
jc.offset = entry.offset;
jc.size = entry.size;
jc.rdm_atlas = entry.rdm_atlas_path;
jc.rdm_lookup = entry.rdm_lookup_path;
array_add(*wj.chunks, jc);
}
// Write emitter instances
num_emitters := cast(u16) world.emitter_instances.count;
write_value(*builder, num_emitters);
for inst: world.emitter_instances {
name_len := cast(u16) inst.definition_name.count;
write_value(*builder, name_len);
append(*builder, inst.definition_name);
write_value(*builder, inst.position.x);
write_value(*builder, inst.position.y);
write_value(*builder, inst.position.z);
je: World_Json_Emitter;
je.definition_name = inst.definition_name;
je.position = inst.position.component;
array_add(*wj.emitters, je);
}
// Write notes
num_notes := cast(u16) world.notes.count;
write_value(*builder, num_notes);
for note: world.notes {
text_len := cast(u16) note.text.count;
write_value(*builder, text_len);
append(*builder, note.text);
write_value(*builder, note.position.x);
write_value(*builder, note.position.y);
write_value(*builder, note.position.z);
jn: World_Json_Note;
jn.text = note.text;
jn.position = .[note.position.x, note.position.y, note.position.z];
array_add(*wj.notes, jn);
}
return builder_to_string(*builder);
json_str := Jaison.json_write_string(wj, " ");
return json_str, builder_to_string(*bin_builder);
}
// Loads a world from world.json text plus the chunks.bin payload it indexes.
// json_str  - contents of world.json (parsed against World_Json).
// chunk_bin - contents of chunks.bin; each chunk entry addresses a slice of it.
// Returns (world, true) on success; on any parse or validation failure, logs
// an error and returns (partially-filled world, false).
load_world_from_json :: (json_str: string, chunk_bin: []u8) -> (World, bool) {
world: World;
success, wj := Jaison.json_parse_string(json_str, World_Json);
if !success {
log_error("Failed to parse world JSON");
return world, false;
}
world.name = sprint("%", wj.name);
world.conf = world_config_from_json(*wj.config);
for jc: wj.chunks {
chunk: Chunk;
chunk.coord = .{x = jc.x, y = jc.y, z = jc.z};
chunk.rdm_atlas_path = sprint("%", jc.rdm_atlas);
chunk.rdm_lookup_path = sprint("%", jc.rdm_lookup);
offset := cast(s64) jc.offset;
size := cast(s64) jc.size;
// Validate the slice before touching chunk_bin. The negative checks matter:
// a negative offset or size from a hand-edited/corrupt JSON would pass the
// upper-bound test alone and read out of bounds.
if offset < 0 || size < 0 || offset + size > chunk_bin.count {
log_error("Chunk data out of bounds: offset=%, size=%, bin=%", offset, size, chunk_bin.count);
return world, false;
}
chunk_data: []u8;
chunk_data.data = chunk_bin.data + offset;
chunk_data.count = size;
// Chunk payload layout: u16 group count, then per group: u16 name length,
// name bytes, u16 instance count, raw Trile_Instance records.
chunk_cursor: s64 = 0;
num_types := read_value(chunk_data, *chunk_cursor, u16);
for t: 0..cast(s64)num_types-1 {
group: Chunk_Trile_Group;
gname_len := cast(s64) read_value(chunk_data, *chunk_cursor, u16);
group.trile_name = read_string(chunk_data, *chunk_cursor, gname_len);
count := cast(s64) read_value(chunk_data, *chunk_cursor, u16);
for i: 0..count-1 {
inst := read_value(chunk_data, *chunk_cursor, Trile_Instance);
array_add(*group.instances, inst);
}
array_add(*chunk.groups, group);
}
table_set(*world.chunks, chunk.coord, chunk);
}
for je: wj.emitters {
inst: Particle_Emitter_Instance;
inst.definition_name = sprint("%", je.definition_name);
inst.position.component = je.position;
// Emitters are always loaded active; the JSON carries no active flag.
inst.active = true;
array_add(*world.emitter_instances, inst);
}
for jn: wj.notes {
note: Editor_Note;
note.text = sprint("%", jn.text);
note.position = .{x = jn.position[0], y = jn.position[1], z = jn.position[2]};
array_add(*world.notes, note);
}
return world, true;
}
load_world_from_data :: (data: []u8) -> (World, bool) {
@ -462,6 +597,11 @@ load_world_from_data :: (data: []u8) -> (World, bool) {
table_set(*world.chunks, chunk.coord, chunk);
}
if chunk_table.count > 0 {
last := chunk_table[chunk_table.count - 1];
cursor = cast(s64)(last.offset + last.size);
}
if version >= 2 && cursor < data.count {
num_emitters := cast(s64) read_value(data, *cursor, u16);
for i: 0..num_emitters-1 {
@ -551,12 +691,15 @@ draw_world_picker :: (r_in: GR.Rect, theme: *GR.Overall_Theme) {
world_names.allocator = temp;
dir_visitor :: (info: *File_Utilities.File_Visit_Info, names: *[..]string) {
if info.short_name == "index.world" {
// Extract world name from path: .../worlds/{name}/index.world
if info.short_name == "world.json" || info.short_name == "index.world" {
#import "String";
_, left, _ := split_from_right(info.full_name, "/index.world");
suffix := ifx info.short_name == "world.json" then "/world.json" else "/index.world";
_, left, _ := split_from_right(info.full_name, suffix);
_, _, name := split_from_right(left, "/");
if name.count > 0 then array_add(names, name);
if name.count > 0 {
for names.* { if it == name then return; }
array_add(names, name);
}
}
}