Mirror of https://github.com/louis-e/arnis.git (synced 2026-01-06 21:28:08 -05:00)

Compare commits: copilot/fi... → main (53 commits)
| SHA1 |
|---|
| 2d9892fe7f |
| b858ce4691 |
| e031e53492 |
| 6fb9b8943d |
| 18266dd459 |
| b1940fa412 |
| d57a732055 |
| 4e52b38f5a |
| feb4317086 |
| d02cbed997 |
| 99d1f8e117 |
| 6fa76bc381 |
| 0fef27e6af |
| fa3384cf86 |
| ffbc5e5788 |
| 4215e7644c |
| 118335bad4 |
| 7bbee28279 |
| 9cb35a3b13 |
| 4fecf98c54 |
| 47a7b81f99 |
| 7ec90b4fef |
| f1f3fb287a |
| b23658d5ef |
| cc89576828 |
| 809fa23941 |
| 51ad1fef3f |
| 8e8d8e0567 |
| da6f23c0a2 |
| d4a872989c |
| 2a5a5230c5 |
| 9018584b1d |
| 9eda39846c |
| 5e9d6795df |
| 54a7a4f2a9 |
| d0d65643f5 |
| 946fd43a5e |
| 05e5ffdd2a |
| 0b7e27df7f |
| 613a410c93 |
| faefd29e30 |
| 9ad6c75440 |
| e51f28f067 |
| 47ddb9b211 |
| 46415bb002 |
| 0683dd3343 |
| 4d304dc978 |
| 5d97391820 |
| bef3cfb090 |
| 5a898944f7 |
| 9fdd960009 |
| 58e4a337d9 |
| 236a7e5af9 |
14 .github/workflows/release.yml (vendored)
@@ -87,7 +87,7 @@ jobs:
shell: powershell

- name: Upload artifact
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v6
with:
name: ${{ matrix.os }}-${{ matrix.target }}-build
path: target/release/${{ matrix.asset_name }}
@@ -97,13 +97,13 @@ jobs:
runs-on: macos-latest
steps:
- name: Download macOS Intel build
uses: actions/download-artifact@v5
uses: actions/download-artifact@v7
with:
name: macos-13-x86_64-apple-darwin-build
path: ./intel

- name: Download macOS ARM64 build
uses: actions/download-artifact@v5
uses: actions/download-artifact@v7
with:
name: macos-latest-aarch64-apple-darwin-build
path: ./arm64
@@ -114,7 +114,7 @@ jobs:
chmod +x arnis-mac-universal

- name: Upload universal binary
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v6
with:
name: macos-universal-build
path: arnis-mac-universal
@@ -127,19 +127,19 @@ jobs:
uses: actions/checkout@v6

- name: Download Windows build artifact
uses: actions/download-artifact@v5
uses: actions/download-artifact@v7
with:
name: windows-latest-x86_64-pc-windows-msvc-build
path: ./builds/windows

- name: Download Linux build artifact
uses: actions/download-artifact@v5
uses: actions/download-artifact@v7
with:
name: ubuntu-latest-x86_64-unknown-linux-gnu-build
path: ./builds/linux

- name: Download macOS universal build artifact
uses: actions/download-artifact@v5
uses: actions/download-artifact@v7
with:
name: macos-universal-build
path: ./builds/macos
16 Cargo.lock (generated)
@@ -204,6 +204,7 @@ dependencies = [
"nbtx",
"once_cell",
"rand 0.8.5",
"rand_chacha 0.3.1",
"rayon",
"reqwest",
"rfd",
@@ -4487,9 +4488,9 @@ dependencies = [

[[package]]
name = "rfd"
version = "0.15.4"
version = "0.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef2bee61e6cffa4635c72d7d81a84294e28f0930db0ddcb0f66d10244674ebed"
checksum = "a15ad77d9e70a92437d8f74c35d99b4e4691128df018833e99f90bcd36152672"
dependencies = [
"ashpd",
"block2 0.6.1",
@@ -4506,7 +4507,7 @@ dependencies = [
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
"windows-sys 0.59.0",
"windows-sys 0.60.2",
]

[[package]]
@@ -6765,6 +6766,15 @@ dependencies = [
"windows-targets 0.52.6",
]

[[package]]
name = "windows-sys"
version = "0.60.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
dependencies = [
"windows-targets 0.53.2",
]

[[package]]
name = "windows-sys"
version = "0.61.2"
Cargo.toml

@@ -38,9 +38,10 @@ itertools = "0.14.0"
log = "0.4.27"
once_cell = "1.21.3"
rand = "0.8.5"
rand_chacha = "0.3"
rayon = "1.10.0"
reqwest = { version = "0.12.15", features = ["blocking", "json"] }
rfd = { version = "0.15.4", optional = true }
rfd = { version = "0.16.0", optional = true }
semver = "1.0.27"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
@@ -3,6 +3,7 @@ use crate::block_definitions::{BEDROCK, DIRT, GRASS_BLOCK, STONE};
use crate::coordinate_system::cartesian::XZBBox;
use crate::coordinate_system::geographic::LLBBox;
use crate::element_processing::*;
use crate::floodfill_cache::FloodFillCache;
use crate::ground::Ground;
use crate::map_renderer;
use crate::osm_parser::ProcessedElement;
@@ -13,6 +14,7 @@ use crate::world_editor::{WorldEditor, WorldFormat};
use colored::Colorize;
use indicatif::{ProgressBar, ProgressStyle};
use std::path::PathBuf;
use std::sync::Arc;

pub const MIN_Y: i32 = -64;

@@ -53,14 +55,17 @@ pub fn generate_world_with_options(
) -> Result<PathBuf, String> {
let output_path = options.path.clone();
let world_format = options.format;

// Create editor with appropriate format
let mut editor: WorldEditor = WorldEditor::new_with_format_and_name(
options.path,
&xzbbox,
llbbox,
options.format,
options.level_name,
options.level_name.clone(),
options.spawn_point,
);
let ground = Arc::new(ground);

println!("{} Processing data...", "[4/7]".bold());

@@ -68,13 +73,18 @@ pub fn generate_world_with_options(
let highway_connectivity = highways::build_highway_connectivity_map(&elements);

// Set ground reference in the editor to enable elevation-aware block placement
editor.set_ground(&ground);
editor.set_ground(Arc::clone(&ground));

println!("{} Processing terrain...", "[5/7]".bold());
emit_gui_progress_update(25.0, "Processing terrain...");

// Pre-compute all flood fills in parallel for better CPU utilization
let mut flood_fill_cache = FloodFillCache::precompute(&elements, args.timeout.as_ref());
println!("Pre-computed {} flood fills", flood_fill_cache.way_count());

// Process data
let elements_count: usize = elements.len();
let mut elements = elements; // Take ownership for consuming
let process_pb: ProgressBar = ProgressBar::new(elements_count as u64);
process_pb.set_style(ProgressStyle::default_bar()
.template("{spinner:.green} [{elapsed_precise}] [{bar:45.white/black}] {pos}/{len} elements ({eta}) {msg}")
@@ -85,7 +95,8 @@ pub fn generate_world_with_options(
let mut current_progress_prcs: f64 = 25.0;
let mut last_emitted_progress: f64 = current_progress_prcs;

for element in &elements {
// Process elements by draining in insertion order
for element in elements.drain(..) {
process_pb.inc(1);
current_progress_prcs += progress_increment_prcs;
if (current_progress_prcs - last_emitted_progress).abs() > 0.25 {
@@ -103,22 +114,28 @@ pub fn generate_world_with_options(
process_pb.set_message("");
}

match element {
match &element {
ProcessedElement::Way(way) => {
if way.tags.contains_key("building") || way.tags.contains_key("building:part") {
buildings::generate_buildings(&mut editor, way, args, None);
buildings::generate_buildings(&mut editor, way, args, None, &flood_fill_cache);
} else if way.tags.contains_key("highway") {
highways::generate_highways(&mut editor, element, args, &highway_connectivity);
highways::generate_highways(
&mut editor,
&element,
args,
&highway_connectivity,
&flood_fill_cache,
);
} else if way.tags.contains_key("landuse") {
landuse::generate_landuse(&mut editor, way, args);
landuse::generate_landuse(&mut editor, way, args, &flood_fill_cache);
} else if way.tags.contains_key("natural") {
natural::generate_natural(&mut editor, element, args);
natural::generate_natural(&mut editor, &element, args, &flood_fill_cache);
} else if way.tags.contains_key("amenity") {
amenities::generate_amenities(&mut editor, element, args);
amenities::generate_amenities(&mut editor, &element, args, &flood_fill_cache);
} else if way.tags.contains_key("leisure") {
leisure::generate_leisure(&mut editor, way, args);
leisure::generate_leisure(&mut editor, way, args, &flood_fill_cache);
} else if way.tags.contains_key("barrier") {
barriers::generate_barriers(&mut editor, element);
barriers::generate_barriers(&mut editor, &element);
} else if let Some(val) = way.tags.get("waterway") {
if val == "dock" {
// docks count as water areas
@@ -138,8 +155,10 @@ pub fn generate_world_with_options(
} else if way.tags.get("service") == Some(&"siding".to_string()) {
highways::generate_siding(&mut editor, way);
} else if way.tags.contains_key("man_made") {
man_made::generate_man_made(&mut editor, element, args);
man_made::generate_man_made(&mut editor, &element, args);
}
// Release flood fill cache entry for this way
flood_fill_cache.remove_way(way.id);
}
ProcessedElement::Node(node) => {
if node.tags.contains_key("door") || node.tags.contains_key("entrance") {
@@ -147,13 +166,19 @@ pub fn generate_world_with_options(
} else if node.tags.contains_key("natural")
&& node.tags.get("natural") == Some(&"tree".to_string())
{
natural::generate_natural(&mut editor, element, args);
natural::generate_natural(&mut editor, &element, args, &flood_fill_cache);
} else if node.tags.contains_key("amenity") {
amenities::generate_amenities(&mut editor, element, args);
amenities::generate_amenities(&mut editor, &element, args, &flood_fill_cache);
} else if node.tags.contains_key("barrier") {
barriers::generate_barrier_nodes(&mut editor, node);
} else if node.tags.contains_key("highway") {
highways::generate_highways(&mut editor, element, args, &highway_connectivity);
highways::generate_highways(
&mut editor,
&element,
args,
&highway_connectivity,
&flood_fill_cache,
);
} else if node.tags.contains_key("tourism") {
tourisms::generate_tourisms(&mut editor, node);
} else if node.tags.contains_key("man_made") {
@@ -162,7 +187,12 @@ pub fn generate_world_with_options(
}
ProcessedElement::Relation(rel) => {
if rel.tags.contains_key("building") || rel.tags.contains_key("building:part") {
buildings::generate_building_from_relation(&mut editor, rel, args);
buildings::generate_building_from_relation(
&mut editor,
rel,
args,
&flood_fill_cache,
);
} else if rel.tags.contains_key("water")
|| rel
.tags
@@ -172,24 +202,43 @@ pub fn generate_world_with_options(
{
water_areas::generate_water_areas_from_relation(&mut editor, rel, &xzbbox);
} else if rel.tags.contains_key("natural") {
natural::generate_natural_from_relation(&mut editor, rel, args);
} else if rel.tags.contains_key("landuse") {
landuse::generate_landuse_from_relation(&mut editor, rel, args);
} else if rel.tags.get("leisure") == Some(&"park".to_string()) {
leisure::generate_leisure_from_relation(&mut editor, rel, args);
} else if rel.tags.contains_key("man_made") {
man_made::generate_man_made(
natural::generate_natural_from_relation(
&mut editor,
&ProcessedElement::Relation(rel.clone()),
rel,
args,
&flood_fill_cache,
);
} else if rel.tags.contains_key("landuse") {
landuse::generate_landuse_from_relation(
&mut editor,
rel,
args,
&flood_fill_cache,
);
} else if rel.tags.get("leisure") == Some(&"park".to_string()) {
leisure::generate_leisure_from_relation(
&mut editor,
rel,
args,
&flood_fill_cache,
);
} else if rel.tags.contains_key("man_made") {
man_made::generate_man_made(&mut editor, &element, args);
}
// Release flood fill cache entries for all ways in this relation
let way_ids: Vec<u64> = rel.members.iter().map(|m| m.way.id).collect();
flood_fill_cache.remove_relation_ways(&way_ids);
}
}
// Element is dropped here, freeing its memory immediately
}

process_pb.finish();

// Drop remaining caches
drop(highway_connectivity);
drop(flood_fill_cache);

// Generate ground layer
let total_blocks: u64 = xzbbox.bounding_rect().total_blocks();
let desired_updates: u64 = 1500;
@@ -215,44 +264,61 @@ pub fn generate_world_with_options(

let groundlayer_block = GRASS_BLOCK;

for x in xzbbox.min_x()..=xzbbox.max_x() {
for z in xzbbox.min_z()..=xzbbox.max_z() {
// Add default dirt and grass layer if there isn't a stone layer already
if !editor.check_for_block(x, 0, z, Some(&[STONE])) {
editor.set_block(groundlayer_block, x, 0, z, None, None);
editor.set_block(DIRT, x, -1, z, None, None);
editor.set_block(DIRT, x, -2, z, None, None);
}
// Process ground generation chunk-by-chunk for better cache locality.
// This keeps the same region/chunk HashMap entries hot in CPU cache,
// rather than jumping between regions on every Z iteration.
let min_chunk_x = xzbbox.min_x() >> 4;
let max_chunk_x = xzbbox.max_x() >> 4;
let min_chunk_z = xzbbox.min_z() >> 4;
let max_chunk_z = xzbbox.max_z() >> 4;

// Fill underground with stone
if args.fillground {
// Fill from bedrock+1 to 3 blocks below ground with stone
editor.fill_blocks_absolute(
STONE,
x,
MIN_Y + 1,
z,
x,
editor.get_absolute_y(x, -3, z),
z,
None,
None,
);
}
// Generate a bedrock level at MIN_Y
editor.set_block_absolute(BEDROCK, x, MIN_Y, z, None, Some(&[BEDROCK]));
for chunk_x in min_chunk_x..=max_chunk_x {
for chunk_z in min_chunk_z..=max_chunk_z {
// Calculate the block range for this chunk, clamped to bbox
let chunk_min_x = (chunk_x << 4).max(xzbbox.min_x());
let chunk_max_x = ((chunk_x << 4) + 15).min(xzbbox.max_x());
let chunk_min_z = (chunk_z << 4).max(xzbbox.min_z());
let chunk_max_z = ((chunk_z << 4) + 15).min(xzbbox.max_z());

block_counter += 1;
// Use manual % check since is_multiple_of() is unstable on stable Rust
#[allow(clippy::manual_is_multiple_of)]
if block_counter % batch_size == 0 {
ground_pb.inc(batch_size);
}
for x in chunk_min_x..=chunk_max_x {
for z in chunk_min_z..=chunk_max_z {
// Add default dirt and grass layer if there isn't a stone layer already
if !editor.check_for_block(x, 0, z, Some(&[STONE])) {
editor.set_block(groundlayer_block, x, 0, z, None, None);
editor.set_block(DIRT, x, -1, z, None, None);
editor.set_block(DIRT, x, -2, z, None, None);
}

gui_progress_grnd += progress_increment_grnd;
if (gui_progress_grnd - last_emitted_progress).abs() > 0.25 {
emit_gui_progress_update(gui_progress_grnd, "");
last_emitted_progress = gui_progress_grnd;
// Fill underground with stone
if args.fillground {
// Fill from bedrock+1 to 3 blocks below ground with stone
editor.fill_blocks_absolute(
STONE,
x,
MIN_Y + 1,
z,
x,
editor.get_absolute_y(x, -3, z),
z,
None,
None,
);
}
// Generate a bedrock level at MIN_Y
editor.set_block_absolute(BEDROCK, x, MIN_Y, z, None, Some(&[BEDROCK]));

block_counter += 1;
#[allow(clippy::manual_is_multiple_of)]
if block_counter % batch_size == 0 {
ground_pb.inc(batch_size);
}

gui_progress_grnd += progress_increment_grnd;
if (gui_progress_grnd - last_emitted_progress).abs() > 0.25 {
emit_gui_progress_update(gui_progress_grnd, "");
last_emitted_progress = gui_progress_grnd;
}
}
}
}
}
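The rewritten ground pass above iterates 16x16 chunk columns instead of sweeping raw X/Z coordinates, so consecutive writes hit the same region and chunk entries. A minimal standalone sketch of the clamping arithmetic that loop relies on (illustration only, not code from the repository):

```rust
// Chunk indices come from an arithmetic shift by 4 (floor division by 16,
// which is also correct for negative block coordinates). Each chunk's 16x16
// column is then clamped to the requested bounding box.
fn chunk_block_range(chunk: i32, bbox_min: i32, bbox_max: i32) -> (i32, i32) {
    let start = (chunk << 4).max(bbox_min); // first block column of the chunk, clamped
    let end = ((chunk << 4) + 15).min(bbox_max); // last block column of the chunk, clamped
    (start, end)
}

fn main() {
    // A bbox from x = -20 to x = 35 spans chunks -2..=2.
    assert_eq!(-20 >> 4, -2);
    assert_eq!(35 >> 4, 2);
    // Edge chunks are clipped to the bbox; interior chunks keep all 16 blocks.
    assert_eq!(chunk_block_range(-2, -20, 35), (-20, -17));
    assert_eq!(chunk_block_range(0, -20, 35), (0, 15));
    assert_eq!(chunk_block_range(2, -20, 35), (32, 35));
}
```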
@@ -275,6 +341,8 @@ pub fn generate_world_with_options(
// Save world
editor.save();

emit_gui_progress_update(99.0, "Finalizing world...");

// Update player spawn Y coordinate based on terrain height after generation
#[cfg(feature = "gui")]
if world_format == WorldFormat::JavaAnvil {
@@ -293,7 +361,7 @@ pub fn generate_world_with_options(
Some(*spawn_coords),
bbox_string,
args.scale,
&ground,
ground.as_ref(),
) {
let warning_msg = format!("Failed to update spawn point Y coordinate: {}", e);
eprintln!("Warning: {}", warning_msg);
@@ -303,8 +371,6 @@ pub fn generate_world_with_options(
}
}

emit_gui_progress_update(99.0, "Finalizing world...");

// For Bedrock format, emit event to open the mcworld file
if world_format == WorldFormat::BedrockMcWorld {
if let Some(path_str) = output_path.to_str() {
@@ -312,41 +378,72 @@ pub fn generate_world_with_options(
}
}

// Generate top-down map preview silently in background after completion (Java only)
// Skip map preview for very large areas to avoid memory issues
const MAX_MAP_PREVIEW_AREA: i64 = 6400 * 6900;
let world_width = (xzbbox.max_x() - xzbbox.min_x()) as i64;
let world_height = (xzbbox.max_z() - xzbbox.min_z()) as i64;
let world_area = world_width * world_height;

if world_format == WorldFormat::JavaAnvil && world_area <= MAX_MAP_PREVIEW_AREA {
let world_path = args.path.clone();
let bounds = (
xzbbox.min_x(),
xzbbox.max_x(),
xzbbox.min_z(),
xzbbox.max_z(),
);
std::thread::spawn(move || {
// Use catch_unwind to prevent any panic from affecting the application
let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
map_renderer::render_world_map(&world_path, bounds.0, bounds.1, bounds.2, bounds.3)
}));

match result {
Ok(Ok(_path)) => {
// Notify the GUI that the map preview is ready
emit_map_preview_ready();
}
Ok(Err(e)) => {
eprintln!("Warning: Failed to generate map preview: {}", e);
}
Err(_) => {
eprintln!("Warning: Map preview generation panicked unexpectedly");
}
}
});
}

Ok(output_path)
}

/// Information needed to generate a map preview after world generation is complete
#[derive(Clone)]
pub struct MapPreviewInfo {
pub world_path: PathBuf,
pub min_x: i32,
pub max_x: i32,
pub min_z: i32,
pub max_z: i32,
pub world_area: i64,
}

impl MapPreviewInfo {
/// Create MapPreviewInfo from world bounds
pub fn new(world_path: PathBuf, xzbbox: &XZBBox) -> Self {
let world_width = (xzbbox.max_x() - xzbbox.min_x()) as i64;
let world_height = (xzbbox.max_z() - xzbbox.min_z()) as i64;
Self {
world_path,
min_x: xzbbox.min_x(),
max_x: xzbbox.max_x(),
min_z: xzbbox.min_z(),
max_z: xzbbox.max_z(),
world_area: world_width * world_height,
}
}
}

/// Maximum area for which map preview generation is allowed (to avoid memory issues)
pub const MAX_MAP_PREVIEW_AREA: i64 = 6400 * 6900;

/// Start map preview generation in a background thread.
/// This should be called AFTER the world generation is complete, the session lock is released,
/// and the GUI has been notified of 100% completion.
///
/// For Java worlds only, and only if the world area is within limits.
pub fn start_map_preview_generation(info: MapPreviewInfo) {
if info.world_area > MAX_MAP_PREVIEW_AREA {
return;
}

std::thread::spawn(move || {
// Use catch_unwind to prevent any panic from affecting the application
let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
map_renderer::render_world_map(
&info.world_path,
info.min_x,
info.max_x,
info.min_z,
info.max_z,
)
}));

match result {
Ok(Ok(_path)) => {
// Notify the GUI that the map preview is ready
emit_map_preview_ready();
}
Ok(Err(e)) => {
eprintln!("Warning: Failed to generate map preview: {}", e);
}
Err(_) => {
eprintln!("Warning: Map preview generation panicked unexpectedly");
}
}
});
}
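With the inline preview thread replaced by `MapPreviewInfo` and `start_map_preview_generation`, the caller now decides when rendering starts. A hedged sketch of how a call site might use the new API; `world_dir` and `xzbbox` stand in for whatever the caller already has, and only the two items defined above come from the diff:

```rust
// Hypothetical call site (not shown in this diff).
let preview_info = MapPreviewInfo::new(world_dir.clone(), &xzbbox);

// ... generate and save the world, release the session lock, report 100% to the GUI ...

// Returns immediately; rendering happens on a background thread and is skipped
// entirely when world_area exceeds MAX_MAP_PREVIEW_AREA.
start_map_preview_generation(preview_info);
```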
127 src/deterministic_rng.rs (new file)
@@ -0,0 +1,127 @@
//! Deterministic random number generation for consistent element processing.
//!
//! This module provides seeded RNG that ensures the same element always produces
//! the same random values, regardless of processing order. This is essential for
//! region-by-region streaming where the same element may be processed multiple times
//! (once for each region it touches).
//!
//! # Example
//! ```ignore
//! let mut rng = element_rng(element_id);
//! let color = rng.gen_bool(0.5); // Always same result for same element_id
//! ```

use rand::SeedableRng;
use rand_chacha::ChaCha8Rng;

/// Creates a deterministic RNG seeded from an element ID.
///
/// The same element ID will always produce the same sequence of random values,
/// ensuring consistent results when an element is processed multiple times
/// (e.g., once per region it touches during streaming).
///
/// # Arguments
/// * `element_id` - The unique OSM element ID (way ID, node ID, or relation ID)
///
/// # Returns
/// A seeded ChaCha8Rng that will produce deterministic random values
#[inline]
pub fn element_rng(element_id: u64) -> ChaCha8Rng {
    ChaCha8Rng::seed_from_u64(element_id)
}

/// Creates a deterministic RNG seeded from an element ID with an additional salt.
///
/// Use this when you need multiple independent random sequences for the same element.
/// For example, one sequence for wall colors and another for roof style.
///
/// # Arguments
/// * `element_id` - The unique OSM element ID
/// * `salt` - Additional value to create a different sequence (e.g., use different
///   salt values for different purposes within the same element)
#[inline]
#[allow(dead_code)]
pub fn element_rng_salted(element_id: u64, salt: u64) -> ChaCha8Rng {
    // Combine element_id and salt using XOR and bit rotation to avoid collisions
    let combined = element_id ^ salt.rotate_left(32);
    ChaCha8Rng::seed_from_u64(combined)
}

/// Creates a deterministic RNG seeded from coordinates.
///
/// Use this for per-block randomness that needs to be consistent regardless
/// of processing order (e.g., random flower placement within a natural area).
///
/// # Arguments
/// * `x` - X coordinate
/// * `z` - Z coordinate
/// * `element_id` - The element ID for additional uniqueness
#[inline]
pub fn coord_rng(x: i32, z: i32, element_id: u64) -> ChaCha8Rng {
    // Combine coordinates and element_id into a seed.
    // Cast through u32 to handle negative coordinates consistently.
    let coord_part = ((x as u32 as i64) << 32) | (z as u32 as i64);
    let seed = (coord_part as u64) ^ element_id;
    ChaCha8Rng::seed_from_u64(seed)
}

#[cfg(test)]
mod tests {
    use super::*;
    use rand::Rng;

    #[test]
    fn test_element_rng_deterministic() {
        let mut rng1 = element_rng(12345);
        let mut rng2 = element_rng(12345);

        // Same seed should produce same sequence
        for _ in 0..100 {
            assert_eq!(rng1.gen::<u64>(), rng2.gen::<u64>());
        }
    }

    #[test]
    fn test_different_elements_different_values() {
        let mut rng1 = element_rng(12345);
        let mut rng2 = element_rng(12346);

        // Different seeds should (almost certainly) produce different values
        let v1: u64 = rng1.gen();
        let v2: u64 = rng2.gen();
        assert_ne!(v1, v2);
    }

    #[test]
    fn test_salted_rng_different_from_base() {
        let mut rng1 = element_rng(12345);
        let mut rng2 = element_rng_salted(12345, 1);

        let v1: u64 = rng1.gen();
        let v2: u64 = rng2.gen();
        assert_ne!(v1, v2);
    }

    #[test]
    fn test_coord_rng_deterministic() {
        let mut rng1 = coord_rng(100, 200, 12345);
        let mut rng2 = coord_rng(100, 200, 12345);

        assert_eq!(rng1.gen::<u64>(), rng2.gen::<u64>());
    }

    #[test]
    fn test_coord_rng_negative_coordinates() {
        // Negative coordinates are common in Minecraft worlds
        let mut rng1 = coord_rng(-100, -200, 12345);
        let mut rng2 = coord_rng(-100, -200, 12345);

        assert_eq!(rng1.gen::<u64>(), rng2.gen::<u64>());

        // Ensure different negative coords produce different seeds
        let mut rng3 = coord_rng(-100, -200, 12345);
        let mut rng4 = coord_rng(-101, -200, 12345);

        assert_ne!(rng3.gen::<u64>(), rng4.gen::<u64>());
    }
}
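The element processors below switch from `rand::thread_rng()` to these helpers. A small sketch of the resulting pattern, assuming only `element_rng` and the `rand::Rng` trait; the probabilities mirror the buildings diff further down, and the function name here is illustrative:

```rust
use crate::deterministic_rng::element_rng;
use rand::Rng;

// Seeding from the OSM id means a building that straddles two regions gets the
// same style decisions every time it is processed.
fn pick_building_style(way_id: u64) -> (bool, bool) {
    let mut rng = element_rng(way_id);
    let use_vertical_windows = rng.gen_bool(0.7);
    let use_accent_roof_line = rng.gen_bool(0.25);
    (use_vertical_windows, use_accent_roof_line)
}
```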
@@ -2,11 +2,19 @@ use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::coordinate_system::cartesian::XZPoint;
use crate::floodfill::flood_fill_area;
use crate::deterministic_rng::element_rng;
use crate::floodfill::flood_fill_area; // Needed for inline amenity flood fills
use crate::floodfill_cache::FloodFillCache;
use crate::osm_parser::ProcessedElement;
use crate::world_editor::WorldEditor;
use rand::Rng;

pub fn generate_amenities(editor: &mut WorldEditor, element: &ProcessedElement, args: &Args) {
pub fn generate_amenities(
editor: &mut WorldEditor,
element: &ProcessedElement,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
// Skip if 'layer' or 'level' is negative in the tags
if let Some(layer) = element.tags().get("layer") {
if layer.parse::<i32>().unwrap_or(0) < 0 {
@@ -42,18 +50,14 @@ pub fn generate_amenities(editor: &mut WorldEditor, element: &ProcessedElement,
let ground_block: Block = OAK_PLANKS;
let roof_block: Block = STONE_BLOCK_SLAB;

let polygon_coords: Vec<(i32, i32)> = element
.nodes()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
// Use pre-computed flood fill from cache
let floor_area: Vec<(i32, i32)> =
flood_fill_cache.get_or_compute_element(element, args.timeout.as_ref());

if polygon_coords.is_empty() {
if floor_area.is_empty() {
return;
}

let floor_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());

// Fill the floor area
for (x, z) in floor_area.iter() {
editor.set_block(ground_block, *x, 0, *z, None, None);
@@ -80,8 +84,10 @@ pub fn generate_amenities(editor: &mut WorldEditor, element: &ProcessedElement,
"bench" => {
// Place a bench
if let Some(pt) = first_node {
// 50% chance to 90 degrees rotate the bench using if
if rand::random::<bool>() {
// Use deterministic RNG for consistent bench orientation across region boundaries
let mut rng = element_rng(element.id());
// 50% chance to 90 degrees rotate the bench
if rng.gen_bool(0.5) {
editor.set_block(SMOOTH_STONE, pt.x, 1, pt.z, None, None);
editor.set_block(OAK_LOG, pt.x + 1, 1, pt.z, None, None);
editor.set_block(OAK_LOG, pt.x - 1, 1, pt.z, None, None);
@@ -95,12 +101,9 @@ pub fn generate_amenities(editor: &mut WorldEditor, element: &ProcessedElement,
"shelter" => {
let roof_block: Block = STONE_BRICK_SLAB;

let polygon_coords: Vec<(i32, i32)> = element
.nodes()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
// Use pre-computed flood fill from cache
let roof_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
flood_fill_cache.get_or_compute_element(element, args.timeout.as_ref());

// Place fences and roof slabs at each corner node directly
for node in element.nodes() {
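`generate_amenities` and the other processors now receive a `&FloodFillCache` and ask it for the filled area instead of flood-filling inline. The cache type itself (src/floodfill_cache.rs) is not part of this comparison, so the following is only a sketch of the interface these call sites assume; the real implementation may differ in storage layout, parallel precomputation, and the `get_or_compute_element` variant used for `ProcessedElement` values:

```rust
// Hypothetical sketch, not repository code.
use crate::floodfill::flood_fill_area;
use crate::osm_parser::ProcessedWay;
use std::collections::HashMap;
use std::time::Duration;

pub struct FloodFillCache {
    by_way_id: HashMap<u64, Vec<(i32, i32)>>, // way id -> pre-computed interior blocks
}

impl FloodFillCache {
    /// Return the cached fill for a way, or compute it on the spot as a fallback.
    pub fn get_or_compute(&self, way: &ProcessedWay, timeout: Option<&Duration>) -> Vec<(i32, i32)> {
        if let Some(area) = self.by_way_id.get(&way.id) {
            return area.clone();
        }
        let coords: Vec<(i32, i32)> = way.nodes.iter().map(|n| (n.x, n.z)).collect();
        flood_fill_area(&coords, timeout)
    }

    /// Drop the entry once the element has been fully processed to free memory early.
    pub fn remove_way(&mut self, way_id: u64) {
        self.by_way_id.remove(&way_id);
    }
}
```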
@@ -3,8 +3,9 @@ use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::colors::color_text_to_rgb_tuple;
use crate::coordinate_system::cartesian::XZPoint;
use crate::deterministic_rng::element_rng;
use crate::element_processing::subprocessor::buildings_interior::generate_building_interior;
use crate::floodfill::flood_fill_area;
use crate::floodfill_cache::FloodFillCache;
use crate::osm_parser::{ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;
@@ -28,6 +29,7 @@ pub fn generate_buildings(
element: &ProcessedWay,
args: &Args,
relation_levels: Option<i32>,
flood_fill_cache: &FloodFillCache,
) {
// Get min_level first so we can use it both for start_level and building height calculations
let min_level = if let Some(min_level_str) = element.tags.get("building:min_level") {
@@ -43,10 +45,9 @@ pub fn generate_buildings(
let scale_factor = args.scale;
let min_level_offset = multiply_scale(min_level * 4, scale_factor);

// Cache floodfill result: compute once and reuse throughout
let polygon_coords: Vec<(i32, i32)> = element.nodes.iter().map(|n| (n.x, n.z)).collect();
// Use pre-computed flood fill from cache
let cached_floor_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
flood_fill_cache.get_or_compute(element, args.timeout.as_ref());
let cached_footprint_size = cached_floor_area.len();

// Use fixed starting Y coordinate based on maximum ground level when terrain is enabled
@@ -121,7 +122,8 @@ pub fn generate_buildings(
let mut processed_points: HashSet<(i32, i32)> = HashSet::new();
let mut building_height: i32 = ((6.0 * scale_factor) as i32).max(3); // Default building height with scale and minimum
let mut is_tall_building = false;
let mut rng = rand::thread_rng();
// Use deterministic RNG seeded by element ID for consistent results across region boundaries
let mut rng = element_rng(element.id);
let use_vertical_windows = rng.gen_bool(0.7);
let use_accent_roof_line = rng.gen_bool(0.25);

@@ -386,7 +388,7 @@ pub fn generate_buildings(
building_height = ((23.0 * scale_factor) as i32).max(3);
}
} else if building_type == "bridge" {
generate_bridge(editor, element, args.timeout.as_ref());
generate_bridge(editor, element, flood_fill_cache, args.timeout.as_ref());
return;
}
}
@@ -1484,6 +1486,7 @@ pub fn generate_building_from_relation(
editor: &mut WorldEditor,
relation: &ProcessedRelation,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
// Extract levels from relation tags
let relation_levels = relation
@@ -1495,7 +1498,13 @@ pub fn generate_building_from_relation(
// Process the outer way to create the building walls
for member in &relation.members {
if member.role == ProcessedMemberRole::Outer {
generate_buildings(editor, &member.way, args, Some(relation_levels));
generate_buildings(
editor,
&member.way,
args,
Some(relation_levels),
flood_fill_cache,
);
}
}

@@ -1519,28 +1528,29 @@ pub fn generate_building_from_relation(
fn generate_bridge(
editor: &mut WorldEditor,
element: &ProcessedWay,
flood_fill_cache: &FloodFillCache,
floodfill_timeout: Option<&Duration>,
) {
let floor_block: Block = STONE;
let railing_block: Block = STONE_BRICKS;

// Calculate bridge level based on the "level" tag (computed once, used throughout)
let bridge_y_offset = if let Some(level_str) = element.tags.get("level") {
if let Ok(level) = level_str.parse::<i32>() {
(level * 3) + 1
} else {
1 // Default elevation
}
} else {
1 // Default elevation
};

// Process the nodes to create bridge pathways and railings
let mut previous_node: Option<(i32, i32)> = None;
for node in &element.nodes {
let x: i32 = node.x;
let z: i32 = node.z;

// Calculate bridge level based on the "level" tag
let bridge_y_offset = if let Some(level_str) = element.tags.get("level") {
if let Ok(level) = level_str.parse::<i32>() {
(level * 3) + 1
} else {
1 // Default elevation
}
} else {
1 // Default elevation
};

// Create bridge path using Bresenham's line
if let Some(prev) = previous_node {
let bridge_points: Vec<(i32, i32, i32)> =
@@ -1556,21 +1566,8 @@ fn generate_bridge(
previous_node = Some((x, z));
}

// Flood fill the area between the bridge path nodes
let polygon_coords: Vec<(i32, i32)> = element.nodes.iter().map(|n| (n.x, n.z)).collect();

let bridge_area: Vec<(i32, i32)> = flood_fill_area(&polygon_coords, floodfill_timeout);

// Calculate bridge level based on the "level" tag
let bridge_y_offset = if let Some(level_str) = element.tags.get("level") {
if let Ok(level) = level_str.parse::<i32>() {
(level * 3) + 1
} else {
1 // Default elevation
}
} else {
1 // Default elevation
};
// Flood fill the area between the bridge path nodes (uses cache)
let bridge_area: Vec<(i32, i32)> = flood_fill_cache.get_or_compute(element, floodfill_timeout);

// Place floor blocks
for (x, z) in bridge_area {
@@ -2,7 +2,7 @@ use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::coordinate_system::cartesian::XZPoint;
use crate::floodfill::flood_fill_area;
use crate::floodfill_cache::FloodFillCache;
use crate::osm_parser::{ProcessedElement, ProcessedWay};
use crate::world_editor::WorldEditor;
use std::collections::HashMap;
@@ -16,8 +16,15 @@ pub fn generate_highways(
element: &ProcessedElement,
args: &Args,
highway_connectivity: &HighwayConnectivityMap,
flood_fill_cache: &FloodFillCache,
) {
generate_highways_internal(editor, element, args, highway_connectivity);
generate_highways_internal(
editor,
element,
args,
highway_connectivity,
flood_fill_cache,
);
}

/// Build a connectivity map for highway endpoints to determine where slopes are needed.
@@ -66,6 +73,7 @@ fn generate_highways_internal(
element: &ProcessedElement,
args: &Args,
highway_connectivity: &HashMap<(i32, i32), Vec<i32>>, // Maps node coordinates to list of layers that connect to this node
flood_fill_cache: &FloodFillCache,
) {
if let Some(highway_type) = element.tags().get("highway") {
if highway_type == "street_lamp" {
@@ -137,14 +145,9 @@ fn generate_highways_internal(
};
}

// Fill the area using flood fill or by iterating through the nodes
let polygon_coords: Vec<(i32, i32)> = way
.nodes
.iter()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
// Fill the area using flood fill cache
let filled_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
flood_fill_cache.get_or_compute(way, args.timeout.as_ref());

for (x, z) in filled_area {
editor.set_block(surface_block, x, 0, z, None, None);
@@ -1,12 +1,18 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::deterministic_rng::element_rng;
use crate::element_processing::tree::Tree;
use crate::floodfill::flood_fill_area;
use crate::floodfill_cache::FloodFillCache;
use crate::osm_parser::{ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;

pub fn generate_landuse(editor: &mut WorldEditor, element: &ProcessedWay, args: &Args) {
pub fn generate_landuse(
editor: &mut WorldEditor,
element: &ProcessedWay,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
// Determine block type based on landuse tag
let binding: String = "".to_string();
let landuse_tag: &String = element.tags.get("landuse").unwrap_or(&binding);
@@ -44,11 +50,12 @@ pub fn generate_landuse(editor: &mut WorldEditor, element: &ProcessedWay, args:
_ => GRASS_BLOCK,
};

// Get the area of the landuse element
let polygon_coords: Vec<(i32, i32)> = element.nodes.iter().map(|n| (n.x, n.z)).collect();
let floor_area: Vec<(i32, i32)> = flood_fill_area(&polygon_coords, args.timeout.as_ref());
// Get the area of the landuse element using cache
let floor_area: Vec<(i32, i32)> =
flood_fill_cache.get_or_compute(element, args.timeout.as_ref());

let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
// Use deterministic RNG seeded by element ID for consistent results across region boundaries
let mut rng = element_rng(element.id);

for (x, z) in floor_area {
if landuse_tag == "traffic_island" {
@@ -275,12 +282,13 @@ pub fn generate_landuse_from_relation(
editor: &mut WorldEditor,
rel: &ProcessedRelation,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
if rel.tags.contains_key("landuse") {
// Generate individual ways with their original tags
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
generate_landuse(editor, &member.way.clone(), args);
generate_landuse(editor, &member.way.clone(), args, flood_fill_cache);
}
}

@@ -302,7 +310,7 @@ pub fn generate_landuse_from_relation(
};

// Generate landuse area from combined way
generate_landuse(editor, &combined_way, args);
generate_landuse(editor, &combined_way, args, flood_fill_cache);
}
}
}
@@ -1,13 +1,19 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::deterministic_rng::element_rng;
use crate::element_processing::tree::Tree;
use crate::floodfill::flood_fill_area;
use crate::floodfill_cache::FloodFillCache;
use crate::osm_parser::{ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;

pub fn generate_leisure(editor: &mut WorldEditor, element: &ProcessedWay, args: &Args) {
pub fn generate_leisure(
editor: &mut WorldEditor,
element: &ProcessedWay,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
if let Some(leisure_type) = element.tags.get("leisure") {
let mut previous_node: Option<(i32, i32)> = None;
let mut corner_addup: (i32, i32, i32) = (0, 0, 0);
@@ -74,15 +80,13 @@ pub fn generate_leisure(editor: &mut WorldEditor, element: &ProcessedWay, args:
previous_node = Some((node.x, node.z));
}

// Flood-fill the interior of the leisure area
// Flood-fill the interior of the leisure area using cache
if corner_addup != (0, 0, 0) {
let polygon_coords: Vec<(i32, i32)> = element
.nodes
.iter()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
let filled_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
flood_fill_cache.get_or_compute(element, args.timeout.as_ref());

// Use deterministic RNG seeded by element ID for consistent results across region boundaries
let mut rng = element_rng(element.id);

for (x, z) in filled_area {
editor.set_block(block_type, x, 0, z, Some(&[GRASS_BLOCK]), None);
@@ -91,7 +95,6 @@ pub fn generate_leisure(editor: &mut WorldEditor, element: &ProcessedWay, args:
if matches!(leisure_type.as_str(), "park" | "garden" | "nature_reserve")
&& editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK]))
{
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
let random_choice: i32 = rng.gen_range(0..1000);

match random_choice {
@@ -123,7 +126,6 @@ pub fn generate_leisure(editor: &mut WorldEditor, element: &ProcessedWay, args:

// Add playground or recreation ground features
if matches!(leisure_type.as_str(), "playground" | "recreation_ground") {
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
let random_choice: i32 = rng.gen_range(0..5000);

match random_choice {
@@ -176,12 +178,13 @@ pub fn generate_leisure_from_relation(
editor: &mut WorldEditor,
rel: &ProcessedRelation,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
if rel.tags.get("leisure") == Some(&"park".to_string()) {
// First generate individual ways with their original tags
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
generate_leisure(editor, &member.way, args);
generate_leisure(editor, &member.way, args, flood_fill_cache);
}
}

@@ -201,6 +204,6 @@ pub fn generate_leisure_from_relation(
};

// Generate leisure area from combined way
generate_leisure(editor, &combined_way, args);
generate_leisure(editor, &combined_way, args, flood_fill_cache);
}
}
@@ -1,13 +1,19 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::deterministic_rng::element_rng;
use crate::element_processing::tree::Tree;
use crate::floodfill::flood_fill_area;
use crate::floodfill_cache::FloodFillCache;
use crate::osm_parser::{ProcessedElement, ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;

pub fn generate_natural(editor: &mut WorldEditor, element: &ProcessedElement, args: &Args) {
pub fn generate_natural(
editor: &mut WorldEditor,
element: &ProcessedElement,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
if let Some(natural_type) = element.tags().get("natural") {
if natural_type == "tree" {
if let ProcessedElement::Node(node) = element {
@@ -69,17 +75,13 @@ pub fn generate_natural(editor: &mut WorldEditor, element: &ProcessedElement, ar
previous_node = Some((x, z));
}

// If there are natural nodes, flood-fill the area
// If there are natural nodes, flood-fill the area using cache
if corner_addup != (0, 0, 0) {
let polygon_coords: Vec<(i32, i32)> = way
.nodes
.iter()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
let filled_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
flood_fill_cache.get_or_compute(way, args.timeout.as_ref());

let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
// Use deterministic RNG seeded by element ID for consistent results across region boundaries
let mut rng = element_rng(way.id);

for (x, z) in filled_area {
editor.set_block(block_type, x, 0, z, None, None);
@@ -448,12 +450,18 @@ pub fn generate_natural_from_relation(
editor: &mut WorldEditor,
rel: &ProcessedRelation,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
if rel.tags.contains_key("natural") {
// Generate individual ways with their original tags
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
generate_natural(editor, &ProcessedElement::Way(member.way.clone()), args);
generate_natural(
editor,
&ProcessedElement::Way((*member.way).clone()),
args,
flood_fill_cache,
);
}
}

@@ -475,7 +483,12 @@ pub fn generate_natural_from_relation(
};

// Generate natural area from combined way
generate_natural(editor, &ProcessedElement::Way(combined_way), args);
generate_natural(
editor,
&ProcessedElement::Way(combined_way),
args,
flood_fill_cache,
);
}
}
}
@@ -1,4 +1,5 @@
use crate::block_definitions::*;
use crate::deterministic_rng::coord_rng;
use crate::world_editor::WorldEditor;
use rand::Rng;

@@ -115,7 +116,9 @@ impl Tree<'_> {
blacklist.extend(Self::get_functional_blocks());
blacklist.push(WATER);

let mut rng = rand::thread_rng();
// Use deterministic RNG based on coordinates for consistent tree types across region boundaries
// The element_id of 0 is used as a salt for tree-specific randomness
let mut rng = coord_rng(x, z, 0);

let tree = Self::get_tree(match rng.gen_range(1..=3) {
1 => TreeType::Oak,
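`Tree` now seeds its RNG from the block position with `coord_rng(x, z, 0)`, so the species chosen at a given coordinate no longer depends on processing order. A worked example of the packing arithmetic inside `coord_rng` (illustration only, not repository code):

```rust
fn main() {
    // coord_rng packs x and z into one 64-bit value before XOR-ing the element id.
    // Casting through u32 keeps each coordinate in its own 32-bit half; a direct
    // i32 -> i64 cast of a negative z would sign-extend into the upper half.
    let (x, z): (i32, i32) = (-100, -200);
    let coord_part = ((x as u32 as i64) << 32) | (z as u32 as i64);
    assert_eq!(coord_part as u64, 0xFFFF_FF9C_FFFF_FF38);

    // The naive packing wipes out the x half entirely whenever z is negative:
    let naive = ((x as i64) << 32) | (z as i64);
    assert_eq!(naive as u64, 0xFFFF_FFFF_FFFF_FF38);
}
```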
@@ -1,6 +1,5 @@
use geo::orient::{Direction, Orient};
use geo::{Contains, Intersects, LineString, Point, Polygon, Rect};
use std::time::Instant;

use crate::clipping::clip_water_ring_to_bbox;
use crate::{
@@ -15,15 +14,13 @@ pub fn generate_water_area_from_way(
element: &ProcessedWay,
_xzbbox: &XZBBox,
) {
let start_time = Instant::now();

let outers = [element.nodes.clone()];
if !verify_closed_rings(&outers) {
println!("Skipping way {} due to invalid polygon", element.id);
return;
}

generate_water_areas(editor, &outers, &[], start_time);
generate_water_areas(editor, &outers, &[]);
}

pub fn generate_water_areas_from_relation(
@@ -31,8 +28,6 @@ pub fn generate_water_areas_from_relation(
element: &ProcessedRelation,
xzbbox: &XZBBox,
) {
let start_time = Instant::now();

// Check if this is a water relation (either with water tag or natural=water)
let is_water = element.tags.contains_key("water")
|| element
@@ -123,14 +118,13 @@ pub fn generate_water_areas_from_relation(
return;
}

generate_water_areas(editor, &outers, &inners, start_time);
generate_water_areas(editor, &outers, &inners);
}

fn generate_water_areas(
editor: &mut WorldEditor,
outers: &[Vec<ProcessedNode>],
inners: &[Vec<ProcessedNode>],
start_time: Instant,
) {
// Calculate polygon bounding box to limit fill area
let mut poly_min_x = i32::MAX;
@@ -169,9 +163,7 @@ fn generate_water_areas(
.map(|x| x.iter().map(|y| y.xz()).collect::<Vec<_>>())
.collect();

inverse_floodfill(
min_x, min_z, max_x, max_z, outers_xz, inners_xz, editor, start_time,
);
inverse_floodfill(min_x, min_z, max_x, max_z, outers_xz, inners_xz, editor);
}

/// Merges way segments that share endpoints into closed rings.
@@ -308,7 +300,6 @@ fn inverse_floodfill(
outers: Vec<Vec<XZPoint>>,
inners: Vec<Vec<XZPoint>>,
editor: &mut WorldEditor,
start_time: Instant,
) {
// Convert to geo Polygons with normalized winding order
let inners: Vec<_> = inners
@@ -341,14 +332,7 @@ fn inverse_floodfill(
})
.collect();

inverse_floodfill_recursive(
(min_x, min_z),
(max_x, max_z),
&outers,
&inners,
editor,
start_time,
);
inverse_floodfill_recursive((min_x, min_z), (max_x, max_z), &outers, &inners, editor);
}

fn inverse_floodfill_recursive(
@@ -357,12 +341,11 @@ fn inverse_floodfill_recursive(
outers: &[Polygon],
inners: &[Polygon],
editor: &mut WorldEditor,
start_time: Instant,
) {
// Check if we've exceeded 25 seconds
if start_time.elapsed().as_secs() > 25 {
println!("Water area generation exceeded 25 seconds, continuing anyway");
}
// Check if we've exceeded 40 seconds
// if start_time.elapsed().as_secs() > 40 {
// println!("Water area generation exceeded 40 seconds, continuing anyway");
// }

const ITERATIVE_THRES: i64 = 10_000;

@@ -417,7 +400,6 @@ fn inverse_floodfill_recursive(
&outers_intersects,
&inners_intersects,
editor,
start_time,
);
}
}
@@ -2,7 +2,8 @@ use crate::coordinate_system::{geographic::LLBBox, transformation::geo_distance}
#[cfg(feature = "gui")]
use crate::telemetry::{send_log, LogLevel};
use image::Rgb;
use std::path::Path;
use rayon::prelude::*;
use std::path::{Path, PathBuf};

/// Maximum Y coordinate in Minecraft (build height limit)
const MAX_Y: i32 = 319;
@@ -17,6 +18,8 @@ const TERRARIUM_OFFSET: f64 = 32768.0;
const MIN_ZOOM: u8 = 10;
/// Maximum zoom level for terrain tiles
const MAX_ZOOM: u8 = 15;
/// Maximum concurrent tile downloads to be respectful to AWS
const MAX_CONCURRENT_DOWNLOADS: usize = 8;

/// Holds processed elevation data and metadata
#[derive(Clone)]
@@ -29,6 +32,11 @@ pub struct ElevationData {
pub(crate) height: usize,
}

/// RGB image buffer type for elevation tiles
type TileImage = image::ImageBuffer<Rgb<u8>, Vec<u8>>;
/// Result type for tile download operations: ((tile_x, tile_y), image) or error
type TileDownloadResult = Result<((u32, u32), TileImage), String>;

/// Calculates appropriate zoom level for the given bounding box
fn calculate_zoom_level(bbox: &LLBBox) -> u8 {
let lat_diff: f64 = (bbox.max().lat() - bbox.min().lat()).abs();
@@ -53,21 +61,103 @@ fn download_tile(
tile_y: u32,
zoom: u8,
tile_path: &Path,
) -> Result<image::ImageBuffer<Rgb<u8>, Vec<u8>>, Box<dyn std::error::Error>> {
) -> Result<image::ImageBuffer<Rgb<u8>, Vec<u8>>, String> {
println!("Fetching tile x={tile_x},y={tile_y},z={zoom} from AWS Terrain Tiles");
let url: String = AWS_TERRARIUM_URL
.replace("{z}", &zoom.to_string())
.replace("{x}", &tile_x.to_string())
.replace("{y}", &tile_y.to_string());

let response: reqwest::blocking::Response = client.get(&url).send()?;
response.error_for_status_ref()?;
let bytes = response.bytes()?;
std::fs::write(tile_path, &bytes)?;
let img: image::DynamicImage = image::load_from_memory(&bytes)?;
let response = client.get(&url).send().map_err(|e| e.to_string())?;
response.error_for_status_ref().map_err(|e| e.to_string())?;
let bytes = response.bytes().map_err(|e| e.to_string())?;
std::fs::write(tile_path, &bytes).map_err(|e| e.to_string())?;
let img = image::load_from_memory(&bytes).map_err(|e| e.to_string())?;
Ok(img.to_rgb8())
}

/// Fetches a tile from cache or downloads it if not available
/// Note: In parallel execution, multiple threads may attempt to download the same tile
/// if it's missing or corrupted. This is harmless (just wastes some bandwidth) as
/// file writes are atomic at the OS level.
fn fetch_or_load_tile(
client: &reqwest::blocking::Client,
tile_x: u32,
tile_y: u32,
zoom: u8,
tile_path: &Path,
) -> Result<image::ImageBuffer<Rgb<u8>, Vec<u8>>, String> {
if tile_path.exists() {
// Check if the cached file has a reasonable size (PNG files should be at least a few KB)
let file_size = std::fs::metadata(tile_path).map(|m| m.len()).unwrap_or(0);

if file_size < 1000 {
eprintln!(
"Warning: Cached tile at {} appears to be too small ({} bytes). Refetching tile.",
tile_path.display(),
file_size
);
#[cfg(feature = "gui")]
send_log(
LogLevel::Warning,
"Cached tile appears too small, refetching.",
);

// Remove the potentially corrupted file
if let Err(e) = std::fs::remove_file(tile_path) {
eprintln!("Warning: Failed to remove corrupted tile file: {e}");
#[cfg(feature = "gui")]
send_log(
LogLevel::Warning,
"Failed to remove corrupted tile file during refetching.",
);
}

// Re-download the tile
return download_tile(client, tile_x, tile_y, zoom, tile_path);
}

// Try to load cached tile, but handle corruption gracefully
match image::open(tile_path) {
Ok(img) => {
println!(
"Loading cached tile x={tile_x},y={tile_y},z={zoom} from {}",
tile_path.display()
);
Ok(img.to_rgb8())
}
Err(e) => {
eprintln!(
"Cached tile at {} is corrupted or invalid: {}. Re-downloading...",
tile_path.display(),
e
);
#[cfg(feature = "gui")]
send_log(
LogLevel::Warning,
"Cached tile is corrupted or invalid. Re-downloading...",
);

// Remove the corrupted file
if let Err(e) = std::fs::remove_file(tile_path) {
eprintln!("Warning: Failed to remove corrupted tile file: {e}");
#[cfg(feature = "gui")]
send_log(
LogLevel::Warning,
"Failed to remove corrupted tile file during re-download.",
);
}

// Re-download the tile
download_tile(client, tile_x, tile_y, zoom, tile_path)
}
}
} else {
// Download the tile for the first time
download_tile(client, tile_x, tile_y, zoom, tile_path)
}
}

pub fn fetch_elevation_data(
bbox: &LLBBox,
scale: f64,
@@ -91,101 +181,68 @@ pub fn fetch_elevation_data(
let mut height_grid: Vec<Vec<f64>> = vec![vec![f64::NAN; grid_width]; grid_height];
let mut extreme_values_found = Vec::new(); // Track extreme values for debugging

let client: reqwest::blocking::Client = reqwest::blocking::Client::new();

let tile_cache_dir = Path::new("./arnis-tile-cache");
let tile_cache_dir = PathBuf::from("./arnis-tile-cache");
if !tile_cache_dir.exists() {
std::fs::create_dir_all(tile_cache_dir)?;
std::fs::create_dir_all(&tile_cache_dir)?;
}

// Fetch and process each tile
for (tile_x, tile_y) in &tiles {
// Check if tile is already cached
let tile_path = tile_cache_dir.join(format!("z{zoom}_x{tile_x}_y{tile_y}.png"));
// Create a shared HTTP client for connection pooling
let client = reqwest::blocking::Client::new();

let rgb_img: image::ImageBuffer<Rgb<u8>, Vec<u8>> = if tile_path.exists() {
// Check if the cached file has a reasonable size (PNG files should be at least a few KB)
let file_size = match std::fs::metadata(&tile_path) {
Ok(metadata) => metadata.len(),
Err(_) => 0,
};
// Download tiles in parallel with limited concurrency to be respectful to AWS
let num_tiles = tiles.len();
println!(
"Downloading {num_tiles} elevation tiles (up to {MAX_CONCURRENT_DOWNLOADS} concurrent)..."
);

if file_size < 1000 {
eprintln!(
"Warning: Cached tile at {} appears to be too small ({} bytes). Refetching tile.",
tile_path.display(),
file_size
// Use a custom thread pool to limit concurrent downloads
let thread_pool = rayon::ThreadPoolBuilder::new()
.num_threads(MAX_CONCURRENT_DOWNLOADS)
.build()
.map_err(|e| format!("Failed to create thread pool: {e}"))?;

let downloaded_tiles: Vec<TileDownloadResult> = thread_pool.install(|| {
tiles
.par_iter()
.map(|(tile_x, tile_y)| {
let tile_path = tile_cache_dir.join(format!("z{zoom}_x{tile_x}_y{tile_y}.png"));

let rgb_img = fetch_or_load_tile(&client, *tile_x, *tile_y, zoom, &tile_path)?;
Ok(((*tile_x, *tile_y), rgb_img))
})
.collect()
});

// Check for any download errors
let mut successful_tiles = Vec::new();
for result in downloaded_tiles {
match result {
Ok(tile_data) => successful_tiles.push(tile_data),
Err(e) => {
eprintln!("Warning: Failed to download tile: {e}");
#[cfg(feature = "gui")]
send_log(
LogLevel::Warning,
&format!("Failed to download elevation tile: {e}"),
);

// Remove the potentially corrupted file
if let Err(remove_err) = std::fs::remove_file(&tile_path) {
eprintln!(
"Warning: Failed to remove corrupted tile file: {}",
remove_err
);
#[cfg(feature = "gui")]
send_log(
LogLevel::Warning,
"Failed to remove corrupted tile file during refetching.",
);
}

// Re-download the tile
download_tile(&client, *tile_x, *tile_y, zoom, &tile_path)?
} else {
println!(
"Loading cached tile x={tile_x},y={tile_y},z={zoom} from {}",
tile_path.display()
);

// Try to load cached tile, but handle corruption gracefully
match image::open(&tile_path) {
Ok(img) => img.to_rgb8(),
|
||||
Err(e) => {
|
||||
eprintln!(
|
||||
"Cached tile at {} is corrupted or invalid: {}. Re-downloading...",
|
||||
tile_path.display(),
|
||||
e
|
||||
);
|
||||
#[cfg(feature = "gui")]
|
||||
send_log(
|
||||
LogLevel::Warning,
|
||||
"Cached tile is corrupted or invalid. Re-downloading...",
|
||||
);
|
||||
|
||||
// Remove the corrupted file
|
||||
if let Err(remove_err) = std::fs::remove_file(&tile_path) {
|
||||
eprintln!(
|
||||
"Warning: Failed to remove corrupted tile file: {}",
|
||||
remove_err
|
||||
);
|
||||
#[cfg(feature = "gui")]
|
||||
send_log(
|
||||
LogLevel::Warning,
|
||||
"Failed to remove corrupted tile file during re-download.",
|
||||
);
|
||||
}
|
||||
|
||||
// Re-download the tile
|
||||
download_tile(&client, *tile_x, *tile_y, zoom, &tile_path)?
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Download the tile for the first time
|
||||
download_tile(&client, *tile_x, *tile_y, zoom, &tile_path)?
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
println!("Processing {} elevation tiles...", successful_tiles.len());
|
||||
|
||||
// Process tiles sequentially (writes to shared height_grid)
|
||||
for ((tile_x, tile_y), rgb_img) in successful_tiles {
|
||||
// Only process pixels that fall within the requested bbox
|
||||
for (y, row) in rgb_img.rows().enumerate() {
|
||||
for (x, pixel) in row.enumerate() {
|
||||
// Convert tile pixel coordinates back to geographic coordinates
|
||||
let pixel_lng = ((*tile_x as f64 + x as f64 / 256.0) / (2.0_f64.powi(zoom as i32)))
|
||||
let pixel_lng = ((tile_x as f64 + x as f64 / 256.0) / (2.0_f64.powi(zoom as i32)))
|
||||
* 360.0
|
||||
- 180.0;
|
||||
let pixel_lat_rad = std::f64::consts::PI
|
||||
* (1.0
|
||||
- 2.0 * (*tile_y as f64 + y as f64 / 256.0) / (2.0_f64.powi(zoom as i32)));
|
||||
- 2.0 * (tile_y as f64 + y as f64 / 256.0) / (2.0_f64.powi(zoom as i32)));
|
||||
let pixel_lat = pixel_lat_rad.sinh().atan().to_degrees();
|
||||
|
||||
// Skip pixels outside the requested bounding box
|
||||
@@ -275,6 +332,9 @@ pub fn fetch_elevation_data(
|
||||
// Continue with the existing blur and conversion to Minecraft heights...
|
||||
let blurred_heights: Vec<Vec<f64>> = apply_gaussian_blur(&height_grid, sigma);
|
||||
|
||||
// Release raw height grid
|
||||
drop(height_grid);
|
||||
|
||||
let mut mc_heights: Vec<Vec<i32>> = Vec::with_capacity(blurred_heights.len());
|
||||
|
||||
// Find min/max in raw data
|
||||
|
||||
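The rest of this hunk (trimmed here) scans the blurred grid for its minimum and maximum before converting to Minecraft heights. That conversion is not shown in this excerpt; the following is only a rough sketch of what a linear min/max normalization could look like. The output Y range and the function name are assumptions made for illustration, not the project's actual values.

fn to_mc_heights(blurred: &[Vec<f64>], min: f64, max: f64) -> Vec<Vec<i32>> {
    // Assumed target range for illustration only.
    let (y_min, y_max) = (-60.0_f64, 100.0_f64);
    let span = (max - min).max(1e-9); // avoid division by zero on flat terrain
    blurred
        .iter()
        .map(|row| {
            row.iter()
                .map(|h| {
                    // Normalize each sample into [0, 1], then rescale to the Y range.
                    let t = ((h - min) / span).clamp(0.0, 1.0);
                    (y_min + t * (y_max - y_min)).round() as i32
                })
                .collect()
        })
        .collect()
}
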
189 src/floodfill_cache.rs Normal file
@@ -0,0 +1,189 @@
//! Pre-computed flood fill cache for parallel polygon filling.
//!
//! This module provides a way to pre-compute all flood fill operations in parallel
//! before the main element processing loop, then retrieve cached results during
//! sequential processing.

use crate::floodfill::flood_fill_area;
use crate::osm_parser::{ProcessedElement, ProcessedWay};
use fnv::FnvHashMap;
use rayon::prelude::*;
use std::time::Duration;

/// A cache of pre-computed flood fill results, keyed by element ID.
pub struct FloodFillCache {
    /// Cached results: element_id -> filled coordinates
    way_cache: FnvHashMap<u64, Vec<(i32, i32)>>,
}

impl FloodFillCache {
    /// Creates an empty cache.
    pub fn new() -> Self {
        Self {
            way_cache: FnvHashMap::default(),
        }
    }

    /// Pre-computes flood fills for all elements that need them.
    ///
    /// This runs in parallel using Rayon, taking advantage of multiple CPU cores.
    pub fn precompute(elements: &[ProcessedElement], timeout: Option<&Duration>) -> Self {
        // Collect all ways that need flood fill
        let ways_needing_fill: Vec<&ProcessedWay> = elements
            .iter()
            .filter_map(|el| match el {
                ProcessedElement::Way(way) => {
                    if Self::way_needs_flood_fill(way) {
                        Some(way)
                    } else {
                        None
                    }
                }
                _ => None,
            })
            .collect();

        // Compute all way flood fills in parallel
        let way_results: Vec<(u64, Vec<(i32, i32)>)> = ways_needing_fill
            .par_iter()
            .map(|way| {
                let polygon_coords: Vec<(i32, i32)> =
                    way.nodes.iter().map(|n| (n.x, n.z)).collect();
                let filled = flood_fill_area(&polygon_coords, timeout);
                (way.id, filled)
            })
            .collect();

        // Build the cache
        let mut cache = Self::new();
        for (id, filled) in way_results {
            cache.way_cache.insert(id, filled);
        }

        cache
    }

    /// Gets cached flood fill result for a way, or computes it if not cached.
    ///
    /// Note: Combined ways created from relations (e.g., in `generate_natural_from_relation`)
    /// will miss the cache and fall back to on-demand computation. This is by design:
    /// these synthetic ways don't exist in the original element list and have relation IDs
    /// rather than way IDs. The individual member ways are still cached.
    pub fn get_or_compute(
        &self,
        way: &ProcessedWay,
        timeout: Option<&Duration>,
    ) -> Vec<(i32, i32)> {
        if let Some(cached) = self.way_cache.get(&way.id) {
            // Clone is intentional: each result is typically accessed once during
            // sequential processing, so the cost is acceptable vs Arc complexity
            cached.clone()
        } else {
            // Fallback: compute on demand for synthetic/combined ways from relations
            let polygon_coords: Vec<(i32, i32)> = way.nodes.iter().map(|n| (n.x, n.z)).collect();
            flood_fill_area(&polygon_coords, timeout)
        }
    }

    /// Gets cached flood fill result for a ProcessedElement (Way only).
    /// For Nodes/Relations, returns empty vec.
    pub fn get_or_compute_element(
        &self,
        element: &ProcessedElement,
        timeout: Option<&Duration>,
    ) -> Vec<(i32, i32)> {
        match element {
            ProcessedElement::Way(way) => self.get_or_compute(way, timeout),
            _ => Vec::new(),
        }
    }

    /// Determines if a way element needs flood fill based on its tags.
    ///
    /// This checks for tag presence (not specific values) because:
    /// - Only some values within each tag type actually use flood fill
    /// - But caching extra results is harmless (small memory overhead)
    /// - And avoids duplicating value-checking logic from processors
    ///
    /// Covered cases:
    /// - building/building:part -> buildings::generate_buildings (includes bridge)
    /// - landuse -> landuse::generate_landuse
    /// - leisure -> leisure::generate_leisure
    /// - amenity -> amenities::generate_amenities
    /// - natural (except tree) -> natural::generate_natural
    /// - highway with area=yes -> highways::generate_highways (area fill)
    fn way_needs_flood_fill(way: &ProcessedWay) -> bool {
        way.tags.contains_key("building")
            || way.tags.contains_key("building:part")
            || way.tags.contains_key("landuse")
            || way.tags.contains_key("leisure")
            || way.tags.contains_key("amenity")
            || way
                .tags
                .get("natural")
                .map(|v| v != "tree")
                .unwrap_or(false)
            // Highway areas (like pedestrian plazas) use flood fill when area=yes
            || (way.tags.contains_key("highway")
                && way.tags.get("area").map(|v| v == "yes").unwrap_or(false))
    }

    /// Returns the number of cached way entries.
    pub fn way_count(&self) -> usize {
        self.way_cache.len()
    }

    /// Removes a way's cached flood fill result, freeing memory.
    ///
    /// Call this after processing an element to release its cached data.
    pub fn remove_way(&mut self, way_id: u64) {
        self.way_cache.remove(&way_id);
    }

    /// Removes all cached flood fill results for ways in a relation.
    ///
    /// Relations contain multiple ways, so we need to remove all of them.
    pub fn remove_relation_ways(&mut self, way_ids: &[u64]) {
        for &id in way_ids {
            self.way_cache.remove(&id);
        }
    }
}

impl Default for FloodFillCache {
    fn default() -> Self {
        Self::new()
    }
}

/// Configures the global Rayon thread pool with a CPU usage cap.
///
/// Call this once at startup before any parallel operations.
///
/// # Arguments
/// * `cpu_fraction` - Fraction of available cores to use (e.g., 0.9 for 90%).
///   Values are clamped to the range [0.1, 1.0].
pub fn configure_rayon_thread_pool(cpu_fraction: f64) {
    // Clamp cpu_fraction to valid range
    let cpu_fraction = cpu_fraction.clamp(0.1, 1.0);

    let available_cores = std::thread::available_parallelism()
        .map(|n| n.get())
        .unwrap_or(4);

    let target_threads = ((available_cores as f64) * cpu_fraction).floor() as usize;
    let target_threads = target_threads.max(1); // At least 1 thread

    // Only configure if we haven't already (this can only be called once)
    match rayon::ThreadPoolBuilder::new()
        .num_threads(target_threads)
        .build_global()
    {
        Ok(()) => {
            // Successfully configured (silent to avoid cluttering output)
        }
        Err(_) => {
            // Thread pool already configured
        }
    }
}
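A hypothetical usage sketch of the new module (not part of the diff): the element slice, the 40-second timeout, and the loop body are placeholders, but the call sequence mirrors what the doc comments above describe — cap the pool once, precompute in parallel, then consume and evict sequentially.

use std::time::Duration;

fn process_elements(elements: &[ProcessedElement]) {
    // Cap the global Rayon pool once at startup (same call the CLI/GUI make).
    configure_rayon_thread_pool(0.9);

    // Pre-compute every flood fill in parallel up front...
    let timeout = Some(Duration::from_secs(40));
    let mut cache = FloodFillCache::precompute(elements, timeout.as_ref());

    // ...then consume the cached results during sequential processing.
    for element in elements {
        let filled = cache.get_or_compute_element(element, timeout.as_ref());
        // Block placement for `filled` would happen here.
        if let ProcessedElement::Way(way) = element {
            cache.remove_way(way.id); // release the cached result once the way is done
        }
        let _ = filled;
    }
}
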
85 src/gui.rs
@@ -74,6 +74,9 @@ fn get_area_name_for_bedrock(bbox: &LLBBox) -> String {
|
||||
}
|
||||
|
||||
pub fn run_gui() {
|
||||
// Configure thread pool with 90% CPU cap to keep system responsive
|
||||
crate::floodfill_cache::configure_rayon_thread_pool(0.9);
|
||||
|
||||
// Launch the UI
|
||||
println!("Launching UI...");
|
||||
|
||||
@@ -794,7 +797,6 @@ fn gui_start_generation(
|
||||
selected_world: String,
|
||||
world_scale: f64,
|
||||
ground_level: i32,
|
||||
floodfill_timeout: u64,
|
||||
terrain_enabled: bool,
|
||||
skip_osm_objects: bool,
|
||||
interior_enabled: bool,
|
||||
@@ -848,16 +850,52 @@ fn gui_start_generation(
|
||||
|
||||
tauri::async_runtime::spawn(async move {
|
||||
if let Err(e) = tokio::task::spawn_blocking(move || {
|
||||
// Acquire session lock for the world directory before starting generation
|
||||
let world_path = PathBuf::from(&selected_world);
|
||||
let _session_lock = match SessionLock::acquire(&world_path) {
|
||||
Ok(lock) => lock,
|
||||
Err(e) => {
|
||||
let error_msg = format!("Failed to acquire session lock: {e}");
|
||||
|
||||
// Determine world format from UI selection first (needed for session lock decision)
|
||||
let world_format = if world_format == "bedrock" {
|
||||
WorldFormat::BedrockMcWorld
|
||||
} else {
|
||||
WorldFormat::JavaAnvil
|
||||
};
|
||||
|
||||
// Check available disk space before starting generation (minimum 3GB required)
|
||||
const MIN_DISK_SPACE_BYTES: u64 = 3 * 1024 * 1024 * 1024; // 3 GB
|
||||
let check_path = if world_format == WorldFormat::JavaAnvil {
|
||||
world_path.clone()
|
||||
} else {
|
||||
// For Bedrock, check current directory where .mcworld will be created
|
||||
std::env::current_dir().unwrap_or_else(|_| PathBuf::from("."))
|
||||
};
|
||||
match fs2::available_space(&check_path) {
|
||||
Ok(available) if available < MIN_DISK_SPACE_BYTES => {
|
||||
let error_msg = "Not enough disk space available.".to_string();
|
||||
eprintln!("{error_msg}");
|
||||
emit_gui_error(&error_msg);
|
||||
return Err(error_msg);
|
||||
}
|
||||
Err(e) => {
|
||||
// Log warning but don't block generation if we can't check space
|
||||
eprintln!("Warning: Could not check disk space: {e}");
|
||||
}
|
||||
_ => {} // Sufficient space available
|
||||
}
|
||||
|
||||
// Acquire session lock for Java worlds only
|
||||
// Session lock prevents Minecraft from having the world open during generation
|
||||
// Bedrock worlds are generated as .mcworld files and don't need this lock
|
||||
let _session_lock: Option<SessionLock> = if world_format == WorldFormat::JavaAnvil {
|
||||
match SessionLock::acquire(&world_path) {
|
||||
Ok(lock) => Some(lock),
|
||||
Err(e) => {
|
||||
let error_msg = format!("Failed to acquire session lock: {e}");
|
||||
eprintln!("{error_msg}");
|
||||
emit_gui_error(&error_msg);
|
||||
return Err(error_msg);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
// Parse the bounding box from the text with proper error handling
|
||||
@@ -871,13 +909,6 @@ fn gui_start_generation(
|
||||
}
|
||||
};
|
||||
|
||||
// Determine world format from UI selection
|
||||
let world_format = if world_format == "bedrock" {
|
||||
WorldFormat::BedrockMcWorld
|
||||
} else {
|
||||
WorldFormat::JavaAnvil
|
||||
};
|
||||
|
||||
// Determine output path and level name based on format
|
||||
let (generation_path, level_name) = match world_format {
|
||||
WorldFormat::JavaAnvil => {
|
||||
@@ -946,7 +977,7 @@ fn gui_start_generation(
|
||||
roof: roof_enabled,
|
||||
fillground: fillground_enabled,
|
||||
debug: false,
|
||||
timeout: Some(std::time::Duration::from_secs(floodfill_timeout)),
|
||||
timeout: Some(std::time::Duration::from_secs(40)),
|
||||
spawn_point,
|
||||
};
|
||||
|
||||
@@ -963,17 +994,27 @@ fn gui_start_generation(
|
||||
|
||||
let _ = data_processing::generate_world_with_options(
|
||||
parsed_elements,
|
||||
xzbbox,
|
||||
xzbbox.clone(),
|
||||
args.bbox,
|
||||
ground,
|
||||
&args,
|
||||
generation_options,
|
||||
generation_options.clone(),
|
||||
);
|
||||
// Explicitly release session lock before showing Done message
|
||||
// so Minecraft can open the world immediately
|
||||
drop(_session_lock);
|
||||
emit_gui_progress_update(100.0, "Done! World generation completed.");
|
||||
println!("{}", "Done! World generation completed.".green().bold());
|
||||
|
||||
// Start map preview generation silently in background (Java only)
|
||||
if world_format == WorldFormat::JavaAnvil {
|
||||
let preview_info = data_processing::MapPreviewInfo::new(
|
||||
generation_options.path.clone(),
|
||||
&xzbbox,
|
||||
);
|
||||
data_processing::start_map_preview_generation(preview_info);
|
||||
}
|
||||
|
||||
return Ok(());
|
||||
}
|
||||
|
||||
@@ -1006,7 +1047,7 @@ fn gui_start_generation(
|
||||
|
||||
let _ = data_processing::generate_world_with_options(
|
||||
parsed_elements,
|
||||
xzbbox,
|
||||
xzbbox.clone(),
|
||||
args.bbox,
|
||||
ground,
|
||||
&args,
|
||||
@@ -1017,6 +1058,16 @@ fn gui_start_generation(
|
||||
drop(_session_lock);
|
||||
emit_gui_progress_update(100.0, "Done! World generation completed.");
|
||||
println!("{}", "Done! World generation completed.".green().bold());
|
||||
|
||||
// Start map preview generation silently in background (Java only)
|
||||
if world_format == WorldFormat::JavaAnvil {
|
||||
let preview_info = data_processing::MapPreviewInfo::new(
|
||||
generation_options.path.clone(),
|
||||
&xzbbox,
|
||||
);
|
||||
data_processing::start_map_preview_generation(preview_info);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
Err(e) => {
|
||||
|
||||
8 src/gui/index.html vendored
@@ -151,14 +151,6 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Floodfill Timeout Input -->
|
||||
<div class="settings-row">
|
||||
<label for="floodfill-timeout" data-localize="floodfill_timeout">Floodfill Timeout (sec)</label>
|
||||
<div class="settings-control">
|
||||
<input type="number" id="floodfill-timeout" name="floodfill-timeout" min="0" step="1" value="20" placeholder="Seconds">
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Map Theme Selector -->
|
||||
<div class="settings-row">
|
||||
<label for="tile-theme-select" data-localize="map_theme">Map Theme</label>
|
||||
|
||||
9 src/gui/js/bbox.js vendored
@@ -899,6 +899,15 @@ $(document).ready(function () {
|
||||
});
|
||||
}
|
||||
|
||||
// If it's a rectangle, remove any existing rectangles first
|
||||
if (e.layerType === 'rectangle') {
|
||||
drawnItems.eachLayer(function(layer) {
|
||||
if (layer instanceof L.Rectangle) {
|
||||
drawnItems.removeLayer(layer);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// Check if it's a rectangle and set proper styles before adding it to the layer
|
||||
if (e.layerType === 'rectangle') {
|
||||
e.layer.setStyle({
|
||||
|
||||
9 src/gui/js/main.js vendored
@@ -91,7 +91,6 @@ async function applyLocalization(localization) {
|
||||
"h2[data-localize='customization_settings']": "customization_settings",
|
||||
"label[data-localize='world_scale']": "world_scale",
|
||||
"label[data-localize='custom_bounding_box']": "custom_bounding_box",
|
||||
"label[data-localize='floodfill_timeout']": "floodfill_timeout",
|
||||
// DEPRECATED: Ground level localization removed
|
||||
// "label[data-localize='ground_level']": "ground_level",
|
||||
"label[data-localize='language']": "language",
|
||||
@@ -110,7 +109,6 @@ async function applyLocalization(localization) {
|
||||
|
||||
// Placeholder strings
|
||||
"input[id='bbox-coords']": "placeholder_bbox",
|
||||
"input[id='floodfill-timeout']": "placeholder_floodfill",
|
||||
// DEPRECATED: Ground level placeholder removed
|
||||
// "input[id='ground-level']": "placeholder_ground"
|
||||
};
|
||||
@@ -791,14 +789,12 @@ async function startGeneration() {
|
||||
var roof = document.getElementById("roof-toggle").checked;
|
||||
var fill_ground = document.getElementById("fillground-toggle").checked;
|
||||
var scale = parseFloat(document.getElementById("scale-value-slider").value);
|
||||
var floodfill_timeout = parseInt(document.getElementById("floodfill-timeout").value, 10);
|
||||
// var ground_level = parseInt(document.getElementById("ground-level").value, 10);
|
||||
// DEPRECATED: Ground level input removed from UI
|
||||
var ground_level = -62;
|
||||
|
||||
// Validate floodfill_timeout and ground_level
|
||||
floodfill_timeout = isNaN(floodfill_timeout) || floodfill_timeout < 0 ? 20 : floodfill_timeout;
|
||||
ground_level = isNaN(ground_level) || ground_level < -62 ? 20 : ground_level;
|
||||
// Validate ground_level
|
||||
ground_level = isNaN(ground_level) || ground_level < -62 ? -62 : ground_level;
|
||||
|
||||
// Get telemetry consent (defaults to false if not set)
|
||||
const telemetryConsent = window.getTelemetryConsent ? window.getTelemetryConsent() : false;
|
||||
@@ -809,7 +805,6 @@ async function startGeneration() {
|
||||
selectedWorld: worldPath,
|
||||
worldScale: scale,
|
||||
groundLevel: ground_level,
|
||||
floodfillTimeout: floodfill_timeout,
|
||||
terrainEnabled: terrain,
|
||||
skipOsmObjects: skipOsmObjects,
|
||||
interiorEnabled: interior,
|
||||
|
||||
@@ -9,9 +9,11 @@ mod clipping;
|
||||
mod colors;
|
||||
mod coordinate_system;
|
||||
mod data_processing;
|
||||
mod deterministic_rng;
|
||||
mod element_processing;
|
||||
mod elevation_data;
|
||||
mod floodfill;
|
||||
mod floodfill_cache;
|
||||
mod ground;
|
||||
mod map_renderer;
|
||||
mod map_transformation;
|
||||
@@ -49,6 +51,9 @@ mod progress {
|
||||
use windows::Win32::System::Console::{AttachConsole, FreeConsole, ATTACH_PARENT_PROCESS};
|
||||
|
||||
fn run_cli() {
|
||||
// Configure thread pool with 90% CPU cap to keep system responsive
|
||||
floodfill_cache::configure_rayon_thread_pool(0.9);
|
||||
|
||||
let version: &str = env!("CARGO_PKG_VERSION");
|
||||
let repository: &str = env!("CARGO_PKG_REPOSITORY");
|
||||
println!(
|
||||
|
||||
@@ -5,8 +5,8 @@ use crate::coordinate_system::transformation::CoordTransformer;
|
||||
use crate::progress::emit_gui_progress_update;
|
||||
use colored::Colorize;
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
|
||||
// Raw data from OSM
|
||||
|
||||
@@ -29,9 +29,18 @@ struct OsmElement {
|
||||
pub members: Vec<OsmMember>,
|
||||
}
|
||||
|
||||
#[derive(Deserialize)]
|
||||
struct OsmData {
|
||||
pub elements: Vec<OsmElement>,
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct OsmData {
|
||||
elements: Vec<OsmElement>,
|
||||
#[serde(default)]
|
||||
pub remark: Option<String>,
|
||||
}
|
||||
|
||||
impl OsmData {
|
||||
/// Returns true if there are no elements in the OSM data
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.elements.is_empty()
|
||||
}
|
||||
}
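To make the new remark handling concrete, here is an illustrative round trip (not from the diff). The remark string is invented, but it contains the "runtime error" and "out of memory" substrings that the Overpass fetching code checks for later in this diff; the function name is hypothetical.

fn deserialize_overpass_error_example() -> Result<(), serde_json::Error> {
    // A minimal payload of the shape Overpass returns when a query fails:
    // no elements, plus a human-readable remark.
    let payload = r#"{
        "elements": [],
        "remark": "runtime error: query ran out of memory"
    }"#;
    let data: OsmData = serde_json::from_str(payload)?;
    assert!(data.is_empty());
    assert!(data.remark.as_deref().unwrap_or("").contains("out of memory"));
    Ok(())
}
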
|
||||
|
||||
struct SplitOsmData {
|
||||
@@ -68,11 +77,6 @@ impl SplitOsmData {
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_raw_osm_data(json_data: Value) -> Result<SplitOsmData, serde_json::Error> {
|
||||
let osm_data: OsmData = serde_json::from_value(json_data)?;
|
||||
Ok(SplitOsmData::from_raw_osm_data(osm_data))
|
||||
}
|
||||
|
||||
// End raw data
|
||||
|
||||
// Normalized data that we can use
|
||||
@@ -112,7 +116,7 @@ pub enum ProcessedMemberRole {
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
pub struct ProcessedMember {
|
||||
pub role: ProcessedMemberRole,
|
||||
pub way: ProcessedWay,
|
||||
pub way: Arc<ProcessedWay>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq)]
|
||||
@@ -164,7 +168,7 @@ impl ProcessedElement {
|
||||
}
|
||||
|
||||
pub fn parse_osm_data(
|
||||
json_data: Value,
|
||||
osm_data: OsmData,
|
||||
bbox: LLBBox,
|
||||
scale: f64,
|
||||
debug: bool,
|
||||
@@ -174,7 +178,7 @@ pub fn parse_osm_data(
|
||||
emit_gui_progress_update(5.0, "Parsing data...");
|
||||
|
||||
// Deserialize the JSON data into the OSMData structure
|
||||
let data = parse_raw_osm_data(json_data).expect("Failed to parse OSM data");
|
||||
let data = SplitOsmData::from_raw_osm_data(osm_data);
|
||||
|
||||
let (coord_transformer, xzbbox) = CoordTransformer::llbbox_to_xzbbox(&bbox, scale)
|
||||
.unwrap_or_else(|e| {
|
||||
@@ -189,7 +193,7 @@ pub fn parse_osm_data(
|
||||
}
|
||||
|
||||
let mut nodes_map: HashMap<u64, ProcessedNode> = HashMap::new();
|
||||
let mut ways_map: HashMap<u64, ProcessedWay> = HashMap::new();
|
||||
let mut ways_map: HashMap<u64, Arc<ProcessedWay>> = HashMap::new();
|
||||
|
||||
let mut processed_elements: Vec<ProcessedElement> = Vec::new();
|
||||
|
||||
@@ -238,17 +242,15 @@ pub fn parse_osm_data(
|
||||
let tags = element.tags.clone().unwrap_or_default();
|
||||
|
||||
// Store unclipped way for relation assembly (clipping happens after ring merging)
|
||||
ways_map.insert(
|
||||
element.id,
|
||||
ProcessedWay {
|
||||
id: element.id,
|
||||
tags: tags.clone(),
|
||||
nodes: nodes.clone(),
|
||||
},
|
||||
);
|
||||
let way = Arc::new(ProcessedWay {
|
||||
id: element.id,
|
||||
tags,
|
||||
nodes,
|
||||
});
|
||||
ways_map.insert(element.id, Arc::clone(&way));
|
||||
|
||||
// Clip way nodes for standalone way processing (not relations)
|
||||
let clipped_nodes = clip_way_to_bbox(&nodes, &xzbbox);
|
||||
let clipped_nodes = clip_way_to_bbox(&way.nodes, &xzbbox);
|
||||
|
||||
// Skip ways that are completely outside the bbox (empty after clipping)
|
||||
if clipped_nodes.is_empty() {
|
||||
@@ -257,8 +259,8 @@ pub fn parse_osm_data(
|
||||
|
||||
let processed: ProcessedWay = ProcessedWay {
|
||||
id: element.id,
|
||||
tags: tags.clone(),
|
||||
nodes: clipped_nodes.clone(),
|
||||
tags: way.tags.clone(),
|
||||
nodes: clipped_nodes,
|
||||
};
|
||||
|
||||
processed_elements.push(ProcessedElement::Way(processed));
|
||||
@@ -294,8 +296,8 @@ pub fn parse_osm_data(
|
||||
};
|
||||
|
||||
// Check if the way exists in ways_map
|
||||
let way: ProcessedWay = match ways_map.get(&mem.r#ref) {
|
||||
Some(w) => w.clone(),
|
||||
let way = match ways_map.get(&mem.r#ref) {
|
||||
Some(w) => Arc::clone(w),
|
||||
None => {
|
||||
// Way was likely filtered out because it was completely outside the bbox
|
||||
return None;
|
||||
@@ -311,11 +313,11 @@ pub fn parse_osm_data(
|
||||
if clipped_nodes.is_empty() {
|
||||
return None;
|
||||
}
|
||||
ProcessedWay {
|
||||
Arc::new(ProcessedWay {
|
||||
id: way.id,
|
||||
tags: way.tags,
|
||||
tags: way.tags.clone(),
|
||||
nodes: clipped_nodes,
|
||||
}
|
||||
})
|
||||
};
|
||||
|
||||
Some(ProcessedMember {
|
||||
@@ -336,6 +338,9 @@ pub fn parse_osm_data(
|
||||
|
||||
emit_gui_progress_update(15.0, "");
|
||||
|
||||
drop(nodes_map);
|
||||
drop(ways_map);
|
||||
|
||||
(processed_elements, xzbbox)
|
||||
}
|
||||
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
use crate::coordinate_system::geographic::LLBBox;
|
||||
use crate::osm_parser::OsmData;
|
||||
use crate::progress::{emit_gui_error, emit_gui_progress_update, is_running_with_gui};
|
||||
use colored::Colorize;
|
||||
use rand::seq::SliceRandom;
|
||||
use reqwest::blocking::Client;
|
||||
use reqwest::blocking::ClientBuilder;
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
use std::fs::File;
|
||||
use std::io::{self, BufReader, Write};
|
||||
use std::io::{self, BufReader, Cursor, Write};
|
||||
use std::process::Command;
|
||||
use std::time::Duration;
|
||||
|
||||
@@ -79,13 +81,14 @@ fn download_with_wget(url: &str, query: &str) -> io::Result<String> {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fetch_data_from_file(file: &str) -> Result<Value, Box<dyn std::error::Error>> {
|
||||
pub fn fetch_data_from_file(file: &str) -> Result<OsmData, Box<dyn std::error::Error>> {
|
||||
println!("{} Loading data from file...", "[1/7]".bold());
|
||||
emit_gui_progress_update(1.0, "Loading data from file...");
|
||||
|
||||
let file: File = File::open(file)?;
|
||||
let reader: BufReader<File> = BufReader::new(file);
|
||||
let data: Value = serde_json::from_reader(reader)?;
|
||||
let mut deserializer = serde_json::Deserializer::from_reader(reader);
|
||||
let data: OsmData = OsmData::deserialize(&mut deserializer)?;
|
||||
Ok(data)
|
||||
}
|
||||
|
||||
@@ -95,7 +98,7 @@ pub fn fetch_data_from_overpass(
|
||||
debug: bool,
|
||||
download_method: &str,
|
||||
save_file: Option<&str>,
|
||||
) -> Result<Value, Box<dyn std::error::Error>> {
|
||||
) -> Result<OsmData, Box<dyn std::error::Error>> {
|
||||
println!("{} Fetching data...", "[1/7]".bold());
|
||||
emit_gui_progress_update(1.0, "Fetching data...");
|
||||
|
||||
@@ -182,14 +185,12 @@ pub fn fetch_data_from_overpass(
|
||||
println!("API response saved to: {save_file}");
|
||||
}
|
||||
|
||||
let data: Value = serde_json::from_str(&response)?;
|
||||
let mut deserializer =
|
||||
serde_json::Deserializer::from_reader(Cursor::new(response.as_bytes()));
|
||||
let data: OsmData = OsmData::deserialize(&mut deserializer)?;
|
||||
|
||||
if data["elements"]
|
||||
.as_array()
|
||||
.map_or(0, |elements: &Vec<Value>| elements.len())
|
||||
== 0
|
||||
{
|
||||
if let Some(remark) = data["remark"].as_str() {
|
||||
if data.is_empty() {
|
||||
if let Some(remark) = data.remark.as_deref() {
|
||||
// Check if the remark mentions memory or other runtime errors
|
||||
if remark.contains("runtime error") && remark.contains("out of memory") {
|
||||
eprintln!("{}", "Error! The query ran out of memory on the Overpass API server. Try using a smaller area.".red().bold());
|
||||
@@ -211,7 +212,7 @@ pub fn fetch_data_from_overpass(
|
||||
}
|
||||
|
||||
if debug {
|
||||
println!("Additional debug information: {data}");
|
||||
println!("Additional debug information: {data:?}");
|
||||
}
|
||||
|
||||
if !is_running_with_gui() {
|
||||
|
||||
@@ -7,9 +7,8 @@ use crate::retrieve_data;
|
||||
// this is copied from main.rs
|
||||
pub fn generate_example(llbbox: LLBBox) -> (XZBBox, Vec<ProcessedElement>) {
|
||||
// Fetch data
|
||||
let raw_data: serde_json::Value =
|
||||
retrieve_data::fetch_data_from_overpass(llbbox, false, "requests", None)
|
||||
.expect("Failed to fetch data");
|
||||
let raw_data = retrieve_data::fetch_data_from_overpass(llbbox, false, "requests", None)
|
||||
.expect("Failed to fetch data");
|
||||
|
||||
// Parse raw data
|
||||
let (mut parsed_elements, xzbbox) = osm_parser::parse_osm_data(raw_data, llbbox, 1.0, false);
|
||||
|
||||
@@ -24,6 +24,7 @@ use std::collections::HashMap as StdHashMap;
|
||||
use std::fs::{self, File};
|
||||
use std::io::{Cursor, Write as IoWrite};
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
use vek::Vec2;
|
||||
use zip::write::FileOptions;
|
||||
use zip::CompressionMethod;
|
||||
@@ -122,7 +123,7 @@ pub struct BedrockWriter {
|
||||
output_dir: PathBuf,
|
||||
level_name: String,
|
||||
spawn_point: Option<(i32, i32)>,
|
||||
ground: Option<Box<Ground>>,
|
||||
ground: Option<Arc<Ground>>,
|
||||
}
|
||||
|
||||
impl BedrockWriter {
|
||||
@@ -131,7 +132,7 @@ impl BedrockWriter {
|
||||
output_path: PathBuf,
|
||||
level_name: String,
|
||||
spawn_point: Option<(i32, i32)>,
|
||||
ground: Option<Box<Ground>>,
|
||||
ground: Option<Arc<Ground>>,
|
||||
) -> Self {
|
||||
// If the path ends with .mcworld, use it as the final archive path
|
||||
// and create a temp directory without that extension for working files
|
||||
|
||||
@@ -4,6 +4,11 @@
|
||||
//! before they are written to either Java or Bedrock format.
|
||||
|
||||
use crate::block_definitions::*;
|
||||
|
||||
/// Minimum Y coordinate in Minecraft (1.18+)
|
||||
const MIN_Y: i32 = -64;
|
||||
/// Maximum Y coordinate in Minecraft (1.18+)
|
||||
const MAX_Y: i32 = 319;
|
||||
use fastnbt::{LongArray, Value};
|
||||
use fnv::FnvHashMap;
|
||||
use serde::{Deserialize, Serialize};
|
||||
@@ -186,16 +191,20 @@ pub(crate) struct ChunkToModify {
|
||||
impl ChunkToModify {
|
||||
#[inline]
|
||||
pub fn get_block(&self, x: u8, y: i32, z: u8) -> Option<Block> {
|
||||
let section_idx: i8 = (y >> 4).try_into().unwrap();
|
||||
// Clamp Y to valid Minecraft range to prevent TryFromIntError
|
||||
let y = y.clamp(MIN_Y, MAX_Y);
|
||||
let section_idx: i8 = (y >> 4) as i8;
|
||||
let section = self.sections.get(§ion_idx)?;
|
||||
section.get_block(x, (y & 15).try_into().unwrap(), z)
|
||||
section.get_block(x, (y & 15) as u8, z)
|
||||
}
|
||||
|
||||
#[inline]
|
||||
pub fn set_block(&mut self, x: u8, y: i32, z: u8, block: Block) {
|
||||
let section_idx: i8 = (y >> 4).try_into().unwrap();
|
||||
// Clamp Y to valid Minecraft range to prevent TryFromIntError
|
||||
let y = y.clamp(MIN_Y, MAX_Y);
|
||||
let section_idx: i8 = (y >> 4) as i8;
|
||||
let section = self.sections.entry(section_idx).or_default();
|
||||
section.set_block(x, (y & 15).try_into().unwrap(), z, block);
|
||||
section.set_block(x, (y & 15) as u8, z, block);
|
||||
}
|
||||
|
||||
#[inline]
|
||||
@@ -206,9 +215,11 @@ impl ChunkToModify {
|
||||
z: u8,
|
||||
block_with_props: BlockWithProperties,
|
||||
) {
|
||||
let section_idx: i8 = (y >> 4).try_into().unwrap();
|
||||
// Clamp Y to valid Minecraft range to prevent TryFromIntError
|
||||
let y = y.clamp(MIN_Y, MAX_Y);
|
||||
let section_idx: i8 = (y >> 4) as i8;
|
||||
let section = self.sections.entry(section_idx).or_default();
|
||||
section.set_block_with_properties(x, (y & 15).try_into().unwrap(), z, block_with_props);
|
||||
section.set_block_with_properties(x, (y & 15) as u8, z, block_with_props);
|
||||
}
|
||||
|
||||
pub fn sections(&self) -> impl Iterator<Item = Section> + '_ {
|
||||
|
||||
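A small worked example (illustrative, not part of the diff) of the clamp-then-shift arithmetic introduced above: Y is first clamped to the 1.18+ build range, then split into a section index (arithmetic right shift, so it stays correct for negative Y) and an in-section offset.

fn section_and_offset(y: i32) -> (i8, u8) {
    let y = y.clamp(-64, 319);
    ((y >> 4) as i8, (y & 15) as u8)
}

fn main() {
    assert_eq!(section_and_offset(-64), (-4, 0));   // lowest block
    assert_eq!(section_and_offset(-1), (-1, 15));   // just below Y = 0
    assert_eq!(section_and_offset(319), (19, 15));  // highest block
    assert_eq!(section_and_offset(1000), (19, 15)); // out-of-range Y is clamped
}
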
@@ -1,6 +1,7 @@
|
||||
//! Java Edition Anvil format world saving.
|
||||
//!
|
||||
//! This module handles saving worlds in the Java Edition Anvil (.mca) format.
|
||||
//! Supports streaming mode for memory-efficient saving of large worlds.
|
||||
|
||||
use super::common::{Chunk, ChunkToModify, Section};
|
||||
use super::WorldEditor;
|
||||
@@ -11,11 +12,9 @@ use fastanvil::Region;
|
||||
use fastnbt::Value;
|
||||
use fnv::FnvHashMap;
|
||||
use indicatif::{ProgressBar, ProgressStyle};
|
||||
use rayon::prelude::*;
|
||||
use std::collections::HashMap;
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
use std::sync::atomic::{AtomicU64, Ordering};
|
||||
|
||||
#[cfg(feature = "gui")]
|
||||
use crate::telemetry::{send_log, LogLevel};
|
||||
@@ -23,9 +22,11 @@ use crate::telemetry::{send_log, LogLevel};
|
||||
impl<'a> WorldEditor<'a> {
|
||||
/// Creates a region file for the given region coordinates.
|
||||
pub(super) fn create_region(&self, region_x: i32, region_z: i32) -> Region<File> {
|
||||
let out_path = self
|
||||
.world_dir
|
||||
.join(format!("region/r.{}.{}.mca", region_x, region_z));
|
||||
let region_dir = self.world_dir.join("region");
|
||||
let out_path = region_dir.join(format!("r.{}.{}.mca", region_x, region_z));
|
||||
|
||||
// Ensure region directory exists before creating region files
|
||||
std::fs::create_dir_all(®ion_dir).expect("Failed to create region directory");
|
||||
|
||||
const REGION_TEMPLATE: &[u8] = include_bytes!("../../assets/minecraft/region.template");
|
||||
|
||||
@@ -75,6 +76,9 @@ impl<'a> WorldEditor<'a> {
|
||||
}
|
||||
|
||||
/// Saves the world in Java Edition Anvil format.
|
||||
///
|
||||
/// Uses streaming mode: saves regions one at a time and releases memory after each,
|
||||
/// significantly reducing peak memory usage for large worlds.
|
||||
pub(super) fn save_java(&mut self) {
|
||||
println!("{} Saving world...", "[7/7]".bold());
|
||||
emit_gui_progress_update(90.0, "Saving world...");
|
||||
@@ -98,139 +102,155 @@ impl<'a> WorldEditor<'a> {
|
||||
.progress_chars("█▓░"),
|
||||
);
|
||||
|
||||
let regions_processed = AtomicU64::new(0);
|
||||
// Streaming mode: Process regions sequentially and release memory after each.
|
||||
// This significantly reduces peak memory for large worlds (100+ regions).
|
||||
// For small worlds, the overhead is negligible.
|
||||
let mut regions_processed: u64 = 0;
|
||||
|
||||
self.world
|
||||
.regions
|
||||
.par_iter()
|
||||
.for_each(|((region_x, region_z), region_to_modify)| {
|
||||
let mut region = self.create_region(*region_x, *region_z);
|
||||
let mut ser_buffer = Vec::with_capacity(8192);
|
||||
// Collect region keys first to allow draining
|
||||
let region_keys: Vec<(i32, i32)> = self.world.regions.keys().copied().collect();
|
||||
|
||||
for (&(chunk_x, chunk_z), chunk_to_modify) in ®ion_to_modify.chunks {
|
||||
if !chunk_to_modify.sections.is_empty() || !chunk_to_modify.other.is_empty() {
|
||||
// Read existing chunk data if it exists
|
||||
let existing_data = region
|
||||
.read_chunk(chunk_x as usize, chunk_z as usize)
|
||||
.unwrap()
|
||||
.unwrap_or_default();
|
||||
for (region_x, region_z) in region_keys {
|
||||
// Remove region from memory - this is the key to memory savings
|
||||
if let Some(region_to_modify) = self.world.regions.remove(&(region_x, region_z)) {
|
||||
self.save_single_region(region_x, region_z, ®ion_to_modify);
|
||||
|
||||
// Parse existing chunk or create new one
|
||||
let mut chunk: Chunk = if !existing_data.is_empty() {
|
||||
fastnbt::from_bytes(&existing_data).unwrap()
|
||||
} else {
|
||||
Chunk {
|
||||
sections: Vec::new(),
|
||||
x_pos: chunk_x + (region_x * 32),
|
||||
z_pos: chunk_z + (region_z * 32),
|
||||
is_light_on: 0,
|
||||
other: FnvHashMap::default(),
|
||||
}
|
||||
};
|
||||
// Region memory is freed when region_to_modify goes out of scope here
|
||||
}
|
||||
|
||||
// Update sections while preserving existing data
|
||||
let new_sections: Vec<Section> = chunk_to_modify.sections().collect();
|
||||
for new_section in new_sections {
|
||||
if let Some(existing_section) =
|
||||
chunk.sections.iter_mut().find(|s| s.y == new_section.y)
|
||||
{
|
||||
// Merge block states
|
||||
existing_section.block_states.palette =
|
||||
new_section.block_states.palette;
|
||||
existing_section.block_states.data = new_section.block_states.data;
|
||||
} else {
|
||||
// Add new section if it doesn't exist
|
||||
chunk.sections.push(new_section);
|
||||
}
|
||||
}
|
||||
regions_processed += 1;
|
||||
|
||||
// Preserve existing block entities and merge with new ones
|
||||
if let Some(existing_entities) = chunk.other.get_mut("block_entities") {
|
||||
if let Some(new_entities) = chunk_to_modify.other.get("block_entities")
|
||||
{
|
||||
if let (Value::List(existing), Value::List(new)) =
|
||||
(existing_entities, new_entities)
|
||||
{
|
||||
// Remove old entities that are replaced by new ones
|
||||
existing.retain(|e| {
|
||||
if let Value::Compound(map) = e {
|
||||
let (x, y, z) = get_entity_coords(map);
|
||||
!new.iter().any(|new_e| {
|
||||
if let Value::Compound(new_map) = new_e {
|
||||
let (nx, ny, nz) = get_entity_coords(new_map);
|
||||
x == nx && y == ny && z == nz
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
// Add new entities
|
||||
existing.extend(new.clone());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// If no existing entities, just add the new ones
|
||||
if let Some(new_entities) = chunk_to_modify.other.get("block_entities")
|
||||
{
|
||||
chunk
|
||||
.other
|
||||
.insert("block_entities".to_string(), new_entities.clone());
|
||||
}
|
||||
}
|
||||
// Update progress at regular intervals
|
||||
let update_interval = (total_regions / 10).max(1);
|
||||
if regions_processed.is_multiple_of(update_interval)
|
||||
|| regions_processed == total_regions
|
||||
{
|
||||
let progress = 90.0 + (regions_processed as f64 / total_regions as f64) * 9.0;
|
||||
emit_gui_progress_update(progress, "Saving world...");
|
||||
}
|
||||
|
||||
// Update chunk coordinates and flags
|
||||
chunk.x_pos = chunk_x + (region_x * 32);
|
||||
chunk.z_pos = chunk_z + (region_z * 32);
|
||||
|
||||
// Create Level wrapper and save
|
||||
let level_data = create_level_wrapper(&chunk);
|
||||
ser_buffer.clear();
|
||||
fastnbt::to_writer(&mut ser_buffer, &level_data).unwrap();
|
||||
region
|
||||
.write_chunk(chunk_x as usize, chunk_z as usize, &ser_buffer)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
// Second pass: ensure all chunks exist
|
||||
for chunk_x in 0..32 {
|
||||
for chunk_z in 0..32 {
|
||||
let abs_chunk_x = chunk_x + (region_x * 32);
|
||||
let abs_chunk_z = chunk_z + (region_z * 32);
|
||||
|
||||
// Check if chunk exists in our modifications
|
||||
let chunk_exists =
|
||||
region_to_modify.chunks.contains_key(&(chunk_x, chunk_z));
|
||||
|
||||
// If chunk doesn't exist, create it with base layer
|
||||
if !chunk_exists {
|
||||
let (ser_buffer, _) = Self::create_base_chunk(abs_chunk_x, abs_chunk_z);
|
||||
region
|
||||
.write_chunk(chunk_x as usize, chunk_z as usize, &ser_buffer)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Update progress
|
||||
let regions_done = regions_processed.fetch_add(1, Ordering::SeqCst) + 1;
|
||||
|
||||
// Update progress at regular intervals (every ~1% or at least every 10 regions)
|
||||
// This ensures progress is visible even with many regions
|
||||
let update_interval = (total_regions / 10).max(1);
|
||||
if regions_done.is_multiple_of(update_interval) || regions_done == total_regions {
|
||||
let progress = 90.0 + (regions_done as f64 / total_regions as f64) * 9.0;
|
||||
emit_gui_progress_update(progress, "Saving world...");
|
||||
}
|
||||
|
||||
save_pb.inc(1);
|
||||
});
|
||||
save_pb.inc(1);
|
||||
}
|
||||
|
||||
save_pb.finish();
|
||||
}
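The streaming save above boils down to a drain-and-write pattern: collect the keys first, remove each heavy value from the map before writing it, and let it drop as soon as that iteration step ends. A generic sketch of the same idea (illustrative only; names are made up and std HashMap stands in for the FnvHashMap used in the real code):

use std::collections::HashMap;

fn stream_out<K: Copy + std::hash::Hash + Eq, V>(
    map: &mut HashMap<K, V>,
    mut write: impl FnMut(K, &V),
) {
    // Snapshot the keys so we can mutate the map while iterating.
    let keys: Vec<K> = map.keys().copied().collect();
    for key in keys {
        if let Some(value) = map.remove(&key) {
            write(key, &value);
            // `value` is dropped here, freeing its memory before the next key.
        }
    }
}
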
|
||||
|
||||
/// Saves a single region to disk.
|
||||
///
|
||||
/// This is extracted to allow streaming mode to save and release regions one at a time.
|
||||
fn save_single_region(
|
||||
&self,
|
||||
region_x: i32,
|
||||
region_z: i32,
|
||||
region_to_modify: &super::common::RegionToModify,
|
||||
) {
|
||||
let mut region = self.create_region(region_x, region_z);
|
||||
let mut ser_buffer = Vec::with_capacity(8192);
|
||||
|
||||
for (&(chunk_x, chunk_z), chunk_to_modify) in ®ion_to_modify.chunks {
|
||||
if !chunk_to_modify.sections.is_empty() || !chunk_to_modify.other.is_empty() {
|
||||
// Read existing chunk data if it exists
|
||||
let existing_data = region
|
||||
.read_chunk(chunk_x as usize, chunk_z as usize)
|
||||
.unwrap()
|
||||
.unwrap_or_default();
|
||||
|
||||
// Parse existing chunk or create new one
|
||||
let mut chunk: Chunk = if !existing_data.is_empty() {
|
||||
fastnbt::from_bytes(&existing_data).unwrap()
|
||||
} else {
|
||||
Chunk {
|
||||
sections: Vec::new(),
|
||||
x_pos: chunk_x + (region_x * 32),
|
||||
z_pos: chunk_z + (region_z * 32),
|
||||
is_light_on: 0,
|
||||
other: FnvHashMap::default(),
|
||||
}
|
||||
};
|
||||
|
||||
// Update sections while preserving existing data
|
||||
let new_sections: Vec<Section> = chunk_to_modify.sections().collect();
|
||||
for new_section in new_sections {
|
||||
if let Some(existing_section) =
|
||||
chunk.sections.iter_mut().find(|s| s.y == new_section.y)
|
||||
{
|
||||
// Merge block states
|
||||
existing_section.block_states.palette = new_section.block_states.palette;
|
||||
existing_section.block_states.data = new_section.block_states.data;
|
||||
} else {
|
||||
// Add new section if it doesn't exist
|
||||
chunk.sections.push(new_section);
|
||||
}
|
||||
}
|
||||
|
||||
// Preserve existing block entities and merge with new ones
|
||||
if let Some(existing_entities) = chunk.other.get_mut("block_entities") {
|
||||
if let Some(new_entities) = chunk_to_modify.other.get("block_entities") {
|
||||
if let (Value::List(existing), Value::List(new)) =
|
||||
(existing_entities, new_entities)
|
||||
{
|
||||
// Remove old entities that are replaced by new ones
|
||||
existing.retain(|e| {
|
||||
if let Value::Compound(map) = e {
|
||||
let (x, y, z) = get_entity_coords(map);
|
||||
!new.iter().any(|new_e| {
|
||||
if let Value::Compound(new_map) = new_e {
|
||||
let (nx, ny, nz) = get_entity_coords(new_map);
|
||||
x == nx && y == ny && z == nz
|
||||
} else {
|
||||
false
|
||||
}
|
||||
})
|
||||
} else {
|
||||
true
|
||||
}
|
||||
});
|
||||
// Add new entities
|
||||
existing.extend(new.clone());
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// If no existing entities, just add the new ones
|
||||
if let Some(new_entities) = chunk_to_modify.other.get("block_entities") {
|
||||
chunk
|
||||
.other
|
||||
.insert("block_entities".to_string(), new_entities.clone());
|
||||
}
|
||||
}
|
||||
|
||||
// Update chunk coordinates and flags
|
||||
chunk.x_pos = chunk_x + (region_x * 32);
|
||||
chunk.z_pos = chunk_z + (region_z * 32);
|
||||
|
||||
// Create Level wrapper and save
|
||||
let level_data = create_level_wrapper(&chunk);
|
||||
ser_buffer.clear();
|
||||
fastnbt::to_writer(&mut ser_buffer, &level_data).unwrap();
|
||||
region
|
||||
.write_chunk(chunk_x as usize, chunk_z as usize, &ser_buffer)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
// Second pass: ensure all chunks exist
|
||||
for chunk_x in 0..32 {
|
||||
for chunk_z in 0..32 {
|
||||
let abs_chunk_x = chunk_x + (region_x * 32);
|
||||
let abs_chunk_z = chunk_z + (region_z * 32);
|
||||
|
||||
// Check if chunk exists in our modifications
|
||||
let chunk_exists = region_to_modify.chunks.contains_key(&(chunk_x, chunk_z));
|
||||
|
||||
// If chunk doesn't exist, create it with base layer
|
||||
if !chunk_exists {
|
||||
let (ser_buffer, _) = Self::create_base_chunk(abs_chunk_x, abs_chunk_z);
|
||||
region
|
||||
.write_chunk(chunk_x as usize, chunk_z as usize, &ser_buffer)
|
||||
.unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Helper function to get entity coordinates
|
||||
|
||||
@@ -33,6 +33,7 @@ use std::collections::HashMap;
|
||||
use std::fs::File;
|
||||
use std::io::Write;
|
||||
use std::path::PathBuf;
|
||||
use std::sync::Arc;
|
||||
|
||||
#[cfg(feature = "gui")]
|
||||
use crate::telemetry::{send_log, LogLevel};
|
||||
@@ -71,7 +72,7 @@ pub struct WorldEditor<'a> {
|
||||
world: WorldToModify,
|
||||
xzbbox: &'a XZBBox,
|
||||
llbbox: LLBBox,
|
||||
ground: Option<Box<Ground>>,
|
||||
ground: Option<Arc<Ground>>,
|
||||
format: WorldFormat,
|
||||
/// Optional level name for Bedrock worlds (e.g., "Arnis World: New York City")
|
||||
bedrock_level_name: Option<String>,
|
||||
@@ -122,13 +123,13 @@ impl<'a> WorldEditor<'a> {
|
||||
}
|
||||
|
||||
/// Sets the ground reference for elevation-based block placement
|
||||
pub fn set_ground(&mut self, ground: &Ground) {
|
||||
self.ground = Some(Box::new(ground.clone()));
|
||||
pub fn set_ground(&mut self, ground: Arc<Ground>) {
|
||||
self.ground = Some(ground);
|
||||
}
|
||||
|
||||
/// Gets a reference to the ground data if available
|
||||
pub fn get_ground(&self) -> Option<&Ground> {
|
||||
self.ground.as_ref().map(|g| g.as_ref())
|
||||
self.ground.as_deref()
|
||||
}
|
||||
|
||||
/// Returns the current world format
|
||||
|
||||
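A hypothetical call site for the new set_ground signature (not in the diff), showing the point of the Arc change: cloning the handle is a pointer copy rather than a deep copy of the elevation data, so the same Ground can be shared with the Bedrock writer or worker threads.

use std::sync::Arc;

fn wire_up(mut editor: WorldEditor<'_>, ground: Ground) {
    let ground = Arc::new(ground);
    editor.set_ground(Arc::clone(&ground)); // cheap handle copy, no deep clone of the grid
    // The same Arc can still be handed to other components here.
    let _still_shared = ground;
}
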
@@ -16,7 +16,7 @@
|
||||
"minWidth": 1000,
|
||||
"minHeight": 650,
|
||||
"resizable": true,
|
||||
"transparent": true,
|
||||
"transparent": false,
|
||||
"center": true,
|
||||
"theme": "Dark",
|
||||
"additionalBrowserArgs": "--disable-features=VizDisplayCompositor"
|
||||
|
||||