Compare commits

...

36 Commits

Author SHA1 Message Date
louis-e
ad57fdbc3a Send proper error for no internet 2026-01-08 23:32:39 +01:00
Louis Erbkamm
550870d9e0 Merge pull request #702 from louis-e/better-elevation
Implement more realistic elevation
2026-01-08 23:14:22 +01:00
louis-e
bd693ea007 Reduce log lines 2026-01-08 23:13:46 +01:00
louis-e
ce8f343414 Sample less points in bridge calc 2026-01-08 23:01:12 +01:00
louis-e
f882145780 Improve efficiency of ground generation 2026-01-08 22:42:06 +01:00
louis-e
b52d750935 Address code review feedback 2026-01-08 22:20:17 +01:00
louis-e
4d30899909 Remove snow again 2026-01-08 21:23:18 +01:00
louis-e
311610a717 Use less operations for better efficiency 2026-01-08 20:53:00 +01:00
louis-e
b4902ebc9e Add snow on top of mountains and address code review feedback 2026-01-08 20:39:56 +01:00
louis-e
e5bbb3e4a0 Address code review feedback 2026-01-08 20:03:15 +01:00
louis-e
0238cfe2d0 Implement more realistic elevation 2026-01-08 19:52:03 +01:00
Louis Erbkamm
2d9892fe7f Merge pull request #699 from louis-e/streaming-save-and-memory-reduction
Streaming save and memory reduction
2026-01-06 23:54:04 +01:00
louis-e
b858ce4691 Address code review feedback 2026-01-06 23:14:54 +01:00
louis-e
e031e53492 Address code review feedback 2026-01-06 23:05:20 +01:00
louis-e
6fb9b8943d Address code review feedback 2026-01-06 22:56:30 +01:00
louis-e
18266dd459 Comment out water area timeout warning 2026-01-06 22:39:32 +01:00
louis-e
b1940fa412 Add deterministic RNG for consistent element generation 2026-01-06 22:39:17 +01:00
louis-e
d57a732055 Drop elements and flood fill cache entries after processing 2026-01-06 22:38:55 +01:00
louis-e
4e52b38f5a stream regions during save to reduce peak memory 2026-01-06 22:38:40 +01:00
Louis Erbkamm
feb4317086 Merge pull request #698 from louis-e/floodfill-precompute-afterfix
Afterfixes for floodfill precomputation
2026-01-06 20:04:04 +01:00
louis-e
d02cbed997 Afterfixes for floodfill precomputation 2026-01-06 20:03:42 +01:00
Louis Erbkamm
99d1f8e117 Merge pull request #696 from louis-e/floodfill-precompute
Add multithreaded precomputation of floodfill
2026-01-06 20:00:13 +01:00
louis-e
6fa76bc381 Add clarifying comment 2026-01-06 19:43:52 +01:00
louis-e
0fef27e6af Address code review feedback 2026-01-06 19:36:33 +01:00
louis-e
fa3384cf86 Address code review feedback 2026-01-06 19:25:13 +01:00
louis-e
ffbc5e5788 Remove floodfill timeout parameter 2026-01-06 18:21:21 +01:00
louis-e
4215e7644c Remove floodfill timeout parameter and refine minor changes 2026-01-06 18:18:56 +01:00
louis-e
118335bad4 Add multithreaded precomputation of floodfill 2026-01-06 18:11:59 +01:00
Louis Erbkamm
7bbee28279 Merge pull request #695 from louis-e/ground-gen-cache-locality
perf: improve ground generation cache locality
2026-01-06 17:15:23 +01:00
louis-e
9cb35a3b13 perf: improve ground generation cache locality 2026-01-06 17:11:47 +01:00
Louis Erbkamm
4fecf98c54 Merge pull request #694 from louis-e/tiny-release-memory
Release memory on a few occasions
2026-01-06 17:01:48 +01:00
louis-e
47a7b81f99 Release memory on a few occasions 2026-01-06 17:01:27 +01:00
Louis Erbkamm
7ec90b4fef Merge pull request #692 from louis-e/codex/refactor-data-parsing-and-memory-usage
Stream OSM parsing and reduce cloning
2026-01-06 16:46:20 +01:00
louis-e
f1f3fb287a optimize tags cloning and fix Arc usage in OSM parser 2026-01-06 16:40:44 +01:00
Louis Erbkamm
b23658d5ef Merge branch 'main' into codex/refactor-data-parsing-and-memory-usage 2026-01-06 16:29:30 +01:00
Louis Erbkamm
51ad1fef3f Stream OSM parsing and reduce cloning 2026-01-06 15:42:40 +01:00
24 changed files with 1248 additions and 496 deletions

1
Cargo.lock generated
View File

@@ -204,6 +204,7 @@ dependencies = [
"nbtx",
"once_cell",
"rand 0.8.5",
"rand_chacha 0.3.1",
"rayon",
"reqwest",
"rfd",

View File

@@ -38,6 +38,7 @@ itertools = "0.14.0"
log = "0.4.27"
once_cell = "1.21.3"
rand = "0.8.5"
rand_chacha = "0.3"
rayon = "1.10.0"
reqwest = { version = "0.12.15", features = ["blocking", "json"] }
rfd = { version = "0.16.0", optional = true }

View File

@@ -3,6 +3,7 @@ use crate::block_definitions::{BEDROCK, DIRT, GRASS_BLOCK, STONE};
use crate::coordinate_system::cartesian::XZBBox;
use crate::coordinate_system::geographic::LLBBox;
use crate::element_processing::*;
use crate::floodfill_cache::FloodFillCache;
use crate::ground::Ground;
use crate::map_renderer;
use crate::osm_parser::ProcessedElement;
@@ -54,12 +55,14 @@ pub fn generate_world_with_options(
) -> Result<PathBuf, String> {
let output_path = options.path.clone();
let world_format = options.format;
// Create editor with appropriate format
let mut editor: WorldEditor = WorldEditor::new_with_format_and_name(
options.path,
&xzbbox,
llbbox,
options.format,
options.level_name,
options.level_name.clone(),
options.spawn_point,
);
let ground = Arc::new(ground);
@@ -75,8 +78,13 @@ pub fn generate_world_with_options(
println!("{} Processing terrain...", "[5/7]".bold());
emit_gui_progress_update(25.0, "Processing terrain...");
// Pre-compute all flood fills in parallel for better CPU utilization
let mut flood_fill_cache = FloodFillCache::precompute(&elements, args.timeout.as_ref());
println!("Pre-computed {} flood fills", flood_fill_cache.way_count());
// Process data
let elements_count: usize = elements.len();
let mut elements = elements; // Take ownership for consuming
let process_pb: ProgressBar = ProgressBar::new(elements_count as u64);
process_pb.set_style(ProgressStyle::default_bar()
.template("{spinner:.green} [{elapsed_precise}] [{bar:45.white/black}] {pos}/{len} elements ({eta}) {msg}")
@@ -87,7 +95,8 @@ pub fn generate_world_with_options(
let mut current_progress_prcs: f64 = 25.0;
let mut last_emitted_progress: f64 = current_progress_prcs;
for element in &elements {
// Process elements by draining in insertion order
for element in elements.drain(..) {
process_pb.inc(1);
current_progress_prcs += progress_increment_prcs;
if (current_progress_prcs - last_emitted_progress).abs() > 0.25 {
@@ -105,22 +114,28 @@ pub fn generate_world_with_options(
process_pb.set_message("");
}
match element {
match &element {
ProcessedElement::Way(way) => {
if way.tags.contains_key("building") || way.tags.contains_key("building:part") {
buildings::generate_buildings(&mut editor, way, args, None);
buildings::generate_buildings(&mut editor, way, args, None, &flood_fill_cache);
} else if way.tags.contains_key("highway") {
highways::generate_highways(&mut editor, element, args, &highway_connectivity);
highways::generate_highways(
&mut editor,
&element,
args,
&highway_connectivity,
&flood_fill_cache,
);
} else if way.tags.contains_key("landuse") {
landuse::generate_landuse(&mut editor, way, args);
landuse::generate_landuse(&mut editor, way, args, &flood_fill_cache);
} else if way.tags.contains_key("natural") {
natural::generate_natural(&mut editor, element, args);
natural::generate_natural(&mut editor, &element, args, &flood_fill_cache);
} else if way.tags.contains_key("amenity") {
amenities::generate_amenities(&mut editor, element, args);
amenities::generate_amenities(&mut editor, &element, args, &flood_fill_cache);
} else if way.tags.contains_key("leisure") {
leisure::generate_leisure(&mut editor, way, args);
leisure::generate_leisure(&mut editor, way, args, &flood_fill_cache);
} else if way.tags.contains_key("barrier") {
barriers::generate_barriers(&mut editor, element);
barriers::generate_barriers(&mut editor, &element);
} else if let Some(val) = way.tags.get("waterway") {
if val == "dock" {
// docks count as water areas
@@ -140,8 +155,10 @@ pub fn generate_world_with_options(
} else if way.tags.get("service") == Some(&"siding".to_string()) {
highways::generate_siding(&mut editor, way);
} else if way.tags.contains_key("man_made") {
man_made::generate_man_made(&mut editor, element, args);
man_made::generate_man_made(&mut editor, &element, args);
}
// Release flood fill cache entry for this way
flood_fill_cache.remove_way(way.id);
}
ProcessedElement::Node(node) => {
if node.tags.contains_key("door") || node.tags.contains_key("entrance") {
@@ -149,13 +166,19 @@ pub fn generate_world_with_options(
} else if node.tags.contains_key("natural")
&& node.tags.get("natural") == Some(&"tree".to_string())
{
natural::generate_natural(&mut editor, element, args);
natural::generate_natural(&mut editor, &element, args, &flood_fill_cache);
} else if node.tags.contains_key("amenity") {
amenities::generate_amenities(&mut editor, element, args);
amenities::generate_amenities(&mut editor, &element, args, &flood_fill_cache);
} else if node.tags.contains_key("barrier") {
barriers::generate_barrier_nodes(&mut editor, node);
} else if node.tags.contains_key("highway") {
highways::generate_highways(&mut editor, element, args, &highway_connectivity);
highways::generate_highways(
&mut editor,
&element,
args,
&highway_connectivity,
&flood_fill_cache,
);
} else if node.tags.contains_key("tourism") {
tourisms::generate_tourisms(&mut editor, node);
} else if node.tags.contains_key("man_made") {
@@ -164,7 +187,12 @@ pub fn generate_world_with_options(
}
ProcessedElement::Relation(rel) => {
if rel.tags.contains_key("building") || rel.tags.contains_key("building:part") {
buildings::generate_building_from_relation(&mut editor, rel, args);
buildings::generate_building_from_relation(
&mut editor,
rel,
args,
&flood_fill_cache,
);
} else if rel.tags.contains_key("water")
|| rel
.tags
@@ -174,24 +202,43 @@ pub fn generate_world_with_options(
{
water_areas::generate_water_areas_from_relation(&mut editor, rel, &xzbbox);
} else if rel.tags.contains_key("natural") {
natural::generate_natural_from_relation(&mut editor, rel, args);
} else if rel.tags.contains_key("landuse") {
landuse::generate_landuse_from_relation(&mut editor, rel, args);
} else if rel.tags.get("leisure") == Some(&"park".to_string()) {
leisure::generate_leisure_from_relation(&mut editor, rel, args);
} else if rel.tags.contains_key("man_made") {
man_made::generate_man_made(
natural::generate_natural_from_relation(
&mut editor,
&ProcessedElement::Relation(rel.clone()),
rel,
args,
&flood_fill_cache,
);
} else if rel.tags.contains_key("landuse") {
landuse::generate_landuse_from_relation(
&mut editor,
rel,
args,
&flood_fill_cache,
);
} else if rel.tags.get("leisure") == Some(&"park".to_string()) {
leisure::generate_leisure_from_relation(
&mut editor,
rel,
args,
&flood_fill_cache,
);
} else if rel.tags.contains_key("man_made") {
man_made::generate_man_made(&mut editor, &element, args);
}
// Release flood fill cache entries for all ways in this relation
let way_ids: Vec<u64> = rel.members.iter().map(|m| m.way.id).collect();
flood_fill_cache.remove_relation_ways(&way_ids);
}
}
// Element is dropped here, freeing its memory immediately
}
process_pb.finish();
// Drop remaining caches
drop(highway_connectivity);
drop(flood_fill_cache);
// Generate ground layer
let total_blocks: u64 = xzbbox.bounding_rect().total_blocks();
let desired_updates: u64 = 1500;
@@ -215,46 +262,72 @@ pub fn generate_world_with_options(
let total_iterations_grnd: f64 = total_blocks as f64;
let progress_increment_grnd: f64 = 20.0 / total_iterations_grnd;
let groundlayer_block = GRASS_BLOCK;
// Check if terrain elevation is enabled; when disabled, we can skip ground level lookups entirely
let terrain_enabled = ground.elevation_enabled;
for x in xzbbox.min_x()..=xzbbox.max_x() {
for z in xzbbox.min_z()..=xzbbox.max_z() {
// Add default dirt and grass layer if there isn't a stone layer already
if !editor.check_for_block(x, 0, z, Some(&[STONE])) {
editor.set_block(groundlayer_block, x, 0, z, None, None);
editor.set_block(DIRT, x, -1, z, None, None);
editor.set_block(DIRT, x, -2, z, None, None);
}
// Process ground generation chunk-by-chunk for better cache locality.
// This keeps the same region/chunk HashMap entries hot in CPU cache,
// rather than jumping between regions on every Z iteration.
let min_chunk_x = xzbbox.min_x() >> 4;
let max_chunk_x = xzbbox.max_x() >> 4;
let min_chunk_z = xzbbox.min_z() >> 4;
let max_chunk_z = xzbbox.max_z() >> 4;
// Fill underground with stone
if args.fillground {
// Fill from bedrock+1 to 3 blocks below ground with stone
editor.fill_blocks_absolute(
STONE,
x,
MIN_Y + 1,
z,
x,
editor.get_absolute_y(x, -3, z),
z,
None,
None,
);
}
// Generate a bedrock level at MIN_Y
editor.set_block_absolute(BEDROCK, x, MIN_Y, z, None, Some(&[BEDROCK]));
for chunk_x in min_chunk_x..=max_chunk_x {
for chunk_z in min_chunk_z..=max_chunk_z {
// Calculate the block range for this chunk, clamped to bbox
let chunk_min_x = (chunk_x << 4).max(xzbbox.min_x());
let chunk_max_x = ((chunk_x << 4) + 15).min(xzbbox.max_x());
let chunk_min_z = (chunk_z << 4).max(xzbbox.min_z());
let chunk_max_z = ((chunk_z << 4) + 15).min(xzbbox.max_z());
block_counter += 1;
// Use manual % check since is_multiple_of() is unstable on stable Rust
#[allow(clippy::manual_is_multiple_of)]
if block_counter % batch_size == 0 {
ground_pb.inc(batch_size);
}
for x in chunk_min_x..=chunk_max_x {
for z in chunk_min_z..=chunk_max_z {
// Get the ground level: when terrain is enabled, look it up once per block;
// when disabled, use the constant ground_level (no function call overhead)
let ground_y = if terrain_enabled {
editor.get_ground_level(x, z)
} else {
args.ground_level
};
gui_progress_grnd += progress_increment_grnd;
if (gui_progress_grnd - last_emitted_progress).abs() > 0.25 {
emit_gui_progress_update(gui_progress_grnd, "");
last_emitted_progress = gui_progress_grnd;
// Add default dirt and grass layer if there isn't a stone layer already
if !editor.check_for_block_absolute(x, ground_y, z, Some(&[STONE]), None) {
editor.set_block_absolute(GRASS_BLOCK, x, ground_y, z, None, None);
editor.set_block_absolute(DIRT, x, ground_y - 1, z, None, None);
editor.set_block_absolute(DIRT, x, ground_y - 2, z, None, None);
}
// Fill underground with stone
if args.fillground {
// Fill from bedrock+1 to 3 blocks below ground with stone
editor.fill_blocks_absolute(
STONE,
x,
MIN_Y + 1,
z,
x,
ground_y - 3,
z,
None,
None,
);
}
// Generate a bedrock level at MIN_Y
editor.set_block_absolute(BEDROCK, x, MIN_Y, z, None, Some(&[BEDROCK]));
block_counter += 1;
#[allow(clippy::manual_is_multiple_of)]
if block_counter % batch_size == 0 {
ground_pb.inc(batch_size);
}
gui_progress_grnd += progress_increment_grnd;
if (gui_progress_grnd - last_emitted_progress).abs() > 0.25 {
emit_gui_progress_update(gui_progress_grnd, "");
last_emitted_progress = gui_progress_grnd;
}
}
}
}
}
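
Editor's note (not part of the changeset): a minimal standalone sketch of the chunk-clamping arithmetic used in the ground-generation loop above, with hypothetical bounding-box values. Chunk coordinates are block coordinates arithmetically shifted by 4 (floor division by 16), and each chunk's block range is clamped back to the bounding box.

// Illustrative sketch (assumed values); mirrors the `>> 4` / `<< 4` chunking above.
fn chunk_block_ranges(min_x: i32, max_x: i32) -> Vec<(i32, i32)> {
    let min_chunk = min_x >> 4; // floor division by 16, also correct for negatives
    let max_chunk = max_x >> 4;
    (min_chunk..=max_chunk)
        .map(|c| ((c << 4).max(min_x), ((c << 4) + 15).min(max_x)))
        .collect()
}

// chunk_block_ranges(-20, 37) == [(-20, -17), (-16, -1), (0, 15), (16, 31), (32, 37)]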

127
src/deterministic_rng.rs Normal file
View File

@@ -0,0 +1,127 @@
//! Deterministic random number generation for consistent element processing.
//!
//! This module provides seeded RNG that ensures the same element always produces
//! the same random values, regardless of processing order. This is essential for
//! region-by-region streaming where the same element may be processed multiple times
//! (once for each region it touches).
//!
//! # Example
//! ```ignore
//! let mut rng = element_rng(element_id);
//! let color = rng.gen_bool(0.5); // Always same result for same element_id
//! ```
use rand::SeedableRng;
use rand_chacha::ChaCha8Rng;
/// Creates a deterministic RNG seeded from an element ID.
///
/// The same element ID will always produce the same sequence of random values,
/// ensuring consistent results when an element is processed multiple times
/// (e.g., once per region it touches during streaming).
///
/// # Arguments
/// * `element_id` - The unique OSM element ID (way ID, node ID, or relation ID)
///
/// # Returns
/// A seeded ChaCha8Rng that will produce deterministic random values
#[inline]
pub fn element_rng(element_id: u64) -> ChaCha8Rng {
ChaCha8Rng::seed_from_u64(element_id)
}
/// Creates a deterministic RNG seeded from an element ID with an additional salt.
///
/// Use this when you need multiple independent random sequences for the same element.
/// For example, one sequence for wall colors and another for roof style.
///
/// # Arguments
/// * `element_id` - The unique OSM element ID
/// * `salt` - Additional value to create a different sequence (e.g., use different
/// salt values for different purposes within the same element)
#[inline]
#[allow(dead_code)]
pub fn element_rng_salted(element_id: u64, salt: u64) -> ChaCha8Rng {
// Combine element_id and salt using XOR and bit rotation to avoid collisions
let combined = element_id ^ salt.rotate_left(32);
ChaCha8Rng::seed_from_u64(combined)
}
/// Creates a deterministic RNG seeded from coordinates.
///
/// Use this for per-block randomness that needs to be consistent regardless
/// of processing order (e.g., random flower placement within a natural area).
///
/// # Arguments
/// * `x` - X coordinate
/// * `z` - Z coordinate
/// * `element_id` - The element ID for additional uniqueness
#[inline]
pub fn coord_rng(x: i32, z: i32, element_id: u64) -> ChaCha8Rng {
// Combine coordinates and element_id into a seed.
// Cast through u32 to handle negative coordinates consistently.
let coord_part = ((x as u32 as i64) << 32) | (z as u32 as i64);
let seed = (coord_part as u64) ^ element_id;
ChaCha8Rng::seed_from_u64(seed)
}
#[cfg(test)]
mod tests {
use super::*;
use rand::Rng;
#[test]
fn test_element_rng_deterministic() {
let mut rng1 = element_rng(12345);
let mut rng2 = element_rng(12345);
// Same seed should produce same sequence
for _ in 0..100 {
assert_eq!(rng1.gen::<u64>(), rng2.gen::<u64>());
}
}
#[test]
fn test_different_elements_different_values() {
let mut rng1 = element_rng(12345);
let mut rng2 = element_rng(12346);
// Different seeds should (almost certainly) produce different values
let v1: u64 = rng1.gen();
let v2: u64 = rng2.gen();
assert_ne!(v1, v2);
}
#[test]
fn test_salted_rng_different_from_base() {
let mut rng1 = element_rng(12345);
let mut rng2 = element_rng_salted(12345, 1);
let v1: u64 = rng1.gen();
let v2: u64 = rng2.gen();
assert_ne!(v1, v2);
}
#[test]
fn test_coord_rng_deterministic() {
let mut rng1 = coord_rng(100, 200, 12345);
let mut rng2 = coord_rng(100, 200, 12345);
assert_eq!(rng1.gen::<u64>(), rng2.gen::<u64>());
}
#[test]
fn test_coord_rng_negative_coordinates() {
// Negative coordinates are common in Minecraft worlds
let mut rng1 = coord_rng(-100, -200, 12345);
let mut rng2 = coord_rng(-100, -200, 12345);
assert_eq!(rng1.gen::<u64>(), rng2.gen::<u64>());
// Ensure different negative coords produce different seeds
let mut rng3 = coord_rng(-100, -200, 12345);
let mut rng4 = coord_rng(-101, -200, 12345);
assert_ne!(rng3.gen::<u64>(), rng4.gen::<u64>());
}
}
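
Editor's note (not part of the diff): a possible caller-side sketch of how the helpers above keep per-element decisions stable across region-by-region streaming. The function name and probabilities are illustrative only; they echo the building-style choices seen later in this changeset.

use rand::Rng;

// Hypothetical helper: the same way id always yields the same style choices,
// no matter how many regions re-process the element.
fn pick_building_style(way_id: u64) -> (bool, bool) {
    let mut rng = crate::deterministic_rng::element_rng(way_id);
    let vertical_windows = rng.gen_bool(0.7);
    let accent_roof_line = rng.gen_bool(0.25);
    (vertical_windows, accent_roof_line)
}

// pick_building_style(42) == pick_building_style(42) holds on every call.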

View File

@@ -2,11 +2,19 @@ use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::coordinate_system::cartesian::XZPoint;
use crate::floodfill::flood_fill_area;
use crate::deterministic_rng::element_rng;
use crate::floodfill::flood_fill_area; // Needed for inline amenity flood fills
use crate::floodfill_cache::FloodFillCache;
use crate::osm_parser::ProcessedElement;
use crate::world_editor::WorldEditor;
use rand::Rng;
pub fn generate_amenities(editor: &mut WorldEditor, element: &ProcessedElement, args: &Args) {
pub fn generate_amenities(
editor: &mut WorldEditor,
element: &ProcessedElement,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
// Skip if 'layer' or 'level' is negative in the tags
if let Some(layer) = element.tags().get("layer") {
if layer.parse::<i32>().unwrap_or(0) < 0 {
@@ -42,18 +50,14 @@ pub fn generate_amenities(editor: &mut WorldEditor, element: &ProcessedElement,
let ground_block: Block = OAK_PLANKS;
let roof_block: Block = STONE_BLOCK_SLAB;
let polygon_coords: Vec<(i32, i32)> = element
.nodes()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
// Use pre-computed flood fill from cache
let floor_area: Vec<(i32, i32)> =
flood_fill_cache.get_or_compute_element(element, args.timeout.as_ref());
if polygon_coords.is_empty() {
if floor_area.is_empty() {
return;
}
let floor_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
// Fill the floor area
for (x, z) in floor_area.iter() {
editor.set_block(ground_block, *x, 0, *z, None, None);
@@ -80,8 +84,10 @@ pub fn generate_amenities(editor: &mut WorldEditor, element: &ProcessedElement,
"bench" => {
// Place a bench
if let Some(pt) = first_node {
// 50% chance to 90 degrees rotate the bench using if
if rand::random::<bool>() {
// Use deterministic RNG for consistent bench orientation across region boundaries
let mut rng = element_rng(element.id());
// 50% chance to rotate the bench 90 degrees
if rng.gen_bool(0.5) {
editor.set_block(SMOOTH_STONE, pt.x, 1, pt.z, None, None);
editor.set_block(OAK_LOG, pt.x + 1, 1, pt.z, None, None);
editor.set_block(OAK_LOG, pt.x - 1, 1, pt.z, None, None);
@@ -95,12 +101,9 @@ pub fn generate_amenities(editor: &mut WorldEditor, element: &ProcessedElement,
"shelter" => {
let roof_block: Block = STONE_BRICK_SLAB;
let polygon_coords: Vec<(i32, i32)> = element
.nodes()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
// Use pre-computed flood fill from cache
let roof_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
flood_fill_cache.get_or_compute_element(element, args.timeout.as_ref());
// Place fences and roof slabs at each corner node directly
for node in element.nodes() {

View File

@@ -3,37 +3,97 @@ use crate::bresenham::bresenham_line;
use crate::osm_parser::ProcessedWay;
use crate::world_editor::WorldEditor;
// TODO FIX
// TODO FIX - This handles ways with bridge=yes tag (e.g., highway bridges)
#[allow(dead_code)]
pub fn generate_bridges(editor: &mut WorldEditor, element: &ProcessedWay) {
if let Some(_bridge_type) = element.tags.get("bridge") {
let bridge_height = 3; // Fixed height
let bridge_height = 3; // Height above the ground level
// Get start and end node elevations and use MAX for level bridge deck
// Using MAX ensures bridges don't dip when multiple bridge ways meet in a valley
let bridge_deck_ground_y = if element.nodes.len() >= 2 {
let start_node = &element.nodes[0];
let end_node = &element.nodes[element.nodes.len() - 1];
let start_y = editor.get_ground_level(start_node.x, start_node.z);
let end_y = editor.get_ground_level(end_node.x, end_node.z);
start_y.max(end_y)
} else {
return; // Need at least 2 nodes for a bridge
};
// Calculate total bridge length for ramp positioning
let total_length: f64 = element
.nodes
.windows(2)
.map(|pair| {
let dx = (pair[1].x - pair[0].x) as f64;
let dz = (pair[1].z - pair[0].z) as f64;
(dx * dx + dz * dz).sqrt()
})
.sum();
if total_length == 0.0 {
return;
}
let mut accumulated_length: f64 = 0.0;
for i in 1..element.nodes.len() {
let prev = &element.nodes[i - 1];
let cur = &element.nodes[i];
let segment_dx = (cur.x - prev.x) as f64;
let segment_dz = (cur.z - prev.z) as f64;
let segment_length = (segment_dx * segment_dx + segment_dz * segment_dz).sqrt();
let points = bresenham_line(prev.x, 0, prev.z, cur.x, 0, cur.z);
let total_length = points.len();
let ramp_length = 6; // Length of ramp at each end
let ramp_length = (total_length * 0.15).clamp(6.0, 20.0) as usize; // 15% of bridge, min 6, max 20 blocks
for (idx, (x, _, z)) in points.iter().enumerate() {
let height = if idx < ramp_length {
// Calculate progress along this segment
let segment_progress = if points.len() > 1 {
idx as f64 / (points.len() - 1) as f64
} else {
0.0
};
// Calculate overall progress along the entire bridge
let point_distance = accumulated_length + segment_progress * segment_length;
let overall_progress = (point_distance / total_length).clamp(0.0, 1.0);
let total_len_usize = total_length as usize;
let overall_idx = (overall_progress * total_len_usize as f64) as usize;
// Calculate ramp height offset
let ramp_offset = if overall_idx < ramp_length {
// Start ramp (rising)
(idx * bridge_height) / ramp_length
} else if idx >= total_length - ramp_length {
(overall_idx as f64 * bridge_height as f64 / ramp_length as f64) as i32
} else if overall_idx >= total_len_usize.saturating_sub(ramp_length) {
// End ramp (descending)
((total_length - idx) * bridge_height) / ramp_length
let dist_from_end = total_len_usize - overall_idx;
(dist_from_end as f64 * bridge_height as f64 / ramp_length as f64) as i32
} else {
// Middle section (constant height)
bridge_height
};
// Use fixed bridge deck height (max of endpoints) plus ramp offset
let bridge_y = bridge_deck_ground_y + ramp_offset;
// Place bridge blocks
for dx in -2..=2 {
editor.set_block(LIGHT_GRAY_CONCRETE, *x + dx, height as i32, *z, None, None);
editor.set_block_absolute(
LIGHT_GRAY_CONCRETE,
*x + dx,
bridge_y,
*z,
None,
None,
);
}
}
accumulated_length += segment_length;
}
}
}
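
Editor's note (not part of the diff): the ramp profile above, reduced to a single function over the overall point index, with a hypothetical 100-block bridge as a worked example.

// Simplified sketch of the ramp-offset logic above (assumes the whole bridge
// is treated as one run of `total_len` points).
fn ramp_offset(overall_idx: usize, total_len: usize, bridge_height: i32) -> i32 {
    let ramp_length = ((total_len as f64) * 0.15).clamp(6.0, 20.0) as usize;
    if overall_idx < ramp_length {
        // start ramp (rising)
        (overall_idx as f64 * bridge_height as f64 / ramp_length as f64) as i32
    } else if overall_idx >= total_len.saturating_sub(ramp_length) {
        // end ramp (descending)
        let dist_from_end = total_len - overall_idx;
        (dist_from_end as f64 * bridge_height as f64 / ramp_length as f64) as i32
    } else {
        // level middle section
        bridge_height
    }
}

// For total_len = 100 and bridge_height = 3, ramp_length clamps to 15:
// ramp_offset(0, 100, 3) == 0, ramp_offset(15, 100, 3) == 3, ramp_offset(99, 100, 3) == 0.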

View File

@@ -3,8 +3,9 @@ use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::colors::color_text_to_rgb_tuple;
use crate::coordinate_system::cartesian::XZPoint;
use crate::deterministic_rng::element_rng;
use crate::element_processing::subprocessor::buildings_interior::generate_building_interior;
use crate::floodfill::flood_fill_area;
use crate::floodfill_cache::FloodFillCache;
use crate::osm_parser::{ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;
@@ -28,6 +29,7 @@ pub fn generate_buildings(
element: &ProcessedWay,
args: &Args,
relation_levels: Option<i32>,
flood_fill_cache: &FloodFillCache,
) {
// Get min_level first so we can use it both for start_level and building height calculations
let min_level = if let Some(min_level_str) = element.tags.get("building:min_level") {
@@ -43,10 +45,9 @@ pub fn generate_buildings(
let scale_factor = args.scale;
let min_level_offset = multiply_scale(min_level * 4, scale_factor);
// Cache floodfill result: compute once and reuse throughout
let polygon_coords: Vec<(i32, i32)> = element.nodes.iter().map(|n| (n.x, n.z)).collect();
// Use pre-computed flood fill from cache
let cached_floor_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
flood_fill_cache.get_or_compute(element, args.timeout.as_ref());
let cached_footprint_size = cached_floor_area.len();
// Use fixed starting Y coordinate based on maximum ground level when terrain is enabled
@@ -121,7 +122,8 @@ pub fn generate_buildings(
let mut processed_points: HashSet<(i32, i32)> = HashSet::new();
let mut building_height: i32 = ((6.0 * scale_factor) as i32).max(3); // Default building height with scale and minimum
let mut is_tall_building = false;
let mut rng = rand::thread_rng();
// Use deterministic RNG seeded by element ID for consistent results across region boundaries
let mut rng = element_rng(element.id);
let use_vertical_windows = rng.gen_bool(0.7);
let use_accent_roof_line = rng.gen_bool(0.25);
@@ -386,7 +388,7 @@ pub fn generate_buildings(
building_height = ((23.0 * scale_factor) as i32).max(3);
}
} else if building_type == "bridge" {
generate_bridge(editor, element, args.timeout.as_ref());
generate_bridge(editor, element, flood_fill_cache, args.timeout.as_ref());
return;
}
}
@@ -1484,6 +1486,7 @@ pub fn generate_building_from_relation(
editor: &mut WorldEditor,
relation: &ProcessedRelation,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
// Extract levels from relation tags
let relation_levels = relation
@@ -1495,7 +1498,13 @@ pub fn generate_building_from_relation(
// Process the outer way to create the building walls
for member in &relation.members {
if member.role == ProcessedMemberRole::Outer {
generate_buildings(editor, &member.way, args, Some(relation_levels));
generate_buildings(
editor,
&member.way,
args,
Some(relation_levels),
flood_fill_cache,
);
}
}
@@ -1516,52 +1525,18 @@ pub fn generate_building_from_relation(
}
/// Generates a bridge structure, paying attention to the "level" tag.
/// The bridge deck is held level at the higher of the start and end elevations to avoid
/// being dragged down by valleys underneath.
fn generate_bridge(
editor: &mut WorldEditor,
element: &ProcessedWay,
flood_fill_cache: &FloodFillCache,
floodfill_timeout: Option<&Duration>,
) {
let floor_block: Block = STONE;
let railing_block: Block = STONE_BRICKS;
// Process the nodes to create bridge pathways and railings
let mut previous_node: Option<(i32, i32)> = None;
for node in &element.nodes {
let x: i32 = node.x;
let z: i32 = node.z;
// Calculate bridge level based on the "level" tag
let bridge_y_offset = if let Some(level_str) = element.tags.get("level") {
if let Ok(level) = level_str.parse::<i32>() {
(level * 3) + 1
} else {
1 // Default elevation
}
} else {
1 // Default elevation
};
// Create bridge path using Bresenham's line
if let Some(prev) = previous_node {
let bridge_points: Vec<(i32, i32, i32)> =
bresenham_line(prev.0, bridge_y_offset, prev.1, x, bridge_y_offset, z);
for (bx, by, bz) in bridge_points {
// Place railing blocks
editor.set_block(railing_block, bx, by + 1, bz, None, None);
editor.set_block(railing_block, bx, by, bz, None, None);
}
}
previous_node = Some((x, z));
}
// Flood fill the area between the bridge path nodes
let polygon_coords: Vec<(i32, i32)> = element.nodes.iter().map(|n| (n.x, n.z)).collect();
let bridge_area: Vec<(i32, i32)> = flood_fill_area(&polygon_coords, floodfill_timeout);
// Calculate bridge level based on the "level" tag
// Calculate bridge level offset based on the "level" tag
let bridge_y_offset = if let Some(level_str) = element.tags.get("level") {
if let Ok(level) = level_str.parse::<i32>() {
(level * 3) + 1
@@ -1572,8 +1547,51 @@ fn generate_bridge(
1 // Default elevation
};
// Need at least 2 nodes to form a bridge
if element.nodes.len() < 2 {
return;
}
// Get start and end node elevations and use MAX for level bridge deck
// Using MAX ensures bridges don't dip when multiple bridge ways meet in a valley
let start_node = &element.nodes[0];
let end_node = &element.nodes[element.nodes.len() - 1];
let start_y = editor.get_ground_level(start_node.x, start_node.z);
let end_y = editor.get_ground_level(end_node.x, end_node.z);
let bridge_deck_ground_y = start_y.max(end_y);
// Process the nodes to create bridge pathways and railings
let mut previous_node: Option<(i32, i32)> = None;
for node in &element.nodes {
let x: i32 = node.x;
let z: i32 = node.z;
// Create bridge path using Bresenham's line
if let Some(prev) = previous_node {
let bridge_points: Vec<(i32, i32, i32)> = bresenham_line(prev.0, 0, prev.1, x, 0, z);
for (bx, _, bz) in bridge_points.iter() {
// Use fixed bridge deck height (max of endpoints)
let bridge_y = bridge_deck_ground_y + bridge_y_offset;
// Place railing blocks
editor.set_block_absolute(railing_block, *bx, bridge_y + 1, *bz, None, None);
editor.set_block_absolute(railing_block, *bx, bridge_y, *bz, None, None);
}
}
previous_node = Some((x, z));
}
// Flood fill the area between the bridge path nodes (uses cache)
let bridge_area: Vec<(i32, i32)> = flood_fill_cache.get_or_compute(element, floodfill_timeout);
// Use the same level bridge deck height for filled areas
let floor_y = bridge_deck_ground_y + bridge_y_offset;
// Place floor blocks
for (x, z) in bridge_area {
editor.set_block(floor_block, x, bridge_y_offset, z, None, None);
editor.set_block_absolute(floor_block, x, floor_y, z, None, None);
}
}

View File

@@ -2,7 +2,7 @@ use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::coordinate_system::cartesian::XZPoint;
use crate::floodfill::flood_fill_area;
use crate::floodfill_cache::FloodFillCache;
use crate::osm_parser::{ProcessedElement, ProcessedWay};
use crate::world_editor::WorldEditor;
use std::collections::HashMap;
@@ -10,14 +10,24 @@ use std::collections::HashMap;
/// Type alias for highway connectivity map
pub type HighwayConnectivityMap = HashMap<(i32, i32), Vec<i32>>;
/// Minimum terrain dip (in blocks) below max endpoint elevation to classify a bridge as valley-spanning
const VALLEY_BRIDGE_THRESHOLD: i32 = 7;
/// Generates highways with elevation support based on layer tags and connectivity analysis
pub fn generate_highways(
editor: &mut WorldEditor,
element: &ProcessedElement,
args: &Args,
highway_connectivity: &HighwayConnectivityMap,
flood_fill_cache: &FloodFillCache,
) {
generate_highways_internal(editor, element, args, highway_connectivity);
generate_highways_internal(
editor,
element,
args,
highway_connectivity,
flood_fill_cache,
);
}
/// Build a connectivity map for highway endpoints to determine where slopes are needed.
@@ -66,6 +76,7 @@ fn generate_highways_internal(
element: &ProcessedElement,
args: &Args,
highway_connectivity: &HashMap<(i32, i32), Vec<i32>>, // Maps node coordinates to list of layers that connect to this node
flood_fill_cache: &FloodFillCache,
) {
if let Some(highway_type) = element.tags().get("highway") {
if highway_type == "street_lamp" {
@@ -137,14 +148,9 @@ fn generate_highways_internal(
};
}
// Fill the area using flood fill or by iterating through the nodes
let polygon_coords: Vec<(i32, i32)> = way
.nodes
.iter()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
// Fill the area using flood fill cache
let filled_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
flood_fill_cache.get_or_compute(way, args.timeout.as_ref());
for (x, z) in filled_area {
editor.set_block(surface_block, x, 0, z, None, None);
@@ -157,6 +163,11 @@ fn generate_highways_internal(
let mut add_outline = false;
let scale_factor = args.scale;
// Check if this is a bridge - bridges need special elevation handling
// to span across valleys instead of following terrain
// Accept any bridge tag value except "no" (e.g., "yes", "viaduct", "aqueduct", etc.)
let is_bridge = element.tags().get("bridge").is_some_and(|v| v != "no");
// Parse the layer value for elevation calculation
let layer_value = element
.tags()
@@ -246,6 +257,7 @@ fn generate_highways_internal(
let base_elevation = layer_value * LAYER_HEIGHT_STEP;
// Check if we need slopes at start and end
// This is used for overpasses that need ramps to ground-level roads
let needs_start_slope =
should_add_slope_at_node(&way.nodes[0], layer_value, highway_connectivity);
let needs_end_slope = should_add_slope_at_node(
@@ -254,10 +266,67 @@ fn generate_highways_internal(
highway_connectivity,
);
// Calculate total way length for slope distribution
// Calculate total way length for slope distribution (needed before valley bridge check)
let total_way_length = calculate_way_length(way);
// Check if this is a short isolated elevated segment - if so, treat as ground level
// For bridges: detect if this spans a valley by checking terrain profile
// A valley bridge has terrain that dips significantly below the endpoints
// Skip valley detection entirely if terrain is disabled (no valleys in flat terrain)
// Skip very short bridges (< 25 blocks) as they're unlikely to span significant valleys
let terrain_enabled = editor
.get_ground()
.map(|g| g.elevation_enabled)
.unwrap_or(false);
let (is_valley_bridge, bridge_deck_y) =
if is_bridge && terrain_enabled && way.nodes.len() >= 2 && total_way_length >= 25 {
let start_node = &way.nodes[0];
let end_node = &way.nodes[way.nodes.len() - 1];
let start_y = editor.get_ground_level(start_node.x, start_node.z);
let end_y = editor.get_ground_level(end_node.x, end_node.z);
let max_endpoint_y = start_y.max(end_y);
// Sample terrain at middle nodes only (excluding endpoints we already have)
// This avoids redundant get_ground_level() calls
let middle_nodes = &way.nodes[1..way.nodes.len().saturating_sub(1)];
let sampled_min = if middle_nodes.is_empty() {
// No middle nodes, just use endpoints
start_y.min(end_y)
} else {
// Sample up to 3 middle points (5 total with endpoints) for performance
// Valleys are wide terrain features, so sparse sampling is sufficient
let sample_count = middle_nodes.len().min(3);
let step = if sample_count > 1 {
(middle_nodes.len() - 1) / (sample_count - 1)
} else {
1
};
middle_nodes
.iter()
.step_by(step.max(1))
.map(|node| editor.get_ground_level(node.x, node.z))
.min()
.unwrap_or(max_endpoint_y)
};
// Include endpoint elevations in the minimum calculation
let min_terrain_y = sampled_min.min(start_y).min(end_y);
// If ANY sampled point along the bridge is significantly lower than the max endpoint,
// treat as valley bridge
let is_valley = min_terrain_y < max_endpoint_y - VALLEY_BRIDGE_THRESHOLD;
if is_valley {
(true, max_endpoint_y)
} else {
(false, 0)
}
} else {
(false, 0)
};
// Check if this is a short isolated elevated segment (layer > 0); if so, treat it as ground level
let is_short_isolated_elevated =
needs_start_slope && needs_end_slope && layer_value > 0 && total_way_length <= 35;
@@ -294,17 +363,28 @@ fn generate_highways_internal(
let gap_length: i32 = (5.0 * scale_factor).ceil() as i32;
for (point_index, (x, _, z)) in bresenham_points.iter().enumerate() {
// Calculate Y elevation for this point based on slopes and layer
let current_y = calculate_point_elevation(
segment_index,
point_index,
segment_length,
total_segments,
effective_elevation,
effective_start_slope,
effective_end_slope,
slope_length,
);
// Calculate Y elevation for this point
// For valley bridges: use fixed deck height (max of endpoints) to stay level
// For overpasses and regular roads: use terrain-relative elevation with slopes
let (current_y, use_absolute_y) = if is_valley_bridge {
// Valley bridge deck is level at the maximum endpoint elevation
// Don't add base_elevation - the layer tag indicates it's above water/road,
// not that it should be higher than the terrain endpoints
(bridge_deck_y, true)
} else {
// Regular road or overpass: use terrain-relative calculation with ramps
let y = calculate_point_elevation(
segment_index,
point_index,
segment_length,
total_segments,
effective_elevation,
effective_start_slope,
effective_end_slope,
slope_length,
);
(y, false)
};
// Draw the road surface for the entire width
for dx in -block_range..=block_range {
@@ -320,12 +400,32 @@ fn generate_highways_internal(
let is_horizontal: bool = (x2 - x1).abs() >= (z2 - z1).abs();
if is_horizontal {
if set_x % 2 < 1 {
editor.set_block(
WHITE_CONCRETE,
if use_absolute_y {
editor.set_block_absolute(
WHITE_CONCRETE,
set_x,
current_y,
set_z,
Some(&[BLACK_CONCRETE]),
None,
);
} else {
editor.set_block(
WHITE_CONCRETE,
set_x,
current_y,
set_z,
Some(&[BLACK_CONCRETE]),
None,
);
}
} else if use_absolute_y {
editor.set_block_absolute(
BLACK_CONCRETE,
set_x,
current_y,
set_z,
Some(&[BLACK_CONCRETE]),
None,
None,
);
} else {
@@ -339,12 +439,32 @@ fn generate_highways_internal(
);
}
} else if set_z % 2 < 1 {
editor.set_block(
WHITE_CONCRETE,
if use_absolute_y {
editor.set_block_absolute(
WHITE_CONCRETE,
set_x,
current_y,
set_z,
Some(&[BLACK_CONCRETE]),
None,
);
} else {
editor.set_block(
WHITE_CONCRETE,
set_x,
current_y,
set_z,
Some(&[BLACK_CONCRETE]),
None,
);
}
} else if use_absolute_y {
editor.set_block_absolute(
BLACK_CONCRETE,
set_x,
current_y,
set_z,
Some(&[BLACK_CONCRETE]),
None,
None,
);
} else {
@@ -357,6 +477,15 @@ fn generate_highways_internal(
None,
);
}
} else if use_absolute_y {
editor.set_block_absolute(
block_type,
set_x,
current_y,
set_z,
None,
Some(&[BLACK_CONCRETE, WHITE_CONCRETE]),
);
} else {
editor.set_block(
block_type,
@@ -368,30 +497,53 @@ fn generate_highways_internal(
);
}
// Add stone brick foundation underneath elevated highways for thickness
if effective_elevation > 0 && current_y > 0 {
// Add stone brick foundation underneath elevated highways/bridges for thickness
if (effective_elevation > 0 || use_absolute_y) && current_y > 0 {
// Add 1 layer of stone bricks underneath the highway surface
editor.set_block(
STONE_BRICKS,
set_x,
current_y - 1,
set_z,
None,
None,
);
if use_absolute_y {
editor.set_block_absolute(
STONE_BRICKS,
set_x,
current_y - 1,
set_z,
None,
None,
);
} else {
editor.set_block(
STONE_BRICKS,
set_x,
current_y - 1,
set_z,
None,
None,
);
}
}
// Add support pillars for elevated highways
if effective_elevation != 0 && current_y > 0 {
add_highway_support_pillar(
editor,
set_x,
current_y,
set_z,
dx,
dz,
block_range,
);
// Add support pillars for elevated highways/bridges
if (effective_elevation != 0 || use_absolute_y) && current_y > 0 {
if use_absolute_y {
add_highway_support_pillar_absolute(
editor,
set_x,
current_y,
set_z,
dx,
dz,
block_range,
);
} else {
add_highway_support_pillar(
editor,
set_x,
current_y,
set_z,
dx,
dz,
block_range,
);
}
}
}
}
@@ -402,27 +554,49 @@ fn generate_highways_internal(
for dz in -block_range..=block_range {
let outline_x = x - block_range - 1;
let outline_z = z + dz;
editor.set_block(
LIGHT_GRAY_CONCRETE,
outline_x,
current_y,
outline_z,
None,
None,
);
if use_absolute_y {
editor.set_block_absolute(
LIGHT_GRAY_CONCRETE,
outline_x,
current_y,
outline_z,
None,
None,
);
} else {
editor.set_block(
LIGHT_GRAY_CONCRETE,
outline_x,
current_y,
outline_z,
None,
None,
);
}
}
// Right outline
for dz in -block_range..=block_range {
let outline_x = x + block_range + 1;
let outline_z = z + dz;
editor.set_block(
LIGHT_GRAY_CONCRETE,
outline_x,
current_y,
outline_z,
None,
None,
);
if use_absolute_y {
editor.set_block_absolute(
LIGHT_GRAY_CONCRETE,
outline_x,
current_y,
outline_z,
None,
None,
);
} else {
editor.set_block(
LIGHT_GRAY_CONCRETE,
outline_x,
current_y,
outline_z,
None,
None,
);
}
}
}
@@ -431,14 +605,25 @@ fn generate_highways_internal(
if stripe_length < dash_length {
let stripe_x: i32 = *x;
let stripe_z: i32 = *z;
editor.set_block(
WHITE_CONCRETE,
stripe_x,
current_y,
stripe_z,
Some(&[BLACK_CONCRETE]),
None,
);
if use_absolute_y {
editor.set_block_absolute(
WHITE_CONCRETE,
stripe_x,
current_y,
stripe_z,
Some(&[BLACK_CONCRETE]),
None,
);
} else {
editor.set_block(
WHITE_CONCRETE,
stripe_x,
current_y,
stripe_z,
Some(&[BLACK_CONCRETE]),
None,
);
}
}
// Increment stripe_length and reset after completing a dash and gap
@@ -582,6 +767,46 @@ fn add_highway_support_pillar(
}
}
/// Add support pillars for bridges using absolute Y coordinates
/// Pillars extend from ground level up to the bridge deck
fn add_highway_support_pillar_absolute(
editor: &mut WorldEditor,
x: i32,
bridge_deck_y: i32,
z: i32,
dx: i32,
dz: i32,
_block_range: i32, // Keep for future use
) {
// Only add pillars at specific intervals and positions
if dx == 0 && dz == 0 && (x + z) % 8 == 0 {
// Get the actual ground level at this position
let ground_y = editor.get_ground_level(x, z);
// Add pillar from ground up to bridge deck
// Only if the bridge is actually above the ground
if bridge_deck_y > ground_y {
for y in (ground_y + 1)..bridge_deck_y {
editor.set_block_absolute(STONE_BRICKS, x, y, z, None, None);
}
// Add pillar base at ground level
for base_dx in -1..=1 {
for base_dz in -1..=1 {
editor.set_block_absolute(
STONE_BRICKS,
x + base_dx,
ground_y,
z + base_dz,
None,
None,
);
}
}
}
}
}
/// Generates a siding using stone brick slabs
pub fn generate_siding(editor: &mut WorldEditor, element: &ProcessedWay) {
let mut previous_node: Option<XZPoint> = None;
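
Editor's note (not part of the diff): the valley check described above, isolated into a small function over sampled ground levels. The sample elevations in the trailing comment are hypothetical.

const VALLEY_BRIDGE_THRESHOLD: i32 = 7;

// Returns Some(deck_y) when any sampled ground level dips more than the
// threshold below the higher endpoint, i.e. the bridge spans a valley.
fn valley_bridge_deck(sampled_ground_y: &[i32]) -> Option<i32> {
    let (&start_y, &end_y) = (sampled_ground_y.first()?, sampled_ground_y.last()?);
    let max_endpoint_y = start_y.max(end_y);
    let min_terrain_y = *sampled_ground_y.iter().min()?;
    if min_terrain_y < max_endpoint_y - VALLEY_BRIDGE_THRESHOLD {
        Some(max_endpoint_y) // level deck at the higher endpoint
    } else {
        None // follow the normal terrain-relative elevation path
    }
}

// valley_bridge_deck(&[80, 71, 68, 79]) == Some(80)
// valley_bridge_deck(&[80, 78, 79]) == None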

View File

@@ -1,12 +1,18 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::deterministic_rng::element_rng;
use crate::element_processing::tree::Tree;
use crate::floodfill::flood_fill_area;
use crate::floodfill_cache::FloodFillCache;
use crate::osm_parser::{ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;
pub fn generate_landuse(editor: &mut WorldEditor, element: &ProcessedWay, args: &Args) {
pub fn generate_landuse(
editor: &mut WorldEditor,
element: &ProcessedWay,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
// Determine block type based on landuse tag
let binding: String = "".to_string();
let landuse_tag: &String = element.tags.get("landuse").unwrap_or(&binding);
@@ -44,11 +50,12 @@ pub fn generate_landuse(editor: &mut WorldEditor, element: &ProcessedWay, args:
_ => GRASS_BLOCK,
};
// Get the area of the landuse element
let polygon_coords: Vec<(i32, i32)> = element.nodes.iter().map(|n| (n.x, n.z)).collect();
let floor_area: Vec<(i32, i32)> = flood_fill_area(&polygon_coords, args.timeout.as_ref());
// Get the area of the landuse element using cache
let floor_area: Vec<(i32, i32)> =
flood_fill_cache.get_or_compute(element, args.timeout.as_ref());
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
// Use deterministic RNG seeded by element ID for consistent results across region boundaries
let mut rng = element_rng(element.id);
for (x, z) in floor_area {
if landuse_tag == "traffic_island" {
@@ -275,12 +282,13 @@ pub fn generate_landuse_from_relation(
editor: &mut WorldEditor,
rel: &ProcessedRelation,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
if rel.tags.contains_key("landuse") {
// Generate individual ways with their original tags
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
generate_landuse(editor, &member.way.clone(), args);
generate_landuse(editor, &member.way.clone(), args, flood_fill_cache);
}
}
@@ -302,7 +310,7 @@ pub fn generate_landuse_from_relation(
};
// Generate landuse area from combined way
generate_landuse(editor, &combined_way, args);
generate_landuse(editor, &combined_way, args, flood_fill_cache);
}
}
}

View File

@@ -1,13 +1,19 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::deterministic_rng::element_rng;
use crate::element_processing::tree::Tree;
use crate::floodfill::flood_fill_area;
use crate::floodfill_cache::FloodFillCache;
use crate::osm_parser::{ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;
pub fn generate_leisure(editor: &mut WorldEditor, element: &ProcessedWay, args: &Args) {
pub fn generate_leisure(
editor: &mut WorldEditor,
element: &ProcessedWay,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
if let Some(leisure_type) = element.tags.get("leisure") {
let mut previous_node: Option<(i32, i32)> = None;
let mut corner_addup: (i32, i32, i32) = (0, 0, 0);
@@ -74,15 +80,13 @@ pub fn generate_leisure(editor: &mut WorldEditor, element: &ProcessedWay, args:
previous_node = Some((node.x, node.z));
}
// Flood-fill the interior of the leisure area
// Flood-fill the interior of the leisure area using cache
if corner_addup != (0, 0, 0) {
let polygon_coords: Vec<(i32, i32)> = element
.nodes
.iter()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
let filled_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
flood_fill_cache.get_or_compute(element, args.timeout.as_ref());
// Use deterministic RNG seeded by element ID for consistent results across region boundaries
let mut rng = element_rng(element.id);
for (x, z) in filled_area {
editor.set_block(block_type, x, 0, z, Some(&[GRASS_BLOCK]), None);
@@ -91,7 +95,6 @@ pub fn generate_leisure(editor: &mut WorldEditor, element: &ProcessedWay, args:
if matches!(leisure_type.as_str(), "park" | "garden" | "nature_reserve")
&& editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK]))
{
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
let random_choice: i32 = rng.gen_range(0..1000);
match random_choice {
@@ -123,7 +126,6 @@ pub fn generate_leisure(editor: &mut WorldEditor, element: &ProcessedWay, args:
// Add playground or recreation ground features
if matches!(leisure_type.as_str(), "playground" | "recreation_ground") {
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
let random_choice: i32 = rng.gen_range(0..5000);
match random_choice {
@@ -176,12 +178,13 @@ pub fn generate_leisure_from_relation(
editor: &mut WorldEditor,
rel: &ProcessedRelation,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
if rel.tags.get("leisure") == Some(&"park".to_string()) {
// First generate individual ways with their original tags
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
generate_leisure(editor, &member.way, args);
generate_leisure(editor, &member.way, args, flood_fill_cache);
}
}
@@ -201,6 +204,6 @@ pub fn generate_leisure_from_relation(
};
// Generate leisure area from combined way
generate_leisure(editor, &combined_way, args);
generate_leisure(editor, &combined_way, args, flood_fill_cache);
}
}

View File

@@ -1,13 +1,19 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::deterministic_rng::element_rng;
use crate::element_processing::tree::Tree;
use crate::floodfill::flood_fill_area;
use crate::floodfill_cache::FloodFillCache;
use crate::osm_parser::{ProcessedElement, ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;
pub fn generate_natural(editor: &mut WorldEditor, element: &ProcessedElement, args: &Args) {
pub fn generate_natural(
editor: &mut WorldEditor,
element: &ProcessedElement,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
if let Some(natural_type) = element.tags().get("natural") {
if natural_type == "tree" {
if let ProcessedElement::Node(node) = element {
@@ -69,17 +75,13 @@ pub fn generate_natural(editor: &mut WorldEditor, element: &ProcessedElement, ar
previous_node = Some((x, z));
}
// If there are natural nodes, flood-fill the area
// If there are natural nodes, flood-fill the area using cache
if corner_addup != (0, 0, 0) {
let polygon_coords: Vec<(i32, i32)> = way
.nodes
.iter()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
let filled_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
flood_fill_cache.get_or_compute(way, args.timeout.as_ref());
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
// Use deterministic RNG seeded by element ID for consistent results across region boundaries
let mut rng = element_rng(way.id);
for (x, z) in filled_area {
editor.set_block(block_type, x, 0, z, None, None);
@@ -448,12 +450,18 @@ pub fn generate_natural_from_relation(
editor: &mut WorldEditor,
rel: &ProcessedRelation,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
if rel.tags.contains_key("natural") {
// Generate individual ways with their original tags
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
generate_natural(editor, &ProcessedElement::Way(member.way.clone()), args);
generate_natural(
editor,
&ProcessedElement::Way((*member.way).clone()),
args,
flood_fill_cache,
);
}
}
@@ -475,7 +483,12 @@ pub fn generate_natural_from_relation(
};
// Generate natural area from combined way
generate_natural(editor, &ProcessedElement::Way(combined_way), args);
generate_natural(
editor,
&ProcessedElement::Way(combined_way),
args,
flood_fill_cache,
);
}
}
}

View File

@@ -1,4 +1,5 @@
use crate::block_definitions::*;
use crate::deterministic_rng::coord_rng;
use crate::world_editor::WorldEditor;
use rand::Rng;
@@ -115,7 +116,9 @@ impl Tree<'_> {
blacklist.extend(Self::get_functional_blocks());
blacklist.push(WATER);
let mut rng = rand::thread_rng();
// Use deterministic RNG based on coordinates for consistent tree types across region boundaries
// The element_id of 0 is used as a salt for tree-specific randomness
let mut rng = coord_rng(x, z, 0);
let tree = Self::get_tree(match rng.gen_range(1..=3) {
1 => TreeType::Oak,

View File

@@ -1,6 +1,5 @@
use geo::orient::{Direction, Orient};
use geo::{Contains, Intersects, LineString, Point, Polygon, Rect};
use std::time::Instant;
use crate::clipping::clip_water_ring_to_bbox;
use crate::{
@@ -15,15 +14,13 @@ pub fn generate_water_area_from_way(
element: &ProcessedWay,
_xzbbox: &XZBBox,
) {
let start_time = Instant::now();
let outers = [element.nodes.clone()];
if !verify_closed_rings(&outers) {
println!("Skipping way {} due to invalid polygon", element.id);
return;
}
generate_water_areas(editor, &outers, &[], start_time);
generate_water_areas(editor, &outers, &[]);
}
pub fn generate_water_areas_from_relation(
@@ -31,8 +28,6 @@ pub fn generate_water_areas_from_relation(
element: &ProcessedRelation,
xzbbox: &XZBBox,
) {
let start_time = Instant::now();
// Check if this is a water relation (either with water tag or natural=water)
let is_water = element.tags.contains_key("water")
|| element
@@ -123,14 +118,13 @@ pub fn generate_water_areas_from_relation(
return;
}
generate_water_areas(editor, &outers, &inners, start_time);
generate_water_areas(editor, &outers, &inners);
}
fn generate_water_areas(
editor: &mut WorldEditor,
outers: &[Vec<ProcessedNode>],
inners: &[Vec<ProcessedNode>],
start_time: Instant,
) {
// Calculate polygon bounding box to limit fill area
let mut poly_min_x = i32::MAX;
@@ -169,9 +163,7 @@ fn generate_water_areas(
.map(|x| x.iter().map(|y| y.xz()).collect::<Vec<_>>())
.collect();
inverse_floodfill(
min_x, min_z, max_x, max_z, outers_xz, inners_xz, editor, start_time,
);
inverse_floodfill(min_x, min_z, max_x, max_z, outers_xz, inners_xz, editor);
}
/// Merges way segments that share endpoints into closed rings.
@@ -308,7 +300,6 @@ fn inverse_floodfill(
outers: Vec<Vec<XZPoint>>,
inners: Vec<Vec<XZPoint>>,
editor: &mut WorldEditor,
start_time: Instant,
) {
// Convert to geo Polygons with normalized winding order
let inners: Vec<_> = inners
@@ -341,14 +332,7 @@ fn inverse_floodfill(
})
.collect();
inverse_floodfill_recursive(
(min_x, min_z),
(max_x, max_z),
&outers,
&inners,
editor,
start_time,
);
inverse_floodfill_recursive((min_x, min_z), (max_x, max_z), &outers, &inners, editor);
}
fn inverse_floodfill_recursive(
@@ -357,12 +341,11 @@ fn inverse_floodfill_recursive(
outers: &[Polygon],
inners: &[Polygon],
editor: &mut WorldEditor,
start_time: Instant,
) {
// Check if we've exceeded 25 seconds
if start_time.elapsed().as_secs() > 25 {
println!("Water area generation exceeded 25 seconds, continuing anyway");
}
// Check if we've exceeded 40 seconds
// if start_time.elapsed().as_secs() > 40 {
// println!("Water area generation exceeded 40 seconds, continuing anyway");
// }
const ITERATIVE_THRES: i64 = 10_000;
@@ -417,7 +400,6 @@ fn inverse_floodfill_recursive(
&outers_intersects,
&inners_intersects,
editor,
start_time,
);
}
}

View File

@@ -7,8 +7,6 @@ use std::path::{Path, PathBuf};
/// Maximum Y coordinate in Minecraft (build height limit)
const MAX_Y: i32 = 319;
/// Scale factor for converting real elevation to Minecraft heights
const BASE_HEIGHT_SCALE: f64 = 0.7;
/// AWS S3 Terrarium tiles endpoint (no API key required)
const AWS_TERRARIUM_URL: &str =
"https://s3.amazonaws.com/elevation-tiles-prod/terrarium/{z}/{x}/{y}.png";
@@ -318,20 +316,18 @@ pub fn fetch_elevation_data(
// This smooths terrain proportionally while preserving more detail.
let sigma: f64 = BASE_SIGMA_REF * (grid_size / BASE_GRID_REF).sqrt();
let blur_percentage: f64 = (sigma / grid_size) * 100.0;
eprintln!(
//let blur_percentage: f64 = (sigma / grid_size) * 100.0;
/*eprintln!(
"Elevation blur: grid={}x{}, sigma={:.2}, blur_percentage={:.2}%",
grid_width, grid_height, sigma, blur_percentage
);
/* eprintln!(
"Grid: {}x{}, Blur sigma: {:.2}",
grid_width, grid_height, sigma
); */
);*/
// Continue with the existing blur and conversion to Minecraft heights...
let blurred_heights: Vec<Vec<f64>> = apply_gaussian_blur(&height_grid, sigma);
// Release raw height grid
drop(height_grid);
let mut mc_heights: Vec<Vec<i32>> = Vec::with_capacity(blurred_heights.len());
// Find min/max in raw data
@@ -355,7 +351,7 @@ pub fn fetch_elevation_data(
}
}
eprintln!("Height data range: {min_height} to {max_height} m");
//eprintln!("Height data range: {min_height} to {max_height} m");
if extreme_low_count > 0 {
eprintln!(
"WARNING: Found {extreme_low_count} pixels with extremely low elevations (< -1000m)"
@@ -368,35 +364,58 @@ pub fn fetch_elevation_data(
}
let height_range: f64 = max_height - min_height;
// Apply scale factor to height scaling
let mut height_scale: f64 = BASE_HEIGHT_SCALE * scale.sqrt(); // sqrt to make height scaling less extreme
let mut scaled_range: f64 = height_range * height_scale;
// Adaptive scaling: ensure we don't exceed reasonable Y range
let available_y_range = (MAX_Y - ground_level) as f64;
let safety_margin = 0.9; // Use 90% of available range
let max_allowed_range = available_y_range * safety_margin;
// Realistic height scaling: 1 meter of real elevation = scale blocks in Minecraft
// At scale=1.0, 1 meter = 1 block (realistic 1:1 mapping)
// At scale=2.0, 1 meter = 2 blocks (exaggerated for larger worlds)
let ideal_scaled_range: f64 = height_range * scale;
if scaled_range > max_allowed_range {
let adjustment_factor = max_allowed_range / scaled_range;
height_scale *= adjustment_factor;
scaled_range = height_range * height_scale;
// Calculate available Y range in Minecraft (from ground_level to MAX_Y)
// Leave a buffer at the top for buildings, trees, and other structures
const TERRAIN_HEIGHT_BUFFER: i32 = 15;
let available_y_range: f64 = (MAX_Y - TERRAIN_HEIGHT_BUFFER - ground_level) as f64;
// Determine final height scale:
// - Use realistic 1:1 (times scale) if terrain fits within Minecraft limits
// - Only compress if the terrain would exceed the build height
let scaled_range: f64 = if ideal_scaled_range <= available_y_range {
// Terrain fits! Use realistic scaling
eprintln!(
"Height range too large, applying scaling adjustment factor: {adjustment_factor:.3}"
"Realistic elevation: {:.1}m range fits in {} available blocks",
height_range, available_y_range as i32
);
eprintln!("Adjusted scaled range: {scaled_range:.1} blocks");
}
ideal_scaled_range
} else {
// Terrain too tall, compress to fit within Minecraft limits
let compression_factor: f64 = available_y_range / height_range;
let compressed_range: f64 = height_range * compression_factor;
eprintln!(
"Elevation compressed: {:.1}m range -> {:.0} blocks ({:.2}:1 ratio, 1 block = {:.2}m)",
height_range,
compressed_range,
height_range / compressed_range,
compressed_range / height_range
);
compressed_range
};
// Convert to scaled Minecraft Y coordinates
// Lowest real elevation maps to ground_level, highest maps to ground_level + scaled_range
for row in blurred_heights {
let mc_row: Vec<i32> = row
.iter()
.map(|&h| {
// Scale the height differences
let relative_height: f64 = (h - min_height) / height_range;
// Calculate relative position within the elevation range (0.0 to 1.0)
let relative_height: f64 = if height_range > 0.0 {
(h - min_height) / height_range
} else {
0.0
};
// Scale to Minecraft blocks and add to ground level
let scaled_height: f64 = relative_height * scaled_range;
// With terrain enabled, ground_level is used as the MIN_Y for terrain
((ground_level as f64 + scaled_height).round() as i32).clamp(ground_level, MAX_Y)
// Clamp to valid Minecraft Y range (leave buffer at top for structures)
((ground_level as f64 + scaled_height).round() as i32)
.clamp(ground_level, MAX_Y - TERRAIN_HEIGHT_BUFFER)
})
.collect();
mc_heights.push(mc_row);
@@ -410,7 +429,7 @@ pub fn fetch_elevation_data(
max_block_height = max_block_height.max(height);
}
}
eprintln!("Minecraft height data range: {min_block_height} to {max_block_height} blocks");
//eprintln!("Minecraft height data range: {min_block_height} to {max_block_height} blocks");
Ok(ElevationData {
heights: mc_heights,
@@ -570,7 +589,7 @@ fn filter_elevation_outliers(height_grid: &mut [Vec<f64>]) {
let min_reasonable = all_heights[p1_idx];
let max_reasonable = all_heights[p99_idx];
eprintln!("Filtering outliers outside range: {min_reasonable:.1}m to {max_reasonable:.1}m");
//eprintln!("Filtering outliers outside range: {min_reasonable:.1}m to {max_reasonable:.1}m");
let mut outliers_filtered = 0;
@@ -585,7 +604,7 @@ fn filter_elevation_outliers(height_grid: &mut [Vec<f64>]) {
}
if outliers_filtered > 0 {
eprintln!("Filtered {outliers_filtered} elevation outliers, interpolating replacements...");
//eprintln!("Filtered {outliers_filtered} elevation outliers, interpolating replacements...");
// Re-run the NaN filling to interpolate the filtered values
fill_nan_values(height_grid);
}
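As a quick illustration of the new scaling rule, here is a minimal, self-contained sketch with assumed inputs (the ground_level of -62 and the 300 m / 900 m height ranges are made up for the example; only MAX_Y and TERRAIN_HEIGHT_BUFFER come from the diff above):

// Illustrative numbers only: ground_level is assumed, the height ranges are invented;
// MAX_Y and TERRAIN_HEIGHT_BUFFER mirror the constants shown above.
const MAX_Y: i32 = 319;
const TERRAIN_HEIGHT_BUFFER: i32 = 15;

fn main() {
    let ground_level: i32 = -62;
    let scale: f64 = 1.0;
    let available_y_range = (MAX_Y - TERRAIN_HEIGHT_BUFFER - ground_level) as f64; // 366 blocks

    for height_range in [300.0_f64, 900.0] {
        let ideal_scaled_range = height_range * scale;
        let scaled_range = if ideal_scaled_range <= available_y_range {
            ideal_scaled_range // fits: realistic 1 m = `scale` blocks
        } else {
            available_y_range // too tall: compressed to fit under the build limit
        };
        println!(
            "{height_range} m -> {scaled_range} blocks ({:.2} m per block)",
            height_range / scaled_range
        );
    }
}

With these assumed numbers, the 300 m range maps 1:1 (1.00 m per block), while the 900 m range is compressed into 366 blocks (about 2.46 m per block).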

189
src/floodfill_cache.rs Normal file
View File

@@ -0,0 +1,189 @@
//! Pre-computed flood fill cache for parallel polygon filling.
//!
//! This module provides a way to pre-compute all flood fill operations in parallel
//! before the main element processing loop, then retrieve cached results during
//! sequential processing.
use crate::floodfill::flood_fill_area;
use crate::osm_parser::{ProcessedElement, ProcessedWay};
use fnv::FnvHashMap;
use rayon::prelude::*;
use std::time::Duration;
/// A cache of pre-computed flood fill results, keyed by element ID.
pub struct FloodFillCache {
/// Cached results: element_id -> filled coordinates
way_cache: FnvHashMap<u64, Vec<(i32, i32)>>,
}
impl FloodFillCache {
/// Creates an empty cache.
pub fn new() -> Self {
Self {
way_cache: FnvHashMap::default(),
}
}
/// Pre-computes flood fills for all elements that need them.
///
/// This runs in parallel using Rayon, taking advantage of multiple CPU cores.
pub fn precompute(elements: &[ProcessedElement], timeout: Option<&Duration>) -> Self {
// Collect all ways that need flood fill
let ways_needing_fill: Vec<&ProcessedWay> = elements
.iter()
.filter_map(|el| match el {
ProcessedElement::Way(way) => {
if Self::way_needs_flood_fill(way) {
Some(way)
} else {
None
}
}
_ => None,
})
.collect();
// Compute all way flood fills in parallel
let way_results: Vec<(u64, Vec<(i32, i32)>)> = ways_needing_fill
.par_iter()
.map(|way| {
let polygon_coords: Vec<(i32, i32)> =
way.nodes.iter().map(|n| (n.x, n.z)).collect();
let filled = flood_fill_area(&polygon_coords, timeout);
(way.id, filled)
})
.collect();
// Build the cache
let mut cache = Self::new();
for (id, filled) in way_results {
cache.way_cache.insert(id, filled);
}
cache
}
/// Gets cached flood fill result for a way, or computes it if not cached.
///
/// Note: Combined ways created from relations (e.g., in `generate_natural_from_relation`)
/// will miss the cache and fall back to on-demand computation. This is by design,
/// these synthetic ways don't exist in the original element list and have relation IDs
/// rather than way IDs. The individual member ways are still cached.
pub fn get_or_compute(
&self,
way: &ProcessedWay,
timeout: Option<&Duration>,
) -> Vec<(i32, i32)> {
if let Some(cached) = self.way_cache.get(&way.id) {
// Clone is intentional: each result is typically accessed once during
// sequential processing, so the cost is acceptable compared to the complexity of Arc-wrapping the results
cached.clone()
} else {
// Fallback: compute on demand for synthetic/combined ways from relations
let polygon_coords: Vec<(i32, i32)> = way.nodes.iter().map(|n| (n.x, n.z)).collect();
flood_fill_area(&polygon_coords, timeout)
}
}
/// Gets cached flood fill result for a ProcessedElement (Way only).
/// For Nodes and Relations, returns an empty Vec.
pub fn get_or_compute_element(
&self,
element: &ProcessedElement,
timeout: Option<&Duration>,
) -> Vec<(i32, i32)> {
match element {
ProcessedElement::Way(way) => self.get_or_compute(way, timeout),
_ => Vec::new(),
}
}
/// Determines if a way element needs flood fill based on its tags.
///
/// This checks for tag presence (not specific values) because:
/// - Only some values within each tag type actually use flood fill
/// - Caching extra results is harmless (only a small memory overhead)
/// - It avoids duplicating the value-checking logic already in the processors
///
/// Covered cases:
/// - building/building:part -> buildings::generate_buildings (includes bridge)
/// - landuse -> landuse::generate_landuse
/// - leisure -> leisure::generate_leisure
/// - amenity -> amenities::generate_amenities
/// - natural (except tree) -> natural::generate_natural
/// - highway with area=yes -> highways::generate_highways (area fill)
fn way_needs_flood_fill(way: &ProcessedWay) -> bool {
way.tags.contains_key("building")
|| way.tags.contains_key("building:part")
|| way.tags.contains_key("landuse")
|| way.tags.contains_key("leisure")
|| way.tags.contains_key("amenity")
|| way
.tags
.get("natural")
.map(|v| v != "tree")
.unwrap_or(false)
// Highway areas (like pedestrian plazas) use flood fill when area=yes
|| (way.tags.contains_key("highway")
&& way.tags.get("area").map(|v| v == "yes").unwrap_or(false))
}
/// Returns the number of cached way entries.
pub fn way_count(&self) -> usize {
self.way_cache.len()
}
/// Removes a way's cached flood fill result, freeing memory.
///
/// Call this after processing an element to release its cached data.
pub fn remove_way(&mut self, way_id: u64) {
self.way_cache.remove(&way_id);
}
/// Removes all cached flood fill results for ways in a relation.
///
/// Relations contain multiple ways, so we need to remove all of them.
pub fn remove_relation_ways(&mut self, way_ids: &[u64]) {
for &id in way_ids {
self.way_cache.remove(&id);
}
}
}
impl Default for FloodFillCache {
fn default() -> Self {
Self::new()
}
}
/// Configures the global Rayon thread pool with a CPU usage cap.
///
/// Call this once at startup before any parallel operations.
///
/// # Arguments
/// * `cpu_fraction` - Fraction of available cores to use (e.g., 0.9 for 90%).
/// Values are clamped to the range [0.1, 1.0].
pub fn configure_rayon_thread_pool(cpu_fraction: f64) {
// Clamp cpu_fraction to valid range
let cpu_fraction = cpu_fraction.clamp(0.1, 1.0);
let available_cores = std::thread::available_parallelism()
.map(|n| n.get())
.unwrap_or(4);
let target_threads = ((available_cores as f64) * cpu_fraction).floor() as usize;
let target_threads = target_threads.max(1); // At least 1 thread
// Only configure if we haven't already (this can only be called once)
match rayon::ThreadPoolBuilder::new()
.num_threads(target_threads)
.build_global()
{
Ok(()) => {
// Successfully configured (silent to avoid cluttering output)
}
Err(_) => {
// Thread pool already configured
}
}
}
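To make the intended flow of the new cache easier to follow, here is a hypothetical call site, not the project's actual processing loop; the function name process_all, the module paths, the element list, and the 40-second timeout are assumptions, while the FloodFillCache methods match the signatures above:

use std::time::Duration;

use crate::floodfill_cache::FloodFillCache;
use crate::osm_parser::ProcessedElement;

// Hypothetical call site; the real pipeline lives elsewhere in the crate.
fn process_all(elements: Vec<ProcessedElement>) {
    // One-time setup: cap Rayon at 90% of the available cores.
    crate::floodfill_cache::configure_rayon_thread_pool(0.9);

    // Parallel pre-pass: compute every needed flood fill before the sequential loop.
    let timeout = Duration::from_secs(40);
    let mut cache = FloodFillCache::precompute(&elements, Some(&timeout));
    println!("Pre-computed {} flood fills", cache.way_count());

    for element in &elements {
        // Ways hit the cache; synthetic relation ways fall back to on-demand computation.
        let _filled = cache.get_or_compute_element(element, Some(&timeout));
        // ... place blocks for `_filled` here ...

        // Release the cached coordinates once this element is done.
        if let ProcessedElement::Way(way) = element {
            cache.remove_way(way.id);
        }
    }
}

Calling remove_way after each element keeps the cache from holding every result for the whole run, so peak memory stays bounded even for large element lists.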

View File

@@ -74,6 +74,9 @@ fn get_area_name_for_bedrock(bbox: &LLBBox) -> String {
}
pub fn run_gui() {
// Configure thread pool with 90% CPU cap to keep system responsive
crate::floodfill_cache::configure_rayon_thread_pool(0.9);
// Launch the UI
println!("Launching UI...");
@@ -794,7 +797,6 @@ fn gui_start_generation(
selected_world: String,
world_scale: f64,
ground_level: i32,
floodfill_timeout: u64,
terrain_enabled: bool,
skip_osm_objects: bool,
interior_enabled: bool,
@@ -975,7 +977,7 @@ fn gui_start_generation(
roof: roof_enabled,
fillground: fillground_enabled,
debug: false,
timeout: Some(std::time::Duration::from_secs(floodfill_timeout)),
timeout: Some(std::time::Duration::from_secs(40)),
spawn_point,
};
@@ -1069,10 +1071,9 @@ fn gui_start_generation(
Ok(())
}
Err(e) => {
let error_msg = format!("Failed to fetch data: {e}");
emit_gui_error(&error_msg);
emit_gui_error(&e.to_string());
// Session lock will be automatically released when _session_lock goes out of scope
Err(error_msg)
Err(e.to_string())
}
}
})

8
src/gui/index.html vendored
View File

@@ -151,14 +151,6 @@
</div>
</div>
<!-- Floodfill Timeout Input -->
<div class="settings-row">
<label for="floodfill-timeout" data-localize="floodfill_timeout">Floodfill Timeout (sec)</label>
<div class="settings-control">
<input type="number" id="floodfill-timeout" name="floodfill-timeout" min="0" step="1" value="20" placeholder="Seconds">
</div>
</div>
<!-- Map Theme Selector -->
<div class="settings-row">
<label for="tile-theme-select" data-localize="map_theme">Map Theme</label>

9
src/gui/js/main.js vendored
View File

@@ -91,7 +91,6 @@ async function applyLocalization(localization) {
"h2[data-localize='customization_settings']": "customization_settings",
"label[data-localize='world_scale']": "world_scale",
"label[data-localize='custom_bounding_box']": "custom_bounding_box",
"label[data-localize='floodfill_timeout']": "floodfill_timeout",
// DEPRECATED: Ground level localization removed
// "label[data-localize='ground_level']": "ground_level",
"label[data-localize='language']": "language",
@@ -110,7 +109,6 @@ async function applyLocalization(localization) {
// Placeholder strings
"input[id='bbox-coords']": "placeholder_bbox",
"input[id='floodfill-timeout']": "placeholder_floodfill",
// DEPRECATED: Ground level placeholder removed
// "input[id='ground-level']": "placeholder_ground"
};
@@ -791,14 +789,12 @@ async function startGeneration() {
var roof = document.getElementById("roof-toggle").checked;
var fill_ground = document.getElementById("fillground-toggle").checked;
var scale = parseFloat(document.getElementById("scale-value-slider").value);
var floodfill_timeout = parseInt(document.getElementById("floodfill-timeout").value, 10);
// var ground_level = parseInt(document.getElementById("ground-level").value, 10);
// DEPRECATED: Ground level input removed from UI
var ground_level = -62;
// Validate floodfill_timeout and ground_level
floodfill_timeout = isNaN(floodfill_timeout) || floodfill_timeout < 0 ? 20 : floodfill_timeout;
ground_level = isNaN(ground_level) || ground_level < -62 ? 20 : ground_level;
// Validate ground_level
ground_level = isNaN(ground_level) || ground_level < -62 ? -62 : ground_level;
// Get telemetry consent (defaults to false if not set)
const telemetryConsent = window.getTelemetryConsent ? window.getTelemetryConsent() : false;
@@ -809,7 +805,6 @@ async function startGeneration() {
selectedWorld: worldPath,
worldScale: scale,
groundLevel: ground_level,
floodfillTimeout: floodfill_timeout,
terrainEnabled: terrain,
skipOsmObjects: skipOsmObjects,
interiorEnabled: interior,

View File

@@ -9,9 +9,11 @@ mod clipping;
mod colors;
mod coordinate_system;
mod data_processing;
mod deterministic_rng;
mod element_processing;
mod elevation_data;
mod floodfill;
mod floodfill_cache;
mod ground;
mod map_renderer;
mod map_transformation;
@@ -49,6 +51,9 @@ mod progress {
use windows::Win32::System::Console::{AttachConsole, FreeConsole, ATTACH_PARENT_PROCESS};
fn run_cli() {
// Configure thread pool with 90% CPU cap to keep system responsive
floodfill_cache::configure_rayon_thread_pool(0.9);
let version: &str = env!("CARGO_PKG_VERSION");
let repository: &str = env!("CARGO_PKG_REPOSITORY");
println!(

View File

@@ -5,8 +5,8 @@ use crate::coordinate_system::transformation::CoordTransformer;
use crate::progress::emit_gui_progress_update;
use colored::Colorize;
use serde::Deserialize;
use serde_json::Value;
use std::collections::HashMap;
use std::sync::Arc;
// Raw data from OSM
@@ -29,9 +29,18 @@ struct OsmElement {
pub members: Vec<OsmMember>,
}
#[derive(Deserialize)]
struct OsmData {
pub elements: Vec<OsmElement>,
#[derive(Debug, Deserialize)]
pub struct OsmData {
elements: Vec<OsmElement>,
#[serde(default)]
pub remark: Option<String>,
}
impl OsmData {
/// Returns true if there are no elements in the OSM data
pub fn is_empty(&self) -> bool {
self.elements.is_empty()
}
}
struct SplitOsmData {
@@ -68,11 +77,6 @@ impl SplitOsmData {
}
}
fn parse_raw_osm_data(json_data: Value) -> Result<SplitOsmData, serde_json::Error> {
let osm_data: OsmData = serde_json::from_value(json_data)?;
Ok(SplitOsmData::from_raw_osm_data(osm_data))
}
// End raw data
// Normalized data that we can use
@@ -112,7 +116,7 @@ pub enum ProcessedMemberRole {
#[derive(Debug, Clone, PartialEq)]
pub struct ProcessedMember {
pub role: ProcessedMemberRole,
pub way: ProcessedWay,
pub way: Arc<ProcessedWay>,
}
#[derive(Debug, Clone, PartialEq)]
@@ -164,7 +168,7 @@ impl ProcessedElement {
}
pub fn parse_osm_data(
json_data: Value,
osm_data: OsmData,
bbox: LLBBox,
scale: f64,
debug: bool,
@@ -174,7 +178,7 @@ pub fn parse_osm_data(
emit_gui_progress_update(5.0, "Parsing data...");
// Deserialize the JSON data into the OSMData structure
let data = parse_raw_osm_data(json_data).expect("Failed to parse OSM data");
let data = SplitOsmData::from_raw_osm_data(osm_data);
let (coord_transformer, xzbbox) = CoordTransformer::llbbox_to_xzbbox(&bbox, scale)
.unwrap_or_else(|e| {
@@ -189,7 +193,7 @@ pub fn parse_osm_data(
}
let mut nodes_map: HashMap<u64, ProcessedNode> = HashMap::new();
let mut ways_map: HashMap<u64, ProcessedWay> = HashMap::new();
let mut ways_map: HashMap<u64, Arc<ProcessedWay>> = HashMap::new();
let mut processed_elements: Vec<ProcessedElement> = Vec::new();
@@ -238,17 +242,15 @@ pub fn parse_osm_data(
let tags = element.tags.clone().unwrap_or_default();
// Store unclipped way for relation assembly (clipping happens after ring merging)
ways_map.insert(
element.id,
ProcessedWay {
id: element.id,
tags: tags.clone(),
nodes: nodes.clone(),
},
);
let way = Arc::new(ProcessedWay {
id: element.id,
tags,
nodes,
});
ways_map.insert(element.id, Arc::clone(&way));
// Clip way nodes for standalone way processing (not relations)
let clipped_nodes = clip_way_to_bbox(&nodes, &xzbbox);
let clipped_nodes = clip_way_to_bbox(&way.nodes, &xzbbox);
// Skip ways that are completely outside the bbox (empty after clipping)
if clipped_nodes.is_empty() {
@@ -257,8 +259,8 @@ pub fn parse_osm_data(
let processed: ProcessedWay = ProcessedWay {
id: element.id,
tags: tags.clone(),
nodes: clipped_nodes.clone(),
tags: way.tags.clone(),
nodes: clipped_nodes,
};
processed_elements.push(ProcessedElement::Way(processed));
@@ -294,8 +296,8 @@ pub fn parse_osm_data(
};
// Check if the way exists in ways_map
let way: ProcessedWay = match ways_map.get(&mem.r#ref) {
Some(w) => w.clone(),
let way = match ways_map.get(&mem.r#ref) {
Some(w) => Arc::clone(w),
None => {
// Way was likely filtered out because it was completely outside the bbox
return None;
@@ -311,11 +313,11 @@ pub fn parse_osm_data(
if clipped_nodes.is_empty() {
return None;
}
ProcessedWay {
Arc::new(ProcessedWay {
id: way.id,
tags: way.tags,
tags: way.tags.clone(),
nodes: clipped_nodes,
}
})
};
Some(ProcessedMember {
@@ -336,6 +338,9 @@ pub fn parse_osm_data(
emit_gui_progress_update(15.0, "");
drop(nodes_map);
drop(ways_map);
(processed_elements, xzbbox)
}
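For readers unfamiliar with why switching ProcessedMember to Arc<ProcessedWay> reduces cloning cost, here is a generic, self-contained sketch (toy types, not the crate's own):

use std::sync::Arc;

// Generic illustration: the vector is allocated once and shared; each Arc::clone
// copies a pointer and bumps a reference count, whereas Vec::clone would copy
// every element.
fn main() {
    let nodes: Arc<Vec<(i32, i32)>> = Arc::new((0..1_000_000).map(|i| (i, i)).collect());
    let shared_a = Arc::clone(&nodes); // cheap: no deep copy of the node data
    let shared_b = Arc::clone(&nodes);
    assert_eq!(Arc::strong_count(&nodes), 3);
    assert_eq!(shared_a.len(), shared_b.len());
    println!("shared {} nodes across 3 handles", nodes.len());
}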

View File

@@ -1,12 +1,14 @@
use crate::coordinate_system::geographic::LLBBox;
use crate::osm_parser::OsmData;
use crate::progress::{emit_gui_error, emit_gui_progress_update, is_running_with_gui};
use colored::Colorize;
use rand::seq::SliceRandom;
use reqwest::blocking::Client;
use reqwest::blocking::ClientBuilder;
use serde::Deserialize;
use serde_json::Value;
use std::fs::File;
use std::io::{self, BufReader, Write};
use std::io::{self, BufReader, Cursor, Write};
use std::process::Command;
use std::time::Duration;
@@ -34,19 +36,17 @@ fn download_with_reqwest(url: &str, query: &str) -> Result<String, Box<dyn std::
}
Err(e) => {
if e.is_timeout() {
eprintln!(
"{}",
"Error! Request timed out. Try selecting a smaller area."
.red()
.bold()
);
emit_gui_error("Request timed out. Try selecting a smaller area.");
let msg = "Request timed out. Try selecting a smaller area.";
eprintln!("{}", format!("Error! {msg}").red().bold());
Err(msg.into())
} else if e.is_connect() {
let msg = "No internet connection.";
eprintln!("{}", format!("Error! {msg}").red().bold());
Err(msg.into())
} else {
eprintln!("{}", format!("Error! {e:.52}").red().bold());
emit_gui_error(&format!("{:.52}", e.to_string()));
Err(format!("{e:.52}").into())
}
// Always propagate errors
Err(e.into())
}
}
}
@@ -79,13 +79,14 @@ fn download_with_wget(url: &str, query: &str) -> io::Result<String> {
}
}
pub fn fetch_data_from_file(file: &str) -> Result<Value, Box<dyn std::error::Error>> {
pub fn fetch_data_from_file(file: &str) -> Result<OsmData, Box<dyn std::error::Error>> {
println!("{} Loading data from file...", "[1/7]".bold());
emit_gui_progress_update(1.0, "Loading data from file...");
let file: File = File::open(file)?;
let reader: BufReader<File> = BufReader::new(file);
let data: Value = serde_json::from_reader(reader)?;
let mut deserializer = serde_json::Deserializer::from_reader(reader);
let data: OsmData = OsmData::deserialize(&mut deserializer)?;
Ok(data)
}
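The switch from an intermediate serde_json::Value to deserializing straight into a typed struct is the core of this change. A small standalone comparison follows; the Payload struct and the JSON literal are invented for the example, and serde (with the derive feature) plus serde_json are assumed dependencies:

use serde::Deserialize;
use std::io::Cursor;

#[derive(Debug, Deserialize)]
struct Payload {
    elements: Vec<i64>,
    #[serde(default)]
    remark: Option<String>,
}

fn main() -> Result<(), serde_json::Error> {
    let json = br#"{"elements":[1,2,3]}"#;

    // Old approach: build a full serde_json::Value tree, then convert it.
    let value: serde_json::Value = serde_json::from_slice(json)?;
    let _from_value: Payload = serde_json::from_value(value)?;

    // New approach: deserialize straight from the reader into the typed struct,
    // skipping the intermediate Value allocation entirely.
    let mut de = serde_json::Deserializer::from_reader(Cursor::new(&json[..]));
    let direct = Payload::deserialize(&mut de)?;
    println!("{direct:?}");
    Ok(())
}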
@@ -95,7 +96,7 @@ pub fn fetch_data_from_overpass(
debug: bool,
download_method: &str,
save_file: Option<&str>,
) -> Result<Value, Box<dyn std::error::Error>> {
) -> Result<OsmData, Box<dyn std::error::Error>> {
println!("{} Fetching data...", "[1/7]".bold());
emit_gui_progress_update(1.0, "Fetching data...");
@@ -182,14 +183,12 @@ pub fn fetch_data_from_overpass(
println!("API response saved to: {save_file}");
}
let data: Value = serde_json::from_str(&response)?;
let mut deserializer =
serde_json::Deserializer::from_reader(Cursor::new(response.as_bytes()));
let data: OsmData = OsmData::deserialize(&mut deserializer)?;
if data["elements"]
.as_array()
.map_or(0, |elements: &Vec<Value>| elements.len())
== 0
{
if let Some(remark) = data["remark"].as_str() {
if data.is_empty() {
if let Some(remark) = data.remark.as_deref() {
// Check if the remark mentions memory or other runtime errors
if remark.contains("runtime error") && remark.contains("out of memory") {
eprintln!("{}", "Error! The query ran out of memory on the Overpass API server. Try using a smaller area.".red().bold());
@@ -211,7 +210,7 @@ pub fn fetch_data_from_overpass(
}
if debug {
println!("Additional debug information: {data}");
println!("Additional debug information: {data:?}");
}
if !is_running_with_gui() {

View File

@@ -7,9 +7,8 @@ use crate::retrieve_data;
// this is copied from main.rs
pub fn generate_example(llbbox: LLBBox) -> (XZBBox, Vec<ProcessedElement>) {
// Fetch data
let raw_data: serde_json::Value =
retrieve_data::fetch_data_from_overpass(llbbox, false, "requests", None)
.expect("Failed to fetch data");
let raw_data = retrieve_data::fetch_data_from_overpass(llbbox, false, "requests", None)
.expect("Failed to fetch data");
// Parse raw data
let (mut parsed_elements, xzbbox) = osm_parser::parse_osm_data(raw_data, llbbox, 1.0, false);

View File

@@ -1,6 +1,7 @@
//! Java Edition Anvil format world saving.
//!
//! This module handles saving worlds in the Java Edition Anvil (.mca) format.
//! Supports streaming mode for memory-efficient saving of large worlds.
use super::common::{Chunk, ChunkToModify, Section};
use super::WorldEditor;
@@ -11,11 +12,9 @@ use fastanvil::Region;
use fastnbt::Value;
use fnv::FnvHashMap;
use indicatif::{ProgressBar, ProgressStyle};
use rayon::prelude::*;
use std::collections::HashMap;
use std::fs::File;
use std::io::Write;
use std::sync::atomic::{AtomicU64, Ordering};
#[cfg(feature = "gui")]
use crate::telemetry::{send_log, LogLevel};
@@ -77,6 +76,9 @@ impl<'a> WorldEditor<'a> {
}
/// Saves the world in Java Edition Anvil format.
///
/// Uses streaming mode: saves regions one at a time and releases memory after each,
/// significantly reducing peak memory usage for large worlds.
pub(super) fn save_java(&mut self) {
println!("{} Saving world...", "[7/7]".bold());
emit_gui_progress_update(90.0, "Saving world...");
@@ -100,139 +102,155 @@ impl<'a> WorldEditor<'a> {
.progress_chars("█▓░"),
);
let regions_processed = AtomicU64::new(0);
// Streaming mode: Process regions sequentially and release memory after each.
// This significantly reduces peak memory for large worlds (100+ regions).
// For small worlds, the overhead is negligible.
let mut regions_processed: u64 = 0;
self.world
.regions
.par_iter()
.for_each(|((region_x, region_z), region_to_modify)| {
let mut region = self.create_region(*region_x, *region_z);
let mut ser_buffer = Vec::with_capacity(8192);
// Collect region keys first to allow draining
let region_keys: Vec<(i32, i32)> = self.world.regions.keys().copied().collect();
for (&(chunk_x, chunk_z), chunk_to_modify) in &region_to_modify.chunks {
if !chunk_to_modify.sections.is_empty() || !chunk_to_modify.other.is_empty() {
// Read existing chunk data if it exists
let existing_data = region
.read_chunk(chunk_x as usize, chunk_z as usize)
.unwrap()
.unwrap_or_default();
for (region_x, region_z) in region_keys {
// Remove region from memory - this is the key to memory savings
if let Some(region_to_modify) = self.world.regions.remove(&(region_x, region_z)) {
self.save_single_region(region_x, region_z, &region_to_modify);
// Parse existing chunk or create new one
let mut chunk: Chunk = if !existing_data.is_empty() {
fastnbt::from_bytes(&existing_data).unwrap()
} else {
Chunk {
sections: Vec::new(),
x_pos: chunk_x + (region_x * 32),
z_pos: chunk_z + (region_z * 32),
is_light_on: 0,
other: FnvHashMap::default(),
}
};
// Region memory is freed when region_to_modify goes out of scope here
}
// Update sections while preserving existing data
let new_sections: Vec<Section> = chunk_to_modify.sections().collect();
for new_section in new_sections {
if let Some(existing_section) =
chunk.sections.iter_mut().find(|s| s.y == new_section.y)
{
// Merge block states
existing_section.block_states.palette =
new_section.block_states.palette;
existing_section.block_states.data = new_section.block_states.data;
} else {
// Add new section if it doesn't exist
chunk.sections.push(new_section);
}
}
regions_processed += 1;
// Preserve existing block entities and merge with new ones
if let Some(existing_entities) = chunk.other.get_mut("block_entities") {
if let Some(new_entities) = chunk_to_modify.other.get("block_entities")
{
if let (Value::List(existing), Value::List(new)) =
(existing_entities, new_entities)
{
// Remove old entities that are replaced by new ones
existing.retain(|e| {
if let Value::Compound(map) = e {
let (x, y, z) = get_entity_coords(map);
!new.iter().any(|new_e| {
if let Value::Compound(new_map) = new_e {
let (nx, ny, nz) = get_entity_coords(new_map);
x == nx && y == ny && z == nz
} else {
false
}
})
} else {
true
}
});
// Add new entities
existing.extend(new.clone());
}
}
} else {
// If no existing entities, just add the new ones
if let Some(new_entities) = chunk_to_modify.other.get("block_entities")
{
chunk
.other
.insert("block_entities".to_string(), new_entities.clone());
}
}
// Update progress at regular intervals
let update_interval = (total_regions / 10).max(1);
if regions_processed.is_multiple_of(update_interval)
|| regions_processed == total_regions
{
let progress = 90.0 + (regions_processed as f64 / total_regions as f64) * 9.0;
emit_gui_progress_update(progress, "Saving world...");
}
// Update chunk coordinates and flags
chunk.x_pos = chunk_x + (region_x * 32);
chunk.z_pos = chunk_z + (region_z * 32);
// Create Level wrapper and save
let level_data = create_level_wrapper(&chunk);
ser_buffer.clear();
fastnbt::to_writer(&mut ser_buffer, &level_data).unwrap();
region
.write_chunk(chunk_x as usize, chunk_z as usize, &ser_buffer)
.unwrap();
}
}
// Second pass: ensure all chunks exist
for chunk_x in 0..32 {
for chunk_z in 0..32 {
let abs_chunk_x = chunk_x + (region_x * 32);
let abs_chunk_z = chunk_z + (region_z * 32);
// Check if chunk exists in our modifications
let chunk_exists =
region_to_modify.chunks.contains_key(&(chunk_x, chunk_z));
// If chunk doesn't exist, create it with base layer
if !chunk_exists {
let (ser_buffer, _) = Self::create_base_chunk(abs_chunk_x, abs_chunk_z);
region
.write_chunk(chunk_x as usize, chunk_z as usize, &ser_buffer)
.unwrap();
}
}
}
// Update progress
let regions_done = regions_processed.fetch_add(1, Ordering::SeqCst) + 1;
// Update progress at regular intervals (every ~1% or at least every 10 regions)
// This ensures progress is visible even with many regions
let update_interval = (total_regions / 10).max(1);
if regions_done.is_multiple_of(update_interval) || regions_done == total_regions {
let progress = 90.0 + (regions_done as f64 / total_regions as f64) * 9.0;
emit_gui_progress_update(progress, "Saving world...");
}
save_pb.inc(1);
});
save_pb.inc(1);
}
save_pb.finish();
}
/// Saves a single region to disk.
///
/// This is extracted to allow streaming mode to save and release regions one at a time.
fn save_single_region(
&self,
region_x: i32,
region_z: i32,
region_to_modify: &super::common::RegionToModify,
) {
let mut region = self.create_region(region_x, region_z);
let mut ser_buffer = Vec::with_capacity(8192);
for (&(chunk_x, chunk_z), chunk_to_modify) in &region_to_modify.chunks {
if !chunk_to_modify.sections.is_empty() || !chunk_to_modify.other.is_empty() {
// Read existing chunk data if it exists
let existing_data = region
.read_chunk(chunk_x as usize, chunk_z as usize)
.unwrap()
.unwrap_or_default();
// Parse existing chunk or create new one
let mut chunk: Chunk = if !existing_data.is_empty() {
fastnbt::from_bytes(&existing_data).unwrap()
} else {
Chunk {
sections: Vec::new(),
x_pos: chunk_x + (region_x * 32),
z_pos: chunk_z + (region_z * 32),
is_light_on: 0,
other: FnvHashMap::default(),
}
};
// Update sections while preserving existing data
let new_sections: Vec<Section> = chunk_to_modify.sections().collect();
for new_section in new_sections {
if let Some(existing_section) =
chunk.sections.iter_mut().find(|s| s.y == new_section.y)
{
// Merge block states
existing_section.block_states.palette = new_section.block_states.palette;
existing_section.block_states.data = new_section.block_states.data;
} else {
// Add new section if it doesn't exist
chunk.sections.push(new_section);
}
}
// Preserve existing block entities and merge with new ones
if let Some(existing_entities) = chunk.other.get_mut("block_entities") {
if let Some(new_entities) = chunk_to_modify.other.get("block_entities") {
if let (Value::List(existing), Value::List(new)) =
(existing_entities, new_entities)
{
// Remove old entities that are replaced by new ones
existing.retain(|e| {
if let Value::Compound(map) = e {
let (x, y, z) = get_entity_coords(map);
!new.iter().any(|new_e| {
if let Value::Compound(new_map) = new_e {
let (nx, ny, nz) = get_entity_coords(new_map);
x == nx && y == ny && z == nz
} else {
false
}
})
} else {
true
}
});
// Add new entities
existing.extend(new.clone());
}
}
} else {
// If no existing entities, just add the new ones
if let Some(new_entities) = chunk_to_modify.other.get("block_entities") {
chunk
.other
.insert("block_entities".to_string(), new_entities.clone());
}
}
// Update chunk coordinates and flags
chunk.x_pos = chunk_x + (region_x * 32);
chunk.z_pos = chunk_z + (region_z * 32);
// Create Level wrapper and save
let level_data = create_level_wrapper(&chunk);
ser_buffer.clear();
fastnbt::to_writer(&mut ser_buffer, &level_data).unwrap();
region
.write_chunk(chunk_x as usize, chunk_z as usize, &ser_buffer)
.unwrap();
}
}
// Second pass: ensure all chunks exist
for chunk_x in 0..32 {
for chunk_z in 0..32 {
let abs_chunk_x = chunk_x + (region_x * 32);
let abs_chunk_z = chunk_z + (region_z * 32);
// Check if chunk exists in our modifications
let chunk_exists = region_to_modify.chunks.contains_key(&(chunk_x, chunk_z));
// If chunk doesn't exist, create it with base layer
if !chunk_exists {
let (ser_buffer, _) = Self::create_base_chunk(abs_chunk_x, abs_chunk_z);
region
.write_chunk(chunk_x as usize, chunk_z as usize, &ser_buffer)
.unwrap();
}
}
}
}
}
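The save path above follows a remove-and-save pattern. Here is a generic, self-contained sketch of that pattern with toy types (not the real RegionToModify):

use std::collections::HashMap;

// Generic sketch: collect the keys first, then remove each entry before handling
// it so its buffer is dropped as soon as that iteration ends.
fn main() {
    let mut regions: HashMap<(i32, i32), Vec<u8>> = HashMap::new();
    regions.insert((0, 0), vec![0; 1024]);
    regions.insert((0, 1), vec![0; 1024]);

    let keys: Vec<(i32, i32)> = regions.keys().copied().collect();
    for key in keys {
        if let Some(region) = regions.remove(&key) {
            // "Save" the region here; its memory is freed when `region` goes out of scope.
            println!("saved region {key:?} ({} bytes)", region.len());
        }
    }
    assert!(regions.is_empty());
}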
/// Helper function to get entity coordinates

View File

@@ -151,6 +151,19 @@ impl<'a> WorldEditor<'a> {
}
}
/// Get the ground level at a specific world coordinate (without any offset)
#[inline(always)]
pub fn get_ground_level(&self, x: i32, z: i32) -> i32 {
if let Some(ground) = &self.ground {
ground.level(XZPoint::new(
x - self.xzbbox.min_x(),
z - self.xzbbox.min_z(),
))
} else {
0 // Default ground level if no terrain data
}
}
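A tiny sketch of the world-to-grid mapping performed above; the helper name to_local and the sample coordinates are invented for illustration:

// Toy illustration of subtracting the bbox minimum before the ground lookup.
fn to_local(x: i32, z: i32, min_x: i32, min_z: i32) -> (i32, i32) {
    (x - min_x, z - min_z)
}

fn main() {
    // A world coordinate of (130, -45) inside a bbox whose minimum is (100, -50)
    // maps to local grid cell (30, 5) before the ground level lookup.
    assert_eq!(to_local(130, -45, 100, -50), (30, 5));
}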
/// Returns the minimum world coordinates
pub fn get_min_coords(&self) -> (i32, i32) {
(self.xzbbox.min_x(), self.xzbbox.min_z())