Mirror of https://github.com/louis-e/arnis.git (synced 2026-01-08 14:18:10 -05:00)

Compare commits: main ... codex/refa (2 commits)

| Author | SHA1 | Date |
|---|---|---|
|  | 6c454aeb1d |  |
|  | 77a6041c40 |  |
Cargo.lock — 1 change (generated)
@@ -204,7 +204,6 @@ dependencies = [
"nbtx",
"once_cell",
"rand 0.8.5",
"rand_chacha 0.3.1",
"rayon",
"reqwest",
"rfd",
@@ -38,7 +38,6 @@ itertools = "0.14.0"
log = "0.4.27"
once_cell = "1.21.3"
rand = "0.8.5"
rand_chacha = "0.3"
rayon = "1.10.0"
reqwest = { version = "0.12.15", features = ["blocking", "json"] }
rfd = { version = "0.16.0", optional = true }
@@ -3,7 +3,6 @@ use crate::block_definitions::{BEDROCK, DIRT, GRASS_BLOCK, STONE};
use crate::coordinate_system::cartesian::XZBBox;
use crate::coordinate_system::geographic::LLBBox;
use crate::element_processing::*;
use crate::floodfill_cache::FloodFillCache;
use crate::ground::Ground;
use crate::map_renderer;
use crate::osm_parser::ProcessedElement;

@@ -55,14 +54,12 @@ pub fn generate_world_with_options(
) -> Result<PathBuf, String> {
let output_path = options.path.clone();
let world_format = options.format;
// Create editor with appropriate format
let mut editor: WorldEditor = WorldEditor::new_with_format_and_name(
options.path,
&xzbbox,
llbbox,
options.format,
options.level_name.clone(),
options.level_name,
options.spawn_point,
);
let ground = Arc::new(ground);

@@ -78,13 +75,8 @@ pub fn generate_world_with_options(
println!("{} Processing terrain...", "[5/7]".bold());
emit_gui_progress_update(25.0, "Processing terrain...");
// Pre-compute all flood fills in parallel for better CPU utilization
let mut flood_fill_cache = FloodFillCache::precompute(&elements, args.timeout.as_ref());
println!("Pre-computed {} flood fills", flood_fill_cache.way_count());
// Process data
let elements_count: usize = elements.len();
let mut elements = elements; // Take ownership for consuming
let process_pb: ProgressBar = ProgressBar::new(elements_count as u64);
process_pb.set_style(ProgressStyle::default_bar()
.template("{spinner:.green} [{elapsed_precise}] [{bar:45.white/black}] {pos}/{len} elements ({eta}) {msg}")

@@ -95,8 +87,7 @@ pub fn generate_world_with_options(
let mut current_progress_prcs: f64 = 25.0;
let mut last_emitted_progress: f64 = current_progress_prcs;
// Process elements by draining in insertion order
for element in elements.drain(..) {
for element in &elements {
process_pb.inc(1);
current_progress_prcs += progress_increment_prcs;
if (current_progress_prcs - last_emitted_progress).abs() > 0.25 {

@@ -114,28 +105,22 @@ pub fn generate_world_with_options(
process_pb.set_message("");
}
match &element {
match element {
ProcessedElement::Way(way) => {
if way.tags.contains_key("building") || way.tags.contains_key("building:part") {
buildings::generate_buildings(&mut editor, way, args, None, &flood_fill_cache);
buildings::generate_buildings(&mut editor, way, args, None);
} else if way.tags.contains_key("highway") {
highways::generate_highways(
&mut editor,
&element,
args,
&highway_connectivity,
&flood_fill_cache,
);
highways::generate_highways(&mut editor, element, args, &highway_connectivity);
} else if way.tags.contains_key("landuse") {
landuse::generate_landuse(&mut editor, way, args, &flood_fill_cache);
landuse::generate_landuse(&mut editor, way, args);
} else if way.tags.contains_key("natural") {
natural::generate_natural(&mut editor, &element, args, &flood_fill_cache);
natural::generate_natural(&mut editor, element, args);
} else if way.tags.contains_key("amenity") {
amenities::generate_amenities(&mut editor, &element, args, &flood_fill_cache);
amenities::generate_amenities(&mut editor, element, args);
} else if way.tags.contains_key("leisure") {
leisure::generate_leisure(&mut editor, way, args, &flood_fill_cache);
leisure::generate_leisure(&mut editor, way, args);
} else if way.tags.contains_key("barrier") {
barriers::generate_barriers(&mut editor, &element);
barriers::generate_barriers(&mut editor, element);
} else if let Some(val) = way.tags.get("waterway") {
if val == "dock" {
// docks count as water areas

@@ -155,10 +140,8 @@ pub fn generate_world_with_options(
} else if way.tags.get("service") == Some(&"siding".to_string()) {
highways::generate_siding(&mut editor, way);
} else if way.tags.contains_key("man_made") {
man_made::generate_man_made(&mut editor, &element, args);
man_made::generate_man_made(&mut editor, element, args);
}
// Release flood fill cache entry for this way
flood_fill_cache.remove_way(way.id);
}
ProcessedElement::Node(node) => {
if node.tags.contains_key("door") || node.tags.contains_key("entrance") {

@@ -166,19 +149,13 @@ pub fn generate_world_with_options(
} else if node.tags.contains_key("natural")
&& node.tags.get("natural") == Some(&"tree".to_string())
{
natural::generate_natural(&mut editor, &element, args, &flood_fill_cache);
natural::generate_natural(&mut editor, element, args);
} else if node.tags.contains_key("amenity") {
amenities::generate_amenities(&mut editor, &element, args, &flood_fill_cache);
amenities::generate_amenities(&mut editor, element, args);
} else if node.tags.contains_key("barrier") {
barriers::generate_barrier_nodes(&mut editor, node);
} else if node.tags.contains_key("highway") {
highways::generate_highways(
&mut editor,
&element,
args,
&highway_connectivity,
&flood_fill_cache,
);
highways::generate_highways(&mut editor, element, args, &highway_connectivity);
} else if node.tags.contains_key("tourism") {
tourisms::generate_tourisms(&mut editor, node);
} else if node.tags.contains_key("man_made") {

@@ -187,12 +164,7 @@ pub fn generate_world_with_options(
}
ProcessedElement::Relation(rel) => {
if rel.tags.contains_key("building") || rel.tags.contains_key("building:part") {
buildings::generate_building_from_relation(
&mut editor,
rel,
args,
&flood_fill_cache,
);
buildings::generate_building_from_relation(&mut editor, rel, args);
} else if rel.tags.contains_key("water")
|| rel
.tags

@@ -202,43 +174,24 @@ pub fn generate_world_with_options(
{
water_areas::generate_water_areas_from_relation(&mut editor, rel, &xzbbox);
} else if rel.tags.contains_key("natural") {
natural::generate_natural_from_relation(
&mut editor,
rel,
args,
&flood_fill_cache,
);
natural::generate_natural_from_relation(&mut editor, rel, args);
} else if rel.tags.contains_key("landuse") {
landuse::generate_landuse_from_relation(
&mut editor,
rel,
args,
&flood_fill_cache,
);
landuse::generate_landuse_from_relation(&mut editor, rel, args);
} else if rel.tags.get("leisure") == Some(&"park".to_string()) {
leisure::generate_leisure_from_relation(
&mut editor,
rel,
args,
&flood_fill_cache,
);
leisure::generate_leisure_from_relation(&mut editor, rel, args);
} else if rel.tags.contains_key("man_made") {
man_made::generate_man_made(&mut editor, &element, args);
man_made::generate_man_made(
&mut editor,
&ProcessedElement::Relation(rel.clone()),
args,
);
}
// Release flood fill cache entries for all ways in this relation
let way_ids: Vec<u64> = rel.members.iter().map(|m| m.way.id).collect();
flood_fill_cache.remove_relation_ways(&way_ids);
}
}
// Element is dropped here, freeing its memory immediately
}
process_pb.finish();
// Drop remaining caches
drop(highway_connectivity);
drop(flood_fill_cache);
// Generate ground layer
let total_blocks: u64 = xzbbox.bounding_rect().total_blocks();
let desired_updates: u64 = 1500;

@@ -264,61 +217,44 @@ pub fn generate_world_with_options(
let groundlayer_block = GRASS_BLOCK;
// Process ground generation chunk-by-chunk for better cache locality.
// This keeps the same region/chunk HashMap entries hot in CPU cache,
// rather than jumping between regions on every Z iteration.
let min_chunk_x = xzbbox.min_x() >> 4;
let max_chunk_x = xzbbox.max_x() >> 4;
let min_chunk_z = xzbbox.min_z() >> 4;
let max_chunk_z = xzbbox.max_z() >> 4;
for x in xzbbox.min_x()..=xzbbox.max_x() {
for z in xzbbox.min_z()..=xzbbox.max_z() {
// Add default dirt and grass layer if there isn't a stone layer already
if !editor.check_for_block(x, 0, z, Some(&[STONE])) {
editor.set_block(groundlayer_block, x, 0, z, None, None);
editor.set_block(DIRT, x, -1, z, None, None);
editor.set_block(DIRT, x, -2, z, None, None);
}
for chunk_x in min_chunk_x..=max_chunk_x {
for chunk_z in min_chunk_z..=max_chunk_z {
// Calculate the block range for this chunk, clamped to bbox
let chunk_min_x = (chunk_x << 4).max(xzbbox.min_x());
let chunk_max_x = ((chunk_x << 4) + 15).min(xzbbox.max_x());
let chunk_min_z = (chunk_z << 4).max(xzbbox.min_z());
let chunk_max_z = ((chunk_z << 4) + 15).min(xzbbox.max_z());
// Fill underground with stone
if args.fillground {
// Fill from bedrock+1 to 3 blocks below ground with stone
editor.fill_blocks_absolute(
STONE,
x,
MIN_Y + 1,
z,
x,
editor.get_absolute_y(x, -3, z),
z,
None,
None,
);
}
// Generate a bedrock level at MIN_Y
editor.set_block_absolute(BEDROCK, x, MIN_Y, z, None, Some(&[BEDROCK]));
for x in chunk_min_x..=chunk_max_x {
for z in chunk_min_z..=chunk_max_z {
// Add default dirt and grass layer if there isn't a stone layer already
if !editor.check_for_block(x, 0, z, Some(&[STONE])) {
editor.set_block(groundlayer_block, x, 0, z, None, None);
editor.set_block(DIRT, x, -1, z, None, None);
editor.set_block(DIRT, x, -2, z, None, None);
}
block_counter += 1;
// Use manual % check since is_multiple_of() is unstable on stable Rust
#[allow(clippy::manual_is_multiple_of)]
if block_counter % batch_size == 0 {
ground_pb.inc(batch_size);
}
// Fill underground with stone
if args.fillground {
// Fill from bedrock+1 to 3 blocks below ground with stone
editor.fill_blocks_absolute(
STONE,
x,
MIN_Y + 1,
z,
x,
editor.get_absolute_y(x, -3, z),
z,
None,
None,
);
}
// Generate a bedrock level at MIN_Y
editor.set_block_absolute(BEDROCK, x, MIN_Y, z, None, Some(&[BEDROCK]));
block_counter += 1;
#[allow(clippy::manual_is_multiple_of)]
if block_counter % batch_size == 0 {
ground_pb.inc(batch_size);
}
gui_progress_grnd += progress_increment_grnd;
if (gui_progress_grnd - last_emitted_progress).abs() > 0.25 {
emit_gui_progress_update(gui_progress_grnd, "");
last_emitted_progress = gui_progress_grnd;
}
}
gui_progress_grnd += progress_increment_grnd;
if (gui_progress_grnd - last_emitted_progress).abs() > 0.25 {
emit_gui_progress_update(gui_progress_grnd, "");
last_emitted_progress = gui_progress_grnd;
}
}
}
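The chunk-by-chunk ground pass above uses the usual Minecraft convention that a chunk spans 16 blocks on each axis, so block-to-chunk conversion is an arithmetic shift and each chunk's block range is clamped to the bounding box. A minimal standalone sketch of that arithmetic, using plain integer bounds instead of the project's XZBBox type:

```rust
/// Illustrative only: enumerate the chunks covering [min_x, max_x] x [min_z, max_z],
/// clamping each 16x16 chunk to the box (mirrors the >>4 / <<4 math in the hunk above).
fn chunk_ranges(min_x: i32, max_x: i32, min_z: i32, max_z: i32) -> Vec<(i32, i32, i32, i32)> {
    let mut ranges = Vec::new();
    // `>> 4` on a signed integer is an arithmetic shift, so it floors correctly
    // for negative block coordinates as well.
    for chunk_x in (min_x >> 4)..=(max_x >> 4) {
        for chunk_z in (min_z >> 4)..=(max_z >> 4) {
            let x0 = (chunk_x << 4).max(min_x);
            let x1 = ((chunk_x << 4) + 15).min(max_x);
            let z0 = (chunk_z << 4).max(min_z);
            let z1 = ((chunk_z << 4) + 15).min(max_z);
            ranges.push((x0, x1, z0, z1));
        }
    }
    ranges
}
```

Iterating per chunk rather than per global column keeps consecutive writes inside the same chunk, which is the cache-locality argument stated in the comments above.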
@@ -1,127 +0,0 @@
//! Deterministic random number generation for consistent element processing.
//!
//! This module provides seeded RNG that ensures the same element always produces
//! the same random values, regardless of processing order. This is essential for
//! region-by-region streaming where the same element may be processed multiple times
//! (once for each region it touches).
//!
//! # Example
//! ```ignore
//! let mut rng = element_rng(element_id);
//! let color = rng.gen_bool(0.5); // Always same result for same element_id
//! ```
use rand::SeedableRng;
use rand_chacha::ChaCha8Rng;
/// Creates a deterministic RNG seeded from an element ID.
///
/// The same element ID will always produce the same sequence of random values,
/// ensuring consistent results when an element is processed multiple times
/// (e.g., once per region it touches during streaming).
///
/// # Arguments
/// * `element_id` - The unique OSM element ID (way ID, node ID, or relation ID)
///
/// # Returns
/// A seeded ChaCha8Rng that will produce deterministic random values
#[inline]
pub fn element_rng(element_id: u64) -> ChaCha8Rng {
ChaCha8Rng::seed_from_u64(element_id)
}
/// Creates a deterministic RNG seeded from an element ID with an additional salt.
///
/// Use this when you need multiple independent random sequences for the same element.
/// For example, one sequence for wall colors and another for roof style.
///
/// # Arguments
/// * `element_id` - The unique OSM element ID
/// * `salt` - Additional value to create a different sequence (e.g., use different
/// salt values for different purposes within the same element)
#[inline]
#[allow(dead_code)]
pub fn element_rng_salted(element_id: u64, salt: u64) -> ChaCha8Rng {
// Combine element_id and salt using XOR and bit rotation to avoid collisions
let combined = element_id ^ salt.rotate_left(32);
ChaCha8Rng::seed_from_u64(combined)
}
/// Creates a deterministic RNG seeded from coordinates.
///
/// Use this for per-block randomness that needs to be consistent regardless
/// of processing order (e.g., random flower placement within a natural area).
///
/// # Arguments
/// * `x` - X coordinate
/// * `z` - Z coordinate
/// * `element_id` - The element ID for additional uniqueness
#[inline]
pub fn coord_rng(x: i32, z: i32, element_id: u64) -> ChaCha8Rng {
// Combine coordinates and element_id into a seed.
// Cast through u32 to handle negative coordinates consistently.
let coord_part = ((x as u32 as i64) << 32) | (z as u32 as i64);
let seed = (coord_part as u64) ^ element_id;
ChaCha8Rng::seed_from_u64(seed)
}
#[cfg(test)]
mod tests {
use super::*;
use rand::Rng;
#[test]
fn test_element_rng_deterministic() {
let mut rng1 = element_rng(12345);
let mut rng2 = element_rng(12345);
// Same seed should produce same sequence
for _ in 0..100 {
assert_eq!(rng1.gen::<u64>(), rng2.gen::<u64>());
}
}
#[test]
fn test_different_elements_different_values() {
let mut rng1 = element_rng(12345);
let mut rng2 = element_rng(12346);
// Different seeds should (almost certainly) produce different values
let v1: u64 = rng1.gen();
let v2: u64 = rng2.gen();
assert_ne!(v1, v2);
}
#[test]
fn test_salted_rng_different_from_base() {
let mut rng1 = element_rng(12345);
let mut rng2 = element_rng_salted(12345, 1);
let v1: u64 = rng1.gen();
let v2: u64 = rng2.gen();
assert_ne!(v1, v2);
}
#[test]
fn test_coord_rng_deterministic() {
let mut rng1 = coord_rng(100, 200, 12345);
let mut rng2 = coord_rng(100, 200, 12345);
assert_eq!(rng1.gen::<u64>(), rng2.gen::<u64>());
}
#[test]
fn test_coord_rng_negative_coordinates() {
// Negative coordinates are common in Minecraft worlds
let mut rng1 = coord_rng(-100, -200, 12345);
let mut rng2 = coord_rng(-100, -200, 12345);
assert_eq!(rng1.gen::<u64>(), rng2.gen::<u64>());
// Ensure different negative coords produce different seeds
let mut rng3 = coord_rng(-100, -200, 12345);
let mut rng4 = coord_rng(-101, -200, 12345);
assert_ne!(rng3.gen::<u64>(), rng4.gen::<u64>());
}
}
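The file above (one side of this diff) centers on one idea: seed a small, fast PRNG from the OSM element ID so that stylistic choices do not depend on processing order. A minimal sketch of that pattern, using only the `rand`/`rand_chacha` APIs already visible in the diff; the `building_id` values and the variant count are made up for illustration:

```rust
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;

/// Illustrative only: pick a style variant for a building.
/// Seeding from the element ID makes the choice reproducible, so the same
/// building gets the same variant no matter when (or how often) it is processed.
fn pick_wall_variant(building_id: u64) -> u8 {
    let mut rng = ChaCha8Rng::seed_from_u64(building_id);
    rng.gen_range(0..4)
}

fn main() {
    // Same seed, same result: the property the tests above assert.
    assert_eq!(pick_wall_variant(42), pick_wall_variant(42));
}
```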
@@ -2,19 +2,11 @@ use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::coordinate_system::cartesian::XZPoint;
use crate::deterministic_rng::element_rng;
use crate::floodfill::flood_fill_area; // Needed for inline amenity flood fills
use crate::floodfill_cache::FloodFillCache;
use crate::floodfill::flood_fill_area;
use crate::osm_parser::ProcessedElement;
use crate::world_editor::WorldEditor;
use rand::Rng;
pub fn generate_amenities(
editor: &mut WorldEditor,
element: &ProcessedElement,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
pub fn generate_amenities(editor: &mut WorldEditor, element: &ProcessedElement, args: &Args) {
// Skip if 'layer' or 'level' is negative in the tags
if let Some(layer) = element.tags().get("layer") {
if layer.parse::<i32>().unwrap_or(0) < 0 {

@@ -50,14 +42,18 @@ pub fn generate_amenities(
let ground_block: Block = OAK_PLANKS;
let roof_block: Block = STONE_BLOCK_SLAB;
// Use pre-computed flood fill from cache
let floor_area: Vec<(i32, i32)> =
flood_fill_cache.get_or_compute_element(element, args.timeout.as_ref());
let polygon_coords: Vec<(i32, i32)> = element
.nodes()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
if floor_area.is_empty() {
if polygon_coords.is_empty() {
return;
}
let floor_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
// Fill the floor area
for (x, z) in floor_area.iter() {
editor.set_block(ground_block, *x, 0, *z, None, None);

@@ -84,10 +80,8 @@ pub fn generate_amenities(
"bench" => {
// Place a bench
if let Some(pt) = first_node {
// Use deterministic RNG for consistent bench orientation across region boundaries
let mut rng = element_rng(element.id());
// 50% chance to 90 degrees rotate the bench
if rng.gen_bool(0.5) {
// 50% chance to 90 degrees rotate the bench using if
if rand::random::<bool>() {
editor.set_block(SMOOTH_STONE, pt.x, 1, pt.z, None, None);
editor.set_block(OAK_LOG, pt.x + 1, 1, pt.z, None, None);
editor.set_block(OAK_LOG, pt.x - 1, 1, pt.z, None, None);

@@ -101,9 +95,12 @@ pub fn generate_amenities(
"shelter" => {
let roof_block: Block = STONE_BRICK_SLAB;
// Use pre-computed flood fill from cache
let polygon_coords: Vec<(i32, i32)> = element
.nodes()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
let roof_area: Vec<(i32, i32)> =
flood_fill_cache.get_or_compute_element(element, args.timeout.as_ref());
flood_fill_area(&polygon_coords, args.timeout.as_ref());
// Place fences and roof slabs at each corner node directly
for node in element.nodes() {
@@ -3,9 +3,8 @@ use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::colors::color_text_to_rgb_tuple;
use crate::coordinate_system::cartesian::XZPoint;
use crate::deterministic_rng::element_rng;
use crate::element_processing::subprocessor::buildings_interior::generate_building_interior;
use crate::floodfill_cache::FloodFillCache;
use crate::floodfill::flood_fill_area;
use crate::osm_parser::{ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;

@@ -29,7 +28,6 @@ pub fn generate_buildings(
element: &ProcessedWay,
args: &Args,
relation_levels: Option<i32>,
flood_fill_cache: &FloodFillCache,
) {
// Get min_level first so we can use it both for start_level and building height calculations
let min_level = if let Some(min_level_str) = element.tags.get("building:min_level") {

@@ -45,9 +43,10 @@ pub fn generate_buildings(
let scale_factor = args.scale;
let min_level_offset = multiply_scale(min_level * 4, scale_factor);
// Use pre-computed flood fill from cache
// Cache floodfill result: compute once and reuse throughout
let polygon_coords: Vec<(i32, i32)> = element.nodes.iter().map(|n| (n.x, n.z)).collect();
let cached_floor_area: Vec<(i32, i32)> =
flood_fill_cache.get_or_compute(element, args.timeout.as_ref());
flood_fill_area(&polygon_coords, args.timeout.as_ref());
let cached_footprint_size = cached_floor_area.len();
// Use fixed starting Y coordinate based on maximum ground level when terrain is enabled

@@ -122,8 +121,7 @@ pub fn generate_buildings(
let mut processed_points: HashSet<(i32, i32)> = HashSet::new();
let mut building_height: i32 = ((6.0 * scale_factor) as i32).max(3); // Default building height with scale and minimum
let mut is_tall_building = false;
// Use deterministic RNG seeded by element ID for consistent results across region boundaries
let mut rng = element_rng(element.id);
let mut rng = rand::thread_rng();
let use_vertical_windows = rng.gen_bool(0.7);
let use_accent_roof_line = rng.gen_bool(0.25);

@@ -388,7 +386,7 @@ pub fn generate_buildings(
building_height = ((23.0 * scale_factor) as i32).max(3);
}
} else if building_type == "bridge" {
generate_bridge(editor, element, flood_fill_cache, args.timeout.as_ref());
generate_bridge(editor, element, args.timeout.as_ref());
return;
}
}

@@ -1486,7 +1484,6 @@ pub fn generate_building_from_relation(
editor: &mut WorldEditor,
relation: &ProcessedRelation,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
// Extract levels from relation tags
let relation_levels = relation

@@ -1498,13 +1495,7 @@ pub fn generate_building_from_relation(
// Process the outer way to create the building walls
for member in &relation.members {
if member.role == ProcessedMemberRole::Outer {
generate_buildings(
editor,
&member.way,
args,
Some(relation_levels),
flood_fill_cache,
);
generate_buildings(editor, &member.way, args, Some(relation_levels));
}
}

@@ -1528,29 +1519,28 @@ pub fn generate_building_from_relation(
fn generate_bridge(
editor: &mut WorldEditor,
element: &ProcessedWay,
flood_fill_cache: &FloodFillCache,
floodfill_timeout: Option<&Duration>,
) {
let floor_block: Block = STONE;
let railing_block: Block = STONE_BRICKS;
// Calculate bridge level based on the "level" tag (computed once, used throughout)
let bridge_y_offset = if let Some(level_str) = element.tags.get("level") {
if let Ok(level) = level_str.parse::<i32>() {
(level * 3) + 1
} else {
1 // Default elevation
}
} else {
1 // Default elevation
};
// Process the nodes to create bridge pathways and railings
let mut previous_node: Option<(i32, i32)> = None;
for node in &element.nodes {
let x: i32 = node.x;
let z: i32 = node.z;
// Calculate bridge level based on the "level" tag
let bridge_y_offset = if let Some(level_str) = element.tags.get("level") {
if let Ok(level) = level_str.parse::<i32>() {
(level * 3) + 1
} else {
1 // Default elevation
}
} else {
1 // Default elevation
};
// Create bridge path using Bresenham's line
if let Some(prev) = previous_node {
let bridge_points: Vec<(i32, i32, i32)> =

@@ -1566,8 +1556,21 @@ fn generate_bridge(
previous_node = Some((x, z));
}
// Flood fill the area between the bridge path nodes (uses cache)
let bridge_area: Vec<(i32, i32)> = flood_fill_cache.get_or_compute(element, floodfill_timeout);
// Flood fill the area between the bridge path nodes
let polygon_coords: Vec<(i32, i32)> = element.nodes.iter().map(|n| (n.x, n.z)).collect();
let bridge_area: Vec<(i32, i32)> = flood_fill_area(&polygon_coords, floodfill_timeout);
// Calculate bridge level based on the "level" tag
let bridge_y_offset = if let Some(level_str) = element.tags.get("level") {
if let Ok(level) = level_str.parse::<i32>() {
(level * 3) + 1
} else {
1 // Default elevation
}
} else {
1 // Default elevation
};
// Place floor blocks
for (x, z) in bridge_area {
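One visible change in `generate_bridge` is hoisting the `level`-tag parsing so the offset is computed once rather than per node or per filled block. The mapping itself is simple; a hedged sketch of that computation, assuming tags arrive as a plain string map rather than the project's tag type:

```rust
use std::collections::HashMap;

/// Illustrative only: mirrors the `(level * 3) + 1` offset used in the diff,
/// falling back to an elevation of 1 when the tag is missing or unparsable.
fn bridge_y_offset(tags: &HashMap<String, String>) -> i32 {
    tags.get("level")
        .and_then(|s| s.parse::<i32>().ok())
        .map(|level| level * 3 + 1)
        .unwrap_or(1)
}
```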
@@ -2,7 +2,7 @@ use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::coordinate_system::cartesian::XZPoint;
use crate::floodfill_cache::FloodFillCache;
use crate::floodfill::flood_fill_area;
use crate::osm_parser::{ProcessedElement, ProcessedWay};
use crate::world_editor::WorldEditor;
use std::collections::HashMap;

@@ -16,15 +16,8 @@ pub fn generate_highways(
element: &ProcessedElement,
args: &Args,
highway_connectivity: &HighwayConnectivityMap,
flood_fill_cache: &FloodFillCache,
) {
generate_highways_internal(
editor,
element,
args,
highway_connectivity,
flood_fill_cache,
);
generate_highways_internal(editor, element, args, highway_connectivity);
}
/// Build a connectivity map for highway endpoints to determine where slopes are needed.

@@ -73,7 +66,6 @@ fn generate_highways_internal(
element: &ProcessedElement,
args: &Args,
highway_connectivity: &HashMap<(i32, i32), Vec<i32>>, // Maps node coordinates to list of layers that connect to this node
flood_fill_cache: &FloodFillCache,
) {
if let Some(highway_type) = element.tags().get("highway") {
if highway_type == "street_lamp" {

@@ -145,9 +137,14 @@ fn generate_highways_internal(
};
}
// Fill the area using flood fill cache
// Fill the area using flood fill or by iterating through the nodes
let polygon_coords: Vec<(i32, i32)> = way
.nodes
.iter()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
let filled_area: Vec<(i32, i32)> =
flood_fill_cache.get_or_compute(way, args.timeout.as_ref());
flood_fill_area(&polygon_coords, args.timeout.as_ref());
for (x, z) in filled_area {
editor.set_block(surface_block, x, 0, z, None, None);
@@ -1,18 +1,12 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::deterministic_rng::element_rng;
use crate::element_processing::tree::Tree;
use crate::floodfill_cache::FloodFillCache;
use crate::floodfill::flood_fill_area;
use crate::osm_parser::{ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;
pub fn generate_landuse(
editor: &mut WorldEditor,
element: &ProcessedWay,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
pub fn generate_landuse(editor: &mut WorldEditor, element: &ProcessedWay, args: &Args) {
// Determine block type based on landuse tag
let binding: String = "".to_string();
let landuse_tag: &String = element.tags.get("landuse").unwrap_or(&binding);

@@ -50,12 +44,11 @@ pub fn generate_landuse(
_ => GRASS_BLOCK,
};
// Get the area of the landuse element using cache
let floor_area: Vec<(i32, i32)> =
flood_fill_cache.get_or_compute(element, args.timeout.as_ref());
// Get the area of the landuse element
let polygon_coords: Vec<(i32, i32)> = element.nodes.iter().map(|n| (n.x, n.z)).collect();
let floor_area: Vec<(i32, i32)> = flood_fill_area(&polygon_coords, args.timeout.as_ref());
// Use deterministic RNG seeded by element ID for consistent results across region boundaries
let mut rng = element_rng(element.id);
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
for (x, z) in floor_area {
if landuse_tag == "traffic_island" {

@@ -282,13 +275,12 @@ pub fn generate_landuse_from_relation(
editor: &mut WorldEditor,
rel: &ProcessedRelation,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
if rel.tags.contains_key("landuse") {
// Generate individual ways with their original tags
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
generate_landuse(editor, &member.way.clone(), args, flood_fill_cache);
generate_landuse(editor, &member.way.clone(), args);
}
}

@@ -310,7 +302,7 @@ pub fn generate_landuse_from_relation(
};
// Generate landuse area from combined way
generate_landuse(editor, &combined_way, args, flood_fill_cache);
generate_landuse(editor, &combined_way, args);
}
}
}
@@ -1,19 +1,13 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::deterministic_rng::element_rng;
use crate::element_processing::tree::Tree;
use crate::floodfill_cache::FloodFillCache;
use crate::floodfill::flood_fill_area;
use crate::osm_parser::{ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;
pub fn generate_leisure(
editor: &mut WorldEditor,
element: &ProcessedWay,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
pub fn generate_leisure(editor: &mut WorldEditor, element: &ProcessedWay, args: &Args) {
if let Some(leisure_type) = element.tags.get("leisure") {
let mut previous_node: Option<(i32, i32)> = None;
let mut corner_addup: (i32, i32, i32) = (0, 0, 0);

@@ -80,13 +74,15 @@ pub fn generate_leisure(
previous_node = Some((node.x, node.z));
}
// Flood-fill the interior of the leisure area using cache
// Flood-fill the interior of the leisure area
if corner_addup != (0, 0, 0) {
let polygon_coords: Vec<(i32, i32)> = element
.nodes
.iter()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
let filled_area: Vec<(i32, i32)> =
flood_fill_cache.get_or_compute(element, args.timeout.as_ref());
// Use deterministic RNG seeded by element ID for consistent results across region boundaries
let mut rng = element_rng(element.id);
flood_fill_area(&polygon_coords, args.timeout.as_ref());
for (x, z) in filled_area {
editor.set_block(block_type, x, 0, z, Some(&[GRASS_BLOCK]), None);

@@ -95,6 +91,7 @@ pub fn generate_leisure(
if matches!(leisure_type.as_str(), "park" | "garden" | "nature_reserve")
&& editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK]))
{
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
let random_choice: i32 = rng.gen_range(0..1000);
match random_choice {

@@ -126,6 +123,7 @@ pub fn generate_leisure(
// Add playground or recreation ground features
if matches!(leisure_type.as_str(), "playground" | "recreation_ground") {
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
let random_choice: i32 = rng.gen_range(0..5000);
match random_choice {

@@ -178,13 +176,12 @@ pub fn generate_leisure_from_relation(
editor: &mut WorldEditor,
rel: &ProcessedRelation,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
if rel.tags.get("leisure") == Some(&"park".to_string()) {
// First generate individual ways with their original tags
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
generate_leisure(editor, &member.way, args, flood_fill_cache);
generate_leisure(editor, &member.way, args);
}
}

@@ -204,6 +201,6 @@ pub fn generate_leisure_from_relation(
};
// Generate leisure area from combined way
generate_leisure(editor, &combined_way, args, flood_fill_cache);
generate_leisure(editor, &combined_way, args);
}
}
@@ -1,19 +1,13 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::deterministic_rng::element_rng;
use crate::element_processing::tree::Tree;
use crate::floodfill_cache::FloodFillCache;
use crate::floodfill::flood_fill_area;
use crate::osm_parser::{ProcessedElement, ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;
pub fn generate_natural(
editor: &mut WorldEditor,
element: &ProcessedElement,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
pub fn generate_natural(editor: &mut WorldEditor, element: &ProcessedElement, args: &Args) {
if let Some(natural_type) = element.tags().get("natural") {
if natural_type == "tree" {
if let ProcessedElement::Node(node) = element {

@@ -75,13 +69,17 @@ pub fn generate_natural(
previous_node = Some((x, z));
}
// If there are natural nodes, flood-fill the area using cache
// If there are natural nodes, flood-fill the area
if corner_addup != (0, 0, 0) {
let polygon_coords: Vec<(i32, i32)> = way
.nodes
.iter()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
let filled_area: Vec<(i32, i32)> =
flood_fill_cache.get_or_compute(way, args.timeout.as_ref());
flood_fill_area(&polygon_coords, args.timeout.as_ref());
// Use deterministic RNG seeded by element ID for consistent results across region boundaries
let mut rng = element_rng(way.id);
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
for (x, z) in filled_area {
editor.set_block(block_type, x, 0, z, None, None);

@@ -450,18 +448,12 @@ pub fn generate_natural_from_relation(
editor: &mut WorldEditor,
rel: &ProcessedRelation,
args: &Args,
flood_fill_cache: &FloodFillCache,
) {
if rel.tags.contains_key("natural") {
// Generate individual ways with their original tags
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
generate_natural(
editor,
&ProcessedElement::Way((*member.way).clone()),
args,
flood_fill_cache,
);
generate_natural(editor, &ProcessedElement::Way(member.way.clone()), args);
}
}

@@ -483,12 +475,7 @@ pub fn generate_natural_from_relation(
};
// Generate natural area from combined way
generate_natural(
editor,
&ProcessedElement::Way(combined_way),
args,
flood_fill_cache,
);
generate_natural(editor, &ProcessedElement::Way(combined_way), args);
}
}
}
@@ -1,5 +1,4 @@
use crate::block_definitions::*;
use crate::deterministic_rng::coord_rng;
use crate::world_editor::WorldEditor;
use rand::Rng;

@@ -116,9 +115,7 @@ impl Tree<'_> {
blacklist.extend(Self::get_functional_blocks());
blacklist.push(WATER);
// Use deterministic RNG based on coordinates for consistent tree types across region boundaries
// The element_id of 0 is used as a salt for tree-specific randomness
let mut rng = coord_rng(x, z, 0);
let mut rng = rand::thread_rng();
let tree = Self::get_tree(match rng.gen_range(1..=3) {
1 => TreeType::Oak,
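The tree hunk above swaps between `rand::thread_rng()` and a coordinate-seeded generator so that the same block position always grows the same tree type. A hedged sketch of packing `(x, z)` into a 64-bit seed in the style of the `coord_rng` helper shown earlier in this diff; the variant mapping in the comment is an assumption for illustration:

```rust
use rand::{Rng, SeedableRng};
use rand_chacha::ChaCha8Rng;

/// Illustrative only: derive a per-position tree choice. Casting through u32
/// keeps negative coordinates well-defined, as in the removed coord_rng helper.
fn tree_kind(x: i32, z: i32) -> u32 {
    let coord_part = ((x as u32 as u64) << 32) | (z as u32 as u64);
    let mut rng = ChaCha8Rng::seed_from_u64(coord_part);
    rng.gen_range(1..=3) // e.g. 1 = oak, 2 = spruce, 3 = birch (assumed mapping)
}
```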
@@ -1,5 +1,6 @@
use geo::orient::{Direction, Orient};
use geo::{Contains, Intersects, LineString, Point, Polygon, Rect};
use std::time::Instant;
use crate::clipping::clip_water_ring_to_bbox;
use crate::{

@@ -14,13 +15,15 @@ pub fn generate_water_area_from_way(
element: &ProcessedWay,
_xzbbox: &XZBBox,
) {
let start_time = Instant::now();
let outers = [element.nodes.clone()];
if !verify_closed_rings(&outers) {
println!("Skipping way {} due to invalid polygon", element.id);
return;
}
generate_water_areas(editor, &outers, &[]);
generate_water_areas(editor, &outers, &[], start_time);
}
pub fn generate_water_areas_from_relation(

@@ -28,6 +31,8 @@ pub fn generate_water_areas_from_relation(
element: &ProcessedRelation,
xzbbox: &XZBBox,
) {
let start_time = Instant::now();
// Check if this is a water relation (either with water tag or natural=water)
let is_water = element.tags.contains_key("water")
|| element

@@ -118,13 +123,14 @@ pub fn generate_water_areas_from_relation(
return;
}
generate_water_areas(editor, &outers, &inners);
generate_water_areas(editor, &outers, &inners, start_time);
}
fn generate_water_areas(
editor: &mut WorldEditor,
outers: &[Vec<ProcessedNode>],
inners: &[Vec<ProcessedNode>],
start_time: Instant,
) {
// Calculate polygon bounding box to limit fill area
let mut poly_min_x = i32::MAX;

@@ -163,7 +169,9 @@ fn generate_water_areas(
.map(|x| x.iter().map(|y| y.xz()).collect::<Vec<_>>())
.collect();
inverse_floodfill(min_x, min_z, max_x, max_z, outers_xz, inners_xz, editor);
inverse_floodfill(
min_x, min_z, max_x, max_z, outers_xz, inners_xz, editor, start_time,
);
}
/// Merges way segments that share endpoints into closed rings.

@@ -300,6 +308,7 @@ fn inverse_floodfill(
outers: Vec<Vec<XZPoint>>,
inners: Vec<Vec<XZPoint>>,
editor: &mut WorldEditor,
start_time: Instant,
) {
// Convert to geo Polygons with normalized winding order
let inners: Vec<_> = inners

@@ -332,7 +341,14 @@ fn inverse_floodfill(
})
.collect();
inverse_floodfill_recursive((min_x, min_z), (max_x, max_z), &outers, &inners, editor);
inverse_floodfill_recursive(
(min_x, min_z),
(max_x, max_z),
&outers,
&inners,
editor,
start_time,
);
}
fn inverse_floodfill_recursive(

@@ -341,11 +357,12 @@ fn inverse_floodfill_recursive(
outers: &[Polygon],
inners: &[Polygon],
editor: &mut WorldEditor,
start_time: Instant,
) {
// Check if we've exceeded 40 seconds
// if start_time.elapsed().as_secs() > 40 {
// println!("Water area generation exceeded 40 seconds, continuing anyway");
// }
// Check if we've exceeded 25 seconds
if start_time.elapsed().as_secs() > 25 {
println!("Water area generation exceeded 25 seconds, continuing anyway");
}
const ITERATIVE_THRES: i64 = 10_000;

@@ -400,6 +417,7 @@ fn inverse_floodfill_recursive(
&outers_intersects,
&inners_intersects,
editor,
start_time,
);
}
}
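The water-area hunks thread a `start_time: Instant` down through the recursive fill so the recursion can notice when it has been running too long. The guard itself is just an elapsed-time check against a budget captured once at the top; a small standalone sketch using only the standard library:

```rust
use std::time::{Duration, Instant};

/// Illustrative only: true once `budget` has been exceeded, matching the
/// "exceeded N seconds, continuing anyway" style of check in the diff above.
fn over_budget(start_time: Instant, budget: Duration) -> bool {
    start_time.elapsed() > budget
}

fn main() {
    let start = Instant::now(); // captured once, then passed down the call tree
    if over_budget(start, Duration::from_secs(25)) {
        println!("Water area generation exceeded 25 seconds, continuing anyway");
    }
}
```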
@@ -332,9 +332,6 @@ pub fn fetch_elevation_data(
// Continue with the existing blur and conversion to Minecraft heights...
let blurred_heights: Vec<Vec<f64>> = apply_gaussian_blur(&height_grid, sigma);
// Release raw height grid
drop(height_grid);
let mut mc_heights: Vec<Vec<i32>> = Vec::with_capacity(blurred_heights.len());
// Find min/max in raw data

@@ -437,55 +434,59 @@ fn get_tile_coordinates(bbox: &LLBBox, zoom: u8) -> Vec<(u32, u32)> {
}
fn apply_gaussian_blur(heights: &[Vec<f64>], sigma: f64) -> Vec<Vec<f64>> {
let height: usize = heights.len();
let width: usize = heights.first().map(|row| row.len()).unwrap_or(0);
if height == 0 || width == 0 {
return Vec::new();
}
let kernel_size: usize = (sigma * 3.0).ceil() as usize * 2 + 1;
let kernel: Vec<f64> = create_gaussian_kernel(kernel_size, sigma);
let half_kernel: i32 = kernel_size as i32 / 2;
// Apply blur
let mut blurred: Vec<Vec<f64>> = heights.to_owned();
let mut flat: Vec<f64> = Vec::with_capacity(width * height);
for row in heights {
flat.extend_from_slice(row);
}
// Horizontal pass
for row in blurred.iter_mut() {
let mut temp: Vec<f64> = row.clone();
for (i, val) in temp.iter_mut().enumerate() {
let mut scratch: Vec<f64> = vec![0.0; width * height];
// Horizontal pass: flat -> scratch
for y in 0..height {
let row_offset = y * width;
for x in 0..width {
let mut sum: f64 = 0.0;
let mut weight_sum: f64 = 0.0;
for (j, k) in kernel.iter().enumerate() {
let idx: i32 = i as i32 + j as i32 - kernel_size as i32 / 2;
if idx >= 0 && idx < row.len() as i32 {
sum += row[idx as usize] * k;
let idx_x = x as i32 + j as i32 - half_kernel;
if (0..width as i32).contains(&idx_x) {
let idx = row_offset + idx_x as usize;
sum += flat[idx] * k;
weight_sum += k;
}
}
*val = sum / weight_sum;
scratch[row_offset + x] = sum / weight_sum;
}
*row = temp;
}
// Vertical pass
let height: usize = blurred.len();
let width: usize = blurred[0].len();
for x in 0..width {
let temp: Vec<_> = blurred
.iter()
.take(height)
.map(|row: &Vec<f64>| row[x])
.collect();
for (y, row) in blurred.iter_mut().enumerate().take(height) {
// Vertical pass: scratch -> flat
for y in 0..height {
for x in 0..width {
let mut sum: f64 = 0.0;
let mut weight_sum: f64 = 0.0;
for (j, k) in kernel.iter().enumerate() {
let idx: i32 = y as i32 + j as i32 - kernel_size as i32 / 2;
if idx >= 0 && idx < height as i32 {
sum += temp[idx as usize] * k;
let idx_y = y as i32 + j as i32 - half_kernel;
if (0..height as i32).contains(&idx_y) {
let idx = idx_y as usize * width + x;
sum += scratch[idx] * k;
weight_sum += k;
}
}
row[x] = sum / weight_sum;
flat[y * width + x] = sum / weight_sum;
}
}
blurred
flat.chunks(width).map(|row| row.to_vec()).collect()
}
fn create_gaussian_kernel(size: usize, sigma: f64) -> Vec<f64> {
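Both versions of the separable blur above rely on `create_gaussian_kernel`, whose body is not part of this hunk. A typical normalized 1-D kernel looks like the sketch below; this is an assumption about what that helper does, not the project's actual implementation:

```rust
/// Illustrative only: a normalized 1-D Gaussian kernel with `size` taps.
fn create_gaussian_kernel(size: usize, sigma: f64) -> Vec<f64> {
    let half = size as i64 / 2;
    let mut kernel: Vec<f64> = (0..size as i64)
        .map(|i| {
            let d = (i - half) as f64;
            (-(d * d) / (2.0 * sigma * sigma)).exp()
        })
        .collect();
    // Normalize so the weights sum to 1 and the blur preserves overall height.
    let sum: f64 = kernel.iter().sum();
    for k in &mut kernel {
        *k /= sum;
    }
    kernel
}
```

Because the kernel is applied once per axis (horizontal then vertical pass), the blur costs O(width x height x kernel_size) instead of O(width x height x kernel_size^2) for a full 2-D convolution.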
@@ -1,189 +0,0 @@
//! Pre-computed flood fill cache for parallel polygon filling.
//!
//! This module provides a way to pre-compute all flood fill operations in parallel
//! before the main element processing loop, then retrieve cached results during
//! sequential processing.
use crate::floodfill::flood_fill_area;
use crate::osm_parser::{ProcessedElement, ProcessedWay};
use fnv::FnvHashMap;
use rayon::prelude::*;
use std::time::Duration;
/// A cache of pre-computed flood fill results, keyed by element ID.
pub struct FloodFillCache {
/// Cached results: element_id -> filled coordinates
way_cache: FnvHashMap<u64, Vec<(i32, i32)>>,
}
impl FloodFillCache {
/// Creates an empty cache.
pub fn new() -> Self {
Self {
way_cache: FnvHashMap::default(),
}
}
/// Pre-computes flood fills for all elements that need them.
///
/// This runs in parallel using Rayon, taking advantage of multiple CPU cores.
pub fn precompute(elements: &[ProcessedElement], timeout: Option<&Duration>) -> Self {
// Collect all ways that need flood fill
let ways_needing_fill: Vec<&ProcessedWay> = elements
.iter()
.filter_map(|el| match el {
ProcessedElement::Way(way) => {
if Self::way_needs_flood_fill(way) {
Some(way)
} else {
None
}
}
_ => None,
})
.collect();
// Compute all way flood fills in parallel
let way_results: Vec<(u64, Vec<(i32, i32)>)> = ways_needing_fill
.par_iter()
.map(|way| {
let polygon_coords: Vec<(i32, i32)> =
way.nodes.iter().map(|n| (n.x, n.z)).collect();
let filled = flood_fill_area(&polygon_coords, timeout);
(way.id, filled)
})
.collect();
// Build the cache
let mut cache = Self::new();
for (id, filled) in way_results {
cache.way_cache.insert(id, filled);
}
cache
}
/// Gets cached flood fill result for a way, or computes it if not cached.
///
/// Note: Combined ways created from relations (e.g., in `generate_natural_from_relation`)
/// will miss the cache and fall back to on-demand computation. This is by design,
/// these synthetic ways don't exist in the original element list and have relation IDs
/// rather than way IDs. The individual member ways are still cached.
pub fn get_or_compute(
&self,
way: &ProcessedWay,
timeout: Option<&Duration>,
) -> Vec<(i32, i32)> {
if let Some(cached) = self.way_cache.get(&way.id) {
// Clone is intentional: each result is typically accessed once during
// sequential processing, so the cost is acceptable vs Arc complexity
cached.clone()
} else {
// Fallback: compute on demand for synthetic/combined ways from relations
let polygon_coords: Vec<(i32, i32)> = way.nodes.iter().map(|n| (n.x, n.z)).collect();
flood_fill_area(&polygon_coords, timeout)
}
}
/// Gets cached flood fill result for a ProcessedElement (Way only).
/// For Nodes/Relations, returns empty vec.
pub fn get_or_compute_element(
&self,
element: &ProcessedElement,
timeout: Option<&Duration>,
) -> Vec<(i32, i32)> {
match element {
ProcessedElement::Way(way) => self.get_or_compute(way, timeout),
_ => Vec::new(),
}
}
/// Determines if a way element needs flood fill based on its tags.
///
/// This checks for tag presence (not specific values) because:
/// - Only some values within each tag type actually use flood fill
/// - But caching extra results is harmless (small memory overhead)
/// - And avoids duplicating value-checking logic from processors
///
/// Covered cases:
/// - building/building:part -> buildings::generate_buildings (includes bridge)
/// - landuse -> landuse::generate_landuse
/// - leisure -> leisure::generate_leisure
/// - amenity -> amenities::generate_amenities
/// - natural (except tree) -> natural::generate_natural
/// - highway with area=yes -> highways::generate_highways (area fill)
fn way_needs_flood_fill(way: &ProcessedWay) -> bool {
way.tags.contains_key("building")
|| way.tags.contains_key("building:part")
|| way.tags.contains_key("landuse")
|| way.tags.contains_key("leisure")
|| way.tags.contains_key("amenity")
|| way
.tags
.get("natural")
.map(|v| v != "tree")
.unwrap_or(false)
// Highway areas (like pedestrian plazas) use flood fill when area=yes
|| (way.tags.contains_key("highway")
&& way.tags.get("area").map(|v| v == "yes").unwrap_or(false))
}
/// Returns the number of cached way entries.
pub fn way_count(&self) -> usize {
self.way_cache.len()
}
/// Removes a way's cached flood fill result, freeing memory.
///
/// Call this after processing an element to release its cached data.
pub fn remove_way(&mut self, way_id: u64) {
self.way_cache.remove(&way_id);
}
/// Removes all cached flood fill results for ways in a relation.
///
/// Relations contain multiple ways, so we need to remove all of them.
pub fn remove_relation_ways(&mut self, way_ids: &[u64]) {
for &id in way_ids {
self.way_cache.remove(&id);
}
}
}
impl Default for FloodFillCache {
fn default() -> Self {
Self::new()
}
}
/// Configures the global Rayon thread pool with a CPU usage cap.
///
/// Call this once at startup before any parallel operations.
///
/// # Arguments
/// * `cpu_fraction` - Fraction of available cores to use (e.g., 0.9 for 90%).
///   Values are clamped to the range [0.1, 1.0].
pub fn configure_rayon_thread_pool(cpu_fraction: f64) {
// Clamp cpu_fraction to valid range
let cpu_fraction = cpu_fraction.clamp(0.1, 1.0);
let available_cores = std::thread::available_parallelism()
.map(|n| n.get())
.unwrap_or(4);
let target_threads = ((available_cores as f64) * cpu_fraction).floor() as usize;
let target_threads = target_threads.max(1); // At least 1 thread
// Only configure if we haven't already (this can only be called once)
match rayon::ThreadPoolBuilder::new()
.num_threads(target_threads)
.build_global()
{
Ok(()) => {
// Successfully configured (silent to avoid cluttering output)
}
Err(_) => {
// Thread pool already configured
}
}
}
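The cache file above describes a three-step lifecycle: precompute all fills in parallel with Rayon, look results up during the sequential pass, and drop each entry as soon as its element is done to free memory. A minimal sketch of that lifecycle with a plain `HashMap` and a stand-in `fill` closure instead of the project's `flood_fill_area` and OSM types (all names here are illustrative):

```rust
use rayon::prelude::*;
use std::collections::HashMap;

/// Illustrative only: precompute results in parallel, keyed by an ID.
/// `fill` stands in for the flood-fill routine; real code would key on way IDs.
fn precompute(
    ids: &[u64],
    fill: impl Fn(u64) -> Vec<(i32, i32)> + Sync,
) -> HashMap<u64, Vec<(i32, i32)>> {
    ids.par_iter().map(|&id| (id, fill(id))).collect()
}

fn main() {
    let mut cache = precompute(&[1, 2, 3], |id| vec![(id as i32, 0)]);
    // Sequential pass: take the entry, use it, and release its memory,
    // mirroring remove_way / remove_relation_ways above.
    if let Some(area) = cache.remove(&2) {
        println!("way 2 fills {} columns", area.len());
    }
}
```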
@@ -74,9 +74,6 @@ fn get_area_name_for_bedrock(bbox: &LLBBox) -> String {
}
pub fn run_gui() {
// Configure thread pool with 90% CPU cap to keep system responsive
crate::floodfill_cache::configure_rayon_thread_pool(0.9);
// Launch the UI
println!("Launching UI...");

@@ -797,6 +794,7 @@ fn gui_start_generation(
selected_world: String,
world_scale: f64,
ground_level: i32,
floodfill_timeout: u64,
terrain_enabled: bool,
skip_osm_objects: bool,
interior_enabled: bool,

@@ -977,7 +975,7 @@ fn gui_start_generation(
roof: roof_enabled,
fillground: fillground_enabled,
debug: false,
timeout: Some(std::time::Duration::from_secs(40)),
timeout: Some(std::time::Duration::from_secs(floodfill_timeout)),
spawn_point,
};
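With the GUI passing the user's floodfill timeout through instead of a hard-coded 40 seconds, the Rust side presumably just wraps the value in a `Duration` when building the Args struct. A hedged sketch of that conversion; treating 0 as "no timeout" is an assumption for illustration, not behavior this diff shows:

```rust
use std::time::Duration;

/// Illustrative only: turn the GUI's timeout (whole seconds) into the
/// Option<Duration> shape used by the `timeout` field above.
fn floodfill_timeout(seconds: u64) -> Option<Duration> {
    if seconds == 0 {
        None // assumed convention: 0 disables the timeout
    } else {
        Some(Duration::from_secs(seconds))
    }
}
```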
src/gui/index.html — 8 changes (vendored)
@@ -151,6 +151,14 @@
</div>
</div>
<!-- Floodfill Timeout Input -->
<div class="settings-row">
<label for="floodfill-timeout" data-localize="floodfill_timeout">Floodfill Timeout (sec)</label>
<div class="settings-control">
<input type="number" id="floodfill-timeout" name="floodfill-timeout" min="0" step="1" value="20" placeholder="Seconds">
</div>
</div>
<!-- Map Theme Selector -->
<div class="settings-row">
<label for="tile-theme-select" data-localize="map_theme">Map Theme</label>
src/gui/js/main.js — 9 changes (vendored)
@@ -91,6 +91,7 @@ async function applyLocalization(localization) {
"h2[data-localize='customization_settings']": "customization_settings",
"label[data-localize='world_scale']": "world_scale",
"label[data-localize='custom_bounding_box']": "custom_bounding_box",
"label[data-localize='floodfill_timeout']": "floodfill_timeout",
// DEPRECATED: Ground level localization removed
// "label[data-localize='ground_level']": "ground_level",
"label[data-localize='language']": "language",

@@ -109,6 +110,7 @@ async function applyLocalization(localization) {
// Placeholder strings
"input[id='bbox-coords']": "placeholder_bbox",
"input[id='floodfill-timeout']": "placeholder_floodfill",
// DEPRECATED: Ground level placeholder removed
// "input[id='ground-level']": "placeholder_ground"
};

@@ -789,12 +791,14 @@ async function startGeneration() {
var roof = document.getElementById("roof-toggle").checked;
var fill_ground = document.getElementById("fillground-toggle").checked;
var scale = parseFloat(document.getElementById("scale-value-slider").value);
var floodfill_timeout = parseInt(document.getElementById("floodfill-timeout").value, 10);
// var ground_level = parseInt(document.getElementById("ground-level").value, 10);
// DEPRECATED: Ground level input removed from UI
var ground_level = -62;
// Validate ground_level
ground_level = isNaN(ground_level) || ground_level < -62 ? -62 : ground_level;
// Validate floodfill_timeout and ground_level
floodfill_timeout = isNaN(floodfill_timeout) || floodfill_timeout < 0 ? 20 : floodfill_timeout;
ground_level = isNaN(ground_level) || ground_level < -62 ? 20 : ground_level;
// Get telemetry consent (defaults to false if not set)
const telemetryConsent = window.getTelemetryConsent ? window.getTelemetryConsent() : false;

@@ -805,6 +809,7 @@ async function startGeneration() {
selectedWorld: worldPath,
worldScale: scale,
groundLevel: ground_level,
floodfillTimeout: floodfill_timeout,
terrainEnabled: terrain,
skipOsmObjects: skipOsmObjects,
interiorEnabled: interior,
@@ -9,11 +9,9 @@ mod clipping;
mod colors;
mod coordinate_system;
mod data_processing;
mod deterministic_rng;
mod element_processing;
mod elevation_data;
mod floodfill;
mod floodfill_cache;
mod ground;
mod map_renderer;
mod map_transformation;

@@ -51,9 +49,6 @@ mod progress {
use windows::Win32::System::Console::{AttachConsole, FreeConsole, ATTACH_PARENT_PROCESS};
fn run_cli() {
// Configure thread pool with 90% CPU cap to keep system responsive
floodfill_cache::configure_rayon_thread_pool(0.9);
let version: &str = env!("CARGO_PKG_VERSION");
let repository: &str = env!("CARGO_PKG_REPOSITORY");
println!(
@@ -5,8 +5,8 @@ use crate::coordinate_system::transformation::CoordTransformer;
|
||||
use crate::progress::emit_gui_progress_update;
|
||||
use colored::Colorize;
|
||||
use serde::Deserialize;
|
||||
use serde_json::Value;
|
||||
use std::collections::HashMap;
|
||||
use std::sync::Arc;
|
||||
|
||||
// Raw data from OSM
|
||||
|
||||
@@ -29,18 +29,9 @@ struct OsmElement {
pub members: Vec<OsmMember>,
}

#[derive(Debug, Deserialize)]
pub struct OsmData {
elements: Vec<OsmElement>,
#[serde(default)]
pub remark: Option<String>,
}

impl OsmData {
/// Returns true if there are no elements in the OSM data
pub fn is_empty(&self) -> bool {
self.elements.is_empty()
}
#[derive(Deserialize)]
struct OsmData {
pub elements: Vec<OsmElement>,
}

struct SplitOsmData {
@@ -77,6 +68,11 @@ impl SplitOsmData {
}
}

fn parse_raw_osm_data(json_data: Value) -> Result<SplitOsmData, serde_json::Error> {
let osm_data: OsmData = serde_json::from_value(json_data)?;
Ok(SplitOsmData::from_raw_osm_data(osm_data))
}

// End raw data

// Normalized data that we can use
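The two sides of these hunks trade a richer typed OsmData (with remark and is_empty()) against a minimal deserialization target plus a later conversion from serde_json::Value. The trade-off, in isolation, looks roughly like this sketch with invented field names, not the crate's real types:

use serde::Deserialize;
use serde_json::Value;

#[derive(Debug, Deserialize)]
struct Element {
    id: u64,
}

#[derive(Debug, Deserialize)]
struct Payload {
    elements: Vec<Element>,
    // Unknown keys such as "remark" are ignored unless modelled explicitly.
    #[serde(default)]
    remark: Option<String>,
}

fn main() -> Result<(), serde_json::Error> {
    let json = r#"{"elements":[{"id":1,"type":"node"}],"remark":"ok"}"#;

    // Typed path: fields are validated up front and accessed without lookups.
    let typed: Payload = serde_json::from_str(json)?;
    assert_eq!(typed.elements[0].id, 1);
    assert_eq!(typed.remark.as_deref(), Some("ok"));

    // Untyped path: everything stays a tree of Values, checked at each use site.
    let value: Value = serde_json::from_str(json)?;
    assert_eq!(value["elements"].as_array().map_or(0, |e| e.len()), 1);
    assert_eq!(value["remark"].as_str(), Some("ok"));
    Ok(())
}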
@@ -116,7 +112,7 @@ pub enum ProcessedMemberRole {
#[derive(Debug, Clone, PartialEq)]
pub struct ProcessedMember {
pub role: ProcessedMemberRole,
pub way: Arc<ProcessedWay>,
pub way: ProcessedWay,
}

#[derive(Debug, Clone, PartialEq)]
@@ -168,7 +164,7 @@ impl ProcessedElement {
}

pub fn parse_osm_data(
osm_data: OsmData,
json_data: Value,
bbox: LLBBox,
scale: f64,
debug: bool,
@@ -178,7 +174,7 @@ pub fn parse_osm_data(
emit_gui_progress_update(5.0, "Parsing data...");

// Deserialize the JSON data into the OSMData structure
let data = SplitOsmData::from_raw_osm_data(osm_data);
let data = parse_raw_osm_data(json_data).expect("Failed to parse OSM data");

let (coord_transformer, xzbbox) = CoordTransformer::llbbox_to_xzbbox(&bbox, scale)
.unwrap_or_else(|e| {
@@ -193,7 +189,7 @@ pub fn parse_osm_data(
}

let mut nodes_map: HashMap<u64, ProcessedNode> = HashMap::new();
let mut ways_map: HashMap<u64, Arc<ProcessedWay>> = HashMap::new();
let mut ways_map: HashMap<u64, ProcessedWay> = HashMap::new();

let mut processed_elements: Vec<ProcessedElement> = Vec::new();
@@ -242,15 +238,17 @@ pub fn parse_osm_data(
let tags = element.tags.clone().unwrap_or_default();

// Store unclipped way for relation assembly (clipping happens after ring merging)
let way = Arc::new(ProcessedWay {
id: element.id,
tags,
nodes,
});
ways_map.insert(element.id, Arc::clone(&way));
ways_map.insert(
element.id,
ProcessedWay {
id: element.id,
tags: tags.clone(),
nodes: nodes.clone(),
},
);

// Clip way nodes for standalone way processing (not relations)
let clipped_nodes = clip_way_to_bbox(&way.nodes, &xzbbox);
let clipped_nodes = clip_way_to_bbox(&nodes, &xzbbox);

// Skip ways that are completely outside the bbox (empty after clipping)
if clipped_nodes.is_empty() {
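One side of this hunk stores ways as Arc<ProcessedWay> so that relation members can share the same node list instead of cloning it. The effect in isolation, with simplified types that are not the parser's own:

use std::collections::HashMap;
use std::sync::Arc;

#[derive(Debug)]
struct Way {
    id: u64,
    nodes: Vec<(i32, i32)>,
}

fn main() {
    let mut ways: HashMap<u64, Arc<Way>> = HashMap::new();
    let way = Arc::new(Way { id: 7, nodes: vec![(0, 0), (1, 1)] });
    ways.insert(way.id, Arc::clone(&way));

    // A relation member can hold another handle to the same allocation:
    // only the reference count is bumped, the node vector is never copied.
    let member_way = Arc::clone(ways.get(&7).unwrap());
    assert_eq!(Arc::strong_count(&member_way), 3);
    assert_eq!(member_way.nodes.len(), 2);
}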
@@ -259,8 +257,8 @@ pub fn parse_osm_data(

let processed: ProcessedWay = ProcessedWay {
id: element.id,
tags: way.tags.clone(),
nodes: clipped_nodes,
tags: tags.clone(),
nodes: clipped_nodes.clone(),
};

processed_elements.push(ProcessedElement::Way(processed));
@@ -296,8 +294,8 @@ pub fn parse_osm_data(
};

// Check if the way exists in ways_map
let way = match ways_map.get(&mem.r#ref) {
Some(w) => Arc::clone(w),
let way: ProcessedWay = match ways_map.get(&mem.r#ref) {
Some(w) => w.clone(),
None => {
// Way was likely filtered out because it was completely outside the bbox
return None;
@@ -313,11 +311,11 @@ pub fn parse_osm_data(
if clipped_nodes.is_empty() {
return None;
}
Arc::new(ProcessedWay {
ProcessedWay {
id: way.id,
tags: way.tags.clone(),
tags: way.tags,
nodes: clipped_nodes,
})
}
};

Some(ProcessedMember {
@@ -338,9 +336,6 @@ pub fn parse_osm_data(

emit_gui_progress_update(15.0, "");

drop(nodes_map);
drop(ways_map);

(processed_elements, xzbbox)
}
@@ -1,14 +1,12 @@
use crate::coordinate_system::geographic::LLBBox;
use crate::osm_parser::OsmData;
use crate::progress::{emit_gui_error, emit_gui_progress_update, is_running_with_gui};
use colored::Colorize;
use rand::seq::SliceRandom;
use reqwest::blocking::Client;
use reqwest::blocking::ClientBuilder;
use serde::Deserialize;
use serde_json::Value;
use std::fs::File;
use std::io::{self, BufReader, Cursor, Write};
use std::io::{self, BufReader, Write};
use std::process::Command;
use std::time::Duration;
@@ -81,14 +79,13 @@ fn download_with_wget(url: &str, query: &str) -> io::Result<String> {
}
}

pub fn fetch_data_from_file(file: &str) -> Result<OsmData, Box<dyn std::error::Error>> {
pub fn fetch_data_from_file(file: &str) -> Result<Value, Box<dyn std::error::Error>> {
println!("{} Loading data from file...", "[1/7]".bold());
emit_gui_progress_update(1.0, "Loading data from file...");

let file: File = File::open(file)?;
let reader: BufReader<File> = BufReader::new(file);
let mut deserializer = serde_json::Deserializer::from_reader(reader);
let data: OsmData = OsmData::deserialize(&mut deserializer)?;
let data: Value = serde_json::from_reader(reader)?;
Ok(data)
}
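One branch of fetch_data_from_file deserializes straight from the BufReader into the typed struct; the other buffers the whole document as a Value. A minimal sketch of the reader-based path, where the file name and Payload struct are placeholders rather than the crate's own types:

use serde::Deserialize;
use std::fs::File;
use std::io::BufReader;

#[derive(Debug, Deserialize)]
struct Payload {
    elements: Vec<serde_json::Value>,
}

fn load(path: &str) -> Result<Payload, Box<dyn std::error::Error>> {
    let reader = BufReader::new(File::open(path)?);
    // Deserialize directly from the reader; no intermediate String or Value tree is kept.
    let mut de = serde_json::Deserializer::from_reader(reader);
    Ok(Payload::deserialize(&mut de)?)
}

fn main() {
    // "data.json" is just an example path.
    match load("data.json") {
        Ok(p) => println!("{} elements", p.elements.len()),
        Err(e) => eprintln!("failed to load: {e}"),
    }
}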
@@ -98,7 +95,7 @@ pub fn fetch_data_from_overpass(
debug: bool,
download_method: &str,
save_file: Option<&str>,
) -> Result<OsmData, Box<dyn std::error::Error>> {
) -> Result<Value, Box<dyn std::error::Error>> {
println!("{} Fetching data...", "[1/7]".bold());
emit_gui_progress_update(1.0, "Fetching data...");
@@ -185,12 +182,14 @@ pub fn fetch_data_from_overpass(
println!("API response saved to: {save_file}");
}

let mut deserializer =
serde_json::Deserializer::from_reader(Cursor::new(response.as_bytes()));
let data: OsmData = OsmData::deserialize(&mut deserializer)?;
let data: Value = serde_json::from_str(&response)?;

if data.is_empty() {
if let Some(remark) = data.remark.as_deref() {
if data["elements"]
.as_array()
.map_or(0, |elements: &Vec<Value>| elements.len())
== 0
{
if let Some(remark) = data["remark"].as_str() {
// Check if the remark mentions memory or other runtime errors
if remark.contains("runtime error") && remark.contains("out of memory") {
eprintln!("{}", "Error! The query ran out of memory on the Overpass API server. Try using a smaller area.".red().bold());
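For the untyped branch above, the empty-result and remark checks are plain Value lookups. A standalone sketch with an invented sample payload:

use serde_json::Value;

fn main() {
    let data: Value = serde_json::json!({
        "elements": [],
        "remark": "runtime error: Query ran out of memory"
    });

    let element_count = data["elements"].as_array().map_or(0, |e| e.len());
    if element_count == 0 {
        if let Some(remark) = data["remark"].as_str() {
            if remark.contains("runtime error") && remark.contains("out of memory") {
                eprintln!("Overpass ran out of memory; try a smaller area.");
            }
        }
    }
}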
@@ -212,7 +211,7 @@ pub fn fetch_data_from_overpass(
}

if debug {
println!("Additional debug information: {data:?}");
println!("Additional debug information: {data}");
}

if !is_running_with_gui() {
@@ -7,8 +7,9 @@ use crate::retrieve_data;
// this is copied from main.rs
pub fn generate_example(llbbox: LLBBox) -> (XZBBox, Vec<ProcessedElement>) {
// Fetch data
let raw_data = retrieve_data::fetch_data_from_overpass(llbbox, false, "requests", None)
.expect("Failed to fetch data");
let raw_data: serde_json::Value =
retrieve_data::fetch_data_from_overpass(llbbox, false, "requests", None)
.expect("Failed to fetch data");

// Parse raw data
let (mut parsed_elements, xzbbox) = osm_parser::parse_osm_data(raw_data, llbbox, 1.0, false);
@@ -1,7 +1,6 @@
//! Java Edition Anvil format world saving.
//!
//! This module handles saving worlds in the Java Edition Anvil (.mca) format.
//! Supports streaming mode for memory-efficient saving of large worlds.

use super::common::{Chunk, ChunkToModify, Section};
use super::WorldEditor;
@@ -12,9 +11,11 @@ use fastanvil::Region;
use fastnbt::Value;
use fnv::FnvHashMap;
use indicatif::{ProgressBar, ProgressStyle};
use rayon::prelude::*;
use std::collections::HashMap;
use std::fs::File;
use std::io::Write;
use std::sync::atomic::{AtomicU64, Ordering};

#[cfg(feature = "gui")]
use crate::telemetry::{send_log, LogLevel};
@@ -76,9 +77,6 @@ impl<'a> WorldEditor<'a> {
}

/// Saves the world in Java Edition Anvil format.
///
/// Uses streaming mode: saves regions one at a time and releases memory after each,
/// significantly reducing peak memory usage for large worlds.
pub(super) fn save_java(&mut self) {
println!("{} Saving world...", "[7/7]".bold());
emit_gui_progress_update(90.0, "Saving world...");
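The sequential side of the following hunk implements the streaming save described in the doc comment above: collect the region keys, remove each region from the map, write it, and let it drop so its memory is reclaimed before the next one is touched. Stripped of the Anvil details, the pattern looks roughly like this (types invented for illustration):

use std::collections::HashMap;

#[derive(Debug)]
struct Region {
    chunks: Vec<u8>, // stand-in for the real chunk data
}

fn save_region(key: (i32, i32), region: &Region) {
    println!("saving region {:?} ({} bytes of chunk data)", key, region.chunks.len());
}

fn main() {
    let mut regions: HashMap<(i32, i32), Region> = HashMap::new();
    regions.insert((0, 0), Region { chunks: vec![0; 16] });
    regions.insert((0, 1), Region { chunks: vec![0; 32] });

    // Collect keys first so the map can be drained while iterating.
    let keys: Vec<(i32, i32)> = regions.keys().copied().collect();
    for key in keys {
        if let Some(region) = regions.remove(&key) {
            save_region(key, &region);
            // `region` is dropped here, releasing its memory before the next iteration.
        }
    }
    assert!(regions.is_empty());
}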
@@ -102,154 +100,138 @@ impl<'a> WorldEditor<'a> {
.progress_chars("█▓░"),
);

// Streaming mode: Process regions sequentially and release memory after each.
// This significantly reduces peak memory for large worlds (100+ regions).
// For small worlds, the overhead is negligible.
let mut regions_processed: u64 = 0;
let regions_processed = AtomicU64::new(0);

// Collect region keys first to allow draining
let region_keys: Vec<(i32, i32)> = self.world.regions.keys().copied().collect();
self.world
.regions
.par_iter()
.for_each(|((region_x, region_z), region_to_modify)| {
let mut region = self.create_region(*region_x, *region_z);
let mut ser_buffer = Vec::with_capacity(8192);

for (region_x, region_z) in region_keys {
// Remove region from memory - this is the key to memory savings
if let Some(region_to_modify) = self.world.regions.remove(&(region_x, region_z)) {
self.save_single_region(region_x, region_z, &region_to_modify);
for (&(chunk_x, chunk_z), chunk_to_modify) in &region_to_modify.chunks {
if !chunk_to_modify.sections.is_empty() || !chunk_to_modify.other.is_empty() {
// Read existing chunk data if it exists
let existing_data = region
.read_chunk(chunk_x as usize, chunk_z as usize)
.unwrap()
.unwrap_or_default();

// Region memory is freed when region_to_modify goes out of scope here
}
// Parse existing chunk or create new one
let mut chunk: Chunk = if !existing_data.is_empty() {
fastnbt::from_bytes(&existing_data).unwrap()
} else {
Chunk {
sections: Vec::new(),
x_pos: chunk_x + (region_x * 32),
z_pos: chunk_z + (region_z * 32),
is_light_on: 0,
other: FnvHashMap::default(),
}
};

regions_processed += 1;
// Update sections while preserving existing data
let new_sections: Vec<Section> = chunk_to_modify.sections().collect();
for new_section in new_sections {
if let Some(existing_section) =
chunk.sections.iter_mut().find(|s| s.y == new_section.y)
{
// Merge block states
existing_section.block_states.palette =
new_section.block_states.palette;
existing_section.block_states.data = new_section.block_states.data;
} else {
// Add new section if it doesn't exist
chunk.sections.push(new_section);
}
}

// Update progress at regular intervals
let update_interval = (total_regions / 10).max(1);
if regions_processed.is_multiple_of(update_interval)
|| regions_processed == total_regions
{
let progress = 90.0 + (regions_processed as f64 / total_regions as f64) * 9.0;
emit_gui_progress_update(progress, "Saving world...");
}
// Preserve existing block entities and merge with new ones
if let Some(existing_entities) = chunk.other.get_mut("block_entities") {
if let Some(new_entities) = chunk_to_modify.other.get("block_entities")
{
if let (Value::List(existing), Value::List(new)) =
(existing_entities, new_entities)
{
// Remove old entities that are replaced by new ones
existing.retain(|e| {
if let Value::Compound(map) = e {
let (x, y, z) = get_entity_coords(map);
!new.iter().any(|new_e| {
if let Value::Compound(new_map) = new_e {
let (nx, ny, nz) = get_entity_coords(new_map);
x == nx && y == ny && z == nz
} else {
false
}
})
} else {
true
}
});
// Add new entities
existing.extend(new.clone());
}
}
} else {
// If no existing entities, just add the new ones
if let Some(new_entities) = chunk_to_modify.other.get("block_entities")
{
chunk
.other
.insert("block_entities".to_string(), new_entities.clone());
}
}

save_pb.inc(1);
}
// Update chunk coordinates and flags
chunk.x_pos = chunk_x + (region_x * 32);
chunk.z_pos = chunk_z + (region_z * 32);

save_pb.finish();
}
/// Saves a single region to disk.
///
/// This is extracted to allow streaming mode to save and release regions one at a time.
fn save_single_region(
&self,
region_x: i32,
region_z: i32,
region_to_modify: &super::common::RegionToModify,
) {
let mut region = self.create_region(region_x, region_z);
let mut ser_buffer = Vec::with_capacity(8192);

for (&(chunk_x, chunk_z), chunk_to_modify) in &region_to_modify.chunks {
if !chunk_to_modify.sections.is_empty() || !chunk_to_modify.other.is_empty() {
// Read existing chunk data if it exists
let existing_data = region
.read_chunk(chunk_x as usize, chunk_z as usize)
.unwrap()
.unwrap_or_default();

// Parse existing chunk or create new one
let mut chunk: Chunk = if !existing_data.is_empty() {
fastnbt::from_bytes(&existing_data).unwrap()
} else {
Chunk {
sections: Vec::new(),
x_pos: chunk_x + (region_x * 32),
z_pos: chunk_z + (region_z * 32),
is_light_on: 0,
other: FnvHashMap::default(),
}
};

// Update sections while preserving existing data
let new_sections: Vec<Section> = chunk_to_modify.sections().collect();
for new_section in new_sections {
if let Some(existing_section) =
chunk.sections.iter_mut().find(|s| s.y == new_section.y)
{
// Merge block states
existing_section.block_states.palette = new_section.block_states.palette;
existing_section.block_states.data = new_section.block_states.data;
} else {
// Add new section if it doesn't exist
chunk.sections.push(new_section);
// Create Level wrapper and save
let level_data = create_level_wrapper(&chunk);
ser_buffer.clear();
fastnbt::to_writer(&mut ser_buffer, &level_data).unwrap();
region
.write_chunk(chunk_x as usize, chunk_z as usize, &ser_buffer)
.unwrap();
}
}

// Preserve existing block entities and merge with new ones
if let Some(existing_entities) = chunk.other.get_mut("block_entities") {
if let Some(new_entities) = chunk_to_modify.other.get("block_entities") {
if let (Value::List(existing), Value::List(new)) =
(existing_entities, new_entities)
{
// Remove old entities that are replaced by new ones
existing.retain(|e| {
if let Value::Compound(map) = e {
let (x, y, z) = get_entity_coords(map);
!new.iter().any(|new_e| {
if let Value::Compound(new_map) = new_e {
let (nx, ny, nz) = get_entity_coords(new_map);
x == nx && y == ny && z == nz
} else {
false
}
})
} else {
true
}
});
// Add new entities
existing.extend(new.clone());
// Second pass: ensure all chunks exist
for chunk_x in 0..32 {
for chunk_z in 0..32 {
let abs_chunk_x = chunk_x + (region_x * 32);
let abs_chunk_z = chunk_z + (region_z * 32);

// Check if chunk exists in our modifications
let chunk_exists =
region_to_modify.chunks.contains_key(&(chunk_x, chunk_z));

// If chunk doesn't exist, create it with base layer
if !chunk_exists {
let (ser_buffer, _) = Self::create_base_chunk(abs_chunk_x, abs_chunk_z);
region
.write_chunk(chunk_x as usize, chunk_z as usize, &ser_buffer)
.unwrap();
}
}
} else {
// If no existing entities, just add the new ones
if let Some(new_entities) = chunk_to_modify.other.get("block_entities") {
chunk
.other
.insert("block_entities".to_string(), new_entities.clone());
}
}

// Update chunk coordinates and flags
chunk.x_pos = chunk_x + (region_x * 32);
chunk.z_pos = chunk_z + (region_z * 32);
// Update progress
let regions_done = regions_processed.fetch_add(1, Ordering::SeqCst) + 1;

// Create Level wrapper and save
let level_data = create_level_wrapper(&chunk);
ser_buffer.clear();
fastnbt::to_writer(&mut ser_buffer, &level_data).unwrap();
region
.write_chunk(chunk_x as usize, chunk_z as usize, &ser_buffer)
.unwrap();
}
}

// Second pass: ensure all chunks exist
for chunk_x in 0..32 {
for chunk_z in 0..32 {
let abs_chunk_x = chunk_x + (region_x * 32);
let abs_chunk_z = chunk_z + (region_z * 32);

// Check if chunk exists in our modifications
let chunk_exists = region_to_modify.chunks.contains_key(&(chunk_x, chunk_z));

// If chunk doesn't exist, create it with base layer
if !chunk_exists {
let (ser_buffer, _) = Self::create_base_chunk(abs_chunk_x, abs_chunk_z);
region
.write_chunk(chunk_x as usize, chunk_z as usize, &ser_buffer)
.unwrap();
// Update progress at regular intervals (every ~1% or at least every 10 regions)
// This ensures progress is visible even with many regions
let update_interval = (total_regions / 10).max(1);
if regions_done.is_multiple_of(update_interval) || regions_done == total_regions {
let progress = 90.0 + (regions_done as f64 / total_regions as f64) * 9.0;
emit_gui_progress_update(progress, "Saving world...");
}
}
}

save_pb.inc(1);
});

save_pb.finish();
}
}
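The parallel branch of the save hunk above replaces the sequential counter with par_iter over the regions and an AtomicU64 for progress reporting. The shape of that pattern, as a self-contained sketch rather than the editor's own code (the 90-99% progress window mirrors the values used above):

use rayon::prelude::*;
use std::sync::atomic::{AtomicU64, Ordering};

fn main() {
    let regions: Vec<(i32, i32)> = (0..8).map(|i| (i, i)).collect();
    let total = regions.len() as u64;
    let done = AtomicU64::new(0);

    regions.par_iter().for_each(|&(x, z)| {
        // ... write the region here ...
        let finished = done.fetch_add(1, Ordering::SeqCst) + 1;
        let update_interval = (total / 10).max(1);
        if finished % update_interval == 0 || finished == total {
            let progress = 90.0 + (finished as f64 / total as f64) * 9.0;
            println!("saving world... {progress:.1}% (region {x},{z})");
        }
    });
}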