Compare commits

..

2 Commits

Author  SHA1  Message  Date
Louis Erbkamm  21659c66e0  Update README.md  2024-09-20 18:26:10 +02:00
Louis Erbkamm  b35b4f83ff  Delete .github/workflows directory  2024-09-20 18:23:24 +02:00
192 changed files with 3061 additions and 61996 deletions

1
.envrc

@@ -1 +0,0 @@
use flake

5
.flake8 Normal file

@@ -0,0 +1,5 @@
[flake8]
ignore = E203, W503, F405, F403
# line length is intentionally set to 80 here because black uses Bugbear
# See https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#line-length for more details
max-line-length = 100

1
.gitattributes vendored

@@ -1 +0,0 @@
src/gui/** linguist-vendored

1
.github/FUNDING.yml vendored

@@ -1 +0,0 @@
buy_me_a_coffee: louisdev


@@ -10,14 +10,11 @@ assignees: ''
**Describe the bug**
A clear and concise description of what the bug is and what you expected to happen.
**Used bbox area**
Please provide your input parameters (BBOX) so we can reproduce the issue. *(For example: 48.133444 11.569462 48.142609 11.584740)*
**Arnis and Minecraft version**
Please tell us which version of Arnis and Minecraft you used, as well as whether you are on Windows, Linux, or macOS.
**Used bbox parameter**
Please provide your input parameters so we can reproduce the issue.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Additional context**
Add any other context about the problem here. If you used any custom settings, please provide them here too. If possible, please also provide the log file, which can be found at C:\Users\USERNAME\AppData\Local\com.louisdev.arnis\logs
Add any other context about the problem here. Please also provide the --bbox input parameters you used so we can reproduce the issue.
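For reference, the bbox values requested above are passed to the CLI in the same form used by the PR Benchmark workflow later in this diff; a minimal sketch (the exact flag set for your run may differ):

    arnis --path="./world" --bbox="48.133444 11.569462 48.142609 11.584740"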


@@ -1,10 +0,0 @@
version: 2
updates:
- package-ecosystem: "cargo"
directory: "/"
schedule:
interval: "monthly"
- package-ecosystem: "github-actions"
directory: "/"
schedule:
interval: "monthly"


@@ -1,74 +0,0 @@
name: CI Build
# Trigger CI on pull requests when relevant files change, and on pushes to main
on:
pull_request:
paths:
- '.github/**'
- 'src/**'
- 'Cargo.toml'
- 'Cargo.lock'
push:
branches:
- main
workflow_dispatch:
jobs:
lint:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v6
- name: Set up Rust
uses: dtolnay/rust-toolchain@v1
with:
toolchain: stable
components: clippy, rustfmt
- name: Install Linux dependencies
run: |
sudo apt update
sudo apt install -y software-properties-common
sudo add-apt-repository universe
echo "deb http://archive.ubuntu.com/ubuntu $(lsb_release -sc)-backports main restricted universe multiverse" | sudo tee -a /etc/apt/sources.list
sudo apt update
sudo apt install -y libgtk-3-dev build-essential pkg-config libglib2.0-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev
echo "PKG_CONFIG_PATH=/usr/lib/x86_64-linux-gnu/pkgconfig" >> $GITHUB_ENV
- uses: Swatinem/rust-cache@v2
- name: Check formatting
run: cargo fmt -- --check
- name: Check clippy lints
run: cargo clippy --all-targets --all-features -- -D warnings
build:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v6
- name: Set up Rust
uses: dtolnay/rust-toolchain@v1
with:
toolchain: stable
- name: Install Linux dependencies
run: |
sudo apt update
sudo apt install -y software-properties-common
sudo add-apt-repository universe
echo "deb http://archive.ubuntu.com/ubuntu $(lsb_release -sc)-backports main restricted universe multiverse" | sudo tee -a /etc/apt/sources.list
sudo apt update
sudo apt install -y libgtk-3-dev build-essential pkg-config libglib2.0-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev
echo "PKG_CONFIG_PATH=/usr/lib/x86_64-linux-gnu/pkgconfig" >> $GITHUB_ENV
- uses: Swatinem/rust-cache@v2
- name: Build (all targets, all features)
run: cargo build --all-targets --all-features --release
- name: Run unit tests
run: cargo test --all-targets --all-features
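The same checks can be run locally before opening a pull request; a sketch assuming the GTK/WebKit packages installed by the workflow above are already present:

    cargo fmt -- --check
    cargo clippy --all-targets --all-features -- -D warnings
    cargo build --all-targets --all-features --release
    cargo test --all-targets --all-features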


@@ -1,141 +0,0 @@
name: PR Benchmark
permissions:
contents: read
pull-requests: write
on:
pull_request:
types: [opened, reopened]
issue_comment:
types: [created]
jobs:
benchmark:
if: |
github.event_name == 'pull_request' ||
(github.event_name == 'issue_comment' &&
github.event.issue.pull_request != null &&
contains(github.event.comment.body, 'retrigger-benchmark'))
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v5
- name: Set up Rust
uses: dtolnay/rust-toolchain@v1
with:
toolchain: stable
- uses: Swatinem/rust-cache@v2
- name: Create dummy Minecraft world directory
run: |
mkdir -p "./world/region"
- name: Build for release
run: cargo build --release --no-default-features
- name: Start timer
id: start_time
run: echo "start_time=$(date +%s)" >> $GITHUB_OUTPUT
- name: Run benchmark command with memory tracking
id: benchmark
run: |
/usr/bin/time -v ./target/release/arnis --path="./world" --terrain --generate-map --bbox="48.125768 11.552296 48.148565 11.593838" 2> benchmark_log.txt
grep "Maximum resident set size" benchmark_log.txt | awk '{print $6}' > peak_mem_kb.txt
peak_kb=$(cat peak_mem_kb.txt)
peak_mb=$((peak_kb / 1024))
echo "peak_memory=${peak_mb}" >> $GITHUB_OUTPUT
- name: End timer and calculate duration
id: end_time
run: |
end_time=$(date +%s)
start_time=${{ steps.start_time.outputs.start_time }}
duration=$((end_time - start_time))
echo "duration=$duration" >> $GITHUB_OUTPUT
- name: Check for map preview
id: map_check
run: |
if [ -f "./world/arnis_world_map.png" ]; then
echo "Map preview generated successfully"
echo "map_exists=true" >> $GITHUB_OUTPUT
else
echo "Map preview not found"
echo "map_exists=false" >> $GITHUB_OUTPUT
fi
- name: Upload map preview as artifact
if: steps.map_check.outputs.map_exists == 'true'
uses: actions/upload-artifact@v4
with:
name: world-map-preview
path: ./world/arnis_world_map.png
retention-days: 60
- name: Format duration and generate summary
id: comment_body
run: |
duration=${{ steps.end_time.outputs.duration }}
minutes=$((duration / 60))
seconds=$((duration % 60))
peak_mem=${{ steps.benchmark.outputs.peak_memory }}
baseline_time=69
diff=$((duration - baseline_time))
abs_diff=${diff#-}
if [ "$diff" -lt -5 ]; then
verdict="✅ This PR **improves generation time**."
elif [ "$abs_diff" -le 4 ]; then
verdict="🟢 Generation time is unchanged."
elif [ "$diff" -le 15 ]; then
verdict="⚠️ This PR **worsens generation time**."
else
verdict="🚨 This PR **drastically worsens generation time**."
fi
baseline_mem=935
mem_annotation=""
if [ "$peak_mem" -gt 2000 ]; then
mem_diff=$((peak_mem - baseline_mem))
mem_percent=$((mem_diff * 100 / baseline_mem))
mem_annotation=" (↗ ${mem_percent}% more)"
fi
# Get current timestamp
benchmark_time=$(date -u "+%Y-%m-%d %H:%M:%S UTC")
run_url="https://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}"
{
echo "summary<<EOF"
echo "## ⏱️ Benchmark Results"
echo ""
echo "| Metric | Value |"
echo "|--------|-------|"
echo "| Duration | **${minutes}m ${seconds}s** |"
echo "| Peak Memory | **${peak_mem} MB**${mem_annotation} |"
echo "| Baseline | **${baseline_time}s** |"
echo "| Delta | **${diff}s** |"
echo "| Commit | [\`${GITHUB_SHA:0:7}\`](https://github.com/${GITHUB_REPOSITORY}/commit/${GITHUB_SHA}) |"
echo ""
echo "${verdict}"
echo ""
echo "---"
echo ""
echo "📅 **Last benchmark:** ${benchmark_time} | 📥 [Download generated world map](${run_url}#artifacts)"
echo ""
echo "_You can retrigger the benchmark by commenting \`retrigger-benchmark\`._"
echo "EOF"
} >> "$GITHUB_OUTPUT"
- name: Comment build time on PR
uses: thollander/actions-comment-pull-request@v3
with:
message: ${{ steps.comment_body.outputs.summary }}
comment-tag: benchmark-report
env:
GITHUB_TOKEN: ${{ secrets.BENCHMARK_TOKEN }}
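The timing and memory figures in this workflow can be reproduced outside CI with the same commands; a sketch assuming GNU time is available at /usr/bin/time:

    mkdir -p ./world/region
    cargo build --release --no-default-features
    /usr/bin/time -v ./target/release/arnis --path="./world" --terrain --generate-map --bbox="48.125768 11.552296 48.148565 11.593838" 2> benchmark_log.txt
    grep "Maximum resident set size" benchmark_log.txt | awk '{print $6}'  # peak RSS in kB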


@@ -1,92 +0,0 @@
name: "[DISABLED] Pre-release Dev Build"
on:
push:
branches:
- main
jobs:
build:
strategy:
matrix:
include:
- os: windows-latest
target: x86_64-pc-windows-msvc
binary_name: arnis.exe
asset_name: arnis-windows-x64.exe
- os: ubuntu-latest
target: x86_64-unknown-linux-gnu
binary_name: arnis
asset_name: arnis-linux-x64
runs-on: ${{ matrix.os }}
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Set up Rust
uses: dtolnay/rust-toolchain@v1
with:
toolchain: stable
targets: ${{ matrix.target }}
- name: Install Linux dependencies
if: matrix.os == 'ubuntu-latest'
run: |
sudo apt update
sudo apt install -y software-properties-common
sudo add-apt-repository universe
echo "deb http://archive.ubuntu.com/ubuntu $(lsb_release -sc)-backports main restricted universe multiverse" | sudo tee -a /etc/apt/sources.list
sudo apt update
sudo apt install -y libgtk-3-dev build-essential pkg-config libglib2.0-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev
echo "PKG_CONFIG_PATH=/usr/lib/x86_64-linux-gnu/pkgconfig" >> $GITHUB_ENV
- name: Install dependencies
run: cargo fetch --locked
- name: Build
run: cargo build --frozen --release
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: ${{ matrix.os }}-build
path: target/release/${{ matrix.binary_name }}
prerelease:
needs: build
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v3
- name: Download Windows build artifact
uses: actions/download-artifact@v3
with:
name: windows-latest-build
path: ./builds/windows
- name: Download Linux build artifact
uses: actions/download-artifact@v3
with:
name: ubuntu-latest-build
path: ./builds/linux
- name: Make Linux binary executable
run: chmod +x ./builds/linux/arnis
- name: Create Pre-release on GitHub
uses: ncipollo/release-action@v1
with:
tag: "dev-build-${{ github.run_number }}"
name: "Experimental Development Build #${{ github.run_number }}"
body: "Automated pre-release built from the main branch for testing purposes. This build may contain experimental features. For the latest official version, please download the latest stable release."
draft: false
prerelease: true
makeLatest: false
files: |
builds/windows/arnis.exe
builds/linux/arnis
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}


@@ -1,160 +0,0 @@
name: Build and Release Arnis
on:
release:
types: [created]
jobs:
build:
strategy:
matrix:
include:
- os: windows-latest
target: x86_64-pc-windows-msvc
binary_name: arnis.exe
asset_name: arnis-windows.exe
- os: ubuntu-latest
target: x86_64-unknown-linux-gnu
binary_name: arnis
asset_name: arnis-linux
- os: macos-13 # Intel runner for x86_64 builds
target: x86_64-apple-darwin
binary_name: arnis
asset_name: arnis-mac-intel
- os: macos-latest # ARM64 runner for ARM64 builds
target: aarch64-apple-darwin
binary_name: arnis
asset_name: arnis-mac-arm64
runs-on: ${{ matrix.os }}
steps:
- name: Checkout code
uses: actions/checkout@v6
- name: Set up Rust
uses: dtolnay/rust-toolchain@v1
with:
toolchain: stable
targets: ${{ matrix.target }}
- name: Install Linux dependencies
if: matrix.os == 'ubuntu-latest'
run: |
sudo apt update
sudo apt install -y software-properties-common
sudo add-apt-repository universe
echo "deb http://archive.ubuntu.com/ubuntu $(lsb_release -sc)-backports main restricted universe multiverse" | sudo tee -a /etc/apt/sources.list
sudo apt update
sudo apt install -y libgtk-3-dev build-essential pkg-config libglib2.0-dev libsoup-3.0-dev libwebkit2gtk-4.1-dev
echo "PKG_CONFIG_PATH=/usr/lib/x86_64-linux-gnu/pkgconfig" >> $GITHUB_ENV
- name: Install dependencies
run: cargo fetch
- name: Build
run: cargo build --release --target ${{ matrix.target }}
- name: Rename binary for release
run: mv target/${{ matrix.target }}/release/${{ matrix.binary_name }} target/release/${{ matrix.asset_name }}
- name: Install Windows SDK
if: matrix.os == 'windows-latest'
run: |
choco install windows-sdk-10.1 -y
$env:Path += ";C:\Program Files (x86)\Windows Kits\10\bin\x64"
shell: powershell
- name: Locate signtool.exe
if: matrix.os == 'windows-latest'
id: locate_signtool
run: |
$env:ProgramFilesX86 = [System.Environment]::GetFolderPath('ProgramFilesX86')
$signtoolPath = Get-ChildItem -Path "$env:ProgramFilesX86\Windows Kits\10\bin" -Recurse -Filter signtool.exe | Where-Object { $_.FullName -match '\\x64\\' } | Select-Object -First 1 -ExpandProperty FullName
if (-not $signtoolPath) { throw "signtool.exe not found." }
echo "signtool=$signtoolPath" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
shell: powershell
- name: Self-sign Windows executable
if: matrix.os == 'windows-latest'
run: |
$password = ConvertTo-SecureString -String $env:WINDOWS_CERT_PASSWORD -Force -AsPlainText
$cert = New-SelfSignedCertificate -Type CodeSigningCert -Subject 'CN=Arnis' -CertStoreLocation Cert:\CurrentUser\My -NotAfter (Get-Date).AddYears(5)
Export-PfxCertificate -Cert $cert -FilePath arnis-cert.pfx -Password $password
& $env:signtool sign /f arnis-cert.pfx /p $env:WINDOWS_CERT_PASSWORD /t http://timestamp.digicert.com target/release/${{ matrix.asset_name }}
env:
WINDOWS_CERT_PASSWORD: ${{ secrets.WINDOWS_CERT_PASSWORD }}
shell: powershell
- name: Upload artifact
uses: actions/upload-artifact@v4
with:
name: ${{ matrix.os }}-${{ matrix.target }}-build
path: target/release/${{ matrix.asset_name }}
create-universal-macos:
needs: build
runs-on: macos-latest
steps:
- name: Download macOS Intel build
uses: actions/download-artifact@v5
with:
name: macos-13-x86_64-apple-darwin-build
path: ./intel
- name: Download macOS ARM64 build
uses: actions/download-artifact@v5
with:
name: macos-latest-aarch64-apple-darwin-build
path: ./arm64
- name: Create universal binary
run: |
lipo -create -output arnis-mac-universal ./intel/arnis-mac-intel ./arm64/arnis-mac-arm64
chmod +x arnis-mac-universal
- name: Upload universal binary
uses: actions/upload-artifact@v4
with:
name: macos-universal-build
path: arnis-mac-universal
release:
needs: [build, create-universal-macos]
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v6
- name: Download Windows build artifact
uses: actions/download-artifact@v5
with:
name: windows-latest-x86_64-pc-windows-msvc-build
path: ./builds/windows
- name: Download Linux build artifact
uses: actions/download-artifact@v5
with:
name: ubuntu-latest-x86_64-unknown-linux-gnu-build
path: ./builds/linux
- name: Download macOS universal build artifact
uses: actions/download-artifact@v5
with:
name: macos-universal-build
path: ./builds/macos
- name: Make Linux and macOS binaries executable
run: |
chmod +x ./builds/linux/arnis-linux
chmod +x ./builds/macos/arnis-mac-universal
- name: Create GitHub Release
uses: softprops/action-gh-release@v2
with:
files: |
builds/windows/arnis-windows.exe
builds/linux/arnis-linux
builds/macos/arnis-mac-universal
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }}


@@ -1,100 +0,0 @@
name: Test macOS Build
on:
push:
branches: [ main ]
paths:
- '.github/workflows/release.yml'
- 'src/**'
- 'Cargo.toml'
pull_request:
branches: [ main ]
workflow_dispatch: # Allow manual triggering
jobs:
test-macos-builds:
strategy:
matrix:
include:
- target: x86_64-apple-darwin
asset_name: arnis-mac-intel
- target: aarch64-apple-darwin
asset_name: arnis-mac-arm64
runs-on: macos-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Set up Rust
uses: dtolnay/rust-toolchain@v1
with:
toolchain: stable
targets: ${{ matrix.target }}
- name: Install dependencies
run: cargo fetch
- name: Build for ${{ matrix.target }}
run: cargo build --release --target ${{ matrix.target }}
- name: Rename binary
run: mv target/${{ matrix.target }}/release/arnis target/${{ matrix.target }}/release/${{ matrix.asset_name }}
- name: Check binary architecture
run: |
file target/${{ matrix.target }}/release/${{ matrix.asset_name }}
lipo -info target/${{ matrix.target }}/release/${{ matrix.asset_name }}
- name: Test binary execution (basic check)
run: |
chmod +x target/${{ matrix.target }}/release/${{ matrix.asset_name }}
# Test that it at least shows help/version (don't run full generation)
target/${{ matrix.target }}/release/${{ matrix.asset_name }} --help || echo "Help command completed"
- name: Upload test artifact
uses: actions/upload-artifact@v4
with:
name: test-${{ matrix.target }}-build
path: target/${{ matrix.target }}/release/${{ matrix.asset_name }}
test-universal-binary:
needs: test-macos-builds
runs-on: macos-latest
steps:
- name: Download Intel build
uses: actions/download-artifact@v4
with:
name: test-x86_64-apple-darwin-build
path: ./intel
- name: Download ARM64 build
uses: actions/download-artifact@v4
with:
name: test-aarch64-apple-darwin-build
path: ./arm64
- name: Create and test universal binary
run: |
lipo -create -output arnis-mac-universal ./intel/arnis-mac-intel ./arm64/arnis-mac-arm64
chmod +x arnis-mac-universal
# Verify it's actually universal
echo "=== Universal Binary Info ==="
file arnis-mac-universal
lipo -info arnis-mac-universal
# Test execution
echo "=== Testing Universal Binary ==="
./arnis-mac-universal --help || echo "Universal binary help command completed"
# Check file size (should be roughly the sum of both architectures)
echo "=== File Sizes ==="
ls -lah ./intel/arnis-mac-intel ./arm64/arnis-mac-arm64 arnis-mac-universal
- name: Upload universal binary
uses: actions/upload-artifact@v4
with:
name: test-universal-build
path: arnis-mac-universal

200
.gitignore vendored

@@ -1,51 +1,167 @@
/wiki
# Original file is from https://github.com/github/gitignore/blob/main/Python.gitignore
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# Environment files
.env
/.direnv
# C extensions
*.so
# Build artifacts
/target
**/*.rs.bk
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# IDE/editor files
.idea/
/.vscode/
/*.swp
*.iml
*.suo
*.ntvs*
*.njsproj
*.sln
*.ps1
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# System files
.DS_Store
Thumbs.db
*.tmp
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Generated files
/export.json
/parsed_osm_data.txt
/elevation_debug.png
/terrain-tile-cache
/arnis-tile-cache
/gen/
/build/
*.rmeta
*.dSYM
# Flask stuff:
instance/
.webassets-cache
# Tauri specific
flake/
gen/
# Scrapy stuff:
.scrapy
# Miscellaneous
*.bak
*.old
*.orig
# Sphinx documentation
docs/_build/
# Ignore all in flake directory except specific files
/flake/*
!/flake/flake.nix
!/flake/flake.lock
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
# Debug files
arnis-debug-raw_data.json
arnis-debug-processed_data.json
arnis-debug-map.png
image.img

7097
Cargo.lock generated

File diff suppressed because it is too large


@@ -1,54 +0,0 @@
[package]
name = "arnis"
version = "2.3.1"
edition = "2021"
description = "Arnis - Generate real life cities in Minecraft"
homepage = "https://github.com/louis-e/arnis"
repository = "https://github.com/louis-e/arnis"
license = "Apache-2.0"
readme = "README.md"
[profile.release]
lto = "thin"
overflow-checks = true
[features]
default = ["gui"]
gui = ["tauri", "tauri-plugin-log", "tauri-plugin-shell", "tokio", "rfd", "dirs", "tauri-build"]
[build-dependencies]
tauri-build = {version = "2", optional = true}
[dependencies]
base64 = "0.22.1"
clap = { version = "4.5", features = ["derive", "env"] }
colored = "3.0.0"
dirs = {version = "6.0.0", optional = true }
fastanvil = "0.32.0"
fastnbt = "2.6.0"
flate2 = "1.1"
fnv = "1.0.7"
fs2 = "0.4"
geo = "0.31.0"
image = "0.25"
indicatif = "0.17.11"
itertools = "0.14.0"
log = "0.4.27"
once_cell = "1.21.3"
rand = "0.8.5"
rayon = "1.10.0"
reqwest = { version = "0.12.15", features = ["blocking", "json"] }
rfd = { version = "0.15.4", optional = true }
semver = "1.0.27"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
tauri = { version = "2", optional = true }
tauri-plugin-log = { version = "2.6.0", optional = true }
tauri-plugin-shell = { version = "2", optional = true }
tokio = { version = "1.48.0", features = ["full"], optional = true }
[target.'cfg(windows)'.dependencies]
windows = { version = "0.61.1", features = ["Win32_System_Console"] }
[dev-dependencies]
tempfile = "3.23.0"
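Since the gui feature is on by default in the manifest above, the Tauri GUI is compiled unless default features are disabled; the PR Benchmark workflow earlier in this diff builds the CLI-only variant this way (sketch):

    # default build, includes the Tauri GUI ("gui" feature)
    cargo build --release
    # CLI-only build, as used by the benchmark workflow
    cargo build --release --no-default-features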

6
Dockerfile Normal file

@@ -0,0 +1,6 @@
FROM python:3.9
RUN apt-get update && apt-get -y install git ffmpeg libsm6 libxext6
RUN cd /home && mkdir /home/region && git clone https://github.com/louis-e/arnis.git
WORKDIR /home/arnis
RUN pip install -r requirements.txt
ENTRYPOINT ["python", "arnis.py"]
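The Dockerfile above targets the older Python entry point; a hypothetical local usage, in which the image tag, the host directory, and the arguments passed to arnis.py are assumptions not shown in this diff:

    docker build -t arnis .
    docker run --rm -v "$(pwd)/region:/home/region" arnis <arnis.py arguments>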

811
LICENSE

@@ -1,201 +1,674 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
1. Definitions.
Preamble
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
The precise terms and conditions for copying, distribution and
modification follow.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
TERMS AND CONDITIONS
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
0. Definitions.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
"This License" refers to version 3 of the GNU General Public License.
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
A "covered work" means either the unmodified Program or a work based
on the Program.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
1. Source Code.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
END OF TERMS AND CONDITIONS
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
APPENDIX: How to apply the Apache License to your work.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
Copyright [yyyy] [name of copyright owner]
The Corresponding Source for a work in source code form is that
same work.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
2. Basic Permissions.
http://www.apache.org/licenses/LICENSE-2.0
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.

11
Makefile Normal file
View File

@@ -0,0 +1,11 @@
# Check whether the black formatter would make any rewrites; produces a non-zero exit code if it would
style-check:
black src/ --check
# Reformat all Python files under src/ to the black code style
style:
black src/
# Check that the Python source files are free of lint errors and follow style conventions
lint:
flake8 src/
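# Usage note (not part of the original Makefile): these targets are run with
# `make style-check`, `make style`, or `make lint`.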

176
README.md
View File

@@ -1,90 +1,136 @@
<img src="assets/git/banner.png" width="100%" alt="Banner">
<p align="center">
<img width="456" height="125" src="https://github.com/louis-e/arnis/blob/python-legacy/gitassets/logo.png?raw=true">
</p>
# Arnis [![CI Build Status](https://github.com/louis-e/arnis/actions/workflows/ci-build.yml/badge.svg)](https://github.com/louis-e/arnis/actions) [<img alt="GitHub Release" src="https://img.shields.io/github/v/release/louis-e/arnis" />](https://github.com/louis-e/arnis/releases) [<img alt="GitHub Downloads (all assets, all releases)" src="https://img.shields.io/github/downloads/louis-e/arnis/total" />](https://github.com/louis-e/arnis/releases) [![Download here](https://img.shields.io/badge/Download-here-green)](https://github.com/louis-e/arnis/releases) [![Discord](https://img.shields.io/discord/1326192999738249267?label=Discord&color=%237289da)](https://discord.gg/mA2g69Fhxq)
# Arnis - Python Legacy Branch
This open source project generates any chosen location from the real world in Minecraft, allowing users to explore and build in a virtual world that mirrors the real one.<br><br>
This branch stores the old Python legacy version (v1.x), which has now been replaced by the [Rust port](https://github.com/louis-e/arnis).
<br><br>
⇒ [Where did you find this project?](https://6okq6xh5jt4.typeform.com/to/rSjZaB41)
<br>
## :desktop_computer: Example
![Minecraft World Demo](https://github.com/louis-e/arnis/blob/python-legacy/gitassets/demo-comp.png?raw=true)
![Minecraft World Demo Before After](https://github.com/louis-e/arnis/blob/python-legacy/gitassets/before-after.gif?raw=true)
Arnis creates complex and accurate Minecraft Java Edition worlds that reflect real-world geography, topography, and architecture.
## :floppy_disk: How it works
![CLI Generation](https://github.com/louis-e/arnis/blob/python-legacy/gitassets/cli-generation.gif?raw=true)
This free and open source project is designed to handle large-scale geographic data from the real world and generate detailed Minecraft worlds. The algorithm processes geospatial data from OpenStreetMap as well as elevation data to create an accurate Minecraft representation of terrain and architecture.
Generate your hometown, big cities, and natural landscapes with ease!
The raw data obtained from the API *[(see FAQ)](#question-faq)* includes each element (buildings, walls, fountains, farmlands, etc.) with its respective corner coordinates (nodes) and descriptive tags. When you run the script, the following steps are performed automatically to generate a Minecraft world (a simplified sketch of the coordinate handling follows the list):
![Minecraft Preview](assets/git/preview.jpg)
<i>This GitHub page and [arnismc.com](https://arnismc.com) are the only official project websites. Do not download Arnis from any other website.</i>
#### Processing Pipeline
1. Scraping Data from API: The script fetches geospatial data from the Overpass API.
2. Determine Coordinate Extremes: Identifies the highest and lowest latitude and longitude values from the dataset.
3. Standardize Coordinate Lengths: Ensures all coordinates are of uniform length and removes the decimal separator.
4. Normalize Data: Adjusts all coordinates to start from zero by subtracting the previously determined lowest values.
5. Parse Data: Transforms the raw data into a standardized structure.
6. Sort elements by priority: Enables a layering system with prioritized elements.
7. Optimize Array Size: Focuses on the outermost buildings to reduce array size.
8. Generate Minecraft World: Iterates through the array to create the Minecraft world, including 3D structures like forests, houses, and rivers.
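A simplified sketch of steps 2-4 (hypothetical helper, not the actual functions in src/) showing how the raw latitude/longitude pairs become array-friendly integers:
```
def normalize_coordinates(nodes):
    # nodes: list of (lat, lng) floats from the Overpass response
    # Step 3: drop the decimal separator by scaling to fixed-precision integers
    scaled = [(int(lat * 1e7), int(lng * 1e7)) for lat, lng in nodes]
    # Step 2: determine the coordinate extremes
    min_lat = min(lat for lat, _ in scaled)
    min_lng = min(lng for _, lng in scaled)
    # Step 4: shift everything so the smallest values become zero
    return [(lat - min_lat, lng - min_lng) for lat, lng in scaled]
```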
## :keyboard: Usage
<img width="60%" src="assets/git/gui.png"><br>
Download the [latest release](https://github.com/louis-e/arnis/releases/) or [compile](#trophy-open-source) the project on your own.
```python3 arnis.py --bbox="min_lng,min_lat,max_lng,max_lat" --path="C:/Users/username/AppData/Roaming/.minecraft/saves/worldname"```
Choose your area on the map using the rectangle tool and select your Minecraft world - then simply click on <i>Start Generation</i>!
Additionally, you can customize various generation settings, such as world scale, spawn point, or building interior generation.
Use http://bboxfinder.com/ to draw a rectangle around the area you want. Then copy the four box coordinates as shown below and use them as the input for the --bbox parameter.
![How to find area](https://github.com/louis-e/arnis/blob/python-legacy/gitassets/bbox-finder.png?raw=true)
The world will always be generated starting from the coordinates 0 0 0.
## 📚 Documentation
Manually generate a new Minecraft world (preferably a flat world) before running the script.
The --bbox parameter specifies the bounding box coordinates in the format: min_lng,min_lat,max_lng,max_lat.
Use --path to specify the location of the Minecraft world.
With the --timeout parameter you can set the timeout for the floodfill algorithm in seconds (default: 2).
You can optionally use the parameter --debug to see processed value outputs during runtime.
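A malformed bounding box is a common source of errors, so a small sanity check along these lines can catch swapped values early (hypothetical helper, not part of the script):
```
def parse_bbox(arg):
    min_lng, min_lat, max_lng, max_lat = (float(v) for v in arg.split(","))
    if not (min_lng < max_lng and min_lat < max_lat):
        raise ValueError("Expected format: min_lng,min_lat,max_lng,max_lat")
    return min_lng, min_lat, max_lng, max_lat
```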
<img src="assets/git/documentation.png" width="100%" alt="Banner">
#### Experimental City/State/Country Input Method
The following method is experimental and may not perform as expected. Support is limited.
Full documentation is available in the [GitHub Wiki](https://github.com/louis-e/arnis/wiki/), covering topics such as technical explanations, FAQs, contribution guidelines and roadmaps.
```python3 arnis.py --city="CityName" --state="StateName" --country="CountryName" --path="C:/Users/username/AppData/Roaming/.minecraft/saves/worldname"```
## :trophy: Open Source
#### Key objectives of this project
- **Modularity**: Ensure that all components (e.g., data fetching, processing, and world generation) are cleanly separated into distinct modules for better maintainability and scalability.
- **Performance Optimization**: We aim to keep the world generation process fast and efficient.
- **Comprehensive Documentation**: Detailed in-code documentation for a clear structure and logic.
- **User-Friendly Experience**: Focus on making the project easy to use for end users.
- **Cross-Platform Support**: We want this project to run smoothly on Windows, macOS, and Linux.
### Docker image (experimental)
If you want to run this project containerized, you can use the Dockerfile provided in this repository. It will automatically fetch the latest source code from the repository. After running the container, you have to manually copy the generated region files from the container to the host machine in order to use them. When running the Docker image, set the ```--path``` parameter to ```/home```.
```
docker build -t arnis .
docker run arnis --city="Arnis" --state="Schleswig Holstein" --country="Deutschland" --path="/home"
docker cp CONTAINER_ID:/home/region DESTINATION_PATH
```
#### How to contribute
This project is open source and welcomes contributions from everyone! Whether you're interested in fixing bugs, improving performance, adding new features, or enhancing documentation, your input is valuable. Simply fork the repository, make your changes, and submit a pull request. Please respect the above-mentioned key objectives. Contributions of all levels are appreciated, and your efforts help improve this tool for everyone.
## :cd: Requirements
- Python 3
- ```pip install -r requirements.txt```
Command line Build: ```cargo run --no-default-features -- --terrain --path="C:/YOUR_PATH/.minecraft/saves/worldname" --bbox="min_lat,min_lng,max_lat,max_lng"```<br>
GUI Build: ```cargo run```<br>
- To conform with the style guide, please format any changes and check the code quality:
```black .```
```flake8 src/```
After your pull request is merged, I will take care of regularly creating update releases that include your changes.
- Functionality should be covered by automated tests.
```python -m pytest```
## :question: FAQ
- *Why do some cities take so long to generate?*<br>
The script's performance can be significantly affected by large elements, such as extensive farmlands. The floodfill algorithm can slow down considerably when dealing with such elements, leading to long processing times. Thus there is also a timeout restriction in place, which can be adjusted by the user *[(see Usage)](#keyboard-usage)*. It is recommended to start with smaller areas to get a sense of the script's performance. Ongoing improvements to the algorithm focus especially on efficiency.
- *Where does the data come from?*<br>
The geographic data is sourced from OpenStreetMap (OSM)[^1], a free, collaborative mapping project that serves as an open-source alternative to commercial mapping services. The data is accessed via the Overpass API, which queries OSM's database.
- *How does the Minecraft world generation work?*<br>
The script uses the [anvil-parser](https://github.com/matcool/anvil-parser) library to interact with Minecraft's world format. This library allows the script to create and manipulate Minecraft region files, enabling the generation of real-world locations within the game. A minimal sketch of a typical anvil call follows this list.
- *Where does the name come from?*<br>
The project is named after Arnis[^2], the smallest city in Germany. The city's small size made it an ideal test case for developing and debugging the script efficiently.
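For illustration, this is roughly the kind of anvil-parser call the generator builds on (a minimal sketch, not the actual generation code):
```
import anvil

region = anvil.EmptyRegion(0, 0)
stone = anvil.Block("minecraft", "stone")
region.set_block(stone, 0, 0, 0)  # x, y, z
region.save("r.0.0.mca")
```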
## :memo: ToDo
Feel free to choose an item from the To-Do or Known Bugs list, or bring your own idea to the table. Contributions from everyone are welcome and encouraged to help improve this project.
- [ ] Look into https://github.com/Intergalactyc/anvil-new which seems to have better support
- [ ] Tool for mapping real coordinates to Minecraft coordinates
- [ ] Fix railway orientation
- [ ] Fix gaps in bridges
- [ ] Full refactoring of variable and function names, establish naming conventions
- [ ] Detection of wrong bbox input
- [ ] Evaluate and implement multiprocessing in the ground layer initialization and floodfill algorithm
- [ ] Implement elevation
- [ ] Add interior to buildings
- [ ] Save fountain structure in the code (similar to the tree structure)
- [ ] Add windows to buildings
- [ ] Generate a few big cities using high performance hardware and make them available to download
- [ ] Optimize region file size
- [ ] Street markings
- [ ] Add better code comments
- [x] Alternative reliable city input options
- [x] Split up processData array into several smaller ones for big cities
- [x] Find alternative for CV2 package
- [x] Floodfill timeout parameter
- [x] Automated Tests
- [x] PEP8
- [x] Use f-Strings in print statements
- [x] Add Dockerfile
- [x] Added path check
- [x] Improve RAM usage
## :bug: Known Bugs
- [ ] Docker image size
- [x] 'Noer' bug (occurs when several different digits appear in coordinates before the decimal point)
- [x] 'Nortorf' bug (occurs when there are several elements with a big distance to each other, e.g. the API returns several different cities with the exact same name)
- [x] Saving step memory overflow
- [x] Non-uniform OSM naming standards (dashes) (see name tags at https://overpass-turbo.eu/s/1mMj)
## :trophy: Hall of Fame Contributors
This section is dedicated to recognizing and celebrating the outstanding contributions of individuals who have significantly enhanced this project. Your work and dedication are deeply appreciated!
#### Contributors:
- callumfrance
- amir16yp
- EdwardWeir13579
- daniil2327
## :star: Star History
<a href="https://star-history.com/#louis-e/arnis&Date">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=louis-e/arnis&Date&theme=dark" />
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=louis-e/arnis&Date&type=Date" />
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=louis-e/arnis&Date&type=Date" />
</picture>
</a>
## :newspaper: Academic & Press Recognition
<img src="assets/git/recognition.png" width="100%" alt="Banner">
Arnis has been recognized in various academic and press publications after gaining a lot of attention in December 2024.
[Floodcraft: Game-based Interactive Learning Environment using Minecraft for Flood Mitigation and Preparedness for K-12 Education](https://www.researchgate.net/publication/384644535_Floodcraft_Game-based_Interactive_Learning_Environment_using_Minecraft_for_Flood_Mitigation_and_Preparedness_for_K-12_Education)
[Hackaday: Bringing OpenStreetMap Data into Minecraft](https://hackaday.com/2024/12/30/bringing-openstreetmap-data-into-minecraft/)
[TomsHardware: Minecraft Tool Lets You Create Scale Replicas of Real-World Locations](https://www.tomshardware.com/video-games/pc-gaming/minecraft-tool-lets-you-create-scale-replicas-of-real-world-locations-arnis-uses-geospatial-data-from-openstreetmap-to-generate-minecraft-maps)
[XDA Developers: Hometown Minecraft Map: Arnis](https://www.xda-developers.com/hometown-minecraft-map-arnis/)
[![Star History Chart](https://api.star-history.com/svg?repos=louis-e/arnis&type=Date)](https://star-history.com/#louis-e/arnis&Date)
## :copyright: License Information
Copyright (c) 2022-2025 Louis Erbkamm (louis-e)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.[^3]
Download Arnis only from the official source https://arnismc.com or https://github.com/louis-e/arnis/. Every other website providing a download and claiming to be affiliated with the project is unofficial and may be malicious.
The logo was made by @nxfx21.
This project is licensed under the GNU General Public License v3.0 (GPL-3.0).[^3]
Copyright (c) 2022-2024 louis-e
[^1]: https://en.wikipedia.org/wiki/OpenStreetMap
[^2]: https://en.wikipedia.org/wiki/Arnis,_Germany
[^3]: https://github.com/louis-e/arnis/blob/main/LICENSE
[^3]:
This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
For the full license text, see the LICENSE file.

10
arnis.py Normal file
View File

@@ -0,0 +1,10 @@
#!/usr/bin/env python
# Copyright 2022 by louis-e, https://github.com/louis-e/.
# MIT License
# Please see the LICENSE file that should have been included as part of this package.
from src.main import run
if __name__ == "__main__":
run()

Binary image assets removed (previews not shown).
View File

@@ -1,4 +0,0 @@
fn main() {
#[cfg(feature = "gui")]
tauri_build::build()
}

View File

@@ -1,10 +0,0 @@
{
"$schema": "../gen/schemas/desktop-schema.json",
"identifier": "default",
"description": "Capability for the main window",
"windows": ["main"],
"permissions": [
"core:default",
"shell:allow-open"
]
}

60
flake.lock generated
View File

@@ -1,60 +0,0 @@
{
"nodes": {
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1755615617,
"narHash": "sha256-HMwfAJBdrr8wXAkbGhtcby1zGFvs+StOp19xNsbqdOg=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "20075955deac2583bb12f07151c2df830ef346b4",
"type": "github"
},
"original": {
"id": "nixpkgs",
"ref": "nixos-unstable",
"type": "indirect"
}
},
"root": {
"inputs": {
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

View File

@@ -1,36 +0,0 @@
{
inputs = {
flake-utils.url = "github:numtide/flake-utils";
nixpkgs.url = "nixpkgs/nixos-unstable";
};
outputs =
{
flake-utils,
nixpkgs,
...
}:
flake-utils.lib.eachDefaultSystem (
system:
let
pkgs = nixpkgs.legacyPackages.${system};
stdenv = if pkgs.stdenv.isLinux then pkgs.stdenvAdapters.useMoldLinker pkgs.stdenv else pkgs.stdenv;
in
{
devShell = pkgs.mkShell.override { inherit stdenv; } {
buildInputs = with pkgs; [
openssl.dev
pkg-config
wayland
glib
gdk-pixbuf
pango
gtk3
libsoup_3.dev
webkitgtk_4_1.dev
];
};
}
);
}

Binary image asset changed (preview not shown).
BIN
gitassets/before-after.gif Normal file
View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 MiB

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 79 KiB

BIN
gitassets/demo-comp.png Normal file
View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 MiB

BIN
gitassets/logo.png Normal file
View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 19 KiB

BIN
gitassets/screenshot-1.png Normal file
View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.9 MiB

BIN
gitassets/screenshot-2.png Normal file
View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.5 MiB

BIN
gitassets/screenshot-3.png Normal file
View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.3 MiB

BIN
gitassets/screenshot-4.png Normal file
View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.2 MiB

34
pyinst-compile.spec Normal file
View File

@@ -0,0 +1,34 @@
# -*- mode: python ; coding: utf-8 -*-
import os
import site
# Locate the site-packages directory
site_packages_path = next(p for p in site.getsitepackages() if 'site-packages' in p)
# Path to the legacy_blocks.json file
legacy_blocks_path = os.path.join(site_packages_path, 'anvil', 'legacy_blocks.json')
block_cipher = None
a = Analysis(['arnis.py'],
pathex=['.'],
binaries=[],
datas=[(legacy_blocks_path, 'anvil')],
hiddenimports=[],
hookspath=[],
runtime_hooks=[],
excludes=[],
cipher=block_cipher,
noarchive=False)
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
exe = EXE(pyz,
a.scripts,
a.binaries,
a.zipfiles,
a.datas,
name='arnis',
debug=False,
strip=False,
upx=True,
runtime_tmpdir=None,
console=True )
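# Note (not part of the original spec): this spec would typically be built with
# `pyinstaller pyinst-compile.spec`, which should produce a single-file console
# executable named `arnis`.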

10
requirements.txt Normal file
View File

@@ -0,0 +1,10 @@
anvil-new==1.0.1
matplotlib==3.9.0
numpy==1.26.4
pytest==8.2.1
python-polylabel==0.6
requests==2.32.2
argparse==1.4.0
black==24.4.2
flake8==7.0.0
tqdm==4.66.5

0
src/__init__.py Normal file
View File

View File

@@ -1,145 +0,0 @@
use crate::coordinate_system::geographic::LLBBox;
use clap::Parser;
use std::path::PathBuf;
use std::time::Duration;
/// Command-line arguments parser
#[derive(Parser, Debug)]
#[command(author, version, about)]
pub struct Args {
/// Bounding box of the area (min_lat,min_lng,max_lat,max_lng) (required)
#[arg(long, allow_hyphen_values = true, value_parser = LLBBox::from_str)]
pub bbox: LLBBox,
/// JSON file containing OSM data (optional)
#[arg(long, group = "location")]
pub file: Option<String>,
/// JSON file to save OSM data to (optional)
#[arg(long, group = "location")]
pub save_json_file: Option<String>,
/// Path to the Minecraft world (required)
#[arg(long, value_parser = validate_minecraft_world_path)]
pub path: PathBuf,
/// Downloader method (requests/curl/wget) (optional)
#[arg(long, default_value = "requests")]
pub downloader: String,
/// World scale to use, in blocks per meter
#[arg(long, default_value_t = 1.0)]
pub scale: f64,
/// Ground level to use in the Minecraft world
#[arg(long, default_value_t = -62)]
pub ground_level: i32,
/// Enable terrain (optional)
#[arg(long)]
pub terrain: bool,
/// Enable interior generation (optional)
#[arg(long, default_value_t = true, action = clap::ArgAction::SetTrue)]
pub interior: bool,
/// Enable roof generation (optional)
#[arg(long, default_value_t = true, action = clap::ArgAction::SetTrue)]
pub roof: bool,
/// Enable filling ground (optional)
#[arg(long, default_value_t = false, action = clap::ArgAction::SetFalse)]
pub fillground: bool,
/// Enable debug mode (optional)
#[arg(long)]
pub debug: bool,
/// Set floodfill timeout (seconds) (optional)
#[arg(long, value_parser = parse_duration)]
pub timeout: Option<Duration>,
/// Generate a top-down map preview image after world generation (optional)
#[arg(long)]
pub generate_map: bool,
/// Spawn point coordinates (lat, lng)
#[arg(skip)]
pub spawn_point: Option<(f64, f64)>,
}
fn validate_minecraft_world_path(path: &str) -> Result<PathBuf, String> {
let mc_world_path = PathBuf::from(path);
if !mc_world_path.exists() {
return Err(format!("Path does not exist: {path}"));
}
if !mc_world_path.is_dir() {
return Err(format!("Path is not a directory: {path}"));
}
let region = mc_world_path.join("region");
if !region.is_dir() {
return Err(format!("No Minecraft world found at {region:?}"));
}
Ok(mc_world_path)
}
fn parse_duration(arg: &str) -> Result<std::time::Duration, std::num::ParseIntError> {
let seconds = arg.parse()?;
Ok(std::time::Duration::from_secs(seconds))
}
#[cfg(test)]
mod tests {
use super::*;
fn minecraft_tmpdir() -> tempfile::TempDir {
let tmpdir = tempfile::tempdir().unwrap();
// create a `region` directory in the tempdir
let region_path = tmpdir.path().join("region");
std::fs::create_dir(&region_path).unwrap();
tmpdir
}
#[test]
fn test_flags() {
let tmpdir = minecraft_tmpdir();
let tmp_path = tmpdir.path().to_str().unwrap();
// Test that terrain/debug are SetTrue
let cmd = [
"arnis",
"--path",
tmp_path,
"--bbox",
"1,2,3,4",
"--terrain",
"--debug",
];
let args = Args::parse_from(cmd.iter());
assert!(args.debug);
assert!(args.terrain);
let cmd = ["arnis", "--path", tmp_path, "--bbox", "1,2,3,4"];
let args = Args::parse_from(cmd.iter());
assert!(!args.debug);
assert!(!args.terrain);
}
#[test]
fn test_required_options() {
let tmpdir = minecraft_tmpdir();
let tmp_path = tmpdir.path().to_str().unwrap();
let cmd = ["arnis"];
assert!(Args::try_parse_from(cmd.iter()).is_err());
let cmd = ["arnis", "--path", tmp_path, "--bbox", "1,2,3,4"];
assert!(Args::try_parse_from(cmd.iter()).is_ok());
let cmd = ["arnis", "--path", tmp_path, "--file", ""];
assert!(Args::try_parse_from(cmd.iter()).is_err());
// The --gui flag isn't used here, ugh. TODO clean up main.rs and its argparse usage.
// let cmd = ["arnis", "--gui"];
// assert!(Args::try_parse_from(cmd.iter()).is_ok());
}
}

95
src/blockDefinitions.py Normal file
View File

@@ -0,0 +1,95 @@
import anvil
air = anvil.Block("minecraft", "air")
birch_leaves = anvil.Block("minecraft", "birch_leaves")
birch_log = anvil.Block("minecraft", "birch_log")
black_concrete = anvil.Block("minecraft", "black_concrete")
blue_flower = anvil.Block("minecraft", "blue_orchid")
brick = anvil.Block("minecraft", "bricks")
carrots = anvil.Block("minecraft", "carrots", {"age": 7})
cauldron = anvil.Block("minecraft", "cauldron")
cobblestone = anvil.Block("minecraft", "cobblestone")
cobblestone_wall = anvil.Block("minecraft", "cobblestone_wall")
dark_oak_door_lower = anvil.Block("minecraft", "dark_oak_door", {"half": "lower"})
dark_oak_door_upper = anvil.Block("minecraft", "dark_oak_door", {"half": "upper"})
dirt = anvil.Block("minecraft", "dirt")
farmland = anvil.Block("minecraft", "farmland")
glass = anvil.Block("minecraft", "glass_pane")
glowstone = anvil.Block("minecraft", "glowstone")
grass = anvil.Block("minecraft", "grass")
grass_block = anvil.Block("minecraft", "grass_block")
gravel = anvil.Block("minecraft", "gravel")
gray_concrete = anvil.Block("minecraft", "gray_concrete")
green_stained_hardened_clay = anvil.Block("minecraft", "green_terracotta")
hay_bale = anvil.Block("minecraft", "hay_block")
iron_block = anvil.Block("minecraft", "iron_block")
light_gray_concrete = anvil.Block("minecraft", "light_gray_concrete")
oak_fence = anvil.Block("minecraft", "oak_fence")
oak_leaves = anvil.Block("minecraft", "oak_leaves")
oak_log = anvil.Block("minecraft", "oak_log")
oak_planks = anvil.Block("minecraft", "oak_planks")
podzol = anvil.Block("minecraft", "podzol")
potatoes = anvil.Block("minecraft", "potatoes", {"age": 7})
rail = anvil.Block("minecraft", "rail")
red_flower = anvil.Block("minecraft", "poppy")
sand = anvil.Block("minecraft", "sand")
scaffolding = anvil.Block("minecraft", "scaffolding")
sponge = anvil.Block("minecraft", "sponge")
spruce_log = anvil.Block("minecraft", "spruce_log")
stone = anvil.Block("minecraft", "stone")
stone_block_slab = anvil.Block("minecraft", "stone_slab")
stone_brick_slab = anvil.Block("minecraft", "stone_brick_slab")
water = anvil.Block("minecraft", "water")
wheat = anvil.Block("minecraft", "wheat", {"age": 7})
white_concrete = anvil.Block("minecraft", "white_concrete")
white_flower = anvil.Block("minecraft", "azure_bluet")
white_stained_glass = anvil.Block("minecraft", "white_stained_glass")
yellow_flower = anvil.Block("minecraft", "dandelion")
# Variations for building corners
building_corner_variations = [
anvil.Block("minecraft", "stone_bricks"),
anvil.Block("minecraft", "cobblestone"),
anvil.Block("minecraft", "bricks"),
anvil.Block("minecraft", "mossy_cobblestone"),
anvil.Block("minecraft", "sandstone"),
anvil.Block("minecraft", "red_nether_bricks"),
anvil.Block("minecraft", "blackstone"),
anvil.Block("minecraft", "smooth_quartz"),
anvil.Block("minecraft", "chiseled_stone_bricks"),
anvil.Block("minecraft", "polished_basalt"),
anvil.Block("minecraft", "cut_sandstone"),
anvil.Block("minecraft", "polished_blackstone_bricks"),
]
# Variations for building walls
building_wall_variations = [
anvil.Block("minecraft", "white_terracotta"),
anvil.Block("minecraft", "gray_terracotta"),
anvil.Block("minecraft", "bricks"),
anvil.Block("minecraft", "smooth_sandstone"),
anvil.Block("minecraft", "red_terracotta"),
anvil.Block("minecraft", "polished_diorite"),
anvil.Block("minecraft", "smooth_stone"),
anvil.Block("minecraft", "polished_andesite"),
anvil.Block("minecraft", "warped_planks"),
anvil.Block("minecraft", "end_stone_bricks"),
anvil.Block("minecraft", "smooth_red_sandstone"),
anvil.Block("minecraft", "nether_bricks"),
]
# Variations for building floors
building_floor_variations = [
anvil.Block("minecraft", "oak_planks"),
anvil.Block("minecraft", "spruce_planks"),
anvil.Block("minecraft", "dark_oak_planks"),
anvil.Block("minecraft", "stone_bricks"),
anvil.Block("minecraft", "polished_granite"),
anvil.Block("minecraft", "polished_diorite"),
anvil.Block("minecraft", "acacia_planks"),
anvil.Block("minecraft", "jungle_planks"),
anvil.Block("minecraft", "warped_planks"),
anvil.Block("minecraft", "purpur_block"),
anvil.Block("minecraft", "smooth_red_sandstone"),
anvil.Block("minecraft", "polished_blackstone"),
]
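# Illustrative sketch (hypothetical helper, not part of the original module):
# one way a generator could pick a consistent material palette for a building.
import random

def pick_building_palette(rng=random):
    return (
        rng.choice(building_corner_variations),
        rng.choice(building_wall_variations),
        rng.choice(building_floor_variations),
    )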

View File

File diff suppressed because it is too large.

26
src/bresenham.py Normal file
View File

@@ -0,0 +1,26 @@
def bresenham(x1, y1, x2, y2):
"""Bresenham Line Algorithm Credit: encukou/bresenham@Github"""
dx = x2 - x1
dy = y2 - y1
xsign = 1 if dx > 0 else -1
ysign = 1 if dy > 0 else -1
dx = abs(dx)
dy = abs(dy)
if dx > dy:
xx, xy, yx, yy = xsign, 0, 0, ysign
else:
dx, dy = dy, dx
xx, xy, yx, yy = 0, ysign, xsign, 0
D = 2 * dy - dx
y = 0
for x in range(dx + 1):
yield x1 + x * xx + y * yx, y1 + x * xy + y * yy
if D >= 0:
y += 1
D -= 2 * dx
D += 2 * dy
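# Example usage (illustrative, not part of the original module):
#   >>> list(bresenham(0, 0, 5, 2))
#   [(0, 0), (1, 0), (2, 1), (3, 1), (4, 2), (5, 2)]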

View File

@@ -1,91 +0,0 @@
/// Generates the coordinates for a line between two points using the Bresenham algorithm.
/// The result is a vector of 3D coordinates (x, y, z).
pub fn bresenham_line(
x1: i32,
y1: i32,
z1: i32,
x2: i32,
y2: i32,
z2: i32,
) -> Vec<(i32, i32, i32)> {
// Calculate max possible points needed
let dx = if x2 > x1 { x2 - x1 } else { x1 - x2 };
let dy = if y2 > y1 { y2 - y1 } else { y1 - y2 };
let dz = if z2 > z1 { z2 - z1 } else { z1 - z2 };
// Pre-allocate vector with exact size needed
let capacity = dx.max(dy).max(dz) + 1;
let mut points = Vec::with_capacity(capacity as usize);
points.reserve_exact(capacity as usize);
let xs = if x1 < x2 { 1 } else { -1 };
let ys = if y1 < y2 { 1 } else { -1 };
let zs = if z1 < z2 { 1 } else { -1 };
let mut x = x1;
let mut y = y1;
let mut z = z1;
// Determine dominant axis once, outside the loop
if dx >= dy && dx >= dz {
let mut p1 = 2 * dy - dx;
let mut p2 = 2 * dz - dx;
while x != x2 {
points.push((x, y, z));
if p1 >= 0 {
y += ys;
p1 -= 2 * dx;
}
if p2 >= 0 {
z += zs;
p2 -= 2 * dx;
}
p1 += 2 * dy;
p2 += 2 * dz;
x += xs;
}
} else if dy >= dx && dy >= dz {
let mut p1 = 2 * dx - dy;
let mut p2 = 2 * dz - dy;
while y != y2 {
points.push((x, y, z));
if p1 >= 0 {
x += xs;
p1 -= 2 * dy;
}
if p2 >= 0 {
z += zs;
p2 -= 2 * dy;
}
p1 += 2 * dx;
p2 += 2 * dz;
y += ys;
}
} else {
let mut p1 = 2 * dy - dz;
let mut p2 = 2 * dx - dz;
while z != z2 {
points.push((x, y, z));
if p1 >= 0 {
y += ys;
p1 -= 2 * dz;
}
if p2 >= 0 {
x += xs;
p2 -= 2 * dz;
}
p1 += 2 * dy;
p2 += 2 * dx;
z += zs;
}
}
points.push((x2, y2, z2));
points
}

View File

@@ -1,706 +0,0 @@
// Sutherland-Hodgman polygon clipping and related geometry utilities.
//
// Provides bbox clipping for polygons, polylines, and water rings with
// proper corner insertion for closed shapes.
use crate::coordinate_system::cartesian::{XZBBox, XZPoint};
use crate::osm_parser::ProcessedNode;
use std::collections::HashMap;
/// Clips a way to the bounding box using Sutherland-Hodgman for polygons or
/// simple line clipping for polylines. Preserves endpoint IDs for ring assembly.
pub fn clip_way_to_bbox(nodes: &[ProcessedNode], xzbbox: &XZBBox) -> Vec<ProcessedNode> {
if nodes.is_empty() {
return Vec::new();
}
let is_closed = is_closed_polygon(nodes);
if !is_closed {
return clip_polyline_to_bbox(nodes, xzbbox);
}
// If all nodes are inside the bbox, return unchanged
let has_nodes_outside = nodes
.iter()
.any(|node| !xzbbox.contains(&XZPoint::new(node.x, node.z)));
if !has_nodes_outside {
return nodes.to_vec();
}
let min_x = xzbbox.min_x() as f64;
let min_z = xzbbox.min_z() as f64;
let max_x = xzbbox.max_x() as f64;
let max_z = xzbbox.max_z() as f64;
let mut polygon: Vec<(f64, f64)> = nodes.iter().map(|n| (n.x as f64, n.z as f64)).collect();
polygon = clip_polygon_sutherland_hodgman(polygon, min_x, min_z, max_x, max_z);
if polygon.len() < 3 {
return Vec::new();
}
// Final clamping for floating-point errors
for p in &mut polygon {
p.0 = p.0.clamp(min_x, max_x);
p.1 = p.1.clamp(min_z, max_z);
}
let polygon = remove_consecutive_duplicates(polygon);
if polygon.len() < 3 {
return Vec::new();
}
let polygon = insert_bbox_corners(polygon, min_x, min_z, max_x, max_z);
let polygon = remove_consecutive_duplicates(polygon);
if polygon.len() < 3 {
return Vec::new();
}
let way_id = nodes.first().map(|n| n.id).unwrap_or(0);
assign_node_ids_preserving_endpoints(nodes, polygon, way_id)
}
/// Clips a water polygon ring to bbox using Sutherland-Hodgman (post-ring-merge).
pub fn clip_water_ring_to_bbox(
ring: &[ProcessedNode],
xzbbox: &XZBBox,
) -> Option<Vec<ProcessedNode>> {
if ring.is_empty() {
return None;
}
let min_x = xzbbox.min_x() as f64;
let min_z = xzbbox.min_z() as f64;
let max_x = xzbbox.max_x() as f64;
let max_z = xzbbox.max_z() as f64;
// Check if entire ring is inside bbox
let all_inside = ring.iter().all(|n| {
n.x as f64 >= min_x && n.x as f64 <= max_x && n.z as f64 >= min_z && n.z as f64 <= max_z
});
if all_inside {
return Some(ring.to_vec());
}
// Check if entire ring is outside bbox
if is_ring_outside_bbox(ring, min_x, min_z, max_x, max_z) {
return None;
}
// Convert to f64 coordinates and ensure closed
let mut polygon: Vec<(f64, f64)> = ring.iter().map(|n| (n.x as f64, n.z as f64)).collect();
if !polygon.is_empty() && polygon.first() != polygon.last() {
polygon.push(polygon[0]);
}
// Clip with full-range clamping (water uses simpler approach)
polygon = clip_polygon_sutherland_hodgman_simple(polygon, min_x, min_z, max_x, max_z);
if polygon.len() < 3 {
return None;
}
// Verify all points are within bbox
let all_points_inside = polygon
.iter()
.all(|&(x, z)| x >= min_x && x <= max_x && z >= min_z && z <= max_z);
if !all_points_inside {
eprintln!("ERROR: clip_water_ring_to_bbox produced points outside bbox!");
return None;
}
let polygon = insert_bbox_corners(polygon, min_x, min_z, max_x, max_z);
if polygon.len() < 3 {
return None;
}
// Convert back to ProcessedNode with synthetic IDs
let mut result: Vec<ProcessedNode> = polygon
.iter()
.enumerate()
.map(|(i, &(x, z))| ProcessedNode {
id: 1_000_000_000 + i as u64,
tags: HashMap::new(),
x: x.clamp(min_x, max_x).round() as i32,
z: z.clamp(min_z, max_z).round() as i32,
})
.collect();
// Close the loop by matching first and last ID
if !result.is_empty() {
let first_id = result[0].id;
result.last_mut().unwrap().id = first_id;
}
Some(result)
}
// ============================================================================
// Internal helpers
// ============================================================================
/// Checks if a way forms a closed polygon.
fn is_closed_polygon(nodes: &[ProcessedNode]) -> bool {
if nodes.len() < 3 {
return false;
}
let first = nodes.first().unwrap();
let last = nodes.last().unwrap();
first.id == last.id || (first.x == last.x && first.z == last.z)
}
/// Checks if an entire ring is outside the bbox.
fn is_ring_outside_bbox(
ring: &[ProcessedNode],
min_x: f64,
min_z: f64,
max_x: f64,
max_z: f64,
) -> bool {
let all_left = ring.iter().all(|n| (n.x as f64) < min_x);
let all_right = ring.iter().all(|n| (n.x as f64) > max_x);
let all_top = ring.iter().all(|n| (n.z as f64) < min_z);
let all_bottom = ring.iter().all(|n| (n.z as f64) > max_z);
all_left || all_right || all_top || all_bottom
}
/// Clips a polyline (open path) to the bounding box.
fn clip_polyline_to_bbox(nodes: &[ProcessedNode], xzbbox: &XZBBox) -> Vec<ProcessedNode> {
if nodes.is_empty() {
return Vec::new();
}
let min_x = xzbbox.min_x() as f64;
let min_z = xzbbox.min_z() as f64;
let max_x = xzbbox.max_x() as f64;
let max_z = xzbbox.max_z() as f64;
let mut result = Vec::new();
for i in 0..nodes.len() {
let current = &nodes[i];
let current_point = (current.x as f64, current.z as f64);
let current_inside = point_in_bbox(current_point, min_x, min_z, max_x, max_z);
if current_inside {
result.push(current.clone());
}
if i + 1 < nodes.len() {
let next = &nodes[i + 1];
let next_point = (next.x as f64, next.z as f64);
let next_inside = point_in_bbox(next_point, min_x, min_z, max_x, max_z);
if current_inside != next_inside {
// One endpoint inside, one outside, find single intersection
let intersections =
find_bbox_intersections(current_point, next_point, min_x, min_z, max_x, max_z);
for intersection in intersections {
let synthetic_id = nodes[0]
.id
.wrapping_mul(10000000)
.wrapping_add(result.len() as u64);
result.push(ProcessedNode {
id: synthetic_id,
x: intersection.0.round() as i32,
z: intersection.1.round() as i32,
tags: HashMap::new(),
});
}
} else if !current_inside && !next_inside {
// Both endpoints outside, segment might still cross through bbox
let mut intersections =
find_bbox_intersections(current_point, next_point, min_x, min_z, max_x, max_z);
if intersections.len() >= 2 {
// Sort intersections by distance from current point
intersections.sort_by(|a, b| {
let dist_a =
(a.0 - current_point.0).powi(2) + (a.1 - current_point.1).powi(2);
let dist_b =
(b.0 - current_point.0).powi(2) + (b.1 - current_point.1).powi(2);
dist_a
.partial_cmp(&dist_b)
.unwrap_or(std::cmp::Ordering::Equal)
});
for intersection in intersections {
let synthetic_id = nodes[0]
.id
.wrapping_mul(10000000)
.wrapping_add(result.len() as u64);
result.push(ProcessedNode {
id: synthetic_id,
x: intersection.0.round() as i32,
z: intersection.1.round() as i32,
tags: HashMap::new(),
});
}
}
}
}
}
// Preserve endpoint IDs where possible
if result.len() >= 2 {
let tolerance = 50.0;
if let Some(first_orig) = nodes.first() {
if matches_endpoint(
(result[0].x as f64, result[0].z as f64),
first_orig,
tolerance,
) {
result[0].id = first_orig.id;
}
}
if let Some(last_orig) = nodes.last() {
let last_idx = result.len() - 1;
if matches_endpoint(
(result[last_idx].x as f64, result[last_idx].z as f64),
last_orig,
tolerance,
) {
result[last_idx].id = last_orig.id;
}
}
}
result
}
/// Sutherland-Hodgman polygon clipping with edge-specific clamping.
fn clip_polygon_sutherland_hodgman(
mut polygon: Vec<(f64, f64)>,
min_x: f64,
min_z: f64,
max_x: f64,
max_z: f64,
) -> Vec<(f64, f64)> {
// Edges: bottom, right, top, left (counter-clockwise traversal)
let bbox_edges = [
(min_x, min_z, max_x, min_z, 0), // Bottom: clamp z
(max_x, min_z, max_x, max_z, 1), // Right: clamp x
(max_x, max_z, min_x, max_z, 2), // Top: clamp z
(min_x, max_z, min_x, min_z, 3), // Left: clamp x
];
for (edge_x1, edge_z1, edge_x2, edge_z2, edge_idx) in bbox_edges {
if polygon.is_empty() {
break;
}
let mut clipped = Vec::new();
let is_closed = !polygon.is_empty() && polygon.first() == polygon.last();
let edge_count = if is_closed {
polygon.len().saturating_sub(1)
} else {
polygon.len()
};
for i in 0..edge_count {
let current = polygon[i];
let next = polygon.get(i + 1).copied().unwrap_or(polygon[0]);
let current_inside = point_inside_edge(current, edge_x1, edge_z1, edge_x2, edge_z2);
let next_inside = point_inside_edge(next, edge_x1, edge_z1, edge_x2, edge_z2);
if next_inside {
if !current_inside {
if let Some(mut intersection) = line_edge_intersection(
current.0, current.1, next.0, next.1, edge_x1, edge_z1, edge_x2, edge_z2,
) {
// Clamp to current edge only
match edge_idx {
0 => intersection.1 = min_z,
1 => intersection.0 = max_x,
2 => intersection.1 = max_z,
3 => intersection.0 = min_x,
_ => {}
}
clipped.push(intersection);
}
}
clipped.push(next);
} else if current_inside {
if let Some(mut intersection) = line_edge_intersection(
current.0, current.1, next.0, next.1, edge_x1, edge_z1, edge_x2, edge_z2,
) {
match edge_idx {
0 => intersection.1 = min_z,
1 => intersection.0 = max_x,
2 => intersection.1 = max_z,
3 => intersection.0 = min_x,
_ => {}
}
clipped.push(intersection);
}
}
}
polygon = clipped;
}
polygon
}
/// Sutherland-Hodgman with full bbox clamping (simpler, for water rings).
fn clip_polygon_sutherland_hodgman_simple(
mut polygon: Vec<(f64, f64)>,
min_x: f64,
min_z: f64,
max_x: f64,
max_z: f64,
) -> Vec<(f64, f64)> {
let bbox_edges = [
(min_x, min_z, max_x, min_z),
(max_x, min_z, max_x, max_z),
(max_x, max_z, min_x, max_z),
(min_x, max_z, min_x, min_z),
];
for (edge_x1, edge_z1, edge_x2, edge_z2) in bbox_edges {
if polygon.is_empty() {
break;
}
let mut clipped = Vec::new();
for i in 0..(polygon.len().saturating_sub(1)) {
let current = polygon[i];
let next = polygon[i + 1];
let current_inside = point_inside_edge(current, edge_x1, edge_z1, edge_x2, edge_z2);
let next_inside = point_inside_edge(next, edge_x1, edge_z1, edge_x2, edge_z2);
if next_inside {
if !current_inside {
if let Some(mut intersection) = line_edge_intersection(
current.0, current.1, next.0, next.1, edge_x1, edge_z1, edge_x2, edge_z2,
) {
intersection.0 = intersection.0.clamp(min_x, max_x);
intersection.1 = intersection.1.clamp(min_z, max_z);
clipped.push(intersection);
}
}
clipped.push(next);
} else if current_inside {
if let Some(mut intersection) = line_edge_intersection(
current.0, current.1, next.0, next.1, edge_x1, edge_z1, edge_x2, edge_z2,
) {
intersection.0 = intersection.0.clamp(min_x, max_x);
intersection.1 = intersection.1.clamp(min_z, max_z);
clipped.push(intersection);
}
}
}
polygon = clipped;
}
polygon
}
/// Checks if point is inside bbox.
fn point_in_bbox(point: (f64, f64), min_x: f64, min_z: f64, max_x: f64, max_z: f64) -> bool {
point.0 >= min_x && point.0 <= max_x && point.1 >= min_z && point.1 <= max_z
}
/// Checks if point is on the "inside" side of an edge (cross product test).
fn point_inside_edge(
point: (f64, f64),
edge_x1: f64,
edge_z1: f64,
edge_x2: f64,
edge_z2: f64,
) -> bool {
let edge_dx = edge_x2 - edge_x1;
let edge_dz = edge_z2 - edge_z1;
let point_dx = point.0 - edge_x1;
let point_dz = point.1 - edge_z1;
(edge_dx * point_dz - edge_dz * point_dx) >= 0.0
}
/// Finds intersection between a line segment and an edge.
#[allow(clippy::too_many_arguments)]
fn line_edge_intersection(
line_x1: f64,
line_z1: f64,
line_x2: f64,
line_z2: f64,
edge_x1: f64,
edge_z1: f64,
edge_x2: f64,
edge_z2: f64,
) -> Option<(f64, f64)> {
let line_dx = line_x2 - line_x1;
let line_dz = line_z2 - line_z1;
let edge_dx = edge_x2 - edge_x1;
let edge_dz = edge_z2 - edge_z1;
let denom = line_dx * edge_dz - line_dz * edge_dx;
if denom.abs() < 1e-10 {
return None;
}
let dx = edge_x1 - line_x1;
let dz = edge_z1 - line_z1;
let t = (dx * edge_dz - dz * edge_dx) / denom;
if (0.0..=1.0).contains(&t) {
Some((line_x1 + t * line_dx, line_z1 + t * line_dz))
} else {
None
}
}
/// Finds intersections between a line segment and bbox edges.
fn find_bbox_intersections(
start: (f64, f64),
end: (f64, f64),
min_x: f64,
min_z: f64,
max_x: f64,
max_z: f64,
) -> Vec<(f64, f64)> {
let mut intersections = Vec::new();
let bbox_edges = [
(min_x, min_z, max_x, min_z),
(max_x, min_z, max_x, max_z),
(max_x, max_z, min_x, max_z),
(min_x, max_z, min_x, min_z),
];
for (edge_x1, edge_z1, edge_x2, edge_z2) in bbox_edges {
if let Some(intersection) = line_edge_intersection(
start.0, start.1, end.0, end.1, edge_x1, edge_z1, edge_x2, edge_z2,
) {
let on_edge = point_in_bbox(intersection, min_x, min_z, max_x, max_z)
&& ((intersection.0 == min_x || intersection.0 == max_x)
|| (intersection.1 == min_z || intersection.1 == max_z));
if on_edge {
intersections.push(intersection);
}
}
}
intersections
}
/// Returns which bbox edge a point lies on: 0=bottom, 1=right, 2=top, 3=left, -1=interior.
fn get_bbox_edge(point: (f64, f64), min_x: f64, min_z: f64, max_x: f64, max_z: f64) -> i32 {
let eps = 0.5;
let on_left = (point.0 - min_x).abs() < eps;
let on_right = (point.0 - max_x).abs() < eps;
let on_bottom = (point.1 - min_z).abs() < eps;
let on_top = (point.1 - max_z).abs() < eps;
// Handle corners (assign to edge in counter-clockwise order)
if on_bottom && on_left {
return 3;
}
if on_bottom && on_right {
return 0;
}
if on_top && on_right {
return 1;
}
if on_top && on_left {
return 2;
}
if on_bottom {
return 0;
}
if on_right {
return 1;
}
if on_top {
return 2;
}
if on_left {
return 3;
}
-1
}
/// Returns corners to insert when traversing from edge1 to edge2 via shorter path.
fn get_corners_between_edges(
edge1: i32,
edge2: i32,
min_x: f64,
min_z: f64,
max_x: f64,
max_z: f64,
) -> Vec<(f64, f64)> {
if edge1 == edge2 || edge1 < 0 || edge2 < 0 {
return Vec::new();
}
let corners = [
(max_x, min_z), // 0: bottom-right
(max_x, max_z), // 1: top-right
(min_x, max_z), // 2: top-left
(min_x, min_z), // 3: bottom-left
];
let ccw_dist = ((edge2 - edge1 + 4) % 4) as usize;
let cw_dist = ((edge1 - edge2 + 4) % 4) as usize;
// Opposite edges: don't insert corners
if ccw_dist == 2 && cw_dist == 2 {
return Vec::new();
}
let mut result = Vec::new();
if ccw_dist <= cw_dist {
let mut current = edge1;
for _ in 0..ccw_dist {
result.push(corners[current as usize]);
current = (current + 1) % 4;
}
} else {
let mut current = edge1;
for _ in 0..cw_dist {
current = (current + 4 - 1) % 4;
result.push(corners[current as usize]);
}
}
result
}
/// Inserts bbox corners where polygon transitions between different bbox edges.
fn insert_bbox_corners(
polygon: Vec<(f64, f64)>,
min_x: f64,
min_z: f64,
max_x: f64,
max_z: f64,
) -> Vec<(f64, f64)> {
if polygon.len() < 3 {
return polygon;
}
let mut result = Vec::with_capacity(polygon.len() + 4);
for i in 0..polygon.len() {
let current = polygon[i];
let next = polygon[(i + 1) % polygon.len()];
result.push(current);
let edge1 = get_bbox_edge(current, min_x, min_z, max_x, max_z);
let edge2 = get_bbox_edge(next, min_x, min_z, max_x, max_z);
if edge1 >= 0 && edge2 >= 0 && edge1 != edge2 {
for corner in get_corners_between_edges(edge1, edge2, min_x, min_z, max_x, max_z) {
result.push(corner);
}
}
}
result
}
/// Removes consecutive duplicate points (within epsilon tolerance).
fn remove_consecutive_duplicates(polygon: Vec<(f64, f64)>) -> Vec<(f64, f64)> {
if polygon.is_empty() {
return polygon;
}
let eps = 0.1;
let mut result: Vec<(f64, f64)> = Vec::with_capacity(polygon.len());
for p in &polygon {
if let Some(last) = result.last() {
if (p.0 - last.0).abs() < eps && (p.1 - last.1).abs() < eps {
continue;
}
}
result.push(*p);
}
// Check first/last duplicates for closed polygons
if result.len() > 1 {
let first = result.first().unwrap();
let last = result.last().unwrap();
if (first.0 - last.0).abs() < eps && (first.1 - last.1).abs() < eps {
result.pop();
}
}
result
}
/// Checks if a clipped coordinate matches an original endpoint.
fn matches_endpoint(coord: (f64, f64), endpoint: &ProcessedNode, tolerance: f64) -> bool {
let dx = (coord.0 - endpoint.x as f64).abs();
let dz = (coord.1 - endpoint.z as f64).abs();
dx * dx + dz * dz < tolerance * tolerance
}
/// Assigns node IDs to clipped coordinates, preserving original endpoint IDs.
fn assign_node_ids_preserving_endpoints(
original_nodes: &[ProcessedNode],
clipped_coords: Vec<(f64, f64)>,
way_id: u64,
) -> Vec<ProcessedNode> {
if clipped_coords.is_empty() {
return Vec::new();
}
let original_first = original_nodes.first();
let original_last = original_nodes.last();
let tolerance = 50.0;
let last_index = clipped_coords.len() - 1;
clipped_coords
.into_iter()
.enumerate()
.map(|(i, coord)| {
let is_first = i == 0;
let is_last = i == last_index;
if is_first || is_last {
if let Some(first) = original_first {
if matches_endpoint(coord, first, tolerance) {
return ProcessedNode {
id: first.id,
x: coord.0.round() as i32,
z: coord.1.round() as i32,
tags: HashMap::new(),
};
}
}
if let Some(last) = original_last {
if matches_endpoint(coord, last, tolerance) {
return ProcessedNode {
id: last.id,
x: coord.0.round() as i32,
z: coord.1.round() as i32,
tags: HashMap::new(),
};
}
}
}
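// Fallback: derive a synthetic node ID from the way ID and the point index for clipped
// points that don't correspond to an original endpoint.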
ProcessedNode {
id: way_id.wrapping_mul(10000000).wrapping_add(i as u64),
x: coord.0.round() as i32,
z: coord.1.round() as i32,
tags: HashMap::new(),
}
})
.collect()
}

View File

@@ -1,88 +0,0 @@
pub type RGBTuple = (u8, u8, u8);
pub fn color_text_to_rgb_tuple(text: &str) -> Option<RGBTuple> {
if let Some(rgb) = full_hex_color_to_rgb_tuple(text) {
return Some(rgb);
}
if let Some(rgb) = short_hex_color_to_rgb_tuple(text) {
return Some(rgb);
}
if let Some(rgb) = color_name_to_rgb_tuple(text) {
return Some(rgb);
}
None
}
fn full_hex_color_to_rgb_tuple(text: &str) -> Option<RGBTuple> {
if text.len() != 7
|| !text.starts_with("#")
|| !text.chars().skip(1).all(|c: char| c.is_ascii_hexdigit())
{
return None;
}
let r: u8 = u8::from_str_radix(&text[1..3], 16).unwrap();
let g: u8 = u8::from_str_radix(&text[3..5], 16).unwrap();
let b: u8 = u8::from_str_radix(&text[5..7], 16).unwrap();
Some((r, g, b))
}
fn short_hex_color_to_rgb_tuple(text: &str) -> Option<RGBTuple> {
if text.len() != 4
|| !text.starts_with("#")
|| !text.chars().skip(1).all(|c: char| c.is_ascii_hexdigit())
{
return None;
}
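// Expand each hex nibble to a full byte by repeating it (e.g. "#f80" -> 0xff, 0x88, 0x00)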
let r: u8 = u8::from_str_radix(&text[1..2], 16).unwrap();
let r: u8 = r | (r << 4);
let g: u8 = u8::from_str_radix(&text[2..3], 16).unwrap();
let g: u8 = g | (g << 4);
let b: u8 = u8::from_str_radix(&text[3..4], 16).unwrap();
let b: u8 = b | (b << 4);
Some((r, g, b))
}
// https://wiki.openstreetmap.org/wiki/Key:colour
// https://wiki.openstreetmap.org/wiki/Key:roof:colour
fn color_name_to_rgb_tuple(text: &str) -> Option<RGBTuple> {
Some(match text {
"aqua" | "cyan" => (0, 255, 255),
"beige" => (187, 173, 142),
"black" => (0, 0, 0),
"blue" => (0, 0, 255),
"brown" => (128, 64, 0),
// darkgrey
"fuchsia" | "magenta" => (255, 0, 255),
"gray" | "grey" => (128, 128, 128),
"green" => (0, 128, 0),
// lightgrey
"lime" => (0, 255, 0),
"maroon" => (128, 0, 0),
"navy" => (0, 0, 128),
"olive" => (128, 128, 0),
"orange" => (255, 128, 0),
"purple" => (128, 0, 128),
"red" => (255, 0, 0),
"silver" => (192, 192, 192),
"teal" => (0, 128, 0),
"white" => (255, 255, 255),
"yellow" => (255, 255, 0),
_ => {
return None;
}
})
}
pub fn rgb_distance(from: &RGBTuple, to: &RGBTuple) -> u32 {
// i32 because .pow(2) returns the same data type as self and 255^2 wouldn't fit
let difference: (i32, i32, i32) = (
from.0 as i32 - to.0 as i32,
from.1 as i32 - to.1 as i32,
from.2 as i32 - to.2 as i32,
);
let distance: i32 = difference.0.pow(2) + difference.1.pow(2) + difference.2.pow(2);
distance as u32
}
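// Illustrative usage sketch for the color helpers above; the expected values follow the
// parsing rules and the CSS-style name table defined in this file.
#[cfg(test)]
mod color_examples {
use super::*;
#[test]
fn parses_hex_and_named_colors() {
assert_eq!(color_text_to_rgb_tuple("#ff8000"), Some((255, 128, 0)));
assert_eq!(color_text_to_rgb_tuple("#f80"), Some((255, 136, 0)));
assert_eq!(color_text_to_rgb_tuple("red"), Some((255, 0, 0)));
assert_eq!(color_text_to_rgb_tuple("not a color"), None);
// Squared Euclidean distance in RGB space: 5^2 + 10^2 + 0^2 = 125
assert_eq!(rgb_distance(&(255, 0, 0), &(250, 10, 0)), 125);
}
}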

View File

@@ -1,7 +0,0 @@
mod xzbbox;
mod xzpoint;
mod xzvector;
pub use xzbbox::XZBBox;
pub use xzpoint::XZPoint;
pub use xzvector::XZVector;

View File

@@ -1,4 +0,0 @@
mod rectangle;
mod xzbbox_enum;
pub use xzbbox_enum::XZBBox;

View File

@@ -1,112 +0,0 @@
use crate::coordinate_system::cartesian::{XZPoint, XZVector};
use std::fmt;
use std::ops::{Add, AddAssign, Sub, SubAssign};
/// An underlying shape of the XZBBox enum.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct XZBBoxRect {
/// The "bottom-left" vertex of the rectangle
min: XZPoint,
/// The "top-right" vertex of the rectangle
max: XZPoint,
}
impl XZBBoxRect {
pub fn new(min: XZPoint, max: XZPoint) -> Result<Self, String> {
let blockx_ge_1 = max.x - min.x >= 0;
let blockz_ge_1 = max.z - min.z >= 0;
if !blockx_ge_1 {
return Err(format!(
"Invalid XZBBox::Rect: max.x should >= min.x, but encountered {} -> {}",
min.x, max.x
));
}
if !blockz_ge_1 {
return Err(format!(
"Invalid XZBBox::Rect: max.z should >= min.z, but encountered {} -> {}",
min.z, max.z
));
}
Ok(Self { min, max })
}
pub fn min(&self) -> XZPoint {
self.min
}
pub fn max(&self) -> XZPoint {
self.max
}
/// Total number of blocks covered in this 2D bbox
pub fn total_blocks(&self) -> u64 {
(self.total_blocks_x() as u64) * (self.total_blocks_z() as u64)
}
/// Total number of blocks covered in x direction
pub fn total_blocks_x(&self) -> u32 {
let nx = self.max.x - self.min.x + 1;
nx as u32
}
/// Total number of blocks covered in z direction
pub fn total_blocks_z(&self) -> u32 {
let nz = self.max.z - self.min.z + 1;
nz as u32
}
/// Check whether an XZPoint is covered
pub fn contains(&self, xzpoint: &XZPoint) -> bool {
xzpoint.x >= self.min.x
&& xzpoint.x <= self.max.x
&& xzpoint.z >= self.min.z
&& xzpoint.z <= self.max.z
}
}
impl fmt::Display for XZBBoxRect {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "Rect({} -> {})", self.min, self.max)
}
}
// below are associated +- operators
impl Add<XZVector> for XZBBoxRect {
type Output = XZBBoxRect;
fn add(self, other: XZVector) -> Self {
Self {
min: self.min + other,
max: self.max + other,
}
}
}
impl AddAssign<XZVector> for XZBBoxRect {
fn add_assign(&mut self, other: XZVector) {
self.min += other;
self.max += other;
}
}
impl Sub<XZVector> for XZBBoxRect {
type Output = XZBBoxRect;
fn sub(self, other: XZVector) -> Self {
Self {
min: self.min - other,
max: self.max - other,
}
}
}
impl SubAssign<XZVector> for XZBBoxRect {
fn sub_assign(&mut self, other: XZVector) {
self.min -= other;
self.max -= other;
}
}
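// Illustrative usage sketch for XZBBoxRect; block counts are inclusive of both the min
// and max coordinates, as defined above.
#[cfg(test)]
mod rect_examples {
use super::*;
use crate::coordinate_system::cartesian::XZPoint;
#[test]
fn counts_blocks_inclusively() {
let rect = XZBBoxRect::new(XZPoint::new(0, 0), XZPoint::new(3, 2)).unwrap();
// 4 blocks along x (0..=3) and 3 blocks along z (0..=2)
assert_eq!(rect.total_blocks_x(), 4);
assert_eq!(rect.total_blocks_z(), 3);
assert_eq!(rect.total_blocks(), 12);
assert!(rect.contains(&XZPoint::new(3, 2)));
assert!(!rect.contains(&XZPoint::new(4, 0)));
}
}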

View File

@@ -1,202 +0,0 @@
use super::rectangle::XZBBoxRect;
use crate::coordinate_system::cartesian::{XZPoint, XZVector};
use std::fmt;
use std::ops::{Add, AddAssign, Sub, SubAssign};
/// Bounding Box in minecraft XZ space with varied shapes.
#[derive(Clone, Debug)]
pub enum XZBBox {
Rect(XZBBoxRect),
}
impl XZBBox {
/// Construct a rectangle-shaped bbox from the x and z lengths of the world, originating at (0, 0)
pub fn rect_from_xz_lengths(length_x: f64, length_z: f64) -> Result<Self, String> {
let lenx_ge_0 = length_x >= 0.0;
let lenz_ge_0 = length_z >= 0.0;
let lenx_overflow = length_x > i32::MAX as f64;
let lenz_overflow = length_z > i32::MAX as f64;
if !lenx_ge_0 {
return Err(format!(
"Invalid XZBBox::Rect from xz lengths: length x should >=0 , but encountered {length_x}"
));
}
if !lenz_ge_0 {
return Err(format!(
"Invalid XZBBox::Rect from xz lengths: length z should >=0 , but encountered {length_x}"
));
}
if lenx_overflow {
return Err(format!(
"Invalid XZBBox::Rect from xz lengths: length x too large for i32: {length_x}"
));
}
if lenz_overflow {
return Err(format!(
"Invalid XZBBox::Rect from xz lengths: length z too large for i32: {length_z}"
));
}
Ok(Self::Rect(XZBBoxRect::new(
XZPoint { x: 0, z: 0 },
XZPoint {
x: length_x as i32,
z: length_z as i32,
},
)?))
}
/// Check whether an XZPoint is covered
pub fn contains(&self, xzpoint: &XZPoint) -> bool {
match self {
Self::Rect(r) => r.contains(xzpoint),
}
}
/// Return the circumscribed rectangle of the current XZBBox shape
pub fn bounding_rect(&self) -> XZBBoxRect {
match self {
Self::Rect(r) => *r,
}
}
/// Return the min x in all covered blocks
pub fn min_x(&self) -> i32 {
self.bounding_rect().min().x
}
/// Return the max x in all covered blocks
pub fn max_x(&self) -> i32 {
self.bounding_rect().max().x
}
/// Return the min z in all covered blocks
pub fn min_z(&self) -> i32 {
self.bounding_rect().min().z
}
/// Return the max z in all covered blocks
pub fn max_z(&self) -> i32 {
self.bounding_rect().max().z
}
}
impl fmt::Display for XZBBox {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Rect(r) => write!(f, "XZBBox::{r}"),
}
}
}
// below are associated +- operators
impl Add<XZVector> for XZBBox {
type Output = XZBBox;
fn add(self, other: XZVector) -> XZBBox {
match self {
Self::Rect(r) => Self::Rect(r + other),
}
}
}
impl AddAssign<XZVector> for XZBBox {
fn add_assign(&mut self, other: XZVector) {
match self {
Self::Rect(r) => *r += other,
}
}
}
impl Sub<XZVector> for XZBBox {
type Output = XZBBox;
fn sub(self, other: XZVector) -> XZBBox {
match self {
Self::Rect(r) => Self::Rect(r - other),
}
}
}
impl SubAssign<XZVector> for XZBBox {
fn sub_assign(&mut self, other: XZVector) {
match self {
Self::Rect(r) => *r -= other,
}
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn test_valid_inputs() {
// 2 * 2
let obj = XZBBox::rect_from_xz_lengths(1.0, 1.0);
assert!(obj.is_ok());
let obj = obj.unwrap();
assert_eq!(obj.bounding_rect().total_blocks_x(), 2);
assert_eq!(obj.bounding_rect().total_blocks_z(), 2);
assert_eq!(obj.bounding_rect().total_blocks(), 4);
assert_eq!(obj.min_x(), 0);
assert_eq!(obj.max_x(), 1);
assert_eq!(obj.min_z(), 0);
assert_eq!(obj.max_z(), 1);
// edge cases
// 1 * 2
let obj = XZBBox::rect_from_xz_lengths(0.0, 1.0);
assert!(obj.is_ok());
let obj = obj.unwrap();
assert_eq!(obj.bounding_rect().total_blocks_x(), 1);
assert_eq!(obj.bounding_rect().total_blocks_z(), 2);
assert_eq!(obj.bounding_rect().total_blocks(), 2);
assert_eq!(obj.min_x(), 0);
assert_eq!(obj.max_x(), 0);
assert_eq!(obj.min_z(), 0);
assert_eq!(obj.max_z(), 1);
// 2 * 1
let obj = XZBBox::rect_from_xz_lengths(1.0, 0.0);
assert!(obj.is_ok());
let obj = obj.unwrap();
assert_eq!(obj.bounding_rect().total_blocks_x(), 2);
assert_eq!(obj.bounding_rect().total_blocks_z(), 1);
assert_eq!(obj.bounding_rect().total_blocks(), 2);
assert_eq!(obj.min_x(), 0);
assert_eq!(obj.max_x(), 1);
assert_eq!(obj.min_z(), 0);
assert_eq!(obj.max_z(), 0);
// normal case
let obj = XZBBox::rect_from_xz_lengths(123.4, 322.5);
assert!(obj.is_ok());
let obj = obj.unwrap();
assert_eq!(obj.bounding_rect().total_blocks_x(), 124);
assert_eq!(obj.bounding_rect().total_blocks_z(), 323);
assert_eq!(obj.bounding_rect().total_blocks(), 124 * 323);
assert_eq!(obj.min_x(), 0);
assert_eq!(obj.max_x(), 123);
assert_eq!(obj.min_z(), 0);
assert_eq!(obj.max_z(), 322);
}
#[test]
#[allow(clippy::excessive_precision)]
fn test_invalid_inputs() {
assert!(XZBBox::rect_from_xz_lengths(-1.0, 1.5).is_err());
assert!(XZBBox::rect_from_xz_lengths(1323.5, -3287238791.395).is_err());
assert!(XZBBox::rect_from_xz_lengths(-239928341323.29389498, -3287238791.938395).is_err());
assert!(XZBBox::rect_from_xz_lengths(-0.1, 1.5).is_err());
assert!(XZBBox::rect_from_xz_lengths(-0.5, 1.5).is_err());
assert!(XZBBox::rect_from_xz_lengths(123948761293874123.2398, -0.5).is_err());
assert!(XZBBox::rect_from_xz_lengths(i32::MAX as f64 + 10.0, -0.5).is_err());
assert!(XZBBox::rect_from_xz_lengths(0.2, i32::MAX as f64 + 10.0).is_err());
}
}

View File

@@ -1,70 +0,0 @@
use super::xzvector::XZVector;
use serde::Deserialize;
use std::fmt;
use std::ops::{Add, AddAssign, Sub, SubAssign};
#[derive(Debug, Deserialize, Copy, Clone, PartialEq)]
pub struct XZPoint {
pub x: i32,
pub z: i32,
}
impl XZPoint {
pub fn new(x: i32, z: i32) -> Self {
Self { x, z }
}
}
impl fmt::Display for XZPoint {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "XZPoint({}, {})", self.x, self.z)
}
}
// below are associated +- operators
impl Add<XZVector> for XZPoint {
type Output = XZPoint;
fn add(self, other: XZVector) -> XZPoint {
XZPoint {
x: self.x + other.dx,
z: self.z + other.dz,
}
}
}
impl AddAssign<XZVector> for XZPoint {
fn add_assign(&mut self, other: XZVector) {
self.x += other.dx;
self.z += other.dz;
}
}
impl Sub for XZPoint {
type Output = XZVector;
fn sub(self, other: XZPoint) -> XZVector {
XZVector {
dx: self.x - other.x,
dz: self.z - other.z,
}
}
}
impl Sub<XZVector> for XZPoint {
type Output = XZPoint;
fn sub(self, other: XZVector) -> XZPoint {
XZPoint {
x: self.x - other.dx,
z: self.z - other.dz,
}
}
}
impl SubAssign<XZVector> for XZPoint {
fn sub_assign(&mut self, other: XZVector) {
self.x -= other.dx;
self.z -= other.dz;
}
}

View File

@@ -1,56 +0,0 @@
use serde::Deserialize;
use std::fmt;
use std::ops::{Add, AddAssign, Sub, SubAssign};
/// Vector between two points in minecraft xz space.
#[derive(Debug, Deserialize, Copy, Clone, PartialEq)]
pub struct XZVector {
/// Increment in x direction
pub dx: i32,
/// Increment in z direction
pub dz: i32,
}
impl fmt::Display for XZVector {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "XZVector({}, {})", self.dx, self.dz)
}
}
// below are associated +- operators
impl Add for XZVector {
type Output = XZVector;
fn add(self, other: XZVector) -> XZVector {
XZVector {
dx: self.dx + other.dx,
dz: self.dz + other.dz,
}
}
}
impl AddAssign for XZVector {
fn add_assign(&mut self, other: XZVector) {
self.dx += other.dx;
self.dz += other.dz;
}
}
impl Sub for XZVector {
type Output = XZVector;
fn sub(self, other: XZVector) -> XZVector {
XZVector {
dx: self.dx - other.dx,
dz: self.dz - other.dz,
}
}
}
impl SubAssign for XZVector {
fn sub_assign(&mut self, other: XZVector) {
self.dx -= other.dx;
self.dz -= other.dz;
}
}

View File

@@ -1,125 +0,0 @@
use super::llpoint::LLPoint;
/// A checked Bounding Box.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct LLBBox {
/// The "bottom-left" vertex of the rectangle
min: LLPoint,
/// The "top-right" vertex of the rectangle
max: LLPoint,
}
impl LLBBox {
pub fn new(min_lat: f64, min_lng: f64, max_lat: f64, max_lng: f64) -> Result<Self, String> {
if min_lng >= max_lng {
return Err(format!(
"Invalid LLBBox: min_lng {min_lng} >= max_lng {max_lng}"
));
}
if min_lat >= max_lat {
return Err(format!(
"Invalid LLBBox: min_lat {min_lat} >= max_lat {max_lat}"
));
}
let min = LLPoint::new(min_lat, min_lng)?;
let max = LLPoint::new(max_lat, max_lng)?;
Ok(Self { min, max })
}
pub fn from_str(s: &str) -> Result<Self, String> {
let parts: Vec<f64> = s
.split([',', ' '])
.filter(|e| !e.is_empty())
.map(|e| {
e.parse::<f64>()
.map_err(|_| format!("Invalid bbox coordinate: {e}"))
})
.collect::<Result<_, _>>()?;
let [min_lat, min_lng, max_lat, max_lng]: [f64; 4] = parts
.try_into()
.map_err(|_| "Expected exactly four bbox coordinates".to_string())?;
// Note: the input is Lat/Lng (comma- or space-separated), not the Lng/Lat order used by
// GDAL and bboxfinder.com. Latitude comes first here.
// DO NOT MODIFY THIS ORDER! If the values look swapped, the CLI/GUI passed them in the
// wrong order, not this parser.
Self::new(min_lat, min_lng, max_lat, max_lng)
}
pub fn min(&self) -> LLPoint {
self.min
}
pub fn max(&self) -> LLPoint {
self.max
}
pub fn contains(&self, llpoint: &LLPoint) -> bool {
llpoint.lat() >= self.min().lat()
&& llpoint.lat() <= self.max().lat()
&& llpoint.lng() >= self.min().lng()
&& llpoint.lng() <= self.max().lng()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_valid_input() {
assert!(LLBBox::new(0., 0., 1., 1.).is_ok());
assert!(LLBBox::new(1., 2., 3., 4.).is_ok());
// Arnis, Germany
assert!(LLBBox::new(54.627053, 9.927928, 54.634902, 9.937563).is_ok());
// Royal Observatory Greenwich, London, UK
assert!(LLBBox::new(51.470000, -0.015000, 51.480000, 0.015000).is_ok());
// The Bund, Shanghai, China
assert!(LLBBox::new(31.23256, 121.46768, 31.24993, 121.50394).is_ok());
// Santa Monica, Los Angeles, US
assert!(LLBBox::new(34.00348, -118.51226, 34.02033, -118.47600).is_ok());
// Sydney Opera House, Sydney, Australia
assert!(LLBBox::new(-33.861035, 151.204137, -33.852597, 151.222268).is_ok());
}
#[test]
fn test_from_str_commas() {
const ARNIS_STR: &str = "9.927928,54.627053,9.937563,54.634902";
let bbox_result = LLBBox::from_str(ARNIS_STR);
assert!(bbox_result.is_ok());
let arnis_correct: LLBBox = LLBBox {
min: LLPoint::new(9.927928, 54.627053).unwrap(),
max: LLPoint::new(9.937563, 54.634902).unwrap(),
};
assert_eq!(bbox_result.unwrap(), arnis_correct);
}
#[test]
fn test_from_str_spaces() {
const ARNIS_SPACE_STR: &str = "9.927928 54.627053 9.937563 54.634902";
let bbox_result = LLBBox::from_str(ARNIS_SPACE_STR);
assert!(bbox_result.is_ok());
let arnis_correct: LLBBox = LLBBox {
min: LLPoint::new(9.927928, 54.627053).unwrap(),
max: LLPoint::new(9.937563, 54.634902).unwrap(),
};
assert_eq!(bbox_result.unwrap(), arnis_correct);
}
#[test]
fn test_out_of_order() {
// Violates values in vals_in_order
assert!(LLBBox::new(0., 0., 0., 0.).is_err());
assert!(LLBBox::new(1., 0., 0., 1.).is_err());
assert!(LLBBox::new(0., 1., 1., 0.).is_err());
}
}

View File

@@ -1,60 +0,0 @@
/// Bounds-checked longitude and latitude.
#[derive(Copy, Clone, Debug, PartialEq)]
pub struct LLPoint {
lat: f64,
lng: f64,
}
impl LLPoint {
pub fn new(lat: f64, lng: f64) -> Result<Self, String> {
let lat_in_range = (-90.0..=90.0).contains(&lat);
let lng_in_range = (-180.0..=180.0).contains(&lng);
if !lat_in_range {
return Err(format!("Latitude {lat} not in range -90.0..=90.0"));
}
if !lng_in_range {
return Err(format!("Longitude {lng} not in range -180.0..=180.0"));
}
Ok(Self { lat, lng })
}
pub fn lat(&self) -> f64 {
self.lat
}
pub fn lng(&self) -> f64 {
self.lng
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_valid_input() {
assert!(LLPoint::new(0., 0.).is_ok());
// latitude extremes
assert!(LLPoint::new(-90.0, 0.).is_ok());
assert!(LLPoint::new(90.0, 0.).is_ok());
// longitude extremes
assert!(LLPoint::new(0., -180.0).is_ok());
assert!(LLPoint::new(0., 180.0).is_ok());
}
#[test]
fn test_out_of_bounds() {
// latitude out-of-bounds
assert!(LLPoint::new(-91., 0.).is_err());
assert!(LLPoint::new(91., 0.).is_err());
// longitude out-of-bounds
assert!(LLPoint::new(0., -181.).is_err());
assert!(LLPoint::new(0., 181.).is_err());
}
}

View File

@@ -1,5 +0,0 @@
mod llbbox;
mod llpoint;
pub use llbbox::LLBBox;
pub use llpoint::LLPoint;

View File

@@ -1,3 +0,0 @@
pub mod cartesian;
pub mod geographic;
pub mod transformation;

View File

@@ -1,185 +0,0 @@
use super::cartesian::{XZBBox, XZPoint};
use super::geographic::{LLBBox, LLPoint};
/// Transform geographic space (within llbbox) to a local tangential cartesian space (within xzbbox)
pub struct CoordTransformer {
len_lat: f64,
len_lng: f64,
scale_factor_x: f64,
scale_factor_z: f64,
min_lat: f64,
min_lng: f64,
}
impl CoordTransformer {
pub fn scale_factor_x(&self) -> f64 {
self.scale_factor_x
}
pub fn scale_factor_z(&self) -> f64 {
self.scale_factor_z
}
pub fn llbbox_to_xzbbox(
llbbox: &LLBBox,
scale: f64,
) -> Result<(CoordTransformer, XZBBox), String> {
let err_header = "Construct LLBBox to XZBBox transformation failed".to_string();
if scale <= 0.0 {
return Err(format!("{}: scale <= 0.0", &err_header));
}
let (scale_factor_z, scale_factor_x) = geo_distance(llbbox.min(), llbbox.max());
let scale_factor_z: f64 = scale_factor_z.floor() * scale;
let scale_factor_x: f64 = scale_factor_x.floor() * scale;
let xzbbox = XZBBox::rect_from_xz_lengths(scale_factor_x, scale_factor_z)
.map_err(|e| format!("{}:\n{}", &err_header, e))?;
Ok((
Self {
len_lat: llbbox.max().lat() - llbbox.min().lat(),
len_lng: llbbox.max().lng() - llbbox.min().lng(),
scale_factor_x,
scale_factor_z,
min_lat: llbbox.min().lat(),
min_lng: llbbox.min().lng(),
},
xzbbox,
))
}
pub fn transform_point(&self, llpoint: LLPoint) -> XZPoint {
// Calculate the relative position within the bounding box
let rel_x: f64 = (llpoint.lng() - self.min_lng) / self.len_lng;
let rel_z: f64 = 1.0 - (llpoint.lat() - self.min_lat) / self.len_lat;
// Apply scaling factors for each dimension and convert to Minecraft coordinates
let x: i32 = (rel_x * self.scale_factor_x) as i32;
let z: i32 = (rel_z * self.scale_factor_z) as i32;
XZPoint::new(x, z)
}
}
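// Rough usage sketch derived from the code above: transform_point maps the south-west
// corner (min lat/lng) to roughly (0, max_z) and the north-east corner (max lat/lng) to
// roughly (max_x, 0), because z grows towards the south.
//
// let llbbox = LLBBox::new(54.627053, 9.927928, 54.634902, 9.937563)?;
// let (transformer, xzbbox) = CoordTransformer::llbbox_to_xzbbox(&llbbox, 1.0)?;
// let sw = transformer.transform_point(llbbox.min()); // -> roughly XZPoint(0, max_z)
// let ne = transformer.transform_point(llbbox.max()); // -> roughly XZPoint(max_x, 0)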
// (lat meters, lon meters)
#[inline]
pub fn geo_distance(a: LLPoint, b: LLPoint) -> (f64, f64) {
let z: f64 = lat_distance(a.lat(), b.lat());
// distance between two lons depends on their latitude. In this case we'll just average them
let x: f64 = lon_distance((a.lat() + b.lat()) / 2.0, a.lng(), b.lng());
(z, x)
}
// Haversine but optimized for a latitude delta of 0
// returns meters
fn lon_distance(lat: f64, lon1: f64, lon2: f64) -> f64 {
const R: f64 = 6_371_000.0;
let d_lon: f64 = (lon2 - lon1).to_radians();
let a: f64 =
lat.to_radians().cos() * lat.to_radians().cos() * (d_lon / 2.0).sin() * (d_lon / 2.0).sin();
let c: f64 = 2.0 * a.sqrt().atan2((1.0 - a).sqrt());
R * c
}
// Haversine but optimized for a longitude delta of 0
// returns meters
fn lat_distance(lat1: f64, lat2: f64) -> f64 {
const R: f64 = 6_371_000.0;
let d_lat: f64 = (lat2 - lat1).to_radians();
let a: f64 = (d_lat / 2.0).sin() * (d_lat / 2.0).sin();
let c: f64 = 2.0 * a.sqrt().atan2((1.0 - a).sqrt());
R * c
}
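// Worked example derived from the formulas above: with R = 6,371,000 m, one degree of
// latitude is lat_distance(0.0, 1.0) ≈ 111,195 m, while one degree of longitude shrinks
// with latitude, e.g. lon_distance(60.0, 0.0, 1.0) ≈ 55,597 m (about cos(60°) = 0.5 of
// the equatorial value).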
// copied legacy code
// Function to convert latitude and longitude to Minecraft coordinates.
#[cfg(test)]
pub fn lat_lon_to_minecraft_coords(
lat: f64,
lon: f64,
bbox: LLBBox,
scale_factor_z: f64,
scale_factor_x: f64,
) -> (i32, i32) {
// Calculate the relative position within the bounding box
let rel_x: f64 = (lon - bbox.min().lng()) / (bbox.max().lng() - bbox.min().lng());
let rel_z: f64 = 1.0 - (lat - bbox.min().lat()) / (bbox.max().lat() - bbox.min().lat());
// Apply scaling factors for each dimension and convert to Minecraft coordinates
let x: i32 = (rel_x * scale_factor_x) as i32;
let z: i32 = (rel_z * scale_factor_z) as i32;
(x, z)
}
#[cfg(test)]
mod test {
use super::*;
use crate::test_utilities::get_llbbox_arnis;
fn test_llxztransform_one_scale_one_factor(
scale: f64,
test_latfactor: f64,
test_lngfactor: f64,
) {
let llbbox = get_llbbox_arnis();
let llpoint = LLPoint::new(
llbbox.min().lat() + (llbbox.max().lat() - llbbox.min().lat()) * test_latfactor,
llbbox.min().lng() + (llbbox.max().lng() - llbbox.min().lng()) * test_lngfactor,
)
.unwrap();
let (transformer, xzbbox_new) = CoordTransformer::llbbox_to_xzbbox(&llbbox, scale).unwrap();
// legacy xzbbox creation
let (scale_factor_z, scale_factor_x) = geo_distance(llbbox.min(), llbbox.max());
let scale_factor_z: f64 = scale_factor_z.floor() * scale;
let scale_factor_x: f64 = scale_factor_x.floor() * scale;
let xzbbox_old = XZBBox::rect_from_xz_lengths(scale_factor_x, scale_factor_z).unwrap();
// legacy coord transform
let (x, z) = lat_lon_to_minecraft_coords(
llpoint.lat(),
llpoint.lng(),
llbbox,
scale_factor_z,
scale_factor_x,
);
// new coord transform
let xzpoint = transformer.transform_point(llpoint);
assert_eq!(x, xzpoint.x);
assert_eq!(z, xzpoint.z);
assert_eq!(xzbbox_new.min_x(), xzbbox_old.min_x());
assert_eq!(xzbbox_new.max_x(), xzbbox_old.max_x());
assert_eq!(xzbbox_new.min_z(), xzbbox_old.min_z());
assert_eq!(xzbbox_new.max_z(), xzbbox_old.max_z());
}
// this ensures that transformer.transform_point == legacy lat_lon_to_minecraft_coords
#[test]
pub fn test_llxztransform() {
test_llxztransform_one_scale_one_factor(1.0, 0.5, 0.5);
test_llxztransform_one_scale_one_factor(3.0, 0.1, 0.2);
test_llxztransform_one_scale_one_factor(10.0, -1.2, 2.0);
test_llxztransform_one_scale_one_factor(0.4, 0.3, -0.2);
test_llxztransform_one_scale_one_factor(0.1, 0.2, 0.7);
}
// this ensures that invalid inputs can be handled correctly
#[test]
pub fn test_invalid_construct() {
let llbbox = get_llbbox_arnis();
let obj = CoordTransformer::llbbox_to_xzbbox(&llbbox, 0.0);
assert!(obj.is_err());
let obj = CoordTransformer::llbbox_to_xzbbox(&llbbox, -1.2);
assert!(obj.is_err());
}
}

View File

@@ -1,319 +0,0 @@
use crate::args::Args;
use crate::block_definitions::{BEDROCK, DIRT, GRASS_BLOCK, STONE};
use crate::coordinate_system::cartesian::XZBBox;
use crate::coordinate_system::geographic::LLBBox;
use crate::element_processing::*;
use crate::ground::Ground;
use crate::map_renderer;
use crate::osm_parser::ProcessedElement;
use crate::progress::{emit_gui_progress_update, emit_map_preview_ready};
#[cfg(feature = "gui")]
use crate::telemetry::{send_log, LogLevel};
use crate::world_editor::WorldEditor;
use colored::Colorize;
use indicatif::{ProgressBar, ProgressStyle};
pub const MIN_Y: i32 = -64;
pub fn generate_world(
elements: Vec<ProcessedElement>,
xzbbox: XZBBox,
llbbox: LLBBox,
ground: Ground,
args: &Args,
) -> Result<(), String> {
let mut editor: WorldEditor = WorldEditor::new(args.path.clone(), &xzbbox, llbbox);
println!("{} Processing data...", "[4/7]".bold());
// Build highway connectivity map once before processing
let highway_connectivity = highways::build_highway_connectivity_map(&elements);
// Set ground reference in the editor to enable elevation-aware block placement
editor.set_ground(&ground);
println!("{} Processing terrain...", "[5/7]".bold());
emit_gui_progress_update(25.0, "Processing terrain...");
// Process data
let elements_count: usize = elements.len();
let process_pb: ProgressBar = ProgressBar::new(elements_count as u64);
process_pb.set_style(ProgressStyle::default_bar()
.template("{spinner:.green} [{elapsed_precise}] [{bar:45.white/black}] {pos}/{len} elements ({eta}) {msg}")
.unwrap()
.progress_chars("█▓░"));
let progress_increment_prcs: f64 = 45.0 / elements_count as f64;
let mut current_progress_prcs: f64 = 25.0;
let mut last_emitted_progress: f64 = current_progress_prcs;
for element in &elements {
process_pb.inc(1);
current_progress_prcs += progress_increment_prcs;
if (current_progress_prcs - last_emitted_progress).abs() > 0.25 {
emit_gui_progress_update(current_progress_prcs, "");
last_emitted_progress = current_progress_prcs;
}
if args.debug {
process_pb.set_message(format!(
"(Element ID: {} / Type: {})",
element.id(),
element.kind()
));
} else {
process_pb.set_message("");
}
match element {
ProcessedElement::Way(way) => {
if way.tags.contains_key("building") || way.tags.contains_key("building:part") {
buildings::generate_buildings(&mut editor, way, args, None);
} else if way.tags.contains_key("highway") {
highways::generate_highways(&mut editor, element, args, &highway_connectivity);
} else if way.tags.contains_key("landuse") {
landuse::generate_landuse(&mut editor, way, args);
} else if way.tags.contains_key("natural") {
natural::generate_natural(&mut editor, element, args);
} else if way.tags.contains_key("amenity") {
amenities::generate_amenities(&mut editor, element, args);
} else if way.tags.contains_key("leisure") {
leisure::generate_leisure(&mut editor, way, args);
} else if way.tags.contains_key("barrier") {
barriers::generate_barriers(&mut editor, element);
} else if let Some(val) = way.tags.get("waterway") {
if val == "dock" {
// docks count as water areas
water_areas::generate_water_area_from_way(&mut editor, way, &xzbbox);
} else {
waterways::generate_waterways(&mut editor, way);
}
} else if way.tags.contains_key("bridge") {
//bridges::generate_bridges(&mut editor, way, ground_level); // TODO FIX
} else if way.tags.contains_key("railway") {
railways::generate_railways(&mut editor, way);
} else if way.tags.contains_key("roller_coaster") {
railways::generate_roller_coaster(&mut editor, way);
} else if way.tags.contains_key("aeroway") || way.tags.contains_key("area:aeroway")
{
highways::generate_aeroway(&mut editor, way, args);
} else if way.tags.get("service") == Some(&"siding".to_string()) {
highways::generate_siding(&mut editor, way);
} else if way.tags.contains_key("man_made") {
man_made::generate_man_made(&mut editor, element, args);
}
}
ProcessedElement::Node(node) => {
if node.tags.contains_key("door") || node.tags.contains_key("entrance") {
doors::generate_doors(&mut editor, node);
} else if node.tags.contains_key("natural")
&& node.tags.get("natural") == Some(&"tree".to_string())
{
natural::generate_natural(&mut editor, element, args);
} else if node.tags.contains_key("amenity") {
amenities::generate_amenities(&mut editor, element, args);
} else if node.tags.contains_key("barrier") {
barriers::generate_barrier_nodes(&mut editor, node);
} else if node.tags.contains_key("highway") {
highways::generate_highways(&mut editor, element, args, &highway_connectivity);
} else if node.tags.contains_key("tourism") {
tourisms::generate_tourisms(&mut editor, node);
} else if node.tags.contains_key("man_made") {
man_made::generate_man_made_nodes(&mut editor, node);
}
}
ProcessedElement::Relation(rel) => {
if rel.tags.contains_key("building") || rel.tags.contains_key("building:part") {
buildings::generate_building_from_relation(&mut editor, rel, args);
} else if rel.tags.contains_key("water")
|| rel
.tags
.get("natural")
.map(|val| val == "water" || val == "bay")
.unwrap_or(false)
{
water_areas::generate_water_areas_from_relation(&mut editor, rel, &xzbbox);
} else if rel.tags.contains_key("natural") {
natural::generate_natural_from_relation(&mut editor, rel, args);
} else if rel.tags.contains_key("landuse") {
landuse::generate_landuse_from_relation(&mut editor, rel, args);
} else if rel.tags.get("leisure") == Some(&"park".to_string()) {
leisure::generate_leisure_from_relation(&mut editor, rel, args);
} else if rel.tags.contains_key("man_made") {
man_made::generate_man_made(
&mut editor,
&ProcessedElement::Relation(rel.clone()),
args,
);
}
}
}
}
process_pb.finish();
// Generate ground layer
let total_blocks: u64 = xzbbox.bounding_rect().total_blocks();
let desired_updates: u64 = 1500;
let batch_size: u64 = (total_blocks / desired_updates).max(1);
let mut block_counter: u64 = 0;
println!("{} Generating ground...", "[6/7]".bold());
emit_gui_progress_update(70.0, "Generating ground...");
let ground_pb: ProgressBar = ProgressBar::new(total_blocks);
ground_pb.set_style(
ProgressStyle::default_bar()
.template("{spinner:.green} [{elapsed_precise}] [{bar:45}] {pos}/{len} blocks ({eta})")
.unwrap()
.progress_chars("█▓░"),
);
let mut gui_progress_grnd: f64 = 70.0;
let mut last_emitted_progress: f64 = gui_progress_grnd;
let total_iterations_grnd: f64 = total_blocks as f64;
let progress_increment_grnd: f64 = 20.0 / total_iterations_grnd;
let groundlayer_block = GRASS_BLOCK;
for x in xzbbox.min_x()..=xzbbox.max_x() {
for z in xzbbox.min_z()..=xzbbox.max_z() {
// Add default dirt and grass layer if there isn't a stone layer already
if !editor.check_for_block(x, 0, z, Some(&[STONE])) {
editor.set_block(groundlayer_block, x, 0, z, None, None);
editor.set_block(DIRT, x, -1, z, None, None);
editor.set_block(DIRT, x, -2, z, None, None);
}
// Fill underground with stone
if args.fillground {
// Fill from bedrock+1 to 3 blocks below ground with stone
editor.fill_blocks_absolute(
STONE,
x,
MIN_Y + 1,
z,
x,
editor.get_absolute_y(x, -3, z),
z,
None,
None,
);
}
// Generate a bedrock level at MIN_Y
editor.set_block_absolute(BEDROCK, x, MIN_Y, z, None, Some(&[BEDROCK]));
block_counter += 1;
// Use manual % check since is_multiple_of() is unstable on stable Rust
#[allow(clippy::manual_is_multiple_of)]
if block_counter % batch_size == 0 {
ground_pb.inc(batch_size);
}
gui_progress_grnd += progress_increment_grnd;
if (gui_progress_grnd - last_emitted_progress).abs() > 0.25 {
emit_gui_progress_update(gui_progress_grnd, "");
last_emitted_progress = gui_progress_grnd;
}
}
}
// Set sign for player orientation
/*editor.set_sign(
"↑".to_string(),
"Generated World".to_string(),
"This direction".to_string(),
"".to_string(),
9,
-61,
9,
6,
);*/
ground_pb.inc(block_counter % batch_size);
ground_pb.finish();
// Save world
editor.save();
// Update player spawn Y coordinate based on terrain height after generation
#[cfg(feature = "gui")]
if let Some(spawn_coords) = &args.spawn_point {
use crate::gui::update_player_spawn_y_after_generation;
let bbox_string = format!(
"{},{},{},{}",
args.bbox.min().lng(),
args.bbox.min().lat(),
args.bbox.max().lng(),
args.bbox.max().lat()
);
if let Err(e) = update_player_spawn_y_after_generation(
&args.path,
Some(*spawn_coords),
bbox_string,
args.scale,
&ground,
) {
let warning_msg = format!("Failed to update spawn point Y coordinate: {}", e);
eprintln!("Warning: {}", warning_msg);
#[cfg(feature = "gui")]
send_log(LogLevel::Warning, &warning_msg);
}
}
emit_gui_progress_update(100.0, "Done! World generation completed.");
println!("{}", "Done! World generation completed.".green().bold());
// Generate top-down map preview:
// - Always for GUI mode (non-blocking, runs in background)
// - Only when --generate-map flag is set for CLI mode (blocking, waits for completion)
#[cfg(feature = "gui")]
let should_generate_map = true;
#[cfg(not(feature = "gui"))]
let should_generate_map = args.generate_map;
if should_generate_map {
let world_path = args.path.clone();
let bounds = (
xzbbox.min_x(),
xzbbox.max_x(),
xzbbox.min_z(),
xzbbox.max_z(),
);
let map_thread = std::thread::spawn(move || {
// Use catch_unwind to prevent any panic from affecting the application
let result = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
map_renderer::render_world_map(&world_path, bounds.0, bounds.1, bounds.2, bounds.3)
}));
match result {
Ok(Ok(_path)) => {
// Notify the GUI that the map preview is ready
emit_map_preview_ready();
}
Ok(Err(e)) => {
eprintln!("Warning: Failed to generate map preview: {}", e);
}
Err(_) => {
eprintln!("Warning: Map preview generation panicked unexpectedly");
}
}
});
// In CLI mode, wait for map generation to complete before exiting
// In GUI mode, let it run in background to keep UI responsive
#[cfg(not(feature = "gui"))]
{
let _ = map_thread.join();
}
// In GUI mode, we don't join, let the thread run in background
#[cfg(feature = "gui")]
drop(map_thread);
}
Ok(())
}

View File

@@ -1,262 +0,0 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::coordinate_system::cartesian::XZPoint;
use crate::floodfill::flood_fill_area;
use crate::osm_parser::ProcessedElement;
use crate::world_editor::WorldEditor;
pub fn generate_amenities(editor: &mut WorldEditor, element: &ProcessedElement, args: &Args) {
// Skip if 'layer' or 'level' is negative in the tags
if let Some(layer) = element.tags().get("layer") {
if layer.parse::<i32>().unwrap_or(0) < 0 {
return;
}
}
if let Some(level) = element.tags().get("level") {
if level.parse::<i32>().unwrap_or(0) < 0 {
return;
}
}
if let Some(amenity_type) = element.tags().get("amenity") {
let first_node: Option<XZPoint> = element
.nodes()
.map(|n: &crate::osm_parser::ProcessedNode| XZPoint::new(n.x, n.z))
.next();
match amenity_type.as_str() {
"waste_disposal" | "waste_basket" => {
// Place a cauldron for waste disposal or waste basket
if let Some(pt) = first_node {
editor.set_block(CAULDRON, pt.x, 1, pt.z, None, None);
}
}
"vending_machine" | "atm" => {
if let Some(pt) = first_node {
editor.set_block(IRON_BLOCK, pt.x, 1, pt.z, None, None);
editor.set_block(IRON_BLOCK, pt.x, 2, pt.z, None, None);
}
}
"bicycle_parking" => {
let ground_block: Block = OAK_PLANKS;
let roof_block: Block = STONE_BLOCK_SLAB;
let polygon_coords: Vec<(i32, i32)> = element
.nodes()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
if polygon_coords.is_empty() {
return;
}
let floor_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
// Fill the floor area
for (x, z) in floor_area.iter() {
editor.set_block(ground_block, *x, 0, *z, None, None);
}
// Place fences and roof slabs at each corner node
for node in element.nodes() {
let x: i32 = node.x;
let z: i32 = node.z;
// Set ground block and fences
editor.set_block(ground_block, x, 0, z, None, None);
for y in 1..=4 {
editor.set_block(OAK_FENCE, x, y, z, None, None);
}
editor.set_block(roof_block, x, 5, z, None, None);
}
// Flood fill the roof area
for (x, z) in floor_area.iter() {
editor.set_block(roof_block, *x, 5, *z, None, None);
}
}
"bench" => {
// Place a bench
if let Some(pt) = first_node {
// 50% chance to rotate the bench by 90 degrees
if rand::random::<bool>() {
editor.set_block(SMOOTH_STONE, pt.x, 1, pt.z, None, None);
editor.set_block(OAK_LOG, pt.x + 1, 1, pt.z, None, None);
editor.set_block(OAK_LOG, pt.x - 1, 1, pt.z, None, None);
} else {
editor.set_block(SMOOTH_STONE, pt.x, 1, pt.z, None, None);
editor.set_block(OAK_LOG, pt.x, 1, pt.z + 1, None, None);
editor.set_block(OAK_LOG, pt.x, 1, pt.z - 1, None, None);
}
}
}
"shelter" => {
let roof_block: Block = STONE_BRICK_SLAB;
let polygon_coords: Vec<(i32, i32)> = element
.nodes()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
let roof_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
// Place fences and roof slabs at each corner node directly
for node in element.nodes() {
let x: i32 = node.x;
let z: i32 = node.z;
for fence_height in 1..=4 {
editor.set_block(OAK_FENCE, x, fence_height, z, None, None);
}
editor.set_block(roof_block, x, 5, z, None, None);
}
// Flood fill the roof area
for (x, z) in roof_area.iter() {
editor.set_block(roof_block, *x, 5, *z, None, None);
}
}
"parking" | "fountain" => {
// Process parking or fountain areas
let mut previous_node: Option<XZPoint> = None;
let mut corner_addup: (i32, i32, i32) = (0, 0, 0);
let mut current_amenity: Vec<(i32, i32)> = vec![];
let block_type = match amenity_type.as_str() {
"fountain" => WATER,
"parking" => GRAY_CONCRETE,
_ => GRAY_CONCRETE,
};
for node in element.nodes() {
let pt: XZPoint = node.xz();
if let Some(prev) = previous_node {
// Create borders for fountain or parking area
let bresenham_points: Vec<(i32, i32, i32)> =
bresenham_line(prev.x, 0, prev.z, pt.x, 0, pt.z);
for (bx, _, bz) in bresenham_points {
editor.set_block(block_type, bx, 0, bz, Some(&[BLACK_CONCRETE]), None);
// Decorative border around fountains
if amenity_type == "fountain" {
for dx in [-1, 0, 1].iter() {
for dz in [-1, 0, 1].iter() {
if (*dx, *dz) != (0, 0) {
editor.set_block(
LIGHT_GRAY_CONCRETE,
bx + dx,
0,
bz + dz,
None,
None,
);
}
}
}
}
current_amenity.push((node.x, node.z));
corner_addup.0 += node.x;
corner_addup.1 += node.z;
corner_addup.2 += 1;
}
}
previous_node = Some(pt);
}
// Flood-fill the interior area for parking or fountains
if corner_addup.2 > 0 {
let polygon_coords: Vec<(i32, i32)> = current_amenity.to_vec();
let flood_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
for (x, z) in flood_area {
editor.set_block(
block_type,
x,
0,
z,
Some(&[BLACK_CONCRETE, GRAY_CONCRETE]),
None,
);
// Enhanced parking space markings
if amenity_type == "parking" {
// Create defined parking spaces with realistic layout
let space_width = 4; // Width of each parking space
let space_length = 6; // Length of each parking space
let lane_width = 5; // Width of driving lanes
// Calculate which "zone" this coordinate falls into
let zone_x = x / space_width;
let zone_z = z / (space_length + lane_width);
let local_x = x % space_width;
let local_z = z % (space_length + lane_width);
// Create parking space boundaries (only within parking areas, not in driving lanes)
if local_z < space_length {
// We're in a parking space area, not in the driving lane
if local_x == 0 {
// Vertical parking space lines (only on the left edge)
editor.set_block(
LIGHT_GRAY_CONCRETE,
x,
0,
z,
Some(&[BLACK_CONCRETE, GRAY_CONCRETE]),
None,
);
} else if local_z == 0 {
// Horizontal parking space lines (only on the top edge)
editor.set_block(
LIGHT_GRAY_CONCRETE,
x,
0,
z,
Some(&[BLACK_CONCRETE, GRAY_CONCRETE]),
None,
);
}
} else if local_z == space_length {
// Bottom edge of parking spaces (border with driving lane)
editor.set_block(
LIGHT_GRAY_CONCRETE,
x,
0,
z,
Some(&[BLACK_CONCRETE, GRAY_CONCRETE]),
None,
);
} else if local_z > space_length && local_z < space_length + lane_width
{
// Driving lane - use darker concrete
editor.set_block(
BLACK_CONCRETE,
x,
0,
z,
Some(&[GRAY_CONCRETE]),
None,
);
}
// Add light posts at parking space outline corners
if local_x == 0 && local_z == 0 && zone_x % 3 == 0 && zone_z % 2 == 0 {
// Light posts at regular intervals on parking space corners
editor.set_block(COBBLESTONE_WALL, x, 1, z, None, None);
for dy in 2..=4 {
editor.set_block(OAK_FENCE, x, dy, z, None, None);
}
editor.set_block(GLOWSTONE, x, 5, z, None, None);
}
}
}
}
}
_ => {}
}
}
}

View File

@@ -1,171 +0,0 @@
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::osm_parser::{ProcessedElement, ProcessedNode};
use crate::world_editor::WorldEditor;
pub fn generate_barriers(editor: &mut WorldEditor, element: &ProcessedElement) {
// Default values
let mut barrier_material: Block = COBBLESTONE_WALL;
let mut barrier_height: i32 = 2;
match element.tags().get("barrier").map(|s| s.as_str()) {
Some("bollard") => {
barrier_material = COBBLESTONE_WALL;
barrier_height = 1;
}
Some("kerb") => {
// Ignore kerbs
return;
}
Some("hedge") => {
barrier_material = OAK_LEAVES;
barrier_height = 2;
}
Some("fence") => {
// Handle fence sub-types
match element.tags().get("fence_type").map(|s| s.as_str()) {
Some("railing" | "bars" | "krest") => {
barrier_material = STONE_BRICK_WALL;
barrier_height = 1;
}
Some(
"chain_link" | "metal" | "wire" | "barbed_wire" | "corrugated_metal"
| "electric" | "metal_bars",
) => {
barrier_material = STONE_BRICK_WALL; // IRON_BARS
barrier_height = 2;
}
Some("slatted" | "paling") => {
barrier_material = OAK_FENCE;
barrier_height = 1;
}
Some("wood" | "split_rail" | "panel" | "pole") => {
barrier_material = OAK_FENCE;
barrier_height = 2;
}
Some("concrete" | "stone") => {
barrier_material = STONE_BRICK_WALL;
barrier_height = 2;
}
Some("glass") => {
barrier_material = GLASS;
barrier_height = 1;
}
_ => {}
}
}
Some("wall") => {
barrier_material = STONE_BRICK_WALL;
barrier_height = 3;
}
_ => {}
}
// Tagged material takes priority over inferred
if let Some(barrier_mat) = element.tags().get("material") {
if barrier_mat == "brick" {
barrier_material = BRICK;
}
if barrier_mat == "concrete" {
barrier_material = LIGHT_GRAY_CONCRETE;
}
if barrier_mat == "metal" {
barrier_material = STONE_BRICK_WALL; // IRON_BARS
}
}
if let ProcessedElement::Way(way) = element {
// Determine wall height
let wall_height: i32 = element
.tags()
.get("height")
.and_then(|height: &String| height.parse::<f32>().ok())
.map(|height: f32| height.round() as i32)
.unwrap_or(barrier_height);
// Process nodes to create the barrier wall
for i in 1..way.nodes.len() {
let prev: &crate::osm_parser::ProcessedNode = &way.nodes[i - 1];
let x1: i32 = prev.x;
let z1: i32 = prev.z;
let cur: &crate::osm_parser::ProcessedNode = &way.nodes[i];
let x2: i32 = cur.x;
let z2: i32 = cur.z;
// Generate the line of coordinates between the two nodes
let bresenham_points: Vec<(i32, i32, i32)> = bresenham_line(x1, 0, z1, x2, 0, z2);
for (bx, _, bz) in bresenham_points {
// Build the barrier wall to the specified height
for y in 1..=wall_height {
editor.set_block(barrier_material, bx, y, bz, None, None);
}
// Add an optional top to the barrier if the height is more than 1
if wall_height > 1 {
editor.set_block(STONE_BRICK_SLAB, bx, wall_height + 1, bz, None, None);
}
}
}
}
}
pub fn generate_barrier_nodes(editor: &mut WorldEditor<'_>, node: &ProcessedNode) {
match node.tags.get("barrier").map(|s| s.as_str()) {
Some("bollard") => {
editor.set_block(COBBLESTONE_WALL, node.x, 1, node.z, None, None);
}
Some("stile" | "gate" | "swing_gate" | "lift_gate") => {
/*editor.set_block(
OAK_TRAPDOOR,
node.x,
1,
node.z,
Some(&[
COBBLESTONE_WALL,
OAK_FENCE,
STONE_BRICK_WALL,
OAK_LEAVES,
STONE_BRICK_SLAB,
]),
None,
);
editor.set_block(
AIR,
node.x,
2,
node.z,
Some(&[
COBBLESTONE_WALL,
OAK_FENCE,
STONE_BRICK_WALL,
OAK_LEAVES,
STONE_BRICK_SLAB,
]),
None,
);
editor.set_block(
AIR,
node.x,
3,
node.z,
Some(&[
COBBLESTONE_WALL,
OAK_FENCE,
STONE_BRICK_WALL,
OAK_LEAVES,
STONE_BRICK_SLAB,
]),
None,
);*/
}
Some("block") => {
editor.set_block(STONE, node.x, 1, node.z, None, None);
}
Some("entrance") => {
editor.set_block(AIR, node.x, 1, node.z, None, None);
}
None => {}
_ => {}
}
}

View File

@@ -1,39 +0,0 @@
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::osm_parser::ProcessedWay;
use crate::world_editor::WorldEditor;
// TODO FIX
#[allow(dead_code)]
pub fn generate_bridges(editor: &mut WorldEditor, element: &ProcessedWay) {
if let Some(_bridge_type) = element.tags.get("bridge") {
let bridge_height = 3; // Fixed height
for i in 1..element.nodes.len() {
let prev = &element.nodes[i - 1];
let cur = &element.nodes[i];
let points = bresenham_line(prev.x, 0, prev.z, cur.x, 0, cur.z);
let total_length = points.len();
let ramp_length = 6; // Length of ramp at each end
for (idx, (x, _, z)) in points.iter().enumerate() {
let height = if idx < ramp_length {
// Start ramp (rising)
(idx * bridge_height) / ramp_length
} else if idx >= total_length - ramp_length {
// End ramp (descending)
((total_length - idx) * bridge_height) / ramp_length
} else {
// Middle section (constant height)
bridge_height
};
// Place bridge blocks
for dx in -2..=2 {
editor.set_block(LIGHT_GRAY_CONCRETE, *x + dx, height as i32, *z, None, None);
}
}
}
}
}

View File

File diff suppressed because it is too large

View File

@@ -1,25 +0,0 @@
use crate::block_definitions::*;
use crate::osm_parser::ProcessedNode;
use crate::world_editor::WorldEditor;
pub fn generate_doors(editor: &mut WorldEditor, element: &ProcessedNode) {
// Check if the element is a door or entrance
if element.tags.contains_key("door") || element.tags.contains_key("entrance") {
// Check for the "level" tag and skip doors that are not at ground level
if let Some(level_str) = element.tags.get("level") {
if let Ok(level) = level_str.parse::<i32>() {
if level != 0 {
return; // Skip doors not on ground level
}
}
}
let x: i32 = element.x;
let z: i32 = element.z;
// Set the ground block and the door blocks
editor.set_block(GRAY_CONCRETE, x, 0, z, None, None);
editor.set_block(DARK_OAK_DOOR_LOWER, x, 1, z, None, None);
editor.set_block(DARK_OAK_DOOR_UPPER, x, 2, z, None, None);
}
}

View File

@@ -1,640 +0,0 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::coordinate_system::cartesian::XZPoint;
use crate::floodfill::flood_fill_area;
use crate::osm_parser::{ProcessedElement, ProcessedWay};
use crate::world_editor::WorldEditor;
use std::collections::HashMap;
/// Type alias for highway connectivity map
pub type HighwayConnectivityMap = HashMap<(i32, i32), Vec<i32>>;
/// Generates highways with elevation support based on layer tags and connectivity analysis
pub fn generate_highways(
editor: &mut WorldEditor,
element: &ProcessedElement,
args: &Args,
highway_connectivity: &HighwayConnectivityMap,
) {
generate_highways_internal(editor, element, args, highway_connectivity);
}
/// Build a connectivity map for highway endpoints to determine where slopes are needed.
pub fn build_highway_connectivity_map(elements: &[ProcessedElement]) -> HighwayConnectivityMap {
let mut connectivity_map: HashMap<(i32, i32), Vec<i32>> = HashMap::new();
for element in elements {
if let ProcessedElement::Way(way) = element {
if way.tags.contains_key("highway") {
let layer_value = way
.tags
.get("layer")
.and_then(|layer| layer.parse::<i32>().ok())
.unwrap_or(0);
// Treat negative layers as ground level (0) for connectivity
let layer_value = if layer_value < 0 { 0 } else { layer_value };
// Add connectivity for start and end nodes
if !way.nodes.is_empty() {
let start_node = &way.nodes[0];
let end_node = &way.nodes[way.nodes.len() - 1];
let start_coord = (start_node.x, start_node.z);
let end_coord = (end_node.x, end_node.z);
connectivity_map
.entry(start_coord)
.or_default()
.push(layer_value);
connectivity_map
.entry(end_coord)
.or_default()
.push(layer_value);
}
}
}
}
connectivity_map
}
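// Worked example derived from the logic above: two ways meeting at node (10, 20), one
// tagged layer=1 and one untagged (layer 0), yield connectivity_map[(10, 20)] == vec![1, 0]
// (in element order). A layer=1 way whose endpoint carries no other layer=1 entry gets a
// slope back down to ground there, while an endpoint shared by two layer=1 ways keeps its
// full elevation (see should_add_slope_at_node).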
/// Internal function that generates highways with connectivity context for elevation handling
fn generate_highways_internal(
editor: &mut WorldEditor,
element: &ProcessedElement,
args: &Args,
highway_connectivity: &HashMap<(i32, i32), Vec<i32>>, // Maps node coordinates to list of layers that connect to this node
) {
if let Some(highway_type) = element.tags().get("highway") {
if highway_type == "street_lamp" {
// Handle street lamps
if let ProcessedElement::Node(first_node) = element {
let x: i32 = first_node.x;
let z: i32 = first_node.z;
editor.set_block(COBBLESTONE_WALL, x, 1, z, None, None);
for dy in 2..=4 {
editor.set_block(OAK_FENCE, x, dy, z, None, None);
}
editor.set_block(GLOWSTONE, x, 5, z, None, None);
}
} else if highway_type == "crossing" {
// Handle traffic signals for crossings
if let Some(crossing_type) = element.tags().get("crossing") {
if crossing_type == "traffic_signals" {
if let ProcessedElement::Node(node) = element {
let x: i32 = node.x;
let z: i32 = node.z;
for dy in 1..=3 {
editor.set_block(COBBLESTONE_WALL, x, dy, z, None, None);
}
editor.set_block(GREEN_WOOL, x, 4, z, None, None);
editor.set_block(YELLOW_WOOL, x, 5, z, None, None);
editor.set_block(RED_WOOL, x, 6, z, None, None);
}
}
}
} else if highway_type == "bus_stop" {
// Handle bus stops
if let ProcessedElement::Node(node) = element {
let x = node.x;
let z = node.z;
for dy in 1..=3 {
editor.set_block(COBBLESTONE_WALL, x, dy, z, None, None);
}
editor.set_block(WHITE_WOOL, x, 4, z, None, None);
editor.set_block(WHITE_WOOL, x + 1, 4, z, None, None);
}
} else if element
.tags()
.get("area")
.is_some_and(|v: &String| v == "yes")
{
let ProcessedElement::Way(way) = element else {
return;
};
// Handle areas like pedestrian plazas
let mut surface_block: Block = STONE; // Default block
// Determine the block type based on the 'surface' tag
if let Some(surface) = element.tags().get("surface") {
surface_block = match surface.as_str() {
"paving_stones" | "sett" => STONE_BRICKS,
"bricks" => BRICK,
"wood" => OAK_PLANKS,
"asphalt" => BLACK_CONCRETE,
"gravel" | "fine_gravel" => GRAVEL,
"grass" => GRASS_BLOCK,
"dirt" | "ground" | "earth" => DIRT,
"sand" => SAND,
"concrete" => LIGHT_GRAY_CONCRETE,
_ => STONE, // Default to stone for unknown surfaces
};
}
// Fill the area using flood fill or by iterating through the nodes
let polygon_coords: Vec<(i32, i32)> = way
.nodes
.iter()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
let filled_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
for (x, z) in filled_area {
editor.set_block(surface_block, x, 0, z, None, None);
}
} else {
let mut previous_node: Option<(i32, i32)> = None;
let mut block_type = BLACK_CONCRETE;
let mut block_range: i32 = 2;
let mut add_stripe = false;
let mut add_outline = false;
let scale_factor = args.scale;
// Parse the layer value for elevation calculation
let layer_value = element
.tags()
.get("layer")
.and_then(|layer| layer.parse::<i32>().ok())
.unwrap_or(0);
// Treat negative layers as ground level (0)
let layer_value = if layer_value < 0 { 0 } else { layer_value };
// Skip if 'level' is negative in the tags (indoor mapping)
if let Some(level) = element.tags().get("level") {
if level.parse::<i32>().unwrap_or(0) < 0 {
return;
}
}
// Determine block type and range based on highway type
match highway_type.as_str() {
"footway" | "pedestrian" => {
block_type = GRAY_CONCRETE;
block_range = 1;
}
"path" => {
block_type = DIRT_PATH;
block_range = 1;
}
"motorway" | "primary" | "trunk" => {
block_range = 5;
add_stripe = true;
}
"secondary" => {
block_range = 4;
add_stripe = true;
}
"tertiary" => {
add_stripe = true;
}
"track" => {
block_range = 1;
}
"service" => {
block_type = GRAY_CONCRETE;
block_range = 2;
}
"secondary_link" | "tertiary_link" => {
//Exit ramps, sliproads
block_type = BLACK_CONCRETE;
block_range = 1;
}
"escape" => {
// Sand trap for vehicles on mountainous roads
block_type = SAND;
block_range = 1;
}
"steps" => {
//TODO: Add correct stairs respecting height, step_count, etc.
block_type = GRAY_CONCRETE;
block_range = 1;
}
_ => {
if let Some(lanes) = element.tags().get("lanes") {
if lanes == "2" {
block_range = 3;
add_stripe = true;
add_outline = true;
} else if lanes != "1" {
block_range = 4;
add_stripe = true;
add_outline = true;
}
}
}
}
let ProcessedElement::Way(way) = element else {
return;
};
if way.nodes.is_empty() {
return; // guard against degenerate ways with no nodes before indexing below
}
if scale_factor < 1.0 {
block_range = ((block_range as f64) * scale_factor).floor() as i32;
}
// Calculate elevation based on layer
const LAYER_HEIGHT_STEP: i32 = 6; // Each layer is 6 blocks higher/lower
let base_elevation = layer_value * LAYER_HEIGHT_STEP;
// Check if we need slopes at start and end
let needs_start_slope =
should_add_slope_at_node(&way.nodes[0], layer_value, highway_connectivity);
let needs_end_slope = should_add_slope_at_node(
&way.nodes[way.nodes.len() - 1],
layer_value,
highway_connectivity,
);
// Calculate total way length for slope distribution
let total_way_length = calculate_way_length(way);
// Check if this is a short isolated elevated segment - if so, treat as ground level
let is_short_isolated_elevated =
needs_start_slope && needs_end_slope && layer_value > 0 && total_way_length <= 35;
// Override elevation and slopes for short isolated segments
let (effective_elevation, effective_start_slope, effective_end_slope) =
if is_short_isolated_elevated {
(0, false, false) // Treat as ground level
} else {
(base_elevation, needs_start_slope, needs_end_slope)
};
let slope_length = (total_way_length as f32 * 0.35).clamp(15.0, 50.0) as usize; // 35% of way length, max 50 blocks, min 15 blocks
// Iterate over nodes to create the highway
let mut segment_index = 0;
let total_segments = way.nodes.len() - 1;
for node in &way.nodes {
if let Some(prev) = previous_node {
let (x1, z1) = prev;
let x2: i32 = node.x;
let z2: i32 = node.z;
// Generate the line of coordinates between the two nodes
let bresenham_points: Vec<(i32, i32, i32)> =
bresenham_line(x1, 0, z1, x2, 0, z2);
// Calculate elevation for this segment
let segment_length = bresenham_points.len();
// Variables to manage dashed line pattern
let mut stripe_length: i32 = 0;
let dash_length: i32 = (5.0 * scale_factor).ceil() as i32;
let gap_length: i32 = (5.0 * scale_factor).ceil() as i32;
for (point_index, (x, _, z)) in bresenham_points.iter().enumerate() {
// Calculate Y elevation for this point based on slopes and layer
let current_y = calculate_point_elevation(
segment_index,
point_index,
segment_length,
total_segments,
effective_elevation,
effective_start_slope,
effective_end_slope,
slope_length,
);
// Draw the road surface for the entire width
for dx in -block_range..=block_range {
for dz in -block_range..=block_range {
let set_x: i32 = x + dx;
let set_z: i32 = z + dz;
// Zebra crossing logic
if highway_type == "footway"
&& element.tags().get("footway")
== Some(&"crossing".to_string())
{
let is_horizontal: bool = (x2 - x1).abs() >= (z2 - z1).abs();
if is_horizontal {
if set_x % 2 < 1 {
editor.set_block(
WHITE_CONCRETE,
set_x,
current_y,
set_z,
Some(&[BLACK_CONCRETE]),
None,
);
} else {
editor.set_block(
BLACK_CONCRETE,
set_x,
current_y,
set_z,
None,
None,
);
}
} else if set_z % 2 < 1 {
editor.set_block(
WHITE_CONCRETE,
set_x,
current_y,
set_z,
Some(&[BLACK_CONCRETE]),
None,
);
} else {
editor.set_block(
BLACK_CONCRETE,
set_x,
current_y,
set_z,
None,
None,
);
}
} else {
editor.set_block(
block_type,
set_x,
current_y,
set_z,
None,
Some(&[BLACK_CONCRETE, WHITE_CONCRETE]),
);
}
// Add stone brick foundation underneath elevated highways for thickness
if effective_elevation > 0 && current_y > 0 {
// Add 1 layer of stone bricks underneath the highway surface
editor.set_block(
STONE_BRICKS,
set_x,
current_y - 1,
set_z,
None,
None,
);
}
// Add support pillars for elevated highways
if effective_elevation != 0 && current_y > 0 {
add_highway_support_pillar(
editor,
set_x,
current_y,
set_z,
dx,
dz,
block_range,
);
}
}
}
// Add light gray concrete outline for multi-lane roads
if add_outline {
// Left outline
for dz in -block_range..=block_range {
let outline_x = x - block_range - 1;
let outline_z = z + dz;
editor.set_block(
LIGHT_GRAY_CONCRETE,
outline_x,
current_y,
outline_z,
None,
None,
);
}
// Right outline
for dz in -block_range..=block_range {
let outline_x = x + block_range + 1;
let outline_z = z + dz;
editor.set_block(
LIGHT_GRAY_CONCRETE,
outline_x,
current_y,
outline_z,
None,
None,
);
}
}
// Add a dashed white line in the middle for larger roads
if add_stripe {
if stripe_length < dash_length {
let stripe_x: i32 = *x;
let stripe_z: i32 = *z;
editor.set_block(
WHITE_CONCRETE,
stripe_x,
current_y,
stripe_z,
Some(&[BLACK_CONCRETE]),
None,
);
}
// Increment stripe_length and reset after completing a dash and gap
stripe_length += 1;
if stripe_length >= dash_length + gap_length {
stripe_length = 0;
}
}
}
segment_index += 1;
}
previous_node = Some((node.x, node.z));
}
}
}
}
/// Helper function to determine if a slope should be added at a specific node
fn should_add_slope_at_node(
node: &crate::osm_parser::ProcessedNode,
current_layer: i32,
highway_connectivity: &HashMap<(i32, i32), Vec<i32>>,
) -> bool {
let node_coord = (node.x, node.z);
// If we don't have connectivity information, always add slopes for non-zero layers
if highway_connectivity.is_empty() {
return current_layer != 0;
}
// Check if there are other highways at different layers connected to this node
if let Some(connected_layers) = highway_connectivity.get(&node_coord) {
// Count how many ways are at the same layer as current way
let same_layer_count = connected_layers
.iter()
.filter(|&&layer| layer == current_layer)
.count();
// If this is the only way at this layer connecting to this node, we need a slope
// (unless we're at ground level and connecting to ground level ways)
if same_layer_count <= 1 {
return current_layer != 0;
}
// If there are multiple ways at the same layer, don't add slope
false
} else {
// No other highways connected, add slope if not at ground level
current_layer != 0
}
}
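// Worked example (illustrative, values assumed): with a connectivity map of
// {(10, 20): [0, 1]}, a layer-1 way ending at (10, 20) sees only one way at its
// own layer, so a slope back down to ground level is added; the layer-0 way at the
// same node never gets a slope. With {(10, 20): [1, 1]}, two elevated ways meet,
// so the bridge continues at full height and no slope is added.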
/// Helper function to calculate the total length of a way in blocks
fn calculate_way_length(way: &ProcessedWay) -> usize {
let mut total_length = 0;
let mut previous_node: Option<&crate::osm_parser::ProcessedNode> = None;
for node in &way.nodes {
if let Some(prev) = previous_node {
let dx = (node.x - prev.x).abs();
let dz = (node.z - prev.z).abs();
total_length += ((dx * dx + dz * dz) as f32).sqrt() as usize;
}
previous_node = Some(node);
}
total_length
}
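// Worked example (illustrative): for nodes at (0, 0) -> (3, 4) -> (3, 10), the
// segment lengths are sqrt(3*3 + 4*4) = 5 and sqrt(6*6) = 6, so the way length
// is 5 + 6 = 11 blocks (each segment is truncated towards zero before summing).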
/// Calculate the Y elevation for a specific point along the highway
#[allow(clippy::too_many_arguments)]
fn calculate_point_elevation(
segment_index: usize,
point_index: usize,
segment_length: usize,
total_segments: usize,
base_elevation: i32,
needs_start_slope: bool,
needs_end_slope: bool,
slope_length: usize,
) -> i32 {
// If no slopes needed, return base elevation
if !needs_start_slope && !needs_end_slope {
return base_elevation;
}
// Calculate total distance from start
let total_distance_from_start = segment_index * segment_length + point_index;
let total_way_length = total_segments * segment_length;
// Ensure we have reasonable values
if total_way_length == 0 || slope_length == 0 {
return base_elevation;
}
// Start slope calculation - gradual rise from ground level
if needs_start_slope && total_distance_from_start <= slope_length {
let slope_progress = total_distance_from_start as f32 / slope_length as f32;
let elevation_offset = (base_elevation as f32 * slope_progress) as i32;
return elevation_offset;
}
// End slope calculation - gradual descent to ground level
if needs_end_slope
&& total_distance_from_start >= (total_way_length.saturating_sub(slope_length))
{
let distance_from_end = total_way_length - total_distance_from_start;
let slope_progress = distance_from_end as f32 / slope_length as f32;
let elevation_offset = (base_elevation as f32 * slope_progress) as i32;
return elevation_offset;
}
// Middle section at full elevation
base_elevation
}
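// Minimal sketch of the expected interpolation (illustrative test, assuming the
// function stays private to this module): a 4-segment way of 10 points each,
// elevated by 8 blocks with 10-block slopes at both ends.
#[cfg(test)]
mod point_elevation_tests {
use super::calculate_point_elevation;
#[test]
fn slopes_interpolate_linearly() {
// Halfway up the start slope: 8 * (5 / 10) = 4.
assert_eq!(calculate_point_elevation(0, 5, 10, 4, 8, true, true, 10), 4);
// Middle of the way: full elevation.
assert_eq!(calculate_point_elevation(2, 0, 10, 4, 8, true, true, 10), 8);
// Halfway down the end slope: 8 * (5 / 10) = 4.
assert_eq!(calculate_point_elevation(3, 5, 10, 4, 8, true, true, 10), 4);
}
}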
/// Add support pillars for elevated highways
fn add_highway_support_pillar(
editor: &mut WorldEditor,
x: i32,
highway_y: i32,
z: i32,
dx: i32,
dz: i32,
_block_range: i32, // Keep for future use
) {
// Only add pillars at specific intervals and positions
if dx == 0 && dz == 0 && (x + z) % 8 == 0 {
// Add pillar from ground to highway level
for y in 1..highway_y {
editor.set_block(STONE_BRICKS, x, y, z, None, None);
}
// Add pillar base
for base_dx in -1..=1 {
for base_dz in -1..=1 {
editor.set_block(STONE_BRICKS, x + base_dx, 0, z + base_dz, None, None);
}
}
}
}
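// Worked example (illustrative): for a centerline point at x = 12, z = 4 the
// check (12 + 4) % 8 == 0 passes, so a stone brick pillar is built from y = 1 up
// to just below the deck, plus a 3x3 base pad at y = 0; neighbouring centerline
// points fail the modulo check, which spaces the pillars roughly 8 blocks apart.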
/// Generates a siding using stone brick slabs
pub fn generate_siding(editor: &mut WorldEditor, element: &ProcessedWay) {
let mut previous_node: Option<XZPoint> = None;
let siding_block: Block = STONE_BRICK_SLAB;
for node in &element.nodes {
let current_node = node.xz();
// Draw the siding using Bresenham's line algorithm between nodes
if let Some(prev_node) = previous_node {
let bresenham_points: Vec<(i32, i32, i32)> = bresenham_line(
prev_node.x,
0,
prev_node.z,
current_node.x,
0,
current_node.z,
);
for (bx, _, bz) in bresenham_points {
if !editor.check_for_block(bx, 0, bz, Some(&[BLACK_CONCRETE, WHITE_CONCRETE])) {
editor.set_block(siding_block, bx, 1, bz, None, None);
}
}
}
previous_node = Some(current_node);
}
}
/// Generates an aeroway
pub fn generate_aeroway(editor: &mut WorldEditor, way: &ProcessedWay, args: &Args) {
let mut previous_node: Option<(i32, i32)> = None;
let surface_block = LIGHT_GRAY_CONCRETE;
for node in &way.nodes {
if let Some(prev) = previous_node {
let (x1, z1) = prev;
let x2 = node.x;
let z2 = node.z;
let points = bresenham_line(x1, 0, z1, x2, 0, z2);
let way_width: i32 = (12.0 * args.scale).ceil() as i32;
for (x, _, z) in points {
for dx in -way_width..=way_width {
for dz in -way_width..=way_width {
let set_x = x + dx;
let set_z = z + dz;
editor.set_block(surface_block, set_x, 0, set_z, None, None);
}
}
}
}
previous_node = Some((node.x, node.z));
}
}
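// Worked example (illustrative, assuming a scale of 1.0): way_width = ceil(12.0) = 12,
// so every centerline point is expanded into a (2 * 12 + 1) = 25-block square of
// light gray concrete, giving runways and taxiways a broad, flat surface.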

View File

@@ -1,308 +0,0 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::element_processing::tree::Tree;
use crate::floodfill::flood_fill_area;
use crate::osm_parser::{ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;
pub fn generate_landuse(editor: &mut WorldEditor, element: &ProcessedWay, args: &Args) {
// Determine block type based on landuse tag
let binding: String = "".to_string();
let landuse_tag: &String = element.tags.get("landuse").unwrap_or(&binding);
let block_type = match landuse_tag.as_str() {
"greenfield" | "meadow" | "grass" | "orchard" | "forest" => GRASS_BLOCK,
"farmland" => FARMLAND,
"cemetery" => PODZOL,
"construction" => COARSE_DIRT,
"traffic_island" => STONE_BLOCK_SLAB,
"residential" => {
let residential_tag = element.tags.get("residential").unwrap_or(&binding);
if residential_tag == "rural" {
GRASS_BLOCK
} else {
STONE_BRICKS
}
}
"commercial" => SMOOTH_STONE,
"education" => POLISHED_ANDESITE,
"religious" => POLISHED_ANDESITE,
"industrial" => COBBLESTONE,
"military" => GRAY_CONCRETE,
"railway" => GRAVEL,
"landfill" => {
// Gravel if man_made = spoil_heap or heap, coarse dirt otherwise
let manmade_tag = element.tags.get("man_made").unwrap_or(&binding);
if manmade_tag == "spoil_heap" || manmade_tag == "heap" {
GRAVEL
} else {
COARSE_DIRT
}
}
"quarry" => STONE,
_ => GRASS_BLOCK,
};
// Get the area of the landuse element
let polygon_coords: Vec<(i32, i32)> = element.nodes.iter().map(|n| (n.x, n.z)).collect();
let floor_area: Vec<(i32, i32)> = flood_fill_area(&polygon_coords, args.timeout.as_ref());
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
for (x, z) in floor_area {
if landuse_tag == "traffic_island" {
editor.set_block(block_type, x, 1, z, None, None);
} else if landuse_tag == "construction" || landuse_tag == "railway" {
editor.set_block(block_type, x, 0, z, None, Some(&[SPONGE]));
} else {
editor.set_block(block_type, x, 0, z, None, None);
}
// Add specific features for different landuse types
match landuse_tag.as_str() {
"cemetery" => {
if (x % 3 == 0) && (z % 3 == 0) {
let random_choice: i32 = rng.gen_range(0..100);
if random_choice < 15 {
// Place graves
if editor.check_for_block(x, 0, z, Some(&[PODZOL])) {
if rng.gen_bool(0.5) {
editor.set_block(COBBLESTONE, x - 1, 1, z, None, None);
editor.set_block(STONE_BRICK_SLAB, x - 1, 2, z, None, None);
editor.set_block(STONE_BRICK_SLAB, x, 1, z, None, None);
editor.set_block(STONE_BRICK_SLAB, x + 1, 1, z, None, None);
} else {
editor.set_block(COBBLESTONE, x, 1, z - 1, None, None);
editor.set_block(STONE_BRICK_SLAB, x, 2, z - 1, None, None);
editor.set_block(STONE_BRICK_SLAB, x, 1, z, None, None);
editor.set_block(STONE_BRICK_SLAB, x, 1, z + 1, None, None);
}
}
} else if random_choice < 30 {
if editor.check_for_block(x, 0, z, Some(&[PODZOL])) {
editor.set_block(RED_FLOWER, x, 1, z, None, None);
}
} else if random_choice < 33 {
Tree::create(editor, (x, 1, z));
} else if random_choice < 35 {
editor.set_block(OAK_LEAVES, x, 1, z, None, None);
}
}
}
"forest" => {
if editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK])) {
let random_choice: i32 = rng.gen_range(0..30);
if random_choice == 20 {
Tree::create(editor, (x, 1, z));
} else if random_choice == 2 {
let flower_block: Block = match rng.gen_range(1..=5) {
1 => OAK_LEAVES,
2 => RED_FLOWER,
3 => BLUE_FLOWER,
4 => YELLOW_FLOWER,
_ => WHITE_FLOWER,
};
editor.set_block(flower_block, x, 1, z, None, None);
} else if random_choice <= 12 {
editor.set_block(GRASS, x, 1, z, None, None);
}
}
}
"farmland" => {
// Check if the current block is not water or another undesired block
if !editor.check_for_block(x, 0, z, Some(&[WATER])) {
if x % 9 == 0 && z % 9 == 0 {
// Place water in dot pattern
editor.set_block(WATER, x, 0, z, Some(&[FARMLAND]), None);
} else if rng.gen_range(0..76) == 0 {
let special_choice: i32 = rng.gen_range(1..=10);
if special_choice <= 4 {
editor.set_block(HAY_BALE, x, 1, z, None, Some(&[SPONGE]));
} else {
editor.set_block(OAK_LEAVES, x, 1, z, None, Some(&[SPONGE]));
}
} else {
// Set crops only if the block below is farmland
if editor.check_for_block(x, 0, z, Some(&[FARMLAND])) {
let crop_choice = [WHEAT, CARROTS, POTATOES][rng.gen_range(0..3)];
editor.set_block(crop_choice, x, 1, z, None, None);
}
}
}
}
"construction" => {
let random_choice: i32 = rng.gen_range(0..1501);
if random_choice < 15 {
editor.set_block(SCAFFOLDING, x, 1, z, None, None);
if random_choice < 2 {
editor.set_block(SCAFFOLDING, x, 2, z, None, None);
editor.set_block(SCAFFOLDING, x, 3, z, None, None);
} else if random_choice < 4 {
editor.set_block(SCAFFOLDING, x, 2, z, None, None);
editor.set_block(SCAFFOLDING, x, 3, z, None, None);
editor.set_block(SCAFFOLDING, x, 4, z, None, None);
editor.set_block(SCAFFOLDING, x, 1, z + 1, None, None);
} else {
editor.set_block(SCAFFOLDING, x, 2, z, None, None);
editor.set_block(SCAFFOLDING, x, 3, z, None, None);
editor.set_block(SCAFFOLDING, x, 4, z, None, None);
editor.set_block(SCAFFOLDING, x, 5, z, None, None);
editor.set_block(SCAFFOLDING, x - 1, 1, z, None, None);
editor.set_block(SCAFFOLDING, x + 1, 1, z - 1, None, None);
}
} else if random_choice < 55 {
let construction_items: [Block; 13] = [
OAK_LOG,
COBBLESTONE,
GRAVEL,
GLOWSTONE,
STONE,
COBBLESTONE_WALL,
BLACK_CONCRETE,
SAND,
OAK_PLANKS,
DIRT,
BRICK,
CRAFTING_TABLE,
FURNACE,
];
editor.set_block(
construction_items[rng.gen_range(0..construction_items.len())],
x,
1,
z,
None,
None,
);
} else if random_choice < 65 {
if random_choice < 60 {
editor.set_block(DIRT, x, 1, z, None, None);
editor.set_block(DIRT, x, 2, z, None, None);
editor.set_block(DIRT, x + 1, 1, z, None, None);
editor.set_block(DIRT, x, 1, z + 1, None, None);
} else {
editor.set_block(DIRT, x, 1, z, None, None);
editor.set_block(DIRT, x, 2, z, None, None);
editor.set_block(DIRT, x - 1, 1, z, None, None);
editor.set_block(DIRT, x, 1, z - 1, None, None);
}
} else if random_choice < 100 {
editor.set_block(GRAVEL, x, 0, z, None, Some(&[SPONGE]));
} else if random_choice < 115 {
editor.set_block(SAND, x, 0, z, None, Some(&[SPONGE]));
} else if random_choice < 125 {
editor.set_block(DIORITE, x, 0, z, None, Some(&[SPONGE]));
} else if random_choice < 145 {
editor.set_block(BRICK, x, 0, z, None, Some(&[SPONGE]));
} else if random_choice < 155 {
editor.set_block(GRANITE, x, 0, z, None, Some(&[SPONGE]));
} else if random_choice < 180 {
editor.set_block(ANDESITE, x, 0, z, None, Some(&[SPONGE]));
} else if random_choice < 565 {
editor.set_block(COBBLESTONE, x, 0, z, None, Some(&[SPONGE]));
}
}
"grass" => {
if editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK])) {
match rng.gen_range(0..200) {
0 => editor.set_block(OAK_LEAVES, x, 1, z, None, None),
1..=170 => editor.set_block(GRASS, x, 1, z, None, None),
_ => {}
}
}
}
"greenfield" => {
if editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK])) {
match rng.gen_range(0..200) {
0 => editor.set_block(OAK_LEAVES, x, 1, z, None, None),
1..=17 => editor.set_block(GRASS, x, 1, z, None, None),
_ => {}
}
}
}
"meadow" => {
if editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK])) {
let random_choice: i32 = rng.gen_range(0..1001);
if random_choice < 5 {
Tree::create(editor, (x, 1, z));
} else if random_choice < 6 {
editor.set_block(RED_FLOWER, x, 1, z, None, None);
} else if random_choice < 9 {
editor.set_block(OAK_LEAVES, x, 1, z, None, None);
} else if random_choice < 800 {
editor.set_block(GRASS, x, 1, z, None, None);
}
}
}
"orchard" => {
if x % 18 == 0 && z % 10 == 0 {
Tree::create(editor, (x, 1, z));
} else if editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK])) {
match rng.gen_range(0..100) {
0 => editor.set_block(OAK_LEAVES, x, 1, z, None, None),
1..=20 => editor.set_block(GRASS, x, 1, z, None, None),
_ => {}
}
}
}
"quarry" => {
// Add stone layer under it
editor.set_block(STONE, x, -1, z, Some(&[STONE]), None);
editor.set_block(STONE, x, -2, z, Some(&[STONE]), None);
// Generate ore blocks
if let Some(resource) = element.tags.get("resource") {
let ore_block = match resource.as_str() {
"iron_ore" => IRON_ORE,
"coal" => COAL_ORE,
"copper" => COPPER_ORE,
"gold" => GOLD_ORE,
"clay" | "kaolinite" => CLAY,
_ => STONE,
};
let random_choice: i32 = rng.gen_range(0..100 + editor.get_absolute_y(x, 0, z)); // The deeper the quarry floor, the more resources there are
if random_choice < 5 {
editor.set_block(ore_block, x, 0, z, Some(&[STONE]), None);
}
}
}
_ => {}
}
}
}
pub fn generate_landuse_from_relation(
editor: &mut WorldEditor,
rel: &ProcessedRelation,
args: &Args,
) {
if rel.tags.contains_key("landuse") {
// Generate individual ways with their original tags
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
generate_landuse(editor, &member.way.clone(), args);
}
}
// Combine all outer ways into one with relation tags
let mut combined_nodes = Vec::new();
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
combined_nodes.extend(member.way.nodes.clone());
}
}
// Only process if we have nodes
if !combined_nodes.is_empty() {
// Create combined way with relation tags
let combined_way = ProcessedWay {
id: rel.id,
nodes: combined_nodes,
tags: rel.tags.clone(),
};
// Generate landuse area from combined way
generate_landuse(editor, &combined_way, args);
}
}
}

View File

@@ -1,206 +0,0 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::element_processing::tree::Tree;
use crate::floodfill::flood_fill_area;
use crate::osm_parser::{ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;
pub fn generate_leisure(editor: &mut WorldEditor, element: &ProcessedWay, args: &Args) {
if let Some(leisure_type) = element.tags.get("leisure") {
let mut previous_node: Option<(i32, i32)> = None;
let mut corner_addup: (i32, i32, i32) = (0, 0, 0);
let mut current_leisure: Vec<(i32, i32)> = vec![];
// Determine block type based on leisure type
let block_type: Block = match leisure_type.as_str() {
"park" | "nature_reserve" | "garden" | "disc_golf_course" | "golf_course" => {
GRASS_BLOCK
}
"schoolyard" => BLACK_CONCRETE,
"playground" | "recreation_ground" | "pitch" | "beach_resort" | "dog_park" => {
if let Some(surface) = element.tags.get("surface") {
match surface.as_str() {
"clay" => TERRACOTTA,
"sand" => SAND,
"tartan" => RED_TERRACOTTA,
"grass" => GRASS_BLOCK,
"dirt" => DIRT,
"pebblestone" | "cobblestone" | "unhewn_cobblestone" => COBBLESTONE,
_ => GREEN_STAINED_HARDENED_CLAY,
}
} else {
GREEN_STAINED_HARDENED_CLAY
}
}
"swimming_pool" | "swimming_area" => WATER, //Swimming area: Area in a larger body of water for swimming
"bathing_place" => SMOOTH_SANDSTONE, // Could be sand or concrete
"outdoor_seating" => SMOOTH_STONE, //Usually stone or stone bricks
"water_park" | "slipway" => LIGHT_GRAY_CONCRETE, // Water park area, not the pool. Usually is concrete
"ice_rink" => PACKED_ICE, // TODO: Ice for Ice Rink, needs building defined
_ => GRASS_BLOCK,
};
// Process leisure area nodes
for node in &element.nodes {
if let Some(prev) = previous_node {
// Draw a line between the current and previous node
let bresenham_points: Vec<(i32, i32, i32)> =
bresenham_line(prev.0, 0, prev.1, node.x, 0, node.z);
for (bx, _, bz) in bresenham_points {
editor.set_block(
block_type,
bx,
0,
bz,
Some(&[
GRASS_BLOCK,
STONE_BRICKS,
SMOOTH_STONE,
LIGHT_GRAY_CONCRETE,
COBBLESTONE,
GRAY_CONCRETE,
]),
None,
);
}
current_leisure.push((node.x, node.z));
corner_addup.0 += node.x;
corner_addup.1 += node.z;
corner_addup.2 += 1;
}
previous_node = Some((node.x, node.z));
}
// Flood-fill the interior of the leisure area
if corner_addup != (0, 0, 0) {
let polygon_coords: Vec<(i32, i32)> = element
.nodes
.iter()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
let filled_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
for (x, z) in filled_area {
editor.set_block(block_type, x, 0, z, Some(&[GRASS_BLOCK]), None);
// Add decorative elements for parks and gardens
if matches!(leisure_type.as_str(), "park" | "garden" | "nature_reserve")
&& editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK]))
{
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
let random_choice: i32 = rng.gen_range(0..1000);
match random_choice {
0..30 => {
// Flowers
let flower_choice = match random_choice {
0..10 => RED_FLOWER,
10..20 => YELLOW_FLOWER,
20..30 => BLUE_FLOWER,
_ => WHITE_FLOWER,
};
editor.set_block(flower_choice, x, 1, z, None, None);
}
30..90 => {
// Grass
editor.set_block(GRASS, x, 1, z, None, None);
}
90..105 => {
// Oak leaves
editor.set_block(OAK_LEAVES, x, 1, z, None, None);
}
105..120 => {
// Tree
Tree::create(editor, (x, 1, z));
}
_ => {}
}
}
// Add playground or recreation ground features
if matches!(leisure_type.as_str(), "playground" | "recreation_ground") {
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
let random_choice: i32 = rng.gen_range(0..5000);
match random_choice {
0..10 => {
// Swing set
for y in 1..=3 {
editor.set_block(OAK_FENCE, x - 1, y, z, None, None);
editor.set_block(OAK_FENCE, x + 1, y, z, None, None);
}
editor.set_block(OAK_PLANKS, x - 1, 4, z, None, None);
editor.set_block(OAK_SLAB, x, 4, z, None, None);
editor.set_block(OAK_PLANKS, x + 1, 4, z, None, None);
editor.set_block(STONE_BLOCK_SLAB, x, 2, z, None, None);
}
10..20 => {
// Slide
editor.set_block(OAK_SLAB, x, 1, z, None, None);
editor.set_block(OAK_SLAB, x + 1, 2, z, None, None);
editor.set_block(OAK_SLAB, x + 2, 3, z, None, None);
editor.set_block(OAK_PLANKS, x + 2, 2, z, None, None);
editor.set_block(OAK_PLANKS, x + 2, 1, z, None, None);
editor.set_block(LADDER, x + 2, 2, z - 1, None, None);
editor.set_block(LADDER, x + 2, 1, z - 1, None, None);
}
20..30 => {
// Sandpit
editor.fill_blocks(
SAND,
x - 3,
0,
z - 3,
x + 3,
0,
z + 3,
Some(&[GREEN_STAINED_HARDENED_CLAY]),
None,
);
}
_ => {}
}
}
}
}
}
}
pub fn generate_leisure_from_relation(
editor: &mut WorldEditor,
rel: &ProcessedRelation,
args: &Args,
) {
if rel.tags.get("leisure") == Some(&"park".to_string()) {
// First generate individual ways with their original tags
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
generate_leisure(editor, &member.way, args);
}
}
// Then combine all outer ways into one
let mut combined_nodes = Vec::new();
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
combined_nodes.extend(member.way.nodes.clone());
}
}
// Create combined way with relation tags
let combined_way = ProcessedWay {
id: rel.id,
nodes: combined_nodes,
tags: rel.tags.clone(),
};
// Generate leisure area from combined way
generate_leisure(editor, &combined_way, args);
}
}

View File

@@ -1,256 +0,0 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::osm_parser::{ProcessedElement, ProcessedNode};
use crate::world_editor::WorldEditor;
pub fn generate_man_made(editor: &mut WorldEditor, element: &ProcessedElement, _args: &Args) {
// Skip if 'layer' or 'level' is negative in the tags
if let Some(layer) = element.tags().get("layer") {
if layer.parse::<i32>().unwrap_or(0) < 0 {
return;
}
}
if let Some(level) = element.tags().get("level") {
if level.parse::<i32>().unwrap_or(0) < 0 {
return;
}
}
if let Some(man_made_type) = element.tags().get("man_made") {
match man_made_type.as_str() {
"pier" => generate_pier(editor, element),
"antenna" => generate_antenna(editor, element),
"chimney" => generate_chimney(editor, element),
"water_well" => generate_water_well(editor, element),
"water_tower" => generate_water_tower(editor, element),
"mast" => generate_antenna(editor, element),
_ => {} // Unknown man_made type, ignore
}
}
}
/// Generate a pier structure with OAK_SLAB planks and OAK_LOG support pillars
fn generate_pier(editor: &mut WorldEditor, element: &ProcessedElement) {
if let ProcessedElement::Way(way) = element {
let nodes = &way.nodes;
if nodes.len() < 2 {
return;
}
// Extract pier dimensions from tags
let pier_width = element
.tags()
.get("width")
.and_then(|w| w.parse::<i32>().ok())
.unwrap_or(3); // Default 3 blocks wide
let pier_height = 1; // Pier deck height above ground
let support_spacing = 4; // Support pillars every 4 blocks
// Generate the pier walkway using the Bresenham line algorithm
for i in 0..nodes.len() - 1 {
let start_node = &nodes[i];
let end_node = &nodes[i + 1];
let line_points =
bresenham_line(start_node.x, 0, start_node.z, end_node.x, 0, end_node.z);
for (index, (center_x, _y, center_z)) in line_points.iter().enumerate() {
// Create the pier deck (pier_width blocks wide, 3 by default)
let half_width = pier_width / 2;
for x in (center_x - half_width)..=(center_x + half_width) {
for z in (center_z - half_width)..=(center_z + half_width) {
editor.set_block(OAK_SLAB, x, pier_height, z, None, None);
}
}
// Add support pillars every few blocks
if index % support_spacing == 0 {
let half_width = pier_width / 2;
// Place support pillars at the edges of the pier
let support_positions = [
(center_x - half_width, center_z), // Left side
(center_x + half_width, center_z), // Right side
];
for (pillar_x, pillar_z) in support_positions {
// Support pillars going down from pier level
editor.set_block(OAK_LOG, pillar_x, 0, *pillar_z, None, None);
}
}
}
}
}
}
/// Generate an antenna/radio tower
fn generate_antenna(editor: &mut WorldEditor, element: &ProcessedElement) {
if let Some(first_node) = element.nodes().next() {
let x = first_node.x;
let z = first_node.z;
// Extract antenna configuration from tags
let height = match element.tags().get("height") {
Some(h) => h.parse::<i32>().unwrap_or(20).min(40), // Max 40 blocks
None => match element.tags().get("tower:type").map(|s| s.as_str()) {
Some("communication") => 20,
Some("cellular") => 15,
_ => 20,
},
};
// Build the main tower pole
editor.set_block(IRON_BLOCK, x, 3, z, None, None);
for y in 4..height {
editor.set_block(IRON_BARS, x, y, z, None, None);
}
// Add structural supports every 7 blocks
for y in (7..height).step_by(7) {
editor.set_block(IRON_BLOCK, x, y, z, Some(&[IRON_BARS]), None);
let support_positions = [(1, 0), (-1, 0), (0, 1), (0, -1)];
for (dx, dz) in support_positions {
editor.set_block(IRON_BLOCK, x + dx, y, z + dz, None, None);
}
}
// Equipment housing at base
editor.fill_blocks(
GRAY_CONCRETE,
x - 1,
1,
z - 1,
x + 1,
2,
z + 1,
Some(&[GRAY_CONCRETE]),
None,
);
}
}
/// Generate a chimney structure
fn generate_chimney(editor: &mut WorldEditor, element: &ProcessedElement) {
if let Some(first_node) = element.nodes().next() {
let x = first_node.x;
let z = first_node.z;
let height = 25;
// Build 3x3 brick chimney with hole in the middle
for y in 0..height {
for dx in -1..=1 {
for dz in -1..=1 {
// Skip center block to create hole
if dx == 0 && dz == 0 {
continue;
}
editor.set_block(BRICK, x + dx, y, z + dz, None, None);
}
}
}
}
}
/// Generate a water well structure
fn generate_water_well(editor: &mut WorldEditor, element: &ProcessedElement) {
if let Some(first_node) = element.nodes().next() {
let x = first_node.x;
let z = first_node.z;
// Build stone well structure (3x3 base with water in center)
for dx in -1..=1 {
for dz in -1..=1 {
if dx == 0 && dz == 0 {
// Water in the center
editor.set_block(WATER, x, -1, z, None, None);
editor.set_block(WATER, x, 0, z, None, None);
} else {
// Stone well walls
editor.set_block(STONE_BRICKS, x + dx, 0, z + dz, None, None);
editor.set_block(STONE_BRICKS, x + dx, 1, z + dz, None, None);
}
}
}
// Add wooden well frame structure
editor.fill_blocks(OAK_LOG, x - 2, 1, z, x - 2, 4, z, None, None);
editor.fill_blocks(OAK_LOG, x + 2, 1, z, x + 2, 4, z, None, None);
// Crossbeam with pulley system
editor.set_block(OAK_SLAB, x - 1, 5, z, None, None);
editor.set_block(OAK_FENCE, x, 4, z, None, None);
editor.set_block(OAK_SLAB, x, 5, z, None, None);
editor.set_block(OAK_SLAB, x + 1, 5, z, None, None);
// Bucket hanging from center
editor.set_block(IRON_BLOCK, x, 3, z, None, None);
}
}
/// Generate a water tower structure
fn generate_water_tower(editor: &mut WorldEditor, element: &ProcessedElement) {
if let Some(first_node) = element.nodes().next() {
let x = first_node.x;
let z = first_node.z;
let tower_height = 20;
let tank_height = 6;
// Build support legs (4 corner pillars)
let leg_positions = [(-2, -2), (2, -2), (-2, 2), (2, 2)];
for (dx, dz) in leg_positions {
for y in 0..tower_height {
editor.set_block(IRON_BLOCK, x + dx, y, z + dz, None, None);
}
}
// Add cross-bracing every 5 blocks for stability
for y in (5..tower_height).step_by(5) {
// Horizontal bracing
for dx in -1..=1 {
editor.set_block(SMOOTH_STONE, x + dx, y, z - 2, None, None);
editor.set_block(SMOOTH_STONE, x + dx, y, z + 2, None, None);
}
for dz in -1..=1 {
editor.set_block(SMOOTH_STONE, x - 2, y, z + dz, None, None);
editor.set_block(SMOOTH_STONE, x + 2, y, z + dz, None, None);
}
}
// Build water tank at the top - simple rectangular tank
editor.fill_blocks(
POLISHED_ANDESITE,
x - 3,
tower_height,
z - 3,
x + 3,
tower_height + tank_height,
z + 3,
None,
None,
);
// Add polished andesite pipe going down from the tank
for y in 0..tower_height {
editor.set_block(POLISHED_ANDESITE, x, y, z, None, None);
}
}
}
/// Generate man_made structures for node elements
pub fn generate_man_made_nodes(editor: &mut WorldEditor, node: &ProcessedNode) {
if let Some(man_made_type) = node.tags.get("man_made") {
let element = ProcessedElement::Node(node.clone());
match man_made_type.as_str() {
"antenna" => generate_antenna(editor, &element),
"chimney" => generate_chimney(editor, &element),
"water_well" => generate_water_well(editor, &element),
"water_tower" => generate_water_tower(editor, &element),
"mast" => generate_antenna(editor, &element),
_ => {} // Unknown man_made type, ignore
}
}
}

View File

@@ -1,16 +0,0 @@
pub mod amenities;
pub mod barriers;
pub mod bridges;
pub mod buildings;
pub mod doors;
pub mod highways;
pub mod landuse;
pub mod leisure;
pub mod man_made;
pub mod natural;
pub mod railways;
pub mod subprocessor;
pub mod tourisms;
pub mod tree;
pub mod water_areas;
pub mod waterways;

View File

@@ -1,481 +0,0 @@
use crate::args::Args;
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::element_processing::tree::Tree;
use crate::floodfill::flood_fill_area;
use crate::osm_parser::{ProcessedElement, ProcessedMemberRole, ProcessedRelation, ProcessedWay};
use crate::world_editor::WorldEditor;
use rand::Rng;
pub fn generate_natural(editor: &mut WorldEditor, element: &ProcessedElement, args: &Args) {
if let Some(natural_type) = element.tags().get("natural") {
if natural_type == "tree" {
if let ProcessedElement::Node(node) = element {
let x: i32 = node.x;
let z: i32 = node.z;
Tree::create(editor, (x, 1, z));
}
} else {
let mut previous_node: Option<(i32, i32)> = None;
let mut corner_addup: (i32, i32, i32) = (0, 0, 0);
let mut current_natural: Vec<(i32, i32)> = vec![];
let binding: String = "".to_string();
// Determine block type based on natural tag
let block_type: Block = match natural_type.as_str() {
"scrub" | "grassland" | "wood" | "heath" | "tree_row" => GRASS_BLOCK,
"sand" | "dune" => SAND,
"beach" | "shoal" => {
let surface = element.tags().get("natural").unwrap_or(&binding);
match surface.as_str() {
"gravel" => GRAVEL,
_ => SAND,
}
}
"water" | "reef" => WATER,
"bare_rock" => STONE,
"blockfield" => COBBLESTONE,
"glacier" => PACKED_ICE,
"mud" | "wetland" => MUD,
"mountain_range" => COBBLESTONE,
"saddle" | "ridge" => STONE,
"shrubbery" | "tundra" | "hill" => GRASS_BLOCK,
"cliff" => STONE,
_ => GRASS_BLOCK,
};
let ProcessedElement::Way(way) = element else {
return;
};
// Process natural nodes to fill the area
for node in &way.nodes {
let x: i32 = node.x;
let z: i32 = node.z;
if let Some(prev) = previous_node {
// Generate the line of coordinates between the two nodes
let bresenham_points: Vec<(i32, i32, i32)> =
bresenham_line(prev.0, 0, prev.1, x, 0, z);
for (bx, _, bz) in bresenham_points {
editor.set_block(block_type, bx, 0, bz, None, None);
}
current_natural.push((x, z));
corner_addup = (corner_addup.0 + x, corner_addup.1 + z, corner_addup.2 + 1);
}
previous_node = Some((x, z));
}
// If there are natural nodes, flood-fill the area
if corner_addup != (0, 0, 0) {
let polygon_coords: Vec<(i32, i32)> = way
.nodes
.iter()
.map(|n: &crate::osm_parser::ProcessedNode| (n.x, n.z))
.collect();
let filled_area: Vec<(i32, i32)> =
flood_fill_area(&polygon_coords, args.timeout.as_ref());
let mut rng: rand::prelude::ThreadRng = rand::thread_rng();
for (x, z) in filled_area {
editor.set_block(block_type, x, 0, z, None, None);
// Generate a custom ground layer instead of dirt; the lowest level must be stone
match natural_type.as_str() {
"beach" | "sand" | "dune" | "shoal" => {
editor.set_block(SAND, x, 0, z, None, None);
}
"glacier" => {
editor.set_block(PACKED_ICE, x, 0, z, None, None);
editor.set_block(STONE, x, -1, z, None, None);
}
"bare_rock" => {
editor.set_block(STONE, x, 0, z, None, None);
}
_ => {}
}
// Generate surface elements
if editor.check_for_block(x, 0, z, Some(&[WATER])) {
continue;
}
match natural_type.as_str() {
"grassland" => {
if !editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK])) {
continue;
}
if rng.gen_bool(0.6) {
editor.set_block(GRASS, x, 1, z, None, None);
}
}
"heath" => {
if !editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK])) {
continue;
}
let random_choice = rng.gen_range(0..500);
if random_choice < 33 {
if random_choice <= 2 {
editor.set_block(COBBLESTONE, x, 0, z, None, None);
} else if random_choice < 6 {
editor.set_block(OAK_LEAVES, x, 1, z, None, None);
} else {
editor.set_block(GRASS, x, 1, z, None, None);
}
}
}
"scrub" => {
if !editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK])) {
continue;
}
let random_choice = rng.gen_range(0..500);
if random_choice == 0 {
Tree::create(editor, (x, 1, z));
} else if random_choice == 1 {
let flower_block = match rng.gen_range(1..=4) {
1 => RED_FLOWER,
2 => BLUE_FLOWER,
3 => YELLOW_FLOWER,
_ => WHITE_FLOWER,
};
editor.set_block(flower_block, x, 1, z, None, None);
} else if random_choice < 40 {
editor.set_block(OAK_LEAVES, x, 1, z, None, None);
if random_choice < 15 {
editor.set_block(OAK_LEAVES, x, 2, z, None, None);
}
} else if random_choice < 300 {
if random_choice < 250 {
editor.set_block(GRASS, x, 1, z, None, None);
} else {
editor.set_block(TALL_GRASS_BOTTOM, x, 1, z, None, None);
editor.set_block(TALL_GRASS_TOP, x, 2, z, None, None);
}
}
}
"tree_row" | "wood" => {
if !editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK])) {
continue;
}
let random_choice: i32 = rng.gen_range(0..30);
if random_choice == 0 {
Tree::create(editor, (x, 1, z));
} else if random_choice == 1 {
let flower_block = match rng.gen_range(1..=4) {
1 => RED_FLOWER,
2 => BLUE_FLOWER,
3 => YELLOW_FLOWER,
_ => WHITE_FLOWER,
};
editor.set_block(flower_block, x, 1, z, None, None);
} else if random_choice <= 12 {
editor.set_block(GRASS, x, 1, z, None, None);
}
}
"sand" => {
if editor.check_for_block(x, 0, z, Some(&[SAND]))
&& rng.gen_range(0..100) == 1
{
editor.set_block(DEAD_BUSH, x, 1, z, None, None);
}
}
"shoal" => {
if rng.gen_bool(0.05) {
editor.set_block(WATER, x, 0, z, Some(&[SAND, GRAVEL]), None);
}
}
"wetland" => {
if let Some(wetland_type) = element.tags().get("wetland") {
// Wetland without water blocks
if matches!(wetland_type.as_str(), "wet_meadow" | "fen") {
if rng.gen_bool(0.3) {
editor.set_block(GRASS_BLOCK, x, 0, z, Some(&[MUD]), None);
}
editor.set_block(GRASS, x, 1, z, None, None);
continue;
}
// All the other types of wetland
if rng.gen_bool(0.3) {
editor.set_block(
WATER,
x,
0,
z,
Some(&[MUD, GRASS_BLOCK]),
None,
);
continue;
}
if !editor.check_for_block(x, 0, z, Some(&[MUD, MOSS_BLOCK])) {
continue;
}
match wetland_type.as_str() {
"reedbed" => {
editor.set_block(TALL_GRASS_BOTTOM, x, 1, z, None, None);
editor.set_block(TALL_GRASS_TOP, x, 2, z, None, None);
}
"swamp" | "mangrove" => {
// TODO implement mangrove
let random_choice: i32 = rng.gen_range(0..40);
if random_choice == 0 {
Tree::create(editor, (x, 1, z));
} else if random_choice < 35 {
editor.set_block(GRASS, x, 1, z, None, None);
}
}
"bog" => {
if rng.gen_bool(0.2) {
editor.set_block(
MOSS_BLOCK,
x,
0,
z,
Some(&[MUD]),
None,
);
}
if rng.gen_bool(0.15) {
editor.set_block(GRASS, x, 1, z, None, None);
}
}
"tidalflat" => {
continue; // No vegetation here
}
_ => {
editor.set_block(GRASS, x, 1, z, None, None);
}
}
} else {
// Generic natural=wetland without wetland=... tag
if rng.gen_bool(0.3) {
editor.set_block(WATER, x, 0, z, Some(&[MUD]), None);
continue;
}
editor.set_block(GRASS, x, 1, z, None, None);
}
}
"mountain_range" => {
// Create block clusters instead of random placement
let cluster_chance = rng.gen_range(0..1000);
if cluster_chance < 50 {
// 5% chance to start a new cluster
let cluster_block = match rng.gen_range(0..7) {
0 => DIRT,
1 => STONE,
2 => GRAVEL,
3 => GRANITE,
4 => DIORITE,
5 => ANDESITE,
_ => GRASS_BLOCK,
};
// Generate cluster size (5-10 blocks radius)
let cluster_size = rng.gen_range(5..=10);
// Create cluster around current position
for dx in -(cluster_size as i32)..=(cluster_size as i32) {
for dz in -(cluster_size as i32)..=(cluster_size as i32) {
let cluster_x = x + dx;
let cluster_z = z + dz;
// Use distance to create more natural cluster shape
let distance = ((dx * dx + dz * dz) as f32).sqrt();
if distance <= cluster_size as f32 {
// Probability decreases with distance from center
let place_prob = 1.0 - (distance / cluster_size as f32);
if rng.gen::<f32>() < place_prob {
editor.set_block(
cluster_block,
cluster_x,
0,
cluster_z,
None,
None,
);
// Add vegetation on grass blocks
if cluster_block == GRASS_BLOCK {
let vegetation_chance = rng.gen_range(0..100);
if vegetation_chance == 0 {
// 1% chance for rare trees
Tree::create(
editor,
(cluster_x, 1, cluster_z),
);
} else if vegetation_chance < 15 {
// 15% chance for grass
editor.set_block(
GRASS, cluster_x, 1, cluster_z, None,
None,
);
} else if vegetation_chance < 25 {
// 10% chance for oak leaves
editor.set_block(
OAK_LEAVES, cluster_x, 1, cluster_z,
None, None,
);
}
}
}
}
}
}
}
}
"saddle" => {
// Saddle areas - lowest point between peaks, mix of stone and grass
let terrain_chance = rng.gen_range(0..100);
if terrain_chance < 30 {
// 30% chance for exposed stone
editor.set_block(STONE, x, 0, z, None, None);
} else if terrain_chance < 50 {
// 20% chance for gravel/rocky terrain
editor.set_block(GRAVEL, x, 0, z, None, None);
} else {
// 50% chance for grass
editor.set_block(GRASS_BLOCK, x, 0, z, None, None);
if rng.gen_bool(0.4) {
// 40% chance for grass on top
editor.set_block(GRASS, x, 1, z, None, None);
}
}
}
"ridge" => {
// Ridge areas - elevated crest, mostly rocky with some vegetation
let ridge_chance = rng.gen_range(0..100);
if ridge_chance < 60 {
// 60% chance for stone/rocky terrain
let rock_type = match rng.gen_range(0..4) {
0 => STONE,
1 => COBBLESTONE,
2 => GRANITE,
_ => ANDESITE,
};
editor.set_block(rock_type, x, 0, z, None, None);
} else {
// 40% chance for grass with sparse vegetation
editor.set_block(GRASS_BLOCK, x, 0, z, None, None);
let vegetation_chance = rng.gen_range(0..100);
if vegetation_chance < 20 {
// 20% chance for grass
editor.set_block(GRASS, x, 1, z, None, None);
} else if vegetation_chance < 25 {
// 5% chance for small shrubs
editor.set_block(OAK_LEAVES, x, 1, z, None, None);
}
}
}
"shrubbery" => {
// Manicured shrubs and decorative vegetation
editor.set_block(OAK_LEAVES, x, 1, z, None, None);
editor.set_block(OAK_LEAVES, x, 2, z, None, None);
}
"tundra" => {
// Treeless habitat with low vegetation, mosses, lichens
if !editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK])) {
continue;
}
let tundra_chance = rng.gen_range(0..100);
if tundra_chance < 40 {
// 40% chance for grass (sedges, grasses)
editor.set_block(GRASS, x, 1, z, None, None);
} else if tundra_chance < 60 {
// 20% chance for moss
editor.set_block(MOSS_BLOCK, x, 0, z, Some(&[GRASS_BLOCK]), None);
} else if tundra_chance < 70 {
// 10% chance for dead bush (lichens)
editor.set_block(DEAD_BUSH, x, 1, z, None, None);
}
// 30% chance for bare ground (no surface block)
}
"cliff" => {
// Cliff areas - predominantly stone with minimal vegetation
let cliff_chance = rng.gen_range(0..100);
if cliff_chance < 90 {
// 90% chance for stone variants
let stone_type = match rng.gen_range(0..4) {
0 => STONE,
1 => COBBLESTONE,
2 => ANDESITE,
_ => DIORITE,
};
editor.set_block(stone_type, x, 0, z, None, None);
} else {
// 10% chance for gravel/loose rock
editor.set_block(GRAVEL, x, 0, z, None, None);
}
}
"hill" => {
// Hill areas - elevated terrain with sparse trees and mostly grass
if !editor.check_for_block(x, 0, z, Some(&[GRASS_BLOCK])) {
continue;
}
let hill_chance = rng.gen_range(0..1000);
if hill_chance == 0 {
// 0.1% chance for rare trees
Tree::create(editor, (x, 1, z));
} else if hill_chance < 50 {
// 5% chance for flowers
let flower_block = match rng.gen_range(1..=4) {
1 => RED_FLOWER,
2 => BLUE_FLOWER,
3 => YELLOW_FLOWER,
_ => WHITE_FLOWER,
};
editor.set_block(flower_block, x, 1, z, None, None);
} else if hill_chance < 600 {
// 55% chance for grass
editor.set_block(GRASS, x, 1, z, None, None);
} else if hill_chance < 650 {
// 5% chance for tall grass
editor.set_block(TALL_GRASS_BOTTOM, x, 1, z, None, None);
editor.set_block(TALL_GRASS_TOP, x, 2, z, None, None);
}
// 35% chance for bare grass block
}
_ => {}
}
}
}
}
}
}
pub fn generate_natural_from_relation(
editor: &mut WorldEditor,
rel: &ProcessedRelation,
args: &Args,
) {
if rel.tags.contains_key("natural") {
// Generate individual ways with their original tags
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
generate_natural(editor, &ProcessedElement::Way(member.way.clone()), args);
}
}
// Combine all outer ways into one with relation tags
let mut combined_nodes = Vec::new();
for member in &rel.members {
if member.role == ProcessedMemberRole::Outer {
combined_nodes.extend(member.way.nodes.clone());
}
}
// Only process if we have nodes
if !combined_nodes.is_empty() {
// Create combined way with relation tags
let combined_way = ProcessedWay {
id: rel.id,
nodes: combined_nodes,
tags: rel.tags.clone(),
};
// Generate natural area from combined way
generate_natural(editor, &ProcessedElement::Way(combined_way), args);
}
}
}

View File

@@ -1,244 +0,0 @@
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::osm_parser::ProcessedWay;
use crate::world_editor::WorldEditor;
pub fn generate_railways(editor: &mut WorldEditor, element: &ProcessedWay) {
if let Some(railway_type) = element.tags.get("railway") {
if [
"proposed",
"abandoned",
"subway",
"construction",
"razed",
"turntable",
]
.contains(&railway_type.as_str())
{
return;
}
if let Some(subway) = element.tags.get("subway") {
if subway == "yes" {
return;
}
}
if let Some(tunnel) = element.tags.get("tunnel") {
if tunnel == "yes" {
return;
}
}
for i in 1..element.nodes.len() {
let prev_node = element.nodes[i - 1].xz();
let cur_node = element.nodes[i].xz();
let points = bresenham_line(prev_node.x, 0, prev_node.z, cur_node.x, 0, cur_node.z);
let smoothed_points = smooth_diagonal_rails(&points);
for j in 0..smoothed_points.len() {
let (bx, _, bz) = smoothed_points[j];
editor.set_block(GRAVEL, bx, 0, bz, None, None);
let prev = if j > 0 {
Some(smoothed_points[j - 1])
} else {
None
};
let next = if j < smoothed_points.len() - 1 {
Some(smoothed_points[j + 1])
} else {
None
};
let rail_block = determine_rail_direction(
(bx, bz),
prev.map(|(x, _, z)| (x, z)),
next.map(|(x, _, z)| (x, z)),
);
editor.set_block(rail_block, bx, 1, bz, None, None);
if bx % 4 == 0 {
editor.set_block(OAK_LOG, bx, 0, bz, None, None);
}
}
}
}
}
fn smooth_diagonal_rails(points: &[(i32, i32, i32)]) -> Vec<(i32, i32, i32)> {
let mut smoothed = Vec::new();
for i in 0..points.len() {
let current = points[i];
smoothed.push(current);
if i + 1 >= points.len() {
continue;
}
let next = points[i + 1];
let (x1, y1, z1) = current;
let (x2, _, z2) = next;
// If points are diagonally adjacent
if (x2 - x1).abs() == 1 && (z2 - z1).abs() == 1 {
// Look ahead to determine best intermediate point
let look_ahead = if i + 2 < points.len() {
Some(points[i + 2])
} else {
None
};
// Look behind
let look_behind = if i > 0 { Some(points[i - 1]) } else { None };
// Choose intermediate point based on the overall curve direction
let intermediate = if let Some((prev_x, _, _prev_z)) = look_behind {
if prev_x == x1 {
// Coming from vertical, keep x constant
(x1, y1, z2)
} else {
// Coming from horizontal, keep z constant
(x2, y1, z1)
}
} else if let Some((next_x, _, _next_z)) = look_ahead {
if next_x == x2 {
// Going to vertical, keep x constant
(x2, y1, z1)
} else {
// Going to horizontal, keep z constant
(x1, y1, z2)
}
} else {
// Default to horizontal first if no context
(x2, y1, z1)
};
smoothed.push(intermediate);
}
}
smoothed
}
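// Worked example (illustrative): a diagonal step [(0, 0, 0), (1, 0, 1)] with no
// surrounding context becomes [(0, 0, 0), (1, 0, 0), (1, 0, 1)]: the horizontal
// move is taken first, so the track never has to span a true diagonal, which
// vanilla rails cannot represent.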
fn determine_rail_direction(
current: (i32, i32),
prev: Option<(i32, i32)>,
next: Option<(i32, i32)>,
) -> Block {
let (x, z) = current;
match (prev, next) {
(Some((px, pz)), Some((nx, nz))) => {
if px == nx {
RAIL_NORTH_SOUTH
} else if pz == nz {
RAIL_EAST_WEST
} else {
// Calculate relative movements
let from_prev = (px - x, pz - z);
let to_next = (nx - x, nz - z);
match (from_prev, to_next) {
// East to North or North to East
((-1, 0), (0, -1)) | ((0, -1), (-1, 0)) => RAIL_NORTH_WEST,
// West to North or North to West
((1, 0), (0, -1)) | ((0, -1), (1, 0)) => RAIL_NORTH_EAST,
// East to South or South to East
((-1, 0), (0, 1)) | ((0, 1), (-1, 0)) => RAIL_SOUTH_WEST,
// West to South or South to West
((1, 0), (0, 1)) | ((0, 1), (1, 0)) => RAIL_SOUTH_EAST,
_ => {
if (px - x).abs() > (pz - z).abs() {
RAIL_EAST_WEST
} else {
RAIL_NORTH_SOUTH
}
}
}
}
}
(Some((px, pz)), None) | (None, Some((px, pz))) => {
if px == x {
RAIL_NORTH_SOUTH
} else if pz == z {
RAIL_EAST_WEST
} else {
RAIL_NORTH_SOUTH
}
}
(None, None) => RAIL_NORTH_SOUTH,
}
}
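// Worked example (illustrative): for current = (5, 0) with prev = (4, 0) and
// next = (5, 1), the relative moves are (-1, 0) and (0, 1), which matches the
// "east to south" arm and yields RAIL_SOUTH_WEST; if prev and next share an x or
// z coordinate, the straight north-south or east-west rail is chosen instead.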
pub fn generate_roller_coaster(editor: &mut WorldEditor, element: &ProcessedWay) {
if let Some(roller_coaster) = element.tags.get("roller_coaster") {
if roller_coaster == "track" {
// Check if it's indoor (skip if yes)
if let Some(indoor) = element.tags.get("indoor") {
if indoor == "yes" {
return;
}
}
// Check if layer is negative (skip if yes)
if let Some(layer) = element.tags.get("layer") {
if let Ok(layer_value) = layer.parse::<i32>() {
if layer_value < 0 {
return;
}
}
}
let elevation_height = 4; // 4 blocks in the air
let pillar_interval = 6; // Support pillars every 6 blocks
for i in 1..element.nodes.len() {
let prev_node = element.nodes[i - 1].xz();
let cur_node = element.nodes[i].xz();
let points = bresenham_line(prev_node.x, 0, prev_node.z, cur_node.x, 0, cur_node.z);
let smoothed_points = smooth_diagonal_rails(&points);
for j in 0..smoothed_points.len() {
let (bx, _, bz) = smoothed_points[j];
// Place track foundation at elevation height
editor.set_block(IRON_BLOCK, bx, elevation_height, bz, None, None);
let prev = if j > 0 {
Some(smoothed_points[j - 1])
} else {
None
};
let next = if j < smoothed_points.len() - 1 {
Some(smoothed_points[j + 1])
} else {
None
};
let rail_block = determine_rail_direction(
(bx, bz),
prev.map(|(x, _, z)| (x, z)),
next.map(|(x, _, z)| (x, z)),
);
// Place rail on top of the foundation
editor.set_block(rail_block, bx, elevation_height + 1, bz, None, None);
// Place support pillars every pillar_interval blocks
if bx % pillar_interval == 0 && bz % pillar_interval == 0 {
// Create a pillar from ground level up to the track
for y in 1..elevation_height {
editor.set_block(IRON_BLOCK, bx, y, bz, None, None);
}
}
}
}
}
}
}

View File

@@ -1,303 +0,0 @@
use crate::block_definitions::*;
use crate::world_editor::WorldEditor;
use std::collections::HashSet;
/// Interior layout for building ground floors (1st layer above floor)
#[rustfmt::skip]
const INTERIOR1_LAYER1: [[char; 23]; 23] = [
['1', 'U', ' ', 'W', 'C', ' ', ' ', ' ', 'S', 'S', 'W', 'B', 'T', 'T', 'B', 'W', '7', '8', ' ', ' ', ' ', ' ', 'W',],
['2', ' ', ' ', 'W', 'F', ' ', ' ', ' ', 'U', 'U', 'W', 'B', 'T', 'T', 'B', 'W', '7', '8', ' ', ' ', ' ', 'B', 'W',],
[' ', ' ', ' ', 'W', 'F', ' ', ' ', ' ', ' ', ' ', 'W', 'B', 'T', 'T', 'B', 'W', 'W', 'W', 'D', 'W', 'W', 'W', 'W',],
['W', 'W', 'D', 'W', 'L', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'A', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'D', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'W', 'W', 'W', 'D', 'W', 'W', 'W', 'W', 'D', 'W', 'W', ' ', ' ', 'D',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'B', 'B', 'B', ' ', ' ', 'J', 'W', ' ', ' ', ' ', 'B', 'W', 'W', 'W',],
['W', 'W', 'W', 'W', 'D', 'W', ' ', ' ', 'W', 'T', 'S', 'S', 'T', ' ', ' ', 'W', 'S', 'S', ' ', 'B', 'W', 'W', 'W',],
[' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'W', 'T', 'T', 'T', 'T', ' ', ' ', 'W', 'U', 'U', ' ', 'B', 'W', ' ', ' ',],
[' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'D', 'T', 'T', 'T', 'T', ' ', 'B', 'W', ' ', ' ', ' ', 'B', 'W', ' ', ' ',],
['L', ' ', 'A', 'L', 'W', 'W', ' ', ' ', 'W', 'J', 'U', 'U', ' ', ' ', 'B', 'W', 'W', 'D', 'W', 'W', 'W', ' ', ' ',],
['W', 'W', 'W', 'W', 'W', 'W', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'D', 'W', 'W', ' ', ' ', 'W', 'C', 'C', 'W', 'W',],
['B', 'B', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'D', ' ', ' ', 'W', ' ', ' ', 'W', 'W',],
[' ', ' ', ' ', 'D', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'D', ' ', ' ', ' ', ' ', ' ', ' ', 'D',],
[' ', '6', ' ', 'W', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'D', 'W', 'W', 'D', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
['U', '5', ' ', 'W', ' ', ' ', 'W', 'C', 'F', 'F', ' ', ' ', 'W', ' ', ' ', 'W', 'W', 'D', 'W', 'W', ' ', ' ', 'W',],
['W', 'W', 'W', 'W', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', 'W', 'L', ' ', 'W', 'A', ' ', 'B', 'W', ' ', ' ', 'W',],
['B', ' ', ' ', ' ', ' ', ' ', 'D', ' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'W', ' ', ' ', 'B', 'W', 'J', ' ', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W', ' ', 'W', 'U', ' ', ' ', 'W', 'B', ' ', 'D',],
['J', ' ', ' ', 'C', 'B', 'B', 'W', 'L', 'F', ' ', 'W', 'F', ' ', 'W', 'L', 'W', '7', '8', ' ', 'W', 'B', ' ', 'W',],
['B', ' ', ' ', 'B', 'W', 'W', 'W', 'W', 'W', ' ', 'W', 'A', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'C', ' ', 'W',],
['B', ' ', ' ', 'B', 'W', ' ', ' ', ' ', 'D', ' ', 'W', 'C', ' ', ' ', 'W', 'W', 'B', 'B', 'B', 'B', 'W', 'D', 'W',],
['W', 'W', 'D', 'W', 'C', ' ', ' ', ' ', 'W', 'W', 'W', 'B', 'T', 'T', 'B', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
];
/// Interior layout for building ground floors (2nd layer above floor)
#[rustfmt::skip]
const INTERIOR1_LAYER2: [[char; 23]; 23] = [
[' ', 'P', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'B', ' ', ' ', 'B', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
[' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', 'P', 'P', 'W', 'B', ' ', ' ', 'B', 'W', ' ', ' ', ' ', ' ', ' ', 'B', 'W',],
[' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'B', ' ', ' ', 'B', 'W', 'W', 'W', 'D', 'W', 'W', 'W', 'W',],
['W', 'W', 'D', 'W', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'D', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'W', 'W', 'W', 'D', 'W', 'W', 'W', 'W', 'D', 'W', 'W', ' ', ' ', 'D',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'B', 'B', 'B', ' ', ' ', ' ', 'W', ' ', ' ', ' ', 'B', 'W', 'W', 'W',],
['W', 'W', 'W', 'W', 'D', 'W', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', ' ', 'B', 'W', 'W', 'W',],
[' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'P', 'P', ' ', 'B', 'W', ' ', ' ',],
[' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'D', ' ', ' ', ' ', ' ', ' ', 'B', 'W', ' ', ' ', ' ', 'B', 'W', ' ', ' ',],
[' ', ' ', ' ', ' ', 'W', 'W', ' ', ' ', 'W', ' ', 'P', 'P', ' ', ' ', 'B', 'W', 'W', 'D', 'W', 'W', 'W', ' ', ' ',],
['W', 'W', 'W', 'W', 'W', 'W', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'D', 'W', 'W', ' ', ' ', 'W', 'C', 'C', 'W', 'W',],
['B', 'B', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'D', ' ', ' ', 'W', ' ', ' ', 'W', 'W',],
[' ', ' ', ' ', 'D', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'D', ' ', ' ', ' ', ' ', ' ', ' ', 'D',],
[' ', ' ', ' ', 'W', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'D', 'W', 'W', 'D', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
['P', ' ', ' ', 'W', ' ', ' ', 'W', 'N', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'W', 'W', 'D', 'W', 'W', ' ', ' ', 'W',],
['W', 'W', 'W', 'W', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'W', ' ', ' ', 'B', 'W', ' ', ' ', 'W',],
['B', ' ', ' ', ' ', ' ', ' ', 'D', ' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'W', ' ', ' ', 'C', 'W', ' ', ' ', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W', ' ', 'W', 'P', ' ', ' ', 'W', 'B', ' ', 'D',],
[' ', ' ', ' ', ' ', 'B', 'B', 'W', ' ', ' ', ' ', 'W', ' ', ' ', 'W', 'P', 'W', ' ', ' ', ' ', 'W', 'B', ' ', 'W',],
['B', ' ', ' ', 'B', 'W', 'W', 'W', 'W', 'W', ' ', 'W', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'W', ' ', ' ', 'W',],
['B', ' ', ' ', 'B', 'W', ' ', ' ', ' ', 'D', ' ', 'W', 'N', ' ', ' ', 'W', 'W', 'B', 'B', 'B', 'B', 'W', 'D', 'W',],
['W', 'W', 'D', 'W', ' ', ' ', ' ', ' ', 'W', 'W', 'W', 'B', ' ', ' ', 'B', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
];
/// Interior layout for building upper floors (1st layer above floor)
#[rustfmt::skip]
const INTERIOR2_LAYER1: [[char; 23]; 23] = [
['W', 'W', 'W', 'D', 'W', 'W', 'W', 'W', 'W', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'D', 'W', 'W', 'W',],
['U', ' ', ' ', ' ', ' ', ' ', 'C', 'W', 'L', ' ', ' ', 'L', 'W', 'A', 'A', 'W', ' ', ' ', ' ', ' ', ' ', 'L', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
[' ', ' ', 'W', 'W', 'W', ' ', ' ', 'W', ' ', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', 'S', 'S', 'S', ' ', 'W',],
[' ', ' ', 'W', 'F', ' ', ' ', ' ', 'W', 'C', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'J', ' ', 'U', 'U', 'U', ' ', 'D',],
['U', ' ', 'W', 'F', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W',],
['U', ' ', 'W', 'F', ' ', ' ', ' ', 'D', ' ', ' ', 'T', 'T', 'W', ' ', ' ', ' ', ' ', ' ', 'U', 'W', ' ', 'L', 'W',],
[' ', ' ', 'W', 'W', 'W', ' ', ' ', 'W', ' ', ' ', 'T', 'J', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'D', 'W', 'W', 'W', ' ', ' ', 'W', 'L', ' ', 'W',],
['J', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'C', ' ', ' ', ' ', 'B', 'W', ' ', ' ', 'W', ' ', ' ', 'W',],
['W', 'W', 'W', 'W', 'W', 'L', ' ', ' ', ' ', ' ', 'W', 'C', ' ', ' ', ' ', 'B', 'W', ' ', ' ', 'W', 'W', 'D', 'W',],
[' ', 'A', 'B', 'B', 'W', 'W', 'W', 'W', ' ', ' ', 'W', ' ', ' ', ' ', ' ', 'B', 'W', ' ', ' ', ' ', ' ', ' ', 'W',],
[' ', ' ', ' ', 'B', 'W', 'L', ' ', ' ', ' ', ' ', 'W', 'L', ' ', ' ', 'B', 'W', 'W', 'B', 'B', 'W', ' ', ' ', 'W',],
[' ', ' ', ' ', 'B', 'W', ' ', ' ', ' ', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', ' ', ' ', 'D',],
[' ', ' ', ' ', ' ', 'D', ' ', ' ', 'U', ' ', ' ', ' ', 'D', ' ', ' ', 'F', 'F', 'W', 'A', 'A', 'W', ' ', ' ', 'W',],
[' ', ' ', ' ', ' ', 'W', ' ', ' ', 'U', ' ', ' ', 'W', 'W', ' ', ' ', ' ', ' ', 'C', ' ', ' ', 'W', ' ', ' ', 'W',],
['C', ' ', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', ' ', ' ', ' ', ' ', 'L', ' ', ' ', 'W', 'W', 'D', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'D', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
['L', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'L', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
['W', 'W', 'W', 'W', 'W', 'W', ' ', ' ', 'U', 'U', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', ' ', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'U', 'U', ' ', 'W', 'B', ' ', 'U', 'U', 'B', ' ', ' ', ' ', ' ', ' ', 'W',],
['S', 'S', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'B', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'B', ' ', 'W',],
['U', 'U', ' ', ' ', ' ', 'L', 'B', 'B', 'B', ' ', ' ', 'W', 'B', 'B', 'B', 'B', 'B', 'B', 'B', ' ', 'B', 'D', 'W',],
];
/// Interior layout for building upper floors (2nd layer above floor)
#[rustfmt::skip]
const INTERIOR2_LAYER2: [[char; 23]; 23] = [
['W', 'W', 'W', 'D', 'W', 'W', 'W', 'W', 'W', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'D', 'W', 'W', 'W',],
['P', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'E', ' ', ' ', 'E', 'W', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', 'E', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
[' ', ' ', 'W', 'W', 'W', ' ', ' ', 'W', ' ', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
[' ', ' ', 'W', 'F', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'P', 'P', 'P', ' ', 'D',],
['P', ' ', 'W', 'F', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W',],
['P', ' ', 'W', 'F', ' ', ' ', ' ', 'D', ' ', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', 'P', 'W', ' ', 'P', 'W',],
[' ', ' ', 'W', 'W', 'W', ' ', ' ', 'W', ' ', ' ', ' ', ' ', 'W', ' ', ' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'D', 'W', 'W', 'W', ' ', ' ', 'W', ' ', ' ', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'P', ' ', ' ', ' ', 'B', 'W', ' ', ' ', 'W', ' ', ' ', 'W',],
['W', 'W', 'W', 'W', 'W', 'E', ' ', ' ', ' ', ' ', 'W', 'P', ' ', ' ', ' ', 'B', 'W', ' ', ' ', 'W', 'W', 'D', 'W',],
[' ', ' ', 'B', 'B', 'W', 'W', 'W', 'W', ' ', ' ', 'W', ' ', ' ', ' ', ' ', 'B', 'W', ' ', ' ', ' ', ' ', ' ', 'W',],
[' ', ' ', ' ', 'B', 'W', 'E', ' ', ' ', ' ', ' ', 'W', 'E', ' ', ' ', 'B', 'W', 'W', 'B', 'B', 'W', ' ', ' ', 'W',],
[' ', ' ', ' ', 'B', 'W', ' ', ' ', ' ', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', ' ', ' ', 'D',],
[' ', ' ', ' ', ' ', 'D', ' ', ' ', 'P', ' ', ' ', ' ', 'D', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'W', ' ', ' ', 'W',],
[' ', ' ', ' ', ' ', 'W', ' ', ' ', 'P', ' ', ' ', 'W', 'W', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', ' ', ' ', 'W',],
[' ', ' ', ' ', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', ' ', ' ', ' ', ' ', 'E', ' ', ' ', 'W', 'W', 'D', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'D', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
['E', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'E', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W',],
['W', 'W', 'W', 'W', 'W', 'W', ' ', ' ', 'P', 'P', ' ', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', 'W', ' ', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'P', 'P', ' ', 'W', 'B', ' ', 'P', 'P', 'B', ' ', ' ', ' ', ' ', ' ', 'W',],
[' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'W', 'B', ' ', ' ', ' ', ' ', ' ', ' ', ' ', 'B', ' ', 'W',],
['P', 'P', ' ', ' ', ' ', 'E', 'B', 'B', 'B', ' ', ' ', 'W', 'B', 'B', 'B', 'B', 'B', 'B', 'B', ' ', 'B', ' ', 'D',],
];
/// Maps interior layout characters to actual block types for different floor layers
#[inline(always)]
pub fn get_interior_block(c: char, is_layer2: bool, wall_block: Block) -> Option<Block> {
match c {
' ' => None, // Nothing
'W' => Some(wall_block), // Use the building's wall block for interior walls
'U' => Some(OAK_FENCE), // Oak Fence
'S' => Some(OAK_STAIRS), // Oak Stairs
'B' => Some(BOOKSHELF), // Bookshelf
'C' => Some(CRAFTING_TABLE), // Crafting Table
'F' => Some(FURNACE), // Furnace
'1' => Some(RED_BED_NORTH_HEAD), // Bed North Head
'2' => Some(RED_BED_NORTH_FOOT), // Bed North Foot
'3' => Some(RED_BED_EAST_HEAD), // Bed East Head
'4' => Some(RED_BED_EAST_FOOT), // Bed East Foot
'5' => Some(RED_BED_SOUTH_HEAD), // Bed South Head
'6' => Some(RED_BED_SOUTH_FOOT), // Bed South Foot
'7' => Some(RED_BED_WEST_HEAD), // Bed West Head
'8' => Some(RED_BED_WEST_FOOT), // Bed West Foot
// 'H' => Some(CHEST), // Chest
'L' => Some(CAULDRON), // Cauldron
'A' => Some(ANVIL), // Anvil
'P' => Some(OAK_PRESSURE_PLATE), // Pressure Plate
'D' => {
// Use different door types for different layers
if is_layer2 {
Some(DARK_OAK_DOOR_UPPER)
} else {
Some(DARK_OAK_DOOR_LOWER)
}
}
'J' => Some(NOTE_BLOCK), // Note block
'G' => Some(GLOWSTONE), // Glowstone
'N' => Some(BREWING_STAND), // Brewing Stand
'T' => Some(WHITE_CARPET), // White Carpet
'E' => Some(OAK_LEAVES), // Oak Leaves
_ => None, // Default case for unknown characters
}
}
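// Usage sketch (illustrative): get_interior_block(' ', false, STONE_BRICKS)
// yields None (nothing placed), 'W' yields Some(STONE_BRICKS) so interior walls
// match the building's own wall material, and 'D' yields the lower door half on
// layer 1 and the upper half on layer 2.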
/// Generates interior layouts inside buildings at each floor level
#[allow(clippy::too_many_arguments)]
pub fn generate_building_interior(
editor: &mut WorldEditor,
floor_area: &[(i32, i32)],
min_x: i32,
min_z: i32,
max_x: i32,
max_z: i32,
start_y_offset: i32,
building_height: i32,
wall_block: Block,
floor_levels: &[i32],
args: &crate::args::Args,
element: &crate::osm_parser::ProcessedWay,
abs_terrain_offset: i32,
) {
// Skip interior generation for very small buildings
let width = max_x - min_x + 1;
let depth = max_z - min_z + 1;
if width < 8 || depth < 8 {
return; // Building too small for interior
}
// For efficiency, create a HashSet of floor area coordinates
let floor_area_set: HashSet<(i32, i32)> = floor_area.iter().cloned().collect();
// Add buffer around edges to avoid placing furniture too close to walls
let buffer = 2;
let interior_min_x = min_x + buffer;
let interior_min_z = min_z + buffer;
let interior_max_x = max_x - buffer;
let interior_max_z = max_z - buffer;
// Generate interiors for each floor
for (floor_index, &floor_y) in floor_levels.iter().enumerate() {
// Store wall and door positions for this floor to extend them to the ceiling
let mut wall_positions = Vec::new();
let mut door_positions = Vec::new();
// Determine the floor extension height (ceiling) - either next floor or roof
let current_floor_ceiling = if floor_index < floor_levels.len() - 1 {
// For intermediate floors, extend walls up to just below the next floor
floor_levels[floor_index + 1] - 1
} else {
// Last floor ceiling depends on roof generation
if args.roof
&& element.tags.contains_key("roof:shape")
&& element.tags.get("roof:shape").unwrap() != "flat"
{
// When roof generation is enabled with non-flat roofs, stop at building height (no extra ceiling)
start_y_offset + building_height
} else {
// When roof generation is disabled or flat roof, extend to building top + 1 (includes ceiling)
start_y_offset + building_height + 1
}
};
// Choose the appropriate interior pattern based on floor number
let (layer1, layer2) = if floor_index == 0 {
// Ground floor uses INTERIOR1 patterns
(&INTERIOR1_LAYER1, &INTERIOR1_LAYER2)
} else {
// Upper floors use INTERIOR2 patterns
(&INTERIOR2_LAYER1, &INTERIOR2_LAYER2)
};
// Get dimensions for the selected pattern
let pattern_height = layer1.len() as i32;
let pattern_width = layer1[0].len() as i32;
// Calculate Y offset - place interior 1 block above floor level consistently
let y_offset = 1;
// Create a seamless repeating pattern across the interior of this floor
for z in interior_min_z..=interior_max_z {
for x in interior_min_x..=interior_max_x {
// Skip if outside the building's floor area
if !floor_area_set.contains(&(x, z)) {
continue;
}
// Map the world coordinates to pattern coordinates using modulo
// This creates a seamless tiling effect across the entire building
// Add floor_index offset to create variation between floors
let pattern_x = ((x - interior_min_x + floor_index as i32) % pattern_width
+ pattern_width)
% pattern_width;
let pattern_z = ((z - interior_min_z + floor_index as i32) % pattern_height
+ pattern_height)
% pattern_height;
// Access the pattern arrays safely
let cell1 = layer1[pattern_z as usize][pattern_x as usize];
let cell2 = layer2[pattern_z as usize][pattern_x as usize];
// Place first layer blocks
if let Some(block) = get_interior_block(cell1, false, wall_block) {
editor.set_block_absolute(
block,
x,
floor_y + y_offset + abs_terrain_offset,
z,
None,
None,
);
// If this is a wall in layer 1, add to wall positions to extend later
if cell1 == 'W' {
wall_positions.push((x, z));
}
// If this is a door in layer 1, add to door positions to add wall above later
else if cell1 == 'D' {
door_positions.push((x, z));
}
}
// Place second layer blocks
if let Some(block) = get_interior_block(cell2, true, wall_block) {
editor.set_block_absolute(
block,
x,
floor_y + y_offset + abs_terrain_offset + 1,
z,
None,
None,
);
}
}
}
// Extend walls all the way to the next floor ceiling or roof
for (x, z) in &wall_positions {
for y in (floor_y + y_offset + 2)..=current_floor_ceiling {
editor.set_block_absolute(wall_block, *x, y + abs_terrain_offset, *z, None, None);
}
}
// Add wall blocks above doors all the way to the ceiling/next floor
for (x, z) in &door_positions {
for y in (floor_y + y_offset + 2)..=current_floor_ceiling {
editor.set_block_absolute(wall_block, *x, y + abs_terrain_offset, *z, None, None);
}
}
}
}
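
The pattern tiling above relies on a double modulo to keep the pattern index in range even if the offset arithmetic ever dips below zero. A minimal, self-contained sketch of that index math (the pattern width of 23 matches the layouts above; the sample offsets are made up):

```rust
/// Mirrors the ((v % len) + len) % len expression used for pattern_x / pattern_z.
fn wrap_index(v: i32, len: i32) -> i32 {
    ((v % len) + len) % len
}

fn main() {
    let pattern_width = 23;
    // A plain `%` would return -3 here; the double modulo wraps it back into range.
    assert_eq!(wrap_index(-3, pattern_width), 20);
    assert_eq!(wrap_index(24, pattern_width), 1);
}
```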

View File

@@ -1 +0,0 @@
pub mod buildings_interior;

View File

@@ -1,34 +0,0 @@
use crate::block_definitions::*;
use crate::osm_parser::ProcessedNode;
use crate::world_editor::WorldEditor;
pub fn generate_tourisms(editor: &mut WorldEditor, element: &ProcessedNode) {
// Skip if 'layer' or 'level' is negative in the tags
if let Some(layer) = element.tags.get("layer") {
if layer.parse::<i32>().unwrap_or(0) < 0 {
return;
}
}
if let Some(level) = element.tags.get("level") {
if level.parse::<i32>().unwrap_or(0) < 0 {
return;
}
}
if let Some(tourism_type) = element.tags.get("tourism") {
let x: i32 = element.x;
let z: i32 = element.z;
if tourism_type == "information" {
if let Some(info_type) = element.tags.get("information").map(|x: &String| x.as_str()) {
if info_type != "office" && info_type != "visitor_centre" {
// Draw an information board
// TODO draw a sign with text if provided
editor.set_block(COBBLESTONE_WALL, x, 1, z, None, None);
editor.set_block(OAK_PLANKS, x, 2, z, None, None);
}
}
}
}
}

View File

@@ -1,342 +0,0 @@
use crate::block_definitions::*;
use crate::world_editor::WorldEditor;
use rand::Rng;
type Coord = (i32, i32, i32);
// TODO all this data would probably be better suited in a TOML file or something.
/// A circular pattern around a central point.
#[rustfmt::skip]
const ROUND1_PATTERN: [Coord; 8] = [
(-2, 0, 0),
(2, 0, 0),
(0, 0, -2),
(0, 0, 2),
(-1, 0, -1),
(1, 0, 1),
(1, 0, -1),
(-1, 0, 1),
];
/// A wider circular pattern.
const ROUND2_PATTERN: [Coord; 12] = [
(3, 0, 0),
(2, 0, -1),
(2, 0, 1),
(1, 0, -2),
(1, 0, 2),
(-3, 0, 0),
(-2, 0, -1),
(-2, 0, 1),
(-1, 0, 2),
(-1, 0, -2),
(0, 0, -3),
(0, 0, 3),
];
/// A more scattered circular pattern.
const ROUND3_PATTERN: [Coord; 12] = [
(3, 0, -1),
(3, 0, 1),
(2, 0, -2),
(2, 0, 2),
(1, 0, -3),
(1, 0, 3),
(-3, 0, -1),
(-3, 0, 1),
(-2, 0, -2),
(-2, 0, 2),
(-1, 0, 3),
(-1, 0, -3),
];
/// Used for iterating over each of the round patterns
const ROUND_PATTERNS: [&[Coord]; 3] = [&ROUND1_PATTERN, &ROUND2_PATTERN, &ROUND3_PATTERN];
//////////////////////////////////////////////////
const OAK_LEAVES_FILL: [(Coord, Coord); 5] = [
((-1, 3, 0), (-1, 9, 0)),
((1, 3, 0), (1, 9, 0)),
((0, 3, -1), (0, 9, -1)),
((0, 3, 1), (0, 9, 1)),
((0, 9, 0), (0, 10, 0)),
];
const SPRUCE_LEAVES_FILL: [(Coord, Coord); 6] = [
((-1, 3, 0), (-1, 10, 0)),
((0, 3, -1), (0, 10, -1)),
((1, 3, 0), (1, 10, 0)),
((0, 3, -1), (0, 10, -1)),
((0, 3, 1), (0, 10, 1)),
((0, 11, 0), (0, 11, 0)),
];
const BIRCH_LEAVES_FILL: [(Coord, Coord); 5] = [
((-1, 2, 0), (-1, 7, 0)),
((1, 2, 0), (1, 7, 0)),
((0, 2, -1), (0, 7, -1)),
((0, 2, 1), (0, 7, 1)),
((0, 7, 0), (0, 8, 0)),
];
//////////////////////////////////////////////////
/// Helper function to set blocks in various patterns.
fn round(editor: &mut WorldEditor, material: Block, (x, y, z): Coord, block_pattern: &[Coord]) {
for (i, j, k) in block_pattern {
editor.set_block(material, x + i, y + j, z + k, None, None);
}
}
pub enum TreeType {
Oak,
Spruce,
Birch,
}
// TODO what should be moved in, and what should be referenced?
pub struct Tree<'a> {
// kind: TreeType, // NOTE: Not actually necessary to store!
log_block: Block,
log_height: i32,
leaves_block: Block,
leaves_fill: &'a [(Coord, Coord)],
round_ranges: [Vec<i32>; 3],
}
impl Tree<'_> {
pub fn create(editor: &mut WorldEditor, (x, y, z): Coord) {
let mut blacklist: Vec<Block> = Vec::new();
blacklist.extend(Self::get_building_wall_blocks());
blacklist.extend(Self::get_building_floor_blocks());
blacklist.extend(Self::get_structural_blocks());
blacklist.extend(Self::get_functional_blocks());
blacklist.push(WATER);
let mut rng = rand::thread_rng();
let tree = Self::get_tree(match rng.gen_range(1..=3) {
1 => TreeType::Oak,
2 => TreeType::Spruce,
3 => TreeType::Birch,
_ => unreachable!(),
});
// Build the logs
editor.fill_blocks(
tree.log_block,
x,
y,
z,
x,
y + tree.log_height,
z,
None,
Some(&blacklist),
);
// Fill in the leaves
for ((i1, j1, k1), (i2, j2, k2)) in tree.leaves_fill {
editor.fill_blocks(
tree.leaves_block,
x + i1,
y + j1,
z + k1,
x + i2,
y + j2,
z + k2,
None,
None,
);
}
// Do the three rounds
for (round_range, round_pattern) in tree.round_ranges.iter().zip(ROUND_PATTERNS) {
for offset in round_range {
round(editor, tree.leaves_block, (x, y + offset, z), round_pattern);
}
}
}
fn get_tree(kind: TreeType) -> Self {
match kind {
TreeType::Oak => Self {
// kind,
log_block: OAK_LOG,
log_height: 8,
leaves_block: OAK_LEAVES,
leaves_fill: &OAK_LEAVES_FILL,
round_ranges: [
(3..=8).rev().collect(),
(4..=7).rev().collect(),
(5..=6).rev().collect(),
],
},
TreeType::Spruce => Self {
// kind,
log_block: SPRUCE_LOG,
log_height: 9,
leaves_block: BIRCH_LEAVES, // TODO Is this correct?
leaves_fill: &SPRUCE_LEAVES_FILL,
// TODO can I omit the third empty vec? May cause issues with iter zip
round_ranges: [vec![9, 7, 6, 4, 3], vec![6, 3], vec![]],
},
TreeType::Birch => Self {
// kind,
log_block: BIRCH_LOG,
log_height: 6,
leaves_block: BIRCH_LEAVES,
leaves_fill: &BIRCH_LEAVES_FILL,
round_ranges: [(2..=6).rev().collect(), (2..=4).collect(), vec![]],
},
} // match
} // fn get_tree
/// Get all possible building wall blocks
fn get_building_wall_blocks() -> Vec<Block> {
vec![
BLACKSTONE,
BLACK_TERRACOTTA,
BRICK,
BROWN_CONCRETE,
BROWN_TERRACOTTA,
DEEPSLATE_BRICKS,
END_STONE_BRICKS,
GRAY_CONCRETE,
GRAY_TERRACOTTA,
LIGHT_BLUE_TERRACOTTA,
LIGHT_GRAY_CONCRETE,
MUD_BRICKS,
NETHER_BRICK,
NETHERITE_BLOCK,
POLISHED_ANDESITE,
POLISHED_BLACKSTONE,
POLISHED_BLACKSTONE_BRICKS,
POLISHED_DEEPSLATE,
POLISHED_GRANITE,
QUARTZ_BLOCK,
QUARTZ_BRICKS,
SANDSTONE,
SMOOTH_SANDSTONE,
SMOOTH_STONE,
STONE_BRICKS,
WHITE_CONCRETE,
WHITE_TERRACOTTA,
ORANGE_TERRACOTTA,
GREEN_STAINED_HARDENED_CLAY,
BLUE_TERRACOTTA,
YELLOW_TERRACOTTA,
BLACK_CONCRETE,
WHITE_CONCRETE,
GRAY_CONCRETE,
LIGHT_GRAY_CONCRETE,
BROWN_CONCRETE,
RED_CONCRETE,
ORANGE_TERRACOTTA,
YELLOW_CONCRETE,
LIME_CONCRETE,
GREEN_STAINED_HARDENED_CLAY,
CYAN_CONCRETE,
LIGHT_BLUE_CONCRETE,
BLUE_CONCRETE,
PURPLE_CONCRETE,
MAGENTA_CONCRETE,
RED_TERRACOTTA,
]
}
/// Get all possible building floor blocks
fn get_building_floor_blocks() -> Vec<Block> {
vec![
GRAY_CONCRETE,
LIGHT_GRAY_CONCRETE,
WHITE_CONCRETE,
SMOOTH_STONE,
POLISHED_ANDESITE,
STONE_BRICKS,
]
}
/// Get structural blocks (fences, walls, stairs, slabs, rails, etc.)
fn get_structural_blocks() -> Vec<Block> {
vec![
// Fences
OAK_FENCE,
// Walls
COBBLESTONE_WALL,
ANDESITE_WALL,
STONE_BRICK_WALL,
// Stairs
OAK_STAIRS,
// Slabs
OAK_SLAB,
STONE_BLOCK_SLAB,
STONE_BRICK_SLAB,
// Rails
RAIL,
RAIL_NORTH_SOUTH,
RAIL_EAST_WEST,
RAIL_ASCENDING_EAST,
RAIL_ASCENDING_WEST,
RAIL_ASCENDING_NORTH,
RAIL_ASCENDING_SOUTH,
RAIL_NORTH_EAST,
RAIL_NORTH_WEST,
RAIL_SOUTH_EAST,
RAIL_SOUTH_WEST,
// Doors and trapdoors
OAK_DOOR,
DARK_OAK_DOOR_LOWER,
DARK_OAK_DOOR_UPPER,
OAK_TRAPDOOR,
// Ladders
LADDER,
]
}
/// Get functional blocks (furniture, decorative items, etc.)
fn get_functional_blocks() -> Vec<Block> {
vec![
// Furniture and functional blocks
CHEST,
CRAFTING_TABLE,
FURNACE,
ANVIL,
BREWING_STAND,
NOTE_BLOCK,
BOOKSHELF,
CAULDRON,
// Beds
RED_BED_NORTH_HEAD,
RED_BED_NORTH_FOOT,
RED_BED_EAST_HEAD,
RED_BED_EAST_FOOT,
RED_BED_SOUTH_HEAD,
RED_BED_SOUTH_FOOT,
RED_BED_WEST_HEAD,
RED_BED_WEST_FOOT,
// Pressure plates and signs
OAK_PRESSURE_PLATE,
SIGN,
// Glass blocks (windows)
GLASS,
WHITE_STAINED_GLASS,
GRAY_STAINED_GLASS,
LIGHT_GRAY_STAINED_GLASS,
BROWN_STAINED_GLASS,
TINTED_GLASS,
// Carpets
WHITE_CARPET,
RED_CARPET,
// Other structural/building blocks
IRON_BARS,
IRON_BLOCK,
SCAFFOLDING,
BEDROCK,
]
}
} // impl Tree

View File

@@ -1,461 +0,0 @@
use geo::orient::{Direction, Orient};
use geo::{Contains, Intersects, LineString, Point, Polygon, Rect};
use std::time::Instant;
use crate::clipping::clip_water_ring_to_bbox;
use crate::{
block_definitions::WATER,
coordinate_system::cartesian::{XZBBox, XZPoint},
osm_parser::{ProcessedMemberRole, ProcessedNode, ProcessedRelation, ProcessedWay},
world_editor::WorldEditor,
};
pub fn generate_water_area_from_way(
editor: &mut WorldEditor,
element: &ProcessedWay,
_xzbbox: &XZBBox,
) {
let start_time = Instant::now();
let outers = [element.nodes.clone()];
if !verify_closed_rings(&outers) {
println!("Skipping way {} due to invalid polygon", element.id);
return;
}
generate_water_areas(editor, &outers, &[], start_time);
}
pub fn generate_water_areas_from_relation(
editor: &mut WorldEditor,
element: &ProcessedRelation,
xzbbox: &XZBBox,
) {
let start_time = Instant::now();
// Check if this is a water relation (either with water tag or natural=water)
let is_water = element.tags.contains_key("water")
|| element
.tags
.get("natural")
.map(|val| val == "water" || val == "bay")
.unwrap_or(false);
if !is_water {
return;
}
// Don't handle water below layer 0
if let Some(layer) = element.tags.get("layer") {
if layer.parse::<i32>().map(|x| x < 0).unwrap_or(false) {
return;
}
}
let mut outers: Vec<Vec<ProcessedNode>> = vec![];
let mut inners: Vec<Vec<ProcessedNode>> = vec![];
for mem in &element.members {
match mem.role {
ProcessedMemberRole::Outer => outers.push(mem.way.nodes.clone()),
ProcessedMemberRole::Inner => inners.push(mem.way.nodes.clone()),
}
}
// Preserve OSM-defined outer/inner roles without modification
merge_way_segments(&mut outers);
// Clip assembled rings to bbox (must happen after merging to preserve ring connectivity)
outers = outers
.into_iter()
.filter_map(|ring| clip_water_ring_to_bbox(&ring, xzbbox))
.collect();
merge_way_segments(&mut inners);
inners = inners
.into_iter()
.filter_map(|ring| clip_water_ring_to_bbox(&ring, xzbbox))
.collect();
if !verify_closed_rings(&outers) {
// For clipped multipolygons, some loops may not close perfectly
// Instead of force-closing with straight lines (which creates wedges),
// filter out unclosed loops and only render the properly closed ones
// Filter: Keep only loops that are already closed OR can be closed within 1 block
outers.retain(|loop_nodes| {
if loop_nodes.len() < 3 {
return false;
}
let first = &loop_nodes[0];
let last = loop_nodes.last().unwrap();
let dx = (first.x - last.x).abs();
let dz = (first.z - last.z).abs();
// Keep if already closed by ID or endpoints are within 1 block
first.id == last.id || (dx <= 1 && dz <= 1)
});
// Now close the remaining loops that are within 1 block tolerance
for loop_nodes in outers.iter_mut() {
let first = loop_nodes[0].clone();
let last_idx = loop_nodes.len() - 1;
if loop_nodes[0].id != loop_nodes[last_idx].id {
// Endpoints are close (within tolerance), close the loop
loop_nodes.push(first);
}
}
// If no valid outer loops remain, skip the relation
if outers.is_empty() {
return;
}
// Verify again after filtering and closing
if !verify_closed_rings(&outers) {
println!("Skipping relation {} due to invalid polygon", element.id);
return;
}
}
merge_way_segments(&mut inners);
if !verify_closed_rings(&inners) {
println!("Skipping relation {} due to invalid polygon", element.id);
return;
}
generate_water_areas(editor, &outers, &inners, start_time);
}
fn generate_water_areas(
editor: &mut WorldEditor,
outers: &[Vec<ProcessedNode>],
inners: &[Vec<ProcessedNode>],
start_time: Instant,
) {
// Calculate polygon bounding box to limit fill area
let mut poly_min_x = i32::MAX;
let mut poly_min_z = i32::MAX;
let mut poly_max_x = i32::MIN;
let mut poly_max_z = i32::MIN;
for outer in outers {
for node in outer {
poly_min_x = poly_min_x.min(node.x);
poly_min_z = poly_min_z.min(node.z);
poly_max_x = poly_max_x.max(node.x);
poly_max_z = poly_max_z.max(node.z);
}
}
// If no valid bounds, nothing to fill
if poly_min_x == i32::MAX || poly_max_x == i32::MIN {
return;
}
// Clamp to world bounds just in case
let (world_min_x, world_min_z) = editor.get_min_coords();
let (world_max_x, world_max_z) = editor.get_max_coords();
let min_x = poly_min_x.max(world_min_x);
let min_z = poly_min_z.max(world_min_z);
let max_x = poly_max_x.min(world_max_x);
let max_z = poly_max_z.min(world_max_z);
let outers_xz: Vec<Vec<XZPoint>> = outers
.iter()
.map(|x| x.iter().map(|y| y.xz()).collect::<Vec<_>>())
.collect();
let inners_xz: Vec<Vec<XZPoint>> = inners
.iter()
.map(|x| x.iter().map(|y| y.xz()).collect::<Vec<_>>())
.collect();
inverse_floodfill(
min_x, min_z, max_x, max_z, outers_xz, inners_xz, editor, start_time,
);
}
/// Merges way segments that share endpoints into longer chains, ideally forming closed rings.
fn merge_way_segments(rings: &mut Vec<Vec<ProcessedNode>>) {
let mut removed: Vec<usize> = vec![];
let mut merged: Vec<Vec<ProcessedNode>> = vec![];
// Match nodes by ID or proximity (handles synthetic nodes from bbox clipping)
let nodes_match = |a: &ProcessedNode, b: &ProcessedNode| -> bool {
if a.id == b.id {
return true;
}
let dx = (a.x - b.x).abs();
let dz = (a.z - b.z).abs();
dx <= 1 && dz <= 1
};
for i in 0..rings.len() {
for j in 0..rings.len() {
if i == j {
continue;
}
if removed.contains(&i) || removed.contains(&j) {
continue;
}
let x: &Vec<ProcessedNode> = &rings[i];
let y: &Vec<ProcessedNode> = &rings[j];
// Skip empty rings (can happen after clipping)
if x.is_empty() || y.is_empty() {
continue;
}
let x_first = &x[0];
let x_last = x.last().unwrap();
let y_first = &y[0];
let y_last = y.last().unwrap();
// Skip already-closed rings
if nodes_match(x_first, x_last) {
continue;
}
if nodes_match(y_first, y_last) {
continue;
}
if nodes_match(x_first, y_first) {
removed.push(i);
removed.push(j);
let mut x: Vec<ProcessedNode> = x.clone();
x.reverse();
x.extend(y.iter().skip(1).cloned());
merged.push(x);
} else if nodes_match(x_last, y_last) {
removed.push(i);
removed.push(j);
let mut x: Vec<ProcessedNode> = x.clone();
x.extend(y.iter().rev().skip(1).cloned());
merged.push(x);
} else if nodes_match(x_first, y_last) {
removed.push(i);
removed.push(j);
let mut y: Vec<ProcessedNode> = y.clone();
y.extend(x.iter().skip(1).cloned());
merged.push(y);
} else if nodes_match(x_last, y_first) {
removed.push(i);
removed.push(j);
let mut x: Vec<ProcessedNode> = x.clone();
x.extend(y.iter().skip(1).cloned());
merged.push(x);
}
}
}
removed.sort();
for r in removed.iter().rev() {
rings.remove(*r);
}
let merged_len: usize = merged.len();
for m in merged {
rings.push(m);
}
if merged_len > 0 {
merge_way_segments(rings);
}
}
/// Verifies all rings are properly closed (first node matches last).
fn verify_closed_rings(rings: &[Vec<ProcessedNode>]) -> bool {
let mut valid = true;
for ring in rings {
let first = &ring[0];
let last = ring.last().unwrap();
// Check if ring is closed (by ID or proximity)
let is_closed = first.id == last.id || {
let dx = (first.x - last.x).abs();
let dz = (first.z - last.z).abs();
dx <= 1 && dz <= 1
};
if !is_closed {
eprintln!("WARN: Disconnected ring");
valid = false;
}
}
valid
}
// Water areas can be huge, so flood filling the whole polygon block by block is impractical.
// Instead, we work over the area's bounding box: regions that lie entirely inside the
// polygon are filled wholesale, and only small regions are checked block by block.
#[allow(clippy::too_many_arguments)]
fn inverse_floodfill(
min_x: i32,
min_z: i32,
max_x: i32,
max_z: i32,
outers: Vec<Vec<XZPoint>>,
inners: Vec<Vec<XZPoint>>,
editor: &mut WorldEditor,
start_time: Instant,
) {
// Convert to geo Polygons with normalized winding order
let inners: Vec<_> = inners
.into_iter()
.map(|x| {
Polygon::new(
LineString::from(
x.iter()
.map(|pt| (pt.x as f64, pt.z as f64))
.collect::<Vec<_>>(),
),
vec![],
)
.orient(Direction::Default)
})
.collect();
let outers: Vec<_> = outers
.into_iter()
.map(|x| {
Polygon::new(
LineString::from(
x.iter()
.map(|pt| (pt.x as f64, pt.z as f64))
.collect::<Vec<_>>(),
),
vec![],
)
.orient(Direction::Default)
})
.collect();
inverse_floodfill_recursive(
(min_x, min_z),
(max_x, max_z),
&outers,
&inners,
editor,
start_time,
);
}
fn inverse_floodfill_recursive(
min: (i32, i32),
max: (i32, i32),
outers: &[Polygon],
inners: &[Polygon],
editor: &mut WorldEditor,
start_time: Instant,
) {
// Check if we've exceeded 25 seconds
if start_time.elapsed().as_secs() > 25 {
println!("Water area generation exceeded 25 seconds, continuing anyway");
}
const ITERATIVE_THRES: i64 = 10_000;
if min.0 > max.0 || min.1 > max.1 {
return;
}
// Multiply as i64 to avoid i32 overflow; with wrapping arithmetic in release builds,
// an overflow here could send the rest of this code into an infinite loop.
if ((max.0 - min.0) as i64) * ((max.1 - min.1) as i64) < ITERATIVE_THRES {
inverse_floodfill_iterative(min, max, 0, outers, inners, editor);
return;
}
let center_x: i32 = (min.0 + max.0) / 2;
let center_z: i32 = (min.1 + max.1) / 2;
let quadrants: [(i32, i32, i32, i32); 4] = [
(min.0, center_x, min.1, center_z),
(center_x, max.0, min.1, center_z),
(min.0, center_x, center_z, max.1),
(center_x, max.0, center_z, max.1),
];
for (min_x, max_x, min_z, max_z) in quadrants {
let rect: Rect = Rect::new(
Point::new(min_x as f64, min_z as f64),
Point::new(max_x as f64, max_z as f64),
);
if outers.iter().any(|outer: &Polygon| outer.contains(&rect))
&& !inners.iter().any(|inner: &Polygon| inner.intersects(&rect))
{
rect_fill(min_x, max_x, min_z, max_z, 0, editor);
continue;
}
let outers_intersects: Vec<_> = outers
.iter()
.filter(|poly| poly.intersects(&rect))
.cloned()
.collect();
let inners_intersects: Vec<_> = inners
.iter()
.filter(|poly| poly.intersects(&rect))
.cloned()
.collect();
if !outers_intersects.is_empty() {
inverse_floodfill_recursive(
(min_x, min_z),
(max_x, max_z),
&outers_intersects,
&inners_intersects,
editor,
start_time,
);
}
}
}
// once we "zoom in" enough, it's more efficient to switch to iteration
fn inverse_floodfill_iterative(
min: (i32, i32),
max: (i32, i32),
ground_level: i32,
outers: &[Polygon],
inners: &[Polygon],
editor: &mut WorldEditor,
) {
for x in min.0..max.0 {
for z in min.1..max.1 {
let p: Point = Point::new(x as f64, z as f64);
if outers.iter().any(|poly: &Polygon| poly.contains(&p))
&& inners.iter().all(|poly: &Polygon| !poly.contains(&p))
{
editor.set_block(WATER, x, ground_level, z, None, None);
}
}
}
}
fn rect_fill(
min_x: i32,
max_x: i32,
min_z: i32,
max_z: i32,
ground_level: i32,
editor: &mut WorldEditor,
) {
for x in min_x..max_x {
for z in min_z..max_z {
editor.set_block(WATER, x, ground_level, z, None, None);
}
}
}
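
The recursive pass above splits each region into quadrants and either fills a quadrant wholesale or keeps subdividing, based on two geometric tests. A minimal sketch of those tests using the same geo calls (the coordinates are arbitrary illustrative values):

```rust
use geo::{Contains, Intersects, LineString, Point, Polygon, Rect};

fn main() {
    // A 10x10 square "water" polygon.
    let outer = Polygon::new(
        LineString::from(vec![
            (0.0, 0.0),
            (10.0, 0.0),
            (10.0, 10.0),
            (0.0, 10.0),
            (0.0, 0.0),
        ]),
        vec![],
    );
    // Fully inside the outer ring: this quadrant could be rect-filled in one go.
    let inside = Rect::new(Point::new(2.0, 2.0), Point::new(4.0, 4.0));
    // Straddles the boundary: this quadrant would have to be subdivided further.
    let crossing = Rect::new(Point::new(8.0, 8.0), Point::new(12.0, 12.0));
    assert!(outer.contains(&inside));
    assert!(outer.intersects(&crossing) && !outer.contains(&crossing));
}
```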

View File

@@ -1,115 +0,0 @@
use crate::block_definitions::*;
use crate::bresenham::bresenham_line;
use crate::osm_parser::ProcessedWay;
use crate::world_editor::WorldEditor;
pub fn generate_waterways(editor: &mut WorldEditor, element: &ProcessedWay) {
if let Some(waterway_type) = element.tags.get("waterway") {
let (mut waterway_width, waterway_depth) = get_waterway_dimensions(waterway_type);
// Check for custom width in tags
if let Some(width_str) = element.tags.get("width") {
waterway_width = width_str.parse::<i32>().unwrap_or_else(|_| {
width_str
.parse::<f32>()
.map(|f: f32| f as i32)
.unwrap_or(waterway_width)
});
}
// Skip layers below the ground level
if matches!(
element.tags.get("layer").map(|s| s.as_str()),
Some("-1") | Some("-2") | Some("-3")
) {
return;
}
// Process consecutive node pairs to create waterways
// Use windows(2) to avoid connecting last node back to first
for nodes_pair in element.nodes.windows(2) {
let prev_node = nodes_pair[0].xz();
let current_node = nodes_pair[1].xz();
// Draw a line between the current and previous node
let bresenham_points: Vec<(i32, i32, i32)> = bresenham_line(
prev_node.x,
0,
prev_node.z,
current_node.x,
0,
current_node.z,
);
for (bx, _, bz) in bresenham_points {
// Create water channel with proper depth and sloped banks
create_water_channel(editor, bx, bz, waterway_width, waterway_depth);
}
}
}
}
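
The width override above first tries to parse the tag as an integer and only then falls back to a float that is truncated. A small standalone sketch of that fallback (the tag values are hypothetical):

```rust
// Mirrors the width-tag fallback: integer metres first, then a truncated float,
// then the type-based default.
fn parse_width(width_str: &str, default: i32) -> i32 {
    width_str.parse::<i32>().unwrap_or_else(|_| {
        width_str
            .parse::<f32>()
            .map(|f| f as i32)
            .unwrap_or(default)
    })
}

fn main() {
    assert_eq!(parse_width("6", 4), 6);
    assert_eq!(parse_width("3.5", 4), 3);
    assert_eq!(parse_width("wide", 4), 4);
}
```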
/// Determines width and depth based on waterway type
fn get_waterway_dimensions(waterway_type: &str) -> (i32, i32) {
match waterway_type {
"river" => (8, 3), // Large rivers: 8 blocks wide, 3 blocks deep
"canal" => (6, 2), // Canals: 6 blocks wide, 2 blocks deep
"stream" => (3, 2), // Streams: 3 blocks wide, 2 blocks deep
"fairway" => (12, 3), // Shipping fairways: 12 blocks wide, 3 blocks deep
"flowline" => (2, 1), // Water flow lines: 2 blocks wide, 1 block deep
"brook" => (2, 1), // Small brooks: 2 blocks wide, 1 block deep
"ditch" => (2, 1), // Ditches: 2 blocks wide, 1 block deep
"drain" => (1, 1), // Drainage: 1 block wide, 1 block deep
_ => (4, 2), // Default: 4 blocks wide, 2 blocks deep
}
}
/// Creates a water channel with proper depth and sloped banks
fn create_water_channel(
editor: &mut WorldEditor,
center_x: i32,
center_z: i32,
width: i32,
depth: i32,
) {
let half_width = width / 2;
for x in (center_x - half_width - 1)..=(center_x + half_width + 1) {
for z in (center_z - half_width - 1)..=(center_z + half_width + 1) {
let dx = (x - center_x).abs();
let dz = (z - center_z).abs();
let distance_from_center = dx.max(dz);
if distance_from_center <= half_width {
// Main water channel
for y in (1 - depth)..=0 {
editor.set_block(WATER, x, y, z, None, None);
}
// Place one layer of dirt below the water channel
editor.set_block(DIRT, x, -depth, z, None, None);
// Clear vegetation above the water
editor.set_block(AIR, x, 1, z, Some(&[GRASS, WHEAT, CARROTS, POTATOES]), None);
} else if distance_from_center == half_width + 1 && depth > 1 {
// Create sloped banks (one block interval slopes)
let slope_depth = (depth - 1).max(1);
for y in (1 - slope_depth)..=0 {
if y == 0 {
// Surface level - place water or air
editor.set_block(WATER, x, y, z, None, None);
} else {
// Below surface - dig out for slope
editor.set_block(AIR, x, y, z, None, None);
}
}
// Place one layer of dirt below the sloped areas
editor.set_block(DIRT, x, -slope_depth, z, None, None);
// Clear vegetation above sloped areas
editor.set_block(AIR, x, 1, z, Some(&[GRASS, WHEAT, CARROTS, POTATOES]), None);
}
}
}
}

View File

@@ -1,607 +0,0 @@
use crate::coordinate_system::{geographic::LLBBox, transformation::geo_distance};
#[cfg(feature = "gui")]
use crate::telemetry::{send_log, LogLevel};
use image::Rgb;
use std::path::Path;
/// Maximum Y coordinate in Minecraft (build height limit)
const MAX_Y: i32 = 319;
/// Scale factor for converting real elevation to Minecraft heights
const BASE_HEIGHT_SCALE: f64 = 0.7;
/// AWS S3 Terrarium tiles endpoint (no API key required)
const AWS_TERRARIUM_URL: &str =
"https://s3.amazonaws.com/elevation-tiles-prod/terrarium/{z}/{x}/{y}.png";
/// Terrarium format offset for height decoding
const TERRARIUM_OFFSET: f64 = 32768.0;
/// Minimum zoom level for terrain tiles
const MIN_ZOOM: u8 = 10;
/// Maximum zoom level for terrain tiles
const MAX_ZOOM: u8 = 15;
/// Holds processed elevation data and metadata
#[derive(Clone)]
pub struct ElevationData {
/// Height values in Minecraft Y coordinates
pub(crate) heights: Vec<Vec<i32>>,
/// Width of the elevation grid
pub(crate) width: usize,
/// Height of the elevation grid
pub(crate) height: usize,
}
/// Calculates appropriate zoom level for the given bounding box
fn calculate_zoom_level(bbox: &LLBBox) -> u8 {
let lat_diff: f64 = (bbox.max().lat() - bbox.min().lat()).abs();
let lng_diff: f64 = (bbox.max().lng() - bbox.min().lng()).abs();
let max_diff: f64 = lat_diff.max(lng_diff);
let zoom: u8 = (-max_diff.log2() + 20.0) as u8;
zoom.clamp(MIN_ZOOM, MAX_ZOOM)
}
fn lat_lng_to_tile(lat: f64, lng: f64, zoom: u8) -> (u32, u32) {
let lat_rad: f64 = lat.to_radians();
let n: f64 = 2.0_f64.powi(zoom as i32);
let x: u32 = ((lng + 180.0) / 360.0 * n).floor() as u32;
let y: u32 = ((1.0 - lat_rad.tan().asinh() / std::f64::consts::PI) / 2.0 * n).floor() as u32;
(x, y)
}
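
For a quick sanity check of the slippy-map tile formula above, here is a self-contained copy with one easy-to-verify data point (zoom 1 splits the world into a 2x2 tile grid):

```rust
fn lat_lng_to_tile(lat: f64, lng: f64, zoom: u8) -> (u32, u32) {
    let lat_rad = lat.to_radians();
    let n = 2.0_f64.powi(zoom as i32);
    let x = ((lng + 180.0) / 360.0 * n).floor() as u32;
    let y = ((1.0 - lat_rad.tan().asinh() / std::f64::consts::PI) / 2.0 * n).floor() as u32;
    (x, y)
}

fn main() {
    // (lat, lng) = (0, 0) at zoom 1 lands in tile (1, 1): x = (0 + 180) / 360 * 2 = 1,
    // and y = (1 - 0) / 2 * 2 = 1 (tile y grows towards the south).
    assert_eq!(lat_lng_to_tile(0.0, 0.0, 1), (1, 1));
}
```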
/// Downloads a tile from AWS Terrain Tiles service
fn download_tile(
client: &reqwest::blocking::Client,
tile_x: u32,
tile_y: u32,
zoom: u8,
tile_path: &Path,
) -> Result<image::ImageBuffer<Rgb<u8>, Vec<u8>>, Box<dyn std::error::Error>> {
println!("Fetching tile x={tile_x},y={tile_y},z={zoom} from AWS Terrain Tiles");
let url: String = AWS_TERRARIUM_URL
.replace("{z}", &zoom.to_string())
.replace("{x}", &tile_x.to_string())
.replace("{y}", &tile_y.to_string());
let response: reqwest::blocking::Response = client.get(&url).send()?;
response.error_for_status_ref()?;
let bytes = response.bytes()?;
std::fs::write(tile_path, &bytes)?;
let img: image::DynamicImage = image::load_from_memory(&bytes)?;
Ok(img.to_rgb8())
}
pub fn fetch_elevation_data(
bbox: &LLBBox,
scale: f64,
ground_level: i32,
) -> Result<ElevationData, Box<dyn std::error::Error>> {
let (base_scale_z, base_scale_x) = geo_distance(bbox.min(), bbox.max());
// Apply same floor() and scale operations as CoordTransformer.llbbox_to_xzbbox()
let scale_factor_z: f64 = base_scale_z.floor() * scale;
let scale_factor_x: f64 = base_scale_x.floor() * scale;
// Calculate zoom and tiles
let zoom: u8 = calculate_zoom_level(bbox);
let tiles: Vec<(u32, u32)> = get_tile_coordinates(bbox, zoom);
// Match grid dimensions with Minecraft world size
let grid_width: usize = scale_factor_x as usize;
let grid_height: usize = scale_factor_z as usize;
// Initialize height grid with proper dimensions
let mut height_grid: Vec<Vec<f64>> = vec![vec![f64::NAN; grid_width]; grid_height];
let mut extreme_values_found = Vec::new(); // Track extreme values for debugging
let client: reqwest::blocking::Client = reqwest::blocking::Client::new();
let tile_cache_dir = Path::new("./arnis-tile-cache");
if !tile_cache_dir.exists() {
std::fs::create_dir_all(tile_cache_dir)?;
}
// Fetch and process each tile
for (tile_x, tile_y) in &tiles {
// Check if tile is already cached
let tile_path = tile_cache_dir.join(format!("z{zoom}_x{tile_x}_y{tile_y}.png"));
let rgb_img: image::ImageBuffer<Rgb<u8>, Vec<u8>> = if tile_path.exists() {
// Check if the cached file has a reasonable size (PNG files should be at least a few KB)
let file_size = match std::fs::metadata(&tile_path) {
Ok(metadata) => metadata.len(),
Err(_) => 0,
};
if file_size < 1000 {
eprintln!(
"Warning: Cached tile at {} appears to be too small ({} bytes). Refetching tile.",
tile_path.display(),
file_size
);
#[cfg(feature = "gui")]
send_log(
LogLevel::Warning,
"Cached tile appears to be too small. Refetching tile.",
);
// Remove the potentially corrupted file
if let Err(remove_err) = std::fs::remove_file(&tile_path) {
eprintln!(
"Warning: Failed to remove corrupted tile file: {}",
remove_err
);
#[cfg(feature = "gui")]
send_log(
LogLevel::Warning,
"Failed to remove corrupted tile file during refetching.",
);
}
// Re-download the tile
download_tile(&client, *tile_x, *tile_y, zoom, &tile_path)?
} else {
println!(
"Loading cached tile x={tile_x},y={tile_y},z={zoom} from {}",
tile_path.display()
);
// Try to load cached tile, but handle corruption gracefully
match image::open(&tile_path) {
Ok(img) => img.to_rgb8(),
Err(e) => {
eprintln!(
"Cached tile at {} is corrupted or invalid: {}. Re-downloading...",
tile_path.display(),
e
);
#[cfg(feature = "gui")]
send_log(
LogLevel::Warning,
"Cached tile is corrupted or invalid. Re-downloading...",
);
// Remove the corrupted file
if let Err(remove_err) = std::fs::remove_file(&tile_path) {
eprintln!(
"Warning: Failed to remove corrupted tile file: {}",
remove_err
);
#[cfg(feature = "gui")]
send_log(
LogLevel::Warning,
"Failed to remove corrupted tile file during re-download.",
);
}
// Re-download the tile
download_tile(&client, *tile_x, *tile_y, zoom, &tile_path)?
}
}
}
} else {
// Download the tile for the first time
download_tile(&client, *tile_x, *tile_y, zoom, &tile_path)?
};
// Only process pixels that fall within the requested bbox
for (y, row) in rgb_img.rows().enumerate() {
for (x, pixel) in row.enumerate() {
// Convert tile pixel coordinates back to geographic coordinates
let pixel_lng = ((*tile_x as f64 + x as f64 / 256.0) / (2.0_f64.powi(zoom as i32)))
* 360.0
- 180.0;
let pixel_lat_rad = std::f64::consts::PI
* (1.0
- 2.0 * (*tile_y as f64 + y as f64 / 256.0) / (2.0_f64.powi(zoom as i32)));
let pixel_lat = pixel_lat_rad.sinh().atan().to_degrees();
// Skip pixels outside the requested bounding box
if pixel_lat < bbox.min().lat()
|| pixel_lat > bbox.max().lat()
|| pixel_lng < bbox.min().lng()
|| pixel_lng > bbox.max().lng()
{
continue;
}
// Map geographic coordinates to grid coordinates
let rel_x = (pixel_lng - bbox.min().lng()) / (bbox.max().lng() - bbox.min().lng());
let rel_y =
1.0 - (pixel_lat - bbox.min().lat()) / (bbox.max().lat() - bbox.min().lat());
let scaled_x = (rel_x * grid_width as f64).round() as usize;
let scaled_y = (rel_y * grid_height as f64).round() as usize;
if scaled_y >= grid_height || scaled_x >= grid_width {
continue;
}
// Decode Terrarium format: (R * 256 + G + B/256) - 32768
let height: f64 =
(pixel[0] as f64 * 256.0 + pixel[1] as f64 + pixel[2] as f64 / 256.0)
- TERRARIUM_OFFSET;
// Track extreme values for debugging
if !(-1000.0..=10000.0).contains(&height) {
extreme_values_found
.push((tile_x, tile_y, x, y, pixel[0], pixel[1], pixel[2], height));
if extreme_values_found.len() <= 5 {
// Only log first 5 extreme values
eprintln!("Extreme value found: tile({tile_x},{tile_y}) pixel({x},{y}) RGB({},{},{}) = {height}m",
pixel[0], pixel[1], pixel[2]);
}
}
height_grid[scaled_y][scaled_x] = height;
}
}
}
// Report on extreme values found
if !extreme_values_found.is_empty() {
eprintln!(
"Found {} total extreme elevation values during tile processing",
extreme_values_found.len()
);
eprintln!("This may indicate corrupted tile data or areas with invalid elevation data");
}
// Fill in any NaN values by interpolating from nearest valid values
fill_nan_values(&mut height_grid);
// Filter extreme outliers that might be due to corrupted tile data
filter_elevation_outliers(&mut height_grid);
// Calculate blur sigma based on grid resolution
// Reference points for tuning:
const SMALL_GRID_REF: f64 = 100.0; // Reference grid size
const SMALL_SIGMA_REF: f64 = 15.0; // Sigma for 100x100 grid
const LARGE_GRID_REF: f64 = 1000.0; // Reference grid size
const LARGE_SIGMA_REF: f64 = 7.0; // Sigma for 1000x1000 grid
let grid_size: f64 = (grid_width.min(grid_height) as f64).max(1.0);
let sigma: f64 = if grid_size <= SMALL_GRID_REF {
// Linear scaling for small grids
SMALL_SIGMA_REF * (grid_size / SMALL_GRID_REF)
} else {
// Logarithmic scaling for larger grids
let ln_small: f64 = SMALL_GRID_REF.ln();
let ln_large: f64 = LARGE_GRID_REF.ln();
let log_grid_size: f64 = grid_size.ln();
let t: f64 = (log_grid_size - ln_small) / (ln_large - ln_small);
SMALL_SIGMA_REF + t * (LARGE_SIGMA_REF - SMALL_SIGMA_REF)
};
/* eprintln!(
"Grid: {}x{}, Blur sigma: {:.2}",
grid_width, grid_height, sigma
); */
// Continue with the existing blur and conversion to Minecraft heights...
let blurred_heights: Vec<Vec<f64>> = apply_gaussian_blur(&height_grid, sigma);
let mut mc_heights: Vec<Vec<i32>> = Vec::with_capacity(blurred_heights.len());
// Find min/max in raw data
let mut min_height: f64 = f64::MAX;
let mut max_height: f64 = f64::MIN;
let mut extreme_low_count = 0;
let mut extreme_high_count = 0;
for row in &blurred_heights {
for &height in row {
min_height = min_height.min(height);
max_height = max_height.max(height);
// Count extreme values that might indicate data issues
if height < -1000.0 {
extreme_low_count += 1;
}
if height > 10000.0 {
extreme_high_count += 1;
}
}
}
eprintln!("Height data range: {min_height} to {max_height} m");
if extreme_low_count > 0 {
eprintln!(
"WARNING: Found {extreme_low_count} pixels with extremely low elevations (< -1000m)"
);
}
if extreme_high_count > 0 {
eprintln!(
"WARNING: Found {extreme_high_count} pixels with extremely high elevations (> 10000m)"
);
}
let height_range: f64 = max_height - min_height;
// Apply scale factor to height scaling
let mut height_scale: f64 = BASE_HEIGHT_SCALE * scale.sqrt(); // sqrt to make height scaling less extreme
let mut scaled_range: f64 = height_range * height_scale;
// Adaptive scaling: ensure we don't exceed reasonable Y range
let available_y_range = (MAX_Y - ground_level) as f64;
let safety_margin = 0.9; // Use 90% of available range
let max_allowed_range = available_y_range * safety_margin;
if scaled_range > max_allowed_range {
let adjustment_factor = max_allowed_range / scaled_range;
height_scale *= adjustment_factor;
scaled_range = height_range * height_scale;
eprintln!(
"Height range too large, applying scaling adjustment factor: {adjustment_factor:.3}"
);
eprintln!("Adjusted scaled range: {scaled_range:.1} blocks");
}
// Convert to scaled Minecraft Y coordinates
for row in blurred_heights {
let mc_row: Vec<i32> = row
.iter()
.map(|&h| {
// Scale the height differences
let relative_height: f64 = (h - min_height) / height_range;
let scaled_height: f64 = relative_height * scaled_range;
// With terrain enabled, ground_level is used as the MIN_Y for terrain
((ground_level as f64 + scaled_height).round() as i32).clamp(ground_level, MAX_Y)
})
.collect();
mc_heights.push(mc_row);
}
let mut min_block_height: i32 = i32::MAX;
let mut max_block_height: i32 = i32::MIN;
for row in &mc_heights {
for &height in row {
min_block_height = min_block_height.min(height);
max_block_height = max_block_height.max(height);
}
}
eprintln!("Minecraft height data range: {min_block_height} to {max_block_height} blocks");
Ok(ElevationData {
heights: mc_heights,
width: grid_width,
height: grid_height,
})
}
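
To make the adaptive height scaling concrete, here is a worked example with made-up inputs: a 2000 m elevation span, scale = 1.0 and ground_level = 62 (the constants mirror BASE_HEIGHT_SCALE, MAX_Y and the 0.9 safety margin used above):

```rust
fn main() {
    const MAX_Y: f64 = 319.0;
    const BASE_HEIGHT_SCALE: f64 = 0.7;
    let (height_range, scale, ground_level) = (2000.0_f64, 1.0_f64, 62.0_f64);

    let mut height_scale = BASE_HEIGHT_SCALE * scale.sqrt(); // 0.7
    let mut scaled_range = height_range * height_scale; // 1400 blocks, far too tall
    let max_allowed = (MAX_Y - ground_level) * 0.9; // 231.3 blocks available

    if scaled_range > max_allowed {
        let factor = max_allowed / scaled_range; // ~0.165
        height_scale *= factor;
        scaled_range = height_range * height_scale; // clamped to ~231.3 blocks
    }
    println!("height_scale = {height_scale:.3}, scaled_range = {scaled_range:.1}");
}
```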
fn get_tile_coordinates(bbox: &LLBBox, zoom: u8) -> Vec<(u32, u32)> {
// Convert lat/lng to tile coordinates
let (x1, y1) = lat_lng_to_tile(bbox.min().lat(), bbox.min().lng(), zoom);
let (x2, y2) = lat_lng_to_tile(bbox.max().lat(), bbox.max().lng(), zoom);
let mut tiles: Vec<(u32, u32)> = Vec::new();
for x in x1.min(x2)..=x1.max(x2) {
for y in y1.min(y2)..=y1.max(y2) {
tiles.push((x, y));
}
}
tiles
}
fn apply_gaussian_blur(heights: &[Vec<f64>], sigma: f64) -> Vec<Vec<f64>> {
let kernel_size: usize = (sigma * 3.0).ceil() as usize * 2 + 1;
let kernel: Vec<f64> = create_gaussian_kernel(kernel_size, sigma);
// Apply blur
let mut blurred: Vec<Vec<f64>> = heights.to_owned();
// Horizontal pass
for row in blurred.iter_mut() {
let mut temp: Vec<f64> = row.clone();
for (i, val) in temp.iter_mut().enumerate() {
let mut sum: f64 = 0.0;
let mut weight_sum: f64 = 0.0;
for (j, k) in kernel.iter().enumerate() {
let idx: i32 = i as i32 + j as i32 - kernel_size as i32 / 2;
if idx >= 0 && idx < row.len() as i32 {
sum += row[idx as usize] * k;
weight_sum += k;
}
}
*val = sum / weight_sum;
}
*row = temp;
}
// Vertical pass
let height: usize = blurred.len();
let width: usize = blurred[0].len();
for x in 0..width {
let temp: Vec<_> = blurred
.iter()
.take(height)
.map(|row: &Vec<f64>| row[x])
.collect();
for (y, row) in blurred.iter_mut().enumerate().take(height) {
let mut sum: f64 = 0.0;
let mut weight_sum: f64 = 0.0;
for (j, k) in kernel.iter().enumerate() {
let idx: i32 = y as i32 + j as i32 - kernel_size as i32 / 2;
if idx >= 0 && idx < height as i32 {
sum += temp[idx as usize] * k;
weight_sum += k;
}
}
row[x] = sum / weight_sum;
}
}
blurred
}
fn create_gaussian_kernel(size: usize, sigma: f64) -> Vec<f64> {
let mut kernel: Vec<f64> = vec![0.0; size];
let center: f64 = size as f64 / 2.0;
for (i, value) in kernel.iter_mut().enumerate() {
let x: f64 = i as f64 - center;
*value = (-x * x / (2.0 * sigma * sigma)).exp();
}
let sum: f64 = kernel.iter().sum();
for k in kernel.iter_mut() {
*k /= sum;
}
kernel
}
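
As a quick check of the kernel sizing above: a sigma of 7.0 (the large-grid reference value) yields a 43-tap kernel, and the explicit normalization keeps the weights summing to 1 even when the window is truncated at the grid edges. Sketch:

```rust
fn main() {
    let sigma = 7.0_f64;
    let kernel_size = (sigma * 3.0).ceil() as usize * 2 + 1; // 21 * 2 + 1
    assert_eq!(kernel_size, 43);
}
```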
fn fill_nan_values(height_grid: &mut [Vec<f64>]) {
let height: usize = height_grid.len();
let width: usize = height_grid[0].len();
let mut changes_made: bool = true;
while changes_made {
changes_made = false;
for y in 0..height {
for x in 0..width {
if height_grid[y][x].is_nan() {
let mut sum: f64 = 0.0;
let mut count: i32 = 0;
// Check neighboring cells
for dy in -1..=1 {
for dx in -1..=1 {
let ny: i32 = y as i32 + dy;
let nx: i32 = x as i32 + dx;
if ny >= 0 && ny < height as i32 && nx >= 0 && nx < width as i32 {
let val: f64 = height_grid[ny as usize][nx as usize];
if !val.is_nan() {
sum += val;
count += 1;
}
}
}
}
if count > 0 {
height_grid[y][x] = sum / count as f64;
changes_made = true;
}
}
}
}
}
}
fn filter_elevation_outliers(height_grid: &mut [Vec<f64>]) {
let height = height_grid.len();
let width = height_grid[0].len();
// Collect all valid height values to calculate statistics
let mut all_heights: Vec<f64> = Vec::new();
for row in height_grid.iter() {
for &h in row {
if !h.is_nan() && h.is_finite() {
all_heights.push(h);
}
}
}
if all_heights.is_empty() {
return;
}
// Sort to find percentiles
all_heights.sort_by(|a, b| a.partial_cmp(b).unwrap());
let len = all_heights.len();
// Use 1st and 99th percentiles to define reasonable bounds
let p1_idx = (len as f64 * 0.01) as usize;
let p99_idx = (len as f64 * 0.99) as usize;
let min_reasonable = all_heights[p1_idx];
let max_reasonable = all_heights[p99_idx];
eprintln!("Filtering outliers outside range: {min_reasonable:.1}m to {max_reasonable:.1}m");
let mut outliers_filtered = 0;
// Replace outliers with NaN, then fill them using interpolation
for row in height_grid.iter_mut().take(height) {
for h in row.iter_mut().take(width) {
if !h.is_nan() && (*h < min_reasonable || *h > max_reasonable) {
*h = f64::NAN;
outliers_filtered += 1;
}
}
}
if outliers_filtered > 0 {
eprintln!("Filtered {outliers_filtered} elevation outliers, interpolating replacements...");
// Re-run the NaN filling to interpolate the filtered values
fill_nan_values(height_grid);
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_terrarium_height_decoding() {
// Test known Terrarium RGB values
// Sea level (0m) in Terrarium format should be (128, 0, 0) = 32768 - 32768 = 0
let sea_level_pixel = [128, 0, 0];
let height = (sea_level_pixel[0] as f64 * 256.0
+ sea_level_pixel[1] as f64
+ sea_level_pixel[2] as f64 / 256.0)
- TERRARIUM_OFFSET;
assert_eq!(height, 0.0);
// Test simple case: height of 1000m
// 1000 + 32768 = 33768 = 131 * 256 + 232
let test_pixel = [131, 232, 0];
let height =
(test_pixel[0] as f64 * 256.0 + test_pixel[1] as f64 + test_pixel[2] as f64 / 256.0)
- TERRARIUM_OFFSET;
assert_eq!(height, 1000.0);
// Test below sea level (-100m)
// -100 + 32768 = 32668 = 127 * 256 + 156
let below_sea_pixel = [127, 156, 0];
let height = (below_sea_pixel[0] as f64 * 256.0
+ below_sea_pixel[1] as f64
+ below_sea_pixel[2] as f64 / 256.0)
- TERRARIUM_OFFSET;
assert_eq!(height, -100.0);
}
#[test]
fn test_aws_url_generation() {
let url = AWS_TERRARIUM_URL
.replace("{z}", "15")
.replace("{x}", "17436")
.replace("{y}", "11365");
assert_eq!(
url,
"https://s3.amazonaws.com/elevation-tiles-prod/terrarium/15/17436/11365.png"
);
}
#[test]
#[ignore] // This test requires internet connection, run with --ignored
fn test_aws_tile_fetch() {
use reqwest::blocking::Client;
let client = Client::new();
let url = "https://s3.amazonaws.com/elevation-tiles-prod/terrarium/15/17436/11365.png";
let response = client.get(url).send();
assert!(response.is_ok());
let response = response.unwrap();
assert!(response.status().is_success());
assert!(response
.headers()
.get("content-type")
.unwrap()
.to_str()
.unwrap()
.contains("image"));
}
}

View File

@@ -1,212 +0,0 @@
use geo::{Contains, LineString, Point, Polygon};
use itertools::Itertools;
use std::collections::{HashSet, VecDeque};
use std::time::{Duration, Instant};
/// Main flood fill function with automatic algorithm selection
/// Chooses the best algorithm based on polygon size and complexity
pub fn flood_fill_area(
polygon_coords: &[(i32, i32)],
timeout: Option<&Duration>,
) -> Vec<(i32, i32)> {
if polygon_coords.len() < 3 {
return vec![]; // Not a valid polygon
}
// Calculate bounding box of the polygon using itertools
let (min_x, max_x) = polygon_coords
.iter()
.map(|&(x, _)| x)
.minmax()
.into_option()
.unwrap();
let (min_z, max_z) = polygon_coords
.iter()
.map(|&(_, z)| z)
.minmax()
.into_option()
.unwrap();
let area = (max_x - min_x + 1) as i64 * (max_z - min_z + 1) as i64;
// For small and medium areas, use optimized flood fill with span filling
if area < 50000 {
optimized_flood_fill_area(polygon_coords, timeout, min_x, max_x, min_z, max_z)
} else {
// For larger areas, use original flood fill with grid sampling
original_flood_fill_area(polygon_coords, timeout, min_x, max_x, min_z, max_z)
}
}
/// Optimized flood fill for larger polygons with multi-seed detection for complex shapes like U-shapes
fn optimized_flood_fill_area(
polygon_coords: &[(i32, i32)],
timeout: Option<&Duration>,
min_x: i32,
max_x: i32,
min_z: i32,
max_z: i32,
) -> Vec<(i32, i32)> {
let start_time = Instant::now();
let mut filled_area = Vec::new();
let mut global_visited = HashSet::new();
// Create polygon for containment testing
let exterior_coords: Vec<(f64, f64)> = polygon_coords
.iter()
.map(|&(x, z)| (x as f64, z as f64))
.collect();
let exterior = LineString::from(exterior_coords);
let polygon = Polygon::new(exterior, vec![]);
// Optimized step sizes: larger steps for efficiency, but still catch U-shapes
let width = max_x - min_x + 1;
let height = max_z - min_z + 1;
let step_x = (width / 6).clamp(1, 8); // Balance between coverage and speed
let step_z = (height / 6).clamp(1, 8);
// Pre-allocate queue with reasonable capacity to avoid reallocations
let mut queue = VecDeque::with_capacity(1024);
for z in (min_z..=max_z).step_by(step_z as usize) {
for x in (min_x..=max_x).step_by(step_x as usize) {
// Fast timeout check, only every few iterations
if filled_area.len() % 100 == 0 {
if let Some(timeout) = timeout {
if start_time.elapsed() > *timeout {
return filled_area;
}
}
}
// Skip if already visited or not inside polygon
if global_visited.contains(&(x, z))
|| !polygon.contains(&Point::new(x as f64, z as f64))
{
continue;
}
// Start flood fill from this seed point
queue.clear(); // Reuse queue instead of creating new one
queue.push_back((x, z));
global_visited.insert((x, z));
while let Some((curr_x, curr_z)) = queue.pop_front() {
// Add current point to filled area
filled_area.push((curr_x, curr_z));
// Check all four directions with optimized bounds checking
let neighbors = [
(curr_x - 1, curr_z),
(curr_x + 1, curr_z),
(curr_x, curr_z - 1),
(curr_x, curr_z + 1),
];
for (nx, nz) in neighbors.iter() {
if *nx >= min_x
&& *nx <= max_x
&& *nz >= min_z
&& *nz <= max_z
&& !global_visited.contains(&(*nx, *nz))
{
// Only check polygon containment for unvisited points
if polygon.contains(&Point::new(*nx as f64, *nz as f64)) {
global_visited.insert((*nx, *nz));
queue.push_back((*nx, *nz));
}
}
}
}
}
}
filled_area
}
/// Original flood fill algorithm with enhanced multi-seed detection for complex shapes
fn original_flood_fill_area(
polygon_coords: &[(i32, i32)],
timeout: Option<&Duration>,
min_x: i32,
max_x: i32,
min_z: i32,
max_z: i32,
) -> Vec<(i32, i32)> {
let start_time = Instant::now();
let mut filled_area: Vec<(i32, i32)> = Vec::new();
let mut global_visited: HashSet<(i32, i32)> = HashSet::new();
// Convert input to a geo::Polygon for efficient point-in-polygon testing
let exterior_coords: Vec<(f64, f64)> = polygon_coords
.iter()
.map(|&(x, z)| (x as f64, z as f64))
.collect::<Vec<_>>();
let exterior: LineString = LineString::from(exterior_coords);
let polygon: Polygon<f64> = Polygon::new(exterior, vec![]);
// Optimized step sizes for large polygons - coarser sampling for speed
let width = max_x - min_x + 1;
let height = max_z - min_z + 1;
let step_x: i32 = (width / 8).clamp(1, 12); // Cap max step size for coverage
let step_z: i32 = (height / 8).clamp(1, 12);
// Pre-allocate queue and reserve space for filled_area
let mut queue: VecDeque<(i32, i32)> = VecDeque::with_capacity(2048);
filled_area.reserve(1000); // Reserve space to reduce reallocations
// Scan for multiple seed points to handle U-shapes and concave polygons
for z in (min_z..=max_z).step_by(step_z as usize) {
for x in (min_x..=max_x).step_by(step_x as usize) {
// Check the timeout before starting a fill from each new seed point
if let Some(timeout) = timeout {
if &start_time.elapsed() > timeout {
return filled_area;
}
}
// Skip if already processed or not inside polygon
if global_visited.contains(&(x, z))
|| !polygon.contains(&Point::new(x as f64, z as f64))
{
continue;
}
// Start flood-fill from this seed point
queue.clear(); // Reuse queue
queue.push_back((x, z));
global_visited.insert((x, z));
while let Some((curr_x, curr_z)) = queue.pop_front() {
// Only check polygon containment once per point when adding to filled_area
if polygon.contains(&Point::new(curr_x as f64, curr_z as f64)) {
filled_area.push((curr_x, curr_z));
// Check adjacent points with optimized iteration
let neighbors = [
(curr_x - 1, curr_z),
(curr_x + 1, curr_z),
(curr_x, curr_z - 1),
(curr_x, curr_z + 1),
];
for (nx, nz) in neighbors.iter() {
if *nx >= min_x
&& *nx <= max_x
&& *nz >= min_z
&& *nz <= max_z
&& !global_visited.contains(&(*nx, *nz))
{
global_visited.insert((*nx, *nz));
queue.push_back((*nx, *nz));
}
}
}
}
}
}
filled_area
}
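
A hedged usage sketch for the entry point above (assuming flood_fill_area is in scope; the square footprint and the 5-second timeout are arbitrary):

```rust
use std::time::Duration;

fn main() {
    // Outline of a small square footprint in (x, z) block coordinates;
    // the ring is closed implicitly by the point-in-polygon test.
    let square = [(0, 0), (10, 0), (10, 10), (0, 10)];
    let filled = flood_fill_area(&square, Some(&Duration::from_secs(5)));
    // `filled` holds the interior columns; the order is not guaranteed.
    println!("{} blocks inside the footprint", filled.len());
}
```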

166
src/getData.py Normal file
View File

@@ -0,0 +1,166 @@
from random import choice
import json
import os
import requests
import subprocess
def download_with_requests(url, params, filename):
response = requests.get(url, params=params)
if response.status_code == 200:
with open(filename, "w") as file:
json.dump(response.json(), file)
return filename
else:
print("Failed to download data. Status code:", response.status_code)
return None
def download_with_curl(url, params, filename):
# Prepare curl command with parameters
curl_command = [
"curl",
"-o",
filename,
url + "?" + "&".join([f"{key}={value}" for key, value in params.items()]),
]
subprocess.call(curl_command)
return filename
def download_with_wget(url, params, filename):
# Prepare wget command with parameters
wget_command = [
"wget",
"-O",
filename,
url + "?" + "&".join([f"{key}={value}" for key, value in params.items()]),
]
subprocess.call(wget_command)
return filename
def getData(city, state, country, bbox, file, debug, download_method="requests"):
print("Fetching data...")
api_servers = [
"https://overpass-api.de/api/interpreter",
"https://lz4.overpass-api.de/api/interpreter",
"https://z.overpass-api.de/api/interpreter",
"https://overpass.kumi.systems/api/interpreter",
"https://overpass.private.coffee/api/interpreter",
]
url = choice(api_servers)
if city:
query1 = f"""
[out:json];
area[name="{city}"]->.city;
area[name="{state}"]->.state;
area[name="{country}"]->.country;
(
nwr(area.country)(area.state)(area.city)[building];
nwr(area.country)(area.state)(area.city)[highway];
nwr(area.country)(area.state)(area.city)[landuse];
nwr(area.country)(area.state)(area.city)[natural];
nwr(area.country)(area.state)(area.city)[leisure];
nwr(area.country)(area.state)(area.city)[waterway]["waterway"!="fairway"];
nwr(area.country)(area.state)(area.city)[amenity];
nwr(area.country)(area.state)(area.city)[bridge];
nwr(area.country)(area.state)(area.city)[railway];
nwr(area.country)(area.state)(area.city)[barrier];
);
(._;>;);
out;
"""
elif bbox:
bbox = bbox.split(",")
bbox = [float(i) for i in bbox]
if debug:
print(f"Bbox input: {bbox}")
query1 = f"""
[out:json][bbox:{bbox[1]},{bbox[0]},{bbox[3]},{bbox[2]}];
(
nwr["building"];
nwr["highway"];
nwr["landuse"];
nwr["natural"];
nwr["leisure"];
nwr["waterway"];
nwr["amenity"];
nwr["bridge"];
nwr["railway"];
nwr["barrier"];
nwr["entrance"];
nwr["door"];
)->.waysinbbox;
(
node(w.waysinbbox);
)->.nodesinbbox;
.waysinbbox out body;
.nodesinbbox out skel qt;
"""
elif file:
print("Loading data from file")
else:
query1 = f"""
[out:json];
area[name="{city}"]->.city;
area[name="{country}"]->.country;
(
nwr(area.country)(area.city)[building];
nwr(area.country)(area.city)[highway];
nwr(area.country)(area.city)[landuse];
nwr(area.country)(area.city)[natural];
nwr(area.country)(area.city)[leisure];
nwr(area.country)(area.city)[waterway]["waterway"!="fairway"];
nwr(area.country)(area.city)[amenity];
nwr(area.country)(area.city)[bridge];
nwr(area.country)(area.city)[railway];
nwr(area.country)(area.city)[barrier];
);
(._;>;);
out;
"""
if debug:
print(f"OSM Query: {query1}")
try:
if file:
with open("data.json", encoding="utf8") as dataset:
data = json.load(dataset)
else:
if debug:
print(f"Chosen server: {url}")
filename = "arnis-debug-raw_data.json"
if download_method == "requests":
file_path = download_with_requests(url, {"data": query1}, filename)
elif download_method == "curl":
file_path = download_with_curl(url, {"data": query1}, filename)
elif download_method == "wget":
file_path = download_with_wget(url, {"data": query1}, filename)
else:
print("Invalid download method. Using 'requests' by default.")
file_path = download_with_requests(url, {"data": query1}, filename)
if file_path is None:
os._exit(1)
with open(file_path, "r") as file:
data = json.load(file)
if len(data["elements"]) == 0:
print("Error! No data available")
os._exit(1)
except Exception as e:
if "The server is probably too busy to handle your request." in str(e):
print("Error! OSM server overloaded")
elif "Dispatcher_Client::request_read_and_idx::rate_limited" in str(e):
print("Error! IP rate limited, wait before trying again")
else:
print(f"Error! {e}")
os._exit(1)
return data

View File

@@ -1,152 +0,0 @@
use crate::args::Args;
use crate::coordinate_system::{cartesian::XZPoint, geographic::LLBBox};
use crate::elevation_data::{fetch_elevation_data, ElevationData};
use crate::progress::emit_gui_progress_update;
use colored::Colorize;
use image::{Rgb, RgbImage};
/// Represents terrain data and elevation settings
#[derive(Clone)]
pub struct Ground {
pub elevation_enabled: bool,
ground_level: i32,
elevation_data: Option<ElevationData>,
}
impl Ground {
pub fn new_flat(ground_level: i32) -> Self {
Self {
elevation_enabled: false,
ground_level,
elevation_data: None,
}
}
pub fn new_enabled(bbox: &LLBBox, scale: f64, ground_level: i32) -> Self {
match fetch_elevation_data(bbox, scale, ground_level) {
Ok(elevation_data) => Self {
elevation_enabled: true,
ground_level,
elevation_data: Some(elevation_data),
},
Err(e) => {
eprintln!("Failed to fetch elevation data: {}", e);
emit_gui_progress_update(15.0, "Elevation unavailable, using flat ground");
// Graceful fallback: disable elevation and keep provided ground_level
Self {
elevation_enabled: false,
ground_level,
elevation_data: None,
}
}
}
}
/// Returns the ground level at the given coordinates
#[inline(always)]
pub fn level(&self, coord: XZPoint) -> i32 {
if !self.elevation_enabled || self.elevation_data.is_none() {
return self.ground_level;
}
let data: &ElevationData = self.elevation_data.as_ref().unwrap();
let (x_ratio, z_ratio) = self.get_data_coordinates(coord, data);
self.interpolate_height(x_ratio, z_ratio, data)
}
#[allow(unused)]
#[inline(always)]
pub fn min_level<I: Iterator<Item = XZPoint>>(&self, coords: I) -> Option<i32> {
if !self.elevation_enabled {
return Some(self.ground_level);
}
coords.map(|c: XZPoint| self.level(c)).min()
}
#[allow(unused)]
#[inline(always)]
pub fn max_level<I: Iterator<Item = XZPoint>>(&self, coords: I) -> Option<i32> {
if !self.elevation_enabled {
return Some(self.ground_level);
}
coords.map(|c: XZPoint| self.level(c)).max()
}
/// Converts game coordinates to elevation data coordinates
#[inline(always)]
fn get_data_coordinates(&self, coord: XZPoint, data: &ElevationData) -> (f64, f64) {
let x_ratio: f64 = coord.x as f64 / data.width as f64;
let z_ratio: f64 = coord.z as f64 / data.height as f64;
(x_ratio.clamp(0.0, 1.0), z_ratio.clamp(0.0, 1.0))
}
/// Looks up the height value from the elevation grid using nearest-neighbor sampling
#[inline(always)]
fn interpolate_height(&self, x_ratio: f64, z_ratio: f64, data: &ElevationData) -> i32 {
let x: usize = ((x_ratio * (data.width - 1) as f64).round() as usize).min(data.width - 1);
let z: usize = ((z_ratio * (data.height - 1) as f64).round() as usize).min(data.height - 1);
data.heights[z][x]
}
fn save_debug_image(&self, filename: &str) {
let heights = &self
.elevation_data
.as_ref()
.expect("Elevation data not available")
.heights;
if heights.is_empty() || heights[0].is_empty() {
return;
}
let height: usize = heights.len();
let width: usize = heights[0].len();
let mut img: image::ImageBuffer<Rgb<u8>, Vec<u8>> =
RgbImage::new(width as u32, height as u32);
let mut min_height: i32 = i32::MAX;
let mut max_height: i32 = i32::MIN;
for row in heights {
for &h in row {
min_height = min_height.min(h);
max_height = max_height.max(h);
}
}
for (y, row) in heights.iter().enumerate() {
for (x, &h) in row.iter().enumerate() {
let normalized: u8 =
(((h - min_height) as f64 / (max_height - min_height) as f64) * 255.0) as u8;
img.put_pixel(
x as u32,
y as u32,
Rgb([normalized, normalized, normalized]),
);
}
}
// Ensure filename has .png extension
let filename: String = if !filename.ends_with(".png") {
format!("{filename}.png")
} else {
filename.to_string()
};
if let Err(e) = img.save(&filename) {
eprintln!("Failed to save debug image: {e}");
}
}
}
pub fn generate_ground_data(args: &Args) -> Ground {
if args.terrain {
println!("{} Fetching elevation...", "[3/7]".bold());
emit_gui_progress_update(15.0, "Fetching elevation...");
let ground = Ground::new_enabled(&args.bbox, args.scale, args.ground_level);
if args.debug {
ground.save_debug_image("elevation_debug");
}
return ground;
}
Ground::new_flat(args.ground_level)
}

View File

@@ -1,909 +0,0 @@
use crate::args::Args;
use crate::coordinate_system::cartesian::XZPoint;
use crate::coordinate_system::geographic::{LLBBox, LLPoint};
use crate::coordinate_system::transformation::CoordTransformer;
use crate::data_processing;
use crate::ground::{self, Ground};
use crate::map_transformation;
use crate::osm_parser;
use crate::progress;
use crate::retrieve_data;
use crate::telemetry::{self, send_log, LogLevel};
use crate::version_check;
use fastnbt::Value;
use flate2::read::GzDecoder;
use fs2::FileExt;
use log::LevelFilter;
use rfd::FileDialog;
use std::io::Read;
use std::path::{Path, PathBuf};
use std::{env, fs, io::Write};
use tauri_plugin_log::{Builder as LogBuilder, Target, TargetKind};
/// Manages the session.lock file for a Minecraft world directory
struct SessionLock {
file: fs::File,
path: PathBuf,
}
impl SessionLock {
/// Creates and locks a session.lock file in the specified world directory
fn acquire(world_path: &Path) -> Result<Self, String> {
let session_lock_path = world_path.join("session.lock");
// Create or open the session.lock file
let file = fs::File::create(&session_lock_path)
.map_err(|e| format!("Failed to create session.lock file: {e}"))?;
// Write the snowman character (U+2603) as specified by Minecraft format
let snowman_bytes = "\u{2603}".as_bytes(); // The snowman character, UTF-8 encoded as E2 98 83
(&file)
.write_all(snowman_bytes)
.map_err(|e| format!("Failed to write to session.lock file: {e}"))?;
// Acquire an exclusive lock on the file
file.try_lock_exclusive()
.map_err(|e| format!("Failed to acquire lock on session.lock file: {e}"))?;
Ok(SessionLock {
file,
path: session_lock_path,
})
}
}
impl Drop for SessionLock {
fn drop(&mut self) {
// Release the lock and remove the session.lock file
let _ = self.file.unlock();
let _ = fs::remove_file(&self.path);
}
}
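// Usage sketch (hypothetical world path): the exclusive lock lives as long as the
// guard, and session.lock is deleted again when the guard is dropped:
//
//     let _guard = SessionLock::acquire(Path::new("/path/to/saves/Arnis World 1"))?;
//     // ... write region files while the guard is alive ...
//     // lock released and session.lock removed here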
pub fn run_gui() {
// Launch the UI
println!("Launching UI...");
// Install panic hook for crash reporting
telemetry::install_panic_hook();
// Work around WebKit2GTK rendering issues with NVIDIA drivers and other graphics problems
// Source: https://github.com/tauri-apps/tauri/issues/10702
#[cfg(target_os = "linux")]
unsafe {
// Disable problematic GPU features that cause map loading issues
env::set_var("WEBKIT_DISABLE_DMABUF_RENDERER", "1");
env::set_var("WEBKIT_DISABLE_COMPOSITING_MODE", "1");
// Force software rendering for better compatibility
env::set_var("LIBGL_ALWAYS_SOFTWARE", "1");
env::set_var("GALLIUM_DRIVER", "softpipe");
// Note: Removed sandbox disabling for security reasons
// Note: Removed Qt WebEngine flags as they don't apply to Tauri
}
tauri::Builder::default()
.plugin(
LogBuilder::default()
.level(LevelFilter::Warn)
.targets([
Target::new(TargetKind::LogDir {
file_name: Some("arnis".into()),
}),
Target::new(TargetKind::Stdout),
])
.build(),
)
.plugin(tauri_plugin_shell::init())
.invoke_handler(tauri::generate_handler![
gui_select_world,
gui_start_generation,
gui_get_version,
gui_check_for_updates,
gui_get_world_map_data
])
.setup(|app| {
let app_handle = app.handle();
let main_window = tauri::Manager::get_webview_window(app_handle, "main")
.expect("Failed to get main window");
progress::set_main_window(main_window);
Ok(())
})
.run(tauri::generate_context!())
.expect("Error while starting the application UI (Tauri)");
}
#[tauri::command]
fn gui_select_world(generate_new: bool) -> Result<String, i32> {
// Determine the default Minecraft 'saves' directory based on the OS
let default_dir: Option<PathBuf> = if cfg!(target_os = "windows") {
env::var("APPDATA")
.ok()
.map(|appdata: String| PathBuf::from(appdata).join(".minecraft").join("saves"))
} else if cfg!(target_os = "macos") {
dirs::home_dir().map(|home: PathBuf| {
home.join("Library/Application Support/minecraft")
.join("saves")
})
} else if cfg!(target_os = "linux") {
dirs::home_dir().map(|home| {
let flatpak_path = home.join(".var/app/com.mojang.Minecraft/.minecraft/saves");
if flatpak_path.exists() {
flatpak_path
} else {
home.join(".minecraft/saves")
}
})
} else {
None
};
if generate_new {
// Handle new world generation
if let Some(default_path) = &default_dir {
if default_path.exists() {
// Call create_new_world and return the result
create_new_world(default_path).map_err(|_| 1) // Error code 1: Minecraft directory not found
} else {
Err(1) // Error code 1: Minecraft directory not found
}
} else {
Err(1) // Error code 1: Minecraft directory not found
}
} else {
// Handle existing world selection
// Open the directory picker dialog
let dialog: FileDialog = FileDialog::new();
let dialog: FileDialog = if let Some(start_dir) = default_dir.filter(|dir| dir.exists()) {
dialog.set_directory(start_dir)
} else {
dialog
};
if let Some(path) = dialog.pick_folder() {
// Check if the "region" folder exists within the selected directory
if path.join("region").exists() {
// Check the 'session.lock' file
let session_lock_path = path.join("session.lock");
if session_lock_path.exists() {
// Try to acquire a lock on the session.lock file
if let Ok(file) = fs::File::open(&session_lock_path) {
if fs2::FileExt::try_lock_shared(&file).is_err() {
return Err(2); // Error code 2: The selected world is currently in use
} else {
// Release the lock immediately
let _ = fs2::FileExt::unlock(&file);
}
}
}
return Ok(path.display().to_string());
} else {
// Selected folder is not an existing world (no region folder); create a new world in this user-selected directory
return create_new_world(&path).map_err(|_| 3); // Error code 3: Failed to create new world
}
}
// If no folder was selected, return an error message
Err(4) // Error code 4: No world selected
}
}
fn create_new_world(base_path: &Path) -> Result<String, String> {
// Generate a unique world name with proper counter
// Check for both "Arnis World X" and "Arnis World X: Location" patterns
let mut counter: i32 = 1;
let unique_name: String = loop {
let candidate_name: String = format!("Arnis World {counter}");
let candidate_path: PathBuf = base_path.join(&candidate_name);
// Check for exact match (no location suffix)
let exact_match_exists = candidate_path.exists();
// Check for worlds with location suffix (Arnis World X: Location)
let location_pattern = format!("Arnis World {counter}: ");
let location_match_exists = fs::read_dir(base_path)
.map(|entries| {
entries
.filter_map(Result::ok)
.filter_map(|entry| entry.file_name().into_string().ok())
.any(|name| name.starts_with(&location_pattern))
})
.unwrap_or(false);
if !exact_match_exists && !location_match_exists {
break candidate_name;
}
counter += 1;
};
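// Example: with existing saves "Arnis World 1" and "Arnis World 2: Munich",
// counters 1 and 2 are rejected by the checks above and the loop settles on
// "Arnis World 3".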
let new_world_path: PathBuf = base_path.join(&unique_name);
// Create the new world directory structure
fs::create_dir_all(new_world_path.join("region"))
.map_err(|e| format!("Failed to create world directory: {e}"))?;
// Copy the region template file
const REGION_TEMPLATE: &[u8] = include_bytes!("../assets/minecraft/region.template");
let region_path = new_world_path.join("region").join("r.0.0.mca");
fs::write(&region_path, REGION_TEMPLATE)
.map_err(|e| format!("Failed to create region file: {e}"))?;
// Add the level.dat file
const LEVEL_TEMPLATE: &[u8] = include_bytes!("../assets/minecraft/level.dat");
// Decompress the gzipped level.template
let mut decoder = GzDecoder::new(LEVEL_TEMPLATE);
let mut decompressed_data = Vec::new();
decoder
.read_to_end(&mut decompressed_data)
.map_err(|e| format!("Failed to decompress level.template: {e}"))?;
// Parse the decompressed NBT data
let mut level_data: Value = fastnbt::from_bytes(&decompressed_data)
.map_err(|e| format!("Failed to parse level.dat template: {e}"))?;
// Modify the LevelName, LastPlayed and player position fields
if let Value::Compound(ref mut root) = level_data {
if let Some(Value::Compound(ref mut data)) = root.get_mut("Data") {
// Update LevelName
data.insert("LevelName".to_string(), Value::String(unique_name.clone()));
// Update LastPlayed to the current Unix time in milliseconds
let current_time = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
.map_err(|e| format!("Failed to get current time: {e}"))?;
let current_time_millis = current_time.as_millis() as i64;
data.insert("LastPlayed".to_string(), Value::Long(current_time_millis));
// Update player position and rotation
if let Some(Value::Compound(ref mut player)) = data.get_mut("Player") {
if let Some(Value::List(ref mut pos)) = player.get_mut("Pos") {
if let Value::Double(ref mut x) = pos.get_mut(0).unwrap() {
*x = -5.0;
}
if let Value::Double(ref mut y) = pos.get_mut(1).unwrap() {
*y = -61.0;
}
if let Value::Double(ref mut z) = pos.get_mut(2).unwrap() {
*z = -5.0;
}
}
if let Some(Value::List(ref mut rot)) = player.get_mut("Rotation") {
if let Value::Float(ref mut x) = rot.get_mut(0).unwrap() {
*x = -45.0;
}
}
}
}
}
// Serialize the updated NBT data back to bytes
let serialized_level_data: Vec<u8> = fastnbt::to_bytes(&level_data)
.map_err(|e| format!("Failed to serialize updated level.dat: {e}"))?;
// Compress the serialized data back to gzip
let mut encoder = flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
encoder
.write_all(&serialized_level_data)
.map_err(|e| format!("Failed to compress updated level.dat: {e}"))?;
let compressed_level_data = encoder
.finish()
.map_err(|e| format!("Failed to finalize compression for level.dat: {e}"))?;
// Write the level.dat file
fs::write(new_world_path.join("level.dat"), compressed_level_data)
.map_err(|e| format!("Failed to create level.dat file: {e}"))?;
// Add the icon.png file
const ICON_TEMPLATE: &[u8] = include_bytes!("../assets/minecraft/icon.png");
fs::write(new_world_path.join("icon.png"), ICON_TEMPLATE)
.map_err(|e| format!("Failed to create icon.png file: {e}"))?;
Ok(new_world_path.display().to_string())
}
/// Adds localized area name to the world name in level.dat
fn add_localized_world_name(world_path: PathBuf, bbox: &LLBBox) -> PathBuf {
// Only proceed if the path exists
if !world_path.exists() {
return world_path;
}
// Check the level.dat file first to get the current name
let level_path = world_path.join("level.dat");
if !level_path.exists() {
return world_path;
}
// Try to read the current world name from level.dat
let Ok(level_data) = std::fs::read(&level_path) else {
return world_path;
};
let mut decoder = GzDecoder::new(level_data.as_slice());
let mut decompressed_data = Vec::new();
if decoder.read_to_end(&mut decompressed_data).is_err() {
return world_path;
}
let Ok(Value::Compound(ref root)) = fastnbt::from_bytes::<Value>(&decompressed_data) else {
return world_path;
};
let Some(Value::Compound(ref data)) = root.get("Data") else {
return world_path;
};
let Some(Value::String(current_name)) = data.get("LevelName") else {
return world_path;
};
// Only modify if it's an Arnis world and doesn't already have an area name
if !current_name.starts_with("Arnis World ") || current_name.contains(": ") {
return world_path;
}
// Calculate center coordinates of bbox
let center_lat = (bbox.min().lat() + bbox.max().lat()) / 2.0;
let center_lon = (bbox.min().lng() + bbox.max().lng()) / 2.0;
// Try to fetch the area name
let area_name = match retrieve_data::fetch_area_name(center_lat, center_lon) {
Ok(Some(name)) => name,
_ => return world_path, // Keep original name if no area name found
};
// Create new name with localized area name, ensuring total length doesn't exceed 30 characters
let base_name = current_name.clone();
let max_area_name_len = 30usize.saturating_sub(base_name.len() + 2); // 2 chars for ": "; saturate to avoid underflow
let truncated_area_name =
if area_name.chars().count() > max_area_name_len && max_area_name_len > 0 {
// Truncate the area name to fit within the 30 character limit
area_name
.chars()
.take(max_area_name_len)
.collect::<String>()
} else if max_area_name_len == 0 {
// If base name is already too long, don't add area name
return world_path;
} else {
area_name
};
let new_name = format!("{base_name}: {truncated_area_name}");
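// Worked example: base_name "Arnis World 12" is 14 characters long, leaving
// 30 - 14 - 2 = 14 characters for the area name, so "Oberbayern" (10 chars) fits
// untruncated and new_name becomes "Arnis World 12: Oberbayern".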
// Update the level.dat file with the new name
if let Ok(level_data) = std::fs::read(&level_path) {
let mut decoder = GzDecoder::new(level_data.as_slice());
let mut decompressed_data = Vec::new();
if decoder.read_to_end(&mut decompressed_data).is_ok() {
if let Ok(mut nbt_data) = fastnbt::from_bytes::<Value>(&decompressed_data) {
// Update the level name in NBT data
if let Value::Compound(ref mut root) = nbt_data {
if let Some(Value::Compound(ref mut data)) = root.get_mut("Data") {
data.insert("LevelName".to_string(), Value::String(new_name));
// Save the updated NBT data
if let Ok(serialized_data) = fastnbt::to_bytes(&nbt_data) {
let mut encoder = flate2::write::GzEncoder::new(
Vec::new(),
flate2::Compression::default(),
);
if encoder.write_all(&serialized_data).is_ok() {
if let Ok(compressed_data) = encoder.finish() {
if let Err(e) = std::fs::write(&level_path, compressed_data) {
eprintln!("Failed to update level.dat with area name: {e}");
send_log(
LogLevel::Warning,
"Failed to update level.dat with area name",
);
}
}
}
}
}
}
}
}
}
// Return the original path since we didn't change the directory name
world_path
}
// Function to update player position in level.dat based on spawn point coordinates
fn update_player_position(
world_path: &str,
spawn_point: Option<(f64, f64)>,
bbox_text: String,
scale: f64,
) -> Result<(), String> {
use crate::coordinate_system::transformation::CoordTransformer;
let Some((lat, lng)) = spawn_point else {
return Ok(()); // No spawn point selected, exit early
};
// Parse geometrical point and bounding box
let llpoint =
LLPoint::new(lat, lng).map_err(|e| format!("Failed to parse spawn point:\n{e}"))?;
let llbbox = LLBBox::from_str(&bbox_text)
.map_err(|e| format!("Failed to parse bounding box for spawn point:\n{e}"))?;
// Check if spawn point is within the bbox
if !llbbox.contains(&llpoint) {
return Err("Spawn point is outside the selected area".to_string());
}
// Convert lat/lng to Minecraft coordinates
let (transformer, _) = CoordTransformer::llbbox_to_xzbbox(&llbbox, scale)
.map_err(|e| format!("Failed to build transformation on coordinate systems:\n{e}"))?;
let xzpoint = transformer.transform_point(llpoint);
// Default y spawn position since terrain elevation cannot be determined yet
let y = 150.0;
// Read and update the level.dat file
let level_path = PathBuf::from(world_path).join("level.dat");
if !level_path.exists() {
return Err(format!("Level.dat not found at {level_path:?}"));
}
// Read the level.dat file
let level_data = match std::fs::read(&level_path) {
Ok(data) => data,
Err(e) => return Err(format!("Failed to read level.dat: {e}")),
};
// Decompress and parse the NBT data
let mut decoder = GzDecoder::new(level_data.as_slice());
let mut decompressed_data = Vec::new();
if let Err(e) = decoder.read_to_end(&mut decompressed_data) {
return Err(format!("Failed to decompress level.dat: {e}"));
}
let mut nbt_data = match fastnbt::from_bytes::<Value>(&decompressed_data) {
Ok(data) => data,
Err(e) => return Err(format!("Failed to parse level.dat NBT data: {e}")),
};
// Update player position and world spawn point
if let Value::Compound(ref mut root) = nbt_data {
if let Some(Value::Compound(ref mut data)) = root.get_mut("Data") {
// Set world spawn point
data.insert("SpawnX".to_string(), Value::Int(xzpoint.x));
data.insert("SpawnY".to_string(), Value::Int(y as i32));
data.insert("SpawnZ".to_string(), Value::Int(xzpoint.z));
// Update player position
if let Some(Value::Compound(ref mut player)) = data.get_mut("Player") {
if let Some(Value::List(ref mut pos)) = player.get_mut("Pos") {
if let Value::Double(ref mut pos_x) = pos.get_mut(0).unwrap() {
*pos_x = xzpoint.x as f64;
}
if let Value::Double(ref mut pos_y) = pos.get_mut(1).unwrap() {
*pos_y = y;
}
if let Value::Double(ref mut pos_z) = pos.get_mut(2).unwrap() {
*pos_z = xzpoint.z as f64;
}
}
}
}
}
// Serialize and save the updated level.dat
let serialized_data = match fastnbt::to_bytes(&nbt_data) {
Ok(data) => data,
Err(e) => return Err(format!("Failed to serialize updated level.dat: {e}")),
};
let mut encoder = flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
if let Err(e) = encoder.write_all(&serialized_data) {
return Err(format!("Failed to compress updated level.dat: {e}"));
}
let compressed_data = match encoder.finish() {
Ok(data) => data,
Err(e) => return Err(format!("Failed to finalize compression for level.dat: {e}")),
};
// Write the updated level.dat file
if let Err(e) = std::fs::write(level_path, compressed_data) {
return Err(format!("Failed to write updated level.dat: {e}"));
}
Ok(())
}
// Function to update player spawn Y coordinate based on terrain height after generation
pub fn update_player_spawn_y_after_generation(
world_path: &Path,
spawn_point: Option<(f64, f64)>,
bbox_text: String,
scale: f64,
ground: &Ground,
) -> Result<(), String> {
use crate::coordinate_system::transformation::CoordTransformer;
let Some((_lat, _lng)) = spawn_point else {
return Ok(()); // No spawn point selected, exit early
};
// Read the current level.dat file to get existing spawn coordinates
let level_path = PathBuf::from(world_path).join("level.dat");
if !level_path.exists() {
return Err(format!("Level.dat not found at {level_path:?}"));
}
// Read the level.dat file
let level_data = match std::fs::read(&level_path) {
Ok(data) => data,
Err(e) => return Err(format!("Failed to read level.dat: {e}")),
};
// Decompress and parse the NBT data
let mut decoder = GzDecoder::new(level_data.as_slice());
let mut decompressed_data = Vec::new();
if let Err(e) = decoder.read_to_end(&mut decompressed_data) {
return Err(format!("Failed to decompress level.dat: {e}"));
}
let mut nbt_data = match fastnbt::from_bytes::<Value>(&decompressed_data) {
Ok(data) => data,
Err(e) => return Err(format!("Failed to parse level.dat NBT data: {e}")),
};
// Get existing spawn coordinates and calculate new Y based on terrain
let (existing_spawn_x, existing_spawn_z) = if let Value::Compound(ref root) = nbt_data {
if let Some(Value::Compound(ref data)) = root.get("Data") {
let spawn_x = data.get("SpawnX").and_then(|v| {
if let Value::Int(x) = v {
Some(*x)
} else {
None
}
});
let spawn_z = data.get("SpawnZ").and_then(|v| {
if let Value::Int(z) = v {
Some(*z)
} else {
None
}
});
match (spawn_x, spawn_z) {
(Some(x), Some(z)) => (x, z),
_ => {
return Err("Spawn coordinates not found in level.dat".to_string());
}
}
} else {
return Err("Invalid level.dat structure: no Data compound".to_string());
}
} else {
return Err("Invalid level.dat structure: root is not a compound".to_string());
};
// Calculate terrain-based Y coordinate
let spawn_y = if ground.elevation_enabled {
// Parse coordinates for terrain lookup
let llbbox = LLBBox::from_str(&bbox_text)
.map_err(|e| format!("Failed to parse bounding box for spawn point:\n{e}"))?;
let (_, xzbbox) = CoordTransformer::llbbox_to_xzbbox(&llbbox, scale)
.map_err(|e| format!("Failed to build transformation:\n{e}"))?;
// Calculate relative coordinates for ground system
let relative_x = existing_spawn_x - xzbbox.min_x();
let relative_z = existing_spawn_z - xzbbox.min_z();
let terrain_point = XZPoint::new(relative_x, relative_z);
ground.level(terrain_point) + 2
} else {
-61 // Default Y if no terrain
};
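// Example: with elevation enabled and a terrain level of 72 at the spawn column,
// spawn_y becomes 74 (two blocks above ground); without terrain it falls back to
// the flat-world default of -61.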
// Update player position and world spawn point
if let Value::Compound(ref mut root) = nbt_data {
if let Some(Value::Compound(ref mut data)) = root.get_mut("Data") {
// Only update the Y coordinate, keep existing X and Z
data.insert("SpawnY".to_string(), Value::Int(spawn_y));
// Update player position - only Y coordinate
if let Some(Value::Compound(ref mut player)) = data.get_mut("Player") {
if let Some(Value::List(ref mut pos)) = player.get_mut("Pos") {
// Keep existing X and Z, only update Y
if let Value::Double(ref mut pos_y) = pos.get_mut(1).unwrap() {
*pos_y = spawn_y as f64;
}
}
}
}
}
// Serialize and save the updated level.dat
let serialized_data = match fastnbt::to_bytes(&nbt_data) {
Ok(data) => data,
Err(e) => return Err(format!("Failed to serialize updated level.dat: {e}")),
};
let mut encoder = flate2::write::GzEncoder::new(Vec::new(), flate2::Compression::default());
if let Err(e) = encoder.write_all(&serialized_data) {
return Err(format!("Failed to compress updated level.dat: {e}"));
}
let compressed_data = match encoder.finish() {
Ok(data) => data,
Err(e) => return Err(format!("Failed to finalize compression for level.dat: {e}")),
};
// Write the updated level.dat file
if let Err(e) = std::fs::write(level_path, compressed_data) {
return Err(format!("Failed to write updated level.dat: {e}"));
}
Ok(())
}
#[tauri::command]
fn gui_get_version() -> String {
env!("CARGO_PKG_VERSION").to_string()
}
#[tauri::command]
fn gui_check_for_updates() -> Result<bool, String> {
match version_check::check_for_updates() {
Ok(is_newer) => Ok(is_newer),
Err(e) => Err(format!("Error checking for updates: {e}")),
}
}
/// Returns the world map image data as base64 and geo bounds for overlay display.
/// Returns None if the map image or metadata doesn't exist.
#[tauri::command]
fn gui_get_world_map_data(world_path: String) -> Result<Option<WorldMapData>, String> {
let world_dir = PathBuf::from(&world_path);
let map_path = world_dir.join("arnis_world_map.png");
let metadata_path = world_dir.join("metadata.json");
// Check if both files exist
if !map_path.exists() || !metadata_path.exists() {
return Ok(None);
}
// Read and encode the map image as base64
let image_data = fs::read(&map_path).map_err(|e| format!("Failed to read map image: {e}"))?;
let base64_image =
base64::Engine::encode(&base64::engine::general_purpose::STANDARD, &image_data);
// Read metadata
let metadata_content =
fs::read_to_string(&metadata_path).map_err(|e| format!("Failed to read metadata: {e}"))?;
let metadata: serde_json::Value = serde_json::from_str(&metadata_content)
.map_err(|e| format!("Failed to parse metadata: {e}"))?;
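// Expected metadata.json shape (assumed here; only the four fields read below are
// required):
//     { "minGeoLat": 48.13, "maxGeoLat": 48.14, "minGeoLon": 11.56, "maxGeoLon": 11.58 }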
// Extract geo bounds (metadata uses camelCase from serde)
let min_lat = metadata["minGeoLat"]
.as_f64()
.ok_or("Missing minGeoLat in metadata")?;
let max_lat = metadata["maxGeoLat"]
.as_f64()
.ok_or("Missing maxGeoLat in metadata")?;
let min_lon = metadata["minGeoLon"]
.as_f64()
.ok_or("Missing minGeoLon in metadata")?;
let max_lon = metadata["maxGeoLon"]
.as_f64()
.ok_or("Missing maxGeoLon in metadata")?;
Ok(Some(WorldMapData {
image_base64: format!("data:image/png;base64,{}", base64_image),
min_lat,
max_lat,
min_lon,
max_lon,
}))
}
/// Data structure for world map overlay
#[derive(serde::Serialize)]
struct WorldMapData {
image_base64: String,
min_lat: f64,
max_lat: f64,
min_lon: f64,
max_lon: f64,
}
#[tauri::command]
#[allow(clippy::too_many_arguments)]
#[allow(unused_variables)]
fn gui_start_generation(
bbox_text: String,
selected_world: String,
world_scale: f64,
ground_level: i32,
floodfill_timeout: u64,
terrain_enabled: bool,
skip_osm_objects: bool,
interior_enabled: bool,
roof_enabled: bool,
fillground_enabled: bool,
is_new_world: bool,
spawn_point: Option<(f64, f64)>,
telemetry_consent: bool,
) -> Result<(), String> {
use crate::progress::emit_gui_error;
// Store telemetry consent for crash reporting
telemetry::set_telemetry_consent(telemetry_consent);
// Send generation click telemetry
telemetry::send_generation_click();
// If spawn point was chosen and the world is new, check and set the spawn point
if is_new_world && spawn_point.is_some() {
// Verify the spawn point is within bounds
if let Some(coords) = spawn_point {
let llbbox = match LLBBox::from_str(&bbox_text) {
Ok(bbox) => bbox,
Err(e) => {
let error_msg = format!("Failed to parse bounding box: {e}");
eprintln!("{error_msg}");
emit_gui_error(&error_msg);
return Err(error_msg);
}
};
let llpoint = LLPoint::new(coords.0, coords.1)
.map_err(|e| format!("Failed to parse spawn point: {e}"))?;
if llbbox.contains(&llpoint) {
// Spawn point is valid, update the player position
update_player_position(
&selected_world,
spawn_point,
bbox_text.clone(),
world_scale,
)
.map_err(|e| format!("Failed to set spawn point: {e}"))?;
}
}
}
tauri::async_runtime::spawn(async move {
if let Err(e) = tokio::task::spawn_blocking(move || {
// Acquire session lock for the world directory before starting generation
let world_path = PathBuf::from(&selected_world);
let _session_lock = match SessionLock::acquire(&world_path) {
Ok(lock) => lock,
Err(e) => {
let error_msg = format!("Failed to acquire session lock: {e}");
eprintln!("{error_msg}");
emit_gui_error(&error_msg);
return Err(error_msg);
}
};
// Parse the bounding box from the text with proper error handling
let bbox = match LLBBox::from_str(&bbox_text) {
Ok(bbox) => bbox,
Err(e) => {
let error_msg = format!("Failed to parse bounding box: {e}");
eprintln!("{error_msg}");
emit_gui_error(&error_msg);
return Err(error_msg);
}
};
// Add localized name to the world if user generated a new world
let updated_world_path = if is_new_world {
add_localized_world_name(world_path, &bbox)
} else {
world_path
};
// Create an Args instance with the chosen bounding box and world directory path
let args: Args = Args {
bbox,
file: None,
save_json_file: None,
path: updated_world_path,
downloader: "requests".to_string(),
scale: world_scale,
ground_level,
terrain: terrain_enabled,
interior: interior_enabled,
roof: roof_enabled,
fillground: fillground_enabled,
debug: false,
timeout: Some(std::time::Duration::from_secs(floodfill_timeout)),
generate_map: true,
spawn_point,
};
// If skip_osm_objects is true (terrain-only mode), skip fetching and processing OSM data
if skip_osm_objects {
// Generate ground data (terrain) for terrain-only mode
let ground = ground::generate_ground_data(&args);
// Create empty parsed_elements and xzbbox for terrain-only mode
let parsed_elements = Vec::new();
let (_coord_transformer, xzbbox) =
CoordTransformer::llbbox_to_xzbbox(&args.bbox, args.scale)
.map_err(|e| format!("Failed to create coordinate transformer: {}", e))?;
let _ = data_processing::generate_world(
parsed_elements,
xzbbox,
args.bbox,
ground,
&args,
);
// Session lock will be automatically released when _session_lock goes out of scope
return Ok(());
}
// Run data fetch and world generation (standard mode: objects + terrain, or objects only)
match retrieve_data::fetch_data_from_overpass(args.bbox, args.debug, "requests", None) {
Ok(raw_data) => {
let (mut parsed_elements, mut xzbbox) =
osm_parser::parse_osm_data(raw_data, args.bbox, args.scale, args.debug);
parsed_elements.sort_by(|el1, el2| {
let (el1_priority, el2_priority) =
(osm_parser::get_priority(el1), osm_parser::get_priority(el2));
match (
el1.tags().contains_key("landuse"),
el2.tags().contains_key("landuse"),
) {
(true, false) => std::cmp::Ordering::Greater,
(false, true) => std::cmp::Ordering::Less,
_ => el1_priority.cmp(&el2_priority),
}
});
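// Note: this comparator sends landuse elements to the end of the list while all
// other elements keep their parser-assigned priority order.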
let mut ground = ground::generate_ground_data(&args);
// Transform map (parsed_elements). Operations are defined in a json file
map_transformation::transform_map(
&mut parsed_elements,
&mut xzbbox,
&mut ground,
);
let _ = data_processing::generate_world(
parsed_elements,
xzbbox,
args.bbox,
ground,
&args,
);
// Session lock will be automatically released when _session_lock goes out of scope
Ok(())
}
Err(e) => {
let error_msg = format!("Failed to fetch data: {e}");
emit_gui_error(&error_msg);
// Session lock will be automatically released when _session_lock goes out of scope
Err(error_msg)
}
}
})
.await
{
let error_msg = format!("Error in blocking task: {e}");
eprintln!("{error_msg}");
emit_gui_error(&error_msg);
// Session lock will be automatically released when the task fails
}
});
Ok(())
}

View File

@@ -1,15 +0,0 @@
[Desktop Entry]
Type=Application
Name=Arnis
Comment=Generate cities from real life in Minecraft
Comment[lt]=Sugeneruokite tikrovės miestus „Minecraft“ žaidime
Comment[de]=Generiere echte Städte in Minecraft
Comment[fr]=Générez des villes réelles dans Minecraft
Comment[es]=Genera ciudades reales en Minecraft
Comment[pt]=Gere cidades reais no Minecraft
Comment[zh_CN]=在 Minecraft 中生成现实城市
Icon=icon
Exec=arnis
Categories=Game;Education;Geoscience;
Keywords=OSM;OpenStreetMap;Minecraft;
StartupWMClass=arnis

View File

@@ -1,381 +0,0 @@
html,
body,
#map {
height: 100%;
width: 100%;
padding: 0px;
margin: 0px;
font-family: "Courier New", Courier, monospace;
}
#info-box {
position: absolute;
width: 100%;
height: auto;
bottom: 0;
border-width: 0 0 7px 0;
z-index: 10000;
}
#coord-format {
position: absolute;
bottom: 0;
right: 15px;
}
#coord-format .bboxlabel {
width: 100%;
}
#coord-format form {
margin-bottom: 10px;
}
#info img {
vertical-align: middle;
height: 16px;
width: auto;
opacity: 0.6;
}
#info img:hover {
opacity: 1;
cursor: pointer;
}
#info-toggle ul {
margin: 0 0 0 20px;
padding: 0;
}
#info-toggle ul li {
display: inline-block;
padding: 4px 7px 0;
background-color: rgba(230, 230, 230, 0.4);
border-top-left-radius: 3px;
border-top-right-radius: 3px;
font-weight: 900;
cursor: pointer;
}
#info-toggle ul li.active {
background-color: rgba(255, 255, 255, 0.8);
}
#info-toggle-button {
background-color: rgba(0, 0, 0, 0.7) !important;
color: #fff;
}
#info-toggle-button:hover {
background-color: #f4f4f4;
}
#projlabel,
#wgslabel {
display: inline-block;
color: #333;
z-index: 10000;
}
#projcoords {
display: none;
}
.coords {
text-align: left;
padding: 7px 0;
background-color: rgba(255, 255, 255, 0.8);
}
#bbounds,
#mbounds,
#mcenter {
font-size: small;
display: block;
z-index: 10000;
padding: 2px 0;
}
#mouse,
#tile,
#zoom {
display: inline-block;
}
.bboxlabel {
font-size: small;
font-weight: bold;
z-index: 10000;
background-color: black;
color: white;
text-align: center;
display: inline-block;
padding-left: 2px;
width: 60px;
}
.bboxllpos,
.bboxllpossmall,
.bboxprojpos,
.bboxprojpossmall,
.tilesmall,
.zoomsmall {
font-size: small;
font-weight: bold;
color: black;
display: inline-block;
padding-left: 5px;
}
.bboxllpos {
width: 350px;
}
.bboxllpossmall {
width: 200px;
}
.bboxprojpos {
width: 450px;
}
.bboxprojpossmall {
width: 250px;
}
.tilesmall {
width: 60px;
}
.zoomsmall {
width: 20px;
}
#map-ui-proj {
position: absolute;
top: 460px;
left: 10px;
list-style: none;
margin: 0;
padding: 0;
z-index: 100;
}
#map-ui-proj a {
font: normal 13px/18px 'Helvetica Neue', Helvetica, sans-serif;
background: #FFF;
color: #3C4E5A;
display: block;
margin: 0;
padding: 0;
border: 1px solid #BBB;
border-bottom-width: 0;
min-width: 75px;
padding: 2px;
text-decoration: none;
text-align: center;
}
#map-ui-proj a:hover {
background: #ECF5FA;
}
#map-ui-proj li:last-child a {
border-bottom-width: 1px;
-webkit-border-radius: 0 0 3px 3px;
border-radius: 0 0 3px 3px;
}
#map-ui-proj li:first-child a {
-webkit-border-radius: 3px 3px 0 0;
border-radius: 3px 3px 0 0;
}
#map-ui-proj a.active {
background: #3887BE;
border-color: #3887BE;
border-top-color: #FFF;
color: #FFF;
}
.epsglabel {
font-size: small;
font-weight: bold;
background-color: black;
color: white;
z-index: 10000;
}
#map-ui {
position: absolute;
top: 330px;
left: 12px;
list-style: none;
margin: 0;
padding: 0;
z-index: 100;
}
#map-ui a {
background-color: #fff;
border-bottom: 1px solid #ccc;
width: 30px;
height: 30px;
line-height: 30px;
display: block;
text-align: center;
text-decoration: none;
color: black;
background-image: url('css/maps/images/bbox-sprites.png');
background-repeat: no-repeat;
}
#map-ui a:hover {
background-color: #ECF5FA;
}
#map-ui li:last-child a {
border-bottom-width: 1px;
-webkit-border-radius: 0 0 3px 3px;
border-radius: 0 0 3px 3px;
background-position: -33px -2px;
}
#map-ui li:last-child a.enabled {
border-bottom-width: 1px;
-webkit-border-radius: 0 0 3px 3px;
border-radius: 0 0 3px 3px;
background-position: -278px -2px;
}
#map-ui li:first-child a {
-webkit-border-radius: 3px 3px 0 0;
border-radius: 3px 3px 0 0;
}
#create-geojson a.enabled {
background-position: -248px -2px;
}
#create-geojson a {
background-position: -2px -2px;
}
#geolocation a {
background-position: -186px -2px;
}
#geolocation a.active {
background-position: -63px -2px;
}
#help a.enabled {
background-position: -342px -2px;
}
#help a {
background-position: -93px -2px;
}
.zeroclipboard-is-hover {
opacity: 1 !important;
}
.leaflet-sidebar textarea {
/* display: none; */
height: 95%;
width: 100%;
border-width: 1px;
padding: 2em;
resize: none;
-webkit-box-sizing: border-box;
/* Safari/Chrome, other WebKit */
-moz-box-sizing: border-box;
/* Firefox, other Gecko */
box-sizing: border-box;
/* Opera/IE 8+ */
}
.leaflet-sidebar button {
font-size: large;
}
.leaflet-sidebar .close {
z-index: 10001;
}
.ui-autocomplete {
max-height: 180px;
overflow-y: auto;
/* prevent horizontal scrollbar */
overflow-x: hidden;
}
/* IE 6 doesn't support max-height
* we use height instead, but this forces the menu to always be this tall
*/
* html .ui-autocomplete {
height: 180px;
}
.unblurred.leaflet-tile-loaded {
-moz-filter: blur(0) sepia(0) invert(0);
-webkit-filter: blur(0) sepia(0) invert(0);
filter: blur(0) sepia(0) invert(0);
-moz-transition: all 1s ease;
-webkit-transition: all 1s ease;
transition: all 1s ease;
}
.blurred.leaflet-tile-loaded {
-moz-filter: blur(1px) sepia(1) invert(1);
-moz-transition: all 1s ease;
-webkit-filter: blur(1px) sepia(1) invert(1);
-webkit-transition: all 1s ease;
filter: blur(1px) sepia(1) invert(1);
transition: all 1s ease;
}
/* World Preview Button in Edit Toolbar */
.leaflet-draw-toolbar .leaflet-draw-edit-preview {
background-position: -31px -2px;
}
.leaflet-draw-toolbar .leaflet-draw-edit-preview.disabled {
opacity: 0.4;
cursor: not-allowed;
pointer-events: none;
}
.leaflet-draw-toolbar .leaflet-draw-edit-preview.active {
background-color: #a0d0ff;
}
.world-preview-slider-container {
padding: 6px 8px !important;
background: white !important;
background-clip: padding-box;
}
.world-preview-slider-container a {
display: none !important;
}
.world-preview-slider {
width: 80px;
height: 8px;
cursor: pointer;
accent-color: #3887BE;
display: block;
margin: 0;
}

View File

Binary file not shown. (Before: 1.7 KiB)

View File

Binary file not shown. (Before: 418 B)

View File

Binary file not shown. (Before: 312 B)

View File

Binary file not shown. (Before: 205 B)

View File

Binary file not shown. (Before: 262 B)

Some files were not shown because too many files have changed in this diff.