Refactor exporters with shared GDAL helpers and CLI options

This commit is contained in:
s0wlz (Matthias Puchstein)
2025-12-15 20:57:16 +01:00
parent abb37f25e2
commit e0429b22d3
6 changed files with 233 additions and 173 deletions

View File

@@ -13,7 +13,8 @@
- Orthophoto export: `uv run python export_ortho_tiles.py` (requires JP2s under `raw_dop/jp2/`).
- Refresh VRT manually if needed: `gdalbuildvrt work/dgm.vrt raw_dgm1/*.tif`.
- Inspect a result: `gdalinfo export_unity/height_png16/<tile>.png | head` to sanity-check bounds and scaling.
- Expected warning: `Computed -srcwin ... falls partially outside source raster extent` means the DOP coverage is slightly smaller than the tile footprint; edge pixels will be filled with NoData/zeros. Add adjacent JP2s or shrink the requested window if you need to silence it.
- Expected warning: `Computed -srcwin ... falls partially outside source raster extent` means the DOP coverage is slightly smaller than the tile footprint; edge pixels will be filled with NoData/zeros. Add adjacent JP2s or shrink the requested window if you need to silence it.
- Scripts accept CLI overrides (e.g., `--out-dir`, `--jpeg-quality`, `--resample`); run `uv run python <script> -h` to see options.
## Coding Style & Naming Conventions
- Python scripts use 4-space indentation, early-exit error handling, and `SystemExit` for fatal issues; follow PEP 8 where practical.

View File

@@ -37,6 +37,7 @@ This repository converts DGM1 elevation tiles into Unity-ready 16-bit PNG height
- Refresh VRT: `gdalbuildvrt work/dgm.vrt raw_dgm1/*.tif`
- Run export pipeline: `uv run python export_heightmaps.py`
- Inspect an output tile: `gdalinfo export_unity/height_png16/<tile>.png | head`
- Override defaults (e.g., orthophoto out dir): `uv run python export_ortho_tiles.py --out-dir export_unity/ortho_jpg` (see `-h` on each script for tunables).
### Workflow Notes
- The script computes a global min/max from the VRT to scale all tiles consistently; adjust `OUT_RES`, `RESAMPLE`, or `TILE_SIZE_M` in `export_heightmaps.py` if your AOI or target resolution changes.

View File

@@ -1,152 +1,151 @@
#!/usr/bin/env python3
"""Export DGM1 tiles to Unity-ready 16-bit PNG heightmaps and a manifest."""
from __future__ import annotations
import argparse
import glob
import os
from typing import Iterable
from osgeo import gdal
RAW_DIR = "raw_dgm1"
VRT_PATH = "work/dgm.vrt"
OUT_DIR = "export_unity/height_png16"
TILE_SIZE_M = 1000 # real-world tile size in meters
OUT_RES = 1025 # Unity Terrain-friendly resolution (2^n + 1)
RESAMPLE = "bilinear"
os.makedirs("work", exist_ok=True)
os.makedirs(OUT_DIR, exist_ok=True)
from gdal_utils import (
build_vrt,
cleanup_aux_files,
ensure_dir,
ensure_parent,
open_dataset,
safe_remove,
)
gdal.UseExceptions()
def build_dgm_vrt_if_needed():
"""Build the DGM VRT automatically when missing."""
if os.path.exists(VRT_PATH):
return
tif_paths = sorted(glob.glob(os.path.join(RAW_DIR, "*.tif")))
if not tif_paths:
raise SystemExit(f"No TIFFs found in {RAW_DIR}; cannot build {VRT_PATH}.")
print(f"Building {VRT_PATH} from {len(tif_paths)} GeoTIFFs...")
try:
gdal.BuildVRT(VRT_PATH, tif_paths)
except RuntimeError as exc:
raise SystemExit(f"Could not build {VRT_PATH}: {exc}") from exc
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Export heightmaps and manifest from DGM tiles.")
parser.add_argument("--raw-dir", default="raw_dgm1", help="Directory containing input DGM GeoTIFFs.")
parser.add_argument("--vrt-path", default="work/dgm.vrt", help="Path to build/read the DGM VRT.")
parser.add_argument("--out-dir", default="export_unity/height_png16", help="Output directory for PNG heightmaps.")
parser.add_argument(
"--manifest-path",
default=os.path.join("export_unity", "tile_index.csv"),
help="Output CSV manifest path.",
)
parser.add_argument("--out-res", type=int, default=1025, help="Output resolution per tile (2^n + 1 for Unity).")
parser.add_argument("--resample", default="bilinear", help="GDAL resampling algorithm used during warp.")
parser.add_argument(
"--tile-size-m",
type=int,
default=1000,
help="Real-world tile size in meters (informational; input footprints drive bounds).",
)
parser.add_argument(
"--skip-cleanup",
action="store_true",
help="Leave temp GDAL files instead of deleting aux XML and tmp rasters.",
)
return parser.parse_args()
def open_dataset(path, purpose):
"""Open a dataset and fail fast with context."""
try:
ds = gdal.Open(path)
except RuntimeError as exc:
raise SystemExit(f"{purpose}: {exc}") from exc
if ds is None:
raise SystemExit(f"{purpose}: GDAL returned None for {path}")
return ds
def safe_remove(path):
"""Remove a file if present; return True when deleted."""
try:
os.remove(path)
return True
except FileNotFoundError:
return False
except OSError as exc:
print(f"Warning: could not remove {path}: {exc}")
return False
def cleanup_aux_files():
"""Clear GDAL sidecars and leftover temp files to keep the repo tidy."""
patterns = [
def build_patterns(raw_dir: str) -> Iterable[str]:
return [
os.path.join("work", "*_tmp.tif"),
os.path.join("work", "*_tmp.tif.aux.xml"),
os.path.join("work", "*.aux.xml"),
os.path.join(RAW_DIR, "*.aux.xml"),
os.path.join(raw_dir, "*.aux.xml"),
]
removed = 0
for pattern in patterns:
for path in glob.glob(pattern):
if safe_remove(path):
removed += 1
print(f"Cleanup removed {removed} temporary files/sidecars.")
build_dgm_vrt_if_needed()
ds = open_dataset(VRT_PATH, f"Could not open {VRT_PATH} after attempting to build it.")
def export_heightmaps(args: argparse.Namespace) -> int:
ensure_dir("work")
ensure_dir(args.out_dir)
ensure_parent(args.manifest_path)
band = ds.GetRasterBand(1)
tif_paths = sorted(glob.glob(os.path.join(args.raw_dir, "*.tif")))
build_vrt(args.vrt_path, tif_paths)
ds = open_dataset(args.vrt_path, f"Could not open {args.vrt_path} after attempting to build it.")
gmin, gmax = band.ComputeRasterMinMax(False)
print(f"GLOBAL_MIN={gmin}, GLOBAL_MAX={gmax}")
band = ds.GetRasterBand(1)
gmin, gmax = band.ComputeRasterMinMax(False)
print(f"GLOBAL_MIN={gmin}, GLOBAL_MAX={gmax}")
manifest_path = os.path.join("export_unity", "tile_index.csv")
with open(manifest_path, "w", encoding="utf-8") as f:
f.write("tile_id,xmin,ymin,xmax,ymax,global_min,global_max,out_res\n")
with open(args.manifest_path, "w", encoding="utf-8") as f:
f.write("tile_id,xmin,ymin,xmax,ymax,global_min,global_max,out_res\n")
skipped = 0
written = 0
skipped = 0
written = 0
for tif in sorted(glob.glob(os.path.join(RAW_DIR, "*.tif"))):
try:
tds = open_dataset(tif, f"Skipping unreadable {tif}")
except SystemExit as exc:
print(exc)
skipped += 1
continue
for tif in tif_paths:
try:
tds = open_dataset(tif, f"Skipping unreadable {tif}")
except SystemExit as exc:
print(exc)
skipped += 1
continue
gt = tds.GetGeoTransform()
ulx, xres, _, uly, _, yres = gt # yres typically negative in north-up rasters
gt = tds.GetGeoTransform()
ulx, xres, _, uly, _, yres = gt # yres typically negative in north-up rasters
# Use the source tile footprint directly to avoid shifting during export.
xmax = ulx + xres * tds.RasterXSize
ymin = uly + yres * tds.RasterYSize
xmin = ulx
ymax = uly
# Use the source tile footprint directly to avoid shifting during export.
xmax = ulx + xres * tds.RasterXSize
ymin = uly + yres * tds.RasterYSize
xmin = ulx
ymax = uly
base = os.path.splitext(os.path.basename(tif))[0]
tile_id = base # keep stable naming = easy re-export + reimport
base = os.path.splitext(os.path.basename(tif))[0]
tile_id = base # keep stable naming = easy re-export + reimport
tmp_path = os.path.join("work", f"{tile_id}_tmp.tif")
out_path = os.path.join(OUT_DIR, f"{tile_id}.png")
tmp_path = os.path.join("work", f"{tile_id}_tmp.tif")
out_path = os.path.join(args.out_dir, f"{tile_id}.png")
warp_opts = gdal.WarpOptions(
outputBounds=(xmin, ymin, xmax, ymax),
width=OUT_RES,
height=OUT_RES,
resampleAlg=RESAMPLE,
srcNodata=-9999,
dstNodata=gmin, # fill nodata with global min to avoid deep pits
)
try:
gdal.Warp(tmp_path, ds, options=warp_opts)
except RuntimeError as exc:
print(f"Warp failed for {tile_id}: {exc}")
skipped += 1
continue
warp_opts = gdal.WarpOptions(
outputBounds=(xmin, ymin, xmax, ymax),
width=args.out_res,
height=args.out_res,
resampleAlg=args.resample,
srcNodata=-9999,
dstNodata=gmin, # fill nodata with global min to avoid deep pits
)
try:
gdal.Warp(tmp_path, ds, options=warp_opts)
except RuntimeError as exc:
print(f"Warp failed for {tile_id}: {exc}")
skipped += 1
continue
# Scale to UInt16 (0..65535) using Strategy-B global min/max
trans_opts = gdal.TranslateOptions(
outputType=gdal.GDT_UInt16,
scaleParams=[(gmin, gmax, 0, 65535)],
format="PNG",
creationOptions=["WORLDFILE=YES"], # emit .wld so GIS tools place tiles correctly
)
try:
gdal.Translate(out_path, tmp_path, options=trans_opts)
except RuntimeError as exc:
print(f"Translate failed for {tile_id}: {exc}")
skipped += 1
continue
safe_remove(tmp_path)
safe_remove(f"{tmp_path}.aux.xml")
# Scale to UInt16 (0..65535) using Strategy-B global min/max
trans_opts = gdal.TranslateOptions(
outputType=gdal.GDT_UInt16,
scaleParams=[(gmin, gmax, 0, 65535)],
format="PNG",
creationOptions=["WORLDFILE=YES"], # emit .wld so GIS tools place tiles correctly
)
try:
gdal.Translate(out_path, tmp_path, options=trans_opts)
except RuntimeError as exc:
print(f"Translate failed for {tile_id}: {exc}")
skipped += 1
continue
safe_remove(tmp_path)
safe_remove(f"{tmp_path}.aux.xml")
f.write(f"{tile_id},{xmin},{ymin},{xmax},{ymax},{gmin},{gmax},{OUT_RES}\n")
print(f"Wrote {out_path}")
written += 1
f.write(f"{tile_id},{xmin},{ymin},{xmax},{ymax},{gmin},{gmax},{args.out_res}\n")
print(f"Wrote {out_path}")
written += 1
print(f"Manifest: {manifest_path}")
print(f"Summary: wrote {written} tiles; skipped {skipped}.")
cleanup_aux_files()
print(f"Manifest: {args.manifest_path}")
print(f"Summary: wrote {written} tiles; skipped {skipped}.")
if not args.skip_cleanup:
removed = cleanup_aux_files(build_patterns(args.raw_dir))
print(f"Cleanup removed {removed} temporary files/sidecars.")
if skipped:
raise SystemExit(1)
return 1 if skipped else 0
def main() -> None:
args = parse_args()
raise SystemExit(export_heightmaps(args))
if __name__ == "__main__":
main()

View File

@@ -1,68 +1,51 @@
#!/usr/bin/env python3
"""Export orthophoto tiles aligned to the terrain grid.
Inputs
- raw_dop/jp2/*.jp2 : orthophoto source tiles (DOP20 RGB)
- export_unity/tile_index.csv : manifest produced by export_heightmaps.py
Outputs
- work/dop.vrt : auto-built VRT mosaic of all JP2 tiles
- export_unity/ortho_jpg/<tile_id>.jpg : cropped JPEG tiles + .jgw worldfiles
"""
"""Export orthophoto tiles aligned to the terrain grid."""
from __future__ import annotations
import argparse
import csv
import glob
import os
from typing import Iterable
from osgeo import gdal
RAW_ORTHO_DIR = "raw_dop/jp2"
VRT_PATH = "work/dop.vrt"
TILE_INDEX = "export_unity/tile_index.csv"
OUT_DIR = "export_unity/ortho_jpg"
# 1000 m tiles at ~0.5 m/pixel give good visual quality in Unity while staying light.
OUT_RES = 2048
JPEG_QUALITY = 90
from gdal_utils import build_vrt, ensure_dir, ensure_parent, open_dataset
gdal.UseExceptions()
os.makedirs("work", exist_ok=True)
os.makedirs(OUT_DIR, exist_ok=True)
def build_vrt(jp2_paths: Iterable[str]) -> None:
"""Build the orthophoto VRT if missing."""
print(f"Building VRT at {VRT_PATH} from {len(list(jp2_paths))} JP2 files...")
gdal.BuildVRT(VRT_PATH, list(jp2_paths))
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(description="Export orthophoto tiles aligned to the terrain grid.")
parser.add_argument("--raw-ortho-dir", default="raw_dop/jp2", help="Directory containing JP2 orthophoto tiles.")
parser.add_argument("--vrt-path", default="work/dop.vrt", help="Path to build/read the orthophoto VRT.")
parser.add_argument("--tile-index", default="export_unity/tile_index.csv", help="Tile manifest from heightmap export.")
parser.add_argument("--out-dir", default="export_unity/ortho_jpg", help="Output directory for cropped orthophotos.")
parser.add_argument(
"--out-res",
type=int,
default=2048,
help="Output resolution per tile (default matches 1000 m tiles at ~0.5 m/px).",
)
parser.add_argument("--jpeg-quality", type=int, default=90, help="JPEG quality for exported tiles.")
return parser.parse_args()
def open_dataset(path: str, purpose: str):
"""Open a dataset and fail fast with context."""
try:
ds = gdal.Open(path)
except RuntimeError as exc:
raise SystemExit(f"{purpose}: {exc}") from exc
if ds is None:
raise SystemExit(f"{purpose}: GDAL returned None for {path}")
return ds
def export_orthos(args: argparse.Namespace) -> int:
ensure_dir("work")
ensure_dir(args.out_dir)
ensure_parent(args.vrt_path)
def main() -> None:
jp2_paths = sorted(glob.glob(os.path.join(RAW_ORTHO_DIR, "*.jp2")))
jp2_paths = sorted(glob.glob(os.path.join(args.raw_ortho_dir, "*.jp2")))
if not jp2_paths:
raise SystemExit(f"No JP2 files found in {RAW_ORTHO_DIR}. Run raw_dop/dlscript.sh first.")
raise SystemExit(f"No JP2 files found in {args.raw_ortho_dir}. Run raw_dop/dlscript.sh first.")
if not os.path.exists(VRT_PATH):
build_vrt(jp2_paths)
build_vrt(args.vrt_path, jp2_paths)
vrt_ds = open_dataset(args.vrt_path, f"Could not open VRT at {args.vrt_path}")
vrt_ds = open_dataset(VRT_PATH, f"Could not open VRT at {VRT_PATH}")
if not os.path.exists(args.tile_index):
raise SystemExit(f"Tile index missing: {args.tile_index}. Run export_heightmaps.py first.")
if not os.path.exists(TILE_INDEX):
raise SystemExit(f"Tile index missing: {TILE_INDEX}. Run export_heightmaps.py first.")
with open(TILE_INDEX, newline="", encoding="utf-8") as f:
with open(args.tile_index, newline="", encoding="utf-8") as f:
reader = csv.DictReader(f)
written = 0
skipped = 0
@@ -79,13 +62,13 @@ def main() -> None:
skipped += 1
continue
out_path = os.path.join(OUT_DIR, f"{tile_id}.jpg")
out_path = os.path.join(args.out_dir, f"{tile_id}.jpg")
opts = gdal.TranslateOptions(
format="JPEG",
width=OUT_RES,
height=OUT_RES,
width=args.out_res,
height=args.out_res,
projWin=(xmin, ymax, xmax, ymin), # xmin,xmax,ymax,ymin (upper-left origin)
creationOptions=[f"QUALITY={JPEG_QUALITY}", "WORLDFILE=YES"],
creationOptions=[f"QUALITY={args.jpeg_quality}", "WORLDFILE=YES"],
)
try:
gdal.Translate(out_path, vrt_ds, options=opts)
@@ -98,8 +81,12 @@ def main() -> None:
print(f"Wrote {out_path}")
print(f"Summary: wrote {written} orthophoto tiles; skipped {skipped}.")
if skipped:
raise SystemExit(1)
return 1 if skipped else 0
def main() -> None:
args = parse_args()
raise SystemExit(export_orthos(args))
if __name__ == "__main__":

71
gdal_utils.py Normal file
View File

@@ -0,0 +1,71 @@
#!/usr/bin/env python3
"""Shared GDAL helpers for the GeoData exporters."""
from __future__ import annotations
import glob
import os
from typing import Iterable, Sequence
from osgeo import gdal
gdal.UseExceptions()
def ensure_dir(path: str) -> None:
    """Create *path* (including missing parents); a pre-existing dir is fine."""
    # exist_ok makes this idempotent, so callers never need to check first.
    os.makedirs(path, exist_ok=True)
def ensure_parent(path: str) -> None:
    """Make sure the directory that will contain *path* exists.

    A bare filename (no directory component) needs no work: the current
    directory always exists.
    """
    parent = os.path.dirname(path)
    if parent:
        os.makedirs(parent, exist_ok=True)
def open_dataset(path: str, purpose: str):
    """Open a raster with GDAL, converting any failure into a fatal exit.

    Args:
        path: Dataset path handed to ``gdal.Open``.
        purpose: Human-readable context prefixed to the error message.

    Returns:
        The opened GDAL dataset.

    Raises:
        SystemExit: When GDAL raises, or silently hands back ``None``.
    """
    try:
        dataset = gdal.Open(path)
    except RuntimeError as exc:
        raise SystemExit(f"{purpose}: {exc}") from exc
    # Older GDAL builds return None instead of raising; treat both the same.
    if dataset is not None:
        return dataset
    raise SystemExit(f"{purpose}: GDAL returned None for {path}")
def build_vrt(vrt_path: str, sources: Sequence[str]) -> bool:
    """Build *vrt_path* from *sources* unless it is already on disk.

    Args:
        vrt_path: Destination path for the VRT mosaic.
        sources: Raster paths to mosaic together.

    Returns:
        True when a new VRT was written, False when one already existed
        (delete the file to force a rebuild).

    Raises:
        SystemExit: When *sources* is empty or GDAL fails to build.
    """
    if os.path.exists(vrt_path):
        return False
    if not sources:
        raise SystemExit(f"No sources available to build VRT {vrt_path}.")
    ensure_parent(vrt_path)
    print(f"Building {vrt_path} from {len(sources)} files...")
    source_list = list(sources)
    try:
        gdal.BuildVRT(vrt_path, source_list)
    except RuntimeError as exc:
        raise SystemExit(f"Could not build {vrt_path}: {exc}") from exc
    return True
def safe_remove(path: str) -> bool:
    """Delete *path* if present; report whether a file was actually removed.

    A missing file is the quiet success case (False). Any other OS error is
    logged and swallowed so cleanup never aborts an export run.
    """
    try:
        os.remove(path)
    except FileNotFoundError:
        return False
    except OSError as exc:
        print(f"Warning: could not remove {path}: {exc}")
        return False
    return True
def cleanup_aux_files(patterns: Iterable[str]) -> int:
    """Delete every file matching any glob in *patterns*.

    Args:
        patterns: Glob expressions for GDAL sidecars / temp rasters.

    Returns:
        Number of files actually removed (failed removals are not counted).
    """
    # safe_remove handles races and permission errors, so this never raises.
    return sum(
        1
        for pattern in patterns
        for candidate in glob.glob(pattern)
        if safe_remove(candidate)
    )

View File

@@ -14,12 +14,13 @@ build-backend = "hatchling.build"
[tool.hatch.build.targets.wheel]
packages = []
force-include = { "export_heightmaps.py" = "export_heightmaps.py", "export_ortho_tiles.py" = "export_ortho_tiles.py" }
force-include = { "export_heightmaps.py" = "export_heightmaps.py", "export_ortho_tiles.py" = "export_ortho_tiles.py", "gdal_utils.py" = "gdal_utils.py" }
[tool.hatch.build.targets.sdist]
include = [
"export_heightmaps.py",
"export_ortho_tiles.py",
"gdal_utils.py",
"README.md",
"AGENTS.md",
"pyproject.toml",