Files
GeoData/scripts/rebase_cityjson_tiles.py
2025-12-18 21:18:47 +01:00

204 lines
6.4 KiB
Python

#!/usr/bin/env python3
"""Shift CityJSON coordinates into tile-local space using tile_index.csv offsets."""
from __future__ import annotations
import argparse
import csv
import json
import sys
from pathlib import Path
from typing import Any, Iterable
DEFAULT_TILE_INDEX = Path("export_unity/tile_index.csv")
def parse_args(argv: Iterable[str] | None = None) -> argparse.Namespace:
parser = argparse.ArgumentParser(
description="Rebase CityJSON coordinates so XY are relative to the tile bounds from tile_index.csv."
)
parser.add_argument(
"--input-dir",
type=Path,
default=Path("work/cityjson_tri"),
help="Directory containing CityJSON files (triangulated/split).",
)
parser.add_argument(
"--output-dir",
type=Path,
default=Path("work/cityjson_tri_local"),
help="Directory to write tile-local CityJSON files.",
)
parser.add_argument(
"--tile-index",
type=Path,
default=DEFAULT_TILE_INDEX,
help="Path to tile_index.csv produced by the heightmap export.",
)
parser.add_argument(
"--pattern",
default="**/*.city.json",
help="Glob pattern for input files (defaults to any *.city.json under the input dir).",
)
return parser.parse_args(argv)
def resolve_input_file(path: Path) -> Path | None:
"""Handle both flat files and citygml-tools style output directories."""
if path.is_file():
return path
if path.is_dir():
candidate = path / f"{path.stem}.json"
if candidate.is_file():
return candidate
matches = list(path.glob("*.json"))
if len(matches) == 1:
return matches[0]
return None
def strip_suffixes(name: str) -> str:
    """Remove known suffixes (.tri, .roof, .wall, .ground, .closure, .city.json).

    Returns the bare tile name with the ``.city.json`` extension and any
    stacked processing suffixes removed.
    """
    trimmed = name
    if trimmed.endswith(".json"):
        trimmed = trimmed[: -len(".json")]
    if trimmed.endswith(".city"):
        trimmed = trimmed[: -len(".city")]
    # Strip processing suffixes until none remain. A single ordered pass
    # (the previous behavior) missed stacked suffixes whose order differs
    # from the scan order, e.g. "tile.tri.roof" was left as "tile.tri".
    known = (".tri", ".roof", ".wall", ".ground", ".closure")
    while trimmed.endswith(known):
        trimmed = trimmed[: trimmed.rfind(".")]
    return trimmed
def tile_suffix(tile_id: str) -> str:
    """Return the last three underscore-separated fields of *tile_id*,
    or the id unchanged when it has fewer than three fields."""
    pieces = tile_id.split("_")
    if len(pieces) < 3:
        return tile_id
    return "_".join(pieces[-3:])
def load_tile_offsets(tile_index: Path) -> dict[str, tuple[float, float]]:
    """Read tile_index.csv and map tile identifiers to (xmin, ymin) offsets.

    Each row is registered under both its full tile_id and its short
    suffix, so lookups succeed with either form. Rows lacking a tile_id
    or usable numeric bounds are skipped. Raises SystemExit when the CSV
    file does not exist.
    """
    if not tile_index.exists():
        raise SystemExit(f"tile_index.csv missing: {tile_index}")
    offsets: dict[str, tuple[float, float]] = {}
    with tile_index.open("r", encoding="utf-8", newline="") as handle:
        for row in csv.DictReader(handle):
            tile_id = row.get("tile_id")
            if not tile_id:
                continue
            try:
                origin = (float(row["xmin"]), float(row["ymin"]))
            except (KeyError, TypeError, ValueError):
                # Ignore rows whose bounds are missing or non-numeric.
                continue
            offsets[tile_id] = origin
            offsets[tile_suffix(tile_id)] = origin
    return offsets
def read_json(path: Path) -> dict[str, Any]:
    """Load and return the JSON payload stored at *path* (UTF-8)."""
    text = path.read_text(encoding="utf-8")
    return json.loads(text)
def write_json(path: Path, payload: dict[str, Any]) -> None:
    """Write *payload* as indented ASCII JSON to *path*, creating parent
    directories as needed and terminating the file with a newline."""
    path.parent.mkdir(parents=True, exist_ok=True)
    text = json.dumps(payload, ensure_ascii=True, indent=2) + "\n"
    path.write_text(text, encoding="utf-8")
def ensure_three(values: list[Any] | None, default: float) -> list[float]:
resolved = [default, default, default]
if not values:
return resolved
for idx, value in enumerate(values[:3]):
try:
resolved[idx] = float(value)
except (TypeError, ValueError):
resolved[idx] = default
return resolved
def compute_extent(vertices: list[list[float]], scale: list[float], translate: list[float]) -> list[float] | None:
usable = [vertex for vertex in vertices if len(vertex) >= 3]
if not usable:
return None
xs = [vertex[0] * scale[0] + translate[0] for vertex in usable]
ys = [vertex[1] * scale[1] + translate[1] for vertex in usable]
zs = [vertex[2] * scale[2] + translate[2] for vertex in usable]
return [min(xs), min(ys), min(zs), max(xs), max(ys), max(zs)]
def rebase_cityjson(cityjson: dict[str, Any], offset: tuple[float, float]) -> None:
    """Shift the XY origin of *cityjson* by *offset*, mutating it in place.

    When a "transform" is present, only its translate is adjusted (the
    quantized vertices are left untouched); otherwise the raw vertices
    themselves are shifted. metadata.geographicalExtent is refreshed from
    the resulting world coordinates when any usable vertex exists.
    """
    xmin, ymin = offset
    transform = cityjson.get("transform")
    if transform:
        scale = ensure_three(transform.get("scale"), 1.0)
        translate = ensure_three(transform.get("translate"), 0.0)
        translate[0] -= xmin
        translate[1] -= ymin
        transform["translate"] = translate
        transform["scale"] = scale
        cityjson["transform"] = transform
    else:
        scale = [1.0, 1.0, 1.0]
        translate = [0.0, 0.0, 0.0]
        for vertex in cityjson.get("vertices") or []:
            if len(vertex) >= 2:
                vertex[0] -= xmin
                vertex[1] -= ymin
    extent = compute_extent(cityjson.get("vertices") or [], scale, translate)
    if extent:
        metadata = cityjson.get("metadata") or {}
        metadata["geographicalExtent"] = extent
        cityjson["metadata"] = metadata
def process_file(path: Path, offsets: dict[str, tuple[float, float]], output_dir: Path) -> int:
    """Rebase one input path into *output_dir*.

    Returns 1 when a tile-local file was written, 0 when the input could
    not be resolved or has no matching tile_index entry (a skip notice is
    printed to stderr in those cases).
    """
    source = resolve_input_file(path)
    if not source:
        print(f"[skip] cannot resolve CityJSON file for {path}", file=sys.stderr)
        return 0
    tile_name = strip_suffixes(path.name)
    # Offsets are keyed by both full id and short suffix; try both forms.
    offset = offsets.get(tile_name) or offsets.get(tile_suffix(tile_name))
    if offset is None:
        print(f"[skip] no tile_index entry for {tile_name}", file=sys.stderr)
        return 0
    payload = read_json(source)
    rebase_cityjson(payload, offset)
    write_json(output_dir / path.name, payload)
    return 1
def main(argv: Iterable[str] | None = None) -> int:
    """Entry point: rebase every matching CityJSON file.

    Returns 0 when at least one file was written, 1 when no inputs matched
    or every matched input was skipped.
    """
    args = parse_args(argv)
    offsets = load_tile_offsets(args.tile_index)
    inputs = sorted(args.input_dir.glob(args.pattern))
    if not inputs:
        print(f"No input files matched pattern '{args.pattern}' in {args.input_dir}", file=sys.stderr)
        return 1
    written = sum(process_file(entry, offsets, args.output_dir) for entry in inputs)
    print(f"Wrote {written} tile-local file(s) to {args.output_dir}")
    return 0 if written else 1
if __name__ == "__main__":
    # SystemExit carries the same integer status that sys.exit would raise.
    raise SystemExit(main())