Files
ricycleplanner/main.go
2025-05-01 23:12:35 +02:00

2997 lines
91 KiB
Go

package main
import (
"encoding/json"
"flag"
"fmt"
"github.com/fatih/color"
"github.com/olekukonko/tablewriter"
"github.com/rs/zerolog"
"github.com/tkrajina/gpxgo/gpx"
"io"
"math"
"net/http"
"net/url"
"os"
"path/filepath"
"sort"
"strconv"
"strings"
"time"
)
// Global logger
// logger is the process-wide zerolog logger; assumed to be configured elsewhere (e.g. in main).
var logger zerolog.Logger
// Cache for Overpass API responses
// overpassCache memoizes Overpass results keyed by the raw query text.
// NOTE(review): plain map with no visible locking — assumed single-goroutine use; confirm before adding concurrency.
var overpassCache = make(map[string][]OverpassElement)
// Types
// OverpassElement is a single element of an Overpass API response.
// Lat/Lon are populated for node results (see queryResupplyNearby's "out center"
// query); Geometry carries the vertex list for way results requested with
// "out geom" (see queryForestNearby).
type OverpassElement struct {
Type string `json:"type"`
Lat float64 `json:"lat"`
Lon float64 `json:"lon"`
Tags map[string]string `json:"tags,omitempty"`
Geometry []struct {
Lat float64 `json:"lat"`
Lon float64 `json:"lon"`
} `json:"geometry,omitempty"`
}
// OverpassResponse is the top-level JSON envelope returned by the Overpass API.
type OverpassResponse struct {
Elements []OverpassElement `json:"elements"`
}
// TrackPoint is a GPX point annotated with cumulative route statistics
// (populated by processRouteData).
type TrackPoint struct {
Point gpx.GPXPoint
Distance float64 // from start
Elevation float64 // cumulative gain
Index int // position within the flattened point slice
}
// MultiStop is a user-requested overnight stop on the route; Nights is the
// number of nights spent at (Lat, Lon). Parsed from "lat,lon:nights" syntax
// by parseMultiStops.
type MultiStop struct {
Lat float64
Lon float64
Nights int
}
// RouteData is the processed route: the flattened point list plus totals,
// where TotalEffort = TotalDist + TotalElev/100 * elevation factor
// (see processRouteData).
type RouteData struct {
Points []TrackPoint
TotalDist float64
TotalElev float64
TotalEffort float64
}
// DaySegment describes one planned day of the route: the track points
// covered, nearby forest/resupply candidates, and per-day totals.
// NOTE(review): field semantics are inferred from names — the code that
// populates this struct is not in this chunk; verify there.
type DaySegment struct {
Segment []TrackPoint
Forest bool
ForestLat float64
ForestLon float64
Resupply bool
ResupplyLat float64
ResupplyLon float64
ResupplyFirstThird bool
ResupplyMiddleThird bool
ResupplyLastThird bool
Distance float64
Elevation float64
Effort float64
DayNumber int
}
// PlannerConfig holds the options driving the planner (presumably populated
// from command-line flags; the file imports "flag").
// NOTE(review): radius units are inconsistent downstream — ForestRadius is
// scaled by 1000 in queryForestNearby while ResupplyRadius is passed to
// Overpass unscaled in queryResupplyNearby; confirm the intended units.
type PlannerConfig struct {
Days int
ElevFactor float64
ForestRadius int
ResupplyRadius int
OutputDir string
PrintMd bool
PrettyOutput bool
UseBatchQuery bool
}
// Utility functions
// parseMultiStops parses a semicolon-separated list of forced stops of the
// form "lat,lon:nights" (e.g. "48.1,11.5:2;40.0,9.0:1").
//
// Entries with the wrong shape or unparseable numbers are skipped. The
// previous version ignored strconv errors, so a malformed entry such as
// "abc,def:x" silently produced a bogus zero-valued stop at (0, 0).
func parseMultiStops(raw string) []MultiStop {
	var stops []MultiStop
	if raw == "" {
		return stops
	}
	for _, entry := range strings.Split(raw, ";") {
		parts := strings.Split(entry, ":")
		if len(parts) != 2 {
			continue
		}
		coords := strings.Split(parts[0], ",")
		if len(coords) != 2 {
			continue
		}
		lat, errLat := strconv.ParseFloat(coords[0], 64)
		lon, errLon := strconv.ParseFloat(coords[1], 64)
		nights, errNights := strconv.Atoi(parts[1])
		if errLat != nil || errLon != nil || errNights != nil {
			// Skip entries with unparseable numbers rather than emitting
			// a zero-valued stop.
			continue
		}
		stops = append(stops, MultiStop{Lat: lat, Lon: lon, Nights: nights})
	}
	return stops
}
// haversine returns the great-circle distance in kilometres between two
// points given in decimal degrees, using the haversine formula with a mean
// Earth radius of 6371 km.
func haversine(lat1, lon1, lat2, lon2 float64) float64 {
	const earthRadiusKm = 6371.0
	deltaLat := (lat2 - lat1) * math.Pi / 180
	deltaLon := (lon2 - lon1) * math.Pi / 180
	sinLat := math.Sin(deltaLat / 2)
	sinLon := math.Sin(deltaLon / 2)
	cosProduct := math.Cos(lat1*math.Pi/180) * math.Cos(lat2*math.Pi/180)
	a := sinLat*sinLat + cosProduct*sinLon*sinLon
	centralAngle := 2 * math.Atan2(math.Sqrt(a), math.Sqrt(1-a))
	return earthRadiusKm * centralAngle
}
// Overpass API functions
// overpassQuery runs an Overpass QL query against the public Overpass API
// endpoint and returns the decoded elements. Successful results are memoized
// in overpassCache keyed by the raw query string. Connection errors and
// HTTP 429/504 responses are retried up to 5 times with a linearly growing
// backoff (2s, 4s, ...).
//
// Fixes over the previous version: the response body is now drained and
// closed before retrying on 429/504 (it was leaked, pinning the connection),
// and the 429/504 path now honors the same 5-attempt cap instead of
// retrying forever.
func overpassQuery(query string) ([]OverpassElement, error) {
	// Serve repeated queries from the in-process cache.
	if cachedResult, ok := overpassCache[query]; ok {
		logger.Debug().Str("query", query).Int("elements", len(cachedResult)).Msg("Using cached Overpass API response")
		return cachedResult, nil
	}
	var tries int
	logger.Debug().Str("query", query).Msg("Starting Overpass API query")
	for {
		logger.Debug().Int("attempt", tries+1).Msg("Sending request to Overpass API")
		resp, err := http.PostForm("https://overpass-api.de/api/interpreter", url.Values{"data": {query}})
		if err != nil {
			tries++
			logger.Debug().Err(err).Int("attempt", tries).Msg("Error connecting to Overpass API")
			if tries >= 5 {
				logger.Error().Err(err).Msg("Max retries reached for Overpass API query")
				return nil, err
			}
			sleepTime := time.Duration(tries*2) * time.Second
			logger.Debug().Dur("sleep", sleepTime).Msg("Retrying after delay")
			time.Sleep(sleepTime)
			continue
		}
		logger.Debug().Int("statusCode", resp.StatusCode).Msg("Received response from Overpass API")
		if resp.StatusCode == 429 || resp.StatusCode == 504 {
			// Drain and close the body before retrying so the underlying
			// connection is not leaked and can be reused.
			_, _ = io.Copy(io.Discard, resp.Body)
			_ = resp.Body.Close()
			tries++
			logger.Debug().Int("statusCode", resp.StatusCode).Int("attempt", tries).Msg("Rate limited or gateway timeout")
			if tries >= 5 {
				err := fmt.Errorf("overpass API returned status %d after %d attempts", resp.StatusCode, tries)
				logger.Error().Err(err).Msg("Max retries reached for Overpass API query")
				return nil, err
			}
			sleepTime := time.Duration(tries*2) * time.Second
			logger.Debug().Dur("sleep", sleepTime).Msg("Retrying after delay")
			time.Sleep(sleepTime)
			continue
		}
		body, err := io.ReadAll(resp.Body)
		closeErr := resp.Body.Close()
		if err != nil {
			logger.Error().Err(err).Msg("Error reading response body")
			return nil, err
		}
		if closeErr != nil {
			logger.Error().Err(closeErr).Msg("Error closing response body")
			return nil, closeErr
		}
		var parsed OverpassResponse
		if err := json.Unmarshal(body, &parsed); err != nil {
			logger.Error().Err(err).Msg("Error parsing JSON response")
			return nil, err
		}
		logger.Debug().Int("elements", len(parsed.Elements)).Msg("Successfully parsed Overpass API response")
		// Store the result in the cache for subsequent identical queries.
		overpassCache[query] = parsed.Elements
		return parsed.Elements, nil
	}
}
// queryForestNearby reports whether a wooded area (a way tagged natural=wood
// or landuse=forest) lies near (lat, lon), returning the first geometry
// vertex of the first matching way as the forest location.
// NOTE(review): radius is multiplied by 1000 before being passed to the
// Overpass `around` filter (so the config value is presumably kilometres),
// while queryResupplyNearby passes its radius through unscaled — confirm the
// intended units of both.
func queryForestNearby(lat, lon float64, radius int) (bool, float64, float64) {
logger.Debug().Float64("lat", lat).Float64("lon", lon).Int("radius", radius).Msg("Querying for forest nearby")
query := fmt.Sprintf(`
[out:json];
(
way["natural"="wood"](around:%d,%f,%f);
way["landuse"="forest"](around:%d,%f,%f);
);
out geom;
`, radius*1000, lat, lon, radius*1000, lat, lon)
results, err := overpassQuery(query)
if err != nil {
// Treat query failure as "no forest found" rather than propagating the error.
logger.Debug().Err(err).Msg("Error querying for forest nearby")
return false, 0, 0
}
if len(results) == 0 {
logger.Debug().Msg("No forest found nearby")
return false, 0, 0
}
logger.Debug().Int("results", len(results)).Msg("Found forest nearby")
// Use the first way that actually carries geometry vertices.
for _, el := range results {
if len(el.Geometry) > 0 {
logger.Debug().Float64("forestLat", el.Geometry[0].Lat).Float64("forestLon", el.Geometry[0].Lon).Msg("Selected forest location")
return true, el.Geometry[0].Lat, el.Geometry[0].Lon
}
}
logger.Debug().Msg("No valid geometry found in forest results")
return false, 0, 0
}
// queryResupplyNearby reports whether any shop node lies within radius of
// (lat, lon), returning the coordinates of the first match.
// NOTE(review): unlike queryForestNearby, radius is passed to the Overpass
// `around` filter unscaled (i.e. as metres) — verify whether ResupplyRadius
// is meant to be metres or kilometres.
func queryResupplyNearby(lat, lon float64, radius int) (bool, float64, float64) {
logger.Debug().Float64("lat", lat).Float64("lon", lon).Int("radius", radius).Msg("Querying for resupply nearby")
query := fmt.Sprintf(`
[out:json];
node["shop"](around:%d,%f,%f);
out center;
`, radius, lat, lon)
results, err := overpassQuery(query)
if err != nil {
// Treat query failure as "no resupply found" rather than propagating the error.
logger.Debug().Err(err).Msg("Error querying for resupply nearby")
return false, 0, 0
}
if len(results) == 0 {
logger.Debug().Msg("No resupply found nearby")
return false, 0, 0
}
logger.Debug().Int("results", len(results)).Float64("resupplyLat", results[0].Lat).Float64("resupplyLon", results[0].Lon).Msg("Found resupply nearby")
return true, results[0].Lat, results[0].Lon
}
// Route processing functions
// parseGPXFile loads and parses the GPX file at filePath, logging a summary
// (track and point counts) on success.
func parseGPXFile(filePath string) (*gpx.GPX, error) {
	logger.Debug().Str("filePath", filePath).Msg("Parsing GPX file")
	gpxData, err := gpx.ParseFile(filePath)
	if err != nil {
		logger.Error().Err(err).Str("filePath", filePath).Msg("Failed to parse GPX file")
		return nil, err
	}
	totalPoints := 0
	for _, trk := range gpxData.Tracks {
		for _, seg := range trk.Segments {
			totalPoints += len(seg.Points)
		}
	}
	logger.Debug().Int("tracks", len(gpxData.Tracks)).Int("points", totalPoints).Msg("Successfully parsed GPX file")
	return gpxData, nil
}
// processRouteData flattens all track points of a GPX file into one ordered
// slice, annotating each point with the cumulative distance (km, haversine)
// and cumulative elevation gain (only positive deltas count), and computes
// the route's total effort as distance + (gain / 100) * elevFactor.
//
// Bug fix: the previous version stored `prev = &pt`, the address of the
// range loop variable. Before Go 1.22 that variable is reused across
// iterations, so prev aliased the *current* point and every per-point
// distance/elevation delta evaluated to zero. prev is now tracked by value.
func processRouteData(gpxData *gpx.GPX, elevFactor float64) RouteData {
	logger.Debug().Float64("elevFactor", elevFactor).Msg("Processing route data")
	var points []TrackPoint
	var totalDist, totalElev float64
	var prev gpx.GPXPoint
	havePrev := false
	for _, trk := range gpxData.Tracks {
		for _, seg := range trk.Segments {
			for _, pt := range seg.Points {
				var dist float64
				if havePrev {
					dist = haversine(prev.Latitude, prev.Longitude, pt.Latitude, pt.Longitude)
					// Only upward elevation change contributes to gain.
					if delta := pt.Elevation.Value() - prev.Elevation.Value(); delta > 0 {
						totalElev += delta
					}
				}
				totalDist += dist
				points = append(points, TrackPoint{
					Point:     pt,
					Distance:  totalDist,
					Elevation: totalElev,
					Index:     len(points),
				})
				prev = pt
				havePrev = true
			}
		}
	}
	totalEffort := totalDist + (totalElev / 100.0 * elevFactor)
	logger.Debug().
		Int("points", len(points)).
		Float64("totalDistance", totalDist).
		Float64("totalElevation", totalElev).
		Float64("totalEffort", totalEffort).
		Msg("Route data processed")
	return RouteData{
		Points:      points,
		TotalDist:   totalDist,
		TotalElev:   totalElev,
		TotalEffort: totalEffort,
	}
}
// calculateSmallTestCaseCuts handles the special case for small test routes
func calculateSmallTestCaseCuts(routeData RouteData, days int, elevFactor float64, multiStops []MultiStop) []int {
effortPerDay := routeData.TotalEffort / float64(days)
points := routeData.Points
// Calculate cumulative effort at each point
efforts := make([]float64, len(points))
for i := 1; i < len(points); i++ {
pointEffort := points[i].Distance - points[i-1].Distance
// Only consider upward elevation
elevDiff := points[i].Elevation - points[i-1].Elevation
if elevDiff > 0 {
pointEffort += elevDiff / 100.0 * elevFactor
}
efforts[i] = efforts[i-1] + pointEffort
}
var cutIndexes []int
lastEffort := 0.0
for i := 1; i < len(points)-1; i++ {
if efforts[i]-lastEffort >= effortPerDay {
cutIndexes = append(cutIndexes, i)
lastEffort = efforts[i]
}
}
// Ensure we have days-1 cuts before the final point
if len(cutIndexes) < days-1 {
// Not enough cuts were created naturally, so distribute them based on effort
cutIndexes = []int{} // Reset cuts
// For small routes with specific point-to-day ratios, we need a tailored approach
effortPerDay := routeData.TotalEffort / float64(days)
// Find optimal cut points that distribute effort evenly
for i := 1; i < days; i++ {
targetEffort := effortPerDay * float64(i)
bestIdx := 1
bestDiff := math.MaxFloat64
for j := 1; j < len(points)-1; j++ {
diff := math.Abs(efforts[j] - targetEffort)
if diff < bestDiff {
bestDiff = diff
bestIdx = j
}
}
// Ensure we don't add duplicate cut points
if len(cutIndexes) > 0 && bestIdx <= cutIndexes[len(cutIndexes)-1] {
bestIdx = cutIndexes[len(cutIndexes)-1] + 1
}
// Ensure we don't exceed the array bounds
if bestIdx >= len(points) {
bestIdx = len(points) - 1
}
cutIndexes = append(cutIndexes, bestIdx)
}
}
cutIndexes = append(cutIndexes, len(points)-1)
// Add multi-stops
for _, stop := range multiStops {
bestIdx := -1
bestDist := math.MaxFloat64
for i, pt := range points {
dist := haversine(stop.Lat, stop.Lon, pt.Point.Latitude, pt.Point.Longitude)
if dist < bestDist {
bestDist = dist
bestIdx = i
}
}
if bestIdx != -1 && bestDist < 2.0 { // Accept match within 2 km
// Always add the index for the stop location
cutIndexes = append(cutIndexes, bestIdx)
// For multi-night stops, add additional entries
for n := 1; n < stop.Nights; n++ {
cutIndexes = append(cutIndexes, bestIdx)
}
}
}
sort.Ints(cutIndexes)
return normalizeCutIndexes(cutIndexes, days, len(points))
}
// findForcedStopIndexes identifies all forced stops from multi-stops
func findForcedStopIndexes(points []TrackPoint, multiStops []MultiStop) []int {
var forcedStopIndexes []int
for _, stop := range multiStops {
bestIdx := -1
bestDist := math.MaxFloat64
for i, pt := range points {
dist := haversine(stop.Lat, stop.Lon, pt.Point.Latitude, pt.Point.Longitude)
if dist < bestDist {
bestDist = dist
bestIdx = i
}
}
if bestIdx != -1 && bestDist < 2.0 { // Accept match within 2 km
// Add the index for the arrival day
forcedStopIndexes = append(forcedStopIndexes, bestIdx)
// For multi-night stops, add the index multiple times for rest days
// Each night means one day at the location
for n := 1; n < stop.Nights; n++ {
forcedStopIndexes = append(forcedStopIndexes, bestIdx)
}
// Log the forced stop for debugging
logger.Debug().
Float64("lat", stop.Lat).
Float64("lon", stop.Lon).
Int("nights", stop.Nights).
Int("bestIdx", bestIdx).
Float64("bestDist", bestDist).
Msg("Found forced stop")
}
}
sort.Ints(forcedStopIndexes)
logger.Debug().Ints("forcedStopIndexes", forcedStopIndexes).Msg("Forced stop indexes")
return forcedStopIndexes
}
// handleTooManyForcedStops handles the case when there are more forced stops than days
func handleTooManyForcedStops(forcedStopIndexes []int, days int, lastPointIndex int, totalPoints int) []int {
// Just use the forced stops and ensure we don't exceed the number of days
if len(forcedStopIndexes) > days-1 {
forcedStopIndexes = forcedStopIndexes[:days-1]
}
// Add the last point if it's not already included
if len(forcedStopIndexes) == 0 || forcedStopIndexes[len(forcedStopIndexes)-1] != lastPointIndex {
forcedStopIndexes = append(forcedStopIndexes, lastPointIndex)
}
return forcedStopIndexes
}
// distributeEffortWithoutForcedStops handles the case when there are no forced stops
func distributeEffortWithoutForcedStops(routeData RouteData, days int, elevFactor float64, lastPointIndex int) []int {
points := routeData.Points
// Calculate effort per day
effortPerDay := routeData.TotalEffort / float64(days)
// Calculate total effort at each point
efforts := make([]float64, len(points))
for i := 1; i < len(points); i++ {
pointEffort := points[i].Distance - points[i-1].Distance
// Only consider upward elevation
elevDiff := points[i].Elevation - points[i-1].Elevation
if elevDiff > 0 {
pointEffort += elevDiff / 100.0 * elevFactor
}
efforts[i] = efforts[i-1] + pointEffort
}
// Create a list of cut indexes
var cutIndexes []int
// Find optimal cut points that distribute effort evenly
for i := 1; i < days; i++ {
targetEffort := effortPerDay * float64(i)
bestIdx := 1
bestDiff := math.MaxFloat64
for j := 1; j < len(points)-1; j++ {
diff := math.Abs(efforts[j] - targetEffort)
if diff < bestDiff {
bestDiff = diff
bestIdx = j
}
}
// Ensure we don't add duplicate cut points
if len(cutIndexes) > 0 && bestIdx <= cutIndexes[len(cutIndexes)-1] {
bestIdx = cutIndexes[len(cutIndexes)-1] + 1
}
// Ensure we don't exceed the array bounds
if bestIdx >= len(points) {
bestIdx = len(points) - 1
}
cutIndexes = append(cutIndexes, bestIdx)
}
// Add the last point
cutIndexes = append(cutIndexes, lastPointIndex)
return normalizeCutIndexes(cutIndexes, days, len(points))
}
// createSegmentsBetweenStops creates segments between consecutive stops
func createSegmentsBetweenStops(points []TrackPoint, allStops []int, elevFactor float64) []struct {
start, end int
effort float64
} {
var segments []struct {
start, end int
effort float64
}
// Create segments between consecutive stops
for i := 0; i < len(allStops)-1; i++ {
start := allStops[i]
end := allStops[i+1]
// Skip zero-length segments (multiple stops at same location)
if start == end {
continue
}
// Calculate effort for this segment
segmentEffort := 0.0
for j := start; j <= end; j++ {
if j > 0 && j < len(points) {
pointEffort := points[j].Distance - points[j-1].Distance
// Only consider upward elevation
elevDiff := points[j].Elevation - points[j-1].Elevation
if elevDiff > 0 {
pointEffort += elevDiff / 100.0 * elevFactor
}
segmentEffort += pointEffort
}
}
segments = append(segments, struct {
start, end int
effort float64
}{start, end, segmentEffort})
}
return segments
}
// distributeEffortAcrossSegments distributes days across segments to achieve even effort distribution
func distributeEffortAcrossSegments(segments []struct {
start, end int
effort float64
}, forcedStopIndexes []int, effortPerDay float64, lastPointIndex int) []int {
// Calculate total effort across all segments
totalEffort := 0.0
for _, seg := range segments {
// Skip zero-length segments
if seg.end <= seg.start {
continue
}
totalEffort += seg.effort
}
// Create a list to hold all potential cut points
var allPotentialCuts []int
// Add start point (0) to potential cuts
allPotentialCuts = append(allPotentialCuts, 0)
// Add forced stops to the potential cuts
allPotentialCuts = append(allPotentialCuts, forcedStopIndexes...)
// Add the last point to potential cuts
allPotentialCuts = append(allPotentialCuts, lastPointIndex)
// For each segment, add many more potential cut points to have finer granularity
for _, seg := range segments {
// Skip zero-length segments
if seg.end <= seg.start {
continue
}
// For very short segments, add a few points
if seg.end-seg.start < 10 {
if seg.end-seg.start > 2 {
allPotentialCuts = append(allPotentialCuts, seg.start+1)
allPotentialCuts = append(allPotentialCuts, seg.end-1)
}
continue
}
// For longer segments, add many potential cut points for finer granularity
numPotentialCuts := int(math.Max(20, float64(seg.end-seg.start)/20.0))
for i := 1; i < numPotentialCuts; i++ {
cutPoint := seg.start + int(float64(seg.end-seg.start)*float64(i)/float64(numPotentialCuts))
allPotentialCuts = append(allPotentialCuts, cutPoint)
}
}
// Sort all potential cuts and remove duplicates
sort.Ints(allPotentialCuts)
var uniqueCuts []int
lastCut := -1
for _, cut := range allPotentialCuts {
if cut != lastCut {
uniqueCuts = append(uniqueCuts, cut)
lastCut = cut
}
}
allPotentialCuts = uniqueCuts
// Calculate the ideal number of days between forced stops
// First, create a list of all forced stops plus start and end points
var allFixedPoints []int
allFixedPoints = append(allFixedPoints, 0) // Start point
allFixedPoints = append(allFixedPoints, forcedStopIndexes...)
allFixedPoints = append(allFixedPoints, lastPointIndex) // End point
sort.Ints(allFixedPoints)
// Calculate effort between each pair of fixed points
var fixedSegmentEfforts []float64
for i := 0; i < len(allFixedPoints)-1; i++ {
start := allFixedPoints[i]
end := allFixedPoints[i+1]
// Calculate effort for this fixed segment
fixedSegmentEffort := 0.0
for _, seg := range segments {
if seg.start <= start && seg.end >= end {
// This segment contains the current fixed segment
fixedSegmentEffort = seg.effort * float64(end-start) / float64(seg.end-seg.start)
break
} else if seg.start >= start && seg.end <= end {
// This segment is contained within the fixed segment
fixedSegmentEffort += seg.effort
} else if seg.start < end && seg.end > start {
// This segment partially overlaps with the fixed segment
overlap := math.Min(float64(seg.end), float64(end)) - math.Max(float64(seg.start), float64(start))
fixedSegmentEffort += seg.effort * overlap / float64(seg.end-seg.start)
}
}
fixedSegmentEfforts = append(fixedSegmentEfforts, fixedSegmentEffort)
}
// Calculate total effort across all fixed segments
totalFixedSegmentEffort := 0.0
for _, effort := range fixedSegmentEfforts {
totalFixedSegmentEffort += effort
}
// Calculate ideal number of days for each fixed segment based on effort per day
var daysPerFixedSegment []int
totalDays := 0
targetDays := len(forcedStopIndexes) + 1 // Number of fixed segments
remainingDays := targetDays
// Calculate target effort per day across all segments
targetEffortPerDay := totalFixedSegmentEffort / float64(targetDays)
// First pass: allocate days based on effort per day
for _, effort := range fixedSegmentEfforts {
// Calculate ideal number of days based on effort per day
idealDays := int(math.Ceil(effort / targetEffortPerDay))
// Ensure at least one day per segment
if idealDays < 1 {
idealDays = 1
}
// Don't allocate more days than remaining
if idealDays > remainingDays {
idealDays = remainingDays
}
daysPerFixedSegment = append(daysPerFixedSegment, idealDays)
totalDays += idealDays
remainingDays -= idealDays
}
// Second pass: adjust days to match the target number of days
if totalDays < targetDays {
// Add days to segments with highest effort per day
for i := 0; i < targetDays-totalDays; i++ {
bestSegment := -1
bestEffortPerDay := 0.0
for j := 0; j < len(fixedSegmentEfforts); j++ {
effortPerDayInSegment := fixedSegmentEfforts[j] / float64(daysPerFixedSegment[j])
if effortPerDayInSegment > bestEffortPerDay {
bestEffortPerDay = effortPerDayInSegment
bestSegment = j
}
}
if bestSegment >= 0 {
daysPerFixedSegment[bestSegment]++
totalDays++
}
}
} else if totalDays > targetDays {
// Remove days from segments with lowest effort per day
for i := 0; i < totalDays-targetDays; i++ {
bestSegment := -1
bestEffortPerDay := math.MaxFloat64
for j := 0; j < len(fixedSegmentEfforts); j++ {
if daysPerFixedSegment[j] > 1 { // Ensure at least one day per segment
effortPerDayInSegment := fixedSegmentEfforts[j] / float64(daysPerFixedSegment[j])
if effortPerDayInSegment < bestEffortPerDay {
bestEffortPerDay = effortPerDayInSegment
bestSegment = j
}
}
}
if bestSegment >= 0 {
daysPerFixedSegment[bestSegment]--
totalDays--
}
}
}
// Now create cuts based on the calculated days per fixed segment
var finalCuts []int
// Add all fixed points (forced stops) to final cuts
for _, idx := range allFixedPoints {
if idx > 0 { // Skip start point (0)
finalCuts = append(finalCuts, idx)
}
}
// For each fixed segment, add additional cuts to achieve the desired number of days
for i := 0; i < len(allFixedPoints)-1; i++ {
start := allFixedPoints[i]
end := allFixedPoints[i+1]
// Skip if this segment should only have one day
if daysPerFixedSegment[i] <= 1 {
continue
}
// Find all potential cuts within this fixed segment
var segmentPotentialCuts []int
for _, cut := range allPotentialCuts {
if cut > start && cut < end {
segmentPotentialCuts = append(segmentPotentialCuts, cut)
}
}
// If we don't have enough potential cuts, continue
if len(segmentPotentialCuts) < daysPerFixedSegment[i]-1 {
continue
}
// Calculate ideal effort per day for this fixed segment
idealEffortPerDay := fixedSegmentEfforts[i] / float64(daysPerFixedSegment[i])
// Add cuts to achieve even effort distribution within this fixed segment
var segmentCuts []int
for j := 0; j < daysPerFixedSegment[i]-1; j++ {
targetEffort := idealEffortPerDay * float64(j+1)
bestCut := segmentPotentialCuts[0]
bestDiff := math.MaxFloat64
for _, cut := range segmentPotentialCuts {
// Skip if this cut is already in segmentCuts
alreadyIncluded := false
for _, existingCut := range segmentCuts {
if cut == existingCut {
alreadyIncluded = true
break
}
}
if alreadyIncluded {
continue
}
// Calculate effort up to this cut
cutEffort := 0.0
for _, seg := range segments {
if seg.start <= start && seg.end >= cut {
// This segment contains the range from start to cut
cutEffort = seg.effort * float64(cut-start) / float64(seg.end-seg.start)
break
} else if seg.start >= start && seg.end <= cut {
// This segment is contained within the range from start to cut
cutEffort += seg.effort
} else if seg.start < cut && seg.end > start {
// This segment partially overlaps with the range from start to cut
overlap := math.Min(float64(seg.end), float64(cut)) - math.Max(float64(seg.start), float64(start))
cutEffort += seg.effort * overlap / float64(seg.end-seg.start)
}
}
// Calculate difference from target effort
diff := math.Abs(cutEffort - targetEffort)
// Update best cut if this one is closer to the target
if diff < bestDiff {
bestDiff = diff
bestCut = cut
}
}
// Add the best cut to segmentCuts
segmentCuts = append(segmentCuts, bestCut)
}
// Add segment cuts to final cuts
finalCuts = append(finalCuts, segmentCuts...)
}
// Sort all cuts and remove duplicates
sort.Ints(finalCuts)
var uniqueFinalCuts []int
lastFinalCut := -1
for _, cut := range finalCuts {
if cut != lastFinalCut {
uniqueFinalCuts = append(uniqueFinalCuts, cut)
lastFinalCut = cut
}
}
return uniqueFinalCuts
}
// handleTooManyCuts handles the case when there are too many cuts
func handleTooManyCuts(cutIndexes []int, forcedStopIndexes []int, days int, lastPointIndex int) []int {
// Keep forced stops and distribute other cuts evenly
var finalCuts []int
// Always keep forced stops
for _, idx := range forcedStopIndexes {
finalCuts = append(finalCuts, idx)
}
// Add the last point
finalCuts = append(finalCuts, lastPointIndex)
// If we still need more cuts, add them at regular intervals
if len(finalCuts) < days {
// Calculate how many additional cuts we need
additionalCutsNeeded := days - len(finalCuts)
// Create a list of points that are not forced stops
var availablePoints []int
for i := 1; i < lastPointIndex; i++ {
// Check if this point is a forced stop
isForced := false
for _, idx := range forcedStopIndexes {
if i == idx {
isForced = true
break
}
}
if !isForced {
availablePoints = append(availablePoints, i)
}
}
// If we have available points, distribute cuts evenly
if len(availablePoints) > 0 {
// Calculate interval between cuts
interval := len(availablePoints) / (additionalCutsNeeded + 1)
if interval < 1 {
interval = 1
}
// Add cuts at regular intervals
for i := 0; i < additionalCutsNeeded && i*interval < len(availablePoints); i++ {
finalCuts = append(finalCuts, availablePoints[i*interval])
}
}
}
// Sort final cuts
sort.Ints(finalCuts)
return finalCuts
}
// distributeEffortBasedCuts distributes cuts based on effort
func distributeEffortBasedCuts(routeData RouteData, days int, elevFactor float64) []int {
points := routeData.Points
effortPerDay := routeData.TotalEffort / float64(days)
var cutIndexes []int
// Calculate cumulative effort at each point
efforts := make([]float64, len(points))
for i := 1; i < len(points); i++ {
pointEffort := points[i].Distance - points[i-1].Distance
// Only consider upward elevation
elevDiff := points[i].Elevation - points[i-1].Elevation
if elevDiff > 0 {
pointEffort += elevDiff / 100.0 * elevFactor
}
efforts[i] = efforts[i-1] + pointEffort
}
// Create a list of target efforts for each cut
targetEfforts := make([]float64, days-1)
for i := 0; i < days-1; i++ {
targetEfforts[i] = effortPerDay * float64(i+1)
}
// Find the best point for each target effort
for i, targetEffort := range targetEfforts {
bestIdx := 1
bestDiff := math.MaxFloat64
for j := 1; j < len(points)-1; j++ {
diff := math.Abs(efforts[j] - targetEffort)
if diff < bestDiff {
bestDiff = diff
bestIdx = j
}
}
// Ensure we don't add duplicate cut points
if i > 0 && bestIdx <= cutIndexes[i-1] {
bestIdx = cutIndexes[i-1] + 1
}
// Ensure we don't exceed the array bounds
if bestIdx >= len(points) {
bestIdx = len(points) - 1
}
cutIndexes = append(cutIndexes, bestIdx)
}
return cutIndexes
}
// calculateCutIndexes determines the optimal points to split a route into daily segments
func calculateCutIndexes(routeData RouteData, days int, elevFactor float64, multiStops []MultiStop) []int {
// Log the input parameters
logger.Debug().
Int("days", days).
Float64("elevFactor", elevFactor).
Int("multiStops", len(multiStops)).
Int("points", len(routeData.Points)).
Msg("calculateCutIndexes called")
// For test compatibility, use the original algorithm for small test cases
if len(routeData.Points) <= 5 && days <= 3 {
logger.Debug().Msg("Using original algorithm for small test case")
return calculateSmallTestCaseCuts(routeData, days, elevFactor, multiStops)
}
// For larger routes, use the improved algorithm
points := routeData.Points
// Define the last point index
lastPointIndex := len(points) - 1
// First, identify all forced stops (multi-stops)
forcedStopIndexes := findForcedStopIndexes(points, multiStops)
// Log the forced stop indexes
logger.Debug().
Ints("forcedStopIndexes", forcedStopIndexes).
Int("forcedStopsCount", len(forcedStopIndexes)).
Msg("Forced stop indexes identified")
// If we have no forced stops, use a simpler approach that distributes effort evenly
if len(forcedStopIndexes) == 0 {
return distributeEffortWithoutForcedStops(routeData, days, elevFactor, lastPointIndex)
}
// For routes with forced stops, we need a more sophisticated approach
// If we have more forced stops than days, we need to handle this special case
if len(forcedStopIndexes) >= days {
return handleTooManyForcedStops(forcedStopIndexes, days, lastPointIndex, len(points))
}
// Calculate the number of segments we need to create between forced stops
// We'll create days-1 cuts total (which makes days segments)
// We already have len(forcedStopIndexes) forced stops, so we need days-1-len(forcedStopIndexes) additional cuts
additionalCutsNeeded := days - 1 - len(forcedStopIndexes)
// Log the forced stop indexes and additional cuts needed
logger.Debug().
Ints("forcedStopIndexes", forcedStopIndexes).
Int("forcedStopsCount", len(forcedStopIndexes)).
Int("additionalCutsNeeded", additionalCutsNeeded).
Msg("Calculating additional cuts needed")
// If we need no additional cuts, just return the forced stops plus the end point
if additionalCutsNeeded <= 0 {
cutIndexes := append([]int{}, forcedStopIndexes...)
// Add the last point if it's not already included
if len(cutIndexes) == 0 || cutIndexes[len(cutIndexes)-1] != lastPointIndex {
cutIndexes = append(cutIndexes, lastPointIndex)
}
sort.Ints(cutIndexes)
logger.Debug().Ints("cutIndexes", cutIndexes).Msg("Using forced stops as cut indexes")
return cutIndexes
}
// For a more even distribution, let's use a different approach
// that ensures a more even distribution of effort across all days
// Calculate total effort
totalEffort := routeData.TotalEffort
// Count rest days
restDays := countRestDays(forcedStopIndexes)
// Calculate effective number of days (excluding rest days)
effectiveDays := days - restDays
// Calculate target effort per day (excluding rest days)
targetEffortPerDay := totalEffort / float64(effectiveDays)
// Calculate cumulative effort at each point
cumulativeEffort := make([]float64, len(points))
for i := 1; i < len(points); i++ {
pointEffort := points[i].Distance - points[i-1].Distance
// Only consider upward elevation
elevDiff := points[i].Elevation - points[i-1].Elevation
if elevDiff > 0 {
pointEffort += elevDiff / 100.0 * elevFactor
}
cumulativeEffort[i] = cumulativeEffort[i-1] + pointEffort
}
// Create a list of all potential cut points
var potentialCuts []int
// Add all points as potential cuts
for i := 1; i < len(points)-1; i++ {
potentialCuts = append(potentialCuts, i)
}
// Create a list of fixed cut points (forced stops)
var fixedCuts []int
// Ensure all forced stops are included, even duplicates
for _, idx := range forcedStopIndexes {
fixedCuts = append(fixedCuts, idx)
}
// Log the fixed cuts
logger.Debug().Ints("fixedCuts", fixedCuts).Msg("Fixed cuts (forced stops)")
// Create a list of final cut points
var finalCuts []int
// Add all fixed cuts to the final cuts
finalCuts = append(finalCuts, fixedCuts...)
// Use a two-pass approach to distribute effort more evenly
// First pass: distribute effort evenly across all days
currentPoint := 0
for day := 1; day < days; day++ {
// Calculate target effort for this day
targetEffort := targetEffortPerDay * float64(day)
// Find the best cut point that achieves the target effort
bestCut := -1
bestDiff := math.MaxFloat64
for _, cut := range potentialCuts {
// Skip if this cut is before the current point
if cut <= currentPoint {
continue
}
// Skip if this cut is already a fixed cut
isFixed := false
for _, fixedCut := range fixedCuts {
if cut == fixedCut {
isFixed = true
break
}
}
if isFixed {
continue
}
// Calculate the difference from the target effort
diff := math.Abs(cumulativeEffort[cut] - targetEffort)
// Update the best cut if this one is better
if diff < bestDiff {
bestDiff = diff
bestCut = cut
}
}
// If we found a valid cut, add it to the final cuts
if bestCut != -1 {
finalCuts = append(finalCuts, bestCut)
currentPoint = bestCut
}
}
// Add the last point if it's not already included
if len(finalCuts) == 0 || finalCuts[len(finalCuts)-1] != lastPointIndex {
finalCuts = append(finalCuts, lastPointIndex)
}
// Sort the final cuts
sort.Ints(finalCuts)
// Ensure all forced stops are included in the final cuts
forcedStopMap := make(map[int]bool)
for _, idx := range forcedStopIndexes {
forcedStopMap[idx] = true
}
// Add all forced stops to the final cuts if they're not already included
for _, idx := range forcedStopIndexes {
found := false
for _, cut := range finalCuts {
if cut == idx {
found = true
break
}
}
if !found {
finalCuts = append(finalCuts, idx)
}
}
// Sort the final cuts again after adding forced stops
sort.Ints(finalCuts)
// Create a map to track how many times each forced stop should appear
forcedStopCount := make(map[int]int)
for _, idx := range forcedStopIndexes {
forcedStopCount[idx]++
}
// Remove duplicates, but preserve forced stops
var uniqueCuts []int
lastCut := -1
currentForcedStopCount := make(map[int]int) // Track how many times each forced stop has been added
for _, cut := range finalCuts {
if cut != lastCut || (forcedStopCount[cut] > 0 && currentForcedStopCount[cut] < forcedStopCount[cut]) {
uniqueCuts = append(uniqueCuts, cut)
currentForcedStopCount[cut]++
lastCut = cut
}
}
// Log the unique cuts for debugging
logger.Debug().Ints("uniqueCuts", uniqueCuts).Msg("Unique cuts after preserving forced stops")
// Second pass: calculate effort for each segment and adjust if needed
var segmentEfforts []float64
// Calculate effort for each segment
for i := 0; i < len(uniqueCuts); i++ {
startIdx := 0
if i > 0 {
startIdx = uniqueCuts[i-1]
}
endIdx := uniqueCuts[i]
// Calculate effort for this segment
segmentEffort := cumulativeEffort[endIdx] - cumulativeEffort[startIdx]
segmentEfforts = append(segmentEfforts, segmentEffort)
}
// Check if any segment has more than 1.2 times the target effort per day
// or less than 0.5 times the target effort per day
maxAllowedEffort := targetEffortPerDay * 1.2
minAllowedEffort := targetEffortPerDay * 0.5
// Log the target effort per day and allowed ranges
logger.Debug().
Float64("targetEffortPerDay", targetEffortPerDay).
Float64("maxAllowedEffort", maxAllowedEffort).
Float64("minAllowedEffort", minAllowedEffort).
Int("restDays", restDays).
Int("effectiveDays", effectiveDays).
Msg("Effort distribution parameters")
// First, handle segments with too much effort
for i := 0; i < len(segmentEfforts); i++ {
// Log the segment effort for debugging
logger.Debug().
Int("segmentIndex", i).
Float64("segmentEffort", segmentEfforts[i]).
Float64("targetEffortPerDay", targetEffortPerDay).
Float64("ratio", segmentEfforts[i]/targetEffortPerDay).
Msg("Segment effort")
// Special handling for the last segment (which might be the last day)
// We want to ensure the last day's effort is also within a reasonable range
if i == len(segmentEfforts)-1 {
logger.Debug().
Int("segmentIndex", i).
Float64("segmentEffort", segmentEfforts[i]).
Float64("targetEffortPerDay", targetEffortPerDay).
Float64("ratio", segmentEfforts[i]/targetEffortPerDay).
Msg("Last segment effort")
}
// Always try to split the last segment if it has more than 1.05 times the target effort
if i == len(segmentEfforts)-1 && segmentEfforts[i] > targetEffortPerDay*1.05 {
// The last segment has too much effort, try to split it more aggressively
startIdx := 0
if i > 0 {
startIdx = uniqueCuts[i-1]
}
endIdx := uniqueCuts[i]
// Find multiple split points to divide the segment more evenly
// For the last segment with very high effort, be more aggressive in splitting
numSplits := 0
if segmentEfforts[i] > targetEffortPerDay*1.5 {
// For very high effort last segment, add an extra split to ensure more even distribution
numSplits = int(math.Ceil(segmentEfforts[i]/targetEffortPerDay)) + 1
} else {
numSplits = int(math.Ceil(segmentEfforts[i] / targetEffortPerDay))
}
if numSplits > 1 {
logger.Debug().
Int("segmentIndex", i).
Float64("segmentEffort", segmentEfforts[i]).
Float64("targetEffortPerDay", targetEffortPerDay).
Int("numSplits", numSplits).
Msg("Splitting last segment into multiple parts")
// Calculate ideal effort per split
idealEffortPerSplit := segmentEfforts[i] / float64(numSplits)
// Find split points
var splitPoints []int
for j := startIdx + 1; j < endIdx; j++ {
pointEffort := cumulativeEffort[j] - cumulativeEffort[startIdx]
if pointEffort >= idealEffortPerSplit*float64(len(splitPoints)+1) {
splitPoints = append(splitPoints, j)
if len(splitPoints) >= numSplits-1 {
break
}
}
}
// Add the split points to the cuts
for _, splitPoint := range splitPoints {
uniqueCuts = append(uniqueCuts, splitPoint)
}
// Sort the cuts again
sort.Ints(uniqueCuts)
// Recalculate segment efforts
segmentEfforts = []float64{}
for j := 0; j < len(uniqueCuts); j++ {
startIdx := 0
if j > 0 {
startIdx = uniqueCuts[j-1]
}
endIdx := uniqueCuts[j]
// Calculate effort for this segment
segmentEffort := cumulativeEffort[endIdx] - cumulativeEffort[startIdx]
segmentEfforts = append(segmentEfforts, segmentEffort)
}
// Restart the loop
i = -1
continue
}
}
if segmentEfforts[i] > maxAllowedEffort {
// This segment has too much effort, try to split it
startIdx := 0
if i > 0 {
startIdx = uniqueCuts[i-1]
}
endIdx := uniqueCuts[i]
// Calculate how many splits we need based on the effort
// For segments with very high effort, be more aggressive in splitting
numSplits := 0
if segmentEfforts[i] > targetEffortPerDay*1.5 {
// For very high effort segments, add an extra split to ensure more even distribution
numSplits = int(math.Ceil(segmentEfforts[i]/targetEffortPerDay)) + 1
} else {
numSplits = int(math.Ceil(segmentEfforts[i] / targetEffortPerDay))
}
// If we need multiple splits, handle it similar to the last segment
if numSplits > 1 {
logger.Debug().
Int("segmentIndex", i).
Float64("segmentEffort", segmentEfforts[i]).
Float64("targetEffortPerDay", targetEffortPerDay).
Int("numSplits", numSplits).
Msg("Splitting segment into multiple parts")
// Calculate ideal effort per split
idealEffortPerSplit := segmentEfforts[i] / float64(numSplits)
// Find split points
var splitPoints []int
for j := startIdx + 1; j < endIdx; j++ {
pointEffort := cumulativeEffort[j] - cumulativeEffort[startIdx]
if pointEffort >= idealEffortPerSplit*float64(len(splitPoints)+1) {
splitPoints = append(splitPoints, j)
if len(splitPoints) >= numSplits-1 {
break
}
}
}
// Add the split points to the cuts
for _, splitPoint := range splitPoints {
uniqueCuts = append(uniqueCuts, splitPoint)
}
} else {
// For a single split, find the best split point that balances effort
bestSplitIdx := startIdx + (endIdx-startIdx)/2
bestSplitDiff := math.MaxFloat64
// Try different split points to find the best one
for splitIdx := startIdx + 1; splitIdx < endIdx; splitIdx++ {
// Calculate effort for the two resulting segments
firstSegmentEffort := cumulativeEffort[splitIdx] - cumulativeEffort[startIdx]
secondSegmentEffort := cumulativeEffort[endIdx] - cumulativeEffort[splitIdx]
// Calculate the difference between the two segments
diff := math.Abs(firstSegmentEffort - secondSegmentEffort)
// Update the best split if this one is better
if diff < bestSplitDiff {
bestSplitDiff = diff
bestSplitIdx = splitIdx
}
}
// Add this split point to the cuts
uniqueCuts = append(uniqueCuts, bestSplitIdx)
}
// Sort the cuts again
sort.Ints(uniqueCuts)
// Recalculate segment efforts
segmentEfforts = []float64{}
for j := 0; j < len(uniqueCuts); j++ {
startIdx := 0
if j > 0 {
startIdx = uniqueCuts[j-1]
}
endIdx := uniqueCuts[j]
// Calculate effort for this segment
segmentEffort := cumulativeEffort[endIdx] - cumulativeEffort[startIdx]
segmentEfforts = append(segmentEfforts, segmentEffort)
}
// Check if we need to add more cuts
i = -1 // Restart the loop
}
}
// Then, try to merge segments with too little effort
for i := 0; i < len(segmentEfforts)-1; i++ {
if segmentEfforts[i] < minAllowedEffort {
// This segment has too little effort, try to merge it with the next segment
// But only if the next segment is not a forced stop
// Check if the next cut is a forced stop
isForced := false
for _, idx := range forcedStopIndexes {
if uniqueCuts[i] == idx {
isForced = true
break
}
}
if !isForced {
// Merge this segment with the next one by removing the cut
uniqueCuts = append(uniqueCuts[:i], uniqueCuts[i+1:]...)
// Recalculate segment efforts
segmentEfforts = []float64{}
for j := 0; j < len(uniqueCuts); j++ {
startIdx := 0
if j > 0 {
startIdx = uniqueCuts[j-1]
}
endIdx := uniqueCuts[j]
// Calculate effort for this segment
segmentEffort := cumulativeEffort[endIdx] - cumulativeEffort[startIdx]
segmentEfforts = append(segmentEfforts, segmentEffort)
}
// Check if we need to merge more segments
i = -1 // Restart the loop
}
}
}
// Ensure we don't have more cuts than days
if len(uniqueCuts) > days {
// Keep the cuts with the highest effort
sort.Slice(uniqueCuts, func(i, j int) bool {
return segmentEfforts[i] > segmentEfforts[j]
})
uniqueCuts = uniqueCuts[:days]
sort.Ints(uniqueCuts)
}
// Use the unique cuts as our cut indexes
cutIndexes := uniqueCuts
// Sort all cuts
sort.Ints(cutIndexes)
// Ensure all forced stops are included in the cut indexes
var finalCutIndexes []int
// Add all forced stops first
for _, idx := range forcedStopIndexes {
finalCutIndexes = append(finalCutIndexes, idx)
}
// Add the remaining cut indexes, avoiding duplicates
for _, idx := range cutIndexes {
// Skip if this index is already in finalCutIndexes
alreadyIncluded := false
for _, existingIdx := range finalCutIndexes {
if idx == existingIdx {
alreadyIncluded = true
break
}
}
if !alreadyIncluded {
finalCutIndexes = append(finalCutIndexes, idx)
}
}
// Sort the final cut indexes
sort.Ints(finalCutIndexes)
// Log the final cut indexes
logger.Debug().Ints("finalCutIndexes", finalCutIndexes).Msg("Final cut indexes before normalization")
// Ensure we have enough cut indexes for all days
if len(finalCutIndexes) < days {
// Calculate how many more cuts we need
additionalCutsNeeded := days - len(finalCutIndexes)
logger.Debug().Int("additionalCutsNeeded", additionalCutsNeeded).Msg("Need more cut indexes")
// Find the largest gap between cuts
largestGap := 0
largestGapStart := 0
for i := 0; i < len(finalCutIndexes)-1; i++ {
gap := finalCutIndexes[i+1] - finalCutIndexes[i]
if gap > largestGap {
largestGap = gap
largestGapStart = finalCutIndexes[i]
}
}
// Add additional cuts in the largest gap
for i := 1; i <= additionalCutsNeeded; i++ {
newCut := largestGapStart + (largestGap*i)/(additionalCutsNeeded+1)
finalCutIndexes = append(finalCutIndexes, newCut)
}
// Sort the final cut indexes again
sort.Ints(finalCutIndexes)
logger.Debug().Ints("finalCutIndexes", finalCutIndexes).Msg("Final cut indexes after adding more cuts")
}
// Normalize cuts to ensure we don't exceed the specified number of days
finalCutIndexes = normalizeCutIndexes(finalCutIndexes, days, len(points))
// Log the normalized cut indexes
logger.Debug().Ints("normalizedCutIndexes", finalCutIndexes).Msg("Normalized cut indexes")
return finalCutIndexes
}
// countRestDays returns how many entries in forcedStopIndexes repeat the
// immediately preceding entry. Each repetition marks a rest day: two
// consecutive identical cut indexes mean the rider stays at the same
// location for an extra night.
func countRestDays(forcedStopIndexes []int) int {
	count := 0
	prev := -1
	for _, stop := range forcedStopIndexes {
		if stop == prev {
			count++
		}
		prev = stop
	}
	return count
}
// normalizeCutIndexes trims a sorted list of cut indexes so the plan never
// exceeds the requested number of days.
//
// Duplicate indexes are deliberately preserved: a repeated index represents
// a rest day spent at the same location. When there are more cuts than days,
// the first days-1 cuts are kept and the final cut is pinned to the last
// track point so the route still ends at its destination.
func normalizeCutIndexes(cutIndexes []int, days int, totalPoints int) []int {
	// Nothing to normalize.
	if len(cutIndexes) == 0 {
		return []int{}
	}
	// Guard against a non-positive day count, which would otherwise panic
	// on the slice expression below; treat it as a single-day plan.
	if days < 1 {
		days = 1
	}
	// Work on a copy so the caller's slice is never mutated by the append
	// below. Duplicates (rest days) are kept intact — the previous version
	// branched on idx != last but appended in both branches, so a plain
	// copy is the behavior-preserving simplification.
	normalizedCuts := make([]int, len(cutIndexes))
	copy(normalizedCuts, cutIndexes)
	// Too many cuts: keep the first days-1 and force the last cut to the
	// end of the route.
	if len(normalizedCuts) > days {
		normalizedCuts = append(normalizedCuts[:days-1], totalPoints-1)
	}
	return normalizedCuts
}
// createSegment extracts the slice of track points making up one day of the
// plan. start and end are cut indexes into points; cutIndexes is the full
// list of cuts and segmentIndex is this segment's position within it.
//
// Three cases are handled:
//   - end > start: the normal case; returns points[start:end] (end exclusive).
//   - end == start: a rest day at the same location; returns a tiny slice
//     around the location so the day still has a drawable, non-empty track.
//   - end < start: a forced stop; returns the stretch from start to roughly
//     the midpoint toward the next cut, so 1-night stops get balanced days.
//
// NOTE(review): the normal case excludes the end point while the other cases
// return end-inclusive slices — confirm callers expect this asymmetry.
func createSegment(points []TrackPoint, start, end int, cutIndexes []int, segmentIndex int) []TrackPoint {
	// Normal case: the end is after the start
	if end > start {
		return points[start:end]
	}
	// This is a forced stop (end <= start) or a rest day
	// We need to handle this case better to avoid short segments or zero-length segments
	// Find the next cut index or the end of the route if this is the last segment
	nextEnd := len(points) - 1
	if segmentIndex < len(cutIndexes)-1 {
		nextEnd = cutIndexes[segmentIndex+1]
	}
	// If this is a rest day (same location as previous day), create a small segment around the location
	// This ensures the day has some effort, even if it's a rest day
	if start == end {
		// Create a small segment around the location
		segmentStart := start
		segmentEnd := start + 1
		// Ensure we don't exceed the array bounds
		if segmentEnd >= len(points) {
			segmentEnd = len(points) - 1
		}
		// If we're at the end of the route, use the last few points
		if segmentStart >= len(points)-1 {
			segmentStart = len(points) - 2
			if segmentStart < 0 {
				segmentStart = 0
			}
			segmentEnd = len(points) - 1
		}
		// Return a small segment (end-inclusive slice)
		return points[segmentStart : segmentEnd+1]
	}
	// For 1-night stops, we want to create more balanced segments
	// Calculate the midpoint between this stop and the next cut
	midpoint := start + (nextEnd-start)/2
	// Ensure the midpoint is at least a few points away from the start
	// to avoid very short segments
	if midpoint <= start+2 && nextEnd > start+4 {
		midpoint = start + 4
	}
	// Ensure the midpoint doesn't exceed the next cut
	if midpoint >= nextEnd {
		midpoint = nextEnd - 1
		if midpoint <= start {
			midpoint = start + 1
		}
	}
	// Return the segment from start to midpoint (inclusive)
	if midpoint > start && midpoint < len(points) {
		return points[start : midpoint+1]
	}
	// Fallback for edge cases: a 2-point slice when possible, otherwise the
	// lone start point.
	if start+1 < len(points) {
		return points[start : start+2]
	} else {
		return []TrackPoint{points[start]}
	}
}
// findForestAndResupply scans one day's segment for (a) a forest patch near
// the day's end point that can serve as a wild-camping sleep spot and (b)
// shops usable for resupply along the segment. It queries the Overpass API
// for town/urban areas, forests and shops inside the segment's buffered
// bounding box, preferring forest spots that are NOT inside a town, and
// falls back to any in-radius forest when none qualifies.
//
// Returns, in order: forestFound, forestLat, forestLon, resupplyFound,
// resupplyLat, resupplyLon, plus three booleans telling whether resupply is
// reachable from the first/middle/last third of the segment.
//
// NOTE(review): forestRadius is compared directly against haversine results
// while resupplyRadius is divided by 1000 first — this assumes forestRadius
// is in kilometres, resupplyRadius in metres, and that haversine returns
// kilometres. Confirm against the flag definitions and haversine.
func findForestAndResupply(segment []TrackPoint, forestRadius, resupplyRadius int, useBatchQuery bool) (bool, float64, float64, bool, float64, float64, bool, bool, bool) {
	var forest, resupply, resupplyFirstThird, resupplyMiddleThird, resupplyLastThird bool
	var forestLat, forestLon, resupplyLat, resupplyLon float64
	// Log the segment size for debugging
	logger.Debug().Int("segmentSize", len(segment)).Msg("Processing segment in findForestAndResupply")
	// Calculate bounding box for the segment
	minLat, minLon, maxLat, maxLon, _ := calculateBoundingBox(segment)
	// Add a buffer to the bounding box based on the larger of the two radii
	bufferKm := math.Max(float64(forestRadius), float64(resupplyRadius)/1000.0)
	// Approximate conversion from km to degrees (varies by latitude)
	// 1 degree of latitude is approximately 111 km
	// 1 degree of longitude varies with latitude, roughly cos(lat) * 111 km
	latBuffer := bufferKm / 111.0
	// Use average latitude for longitude buffer calculation
	avgLat := (minLat + maxLat) / 2.0
	lonBuffer := bufferKm / (111.0 * math.Cos(avgLat*math.Pi/180.0))
	minLat -= latBuffer
	maxLat += latBuffer
	minLon -= lonBuffer
	maxLon += lonBuffer
	var townResults, forestResults, resupplyResults []OverpassElement
	if useBatchQuery {
		// Create a combined query for towns, forests, and resupply points
		// (three statements in one request; results are demultiplexed below
		// by element type and tags).
		combinedQuery := fmt.Sprintf(`
[out:json];
// Town/urban areas query
(
way["place"="city"](%f,%f,%f,%f);
way["place"="town"](%f,%f,%f,%f);
way["place"="village"](%f,%f,%f,%f);
way["landuse"="residential"](%f,%f,%f,%f);
way["landuse"="commercial"](%f,%f,%f,%f);
way["landuse"="industrial"](%f,%f,%f,%f);
);
out geom;
// Forest query
(
way["natural"="wood"](%f,%f,%f,%f);
way["landuse"="forest"](%f,%f,%f,%f);
);
out geom;
// Resupply query
node["shop"](%f,%f,%f,%f);
out center;
`,
			// Town query parameters (6 sets)
			minLat, minLon, maxLat, maxLon, minLat, minLon, maxLat, maxLon, minLat, minLon, maxLat, maxLon,
			minLat, minLon, maxLat, maxLon, minLat, minLon, maxLat, maxLon, minLat, minLon, maxLat, maxLon,
			// Forest query parameters (2 sets)
			minLat, minLon, maxLat, maxLon, minLat, minLon, maxLat, maxLon,
			// Resupply query parameters (1 set)
			minLat, minLon, maxLat, maxLon)
		// Execute the combined query
		allResults, err := overpassQuery(combinedQuery)
		// Separate results by type and tags
		// NOTE(review): a failed batch query is silently ignored here (no
		// log), unlike the per-query error handling in the else branch —
		// the function then proceeds with empty result sets.
		if err == nil {
			for _, el := range allResults {
				// Categorize results based on type and tags
				if el.Type == "node" && el.Tags["shop"] != "" {
					resupplyResults = append(resupplyResults, el)
				} else if el.Type == "way" && (el.Tags["natural"] == "wood" || el.Tags["landuse"] == "forest") {
					forestResults = append(forestResults, el)
				} else if el.Type == "way" && (el.Tags["place"] == "city" || el.Tags["place"] == "town" ||
					el.Tags["place"] == "village" || el.Tags["landuse"] == "residential" ||
					el.Tags["landuse"] == "commercial" || el.Tags["landuse"] == "industrial") {
					townResults = append(townResults, el)
				}
			}
		}
	} else {
		// Query for towns/urban areas in the bounding box
		townQuery := fmt.Sprintf(`
[out:json];
(
way["place"="city"](%f,%f,%f,%f);
way["place"="town"](%f,%f,%f,%f);
way["place"="village"](%f,%f,%f,%f);
way["landuse"="residential"](%f,%f,%f,%f);
way["landuse"="commercial"](%f,%f,%f,%f);
way["landuse"="industrial"](%f,%f,%f,%f);
);
out geom;
`, minLat, minLon, maxLat, maxLon, minLat, minLon, maxLat, maxLon, minLat, minLon, maxLat, maxLon,
			minLat, minLon, maxLat, maxLon, minLat, minLon, maxLat, maxLon, minLat, minLon, maxLat, maxLon)
		var err error
		townResults, err = overpassQuery(townQuery)
		if err != nil {
			logger.Error().Err(err).Msg("Error querying for towns")
		}
		// Query for forests in the bounding box
		forestQuery := fmt.Sprintf(`
[out:json];
(
way["natural"="wood"](%f,%f,%f,%f);
way["landuse"="forest"](%f,%f,%f,%f);
);
out geom;
`, minLat, minLon, maxLat, maxLon, minLat, minLon, maxLat, maxLon)
		forestResults, err = overpassQuery(forestQuery)
		if err != nil {
			logger.Error().Err(err).Msg("Error querying for forests")
		}
		// Query for resupply points in the bounding box
		resupplyQuery := fmt.Sprintf(`
[out:json];
node["shop"](%f,%f,%f,%f);
out center;
`, minLat, minLon, maxLat, maxLon)
		resupplyResults, err = overpassQuery(resupplyQuery)
		if err != nil {
			logger.Error().Err(err).Msg("Error querying for resupply points")
		}
	}
	// Create a map to store points that are in towns
	// NOTE(review): inTown is only read inside the town-check loop below to
	// short-circuit the inner iteration; neither the forest nor resupply
	// logic afterwards consumes it (forest re-checks town proximity
	// directly). Candidate for removal or for wiring into the later checks.
	inTown := make(map[int]bool)
	// Check which points in the segment are in towns
	if len(townResults) > 0 {
		logger.Debug().Int("townResults", len(townResults)).Msg("Checking points in towns")
		// For large segments, sample points to reduce computation
		sampledSegment := segment
		if len(segment) > 1000 {
			// Sample roughly every len/100-th point for large segments
			samplingRate := len(segment) / 100
			if samplingRate < 1 {
				samplingRate = 1
			}
			sampledSegment = make([]TrackPoint, 0, len(segment)/samplingRate+1)
			for i := 0; i < len(segment); i += samplingRate {
				sampledSegment = append(sampledSegment, segment[i])
				inTown[i] = false // Initialize sampled points
			}
			logger.Debug().
				Int("originalSize", len(segment)).
				Int("sampledSize", len(sampledSegment)).
				Int("samplingRate", samplingRate).
				Msg("Sampling segment points for town check")
		}
		// Process sampled points
		for i, point := range sampledSegment {
			// Find original index if we're using sampling
			// NOTE(review): this reverse mapping uses integer division
			// (len(segment)/len(sampledSegment)) and can drift from the
			// i*samplingRate indexes used when sampling above, so inTown
			// keys may not line up exactly with the sampled points.
			originalIndex := i
			if len(sampledSegment) != len(segment) {
				originalIndex = i * (len(segment) / len(sampledSegment))
				if originalIndex >= len(segment) {
					originalIndex = len(segment) - 1
				}
			}
			for _, el := range townResults {
				if len(el.Geometry) > 0 {
					// Sample geometry points for large geometries
					geometryToCheck := el.Geometry
					if len(el.Geometry) > 100 {
						// Sample geometry points (about 20 per way)
						samplingRate := len(el.Geometry) / 20
						if samplingRate < 1 {
							samplingRate = 1
						}
						geometryToCheck = make([]struct {
							Lat float64 `json:"lat"`
							Lon float64 `json:"lon"`
						}, 0, len(el.Geometry)/samplingRate+1)
						for j := 0; j < len(el.Geometry); j += samplingRate {
							geometryToCheck = append(geometryToCheck, el.Geometry[j])
						}
					}
					for _, geom := range geometryToCheck {
						dist := haversine(geom.Lat, geom.Lon, point.Point.Latitude, point.Point.Longitude)
						// If point is within 1km of a town feature, consider it in town
						if dist <= 1.0 {
							inTown[originalIndex] = true
							break
						}
					}
				}
				if inTown[originalIndex] {
					break
				}
			}
		}
		logger.Debug().Msg("Finished checking points in towns")
	}
	// Process forest results
	if len(forestResults) > 0 {
		logger.Debug().Int("forestResults", len(forestResults)).Msg("Processing forest results")
		// Find the closest forest to the end of the segment that's not in a town
		endPoint := segment[len(segment)-1]
		closestDist := math.MaxFloat64
		// First try to find a forest that's not in a town
		forestPointsChecked := 0
		forestPointsTotal := 0
		for _, el := range forestResults {
			if len(el.Geometry) > 0 {
				forestPointsTotal += len(el.Geometry)
				// Sample geometry points for large geometries
				geometryToCheck := el.Geometry
				if len(el.Geometry) > 100 {
					// Sample geometry points
					samplingRate := len(el.Geometry) / 20
					if samplingRate < 1 {
						samplingRate = 1
					}
					geometryToCheck = make([]struct {
						Lat float64 `json:"lat"`
						Lon float64 `json:"lon"`
					}, 0, len(el.Geometry)/samplingRate+1)
					for j := 0; j < len(el.Geometry); j += samplingRate {
						geometryToCheck = append(geometryToCheck, el.Geometry[j])
					}
					logger.Debug().
						Int("originalGeometrySize", len(el.Geometry)).
						Int("sampledGeometrySize", len(geometryToCheck)).
						Msg("Sampling forest geometry points")
				}
				for _, geom := range geometryToCheck {
					forestPointsChecked++
					dist := haversine(geom.Lat, geom.Lon, endPoint.Point.Latitude, endPoint.Point.Longitude)
					// Only check if forest point is in town if it's within the forest radius
					if dist <= float64(forestRadius) {
						// Check if this forest point is in a town
						inTownArea := false
						if len(townResults) > 0 {
							// Sample town results for large datasets
							townResultsToCheck := townResults
							if len(townResults) > 50 {
								// Sample town results
								samplingRate := len(townResults) / 10
								if samplingRate < 1 {
									samplingRate = 1
								}
								townResultsToCheck = make([]OverpassElement, 0, len(townResults)/samplingRate+1)
								for j := 0; j < len(townResults); j += samplingRate {
									townResultsToCheck = append(townResultsToCheck, townResults[j])
								}
								logger.Debug().
									Int("originalTownResultsSize", len(townResults)).
									Int("sampledTownResultsSize", len(townResultsToCheck)).
									Msg("Sampling town results for forest check")
							}
							for _, townEl := range townResultsToCheck {
								if len(townEl.Geometry) > 0 {
									// Sample town geometry for large geometries
									townGeometryToCheck := townEl.Geometry
									if len(townEl.Geometry) > 100 {
										// Sample town geometry points
										samplingRate := len(townEl.Geometry) / 20
										if samplingRate < 1 {
											samplingRate = 1
										}
										townGeometryToCheck = make([]struct {
											Lat float64 `json:"lat"`
											Lon float64 `json:"lon"`
										}, 0, len(townEl.Geometry)/samplingRate+1)
										for j := 0; j < len(townEl.Geometry); j += samplingRate {
											townGeometryToCheck = append(townGeometryToCheck, townEl.Geometry[j])
										}
									}
									for _, townGeom := range townGeometryToCheck {
										townDist := haversine(geom.Lat, geom.Lon, townGeom.Lat, townGeom.Lon)
										// If forest point is within 1km of a town feature, consider it in town
										if townDist <= 1.0 {
											inTownArea = true
											break
										}
									}
								}
								if inTownArea {
									break
								}
							}
						}
						// Prefer forests that are not in towns
						if !inTownArea && dist < closestDist {
							closestDist = dist
							forest = true
							forestLat = geom.Lat
							forestLon = geom.Lon
						}
					}
					// Add a progress log every 1000 points to show the function is still working
					if forestPointsChecked%1000 == 0 {
						logger.Debug().
							Int("forestPointsChecked", forestPointsChecked).
							Int("forestPointsTotal", forestPointsTotal).
							Msg("Forest points processing progress")
					}
				}
			}
		}
		logger.Debug().
			Int("forestPointsChecked", forestPointsChecked).
			Int("forestPointsTotal", forestPointsTotal).
			Bool("forestFound", forest).
			Msg("Completed forest points processing")
		// If we couldn't find a forest outside of town, fall back to any forest
		// (town proximity is intentionally ignored in this second pass).
		if !forest {
			logger.Debug().Msg("No forest outside town found, falling back to any forest")
			closestDist = math.MaxFloat64
			forestPointsChecked = 0
			for _, el := range forestResults {
				if len(el.Geometry) > 0 {
					// Sample geometry points for large geometries
					geometryToCheck := el.Geometry
					if len(el.Geometry) > 100 {
						// Sample geometry points
						samplingRate := len(el.Geometry) / 20
						if samplingRate < 1 {
							samplingRate = 1
						}
						geometryToCheck = make([]struct {
							Lat float64 `json:"lat"`
							Lon float64 `json:"lon"`
						}, 0, len(el.Geometry)/samplingRate+1)
						for j := 0; j < len(el.Geometry); j += samplingRate {
							geometryToCheck = append(geometryToCheck, el.Geometry[j])
						}
					}
					for _, geom := range geometryToCheck {
						forestPointsChecked++
						dist := haversine(geom.Lat, geom.Lon, endPoint.Point.Latitude, endPoint.Point.Longitude)
						if dist < closestDist && dist <= float64(forestRadius) {
							closestDist = dist
							forest = true
							forestLat = geom.Lat
							forestLon = geom.Lon
						}
						// Add a progress log every 1000 points
						if forestPointsChecked%1000 == 0 {
							logger.Debug().
								Int("fallbackForestPointsChecked", forestPointsChecked).
								Msg("Fallback forest points processing progress")
						}
					}
				}
			}
			logger.Debug().
				Int("fallbackForestPointsChecked", forestPointsChecked).
				Bool("forestFound", forest).
				Msg("Completed fallback forest points processing")
		}
	}
	// Process resupply results
	if len(resupplyResults) > 0 {
		logger.Debug().Int("resupplyResults", len(resupplyResults)).Msg("Processing resupply results")
		// Find the closest resupply to the end of the segment
		endPoint := segment[len(segment)-1]
		closestDist := math.MaxFloat64
		for _, el := range resupplyResults {
			dist := haversine(el.Lat, el.Lon, endPoint.Point.Latitude, endPoint.Point.Longitude)
			if dist < closestDist && dist <= float64(resupplyRadius)/1000.0 {
				closestDist = dist
				resupply = true
				resupplyLat = el.Lat
				resupplyLon = el.Lon
			}
		}
		// Divide segment into thirds and check for resupply in each third
		segmentLength := len(segment)
		if segmentLength > 0 {
			logger.Debug().Int("segmentLength", segmentLength).Msg("Checking resupply in segment thirds")
			firstThirdEnd := segmentLength / 3
			secondThirdEnd := 2 * segmentLength / 3
			// For large segments, sample points in each third
			sampledFirstThird := make([]TrackPoint, 0)
			sampledMiddleThird := make([]TrackPoint, 0)
			sampledLastThird := make([]TrackPoint, 0)
			if segmentLength > 1000 {
				// Sample points in each third
				samplingRate := segmentLength / 300 // Aim for about 100 points per third
				if samplingRate < 1 {
					samplingRate = 1
				}
				// Sample first third
				for i := 0; i < firstThirdEnd; i += samplingRate {
					sampledFirstThird = append(sampledFirstThird, segment[i])
				}
				// Sample middle third
				for i := firstThirdEnd; i < secondThirdEnd; i += samplingRate {
					sampledMiddleThird = append(sampledMiddleThird, segment[i])
				}
				// Sample last third
				for i := secondThirdEnd; i < segmentLength; i += samplingRate {
					sampledLastThird = append(sampledLastThird, segment[i])
				}
				logger.Debug().
					Int("firstThirdOriginal", firstThirdEnd).
					Int("firstThirdSampled", len(sampledFirstThird)).
					Int("middleThirdOriginal", secondThirdEnd-firstThirdEnd).
					Int("middleThirdSampled", len(sampledMiddleThird)).
					Int("lastThirdOriginal", segmentLength-secondThirdEnd).
					Int("lastThirdSampled", len(sampledLastThird)).
					Msg("Sampling segment thirds for resupply check")
			} else {
				// Use all points if segment is not large
				sampledFirstThird = segment[:firstThirdEnd]
				sampledMiddleThird = segment[firstThirdEnd:secondThirdEnd]
				sampledLastThird = segment[secondThirdEnd:]
			}
			// Check for resupply in first third
			for _, el := range resupplyResults {
				for _, point := range sampledFirstThird {
					dist := haversine(el.Lat, el.Lon, point.Point.Latitude, point.Point.Longitude)
					if dist <= float64(resupplyRadius)/1000.0 {
						resupplyFirstThird = true
						break
					}
				}
				if resupplyFirstThird {
					break
				}
			}
			// Check for resupply in middle third
			for _, el := range resupplyResults {
				for _, point := range sampledMiddleThird {
					dist := haversine(el.Lat, el.Lon, point.Point.Latitude, point.Point.Longitude)
					if dist <= float64(resupplyRadius)/1000.0 {
						resupplyMiddleThird = true
						break
					}
				}
				if resupplyMiddleThird {
					break
				}
			}
			// Check for resupply in the last third
			for _, el := range resupplyResults {
				for _, point := range sampledLastThird {
					dist := haversine(el.Lat, el.Lon, point.Point.Latitude, point.Point.Longitude)
					if dist <= float64(resupplyRadius)/1000.0 {
						resupplyLastThird = true
						break
					}
				}
				if resupplyLastThird {
					break
				}
			}
			logger.Debug().
				Bool("resupplyFirstThird", resupplyFirstThird).
				Bool("resupplyMiddleThird", resupplyMiddleThird).
				Bool("resupplyLastThird", resupplyLastThird).
				Msg("Completed resupply check in segment thirds")
		}
	}
	// Set resupply to true if any of the third fields are true
	if resupplyFirstThird || resupplyMiddleThird || resupplyLastThird {
		resupply = true
	}
	// Log completion of the function
	logger.Debug().
		Bool("forest", forest).
		Bool("resupply", resupply).
		Bool("resupplyFirstThird", resupplyFirstThird).
		Bool("resupplyMiddleThird", resupplyMiddleThird).
		Bool("resupplyLastThird", resupplyLastThird).
		Msg("Completed findForestAndResupply function")
	return forest, forestLat, forestLon, resupply, resupplyLat, resupplyLon, resupplyFirstThird, resupplyMiddleThird, resupplyLastThird
}
// calculateBoundingBox walks the segment once and returns the geographic
// extent it covers plus the raw GPX points in segment order.
// Return order is (minLat, minLon, maxLat, maxLon, points); an empty segment
// yields the inverted sentinel box (90, 180, -90, -180) and a nil slice.
func calculateBoundingBox(segment []TrackPoint) (float64, float64, float64, float64, []gpx.GPXPoint) {
	// Comparison-based helpers (rather than math.Min/Max) so the original
	// handling of degenerate values is preserved exactly.
	lower := func(cur, v float64) float64 {
		if v < cur {
			return v
		}
		return cur
	}
	upper := func(cur, v float64) float64 {
		if v > cur {
			return v
		}
		return cur
	}
	minLat, maxLat := 90.0, -90.0
	minLon, maxLon := 180.0, -180.0
	var gpxPts []gpx.GPXPoint
	for _, tp := range segment {
		gpxPts = append(gpxPts, tp.Point)
		minLat = lower(minLat, tp.Point.Latitude)
		maxLat = upper(maxLat, tp.Point.Latitude)
		minLon = lower(minLon, tp.Point.Longitude)
		maxLon = upper(maxLon, tp.Point.Longitude)
	}
	return minLat, minLon, maxLat, maxLon, gpxPts
}
// createDaySegment assembles the DaySegment for one day of the plan: it runs
// the Overpass-backed forest/resupply lookup for the segment, then derives
// the day's distance, elevation gain and effort score
// (effort = distance + elevation/100 * elevFactor).
func createDaySegment(segment []TrackPoint, dayNumber int, elevFactor float64, forestRadius, resupplyRadius int, useBatchQuery bool) DaySegment {
	logger.Debug().
		Int("dayNumber", dayNumber).
		Int("segmentSize", len(segment)).
		Int("forestRadius", forestRadius).
		Int("resupplyRadius", resupplyRadius).
		Bool("useBatchQuery", useBatchQuery).
		Msg("Creating day segment")
	forest, fLat, fLon, resupply, rLat, rLon, rFirst, rMiddle, rLast := findForestAndResupply(segment, forestRadius, resupplyRadius, useBatchQuery)
	logger.Debug().
		Int("dayNumber", dayNumber).
		Bool("forest", forest).
		Bool("resupply", resupply).
		Msg("Forest and resupply check completed")
	// Distance and elevation fields are cumulative along the route, so the
	// day's totals are simple end-minus-start differences.
	first := segment[0]
	last := segment[len(segment)-1]
	distanceKm := last.Distance - first.Distance
	elevGain := last.Elevation - first.Elevation
	effortScore := distanceKm + (elevGain / 100.0 * elevFactor)
	day := DaySegment{
		Segment:             segment,
		Forest:              forest,
		ForestLat:           fLat,
		ForestLon:           fLon,
		Resupply:            resupply,
		ResupplyLat:         rLat,
		ResupplyLon:         rLon,
		ResupplyFirstThird:  rFirst,
		ResupplyMiddleThird: rMiddle,
		ResupplyLastThird:   rLast,
		Distance:            distanceKm,
		Elevation:           elevGain,
		Effort:              effortScore,
		DayNumber:           dayNumber,
	}
	logger.Debug().
		Int("dayNumber", dayNumber).
		Float64("distance", distanceKm).
		Float64("elevation", elevGain).
		Float64("effort", effortScore).
		Msg("Day segment created")
	return day
}
// createGPXForSegment builds a GPX document for a single day: the day's
// track, waypoints for known Nordic grocery chains within 1 km of the route
// (found via one Overpass query over the segment's bounding box), and the
// chosen sleep spot / resupply point if the day has them.
func createGPXForSegment(daySegment DaySegment, filename string) (*gpx.GPX, error) {
	minLat, minLon, maxLat, maxLon, trackPts := calculateBoundingBox(daySegment.Segment)
	doc := gpx.GPX{
		Tracks: []gpx.GPXTrack{{
			Name:     filename,
			Segments: []gpx.GPXTrackSegment{{Points: trackPts}},
		}},
	}
	// One-time Overpass query for all resupply POIs in the bounding box
	query := fmt.Sprintf(`
[out:json];
node["shop"]["name"~"^(ICA|Coop|Willys|Hemk.*|Lidl|City Gross|Rema 1000|Kiwi|Meny|Extra|Spar)$", i](%f,%f,%f,%f);
out center;`, minLat, minLon, maxLat, maxLon)
	pois, err := overpassQuery(query)
	if err != nil {
		return nil, err
	}
	seen := make(map[string]bool)
	for _, poi := range pois {
		// Distance from the POI to the nearest trackpoint of the day.
		nearest := 1e6
		for _, pt := range daySegment.Segment {
			if d := haversine(poi.Lat, poi.Lon, pt.Point.Latitude, pt.Point.Longitude); d < nearest {
				nearest = d
			}
		}
		// Keep only POIs within 1 km of the route, deduplicated on
		// coordinates rounded to five decimals.
		if nearest > 1.0 {
			continue
		}
		key := fmt.Sprintf("%.5f,%.5f", poi.Lat, poi.Lon)
		if seen[key] {
			continue
		}
		seen[key] = true
		doc.Waypoints = append(doc.Waypoints, gpx.GPXPoint{
			Point: gpx.Point{
				Latitude:  poi.Lat,
				Longitude: poi.Lon,
			},
			Name: "Resupply",
		})
	}
	if daySegment.Forest {
		doc.Waypoints = append(doc.Waypoints, gpx.GPXPoint{
			Point: gpx.Point{
				Latitude:  daySegment.ForestLat,
				Longitude: daySegment.ForestLon,
			},
			Name: "Sleep Spot",
		})
	}
	if daySegment.Resupply {
		doc.Waypoints = append(doc.Waypoints, gpx.GPXPoint{
			Point: gpx.Point{
				Latitude:  daySegment.ResupplyLat,
				Longitude: daySegment.ResupplyLon,
			},
			Name: "Resupply",
		})
	}
	return &doc, nil
}
// saveGPXFile serializes the GPX document to XML and writes it to filename
// with 0644 permissions.
func saveGPXFile(g *gpx.GPX, filename string) error {
	// Renamed the local from "xml" to avoid shadowing the common package name.
	data, err := g.ToXml(gpx.ToXmlParams{})
	if err != nil {
		return err
	}
	return os.WriteFile(filename, data, 0644)
}
// printBanner writes the ASCII-art application banner to stdout in bold
// high-intensity cyan.
func printBanner() {
	banner := `
 ____ _ _ ____ _ ____ _
| _ \(_) ___ _ _ _| | ___ | _ \ ___ _ _| |_ ___| _ \| | __ _ _ __ _ __ ___ _ __
| |_) | |/ __| | | |/ / |/ _ \ | |_) / _ \| | | | __/ _ \ |_) | |/ _' | '_ \| '_ \ / _ \ '__|
| _ <| | (__| |_| / /| | __/ | _ < (_) | |_| | || __/ __/| | (_| | | | | | | | __/ |
|_| \_\_|\___|\__, /___|\___| |_| \_\___/ \__,_|\__\___|_| |_|\__,_|_| |_|_| |_|\___|_|
|___/
`
	fmt.Println(color.New(color.FgHiCyan, color.Bold).Sprint(banner))
}
// allSegments accumulates every DaySegment passed through printSegmentSummary
// so printFinalSummary can render a table covering the whole trip.
var allSegments []DaySegment
// printSegmentSummary records a day segment in the global list used by the
// final summary table and, when markdown output is requested, prints a
// colorized one-line summary row for the day.
func printSegmentSummary(segment DaySegment, printMd bool) {
	// Always record the segment so printFinalSummary can build its table.
	allSegments = append(allSegments, segment)
	if !printMd {
		return
	}
	// Green check / red cross for the availability flags.
	mark := func(ok bool) (string, *color.Color) {
		if ok {
			return "✓", color.New(color.FgGreen)
		}
		return "✗", color.New(color.FgRed)
	}
	resupplyMark, resupplyColor := mark(segment.Resupply)
	forestMark, forestColor := mark(segment.Forest)
	// Print the formatted markdown-style row.
	fmt.Printf("| %s | %s | %s | %s effort | Resupply: %s | Forest: %s |\n",
		color.New(color.FgHiWhite, color.Bold).Sprint(fmt.Sprintf("Day %2d", segment.DayNumber)),
		color.New(color.FgCyan).Sprint(fmt.Sprintf("%.1f km", segment.Distance)),
		color.New(color.FgYellow).Sprint(fmt.Sprintf("%.0f m", segment.Elevation)),
		color.New(color.FgMagenta).Sprint(fmt.Sprintf("%.2f", segment.Effort)),
		resupplyColor.Sprint(resupplyMark),
		forestColor.Sprint(forestMark))
}
// printFinalSummary prints a nice summary table at the end of the run:
// one row per day (distance, elevation, effort, resupply availability and
// rough location within the day, forest camping), followed by aggregate
// trip statistics. It is a no-op unless prettyOutput is enabled.
func printFinalSummary(segments []DaySegment, prettyOutput bool) {
	if !prettyOutput {
		return
	}
	// Guard against an empty plan: without this, the average daily
	// distance below would be 0/0 (NaN) and an empty table would render.
	if len(segments) == 0 {
		return
	}
	// Calculate summary statistics across all days.
	totalDistance := 0.0
	totalElevation := 0.0
	totalEffort := 0.0
	resupplyCount := 0
	forestCount := 0
	for _, segment := range segments {
		totalDistance += segment.Distance
		totalElevation += segment.Elevation
		totalEffort += segment.Effort
		if segment.Resupply {
			resupplyCount++
		}
		if segment.Forest {
			forestCount++
		}
	}
	avgDailyDistance := totalDistance / float64(len(segments))
	// Build the per-day table with colored headers.
	table := tablewriter.NewWriter(os.Stdout)
	table.SetHeader([]string{"Day", "Distance", "Elevation", "Effort", "Resupply", "Resupply Locations", "Forest"})
	table.SetBorder(true)
	table.SetRowLine(true)
	table.SetCenterSeparator("|")
	table.SetColumnSeparator("|")
	table.SetRowSeparator("-")
	table.SetHeaderColor(
		tablewriter.Colors{tablewriter.Bold, tablewriter.FgHiWhiteColor},
		tablewriter.Colors{tablewriter.Bold, tablewriter.FgCyanColor},
		tablewriter.Colors{tablewriter.Bold, tablewriter.FgYellowColor},
		tablewriter.Colors{tablewriter.Bold, tablewriter.FgMagentaColor},
		tablewriter.Colors{tablewriter.Bold, tablewriter.FgGreenColor},
		tablewriter.Colors{tablewriter.Bold, tablewriter.FgGreenColor},
		tablewriter.Colors{tablewriter.Bold, tablewriter.FgGreenColor},
	)
	// Add one row per day segment.
	for _, segment := range segments {
		resupplyStr := "✓"
		if !segment.Resupply {
			resupplyStr = "✗"
		}
		// Create a string showing where resupply is available in the segment
		// (which thirds of the day's route pass a shop).
		resupplyLocations := ""
		if segment.ResupplyFirstThird {
			resupplyLocations += "Start "
		}
		if segment.ResupplyMiddleThird {
			resupplyLocations += "Mid "
		}
		if segment.ResupplyLastThird {
			resupplyLocations += "End "
		}
		if resupplyLocations == "" {
			resupplyLocations = "None"
		}
		forestStr := "✓"
		if !segment.Forest {
			forestStr = "✗"
		}
		table.Append([]string{
			fmt.Sprintf("Day %d", segment.DayNumber),
			fmt.Sprintf("%.1f km", segment.Distance),
			fmt.Sprintf("%.0f m", segment.Elevation),
			fmt.Sprintf("%.2f", segment.Effort),
			resupplyStr,
			resupplyLocations,
			forestStr,
		})
	}
	// Print the table
	fmt.Println("\n🚲 Route Summary:")
	table.Render()
	// Print additional summary statistics
	summaryColor := color.New(color.FgHiCyan, color.Bold)
	fmt.Println("\n📊 Trip Statistics:")
	fmt.Printf("  %s: %.1f km (%.1f km/day avg)\n", summaryColor.Sprint("Total Distance"), totalDistance, avgDailyDistance)
	fmt.Printf("  %s: %.0f m\n", summaryColor.Sprint("Total Elevation"), totalElevation)
	fmt.Printf("  %s: %d of %d days\n", summaryColor.Sprint("Resupply Available"), resupplyCount, len(segments))
	fmt.Printf("  %s: %d of %d days\n", summaryColor.Sprint("Forest Camping"), forestCount, len(segments))
}
// planRoute is the planning pipeline: it parses the GPX file at inputFile,
// splits the route into config.Days daily segments (honoring forced
// multiStops), then runs several effort-rebalancing passes so that no
// single day is disproportionately hard. Returns one DaySegment per day,
// or an error if the GPX file cannot be parsed.
//
// Rebalancing passes, in order:
//  1. Even out groups of consecutive very-high-effort days.
//  2. Smooth a significant first-half/second-half effort imbalance
//     across all active (non-rest) days.
//  3. Aggressive global balancing when the effort standard deviation
//     is high across active days.
//  4. Special handling when the final segments are much harder than
//     average (common with forced stops).
//
// NOTE(review): the rebalancing passes adjust Effort/Distance numbers only;
// the underlying track points of each segment are not re-cut.
func planRoute(inputFile string, config PlannerConfig, multiStops []MultiStop) ([]DaySegment, error) {
	logger.Info().
		Str("inputFile", inputFile).
		Int("days", config.Days).
		Float64("elevFactor", config.ElevFactor).
		Int("forestRadius", config.ForestRadius).
		Int("resupplyRadius", config.ResupplyRadius).
		Int("multiStops", len(multiStops)).
		Msg("Planning route")
	gpxData, err := parseGPXFile(inputFile)
	if err != nil {
		logger.Error().Err(err).Str("inputFile", inputFile).Msg("Failed to parse GPX file")
		return nil, err
	}
	routeData := processRouteData(gpxData, config.ElevFactor)
	logger.Debug().Msg("Calculating cut indexes for day segments")
	cutIndexes := calculateCutIndexes(routeData, config.Days, config.ElevFactor, multiStops)
	logger.Debug().Ints("cutIndexes", cutIndexes).Msg("Cut indexes calculated")
	var segments []DaySegment
	var start int
	logger.Debug().Msg("Creating day segments")
	totalSegments := len(cutIndexes)
	// Each cut index is the exclusive end of one day's slice of track points;
	// start carries forward so segments tile the route without gaps.
	for i, end := range cutIndexes {
		logger.Debug().
			Int("day", i+1).
			Int("totalDays", totalSegments).
			Int("start", start).
			Int("end", end).
			Msg("Starting segment creation")
		// Create the segment
		segment := createSegment(routeData.Points, start, end, cutIndexes, i)
		logger.Debug().
			Int("day", i+1).
			Int("segmentSize", len(segment)).
			Msg("Segment created, starting day segment processing")
		// Create the day segment (includes forest/resupply POI lookups)
		daySegment := createDaySegment(segment, i+1, config.ElevFactor, config.ForestRadius, config.ResupplyRadius, config.UseBatchQuery)
		// Add to a segments array
		segments = append(segments, daySegment)
		logger.Debug().
			Int("day", i+1).
			Int("totalDays", totalSegments).
			Float64("distance", daySegment.Distance).
			Bool("forest", daySegment.Forest).
			Bool("resupply", daySegment.Resupply).
			Msg("Day segment added to plan")
		start = end
	}
	logger.Debug().
		Int("totalSegments", len(segments)).
		Msg("All day segments created")
	// Post-processing to handle segments with very high effort
	// Calculate target effort per day
	totalEffort := 0.0
	for _, seg := range segments {
		totalEffort += seg.Effort
	}
	targetEffortPerDay := totalEffort / float64(len(segments))
	// Count rest days (segments with very low effort, i.e. below 30% of the
	// per-day average — typically forced multi-night stops)
	restDays := 0
	for _, seg := range segments {
		if seg.Effort < targetEffortPerDay*0.3 {
			restDays++
		}
	}
	// Calculate target effort per active day (excluding rest days)
	// NOTE(review): if every day were classified as a rest day this would
	// divide by zero — presumably unreachable in practice; confirm.
	targetEffortPerActiveDay := totalEffort / float64(len(segments)-restDays)
	// Check if any segment has effort greater than 1.5 times the target
	highEffortSegments := false
	for _, seg := range segments {
		if seg.Effort > targetEffortPerActiveDay*1.5 {
			highEffortSegments = true
			logger.Debug().
				Int("dayNumber", seg.DayNumber).
				Float64("effort", seg.Effort).
				Float64("targetEffortPerActiveDay", targetEffortPerActiveDay).
				Float64("ratio", seg.Effort/targetEffortPerActiveDay).
				Msg("Found segment with very high effort")
		}
	}
	// If there are segments with very high effort, try to redistribute
	if highEffortSegments {
		// Pass 1: handle the general case of consecutive high-effort segments.
		// Find groups of consecutive segments with high effort (>140% of the
		// active-day target).
		var highEffortGroups [][]int
		var currentGroup []int
		// Identify high effort segments
		for i, seg := range segments {
			if seg.Effort > targetEffortPerActiveDay*1.4 {
				// Start a new group or add to existing (only indices adjacent
				// to the group's last member extend it)
				if len(currentGroup) == 0 || currentGroup[len(currentGroup)-1] == i-1 {
					currentGroup = append(currentGroup, i)
				} else {
					// Non-consecutive segment, start a new group
					if len(currentGroup) > 0 {
						highEffortGroups = append(highEffortGroups, currentGroup)
						currentGroup = []int{i}
					}
				}
			}
		}
		// Add the last group if it exists
		if len(currentGroup) > 0 {
			highEffortGroups = append(highEffortGroups, currentGroup)
		}
		// Process each group of high effort segments
		for _, group := range highEffortGroups {
			// Only process groups with at least 2 segments
			if len(group) >= 2 {
				// Calculate total effort for the group
				totalGroupEffort := 0.0
				for _, idx := range group {
					totalGroupEffort += segments[idx].Effort
				}
				// Calculate target effort per segment in this group
				targetGroupEffort := totalGroupEffort / float64(len(group))
				// Redistribute effort evenly within the group
				for _, idx := range group {
					segments[idx].Effort = targetGroupEffort
					segments[idx].Distance = targetGroupEffort * 0.8 // Approximate distance based on effort
				}
				logger.Debug().
					Ints("groupIndices", group).
					Float64("totalGroupEffort", totalGroupEffort).
					Float64("targetGroupEffort", targetGroupEffort).
					Msg("Applied general handling for segments with very high effort")
			}
		}
		// Pass 2: handle the case where there are high-effort segments after
		// rest days. This is a more comprehensive approach that considers all
		// non-rest days.
		// Find all active segments (non-rest days, i.e. >= 50% of average effort)
		var activeSegments []int
		for i, seg := range segments {
			if seg.Effort >= targetEffortPerDay*0.5 {
				activeSegments = append(activeSegments, i)
			}
		}
		// If we have at least 4 active segments, check for uneven distribution
		if len(activeSegments) >= 4 {
			// Calculate the average effort of the first half of active segments
			firstHalfEffort := 0.0
			firstHalfCount := len(activeSegments) / 2
			for i := 0; i < firstHalfCount; i++ {
				firstHalfEffort += segments[activeSegments[i]].Effort
			}
			avgFirstHalfEffort := firstHalfEffort / float64(firstHalfCount)
			// Calculate the average effort of the second half of active segments
			secondHalfEffort := 0.0
			for i := firstHalfCount; i < len(activeSegments); i++ {
				secondHalfEffort += segments[activeSegments[i]].Effort
			}
			avgSecondHalfEffort := secondHalfEffort / float64(len(activeSegments)-firstHalfCount)
			// If there's a significant difference (>20%) between first and second
			// half effort, redistribute. This handles both cases: second half
			// higher than first half, or first half higher than second half.
			if avgSecondHalfEffort > avgFirstHalfEffort*1.2 || avgFirstHalfEffort > avgSecondHalfEffort*1.2 {
				logger.Debug().
					Float64("avgFirstHalfEffort", avgFirstHalfEffort).
					Float64("avgSecondHalfEffort", avgSecondHalfEffort).
					Float64("ratio", avgSecondHalfEffort/avgFirstHalfEffort).
					Msg("Significant effort imbalance between first and second half")
				// Calculate total effort for all active segments
				totalActiveEffort := firstHalfEffort + secondHalfEffort
				targetActiveEffort := totalActiveEffort / float64(len(activeSegments))
				// Redistribute effort more evenly across all active segments.
				// Use a gradual approach to avoid abrupt changes.
				for i, idx := range activeSegments {
					// Calculate a factor based on position in the active segments array.
					// This creates a smooth transition from first to last segment.
					position := float64(i) / float64(len(activeSegments)-1) // 0.0 to 1.0
					if avgSecondHalfEffort > avgFirstHalfEffort {
						// Second half has higher effort, gradually increase first half and decrease second half
						if i < firstHalfCount {
							// First half: gradually increase effort, capped at the target
							factor := 1.0 + (0.2 * position) // 1.0 to ~1.1
							segments[idx].Effort = avgFirstHalfEffort * factor
							if segments[idx].Effort > targetActiveEffort {
								segments[idx].Effort = targetActiveEffort
							}
						} else {
							// Second half: gradually decrease effort, floored at the target
							factor := 1.0 - (0.2 * (1.0 - position)) // ~0.9 to 1.0
							segments[idx].Effort = avgSecondHalfEffort * factor
							if segments[idx].Effort < targetActiveEffort {
								segments[idx].Effort = targetActiveEffort
							}
						}
					} else {
						// First half has higher effort, gradually decrease first half and increase second half
						if i < firstHalfCount {
							// First half: gradually decrease effort, floored at the target
							factor := 1.0 - (0.2 * (1.0 - position)) // ~0.8 to 1.0
							segments[idx].Effort = avgFirstHalfEffort * factor
							if segments[idx].Effort < targetActiveEffort {
								segments[idx].Effort = targetActiveEffort
							}
						} else {
							// Second half: gradually increase effort, capped at the target
							factor := 1.0 + (0.2 * position) // 1.0 to ~1.2
							segments[idx].Effort = avgSecondHalfEffort * factor
							if segments[idx].Effort > targetActiveEffort {
								segments[idx].Effort = targetActiveEffort
							}
						}
					}
					segments[idx].Distance = segments[idx].Effort * 0.8 // Approximate distance based on effort
				}
				logger.Debug().
					Float64("targetActiveEffort", targetActiveEffort).
					Msg("Applied comprehensive effort redistribution across all active segments")
			}
		}
		// Pass 3: additional global balancing for very uneven distributions.
		// This is a more aggressive approach that directly balances the effort
		// across all segments.
		// Calculate the standard deviation of effort across all active segments
		if len(activeSegments) >= 6 {
			// Calculate mean effort
			meanEffort := 0.0
			for _, idx := range activeSegments {
				meanEffort += segments[idx].Effort
			}
			meanEffort /= float64(len(activeSegments))
			// Calculate standard deviation
			variance := 0.0
			for _, idx := range activeSegments {
				diff := segments[idx].Effort - meanEffort
				variance += diff * diff
			}
			variance /= float64(len(activeSegments))
			stdDev := math.Sqrt(variance)
			// If standard deviation is high (>25% of the mean), apply more
			// aggressive balancing
			if stdDev > meanEffort*0.25 {
				logger.Debug().
					Float64("meanEffort", meanEffort).
					Float64("stdDev", stdDev).
					Float64("ratio", stdDev/meanEffort).
					Msg("High standard deviation in effort distribution, applying global balancing")
				// Find min and max effort
				minEffort := math.MaxFloat64
				maxEffort := 0.0
				for _, idx := range activeSegments {
					if segments[idx].Effort < minEffort {
						minEffort = segments[idx].Effort
					}
					if segments[idx].Effort > maxEffort {
						maxEffort = segments[idx].Effort
					}
				}
				// If the difference between min and max is significant, balance more aggressively
				if maxEffort > minEffort*1.5 {
					// Sort active segments by effort
					// NOTE(review): this reorders activeSegments in place; later
					// passes no longer see it in route order.
					sort.Slice(activeSegments, func(i, j int) bool {
						return segments[activeSegments[i]].Effort < segments[activeSegments[j]].Effort
					})
					// Calculate target effort (weighted average of current efforts)
					targetEffort := 0.0
					totalWeight := 0.0
					for i, idx := range activeSegments {
						// Give more weight to segments in the middle of the effort range
						weight := 1.0
						if i < len(activeSegments)/3 {
							// Lower third gets less weight
							weight = 0.7
						} else if i >= 2*len(activeSegments)/3 {
							// Upper third gets less weight
							weight = 0.7
						}
						targetEffort += segments[idx].Effort * weight
						totalWeight += weight
					}
					targetEffort /= totalWeight
					// Apply balanced effort to all active segments
					for _, idx := range activeSegments {
						// Gradually adjust effort towards target (70% pull)
						if segments[idx].Effort < targetEffort {
							// Increase low effort segments
							segments[idx].Effort = segments[idx].Effort*0.3 + targetEffort*0.7
						} else if segments[idx].Effort > targetEffort {
							// Decrease high effort segments
							segments[idx].Effort = segments[idx].Effort*0.3 + targetEffort*0.7
						}
						segments[idx].Distance = segments[idx].Effort * 0.8 // Approximate distance based on effort
					}
					logger.Debug().
						Float64("minEffort", minEffort).
						Float64("maxEffort", maxEffort).
						Float64("targetEffort", targetEffort).
						Msg("Applied aggressive global balancing for very uneven effort distribution")
				}
			}
		}
		// Pass 4: special handling for the last few segments if they have very
		// high effort. This is particularly important for routes with forced stops.
		if len(segments) >= 3 {
			lastSegmentIndex := len(segments) - 1
			// Check if the last segment has significantly higher effort
			if segments[lastSegmentIndex].Effort > targetEffortPerActiveDay*1.3 {
				// Determine how many segments to include in the redistribution.
				// Start with the last 4 segments (or more for longer routes).
				startIdx := lastSegmentIndex - 3
				if len(segments) > 10 {
					// For longer routes, include more segments in the redistribution
					startIdx = lastSegmentIndex - (len(segments) / 3)
				}
				if startIdx < 0 {
					startIdx = 0
				}
				// Find the first non-rest day before the last segment
				firstNonRestIdx := lastSegmentIndex
				for i := lastSegmentIndex - 1; i >= 0; i-- {
					if segments[i].Effort < targetEffortPerDay*0.5 {
						// This is a rest day, continue searching
						continue
					}
					// Found a non-rest day
					firstNonRestIdx = i
					break
				}
				// If there's a significant gap between the first non-rest day and
				// the last segment, adjust the start index to include more segments
				if lastSegmentIndex-firstNonRestIdx > 2 {
					// Include at least one segment before the first non-rest day
					startIdx = firstNonRestIdx - 1
					if startIdx < 0 {
						startIdx = 0
					}
				}
				// Calculate total effort for these segments
				totalEffortLastSegments := 0.0
				segmentCount := 0
				for i := startIdx; i <= lastSegmentIndex; i++ {
					// Only include segments that aren't rest days
					if segments[i].Effort >= targetEffortPerDay*0.5 {
						totalEffortLastSegments += segments[i].Effort
						segmentCount++
					}
				}
				// Only proceed if we have at least 2 segments to redistribute
				if segmentCount >= 2 {
					// Calculate target effort per segment
					targetEffortLastSegments := totalEffortLastSegments / float64(segmentCount)
					// Redistribute effort evenly
					for i := startIdx; i <= lastSegmentIndex; i++ {
						// Only adjust segments that aren't rest days
						if segments[i].Effort >= targetEffortPerDay*0.5 {
							segments[i].Effort = targetEffortLastSegments
							segments[i].Distance = targetEffortLastSegments * 0.8 // Approximate distance based on effort
						}
					}
					logger.Debug().
						Int("startIdx", startIdx).
						Int("lastSegmentIndex", lastSegmentIndex).
						Float64("totalEffortLastSegments", totalEffortLastSegments).
						Float64("targetEffortLastSegments", targetEffortLastSegments).
						Msg("Applied special handling for last segments with high effort")
				}
			}
		}
	}
	logger.Info().Int("segments", len(segments)).Msg("Route planning completed")
	return segments, nil
}
// main parses CLI flags, configures logging, validates/prepares the
// optional output directory, runs the route planner, writes per-day GPX
// files (when an output directory is writable), and prints the per-day
// and final summaries.
func main() {
	// CLI flags
	input := flag.String("input", "route.gpx", "Input GPX file")
	days := flag.Int("days", 10, "Number of days to split the route into")
	elevFactor := flag.Float64("elevFactor", 4.0, "Weight factor per 100m elevation gain")
	forestRadius := flag.Int("forestRadius", 3, "Radius to check for forest (km)")
	resupplyRadius := flag.Int("resupplyRadius", 500, "Radius to check for shop POIs (meters)")
	outputDir := flag.String("outdir", "", "Output folder for day segments (if not specified, no files will be written)")
	multiStopStr := flag.String("multiStop", "", "Multi-day stops as lat,lon:nights;lat2,lon2:nights2")
	printMd := flag.Bool("printMarkdown", true, "Print daily segment summary")
	debug := flag.Bool("debug", false, "Enable debug logging")
	prettyOutput := flag.Bool("pretty", true, "Enable pretty output")
	useBatchQuery := flag.Bool("batchQuery", true, "Use batched Overpass API queries for better performance")
	flag.Parse()
	// Configure logger (debug flag raises verbosity globally)
	zerolog.TimeFieldFormat = zerolog.TimeFormatUnix
	logLevel := zerolog.InfoLevel
	if *debug {
		logLevel = zerolog.DebugLevel
	}
	zerolog.SetGlobalLevel(logLevel)
	// Use console writer for pretty output or direct JSON for machine parsing.
	// Note: pretty mode logs to stderr, plain mode to stdout.
	var logWriter io.Writer = os.Stdout
	if *prettyOutput {
		logWriter = zerolog.ConsoleWriter{
			Out:        os.Stderr,
			TimeFormat: time.RFC3339,
			NoColor:    false,
		}
	}
	logger = zerolog.New(logWriter).With().Timestamp().Logger()
	// Print a nice banner
	if *prettyOutput {
		printBanner()
	}
	config := PlannerConfig{
		Days:           *days,
		ElevFactor:     *elevFactor,
		ForestRadius:   *forestRadius,
		ResupplyRadius: *resupplyRadius,
		OutputDir:      *outputDir,
		PrintMd:        *printMd,
		PrettyOutput:   *prettyOutput,
		UseBatchQuery:  *useBatchQuery,
	}
	multiStops := parseMultiStops(*multiStopStr)
	// Prepare output directory
	logger.Debug().Str("dir", config.OutputDir).Msg("Preparing output directory")
	// Check if the output directory is specified and writable
	dirWritable := false
	// Only proceed with directory operations if an output directory is specified
	if config.OutputDir != "" {
		// First, make sure the directory exists
		if err := os.MkdirAll(config.OutputDir, 0755); err != nil {
			logger.Warn().Err(err).Str("dir", config.OutputDir).Msg("Failed to create output directory")
		} else {
			// Try to create a temporary file to test write permissions
			testFile := filepath.Join(config.OutputDir, ".write_test")
			if err := os.WriteFile(testFile, []byte("test"), 0644); err != nil {
				logger.Warn().Err(err).Str("dir", config.OutputDir).Msg("Output directory is not writable")
			} else {
				// Clean up the test file
				err := os.Remove(testFile)
				if err != nil {
					logger.Warn().Err(err).Str("file", testFile).Msg("Failed to remove temporary file")
				}
				// Clean the directory for new files: remove stale dayNN.gpx
				// output from a previous run (best-effort; failures only warn)
				files, err := os.ReadDir(config.OutputDir)
				if err == nil {
					for _, file := range files {
						if strings.HasPrefix(file.Name(), "day") && strings.HasSuffix(file.Name(), ".gpx") {
							err := os.Remove(filepath.Join(config.OutputDir, file.Name()))
							if err != nil {
								logger.Warn().Err(err).Str("file", file.Name()).Msg("Failed to remove existing file")
							}
						}
					}
				}
				// If we got here, the directory is writable
				dirWritable = true
			}
		}
	}
	segments, err := planRoute(*input, config, multiStops)
	if err != nil {
		logger.Fatal().Err(err).Msg("Failed to plan route")
	}
	// Save GPX files for each segment
	logger.Debug().Int("segments", len(segments)).Msg("Saving GPX files for segments")
	for _, segment := range segments {
		// Skip file operations if no output directory is specified or the directory is not writable
		if config.OutputDir != "" && dirWritable {
			filename := filepath.Join(config.OutputDir, fmt.Sprintf("day%02d.gpx", segment.DayNumber))
			logger.Debug().Str("filename", filename).Int("day", segment.DayNumber).Msg("Creating GPX for segment")
			forSegment, err := createGPXForSegment(segment, filename)
			if err != nil {
				logger.Error().Err(err).Str("filename", filename).Msg("Failed to create GPX for segment")
				continue
			}
			logger.Debug().Str("filename", filename).Msg("Saving GPX file")
			if err := saveGPXFile(forSegment, filename); err != nil {
				logger.Error().Err(err).Str("filename", filename).Msg("Failed to save GPX file")
			}
		} else if !dirWritable && config.OutputDir != "" {
			logger.Warn().Int("day", segment.DayNumber).Msg("Skipping GPX file creation because output directory is not writable")
		}
		printSegmentSummary(segment, config.PrintMd)
	}
	// Print final summary table
	printFinalSummary(allSegments, config.PrettyOutput)
	// Final success message
	successMsg := fmt.Sprintf("Route planning completed successfully! %d daily segments created.", len(segments))
	if config.PrettyOutput {
		successColor := color.New(color.FgHiGreen, color.Bold)
		fmt.Println(successColor.Sprint("\n✅ " + successMsg))
	} else {
		logger.Info().Int("segments", len(segments)).Msg(successMsg)
	}
}