Merge pull request 'backend/feature/add-description' (#63) from backend/feature/add-description into main
Some checks failed
Build and deploy the backend to production / Build and push image (push) Successful in 1m36s
/ push-to-remote (push) Failing after 33s
Build and deploy the backend to production / Deploy to production (push) Successful in 25s

Reviewed-on: #63
kscheidecker 2025-02-21 07:38:15 +00:00
commit f6d0cd5360
24 changed files with 704 additions and 137 deletions

3
backend/.gitignore vendored
View File

@@ -1,6 +1,9 @@
 # osm-cache
 cache_XML/
+
+# secrets
+*secrets.yaml
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]

363
backend/landmarks.json Normal file
View File

@@ -0,0 +1,363 @@
[
{
"name": "Chinatown",
"type": "shopping",
"location": [
45.7554934,
4.8444852
],
"osm_type": "way",
"osm_id": 996515596,
"attractiveness": 129,
"n_tags": 0,
"image_url": null,
"website_url": null,
"wiki_url": null,
"keywords": {},
"description": null,
"duration": 30,
"name_en": null,
"uuid": "285d159c-68ee-4b37-8d71-f27ee3d38b02",
"must_do": false,
"must_avoid": false,
"is_secondary": false,
"time_to_reach_next": 0,
"next_uuid": null,
"is_viewpoint": false,
"is_place_of_worship": false
},
{
"name": "Galeries Lafayette",
"type": "shopping",
"location": [
45.7627107,
4.8556833
],
"osm_type": "way",
"osm_id": 1069872743,
"attractiveness": 197,
"n_tags": 11,
"image_url": null,
"website_url": "http://www.galerieslafayette.com/",
"wiki_url": null,
"keywords": null,
"description": null,
"duration": 30,
"name_en": null,
"uuid": "28f1bc30-10d3-4944-8861-0ed9abca012d",
"must_do": false,
"must_avoid": false,
"is_secondary": false,
"time_to_reach_next": 0,
"next_uuid": null,
"is_viewpoint": false,
"is_place_of_worship": false
},
{
"name": "Muji",
"type": "shopping",
"location": [
45.7615971,
4.8543781
],
"osm_type": "way",
"osm_id": 1044165817,
"attractiveness": 259,
"n_tags": 14,
"image_url": null,
"website_url": "https://www.muji.com/fr/",
"wiki_url": null,
"keywords": null,
"description": null,
"duration": 30,
"name_en": "Muji",
"uuid": "957f86a5-6c00-41a2-815d-d6f739052be4",
"must_do": false,
"must_avoid": false,
"is_secondary": false,
"time_to_reach_next": 0,
"next_uuid": null,
"is_viewpoint": false,
"is_place_of_worship": false
},
{
"name": "HEMA",
"type": "shopping",
"location": [
45.7619133,
4.8565239
],
"osm_type": "way",
"osm_id": 1069872750,
"attractiveness": 156,
"n_tags": 9,
"image_url": null,
"website_url": "https://fr.westfield.com/lapartdieu/store/HEMA/www.hema.fr",
"wiki_url": null,
"keywords": null,
"description": null,
"duration": 30,
"name_en": null,
"uuid": "8dae9d3e-e4c4-4e80-941d-0b106e22c85b",
"must_do": false,
"must_avoid": false,
"is_secondary": false,
"time_to_reach_next": 0,
"next_uuid": null,
"is_viewpoint": false,
"is_place_of_worship": false
},
{
"name": "Cordeliers",
"type": "shopping",
"location": [
45.7622752,
4.8337998
],
"osm_type": "node",
"osm_id": 5545183519,
"attractiveness": 813,
"n_tags": 0,
"image_url": null,
"website_url": null,
"wiki_url": null,
"keywords": {},
"description": null,
"duration": 30,
"name_en": null,
"uuid": "ba02adb5-e28f-4645-8c2d-25ead6232379",
"must_do": false,
"must_avoid": false,
"is_secondary": false,
"time_to_reach_next": 0,
"next_uuid": null,
"is_viewpoint": false,
"is_place_of_worship": false
},
{
"name": "Halles de Lyon Paul Bocuse",
"type": "shopping",
"location": [
45.7628282,
4.8505601
],
"osm_type": "relation",
"osm_id": 971529,
"attractiveness": 272,
"n_tags": 12,
"image_url": null,
"website_url": "https://www.halles-de-lyon-paulbocuse.com/",
"wiki_url": "fr:Halles de Lyon-Paul Bocuse",
"keywords": {
"importance": "national",
"height": null,
"place_type": "marketplace",
"date": null
},
"description": "Halles de Lyon Paul Bocuse is a marketplace of national importance.",
"duration": 30,
"name_en": null,
"uuid": "bbd50de3-aa91-425d-90c2-d4abfd1b4abe",
"must_do": false,
"must_avoid": false,
"is_secondary": false,
"time_to_reach_next": 0,
"next_uuid": null,
"is_viewpoint": false,
"is_place_of_worship": false
},
{
"name": "Grand Bazar",
"type": "shopping",
"location": [
45.7632141,
4.8361975
],
"osm_type": "way",
"osm_id": 82399951,
"attractiveness": 93,
"n_tags": 7,
"image_url": null,
"website_url": null,
"wiki_url": null,
"keywords": null,
"description": null,
"duration": 30,
"name_en": null,
"uuid": "3de9131c-87c5-4efb-9fa8-064896fb8b29",
"must_do": false,
"must_avoid": false,
"is_secondary": false,
"time_to_reach_next": 0,
"next_uuid": null,
"is_viewpoint": false,
"is_place_of_worship": false
},
{
"name": "Shopping Area",
"type": "shopping",
"location": [
45.7673452,
4.8438683
],
"osm_type": "node",
"osm_id": 0,
"attractiveness": 156,
"n_tags": 0,
"image_url": null,
"website_url": null,
"wiki_url": null,
"keywords": {},
"description": null,
"duration": 30,
"name_en": null,
"uuid": "df2482a8-7e2e-4536-aad3-564899b2fa65",
"must_do": false,
"must_avoid": false,
"is_secondary": false,
"time_to_reach_next": 0,
"next_uuid": null,
"is_viewpoint": false,
"is_place_of_worship": false
},
{
"name": "Cour Oxyg\u00e8ne",
"type": "shopping",
"location": [
45.7620905,
4.8568873
],
"osm_type": "way",
"osm_id": 132673030,
"attractiveness": 63,
"n_tags": 5,
"image_url": null,
"website_url": null,
"wiki_url": null,
"keywords": null,
"description": null,
"duration": 30,
"name_en": null,
"uuid": "ed134f76-9a02-4bee-9c10-78454f7bc4ce",
"must_do": false,
"must_avoid": false,
"is_secondary": false,
"time_to_reach_next": 0,
"next_uuid": null,
"is_viewpoint": false,
"is_place_of_worship": false
},
{
"name": "P\u00f4le de Commerces et de Loisirs Confluence",
"type": "shopping",
"location": [
45.7410414,
4.8171031
],
"osm_type": "way",
"osm_id": 440270633,
"attractiveness": 259,
"n_tags": 14,
"image_url": null,
"website_url": "https://www.confluence.fr/",
"wiki_url": null,
"keywords": null,
"description": null,
"duration": 30,
"name_en": null,
"uuid": "dd7e2f5f-0e60-4560-b903-e5ded4b6e36a",
"must_do": false,
"must_avoid": false,
"is_secondary": false,
"time_to_reach_next": 0,
"next_uuid": null,
"is_viewpoint": false,
"is_place_of_worship": false
},
{
"name": "Grand H\u00f4tel-Dieu",
"type": "shopping",
"location": [
45.7586955,
4.8364597
],
"osm_type": "relation",
"osm_id": 300128,
"attractiveness": 546,
"n_tags": 22,
"image_url": null,
"website_url": "https://grand-hotel-dieu.com",
"wiki_url": "fr:H\u00f4tel-Dieu de Lyon",
"keywords": {
"importance": "international",
"height": null,
"place_type": "building",
"date": "C17"
},
"description": "Grand H\u00f4tel-Dieu is an internationally famous building. It was constructed in C17.",
"duration": 30,
"name_en": null,
"uuid": "a91265a8-ffbd-44f7-a7ab-3ff75f08fbab",
"must_do": false,
"must_avoid": false,
"is_secondary": false,
"time_to_reach_next": 0,
"next_uuid": null,
"is_viewpoint": false,
"is_place_of_worship": false
},
{
"name": "Westfield La Part-Dieu",
"type": "shopping",
"location": [
45.761331,
4.855676
],
"osm_type": "way",
"osm_id": 62338376,
"attractiveness": 546,
"n_tags": 22,
"image_url": null,
"website_url": "https://fr.westfield.com/lapartdieu",
"wiki_url": "fr:La Part-Dieu (centre commercial)",
"keywords": null,
"description": null,
"duration": 30,
"name_en": null,
"uuid": "7d60316f-d689-4fcf-be68-ffc09353b826",
"must_do": false,
"must_avoid": false,
"is_secondary": false,
"time_to_reach_next": 0,
"next_uuid": null,
"is_viewpoint": false,
"is_place_of_worship": false
},
{
"name": "Ainay",
"type": "shopping",
"location": [
45.7553105,
4.8312084
],
"osm_type": "node",
"osm_id": 5545126047,
"attractiveness": 132,
"n_tags": 0,
"image_url": null,
"website_url": null,
"wiki_url": null,
"keywords": {},
"description": null,
"duration": 30,
"name_en": null,
"uuid": "ad214f3d-a4b9-4078-876a-446caa7ab01c",
"must_do": false,
"must_avoid": false,
"is_secondary": false,
"time_to_reach_next": 0,
"next_uuid": null,
"is_viewpoint": false,
"is_place_of_worship": false
}
]

File diff suppressed because one or more lines are too long

View File

View File

@@ -8,8 +8,8 @@ from pydantic import BaseModel
 from ..overpass.overpass import Overpass, get_base_info
 from ..structs.landmark import Landmark
-from .get_time_distance import get_distance
+from ..utils.get_time_distance import get_distance
-from .utils import create_bbox
+from ..utils.bbox import create_bbox
@@ -103,7 +103,7 @@ class ClusterManager:
     out = out
 )
 except Exception as e:
-    self.logger.error(f"Error fetching clusters: {e}")
+    self.logger.warning(f"Error fetching clusters: {e}")

 if result is None :
     self.logger.debug(f"Found no {cluster_type} clusters, overpass query returned no datapoints.")
@@ -113,7 +113,7 @@ class ClusterManager:
 points = []
 for elem in result:
     osm_type = elem.get('type')

     # Get coordinates and append them to the points list
     _, coords = get_base_info(elem, osm_type)
     if coords is not None :
@@ -217,7 +217,7 @@ class ClusterManager:
 # Define the bounding box for a given radius around the coordinates
 bbox = create_bbox(cluster.centroid, 300)

 # Query neighborhoods and shopping malls
 selectors = ['"place"~"^(suburb|neighborhood|neighbourhood|quarter|city_block)$"']
@@ -242,27 +242,25 @@ class ClusterManager:
     out = 'ids center tags'
 )
 except Exception as e:
-    self.logger.error(f"Error fetching clusters: {e}")
+    self.logger.warning(f"Error fetching clusters: {e}")
     continue

 if result is None :
-    self.logger.error(f"Error fetching clusters: {e}")
+    self.logger.warning(f"Error fetching clusters: query result is None")
     continue

 for elem in result:
-    osm_type = elem.get('type')
-
-    id, coords, name = get_base_info(elem, osm_type, with_name=True)
+    # Get basic info
+    id, coords, name = get_base_info(elem, elem.get('type'), with_name=True)
     if name is None or coords is None :
         continue

     d = get_distance(cluster.centroid, coords)
     if d < min_dist :
         min_dist = d
         new_name = name                       # add name
-        osm_type = osm_type                   # add type: 'way' or 'relation'
+        osm_type = elem.get('type')           # add type: 'way' or 'relation'
         osm_id = id                           # add OSM id

 return Landmark(
     name=new_name,

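The refactored loop above now feeds raw Overpass elements straight into `get_base_info`. A hedged sketch of that call pattern (the element below is hand-built from the Cordeliers node appearing in landmarks.json; the exact coordinate extraction lives in overpass.py):

```python
# Hypothetical element, shaped like an Overpass JSON node result.
elem = {
    'type': 'node',
    'id': 5545183519,
    'lat': 45.7622752,
    'lon': 4.8337998,
    'tags': {'name': 'Cordeliers'},
}

# One call replaces the former two-step osm_type lookup.
osm_id, coords, name = get_base_info(elem, elem.get('type'), with_name=True)
# Expected, per the (lat, lon) ordering used in landmarks.json:
# osm_id=5545183519, coords=(45.7622752, 4.8337998), name='Cordeliers'
```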
View File

@@ -4,10 +4,10 @@ import yaml
 from ..structs.preferences import Preferences
 from ..structs.landmark import Landmark
-from .take_most_important import take_most_important
+from ..utils.take_most_important import take_most_important
 from .cluster_manager import ClusterManager
 from ..overpass.overpass import Overpass, get_base_info
-from .utils import create_bbox
+from ..utils.bbox import create_bbox
 from ..constants import AMENITY_SELECTORS_PATH, LANDMARK_PARAMETERS_PATH, OPTIMIZER_PARAMETERS_PATH
@@ -197,7 +197,7 @@ class LandmarkManager:
     out = 'ids center tags'
 )
 except Exception as e:
-    self.logger.error(f"Error fetching landmarks: {e}")
+    self.logger.debug(f"Failed to fetch landmarks, proceeding without: {str(e)}")
     continue

 return_list += self._to_landmarks(result, landmarktype, preference_level)
@@ -246,8 +246,6 @@
     attractiveness=0,
     n_tags=len(tags))
-
-# self.logger.debug('added landmark.')

 # Browse through tags to add information to landmark.
 for key, value in tags.items():
@@ -277,6 +275,7 @@
 if 'building:' in key or 'pay' in key :
     landmark.n_tags -= 1
+
 # Set the duration.
 if value in ['museum', 'aquarium', 'planetarium'] :
     landmark.duration = 60
@@ -287,14 +286,138 @@
     landmark.is_place_of_worship = False
     landmark.duration = 10
-else:
+landmark.description, landmark.keywords = self.description_and_keywords(tags)
 self.set_landmark_score(landmark, landmarktype, preference_level)
 landmarks.append(landmark)
 continue

 return landmarks
def description_and_keywords(self, tags: dict):
    """
    Generates a description and a set of keywords for a given landmark based on its tags.

    Params:
        tags (dict): A dictionary containing metadata about the landmark, including its name,
                     importance, height, date of construction, and visitor information.

    Returns:
        description (str): A string description of the landmark.
        keywords (dict): A dictionary of keywords with fields such as 'importance', 'height',
                         'place_type', and 'date'.
    """
    # Extract relevant fields
    name = tags.get('name')
    importance = tags.get('importance', None)
    n_visitors = tags.get('tourism:visitors', None)
    height = tags.get('height')
    place_type = self.get_place_type(tags)
    date = self.get_date(tags)

    if place_type is None :
        return None, None

    # Start the description.
    if importance is None :
        if len(tags.keys()) < 5 :
            return None, None
        if len(tags.keys()) < 10 :
            description = f"{name} is a well known {place_type}."
        elif len(tags.keys()) < 17 :
            importance = 'national'
            description = f"{name} is a {place_type} of national importance."
        else :
            importance = 'international'
            description = f"{name} is an internationally famous {place_type}."
    else :
        description = f"{name} is a {place_type} of {importance} importance."

    if height is not None and date is not None :
        description += f" This {place_type} was constructed in {date} and is ca. {height} meters high."
    elif height is not None :
        description += f" This {place_type} stands ca. {height} meters tall."
    elif date is not None:
        description += f" It was constructed in {date}."

    # Format the visitor number
    if n_visitors is not None :
        n_visitors = int(n_visitors)
        if n_visitors < 1000000 :
            description += f" It welcomes {int(n_visitors/1000)} thousand visitors every year."
        else :
            description += f" It welcomes {round(n_visitors/1000000, 1)} million visitors every year."

    # Set the keywords.
    keywords = {"importance": importance,
                "height": height,
                "place_type": place_type,
                "date": date}

    return description, keywords


def get_place_type(self, data):
    """
    Determines the type of the place based on available tags such as 'amenity', 'building',
    'historic', and 'leisure'. The priority order is: 'historic' > 'building' (if not generic) >
    'amenity' > 'leisure'.

    Params:
        data (dict): A dictionary containing metadata about the place.

    Returns:
        place_type (str): The determined type of the place, or None if no relevant type is found.
    """
    amenity = data.get('amenity', None)
    building = data.get('building', None)
    historic = data.get('historic', None)
    leisure = data.get('leisure')

    if historic and historic != "yes":
        return historic
    if building and building not in ["yes", "civic", "government", "apartments", "residential", "commericial", "industrial", "retail", "religious", "public", "service"]:
        return building
    if amenity:
        return amenity
    if leisure:
        return leisure
    return None


def get_date(self, data):
    """
    Extracts the most relevant date from the available tags, prioritizing 'construction_date',
    'start_date', 'year_of_construction', and 'opening_date' in that order.

    Params:
        data (dict): A dictionary containing metadata about the place.

    Returns:
        date (str): The most relevant date found, or None if no date is available.
    """
    construction_date = data.get('construction_date', None)
    opening_date = data.get('opening_date', None)
    start_date = data.get('start_date', None)
    year_of_construction = data.get('year_of_construction', None)

    # Prioritize based on availability
    if construction_date:
        return construction_date
    if start_date:
        return start_date
    if year_of_construction:
        return year_of_construction
    if opening_date:
        return opening_date
    return None
def dict_to_selector_list(d: dict) -> list:
    """
    Convert a dictionary of key-value pairs to a list of Overpass query strings.

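The new `description_and_keywords` helper is pure tag-to-text logic, so it is easy to sanity-check in isolation. A hedged sketch (the tag dict is hand-made and `manager` stands for a constructed `LandmarkManager`) that reproduces the Halles de Lyon entry seen in landmarks.json above:

```python
# Minimal tags for a marketplace with an explicit 'importance' tag; place_type
# resolves through get_place_type's 'amenity' branch, and no height/date/visitor
# tags are present, so no extra sentences are appended.
tags = {
    'name': 'Halles de Lyon Paul Bocuse',
    'importance': 'national',
    'amenity': 'marketplace',
}
description, keywords = manager.description_and_keywords(tags)
print(description)
# Halles de Lyon Paul Bocuse is a marketplace of national importance.
print(keywords)
# {'importance': 'national', 'height': None, 'place_type': 'marketplace', 'date': None}
```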
View File

@@ -29,7 +29,7 @@ def configure_logging():
 logger.info(f"Logging to Loki at {loki_url} with {loki_handler.labels} and {is_debug=}")
 logging_handlers = [loki_handler, logging.StreamHandler()]
-logging_level = logging.DEBUG
+logging_level = logging.DEBUG if is_debug else logging.INFO
 # silence the chatty logs loki generates itself
 logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)
 # no need for time since it's added by loki or can be shown in kube logs
@@ -39,7 +39,7 @@ def configure_logging():
 # if we are in a debug (local) session, set verbose and rich logging
 from rich.logging import RichHandler
 logging_handlers = [RichHandler()]
-logging_level = logging.DEBUG
+logging_level = logging.DEBUG if is_debug else logging.INFO
 logging_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'

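Both hunks stop hardcoding `logging.DEBUG` and derive the level from the debug flag instead. A minimal standalone sketch of the same pattern (the `DEBUG` environment variable is an assumption for illustration; how `is_debug` is actually set is outside these hunks):

```python
import logging
import os

# Assumed source of the flag; the real configure_logging() may derive it differently.
is_debug = os.getenv("DEBUG", "false").lower() == "true"

logging_level = logging.DEBUG if is_debug else logging.INFO
logging.basicConfig(
    level=logging_level,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
)
logging.getLogger(__name__).info("Effective level: %s", logging.getLevelName(logging_level))
```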
View File

@@ -1,17 +1,16 @@
 """Main app for backend api"""
 import logging
 import time
 from contextlib import asynccontextmanager
-from fastapi import FastAPI, HTTPException, BackgroundTasks, Query
+from fastapi import FastAPI, HTTPException, BackgroundTasks
 from .logging_config import configure_logging
-from .structs.landmark import Landmark, Toilets
+from .structs.landmark import Landmark
 from .structs.preferences import Preferences
 from .structs.linked_landmarks import LinkedLandmarks
 from .structs.trip import Trip
-from .utils.landmarks_manager import LandmarkManager
+from .landmarks.landmarks_manager import LandmarkManager
-from .utils.toilets_manager import ToiletsManager
+from .toilets.toilet_routes import router as toilets_router
 from .optimization.optimizer import Optimizer
 from .optimization.refiner import Refiner
 from .overpass.overpass import fill_cache
@@ -37,10 +36,14 @@ async def lifespan(app: FastAPI):
 app = FastAPI(lifespan=lifespan)
+app.include_router(toilets_router)

 @app.post("/trip/new")
 def new_trip(preferences: Preferences,
              start: tuple[float, float],
              end: tuple[float, float] | None = None,
              background_tasks: BackgroundTasks = None) -> Trip:
     """
     Main function to call the optimizer.
@@ -66,6 +69,8 @@ def new_trip(preferences: Preferences,
     end = start
     logger.info("No end coordinates provided. Using start=end.")
+logger.info(f"Requested new trip generation. Details:\n\tCoordinates: {start}\n\tTime: {preferences.max_time_minute}\n\tSightseeing: {preferences.sightseeing.score}\n\tNature: {preferences.nature.score}\n\tShopping: {preferences.shopping.score}")
 start_landmark = Landmark(name='start',
                           type='start',
                           location=(start[0], start[1]),
@@ -87,6 +92,7 @@
     n_tags=0)
 start_time = time.time()
+
 # Generate the landmarks from the start location
 landmarks, landmarks_short = manager.generate_landmarks_list(
     center_coordinates = start,
@@ -108,6 +114,7 @@
 try:
     base_tour = optimizer.solve_optimization(preferences.max_time_minute, landmarks_short)
 except Exception as exc:
+    logger.error(f"Trip generation failed: {str(exc)}")
     raise HTTPException(status_code=500, detail=f"Optimization failed: {str(exc)}") from exc

 t_first_stage = time.time() - start_time
@@ -119,11 +126,9 @@
     refined_tour = refiner.refine_optimization(landmarks, base_tour,
                                                preferences.max_time_minute,
                                                preferences.detour_tolerance_minute)
-except TimeoutError as te :
-    logger.error(f'Refiner failed : {str(te)} Using base tour.')
-    refined_tour = base_tour
 except Exception as exc :
-    raise HTTPException(status_code=500, detail=f"An unexpected error occurred: {str(exc)}") from exc
+    logger.warning(f"Refiner failed. Proceeding with base trip {str(exc)}")
+    refined_tour = base_tour

 t_second_stage = time.time() - start_time
@@ -158,6 +163,7 @@ def get_trip(trip_uuid: str) -> Trip:
     trip = cache_client.get(f"trip_{trip_uuid}")
     return trip
 except KeyError as exc:
+    logger.error(f"Failed to fetch trip with UUID {trip_uuid}: {str(exc)}")
     raise HTTPException(status_code=404, detail="Trip not found") from exc
@@ -176,6 +182,7 @@ def get_landmark(landmark_uuid: str) -> Landmark:
     landmark = cache_client.get(f"landmark_{landmark_uuid}")
     return landmark
 except KeyError as exc:
+    logger.error(f"Failed to fetch landmark with UUID {landmark_uuid}: {str(exc)}")
     raise HTTPException(status_code=404, detail="Landmark not found") from exc
@@ -194,6 +201,7 @@ def update_trip_time(trip_uuid: str, removed_landmark_uuid: str) -> Trip:
 try:
     trip = cache_client.get(f'trip_{trip_uuid}')
 except KeyError as exc:
+    logger.error(f"Failed to update trip with UUID {trip_uuid} (trip not found): {str(exc)}")
     raise HTTPException(status_code=404, detail='Trip not found') from exc

 landmarks = []
@@ -208,6 +216,7 @@ def update_trip_time(trip_uuid: str, removed_landmark_uuid: str) -> Trip:
         landmarks.append(landmark)
         next_uuid = landmark.next_uuid  # Prepare for the next iteration
 except KeyError as exc:
+    logger.error(f"Failed to update trip with UUID {trip_uuid} : {str(exc)}")
     raise HTTPException(status_code=404, detail=f'landmark {next_uuid} not found') from exc

 # Re-link every thing and compute times again
@@ -215,32 +224,3 @@ def update_trip_time(trip_uuid: str, removed_landmark_uuid: str) -> Trip:
 trip = Trip.from_linked_landmarks(linked_tour, cache_client)
 return trip
-
-@app.post("/toilets/new")
-def get_toilets(location: tuple[float, float] = Query(...), radius: int = 500) -> list[Toilets] :
-    """
-    Endpoint to find toilets within a specified radius from a given location.
-    This endpoint expects the `location` and `radius` as **query parameters**, not in the request body.
-
-    Args:
-        location (tuple[float, float]): The latitude and longitude of the location to search from.
-        radius (int, optional): The radius (in meters) within which to search for toilets. Defaults to 500 meters.
-
-    Returns:
-        list[Toilets]: A list of Toilets objects that meet the criteria.
-    """
-    if location is None:
-        raise HTTPException(status_code=406, detail="Coordinates not provided or invalid")
-    if not (-90 <= location[0] <= 90 or -180 <= location[1] <= 180):
-        raise HTTPException(status_code=422, detail="Start coordinates not in range")
-
-    toilets_manager = ToiletsManager(location, radius)
-    try :
-        toilets_list = toilets_manager.generate_toilet_list()
-        return toilets_list
-    except KeyError as exc:
-        raise HTTPException(status_code=404, detail="No toilets found") from exc

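The toilets endpoint deleted at the bottom of this file reappears in `toilet_routes.py` as an `APIRouter`, and the `app.include_router(toilets_router)` line near the top is what keeps `/toilets/new` reachable. A toy sketch of the mechanism (the route and handler names here are illustrative, not the project's):

```python
from fastapi import APIRouter, FastAPI

router = APIRouter()

@router.post("/example/new")
def example_endpoint() -> dict:
    # Handlers declared on a router behave exactly like ones declared on the app.
    return {"ok": True}

app = FastAPI()
app.include_router(router)  # mounts every route registered on `router`
```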
View File

@@ -257,7 +257,6 @@
 Returns:
     None: This function modifies the `prob` object by adding L-2 equality constraints in-place.
 """
-# FIXME: weird 0 artifact in the coefficients popping up
 # Loop through rows 1 to L-2 to prevent stacked ones
 for i in range(1, L-1):
     # Add the constraint that sums across each "row" or "block" in the decision variables
@@ -590,7 +589,7 @@
 try :
     prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit+1, gapRel=self.gap_rel))
 except Exception as exc :
-    raise Exception(f"No solution found: {exc}") from exc
+    raise Exception(f"No solution found: {str(exc)}") from exc

 status = pl.LpStatus[prob.status]
 solution = [pl.value(var) for var in x]  # The values of the decision variables (will be 0 or 1)
@@ -598,7 +597,7 @@
 # Raise error if no solution is found. FIXME: for now this throws the internal server error
 if status != 'Optimal' :
-    self.logger.error("The problem is overconstrained, no solution on first try.")
+    self.logger.warning("The problem is overconstrained, no solution on first try.")
     raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.")

 # If there is a solution, we're good to go, just check for connectiveness
@@ -608,7 +607,7 @@
 while circles is not None :
     i += 1
     if i == self.max_iter :
-        self.logger.error(f'Timeout: No solution found after {self.max_iter} iterations.')
+        self.logger.warning(f'Timeout: No solution found after {self.max_iter} iterations.')
         raise TimeoutError(f"Optimization took too long. No solution found after {self.max_iter} iterations.")

     for circle in circles :
@@ -618,12 +617,13 @@
 try :
     prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit, gapRel=self.gap_rel))
 except Exception as exc :
-    raise Exception(f"No solution found: {exc}") from exc
+    self.logger.warning("No solution found: {str(exc)")
+    raise Exception(f"No solution found: {str(exc)}") from exc

 solution = [pl.value(var) for var in x]
 if pl.LpStatus[prob.status] != 'Optimal' :
-    self.logger.error("The problem is overconstrained, no solution after {i} cycles.")
+    self.logger.warning("The problem is overconstrained, no solution after {i} cycles.")
     raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.")

 circles = self.is_connected(solution)

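For context on the solver calls being touched here, a self-contained PuLP toy problem using the same CBC options (`msg`, `timeLimit`, and `gapRel` are real `PULP_CBC_CMD` parameters; the problem itself is made up):

```python
import pulp as pl

prob = pl.LpProblem("toy", pl.LpMaximize)
x = [pl.LpVariable(f"x{i}", cat="Binary") for i in range(3)]
prob += 2 * x[0] + x[1] + x[2]   # objective
prob += pl.lpSum(x) <= 2         # at most two variables may be set

prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=2, gapRel=0.025))
if pl.LpStatus[prob.status] != 'Optimal':
    # Mirrors the warn-then-raise flow adopted in this diff.
    raise ArithmeticError("No solution could be found.")
solution = [pl.value(var) for var in x]  # 0/1 values, e.g. [1.0, 1.0, 0.0]
```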
View File

@@ -278,7 +278,7 @@
 better_tour_poly = concave_hull(MultiPoint(coords))  # Create concave hull with "core" of tour leaving out start and finish
 xs, ys = better_tour_poly.exterior.xy
 """
-ERROR HERE :
+FIXED : ERROR HERE :
 Exception has occurred: AttributeError
 'LineString' object has no attribute 'exterior'
 """
@@ -356,7 +356,7 @@
 # If unsuccessful optimization, use the base_tour.
 if new_tour is None:
-    self.logger.warning("No solution found for the refined tour. Returning the initial tour.")
+    self.logger.warning("Refiner failed: No solution found during second stage optimization.")
     new_tour = base_tour

 # If only one landmark, return it.
@@ -369,6 +369,7 @@
 # Fix the tour using Polygons if the path looks weird.
 # Conditions : circular trip and invalid polygon.
 if base_tour[0].location == base_tour[-1].location and not better_poly.is_valid :
+    self.logger.debug("Tours might be funky, attempting to correct with polygons")
     better_tour = self.fix_using_polygon(better_tour)

 return better_tour

View File

@@ -1,3 +1,4 @@
+"""Module defining the handling of cache data from Overpass requests."""
 import os
 import json
 import hashlib
@@ -61,7 +62,7 @@ class JSONCache(CachingStrategyBase):
     return None

 def set(self, key, value):
-    """Save the JSON data as an ElementTree to the cache."""
+    """Save the JSON data in the cache."""
     filename = self._filename(key)
     try:
         # Write the JSON data to the cache file
@@ -94,7 +95,7 @@ class JSONCache(CachingStrategyBase):
 def close(self):
     """Cleanup method, if needed."""
-    pass
+

 class CachingStrategy:
     """
@@ -107,6 +108,7 @@
 @classmethod
 def use(cls, strategy_name='JSON', **kwargs):
+    """Define the caching strategy to use."""
     if cls.__strategy:
         cls.__strategy.close()
@@ -119,10 +121,12 @@
 @classmethod
 def get(cls, key):
+    """Get the data from the cache."""
     return cls.__strategy.get(key)

 @classmethod
 def set(cls, key, value):
+    """Save the data in the cache."""
     cls.__strategy.set(key, value)

 @classmethod

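Usage of the `CachingStrategy` facade documented above, as a short sketch (the keyword arguments accepted by `use()` beyond the strategy name are not shown in this diff, so none are passed here):

```python
# Select the JSON file-backed strategy once at startup...
CachingStrategy.use('JSON')

# ...then read and write through the class from anywhere.
CachingStrategy.set('overpass_key', {'elements': []})   # save data in the cache
data = CachingStrategy.get('overpass_key')              # get the data back
```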
View File

@@ -1,5 +1,6 @@
 """Module allowing connexion to overpass api and fectch data from OSM."""
 import os
+import time
 import urllib
 import math
 import logging
@@ -59,19 +60,17 @@ class Overpass :
     return Overpass._filter_landmarks(cached_responses, bbox)

 # If there is no cached data, fetch all from Overpass.
-elif not cached_responses :
+if not cached_responses :
     query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)
     self.logger.debug(f'Query string: {query_str}')
     return self.fetch_data_from_api(query_str)

-# Hybrid cache: some data from Overpass, some data from cache.
-else :
-    # Resize the bbox for smaller search area and build new query string.
-    non_cached_bbox = Overpass._get_non_cached_bbox(non_cached_cells, bbox)
-    query_str = Overpass.build_query(non_cached_bbox, osm_types, selector, conditions, out)
-    self.logger.debug(f'Query string: {query_str}')
-    non_cached_responses = self.fetch_data_from_api(query_str)
-    return Overpass._filter_landmarks(cached_responses, bbox) + non_cached_responses
+# Resize the bbox for smaller search area and build new query string.
+non_cached_bbox = Overpass._get_non_cached_bbox(non_cached_cells, bbox)
+query_str = Overpass.build_query(non_cached_bbox, osm_types, selector, conditions, out)
+self.logger.debug(f'Query string: {query_str}')
+non_cached_responses = self.fetch_data_from_api(query_str)
+return Overpass._filter_landmarks(cached_responses, bbox) + non_cached_responses
 def fetch_data_from_api(self, query_str: str) -> List[dict]:
@@ -96,9 +95,10 @@
     return elements

 except urllib.error.URLError as e:
-    self.logger.error(f"Error connecting to Overpass API: {e}")
-    raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
+    self.logger.error(f"Error connecting to Overpass API: {str(e)}")
+    raise ConnectionError(f"Error connecting to Overpass API: {str(e)}") from e
 except Exception as exc :
+    self.logger.error(f"unexpected error while fetching data from Overpass: {str(exc)}")
     raise Exception(f'An unexpected error occured: {str(exc)}') from exc
@@ -114,7 +114,7 @@
 with urllib.request.urlopen(request) as response:
     # Convert the HTTPResponse to a string and load data
     response_data = response.read().decode('utf-8')
     data = json.loads(response_data)

     # Get elements and set cache
@@ -122,7 +122,7 @@
     self.caching_strategy.set(cache_key, elements)
     self.logger.debug(f'Cache set for {cache_key}')
 except urllib.error.URLError as e:
-    raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
+    raise ConnectionError(f"Error connecting to Overpass API: {str(e)}") from e
 except Exception as exc :
     raise Exception(f'An unexpected error occured: {str(exc)}') from exc
@@ -153,7 +153,7 @@
 - If no conditions are provided, the query will just use the `selector` to filter the OSM
   elements without additional constraints.
 """
-query = '[out:json];('
+query = '[out:json][timeout:20];('

 # convert the bbox to string.
 bbox_str = f"({','.join(map(str, bbox))})"
@@ -309,9 +309,9 @@
 if min_lat == float('inf') or min_lon == float('inf'):
     return None

 return (max(min_lat, original_bbox[0]),
         max(min_lon, original_bbox[1]),
         min(max_lat, original_bbox[2]),
         min(max_lon, original_bbox[3]))
@@ -388,8 +388,8 @@ def get_base_info(elem: dict, osm_type: OSM_TYPES, with_name=False) :
 if with_name :
     name = elem.get('tags', {}).get('name')
     return osm_id, coords, name
-else :
-    return osm_id, coords
+return osm_id, coords
 def fill_cache():
@@ -399,18 +399,25 @@ def fill_cache():
 """
 overpass = Overpass()
+n_files = 0
+total = 0

 with os.scandir(OSM_CACHE_DIR) as it:
     for entry in it:
         if entry.is_file() and entry.name.startswith('hollow_'):
+            total += 1
             try :
                 # Read the whole file content as a string
-                with open(entry.path, 'r') as f:
+                with open(entry.path, 'r', encoding='utf-8') as f:
                     # load data and fill the cache with the query and key
                     json_data = json.load(f)
                     overpass.fill_cache(json_data)
+                    n_files += 1
+                    time.sleep(1)
                 # Now delete the file as the cache is filled
                 os.remove(entry.path)
             except Exception as exc :
-                overpass.logger.error(f'An error occured while parsing file {entry.path} as .json file')
+                overpass.logger.error(f'An error occured while parsing file {entry.path} as .json file: {str(exc)}')
+overpass.logger.info(f"Successfully filled {n_files}/{total} cache files.")

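The `[timeout:20]` header added to `build_query` caps how long the Overpass server itself works on a query. A hypothetical minimal query in the same shape (the `nwr` statement and `out center` verb are standard Overpass QL, not copied from `build_query`):

```python
def toy_query(bbox: tuple) -> str:
    # Same bbox formatting as the bbox_str line shown in the hunk above.
    bbox_str = f"({','.join(map(str, bbox))})"
    return '[out:json][timeout:20];(' + f'nwr{bbox_str};' + ');out center;'

print(toy_query((45.74, 4.81, 45.77, 4.86)))
# [out:json][timeout:20];(nwr(45.74,4.81,45.77,4.86););out center;
```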
View File

@@ -72,6 +72,7 @@ sightseeing:
   # - castle
   # - museum
+
 museums:
   tourism:
     - museum

View File

@@ -6,4 +6,4 @@ max_landmarks_refiner: 20
 overshoot: 0.0016
 time_limit: 1
 gap_rel: 0.025
-max_iter: 40
+max_iter: 80

View File

@@ -1,8 +1,7 @@
 """Definition of the Landmark class to handle visitable objects across the world."""
 from typing import Optional, Literal
 from uuid import uuid4, UUID
-from pydantic import BaseModel, ConfigDict, Field
+from pydantic import BaseModel, Field

 # Output to frontend
@@ -50,7 +49,8 @@ class Landmark(BaseModel) :
 image_url : Optional[str] = None
 website_url : Optional[str] = None
 wiki_url : Optional[str] = None
-description : Optional[str] = None # TODO future
+keywords: Optional[dict] = {}
+description : Optional[str] = None
 duration : Optional[int] = 5
 name_en : Optional[str] = None
@@ -69,6 +69,7 @@ class Landmark(BaseModel) :
 is_viewpoint : Optional[bool] = False
 is_place_of_worship : Optional[bool] = False
+
 def __str__(self) -> str:
     """
     String representation of the Landmark object.
@@ -122,26 +123,3 @@ class Landmark(BaseModel) :
     return (self.uuid == value.uuid or
             self.osm_id == value.osm_id or
             (self.name == value.name and self.distance(value) < 0.001))
-
-class Toilets(BaseModel) :
-    """
-    Model for toilets. When false/empty the information is either false either not known.
-    """
-    location : tuple
-    wheelchair : Optional[bool] = False
-    changing_table : Optional[bool] = False
-    fee : Optional[bool] = False
-    opening_hours : Optional[str] = ""
-
-    def __str__(self) -> str:
-        """
-        String representation of the Toilets object.
-
-        Returns:
-            str: A formatted string with the toilets location.
-        """
-        return f'Toilets @{self.location}'
-
-    model_config = ConfigDict(from_attributes=True)

View File

@@ -0,0 +1,26 @@
"""Definition of the Toilets class."""
from typing import Optional
from pydantic import BaseModel, ConfigDict


class Toilets(BaseModel) :
    """
    Model for toilets. When false/empty the information is either false either not known.
    """
    location : tuple
    wheelchair : Optional[bool] = False
    changing_table : Optional[bool] = False
    fee : Optional[bool] = False
    opening_hours : Optional[str] = ""

    def __str__(self) -> str:
        """
        String representation of the Toilets object.

        Returns:
            str: A formatted string with the toilets location.
        """
        return f'Toilets @{self.location}'

    model_config = ConfigDict(from_attributes=True)

View File

@@ -46,8 +46,6 @@ def test_turckheim(client, request):    # pylint: disable=redefined-outer-name
 # Add details to report
 log_trip_details(request, landmarks, result['total_time'], duration_minutes)
-# for elem in landmarks :
-#     print(elem)

 # checks :
 assert response.status_code == 200  # check for successful planning
@@ -342,4 +340,4 @@ def test_shopping(client, request) :    # pylint: disable=redefined-outer-name
 assert response.status_code == 200  # check for successful planning
 assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
 assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
 assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"

View File

@@ -3,7 +3,7 @@
 from fastapi.testclient import TestClient
 import pytest

-from ..structs.landmark import Toilets
+from ..structs.toilets import Toilets
 from ..main import app

View File

@@ -1,7 +1,6 @@
 """Helper methods for testing."""
 import logging
 from fastapi import HTTPException
-from pydantic import ValidationError

 from ..structs.landmark import Landmark
 from ..cache import client as cache_client
@@ -39,7 +38,7 @@ def fetch_landmark(landmark_uuid: str):
 try:
     landmark = cache_client.get(f'landmark_{landmark_uuid}')
     if not landmark :
-        logger.warning(f'Cache miss for landmark UUID: {landmark_uuid}')
+        logger.error(f'Cache miss for landmark UUID: {landmark_uuid}')
         raise HTTPException(status_code=404, detail=f'Landmark with UUID {landmark_uuid} not found in cache.')

 # Validate that the fetched data is a dictionary

View File

View File

@@ -0,0 +1,38 @@
"""Defines the endpoint for fetching toilet locations."""
from fastapi import HTTPException, APIRouter, Query

from ..structs.toilets import Toilets
from .toilets_manager import ToiletsManager


# Define the API router
router = APIRouter()


@router.post("/toilets/new")
def get_toilets(location: tuple[float, float] = Query(...), radius: int = 500) -> list[Toilets] :
    """
    Endpoint to find toilets within a specified radius from a given location.
    This endpoint expects the `location` and `radius` as **query parameters**, not in the request body.

    Args:
        location (tuple[float, float]): The latitude and longitude of the location to search from.
        radius (int, optional): The radius (in meters) within which to search for toilets. Defaults to 500 meters.

    Returns:
        list[Toilets]: A list of Toilets objects that meet the criteria.
    """
    if location is None:
        raise HTTPException(status_code=406, detail="Coordinates not provided or invalid")
    if not (-90 <= location[0] <= 90 or -180 <= location[1] <= 180):
        raise HTTPException(status_code=422, detail="Start coordinates not in range")

    toilets_manager = ToiletsManager(location, radius)
    try :
        toilets_list = toilets_manager.generate_toilet_list()
    except KeyError as exc:
        raise HTTPException(status_code=404, detail="No toilets found") from exc

    return toilets_list

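Since the docstring stresses that `location` and `radius` travel as query parameters, a hedged call sketch with FastAPI's `TestClient` (the import path for `app` is assumed from the test files elsewhere in this diff):

```python
from fastapi.testclient import TestClient
from ..main import app  # as imported in test_toilets.py

client = TestClient(app)
response = client.post(
    "/toilets/new",
    # A list value produces repeated query params (location=45.76&location=4.83),
    # which FastAPI parses into the tuple[float, float] parameter.
    params={"location": [45.76, 4.83], "radius": 500},
)
assert response.status_code == 200  # JSON list of Toilets objects on success
```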
View File

@@ -2,8 +2,8 @@
 import logging

 from ..overpass.overpass import Overpass, get_base_info
-from ..structs.landmark import Toilets
+from ..structs.toilets import Toilets
-from .utils import create_bbox
+from ..utils.bbox import create_bbox

 # silence the overpass logger
@@ -65,7 +65,7 @@ class ToiletsManager:
 try:
     result = self.overpass.fetch_data_from_api(query_str=query)
 except Exception as e:
-    self.logger.error(f"Error fetching landmarks: {e}")
+    self.logger.error(f"Error fetching toilets: {e}")
     return None

 toilets_list = self.to_toilets(result)

View File

@@ -24,4 +24,4 @@ def create_bbox(coords: tuple[float, float], radius: int):
 lon_min = lon - d_lon * 180 / m.pi
 lon_max = lon + d_lon * 180 / m.pi

 return (lat_min, lon_min, lat_max, lon_max)

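For reference, a self-contained sketch of the full computation this file performs (only the final lines appear in the hunk; the earlier lines and the Earth-radius constant are reconstructed under the small-angle, spherical-Earth assumption):

```python
import math as m

EARTH_RADIUS = 6378137  # meters; assumed value, not shown in the diff

def create_bbox(coords: tuple[float, float], radius: int):
    """Return a (lat_min, lon_min, lat_max, lon_max) box of ~radius meters."""
    lat, lon = coords
    d_lat = radius / EARTH_RADIUS                              # angular offset in radians
    d_lon = radius / (EARTH_RADIUS * m.cos(m.radians(lat)))    # widens with latitude
    lat_min = lat - d_lat * 180 / m.pi
    lat_max = lat + d_lat * 180 / m.pi
    lon_min = lon - d_lon * 180 / m.pi
    lon_max = lon + d_lon * 180 / m.pi
    return (lat_min, lon_min, lat_max, lon_max)

print(create_bbox((45.76, 4.83), 300))  # ~300 m box around central Lyon
```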
48
status Normal file
View File

@@ -0,0 +1,48 @@
error: wrong number of arguments, should be from 1 to 2
usage: git config [<options>]
Config file location
--[no-]global use global config file
--[no-]system use system config file
--[no-]local use repository config file
--[no-]worktree use per-worktree config file
-f, --[no-]file <file>
use given config file
--[no-]blob <blob-id> read config from given blob object
Action
--[no-]get get value: name [value-pattern]
--[no-]get-all get all values: key [value-pattern]
--[no-]get-regexp get values for regexp: name-regex [value-pattern]
--[no-]get-urlmatch get value specific for the URL: section[.var] URL
--[no-]replace-all replace all matching variables: name value [value-pattern]
--[no-]add add a new variable: name value
--[no-]unset remove a variable: name [value-pattern]
--[no-]unset-all remove all matches: name [value-pattern]
--[no-]rename-section rename section: old-name new-name
--[no-]remove-section remove a section: name
-l, --[no-]list list all
--[no-]fixed-value use string equality when comparing values to 'value-pattern'
-e, --[no-]edit open an editor
--[no-]get-color find the color configured: slot [default]
--[no-]get-colorbool find the color setting: slot [stdout-is-tty]
Type
-t, --[no-]type <type>
value is given this type
--bool value is "true" or "false"
--int value is decimal number
--bool-or-int value is --bool or --int
--bool-or-str value is --bool or string
--path value is a path (file or directory name)
--expiry-date value is an expiry date
Other
-z, --[no-]null terminate values with NUL byte
--[no-]name-only show variable names only
--[no-]includes respect include directives on lookup
--[no-]show-origin show origin of config (file, standard input, blob, command line)
--[no-]show-scope show scope of config (worktree, local, global, system, command)
--[no-]default <value>
with --get, use default value when missing entry