better logs

This commit is contained in:
Helldragon67 2025-01-09 16:58:38 +01:00
parent a0a3d76b78
commit 11bbf34375
7 changed files with 60 additions and 68 deletions

@ -1,6 +1,7 @@
"""Main app for backend api""" """Main app for backend api"""
import logging import logging
import time
from fastapi import FastAPI, HTTPException, Query from fastapi import FastAPI, HTTPException, Query
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
@ -81,6 +82,7 @@ def new_trip(preferences: Preferences,
must_do=True, must_do=True,
n_tags=0) n_tags=0)
start_time = time.time()
# Generate the landmarks from the start location # Generate the landmarks from the start location
landmarks, landmarks_short = manager.generate_landmarks_list( landmarks, landmarks_short = manager.generate_landmarks_list(
center_coordinates = start, center_coordinates = start,
@ -91,6 +93,9 @@ def new_trip(preferences: Preferences,
landmarks_short.insert(0, start_landmark) landmarks_short.insert(0, start_landmark)
landmarks_short.append(end_landmark) landmarks_short.append(end_landmark)
t_generate_landmarks = time.time() - start_time
start_time = time.time()
# First stage optimization # First stage optimization
try: try:
base_tour = optimizer.solve_optimization(preferences.max_time_minute, landmarks_short) base_tour = optimizer.solve_optimization(preferences.max_time_minute, landmarks_short)
@ -99,11 +104,20 @@ def new_trip(preferences: Preferences,
except TimeoutError as exc: except TimeoutError as exc:
raise HTTPException(status_code=500, detail="Optimization took too long") from exc raise HTTPException(status_code=500, detail="Optimization took too long") from exc
t_first_stage = time.time() - start_time
start_time = time.time()
# Second stage optimization # Second stage optimization
refined_tour = refiner.refine_optimization(landmarks, base_tour, refined_tour = refiner.refine_optimization(landmarks, base_tour,
preferences.max_time_minute, preferences.max_time_minute,
preferences.detour_tolerance_minute) preferences.detour_tolerance_minute)
t_second_stage = time.time() - start_time
logger.debug(f'Generating landmarks : {round(t_generate_landmarks,3)} seconds')
logger.debug(f'First stage optimization : {round(t_first_stage,3)} seconds')
logger.debug(f'Second stage optimization : {round(t_second_stage,3)} seconds')
logger.info(f'Total computation time : {round(t_generate_landmarks + t_first_stage + t_second_stage,3)} seconds')
linked_tour = LinkedLandmarks(refined_tour) linked_tour = LinkedLandmarks(refined_tour)
# upon creation of the trip, persistence of both the trip and its landmarks is ensured. # upon creation of the trip, persistence of both the trip and its landmarks is ensured.
trip = Trip.from_linked_landmarks(linked_tour, cache_client) trip = Trip.from_linked_landmarks(linked_tour, cache_client)

@ -1,7 +1,7 @@
"""Collection of tests to ensure correct implementation and track progress. """ """Collection of tests to ensure correct implementation and track progress. """
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
import pytest import pytest, time
from .test_utils import landmarks_to_osmid, load_trip_landmarks, log_trip_details from .test_utils import landmarks_to_osmid, load_trip_landmarks, log_trip_details
from ..main import app from ..main import app
@ -20,7 +20,9 @@ def test_turckheim(client, request): # pylint: disable=redefined-outer-name
client: client:
request: request:
""" """
start_time = time.time() # Start timer
duration_minutes = 15 duration_minutes = 15
response = client.post( response = client.post(
"/trip/new", "/trip/new",
json={ json={
@ -35,6 +37,9 @@ def test_turckheim(client, request): # pylint: disable=redefined-outer-name
result = response.json() result = response.json()
landmarks = load_trip_landmarks(client, result['first_landmark_uuid']) landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])
# Get computation time
comp_time = time.time() - start_time
# Add details to report # Add details to report
log_trip_details(request, landmarks, result['total_time'], duration_minutes) log_trip_details(request, landmarks, result['total_time'], duration_minutes)
@ -43,6 +48,7 @@ def test_turckheim(client, request): # pylint: disable=redefined-outer-name
assert isinstance(landmarks, list) # check that the return type is a list assert isinstance(landmarks, list) # check that the return type is a list
assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2 assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
assert len(landmarks) > 2 # check that there is something to visit assert len(landmarks) > 2 # check that there is something to visit
assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
def test_bellecour(client, request) : # pylint: disable=redefined-outer-name def test_bellecour(client, request) : # pylint: disable=redefined-outer-name
@ -53,7 +59,9 @@ def test_bellecour(client, request) : # pylint: disable=redefined-outer-name
client: client:
request: request:
""" """
start_time = time.time() # Start timer
duration_minutes = 120 duration_minutes = 120
response = client.post( response = client.post(
"/trip/new", "/trip/new",
json={ json={
@ -67,7 +75,9 @@ def test_bellecour(client, request) : # pylint: disable=redefined-outer-name
) )
result = response.json() result = response.json()
landmarks = load_trip_landmarks(client, result['first_landmark_uuid']) landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])
osm_ids = landmarks_to_osmid(landmarks)
# Get computation time
comp_time = time.time() - start_time
# Add details to report # Add details to report
log_trip_details(request, landmarks, result['total_time'], duration_minutes) log_trip_details(request, landmarks, result['total_time'], duration_minutes)
@ -79,8 +89,7 @@ def test_bellecour(client, request) : # pylint: disable=redefined-outer-name
# checks : # checks :
assert response.status_code == 200 # check for successful planning assert response.status_code == 200 # check for successful planning
assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2 assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
assert 136200148 in osm_ids # check for Cathédrale St. Jean in trip assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
# assert response.status_code == 2000 # check for successful planning
@ -92,7 +101,9 @@ def test_shopping(client, request) : # pylint: disable=redefined-outer-name
client: client:
request: request:
""" """
start_time = time.time() # Start timer
duration_minutes = 240 duration_minutes = 240
response = client.post( response = client.post(
"/trip/new", "/trip/new",
json={ json={
@ -107,12 +118,16 @@ def test_shopping(client, request) : # pylint: disable=redefined-outer-name
result = response.json() result = response.json()
landmarks = load_trip_landmarks(client, result['first_landmark_uuid']) landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])
# Get computation time
comp_time = time.time() - start_time
# Add details to report # Add details to report
log_trip_details(request, landmarks, result['total_time'], duration_minutes) log_trip_details(request, landmarks, result['total_time'], duration_minutes)
# checks : # checks :
assert response.status_code == 200 # check for successful planning assert response.status_code == 200 # check for successful planning
assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2 assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
# def test_new_trip_single_prefs(client): # def test_new_trip_single_prefs(client):
# response = client.post( # response = client.post(

@ -34,27 +34,26 @@ def fetch_landmark(client, landmark_uuid: str):
dict: Landmark data fetched from the API. dict: Landmark data fetched from the API.
""" """
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)
response = client.get(f"/landmark/{landmark_uuid}") response = client.get(f'/landmark/{landmark_uuid}')
if response.status_code != 200: if response.status_code != 200:
raise HTTPException(status_code=500, raise HTTPException(status_code=500,
detail=f"Failed to fetch landmark with UUID {landmark_uuid}: {response.status_code}") detail=f'Failed to fetch landmark with UUID {landmark_uuid}: {response.status_code}')
try: try:
json_data = response.json() json_data = response.json()
logger.info(f"API Response: {json_data}") logger.info(f'API Response: {json_data}')
except ValueError as e: except ValueError as e:
logger.error(f"Failed to parse response as JSON: {response.text}") logger.error(f'Failed to parse response as JSON: {response.text}')
raise HTTPException(status_code=500, detail="Invalid response format from API") raise HTTPException(status_code=500, detail="Invalid response format from API")
# Try validating against the Landmark model here to ensure consistency # Try validating against the Landmark model here to ensure consistency
try: try:
landmark = Landmark(**json_data) landmark = Landmark(**json_data)
except ValidationError as ve: except ValidationError as ve:
logging.error(f"Validation error: {ve}") logging.error(f'Validation error: {ve}')
raise HTTPException(status_code=500, detail="Invalid data format received from API") raise HTTPException(status_code=500, detail="Invalid data format received from API")
if "detail" in json_data: if "detail" in json_data:
raise HTTPException(status_code=500, detail=json_data["detail"]) raise HTTPException(status_code=500, detail=json_data["detail"])
@ -75,25 +74,23 @@ def fetch_landmark_cache(landmark_uuid: str):
# Try to fetch the landmark data from the cache # Try to fetch the landmark data from the cache
try: try:
landmark = cache_client.get(f"landmark_{landmark_uuid}") landmark = cache_client.get(f'landmark_{landmark_uuid}')
if not landmark : if not landmark :
logger.warning(f"Cache miss for landmark UUID: {landmark_uuid}") logger.warning(f'Cache miss for landmark UUID: {landmark_uuid}')
raise HTTPException(status_code=404, detail=f"Landmark with UUID {landmark_uuid} not found in cache.") raise HTTPException(status_code=404, detail=f'Landmark with UUID {landmark_uuid} not found in cache.')
# Validate that the fetched data is a dictionary # Validate that the fetched data is a dictionary
if not isinstance(landmark, Landmark): if not isinstance(landmark, Landmark):
logger.error(f"Invalid cache data format for landmark UUID: {landmark_uuid}. Expected dict, got {type(landmark).__name__}.") logger.error(f'Invalid cache data format for landmark UUID: {landmark_uuid}. Expected dict, got {type(landmark).__name__}.')
raise HTTPException(status_code=500, detail="Invalid cache data format.") raise HTTPException(status_code=500, detail="Invalid cache data format.")
return landmark return landmark
except Exception as exc: except Exception as exc:
logger.error(f"Unexpected error occurred while fetching landmark UUID {landmark_uuid}: {exc}") logger.error(f'Unexpected error occurred while fetching landmark UUID {landmark_uuid}: {exc}')
raise HTTPException(status_code=500, detail="An unexpected error occurred while fetching the landmark from the cache") from exc raise HTTPException(status_code=500, detail="An unexpected error occurred while fetching the landmark from the cache") from exc
def load_trip_landmarks(client, first_uuid: str, from_cache=None) -> list[Landmark]: def load_trip_landmarks(client, first_uuid: str, from_cache=None) -> list[Landmark]:
""" """
Load all landmarks for a trip using the response from the API. Load all landmarks for a trip using the response from the API.
@ -129,7 +126,7 @@ def log_trip_details(request, landmarks: list[Landmark], duration: int, target_d
duration (int): the total duration of this trip duration (int): the total duration of this trip
target_duration(int): the target duration of this trip target_duration(int): the target duration of this trip
""" """
trip_string = [f"{landmark.name} ({landmark.attractiveness} | {landmark.duration}) - {landmark.time_to_reach_next}" for landmark in landmarks] trip_string = [f'{landmark.name} ({landmark.attractiveness} | {landmark.duration}) - {landmark.time_to_reach_next}' for landmark in landmarks]
# Pass additional info to pytest for reporting # Pass additional info to pytest for reporting
request.node.trip_details = trip_string request.node.trip_details = trip_string

@ -212,9 +212,6 @@ class LandmarkManager:
for sel in dict_to_selector_list(amenity_selector): for sel in dict_to_selector_list(amenity_selector):
self.logger.debug(f"Current selector: {sel}") self.logger.debug(f"Current selector: {sel}")
# query_conditions = ['count_tags()>5']
# if landmarktype == 'shopping' : # use this later for shopping clusters
# element_types = ['node']
element_types = ['way', 'relation'] element_types = ['way', 'relation']
if 'viewpoint' in sel : if 'viewpoint' in sel :
@ -269,7 +266,7 @@ class LandmarkManager:
tag_values = set(elem.tags().values()) # Store tag values tag_values = set(elem.tags().values()) # Store tag values
# Use simple tags : # Retrieve image, name and website :
image_url = elem.tag('image') image_url = elem.tag('image')
website_url = elem.tag('website') website_url = elem.tag('website')
if website_url is None : if website_url is None :
@ -279,6 +276,9 @@ class LandmarkManager:
if elem_type != "nature" and elem.tag('leisure') == "park": if elem_type != "nature" and elem.tag('leisure') == "park":
elem_type = "nature" elem_type = "nature"
if elem.tag('wikipedia') is not None :
score += self.wikipedia_bonus
# Skip element if it is an administrative boundary or a disused thing or it is an appartement and useless amenities # Skip element if it is an administrative boundary or a disused thing or it is an appartement and useless amenities
if elem.tag('boundary') is not None or elem.tag('disused') is not None: if elem.tag('boundary') is not None or elem.tag('disused') is not None:
continue continue
@ -297,20 +297,16 @@ class LandmarkManager:
# do not count the building description as being particularly useful # do not count the building description as being particularly useful
n_tags -= 1 n_tags -= 1
if "wiki" in tag_key: # if landmarktype != "shopping":
# wikipedia entries count more # if "shop" in tag_key:
score += self.wikipedia_bonus # skip = True
# break
if landmarktype != "shopping":
if "shop" in tag_key:
skip = True
break
# if tag_key == "building" and elem.tag('building') in ['retail', 'supermarket', 'parking']: # if tag_key == "building" and elem.tag('building') in ['retail', 'supermarket', 'parking']:
# skip = True # skip = True
# break # break
if skip: # if skip:
continue # continue
score = score_function(score) score = score_function(score)

@ -9,9 +9,6 @@ from .get_time_separation import get_time
from ..constants import OPTIMIZER_PARAMETERS_PATH from ..constants import OPTIMIZER_PARAMETERS_PATH
class Optimizer: class Optimizer:
logger = logging.getLogger(__name__) logger = logging.getLogger(__name__)

@ -333,7 +333,7 @@ class Refiner :
minor_landmarks = self.get_minor_landmarks(all_landmarks, base_tour, self.detour_corridor_width) minor_landmarks = self.get_minor_landmarks(all_landmarks, base_tour, self.detour_corridor_width)
self.logger.info(f"Using {len(minor_landmarks)} minor landmarks around the predicted path") self.logger.debug(f"Using {len(minor_landmarks)} minor landmarks around the predicted path")
# Full set of visitable landmarks. # Full set of visitable landmarks.
full_set = self.integrate_landmarks(minor_landmarks, base_tour) # could probably be optimized with less overhead full_set = self.integrate_landmarks(minor_landmarks, base_tour) # could probably be optimized with less overhead

@ -1,27 +0,0 @@
"""Ad-hoc exploration script: query Overpass for historic buildings.

Fetches every OSM way tagged "historic"="building" inside a bounding box
(around Lyon) and prints the value of a nonexistent tag on one element,
to observe how the API behaves when a tag is missing.
"""
from OSMPythonTools.overpass import Overpass, overpassQueryBuilder

overpass = Overpass()

# NOTE: `selector` accepts a list, but a list produces the INTERSECTION
# of the conditions; we want a union, so only a single selector is used.
query = overpassQueryBuilder(
    bbox=(45.7300, 4.7900, 45.8000, 4.8600),
    elementType=['way'],
    selector='"historic"="building"',
    includeCenter=True,
    out='body',
)

result = overpass.query(query)

# Inspect a single element instead of iterating over all of them.
element = result.elements()[1]
element_tags = element.tags()
# Deliberately look up a tag key that does not exist ('sgehs').
missing_value = element.tag('sgehs')
print(missing_value)