amazing cache #55

Merged
kscheidecker merged 22 commits from backend/grid-based-cache into main 2025-01-30 12:40:36 +00:00
6 changed files with 17 additions and 23 deletions
Showing only changes of commit 4a904c3d3c - Show all commits

File diff suppressed because one or more lines are too long

View File

@@ -135,8 +135,6 @@ def new_trip(preferences: Preferences,
     trip = Trip.from_linked_landmarks(linked_tour, cache_client)
     logger.info(f'Generated a trip of {trip.total_time} minutes with {len(refined_tour)} landmarks in {round(t_generate_landmarks + t_first_stage + t_second_stage,3)} seconds.')
-    background_tasks = BackgroundTasks(fill_cache())
     return trip
@@ -154,6 +152,7 @@ def get_trip(trip_uuid: str) -> Trip:
     """
     try:
         trip = cache_client.get(f"trip_{trip_uuid}")
+        background_tasks = BackgroundTasks(fill_cache())
         return trip
     except KeyError as exc:
         raise HTTPException(status_code=404, detail="Trip not found") from exc
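
Side note on the BackgroundTasks change above: in FastAPI, a BackgroundTasks object constructed by hand inside the handler is never handed back to the framework, and BackgroundTasks(fill_cache()) evaluates fill_cache() immediately rather than deferring it. A minimal sketch of the conventional pattern, assuming these handlers are ordinary FastAPI routes (the router and decorator below are illustrative, not this repo's actual wiring):

from fastapi import APIRouter, BackgroundTasks, HTTPException

router = APIRouter()  # illustrative only; the real app/router lives elsewhere in this project

@router.get("/trip/{trip_uuid}")
def get_trip(trip_uuid: str, background_tasks: BackgroundTasks) -> Trip:
    """Look up a cached trip, then refresh the Overpass cache after the response is sent."""
    try:
        trip = cache_client.get(f"trip_{trip_uuid}")
        # add_task defers the call until after the response has been returned
        background_tasks.add_task(fill_cache)
        return trip
    except KeyError as exc:
        raise HTTPException(status_code=404, detail="Trip not found") from exc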

View File

@@ -98,7 +98,7 @@ class Overpass :
-    def fill_cache(self, json_data) :
+    def fill_cache(self, json_data: list) :
         """
         Fill cache with data by using a hollow cache entry's information.
         """
@@ -194,7 +194,7 @@ class Overpass :
     @staticmethod
-    def _build_query_from_hollow(json_data):
+    def _build_query_from_hollow(json_data: list):
         """
         Build query string using information from a hollow cache entry.
         """
@@ -322,7 +322,7 @@ def get_base_info(elem: dict, osm_type: OSM_TYPES, with_name=False) :
 def fill_cache():
-    overpass = Overpass(caching_strategy='JSON', cache_dir=OSM_CACHE_DIR)
+    overpass = Overpass()
     with os.scandir(OSM_CACHE_DIR) as it:
         for entry in it:
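
For context, this hunk only shows the os.scandir scaffolding of the module-level fill_cache(). A rough sketch of what the loop body might do, given only the fill_cache(self, json_data: list) signature earlier in this file; the 'hollow_' filename marker, the JSON loading and the cleanup step are assumptions for illustration, not the PR's actual logic:

import json
import os

def fill_cache():
    overpass = Overpass()
    with os.scandir(OSM_CACHE_DIR) as it:
        for entry in it:
            # hypothetical marker for placeholder ("hollow") cache entries
            if entry.is_file() and entry.name.startswith('hollow_'):
                with open(entry.path, encoding='utf-8') as f:
                    json_data = json.load(f)
                # hand the hollow entry to the instance-level method shown above
                overpass.fill_cache(json_data)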

View File

@@ -334,8 +334,8 @@ def test_shopping(client, request) :   # pylint: disable=redefined-outer-name
     # Add details to report
     log_trip_details(request, landmarks, result['total_time'], duration_minutes)
-    for elem in landmarks :
-        print(elem)
+    # for elem in landmarks :
+    #     print(elem)
     # checks :
     assert response.status_code == 200  # check for successful planning

View File

@@ -1,10 +1,7 @@
 """Module used to import data from OSM and arrange them in categories."""
 import logging
-import math as m
-import xml.etree.ElementTree as ET
 import yaml
 from ..structs.preferences import Preferences
 from ..structs.landmark import Landmark
 from .take_most_important import take_most_important
@@ -204,18 +201,18 @@ class LandmarkManager:
                 self.logger.error(f"Error fetching landmarks: {e}")
                 continue
-            return_list += self.json_to_landmarks(result, landmarktype, preference_level)
+            return_list += self._to_landmarks(result, landmarktype, preference_level)
         self.logger.debug(f"Fetched {len(return_list)} landmarks of type {landmarktype} in {bbox}")
         return return_list
 
-    def json_to_landmarks(self, elements: list, landmarktype, preference_level) -> list[Landmark]:
+    def _to_landmarks(self, elements: list, landmarktype, preference_level) -> list[Landmark]:
         """
         Parse the Overpass API result and extract landmarks.
-        This method processes the XML root element returned by the Overpass API and
+        This method processes the JSON elements returned by the Overpass API and
         extracts landmarks of types 'node', 'way', and 'relation'. It retrieves
         relevant information such as name, coordinates, and tags, and converts them
         into Landmark objects.
@@ -225,9 +222,8 @@ class LandmarkManager:
             elem_type (str): The type of landmark (e.g., node, way, relation).
         Returns:
-            list[Landmark]: A list of Landmark objects extracted from the XML data.
+            list[Landmark]: A list of Landmark objects extracted from the JSON data.
         """
-        print(f'in landmarks manager : {type(elements)}')
         if elements is None :
             return []
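
The renamed _to_landmarks helper now consumes Overpass JSON elements directly. A hedged sketch of the conversion described by its docstring, assuming get_base_info returns an (osm_id, coords, name) triple and that Landmark accepts the keyword fields used below; both are illustrative guesses, not the method's verified body:

def _to_landmarks(self, elements: list, landmarktype, preference_level) -> list[Landmark]:
    landmarks = []
    if elements is None:
        return landmarks
    for elem in elements:
        osm_type = elem.get('type')  # 'node', 'way' or 'relation'
        # assumed return order; the real helper is defined in overpass.py
        osm_id, coords, name = get_base_info(elem, osm_type, with_name=True)
        if name is None or coords is None:
            continue
        # field names on Landmark are assumptions for this sketch
        landmarks.append(Landmark(name=name,
                                  location=coords,
                                  osm_type=osm_type,
                                  osm_id=osm_id,
                                  type=landmarktype,
                                  attractiveness=preference_level))
    return landmarks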

View File

@@ -1,6 +1,5 @@
 """Module for finding public toilets around given coordinates."""
 import logging
-import xml.etree.ElementTree as ET
 from ..overpass.overpass import Overpass, get_base_info
 from ..structs.landmark import Toilets
@@ -88,7 +87,7 @@ class ToiletsManager:
             elem_type (str): The type of landmark (e.g., node, way, relation).
         Returns:
-            list[Landmark]: A list of Landmark objects extracted from the XML data.
+            list[Landmark]: A list of Landmark objects extracted from the JSON data.
         """
         if elements is None :
             return []