new test against cache
Some checks failed
Run testing on the backend code / Build (pull_request) Failing after 1m29s
Build and deploy the backend to staging / Build and push image (pull_request) Failing after 41s
Build and deploy the backend to staging / Deploy to staging (pull_request) Has been skipped
Run linting on the backend code / Build (pull_request) Successful in 27s

This commit is contained in:
2024-12-13 09:01:01 +01:00
parent f0873ff313
commit b1b09ccf58
3 changed files with 98 additions and 15 deletions

@@ -2,8 +2,10 @@
import logging
from typing import List
from fastapi import HTTPException
from pydantic import ValidationError
from ..structs.landmark import Landmark
from ..persistence import client as cache_client

def landmarks_to_osmid(landmarks: List[Landmark]) -> List[int]:
@@ -42,18 +44,58 @@ def fetch_landmark(client, landmark_uuid: str):
    try:
        json_data = response.json()
        logger.info(f"API Response: {json_data}")
        print(f"API Response of type {type(json_data)} in json format: {json_data}")
    except ValueError as e:
        logger.error(f"Failed to parse response as JSON: {response.text}")
        raise HTTPException(status_code=500, detail="Invalid response format from API")

    # Try validating against the Landmark model here to ensure consistency
    try:
        landmark = Landmark(**json_data)
    except ValidationError as ve:
        logger.error(f"Validation error: {ve}")
        raise HTTPException(status_code=500, detail="Invalid data format received from API")

    if "detail" in json_data:
        raise HTTPException(status_code=500, detail=json_data["detail"])

    return json_data
    return Landmark(**json_data)


def load_trip_landmarks(client, first_uuid: str) -> List[Landmark]:


def fetch_landmark_cache(landmark_uuid: str):
    """
    Fetch landmark data from the cache based on the landmark UUID.

    Args:
        landmark_uuid (str): The UUID of the landmark.

    Returns:
        Landmark: the landmark fetched from the cache, or raises an HTTP exception.
    """
    logger = logging.getLogger(__name__)

    # Try to fetch the landmark data from the cache
    try:
        landmark = cache_client.get(f"landmark_{landmark_uuid}")
        if not landmark:
            logger.warning(f"Cache miss for landmark UUID: {landmark_uuid}")
            raise HTTPException(status_code=404, detail=f"Landmark with UUID {landmark_uuid} not found in cache.")

        # Validate that the cached value is a Landmark object
        if not isinstance(landmark, Landmark):
            logger.error(f"Invalid cache data format for landmark UUID: {landmark_uuid}. Expected Landmark, got {type(landmark).__name__}.")
            raise HTTPException(status_code=500, detail="Invalid cache data format.")

        return landmark

    except HTTPException:
        # Re-raise the deliberate HTTP errors above instead of masking them as 500s
        raise
    except Exception as exc:
        logger.error(f"Unexpected error occurred while fetching landmark UUID {landmark_uuid}: {exc}")
        raise HTTPException(status_code=500, detail="An unexpected error occurred while fetching the landmark from the cache") from exc


def load_trip_landmarks(client, first_uuid: str, from_cache=None) -> List[Landmark]:
    """
    Load all landmarks for a trip using the response from the API.
@@ -67,14 +109,13 @@ def load_trip_landmarks(client, first_uuid: str) -> List[Landmark]:
    next_uuid = first_uuid
    while next_uuid is not None:
        landmark_data = fetch_landmark(client, next_uuid)
        # # Convert UUIDs to strings explicitly
        # landmark_data = {
        #     key: str(value) if isinstance(value, UUID) else value
        #     for key, value in landmark_data.items()
        # }
        landmarks.append(Landmark(**landmark_data))  # Create Landmark objects
        next_uuid = landmark_data.get('next_uuid')  # Prepare for the next iteration
        if from_cache:
            landmark = fetch_landmark_cache(next_uuid)
        else:
            landmark = fetch_landmark(client, next_uuid)

        landmarks.append(landmark)
        next_uuid = landmark.next_uuid  # Prepare for the next iteration

    return landmarks
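
For context, below is a minimal sketch (not part of the commit) of how the new cache branch could be exercised in a test, in the spirit of the commit title. It assumes the functions above are importable from a module such as backend.utils.landmarks (hypothetical path), that cache_client exposes a memcached-style get(), and that the portion of load_trip_landmarks not shown in this diff only initializes the landmarks list. The fake landmark is a MagicMock(spec=Landmark) so it passes the isinstance check in fetch_landmark_cache without constructing a real Landmark; its next_uuid is set to None so the loop stops after one iteration, and the UUID value is illustrative only.

from unittest import mock

from backend.utils import landmarks as lm  # hypothetical module path


def test_load_trip_landmarks_from_cache():
    # Fake landmark: spec=Landmark makes it pass the isinstance(..., Landmark) check
    fake = mock.MagicMock(spec=lm.Landmark)
    fake.next_uuid = None  # single-landmark trip, so the while loop stops after one fetch

    # Patch the module-level cache client so no real cache backend is needed
    with mock.patch.object(lm, "cache_client") as cache:
        cache.get.return_value = fake
        result = lm.load_trip_landmarks(client=None, first_uuid="abc-123", from_cache=True)

    assert result == [fake]
    cache.get.assert_called_once_with("landmark_abc-123")

If the persistence client also exposes a set(), a test could instead write a real Landmark under the same landmark_{uuid} key and read it back; the mock above simply keeps the sketch independent of both the cache backend and the Landmark model's required fields.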