diff --git a/backend/src/landmarks/cluster_manager.py b/backend/src/landmarks/cluster_manager.py
index 38523f6..85f9af5 100644
--- a/backend/src/landmarks/cluster_manager.py
+++ b/backend/src/landmarks/cluster_manager.py
@@ -103,7 +103,7 @@ class ClusterManager:
                 out = out
             )
         except Exception as e:
-            self.logger.error(f"Error fetching clusters: {e}")
+            self.logger.warning(f"Error fetching clusters: {e}")
 
         if result is None :
             self.logger.debug(f"Found no {cluster_type} clusters, overpass query returned no datapoints.")
@@ -242,11 +242,11 @@ class ClusterManager:
                     out = 'ids center tags'
                 )
             except Exception as e:
-                self.logger.error(f"Error fetching clusters: {e}")
+                self.logger.warning(f"Error fetching clusters: {e}")
                 continue
 
             if result is None :
-                self.logger.error(f"Error fetching clusters: {e}")
+                self.logger.warning(f"Error fetching clusters: query result is None")
                 continue
 
             for elem in result:
diff --git a/backend/src/landmarks/landmarks_manager.py b/backend/src/landmarks/landmarks_manager.py
index a02a44f..97258d8 100644
--- a/backend/src/landmarks/landmarks_manager.py
+++ b/backend/src/landmarks/landmarks_manager.py
@@ -197,7 +197,7 @@ class LandmarkManager:
                     out = 'ids center tags'
                 )
             except Exception as e:
-                self.logger.error(f"Error fetching landmarks: {str(e)}")
+                self.logger.warning(f"Error fetching landmarks: {str(e)}")
                 continue
 
             return_list += self._to_landmarks(result, landmarktype, preference_level)
diff --git a/backend/src/main.py b/backend/src/main.py
index 57538f6..51c0196 100644
--- a/backend/src/main.py
+++ b/backend/src/main.py
@@ -114,6 +114,7 @@ def new_trip(preferences: Preferences,
     try:
         base_tour = optimizer.solve_optimization(preferences.max_time_minute, landmarks_short)
     except Exception as exc:
+        logger.error(f"Trip generation failed: {str(exc)}")
         raise HTTPException(status_code=500, detail=f"Optimization failed: {str(exc)}") from exc
 
     t_first_stage = time.time() - start_time
@@ -126,9 +127,10 @@ def new_trip(preferences: Preferences,
                                                  preferences.max_time_minute,
                                                  preferences.detour_tolerance_minute)
     except TimeoutError as te :
-        logger.error(f'Refiner failed : {str(te)} Using base tour.')
+        logger.warning(f'Refiner failed : {str(te)} Using base tour.')
         refined_tour = base_tour
     except Exception as exc :
+        logger.error(f"Trip generation failed: {str(exc)}")
         raise HTTPException(status_code=500, detail=f"An unexpected error occurred: {str(exc)}") from exc
 
     t_second_stage = time.time() - start_time
@@ -164,6 +166,7 @@ def get_trip(trip_uuid: str) -> Trip:
         trip = cache_client.get(f"trip_{trip_uuid}")
         return trip
     except KeyError as exc:
+        logger.error(f"Failed to fetch trip with UUID {trip_uuid}: {str(exc)}")
         raise HTTPException(status_code=404, detail="Trip not found") from exc
 
 
@@ -182,6 +185,7 @@ def get_landmark(landmark_uuid: str) -> Landmark:
         landmark = cache_client.get(f"landmark_{landmark_uuid}")
         return landmark
     except KeyError as exc:
+        logger.error(f"Failed to fetch landmark with UUID {landmark_uuid}: {str(exc)}")
         raise HTTPException(status_code=404, detail="Landmark not found") from exc
 
 
@@ -200,6 +204,7 @@ def update_trip_time(trip_uuid: str, removed_landmark_uuid: str) -> Trip:
     try:
         trip = cache_client.get(f'trip_{trip_uuid}')
     except KeyError as exc:
+        logger.error(f"Failed to update trip with UUID {trip_uuid} (trip not found): {str(exc)}")
         raise HTTPException(status_code=404, detail='Trip not found') from exc
 
     landmarks = []
@@ -214,6 +219,7 @@ def update_trip_time(trip_uuid: str, removed_landmark_uuid: str) -> Trip:
             landmarks.append(landmark)
             next_uuid = landmark.next_uuid # Prepare for the next iteration
        except KeyError as exc:
+            logger.error(f"Failed to update trip with UUID {trip_uuid} : {str(exc)}")
             raise HTTPException(status_code=404, detail=f'landmark {next_uuid} not found') from exc
 
     # Re-link every thing and compute times again
diff --git a/backend/src/optimization/optimizer.py b/backend/src/optimization/optimizer.py
index faa8f01..c51b6c1 100644
--- a/backend/src/optimization/optimizer.py
+++ b/backend/src/optimization/optimizer.py
@@ -597,7 +597,7 @@ class Optimizer:
 
         # Raise error if no solution is found. FIXME: for now this throws the internal server error
         if status != 'Optimal' :
-            self.logger.error("The problem is overconstrained, no solution on first try.")
+            self.logger.warning("The problem is overconstrained, no solution on first try.")
             raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.")
 
         # If there is a solution, we're good to go, just check for connectiveness
@@ -607,7 +607,7 @@ class Optimizer:
         while circles is not None :
             i += 1
             if i == self.max_iter :
-                self.logger.error(f'Timeout: No solution found after {self.max_iter} iterations.')
+                self.logger.warning(f'Timeout: No solution found after {self.max_iter} iterations.')
                 raise TimeoutError(f"Optimization took too long. No solution found after {self.max_iter} iterations.")
 
             for circle in circles :
@@ -617,12 +617,13 @@ class Optimizer:
             try :
                 prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit, gapRel=self.gap_rel))
             except Exception as exc :
+                self.logger.warning(f"No solution found: {str(exc)}")
                 raise Exception(f"No solution found: {str(exc)}") from exc
 
             solution = [pl.value(var) for var in x]
 
             if pl.LpStatus[prob.status] != 'Optimal' :
-                self.logger.error("The problem is overconstrained, no solution after {i} cycles.")
+                self.logger.warning(f"The problem is overconstrained, no solution after {i} cycles.")
                 raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.")
 
             circles = self.is_connected(solution)
diff --git a/backend/src/overpass/overpass.py b/backend/src/overpass/overpass.py
index 2b725ce..330e4d8 100644
--- a/backend/src/overpass/overpass.py
+++ b/backend/src/overpass/overpass.py
@@ -95,9 +95,10 @@ class Overpass :
             return elements
 
         except urllib.error.URLError as e:
-            self.logger.error(f"Error connecting to Overpass API: {str(exc)}")
-            raise ConnectionError(f"Error connecting to Overpass API: {str(exc)}") from e
+            self.logger.error(f"Error connecting to Overpass API: {str(e)}")
+            raise ConnectionError(f"Error connecting to Overpass API: {str(e)}") from e
         except Exception as exc :
+            self.logger.error(f"Unexpected error while fetching data from Overpass: {str(exc)}")
             raise Exception(f'An unexpected error occured: {str(exc)}') from exc
 
 
@@ -121,7 +122,7 @@ class Overpass :
             self.caching_strategy.set(cache_key, elements)
             self.logger.debug(f'Cache set for {cache_key}')
         except urllib.error.URLError as e:
-            raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
+            raise ConnectionError(f"Error connecting to Overpass API: {str(e)}") from e
         except Exception as exc :
             raise Exception(f'An unexpected error occured: {str(exc)}') from exc
 
diff --git a/backend/src/tests/test_utils.py b/backend/src/tests/test_utils.py
index 5f5bf66..f0d93af 100644
--- a/backend/src/tests/test_utils.py
+++ b/backend/src/tests/test_utils.py
@@ -38,7 +38,7 @@ def fetch_landmark(landmark_uuid: str):
     try:
         landmark = cache_client.get(f'landmark_{landmark_uuid}')
         if not landmark :
-            logger.warning(f'Cache miss for landmark UUID: {landmark_uuid}')
+            logger.error(f'Cache miss for landmark UUID: {landmark_uuid}')
             raise HTTPException(status_code=404, detail=f'Landmark with UUID {landmark_uuid} not found in cache.')
 
         # Validate that the fetched data is a dictionary
diff --git a/backend/src/toilets/toilets_manager.py b/backend/src/toilets/toilets_manager.py
index 9abf7fa..0de6d8a 100644
--- a/backend/src/toilets/toilets_manager.py
+++ b/backend/src/toilets/toilets_manager.py
@@ -65,7 +65,7 @@ class ToiletsManager:
         try:
             result = self.overpass.fetch_data_from_api(query_str=query)
         except Exception as e:
-            self.logger.error(f"Error fetching landmarks: {e}")
+            self.logger.error(f"Error fetching toilets: {e}")
             return None
 
         toilets_list = self.to_toilets(result)