better logs
	
		
			
	
		
	
	
		
	
		
			Some checks failed
		
		
	
	
		
			
				
	
				Build and deploy the backend to staging / Build and push image (pull_request) Successful in 1m40s
				
			
		
			
				
	
				Run linting on the backend code / Build (pull_request) Successful in 55s
				
			
		
			
				
	
				Run testing on the backend code / Build (pull_request) Has been cancelled
				
			
		
			
				
	
				Build and deploy the backend to staging / Deploy to staging (pull_request) Successful in 25s
				
			
		
		
	
	
				
					
				
			
		
			Some checks failed
		
		
	
	Build and deploy the backend to staging / Build and push image (pull_request) Successful in 1m40s
				
			Run linting on the backend code / Build (pull_request) Successful in 55s
				
			Run testing on the backend code / Build (pull_request) Has been cancelled
				
			Build and deploy the backend to staging / Deploy to staging (pull_request) Successful in 25s
				
			This commit is contained in:
		| @@ -103,7 +103,7 @@ class ClusterManager: | |||||||
|             out = out |             out = out | ||||||
|         ) |         ) | ||||||
|         except Exception as e: |         except Exception as e: | ||||||
|             self.logger.error(f"Error fetching clusters: {e}") |             self.logger.warning(f"Error fetching clusters: {e}") | ||||||
|  |  | ||||||
|         if result is None : |         if result is None : | ||||||
|             self.logger.debug(f"Found no {cluster_type} clusters, overpass query returned no datapoints.") |             self.logger.debug(f"Found no {cluster_type} clusters, overpass query returned no datapoints.") | ||||||
| @@ -242,11 +242,11 @@ class ClusterManager: | |||||||
|                                                   out = 'ids center tags' |                                                   out = 'ids center tags' | ||||||
|                                                   ) |                                                   ) | ||||||
|             except Exception as e: |             except Exception as e: | ||||||
|                 self.logger.error(f"Error fetching clusters: {e}") |                 self.logger.warning(f"Error fetching clusters: {e}") | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             if result is None : |             if result is None : | ||||||
|                 self.logger.error(f"Error fetching clusters: {e}") |                 self.logger.warning(f"Error fetching clusters: query result is None") | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             for elem in result: |             for elem in result: | ||||||
|   | |||||||
| @@ -197,7 +197,7 @@ class LandmarkManager: | |||||||
|                     out = 'ids center tags' |                     out = 'ids center tags' | ||||||
|                     ) |                     ) | ||||||
|             except Exception as e: |             except Exception as e: | ||||||
|                 self.logger.error(f"Error fetching landmarks: {str(e)}") |                 self.logger.warning(f"Error fetching landmarks: {str(e)}") | ||||||
|                 continue |                 continue | ||||||
|  |  | ||||||
|             return_list += self._to_landmarks(result, landmarktype, preference_level) |             return_list += self._to_landmarks(result, landmarktype, preference_level) | ||||||
|   | |||||||
| @@ -114,6 +114,7 @@ def new_trip(preferences: Preferences, | |||||||
|     try: |     try: | ||||||
|         base_tour = optimizer.solve_optimization(preferences.max_time_minute, landmarks_short) |         base_tour = optimizer.solve_optimization(preferences.max_time_minute, landmarks_short) | ||||||
|     except Exception as exc: |     except Exception as exc: | ||||||
|  |         logger.error(f"Trip generation failed: {str(exc)}") | ||||||
|         raise HTTPException(status_code=500, detail=f"Optimization failed: {str(exc)}") from exc |         raise HTTPException(status_code=500, detail=f"Optimization failed: {str(exc)}") from exc | ||||||
|  |  | ||||||
|     t_first_stage = time.time() - start_time |     t_first_stage = time.time() - start_time | ||||||
| @@ -126,9 +127,10 @@ def new_trip(preferences: Preferences, | |||||||
|                                                preferences.max_time_minute, |                                                preferences.max_time_minute, | ||||||
|                                                preferences.detour_tolerance_minute) |                                                preferences.detour_tolerance_minute) | ||||||
|     except TimeoutError as te : |     except TimeoutError as te : | ||||||
|         logger.error(f'Refiner failed : {str(te)} Using base tour.') |         logger.warning(f'Refiner failed : {str(te)} Using base tour.') | ||||||
|         refined_tour = base_tour |         refined_tour = base_tour | ||||||
|     except Exception as exc : |     except Exception as exc : | ||||||
|  |         logger.error(f"Trip generation failed: {str(exc)}") | ||||||
|         raise HTTPException(status_code=500, detail=f"An unexpected error occurred: {str(exc)}") from exc |         raise HTTPException(status_code=500, detail=f"An unexpected error occurred: {str(exc)}") from exc | ||||||
|  |  | ||||||
|     t_second_stage = time.time() - start_time |     t_second_stage = time.time() - start_time | ||||||
| @@ -164,6 +166,7 @@ def get_trip(trip_uuid: str) -> Trip: | |||||||
|         trip = cache_client.get(f"trip_{trip_uuid}") |         trip = cache_client.get(f"trip_{trip_uuid}") | ||||||
|         return trip |         return trip | ||||||
|     except KeyError as exc: |     except KeyError as exc: | ||||||
|  |         logger.error(f"Failed to fetch trip with UUID {trip_uuid}: {str(exc)}") | ||||||
|         raise HTTPException(status_code=404, detail="Trip not found") from exc |         raise HTTPException(status_code=404, detail="Trip not found") from exc | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -182,6 +185,7 @@ def get_landmark(landmark_uuid: str) -> Landmark: | |||||||
|         landmark = cache_client.get(f"landmark_{landmark_uuid}") |         landmark = cache_client.get(f"landmark_{landmark_uuid}") | ||||||
|         return landmark |         return landmark | ||||||
|     except KeyError as exc: |     except KeyError as exc: | ||||||
|  |         logger.error(f"Failed to fetch landmark with UUID {landmark_uuid}: {str(exc)}") | ||||||
|         raise HTTPException(status_code=404, detail="Landmark not found") from exc |         raise HTTPException(status_code=404, detail="Landmark not found") from exc | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -200,6 +204,7 @@ def update_trip_time(trip_uuid: str, removed_landmark_uuid: str) -> Trip: | |||||||
|     try: |     try: | ||||||
|         trip = cache_client.get(f'trip_{trip_uuid}') |         trip = cache_client.get(f'trip_{trip_uuid}') | ||||||
|     except KeyError as exc: |     except KeyError as exc: | ||||||
|  |         logger.error(f"Failed to update trip with UUID {trip_uuid} (trip not found): {str(exc)}") | ||||||
|         raise HTTPException(status_code=404, detail='Trip not found') from exc |         raise HTTPException(status_code=404, detail='Trip not found') from exc | ||||||
|  |  | ||||||
|     landmarks = [] |     landmarks = [] | ||||||
| @@ -214,6 +219,7 @@ def update_trip_time(trip_uuid: str, removed_landmark_uuid: str) -> Trip: | |||||||
|                 landmarks.append(landmark) |                 landmarks.append(landmark) | ||||||
|             next_uuid = landmark.next_uuid  # Prepare for the next iteration |             next_uuid = landmark.next_uuid  # Prepare for the next iteration | ||||||
|     except KeyError as exc: |     except KeyError as exc: | ||||||
|  |         logger.error(f"Failed to update trip with UUID {trip_uuid} : {str(exc)}") | ||||||
|         raise HTTPException(status_code=404, detail=f'landmark {next_uuid} not found') from exc |         raise HTTPException(status_code=404, detail=f'landmark {next_uuid} not found') from exc | ||||||
|  |  | ||||||
|     # Re-link every thing and compute times again |     # Re-link every thing and compute times again | ||||||
|   | |||||||
| @@ -597,7 +597,7 @@ class Optimizer: | |||||||
|  |  | ||||||
|         # Raise error if no solution is found. FIXME: for now this throws the internal server error |         # Raise error if no solution is found. FIXME: for now this throws the internal server error | ||||||
|         if status != 'Optimal' : |         if status != 'Optimal' : | ||||||
|             self.logger.error("The problem is overconstrained, no solution on first try.") |             self.logger.warning("The problem is overconstrained, no solution on first try.") | ||||||
|             raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.") |             raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.") | ||||||
|  |  | ||||||
|         # If there is a solution, we're good to go, just check for connectiveness |         # If there is a solution, we're good to go, just check for connectiveness | ||||||
| @@ -607,7 +607,7 @@ class Optimizer: | |||||||
|         while circles is not None : |         while circles is not None : | ||||||
|             i += 1 |             i += 1 | ||||||
|             if i == self.max_iter : |             if i == self.max_iter : | ||||||
|                 self.logger.error(f'Timeout: No solution found after {self.max_iter} iterations.') |                 self.logger.warning(f'Timeout: No solution found after {self.max_iter} iterations.') | ||||||
|                 raise TimeoutError(f"Optimization took too long. No solution found after {self.max_iter} iterations.") |                 raise TimeoutError(f"Optimization took too long. No solution found after {self.max_iter} iterations.") | ||||||
|  |  | ||||||
|             for circle in circles : |             for circle in circles : | ||||||
| @@ -617,12 +617,13 @@ class Optimizer: | |||||||
|             try : |             try : | ||||||
|                 prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit, gapRel=self.gap_rel)) |                 prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit, gapRel=self.gap_rel)) | ||||||
|             except Exception as exc : |             except Exception as exc : | ||||||
|  |                 self.logger.warning(f"No solution found: {str(exc)}") | ||||||
|                 raise Exception(f"No solution found: {str(exc)}") from exc |                 raise Exception(f"No solution found: {str(exc)}") from exc | ||||||
|  |  | ||||||
|             solution = [pl.value(var) for var in x] |             solution = [pl.value(var) for var in x] | ||||||
|  |  | ||||||
|             if pl.LpStatus[prob.status] != 'Optimal' : |             if pl.LpStatus[prob.status] != 'Optimal' : | ||||||
|             self.logger.error("The problem is overconstrained, no solution after {i} cycles.") |                 self.logger.warning(f"The problem is overconstrained, no solution after {i} cycles.") | ||||||
|                 raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.") |                 raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.") | ||||||
|  |  | ||||||
|             circles = self.is_connected(solution) |             circles = self.is_connected(solution) | ||||||
|   | |||||||
| @@ -95,9 +95,10 @@ class Overpass : | |||||||
|                 return elements |                 return elements | ||||||
|  |  | ||||||
|         except urllib.error.URLError as e: |         except urllib.error.URLError as e: | ||||||
|             self.logger.error(f"Error connecting to Overpass API: {str(exc)}") |             self.logger.error(f"Error connecting to Overpass API: {str(e)}") | ||||||
|             raise ConnectionError(f"Error connecting to Overpass API: {str(exc)}") from e |             raise ConnectionError(f"Error connecting to Overpass API: {str(e)}") from e | ||||||
|         except Exception as exc : |         except Exception as exc : | ||||||
|  |             self.logger.error(f"unexpected error while fetching data from Overpass: {str(exc)}") | ||||||
|             raise Exception(f'An unexpected error occured: {str(exc)}') from exc |             raise Exception(f'An unexpected error occured: {str(exc)}') from exc | ||||||
|  |  | ||||||
|  |  | ||||||
| @@ -121,7 +122,7 @@ class Overpass : | |||||||
|                 self.caching_strategy.set(cache_key, elements) |                 self.caching_strategy.set(cache_key, elements) | ||||||
|                 self.logger.debug(f'Cache set for {cache_key}') |                 self.logger.debug(f'Cache set for {cache_key}') | ||||||
|         except urllib.error.URLError as e: |         except urllib.error.URLError as e: | ||||||
|             raise ConnectionError(f"Error connecting to Overpass API: {e}") from e |             raise ConnectionError(f"Error connecting to Overpass API: {str(e)}") from e | ||||||
|         except Exception as exc : |         except Exception as exc : | ||||||
|             raise Exception(f'An unexpected error occured: {str(exc)}') from exc |             raise Exception(f'An unexpected error occured: {str(exc)}') from exc | ||||||
|  |  | ||||||
|   | |||||||
| @@ -38,7 +38,7 @@ def fetch_landmark(landmark_uuid: str): | |||||||
|     try: |     try: | ||||||
|         landmark = cache_client.get(f'landmark_{landmark_uuid}') |         landmark = cache_client.get(f'landmark_{landmark_uuid}') | ||||||
|         if not landmark : |         if not landmark : | ||||||
|             logger.warning(f'Cache miss for landmark UUID: {landmark_uuid}') |             logger.error(f'Cache miss for landmark UUID: {landmark_uuid}') | ||||||
|             raise HTTPException(status_code=404, detail=f'Landmark with UUID {landmark_uuid} not found in cache.') |             raise HTTPException(status_code=404, detail=f'Landmark with UUID {landmark_uuid} not found in cache.') | ||||||
|  |  | ||||||
|         # Validate that the fetched data is a dictionary |         # Validate that the fetched data is a dictionary | ||||||
|   | |||||||
| @@ -65,7 +65,7 @@ class ToiletsManager: | |||||||
|         try: |         try: | ||||||
|             result = self.overpass.fetch_data_from_api(query_str=query) |             result = self.overpass.fetch_data_from_api(query_str=query) | ||||||
|         except Exception as e: |         except Exception as e: | ||||||
|             self.logger.error(f"Error fetching landmarks: {e}") |             self.logger.error(f"Error fetching toilets: {e}") | ||||||
|             return None |             return None | ||||||
|  |  | ||||||
|         toilets_list = self.to_toilets(result) |         toilets_list = self.to_toilets(result) | ||||||
|   | |||||||
		Reference in New Issue
	
	Block a user