From 83be4b761648c7c77f1fc58a3c9c08d7241a8244 Mon Sep 17 00:00:00 2001
From: kscheidecker
Date: Wed, 19 Feb 2025 14:51:38 +0100
Subject: [PATCH] linting

---
 backend/src/main.py                      |  4 ++--
 backend/src/optimization/optimizer.py    |  5 ++---
 backend/src/overpass/caching_strategy.py |  8 ++++++--
 backend/src/overpass/overpass.py         | 14 +++++++-------
 backend/src/utils/cluster_manager.py     | 12 +++++-------
 backend/src/utils/landmarks_manager.py   | 17 +++++++----------
 6 files changed, 29 insertions(+), 31 deletions(-)

diff --git a/backend/src/main.py b/backend/src/main.py
index 0ef416b..112e52e 100644
--- a/backend/src/main.py
+++ b/backend/src/main.py
@@ -41,7 +41,7 @@ app = FastAPI(lifespan=lifespan)
 @app.post("/trip/new")
 def new_trip(preferences: Preferences,
              start: tuple[float, float],
-             end: tuple[float, float] | None = None, 
+             end: tuple[float, float] | None = None,
              background_tasks: BackgroundTasks = None) -> Trip:
     """
     Main function to call the optimizer.
@@ -100,7 +100,7 @@ def new_trip(preferences: Preferences,
 
 
     ###################### store landmarks in json file for debug ######################
-    landmarks_list = [jsonable_encoder(item) for item in landmarks] 
+    landmarks_list = [jsonable_encoder(item) for item in landmarks]
     with open('landmarks.json', 'w+') as file:
         json.dump(landmarks_list, file, indent=4)
     ####################################################################################
diff --git a/backend/src/optimization/optimizer.py b/backend/src/optimization/optimizer.py
index b3a66f9..faa8f01 100644
--- a/backend/src/optimization/optimizer.py
+++ b/backend/src/optimization/optimizer.py
@@ -257,7 +257,6 @@ class Optimizer:
         Returns:
             None: This function modifies the `prob` object by adding L-2 equality constraints in-place.
         """
-        # FIXME: weird 0 artifact in the coefficients popping up
         # Loop through rows 1 to L-2 to prevent stacked ones
         for i in range(1, L-1):
             # Add the constraint that sums across each "row" or "block" in the decision variables
@@ -590,7 +589,7 @@ class Optimizer:
         try :
             prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit+1, gapRel=self.gap_rel))
         except Exception as exc :
-            raise Exception(f"No solution found: {exc}") from exc
+            raise Exception(f"No solution found: {str(exc)}") from exc
 
         status = pl.LpStatus[prob.status]
         solution = [pl.value(var) for var in x]  # The values of the decision variables (will be 0 or 1)
@@ -618,7 +617,7 @@ class Optimizer:
         try :
             prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit, gapRel=self.gap_rel))
         except Exception as exc :
-            raise Exception(f"No solution found: {exc}") from exc
+            raise Exception(f"No solution found: {str(exc)}") from exc
 
         solution = [pl.value(var) for var in x]
 
diff --git a/backend/src/overpass/caching_strategy.py b/backend/src/overpass/caching_strategy.py
index f334872..b04a50b 100644
--- a/backend/src/overpass/caching_strategy.py
+++ b/backend/src/overpass/caching_strategy.py
@@ -1,3 +1,4 @@
+"""Module defining the handling of cache data from Overpass requests."""
 import os
 import json
 import hashlib
@@ -61,7 +62,7 @@ class JSONCache(CachingStrategyBase):
             return None
 
     def set(self, key, value):
-        """Save the JSON data as an ElementTree to the cache."""
+        """Save the JSON data in the cache."""
         filename = self._filename(key)
         try:
             # Write the JSON data to the cache file
@@ -94,7 +95,7 @@ class JSONCache(CachingStrategyBase):
 
     def close(self):
         """Cleanup method, if needed."""
-        pass
+
 
 class CachingStrategy:
     """
@@ -107,6 +108,7 @@ class CachingStrategy:
 
     @classmethod
     def use(cls, strategy_name='JSON', **kwargs):
+        """Define the caching strategy to use."""
         if cls.__strategy:
             cls.__strategy.close()
 
@@ -119,10 +121,12 @@ class CachingStrategy:
 
     @classmethod
     def get(cls, key):
+        """Get the data from the cache."""
         return cls.__strategy.get(key)
 
     @classmethod
     def set(cls, key, value):
+        """Save the data in the cache."""
         cls.__strategy.set(key, value)
 
     @classmethod
diff --git a/backend/src/overpass/overpass.py b/backend/src/overpass/overpass.py
index e651979..fabc0ee 100644
--- a/backend/src/overpass/overpass.py
+++ b/backend/src/overpass/overpass.py
@@ -59,7 +59,7 @@ class Overpass :
             return Overpass._filter_landmarks(cached_responses, bbox)
 
         # If there is no cached data, fetch all from Overpass.
-        elif not cached_responses :
+        if not cached_responses :
             query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)
             self.logger.debug(f'Query string: {query_str}')
             return self.fetch_data_from_api(query_str)
@@ -114,7 +114,7 @@ class Overpass :
 
             with urllib.request.urlopen(request) as response:
                 # Convert the HTTPResponse to a string and load data
-                response_data = response.read().decode('utf-8') 
+                response_data = response.read().decode('utf-8')
                 data = json.loads(response_data)
 
                 # Get elements and set cache
@@ -309,9 +309,9 @@ class Overpass :
         if min_lat == float('inf') or min_lon == float('inf'):
             return None
 
-        return (max(min_lat, original_bbox[0]), 
-                max(min_lon, original_bbox[1]), 
-                min(max_lat, original_bbox[2]), 
+        return (max(min_lat, original_bbox[0]),
+                max(min_lon, original_bbox[1]),
+                min(max_lat, original_bbox[2]),
                 min(max_lon, original_bbox[3]))
 
 
@@ -405,7 +405,7 @@ def fill_cache():
 
             try :
                 # Read the whole file content as a string
-                with open(entry.path, 'r') as f:
+                with open(entry.path, 'r', encoding='utf-8') as f:
                     # load data and fill the cache with the query and key
                     json_data = json.load(f)
                     overpass.fill_cache(json_data)
@@ -413,4 +413,4 @@ def fill_cache():
                 os.remove(entry.path)
 
             except Exception as exc :
-                overpass.logger.error(f'An error occured while parsing file {entry.path} as .json file')
+                overpass.logger.error(f'An error occurred while parsing file {entry.path} as .json file: {str(exc)}')
diff --git a/backend/src/utils/cluster_manager.py b/backend/src/utils/cluster_manager.py
index 0f0b363..86f926a 100644
--- a/backend/src/utils/cluster_manager.py
+++ b/backend/src/utils/cluster_manager.py
@@ -250,19 +250,17 @@ class ClusterManager:
                 continue
 
             for elem in result:
-                osm_type = elem.get('type')
-
-                id, coords, name = get_base_info(elem, osm_type, with_name=True)
-
+                # Get basic info
+                id, coords, name = get_base_info(elem, elem.get('type'), with_name=True)
                 if name is None or coords is None :
                     continue
 
                 d = get_distance(cluster.centroid, coords)
                 if d < min_dist :
                     min_dist = d
-                    new_name = name # add name 
-                    osm_type = osm_type # add type: 'way' or 'relation'
-                    osm_id = id # add OSM id 
+                    new_name = name # add name
+                    osm_type = elem.get('type') # add type: 'way' or 'relation'
+                    osm_id = id # add OSM id
 
         return Landmark(
             name=new_name,
diff --git a/backend/src/utils/landmarks_manager.py b/backend/src/utils/landmarks_manager.py
index 2538185..b015919 100644
--- a/backend/src/utils/landmarks_manager.py
+++ b/backend/src/utils/landmarks_manager.py
@@ -197,7 +197,7 @@ class LandmarkManager:
                     out = 'ids center tags'
                     )
             except Exception as e:
-                self.logger.error(f"Error fetching landmarks: {e}")
+                self.logger.error(f"Error fetching landmarks: {str(e)}")
                 continue
 
             return_list += self._to_landmarks(result, landmarktype, preference_level)
@@ -294,9 +294,11 @@ class LandmarkManager:
         return landmarks
 
 
-    
-    
+
+
     def description_and_keywords(self, tags: dict):
+        """Generate a description and keywords for a landmark
+        based on its OSM tags."""
         # Extract relevant fields
         name = tags.get('name')
         importance = tags.get('importance', None)
@@ -314,7 +316,6 @@ class LandmarkManager:
             return None, None
         elif len(tags.keys()) < 10 :
             description = f"{name} is a well known {place_type}."
-
         elif len(tags.keys()) < 17 :
             importance = 'national'
             description = f"{name} is a {place_type} of national importance."
@@ -328,9 +329,9 @@ class LandmarkManager:
             description += f" This {place_type} was constructed in {date} and is ca. {height} meters high."
         elif height is not None :
             description += f" This {place_type} stands ca. {height} meters tall."
-        elif date is not None: 
+        elif date is not None:
             description += f" It was constructed in {date}."
-        
+
         # Format the visitor number
         if n_visitors is not None :
             n_visitors = int(n_visitors)
@@ -385,8 +386,6 @@ class LandmarkManager:
         return None
 
 
-
-
 def dict_to_selector_list(d: dict) -> list:
     """
 
@@ -408,5 +407,3 @@ def dict_to_selector_list(d: dict) -> list:
         else:
             return_list.append(f'{key}={value}')
     return return_list
-
-