linting
Some checks failed
Build and deploy the backend to staging / Build and push image (pull_request) Has been cancelled
Build and deploy the backend to staging / Deploy to staging (pull_request) Has been cancelled
Run testing on the backend code / Build (pull_request) Has been cancelled
Run linting on the backend code / Build (pull_request) Successful in 28s

kscheidecker 2025-02-19 14:51:38 +01:00
parent 8a9ec6b4d8
commit 83be4b7616
6 changed files with 29 additions and 31 deletions

View File

@@ -257,7 +257,6 @@ class Optimizer:
Returns:
None: This function modifies the `prob` object by adding L-2 equality constraints in-place.
"""
# FIXME: weird 0 artifact in the coefficients popping up
# Loop through rows 1 to L-2 to prevent stacked ones
for i in range(1, L-1):
# Add the constraint that sums across each "row" or "block" in the decision variables
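For context, a minimal sketch of the row-equality pattern the docstring describes, assuming an L x L grid of binary decision variables flattened into `x` (the grid layout, variable names and problem name are assumptions, not taken from the module):

import pulp as pl

L = 5  # size of the hypothetical grid
prob = pl.LpProblem("row_constraints_demo", pl.LpMinimize)
x = [pl.LpVariable(f"x_{i}", cat=pl.LpBinary) for i in range(L * L)]

# One equality constraint per inner row (rows 1 to L-2), i.e. L-2 constraints,
# each summing over a "row"/"block" of L consecutive decision variables
for i in range(1, L - 1):
    prob += pl.lpSum(x[i * L + j] for j in range(L)) == 1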
@@ -590,7 +589,7 @@ class Optimizer:
try :
prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit+1, gapRel=self.gap_rel))
except Exception as exc :
raise Exception(f"No solution found: {exc}") from exc
raise Exception(f"No solution found: {str(exc)}") from exc
status = pl.LpStatus[prob.status]
solution = [pl.value(var) for var in x] # The values of the decision variables (will be 0 or 1)
@@ -618,7 +617,7 @@ class Optimizer:
try :
prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit, gapRel=self.gap_rel))
except Exception as exc :
raise Exception(f"No solution found: {exc}") from exc
raise Exception(f"No solution found: {str(exc)}") from exc
solution = [pl.value(var) for var in x]

View File

@@ -1,3 +1,4 @@
"""Module defining the handling of cache data from Overpass requests."""
import os
import json
import hashlib
@@ -61,7 +62,7 @@ class JSONCache(CachingStrategyBase):
return None
def set(self, key, value):
"""Save the JSON data as an ElementTree to the cache."""
"""Save the JSON data in the cache."""
filename = self._filename(key)
try:
# Write the JSON data to the cache file
@@ -94,7 +95,7 @@ class JSONCache(CachingStrategyBase):
def close(self):
"""Cleanup method, if needed."""
pass
class CachingStrategy:
"""
@@ -107,6 +108,7 @@ class CachingStrategy:
@classmethod
def use(cls, strategy_name='JSON', **kwargs):
"""Define the caching strategy to use."""
if cls.__strategy:
cls.__strategy.close()
@@ -119,10 +121,12 @@ class CachingStrategy:
@classmethod
def get(cls, key):
"""Get the data from the cache."""
return cls.__strategy.get(key)
@classmethod
def set(cls, key, value):
"""Save the data in the cache."""
cls.__strategy.set(key, value)
@classmethod
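The docstrings added above describe a class-level facade that stores a single active caching strategy and proxies get/set to it. A minimal sketch of that pattern, using a stand-in in-memory strategy (the real module wires in JSONCache; the stand-in class and its behaviour are assumptions):

class _DictCache:
    """Stand-in strategy used only for this sketch."""
    def __init__(self, **kwargs):
        self._data = {}
    def get(self, key):
        return self._data.get(key)
    def set(self, key, value):
        self._data[key] = value
    def close(self):
        pass

class CachingStrategy:
    """Facade keeping one active strategy on the class, as in the diff above."""
    __strategy = None

    @classmethod
    def use(cls, strategy_name='JSON', **kwargs):
        """Define the caching strategy to use."""
        if cls.__strategy:
            cls.__strategy.close()
        cls.__strategy = _DictCache(**kwargs)  # the real code selects JSONCache here
        return cls.__strategy

    @classmethod
    def get(cls, key):
        """Get the data from the cache."""
        return cls.__strategy.get(key)

    @classmethod
    def set(cls, key, value):
        """Save the data in the cache."""
        cls.__strategy.set(key, value)

Callers then go through the facade only, e.g. CachingStrategy.use('JSON') once, followed by CachingStrategy.set(key, data) and CachingStrategy.get(key).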

View File

@@ -59,7 +59,7 @@ class Overpass :
return Overpass._filter_landmarks(cached_responses, bbox)
# If there is no cached data, fetch all from Overpass.
elif not cached_responses :
if not cached_responses :
query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)
self.logger.debug(f'Query string: {query_str}')
return self.fetch_data_from_api(query_str)
@@ -405,7 +405,7 @@ def fill_cache():
try :
# Read the whole file content as a string
with open(entry.path, 'r') as f:
with open(entry.path, 'r', encoding='utf-8') as f:
# load data and fill the cache with the query and key
json_data = json.load(f)
overpass.fill_cache(json_data)
@@ -413,4 +413,4 @@ def fill_cache():
os.remove(entry.path)
except Exception as exc :
overpass.logger.error(f'An error occurred while parsing file {entry.path} as .json file')
overpass.logger.error(f'An error occurred while parsing file {entry.path} as .json file: {str(exc)}')
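A compact sketch of the fill-from-disk pattern these hunks show: scan a directory of pending .json files, load each one with UTF-8 encoding, hand the parsed data to the cache, then delete the file. The function name and parameters below are placeholders, not the module's own:

import json
import os

def fill_cache_from_dir(cache_dir, fill, logger):
    with os.scandir(cache_dir) as it:
        for entry in it:
            if not entry.name.endswith('.json'):
                continue
            try:
                # Read the whole file content and fill the cache with it
                with open(entry.path, 'r', encoding='utf-8') as f:
                    fill(json.load(f))
                # Drop the file once its content has been cached
                os.remove(entry.path)
            except Exception as exc:
                logger.error(f'An error occurred while parsing file {entry.path} as .json file: {exc}')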

View File

@@ -250,10 +250,8 @@ class ClusterManager:
continue
for elem in result:
osm_type = elem.get('type')
id, coords, name = get_base_info(elem, osm_type, with_name=True)
# Get basic info
id, coords, name = get_base_info(elem, elem.get('type'), with_name=True)
if name is None or coords is None :
continue
@@ -261,7 +259,7 @@ class ClusterManager:
if d < min_dist :
min_dist = d
new_name = name # add name
osm_type = osm_type # add type: 'way' or 'relation'
osm_type = elem.get('type') # add type: 'way' or 'relation'
osm_id = id # add OSM id
return Landmark(

View File

@@ -197,7 +197,7 @@ class LandmarkManager:
out = 'ids center tags'
)
except Exception as e:
self.logger.error(f"Error fetching landmarks: {e}")
self.logger.error(f"Error fetching landmarks: {str(e)}")
continue
return_list += self._to_landmarks(result, landmarktype, preference_level)
@@ -297,6 +297,8 @@ class LandmarkManager:
def description_and_keywords(self, tags: dict):
"""
"""
# Extract relevant fields
name = tags.get('name')
importance = tags.get('importance', None)
@@ -314,7 +316,6 @@ class LandmarkManager:
return None, None
elif len(tags.keys()) < 10 :
description = f"{name} is a well known {place_type}."
elif len(tags.keys()) < 17 :
importance = 'national'
description = f"{name} is a {place_type} of national importance."
@@ -386,8 +387,6 @@ class LandmarkManager:
return None
def dict_to_selector_list(d: dict) -> list:
"""
Convert a dictionary of key-value pairs to a list of Overpass query strings.
@@ -408,5 +407,3 @@ def dict_to_selector_list(d: dict) -> list:
else:
return_list.append(f'{key}={value}')
return return_list
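For illustration, a reduced sketch of dict_to_selector_list covering only the scalar branch visible in this hunk (the function's other branch and the rest of its docstring are not shown in the diff), together with a hypothetical call:

def dict_to_selector_list(d: dict) -> list:
    """Convert a dictionary of key-value pairs to a list of Overpass selector strings."""
    return_list = []
    for key, value in d.items():
        # Scalar values become plain key=value selectors
        return_list.append(f'{key}={value}')
    return return_list

# Hypothetical usage:
# dict_to_selector_list({'tourism': 'attraction', 'wheelchair': 'yes'})
# -> ['tourism=attraction', 'wheelchair=yes']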