linting
Some checks failed
Build and deploy the backend to staging / Build and push image (pull_request) Has been cancelled
Build and deploy the backend to staging / Deploy to staging (pull_request) Has been cancelled
Run testing on the backend code / Build (pull_request) Has been cancelled
Run linting on the backend code / Build (pull_request) Successful in 28s

kscheidecker 2025-02-19 14:51:38 +01:00
parent 8a9ec6b4d8
commit 83be4b7616
6 changed files with 29 additions and 31 deletions

View File

@@ -41,7 +41,7 @@ app = FastAPI(lifespan=lifespan)
@app.post("/trip/new")
def new_trip(preferences: Preferences,
start: tuple[float, float],
-end: tuple[float, float] | None = None,
+end: tuple[float, float] | None = None,
background_tasks: BackgroundTasks = None) -> Trip:
"""
Main function to call the optimizer.
@@ -100,7 +100,7 @@ def new_trip(preferences: Preferences,
###################### store landmarks in json file for debug ######################
-landmarks_list = [jsonable_encoder(item) for item in landmarks]
+landmarks_list = [jsonable_encoder(item) for item in landmarks]
with open('landmarks.json', 'w+') as file:
json.dump(landmarks_list, file, indent=4)
####################################################################################

View File

@@ -257,7 +257,6 @@ class Optimizer:
Returns:
None: This function modifies the `prob` object by adding L-2 equality constraints in-place.
"""
-# FIXME: weird 0 artifact in the coefficients popping up
# Loop through rows 1 to L-2 to prevent stacked ones
for i in range(1, L-1):
# Add the constraint that sums across each "row" or "block" in the decision variables
@@ -590,7 +589,7 @@ class Optimizer:
try :
prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit+1, gapRel=self.gap_rel))
except Exception as exc :
-raise Exception(f"No solution found: {exc}") from exc
+raise Exception(f"No solution found: {str(exc)}") from exc
status = pl.LpStatus[prob.status]
solution = [pl.value(var) for var in x] # The values of the decision variables (will be 0 or 1)
@@ -618,7 +617,7 @@ class Optimizer:
try :
prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit, gapRel=self.gap_rel))
except Exception as exc :
-raise Exception(f"No solution found: {exc}") from exc
+raise Exception(f"No solution found: {str(exc)}") from exc
solution = [pl.value(var) for var in x]
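
For reference, the solve-and-extract pattern touched in these hunks can be exercised in isolation. The following is a minimal sketch with a made-up objective and constraint, not the Optimizer's actual model:

import pulp as pl

# Toy binary program standing in for the Optimizer's model
prob = pl.LpProblem("toy_selection", pl.LpMaximize)
x = [pl.LpVariable(f"x_{i}", cat=pl.LpBinary) for i in range(3)]
prob += pl.lpSum([3 * x[0], 2 * x[1], 1 * x[2]])  # objective: weighted selection
prob += pl.lpSum(x) <= 2                          # pick at most two items

try :
    # Same CBC options as above: silent run, wall-clock limit, relative MIP gap
    prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=10, gapRel=0.05))
except Exception as exc :
    raise Exception(f"No solution found: {str(exc)}") from exc

status = pl.LpStatus[prob.status]        # e.g. 'Optimal'
solution = [pl.value(var) for var in x]  # each value is 0 or 1
print(status, solution)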

View File

@@ -1,3 +1,4 @@
+"""Module defining the handling of cache data from Overpass requests."""
import os
import json
import hashlib
@@ -61,7 +62,7 @@ class JSONCache(CachingStrategyBase):
return None
def set(self, key, value):
"""Save the JSON data as an ElementTree to the cache."""
"""Save the JSON data in the cache."""
filename = self._filename(key)
try:
# Write the JSON data to the cache file
@@ -94,7 +95,7 @@ class JSONCache(CachingStrategyBase):
def close(self):
"""Cleanup method, if needed."""
pass
class CachingStrategy:
"""
@@ -107,6 +108,7 @@ class CachingStrategy:
@classmethod
def use(cls, strategy_name='JSON', **kwargs):
"""Define the caching strategy to use."""
if cls.__strategy:
cls.__strategy.close()
@@ -119,10 +121,12 @@ class CachingStrategy:
@classmethod
def get(cls, key):
"""Get the data from the cache."""
return cls.__strategy.get(key)
@classmethod
def set(cls, key, value):
"""Save the data in the cache."""
cls.__strategy.set(key, value)
@classmethod
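
The hunks above touch a class-level strategy wrapper. As a rough, self-contained sketch of that pattern (the backend class and the selection logic here are illustrative, not the project's JSONCache):

class _DictCache:
    """Toy in-memory backend standing in for JSONCache."""
    def __init__(self, **kwargs):
        self._store = {}
    def get(self, key):
        return self._store.get(key)
    def set(self, key, value):
        self._store[key] = value
    def close(self):
        """Cleanup method, if needed."""

class Caching:
    """Class-level wrapper that delegates to one active backend."""
    __strategy = None

    @classmethod
    def use(cls, strategy_name='DICT', **kwargs):
        """Define the caching strategy to use."""
        if cls.__strategy:
            cls.__strategy.close()
        cls.__strategy = _DictCache(**kwargs)  # real code would dispatch on strategy_name
        return cls.__strategy

    @classmethod
    def get(cls, key):
        """Get the data from the cache."""
        return cls.__strategy.get(key)

    @classmethod
    def set(cls, key, value):
        """Save the data in the cache."""
        cls.__strategy.set(key, value)

Caching.use()
Caching.set('overpass-query-hash', {'elements': []})
print(Caching.get('overpass-query-hash'))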

View File

@@ -59,7 +59,7 @@ class Overpass :
return Overpass._filter_landmarks(cached_responses, bbox)
# If there is no cached data, fetch all from Overpass.
-elif not cached_responses :
+if not cached_responses :
query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)
self.logger.debug(f'Query string: {query_str}')
return self.fetch_data_from_api(query_str)
@@ -114,7 +114,7 @@ class Overpass :
with urllib.request.urlopen(request) as response:
# Convert the HTTPResponse to a string and load data
-response_data = response.read().decode('utf-8')
+response_data = response.read().decode('utf-8')
data = json.loads(response_data)
# Get elements and set cache
@@ -309,9 +309,9 @@ class Overpass :
if min_lat == float('inf') or min_lon == float('inf'):
return None
-return (max(min_lat, original_bbox[0]),
-max(min_lon, original_bbox[1]),
-min(max_lat, original_bbox[2]),
+return (max(min_lat, original_bbox[0]),
+max(min_lon, original_bbox[1]),
+min(max_lat, original_bbox[2]),
min(max_lon, original_bbox[3]))
@@ -405,7 +405,7 @@ def fill_cache():
try :
# Read the whole file content as a string
-with open(entry.path, 'r') as f:
+with open(entry.path, 'r', encoding='utf-8') as f:
# load data and fill the cache with the query and key
json_data = json.load(f)
overpass.fill_cache(json_data)
@@ -413,4 +413,4 @@ def fill_cache():
os.remove(entry.path)
except Exception as exc :
-overpass.logger.error(f'An error occured while parsing file {entry.path} as .json file')
+overpass.logger.error(f'An error occured while parsing file {entry.path} as .json file: {str(exc)}')
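
For orientation, the urllib fetch path touched above boils down to a POST of the query string to an Overpass endpoint and a JSON decode of the body. A rough standalone sketch follows; the endpoint and query here are illustrative, and the class may be configured differently:

import json
import urllib.parse
import urllib.request

query_str = '[out:json];node["tourism"="museum"](48.85,2.29,48.87,2.31);out center;'
url = 'https://overpass-api.de/api/interpreter'
payload = urllib.parse.urlencode({'data': query_str}).encode('utf-8')
request = urllib.request.Request(url, data=payload)

with urllib.request.urlopen(request) as response:
    # Convert the HTTPResponse to a string and load data
    response_data = response.read().decode('utf-8')
    data = json.loads(response_data)

# Get elements, ready to hand to the cache
elements = data.get('elements', [])
print(f'{len(elements)} elements returned')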

View File

@@ -250,19 +250,17 @@ class ClusterManager:
continue
for elem in result:
-osm_type = elem.get('type')
-id, coords, name = get_base_info(elem, osm_type, with_name=True)
+# Get basic info
+id, coords, name = get_base_info(elem, elem.get('type'), with_name=True)
if name is None or coords is None :
continue
d = get_distance(cluster.centroid, coords)
if d < min_dist :
min_dist = d
-new_name = name # add name
-osm_type = osm_type # add type: 'way' or 'relation'
-osm_id = id # add OSM id
+new_name = name # add name
+osm_type = elem.get('type') # add type: 'way' or 'relation'
+osm_id = id # add OSM id
return Landmark(
name=new_name,

View File

@@ -197,7 +197,7 @@ class LandmarkManager:
out = 'ids center tags'
)
except Exception as e:
-self.logger.error(f"Error fetching landmarks: {e}")
+self.logger.error(f"Error fetching landmarks: {str(e)}")
continue
return_list += self._to_landmarks(result, landmarktype, preference_level)
@@ -294,9 +294,11 @@ class LandmarkManager:
return landmarks
def description_and_keywords(self, tags: dict):
"""
"""
# Extract relevant fields
name = tags.get('name')
importance = tags.get('importance', None)
@@ -314,7 +316,6 @@ class LandmarkManager:
return None, None
elif len(tags.keys()) < 10 :
description = f"{name} is a well known {place_type}."
elif len(tags.keys()) < 17 :
importance = 'national'
description = f"{name} is a {place_type} of national importance."
@@ -328,9 +329,9 @@ class LandmarkManager:
description += f" This {place_type} was constructed in {date} and is ca. {height} meters high."
elif height is not None :
description += f" This {place_type} stands ca. {height} meters tall."
-elif date is not None:
+elif date is not None:
description += f" It was constructed in {date}."
# Format the visitor number
if n_visitors is not None :
n_visitors = int(n_visitors)
@@ -385,8 +386,6 @@ class LandmarkManager:
return None
def dict_to_selector_list(d: dict) -> list:
"""
@@ -408,5 +407,3 @@ def dict_to_selector_list(d: dict) -> list:
else:
return_list.append(f'{key}={value}')
return return_list
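
Based on the branch visible in this hunk, a plain key/value mapping comes back as 'key=value' selector strings. A hypothetical usage sketch (other value types handled by the function are not shown here):

d = {'tourism': 'attraction', 'historic': 'monument'}
selectors = [f'{key}={value}' for key, value in d.items()]
print(selectors)  # ['tourism=attraction', 'historic=monument']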