linting
Some checks failed
Build and deploy the backend to staging / Build and push image (pull_request) Has been cancelled
Build and deploy the backend to staging / Deploy to staging (pull_request) Has been cancelled
Run testing on the backend code / Build (pull_request) Has been cancelled
Run linting on the backend code / Build (pull_request) Successful in 28s
This commit is contained in:
parent 8a9ec6b4d8
commit 83be4b7616
@@ -41,7 +41,7 @@ app = FastAPI(lifespan=lifespan)
 @app.post("/trip/new")
 def new_trip(preferences: Preferences,
 start: tuple[float, float],
 end: tuple[float, float] | None = None,
 background_tasks: BackgroundTasks = None) -> Trip:
 """
 Main function to call the optimizer.
@@ -100,7 +100,7 @@ def new_trip(preferences: Preferences,
 
 
 ###################### store landmarks in json file for debug ######################
 landmarks_list = [jsonable_encoder(item) for item in landmarks]
 with open('landmarks.json', 'w+') as file:
 json.dump(landmarks_list, file, indent=4)
 ####################################################################################
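For readers unfamiliar with the debug block above: `jsonable_encoder` converts Pydantic models into plain JSON-serializable structures before they are written to disk. A minimal, self-contained sketch follows; the `LandmarkSketch` model and its fields are placeholders, not the project's actual `Landmark` schema.

```python
import json
from fastapi.encoders import jsonable_encoder
from pydantic import BaseModel

class LandmarkSketch(BaseModel):
    # Placeholder model standing in for the project's Landmark type.
    name: str
    location: tuple[float, float]

landmarks = [LandmarkSketch(name="Old Town Hall", location=(47.3717, 8.5423))]

# jsonable_encoder turns each model into a plain dict so json.dump can handle it,
# mirroring the debug block in the hunk above.
landmarks_list = [jsonable_encoder(item) for item in landmarks]
with open('landmarks.json', 'w+', encoding='utf-8') as file:
    json.dump(landmarks_list, file, indent=4)
```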
@@ -257,7 +257,6 @@ class Optimizer:
 Returns:
 None: This function modifies the `prob` object by adding L-2 equality constraints in-place.
 """
-# FIXME: weird 0 artifact in the coefficients popping up
 # Loop through rows 1 to L-2 to prevent stacked ones
 for i in range(1, L-1):
 # Add the constraint that sums across each "row" or "block" in the decision variables
@@ -590,7 +589,7 @@ class Optimizer:
 try :
 prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit+1, gapRel=self.gap_rel))
 except Exception as exc :
-raise Exception(f"No solution found: {exc}") from exc
+raise Exception(f"No solution found: {str(exc)}") from exc
 status = pl.LpStatus[prob.status]
 solution = [pl.value(var) for var in x]  # The values of the decision variables (will be 0 or 1)
 
@@ -618,7 +617,7 @@ class Optimizer:
 try :
 prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit, gapRel=self.gap_rel))
 except Exception as exc :
-raise Exception(f"No solution found: {exc}") from exc
+raise Exception(f"No solution found: {str(exc)}") from exc
 
 solution = [pl.value(var) for var in x]
 
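Both solver hunks follow the standard PuLP pattern of solving with the bundled CBC backend and re-raising failures with exception chaining. A minimal sketch with a toy model; the problem, variables, and limits here are illustrative, not the optimizer's real formulation.

```python
import pulp as pl

# Toy binary problem standing in for the optimizer's real model.
prob = pl.LpProblem("toy_trip", pl.LpMaximize)
x = [pl.LpVariable(f"x_{i}", cat=pl.LpBinary) for i in range(5)]
prob += pl.lpSum(x)        # objective: select as many items as possible
prob += pl.lpSum(x) <= 3   # a single capacity-style constraint

try:
    # msg=False silences CBC's console output; timeLimit and gapRel bound the search.
    prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=10, gapRel=0.05))
except Exception as exc:
    # `from exc` keeps the original traceback attached, as in the diff above.
    raise Exception(f"No solution found: {exc}") from exc

status = pl.LpStatus[prob.status]
solution = [pl.value(var) for var in x]  # decision variables resolve to 0 or 1
print(status, solution)
```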
@@ -1,3 +1,4 @@
+"""Module defining the handling of cache data from Overpass requests."""
 import os
 import json
 import hashlib
@@ -61,7 +62,7 @@ class JSONCache(CachingStrategyBase):
 return None
 
 def set(self, key, value):
-"""Save the JSON data as an ElementTree to the cache."""
+"""Save the JSON data in the cache."""
 filename = self._filename(key)
 try:
 # Write the JSON data to the cache file
@@ -94,7 +95,7 @@ class JSONCache(CachingStrategyBase):
 
 def close(self):
 """Cleanup method, if needed."""
-pass
+
 
 class CachingStrategy:
 """
@@ -107,6 +108,7 @@ class CachingStrategy:
 
 @classmethod
 def use(cls, strategy_name='JSON', **kwargs):
+"""Define the caching strategy to use."""
 if cls.__strategy:
 cls.__strategy.close()
 
@@ -119,10 +121,12 @@ class CachingStrategy:
 
 @classmethod
 def get(cls, key):
+"""Get the data from the cache."""
 return cls.__strategy.get(key)
 
 @classmethod
 def set(cls, key, value):
+"""Save the data in the cache."""
 cls.__strategy.set(key, value)
 
 @classmethod
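The docstring hunks above touch a small strategy-pattern wrapper: `CachingStrategy` holds one active backend and forwards `get`/`set` to it. A reduced sketch of how such a wrapper fits together and is used; the in-memory backend below is an assumption for illustration, the project's real backend being the file-based `JSONCache`.

```python
class InMemoryCacheSketch:
    """Illustrative stand-in for the project's JSONCache backend."""

    def __init__(self, **kwargs):
        self._store = {}

    def get(self, key):
        return self._store.get(key)

    def set(self, key, value):
        self._store[key] = value

    def close(self):
        """Cleanup method, if needed."""


class CachingStrategy:
    """Forward cache calls to whichever backend was selected with use()."""

    __strategy = None

    @classmethod
    def use(cls, strategy_name='JSON', **kwargs):
        """Define the caching strategy to use."""
        if cls.__strategy:
            cls.__strategy.close()
        cls.__strategy = InMemoryCacheSketch(**kwargs)  # the real code picks a backend by name
        return cls.__strategy

    @classmethod
    def get(cls, key):
        """Get the data from the cache."""
        return cls.__strategy.get(key)

    @classmethod
    def set(cls, key, value):
        """Save the data in the cache."""
        cls.__strategy.set(key, value)


CachingStrategy.use('JSON')
CachingStrategy.set('bbox_47.37_8.54', {'elements': []})
print(CachingStrategy.get('bbox_47.37_8.54'))
```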
@@ -59,7 +59,7 @@ class Overpass :
 return Overpass._filter_landmarks(cached_responses, bbox)
 
 # If there is no cached data, fetch all from Overpass.
-elif not cached_responses :
+if not cached_responses :
 query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)
 self.logger.debug(f'Query string: {query_str}')
 return self.fetch_data_from_api(query_str)
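The `elif` to `if` change reads like the usual cleanup once the preceding branch ends in a `return`: the second condition no longer needs to hang off the first. A reduced, hedged sketch of that control flow; names and return values are placeholders, not the Overpass class's API.

```python
def send_query_sketch(cached_responses, hollow_cache_entries):
    # Fully answered from cache: return early.
    if cached_responses and not hollow_cache_entries:
        return ('cache', cached_responses)

    # No elif needed: the early return above already left this branch.
    if not cached_responses:
        return ('api', 'full query')

    # Otherwise only the missing part is fetched.
    return ('api', 'partial query')


print(send_query_sketch(['landmark'], hollow_cache_entries=[]))
print(send_query_sketch([], hollow_cache_entries=[]))
```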
@@ -114,7 +114,7 @@ class Overpass :
 with urllib.request.urlopen(request) as response:
 
 # Convert the HTTPResponse to a string and load data
 response_data = response.read().decode('utf-8')
 data = json.loads(response_data)
 
 # Get elements and set cache
@@ -309,9 +309,9 @@ class Overpass :
 if min_lat == float('inf') or min_lon == float('inf'):
 return None
 
 return (max(min_lat, original_bbox[0]),
 max(min_lon, original_bbox[1]),
 min(max_lat, original_bbox[2]),
 min(max_lon, original_bbox[3]))
 
 
@@ -405,7 +405,7 @@ def fill_cache():
 
 try :
 # Read the whole file content as a string
-with open(entry.path, 'r') as f:
+with open(entry.path, 'r', encoding='utf-8') as f:
 # load data and fill the cache with the query and key
 json_data = json.load(f)
 overpass.fill_cache(json_data)
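Adding `encoding='utf-8'` is the common fix for pylint's warning about calling `open()` without an explicit encoding, which otherwise falls back to a platform-dependent default. A tiny hedged sketch; the path is a placeholder.

```python
import json

def load_cache_entry(path='cache_entry.json'):
    # Explicit encoding keeps the read deterministic across platforms and
    # satisfies pylint's unspecified-encoding check.
    with open(path, 'r', encoding='utf-8') as f:
        return json.load(f)
```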
@@ -413,4 +413,4 @@ def fill_cache():
 os.remove(entry.path)
 
 except Exception as exc :
-overpass.logger.error(f'An error occured while parsing file {entry.path} as .json file')
+overpass.logger.error(f'An error occured while parsing file {entry.path} as .json file: {str(exc)}')
@@ -250,19 +250,17 @@ class ClusterManager:
 continue
 
 for elem in result:
-osm_type = elem.get('type')
-
-id, coords, name = get_base_info(elem, osm_type, with_name=True)
-
+# Get basic info
+id, coords, name = get_base_info(elem, elem.get('type'), with_name=True)
 if name is None or coords is None :
 continue
 
 d = get_distance(cluster.centroid, coords)
 if d < min_dist :
 min_dist = d
 new_name = name # add name
-osm_type = osm_type # add type: 'way' or 'relation'
+osm_type = elem.get('type') # add type: 'way' or 'relation'
 osm_id = id # add OSM id
 
 return Landmark(
 name=new_name,
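For context on the ClusterManager hunk: `get_base_info` appears to pull the id, coordinates, and (optionally) name out of a raw Overpass element. The project's real helper is not shown in this diff, so the sketch below is an assumption based on the Overpass JSON format and the `out = 'ids center tags'` setting visible further down.

```python
def get_base_info_sketch(elem: dict, osm_type: str, with_name: bool = False):
    """Hypothetical helper: extract (id, coordinates, name) from an Overpass element."""
    elem_id = elem.get('id')

    # Nodes carry lat/lon directly; ways and relations queried with 'out center'
    # carry a 'center' object instead.
    if osm_type == 'node':
        coords = (elem.get('lat'), elem.get('lon'))
    else:
        center = elem.get('center', {})
        coords = (center.get('lat'), center.get('lon'))

    name = elem.get('tags', {}).get('name') if with_name else None
    return elem_id, coords, name


way = {'type': 'way', 'id': 42, 'center': {'lat': 47.37, 'lon': 8.54},
       'tags': {'name': 'Old Town Hall'}}
print(get_base_info_sketch(way, way.get('type'), with_name=True))
```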
@@ -197,7 +197,7 @@ class LandmarkManager:
 out = 'ids center tags'
 )
 except Exception as e:
-self.logger.error(f"Error fetching landmarks: {e}")
+self.logger.error(f"Error fetching landmarks: {str(e)}")
 continue
 
 return_list += self._to_landmarks(result, landmarktype, preference_level)
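A note on the logging lines touched in this hunk: inside an f-string, `{e}` and `{str(e)}` produce the same text for exceptions, so the change is cosmetic; if the goal is to quiet pylint's complaints about f-strings in logging calls, the usual alternative is lazy %-style formatting, sketched below.

```python
import logging

logging.basicConfig(level=logging.ERROR)
logger = logging.getLogger("landmarks")

try:
    raise ValueError("Overpass request timed out")  # stand-in for a failed fetch
except Exception as e:
    logger.error(f"Error fetching landmarks: {str(e)}")  # style used in the diff
    logger.error("Error fetching landmarks: %s", e)      # lazy-formatting alternative
```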
@@ -294,9 +294,11 @@ class LandmarkManager:
 
 
 return landmarks
 
 
 def description_and_keywords(self, tags: dict):
+"""
+"""
 # Extract relevant fields
 name = tags.get('name')
 importance = tags.get('importance', None)
@@ -314,7 +316,6 @@ class LandmarkManager:
 return None, None
 elif len(tags.keys()) < 10 :
 description = f"{name} is a well known {place_type}."
-
 elif len(tags.keys()) < 17 :
 importance = 'national'
 description = f"{name} is a {place_type} of national importance."
@@ -328,9 +329,9 @@ class LandmarkManager:
 description += f" This {place_type} was constructed in {date} and is ca. {height} meters high."
 elif height is not None :
 description += f" This {place_type} stands ca. {height} meters tall."
 elif date is not None:
 description += f" It was constructed in {date}."
 
 # Format the visitor number
 if n_visitors is not None :
 n_visitors = int(n_visitors)
@@ -385,8 +386,6 @@ class LandmarkManager:
 
 return None
 
 
-
-
 def dict_to_selector_list(d: dict) -> list:
 """
@@ -408,5 +407,3 @@ def dict_to_selector_list(d: dict) -> list:
 else:
 return_list.append(f'{key}={value}')
 return return_list
-
-