now working
Some checks failed
Build and deploy the backend to staging / Build and push image (pull_request) Successful in 1m38s
Run linting on the backend code / Build (pull_request) Successful in 26s
Run testing on the backend code / Build (pull_request) Failing after 2m50s
Build and deploy the backend to staging / Deploy to staging (pull_request) Successful in 24s
This commit is contained in:
parent 4a904c3d3c
commit 2ac8499dfb
@@ -445,7 +445,9 @@ disable=raw-checker-failed,
         logging-fstring-interpolation,
         duplicate-code,
         relative-beyond-top-level,
-        invalid-name
+        invalid-name,
+        too-many-arguments,
+        too-many-positional-arguments

 # Enable the message, report, category or checker with the given id(s). You can
 # either give multiple identifier separated by comma (,) or put this option
File diff suppressed because one or more lines are too long
@@ -93,7 +93,7 @@ def new_trip(preferences: Preferences,
     )

     if len(landmarks) == 0 :
-        raise HTTPException(status_code=500, detail=f"No landmarks were found.")
+        raise HTTPException(status_code=500, detail="No landmarks were found.")

     # insert start and finish to the landmarks list
     landmarks_short.insert(0, start_landmark)
@@ -134,7 +134,8 @@ def new_trip(preferences: Preferences,
     # upon creation of the trip, persistence of both the trip and its landmarks is ensured.
     trip = Trip.from_linked_landmarks(linked_tour, cache_client)
     logger.info(f'Generated a trip of {trip.total_time} minutes with {len(refined_tour)} landmarks in {round(t_generate_landmarks + t_first_stage + t_second_stage,3)} seconds.')
+
     return trip

@@ -152,7 +153,7 @@ def get_trip(trip_uuid: str) -> Trip:
     """
     try:
         trip = cache_client.get(f"trip_{trip_uuid}")
-        background_tasks = BackgroundTasks(fill_cache())
+        BackgroundTasks(fill_cache())
         return trip
     except KeyError as exc:
         raise HTTPException(status_code=404, detail="Trip not found") from exc
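Note: both the old and new lines call fill_cache() eagerly and build a BackgroundTasks object that is never attached to the response, so nothing is actually deferred. A minimal sketch of the usual FastAPI pattern (the route path and wiring here are assumptions, not the project's actual code):

    from fastapi import BackgroundTasks, FastAPI, HTTPException

    app = FastAPI()

    @app.get("/trip/{trip_uuid}")  # hypothetical route path
    def get_trip(trip_uuid: str, background_tasks: BackgroundTasks):
        try:
            trip = cache_client.get(f"trip_{trip_uuid}")  # cache_client as in the diff
            # Pass the callable itself; FastAPI runs it after the response is sent.
            background_tasks.add_task(fill_cache)
            return trip
        except KeyError as exc:
            raise HTTPException(status_code=404, detail="Trip not found") from exc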
@@ -493,10 +493,21 @@ class Optimizer:


     def warm_start(self, x: list[pl.LpVariable], L: int) :
+        """
+        This function sets the initial values of the decision variables to a feasible solution.
+        This can help the solver start with a feasible or heuristic solution,
+        potentially speeding up convergence.
+
+        Args:
+            x (list[pl.LpVariable]): A list of PuLP decision variables (binary variables).
+            L (int): The size parameter, representing a dimension (likely related to a grid or matrix).
+
+        Returns:
+            list[pl.LpVariable]: The modified list of PuLP decision variables with initial values set.
+        """
         for i in range(L*L) :
             x[i].setInitialValue(0)

         x[1].setInitialValue(1)
         x[2*L-1].setInitialValue(1)

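Note: setInitialValue() only records a MIP start; with the CBC backend, PuLP forwards it to the solver only when the command is created with warmStart=True. A minimal, self-contained sketch of the same seeding pattern (the objective and constraint are placeholders, not the project's model):

    import pulp as pl

    L = 3
    prob = pl.LpProblem("warm_start_demo", pl.LpMaximize)
    x = [pl.LpVariable(f"x_{i}", cat="Binary") for i in range(L * L)]
    prob += pl.lpSum(x)        # placeholder objective
    prob += pl.lpSum(x) <= 2   # placeholder constraint

    # Same pattern as warm_start(): zero everything, then seed a feasible pair.
    for var in x:
        var.setInitialValue(0)
    x[1].setInitialValue(1)
    x[2 * L - 1].setInitialValue(1)

    # Without warmStart=True, CBC ignores the initial values.
    prob.solve(pl.PULP_CBC_CMD(msg=False, warmStart=True))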
@@ -579,7 +590,7 @@ class Optimizer:
         try :
             prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit+1, gapRel=self.gap_rel))
         except Exception as exc :
-            raise Exception(f"No solution found: {exc}") from exc
+            raise Exception(f"No solution found: {exc}") from exc
         status = pl.LpStatus[prob.status]
         solution = [pl.value(var) for var in x] # The values of the decision variables (will be 0 or 1)

@@ -607,8 +618,8 @@ class Optimizer:
         try :
             prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit, gapRel=self.gap_rel))
         except Exception as exc :
-            raise Exception(f"No solution found: {exc}") from exc
-
+            raise Exception(f"No solution found: {exc}") from exc
+
         solution = [pl.value(var) for var in x]

         if pl.LpStatus[prob.status] != 'Optimal' :
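Note: catching a bare Exception and re-raising a generic Exception loses the failure type; PuLP signals solver-level failures with pulp.PulpSolverError, so a narrower handler is possible. A sketch under that assumption (solve_or_raise is a hypothetical helper, not project code):

    import pulp as pl

    def solve_or_raise(prob: pl.LpProblem, time_limit: int, gap_rel: float) -> None:
        """Solve with CBC, converting solver failures into a typed error."""
        try:
            prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=time_limit, gapRel=gap_rel))
        except pl.PulpSolverError as exc:
            raise RuntimeError(f"No solution found: {exc}") from exc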
@@ -59,7 +59,7 @@ class JSONCache(CachingStrategyBase):
             with open(filename, 'r', encoding='utf-8') as file:
                 data = json.load(file)
                 return data # Return the parsed JSON data
-        except json.JSONDecodeError as err:
-            return None # Return None if parsing fails
+        except json.JSONDecodeError:
+            return None
@@ -73,8 +73,8 @@ class JSONCache(CachingStrategyBase):
         except IOError as e:
             raise IOError(f"Error writing to cache file: {filename} - {e}") from e

-    def set_hollow(self, key, cell: tuple, osm_types: OSM_TYPES,
-                   selector: str, conditions=[], out='center'):
+    def set_hollow(self, key, cell: tuple, osm_types: list,
+                   selector: str, conditions: list=None, out='center'):
         """Create an empty placeholder cache entry for a future fill."""
         hollow_key = f'hollow_{key}'
         filename = self._filename(hollow_key)
@@ -85,7 +85,7 @@ class JSONCache(CachingStrategyBase):
             "cell": list(cell),
             "osm_types": list(osm_types),
             "selector": selector,
-            "conditions": conditions if conditions else "none",
+            "conditions": conditions,
             "out": out
         }
         # Write the hollow data to the cache file
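For reference, combining the fields written here with the 'key' and 'cell' that _build_query_from_hollow reads back later in this diff, a hollow entry is a small JSON object. An illustrative example as a Python dict (all values hypothetical):

    hollow_entry = {
        "key": "f3a9c2",                      # cache key; exact format elided
        "cell": [963, 326],                   # grid cell indices
        "osm_types": ["way", "relation"],
        "selector": '"tourism"="attraction"',
        "conditions": ["count_tags()>5"],     # now stored as-is, no more "none" sentinel
        "out": "center",
    }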
@@ -130,11 +130,6 @@ class CachingStrategy:

     @classmethod
     def set_hollow(cls, key, cell: tuple, osm_types: OSM_TYPES,
-                   selector: str, conditions=[], out='center'):
+                   selector: str, conditions: list=None, out='center'):
         """Create a hollow cache entry."""
         cls.__strategy.set_hollow(key, cell, osm_types, selector, conditions, out)

-    @classmethod
-    def fill_hollow(cls, key, value):
-        """Fill in the hollow cache entry with actual data."""
-        cls.__strategy.fill_hollow(key, value)
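The recurring conditions=[] -> conditions: list=None change across these signatures avoids Python's shared-mutable-default pitfall: a default list is created once at function definition time and reused by every call. A minimal illustration:

    def broken(item, acc=[]):      # one list shared by every call
        acc.append(item)
        return acc

    def fixed(item, acc=None):     # fresh list per call
        if acc is None:
            acc = []
        acc.append(item)
        return acc

    print(broken(1), broken(2))    # [1, 2] [1, 2]
    print(fixed(1), fixed(2))      # [1] [2]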
@@ -30,7 +30,7 @@ class Overpass :


     def send_query(self, bbox: tuple, osm_types: OSM_TYPES,
-                   selector: str, conditions=[], out='center'):
+                   selector: str, conditions: list=None, out='center'):
         """
         Sends the Overpass QL query to the Overpass API and returns the parsed json response.

@@ -52,24 +52,22 @@ class Overpass :

         # If there is no missing data, return the cached responses
         if not hollow_cache_keys :
-            self.logger.debug(f'Cache hit.')
+            self.logger.info('Cache hit.')
             return self._combine_cached_data(cached_responses)

         # TODO If there is SOME missing data : hybrid stuff with partial cache

         # Missing data: Make a query to Overpass API
         query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)
         return self.fetch_data_from_api(query_str)

-    def fetch_data_from_api(self, query_str: str, cache_key: str = None) -> dict:
+    def fetch_data_from_api(self, query_str: str) -> list:
         """
-        Fetch data from the Overpass API and update the cache.
+        Fetch data from the Overpass API and return the json data.

         Args:
             query_str (str): The Overpass query string.
-            cached_responses (list): Cached responses to combine with fetched data.
-            hollow_cache_keys (list): Cache keys for missing data to be updated.

         Returns:
             dict: Combined cached and fetched data.
@@ -83,32 +81,45 @@ class Overpass :
             data = json.loads(response_data) # Load the JSON from the string
             elements = data.get('elements', [])

-            if cache_key is not None :
-                self.caching_strategy.set(cache_key, elements)
-                self.logger.debug(f'Cache set.')
-            else :
-                self.logger.debug(f'Cache miss. Fetching data through Overpass\nQuery = {query_str}')
-            return elements
+            self.logger.info(f'Cache miss. Fetching data through Overpass.')
+            self.logger.debug(f'Query = {query_str}')
+            return elements

         except urllib.error.URLError as e:
             self.logger.error(f"Error connecting to Overpass API: {e}")
-            raise ConnectionError(f"Error connecting to Overpass API: {e}")
+            raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
         except Exception as exc :
             raise Exception(f'An unexpected error occured: {str(exc)}') from exc


-    def fill_cache(self, json_data: list) :
+    def fill_cache(self, json_data: dict) :
         """
         Fill cache with data by using a hollow cache entry's information.
         """
         query_str, cache_key = Overpass._build_query_from_hollow(json_data)
-        self.fetch_data_from_api(query_str, cache_key)
+        try:
+            data = urllib.parse.urlencode({'data': query_str}).encode('utf-8')
+            request = urllib.request.Request(self.overpass_url, data=data, headers=self.headers)
+
+            with urllib.request.urlopen(request) as response:
+                # Convert the HTTPResponse to a string and load data
+                response_data = response.read().decode('utf-8')
+                data = json.loads(response_data)
+
+                # Get elements and set cache
+                elements = data.get('elements', [])
+                self.caching_strategy.set(cache_key, elements)
+                self.logger.debug(f'Cache set for {cache_key}')
+
+        except urllib.error.URLError as e:
+            raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
+        except Exception as exc :
+            raise Exception(f'An unexpected error occured: {str(exc)}') from exc


     @staticmethod
     def build_query(bbox: tuple, osm_types: OSM_TYPES,
-                    selector: str, conditions=[], out='center') -> str:
+                    selector: str, conditions: list=None, out='center') -> str:
         """
         Constructs a query string for the Overpass API to retrieve OpenStreetMap (OSM) data.

@@ -132,17 +143,12 @@ class Overpass :
         - If no conditions are provided, the query will just use the `selector` to filter the OSM
           elements without additional constraints.
         """
-        if not isinstance(conditions, list) :
-            conditions = [conditions]
-        if not isinstance(osm_types, list) :
-            osm_types = [osm_types]
-
         query = '[out:json];('

         # convert the bbox to string.
         bbox_str = f"({','.join(map(str, bbox))})"

-        if conditions :
+        if conditions is not None and len(conditions) > 0:
             conditions = '(if: ' + ' && '.join(conditions) + ')'
         else :
             conditions = ''
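With the isinstance() normalization removed, callers are now expected to pass real lists (or None for conditions, which the new guard handles). A hypothetical call, with illustrative values only (bbox order follows Overpass convention: south, west, north, east):

    query = Overpass.build_query(
        bbox=(48.20, 16.30, 48.25, 16.40),
        osm_types=['way', 'relation'],       # must now be a list
        selector='"tourism"="attraction"',
        conditions=['count_tags()>5'],       # or None
        out='center',
    )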
@@ -175,7 +181,7 @@ class Overpass :
         for cell in overlapping_cells :
             for elem in osm_types :
                 key_str = f"{elem}[{selector}]{conditions}({','.join(map(str, cell))})"

                 cell_key_dict[cell] = get_cache_key(key_str)

         cached_responses = []
|
||||
|
||||
|
||||
@staticmethod
|
||||
def _build_query_from_hollow(json_data: list):
|
||||
def _build_query_from_hollow(json_data: dict):
|
||||
"""
|
||||
Build query string using information from a hollow cache entry.
|
||||
"""
|
||||
# Parse the JSON string into a dictionary
|
||||
data = json.loads(json_data)
|
||||
|
||||
# Extract values from the JSON object
|
||||
key = data.get('key')
|
||||
cell = tuple(data.get('cell'))
|
||||
key = json_data.get('key')
|
||||
cell = tuple(json_data.get('cell'))
|
||||
bbox = Overpass._get_bbox_from_grid_cell(cell[0], cell[1])
|
||||
osm_types = data.get('osm_types')
|
||||
selector = data.get('selector')
|
||||
conditions = data.get('conditions') if data.get('conditions') != "none" else []
|
||||
out = data.get('out')
|
||||
osm_types = json_data.get('osm_types')
|
||||
selector = json_data.get('selector')
|
||||
conditions = json_data.get('conditions')
|
||||
out = json_data.get('out')
|
||||
|
||||
|
||||
query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)
|
||||
|
||||
return query_str, key
|
||||
|
||||
|
||||
@@ -230,7 +233,7 @@ class Overpass :
         for lat_idx in range(min_lat_cell, max_lat_cell + 1):
             for lon_idx in range(min_lon_cell, max_lon_cell + 1):
                 overlapping_cells.add((lat_idx, lon_idx))

         return overlapping_cells

@@ -274,7 +277,7 @@ class Overpass :
         for element in cached_data:
             combined_data.append(element)
         return combined_data


 def get_base_info(elem: dict, osm_type: OSM_TYPES, with_name=False) :
     """
@@ -327,14 +330,13 @@ def fill_cache():
     with os.scandir(OSM_CACHE_DIR) as it:
         for entry in it:
             if entry.is_file() and entry.name.startswith('hollow_'):

                 # Read the whole file content as a string
                 with open(entry.path, 'r') as f:
-                    json_data = f.read()
+                    json_data = json.load(f)

                 # Fill the cache with the query and key
                 overpass.fill_cache(json_data)

                 # Now delete the file as the cache is filled
                 os.remove(entry.path)
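The f.read() -> json.load(f) switch parses the hollow file at the read site, which is why fill_cache's parameter became a dict and _build_query_from_hollow dropped its json.loads call. Side by side (the filename is hypothetical):

    import json

    # Before: the raw string travelled onward and had to be parsed downstream.
    with open('hollow_example.json', 'r', encoding='utf-8') as f:
        json_data = f.read()          # str

    # After: parse once at the boundary and pass a dict around.
    with open('hollow_example.json', 'r', encoding='utf-8') as f:
        json_data = json.load(f)      # dict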
@@ -103,10 +103,10 @@ class ClusterManager:
                 out = out
             )
         except Exception as e:
-            self.logger.error(f"Error fetching landmarks: {e}")
+            self.logger.error(f"Error fetching clusters: {e}")

         if result is None :
-            self.logger.error(f"Error fetching {cluster_type} clusters, overpass query returned None.")
+            self.logger.debug(f"Found no {cluster_type} clusters, overpass query returned no datapoints.")
             self.valid = False

         else :
@@ -134,7 +134,7 @@ class ClusterManager:

         # Check that there are is least 1 cluster
         if len(set(labels)) > 1 :
-            self.logger.debug(f"Found {len(set(labels))} different clusters.")
+            self.logger.info(f"Found {len(set(labels))} different {cluster_type} clusters.")
             # Separate clustered points and noise points
             self.cluster_points = self.all_points[labels != -1]
             self.cluster_labels = labels[labels != -1]
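The labels != -1 mask is the standard way to separate clustered points from noise in a density clustering; the clusterer itself is not shown in this hunk, so DBSCAN (which labels noise points as -1) is assumed in this toy sketch:

    import numpy as np
    from sklearn.cluster import DBSCAN

    # Two nearby points form one cluster; the far point is labelled -1 (noise).
    all_points = np.array([[0.0, 0.0], [0.1, 0.0], [5.0, 5.0]])
    labels = DBSCAN(eps=0.5, min_samples=2).fit(all_points).labels_

    cluster_points = all_points[labels != -1]   # drop noise points
    cluster_labels = labels[labels != -1]
    print(len(set(labels)))                     # 2: one cluster + the noise label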
@@ -142,7 +142,7 @@ class ClusterManager:
             self.valid = True

         else :
-            self.logger.debug(f"Detected 0 {cluster_type} clusters.")
+            self.logger.info(f"Found 0 {cluster_type} clusters.")
             self.valid = False

         else :
@@ -241,11 +241,11 @@ class ClusterManager:
                 out = 'ids center'
             )
         except Exception as e:
-            self.logger.error(f"Error fetching landmarks: {e}")
+            self.logger.error(f"Error fetching clusters: {e}")
             continue

         if result is None :
-            self.logger.error(f"Error fetching landmarks: {e}")
+            self.logger.error(f"Error fetching clusters: {e}")
             continue

         for elem in result:
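Worth flagging: on the result is None path, {e} refers to the except-block variable, which Python 3 unbinds once the handler exits, so this log line would raise NameError when no exception occurred. A sketch of the flow without the stale reference (fetch_all and its arguments are hypothetical stand-ins, not project code):

    import logging

    logger = logging.getLogger(__name__)

    def fetch_all(cells, fetch):
        results = []
        for cell in cells:
            result = None
            try:
                result = fetch(cell)          # stands in for the Overpass call
            except Exception as e:
                logger.error(f"Error fetching clusters: {e}")
                continue
            if result is None:
                # `e` is unbound on this path, so log without it.
                logger.error("Error fetching clusters: query returned None.")
                continue
            results.append(result)
        return results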
@@ -88,26 +88,26 @@ class LandmarkManager:
             self.logger.debug('Fetching sightseeing landmarks...')
             current_landmarks = self.fetch_landmarks(bbox, self.amenity_selectors['sightseeing'], preferences.sightseeing.type, preferences.sightseeing.score)
             all_landmarks.update(current_landmarks)
-            self.logger.debug('Fetching sightseeing clusters...')
+            self.logger.info(f'Found {len(current_landmarks)} sightseeing landmarks')

             # special pipeline for historic neighborhoods
             neighborhood_manager = ClusterManager(bbox, 'sightseeing')
             historic_clusters = neighborhood_manager.generate_clusters()
             all_landmarks.update(historic_clusters)
-            self.logger.debug('Sightseeing clusters done')

         # list for nature
         if preferences.nature.score != 0:
             self.logger.debug('Fetching nature landmarks...')
             current_landmarks = self.fetch_landmarks(bbox, self.amenity_selectors['nature'], preferences.nature.type, preferences.nature.score)
             all_landmarks.update(current_landmarks)
+            self.logger.info(f'Found {len(current_landmarks)} nature landmarks')


         # list for shopping
         if preferences.shopping.score != 0:
             self.logger.debug('Fetching shopping landmarks...')
             current_landmarks = self.fetch_landmarks(bbox, self.amenity_selectors['shopping'], preferences.shopping.type, preferences.shopping.score)
-            self.logger.debug('Fetching shopping clusters...')
+            self.logger.info(f'Found {len(current_landmarks)} shopping landmarks')

             # set time for all shopping activites :
             for landmark in current_landmarks :
@@ -118,8 +118,6 @@ class LandmarkManager:
             shopping_manager = ClusterManager(bbox, 'shopping')
             shopping_clusters = shopping_manager.generate_clusters()
             all_landmarks.update(shopping_clusters)
-            self.logger.debug('Shopping clusters done')
-

         landmarks_constrained = take_most_important(all_landmarks, self.n_important)
@@ -174,7 +172,7 @@ class LandmarkManager:
         """
         return_list = []

-        if landmarktype == 'nature' : query_conditions = []
+        if landmarktype == 'nature' : query_conditions = None
         else : query_conditions = ['count_tags()>5']

         # caution, when applying a list of selectors, overpass will search for elements that match ALL selectors simultaneously
@@ -185,7 +183,7 @@ class LandmarkManager:
             osm_types = ['way', 'relation']

             if 'viewpoint' in sel :
-                query_conditions = []
+                query_conditions = None
                 osm_types.append('node')

             # Send the overpass query
@@ -56,7 +56,7 @@ class ToiletsManager:
         osm_types = ['node', 'way', 'relation']
         toilets_list = []

-        query = self.overpass.build_query(
+        query = Overpass.build_query(
             bbox = bbox,
             osm_types = osm_types,
             selector = '"amenity"="toilets"',
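This change works because build_query is a @staticmethod (see the overpass.py hunk above); calling it through the class instead of self.overpass makes the absence of instance state explicit and removes the need for an Overpass instance here. A minimal illustration (the stub body is not the real implementation):

    class Overpass:
        @staticmethod
        def build_query(bbox, osm_types, selector, conditions=None, out='center'):
            return f"[out:json];(...);out {out};"  # stub body for this sketch

    # Equivalent calls; the class form signals that no instance state is involved.
    q1 = Overpass.build_query((0, 0, 1, 1), ['node'], '"amenity"="toilets"')
    q2 = Overpass().build_query((0, 0, 1, 1), ['node'], '"amenity"="toilets"')
    assert q1 == q2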