Compare commits

...

6 Commits

SHA1 Message Date
8b34f3727b remove prints 2025-01-15 10:11:22 +01:00
bb40743db2 test assertion for formatting 2025-01-15 09:09:55 +01:00
7a74606c03 formatting for tests 2025-01-15 09:00:16 +01:00
ce4b331c0a better logs 2025-01-15 08:04:09 +01:00
c5a08125f6 better clusters 2025-01-15 07:10:00 +01:00
85f70d835a parallel test to compare speeds 2025-01-15 06:58:25 +01:00
5 changed files with 165 additions and 29 deletions

@@ -1,8 +1,9 @@
 """Main app for backend api"""
 import logging
-from fastapi import FastAPI, HTTPException, Query
+import time
 from contextlib import asynccontextmanager
+from fastapi import FastAPI, HTTPException, Query

 from .logging_config import configure_logging
 from .structs.landmark import Landmark, Toilets
@@ -81,6 +82,7 @@ def new_trip(preferences: Preferences,
                              must_do=True,
                              n_tags=0)

+    start_time = time.time()
+
     # Generate the landmarks from the start location
     landmarks, landmarks_short = manager.generate_landmarks_list(
         center_coordinates = start,
@@ -91,18 +93,34 @@ def new_trip(preferences: Preferences,
     landmarks_short.insert(0, start_landmark)
     landmarks_short.append(end_landmark)

+    t_generate_landmarks = time.time() - start_time
+    start_time = time.time()
+
     # First stage optimization
     try:
         base_tour = optimizer.solve_optimization(preferences.max_time_minute, landmarks_short)
     except ArithmeticError as exc:
-        raise HTTPException(status_code=500, detail="No solution found") from exc
+        raise HTTPException(status_code=500) from exc
     except TimeoutError as exc:
         raise HTTPException(status_code=500, detail="Optimzation took too long") from exc
+    except Exception as exc:
+        raise HTTPException(status_code=500, detail=f"An unexpected error occurred: {str(exc)}") from exc
+
+    t_first_stage = time.time() - start_time
+    start_time = time.time()

     # Second stage optimization
-    refined_tour = refiner.refine_optimization(landmarks, base_tour,
-                                               preferences.max_time_minute,
-                                               preferences.detour_tolerance_minute)
+    try :
+        refined_tour = refiner.refine_optimization(landmarks, base_tour,
+                                                   preferences.max_time_minute,
+                                                   preferences.detour_tolerance_minute)
+    except Exception as exc :
+        raise HTTPException(status_code=500, detail=f"An unexpected error occurred: {str(exc)}") from exc
+
+    t_second_stage = time.time() - start_time
+
+    logger.debug(f'Generating landmarks : {round(t_generate_landmarks,3)} seconds')
+    logger.debug(f'First stage optimization : {round(t_first_stage,3)} seconds')
+    logger.debug(f'Second stage optimization : {round(t_second_stage,3)} seconds')
+    logger.info(f'Total computation time : {round(t_generate_landmarks + t_first_stage + t_second_stage,3)} seconds')
+
     linked_tour = LinkedLandmarks(refined_tour)

     # upon creation of the trip, persistence of both the trip and its landmarks is ensured.
@@ -165,7 +183,7 @@ def get_toilets(location: tuple[float, float] = Query(...), radius: int = 500) -
         raise HTTPException(status_code=406, detail="Coordinates not provided or invalid")
     if not (-90 <= location[0] <= 90 or -180 <= location[1] <= 180):
         raise HTTPException(status_code=422, detail="Start coordinates not in range")

     toilets_manager = ToiletsManager(location, radius)

     try :
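The new timing code above repeats the same start_time / elapsed-delta pattern for each stage. A minimal sketch of the same idea wrapped in a reusable helper; the timed name and the usage shown are illustrative, not part of the repository:

import logging
import time
from contextlib import contextmanager

logger = logging.getLogger(__name__)

@contextmanager
def timed(label: str):
    """Log the elapsed wall-clock time of the wrapped block."""
    start = time.time()
    try:
        yield
    finally:
        logger.debug(f'{label} : {round(time.time() - start, 3)} seconds')

# hypothetical usage mirroring the stages above:
# with timed('First stage optimization'):
#     base_tour = optimizer.solve_optimization(max_time, landmarks_short)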

@@ -1,9 +1,9 @@
 """Collection of tests to ensure correct implementation and track progress. """
+import time
 from fastapi.testclient import TestClient
 import pytest

-from .test_utils import landmarks_to_osmid, load_trip_landmarks, log_trip_details
+from .test_utils import load_trip_landmarks, log_trip_details
 from ..main import app

 @pytest.fixture(scope="module")
@@ -20,7 +20,9 @@ def test_turckheim(client, request):   # pylint: disable=redefined-outer-name
         client:
         request:
     """
+    start_time = time.time()  # Start timer
     duration_minutes = 15
+
     response = client.post(
         "/trip/new",
         json={
@@ -35,16 +37,24 @@ def test_turckheim(client, request):   # pylint: disable=redefined-outer-name
     result = response.json()
     landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

+    # Get computation time
+    comp_time = time.time() - start_time
+
     # Add details to report
     log_trip_details(request, landmarks, result['total_time'], duration_minutes)

+    # for elem in landmarks :
+    #     print(elem)
+
     # checks :
     assert response.status_code == 200  # check for successful planning
     assert isinstance(landmarks, list)  # check that the return type is a list
     assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
     assert len(landmarks) > 2           # check that there is something to visit
+    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
+    assert 2==3
+
+'''
 def test_bellecour(client, request) :   # pylint: disable=redefined-outer-name
     """
     Test n°2 : Custom test in Lyon centre to ensure proper decision making in crowded area.
@@ -53,7 +63,9 @@ def test_bellecour(client, request) :   # pylint: disable=redefined-outer-name
         client:
         request:
     """
+    start_time = time.time()  # Start timer
     duration_minutes = 120
+
     response = client.post(
         "/trip/new",
         json={
@@ -67,22 +79,102 @@ def test_bellecour(client, request) :   # pylint: disable=redefined-outer-name
         )
     result = response.json()
     landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])
-    osm_ids = landmarks_to_osmid(landmarks)
+
+    # Get computation time
+    comp_time = time.time() - start_time

     # Add details to report
     log_trip_details(request, landmarks, result['total_time'], duration_minutes)

-    for elem in landmarks :
-        print(elem)
-        print(elem.osm_id)
+    # for elem in landmarks :
+    #     print(elem)

     # checks :
     assert response.status_code == 200  # check for successful planning
+    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
     assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
-    assert 136200148 in osm_ids  # check for Cathédrale St. Jean in trip
+    # assert 2 == 3
+    # assert response.status_code == 2000  # check for successful planning
+
+
+def test_Paris(client, request) :   # pylint: disable=redefined-outer-name
+    """
+    Test n°2 : Custom test in Paris (les Halles) centre to ensure proper decision making in crowded area.
+
+    Args:
+        client:
+        request:
+    """
+    start_time = time.time()  # Start timer
+    duration_minutes = 300
+
+    response = client.post(
+        "/trip/new",
+        json={
+            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
+                            "nature": {"type": "nature", "score": 5},
+                            "shopping": {"type": "shopping", "score": 5},
+                            "max_time_minute": duration_minutes,
+                            "detour_tolerance_minute": 0},
+            "start": [48.86248803298562, 2.346451131285925]
+            }
+        )
+    result = response.json()
+    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])
+
+    # Get computation time
+    comp_time = time.time() - start_time
+
+    # Add details to report
+    log_trip_details(request, landmarks, result['total_time'], duration_minutes)
+
+    # for elem in landmarks :
+    #     print(elem)
+
+    # checks :
+    assert response.status_code == 200  # check for successful planning
+    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
+    assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
+
+
+def test_New_York(client, request) :   # pylint: disable=redefined-outer-name
+    """
+    Test n°2 : Custom test in New York (les Halles) centre to ensure proper decision making in crowded area.
+
+    Args:
+        client:
+        request:
+    """
+    start_time = time.time()  # Start timer
+    duration_minutes = 600
+
+    response = client.post(
+        "/trip/new",
+        json={
+            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
+                            "nature": {"type": "nature", "score": 5},
+                            "shopping": {"type": "shopping", "score": 5},
+                            "max_time_minute": duration_minutes,
+                            "detour_tolerance_minute": 0},
+            "start": [40.72592726802, -73.9920434795]
+            }
+        )
+    result = response.json()
+    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])
+
+    # Get computation time
+    comp_time = time.time() - start_time
+
+    # Add details to report
+    log_trip_details(request, landmarks, result['total_time'], duration_minutes)
+
+    # for elem in landmarks :
+    #     print(elem)
+
+    # checks :
+    assert response.status_code == 200  # check for successful planning
+    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
+    assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
+

 def test_shopping(client, request) :   # pylint: disable=redefined-outer-name
     """
@@ -92,7 +184,9 @@ def test_shopping(client, request) :   # pylint: disable=redefined-outer-name
         client:
         request:
     """
+    start_time = time.time()  # Start timer
     duration_minutes = 240
+
     response = client.post(
         "/trip/new",
         json={
@@ -107,12 +201,20 @@ def test_shopping(client, request) :   # pylint: disable=redefined-outer-name
     result = response.json()
     landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

+    # Get computation time
+    comp_time = time.time() - start_time
+
     # Add details to report
     log_trip_details(request, landmarks, result['total_time'], duration_minutes)

+    # for elem in landmarks :
+    #     print(elem)
+
     # checks :
     assert response.status_code == 200  # check for successful planning
+    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
     assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
+'''

 # def test_new_trip_single_prefs(client):
 #     response = client.post(
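Each test above now repeats the same timer setup and the same comp_time < 30 assertion. A sketch of how that could be shared through a pytest fixture; the fixture name and the sample test are hypothetical, not part of the repository:

import time
import pytest

@pytest.fixture
def timer():
    """Return a callable giving the seconds elapsed since the test started."""
    start = time.time()
    return lambda: time.time() - start

def test_example(timer):   # hypothetical test using the shared timer
    # ... call client.post("/trip/new", ...) here ...
    comp_time = timer()
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"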

@@ -12,6 +12,10 @@ from ..utils.get_time_separation import get_distance
 from ..constants import OSM_CACHE_DIR

+# silence the overpass logger
+logging.getLogger('OSMPythonTools').setLevel(level=logging.CRITICAL)
+

 class Cluster(BaseModel):
     """"
     A class representing an interesting area for shopping or sightseeing.
@@ -102,7 +106,6 @@ class ClusterManager:
                     points.append(coords)

             self.all_points = np.array(points)
-            self.valid = True

             # Apply DBSCAN to find clusters. Choose different settings for different cities.
             if self.cluster_type == 'shopping' and len(self.all_points) > 200 :
@@ -114,12 +117,17 @@ class ClusterManager:
             labels = dbscan.fit_predict(self.all_points)

-            # Separate clustered points and noise points
-            self.cluster_points = self.all_points[labels != -1]
-            self.cluster_labels = labels[labels != -1]
+            # Check that there are at least 2 different clusters
+            if len(set(labels)) > 2 :
+                self.logger.debug(f"Found {len(set(labels))} different clusters.")
+                # Separate clustered points and noise points
+                self.cluster_points = self.all_points[labels != -1]
+                self.cluster_labels = labels[labels != -1]

-            self.filter_clusters()     # ValueError here sometimes. I dont know why.
+                self.filter_clusters()      # Filter the clusters to keep only the largest ones.
+                self.valid = True

-            # filter the clusters to keep only the largest ones
-            self.filter_clusters()
+            else :
+                self.valid = False

     def generate_clusters(self) -> list[Landmark]:
@@ -224,6 +232,9 @@ class ClusterManager:
         for elem in result.elements():
             location = (elem.centerLat(), elem.centerLon())

+            # Skip if element has neither name or location
+            if elem.tag('name') is None :
+                continue
             if location[0] is None :
                 location = (elem.lat(), elem.lon())
                 if location[0] is None :
@@ -277,6 +288,6 @@ class ClusterManager:
             filtered_cluster_labels.append(np.full((label_counts[label],), label))  # Replicate the label

         # update the cluster points and labels with the filtered data
-        self.cluster_points = np.vstack(filtered_cluster_points)
+        self.cluster_points = np.vstack(filtered_cluster_points)  # ValueError here
         self.cluster_labels = np.concatenate(filtered_cluster_labels)
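The new guard accepts the DBSCAN output only when len(set(labels)) > 2. Since DBSCAN marks noise points with the label -1, this requires at least two real clusters whenever noise is present before cluster_points and cluster_labels are populated. A self-contained sketch of that check with synthetic data; the eps and min_samples values are illustrative, not the repository's settings:

import numpy as np
from sklearn.cluster import DBSCAN

rng = np.random.default_rng(0)
points = rng.random((300, 2))                    # stand-in for self.all_points
labels = DBSCAN(eps=0.05, min_samples=5).fit_predict(points)

# -1 marks noise, so with noise present len(set(labels)) > 2
# means at least two real clusters were found.
if len(set(labels)) > 2:
    cluster_points = points[labels != -1]
    cluster_labels = labels[labels != -1]
    valid = True
else:
    valid = False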

@@ -210,7 +210,7 @@ class LandmarkManager:
         # caution, when applying a list of selectors, overpass will search for elements that match ALL selectors simultaneously
         # we need to split the selectors into separate queries and merge the results
         for sel in dict_to_selector_list(amenity_selector):
-            self.logger.debug(f"Current selector: {sel}")
+            # self.logger.debug(f"Current selector: {sel}")

             # query_conditions = ['count_tags()>5']
             # if landmarktype == 'shopping' : # use this later for shopping clusters
@@ -232,7 +232,7 @@ class LandmarkManager:
                 includeCenter = True,
                 out = 'center'
                 )
-            self.logger.debug(f"Query: {query}")
+            # self.logger.debug(f"Query: {query}")

             try:
                 result = self.overpass.query(query)
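The two debug calls above are commented out rather than silenced. An alternative, assuming the module logs through a named logger, is to raise that logger's level once and leave the call sites intact; the logger name below is hypothetical:

import logging

# Raising the level hides the per-selector and per-query debug output
# without editing the code (sketch only).
logging.getLogger('landmarks_manager').setLevel(logging.INFO)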

@@ -466,18 +466,23 @@ class Optimizer:
         # SET CONSTRAINTS FOR EQUALITY
         A_eq, b_eq = self.init_eq_not_stay(L)            # Force solution not to stay in same place
-        A, b = self.respect_user_must_do(landmarks)      # Check if there are user_defined must_see. Also takes care of start/goal
-        A_eq = np.vstack((A_eq, A), dtype=np.int8)
-        b_eq += b
-        A, b = self.respect_user_must_avoid(landmarks)   # Check if there are user_defined must_see. Also takes care of start/goal
-        A_eq = np.vstack((A_eq, A), dtype=np.int8)
-        b_eq += b
         A, b = self.respect_start_finish(L)              # Force start and finish positions
         A_eq = np.vstack((A_eq, A), dtype=np.int8)
         b_eq += b
         A, b = self.respect_order(L)                     # Respect order of visit (only works when max_time is limiting factor)
         A_eq = np.vstack((A_eq, A), dtype=np.int8)
         b_eq += b
+        A, b = self.respect_user_must_do(landmarks)      # Check if there are user_defined must_see. Also takes care of start/goal
+        A_eq = np.vstack((A_eq, A), dtype=np.int8)
+        b_eq += b
+        A, b = self.respect_user_must_avoid(landmarks)   # Check if there are user_defined must_see. Also takes care of start/goal
+        A_eq = np.vstack((A_eq, A), dtype=np.int8)
+        b_eq += b
+
+        print(A_eq)
+        print('\n\n')
+        print(b_eq)
+        print('\n\n')

         # SET BOUNDS FOR DECISION VARIABLE (x can only be 0 or 1)
         x_bounds = [(0, 1)]*L*L
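The reordered block keeps growing the equality system by stacking each constraint's rows onto A_eq and extending b_eq before handing both to the linear solver. A toy sketch of the same stacking pattern with scipy.optimize.linprog; the constraints and objective below are made up for illustration and are not the repository's model:

import numpy as np
from scipy.optimize import linprog

# Toy equality constraint over three 0/1-relaxed variables: x0 + x1 = 1.
A_eq = np.array([[1, 1, 0]], dtype=np.int8)
b_eq = [1]

# A second constraint, x1 + x2 = 1, stacked the same way as in the diff.
A, b = np.array([[0, 1, 1]], dtype=np.int8), [1]
A_eq = np.vstack((A_eq, A), dtype=np.int8)
b_eq += b

# Maximize x0 + x1 + x2 subject to the stacked equalities and 0 <= x <= 1.
res = linprog(c=[-1, -1, -1], A_eq=A_eq, b_eq=b_eq, bounds=[(0, 1)] * 3)
print(res.x)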