Persistence for recurring API calls

This commit is contained in:
Remy Moll 2024-07-31 12:54:25 +02:00
parent db82495f11
commit 07dde5ab58
11 changed files with 118 additions and 81 deletions

backend/Dockerfile
View File

@ -13,5 +13,6 @@ EXPOSE 8000
# Set environment variables used by the deployment. These can be overridden by the user using this image.
ENV NUM_WORKERS=1
ENV OSM_CACHE_DIR=/cache
ENV MEMCACHED_HOST=none
CMD fastapi run src/main.py --port 8000 --workers $NUM_WORKERS

backend/Pipfile
View File

@ -14,3 +14,4 @@ shapely = "*"
scipy = "*"
osmpythontools = "*"
pywikibot = "*"
pymemcache = "*"

backend/Pipfile.lock (generated)
View File

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "f0de801038593d42d8b780d14c2c72bb4f5f5e66df02f72244917ede5d5ebce6"
"sha256": "4f8b3f0395b4e5352330616870da13acf41e16d1b69ba31b15fd688e90b8b628"
},
"pipfile-spec": 6,
"requires": {},
@ -1102,6 +1102,15 @@
"markers": "python_version >= '3.8'",
"version": "==2.18.0"
},
"pymemcache": {
"hashes": [
"sha256:27bf9bd1bbc1e20f83633208620d56de50f14185055e49504f4f5e94e94aff94",
"sha256:f507bc20e0dc8d562f8df9d872107a278df049fa496805c1431b926f3ddd0eab"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
"version": "==4.0.0"
},
"pyparsing": {
"hashes": [
"sha256:a1bac0ce561155ecc3ed78ca94d3c9378656ad4c94c1270de543f621420f94ad",
@ -1142,12 +1151,12 @@
},
"pywikibot": {
"hashes": [
"sha256:3f4fbc57f1765aa0fa1ccf84125bcfa475cae95b9cc0291867b751f3d4ac8fa2",
"sha256:a26d918cf88ef56fdb1421b65b09def200cc28031cdc922d72a4198fbfddd225"
"sha256:0dd8291f1a26abb9fce2c2108a90dc338274988e60d21723aec1d3b0de321b5e",
"sha256:7953fc4a6c498057e6eb7d9b762bbccb61348af0a599b89d7e246d5175b20a9b"
],
"index": "pypi",
"markers": "python_full_version >= '3.7.0'",
"version": "==9.2.1"
"version": "==9.3.0"
},
"pyyaml": {
"hashes": [
@ -1349,7 +1358,7 @@
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d",
"sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"
],
"markers": "python_version >= '3.8'",
"markers": "python_version < '3.13'",
"version": "==4.12.2"
},
"tzdata": {
@ -1658,11 +1667,11 @@
},
"xarray": {
"hashes": [
"sha256:0b91e0bc4dc0296947947640fe31ec6e867ce258d2f7cbc10bedf4a6d68340c7",
"sha256:721a7394e8ec3d592b2d8ebe21eed074ac077dc1bb1bd777ce00e41700b4866c"
"sha256:1b0fd51ec408474aa1f4a355d75c00cc1c02bd425d97b2c2e551fd21810e7f64",
"sha256:4cae512d121a8522d41e66d942fb06c526bc1fd32c2c181d5fe62fe65b671638"
],
"markers": "python_version >= '3.9'",
"version": "==2024.6.0"
"version": "==2024.7.0"
}
},
"develop": {}

backend/src/constants.py
View File

@ -25,3 +25,7 @@ logging.config.dictConfig(config)
# if we are in a debug session, set the log level to debug
if os.getenv('DEBUG', False):
logging.getLogger().setLevel(logging.DEBUG)
MEMCACHE_HOST = os.getenv('MEMCACHED_HOST', None)  # same name as the MEMCACHED_HOST env var set in the Dockerfile
if MEMCACHE_HOST == "none":
    MEMCACHE_HOST = None

backend/src/main.py
View File

@ -1,12 +1,14 @@
import logging
from fastapi import FastAPI, Query, Body
from fastapi import FastAPI, Query, Body, HTTPException
from structs.landmark import Landmark
from structs.preferences import Preferences
from structs.linked_landmarks import LinkedLandmarks
from structs.trip import Trip
from utils.landmarks_manager import LandmarkManager
from utils.optimizer import Optimizer
from utils.refiner import Refiner
from persistence import client as cache_client
logger = logging.getLogger(__name__)
@ -17,8 +19,8 @@ optimizer = Optimizer()
refiner = Refiner(optimizer=optimizer)
@app.post("/route/new")
def get_route(preferences: Preferences, start: tuple[float, float], end: tuple[float, float] | None = None) -> str:
@app.post("/trip/new")
def new_trip(preferences: Preferences, start: tuple[float, float], end: tuple[float, float] | None = None) -> Trip:
'''
Main function to call the optimizer.
:param preferences: the preferences specified by the user as the post body
@ -47,22 +49,32 @@ def get_route(preferences: Preferences, start: tuple[float, float], end: tuple[f
landmarks_short.insert(0, start_landmark)
landmarks_short.append(end_landmark)
# TODO infer these parameters from the preferences
max_walking_time = 4 # hours
detour = 30 # minutes
# First stage optimization
base_tour = optimizer.solve_optimization(max_walking_time*60, landmarks_short)
base_tour = optimizer.solve_optimization(preferences.max_time_minute, landmarks_short)
# Second stage optimization
refined_tour = refiner.refine_optimization(landmarks, base_tour, max_walking_time*60, detour)
refined_tour = refiner.refine_optimization(landmarks, base_tour, preferences.max_time_minute, preferences.detour_tolerance_minute)
linked_tour = LinkedLandmarks(refined_tour)
return linked_tour[0].uuid
# upon creation of the trip, persistence of both the trip and its landmarks is ensured
trip = Trip.from_linked_landmarks(linked_tour, cache_client)
return trip
#### For already existing trips/landmarks
@app.get("/trip/{trip_uuid}")
def get_trip(trip_uuid: str) -> Trip:
    trip = cache_client.get(f"trip_{trip_uuid}")
    if trip is None:
        raise HTTPException(status_code=404, detail="Trip not found")
    return trip
@app.get("/landmark/{landmark_uuid}")
def get_landmark(landmark_uuid: str) -> Landmark:
# search linked_tour and return the corresponding landmark
pass
    landmark = cache_client.get(f"landmark_{landmark_uuid}")
    if landmark is None:
        raise HTTPException(status_code=404, detail="Landmark not found")
    return landmark
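Note on the new lookup endpoints: a cache miss is now mapped to a clean 404 instead of an unhandled error. A minimal smoke test, assuming it is run from backend/src so that the app object in main.py can be imported (the uuid below is a deliberate placeholder):

from fastapi.testclient import TestClient
from main import app

client = TestClient(app)

# an unknown uuid is a cache miss and should come back as a 404, not a 500
response = client.get("/trip/00000000-0000-0000-0000-000000000000")
assert response.status_code == 404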

backend/src/persistence.py
View File

@ -0,0 +1,18 @@
from pymemcache.client.base import Client
from pymemcache import serde

import constants


class DummyClient:
    """In-memory fallback used when no memcached host is configured."""
    _data = {}

    def set(self, key, value, **kwargs):
        self._data[key] = value

    def get(self, key, **kwargs):
        # mirror pymemcache: a cache miss returns None instead of raising KeyError
        return self._data.get(key)


if constants.MEMCACHE_HOST is None:
    client = DummyClient()
else:
    # the pickle serde lets whole python objects be stored and read back transparently
    client = Client(constants.MEMCACHE_HOST, timeout=1, serde=serde.pickle_serde)
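The rest of the backend only relies on the client's set()/get() pair, so the memcached-backed client and the in-memory fallback stay interchangeable. A rough sketch of that contract (key names are purely illustrative):

from persistence import client as cache_client

cache_client.set("example_key", {"any": "picklable value"})
assert cache_client.get("example_key") == {"any": "picklable value"}
# a miss reads as None on both backends
assert cache_client.get("unknown_key") is None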

backend/src/structs/linked_landmarks.py
View File

@ -1,4 +1,3 @@
import uuid
from .landmark import Landmark
from utils.get_time_separation import get_time
@ -9,8 +8,7 @@ class LinkedLandmarks:
"""
_landmarks = list[Landmark]
total_time = int
uuid = str
total_time: int = 0
def __init__(self, data: list[Landmark] = None) -> None:
"""
@ -19,7 +17,6 @@ class LinkedLandmarks:
Args:
data (list[Landmark], optional): The list of landmarks that are linked together. Defaults to None.
"""
self.uuid = uuid.uuid4()
self._landmarks = data if data else []
self._link_landmarks()
@ -28,7 +25,6 @@ class LinkedLandmarks:
"""
Create the links between the landmarks in the list by setting their .next_uuid and the .time_to_next attributes.
"""
self.total_time = 0
for i, landmark in enumerate(self._landmarks[:-1]):
landmark.next_uuid = self._landmarks[i + 1].uuid
time_to_next = get_time(landmark.location, self._landmarks[i + 1].location)
@ -44,18 +40,4 @@ class LinkedLandmarks:
def __str__(self) -> str:
return f"LinkedLandmarks, total time: {self.total_time} minutes, {len(self._landmarks)} stops: [{','.join([str(landmark) for landmark in self._landmarks])}]"
def asdict(self) -> dict:
"""
Convert the linked landmarks to a json serializable dictionary.
Returns:
dict: A dictionary representation of the linked landmarks.
"""
return {
'uuid': self.uuid,
'total_time': self.total_time,
'landmarks': [landmark.dict() for landmark in self._landmarks]
}
return f"LinkedLandmarks [{' ->'.join([str(landmark) for landmark in self._landmarks])}]"

backend/src/structs/preferences.py
View File

@ -2,7 +2,6 @@ from pydantic import BaseModel
from typing import Optional, Literal
class Preference(BaseModel) :
name: str
type: Literal['sightseeing', 'nature', 'shopping', 'start', 'finish']
score: int # score could be from 1 to 5

backend/src/structs/trip.py
View File

@ -0,0 +1,28 @@
from pydantic import BaseModel, Field
from .landmark import Landmark
from .linked_landmarks import LinkedLandmarks
import uuid
class Trip(BaseModel):
    uuid: str = Field(default_factory=lambda: str(uuid.uuid4()))
    total_time: int
    first_landmark_uuid: str

    @classmethod
    def from_linked_landmarks(cls, landmarks: LinkedLandmarks, cache_client) -> "Trip":
        """
        Initialize a new Trip object and ensure it is stored in the cache.
        """
        trip = cls(
            total_time = landmarks.total_time,
            first_landmark_uuid = str(landmarks[0].uuid)
        )

        # store the trip itself and each of its landmarks under their own cache keys
        cache_client.set(f"trip_{trip.uuid}", trip)
        for landmark in landmarks:
            cache_client.set(f"landmark_{landmark.uuid}", landmark)

        return trip
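Since a trip is persisted as a head pointer (first_landmark_uuid) plus one cache entry per landmark, a consumer can rebuild the whole tour with one lookup per stop. A sketch of that walk, assuming the Landmark model leaves next_uuid as None on the last stop (as set up in linked_landmarks.py):

from persistence import client as cache_client

def landmarks_of_trip(trip_uuid: str) -> list:
    # follow the uuid chain stored by Trip.from_linked_landmarks
    trip = cache_client.get(f"trip_{trip_uuid}")
    if trip is None:
        return []
    landmarks = []
    next_uuid = trip.first_landmark_uuid
    while next_uuid is not None:
        landmark = cache_client.get(f"landmark_{next_uuid}")
        landmarks.append(landmark)
        next_uuid = landmark.next_uuid
    return landmarks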

View File

@ -20,18 +20,9 @@ def test(start_coords: tuple[float, float], finish_coords: tuple[float, float] =
preferences = Preferences(
sightseeing=Preference(
name='sightseeing',
type='sightseeing',
score = 5),
nature=Preference(
name='nature',
type='nature',
score = 5),
shopping=Preference(
name='shopping',
type='shopping',
score = 5),
sightseeing=Preference(type='sightseeing', score = 5),
nature=Preference(type='nature', score = 5),
shopping=Preference(type='shopping', score = 5),
max_time_minute=180,
detour_tolerance_minute=30

backend/src/utils/landmarks_manager.py
View File

@ -15,10 +15,6 @@ from .take_most_important import take_most_important
import constants
SIGHTSEEING = 'sightseeing'
NATURE = 'nature'
SHOPPING = 'shopping'
class LandmarkManager:
@ -74,25 +70,25 @@ class LandmarkManager:
# list for sightseeing
if preferences.sightseeing.score != 0:
score_function = lambda loc, n_tags: int((self.count_elements_close_to(loc) + ((n_tags**1.2)*self.tag_coeff) )*self.church_coeff)
L1 = self.fetch_landmarks(bbox, self.amenity_selectors['sightseeing'], SIGHTSEEING, score_function)
self.correct_score(L1, preferences.sightseeing)
L1 = self.fetch_landmarks(bbox, self.amenity_selectors['sightseeing'], preferences.sightseeing.type, score_function)
L += L1
# list for nature
if preferences.nature.score != 0:
score_function = lambda loc, n_tags: int((self.count_elements_close_to(loc) + ((n_tags**1.2)*self.tag_coeff) )*self.park_coeff)
L2 = self.fetch_landmarks(bbox, self.amenity_selectors['nature'], NATURE, score_function)
self.correct_score(L2, preferences.nature)
L2 = self.fetch_landmarks(bbox, self.amenity_selectors['nature'], preferences.nature.type, score_function)
L += L2
# list for shopping
if preferences.shopping.score != 0:
score_function = lambda loc, n_tags: int(self.count_elements_close_to(loc) + ((n_tags**1.2)*self.tag_coeff))
L3 = self.fetch_landmarks(bbox, self.amenity_selectors['shopping'], SHOPPING, score_function)
self.correct_score(L3, preferences.shopping)
L3 = self.fetch_landmarks(bbox, self.amenity_selectors['shopping'], preferences.shopping.type, score_function)
L += L3
L = self.remove_duplicates(L)
self.correct_score(L, preferences)
L_constrained = take_most_important(L, self.N_important)
self.logger.info(f'Generated {len(L)} landmarks around {center_coordinates}, and constrained to {len(L_constrained)} most important ones.')
@ -123,7 +119,7 @@ class LandmarkManager:
return L_clean
def correct_score(self, landmarks: list[Landmark], preference: Preference):
def correct_score(self, landmarks: list[Landmark], preferences: Preferences) -> None:
"""
Adjust the attractiveness score of each landmark in the list based on user preferences.
@ -132,20 +128,16 @@ class LandmarkManager:
Args:
landmarks (list[Landmark]): A list of landmarks whose scores need to be corrected.
preference (Preference): The user's preference settings that influence the attractiveness score adjustment.
Raises:
TypeError: If the type of any landmark in the list does not match the expected type in the preference.
preferences (Preferences): The user's preference settings that influence the attractiveness score adjustment.
"""
if len(landmarks) == 0:
return
if landmarks[0].type != preference.type:
raise TypeError(f"LandmarkType {preference.type} does not match the type of Landmark {landmarks[0].name}")
for elem in landmarks:
elem.attractiveness = int(elem.attractiveness*preference.score/5) # arbitrary computation
score_dict = {
preferences.sightseeing.type: preferences.sightseeing.score,
preferences.nature.type: preferences.nature.score,
preferences.shopping.type: preferences.shopping.score
}
for landmark in landmarks:
landmark.attractiveness = int(landmark.attractiveness * score_dict[landmark.type] / 5)
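With the per-type dictionary the adjustment is a single multiplication; for example (numbers purely illustrative), a shopping landmark with raw attractiveness 240 and a user shopping score of 2 ends up at:

int(240 * 2 / 5)  # -> 96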
def count_elements_close_to(self, coordinates: tuple[float, float]) -> int:
@ -310,7 +302,7 @@ class LandmarkManager:
if "leisure" in tag and elem.tag('leisure') == "park":
elem_type = "nature"
if landmarktype != SHOPPING:
if landmarktype != "shopping":
if "shop" in tag:
skip = True
break