Compare commits
8 commits: f6d0cd5360...backend/fe

Commits (SHA1):
dd277287af
f258df8e72
fd091a9ccc
f81c28f2ac
361b2b1f42
16918369d7
2c49480966
3a9ef4e7d3
.vscode/launch.json (vendored, 10 lines changed)
@@ -21,10 +21,16 @@
            ]
        },
        {
            "name": "Backend - tester",
            "name": "Backend - test",
            "type": "debugpy",
            "request": "launch",
            "program": "src/tester.py",
            "module": "pytest",
            "args": [
                "src/tests",
                "--log-cli-level=DEBUG",
                "--html=report.html",
                "--self-contained-html"
            ],
            "env": {
                "DEBUG": "true"
            },
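
The updated configuration drops the standalone src/tester.py launcher in favour of running pytest as a module; it is roughly equivalent to invoking `pytest src/tests --log-cli-level=DEBUG --html=report.html --self-contained-html` from the backend directory (assuming pytest and the pytest-html plugin are installed).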
@@ -25,3 +25,5 @@ loki-logger-handler = "*"
pulp = "*"
scipy = "*"
requests = "*"
supabase = "*"
paypalrestsdk = "*"
backend/Pipfile.lock (generated, 1077 lines changed)
File diff suppressed because it is too large
@@ -1,363 +0,0 @@
[
    {
        "name": "Chinatown",
        "type": "shopping",
        "location": [
            45.7554934,
            4.8444852
        ],
        "osm_type": "way",
        "osm_id": 996515596,
        "attractiveness": 129,
        "n_tags": 0,
        "image_url": null,
        "website_url": null,
        "wiki_url": null,
        "keywords": {},
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "285d159c-68ee-4b37-8d71-f27ee3d38b02",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Galeries Lafayette",
        "type": "shopping",
        "location": [
            45.7627107,
            4.8556833
        ],
        "osm_type": "way",
        "osm_id": 1069872743,
        "attractiveness": 197,
        "n_tags": 11,
        "image_url": null,
        "website_url": "http://www.galerieslafayette.com/",
        "wiki_url": null,
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "28f1bc30-10d3-4944-8861-0ed9abca012d",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Muji",
        "type": "shopping",
        "location": [
            45.7615971,
            4.8543781
        ],
        "osm_type": "way",
        "osm_id": 1044165817,
        "attractiveness": 259,
        "n_tags": 14,
        "image_url": null,
        "website_url": "https://www.muji.com/fr/",
        "wiki_url": null,
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": "Muji",
        "uuid": "957f86a5-6c00-41a2-815d-d6f739052be4",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "HEMA",
        "type": "shopping",
        "location": [
            45.7619133,
            4.8565239
        ],
        "osm_type": "way",
        "osm_id": 1069872750,
        "attractiveness": 156,
        "n_tags": 9,
        "image_url": null,
        "website_url": "https://fr.westfield.com/lapartdieu/store/HEMA/www.hema.fr",
        "wiki_url": null,
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "8dae9d3e-e4c4-4e80-941d-0b106e22c85b",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Cordeliers",
        "type": "shopping",
        "location": [
            45.7622752,
            4.8337998
        ],
        "osm_type": "node",
        "osm_id": 5545183519,
        "attractiveness": 813,
        "n_tags": 0,
        "image_url": null,
        "website_url": null,
        "wiki_url": null,
        "keywords": {},
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "ba02adb5-e28f-4645-8c2d-25ead6232379",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Halles de Lyon Paul Bocuse",
        "type": "shopping",
        "location": [
            45.7628282,
            4.8505601
        ],
        "osm_type": "relation",
        "osm_id": 971529,
        "attractiveness": 272,
        "n_tags": 12,
        "image_url": null,
        "website_url": "https://www.halles-de-lyon-paulbocuse.com/",
        "wiki_url": "fr:Halles de Lyon-Paul Bocuse",
        "keywords": {
            "importance": "national",
            "height": null,
            "place_type": "marketplace",
            "date": null
        },
        "description": "Halles de Lyon Paul Bocuse is a marketplace of national importance.",
        "duration": 30,
        "name_en": null,
        "uuid": "bbd50de3-aa91-425d-90c2-d4abfd1b4abe",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Grand Bazar",
        "type": "shopping",
        "location": [
            45.7632141,
            4.8361975
        ],
        "osm_type": "way",
        "osm_id": 82399951,
        "attractiveness": 93,
        "n_tags": 7,
        "image_url": null,
        "website_url": null,
        "wiki_url": null,
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "3de9131c-87c5-4efb-9fa8-064896fb8b29",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Shopping Area",
        "type": "shopping",
        "location": [
            45.7673452,
            4.8438683
        ],
        "osm_type": "node",
        "osm_id": 0,
        "attractiveness": 156,
        "n_tags": 0,
        "image_url": null,
        "website_url": null,
        "wiki_url": null,
        "keywords": {},
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "df2482a8-7e2e-4536-aad3-564899b2fa65",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Cour Oxyg\u00e8ne",
        "type": "shopping",
        "location": [
            45.7620905,
            4.8568873
        ],
        "osm_type": "way",
        "osm_id": 132673030,
        "attractiveness": 63,
        "n_tags": 5,
        "image_url": null,
        "website_url": null,
        "wiki_url": null,
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "ed134f76-9a02-4bee-9c10-78454f7bc4ce",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "P\u00f4le de Commerces et de Loisirs Confluence",
        "type": "shopping",
        "location": [
            45.7410414,
            4.8171031
        ],
        "osm_type": "way",
        "osm_id": 440270633,
        "attractiveness": 259,
        "n_tags": 14,
        "image_url": null,
        "website_url": "https://www.confluence.fr/",
        "wiki_url": null,
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "dd7e2f5f-0e60-4560-b903-e5ded4b6e36a",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Grand H\u00f4tel-Dieu",
        "type": "shopping",
        "location": [
            45.7586955,
            4.8364597
        ],
        "osm_type": "relation",
        "osm_id": 300128,
        "attractiveness": 546,
        "n_tags": 22,
        "image_url": null,
        "website_url": "https://grand-hotel-dieu.com",
        "wiki_url": "fr:H\u00f4tel-Dieu de Lyon",
        "keywords": {
            "importance": "international",
            "height": null,
            "place_type": "building",
            "date": "C17"
        },
        "description": "Grand H\u00f4tel-Dieu is an internationally famous building. It was constructed in C17.",
        "duration": 30,
        "name_en": null,
        "uuid": "a91265a8-ffbd-44f7-a7ab-3ff75f08fbab",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Westfield La Part-Dieu",
        "type": "shopping",
        "location": [
            45.761331,
            4.855676
        ],
        "osm_type": "way",
        "osm_id": 62338376,
        "attractiveness": 546,
        "n_tags": 22,
        "image_url": null,
        "website_url": "https://fr.westfield.com/lapartdieu",
        "wiki_url": "fr:La Part-Dieu (centre commercial)",
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "7d60316f-d689-4fcf-be68-ffc09353b826",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Ainay",
        "type": "shopping",
        "location": [
            45.7553105,
            4.8312084
        ],
        "osm_type": "node",
        "osm_id": 5545126047,
        "attractiveness": 132,
        "n_tags": 0,
        "image_url": null,
        "website_url": null,
        "wiki_url": null,
        "keywords": {},
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "ad214f3d-a4b9-4078-876a-446caa7ab01c",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    }
]
File diff suppressed because one or more lines are too long
@@ -49,7 +49,7 @@ This file configures the logging system for the application. It defines how logs
This file contains the main application logic and API endpoints for interacting with the system. The application is built using the FastAPI framework, which provides several endpoints for creating trips, fetching trips, and retrieving landmarks or nearby facilities. The key endpoints include:

- **POST /trip/new**:
  - This endpoint allows users to create a new trip by specifying preferences, start coordinates, and optionally end coordinates. The preferences guide the optimization process for selecting landmarks.
  - This endpoint allows users to create a new trip by specifying user_id, preferences, start coordinates, and optionally end coordinates. The preferences guide the optimization process for selecting landmarks. The user id is needed to verify the user's credit balance.
  - Returns: A `Trip` object containing the optimized route, landmarks, and trip details.

- **GET /trip/{trip_uuid}**:
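
For reference, a request against the updated endpoint might look like the following sketch (host, port, and coordinates are assumptions for a local development server; the body fields mirror the revised new_trip signature shown further below):

import requests

# Hypothetical call to POST /trip/new after this change; user_id must
# reference an existing row in the Supabase credits table.
payload = {
    "user_id": "12345678-1234-5678-1234-567812345678",
    "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                    "nature": {"type": "nature", "score": 0},
                    "shopping": {"type": "shopping", "score": 0},
                    "max_time_minute": 20,
                    "detour_tolerance_minute": 0},
    "start": [45.7640, 4.8357],
}
response = requests.post("http://localhost:8000/trip/new", json=payload)
print(response.status_code, response.json())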
@@ -12,6 +12,14 @@ LANDMARK_PARAMETERS_PATH = PARAMETERS_DIR / 'landmark_parameters.yaml'
OPTIMIZER_PARAMETERS_PATH = PARAMETERS_DIR / 'optimizer_parameters.yaml'


PAYPAL_CLIENT_ID = os.getenv("future-paypal-client-id", None)
PAYPAL_SECRET = os.getenv("future-paypal-secret", None)
PAYPAL_API_URL = "https://api-m.sandbox.paypal.com"

SUPABASE_URL = os.getenv("SUPABASE_URL", None)
SUPABASE_KEY = os.getenv("SUPABASE_API_KEY", None)


cache_dir_string = os.getenv('OSM_CACHE_DIR', './cache')
OSM_CACHE_DIR = Path(cache_dir_string)
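
A minimal local environment for these settings might look like the sketch below (all values are placeholders; note that the PayPal credentials are read from the unusually named future-paypal-* environment keys shown above):

import os

# Placeholder configuration for local development only.
os.environ.setdefault("SUPABASE_URL", "https://<project>.supabase.co")
os.environ.setdefault("SUPABASE_API_KEY", "<service-role-or-anon-key>")
os.environ.setdefault("OSM_CACHE_DIR", "./cache")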
@@ -1,20 +1,24 @@
"""Main app for backend api"""

import logging
import time
from contextlib import asynccontextmanager
from fastapi import FastAPI, HTTPException, BackgroundTasks
from fastapi import FastAPI, HTTPException, BackgroundTasks, Query, Body

from .logging_config import configure_logging
from .structs.landmark import Landmark
from .structs.landmark import Landmark, Toilets
from .structs.preferences import Preferences
from .structs.linked_landmarks import LinkedLandmarks
from .structs.trip import Trip
from .landmarks.landmarks_manager import LandmarkManager
from .toilets.toilet_routes import router as toilets_router
from .utils.landmarks_manager import LandmarkManager
from .utils.toilets_manager import ToiletsManager
from .optimization.optimizer import Optimizer
from .optimization.refiner import Refiner
from .overpass.overpass import fill_cache
from .cache import client as cache_client
from .payments.supabase import Supabase
from .payments.payment_routes import router as payment_router
from .payments.supabase_routes import router as supabase_router


logger = logging.getLogger(__name__)
@@ -22,6 +26,7 @@ logger = logging.getLogger(__name__)
manager = LandmarkManager()
optimizer = Optimizer()
refiner = Refiner(optimizer=optimizer)
supabase = Supabase()


@asynccontextmanager
@@ -36,14 +41,16 @@ async def lifespan(app: FastAPI):
app = FastAPI(lifespan=lifespan)


app.include_router(toilets_router)
# Include the payment routes and supabase routes
app.include_router(payment_router)
app.include_router(supabase_router)


@app.post("/trip/new")
def new_trip(preferences: Preferences,
             start: tuple[float, float],
             end: tuple[float, float] | None = None,
def new_trip(user_id: str = Body(...),
             preferences: Preferences = Body(...),
             start: tuple[float, float] = Body(...),
             end: tuple[float, float] | None = Body(None),
             background_tasks: BackgroundTasks = None) -> Trip:
    """
    Main function to call the optimizer.
@@ -55,6 +62,19 @@ def new_trip(preferences: Preferences,
    Returns:
        (uuid) : The uuid of the first landmark in the optimized route
    """
    # Check for valid user balance.
    try:
        if not supabase.check_balance(user_id=user_id):
            logger.warning('Insufficient credits to perform this action.')
            return {"error": "Insufficient credits"}, 400  # Return a 400 Bad Request with an appropriate message
    except SyntaxError as se :
        raise HTTPException(status_code=400, detail=str(se)) from se
    except ValueError as ve :
        raise HTTPException(status_code=406, detail=str(ve)) from ve
    except Exception as exc:
        raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(exc)}") from exc

    # Check for invalid input.
    if preferences is None:
        raise HTTPException(status_code=406, detail="Preferences not provided or incomplete.")
    if (preferences.shopping.score == 0 and
@@ -69,8 +89,6 @@ def new_trip(preferences: Preferences,
        end = start
        logger.info("No end coordinates provided. Using start=end.")

    logger.info(f"Requested new trip generation. Details:\n\tCoordinates: {start}\n\tTime: {preferences.max_time_minute}\n\tSightseeing: {preferences.sightseeing.score}\n\tNature: {preferences.nature.score}\n\tShopping: {preferences.shopping.score}")

    start_landmark = Landmark(name='start',
                              type='start',
                              location=(start[0], start[1]),
@@ -92,7 +110,6 @@ def new_trip(preferences: Preferences,
                              n_tags=0)

    start_time = time.time()

    # Generate the landmarks from the start location
    landmarks, landmarks_short = manager.generate_landmarks_list(
        center_coordinates = start,
@@ -114,7 +131,6 @@ def new_trip(preferences: Preferences,
    try:
        base_tour = optimizer.solve_optimization(preferences.max_time_minute, landmarks_short)
    except Exception as exc:
        logger.error(f"Trip generation failed: {str(exc)}")
        raise HTTPException(status_code=500, detail=f"Optimization failed: {str(exc)}") from exc

    t_first_stage = time.time() - start_time
@@ -126,9 +142,11 @@
        refined_tour = refiner.refine_optimization(landmarks, base_tour,
                                                   preferences.max_time_minute,
                                                   preferences.detour_tolerance_minute)
    except Exception as exc :
        logger.warning(f"Refiner failed. Proceeding with base trip {str(exc)}")
    except TimeoutError as te :
        logger.error(f'Refiner failed : {str(te)} Using base tour.')
        refined_tour = base_tour
    except Exception as exc :
        raise HTTPException(status_code=500, detail=f"An unexpected error occurred: {str(exc)}") from exc

    t_second_stage = time.time() - start_time
@@ -143,6 +161,7 @@
    logger.debug('Detailed trip :\n\t' + '\n\t'.join(f'{landmark}' for landmark in refined_tour))

    background_tasks.add_task(fill_cache)
    supabase.decrement_credit_balance(user_id=user_id)

    return trip
@@ -163,7 +182,6 @@ def get_trip(trip_uuid: str) -> Trip:
        trip = cache_client.get(f"trip_{trip_uuid}")
        return trip
    except KeyError as exc:
        logger.error(f"Failed to fetch trip with UUID {trip_uuid}: {str(exc)}")
        raise HTTPException(status_code=404, detail="Trip not found") from exc
@@ -182,7 +200,6 @@ def get_landmark(landmark_uuid: str) -> Landmark:
        landmark = cache_client.get(f"landmark_{landmark_uuid}")
        return landmark
    except KeyError as exc:
        logger.error(f"Failed to fetch landmark with UUID {landmark_uuid}: {str(exc)}")
        raise HTTPException(status_code=404, detail="Landmark not found") from exc
@@ -201,7 +218,6 @@ def update_trip_time(trip_uuid: str, removed_landmark_uuid: str) -> Trip:
    try:
        trip = cache_client.get(f'trip_{trip_uuid}')
    except KeyError as exc:
        logger.error(f"Failed to update trip with UUID {trip_uuid} (trip not found): {str(exc)}")
        raise HTTPException(status_code=404, detail='Trip not found') from exc

    landmarks = []
@@ -216,7 +232,6 @@ def update_trip_time(trip_uuid: str, removed_landmark_uuid: str) -> Trip:
            landmarks.append(landmark)
            next_uuid = landmark.next_uuid  # Prepare for the next iteration
    except KeyError as exc:
        logger.error(f"Failed to update trip with UUID {trip_uuid} : {str(exc)}")
        raise HTTPException(status_code=404, detail=f'landmark {next_uuid} not found') from exc

    # Re-link everything and compute times again
@@ -224,3 +239,35 @@
    trip = Trip.from_linked_landmarks(linked_tour, cache_client)

    return trip


@app.post("/toilets/new")
def get_toilets(location: tuple[float, float] = Query(...), radius: int = 500) -> list[Toilets] :
    """
    Endpoint to find toilets within a specified radius from a given location.

    This endpoint expects the `location` and `radius` as **query parameters**, not in the request body.

    Args:
        location (tuple[float, float]): The latitude and longitude of the location to search from.
        radius (int, optional): The radius (in meters) within which to search for toilets. Defaults to 500 meters.

    Returns:
        list[Toilets]: A list of Toilets objects that meet the criteria.
    """
    if location is None:
        raise HTTPException(status_code=406, detail="Coordinates not provided or invalid")
    if not (-90 <= location[0] <= 90 or -180 <= location[1] <= 180):
        raise HTTPException(status_code=422, detail="Start coordinates not in range")

    toilets_manager = ToiletsManager(location, radius)

    try :
        toilets_list = toilets_manager.generate_toilet_list()
        return toilets_list
    except KeyError as exc:
        raise HTTPException(status_code=404, detail="No toilets found") from exc
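
Because the endpoint reads its inputs from the query string, a call might look like the sketch below (local server assumed; requests encodes the two-element list as a repeated location parameter, which FastAPI parses back into the tuple). Note also that the range check above uses or, so it only rejects coordinates when both components are out of range:

import requests

# Hypothetical query for toilets within 500 m of a point in Lyon.
resp = requests.post("http://localhost:8000/toilets/new",
                     params={"location": [45.7640, 4.8357], "radius": 500})
print(resp.status_code, resp.json())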
@@ -257,6 +257,7 @@ class Optimizer:
        Returns:
            None: This function modifies the `prob` object by adding L-2 equality constraints in-place.
        """
        # FIXME: weird 0 artifact in the coefficients popping up
        # Loop through rows 1 to L-2 to prevent stacked ones
        for i in range(1, L-1):
            # Add the constraint that sums across each "row" or "block" in the decision variables
@@ -589,7 +590,7 @@ class Optimizer:
        try :
            prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit+1, gapRel=self.gap_rel))
        except Exception as exc :
            raise Exception(f"No solution found: {str(exc)}") from exc
            raise Exception(f"No solution found: {exc}") from exc
        status = pl.LpStatus[prob.status]
        solution = [pl.value(var) for var in x]  # The values of the decision variables (will be 0 or 1)
@@ -597,7 +598,7 @@

        # Raise error if no solution is found. FIXME: for now this throws the internal server error
        if status != 'Optimal' :
            self.logger.warning("The problem is overconstrained, no solution on first try.")
            self.logger.error("The problem is overconstrained, no solution on first try.")
            raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.")

        # If there is a solution, we're good to go, just check for connectedness
@@ -607,7 +608,7 @@
        while circles is not None :
            i += 1
            if i == self.max_iter :
                self.logger.warning(f'Timeout: No solution found after {self.max_iter} iterations.')
                self.logger.error(f'Timeout: No solution found after {self.max_iter} iterations.')
                raise TimeoutError(f"Optimization took too long. No solution found after {self.max_iter} iterations.")

            for circle in circles :
@@ -617,13 +618,12 @@
            try :
                prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit, gapRel=self.gap_rel))
            except Exception as exc :
                self.logger.warning("No solution found: {str(exc)")
                raise Exception(f"No solution found: {str(exc)}") from exc
                raise Exception(f"No solution found: {exc}") from exc

            solution = [pl.value(var) for var in x]

            if pl.LpStatus[prob.status] != 'Optimal' :
                self.logger.warning("The problem is overconstrained, no solution after {i} cycles.")
                self.logger.error("The problem is overconstrained, no solution after {i} cycles.")
                raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.")

            circles = self.is_connected(solution)
@@ -278,7 +278,7 @@ class Refiner :
        better_tour_poly = concave_hull(MultiPoint(coords))  # Create concave hull with "core" of tour leaving out start and finish
        xs, ys = better_tour_poly.exterior.xy
        """
        FIXED : ERROR HERE :
        ERROR HERE :
        Exception has occurred: AttributeError
        'LineString' object has no attribute 'exterior'
        """
@@ -356,7 +356,7 @@ class Refiner :

        # If unsuccessful optimization, use the base_tour.
        if new_tour is None:
            self.logger.warning("Refiner failed: No solution found during second stage optimization.")
            self.logger.warning("No solution found for the refined tour. Returning the initial tour.")
            new_tour = base_tour

        # If only one landmark, return it.
@@ -369,7 +369,6 @@ class Refiner :
        # Fix the tour using Polygons if the path looks weird.
        # Conditions : circular trip and invalid polygon.
        if base_tour[0].location == base_tour[-1].location and not better_poly.is_valid :
            self.logger.debug("Tours might be funky, attempting to correct with polygons")
            better_tour = self.fix_using_polygon(better_tour)

        return better_tour
@@ -1,4 +1,3 @@
"""Module defining the handling of cache data from Overpass requests."""
import os
import json
import hashlib
@@ -62,7 +61,7 @@ class JSONCache(CachingStrategyBase):
            return None

    def set(self, key, value):
        """Save the JSON data in the cache."""
        """Save the JSON data as an ElementTree to the cache."""
        filename = self._filename(key)
        try:
            # Write the JSON data to the cache file
@@ -95,7 +94,7 @@ class JSONCache(CachingStrategyBase):

    def close(self):
        """Cleanup method, if needed."""

        pass

class CachingStrategy:
    """
@@ -108,7 +107,6 @@ class CachingStrategy:

    @classmethod
    def use(cls, strategy_name='JSON', **kwargs):
        """Define the caching strategy to use."""
        if cls.__strategy:
            cls.__strategy.close()
@@ -121,12 +119,10 @@

    @classmethod
    def get(cls, key):
        """Get the data from the cache."""
        return cls.__strategy.get(key)

    @classmethod
    def set(cls, key, value):
        """Save the data in the cache."""
        cls.__strategy.set(key, value)

    @classmethod
@@ -1,6 +1,5 @@
"""Module allowing connection to the overpass api and fetching data from OSM."""
import os
import time
import urllib
import math
import logging
@@ -60,11 +59,13 @@ class Overpass :
            return Overpass._filter_landmarks(cached_responses, bbox)

        # If there is no cached data, fetch all from Overpass.
        if not cached_responses :
        elif not cached_responses :
            query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)
            self.logger.debug(f'Query string: {query_str}')
            return self.fetch_data_from_api(query_str)

        # Hybrid cache: some data from Overpass, some data from cache.
        else :
            # Resize the bbox for smaller search area and build new query string.
            non_cached_bbox = Overpass._get_non_cached_bbox(non_cached_cells, bbox)
            query_str = Overpass.build_query(non_cached_bbox, osm_types, selector, conditions, out)
@@ -95,10 +96,9 @@ class Overpass :
            return elements

        except urllib.error.URLError as e:
            self.logger.error(f"Error connecting to Overpass API: {str(e)}")
            raise ConnectionError(f"Error connecting to Overpass API: {str(e)}") from e
            self.logger.error(f"Error connecting to Overpass API: {e}")
            raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
        except Exception as exc :
            self.logger.error(f"unexpected error while fetching data from Overpass: {str(exc)}")
            raise Exception(f'An unexpected error occurred: {str(exc)}') from exc
@@ -122,7 +122,7 @@ class Overpass :
            self.caching_strategy.set(cache_key, elements)
            self.logger.debug(f'Cache set for {cache_key}')
        except urllib.error.URLError as e:
            raise ConnectionError(f"Error connecting to Overpass API: {str(e)}") from e
            raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
        except Exception as exc :
            raise Exception(f'An unexpected error occurred: {str(exc)}') from exc
@@ -153,7 +153,7 @@ class Overpass :
        - If no conditions are provided, the query will just use the `selector` to filter the OSM
        elements without additional constraints.
        """
        query = '[out:json][timeout:20];('
        query = '[out:json];('

        # convert the bbox to string.
        bbox_str = f"({','.join(map(str, bbox))})"
@@ -388,7 +388,7 @@ def get_base_info(elem: dict, osm_type: OSM_TYPES, with_name=False) :
    if with_name :
        name = elem.get('tags', {}).get('name')
        return osm_id, coords, name
    else :
        return osm_id, coords
@@ -399,25 +399,18 @@ def fill_cache():
    """
    overpass = Overpass()

    n_files = 0
    total = 0

    with os.scandir(OSM_CACHE_DIR) as it:
        for entry in it:
            if entry.is_file() and entry.name.startswith('hollow_'):
                total += 1
                try :
                    # Read the whole file content as a string
                    with open(entry.path, 'r', encoding='utf-8') as f:
                    with open(entry.path, 'r') as f:
                        # load data and fill the cache with the query and key
                        json_data = json.load(f)
                        overpass.fill_cache(json_data)
                        n_files += 1
                        time.sleep(1)
                    # Now delete the file as the cache is filled
                    os.remove(entry.path)
                except Exception as exc :
                    overpass.logger.error(f'An error occurred while parsing file {entry.path} as .json file: {str(exc)}')

    overpass.logger.info(f"Successfully filled {n_files}/{total} cache files.")
                    overpass.logger.error(f'An error occurred while parsing file {entry.path} as .json file')
@@ -72,7 +72,6 @@ sightseeing:
    # - castle
    # - museum


museums:
    tourism:
        - museum
@@ -6,4 +6,4 @@ max_landmarks_refiner: 20
overshoot: 0.0016
time_limit: 1
gap_rel: 0.025
max_iter: 80
max_iter: 40
backend/src/payments/payment_handler.py (new file, 70 lines)
@@ -0,0 +1,70 @@
from typing import Literal
import paypalrestsdk
from pydantic import BaseModel
from fastapi import HTTPException
import logging


# Model for payment request body
class PaymentRequest(BaseModel):
    user_id: str
    credit_amount: Literal[10, 50, 100]
    currency: Literal["USD", "EUR", "CHF"]
    description: str = "Purchase of credits"


# Payment handler class for managing PayPal payments
class PaymentHandler:

    payment_id: str

    def __init__(self, transaction_details: PaymentRequest):
        self.details = transaction_details
        self.logger = logging.getLogger(__name__)

    # Only support purchase of credit 'bundles': 10, 50 or 100 credits worth of trip generation
    def fetch_price(self) -> float:
        """
        Fetches the price of credits in the specified currency.
        """
        result = self.supabase.table("prices").select("credit_amount").eq("currency", self.details.currency).single().execute()
        if result.data:
            return result.data.get("price")
        else:
            self.logger.error(f"Unsupported currency: {self.details.currency}")
            return None

    def create_paypal_payment(self) -> str:
        """
        Creates a PayPal payment and returns the approval URL.
        """
        price = self.fetch_price()
        payment = paypalrestsdk.Payment({
            "intent": "sale",
            "payer": {
                "payment_method": "paypal"
            },
            "transactions": [{
                "amount": {
                    "total": f"{price:.2f}",
                    "currency": self.details.currency
                },
                "description": self.details.description
            }],
            "redirect_urls": {
                "return_url": "http://localhost:8000/payment/success",
                "cancel_url": "http://localhost:8000/payment/cancel"
            }
        })

        if payment.create():
            self.logger.info("Payment created successfully")
            self.payment_id = payment.id

            # Get the approval URL and return it for the user to approve
            for link in payment.links:
                if link.rel == "approval_url":
                    return link.href
        else:
            self.logger.error(f"Failed to create payment: {payment.error}")
            raise HTTPException(status_code=500, detail="Payment creation failed")
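
A quick usage sketch of the new handler (values are hypothetical). Note that fetch_price reads self.supabase, which the constructor above never assigns, so the sketch attaches a client manually:

from .payment_handler import PaymentRequest, PaymentHandler
from .supabase import Supabase

request = PaymentRequest(user_id="some-user-uuid", credit_amount=50, currency="EUR")
handler = PaymentHandler(request)
handler.supabase = Supabase().supabase  # assumption: fetch_price expects a supabase client here
approval_url = handler.create_paypal_payment()
print(f"Redirect the user to: {approval_url}")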
backend/src/payments/payment_routes.py (new file, 79 lines)
@@ -0,0 +1,79 @@
import logging
import paypalrestsdk
from fastapi import HTTPException, APIRouter

from .payment_handler import PaymentRequest, PaymentHandler
from .supabase import Supabase

# Set up logging and supabase
logger = logging.getLogger(__name__)
supabase = Supabase()

# Configure PayPal SDK
paypalrestsdk.configure({
    "mode": "sandbox",  # Use 'live' for production
    "client_id": "YOUR_PAYPAL_CLIENT_ID",
    "client_secret": "YOUR_PAYPAL_SECRET"
})


# Define the API router
router = APIRouter()

@router.post("/purchase/credits")
def purchase_credits(payment_request: PaymentRequest):
    """
    Handles token purchases. Calculates the number of tokens based on the amount paid,
    updates the user's balance, and processes PayPal payment.
    """
    payment_handler = PaymentHandler(payment_request)

    # Create PayPal payment and get the approval URL
    approval_url = payment_handler.create_paypal_payment()

    return {
        "message": "Purchase initiated successfully",
        "payment_id": payment_handler.payment_id,
        "credits": payment_request.credit_amount,
        "approval_url": approval_url,
    }


@router.get("/payment/success")
def payment_success(paymentId: str, PayerID: str):
    """
    Handles successful PayPal payment.
    """
    payment = paypalrestsdk.Payment.find(paymentId)

    if payment.execute({"payer_id": PayerID}):
        logger.info("Payment executed successfully")

        # Retrieve transaction details from the database
        result = supabase.table("pending_payments").select("*").eq("payment_id", paymentId).single().execute()
        if not result.data:
            raise HTTPException(status_code=404, detail="Transaction not found")

        # Extract the necessary information
        user_id = result.data["user_id"]
        credit_amount = result.data["credit_amount"]

        # Update the user's balance
        supabase.increment_credit_balance(user_id, amount=credit_amount)

        # Optionally, delete the pending payment entry since the transaction is completed
        supabase.table("pending_payments").delete().eq("payment_id", paymentId).execute()

        return {"message": "Payment completed successfully"}
    else:
        logger.error(f"Payment execution failed: {payment.error}")
        raise HTTPException(status_code=500, detail="Payment execution failed")


@router.get("/payment/cancel")
def payment_cancel():
    """
    Handles PayPal payment cancellation.
    """
    return {"message": "Payment was cancelled"}
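
End to end, the purchase flow is: POST /purchase/credits returns a PayPal approval URL, the user approves there, and PayPal redirects to GET /payment/success, which credits the balance. A sketch of the first call (local server assumed; credit_amount is limited to 10, 50 or 100):

import requests

resp = requests.post("http://localhost:8000/purchase/credits",
                     json={"user_id": "some-user-uuid",  # hypothetical
                           "credit_amount": 10,
                           "currency": "EUR"})
print(resp.json()["approval_url"])  # the user approves the payment at this URL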
backend/src/payments/supabase.py (new file, 170 lines)
@@ -0,0 +1,170 @@
import os
import logging
import yaml
from fastapi import HTTPException, status
from supabase import create_client, Client, ClientOptions

from ..constants import PARAMETERS_DIR

# Silence the supabase logger
logging.getLogger("httpx").setLevel(logging.CRITICAL)
logging.getLogger("hpack").setLevel(logging.CRITICAL)
logging.getLogger("httpcore").setLevel(logging.CRITICAL)


class Supabase:

    logger = logging.getLogger(__name__)

    def __init__(self):

        with open(os.path.join(PARAMETERS_DIR, 'secrets.yaml')) as f:
            secrets = yaml.safe_load(f)
            self.SUPABASE_URL = secrets['SUPABASE_URL']
            self.SUPABASE_ADMIN_KEY = secrets['SUPABASE_ADMIN_KEY']
            self.SUPABASE_TEST_USER_ID = secrets['SUPABASE_TEST_USER_ID']

        self.supabase = create_client(
            self.SUPABASE_URL,
            self.SUPABASE_ADMIN_KEY,
            options=ClientOptions(schema='public')
        )
        self.logger.debug('Supabase client initialized.')


    def check_balance(self, user_id: str) -> bool:
        """
        Checks if the user has enough 'credit' for generating a new trip.

        Args:
            user_id (str): The ID of the current user.

        Returns:
            bool: True if the balance is positive, False otherwise.
        """
        try:
            # Query the public.credits table to get the user's credits
            response = (
                self.supabase.table("credits")
                .select('*')
                .eq('id', user_id)
                .single()
                .execute()
            )
            # self.logger.critical(response)

        except Exception as e:
            if e.code == '22P02' :
                self.logger.error(f"Failed querying credits : {str(e)}")
                raise SyntaxError(f"Failed querying credits : {str(e)}") from e
            if e.code == 'PGRST116' :
                self.logger.error(f"User not found : {str(e)}")
                raise ValueError(f"User not found : {str(e)}") from e
            else :
                self.logger.error(f"An unexpected error occurred while checking user balance : {str(e)}")
                raise Exception(f"An unexpected error occurred while checking user balance : {str(e)}") from e

        # Proceed to check the user's credit balance
        credits = response.data['credit_amount']
        self.logger.debug(f'Credits of user {user_id}: {credits}')

        if credits > 0:
            self.logger.info(f'Credit balance is positive for user {user_id}. Proceeding with trip generation.')
            return True

        self.logger.warning(f'Insufficient balance for user {user_id}. Trip generation cannot proceed.')
        return False


    def decrement_credit_balance(self, user_id: str, amount: int=1) -> bool:
        """
        Decrements the user's credit balance by the given amount (default 1).

        Args:
            user_id (str): The ID of the current user.
        """
        try:
            # Query the public.credits table to get the user's current credits
            response = (
                self.supabase.table("credits")
                .select('*')
                .eq('id', user_id)
                .single()
                .execute()
            )
        except Exception as e:
            if e.code == '22P02' :
                self.logger.error(f"Failed decrementing credits : {str(e)}")
                raise SyntaxError(f"Failed decrementing credits : {str(e)}") from e
            if e.code == 'PGRST116' :
                self.logger.error(f"User not found : {str(e)}")
                raise ValueError(f"User not found : {str(e)}") from e
            else :
                self.logger.error(f"An unexpected error occurred while decrementing user balance : {str(e)}")
                raise Exception(f"An unexpected error occurred while decrementing user balance : {str(e)}") from e


        current_credits = response.data['credit_amount']
        updated_credits = current_credits - amount

        # Update the user's credits in the table
        update_response = (
            self.supabase.table('credits')
            .update({'credit_amount': updated_credits})
            .eq('id', user_id)
            .execute()
        )

        # Check if the update was successful
        if update_response.data:
            self.logger.debug(f'Credit balance successfully decremented.')
            return True
        else:
            raise Exception("Error decrementing credit balance.")


    def increment_credit_balance(self, user_id: str, amount: int=1) -> bool:
        """
        Increments the user's credit balance by the given amount (default 1).

        Args:
            user_id (str): The ID of the current user.
        """
        try:
            # Query the public.credits table to get the user's current credits
            response = (
                self.supabase.table("credits")
                .select('*')
                .eq('id', user_id)
                .single()
                .execute()
            )
        except Exception as e:
            if e.code == '22P02' :
                self.logger.error(f"Failed incrementing credits : {str(e)}")
                raise SyntaxError(f"Failed incrementing credits : {str(e)}") from e
            if e.code == 'PGRST116' :
                self.logger.error(f"User not found : {str(e)}")
                raise ValueError(f"User not found : {str(e)}") from e
            else :
                self.logger.error(f"An unexpected error occurred while incrementing user balance : {str(e)}")
                raise Exception(f"An unexpected error occurred while incrementing user balance : {str(e)}") from e


        current_credits = response.data['credit_amount']
        updated_credits = current_credits + amount

        # Update the user's credits in the table
        update_response = (
            self.supabase.table('credits')
            .update({'credit_amount': updated_credits})
            .eq('id', user_id)
            .execute()
        )

        # Check if the update was successful
        if update_response.data:
            self.logger.debug(f'Credit balance successfully incremented.')
            return True
        else:
            raise Exception("Error incrementing credit balance.")
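
The credit lifecycle used by /trip/new then reads roughly as below (a sketch; the test user id is a placeholder pulled from secrets.yaml):

from .payments.supabase import Supabase

supabase = Supabase()
user_id = supabase.SUPABASE_TEST_USER_ID  # placeholder test account

if supabase.check_balance(user_id=user_id):
    # ... generate and return the trip ...
    supabase.decrement_credit_balance(user_id=user_id)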
backend/src/payments/supabase_routes.py (new file, 52 lines)
@@ -0,0 +1,52 @@
"""Endpoints for supabase user handling."""
import logging
from fastapi import APIRouter, HTTPException

from .supabase import Supabase


# Set up logging and supabase.
logger = logging.getLogger(__name__)
supabase = Supabase()

# Create fastapi router
router = APIRouter()


@router.post("/user/create/{email}/{password}")
def register_user(email: str, password: str) -> str:
    try:
        response = supabase.supabase.auth.admin.create_user({
            "email": email,
            "password": password
        })

    except Exception as e:
        if e.code == 'email_exists' :
            logger.error(f"Failed to create user : {str(e.code)}")
            raise HTTPException(status_code=422, detail=str(e)) from e
        logger.error(f"Failed to create user : {str(e.code)}")
        raise HTTPException(status_code=500, detail=str(e)) from e

    # Extract the identity_id and user_id
    user_id = response.user.id

    logger.info(f"User created successfully, ID: {user_id}")
    return user_id



@router.post("/user/delete/{user_id}")
def delete_user(user_id: str):

    try:
        response = supabase.supabase.auth.admin.delete_user(user_id)
        logger.debug(response)
    except Exception as e:
        if e.code == 'user_not_found' :
            logger.error(f"Failed to delete user : {str(e.code)}")
            raise HTTPException(status_code=404, detail=str(e)) from e
        logger.error(f"Failed to delete user : {str(e.code)}")
        raise HTTPException(status_code=500, detail=str(e)) from e

    logger.info(f"User with ID {user_id} deleted successfully")
@@ -1,7 +1,8 @@
"""Definition of the Landmark class to handle visitable objects across the world."""

from typing import Optional, Literal
from uuid import uuid4, UUID
from pydantic import BaseModel, Field
from pydantic import BaseModel, ConfigDict, Field


# Output to frontend
@@ -49,8 +50,7 @@ class Landmark(BaseModel) :
    image_url : Optional[str] = None
    website_url : Optional[str] = None
    wiki_url : Optional[str] = None
    keywords: Optional[dict] = {}
    description : Optional[str] = None
    description : Optional[str] = None  # TODO future
    duration : Optional[int] = 5
    name_en : Optional[str] = None
@@ -69,7 +69,6 @@ class Landmark(BaseModel) :
    is_viewpoint : Optional[bool] = False
    is_place_of_worship : Optional[bool] = False


    def __str__(self) -> str:
        """
        String representation of the Landmark object.
@@ -123,3 +122,26 @@ class Landmark(BaseModel) :
        return (self.uuid == value.uuid or
                self.osm_id == value.osm_id or
                (self.name == value.name and self.distance(value) < 0.001))


class Toilets(BaseModel) :
    """
    Model for toilets. When false/empty the information is either false or not known.
    """
    location : tuple
    wheelchair : Optional[bool] = False
    changing_table : Optional[bool] = False
    fee : Optional[bool] = False
    opening_hours : Optional[str] = ""


    def __str__(self) -> str:
        """
        String representation of the Toilets object.

        Returns:
            str: A formatted string with the toilets location.
        """
        return f'Toilets @{self.location}'

    model_config = ConfigDict(from_attributes=True)
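
A minimal instantiation of the merged-in model (illustrative values; the import path is assumed from the new layout):

from backend.src.structs.landmark import Toilets  # assumed absolute path

toilet = Toilets(location=(45.76, 4.83), wheelchair=True, fee=False)
print(toilet)  # -> Toilets @(45.76, 4.83)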
@@ -1,26 +0,0 @@
"""Definition of the Toilets class."""
from typing import Optional
from pydantic import BaseModel, ConfigDict


class Toilets(BaseModel) :
    """
    Model for toilets. When false/empty the information is either false either not known.
    """
    location : tuple
    wheelchair : Optional[bool] = False
    changing_table : Optional[bool] = False
    fee : Optional[bool] = False
    opening_hours : Optional[str] = ""


    def __str__(self) -> str:
        """
        String representation of the Toilets object.

        Returns:
            str: A formatted string with the toilets location.
        """
        return f'Toilets @{self.location}'

    model_config = ConfigDict(from_attributes=True)
@@ -4,6 +4,7 @@ from fastapi.testclient import TestClient
import pytest

from ..main import app
from ..constants import SUPABASE_TEST_USER_ID


@pytest.fixture(scope="module")
@@ -55,8 +56,38 @@ def test_input(invalid_client, start, preferences, status_code):   # pylint: disable=redefined-outer-name
    response = invalid_client.post(
        "/trip/new",
        json={
            "user_id": SUPABASE_TEST_USER_ID,
            "preferences": preferences,
            "start": start
        }
    )
    assert response.status_code == status_code



@pytest.mark.parametrize(
    "user_id,status_code",
    [
        # No user id :
        ({}, 422),
        ("invalid_user_id", 400),
        # ("12345678-1234-5678-1234-567812345678", 406)
    ]
)
def test_input(invalid_client, user_id, status_code):   # pylint: disable=redefined-outer-name
    """
    Test new trip creation with invalid user ID.
    """
    response = invalid_client.post(
        "/trip/new",
        json={
            "user_id": user_id,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 0},
                            "shopping": {"type": "shopping", "score": 0},
                            "max_time_minute": 20,
                            "detour_tolerance_minute": 0},
            "start": [48.084588, 7.280405]
        }
    )
    assert response.status_code == status_code
@@ -1,10 +1,17 @@
"""Collection of tests to ensure correct implementation and track progress. """
import time
import logging
from fastapi.testclient import TestClient
import pytest

from .test_utils import load_trip_landmarks, log_trip_details
from ..main import app
from ..payments.supabase import Supabase

supabase = Supabase()
logger = logging.getLogger(__name__)
USER_ID = supabase.SUPABASE_TEST_USER_ID


@pytest.fixture(scope="module")
def client():
@@ -22,10 +29,12 @@ def test_turckheim(client, request):   # pylint: disable=redefined-outer-name
    """
    start_time = time.time()  # Start timer
    duration_minutes = 20
    logger.debug('Running test in Turckheim')

    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 0},
                            "shopping": {"type": "shopping", "score": 0},
@@ -37,6 +46,7 @@ def test_turckheim(client, request):   # pylint: disable=redefined-outer-name
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

@@ -46,6 +56,8 @@ def test_turckheim(client, request):   # pylint: disable=redefined-outer-name
    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200  # check for successful planning
@@ -56,6 +68,8 @@ def test_turckheim(client, request):   # pylint: disable=redefined-outer-name
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"
    # assert 2!= 3



def test_bellecour(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°2 : Custom test in Lyon centre to ensure proper decision making in crowded area.
@@ -71,6 +85,7 @@ def test_bellecour(client, request) :   # pylint: disable=redefined-outer-name
    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
@@ -80,6 +95,7 @@ def test_bellecour(client, request) :   # pylint: disable=redefined-outer-name
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
@@ -111,6 +127,7 @@ def test_cologne(client, request) :   # pylint: disable=redefined-outer-name
    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
@@ -120,6 +137,7 @@ def test_cologne(client, request) :   # pylint: disable=redefined-outer-name
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
@@ -152,6 +170,7 @@ def test_strasbourg(client, request) :   # pylint: disable=redefined-outer-name
    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
@@ -161,6 +180,7 @@ def test_strasbourg(client, request) :   # pylint: disable=redefined-outer-name
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
@@ -193,6 +213,7 @@ def test_zurich(client, request) :   # pylint: disable=redefined-outer-name
    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
@@ -202,6 +223,7 @@ def test_zurich(client, request) :   # pylint: disable=redefined-outer-name
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
@@ -234,6 +256,7 @@ def test_paris(client, request) :   # pylint: disable=redefined-outer-name
    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 0},
                            "shopping": {"type": "shopping", "score": 5},
@@ -243,6 +266,7 @@ def test_paris(client, request) :   # pylint: disable=redefined-outer-name
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
@@ -275,6 +299,7 @@ def test_new_york(client, request) :   # pylint: disable=redefined-outer-name
    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
@@ -284,6 +309,7 @@ def test_new_york(client, request) :   # pylint: disable=redefined-outer-name
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
@@ -316,6 +342,7 @@ def test_shopping(client, request) :   # pylint: disable=redefined-outer-name
    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 0},
                            "nature": {"type": "nature", "score": 0},
                            "shopping": {"type": "shopping", "score": 5},
@@ -325,6 +352,7 @@ def test_shopping(client, request) :   # pylint: disable=redefined-outer-name
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
@@ -3,7 +3,7 @@
from fastapi.testclient import TestClient
import pytest

from ..structs.toilets import Toilets
from ..structs.landmark import Toilets
from ..main import app

backend/src/tests/test_user.py (new file, 48 lines)
@@ -0,0 +1,48 @@
"""Collection of tests to ensure correct handling of user data."""

from fastapi.testclient import TestClient
import pytest

from ..main import app

TEST_EMAIL = "dummy@example.com"
TEST_PW = "DummyPassword123"

@pytest.fixture(scope="module")
def client():
    """Client used to call the app."""
    return TestClient(app)


def test_user_handling(client) :
    """
    Test the creation of a new user.
    """
    # Create a new user
    response = client.post(f"/user/create/{TEST_EMAIL}/{TEST_PW}")

    # Verify user has been created
    assert response.status_code == 200, "Failed to create dummy user"
    user_id = response.json()


    # Create same user again to raise an error
    response = client.post(f"/user/create/{TEST_EMAIL}/{TEST_PW}")
    # Verify user already exists
    assert response.status_code == 422, "Failed to simulate dummy user already created."


    # Delete the user.
    response = client.post(f"/user/delete/{user_id}")

    # Verify user has been deleted
    assert response.status_code == 200, "Failed to delete dummy user."


    # Delete the user again to raise an error
    response = client.post(f"/user/delete/{user_id}")
    # Verify user has been deleted
    assert response.status_code == 404, "Failed to simulate dummy user already deleted."
@@ -1,6 +1,7 @@
"""Helper methods for testing."""
import logging
from fastapi import HTTPException
from pydantic import ValidationError

from ..structs.landmark import Landmark
from ..cache import client as cache_client
@@ -38,7 +39,7 @@ def fetch_landmark(landmark_uuid: str):
     try:
         landmark = cache_client.get(f'landmark_{landmark_uuid}')
         if not landmark :
-            logger.error(f'Cache miss for landmark UUID: {landmark_uuid}')
+            logger.warning(f'Cache miss for landmark UUID: {landmark_uuid}')
             raise HTTPException(status_code=404, detail=f'Landmark with UUID {landmark_uuid} not found in cache.')
 
         # Validate that the fetched data is a dictionary
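fetch_landmark now logs a cache miss as a warning before raising the 404. For context, a sketch of how a helper like the tests' load_trip_landmarks can chain cached landmarks through their next_uuid field (the loop below is illustrative, not the diff's implementation):

    def walk_trip_landmarks(first_uuid: str) -> list[Landmark]:
        """Collect a trip by following next_uuid links from the first landmark."""
        landmarks, uuid = [], first_uuid
        while uuid is not None:
            landmark = fetch_landmark(uuid)  # raises HTTPException(404) on a cache miss
            landmarks.append(landmark)
            uuid = landmark.next_uuid
        return landmarks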
@@ -1,38 +0,0 @@
-"""Defines the endpoint for fetching toilet locations."""
-from fastapi import HTTPException, APIRouter, Query
-
-from ..structs.toilets import Toilets
-from .toilets_manager import ToiletsManager
-
-
-# Define the API router
-router = APIRouter()
-
-
-@router.post("/toilets/new")
-def get_toilets(location: tuple[float, float] = Query(...), radius: int = 500) -> list[Toilets] :
-    """
-    Endpoint to find toilets within a specified radius from a given location.
-
-    This endpoint expects the `location` and `radius` as **query parameters**, not in the request body.
-
-    Args:
-        location (tuple[float, float]): The latitude and longitude of the location to search from.
-        radius (int, optional): The radius (in meters) within which to search for toilets. Defaults to 500 meters.
-
-    Returns:
-        list[Toilets]: A list of Toilets objects that meet the criteria.
-    """
-    if location is None:
-        raise HTTPException(status_code=406, detail="Coordinates not provided or invalid")
-    if not (-90 <= location[0] <= 90 or -180 <= location[1] <= 180):
-        raise HTTPException(status_code=422, detail="Start coordinates not in range")
-
-    toilets_manager = ToiletsManager(location, radius)
-
-    try :
-        toilets_list = toilets_manager.generate_toilet_list()
-    except KeyError as exc:
-        raise HTTPException(status_code=404, detail="No toilets found") from exc
-
-    return toilets_list
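The removed endpoint read its arguments from query parameters, so the coordinate tuple had to be sent as two repeated location values. A sketch of how it was exercised with FastAPI's TestClient, assuming the router was still mounted on the app (coordinates are illustrative):

    from fastapi.testclient import TestClient

    from ..main import app  # assumes the toilets router was still included in the app

    client = TestClient(app)

    # Resulting URL: /toilets/new?location=48.8566&location=2.3522&radius=500
    response = client.post("/toilets/new", params={"location": [48.8566, 2.3522], "radius": 500})
    assert response.status_code == 200
    toilets = response.json()  # list of serialized Toilets objects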
@@ -8,8 +8,8 @@ from pydantic import BaseModel
 
 from ..overpass.overpass import Overpass, get_base_info
 from ..structs.landmark import Landmark
-from ..utils.get_time_distance import get_distance
-from ..utils.bbox import create_bbox
+from .get_time_distance import get_distance
+from .utils import create_bbox
 
 
 
@@ -103,7 +103,7 @@ class ClusterManager:
                 out = out
             )
         except Exception as e:
-            self.logger.warning(f"Error fetching clusters: {e}")
+            self.logger.error(f"Error fetching clusters: {e}")
 
         if result is None :
             self.logger.debug(f"Found no {cluster_type} clusters, overpass query returned no datapoints.")
@@ -134,7 +134,7 @@ class ClusterManager:
 
         # Check that there is at least 1 cluster
         if len(set(labels)) > 1 :
-            self.logger.info(f"Found {len(set(labels))} different {cluster_type} clusters.")
+            self.logger.info(f"Found {len(set(labels))} {cluster_type} clusters.")
             # Separate clustered points and noise points
             self.cluster_points = self.all_points[labels != -1]
             self.cluster_labels = labels[labels != -1]
@@ -242,16 +242,18 @@ class ClusterManager:
                 out = 'ids center tags'
             )
         except Exception as e:
-            self.logger.warning(f"Error fetching clusters: {e}")
+            self.logger.error(f"Error fetching clusters: {e}")
             continue
 
         if result is None :
-            self.logger.warning(f"Error fetching clusters: query result is None")
+            self.logger.error("Error fetching clusters: query result is None")
             continue
 
         for elem in result:
             # Get basic info
-            id, coords, name = get_base_info(elem, elem.get('type'), with_name=True)
+            osm_type = elem.get('type')
+
+            id, coords, name = get_base_info(elem, osm_type, with_name=True)
 
             if name is None or coords is None :
                 continue
@@ -259,7 +261,7 @@ class ClusterManager:
                 if d < min_dist :
                     min_dist = d
                     new_name = name                 # add name
-                    osm_type = elem.get('type')     # add type: 'way' or 'relation'
+                    osm_type = osm_type             # add type: 'way' or 'relation'
                     osm_id = id                     # add OSM id
 
         return Landmark(
@@ -4,10 +4,10 @@ import yaml
 
 from ..structs.preferences import Preferences
 from ..structs.landmark import Landmark
-from ..utils.take_most_important import take_most_important
+from .take_most_important import take_most_important
 from .cluster_manager import ClusterManager
 from ..overpass.overpass import Overpass, get_base_info
-from ..utils.bbox import create_bbox
+from .utils import create_bbox
 
 from ..constants import AMENITY_SELECTORS_PATH, LANDMARK_PARAMETERS_PATH, OPTIMIZER_PARAMETERS_PATH
 
@@ -197,7 +197,7 @@ class LandmarkManager:
                 out = 'ids center tags'
             )
         except Exception as e:
-            self.logger.debug(f"Failed to fetch landmarks, proceeding without: {str(e)}")
+            self.logger.error(f"Error fetching landmarks: {e}")
             continue
 
         return_list += self._to_landmarks(result, landmarktype, preference_level)
@@ -246,6 +246,8 @@ class LandmarkManager:
             attractiveness=0,
             n_tags=len(tags))
 
+        # self.logger.debug('added landmark.')
+
         # Browse through tags to add information to landmark.
         for key, value in tags.items():
 
@@ -275,7 +277,6 @@ class LandmarkManager:
             if 'building:' in key or 'pay' in key :
                 landmark.n_tags -= 1
 
-
             # Set the duration.
             if value in ['museum', 'aquarium', 'planetarium'] :
                 landmark.duration = 60
@@ -286,138 +287,14 @@ class LandmarkManager:
                 landmark.is_place_of_worship = False
                 landmark.duration = 10
 
-                landmark.description, landmark.keywords = self.description_and_keywords(tags)
             else:
                 self.set_landmark_score(landmark, landmarktype, preference_level)
         landmarks.append(landmark)
 
         continue
 
 
     return landmarks
 
 
-    def description_and_keywords(self, tags: dict):
-        """
-        Generates a description and a set of keywords for a given landmark based on its tags.
-
-        Params:
-            tags (dict): A dictionary containing metadata about the landmark, including its name,
-                         importance, height, date of construction, and visitor information.
-
-        Returns:
-            description (str): A string description of the landmark.
-            keywords (dict): A dictionary of keywords with fields such as 'importance', 'height',
-                             'place_type', and 'date'.
-        """
-        # Extract relevant fields
-        name = tags.get('name')
-        importance = tags.get('importance', None)
-        n_visitors = tags.get('tourism:visitors', None)
-        height = tags.get('height')
-        place_type = self.get_place_type(tags)
-        date = self.get_date(tags)
-
-        if place_type is None :
-            return None, None
-
-        # Start the description.
-        if importance is None :
-            if len(tags.keys()) < 5 :
-                return None, None
-            if len(tags.keys()) < 10 :
-                description = f"{name} is a well known {place_type}."
-            elif len(tags.keys()) < 17 :
-                importance = 'national'
-                description = f"{name} is a {place_type} of national importance."
-            else :
-                importance = 'international'
-                description = f"{name} is an internationally famous {place_type}."
-        else :
-            description = f"{name} is a {place_type} of {importance} importance."
-
-        if height is not None and date is not None :
-            description += f" This {place_type} was constructed in {date} and is ca. {height} meters high."
-        elif height is not None :
-            description += f" This {place_type} stands ca. {height} meters tall."
-        elif date is not None:
-            description += f" It was constructed in {date}."
-
-        # Format the visitor number
-        if n_visitors is not None :
-            n_visitors = int(n_visitors)
-            if n_visitors < 1000000 :
-                description += f" It welcomes {int(n_visitors/1000)} thousand visitors every year."
-            else :
-                description += f" It welcomes {round(n_visitors/1000000, 1)} million visitors every year."
-
-        # Set the keywords.
-        keywords = {"importance": importance,
-                    "height": height,
-                    "place_type": place_type,
-                    "date": date}
-
-        return description, keywords
-
-
-    def get_place_type(self, data):
-        """
-        Determines the type of the place based on available tags such as 'amenity', 'building',
-        'historic', and 'leisure'. The priority order is: 'historic' > 'building' (if not generic) >
-        'amenity' > 'leisure'.
-
-        Params:
-            data (dict): A dictionary containing metadata about the place.
-
-        Returns:
-            place_type (str): The determined type of the place, or None if no relevant type is found.
-        """
-        amenity = data.get('amenity', None)
-        building = data.get('building', None)
-        historic = data.get('historic', None)
-        leisure = data.get('leisure')
-
-        if historic and historic != "yes":
-            return historic
-        if building and building not in ["yes", "civic", "government", "apartments", "residential", "commericial", "industrial", "retail", "religious", "public", "service"]:
-            return building
-        if amenity:
-            return amenity
-        if leisure:
-            return leisure
-
-        return None
-
-
-    def get_date(self, data):
-        """
-        Extracts the most relevant date from the available tags, prioritizing 'construction_date',
-        'start_date', 'year_of_construction', and 'opening_date' in that order.
-
-        Params:
-            data (dict): A dictionary containing metadata about the place.
-
-        Returns:
-            date (str): The most relevant date found, or None if no date is available.
-        """
-        construction_date = data.get('construction_date', None)
-        opening_date = data.get('opening_date', None)
-        start_date = data.get('start_date', None)
-        year_of_construction = data.get('year_of_construction', None)
-
-        # Prioritize based on availability
-        if construction_date:
-            return construction_date
-        if start_date:
-            return start_date
-        if year_of_construction:
-            return year_of_construction
-        if opening_date:
-            return opening_date
-
-        return None
-
-
 def dict_to_selector_list(d: dict) -> list:
     """
     Convert a dictionary of key-value pairs to a list of Overpass query strings.
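Since description_and_keywords is gone after this commit, a condensed standalone sketch of the logic it implemented may help reviewers; the simplified type lookup below only checks 'building' and 'amenity', whereas the removed method also consulted 'historic' and 'leisure':

    def describe(tags: dict) -> str | None:
        """Condensed re-statement of the removed description logic."""
        name = tags.get('name')
        place_type = tags.get('building') or tags.get('amenity')  # simplified lookup
        if place_type is None or len(tags) < 5:
            return None
        if len(tags) < 10:
            description = f"{name} is a well known {place_type}."
        elif len(tags) < 17:
            description = f"{name} is a {place_type} of national importance."
        else:
            description = f"{name} is an internationally famous {place_type}."
        date = tags.get('start_date')
        if date is not None:
            description += f" It was constructed in {date}."
        return description

    print(describe({'name': 'Tour Eiffel', 'building': 'tower', 'start_date': '1889',
                    'height': '330', 'tourism': 'attraction'}))
    # -> Tour Eiffel is a well known tower. It was constructed in 1889.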
@@ -2,8 +2,8 @@
 import logging
 
 from ..overpass.overpass import Overpass, get_base_info
-from ..structs.toilets import Toilets
-from ..utils.bbox import create_bbox
+from ..structs.landmark import Toilets
+from .utils import create_bbox
 
 
 # silence the overpass logger
@@ -65,7 +65,7 @@ class ToiletsManager:
         try:
             result = self.overpass.fetch_data_from_api(query_str=query)
         except Exception as e:
-            self.logger.error(f"Error fetching toilets: {e}")
+            self.logger.error(f"Error fetching landmarks: {e}")
             return None
 
         toilets_list = self.to_toilets(result)
@@ -33,6 +33,7 @@ fetchTrip(
   UserPreferences preferences,
 ) async {
   Map<String, dynamic> data = {
+    // Add user ID here for API request
     "preferences": preferences.toJson(),
    "start": trip.landmarks!.first.location,
   };
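The Dart side still carries a TODO for the user ID, while the backend tests above already send it. For reference, the request body /trip/new now expects, as implied by those tests (values illustrative):

    data = {
        "user_id": "dummy-user-id",          # the field the TODO above refers to
        "preferences": {
            "sightseeing": {"type": "sightseeing", "score": 5},
            "nature": {"type": "nature", "score": 5},
            "shopping": {"type": "shopping", "score": 5},
        },
        "start": [45.7554934, 4.8444852],    # [lat, lon] of the first landmark
    }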
1091  report.html  Normal file
File diff suppressed because it is too large
48  status
@@ -1,48 +0,0 @@
-error: wrong number of arguments, should be from 1 to 2
-usage: git config [<options>]
-
-Config file location
-    --[no-]global         use global config file
-    --[no-]system         use system config file
-    --[no-]local          use repository config file
-    --[no-]worktree       use per-worktree config file
-    -f, --[no-]file <file>
-                          use given config file
-    --[no-]blob <blob-id> read config from given blob object
-
-Action
-    --[no-]get            get value: name [value-pattern]
-    --[no-]get-all        get all values: key [value-pattern]
-    --[no-]get-regexp     get values for regexp: name-regex [value-pattern]
-    --[no-]get-urlmatch   get value specific for the URL: section[.var] URL
-    --[no-]replace-all    replace all matching variables: name value [value-pattern]
-    --[no-]add            add a new variable: name value
-    --[no-]unset          remove a variable: name [value-pattern]
-    --[no-]unset-all      remove all matches: name [value-pattern]
-    --[no-]rename-section rename section: old-name new-name
-    --[no-]remove-section remove a section: name
-    -l, --[no-]list       list all
-    --[no-]fixed-value    use string equality when comparing values to 'value-pattern'
-    -e, --[no-]edit       open an editor
-    --[no-]get-color      find the color configured: slot [default]
-    --[no-]get-colorbool  find the color setting: slot [stdout-is-tty]
-
-Type
-    -t, --[no-]type <type>
-                          value is given this type
-    --bool                value is "true" or "false"
-    --int                 value is decimal number
-    --bool-or-int         value is --bool or --int
-    --bool-or-str         value is --bool or string
-    --path                value is a path (file or directory name)
-    --expiry-date         value is an expiry date
-
-Other
-    -z, --[no-]null       terminate values with NUL byte
-    --[no-]name-only      show variable names only
-    --[no-]includes       respect include directives on lookup
-    --[no-]show-origin    show origin of config (file, standard input, blob, command line)
-    --[no-]show-scope     show scope of config (worktree, local, global, system, command)
-    --[no-]default <value>
-                          with --get, use default value when missing entry