cleanup
Some checks failed
Build and deploy the backend to staging / Build and push image (pull_request) Successful in 2m16s
Run linting on the backend code / Build (pull_request) Successful in 28s
Run testing on the backend code / Build (pull_request) Failing after 1m14s
Build and deploy the backend to staging / Deploy to staging (pull_request) Successful in 24s
This commit is contained in:
parent b9356dc4ee · commit 28ff0460ab
@@ -28,5 +28,5 @@ jobs:
        working-directory: backend
      - name: Run linter
        run: pipenv run pylint src --fail-under=9
        run: pipenv run pylint src --fail-under=9
        working-directory: backend
@@ -24,7 +24,6 @@ Refer to the READMEs in the `frontend` and `backend` directories for instruction
- `google_maps_flutter` plugin
- Python 3
- `fastapi`
- `OSMPythonTools`
- `numpy, scipy`
- Docker
7 backend/.gitignore vendored
@@ -1,10 +1,5 @@
-# osm-cache and wikidata cache
+# osm-cache
 cache_XML/
-cache/
-apicache/
-
-# wikidata throttle
-*.ctrl

 # Byte-compiled / optimized / DLL files
 __pycache__/
File diff suppressed because one or more lines are too long
@@ -116,7 +116,7 @@ def new_trip(preferences: Preferences,
        raise HTTPException(status_code=500, detail=f"An unexpected error occurred: {str(exc)}") from exc

    t_second_stage = time.time() - start_time
    # logger.debug(f'Generating landmarks : {round(t_generate_landmarks,3)} seconds')
    logger.debug(f'First stage optimization\t: {round(t_first_stage,3)} seconds')
    logger.debug(f'Second stage optimization\t: {round(t_second_stage,3)} seconds')
    logger.info(f'Total computation time\t: {round(t_first_stage + t_second_stage,3)} seconds')
@@ -124,6 +124,7 @@ def new_trip(preferences: Preferences,
    linked_tour = LinkedLandmarks(refined_tour)
    # upon creation of the trip, persistence of both the trip and its landmarks is ensured.
    trip = Trip.from_linked_landmarks(linked_tour, cache_client)
    logger.info(f'Generated a trip of {trip.total_time} minutes with {len(refined_tour)} landmarks in {round(t_generate_landmarks + t_first_stage + t_second_stage,3)} seconds.')
    return trip
0 backend/src/overpass/__init__.py Normal file
@@ -106,7 +106,7 @@ class XMLCache(CachingStrategyBase):
             tree = ET.parse(filename)
             return tree.getroot()  # Return the root element of the parsed XML
         except ET.ParseError:
-            print(f"Error parsing cached XML file: {filename}")
+            # print(f"Error parsing cached XML file: {filename}")
             return None
         return None

@@ -119,7 +119,7 @@ class XMLCache(CachingStrategyBase):
             with open(filename, 'wb') as file:
                 tree.write(file, encoding='utf-8', xml_declaration=True)
         except IOError as e:
-            print(f"Error writing to cache file: {filename} - {e}")
+            raise IOError(f"Error writing to cache file: {filename} - {e}") from e


 class CachingStrategy:
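The second hunk changes `XMLCache.set` from printing write errors to raising `IOError`, so callers now decide whether a failed cache write is fatal. A minimal sketch of that contract, assuming a caller that treats a cold cache as non-fatal (`strategy`, `cache_key` and `root` are illustrative placeholders, not names from this diff):

```python
import logging

logger = logging.getLogger('overpass')

def cache_root_safely(strategy, cache_key, root):
    """Store `root` under `cache_key`; log a failed write instead of crashing."""
    try:
        strategy.set(cache_key, root)
    except IOError as err:
        logger.warning("Cache write failed, continuing without cache: %s", err)
```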
@@ -1,12 +1,13 @@
"""Module allowing connection to the Overpass API and fetching data from OSM."""
from typing import Literal, List
import urllib
import json
import logging
import xml.etree.ElementTree as ET

from .caching_strategy import get_cache_key, CachingStrategy


logger = logging.getLogger('overpass')
ElementTypes = List[Literal['way', 'node', 'relation']]


@@ -46,8 +47,13 @@ def build_query(area: tuple, element_types: ElementTypes,
     query = '('

+    # Round the radius and the coordinates to generate fewer distinct queries
-    search_radius = round(area[0] / 50) * 50
-    loc = tuple((round(area[1], 2), round(area[2], 2)))
+    if area[0] > 500 :
+        search_radius = round(area[0] / 50) * 50
+        loc = tuple((round(area[1], 2), round(area[2], 2)))
+    else :
+        search_radius = round(area[0] / 25) * 25
+        loc = tuple((round(area[1], 3), round(area[2], 3)))

     search_area = f"(around:{search_radius}, {str(loc[0])}, {str(loc[1])})"

     if conditions :
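For reference, the new rounding trades query precision for cache reuse: radii above 500 m snap to 50 m steps with 2-decimal coordinates, smaller radii snap to 25 m steps with 3-decimal coordinates. A standalone illustration of the same arithmetic (not code from this diff):

```python
def snap(area: tuple) -> tuple:
    """Illustrative copy of the rounding logic in build_query."""
    if area[0] > 500:
        return round(area[0] / 50) * 50, (round(area[1], 2), round(area[2], 2))
    return round(area[0] / 25) * 25, (round(area[1], 3), round(area[2], 3))

print(snap((1714, 45.7576485, 4.8330241)))  # (1700, (45.76, 4.83))
print(snap((180, 45.7576485, 4.8330241)))   # (175, (45.758, 4.833))
```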
@@ -80,7 +86,7 @@ def send_query(query: str) -> dict:
     # Try to fetch the result from the cache
     cached_response = CachingStrategy.get(cache_key)
     if cached_response is not None :
-        print("Cache hit!")
+        logger.debug("Cache hit.")
         return cached_response

     # Define the Overpass API endpoint
@@ -106,12 +112,9 @@ def send_query(query: str) -> dict:

         # Cache the response data as an ElementTree root
         CachingStrategy.set(cache_key, root)
+        logger.debug("Response data added to cache.")

         return root

     except urllib.error.URLError as e:
-        print(f"Error connecting to Overpass API: {e}")
-        return None
-    except json.JSONDecodeError:
-        print("Error decoding the JSON response from Overpass API.")
-        return None
+        raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
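With this hunk `send_query` no longer returns None on a network failure; it raises `ConnectionError`. A hedged sketch of what a caller might now look like (the handler shape is an assumption, not shown in this diff):

```python
# Illustrative caller of the send_query changed above.
try:
    root = send_query(query)
except ConnectionError as err:
    # Assumed policy: surface the outage instead of silently returning None;
    # a retry/backoff loop would also fit here.
    raise RuntimeError(f"Overpass unreachable: {err}") from err
```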
File diff suppressed because it is too large
@@ -1,698 +0,0 @@
{
  "type": "FeatureCollection",
  "generator": "overpass-turbo",
  "copyright": "The data included in this document is from www.openstreetmap.org. The data is made available under ODbL.",
  "timestamp": "2024-12-02T21:14:59Z",
  "features": [
    {"type": "Feature", "id": "node/1345741798", "properties": {"@id": "node/1345741798", "name": "Cordonnerie Saint-Joseph", "shop": "shoes"}, "geometry": {"type": "Point", "coordinates": [7.3481705, 48.0816462]}},
    {"type": "Feature", "id": "node/2659184738", "properties": {"@id": "node/2659184738", "brand": "Armand Thiery", "brand:wikidata": "Q2861975", "brand:wikipedia": "fr:Armand Thiery", "name": "Armand Thiery", "opening_hours": "Mo-Sa 09:30-19:00", "shop": "clothes", "wheelchair": "limited"}, "geometry": {"type": "Point", "coordinates": [7.3594454, 48.0785574]}},
    {"type": "Feature", "id": "node/3618136290", "properties": {"@id": "node/3618136290", "name": "Chez Dominique", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3362362, 48.0712174]}},
    {"type": "Feature", "id": "node/3618136605", "properties": {"@id": "node/3618136605", "name": "Divamod", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3304253, 48.0782989]}},
    {"type": "Feature", "id": "node/3618284507", "properties": {"@id": "node/3618284507", "name": "Star tendances et voyages", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3474029, 48.0830993]}},
    {"type": "Feature", "id": "node/3619696125", "properties": {"@id": "node/3619696125", "brand": "Zeeman", "brand:wikidata": "Q184399", "name": "Zeeman", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3413834, 48.0638444]}},
    {"type": "Feature", "id": "node/4594398129", "properties": {"@id": "node/4594398129", "name": "Miss et Mister", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3308309, 48.0779118]}},
    {"type": "Feature", "id": "node/4907320441", "properties": {"@id": "node/4907320441", "brand": "Sergent Major", "brand:wikidata": "Q62521738", "clothes": "babies;children", "name": "Sergent Major", "opening_hours": "Mo-Sa 09:30-19:00", "shop": "clothes", "wheelchair": "no"}, "geometry": {"type": "Point", "coordinates": [7.359116, 48.0787229]}},
    {"type": "Feature", "id": "node/4907364791", "properties": {"@id": "node/4907364791", "brand": "Armand Thiery", "brand:wikidata": "Q2861975", "brand:wikipedia": "fr:Armand Thiery", "clothes": "women", "name": "Armand Thiery", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3601857, 48.0783373]}},
    {"type": "Feature", "id": "node/4907385675", "properties": {"@id": "node/4907385675", "check_date": "2024-05-19", "clothes": "children", "name": "Du Pareil...au même", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3604521, 48.0779726]}},
    {"type": "Feature", "id": "node/4922191645", "properties": {"@id": "node/4922191645", "name": "Abilos", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3566167, 48.0794136]}},
    {"type": "Feature", "id": "node/4922191648", "properties": {"@id": "node/4922191648", "brand": "Esprit", "brand:wikidata": "Q532746", "brand:wikipedia": "en:Esprit Holdings", "name": "Esprit", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3554004, 48.0787549]}},
    {"type": "Feature", "id": "node/4922191972", "properties": {"@id": "node/4922191972", "brand": "Guess", "brand:wikidata": "Q2470307", "brand:wikipedia": "en:Guess (clothing)", "name": "Guess", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.355273, 48.0788003]}},
    {"type": "Feature", "id": "node/4922192001", "properties": {"@id": "node/4922192001", "name": "Lingerie", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3575453, 48.0779317]}},
    {"type": "Feature", "id": "node/5359915869", "properties": {"@id": "node/5359915869", "name": "Al Assil", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3305665, 48.0780902]}},
    {"type": "Feature", "id": "node/9089360040", "properties": {"@id": "node/9089360040", "brand": "Grain de Malice", "brand:wikidata": "Q66757157", "clothes": "women", "name": "Grain de Malice", "shop": "clothes", "short_name": "GDM"}, "geometry": {"type": "Point", "coordinates": [7.3593125, 48.0786234]}},
    {"type": "Feature", "id": "node/9095193153", "properties": {"@id": "node/9095193153", "brand": "Undiz", "brand:wikidata": "Q105306275", "clothes": "underwear", "name": "Undiz", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3599579, 48.0782846]}},
    {"type": "Feature", "id": "node/9095193154", "properties": {"@id": "node/9095193154", "branch": "Lingerie", "brand": "RougeGorge", "brand:wikidata": "Q104600739", "clothes": "underwear", "name": "RougeGorge", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3604883, 48.0781607]}},
    {"type": "Feature", "id": "node/9095212690", "properties": {"@id": "node/9095212690", "alt_name": "North Face", "brand": "The North Face", "brand:wikidata": "Q152784", "brand:wikipedia": "en:The North Face", "check_date": "2024-05-19", "name": "The North Face", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3603923, 48.0773727]}},
    {"type": "Feature", "id": "node/9095270059", "properties": {"@id": "node/9095270059", "air_conditioning": "no", "clothes": "men", "level": "0", "name": "Maison Aume", "second_hand": "no", "shop": "clothes", "wheelchair": "no"}, "geometry": {"type": "Point", "coordinates": [7.361364, 48.0799999]}},
    {"type": "Feature", "id": "node/9098624272", "properties": {"@id": "node/9098624272", "name": "Destock Place", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3575161, 48.0793009]}},
    {"type": "Feature", "id": "node/9123861652", "properties": {"@id": "node/9123861652", "name": "Weackers", "shop": "shoes"}, "geometry": {"type": "Point", "coordinates": [7.361329, 48.0785972]}},
    {"type": "Feature", "id": "node/9162179887", "properties": {"@id": "node/9162179887", "brand": "Calzedonia", "brand:wikidata": "Q1027874", "brand:wikipedia": "en:Calzedonia", "name": "Calzedonia", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3606374, 48.0780809]}},
    {"type": "Feature", "id": "node/9162206449", "properties": {"@id": "node/9162206449", "clothes": "women", "name": "Cop. Copine", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3600947, 48.078399]}},
    {"type": "Feature", "id": "node/9162226360", "properties": {"@id": "node/9162226360", "brand": "Okaïdi", "brand:wikidata": "Q3350027", "brand:wikipedia": "fr:Okaïdi", "name": "Okaïdi", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3596986, 48.078428]}},
    {"type": "Feature", "id": "node/9162227010", "properties": {"@id": "node/9162227010", "brand": "Jules", "brand:wikidata": "Q3188386", "brand:wikipedia": "fr:Jules (enseigne)", "clothes": "men", "name": "Jules", "opening_hours": "Mo-Sa 09:30-19:00", "phone": "+33 3 89 41 03 62", "shop": "clothes", "website": "https://www.jules.com/fr-fr/magasins/1600133/"}, "geometry": {"type": "Point", "coordinates": [7.3600323, 48.0782229]}},
    {"type": "Feature", "id": "node/10151865029", "properties": {"@id": "node/10151865029", "name": "Atelier Cinq", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3571756, 48.0772657]}},
    {"type": "Feature", "id": "node/10862176110", "properties": {"@id": "node/10862176110", "name": "L'hexagone", "shop": "bag"}, "geometry": {"type": "Point", "coordinates": [7.3808571, 48.0814138]}},
    {"type": "Feature", "id": "node/11150877331", "properties": {"@id": "node/11150877331", "brand": "Punt Roma", "brand:wikidata": "Q101423290", "clothes": "women", "name": "Punt Roma", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3571859, 48.0779406]}},
    {"type": "Feature", "id": "node/11150959880", "properties": {"@id": "node/11150959880", "name": "Caroll", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3579354, 48.0779291]}},
    {"type": "Feature", "id": "node/11302242094", "properties": {"@id": "node/11302242094", "branch": "Wintzenheim", "name": "Label Fripe", "opening_hours": "Mo-Sa 09:00-18:45", "phone": "+33 3 89 27 39 25", "second_hand": "only", "shop": "clothes", "website": "https://labelfripe.fr/label-fripe-wintzenheim/"}, "geometry": {"type": "Point", "coordinates": [7.3109899, 48.0850362]}},
    {"type": "Feature", "id": "node/11392247003", "properties": {"@id": "node/11392247003", "name": "Lingerie Sipp", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3111507, 48.0841835]}},
    {"type": "Feature", "id": "node/11778819781", "properties": {"@id": "node/11778819781", "addr:city": "Colmar", "addr:housenumber": "10", "addr:postcode": "68000", "addr:street": "Rue des Têtes", "clothes": "suits;hats;men", "name": "Phillipe", "phone": "0389411983", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3559389, 48.0789064]}},
    {"type": "Feature", "id": "node/11799215969", "properties": {"@id": "node/11799215969", "brand": "Petit Bateau", "brand:wikidata": "Q3377090", "name": "Petit Bateau", "opening_hours": "Mo-Sa 10:00-19:00; Su 10:00-18:00", "phone": "+33 3 89 24 97 85", "shop": "clothes", "website": "https://stores.petit-bateau.com/france/colmar/9-rue-des-boulangers"}, "geometry": {"type": "Point", "coordinates": [7.355149, 48.0780213]}},
    {"type": "Feature", "id": "node/11816704669", "properties": {"@id": "node/11816704669", "addr:housenumber": "10", "addr:street": "Rue des Boulangers", "name": "des petits hauts", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3555001, 48.0780768]}},
    {"type": "Feature", "id": "node/12320343534", "properties": {"@id": "node/12320343534", "addr:city": "Colmar", "addr:housenumber": "44", "addr:postcode": "68000", "addr:street": "Rue des Clefs", "brand": "Un Jour Ailleurs", "brand:wikidata": "Q105106211", "clothes": "women", "name": "Un Jour Ailleurs", "opening_hours": "Mo-Fr 10:00-19:00; Sa 10:00-18:30", "phone": "+33368318572", "shop": "clothes", "website": "https://boutique.unjourailleurs.com/fr/mode-femme/boutique-colmar-76"}, "geometry": {"type": "Point", "coordinates": [7.35897, 48.0789807]}},
    {"type": "Feature", "id": "node/12320343536", "properties": {"@id": "node/12320343536", "addr:city": "Colmar", "addr:housenumber": "38", "addr:postcode": "68000", "addr:street": "Rue des Clefs", "brand": "Timberland", "brand:wikidata": "Q1539185", "name": "Timberland", "opening_hours": "Mo-Sa 10:00-19:00", "phone": "+33389298650", "shop": "clothes"}, "geometry": {"type": "Point", "coordinates": [7.3592409, 48.0788785]}}
  ]
}
@@ -1,350 +0,0 @@
# pylint: skip-file

import numpy as np
import json
import os
from typing import Optional, Literal
from sklearn.cluster import DBSCAN
from sklearn.decomposition import PCA
import matplotlib.pyplot as plt
from pydantic import BaseModel
from OSMPythonTools.overpass import Overpass, overpassQueryBuilder
from OSMPythonTools.cachingStrategy import CachingStrategy, JSON
from math import sin, cos, sqrt, atan2, radians


EARTH_RADIUS_KM = 6373


class ShoppingLocation(BaseModel):
    type: Literal['street', 'area']
    importance: int
    centroid: tuple
    start: Optional[list] = None
    end: Optional[list] = None


# Output to frontend
class Landmark(BaseModel) :
    # Properties of the landmark
    name : str
    type: Literal['sightseeing', 'nature', 'shopping', 'start', 'finish']
    location : tuple
    osm_type : str
    osm_id : int
    attractiveness : int
    n_tags : int
    image_url : Optional[str] = None
    website_url : Optional[str] = None
    description : Optional[str] = None  # TODO future
    duration : Optional[int] = 0
    name_en : Optional[str] = None

    # Additional properties depending on specific tour
    must_do : Optional[bool] = False
    must_avoid : Optional[bool] = False
    is_secondary : Optional[bool] = False

    time_to_reach_next : Optional[int] = 0
    next_uuid : Optional[str] = None


def extract_points(filestr: str) :
    """
    Extract points from geojson file.

    Returns :
        np.array containing the points
    """
    points = []

    with open(os.path.dirname(__file__) + '/' + filestr, 'r') as f:
        geojson = json.load(f)

    for feature in geojson['features']:
        if feature['geometry']['type'] == 'Point':
            centroid = feature['geometry']['coordinates']
            points.append(centroid)

        elif feature['geometry']['type'] == 'Polygon':
            centroid = np.array(feature['geometry']['coordinates'][0][0])
            points.append(centroid)

    # Convert the list of points to a NumPy array
    return np.array(points)


def get_distance(p1: tuple[float, float], p2: tuple[float, float]) -> int:
    """
    Calculate the time in minutes to travel from one location to another.

    Args:
        p1 (tuple[float, float]): Coordinates of the starting location.
        p2 (tuple[float, float]): Coordinates of the destination.

    Returns:
        int: Time to travel from p1 to p2 in minutes.
    """

    if p1 == p2:
        return 0
    else:
        # Compute the distance in km along the surface of the Earth
        # (assume spherical Earth)
        # this is the haversine formula, stolen from stackoverflow
        # in order to not use any external libraries
        lat1, lon1 = radians(p1[0]), radians(p1[1])
        lat2, lon2 = radians(p2[0]), radians(p2[1])

        dlon = lon2 - lon1
        dlat = lat2 - lat1

        a = sin(dlat / 2)**2 + cos(lat1) * cos(lat2) * sin(dlon / 2)**2
        c = 2 * atan2(sqrt(a), sqrt(1 - a))

        return EARTH_RADIUS_KM * c
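Despite the docstring mentioning minutes, `get_distance` above actually returns the great-circle distance in kilometres (`EARTH_RADIUS_KM * c`). A quick self-contained sanity check of the same formula, using two of the Colmar points from the deleted GeoJSON (note the GeoJSON stores longitude first, so the pairs are swapped here):

```python
from math import sin, cos, sqrt, atan2, radians

EARTH_RADIUS_KM = 6373

def haversine_km(p1, p2):
    """Same haversine as get_distance; inputs are (lat, lon) tuples."""
    lat1, lon1 = radians(p1[0]), radians(p1[1])
    lat2, lon2 = radians(p2[0]), radians(p2[1])
    a = sin((lat2 - lat1) / 2)**2 + cos(lat1) * cos(lat2) * sin((lon2 - lon1) / 2)**2
    return EARTH_RADIUS_KM * 2 * atan2(sqrt(a), sqrt(1 - a))

# Cordonnerie Saint-Joseph vs. Zeeman:
print(round(haversine_km((48.0816462, 7.3481705), (48.0638444, 7.3413834)), 2))  # ≈ 2.04 km
```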
def filter_clusters(cluster_points, cluster_labels):
    """
    Remove clusters of less importance.
    """
    label_counts = np.bincount(cluster_labels)

    # Step 3: Get the indices (labels) of the 5 largest clusters
    top_5_labels = np.argsort(label_counts)[-5:]  # Get the largest 5 clusters

    # Step 4: Filter points to keep only the points in the top 5 clusters
    filtered_cluster_points = []
    filtered_cluster_labels = []

    for label in top_5_labels:
        filtered_cluster_points.append(cluster_points[cluster_labels == label])
        filtered_cluster_labels.append(np.full((label_counts[label],), label))  # Replicate the label

    # Concatenate filtered clusters into a single array
    return np.vstack(filtered_cluster_points), np.concatenate(filtered_cluster_labels)
|
||||
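`filter_clusters` keeps the five most populated DBSCAN labels via `np.bincount` plus `np.argsort`. A toy run of the same selection (top-2 here for brevity), assuming labels are already non-negative, i.e. the noise label -1 has been stripped as the script does further down:

```python
import numpy as np

labels = np.array([0, 0, 0, 1, 1, 2, 2, 2, 2, 3])
counts = np.bincount(labels)     # [3, 2, 4, 1]
top2 = np.argsort(counts)[-2:]   # indices of the two largest counts
print(top2)                      # [0 2]
```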
def fit_lines(points, labels):
    """
    Fit lines to identified clusters.
    """
    all_x = []
    all_y = []
    lines = []
    locations = []

    for label in set(labels):
        cluster_points = points[labels == label]

        # If there's not enough points, skip
        if len(cluster_points) < 2:
            continue

        # Apply PCA to find the principal component (i.e., the line of best fit)
        pca = PCA(n_components=1)
        pca.fit(cluster_points)

        direction = pca.components_[0]
        centroid = pca.mean_

        # Project the cluster points onto the principal direction (line direction)
        projections = np.dot(cluster_points - centroid, direction)

        # Get the range of the projections to find the approximate length of the cluster
        cluster_length = projections.max() - projections.min()

        # Now adjust `t` so that it scales with the cluster length
        t = np.linspace(-cluster_length / 2.75, cluster_length / 2.75, 10)

        # Calculate the start and end of the line based on min/max projections
        start_point = centroid[0] + t*direction[0]
        end_point = centroid[1] + t*direction[1]

        # Store the line
        lines.append((start_point, end_point))

        # For visualization, store the points
        all_x.append(min(start_point))
        all_x.append(max(start_point))
        all_y.append(min(end_point))
        all_y.append(max(end_point))

        if np.linalg.norm(t) <= 0.0045 :
            loc = ShoppingLocation(
                type='area',
                centroid=tuple((centroid[1], centroid[0])),
                importance = len(cluster_points),
            )
        else :
            loc = ShoppingLocation(
                type='street',
                centroid=tuple((centroid[1], centroid[0])),
                importance = len(cluster_points),
                start=start_point,
                end=end_point
            )

        locations.append(loc)

    xmin = min(all_x)
    xmax = max(all_x)
    ymin = min(all_y)
    ymax = max(all_y)
    corners = (xmin, xmax, ymin, ymax)

    return corners, locations


def create_landmark(shopping_location: ShoppingLocation):

    # Define the bounding box for a given radius around the coordinates
    lat, lon = shopping_location.centroid
    bbox = ("around:1000", str(lat), str(lon))

    overpass = Overpass()
    # CachingStrategy.use(JSON, cacheDir=OSM_CACHE_DIR)

    # Query neighborhoods and shopping malls
    selectors = ['"place"~"^(suburb|neighborhood|neighbourhood|quarter|city_block)$"', '"shop"="mall"']

    min_dist = float('inf')
    new_name = 'Shopping Area'
    new_name_en = None
    osm_id = 0
    osm_type = 'node'

    for sel in selectors :
        query = overpassQueryBuilder(
            bbox = bbox,
            elementType = ['node', 'way', 'relation'],
            selector = sel,
            includeCenter = True,
            out = 'center'
        )

        try:
            result = overpass.query(query)
        except Exception as e:
            raise Exception("query unsuccessful")

        for elem in result.elements():

            location = (elem.centerLat(), elem.centerLon())

            if location[0] is None :
                location = (elem.lat(), elem.lon())
                if location[0] is None :
                    # print(f"Fetching coordinates failed with {elem.type()}/{elem.id()}")
                    continue

            # print(f"Distance : {get_distance(shopping_location.centroid, location)}")
            d = get_distance(shopping_location.centroid, location)
            if d < min_dist :
                min_dist = d
                new_name = elem.tag('name')
                osm_type = elem.type()  # Add type: 'way' or 'relation'
                osm_id = elem.id()  # Add OSM id

                # add english name if it exists
                try :
                    new_name_en = elem.tag('name:en')
                except:
                    pass

    return Landmark(
        name=new_name,
        type='shopping',
        location=shopping_location.centroid,  # TODO: use the fact that we can also recognize streets.
        attractiveness=shopping_location.importance,
        n_tags=0,
        osm_id=osm_id,
        osm_type=osm_type,
        name_en=new_name_en
    )


# Extract points
points = extract_points('vienna_data.json')

# print(len(points))

######## Create a figure with 1 row and 3 columns for side-by-side plots
fig, axes = plt.subplots(1, 3, figsize=(15, 5))
# Plot Raw data points
axes[0].set_title('Raw Data')
axes[0].scatter(points[:, 0], points[:, 1], color='blue', s=20)


# Apply DBSCAN to find clusters. Choose different settings for different cities.
if len(points) > 400 :
    dbscan = DBSCAN(eps=0.00118, min_samples=15, algorithm='kd_tree')  # for large cities
else :
    dbscan = DBSCAN(eps=0.00075, min_samples=10, algorithm='kd_tree')  # for small cities

labels = dbscan.fit_predict(points)

# Separate clustered points and noise points
clustered_points = points[labels != -1]
clustered_labels = labels[labels != -1]
noise_points = points[labels == -1]

######## Plot n°1: DBSCAN Clustering Results
axes[1].set_title('DBSCAN Clusters')
axes[1].scatter(clustered_points[:, 0], clustered_points[:, 1], c=clustered_labels, cmap='rainbow', s=20)
axes[1].scatter(noise_points[:, 0], noise_points[:, 1], c='blue', s=7, label='Noise')

# Keep the 5 biggest clusters
clustered_points, clustered_labels = filter_clusters(clustered_points, clustered_labels)

# Fit lines
corners, locations = fit_lines(clustered_points, clustered_labels)
(xmin, xmax, ymin, ymax) = corners


######## Plot clustered points in normal size and noise points separately
axes[2].scatter(clustered_points[:, 0], clustered_points[:, 1], c=clustered_labels, cmap='rainbow', s=30)
axes[2].set_title('PCA Fitted Lines on Clusters')

# Create a list of Landmarks for the shopping things
shopping_landmarks = []
for loc in locations :
    axes[2].scatter(loc.centroid[1], loc.centroid[0], color='red', marker='x', s=200, linewidth=3)
    landmark = create_landmark(loc)
    shopping_landmarks.append(landmark)
    axes[2].text(loc.centroid[1], loc.centroid[0], landmark.name,
                 ha='center', va='top', fontsize=6,
                 bbox=dict(facecolor='white', edgecolor='black', boxstyle='round,pad=0.2'),
                 zorder=3)


####### Plot the detected lines in the final plot #######
# for loc in locations:
#     if loc.type == 'street' :
#         line_x = loc.start
#         line_y = loc.end
#         axes[2].plot(line_x, line_y, color='lime', linewidth=3)
#     else :


axes[0].set_xlim(xmin-0.01, xmax+0.01)
axes[0].set_ylim(ymin-0.01, ymax+0.01)

axes[1].set_xlim(xmin-0.01, xmax+0.01)
axes[1].set_ylim(ymin-0.01, ymax+0.01)

axes[2].set_xlim(xmin-0.01, xmax+0.01)
axes[2].set_ylim(ymin-0.01, ymax+0.01)


print("\n\n\n")
for landmark in shopping_landmarks :
    print(f"{landmark.name} is a shopping area with a score of {landmark.attractiveness}")


plt.tight_layout()
plt.show()
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,288 +0,0 @@
from typing import Literal, List, Optional
from pydantic import BaseModel
import urllib.request
import urllib.parse
import json
import yaml
from pathlib import Path
import xml.etree.ElementTree as ET


OSM_ENDPOINT = 'http://overpass-api.de/api/'
LOCATION_PREFIX = Path('src')
PARAMETERS_DIR = LOCATION_PREFIX / 'parameters'
AMENITY_SELECTORS_PATH = PARAMETERS_DIR / 'amenity_selectors.yaml'


ElementTypes = List[Literal['way', 'node', 'relation']]


# Output to frontend
class Landmark(BaseModel) :
    """
    A class representing a landmark or point of interest (POI) in the context of a trip.

    The Landmark class is used to model visitable locations, such as tourist attractions,
    natural sites, shopping locations, and start/end points in travel itineraries. It
    holds information about the landmark's attributes and supports comparisons and
    calculations, such as distance between landmarks.

    Attributes:
        name (str): The name of the landmark.
        type (Literal): The type of the landmark, which can be one of ['sightseeing', 'nature',
            'shopping', 'start', 'finish'].
        location (tuple): A tuple representing the (latitude, longitude) of the landmark.
        osm_type (str): The OpenStreetMap (OSM) type of the landmark.
        osm_id (int): The OpenStreetMap (OSM) ID of the landmark.
        attractiveness (int): A score representing the attractiveness of the landmark.
        n_tags (int): The number of tags associated with the landmark.
        image_url (Optional[str]): A URL to an image of the landmark.
        website_url (Optional[str]): A URL to the landmark's official website.
        description (Optional[str]): A text description of the landmark.
        duration (Optional[int]): The estimated time to visit the landmark (in minutes).
        name_en (Optional[str]): The English name of the landmark.
        uuid (UUID): A unique identifier for the landmark, generated by default using uuid4.
        must_do (Optional[bool]): Whether the landmark is a "must-do" attraction.
        must_avoid (Optional[bool]): Whether the landmark should be avoided.
        is_secondary (Optional[bool]): Whether the landmark is secondary or less important.
        time_to_reach_next (Optional[int]): Estimated time (in minutes) to reach the next landmark.
        next_uuid (Optional[UUID]): UUID of the next landmark in sequence (if applicable).
    """

    # Properties of the landmark
    name : str
    type: Literal['sightseeing', 'nature', 'shopping', 'start', 'finish']
    location : tuple
    osm_type : str
    osm_id : int
    attractiveness : int
    n_tags : int
    image_url : Optional[str] = None
    website_url : Optional[str] = None
    wiki_url : Optional[str] = None
    description : Optional[str] = None  # TODO future
    duration : Optional[int] = 0
    name_en : Optional[str] = None

    # Additional properties depending on specific tour
    must_do : Optional[bool] = False
    must_avoid : Optional[bool] = False
    is_secondary : Optional[bool] = False

    time_to_reach_next : Optional[int] = 0

    # More properties to define the score
    is_viewpoint : Optional[bool] = False
    is_cathedral : Optional[bool] = False
    is_place_of_worship : Optional[bool] = False


def OverpassQueryBuilder(area: tuple, element_types: ElementTypes, selector: str,
                         conditions=[], out='center'):

    if not isinstance(conditions, list) :
        conditions = [conditions]

    query = '('
    search_area = f"({', '.join(map(str, area))})"
    if conditions :
        conditions = '(if: ' + ' && '.join(conditions) + ')'
    else :
        conditions = ''

    for elem in element_types :
        query += elem + '[' + selector + ']' + conditions + search_area + ';'

    query += ');' + f'out {out};'

    return query

def dict_to_selector_list(d: dict) -> list:
    """
    Convert a dictionary of key-value pairs to a list of Overpass query strings.

    Args:
        d (dict): A dictionary of key-value pairs representing the selector.

    Returns:
        list: A list of strings representing the Overpass query selectors.
    """
    return_list = []
    for key, value in d.items():
        if isinstance(value, list):
            val = '|'.join(value)
            return_list.append(f'{key}~"^({val})$"')
        elif isinstance(value, str) and len(value) == 0:
            return_list.append(f'{key}')
        else:
            return_list.append(f'{key}={value}')
    return return_list
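`dict_to_selector_list` turns a selector mapping into Overpass tag filters: a list value becomes an anchored regex match, an empty string becomes a bare key test, and anything else an equality. For example (the input keys here are illustrative, assumed to match the amenity_selectors.yaml format):

```python
print(dict_to_selector_list({
    'leisure': ['park', 'garden'],   # -> 'leisure~"^(park|garden)$"'
    'wikipedia': '',                 # -> 'wikipedia'
    'tourism': 'attraction',         # -> 'tourism=attraction'
}))
```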
def send_query(query: str) -> dict:
    """
    Sends the Overpass QL query to the Overpass API and returns the parsed JSON response.

    Args:
        query (str): The Overpass QL query to be sent to the Overpass API.

    Returns:
        dict: The parsed JSON response from the Overpass API, or None if the request fails.
    """

    # Define the Overpass API endpoint
    overpass_url = "https://overpass-api.de/api/interpreter"

    # Prepare the data to be sent as POST request, encoded as bytes
    data = urllib.parse.urlencode({'data': query}).encode('utf-8')

    # Create a custom header with a User-Agent
    headers = {
        'User-Agent': 'Mozilla/5.0 (compatible; OverpassQuery/1.0; +http://example.com)',
    }

    try:
        # Create a Request object with the specified URL, data, and headers
        request = urllib.request.Request(overpass_url, data=data, headers=headers)

        # Send the request and read the response
        with urllib.request.urlopen(request) as response:
            # Read and decode the response
            response_data = response.read().decode('utf-8')
            return ET.fromstring(response_data)

    except urllib.error.URLError as e:
        print(f"Error connecting to Overpass API: {e}")
        return None
    except json.JSONDecodeError:
        print("Error decoding the JSON response from Overpass API.")
        return None


def parse_result(root: ET.Element, elem_type) -> List[Landmark]:

    landmarks = []
    if root is None :
        return landmarks

    for osm_type in ['node', 'way', 'relation'] :
        for elem in root.findall(osm_type):

            # Extract basic info from the landmark.
            name = elem.find("tag[@k='name']").get('v') if elem.find("tag[@k='name']") is not None else None
            center = elem.find('center')
            tags = elem.findall('tag')

            # Extract the center latitude and longitude if available.
            if name is not None and center is not None:
                lat = center.get('lat')
                lon = center.get('lon')
                coords = tuple((lat, lon))
            else :
                continue

            # Convert this to Landmark object
            landmark = Landmark(name=name,
                                type=elem_type,
                                location=coords,
                                osm_id=elem.get('id'),
                                osm_type=osm_type,
                                attractiveness=0,
                                n_tags=len(tags))

            # Browse through tags to add information to landmark.
            for tag in tags:
                key = tag.get('k')
                value = tag.get('v')

                # Skip this landmark if not suitable.
                if key == 'building:part' and value == 'yes' :
                    break
                if 'disused:' in key :
                    break
                if 'boundary:' in key :
                    break
                if 'shop' in key and elem_type != 'shopping' :
                    break
                # if value == 'apartments' :
                #     break

                # Fill in the other attributes.
                if key == 'image' :
                    landmark.image_url = value
                if key == 'website' :
                    landmark.website_url = value
                if key == 'place_of_worship' :
                    landmark.is_place_of_worship = True
                if key == 'wikipedia' :
                    landmark.wiki_url = value
                if key == 'name:en' :
                    landmark.name_en = value
                if 'building:' in key or 'pay' in key :
                    landmark.n_tags -= 1

                # Set the duration.
                if value in ['museum', 'aquarium', 'planetarium'] :
                    landmark.duration = 60
                elif value == 'viewpoint' :
                    landmark.is_viewpoint = True
                    landmark.duration = 10
                elif value == 'cathedral' :
                    landmark.is_place_of_worship = False
                    landmark.duration = 10
                else :
                    landmark.duration = 5

            else:
                set_score(landmark, elem_type)
                landmarks.append(landmark)
            continue

    return landmarks


def set_score(landmark: Landmark, landmarktype: str) :

    score = landmark.n_tags**1.15
    if landmark.wiki_url :
        score *= 1.1
    if landmark.image_url :
        score *= 1.1
    if landmark.website_url :
        score *= 1.1
    if landmark.is_place_of_worship :
        score *= 0.65
    if landmark.is_viewpoint :
        # print(f"{landmark.name}: n_tags={landmark.n_tags} and score={score*3*1.35*10}")
        score *= 3
    if landmarktype == 'nature' :
        score *= 1.35

    landmark.attractiveness = int(score * 10)
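`set_score` is purely multiplicative over the tag count. A worked check of the rules above for one hypothetical landmark (12 tags, a Wikipedia link, viewpoint status, type 'nature'); the numbers are standalone arithmetic, not output from the project:

```python
score = 12**1.15    # tag base
score *= 1.1        # has wiki_url
score *= 3          # is_viewpoint
score *= 1.35       # landmarktype == 'nature'
print(int(score * 10))  # ≈ 776
```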
with AMENITY_SELECTORS_PATH.open('r') as f:
    amenity_selectors = yaml.safe_load(f)
amenity_selector = amenity_selectors['nature']
bbox = tuple(('around:1714', 45.7576485, 4.8330241))

landmarks = []
for sel in dict_to_selector_list(amenity_selector):

    query = OverpassQueryBuilder(area=bbox,
                                 element_types=['way', 'relation'],
                                 selector=sel,
                                 # conditions='count_tags()>5',
                                 out='center')
    print(query + '\n')

    root = send_query(query)

    landmarks += parse_result(root, 'nature')


print(len(landmarks))
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -1,42 +0,0 @@
"""Collection of tests to ensure correct handling of invalid input."""

from fastapi.testclient import TestClient
import pytest

from .test_utils import load_trip_landmarks
from ..main import app


@pytest.fixture(scope="module")
def client():
    """Client used to call the app."""
    return TestClient(app)


def test_cache(client):   # pylint: disable=redefined-outer-name
    """
    Test n°1 : Custom test in Turckheim to ensure small villages are also supported.

    Args:
        client:
        request:
    """
    duration_minutes = 15
    response = client.post(
        "/trip/new",
        json={
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [48.084588, 7.280405]
        }
    )
    result = response.json()
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])
    landmarks_cached = load_trip_landmarks(client, result['first_landmark_uuid'], True)

    # checks :
    assert response.status_code == 200  # check for successful planning
    assert landmarks_cached == landmarks
@@ -11,7 +11,7 @@ def client():
    """Client used to call the app."""
    return TestClient(app)

'''
def test_turckheim(client, request):   # pylint: disable=redefined-outer-name
    """
    Test n°1 : Custom test in Turckheim to ensure small villages are also supported.
@@ -54,8 +54,8 @@ def test_turckheim(client, request):   # pylint: disable=redefined-outer-name
    assert len(landmarks) > 2  # check that there is something to visit
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    # assert 2==3
'''

'''
def test_bellecour(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°2 : Custom test in Lyon centre to ensure proper decision making in crowded area.
@@ -97,7 +97,7 @@ def test_bellecour(client, request) :   # pylint: disable=redefined-outer-name
    assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
    # assert 2 == 3

'''

def test_cologne(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°2 : Custom test in Lyon centre to ensure proper decision making in crowded area.
@@ -216,7 +216,7 @@ def test_zurich(client, request) :   # pylint: disable=redefined-outer-name
    assert response.status_code == 200  # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
'''


def test_paris(client, request) :   # pylint: disable=redefined-outer-name
    """
@@ -257,7 +257,7 @@ def test_paris(client, request) :   # pylint: disable=redefined-outer-name
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2

'''

def test_new_york(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°2 : Custom test in New York (les Halles) centre to ensure proper decision making in crowded area.
@@ -23,44 +23,7 @@ def landmarks_to_osmid(landmarks: list[Landmark]) -> list[int] :

     return ids

-def fetch_landmark(client, landmark_uuid: str):
-    """
-    Fetch landmark data from the API based on the landmark UUID.
-
-    Args:
-        landmark_uuid (str): The UUID of the landmark.
-
-    Returns:
-        dict: Landmark data fetched from the API.
-    """
-    logger = logging.getLogger(__name__)
-    response = client.get(f'/landmark/{landmark_uuid}')
-
-    if response.status_code != 200:
-        raise HTTPException(status_code=500,
-                            detail=f'Failed to fetch landmark with UUID {landmark_uuid}: {response.status_code}')
-
-    try:
-        json_data = response.json()
-        # logger.info(f'API Response: {json_data}')
-    except ValueError as e:
-        logger.error(f'Failed to parse response as JSON: {response.text}')
-        raise HTTPException(status_code=500, detail="Invalid response format from API") from e
-
-    # Try validating against the Landmark model here to ensure consistency
-    try:
-        landmark = Landmark(**json_data)
-    except ValidationError as ve:
-        logging.error(f'Validation error: {ve}')
-        raise HTTPException(status_code=500, detail="Invalid data format received from API") from ve
-
-    if "detail" in json_data:
-        raise HTTPException(status_code=500, detail=json_data["detail"])
-
-    return landmark
-
-
-def fetch_landmark_cache(landmark_uuid: str):
+def fetch_landmark(landmark_uuid: str):
     """
     Fetch landmark data from the cache based on the landmark UUID.

@@ -91,7 +54,7 @@ def fetch_landmark_cache(landmark_uuid: str):
     raise HTTPException(status_code=500, detail="An unexpected error occurred while fetching the landmark from the cache") from exc


-def load_trip_landmarks(client, first_uuid: str, from_cache=None) -> list[Landmark]:
+def load_trip_landmarks(client, first_uuid: str) -> list[Landmark]:
     """
     Load all landmarks for a trip using the response from the API.

@@ -105,10 +68,7 @@ def load_trip_landmarks(client, first_uuid: str, from_cache=None) -> list[Landmark]:
     next_uuid = first_uuid

     while next_uuid is not None:
-        if from_cache :
-            landmark = fetch_landmark_cache(next_uuid)
-        else :
-            landmark = fetch_landmark(client, next_uuid)
+        landmark = fetch_landmark(next_uuid)

         landmarks.append(landmark)
         next_uuid = landmark.next_uuid  # Prepare for the next iteration
0 backend/src/utils/__init__.py Normal file
@@ -14,7 +14,7 @@ from ..constants import OSM_CACHE_DIR


 # silence the overpass logger
-logging.getLogger('OSMPythonTools').setLevel(level=logging.CRITICAL)
+logging.getLogger('overpass').setLevel(level=logging.CRITICAL)


 class Cluster(BaseModel):
@@ -127,30 +127,34 @@ class ClusterManager:
             lon = float(elem.get('lon'))
             points.append(tuple((lat, lon)))

+        if points :
+            self.all_points = np.array(points)
-        self.all_points = np.array(points)
-        # Apply DBSCAN to find clusters. Choose different settings for different cities.
-        if self.cluster_type == 'shopping' and len(self.all_points) > 200 :
-            dbscan = DBSCAN(eps=0.00118, min_samples=15, algorithm='kd_tree')  # for large cities
-        elif self.cluster_type == 'sightseeing' :
-            dbscan = DBSCAN(eps=0.0025, min_samples=15, algorithm='kd_tree')  # for historic neighborhoods
-        else :
-            dbscan = DBSCAN(eps=0.00075, min_samples=10, algorithm='kd_tree')  # for small cities
+            # Apply DBSCAN to find clusters. Choose different settings for different cities.
+            if self.cluster_type == 'shopping' and len(self.all_points) > 200 :
+                dbscan = DBSCAN(eps=0.00118, min_samples=15, algorithm='kd_tree')  # for large cities
+            elif self.cluster_type == 'sightseeing' :
+                dbscan = DBSCAN(eps=0.0025, min_samples=15, algorithm='kd_tree')  # for historic neighborhoods
+            else :
+                dbscan = DBSCAN(eps=0.00075, min_samples=10, algorithm='kd_tree')  # for small cities
+            labels = dbscan.fit_predict(self.all_points)
-        labels = dbscan.fit_predict(self.all_points)
-        # Check that there is at least 1 cluster
-        if len(set(labels)) > 1 :
-            self.logger.debug(f"Found {len(set(labels))} different clusters.")
-            # Separate clustered points and noise points
-            self.cluster_points = self.all_points[labels != -1]
-            self.cluster_labels = labels[labels != -1]
-            self.filter_clusters()  # ValueError here sometimes. I don't know why. # Filter the clusters to keep only the largest ones.
-            self.valid = True
+            # Check that there is at least 1 cluster
+            if len(set(labels)) > 1 :
+                self.logger.debug(f"Found {len(set(labels))} different clusters.")
+                # Separate clustered points and noise points
+                self.cluster_points = self.all_points[labels != -1]
+                self.cluster_labels = labels[labels != -1]
+                self.filter_clusters()  # ValueError here sometimes. I don't know why. # Filter the clusters to keep only the largest ones.
+                self.valid = True
+            else :
+                self.logger.debug(f"Detected 0 {cluster_type} clusters.")
+                self.valid = False
         else :
-            self.logger.error(f"Detected 0 {cluster_type} clusters.")
+            self.logger.debug(f"Detected 0 {cluster_type} clusters.")
             self.valid = False
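The DBSCAN radii above are in degrees because the points are raw (lat, lon) pairs: one degree of latitude is roughly 111 km, so eps=0.00118 reaches about 130 m north-south, while the east-west reach shrinks with cos(latitude). A quick conversion sketch (the 48° latitude is an assumed mid-European value, not taken from this diff):

```python
from math import cos, radians

def eps_to_metres(eps_deg: float, lat_deg: float = 48.0) -> tuple:
    """Approximate metric reach of a DBSCAN eps expressed in degrees."""
    m_per_deg_lat = 111_320.0                       # nearly constant
    m_per_deg_lon = m_per_deg_lat * cos(radians(lat_deg))
    return eps_deg * m_per_deg_lat, eps_deg * m_per_deg_lon

print(eps_to_metres(0.00118))  # ≈ (131.4, 87.9) metres north-south / east-west
```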
@@ -14,7 +14,7 @@ from ..overpass.caching_strategy import CachingStrategy
 from ..constants import AMENITY_SELECTORS_PATH, LANDMARK_PARAMETERS_PATH, OPTIMIZER_PARAMETERS_PATH, OSM_CACHE_DIR

 # silence the overpass logger
-logging.getLogger('OSMPythonTools').setLevel(level=logging.CRITICAL)
+logging.getLogger('overpass').setLevel(level=logging.CRITICAL)


 class LandmarkManager:
@@ -9,7 +9,7 @@ from ..constants import OSM_CACHE_DIR


 # silence the overpass logger
-logging.getLogger('OSMPythonTools').setLevel(level=logging.CRITICAL)
+logging.getLogger('overpass').setLevel(level=logging.CRITICAL)

 class ToiletsManager:
     """