linting
Some checks failed
Build and deploy the backend to staging / Build and push image (pull_request) Successful in 2m51s
Run linting on the backend code / Build (pull_request) Successful in 37s
Run testing on the backend code / Build (pull_request) Failing after 3m8s
Build and deploy the backend to staging / Deploy to staging (pull_request) Successful in 24s
This commit is contained in:
parent 78f1dcaab4
commit b9356dc4ee
@@ -1,3 +1,4 @@
+"""Module defining the caching strategy for overpass requests."""
 import os
 import xml.etree.ElementTree as ET
 import hashlib
@@ -15,18 +16,41 @@ def get_cache_key(query: str) -> str:


 class CachingStrategyBase:
+    """
+    Base class for implementing caching strategies.
+
+    This class defines the structure for a caching strategy with basic methods
+    that must be implemented by subclasses. Subclasses should define how to
+    retrieve, store, and close the cache.
+    """
     def get(self, key):
+        """Retrieve the cached data associated with the provided key."""
         raise NotImplementedError('Subclass should implement get')

-    def set(self, key, data):
+    def set(self, key, value):
+        """Store data in the cache with the specified key."""
         raise NotImplementedError('Subclass should implement set')

     def close(self):
-        pass
+        """Clean up or close any resources used by the caching strategy."""


-# For later use if xml does not suit well
 class JSONCache(CachingStrategyBase):
+    """
+    A caching strategy that stores and retrieves data in JSON format.
+
+    This class provides methods to cache data as JSON files in a specified directory.
+    The directory is automatically suffixed with '_JSON' to distinguish it from other
+    caching strategies. The data is stored and retrieved using JSON serialization.
+
+    Args:
+        cache_dir (str): The base directory where JSON cache files will be stored.
+            Defaults to 'OSM_CACHE_DIR' with a '_JSON' suffix.
+
+    Methods:
+        get(key): Retrieve cached data from a JSON file associated with the given key.
+        set(key, value): Store data in a JSON file with the specified key.
+    """
     def __init__(self, cache_dir=OSM_CACHE_DIR):
         # Add the class name as a suffix to the directory
         self._cache_dir = f'{cache_dir}_JSON'
@@ -39,16 +63,31 @@ class JSONCache(CachingStrategyBase):
     def get(self, key):
         filename = self._filename(key)
         if os.path.exists(filename):
-            with open(filename, 'r') as file:
+            with open(filename, 'r', encoding='utf-8') as file:
                 return ujson.load(file)
         return None

     def set(self, key, value):
-        with open(self._filename(key), 'w') as file:
+        with open(self._filename(key), 'w', encoding='utf-8') as file:
             ujson.dump(value, file)


 class XMLCache(CachingStrategyBase):
+    """
+    A caching strategy that stores and retrieves data in XML format.
+
+    This class provides methods to cache data as XML files in a specified directory.
+    The directory is automatically suffixed with '_XML' to distinguish it from other
+    caching strategies. The data is stored and retrieved using XML serialization.
+
+    Args:
+        cache_dir (str): The base directory where XML cache files will be stored.
+            Defaults to 'OSM_CACHE_DIR' with a '_XML' suffix.
+
+    Methods:
+        get(key): Retrieve cached data from an XML file associated with the given key.
+        set(key, value): Store data in an XML file with the specified key.
+    """
     def __init__(self, cache_dir=OSM_CACHE_DIR):
         # Add the class name as a suffix to the directory
         self._cache_dir = f'{cache_dir}_XML'
@@ -84,13 +123,22 @@ class XMLCache(CachingStrategyBase):


 class CachingStrategy:
-    __strategy = XMLCache() # Default caching strategy
-
-    # Dictionary to map string identifiers to caching strategy classes
+    """
+    A class to manage different caching strategies.
+
+    This class provides an interface to switch between different caching strategies
+    (e.g., XMLCache, JSONCache) dynamically. It allows caching data in different formats,
+    depending on the strategy being used. By default, it uses the XMLCache strategy.
+
+    Attributes:
+        __strategy (CachingStrategyBase): The currently active caching strategy.
+        __strategies (dict): A mapping between strategy names (as strings) and their
+            corresponding classes, allowing dynamic selection of caching strategies.
+    """
+    __strategy = XMLCache()  # Default caching strategy
     __strategies = {
         'XML': XMLCache,
         'JSON': JSONCache,
-        # Add more strategies here if needed
     }

     @classmethod
@@ -129,5 +177,3 @@ class CachingStrategy:
         if not cls.__strategy:
             raise RuntimeError("Caching strategy has not been set.")
         cls.__strategy.set(key, value)
-
-
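The `__strategies` mapping is what makes the string-based switch in `use()` work: the key picks the concrete cache class at runtime. A minimal usage sketch, assuming `CachingStrategy.get` mirrors the `set` classmethod shown in the last hunk (only `set` is visible in this diff), with an illustrative query string:

    from .caching_strategy import CachingStrategy, get_cache_key
    from .overpass import send_query

    query = '[out:xml]; node["amenity"="toilets"](around:500,48.85,2.29); out center;'  # example Overpass QL

    CachingStrategy.use('JSON', cache_dir='/tmp/osm_cache')  # swap in JSONCache instead of the XML default
    key = get_cache_key(query)            # hashlib digest of the query string
    cached = CachingStrategy.get(key)     # assumed counterpart of the set() classmethod above
    if cached is None:
        cached = send_query(query)        # cache miss: fetch from the Overpass API
        CachingStrategy.set(key, cached)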
@@ -1,3 +1,4 @@
+"""Module allowing connection to the Overpass API and fetching data from OSM."""
 from typing import Literal, List
 import urllib
 import json
@@ -9,8 +10,8 @@ from .caching_strategy import get_cache_key, CachingStrategy
 ElementTypes = List[Literal['way', 'node', 'relation']]


-def build_query(area: tuple, element_types: ElementTypes, selector: str,
-                conditions=[], out='center'):
+def build_query(area: tuple, element_types: ElementTypes,
+                selector: str, conditions=[], out='center'):
     """
     Constructs a query string for the Overpass API to retrieve OpenStreetMap (OSM) data.

@@ -62,7 +63,7 @@ def build_query(area: tuple, element_types: ElementTypes, selector: str,
     return query


-def send_overpass_query(query: str) -> dict:
+def send_query(query: str) -> dict:
     """
     Sends the Overpass QL query to the Overpass API and returns the parsed JSON response.

@@ -122,7 +122,7 @@ def dict_to_selector_list(d: dict) -> list:
     return return_list


-def send_overpass_query(query: str) -> dict:
+def send_query(query: str) -> dict:
     """
     Sends the Overpass QL query to the Overpass API and returns the parsed JSON response.

@@ -280,7 +280,7 @@ for sel in dict_to_selector_list(amenity_selector):
                         out='center')
     print(query + '\n')

-    root = send_overpass_query(query)
+    root = send_query(query)

     landmarks += parse_result(root, 'nature')

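One thing the rename does not touch: `send_query` is still annotated `-> dict`, yet every caller in this commit iterates the result with `findall()`, so the annotation looks stale. A hypothetical call using the renamed signature (parameter names come from the new `build_query` definition; the values are illustrative):

    from ..overpass.overpass import build_query, send_query

    bbox = ("around:500", "48.8584", "2.2945")      # radius in meters, then center lat/lon
    query = build_query(area=bbox,
                        element_types=['node', 'way'],
                        selector='"amenity"="toilets"',
                        out='center')
    root = send_query(query)                        # callers then iterate root.findall('node'), etc.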
@@ -5,11 +5,9 @@ from typing import Literal
 import numpy as np
 from sklearn.cluster import DBSCAN
 from pydantic import BaseModel
-# from OSMPythonTools.overpass import Overpass, overpassQueryBuilder
-# from OSMPythonTools.cachingStrategy import CachingStrategy, JSON
-from ..overpass.overpass import build_query, send_overpass_query
-from ..overpass.caching_strategy import CachingStrategy

+from ..overpass.overpass import build_query, send_query
+from ..overpass.caching_strategy import CachingStrategy
 from ..structs.landmark import Landmark
 from .get_time_distance import get_distance
 from ..constants import OSM_CACHE_DIR
@@ -81,8 +79,6 @@ class ClusterManager:
         Args:
             bbox: The bounding box coordinates (around:radius, center_lat, center_lon).
         """
-        # self.overpass = Overpass()
-        # CachingStrategy.use(JSON, cacheDir=OSM_CACHE_DIR)
         CachingStrategy.use('XML', cache_dir=OSM_CACHE_DIR)

         self.cluster_type = cluster_type
@@ -94,6 +90,8 @@ class ClusterManager:
             osm_types = ['way']
             sel = '"historic"~"^(monument|building|yes)$"'
             out = 'ids center'
+        else :
+            raise NotImplementedError("Please choose only an available option for cluster detection")

         # Initialize the points for cluster detection
         query = build_query(
@@ -105,7 +103,7 @@ class ClusterManager:
         self.logger.debug(f"Cluster query: {query}")

         try:
-            result = send_overpass_query(query)
+            result = send_query(query)
         except Exception as e:
             self.logger.error(f"Error fetching landmarks: {e}")

@@ -117,9 +115,9 @@ class ClusterManager:
         points = []
         for osm_type in osm_types :
             for elem in result.findall(osm_type):
-                center = elem.find('center')

                 if osm_type != 'node' :
+                    center = elem.find('center')
                     lat = float(center.get('lat'))
                     lon = float(center.get('lon'))
                     points.append(tuple((lat, lon)))
@@ -136,7 +134,7 @@ class ClusterManager:
         if self.cluster_type == 'shopping' and len(self.all_points) > 200 :
             dbscan = DBSCAN(eps=0.00118, min_samples=15, algorithm='kd_tree')  # for large cities
         elif self.cluster_type == 'sightseeing' :
-            dbscan = DBSCAN(eps=0.003, min_samples=10, algorithm='kd_tree')  # for historic neighborhoods
+            dbscan = DBSCAN(eps=0.0025, min_samples=15, algorithm='kd_tree')  # for historic neighborhoods
         else :
             dbscan = DBSCAN(eps=0.00075, min_samples=10, algorithm='kd_tree')  # for small cities

@@ -249,7 +247,7 @@ class ClusterManager:
         )

         try:
-            result = send_overpass_query(query)
+            result = send_query(query)
         except Exception as e:
             self.logger.error(f"Error fetching landmarks: {e}")
             continue
@@ -290,7 +288,7 @@ class ClusterManager:
         return Landmark(
             name=new_name,
             type=self.cluster_type,
-            location=cluster.centroid, # TODO: use the fact the we can also recognize streets.
+            location=cluster.centroid, # later: use the fact that we can also recognize streets.
             attractiveness=cluster.importance,
             n_tags=0,
             osm_id=osm_id,
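The eps retune (0.003 to 0.0025, min_samples 10 to 15) is easier to judge with units in mind: the points are raw (lat, lon) tuples, so eps is measured in degrees, and one degree of latitude is roughly 111 km, which makes eps=0.0025 a neighborhood of about 280 m. A toy illustration of the same call (min_samples=3 only because the sample is tiny, not the real setting):

    import numpy as np
    from sklearn.cluster import DBSCAN

    points = np.array([[48.8600, 2.3500], [48.8601, 2.3502], [48.8602, 2.3499],
                       [48.9000, 2.4000]])          # three close points plus one outlier
    labels = DBSCAN(eps=0.0025, min_samples=3, algorithm='kd_tree').fit(points).labels_
    print(labels)                                   # [0 0 0 -1]: the trio clusters, the outlier is noise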
@@ -1,14 +1,14 @@
 """Module used to import data from OSM and arrange them in categories."""
 import logging
-import yaml
 import xml.etree.ElementTree as ET
+import yaml


 from ..structs.preferences import Preferences
 from ..structs.landmark import Landmark
 from .take_most_important import take_most_important
 from .cluster_manager import ClusterManager
-from ..overpass.overpass import build_query, send_overpass_query
+from ..overpass.overpass import build_query, send_query
 from ..overpass.caching_strategy import CachingStrategy

 from ..constants import AMENITY_SELECTORS_PATH, LANDMARK_PARAMETERS_PATH, OPTIMIZER_PARAMETERS_PATH, OSM_CACHE_DIR
@@ -205,7 +205,7 @@ class LandmarkManager:
         self.logger.debug(f"Query: {query}")

         try:
-            result = send_overpass_query(query)
+            result = send_query(query)
         except Exception as e:
             self.logger.error(f"Error fetching landmarks: {e}")
             continue
@@ -240,17 +240,21 @@ class LandmarkManager:
         for osm_type in ['node', 'way', 'relation'] :
             for elem in root.findall(osm_type):
                 name = elem.find("tag[@k='name']").get('v') if elem.find("tag[@k='name']") is not None else None
-                center = elem.find('center')
                 tags = elem.findall('tag')

-                # Extract the center latitude and longitude if available.
-                if name is not None and center is not None:
+                if osm_type != 'node' :
+                    center = elem.find('center')
                     lat = float(center.get('lat'))
                     lon = float(center.get('lon'))
                     coords = tuple((lat, lon))
-                else :
-                    continue

+                else :
+                    lat = float(elem.get('lat'))
+                    lon = float(elem.get('lon'))
+                    coords = tuple((lat, lon))
+
+                if name is None or coords is None :
+                    continue

                 # Convert this to Landmark object
                 landmark = Landmark(name=name,
@@ -305,11 +309,9 @@ class LandmarkManager:
                         landmark.duration = 5

                 else:
-                    # add them to cache here before setting the score
-                    # name should be : 'osm_type + str(osm_id) + 'json'
                     self.set_landmark_score(landmark, landmarktype, preference_level)
                 landmarks.append(landmark)
-                # self.logger.debug('new landmark added')
                 continue

         return landmarks
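The reworked branch matches the two shapes Overpass XML actually returns: nodes carry lat/lon as attributes, while ways and relations only get a <center> child when the query requests 'out center'. A self-contained illustration of both cases:

    import xml.etree.ElementTree as ET

    sample = ET.fromstring(
        '<osm>'
        '<node id="1" lat="48.85" lon="2.29"><tag k="name" v="A"/></node>'
        '<way id="2"><center lat="48.86" lon="2.30"/><tag k="name" v="B"/></way>'
        '</osm>'
    )
    for osm_type in ['node', 'way']:
        for elem in sample.findall(osm_type):
            if osm_type != 'node':
                center = elem.find('center')         # present only with 'out center'
                coords = (float(center.get('lat')), float(center.get('lon')))
            else:
                coords = (float(elem.get('lat')), float(elem.get('lon')))
            print(osm_type, coords)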
@@ -153,10 +153,10 @@ class Optimizer:
         up_ind_y = upper_ind[1]

         # Loop over the upper triangular indices, excluding diagonal elements
-        for i in range(len(up_ind_x)):
-            if up_ind_x[i] != up_ind_y[i]:
+        for i, up_ind in enumerate(up_ind_x):
+            if up_ind != up_ind_y[i]:
                 # Add (L*L-L)/2 constraints to break symmetry
-                prob += (x[up_ind_x[i]*L + up_ind_y[i]] + x[up_ind_y[i]*L + up_ind_x[i]] <= 1)
+                prob += (x[up_ind*L + up_ind_y[i]] + x[up_ind_y[i]*L + up_ind] <= 1)


     def init_eq_not_stay(self, prob: pl.LpProblem, x: pl.LpVariable, L: int):
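For context on the loop above: with x flattened so that x[i*L + j] is the binary "travel from i to j" variable, each constraint x[i*L + j] + x[j*L + i] <= 1 stops a tour from using an edge in both directions, which yields the (L*L-L)/2 constraints the comment mentions, one per unordered pair. A toy reconstruction, assuming upper_ind came from np.triu_indices(L):

    import numpy as np
    import pulp as pl

    L = 3
    prob = pl.LpProblem("toy", pl.LpMinimize)
    x = [pl.LpVariable(f"x_{i}", cat=pl.LpBinary) for i in range(L * L)]

    upper_ind = np.triu_indices(L)          # assumed provenance of upper_ind
    up_ind_x, up_ind_y = upper_ind
    for i, up_ind in enumerate(up_ind_x):
        if up_ind != up_ind_y[i]:           # skip diagonal entries (i == j)
            prob += (x[up_ind * L + up_ind_y[i]] + x[up_ind_y[i] * L + up_ind] <= 1)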
@@ -6,7 +6,8 @@ from shapely import buffer, LineString, Point, Polygon, MultiPoint, concave_hull


 from ..structs.landmark import Landmark
-from . import get_time_distance, take_most_important
+from .get_time_distance import get_time
+from .take_most_important import take_most_important
 from .optimizer import Optimizer
 from ..constants import OPTIMIZER_PARAMETERS_PATH

@@ -195,7 +196,7 @@ class Refiner :

         # Step 4: Use nearest neighbor heuristic to visit all landmarks
         while unvisited_landmarks:
-            nearest_landmark = min(unvisited_landmarks, key=lambda lm: get_time_distance.get_time(current_landmark.location, lm.location))
+            nearest_landmark = min(unvisited_landmarks, key=lambda lm: get_time(current_landmark.location, lm.location))
             path.append(nearest_landmark)
             coordinates.append(nearest_landmark.location)
             current_landmark = nearest_landmark
@@ -238,7 +239,7 @@ class Refiner :
         if self.is_in_area(area, landmark.location) and landmark.name not in visited_names:
             second_order_landmarks.append(landmark)

-        return take_most_important.take_most_important(second_order_landmarks, int(self.max_landmarks_refiner*0.75))
+        return take_most_important(second_order_landmarks, int(self.max_landmarks_refiner*0.75))


     # Try fix the shortest path using shapely
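The while-loop is the textbook nearest-neighbour tour construction. A standalone sketch of the same idea with a stand-in distance function (the real code ranks candidates by get_time between Landmark locations):

    def nearest_neighbor_order(start, points, dist):
        """Greedy tour: repeatedly hop to the closest unvisited point."""
        path, current = [start], start
        unvisited = [p for p in points if p != start]
        while unvisited:
            nearest = min(unvisited, key=lambda p: dist(current, p))
            path.append(nearest)
            unvisited.remove(nearest)
            current = nearest
        return path

    order = nearest_neighbor_order((0, 0), [(0, 2), (0, 1), (3, 0)],
                                   lambda a, b: (a[0] - b[0])**2 + (a[1] - b[1])**2)
    print(order)   # [(0, 0), (0, 1), (0, 2), (3, 0)]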
@@ -1,8 +1,9 @@
 """Module for finding public toilets around given coordinates."""
 import logging
-from OSMPythonTools.overpass import Overpass, overpassQueryBuilder
-from OSMPythonTools.cachingStrategy import CachingStrategy, JSON
+import xml.etree.ElementTree as ET

+from ..overpass.overpass import build_query, send_query
+from ..overpass.caching_strategy import CachingStrategy
 from ..structs.landmark import Toilets
 from ..constants import OSM_CACHE_DIR

@@ -39,8 +40,7 @@ class ToiletsManager:

         self.radius = radius
         self.location = location
-        self.overpass = Overpass()
-        CachingStrategy.use(JSON, cacheDir=OSM_CACHE_DIR)
+        CachingStrategy.use('XML', cache_dir=OSM_CACHE_DIR)


     def generate_toilet_list(self) -> list[Toilets] :
@@ -53,49 +53,83 @@ class ToiletsManager:
         about the toilets found around the given coordinates.
         """
         bbox = tuple((f"around:{self.radius}", str(self.location[0]), str(self.location[1])))
+        osm_types = ['node', 'way', 'relation']
         toilets_list = []

-        query = overpassQueryBuilder(
-            bbox = bbox,
-            elementType = ['node', 'way', 'relation'],
-            # selector can in principle be a list already,
-            # but it generates the intersection of the queries
-            # we want the union
-            selector = ['"amenity"="toilets"'],
-            includeCenter = True,
-            out = 'center'
+        query = build_query(
+            area = bbox,
+            element_types = osm_types,
+            selector = ['"amenity"="toilets"'],
+            out = 'ids center tags'
         )
         self.logger.debug(f"Query: {query}")

         try:
-            result = self.overpass.query(query)
+            result = send_query(query)
         except Exception as e:
             self.logger.error(f"Error fetching landmarks: {e}")
             return None

-        for elem in result.elements():
-            location = (elem.centerLat(), elem.centerLon())
-
-            # handle unprecise and no-name locations
-            if location[0] is None:
-                location = (elem.lat(), elem.lon())
-            else :
-                continue
-
-            toilets = Toilets(location=location)
-
-            if 'wheelchair' in elem.tags().keys() and elem.tag('wheelchair') == 'yes':
-                toilets.wheelchair = True
-
-            if 'changing_table' in elem.tags().keys() and elem.tag('changing_table') == 'yes':
-                toilets.changing_table = True
-
-            if 'fee' in elem.tags().keys() and elem.tag('fee') == 'yes':
-                toilets.fee = True
-
-            if 'opening_hours' in elem.tags().keys() :
-                toilets.opening_hours = elem.tag('opening_hours')
-
-            toilets_list.append(toilets)
+        toilets_list = self.xml_to_toilets(result)

         return toilets_list
+
+
+    def xml_to_toilets(self, root: ET.Element) -> list[Toilets]:
+        """
+        Parse the Overpass API result and extract toilets.
+
+        This method processes the XML root element returned by the Overpass API and
+        extracts elements of types 'node', 'way', and 'relation'. It retrieves the
+        coordinates and relevant tags, and converts them into Toilets objects.
+
+        Args:
+            root (ET.Element): The root element of the XML response from Overpass API.
+
+        Returns:
+            list[Toilets]: A list of Toilets objects extracted from the XML data.
+        """
+        if root is None :
+            return []
+
+        toilets_list = []
+        for osm_type in ['node', 'way', 'relation'] :
+            for elem in root.findall(osm_type):
+                center = elem.find('center')
+
+                # Extract the center latitude and longitude if available.
+                if osm_type != 'node' :
+                    lat = float(center.get('lat'))
+                    lon = float(center.get('lon'))
+                    location = tuple((lat, lon))
+
+                else :
+                    lat = float(elem.get('lat'))
+                    lon = float(elem.get('lon'))
+                    location = tuple((lat, lon))
+
+                if location is None :
+                    continue
+
+                toilets = Toilets(location=location)
+
+                # Extract tags as a dictionary
+                tags = {tag.get('k'): tag.get('v') for tag in elem.findall('tag')}
+
+                if 'wheelchair' in tags and tags['wheelchair'] == 'yes':
+                    toilets.wheelchair = True
+
+                if 'changing_table' in tags and tags['changing_table'] == 'yes':
+                    toilets.changing_table = True
+
+                if 'fee' in tags and tags['fee'] == 'yes':
+                    toilets.fee = True
+
+                if 'opening_hours' in tags :
+                    toilets.opening_hours = tags['opening_hours']
+
+                toilets_list.append(toilets)
+
+        return toilets_list
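The dict comprehension in xml_to_toilets flattens Overpass <tag k="..." v="..."/> children into a plain mapping, so the attribute checks become simple dictionary lookups. A quick standalone check of the pattern:

    import xml.etree.ElementTree as ET

    elem = ET.fromstring('<node lat="48.85" lon="2.29">'
                         '<tag k="amenity" v="toilets"/><tag k="fee" v="yes"/></node>')
    tags = {tag.get('k'): tag.get('v') for tag in elem.findall('tag')}
    print('fee' in tags and tags['fee'] == 'yes')   # True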