Compare commits: `v0.0.36...160059d94b`

13 commits

| SHA1 |
|---|
| 160059d94b |
| 18d59012cb |
| f297094c1a |
| 86187d9069 |
| 4e07c10969 |
| bc63b57154 |
| fa083a1080 |
| c448e2dfb7 |
| d9061388dd |
| a9851f9627 |
| e764393706 |
| a0467e1e19 |
| 9b61471c94 |
```diff
@@ -25,8 +25,6 @@ jobs:
           ls -la
           # only install dev-packages
           pipenv install --categories=dev-packages
           pipenv run pip freeze
         working-directory: backend

       - name: Run linter
```
```diff
@@ -25,7 +25,6 @@ jobs:
           ls -la
           # install all packages, including dev-packages
           pipenv install --dev
           pipenv run pip freeze
         working-directory: backend

       - name: Run Tests
```
`.vscode/launch.json` (vendored) — 14 changed lines

```diff
@@ -9,18 +9,16 @@
         "name": "Backend - debug",
         "type": "debugpy",
         "request": "launch",
-        "module": "uvicorn",
         "env": {
             "DEBUG": "true"
         },
-        "args": [
-            // "--app-dir",
-            // "src",
-            "src.main:app",
-            "--reload",
-        ],
         "jinja": true,
-        "cwd": "${workspaceFolder}/backend"
+        "cwd": "${workspaceFolder}/backend",
+        "module": "fastapi",
+        "args": [
+            "dev",
+            "src/main.py"
+        ]
     },
     {
         "name": "Backend - tester",
```
`.vscode/settings.json` (vendored) — 3 changed lines

```diff
@@ -1,3 +0,0 @@
-{
-    "cmake.ignoreCMakeListsMissing": true
-}
```
```diff
@@ -14,5 +14,7 @@ EXPOSE 8000
 ENV NUM_WORKERS=1
 ENV OSM_CACHE_DIR=/cache
+ENV MEMCACHED_HOST_PATH=none
+ENV LOKI_URL=none

 # explicitly use a string instead of an argument list to force a shell and variable expansion
 CMD fastapi run src/main.py --port 8000 --workers $NUM_WORKERS
```
```diff
@@ -25,3 +25,4 @@ pymemcache = "*"
 fastapi-cli = "*"
 scikit-learn = "*"
 pyqt6 = "*"
+loki-logger-handler = "*"
```
`backend/Pipfile.lock` (generated) — 11 changed lines

```diff
@@ -1,7 +1,7 @@
 {
     "_meta": {
         "hash": {
-            "sha256": "bb22b4e28c7aa199c94b688ad93d3ab0ccf1089a172131f4aec03b78e7bd7f1c"
+            "sha256": "6edd6644586e8814a0b4526adb3352dfc17828ca129de7a68c1d5929efe94daa"
         },
         "pipfile-spec": 6,
         "requires": {},
@@ -507,6 +507,15 @@
             "markers": "python_version >= '3.8'",
             "version": "==1.4.7"
         },
+        "loki-logger-handler": {
+            "hashes": [
+                "sha256:aa1a9c933282c134a1e4271aba3cbaa2a3660eab6ea415bad7a072444ab98aa8",
+                "sha256:f6114727a9e5e6f3f2058b9b5324d1cab6d1a04e802079f7b57a8aeb7bd0a112"
+            ],
+            "index": "pypi",
+            "markers": "python_version >= '2.7'",
+            "version": "==1.0.2"
+        },
         "lxml": {
             "hashes": [
                 "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e",
```
Submodule backend/deployment updated: 718df09e88...904f16bfc0
`backend/report.html` — 1094 changed lines; file diff suppressed because one or more lines are too long.
```diff
@@ -1,6 +1,5 @@
-"""Module allowing to access the parameters of route generation"""
+"""Module setting global parameters for the application such as cache, route generation, etc."""

 import logging
 import os
 from pathlib import Path
@@ -16,21 +15,6 @@ cache_dir_string = os.getenv('OSM_CACHE_DIR', './cache')
 OSM_CACHE_DIR = Path(cache_dir_string)


-# if we are in a debug session, set verbose and rich logging
-if os.getenv('DEBUG', "false") == "true":
-    from rich.logging import RichHandler
-    logging.basicConfig(
-        level=logging.DEBUG,
-        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
-        handlers=[RichHandler()]
-    )
-else:
-    logging.basicConfig(
-        level=logging.INFO,
-        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
-    )
-
-
 MEMCACHED_HOST_PATH = os.getenv('MEMCACHED_HOST_PATH', None)
 if MEMCACHED_HOST_PATH == "none":
     MEMCACHED_HOST_PATH = None
```
`backend/src/logging_config.py` — new file, 58 lines

```diff
@@ -0,0 +1,58 @@
+"""Sets up global logging configuration for the application."""
+
+import logging
+import os
+
+logger = logging.getLogger(__name__)
+
+
+def configure_logging():
+    """
+    Called at startup of a FastAPI application instance to setup logging. Depending on the environment, it will log to stdout or to Loki.
+    """
+
+    is_debug = os.getenv('DEBUG', "false") == "true"
+    is_kubernetes = os.getenv('KUBERNETES_SERVICE_HOST') is not None
+
+    if is_kubernetes:
+        # in that case we want to log to stdout and also to loki
+        from loki_logger_handler.loki_logger_handler import LokiLoggerHandler
+        loki_url = os.getenv('LOKI_URL')
+        loki_url = "http://localhost:3100/loki/api/v1/push"
+        if loki_url is None:
+            raise ValueError("LOKI_URL environment variable is not set")
+
+        loki_handler = LokiLoggerHandler(
+            url = loki_url,
+            labels = {'app': 'anyway', 'environment': 'staging' if is_debug else 'production'}
+        )
+
+        logger.info(f"Logging to Loki at {loki_url} with {loki_handler.labels} and {is_debug=}")
+        logging_handlers = [loki_handler, logging.StreamHandler()]
+        logging_level = logging.DEBUG if is_debug else logging.INFO
+        # silence the chatty logs loki generates itself
+        logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)
+        # no need for time since it's added by loki or can be shown in kube logs
+        logging_format = '%(name)s - %(levelname)s - %(message)s'
+
+    else:
+        # if we are in a debug (local) session, set verbose and rich logging
+        from rich.logging import RichHandler
+        logging_handlers = [RichHandler()]
+        logging_level = logging.DEBUG if is_debug else logging.INFO
+        logging_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+
+
+    logging.basicConfig(
+        level = logging_level,
+        format = logging_format,
+        handlers = logging_handlers
+    )
+
+    # also overwrite the uvicorn loggers
+    logging.getLogger('uvicorn').handlers = logging_handlers
+    logging.getLogger('uvicorn.access').handlers = logging_handlers
+    logging.getLogger('uvicorn.error').handlers = logging_handlers
```
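The new module picks its handlers from environment variables; a minimal local sketch exercising the non-Kubernetes branch (the `src.logging_config` import path is assumed from the file header above):

```python
import logging
import os

# assumed import path, matching backend/src/logging_config.py above
from src.logging_config import configure_logging

os.environ["DEBUG"] = "true"                       # local run: RichHandler branch
os.environ.pop("KUBERNETES_SERVICE_HOST", None)    # make sure the Loki branch is skipped

configure_logging()
logging.getLogger(__name__).debug("rich, verbose output expected here")
```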
```diff
@@ -2,7 +2,9 @@

 import logging
 from fastapi import FastAPI, HTTPException, Query
+from contextlib import asynccontextmanager

+from .logging_config import configure_logging
 from .structs.landmark import Landmark, Toilets
 from .structs.preferences import Preferences
 from .structs.linked_landmarks import LinkedLandmarks
@@ -11,17 +13,28 @@ from .utils.landmarks_manager import LandmarkManager
 from .utils.toilets_manager import ToiletsManager
 from .utils.optimizer import Optimizer
 from .utils.refiner import Refiner
-from .persistence import client as cache_client
+from .cache import client as cache_client
+
+logger = logging.getLogger(__name__)

-app = FastAPI()
 manager = LandmarkManager()
 optimizer = Optimizer()
 refiner = Refiner(optimizer=optimizer)


+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    """Function to run at the start of the app"""
+    logger.info("Setting up logging")
+    configure_logging()
+    yield
+    logger.info("Shutting down logging")
+
+
+app = FastAPI(lifespan=lifespan)
+

 @app.post("/trip/new")
 def new_trip(preferences: Preferences,
              start: tuple[float, float],
```
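With the lifespan hook, logging is configured only once the app actually starts; a hedged sketch using FastAPI's `TestClient`, which enters the lifespan context the same way uvicorn does (the `src.main` import path is assumed):

```python
from fastapi.testclient import TestClient

# assumed import path for the app defined in main.py above
from src.main import app

# TestClient runs the lifespan context manager on __enter__,
# so configure_logging() executes exactly as it would in production
with TestClient(app) as client:
    response = client.get("/openapi.json")
    assert response.status_code == 200
```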
```diff
@@ -1,11 +1,11 @@
 city_bbox_side: 7500 #m
 radius_close_to: 50
-church_coeff: 0.9
-nature_coeff: 1.25
+church_coeff: 0.65
+nature_coeff: 1.35
 overall_coeff: 10
 tag_exponent: 1.15
 image_bonus: 10
-viewpoint_bonus: 15
+viewpoint_bonus: 5
 wikipedia_bonus: 4
 name_bonus: 3
 N_important: 40
```
```diff
@@ -1,7 +1,7 @@
 """Definition of the Landmark class to handle visitable objects across the world."""

 from typing import Optional, Literal
-from uuid import uuid4
+from uuid import uuid4, UUID
 from pydantic import BaseModel, Field


@@ -29,12 +29,12 @@ class Landmark(BaseModel) :
         description (Optional[str]): A text description of the landmark.
         duration (Optional[int]): The estimated time to visit the landmark (in minutes).
         name_en (Optional[str]): The English name of the landmark.
-        uuid (str): A unique identifier for the landmark, generated by default using uuid4.
+        uuid (UUID): A unique identifier for the landmark, generated by default using uuid4.
         must_do (Optional[bool]): Whether the landmark is a "must-do" attraction.
         must_avoid (Optional[bool]): Whether the landmark should be avoided.
         is_secondary (Optional[bool]): Whether the landmark is secondary or less important.
         time_to_reach_next (Optional[int]): Estimated time (in minutes) to reach the next landmark.
-        next_uuid (Optional[str]): UUID of the next landmark in sequence (if applicable).
+        next_uuid (Optional[UUID]): UUID of the next landmark in sequence (if applicable).
     """

     # Properties of the landmark
@@ -52,7 +52,7 @@ class Landmark(BaseModel) :
     name_en : Optional[str] = None

     # Unique ID of a given landmark
-    uuid: str = Field(default_factory=uuid4)
+    uuid: UUID = Field(default_factory=uuid4)

     # Additional properties depending on specific tour
     must_do : Optional[bool] = False
@@ -60,7 +60,7 @@ class Landmark(BaseModel) :
     is_secondary : Optional[bool] = False

     time_to_reach_next : Optional[int] = 0
-    next_uuid : Optional[str] = None
+    next_uuid : Optional[UUID] = None

     def __str__(self) -> str:
         """
@@ -139,4 +139,4 @@ class Toilets(BaseModel) :

     class Config:
         # This allows us to easily convert the model to and from dictionaries
-        orm_mode = True
+        from_attributes = True
```
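Switching the fields from `str` to `UUID` lets pydantic validate identifiers instead of accepting arbitrary strings; a minimal standalone sketch of the pattern (a toy model, not the project's full `Landmark`):

```python
from typing import Optional
from uuid import uuid4, UUID
from pydantic import BaseModel, Field

class Node(BaseModel):
    # pydantic validates/coerces UUIDs; uuid4 supplies a fresh one by default
    uuid: UUID = Field(default_factory=uuid4)
    next_uuid: Optional[UUID] = None

n = Node()
print(n.uuid)               # a real UUID object, not a str
print(n.model_dump_json())  # serialized back to its string form
# Node(uuid="not-a-uuid")   # would raise a ValidationError
```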
```diff
@@ -1,6 +1,6 @@
 """Definition of the Trip class."""

-import uuid
+from uuid import uuid4, UUID
 from pydantic import BaseModel, Field
 from pymemcache.client.base import Client

@@ -19,9 +19,9 @@ class Trip(BaseModel):
     Methods:
         from_linked_landmarks: create a Trip from LinkedLandmarks object.
     """
-    uuid: str = Field(default_factory=uuid.uuid4)
+    uuid: UUID = Field(default_factory=uuid4)
     total_time: int
-    first_landmark_uuid: str
+    first_landmark_uuid: UUID


     @classmethod
@@ -31,7 +31,7 @@ class Trip(BaseModel):
         """
         trip = Trip(
             total_time = landmarks.total_time,
-            first_landmark_uuid = str(landmarks[0].uuid)
+            first_landmark_uuid = landmarks[0].uuid
         )

         # Store the trip in the cache
```
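Memcached keys are plain strings, so UUID-typed trips still need stringification at the cache boundary; a hypothetical helper mirroring the "store the trip in the cache" step (not code from the repo):

```python
from pymemcache.client.base import Client

# hypothetical helper; memcached keys must be str/bytes,
# so the UUID is stringified explicitly at the boundary
def cache_trip(cache_client: Client, trip) -> None:
    cache_client.set(str(trip.uuid), trip.model_dump_json(), expire=3600)
```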
```diff
@@ -53,7 +53,7 @@ def test_bellecour(client, request) : # pylint: disable=redefined-outer-name
         client:
         request:
     """
-    duration_minutes = 30
+    duration_minutes = 120
     response = client.post(
         "/trip/new",
         json={
@@ -72,10 +72,16 @@ def test_bellecour(client, request) : # pylint: disable=redefined-outer-name
     # Add details to report
     log_trip_details(request, landmarks, result['total_time'], duration_minutes)

+    for elem in landmarks :
+        print(elem)
+        print(elem.osm_id)
+
     # checks :
     assert response.status_code == 200  # check for successful planning
     assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
     assert 136200148 in osm_ids # check for Cathédrale St. Jean in trip
+    # assert response.status_code == 2000  # check for successful planning


 def test_shopping(client, request) : # pylint: disable=redefined-outer-name
@@ -86,7 +92,7 @@ def test_shopping(client, request) : # pylint: disable=redefined-outer-name
         client:
         request:
     """
-    duration_minutes = 600
+    duration_minutes = 240
     response = client.post(
         "/trip/new",
         json={
@@ -100,7 +106,6 @@ def test_shopping(client, request) : # pylint: disable=redefined-outer-name
     )
     result = response.json()
     landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])
-    # osm_ids = landmarks_to_osmid(landmarks)

     # Add details to report
     log_trip_details(request, landmarks, result['total_time'], duration_minutes)
```
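The tests accept any total time within ±20% of the requested duration; factoring that check into a helper makes the tolerance explicit (a hypothetical helper, not in the repo):

```python
def assert_within_tolerance(actual: int, target: int, tol: float = 0.2) -> None:
    """Assert that actual lies within ±tol (default 20%) of target."""
    assert target * (1 - tol) < actual < target * (1 + tol), \
        f"{actual} is outside ±{tol:.0%} of {target}"

assert_within_tolerance(110, 120)   # passes: 96 < 110 < 144
```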
```diff
@@ -4,7 +4,7 @@ from fastapi import HTTPException
 from pydantic import ValidationError

 from ..structs.landmark import Landmark
-from ..persistence import client as cache_client
+from ..cache import client as cache_client


 def landmarks_to_osmid(landmarks: list[Landmark]) -> list[int] :
```
```diff
@@ -9,12 +9,12 @@ from OSMPythonTools.cachingStrategy import CachingStrategy, JSON

 from ..structs.landmark import Landmark
 from ..utils.get_time_separation import get_distance
-from ..constants import AMENITY_SELECTORS_PATH, LANDMARK_PARAMETERS_PATH, OPTIMIZER_PARAMETERS_PATH, OSM_CACHE_DIR
+from ..constants import OSM_CACHE_DIR


-class ShoppingLocation(BaseModel):
+class Cluster(BaseModel):
     """"
-    A classe representing an interesting area for shopping.
+    A class representing an interesting area for shopping or sightseeing.

     It can represent either a general area or a specifc route with start and end point.
     The importance represents the number of shops found in this cluster.
@@ -33,7 +33,7 @@ class ShoppingLocation(BaseModel):
     # end: Optional[list] = None


-class ShoppingManager:
+class ClusterManager:

     logger = logging.getLogger(__name__)

@@ -42,12 +42,21 @@ class ShoppingManager:
     all_points: list
     cluster_points: list
     cluster_labels: list
-    shopping_locations: list[ShoppingLocation]
+    cluster_type: Literal['sightseeing', 'shopping']

-    def __init__(self, bbox: tuple) -> None:
+    def __init__(self, bbox: tuple, cluster_type: Literal['sightseeing', 'shopping']) -> None:
         """
         Upon intialization, generate the point cloud used for cluster detection.
         The points represent bag/clothes shops and general boutiques.
+
+        If the first step is successful, it applies the DBSCAN clustering algorithm with different
+        parameters depending on the size of the city (number of points).
+        It filters out noise points and keeps only the largest clusters.
+
+        A successful initialization updates:
+        - `self.cluster_points`: The points belonging to clusters.
+        - `self.cluster_labels`: The labels for the points in clusters.
+
+        The method also calls `filter_clusters()` to retain only the largest clusters.

         Args:
             bbox: The bounding box coordinates (around:radius, center_lat, center_lon).
@@ -57,13 +66,23 @@ class ShoppingManager:
         self.overpass = Overpass()
         CachingStrategy.use(JSON, cacheDir=OSM_CACHE_DIR)

+        self.cluster_type = cluster_type
+        if cluster_type == 'shopping' :
+            elem_type = ['node']
+            sel = ['"shop"~"^(bag|boutique|clothes)$"']
+            out = 'skel'
+        else :
+            elem_type = ['way']
+            sel = ['"historic"="building"']
+            out = 'center'
+
         # Initialize the points for cluster detection
         query = overpassQueryBuilder(
             bbox = bbox,
-            elementType = ['node'],
-            selector = ['"shop"~"^(bag|boutique|clothes)$"'],
-            out = 'skel'
+            elementType = elem_type,
+            selector = sel,
+            includeCenter = True,
+            out = out
         )

         try:
```
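The constructor builds an Overpass query whose element type, selector, and output mode depend on the cluster type; a standalone sketch of the same call using OSMPythonTools (the bbox here is a plain south/west/north/east box for illustration, unlike the `("around:radius", lat, lon)` tuple used elsewhere in the diff):

```python
from OSMPythonTools.overpass import Overpass, overpassQueryBuilder

cluster_type = 'sightseeing'  # or 'shopping'
if cluster_type == 'shopping':
    elem_type, sel, out = ['node'], ['"shop"~"^(bag|boutique|clothes)$"'], 'skel'
else:
    elem_type, sel, out = ['way'], ['"historic"="building"'], 'center'

# made-up coordinates around Lyon, (south, west, north, east)
query = overpassQueryBuilder(
    bbox=[45.74, 4.81, 45.78, 4.86],
    elementType=elem_type,
    selector=sel,
    includeCenter=True,
    out=out,
)
result = Overpass().query(query)
print(result.countElements())
```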
```diff
@@ -77,59 +96,19 @@ class ShoppingManager:
         else :
             points = []
             for elem in result.elements() :
-                points.append(tuple((elem.lat(), elem.lon())))
+                coords = tuple((elem.lat(), elem.lon()))
+                if coords[0] is None :
+                    coords = tuple((elem.centerLat(), elem.centerLon()))
+                points.append(coords)

             self.all_points = np.array(points)
             self.valid = True

-    def generate_shopping_landmarks(self) -> list[Landmark]:
-        """
-        Generate shopping landmarks based on clustered locations.
-
-        This method first generates clusters of locations and then extracts shopping-related
-        locations from these clusters. It transforms each shopping location into a `Landmark` object.
-
-        Returns:
-            list[Landmark]: A list of `Landmark` objects representing shopping locations.
-            Returns an empty list if no clusters are found.
-        """
-
-        self.generate_clusters()
-
-        if len(set(self.cluster_labels)) == 0 :
-            return []  # Return empty list if no clusters were found
-
-        # Then generate the shopping locations
-        self.generate_shopping_locations()
-
-        # Transform the locations in landmarks and return the list
-        shopping_landmarks = []
-        for location in self.shopping_locations :
-            shopping_landmarks.append(self.create_landmark(location))
-
-        return shopping_landmarks
-
-
-    def generate_clusters(self) :
-        """
-        Generate clusters of points using DBSCAN.
-
-        This method applies the DBSCAN clustering algorithm with different
-        parameters depending on the size of the city (number of points).
-        It filters out noise points and keeps only the largest clusters.
-
-        The method updates:
-        - `self.cluster_points`: The points belonging to clusters.
-        - `self.cluster_labels`: The labels for the points in clusters.
-
-        The method also calls `filter_clusters()` to retain only the largest clusters.
-        """
-
         # Apply DBSCAN to find clusters. Choose different settings for different cities.
-        if len(self.all_points) > 200 :
+        if self.cluster_type == 'shopping' and len(self.all_points) > 200 :
             dbscan = DBSCAN(eps=0.00118, min_samples=15, algorithm='kd_tree') # for large cities
+        elif self.cluster_type == 'sightseeing' :
+            dbscan = DBSCAN(eps=0.0025, min_samples=15, algorithm='kd_tree') # for historic neighborhoods
         else :
             dbscan = DBSCAN(eps=0.00075, min_samples=10, algorithm='kd_tree') # for small cities
```
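The `eps` values are in degrees, which is why the sprawling historic-neighborhood case gets a larger radius than the dense shopping case; a minimal standalone DBSCAN sketch with the same parameters (synthetic points, not OSM data):

```python
import numpy as np
from sklearn.cluster import DBSCAN

# synthetic (lat, lon) point cloud standing in for shop locations
rng = np.random.default_rng(0)
points = rng.uniform([45.75, 4.82], [45.77, 4.85], size=(300, 2))

# large-city parameters from the diff; eps is in degrees (~130 m of latitude)
dbscan = DBSCAN(eps=0.00118, min_samples=15, algorithm='kd_tree')
labels = dbscan.fit_predict(points)

clustered = points[labels != -1]   # DBSCAN labels noise points as -1
n_clusters = len(set(labels)) - (1 if -1 in labels else 0)
print(f"{n_clusters} clusters, {len(clustered)} clustered points")
```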
```diff
@@ -143,21 +122,24 @@ class ShoppingManager:
         self.filter_clusters()


-    def generate_shopping_locations(self) :
+    def generate_clusters(self) -> list[Landmark]:
         """
-        Generate shopping locations based on clustered points.
+        Generate a list of landmarks based on identified clusters.

         This method iterates over the different clusters, calculates the centroid
         (as the mean of the points within each cluster), and assigns an importance
         based on the size of the cluster.

-        The generated shopping locations are stored in `self.shopping_locations`
-        as a list of `ShoppingLocation` objects, each with:
+        The generated shopping locations are stored in `self.clusters`
+        as a list of `Cluster` objects, each with:
         - `type`: Set to 'area'.
         - `centroid`: The calculated centroid of the cluster.
         - `importance`: The number of points in the cluster.
         """

+        if not self.valid :
+            return []  # Return empty list if no clusters were found
+
         locations = []

         # loop through the different clusters
```
```diff
@@ -169,16 +151,25 @@ class ShoppingManager:
             # Calculate the centroid as the mean of the points
             centroid = np.mean(current_cluster, axis=0)

-            locations.append(ShoppingLocation(
+            if self.cluster_type == 'shopping' :
+                score = len(current_cluster)*2
+            else :
+                score = len(current_cluster)*8
+            locations.append(Cluster(
                 type='area',
                 centroid=centroid,
-                importance = len(current_cluster)
+                importance = score
             ))

-        self.shopping_locations = locations
+        # Transform the locations in landmarks and return the list
+        cluster_landmarks = []
+        for cluster in locations :
+            cluster_landmarks.append(self.create_landmark(cluster))
+
+        return cluster_landmarks


-    def create_landmark(self, shopping_location: ShoppingLocation) -> Landmark:
+    def create_landmark(self, cluster: Cluster) -> Landmark:
         """
         Create a Landmark object based on the given shopping location.
```
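Each cluster's centroid is the mean of its member points, and the importance weighting differs by cluster type; a standalone sketch of that step (the coordinates are made up):

```python
import numpy as np

# assumed: an (n, 2) array of (lat, lon) points belonging to one cluster
current_cluster = np.array([[45.760, 4.830], [45.761, 4.831], [45.759, 4.829]])
cluster_type = 'sightseeing'

centroid = np.mean(current_cluster, axis=0)  # element-wise mean -> (lat, lon)
# weighting from the diff: 2 per shop node, 8 per historic building
score = len(current_cluster) * (2 if cluster_type == 'shopping' else 8)
print(centroid, score)   # [45.76  4.83] 24
```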
```diff
@@ -187,7 +178,7 @@ class ShoppingManager:
         result and creates a landmark with the associated details such as name, type, and OSM ID.

         Parameters:
-            shopping_location (ShoppingLocation): A ShoppingLocation object containing
+            shopping_location (Cluster): A Cluster object containing
             the centroid and importance of the area.

         Returns:
@@ -196,14 +187,21 @@ class ShoppingManager:
         """

         # Define the bounding box for a given radius around the coordinates
-        lat, lon = shopping_location.centroid
+        lat, lon = cluster.centroid
         bbox = ("around:1000", str(lat), str(lon))

         # Query neighborhoods and shopping malls
-        selectors = ['"place"~"^(suburb|neighborhood|neighbourhood|quarter|city_block)$"', '"shop"="mall"']
+        selectors = ['"place"~"^(suburb|neighborhood|neighbourhood|quarter|city_block)$"']
+
+        if self.cluster_type == 'shopping' :
+            selectors.append('"shop"="mall"')
+            new_name = 'Shopping Area'
+            t = 40
+        else :
+            new_name = 'Neighborhood'
+            t = 15

         min_dist = float('inf')
-        new_name = 'Shopping Area'
         new_name_en = None
         osm_id = 0
         osm_type = 'node'
@@ -231,7 +229,7 @@ class ShoppingManager:
             if location[0] is None :
                 continue

-            d = get_distance(shopping_location.centroid, location)
+            d = get_distance(cluster.centroid, location)
             if d < min_dist :
                 min_dist = d
                 new_name = elem.tag('name')
@@ -246,13 +244,14 @@ class ShoppingManager:

         return Landmark(
             name=new_name,
-            type='shopping',
-            location=shopping_location.centroid, # TODO: use the fact the we can also recognize streets.
-            attractiveness=shopping_location.importance,
+            type=self.cluster_type,
+            location=cluster.centroid, # TODO: use the fact the we can also recognize streets.
+            attractiveness=cluster.importance,
             n_tags=0,
             osm_id=osm_id,
             osm_type=osm_type,
-            name_en=new_name_en
+            name_en=new_name_en,
+            duration=t
         )
```
```diff
@@ -5,7 +5,7 @@ from OSMPythonTools.cachingStrategy import CachingStrategy, JSON
 from ..structs.preferences import Preferences
 from ..structs.landmark import Landmark
 from .take_most_important import take_most_important
-from .cluster_processing import ShoppingManager
+from .cluster_manager import ClusterManager

 from ..constants import AMENITY_SELECTORS_PATH, LANDMARK_PARAMETERS_PATH, OPTIMIZER_PARAMETERS_PATH, OSM_CACHE_DIR

@@ -86,6 +86,11 @@ class LandmarkManager:
             current_landmarks = self.fetch_landmarks(bbox, self.amenity_selectors['sightseeing'], preferences.sightseeing.type, score_function)
             all_landmarks.update(current_landmarks)

+            # special pipeline for historic neighborhoods
+            neighborhood_manager = ClusterManager(bbox, 'sightseeing')
+            historic_clusters = neighborhood_manager.generate_clusters()
+            all_landmarks.update(historic_clusters)
+
         # list for nature
         if preferences.nature.score != 0:
             score_function = lambda score: score * 10 * self.nature_coeff * preferences.nature.score / 5
@@ -102,10 +107,8 @@ class LandmarkManager:
             all_landmarks.update(current_landmarks)

             # special pipeline for shopping malls
-            shopping_manager = ShoppingManager(bbox)
-            if shopping_manager.valid :
-                shopping_clusters = shopping_manager.generate_shopping_landmarks()
-                for landmark in shopping_clusters : landmark.duration = 45
+            shopping_manager = ClusterManager(bbox, 'shopping')
+            shopping_clusters = shopping_manager.generate_clusters()
             all_landmarks.update(shopping_clusters)
```
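Both special pipelines now share one class, differing only in the cluster-type string; a condensed sketch of that wiring (names follow the diff above; the bbox value is made up, in the `("around:radius", lat, lon)` form the class expects):

```python
# condensed sketch of the shared pipeline using ClusterManager from the diff
all_landmarks: set = set()
bbox = ("around:1500", "45.7578", "4.8320")

for kind in ('sightseeing', 'shopping'):
    manager = ClusterManager(bbox, kind)
    all_landmarks.update(manager.generate_clusters())
```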
```diff
@@ -277,6 +280,11 @@ class LandmarkManager:
                     skip = True
                     break

+                if "building:" in tag_key:
+                    # do not count the building description as being particularly useful
+                    n_tags -= 1
+
                 if "boundary" in tag_key:
                     # skip "areas" like administrative boundaries and stuff
                     skip = True
@@ -328,12 +336,15 @@ class LandmarkManager:

             score = score_function(score)
             if "place_of_worship" in elem.tags().values() :
-                score = score * self.church_coeff
+                if "cathedral" not in elem.tags().values() :
+                    score = score * self.church_coeff
+                    duration = 5
+                else :
+                    duration = 10

-            if 'viewpoint' in elem.tags().values() :
+            elif 'viewpoint' in elem.tags().values() :
                 # viewpoints must count more
-                score += self.viewpoint_bonus
+                score = score * self.viewpoint_bonus
                 duration = 10

             elif "museum" in elem.tags().values() or "aquarium" in elem.tags().values() or "planetarium" in elem.tags().values():
```
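Note that the viewpoint bonus switches from additive to multiplicative here, which is presumably why `viewpoint_bonus` drops from 15 to 5 in the parameter file above; a toy calculation (the base score is made up):

```python
# toy numbers: old params used viewpoint_bonus = 15, new ones use 5
score = 100
old_score = score + 15   # additive bonus:       115
new_score = score * 5    # multiplicative bonus: 500
print(old_score, new_score)
```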
`frontend/.github/workflows/build_app_ios.yaml` (vendored) — 24 changed lines

```diff
@@ -6,14 +6,17 @@ on:
 jobs:
   build:
     runs-on: macos-latest
+    env:
+      # $BUNDLE_GEMFILE must be set at the job level, so it is set for all steps
+      BUNDLE_GEMFILE: ${{ github.workspace }}/ios/Gemfile
     steps:
       - uses: actions/checkout@v4

       - name: Set up ruby env
         uses: ruby/setup-ruby@v1
         with:
-          ruby-version: 3.2.1
-          bundler-cache: true
+          ruby-version: 3.3
+          bundler-cache: true # runs 'bundle install' and caches installed gems automatically

       - name: Install Flutter
         uses: subosito/flutter-action@v2
@@ -31,16 +34,24 @@ jobs:
           echo "BUILD_NAME=${REF_NAME//v}" >> $GITHUB_ENV

       - name: Setup SSH key for match git repo
-        run: echo "$MATCH_REPO_SSH_KEY" | base64 --decode > ~/.ssh/id_rsa && chmod 600 ~/.ssh/id_rsa
+        # and mark the host as known
+        run: |
+          echo $MATCH_REPO_SSH_KEY | base64 --decode > ~/.ssh/id_rsa
+          chmod 600 ~/.ssh/id_rsa
+          ssh-keyscan -p 2222 git.kluster.moll.re > ~/.ssh/known_hosts
         env:
           MATCH_REPO_SSH_KEY: ${{ secrets.IOS_MATCH_REPO_SSH_KEY_BASE64 }}

-      - name: Install fastlane
-        run: bundle install
+      - name: Install dependencies and clean up
+        run: |
+          flutter pub get
+          bundle exec pod install
+          flutter clean
+          bundle exec pod cache clean --all
         working-directory: ios

       - name: Run fastlane lane
-        run: bundle exec fastlane deploy_release
+        run: bundle exec fastlane deploy_release --verbose
         working-directory: ios
         env:
           BUILD_NUMBER: ${{ github.run_number }}
@@ -50,3 +61,4 @@ jobs:
           IOS_ASC_ISSUER_ID: ${{ secrets.IOS_ASC_ISSUER_ID }}
           IOS_ASC_KEY: ${{ secrets.IOS_ASC_KEY }}
           MATCH_PASSWORD: ${{ secrets.IOS_MATCH_PASSWORD }}
+          IOS_GOOGLE_MAPS_API_KEY: ${{ secrets.IOS_GOOGLE_MAPS_API_KEY }}
```
```diff
@@ -50,13 +50,12 @@ Secrets used by fastlane are stored on hashicorp vault and are fetched by the CI

 ## Secrets
 These are mostly used by the CI/CD pipeline to deploy the application. The main usage for github actions is documented under [https://github.com/hashicorp/vault-action](https://github.com/hashicorp/vault-action).
-**Global secrets** are used for both versions of the app (android and ios).
-- `GOOGLE_MAPS_API_KEY` is used to authenticate with the Google Maps API
-
 **Platform-specific secrets** are used by the CI/CD pipeline to deploy to the respective app stores.
+- `GOOGLE_MAPS_API_KEY` is used to authenticate with the Google Maps API and is scoped to the android platform
 - `ANDROID_KEYSTORE` is used to sign the android apk
 - `ANDROID_GOOGLE_KEY` is used to authenticate with the Google Play Store api
-- `IOS_GOOGLE_...`
+- `IOS_GOOGLE_MAPS_API_KEY` is used to authenticate with the Google Maps API and is scoped to the ios platform
 - `IOS_GOOGLE_...`
 - `IOS_GOOGLE_...`
 - `IOS_GOOGLE_...`
```
```diff
@@ -4,17 +4,15 @@ PODS:
     - Flutter
   - geolocator_apple (1.2.0):
     - Flutter
-  - Google-Maps-iOS-Utils (6.0.0):
+  - Google-Maps-iOS-Utils (6.1.0):
     - GoogleMaps (~> 9.0)
   - google_maps_flutter_ios (0.0.1):
     - Flutter
     - Google-Maps-iOS-Utils (< 7.0, >= 5.0)
     - GoogleMaps (< 10.0, >= 8.4)
-  - GoogleMaps (9.1.1):
-    - GoogleMaps/Maps (= 9.1.1)
-  - GoogleMaps/Base (9.1.1)
-  - GoogleMaps/Maps (9.1.1):
-    - GoogleMaps/Base
+  - GoogleMaps (9.2.0):
+    - GoogleMaps/Maps (= 9.2.0)
+  - GoogleMaps/Maps (9.2.0)
   - map_launcher (0.0.1):
     - Flutter
   - path_provider_foundation (0.0.1):
@@ -74,9 +72,9 @@ SPEC CHECKSUMS:
   Flutter: e0871f40cf51350855a761d2e70bf5af5b9b5de7
   geocoding_ios: bcbdaa6bddd7d3129c9bcb8acddc5d8778689768
   geolocator_apple: d981750b9f47dbdb02427e1476d9a04397beb8d9
-  Google-Maps-iOS-Utils: cfe6a0239c7ca634b7e001ad059a6707143dc8dc
+  Google-Maps-iOS-Utils: 0a484b05ed21d88c9f9ebbacb007956edd508a96
   google_maps_flutter_ios: 0291eb2aa252298a769b04d075e4a9d747ff7264
-  GoogleMaps: 80ea184ed6bf44139f383a8b0e248ba3ec1cc8c9
+  GoogleMaps: 634ec3ca99698b31ca2253d64f017217d70cfb38
   map_launcher: fe43bda6720bb73c12fcc1bdd86123ff49a4d4d6
   path_provider_foundation: 080d55be775b7414fd5a5ef3ac137b97b097e564
   permission_handler_apple: 4ed2196e43d0651e8ff7ca3483a069d469701f2d
@@ -84,6 +82,6 @@ SPEC CHECKSUMS:
   sqflite: c35dad70033b8862124f8337cc994a809fcd9fa3
   url_launcher_ios: 694010445543906933d732453a59da0a173ae33d

-PODFILE CHECKSUM: 819463e6a0290f5a72f145ba7cde16e8b6ef0796
+PODFILE CHECKSUM: bd1a78910c05ac1e3a220e80f392c61ab2cc8789

 COCOAPODS: 1.10.2
```
```diff
@@ -8,9 +8,7 @@ import GoogleMaps
     _ application: UIApplication,
     didFinishLaunchingWithOptions launchOptions: [UIApplication.LaunchOptionsKey: Any]?
   ) -> Bool {
-    // load the key from env
-    let key = ProcessInfo.processInfo.environment["GOOGLE_MAPS_API_KEY"]!
-    GMSServices.provideAPIKey(key)
+    GMSServices.provideAPIKey("IOS_GOOGLE_MAPS_API_KEY")
     GeneratedPluginRegistrant.register(with: self)
     return super.application(application, didFinishLaunchingWithOptions: launchOptions)
   }
```
```diff
@@ -10,4 +10,4 @@ IOS_ASC_ISSUER_ID="sample"
 SIGNING_KEY_FILE_PATH="sample"
 SIGNING_KEY_PASSWORD="sample"

-GOOGLE_MAPS_API_KEY="sample"
+IOS_GOOGLE_MAPS_API_KEY="sample"
```
```diff
@@ -33,7 +33,7 @@ platform :ios do
       "flutter",
       "build",
       "ipa",
-      "--release",
+      "--debug",
       "--build-name=#{build_name}",
       "--build-number=#{build_number}",
     )
@@ -44,7 +44,9 @@ platform :ios do
       archive_path: "../build/ios/archive/Runner.xcarchive"
     )

-    upload_to_testflight
+    upload_to_testflight(
+      skip_waiting_for_build_processing: true,
+    )
   end

@@ -62,6 +64,16 @@ platform :ios do
       readonly: true,
     )

+    # replace secrets by real values, the stupid way
+    sh(
+      "sed",
+      "-i",
+      "",
+      "s/IOS_GOOGLE_MAPS_API_KEY/#{ENV["IOS_GOOGLE_MAPS_API_KEY"]}/g",
+      "../Runner/AppDelegate.swift"
+    )
+
     sh(
       "flutter",
       "build",
```