Compare commits: v0.0.37...backend/fe

104 commits:
dd277287af f258df8e72 fd091a9ccc f81c28f2ac 361b2b1f42 16918369d7 2c49480966 3a9ef4e7d3
c15e257dea 5a698dd02c 7e4a4b3dc7 84e5902436 81330e5eb3 9002483036 0271c3d7a7 4fd1272ea4
6bedd04a57 d31ca9f81f f6e396e54b d4de945df8 6f54522b8c 080ecd28ae 21706ea7e6 83c1533e78
1f4815c991 699737bc40 1240f86d6e 2a5023df4b 581644a108 f48dcf80c2 757773f433 25c2b6b0d1
b527318eec f2943eb3ad 2ac8499dfb 4a904c3d3c 978cae290b bab6cfe74e 71abeabbd2 f64e60ddf6
d6f723bee1 a3243431e0 3605408ebb 431ae7c670 e612a82921 163e10032c 06c01837cf cd24ee4a67
85c69d5e01 d02ba85c31 0c9b829c3f b9d45ac9f1 2f86536893 8d9e2d9207 259b0d36fd 577ee232fc
1cc935fb34 4818bde820 b30fa1f02e 150055c1b2 f863c41653 f67e2b5dd6 28ff0460ab b9356dc4ee
78f1dcaab4 ca40de82dd c668158341 98576cff0a 7027444602 e5a4645f7a e2e54f5205 2be7cd1e61
3ebe0b7191 814da4b5f6 3fe6056f3c d62dddd424 133f81ce3b 14385342cc dba988629d ecd505a9ce
4fae658dbb 41976e3e85 73373e0fc3 c6cebd0fdf 11bbf34375 a0a3d76b78 160059d94b 18d59012cb
f297094c1a 86187d9069 4e07c10969 bc63b57154 fa083a1080 c448e2dfb7 d9061388dd a9851f9627
d992b62533 e78bee4597 a0467e1e19 9b61471c94 d186a51a87 4baf045c8c 3f1fe463bf d58ef2562d
@@ -25,10 +25,8 @@ jobs:
          ls -la
          # only install dev-packages
          pipenv install --categories=dev-packages
          pipenv run pip freeze
        working-directory: backend

      - name: Run linter
        run: pipenv run pylint src --fail-under=9
        working-directory: backend
@@ -25,11 +25,10 @@ jobs:
          ls -la
          # install all packages, including dev-packages
          pipenv install --dev
          pipenv run pip freeze
        working-directory: backend

      - name: Run Tests
-       run: pipenv run pytest src --html=report.html --self-contained-html
+       run: pipenv run pytest src --html=report.html --self-contained-html --log-cli-level=DEBUG
        working-directory: backend

      - name: Upload HTML report
.vscode/launch.json (vendored): 26 lines changed
@@ -9,29 +9,33 @@
      "name": "Backend - debug",
      "type": "debugpy",
      "request": "launch",
-     "module": "uvicorn",
-     "env": {
-       "DEBUG": "true"
-     },
-     "args": [
-       // "--app-dir",
-       // "src",
-       "src.main:app",
-       "--reload",
-     ],
-     "jinja": true,
-     "cwd": "${workspaceFolder}/backend"
+     "cwd": "${workspaceFolder}/backend",
+     "module": "fastapi",
+     "args": [
+       "dev",
+       "src/main.py"
+     ]
    },
    {
-     "name": "Backend - tester",
+     "name": "Backend - test",
      "type": "debugpy",
      "request": "launch",
-     "program": "src/tester.py",
+     "module": "pytest",
+     "args": [
+       "src/tests",
+       "--log-cli-level=DEBUG",
+       "--html=report.html",
+       "--self-contained-html"
+     ],
+     "env": {
+       "DEBUG": "true"
+     },
      "cwd": "${workspaceFolder}/backend"
    },
    // frontend - flutter app
    {
      "name": "Frontend - debug",
.vscode/settings.json (vendored): 3 lines changed
@@ -1,3 +0,0 @@
-{
-  "cmake.ignoreCMakeListsMissing": true
-}
LICENSE.md (new file): 30 lines
@@ -0,0 +1,30 @@
# License

## Proprietary License

All code and resources in this repository are the property of AnyDev. The software and related documentation are provided solely for use with services provided by AnyDev. Redistribution, modification, or use of this software outside of its intended service is strictly prohibited without explicit permission.

### Copyright © 2024 AnyDev

All rights reserved.

### Restrictions

- You may not modify, distribute, copy, or reverse engineer any part of this codebase.
- This software is licensed for use solely in conjunction with services provided by AnyDev.
- Any commercial use of this software is strictly prohibited without explicit written consent from AnyDev.

## Third-Party Dependencies

This project uses third-party dependencies, which are subject to their respective licenses.

- Python backend dependencies: fastapi, pydantic, numpy, shapely, etc. – Licensed under their respective licenses.
- Flutter frontend dependencies: Cupertino Icons, sliding_up_panel, http, etc. – Licensed under their respective licenses.

Please refer to each project's documentation for the specific terms and conditions.

## OpenStreetMap Data Usage

This project uses data derived from **OpenStreetMap**. OpenStreetMap data is available under the [Open Database License (ODbL)](https://www.openstreetmap.org/copyright). We comply with the ODbL license, and some of the data displayed in the service may be derived from OpenStreetMap sources. We do not redistribute raw OpenStreetMap data; instead, it is processed and transformed before being used in our services.

More information about OpenStreetMap data usage can be found [here](https://www.openstreetmap.org/copyright).
@@ -15,7 +15,7 @@ This project is divided into two main components: a frontend and a backend. The
See the [frontend README](frontend/README.md) for more information. The application is centered around its map view, which displays the user's itinerary. This is based on the Google Maps API.

### Backend
-See the [backend README](backend/README.md) for more information. The backend is responsible for generating the itinerary based on the user's preferences and constraints. Rather than using google maps, we use the OpenStreetMap API, which is much more flexible.
+See the [backend README](backend/README.md) for more information. The backend is responsible for generating the itinerary based on the user's preferences and constraints. Rather than using google maps, we use the OpenStreetMap database through the Overpass API, which is much more flexible.


## Getting Started

@@ -24,8 +24,8 @@ Refer to the READMEs in the `frontend` and `backend` directories for instruction
- `google_maps_flutter` plugin
- Python 3
- `fastapi`
- `OSMPythonTools`
-- `numpy, scipy`
+- `numpy`
- `pydantic`
- Docker
backend/.gitignore (vendored): 9 lines changed
@@ -1,9 +1,8 @@
-# osm-cache and wikidata cache
-cache/
-apicache/
+# osm-cache
+cache_XML/

-# wikidata throttle
-*.ctrl
# secrets
*secrets.yaml

# Byte-compiled / optimized / DLL files
__pycache__/
@@ -293,7 +293,7 @@ ignored-parents=
max-args=5

# Maximum number of attributes for a class (see R0902).
-max-attributes=7
+max-attributes=20

# Maximum number of boolean expressions in an if statement (see R0916).
max-bool-expr=5

@@ -302,7 +302,7 @@ max-bool-expr=5
max-branches=12

# Maximum number of locals for function / method body.
-max-locals=15
+max-locals=30

# Maximum number of parents for a class (see R0901).
max-parents=7

@@ -402,7 +402,7 @@ preferred-modules=
# The type of string formatting that logging methods do. `old` means using %
# formatting, `new` is for `{}` formatting.
-logging-format-style=old
+logging-format-style=new

# Logging modules to check that the string format arguments are in logging
# function parameter format.

@@ -440,7 +440,14 @@ disable=raw-checker-failed,
        use-implicit-booleaness-not-comparison-to-string,
        use-implicit-booleaness-not-comparison-to-zero,
        import-error,
-       line-too-long
+       multiple-statements,
+       line-too-long,
+       logging-fstring-interpolation,
+       duplicate-code,
+       relative-beyond-top-level,
+       invalid-name,
+       too-many-arguments,
+       too-many-positional-arguments

# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
@@ -14,5 +14,7 @@ EXPOSE 8000
ENV NUM_WORKERS=1
ENV OSM_CACHE_DIR=/cache
+ENV MEMCACHED_HOST_PATH=none
+ENV LOKI_URL=none

# explicitly use a string instead of an argument list to force a shell and variable expansion
CMD fastapi run src/main.py --port 8000 --workers $NUM_WORKERS
@@ -18,10 +18,12 @@ numpy = "*"
fastapi = "*"
pydantic = "*"
shapely = "*"
scipy = "*"
osmpythontools = "*"
pywikibot = "*"
pymemcache = "*"
fastapi-cli = "*"
scikit-learn = "*"
pyqt6 = "*"
loki-logger-handler = "*"
pulp = "*"
scipy = "*"
requests = "*"
supabase = "*"
paypalrestsdk = "*"
backend/Pipfile.lock (generated): 2634 lines changed. File diff suppressed because it is too large.
@@ -38,7 +38,19 @@ To deploy the backend docker container, we use kubernetes. Modifications to the
The deployment configuration is included as a submodule in the `deployment` directory. The standalone repository is under [https://git.kluster.moll.re/anydev/anyway-backend-deployment/](https://git.kluster.moll.re/anydev/anyway-backend-deployment/).


## Development
-TBD
+The backend application is structured around the `src` directory, which contains the core components for handling route optimization and API logic. Development generally involves working with key modules such as the optimization engine, Overpass API integration, and utilities for managing landmarks and trip data.
+
+### Key Areas:
+- **API Endpoints**: The main interaction with the backend is through the endpoints defined in `src/main.py`. FastAPI simplifies the creation of RESTful services that manage trip and landmark data.
+- **Optimization Logic**: The trip optimization and refinement are handled in the `src/optimization` module. This is where the core algorithms are implemented.
+- **Landmark Management**: Fetching and prioritizing points of interest (POIs) based on user preferences happens in `src/utils/LandmarkManager`.
+- **Testing**: The `src/tests` directory includes tests for various scenarios, ensuring that the logic works as expected.
+
+For detailed information, refer to the [src README](backend/src/README.md).
+
+### Running the Application:
+To run the backend locally, ensure that the virtual environment is activated and all dependencies are installed as outlined in the "Getting Started" section. You can start the FastAPI server with:
+```bash
+uvicorn src.main:app --reload
+```
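With the server up, a request against one of the endpoints confirms everything is wired correctly. This is only a sketch: it assumes the default port 8000 and uses the `/toilets/new` endpoint whose signature appears in the `src/main.py` diff further below.

```python
# Quick smoke test against a locally running backend (a sketch; the
# /toilets/new endpoint takes `location` and `radius` as query parameters).
import requests

response = requests.post(
    "http://localhost:8000/toilets/new",
    params={"location": [48.8584, 2.2945], "radius": 500},  # lat, lon near a city center
)
print(response.status_code)
print(response.json())  # list of serialized Toilets objects
```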
Submodule backend/deployment updated: 718df09e88...904f16bfc0
File diff suppressed because one or more lines are too long
backend/src/README.md (new file): 65 lines
@@ -0,0 +1,65 @@
# Overview of backend/src

This project is structured into several components that handle different aspects of the application's functionality. Below is a high-level overview of each folder and the key Python files in the `src` directory.

## Folders

### src/optimization
This folder contains modules related to the optimization algorithm used to compute the optimal trip. It comprises the optimizer for the first rough trip and a refiner to include less famous landmarks as well.

### src/overpass
This folder handles interactions with the Overpass API, including constructing and sending queries, caching responses, and parsing results from the Overpass database.

### src/parameters
The modules in this folder define and manage parameters for various parts of the application. This includes configuration values for the optimizer or the list of selectors for Overpass queries.

### src/structs
This folder defines the commonly used data structures within the project. The models leverage Pydantic's `BaseModel` to ensure data validation, serialization, and easy interaction between different components of the application. The main classes are listed below, followed by a minimal illustrative sketch:

- **Landmark**:
  - Represents a point of interest in the context of a trip. It stores various attributes like the landmark's name, type, location (latitude and longitude), and its OSM details.
  - It also includes other optional fields like image URLs, website links, and descriptions. Additionally, the class has properties to track its attractiveness score or relative importance.

- **Preferences**:
  - This class captures user-defined preferences needed to personalize a trip. Preferences are provided for sightseeing (history and culture), nature (parks and gardens), and shopping. These preferences guide the trip optimization process.

- **Trip**:
  - The `Trip` class represents the complete travel plan generated by the system. It holds key information like the trip's total time and the first landmark's UUID.
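As a rough illustration of the shape of these models, here is a minimal sketch using Pydantic. Only fields mentioned in this overview and in the `src/main.py` diff are included; everything else about the real classes is an assumption.

```python
# Minimal sketch of the structs described above (not the actual source).
from uuid import UUID, uuid4
from pydantic import BaseModel, Field


class Landmark(BaseModel):
    """A point of interest in the context of a trip."""
    uuid: UUID = Field(default_factory=uuid4)
    name: str
    type: str                      # e.g. 'sightseeing', 'nature', 'shopping'
    location: tuple[float, float]  # (latitude, longitude)
    osm_type: str                  # OSM details
    osm_id: int
    attractiveness: int = 0        # attractiveness / relative importance score
    duration: int = 0              # visit duration, minutes
    must_do: bool = False
    image_url: str | None = None   # optional enrichment fields (names assumed)
    website_url: str | None = None
    description: str | None = None
    next_uuid: UUID | None = None  # link to the next landmark in a trip


class Trip(BaseModel):
    """Complete travel plan generated by the system."""
    uuid: UUID = Field(default_factory=uuid4)
    total_time: int                # minutes
    first_landmark_uuid: UUID
```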
### src/tests
This folder contains unit tests and test cases for the application's various modules. It is used to ensure the correctness and stability of the code.

### src/utils
The `utils` folder contains utility classes and functions that provide core functionality for the application. The main component in this folder is the `LandmarkManager`, which is central to the process of fetching and organizing landmarks.

- **LandmarkManager**:
  - The `LandmarkManager` is responsible for fetching landmarks from OpenStreetMap (via the Overpass API) and managing their classification based on user preferences. It processes raw geographical data, filters landmarks into relevant categories (such as sightseeing, nature, shopping), and prioritizes them for trip planning.

## Files

### src/cache.py
This file manages the caching mechanisms used throughout the application. It defines the caching strategy for storing and retrieving data, improving the performance of repeated operations by avoiding redundant API calls or computations. A sketch of the pattern is shown below.
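The sketch below assumes the `pymemcache` client configured in `src/cache.py` (its diff appears further down) and the `trip_{uuid}` / `landmark_{uuid}` key scheme used by `src/main.py`; the key value is illustrative.

```python
# Sketch of the caching pattern, assuming a memcached backend via pymemcache.
from pymemcache import serde
from pymemcache.client.base import Client

client = Client(
    'localhost:11211',         # stand-in for MEMCACHED_HOST_PATH
    timeout=1,
    allow_unicode_keys=True,
    encoding='utf-8',
    serde=serde.pickle_serde,  # transparently pickles arbitrary Python objects
)

# store and retrieve an object under its namespaced key
client.set('trip_123e4567-e89b-12d3-a456-426614174000', {'total_time': 180})
trip = client.get('trip_123e4567-e89b-12d3-a456-426614174000')
```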
### src/constants.py
This module defines global constants used throughout the project. These constants may include API endpoints, fixed configuration values, or reusable strings and integers that need to remain consistent.

### src/logging_config.py
This file configures the logging system for the application. It defines how logs are formatted, where they are output (e.g., console or file), and the logging levels (e.g., debug, info, error).

### src/main.py
This file contains the main application logic and API endpoints for interacting with the system. The application is built using the FastAPI framework, which provides several endpoints for creating trips, fetching trips, and retrieving landmarks or nearby facilities. The key endpoints include (an example request follows the list):

- **POST /trip/new**:
  - This endpoint allows users to create a new trip by specifying user_id, preferences, start coordinates, and optionally end coordinates. The preferences guide the optimization process for selecting landmarks. The user id is needed to verify the user's credit balance.
  - Returns: A `Trip` object containing the optimized route, landmarks, and trip details.

- **GET /trip/{trip_uuid}**:
  - This endpoint fetches an already generated trip by its unique identifier (`trip_uuid`). It retrieves the trip data from the cache.
  - Returns: A `Trip` object corresponding to the given `trip_uuid`.

- **GET /landmark/{landmark_uuid}**:
  - This endpoint retrieves a specific landmark by its unique identifier (`landmark_uuid`) from the cache.
  - Returns: A `Landmark` object containing the details of the requested landmark.

- **POST /toilets/new**:
  - This endpoint searches for public toilets near a specified location within a given radius. The location and radius are passed as query parameters.
  - Returns: A list of `Toilets` objects located within the specified radius of the provided coordinates.
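For illustration, a hedged example of calling **POST /trip/new** from Python. The body parameters (`user_id`, `preferences`, `start`, `end`) match the signature in the `src/main.py` diff below; the exact field names inside `preferences` are assumptions based on this README.

```python
# Example request to POST /trip/new (preference field names are assumed).
import requests

payload = {
    "user_id": "11111111-2222-3333-4444-555555555555",  # hypothetical user
    "preferences": {
        "sightseeing": {"type": "sightseeing", "score": 5},
        "nature": {"type": "nature", "score": 3},
        "shopping": {"type": "shopping", "score": 1},
        "max_time_minute": 180,
        "detour_tolerance_minute": 30,
    },
    "start": [48.8584, 2.2945],  # latitude, longitude
}

response = requests.post("http://localhost:8000/trip/new", json=payload)
response.raise_for_status()
trip = response.json()  # serialized Trip
print(trip["first_landmark_uuid"], trip["total_time"])
```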
@@ -70,6 +70,6 @@ else:
        MEMCACHED_HOST_PATH,
        timeout=1,
        allow_unicode_keys=True,
        encoding='utf-8',
        serde=serde.pickle_serde
    )
@@ -1,8 +1,8 @@
-"""Module allowing to access the parameters of route generation"""
+"""Module setting global parameters for the application such as cache, route generation, etc."""

import logging
import os
from pathlib import Path
from typing import List, Literal, Tuple


LOCATION_PREFIX = Path('src')

@@ -12,24 +12,19 @@ LANDMARK_PARAMETERS_PATH = PARAMETERS_DIR / 'landmark_parameters.yaml'
OPTIMIZER_PARAMETERS_PATH = PARAMETERS_DIR / 'optimizer_parameters.yaml'


+PAYPAL_CLIENT_ID = os.getenv("future-paypal-client-id", None)
+PAYPAL_SECRET = os.getenv("future-paypal-secret", None)
+PAYPAL_API_URL = "https://api-m.sandbox.paypal.com"
+
+SUPABASE_URL = os.getenv("SUPABASE_URL", None)
+SUPABASE_KEY = os.getenv("SUPABASE_API_KEY", None)


cache_dir_string = os.getenv('OSM_CACHE_DIR', './cache')
OSM_CACHE_DIR = Path(cache_dir_string)


-# if we are in a debug session, set verbose and rich logging
-if os.getenv('DEBUG', "false") == "true":
-    from rich.logging import RichHandler
-    logging.basicConfig(
-        level=logging.DEBUG,
-        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
-        handlers=[RichHandler()]
-    )
-else:
-    logging.basicConfig(
-        level=logging.INFO,
-        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
-    )

OSM_TYPES = List[Literal['way', 'node', 'relation']]
BBOX = Tuple[float, float, float, float]

MEMCACHED_HOST_PATH = os.getenv('MEMCACHED_HOST_PATH', None)
if MEMCACHED_HOST_PATH == "none":
backend/src/logging_config.py (new file): 56 lines
@@ -0,0 +1,56 @@
"""Sets up global logging configuration for the application."""

import logging
import os

logger = logging.getLogger(__name__)


def configure_logging():
    """
    Called at startup of a FastAPI application instance to set up logging. Depending on the environment, it will log to stdout or to Loki.
    """

    is_debug = os.getenv('DEBUG', "false") == "true"
    is_kubernetes = os.getenv('KUBERNETES_SERVICE_HOST') is not None

    if is_kubernetes:
        # in that case we want to log to stdout and also to loki
        from loki_logger_handler.loki_logger_handler import LokiLoggerHandler
        loki_url = os.getenv('LOKI_URL')
        if loki_url is None:
            raise ValueError("LOKI_URL environment variable is not set")

        loki_handler = LokiLoggerHandler(
            url = loki_url,
            labels = {'app': 'anyway', 'environment': 'staging' if is_debug else 'production'}
        )

        logger.info(f"Logging to Loki at {loki_url} with {loki_handler.labels} and {is_debug=}")
        logging_handlers = [loki_handler, logging.StreamHandler()]
        logging_level = logging.DEBUG if is_debug else logging.INFO
        # silence the chatty logs loki generates itself
        logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)
        # no need for time since it's added by loki or can be shown in kube logs
        logging_format = '%(name)s - %(levelname)s - %(message)s'

    else:
        # if we are in a debug (local) session, set verbose and rich logging
        from rich.logging import RichHandler
        logging_handlers = [RichHandler()]
        logging_level = logging.DEBUG if is_debug else logging.INFO
        logging_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'


    logging.basicConfig(
        level = logging_level,
        format = logging_format,
        handlers = logging_handlers
    )

    # also overwrite the uvicorn loggers
    logging.getLogger('uvicorn').handlers = logging_handlers
    logging.getLogger('uvicorn.access').handlers = logging_handlers
    logging.getLogger('uvicorn.error').handlers = logging_handlers
@@ -1,31 +1,57 @@
"""Main app for backend api"""

import logging
-from fastapi import FastAPI, HTTPException, Query
+import time
+from contextlib import asynccontextmanager
+from fastapi import FastAPI, HTTPException, BackgroundTasks, Query, Body

+from .logging_config import configure_logging
from .structs.landmark import Landmark, Toilets
from .structs.preferences import Preferences
from .structs.linked_landmarks import LinkedLandmarks
from .structs.trip import Trip
from .utils.landmarks_manager import LandmarkManager
from .utils.toilets_manager import ToiletsManager
-from .utils.optimizer import Optimizer
-from .utils.refiner import Refiner
-from .persistence import client as cache_client
+from .optimization.optimizer import Optimizer
+from .optimization.refiner import Refiner
+from .overpass.overpass import fill_cache
+from .cache import client as cache_client
+from .payments.supabase import Supabase
+from .payments.payment_routes import router as payment_router
+from .payments.supabase_routes import router as supabase_router


logger = logging.getLogger(__name__)

-app = FastAPI()
manager = LandmarkManager()
optimizer = Optimizer()
refiner = Refiner(optimizer=optimizer)
+supabase = Supabase()


+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    """Function to run at the start of the app"""
+    logger.info("Setting up logging")
+    configure_logging()
+    yield
+    logger.info("Shutting down logging")


+app = FastAPI(lifespan=lifespan)


+# Include the payment routes and supabase routes
+app.include_router(payment_router)
+app.include_router(supabase_router)


@app.post("/trip/new")
-def new_trip(preferences: Preferences,
-             start: tuple[float, float],
-             end: tuple[float, float] | None = None) -> Trip:
+def new_trip(user_id: str = Body(...),
+             preferences: Preferences = Body(...),
+             start: tuple[float, float] = Body(...),
+             end: tuple[float, float] | None = Body(None),
+             background_tasks: BackgroundTasks = None) -> Trip:
    """
    Main function to call the optimizer.
@@ -36,6 +62,19 @@ def new_trip(preferences: Preferences,
    Returns:
        (uuid) : The uuid of the first landmark in the optimized route
    """
+    # Check for valid user balance.
+    try:
+        if not supabase.check_balance(user_id=user_id):
+            logger.warning('Insufficient credits to perform this action.')
+            return {"error": "Insufficient credits"}, 400  # Return a 400 Bad Request with an appropriate message
+    except SyntaxError as se:
+        raise HTTPException(status_code=400, detail=str(se)) from se
+    except ValueError as ve:
+        raise HTTPException(status_code=406, detail=str(ve)) from ve
+    except Exception as exc:
+        raise HTTPException(status_code=500, detail=f"Internal Server Error: {str(exc)}") from exc
+
    # Check for invalid input.
    if preferences is None:
        raise HTTPException(status_code=406, detail="Preferences not provided or incomplete.")
    if (preferences.shopping.score == 0 and
|
||||
osm_type='start',
|
||||
osm_id=0,
|
||||
attractiveness=0,
|
||||
duration=0,
|
||||
must_do=True,
|
||||
n_tags = 0)
|
||||
|
||||
@@ -65,35 +105,64 @@ def new_trip(preferences: Preferences,
|
||||
osm_type='end',
|
||||
osm_id=0,
|
||||
attractiveness=0,
|
||||
duration=0,
|
||||
must_do=True,
|
||||
n_tags=0)
|
||||
|
||||
start_time = time.time()
|
||||
# Generate the landmarks from the start location
|
||||
landmarks, landmarks_short = manager.generate_landmarks_list(
|
||||
center_coordinates = start,
|
||||
preferences = preferences
|
||||
)
|
||||
|
||||
if len(landmarks) == 0 :
|
||||
raise HTTPException(status_code=500, detail="No landmarks were found.")
|
||||
|
||||
# insert start and finish to the landmarks list
|
||||
landmarks_short.insert(0, start_landmark)
|
||||
landmarks_short.append(end_landmark)
|
||||
|
||||
t_generate_landmarks = time.time() - start_time
|
||||
logger.info(f'Fetched {len(landmarks)} landmarks in \t: {round(t_generate_landmarks,3)} seconds')
|
||||
start_time = time.time()
|
||||
|
||||
# First stage optimization
|
||||
try:
|
||||
base_tour = optimizer.solve_optimization(preferences.max_time_minute, landmarks_short)
|
||||
except ArithmeticError as exc:
|
||||
raise HTTPException(status_code=500, detail="No solution found") from exc
|
||||
except TimeoutError as exc:
|
||||
raise HTTPException(status_code=500, detail="Optimzation took too long") from exc
|
||||
except Exception as exc:
|
||||
raise HTTPException(status_code=500, detail=f"Optimization failed: {str(exc)}") from exc
|
||||
|
||||
t_first_stage = time.time() - start_time
|
||||
start_time = time.time()
|
||||
|
||||
# Second stage optimization
|
||||
refined_tour = refiner.refine_optimization(landmarks, base_tour,
|
||||
# TODO : only if necessary (not enough landmarks for ex.)
|
||||
try :
|
||||
refined_tour = refiner.refine_optimization(landmarks, base_tour,
|
||||
preferences.max_time_minute,
|
||||
preferences.detour_tolerance_minute)
|
||||
except TimeoutError as te :
|
||||
logger.error(f'Refiner failed : {str(te)} Using base tour.')
|
||||
refined_tour = base_tour
|
||||
except Exception as exc :
|
||||
raise HTTPException(status_code=500, detail=f"An unexpected error occurred: {str(exc)}") from exc
|
||||
|
||||
t_second_stage = time.time() - start_time
|
||||
|
||||
logger.debug(f'First stage optimization\t: {round(t_first_stage,3)} seconds')
|
||||
logger.debug(f'Second stage optimization\t: {round(t_second_stage,3)} seconds')
|
||||
logger.info(f'Total computation time\t: {round(t_first_stage + t_second_stage,3)} seconds')
|
||||
linked_tour = LinkedLandmarks(refined_tour)
|
||||
|
||||
# upon creation of the trip, persistence of both the trip and its landmarks is ensured.
|
||||
trip = Trip.from_linked_landmarks(linked_tour, cache_client)
|
||||
logger.info(f'Generated a trip of {trip.total_time} minutes with {len(refined_tour)} landmarks in {round(t_generate_landmarks + t_first_stage + t_second_stage,3)} seconds.')
|
||||
logger.debug('Detailed trip :\n\t' + '\n\t'.join(f'{landmark}' for landmark in refined_tour))
|
||||
|
||||
background_tasks.add_task(fill_cache)
|
||||
supabase.decrement_credit_balance(user_id=user_id)
|
||||
|
||||
return trip
|
||||
|
||||
|
||||
@@ -134,6 +203,45 @@ def get_landmark(landmark_uuid: str) -> Landmark:
        raise HTTPException(status_code=404, detail="Landmark not found") from exc


+@app.post("/trip/recompute-time/{trip_uuid}/{removed_landmark_uuid}")
+def update_trip_time(trip_uuid: str, removed_landmark_uuid: str) -> Trip:
+    """
+    Updates the reaching times of a given trip when removing a landmark.
+
+    Args:
+        trip_uuid (str) : unique identifier for the Trip.
+        removed_landmark_uuid (str) : unique identifier for the removed Landmark.
+
+    Returns:
+        (Trip) : the updated Trip.
+    """
+    # First, fetch the trip in the cache.
+    try:
+        trip = cache_client.get(f'trip_{trip_uuid}')
+    except KeyError as exc:
+        raise HTTPException(status_code=404, detail='Trip not found') from exc
+
+    landmarks = []
+    next_uuid = trip.first_landmark_uuid
+
+    # Extract landmarks
+    try:
+        while next_uuid is not None:
+            landmark = cache_client.get(f'landmark_{next_uuid}')
+            # Filter out the removed landmark.
+            if next_uuid != removed_landmark_uuid:
+                landmarks.append(landmark)
+            next_uuid = landmark.next_uuid  # Prepare for the next iteration
+    except KeyError as exc:
+        raise HTTPException(status_code=404, detail=f'landmark {next_uuid} not found') from exc
+
+    # Re-link everything and compute times again
+    linked_tour = LinkedLandmarks(landmarks)
+    trip = Trip.from_linked_landmarks(linked_tour, cache_client)
+
+    return trip


@app.post("/toilets/new")
def get_toilets(location: tuple[float, float] = Query(...), radius: int = 500) -> list[Toilets]:
    """
@@ -152,7 +260,7 @@ def get_toilets(location: tuple[float, float] = Query(...), radius: int = 500) -
        raise HTTPException(status_code=406, detail="Coordinates not provided or invalid")
    if not (-90 <= location[0] <= 90 or -180 <= location[1] <= 180):
        raise HTTPException(status_code=422, detail="Start coordinates not in range")

    toilets_manager = ToiletsManager(location, radius)

    try:
@@ -160,3 +268,6 @@ def get_toilets(location: tuple[float, float] = Query(...), radius: int = 500) -
        return toilets_list
    except KeyError as exc:
        raise HTTPException(status_code=404, detail="No toilets found") from exc
backend/src/optimization/__init__.py (new file): 0 lines

backend/src/optimization/optimizer.py (new file): 638 lines
@@ -0,0 +1,638 @@
|
||||
"""Module responsible for sloving an MILP to find best tour around the given landmarks."""
|
||||
import logging
|
||||
from collections import defaultdict, deque
|
||||
import yaml
|
||||
import numpy as np
|
||||
import pulp as pl
|
||||
|
||||
from ..structs.landmark import Landmark
|
||||
from ..utils.get_time_distance import get_time
|
||||
from ..constants import OPTIMIZER_PARAMETERS_PATH
|
||||
|
||||
|
||||
# Silence the pupl logger
|
||||
logging.getLogger('pulp').setLevel(level=logging.CRITICAL)
|
||||
|
||||
|
||||
class Optimizer:
|
||||
"""
|
||||
Optimizes the balance between the efficiency of a tour and the inclusion of landmarks.
|
||||
|
||||
The `Optimizer` class is responsible for calculating the best possible detour adjustments
|
||||
to a tour based on specific parameters such as detour time, walking speed, and the maximum
|
||||
number of landmarks to visit. It helps refine a tour by determining whether adding additional
|
||||
landmarks would significantly reduce the overall efficiency.
|
||||
|
||||
Responsibilities:
|
||||
- Calculates the maximum detour time allowed for a given tour.
|
||||
- Considers the detour factor, which accounts for real-world walking paths versus straight-line distance.
|
||||
- Takes into account the average walking speed to estimate walking times.
|
||||
- Limits the number of landmarks that can be added to the tour to prevent excessive detouring.
|
||||
- Allows some overflow (overshoot) in the maximum detour time to accommodate for slight inefficiencies.
|
||||
|
||||
Attributes:
|
||||
logger (logging.Logger): Logger for capturing relevant events and errors.
|
||||
detour (int): The accepted maximum detour time in minutes.
|
||||
detour_factor (float): The ratio between straight-line distance and actual walking distance in cities.
|
||||
average_walking_speed (float): The average walking speed of an adult (in meters per second or kilometers per hour).
|
||||
max_landmarks (int): The maximum number of landmarks to include in the tour.
|
||||
overshoot (float): The overshoot allowance for exceeding the maximum detour time in a restrictive manner.
|
||||
"""
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
detour: int = None # accepted max detour time (in minutes)
|
||||
detour_factor: float # detour factor of straight line vs real distance in cities
|
||||
average_walking_speed: float # average walking speed of adult
|
||||
max_landmarks: int # max number of landmarks to visit
|
||||
overshoot: float # overshoot to allow maxtime to overflow. Optimizer is a bit restrictive
|
||||
|
||||
def __init__(self) :
|
||||
|
||||
# load parameters from file
|
||||
with OPTIMIZER_PARAMETERS_PATH.open('r') as f:
|
||||
parameters = yaml.safe_load(f)
|
||||
self.detour_factor = parameters['detour_factor']
|
||||
self.average_walking_speed = parameters['average_walking_speed']
|
||||
self.max_landmarks = parameters['max_landmarks']
|
||||
self.overshoot = parameters['overshoot']
|
||||
self.time_limit = parameters['time_limit']
|
||||
self.gap_rel = parameters['gap_rel']
|
||||
self.max_iter = parameters['max_iter']
|
||||
|
||||
|
||||
def init_ub_time(self, prob: pl.LpProblem, x: pl.LpVariable, L: int, landmarks: list[Landmark], max_time: int):
|
||||
"""
|
||||
Initialize the objective function and inequality constraints for the linear program.
|
||||
|
||||
This function sets up the objective to maximize the attractiveness of visiting landmarks,
|
||||
while ensuring that the total time (including travel and visit duration) does not exceed
|
||||
the maximum allowed time. It calculates the pairwise travel times between landmarks and
|
||||
incorporates visit duration to form the inequality constraints.
|
||||
|
||||
The objective is to maximize sightseeing by selecting the most attractive landmarks within
|
||||
the time limit.
|
||||
|
||||
Args:
|
||||
prob (pl.LpProblem): The linear programming problem where constraints and the objective will be added.
|
||||
x (pl.LpVariable): A decision variable representing whether a landmark is visited.
|
||||
L (int): The number of landmarks.
|
||||
landmarks (list[Landmark]): List of landmarks to visit.
|
||||
max_time (int): Maximum allowable time for sightseeing, including travel and visit duration.
|
||||
|
||||
Returns:
|
||||
None: Adds the objective function and constraints to the LP problem directly.
|
||||
constraint coefficients, and the right-hand side of the inequality constraint.
|
||||
"""
|
||||
L = len(landmarks)
|
||||
|
||||
# Objective function coefficients. a*x1 + b*x2 + c*x3 + ...
|
||||
c = np.zeros(L, dtype=np.int16)
|
||||
|
||||
# inequality matrix and vector
|
||||
A_ub = np.zeros(L*L, dtype=np.int16)
|
||||
b_ub = round(max_time*(1.1+max_time*self.overshoot))
|
||||
|
||||
for i, spot1 in enumerate(landmarks) :
|
||||
c[i] = spot1.attractiveness
|
||||
for j in range(i+1, L) :
|
||||
if i !=j :
|
||||
t = get_time(spot1.location, landmarks[j].location)
|
||||
A_ub[i*L + j] = t + spot1.duration
|
||||
A_ub[j*L + i] = t + landmarks[j].duration
|
||||
|
||||
# Expand 'c' to L*L for every decision variable and ad
|
||||
c = np.tile(c, L)
|
||||
|
||||
# Now sort and modify A_ub for each row
|
||||
if L > 22 :
|
||||
for i in range(L):
|
||||
# Get indices of the 4 smallest values in row i
|
||||
row_values = A_ub[i*L:i*L+L]
|
||||
closest_indices = np.argpartition(row_values, 22)[:22]
|
||||
|
||||
# Create a mask for non-closest landmarks
|
||||
mask = np.ones(L, dtype=bool)
|
||||
mask[closest_indices] = False
|
||||
|
||||
# Set non-closest landmarks to 32765
|
||||
row_values[mask] = 32765
|
||||
A_ub[i*L:i*L+L] = row_values
|
||||
|
||||
# Add the objective and the 1 distance constraint
|
||||
prob += pl.lpSum([c[j] * x[j] for j in range(L*L)])
|
||||
prob += (pl.lpSum([A_ub[j] * x[j] for j in range(L*L)]) <= b_ub)
|
||||
|
||||
|
||||
def respect_number(self, prob: pl.LpProblem, x: pl.LpVariable, L: int, max_landmarks: int):
|
||||
"""
|
||||
Generate constraints to ensure each landmark is visited at most once and cap the total number of visited landmarks.
|
||||
|
||||
This function adds the following constraints to the linear program:
|
||||
1. Each landmark is visited at most once by creating L-2 constraints (one for each landmark).
|
||||
2. The total number of visited landmarks is capped by the specified maximum number (`max_landmarks`) plus 2.
|
||||
|
||||
Args:
|
||||
prob (pl.LpProblem): The linear programming problem where constraints will be added.
|
||||
x (pl.LpVariable): Decision variable indicating whether a landmark is visited.
|
||||
L (int): The total number of landmarks.
|
||||
max_landmarks (int): The maximum number of landmarks that can be visited.
|
||||
|
||||
Returns:
|
||||
None: This function directly modifies the `prob` object by adding constraints.
|
||||
"""
|
||||
# L-2 constraints: each landmark is visited exactly once
|
||||
for i in range(1, L-1):
|
||||
prob += (pl.lpSum([x[L*i + j] for j in range(L)]) <= 1)
|
||||
|
||||
# 1 constraint: cap the total number of visits
|
||||
prob += (pl.lpSum([1 * x[j] for j in range(L*L)]) <= max_landmarks+2)
|
||||
|
||||
|
||||
def break_sym(self, prob: pl.LpProblem, x: pl.LpVariable, L: int):
|
||||
"""
|
||||
Generate constraints to prevent simultaneous travel between two landmarks
|
||||
in both directions. This constraint ensures that, for any pair of landmarks,
|
||||
travel from landmark i to landmark j (dij) and travel from landmark j to landmark i (dji)
|
||||
cannot happen simultaneously.
|
||||
|
||||
This method adds constraints to break symmetry, specifically to prevent
|
||||
cyclic paths with only two elements. It does not prevent cyclic paths involving more than two elements.
|
||||
|
||||
Args:
|
||||
prob (pl.LpProblem): The linear programming problem where constraints will be added.
|
||||
x (pl.LpVariable): Decision variable representing travel between landmarks.
|
||||
L (int): The total number of landmarks.
|
||||
|
||||
Returns:
|
||||
None: This function modifies the `prob` object by adding constraints in-place.
|
||||
"""
|
||||
upper_ind = np.triu_indices(L, 0, L) # Get the upper triangular indices
|
||||
up_ind_x = upper_ind[0]
|
||||
up_ind_y = upper_ind[1]
|
||||
|
||||
# Loop over the upper triangular indices, excluding diagonal elements
|
||||
for i, up_ind in enumerate(up_ind_x):
|
||||
if up_ind != up_ind_y[i]:
|
||||
# Add (L*L-L)/2 constraints to break symmetry
|
||||
prob += (x[up_ind*L + up_ind_y[i]] + x[up_ind_y[i]*L + up_ind] <= 1)
|
||||
|
||||
|
||||
def init_eq_not_stay(self, prob: pl.LpProblem, x: pl.LpVariable, L: int):
|
||||
"""
|
||||
Generate constraints to prevent staying at the same position during travel.
|
||||
Specifically, it removes travel from a landmark to itself (e.g., d11, d22, d33, etc.).
|
||||
|
||||
This function adds one equality constraint to the optimization problem that ensures
|
||||
no decision variable corresponding to staying at the same landmark is included
|
||||
in the solution. This helps in ensuring that the path does not include self-loops.
|
||||
|
||||
Args:
|
||||
prob (pl.LpProblem): The linear programming problem where constraints will be added.
|
||||
x (pl.LpVariable): Decision variable representing travel between landmarks.
|
||||
L (int): The total number of landmarks.
|
||||
|
||||
Returns:
|
||||
None: This function modifies the `prob` object by adding an equality constraint in-place.
|
||||
"""
|
||||
A_eq = np.zeros((L, L), dtype=np.int8)
|
||||
|
||||
# Set diagonal elements to 1 (to prevent staying in the same position)
|
||||
np.fill_diagonal(A_eq, 1)
|
||||
A_eq = A_eq.flatten()
|
||||
|
||||
# First equality constraint
|
||||
prob += (pl.lpSum([A_eq[j] * x[j] for j in range(L*L)]) == 0)
|
||||
|
||||
|
||||
def respect_start_finish(self, prob: pl.LpProblem, x: pl.LpVariable, L: int):
|
||||
"""
|
||||
Generate constraints to ensure that the optimization starts at the designated
|
||||
start landmark and finishes at the goal landmark.
|
||||
|
||||
Specifically, this function adds three equality constraints:
|
||||
1. Ensures that the path starts at the designated start landmark (row 0).
|
||||
2. Ensures that the path finishes at the designated goal landmark (row 1).
|
||||
3. Prevents any arrivals at the start landmark or departures from the goal landmark (row 2).
|
||||
|
||||
Args:
|
||||
prob (pl.LpProblem): The linear programming problem where constraints will be added.
|
||||
x (pl.LpVariable): Decision variable representing travel between landmarks.
|
||||
L (int): The total number of landmarks.
|
||||
|
||||
Returns:
|
||||
None: This function modifies the `prob` object by adding three equality constraints in-place.
|
||||
"""
|
||||
# Fill-in row 0.
|
||||
A_eq = np.zeros((3,L*L), dtype=np.int8)
|
||||
A_eq[0, :L] = np.ones(L, dtype=np.int8) # sets departures only for start (horizontal ones)
|
||||
for k in range(L-1) :
|
||||
if k != 0 :
|
||||
# Fill-in row 1
|
||||
A_eq[1, k*L+L-1] = 1 # sets arrivals only for finish (vertical ones)
|
||||
# Fill-in row 1
|
||||
A_eq[2, k*L] = 1
|
||||
|
||||
A_eq[2, L*(L-1):] = np.ones(L, dtype=np.int8) # prevents arrivals at start and departures from goal
|
||||
b_eq= [1, 1, 0]
|
||||
|
||||
# Add the constraints to pulp
|
||||
for i in range(3) :
|
||||
prob += (pl.lpSum([A_eq[i][j] * x[j] for j in range(L*L)]) == b_eq[i])
|
||||
|
||||
|
||||
def respect_order(self, prob: pl.LpProblem, x: pl.LpVariable, L: int):
|
||||
"""
|
||||
Generate constraints to tie the optimization problem together and prevent
|
||||
stacked ones, although this does not fully prevent circles.
|
||||
|
||||
This function adds constraints to the optimization problem that prevent
|
||||
simultaneous travel between landmarks in a way that would result in stacked ones.
|
||||
However, it does not fully prevent circular paths.
|
||||
|
||||
Args:
|
||||
prob (pl.LpProblem): The linear programming problem where constraints will be added.
|
||||
x (pl.LpVariable): Decision variable representing travel between landmarks.
|
||||
L (int): The total number of landmarks.
|
||||
|
||||
Returns:
|
||||
None: This function modifies the `prob` object by adding L-2 equality constraints in-place.
|
||||
"""
|
||||
# FIXME: weird 0 artifact in the coefficients popping up
|
||||
# Loop through rows 1 to L-2 to prevent stacked ones
|
||||
for i in range(1, L-1):
|
||||
# Add the constraint that sums across each "row" or "block" in the decision variables
|
||||
row_sum = -pl.lpSum(x[i + j*L] for j in range(L)) + pl.lpSum(x[i*L:(i+1)*L])
|
||||
prob += (row_sum == 0)
|
||||
|
||||
|
||||
def respect_user_must(self, prob: pl.LpProblem, x: pl.LpVariable, L: int, landmarks: list[Landmark]) :
|
||||
"""
|
||||
Generate constraints to ensure that landmarks marked as 'must_do' are included in the optimization.
|
||||
|
||||
This function adds constraints to the optimization problem to ensure that landmarks marked as
|
||||
'must_do' are included in the solution. It precomputes the constraints and adds them to the
|
||||
problem accordingly.
|
||||
|
||||
Args:
|
||||
prob (pl.LpProblem): The linear programming problem where constraints will be added.
|
||||
x (pl.LpVariable): Decision variable representing travel between landmarks.
|
||||
L (int): The total number of landmarks.
|
||||
landmarks (list[Landmark]): List of landmarks, where some are marked as 'must_do'.
|
||||
|
||||
Returns:
|
||||
None: This function modifies the `prob` object by adding equality constraints in-place.
|
||||
"""
|
||||
ones = np.ones(L, dtype=np.int8)
|
||||
A_eq = np.zeros(L*L, dtype=np.int8)
|
||||
|
||||
for i, elem in enumerate(landmarks) :
|
||||
if elem.must_do is True and i not in [0, L-1]:
|
||||
A_eq[i*L:i*L+L] = ones
|
||||
prob += (pl.lpSum([A_eq[j] * x[j] for j in range(L*L)]) == 1)
|
||||
if elem.must_avoid is True and i not in [0, L-1]:
|
||||
A_eq[i*L:i*L+L] = ones
|
||||
prob += (pl.lpSum([A_eq[j] * x[j] for j in range(L*L)]) == 2)
|
||||
|
||||
|
||||
def prevent_circle(self, prob: pl.LpProblem, x: pl.LpVariable, circle_vertices: list, L: int) :
|
||||
"""
|
||||
Prevent circular paths by adding constraints to the optimization.
|
||||
|
||||
This function ensures that circular paths in both directions (i.e., forward and reverse)
|
||||
between landmarks are avoided in the optimization problem by adding the corresponding constraints.
|
||||
|
||||
Args:
|
||||
prob (pl.LpProblem): The linear programming problem instance to which the constraints will be added.
|
||||
x (pl.LpVariable): Decision variable representing the travel between landmarks in the problem.
|
||||
circle_vertices (list): List of indices representing the landmarks that form a circular path.
|
||||
L (int): The total number of landmarks.
|
||||
|
||||
Returns:
|
||||
None: This function modifies the `prob` object by adding two equality constraints that
|
||||
prevent circular paths in both directions for the specified circle vertices.
|
||||
"""
|
||||
l = np.zeros((2, L*L), dtype=np.int8)
|
||||
|
||||
for i, node in enumerate(circle_vertices[:-1]) :
|
||||
next = circle_vertices[i+1]
|
||||
|
||||
l[0, node*L + next] = 1
|
||||
l[1, next*L + node] = 1
|
||||
|
||||
s = circle_vertices[0]
|
||||
g = circle_vertices[-1]
|
||||
|
||||
l[0, g*L + s] = 1
|
||||
l[1, s*L + g] = 1
|
||||
|
||||
# Add the constraints
|
||||
prob += (pl.lpSum([l[0][j] * x[j] for j in range(L*L)]) == 0)
|
||||
prob += (pl.lpSum([l[1][j] * x[j] for j in range(L*L)]) == 0)
|
||||
|
||||
|
||||
def is_connected(self, resx) :
|
||||
"""
|
||||
Determine the order of visits and detect any circular paths in the given configuration.
|
||||
|
||||
Args:
|
||||
resx (list): List of edge weights.
|
||||
|
||||
Returns:
|
||||
tuple[list[int], Optional[list[list[int]]]]: A tuple containing the visit order and a list of any detected circles.
|
||||
"""
|
||||
resx = np.round(resx).astype(np.int8) # round all elements and cast them to int
|
||||
|
||||
N = len(resx) # length of res
|
||||
L = int(np.sqrt(N)) # number of landmarks. CAST INTO INT but should not be a problem because N = L**2 by def.
|
||||
|
||||
nonzeroind = np.nonzero(resx)[0] # the return is a little funny so I use the [0]
|
||||
nonzero_tup = np.unravel_index(nonzeroind, (L,L))
|
||||
|
||||
ind_a = nonzero_tup[0]
|
||||
ind_b = nonzero_tup[1]
|
||||
|
||||
# Extract all journeys
|
||||
all_journeys_nodes = []
|
||||
visited_nodes = set()
|
||||
|
||||
for node in ind_a:
|
||||
if node not in visited_nodes:
|
||||
journey_nodes = self.get_journey(node, ind_a, ind_b)
|
||||
all_journeys_nodes.append(journey_nodes)
|
||||
visited_nodes.update(journey_nodes)
|
||||
|
||||
for l in all_journeys_nodes :
|
||||
if 0 in l :
|
||||
all_journeys_nodes.remove(l)
|
||||
break
|
||||
|
||||
if not all_journeys_nodes :
|
||||
return None
|
||||
|
||||
return all_journeys_nodes
|
||||
|
||||
|
||||
def get_journey(self, start, ind_a, ind_b):
|
||||
"""
|
||||
Trace the journey starting from a given node and follow the connections between landmarks.
|
||||
This method constructs a graph from two lists of landmark connections, `ind_a` and `ind_b`,
|
||||
where each element in `ind_a` is connected to the corresponding element in `ind_b`.
|
||||
It then performs a depth-first search (DFS) starting from the `start` node to determine
|
||||
the path (journey) by following the connections.
|
||||
|
||||
Args:
|
||||
start (int): The starting node of the journey.
|
||||
ind_a (list[int]): List of "from" nodes, representing the starting points of each connection.
|
||||
ind_b (list[int]): List of "to" nodes, representing the endpoints of each connection.
|
||||
|
||||
Returns:
|
||||
list[int]: A list of nodes representing the order of the journey, starting from the `start` node.
|
||||
|
||||
Example:
|
||||
If `ind_a = [0, 1, 2]` and `ind_b = [1, 2, 3]`, starting from node 0, the journey would be `[0, 1, 2, 3]`.
|
||||
"""
|
||||
graph = defaultdict(list)
|
||||
for a, b in zip(ind_a, ind_b):
|
||||
graph[a].append(b)
|
||||
|
||||
journey_nodes = []
|
||||
visited = set()
|
||||
stack = deque([start])
|
||||
|
||||
while stack:
|
||||
node = stack.pop()
|
||||
if node not in visited:
|
||||
visited.add(node)
|
||||
journey_nodes.append(node)
|
||||
for neighbor in graph[node]:
|
||||
if neighbor not in visited:
|
||||
stack.append(neighbor)
|
||||
|
||||
return journey_nodes
|
||||
|
||||
|
||||
def get_order(self, resx):
|
||||
"""
|
||||
Determine the order of visits given the result of the optimization.
|
||||
|
||||
Args:
|
||||
resx (list): List of edge weights.
|
||||
|
||||
Returns:
|
||||
list[int]: A list containing the visit order.
|
||||
"""
|
||||
resx = np.round(resx).astype(np.uint8) # must contain only 0 and 1
|
||||
|
||||
N = len(resx) # length of res
|
||||
L = int(np.sqrt(N)) # number of landmarks. CAST INTO INT but should not be a problem because N = L**2 by def.
|
||||
|
||||
nonzeroind = np.nonzero(resx)[0] # the return is a little funny so I use the [0]
|
||||
nonzero_tup = np.unravel_index(nonzeroind, (L,L))
|
||||
|
||||
ind_a = nonzero_tup[0].tolist()
|
||||
ind_b = nonzero_tup[1].tolist()
|
||||
|
||||
order = [0]
|
||||
current = 0
|
||||
used_indices = set() # Track visited index pairs
|
||||
|
||||
while True:
|
||||
# Find index of the current node in ind_a
|
||||
try:
|
||||
i = ind_a.index(current)
|
||||
except ValueError:
|
||||
break # No more links, stop the search
|
||||
|
||||
if i in used_indices:
|
||||
break # Prevent infinite loops
|
||||
|
||||
used_indices.add(i) # Mark this index as visited
|
||||
next_node = ind_b[i] # Get the corresponding node in ind_b
|
||||
order.append(next_node) # Add it to the path
|
||||
|
||||
# Switch roles, now look for next_node in ind_a
|
||||
try:
|
||||
current = next_node
|
||||
except ValueError:
|
||||
break # No further connections, end the path
|
||||
|
||||
return order
|
||||
|
||||
|
||||
def link_list(self, order: list[int], landmarks: list[Landmark])->list[Landmark] :
|
||||
"""
|
||||
Compute the time to reach from each landmark to the next and create a list of landmarks with updated travel times.
|
||||
|
||||
Args:
|
||||
order (list[int]): List of indices representing the order of landmarks to visit.
|
||||
landmarks (list[Landmark]): List of all landmarks.
|
||||
|
||||
Returns:
|
||||
list[Landmark]]: The updated linked list of landmarks with travel times
|
||||
"""
|
||||
L = []
|
||||
j = 0
|
||||
while j < len(order)-1 :
|
||||
# get landmarks involved
|
||||
elem = landmarks[order[j]]
|
||||
next = landmarks[order[j+1]]
|
||||
|
||||
# get attributes
|
||||
elem.time_to_reach_next = get_time(elem.location, next.location)
|
||||
elem.must_do = True
|
||||
elem.location = (round(elem.location[0], 5), round(elem.location[1], 5))
|
||||
elem.next_uuid = next.uuid
|
||||
L.append(elem)
|
||||
j += 1
|
||||
|
||||
next.location = (round(next.location[0], 5), round(next.location[1], 5))
|
||||
next.must_do = True
|
||||
L.append(next)
|
||||
|
||||
return L
|
||||
|
||||
|
||||
def warm_start(self, x: list[pl.LpVariable], L: int) :
|
||||
"""
|
||||
This function sets the initial values of the decision variables to a feasible solution.
|
||||
This can help the solver start with a feasible or heuristic solution,
|
||||
potentially speeding up convergence.
|
||||
|
||||
Args:
|
||||
x (list[pl.LpVariable]): A list of PuLP decision variables (binary variables).
|
||||
L (int): The size parameter, representing a dimension (likely related to a grid or matrix).
|
||||
|
||||
Returns:
|
||||
list[pl.LpVariable]: The modified list of PuLP decision variables with initial values set.
|
||||
"""
|
||||
for i in range(L*L) :
|
||||
x[i].setInitialValue(0)
|
||||
|
||||
x[1].setInitialValue(1)
|
||||
x[2*L-1].setInitialValue(1)
|
||||
|
||||
return x
|
||||
|
||||
|
||||
def pre_processing(self, L: int, landmarks: list[Landmark], max_time: int, max_landmarks: int | None) :
|
||||
"""
|
||||
Preprocesses the optimization problem by setting up constraints and variables for the tour optimization.
|
||||
|
||||
This method initializes and prepares the linear programming problem to optimize a tour that includes landmarks,
|
||||
while respecting various constraints such as time limits, the number of landmarks to visit, and user preferences.
|
||||
The pre-processing step sets up the problem before solving it using a linear programming solver.
|
||||
|
||||
Responsibilities:
|
||||
        - Defines the optimization problem using linear programming (LP) with the objective to maximize the tour value.
        - Creates binary decision variables for each potential transition between landmarks.
        - Sets up inequality constraints to respect the maximum time available for the tour and the maximum number of landmarks.
        - Implements equality constraints to ensure the tour respects the start and finish positions, avoids staying in the same place, and adheres to a visit order.
        - Forces inclusion or exclusion of specific landmarks based on user preferences.

        Attributes:
            prob (pl.LpProblem): The linear programming problem to be solved.
            x (list): A list of binary variables representing transitions between landmarks.
            L (int): The total number of landmarks considered in the optimization.
            landmarks (list[Landmark]): The list of landmarks to be visited in the tour.
            max_time (int): The maximum allowable time for the entire tour.
            max_landmarks (int | None): The maximum number of landmarks to visit in the tour, or None if no limit is set.

        Returns:
            prob (pl.LpProblem): The linear programming problem set up for optimization.
            x (list): The list of binary variables for transitions between landmarks in the tour.
        """
        if max_landmarks is None :
            max_landmarks = self.max_landmarks

        # Initialize the optimization problem
        prob = pl.LpProblem("OptimizationProblem", pl.LpMaximize)

        # Define the problem
        x_bounds = [(0, 1)]*L*L
        x = [pl.LpVariable(f"x_{i}", lowBound=x_bounds[i][0], upBound=x_bounds[i][1], cat='Binary') for i in range(L*L)]

        # Set up the inequality constraints
        self.init_ub_time(prob, x, L, landmarks, max_time)   # Adds the distances from each landmark to the others.
        self.respect_number(prob, x, L, max_landmarks)       # Respects the max number of visits (no more possible stops than landmarks).
        self.break_sym(prob, x, L)                           # Breaks the 'zig-zag' symmetry. Avoids d12 and d21 but not larger circles.

        # Set up the equality constraints
        self.init_eq_not_stay(prob, x, L)              # Force the solution not to stay in the same place
        self.respect_start_finish(prob, x, L)          # Force start and finish positions
        self.respect_order(prob, x, L)                 # Respect the order of visits (only works when max_time is the limiting factor)
        self.respect_user_must(prob, x, L, landmarks)  # Force inclusion/exclusion of landmarks set by the user.

        # return prob, self.warm_start(x, L)
        return prob, x


    def solve_optimization(self, max_time: int, landmarks: list[Landmark], max_landmarks: int = None) -> list[Landmark]:
        """
        Main optimization pipeline to solve the landmark visiting problem.

        This method sets up and solves a linear programming problem with constraints to find an optimal tour of landmarks,
        considering user-defined must-visit landmarks, start and finish points, and ensuring no cycles are present.

        Args:
            max_time (int): Maximum time allowed for the tour in minutes.
            landmarks (list[Landmark]): List of landmarks to visit.
            max_landmarks (int): Maximum number of landmarks to visit.

        Returns:
            list[Landmark]: The optimized tour of landmarks with updated travel times, or None if no valid solution is found.
        """
        # Set up the optimization problem.
        L = len(landmarks)
        prob, x = self.pre_processing(L, landmarks, max_time, max_landmarks)

        # Solve the problem and extract results.
        try :
            prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit+1, gapRel=self.gap_rel))
        except Exception as exc :
            raise Exception(f"No solution found: {exc}") from exc
        status = pl.LpStatus[prob.status]
        solution = [pl.value(var) for var in x]  # The values of the decision variables (will be 0 or 1)

        self.logger.debug("First results are out. Looking out for circles and correcting...")

        # Raise an error if no solution is found. FIXME: for now this throws the internal server error
        if status != 'Optimal' :
            self.logger.error("The problem is overconstrained, no solution on first try.")
            raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.")

        # If there is a solution, we're good to go, just check for connectedness
        circles = self.is_connected(solution)

        i = 0
        while circles is not None :
            i += 1
            if i == self.max_iter :
                self.logger.error(f'Timeout: No solution found after {self.max_iter} iterations.')
                raise TimeoutError(f"Optimization took too long. No solution found after {self.max_iter} iterations.")

            for circle in circles :
                self.prevent_circle(prob, x, circle, L)

            # Solve the problem again
            try :
                prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit, gapRel=self.gap_rel))
            except Exception as exc :
                raise Exception(f"No solution found: {exc}") from exc

            solution = [pl.value(var) for var in x]

            if pl.LpStatus[prob.status] != 'Optimal' :
                self.logger.error(f"The problem is overconstrained, no solution after {i} cycles.")
                raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.")

            circles = self.is_connected(solution)
            if circles is None :
                break

        # Sort the landmarks in the order of the solution
        order = self.get_order(solution)
        tour = [landmarks[i] for i in order]

        self.logger.info(f"Re-optimized {i} times, objective value : {int(pl.value(prob.objective))}")
        return tour
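
To make the structure above concrete, here is a minimal, self-contained sketch of the kind of problem `pre_processing` builds with PuLP. The scores and travel times are invented placeholders, and the real constraint helpers (`init_ub_time`, `respect_start_finish`, ...) encapsulate more logic than shown:

```python
import pulp as pl

L = 3                                 # three landmarks: 0 = start, 2 = finish
scores = [0, 5, 0]                    # placeholder attractiveness per landmark
times = [[0, 10, 20],
         [10, 0, 15],
         [20, 15, 0]]                 # placeholder travel times in minutes
max_time = 60

prob = pl.LpProblem("TourSketch", pl.LpMaximize)
# x[i*L + j] == 1 means the tour travels from landmark i to landmark j.
x = [pl.LpVariable(f"x_{i}", cat="Binary") for i in range(L * L)]

# Objective: total score of the landmarks we arrive at.
prob += pl.lpSum(scores[j] * x[i * L + j] for i in range(L) for j in range(L))

# Inequality constraint: keep the total travel time within the budget.
prob += pl.lpSum(times[i][j] * x[i * L + j] for i in range(L) for j in range(L)) <= max_time

# Equality constraints: never "travel" from a landmark to itself,
# leave the start exactly once, arrive at the finish exactly once,
# and arrive at any landmark at most once.
for i in range(L):
    prob += x[i * L + i] == 0
prob += pl.lpSum(x[0 * L + j] for j in range(L)) == 1
prob += pl.lpSum(x[i * L + (L - 1)] for i in range(L)) == 1
for j in range(L):
    prob += pl.lpSum(x[i * L + j] for i in range(L)) <= 1

prob.solve(pl.PULP_CBC_CMD(msg=False))
print(pl.LpStatus[prob.status], [pl.value(v) for v in x])  # picks 0 -> 1 -> 2
```

The zero/one solution vector read back with `pl.value` is exactly what `is_connected` and `get_order` consume in the real pipeline.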
@@ -1,23 +1,32 @@
import yaml, logging

from shapely import buffer, LineString, Point, Polygon, MultiPoint, concave_hull
"""Allows refining the tour by adding more landmarks and making the path easier to follow."""
import logging
from math import pi
import yaml
from shapely import buffer, LineString, Point, Polygon, MultiPoint, concave_hull

from ..structs.landmark import Landmark
from . import take_most_important, get_time_separation
from ..utils.get_time_distance import get_time
from ..utils.take_most_important import take_most_important
from .optimizer import Optimizer
from ..constants import OPTIMIZER_PARAMETERS_PATH


class Refiner :
    """
    Refines a tour by incorporating smaller landmarks along the path to enhance the experience.

    This class is designed to adjust an existing tour by considering additional,
    smaller points of interest (landmarks) that may require minor detours but
    improve the overall quality of the tour. It balances the efficiency of travel
    with the added value of visiting these landmarks.
    """
    logger = logging.getLogger(__name__)

    detour_factor: float           # detour factor of straight line vs real distance in cities
    detour_corridor_width: float   # width of the corridor around the path
    average_walking_speed: float   # average walking speed of an adult
    max_landmarks_refiner: int     # max number of landmarks to visit
    optimizer: Optimizer           # optimizer object

    def __init__(self, optimizer: Optimizer) :
@@ -45,7 +54,7 @@ class Refiner :
        """
        corrected_width = (180*width)/(6371000*pi)

        path = self.create_linestring(landmarks)
        obj = buffer(path, corrected_width, join_style="mitre", cap_style="square", mitre_limit=2)

@@ -70,7 +79,7 @@ class Refiner :
        return LineString(points)

    # Check if some coordinates are in area. Used for the corridor
    def is_in_area(self, area: Polygon, coordinates) -> bool :
        """
        Check if a given point is within a specified area.
@@ -86,7 +95,7 @@ class Refiner :
        return point.within(area)

    # Function to determine if two landmarks are close to each other
    def is_close_to(self, location1: tuple[float], location2: tuple[float]):
        """
        Determine if two locations are close to each other by rounding their coordinates to 3 decimal places.
@@ -119,7 +128,7 @@ class Refiner :
        Returns:
            list[Landmark]: The rearranged list of landmarks with grouped nearby visits.
        """
        i = 1
        while i < len(tour):
            j = i+1
@@ -131,9 +140,9 @@ class Refiner :
                    break  # Move to the next i-th element after rearrangement
                j += 1
            i += 1

        return tour

    def integrate_landmarks(self, sub_list: list[Landmark], main_list: list[Landmark]) :
        """
        Inserts 'sub_list' of Landmarks inside the 'main_list' by leaving the ends untouched.
@@ -166,27 +175,27 @@ class Refiner :
        should be visited, and the second element is a `Polygon` representing
        the path connecting all landmarks.
        """
        # Step 1: Find 'start' and 'finish' landmarks
        start_idx = next(i for i, lm in enumerate(landmarks) if lm.type == 'start')
        finish_idx = next(i for i, lm in enumerate(landmarks) if lm.type == 'finish')

        start_landmark = landmarks[start_idx]
        finish_landmark = landmarks[finish_idx]

        # Step 2: Create a list of unvisited landmarks excluding 'start' and 'finish'
        unvisited_landmarks = [lm for i, lm in enumerate(landmarks) if i not in [start_idx, finish_idx]]

        # Step 3: Initialize the path with the 'start' landmark
        path = [start_landmark]
        coordinates = [landmarks[start_idx].location]
        current_landmark = start_landmark

        # Step 4: Use the nearest-neighbor heuristic to visit all landmarks
        while unvisited_landmarks:
            nearest_landmark = min(unvisited_landmarks, key=lambda lm: get_time_separation.get_time(current_landmark.location, lm.location))
            nearest_landmark = min(unvisited_landmarks, key=lambda lm: get_time(current_landmark.location, lm.location))
            path.append(nearest_landmark)
            coordinates.append(nearest_landmark.location)
            current_landmark = nearest_landmark
@@ -224,12 +233,12 @@ class Refiner :
        for visited in visited_landmarks :
            visited_names.append(visited.name)

        for landmark in all_landmarks :
            if self.is_in_area(area, landmark.location) and landmark.name not in visited_names:
                second_order_landmarks.append(landmark)

        return take_most_important.take_most_important(second_order_landmarks, int(self.max_landmarks_refiner*0.75))
        return take_most_important(second_order_landmarks, int(self.max_landmarks_refiner*0.75))

    # Try to fix the shortest path using shapely
@@ -256,7 +265,7 @@ class Refiner :
            coords_dict[landmark.location] = landmark

        tour_poly = Polygon(coords)

        better_tour_poly = tour_poly.buffer(0)
        try :
            xs, ys = better_tour_poly.exterior.xy
@@ -265,7 +274,7 @@ class Refiner :
                better_tour_poly = concave_hull(MultiPoint(coords))  # Create concave hull with "core" of tour, leaving out start and finish
                xs, ys = better_tour_poly.exterior.xy

        except :
        except Exception:
            better_tour_poly = concave_hull(MultiPoint(coords))  # Create concave hull with "core" of tour, leaving out start and finish
            xs, ys = better_tour_poly.exterior.xy
        """
@@ -299,7 +308,7 @@ class Refiner :
        # Rearrange only if the polygon is still not simple
        if not better_tour_poly.is_simple :
            better_tour = self.rearrange(better_tour)

        return better_tour

@@ -330,10 +339,10 @@ class Refiner :
        # No need to refine if no detour is taken
        # if detour == 0:
        #     return base_tour

        minor_landmarks = self.get_minor_landmarks(all_landmarks, base_tour, self.detour_corridor_width)

        self.logger.info(f"Using {len(minor_landmarks)} minor landmarks around the predicted path")
        self.logger.debug(f"Using {len(minor_landmarks)} minor landmarks around the predicted path")

        # Full set of visitable landmarks.
        full_set = self.integrate_landmarks(minor_landmarks, base_tour)  # could probably be optimized with less overhead
@@ -341,7 +350,7 @@ class Refiner :
        # Generate a new tour with the optimizer.
        new_tour = self.optimizer.solve_optimization(
            max_time = max_time + detour,
            landmarks = full_set,
            max_landmarks = self.max_landmarks_refiner
        )

@@ -357,7 +366,7 @@ class Refiner :
        # Find the shortest path using the nearest-neighbor heuristic.
        better_tour, better_poly = self.find_shortest_path_through_all_landmarks(new_tour)

        # Fix the tour using Polygons if the path looks weird.
        # Conditions: circular trip and invalid polygon.
        if base_tour[0].location == base_tour[-1].location and not better_poly.is_valid :
            better_tour = self.fix_using_polygon(better_tour)
0    backend/src/overpass/__init__.py    Normal file
132  backend/src/overpass/caching_strategy.py    Normal file
@@ -0,0 +1,132 @@
import os
import json
import hashlib

from ..constants import OSM_CACHE_DIR, OSM_TYPES


def get_cache_key(query: str) -> str:
    """
    Generate a unique cache key for the query using a hash function.
    This ensures that queries with different parameters are cached separately.
    """
    return hashlib.md5(query.encode('utf-8')).hexdigest()


class CachingStrategyBase:
    """
    Base class for implementing caching strategies.
    """
    def get(self, key):
        """Retrieve the cached data associated with the provided key."""
        raise NotImplementedError('Subclass should implement get')

    def set(self, key, value):
        """Store data in the cache with the specified key."""
        raise NotImplementedError('Subclass should implement set')

    def set_hollow(self, key, **kwargs):
        """Create a hollow (empty) cache entry with a specific key."""
        raise NotImplementedError('Subclass should implement set_hollow')

    def close(self):
        """Clean up or close any resources used by the caching strategy."""


class JSONCache(CachingStrategyBase):
    """
    A caching strategy that stores and retrieves data in JSON format.
    """
    def __init__(self, cache_dir=OSM_CACHE_DIR):
        # Add the class name as a suffix to the directory
        self._cache_dir = f'{cache_dir}'
        if not os.path.exists(self._cache_dir):
            os.makedirs(self._cache_dir)

    def _filename(self, key):
        return os.path.join(self._cache_dir, f'{key}.json')

    def get(self, key):
        """Retrieve JSON data from the cache and parse it into a list of dicts."""
        filename = self._filename(key)
        if os.path.exists(filename):
            try:
                # Open and parse the cached JSON data
                with open(filename, 'r', encoding='utf-8') as file:
                    data = json.load(file)
                # Return the data as a list of dicts.
                return data
            except json.JSONDecodeError:
                return None  # Return None if parsing fails
        return None

    def set(self, key, value):
        """Save the JSON data to the cache."""
        filename = self._filename(key)
        try:
            # Write the JSON data to the cache file
            with open(filename, 'w', encoding='utf-8') as file:
                json.dump(value, file, ensure_ascii=False, indent=4)
        except IOError as e:
            raise IOError(f"Error writing to cache file: {filename} - {e}") from e

    def set_hollow(self, key, cell: tuple, osm_types: list,
                   selector: str, conditions: list=None, out='center'):
        """Create an empty placeholder cache entry for a future fill."""
        hollow_key = f'hollow_{key}'
        filename = self._filename(hollow_key)

        # Create the hollow JSON structure
        hollow_data = {
            "key": key,
            "cell": list(cell),
            "osm_types": list(osm_types),
            "selector": selector,
            "conditions": conditions,
            "out": out
        }
        # Write the hollow data to the cache file
        try:
            with open(filename, 'w', encoding='utf-8') as file:
                json.dump(hollow_data, file, ensure_ascii=False, indent=4)
        except IOError as e:
            raise IOError(f"Error writing hollow cache to file: {filename} - {e}") from e

    def close(self):
        """Cleanup method, if needed."""


class CachingStrategy:
    """
    A class to manage different caching strategies.
    """
    __strategy = JSONCache()  # Default caching strategy
    __strategies = {
        'JSON': JSONCache,
    }

    @classmethod
    def use(cls, strategy_name='JSON', **kwargs):
        if cls.__strategy:
            cls.__strategy.close()

        strategy_class = cls.__strategies.get(strategy_name)
        if not strategy_class:
            raise ValueError(f"Unknown caching strategy: {strategy_name}")

        cls.__strategy = strategy_class(**kwargs)
        return cls.__strategy

    @classmethod
    def get(cls, key):
        return cls.__strategy.get(key)

    @classmethod
    def set(cls, key, value):
        cls.__strategy.set(key, value)

    @classmethod
    def set_hollow(cls, key, cell: tuple, osm_types: OSM_TYPES,
                   selector: str, conditions: list=None, out='center'):
        """Create a hollow cache entry."""
        cls.__strategy.set_hollow(key, cell, osm_types, selector, conditions, out)
416  backend/src/overpass/overpass.py    Normal file
@@ -0,0 +1,416 @@
"""Module allowing connection to the Overpass API and fetching data from OSM."""
import os
import urllib.error
import urllib.parse
import urllib.request
import math
import logging
import json
from typing import List, Tuple

from .caching_strategy import get_cache_key, CachingStrategy
from ..constants import OSM_CACHE_DIR, OSM_TYPES, BBOX


RESOLUTION = 0.05
CELL = Tuple[int, int]


class Overpass :
    """
    Overpass class to manage the query building and sending to the Overpass API.
    The caching strategy is a part of this class and is initialized upon creation of the Overpass object.
    """
    logger = logging.getLogger(__name__)

    def __init__(self, caching_strategy: str = 'JSON', cache_dir: str = OSM_CACHE_DIR) :
        """
        Initialize the Overpass instance with the url, headers and caching strategy.
        """
        self.overpass_url = "https://overpass-api.de/api/interpreter"
        self.headers = {'User-Agent': 'Mozilla/5.0 (compatible; OverpassQuery/1.0; +http://example.com)',}
        self.caching_strategy = CachingStrategy.use(caching_strategy, cache_dir=cache_dir)

    def send_query(self, bbox: BBOX, osm_types: OSM_TYPES,
                   selector: str, conditions: list=None, out='center') -> List[dict]:
        """
        Sends the Overpass QL query to the Overpass API and returns the parsed json response.

        Args:
            bbox (tuple): Bounding box for the query.
            osm_types (list[str]): List of OSM element types (e.g., 'node', 'way').
            selector (str): Key or tag to filter OSM elements (e.g., 'highway').
            conditions (list): Optional list of additional filter conditions in Overpass QL format.
            out (str): Output format ('center', 'body', etc.). Defaults to 'center'.

        Returns:
            list: Parsed json response from the Overpass API, or cached data if available.
        """
        # Determine which grid cells overlap with this bounding box.
        overlapping_cells = Overpass._get_overlapping_cells(bbox)

        # Retrieve cached data and identify missing cache entries
        cached_responses, non_cached_cells = self._retrieve_cached_data(overlapping_cells, osm_types, selector, conditions, out)

        self.logger.debug(f'Cache hit for {len(overlapping_cells)-len(non_cached_cells)}/{len(overlapping_cells)} quadrants.')

        # If there is no missing data, return the cached responses after filtering.
        if not non_cached_cells :
            return Overpass._filter_landmarks(cached_responses, bbox)

        # If there is no cached data, fetch all from Overpass.
        elif not cached_responses :
            query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)
            self.logger.debug(f'Query string: {query_str}')
            return self.fetch_data_from_api(query_str)

        # Hybrid cache: some data from Overpass, some data from cache.
        else :
            # Resize the bbox to a smaller search area and build a new query string.
            non_cached_bbox = Overpass._get_non_cached_bbox(non_cached_cells, bbox)
            query_str = Overpass.build_query(non_cached_bbox, osm_types, selector, conditions, out)
            self.logger.debug(f'Query string: {query_str}')
            non_cached_responses = self.fetch_data_from_api(query_str)
            return Overpass._filter_landmarks(cached_responses, bbox) + non_cached_responses

    def fetch_data_from_api(self, query_str: str) -> List[dict]:
        """
        Fetch data from the Overpass API and return the json data.

        Args:
            query_str (str): The Overpass query string.

        Returns:
            list: The 'elements' parsed from the Overpass response.
        """
        try:
            data = urllib.parse.urlencode({'data': query_str}).encode('utf-8')
            request = urllib.request.Request(self.overpass_url, data=data, headers=self.headers)

            with urllib.request.urlopen(request) as response:
                response_data = response.read().decode('utf-8')   # Convert the HTTPResponse to a string
                data = json.loads(response_data)                  # Load the JSON from the string
                elements = data.get('elements', [])
                # self.logger.debug(f'Query = {query_str}')
                return elements

        except urllib.error.URLError as e:
            self.logger.error(f"Error connecting to Overpass API: {e}")
            raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
        except Exception as exc :
            raise Exception(f'An unexpected error occurred: {str(exc)}') from exc

    def fill_cache(self, json_data: dict) :
        """
        Fill the cache with data by using a hollow cache entry's information.
        """
        query_str, cache_key = Overpass._build_query_from_hollow(json_data)
        try:
            data = urllib.parse.urlencode({'data': query_str}).encode('utf-8')
            request = urllib.request.Request(self.overpass_url, data=data, headers=self.headers)

            with urllib.request.urlopen(request) as response:

                # Convert the HTTPResponse to a string and load data
                response_data = response.read().decode('utf-8')
                data = json.loads(response_data)

                # Get elements and set cache
                elements = data.get('elements', [])
                self.caching_strategy.set(cache_key, elements)
                self.logger.debug(f'Cache set for {cache_key}')
        except urllib.error.URLError as e:
            raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
        except Exception as exc :
            raise Exception(f'An unexpected error occurred: {str(exc)}') from exc

    @staticmethod
    def build_query(bbox: BBOX, osm_types: OSM_TYPES,
                    selector: str, conditions: list=None, out='center') -> str:
        """
        Constructs a query string for the Overpass API to retrieve OpenStreetMap (OSM) data.

        Args:
            bbox (tuple): A tuple representing the geographical search area, typically in the format
                          (lat_min, lon_min, lat_max, lon_max).
            osm_types (list[str]): A list of OSM element types to search for. Must be one or more of
                                   'Way', 'Node', or 'Relation'.
            selector (str): The key or tag to filter the OSM elements (e.g., 'amenity', 'highway', etc.).
            conditions (list, optional): A list of conditions to apply as additional filters for the
                                         selected OSM elements. The conditions should be written in
                                         the Overpass QL format, and they are combined with '&&' if
                                         multiple are provided. Defaults to an empty list.
            out (str, optional): Specifies the output type, such as 'center', 'body', or 'tags'.
                                 Defaults to 'center'.

        Returns:
            str: The constructed Overpass QL query string.

        Notes:
            - If no conditions are provided, the query will just use the `selector` to filter the OSM
              elements without additional constraints.
        """
        query = '[out:json];('

        # Convert the bbox to a string.
        bbox_str = f"({','.join(map(str, bbox))})"

        if conditions is not None and len(conditions) > 0:
            conditions = '(if: ' + ' && '.join(conditions) + ')'
        else :
            conditions = ''

        for elem in osm_types :
            query += elem + '[' + selector + ']' + conditions + bbox_str + ';'

        query += ');' + f'out {out};'

        return query

    def _retrieve_cached_data(self, overlapping_cells: CELL, osm_types: OSM_TYPES,
                              selector: str, conditions: list, out: str) -> Tuple[List[dict], list[CELL]]:
        """
        Retrieve cached data and identify missing cache quadrants.

        Args:
            overlapping_cells (list): Cells to check for cached data.
            osm_types (list): OSM types (e.g., 'node', 'way').
            selector (str): Key or tag to filter OSM elements.
            conditions (list): Additional conditions to apply.
            out (str): Output format.

        Returns:
            tuple: A tuple containing:
                - cached_responses (list): List of cached data found.
                - non_cached_cells (list(tuple)): List of cells with missing data.
        """
        cell_key_dict = {}
        for cell in overlapping_cells :
            for elem in osm_types :
                key_str = f"{elem}[{selector}]{conditions}({','.join(map(str, cell))})"

            cell_key_dict[cell] = get_cache_key(key_str)

        cached_responses = []
        non_cached_cells = []

        # Retrieve the cached data and mark the missing entries as hollow
        for cell, key in cell_key_dict.items():
            cached_data = self.caching_strategy.get(key)
            if cached_data is not None :
                cached_responses += cached_data
            else:
                self.caching_strategy.set_hollow(key, cell, osm_types, selector, conditions, out)
                non_cached_cells.append(cell)

        return cached_responses, non_cached_cells

    @staticmethod
    def _build_query_from_hollow(json_data: dict) -> Tuple[str, str]:
        """
        Build a query string using information from a hollow cache entry.
        """
        # Extract values from the JSON object
        key = json_data.get('key')
        cell = tuple(json_data.get('cell'))
        bbox = Overpass._get_bbox_from_grid_cell(cell)
        osm_types = json_data.get('osm_types')
        selector = json_data.get('selector')
        conditions = json_data.get('conditions')
        out = json_data.get('out')

        query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)
        return query_str, key

    @staticmethod
    def _get_overlapping_cells(query_bbox: tuple) -> List[CELL]:
        """
        Returns a set of all grid cells that overlap with the given bounding box.
        """
        # Extract location from the query bbox
        lat_min, lon_min, lat_max, lon_max = query_bbox

        min_lat_cell, min_lon_cell = Overpass._get_grid_cell(lat_min, lon_min)
        max_lat_cell, max_lon_cell = Overpass._get_grid_cell(lat_max, lon_max)

        overlapping_cells = set()
        for lat_idx in range(min_lat_cell, max_lat_cell + 1):
            for lon_idx in range(min_lon_cell, max_lon_cell + 1):
                overlapping_cells.add((lat_idx, lon_idx))

        return overlapping_cells

    @staticmethod
    def _get_grid_cell(lat: float, lon: float) -> CELL:
        """
        Returns the grid cell coordinates for a given latitude and longitude.
        Each grid cell is 0.05°lat x 0.05°lon in size.
        """
        lat_index = math.floor(lat / RESOLUTION)
        lon_index = math.floor(lon / RESOLUTION)
        return (lat_index, lon_index)

    @staticmethod
    def _get_bbox_from_grid_cell(cell: CELL) -> BBOX:
        """
        Returns the bounding box for a given grid cell index.
        Each grid cell is resolution x resolution in size.

        The bounding box is returned as (min_lat, min_lon, max_lat, max_lon).
        """
        # Calculate the southwest (min_lat, min_lon) corner of the bounding box
        min_lat = round(cell[0] * RESOLUTION, 2)
        min_lon = round(cell[1] * RESOLUTION, 2)

        # Calculate the northeast (max_lat, max_lon) corner of the bounding box
        max_lat = round((cell[0] + 1) * RESOLUTION, 2)
        max_lon = round((cell[1] + 1) * RESOLUTION, 2)

        return (min_lat, min_lon, max_lat, max_lon)

    @staticmethod
    def _get_non_cached_bbox(non_cached_cells: List[CELL], original_bbox: BBOX):
        """
        Calculate the non-cached bounding box by excluding cached cells.

        Args:
            non_cached_cells (list): The list of cells that were not found in the cache.
            original_bbox (tuple): The original bounding box (min_lat, min_lon, max_lat, max_lon).

        Returns:
            tuple: The new bounding box that excludes cached cells, or None if all cells are cached.
        """
        if not non_cached_cells:
            return None  # All cells were cached

        # Initialize the non-cached bounding box with extreme values
        min_lat, min_lon, max_lat, max_lon = float('inf'), float('inf'), float('-inf'), float('-inf')

        # Iterate over non-cached cells to find the new bounding box
        for cell in non_cached_cells:
            cell_min_lat, cell_min_lon, cell_max_lat, cell_max_lon = Overpass._get_bbox_from_grid_cell(cell)

            min_lat = min(min_lat, cell_min_lat)
            min_lon = min(min_lon, cell_min_lon)
            max_lat = max(max_lat, cell_max_lat)
            max_lon = max(max_lon, cell_max_lon)

        # If there was no update to the bounding box, return None
        if min_lat == float('inf') or min_lon == float('inf'):
            return None

        return (max(min_lat, original_bbox[0]),
                max(min_lon, original_bbox[1]),
                min(max_lat, original_bbox[2]),
                min(max_lon, original_bbox[3]))

    @staticmethod
    def _filter_landmarks(elements: List[dict], bbox: BBOX) -> List[dict]:
        """
        Filters elements based on whether their coordinates are inside the given bbox.

        Args:
            - elements (list of dict): List of elements containing coordinates.
            - bbox (tuple): A bounding box defined as (min_lat, min_lon, max_lat, max_lon).

        Returns:
            - list: A list of elements whose coordinates are inside the bounding box.
        """
        filtered_elements = []
        min_lat, min_lon, max_lat, max_lon = bbox

        for elem in elements:
            # Extract coordinates based on the 'type' of element
            if elem.get('type') != 'node':
                center = elem.get('center', {})
                lat = float(center.get('lat', 0))
                lon = float(center.get('lon', 0))
            else:
                lat = float(elem.get('lat', 0))
                lon = float(elem.get('lon', 0))

            # Check if the coordinates fall within the given bounding box
            if min_lat <= lat <= max_lat and min_lon <= lon <= max_lon:
                filtered_elements.append(elem)

        return filtered_elements


def get_base_info(elem: dict, osm_type: OSM_TYPES, with_name=False) :
    """
    Extracts base information (coordinates, OSM ID, and optionally a name) from an OSM element.

    This function retrieves the latitude and longitude coordinates, the OSM ID, and optionally the name
    of a given OpenStreetMap (OSM) element. It handles different OSM types (e.g., 'node', 'way') by
    extracting coordinates either directly or from a center tag, depending on the element type.

    Args:
        elem (dict): The JSON element representing the OSM entity.
        osm_type (str): The type of the OSM entity (e.g., 'node', 'way'). If 'node', the coordinates
                        are extracted directly from the element; otherwise, from the 'center' tag.
        with_name (bool): Whether to extract and return the name of the element. If True, it attempts
                          to find the 'name' tag within the element and return its value. Defaults to False.

    Returns:
        tuple: A tuple containing:
            - osm_id (str): The OSM ID of the element.
            - coords (tuple): A tuple of (latitude, longitude) coordinates.
            - name (str, optional): The name of the element if `with_name` is True; otherwise, not included.
    """
    # 1. Extract coordinates
    if osm_type != 'node' :
        center = elem.get('center')
        lat = float(center.get('lat'))
        lon = float(center.get('lon'))

    else :
        lat = float(elem.get('lat'))
        lon = float(elem.get('lon'))

    coords = tuple((lat, lon))

    # 2. Extract the OSM id
    osm_id = elem.get('id')

    # 3. Extract the name if specified and return
    if with_name :
        name = elem.get('tags', {}).get('name')
        return osm_id, coords, name
    else :
        return osm_id, coords


def fill_cache():
    """
    Scans the specified cache directory for files starting with 'hollow_' and attempts to load
    their contents as JSON to fill the cache of the Overpass system.
    """
    overpass = Overpass()

    with os.scandir(OSM_CACHE_DIR) as it:
        for entry in it:
            if entry.is_file() and entry.name.startswith('hollow_'):

                try :
                    # Read the whole file content as a string
                    with open(entry.path, 'r', encoding='utf-8') as f:
                        # Load data and fill the cache with the query and key
                        json_data = json.load(f)
                        overpass.fill_cache(json_data)
                    # Now delete the file as the cache is filled
                    os.remove(entry.path)

                except Exception as exc :
                    overpass.logger.error(f'An error occurred while parsing file {entry.path} as .json file: {exc}')
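
The grid arithmetic above is easiest to see with numbers. With RESOLUTION = 0.05, a point in central Colmar falls into one cell, and that cell maps back to a 0.05° x 0.05° bbox; a standalone sketch mirroring `_get_grid_cell` and `_get_bbox_from_grid_cell`:

```python
import math

RESOLUTION = 0.05

def get_grid_cell(lat: float, lon: float) -> tuple:
    # Same floor division as Overpass._get_grid_cell
    return (math.floor(lat / RESOLUTION), math.floor(lon / RESOLUTION))

def get_bbox_from_grid_cell(cell: tuple) -> tuple:
    # Same corner arithmetic as Overpass._get_bbox_from_grid_cell
    return (round(cell[0] * RESOLUTION, 2), round(cell[1] * RESOLUTION, 2),
            round((cell[0] + 1) * RESOLUTION, 2), round((cell[1] + 1) * RESOLUTION, 2))

cell = get_grid_cell(48.08, 7.36)
print(cell)                           # (961, 147)
print(get_bbox_from_grid_cell(cell))  # (48.05, 7.35, 48.1, 7.4)
```

Because the key for each cell is derived from the cell plus the query parameters, a later query over any bbox that touches cell (961, 147) with the same selector hits the same cache entry.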
@@ -51,25 +51,26 @@ sightseeing:
  - place_of_worship
  - fountain
  - townhall
water:
  - reflecting_pool
water: reflecting_pool
bridge:
  - aqueduct
  - viaduct
  - boardwalk
  - cantilever
  - abandoned
building:
  - church
  - chapel
  - mosque
  - synagogue
  - ruins
  - temple
  - government
  - cathedral
  - castle
  - museum
building: cathedral

# unused sightseeing/buildings:
# - church
# - chapel
# - mosque
# - synagogue
# - ruins
# - temple
# - government
# - cathedral
# - castle
# - museum

museums:
  tourism:
@@ -1,12 +1,11 @@
city_bbox_side: 7500 #m
max_bbox_side: 4000 #m
radius_close_to: 50
church_coeff: 0.9
nature_coeff: 1.25
church_coeff: 0.75
nature_coeff: 1.6
overall_coeff: 10
tag_exponent: 1.15
image_bonus: 10
viewpoint_bonus: 15
wikipedia_bonus: 4
name_bonus: 3
N_important: 40
image_bonus: 1.1
viewpoint_bonus: 10
wikipedia_bonus: 1.25
N_important: 60
pay_bonus: -1
@@ -2,5 +2,8 @@ detour_factor: 1.4
detour_corridor_width: 300
average_walking_speed: 4.8
max_landmarks: 10
max_landmarks_refiner: 30
overshoot: 1.1
max_landmarks_refiner: 20
overshoot: 0.0016
time_limit: 1
gap_rel: 0.025
max_iter: 40
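
These parameters are presumably loaded via OPTIMIZER_PARAMETERS_PATH (imported in refiner.py above). A minimal sketch of consuming such a file with PyYAML, inlining the new values from the diff for illustration:

```python
import yaml

# Assumed: a YAML file with the keys shown in the diff above.
params_yaml = """
detour_factor: 1.4
detour_corridor_width: 300
average_walking_speed: 4.8
max_landmarks: 10
max_landmarks_refiner: 20
overshoot: 0.0016
time_limit: 1
gap_rel: 0.025
max_iter: 40
"""

params = yaml.safe_load(params_yaml)
print(params['gap_rel'], params['max_iter'])   # 0.025 40
```

The `time_limit` and `gap_rel` values here are exactly what `solve_optimization` passes to `pl.PULP_CBC_CMD` in the optimizer diff above.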
0   backend/src/payments/__init__.py    Normal file
70  backend/src/payments/payment_handler.py    Normal file
@@ -0,0 +1,70 @@
from typing import Literal
import logging

import paypalrestsdk
from pydantic import BaseModel
from fastapi import HTTPException

from .supabase import Supabase


# Model for the payment request body
class PaymentRequest(BaseModel):
    user_id: str
    credit_amount: Literal[10, 50, 100]
    currency: Literal["USD", "EUR", "CHF"]
    description: str = "Purchase of credits"


# Payment handler class for managing PayPal payments
class PaymentHandler:

    payment_id: str

    def __init__(self, transaction_details: PaymentRequest):
        self.details = transaction_details
        self.logger = logging.getLogger(__name__)
        # Supabase client for price lookups; fetch_price below relies on it.
        self.supabase = Supabase().supabase

    # Only support the purchase of credit 'bundles': 10, 50 or 100 credits worth of trip generation
    def fetch_price(self) -> float:
        """
        Fetches the price of credits in the specified currency.
        """
        # Assumes a 'prices' table exposing 'currency' and 'price' columns.
        result = self.supabase.table("prices").select("price").eq("currency", self.details.currency).single().execute()
        if result.data:
            return result.data.get("price")
        else:
            self.logger.error(f"Unsupported currency: {self.details.currency}")
            return None

    def create_paypal_payment(self) -> str:
        """
        Creates a PayPal payment and returns the approval URL.
        """
        price = self.fetch_price()
        payment = paypalrestsdk.Payment({
            "intent": "sale",
            "payer": {
                "payment_method": "paypal"
            },
            "transactions": [{
                "amount": {
                    "total": f"{price:.2f}",
                    "currency": self.details.currency
                },
                "description": self.details.description
            }],
            "redirect_urls": {
                "return_url": "http://localhost:8000/payment/success",
                "cancel_url": "http://localhost:8000/payment/cancel"
            }
        })

        if payment.create():
            self.logger.info("Payment created successfully")
            self.payment_id = payment.id

            # Get the approval URL and return it for the user to approve
            for link in payment.links:
                if link.rel == "approval_url":
                    return link.href
        else:
            self.logger.error(f"Failed to create payment: {payment.error}")
            raise HTTPException(status_code=500, detail="Payment creation failed")
79  backend/src/payments/payment_routes.py    Normal file
@@ -0,0 +1,79 @@
import logging
import paypalrestsdk
from fastapi import HTTPException, APIRouter

from .payment_handler import PaymentRequest, PaymentHandler
from .supabase import Supabase

# Set up logging and supabase
logger = logging.getLogger(__name__)
supabase = Supabase()

# Configure the PayPal SDK
paypalrestsdk.configure({
    "mode": "sandbox",  # Use 'live' for production
    "client_id": "YOUR_PAYPAL_CLIENT_ID",
    "client_secret": "YOUR_PAYPAL_SECRET"
})


# Define the API router
router = APIRouter()

@router.post("/purchase/credits")
def purchase_credits(payment_request: PaymentRequest):
    """
    Handles credit purchases. Calculates the price of the requested bundle,
    processes the PayPal payment, and returns the approval URL.
    """
    payment_handler = PaymentHandler(payment_request)

    # Create the PayPal payment and get the approval URL
    approval_url = payment_handler.create_paypal_payment()

    return {
        "message": "Purchase initiated successfully",
        "payment_id": payment_handler.payment_id,
        "credits": payment_request.credit_amount,
        "approval_url": approval_url,
    }


@router.get("/payment/success")
def payment_success(paymentId: str, PayerID: str):
    """
    Handles a successful PayPal payment.
    """
    payment = paypalrestsdk.Payment.find(paymentId)

    if payment.execute({"payer_id": PayerID}):
        logger.info("Payment executed successfully")

        # Retrieve transaction details from the database
        result = supabase.supabase.table("pending_payments").select("*").eq("payment_id", paymentId).single().execute()
        if not result.data:
            raise HTTPException(status_code=404, detail="Transaction not found")

        # Extract the necessary information
        user_id = result.data["user_id"]
        credit_amount = result.data["credit_amount"]

        # Update the user's balance
        supabase.increment_credit_balance(user_id, amount=credit_amount)

        # Optionally, delete the pending payment entry since the transaction is completed
        supabase.supabase.table("pending_payments").delete().eq("payment_id", paymentId).execute()

        return {"message": "Payment completed successfully"}
    else:
        logger.error(f"Payment execution failed: {payment.error}")
        raise HTTPException(status_code=500, detail="Payment execution failed")


@router.get("/payment/cancel")
def payment_cancel():
    """
    Handles PayPal payment cancellation.
    """
    return {"message": "Payment was cancelled"}
170  backend/src/payments/supabase.py    Normal file
@@ -0,0 +1,170 @@
import os
import logging
import yaml
from fastapi import HTTPException, status
from supabase import create_client, Client, ClientOptions

from ..constants import PARAMETERS_DIR

# Silence the supabase logger
logging.getLogger("httpx").setLevel(logging.CRITICAL)
logging.getLogger("hpack").setLevel(logging.CRITICAL)
logging.getLogger("httpcore").setLevel(logging.CRITICAL)


class Supabase:

    logger = logging.getLogger(__name__)

    def __init__(self):

        with open(os.path.join(PARAMETERS_DIR, 'secrets.yaml')) as f:
            secrets = yaml.safe_load(f)
            self.SUPABASE_URL = secrets['SUPABASE_URL']
            self.SUPABASE_ADMIN_KEY = secrets['SUPABASE_ADMIN_KEY']
            self.SUPABASE_TEST_USER_ID = secrets['SUPABASE_TEST_USER_ID']

        self.supabase = create_client(
            self.SUPABASE_URL,
            self.SUPABASE_ADMIN_KEY,
            options=ClientOptions(schema='public')
        )
        self.logger.debug('Supabase client initialized.')

    def check_balance(self, user_id: str) -> bool:
        """
        Checks if the user has enough 'credit' for generating a new trip.

        Args:
            user_id (str): The ID of the current user.

        Returns:
            bool: True if the balance is positive, False otherwise.
        """
        try:
            # Query the public.credits table to get the user's credits
            response = (
                self.supabase.table("credits")
                .select('*')
                .eq('id', user_id)
                .single()
                .execute()
            )
            # self.logger.critical(response)

        except Exception as e:
            if e.code == '22P02' :
                self.logger.error(f"Failed querying credits : {str(e)}")
                raise SyntaxError(f"Failed querying credits : {str(e)}") from e
            if e.code == 'PGRST116' :
                self.logger.error(f"User not found : {str(e)}")
                raise ValueError(f"User not found : {str(e)}") from e
            else :
                self.logger.error(f"An unexpected error occurred while checking user balance : {str(e)}")
                raise Exception(f"An unexpected error occurred while checking user balance : {str(e)}") from e

        # Proceed to check the user's credit balance
        credits = response.data['credit_amount']
        self.logger.debug(f'Credits of user {user_id}: {credits}')

        if credits > 0:
            self.logger.info(f'Credit balance is positive for user {user_id}. Proceeding with trip generation.')
            return True

        self.logger.warning(f'Insufficient balance for user {user_id}. Trip generation cannot proceed.')
        return False

    def decrement_credit_balance(self, user_id: str, amount: int=1) -> bool:
        """
        Decrements the user's credit balance by the given amount (default 1).

        Args:
            user_id (str): The ID of the current user.
        """
        try:
            # Query the public.credits table to get the user's current credits
            response = (
                self.supabase.table("credits")
                .select('*')
                .eq('id', user_id)
                .single()
                .execute()
            )
        except Exception as e:
            if e.code == '22P02' :
                self.logger.error(f"Failed decrementing credits : {str(e)}")
                raise SyntaxError(f"Failed decrementing credits : {str(e)}") from e
            if e.code == 'PGRST116' :
                self.logger.error(f"User not found : {str(e)}")
                raise ValueError(f"User not found : {str(e)}") from e
            else :
                self.logger.error(f"An unexpected error occurred while decrementing user balance : {str(e)}")
                raise Exception(f"An unexpected error occurred while decrementing user balance : {str(e)}") from e

        current_credits = response.data['credit_amount']
        updated_credits = current_credits - amount

        # Update the user's credits in the table
        update_response = (
            self.supabase.table('credits')
            .update({'credit_amount': updated_credits})
            .eq('id', user_id)
            .execute()
        )

        # Check if the update was successful
        if update_response.data:
            self.logger.debug('Credit balance successfully decremented.')
            return True
        else:
            raise Exception("Error decrementing credit balance.")

    def increment_credit_balance(self, user_id: str, amount: int=1) -> bool:
        """
        Increments the user's credit balance by the given amount (default 1).

        Args:
            user_id (str): The ID of the current user.
        """
        try:
            # Query the public.credits table to get the user's current credits
            response = (
                self.supabase.table("credits")
                .select('*')
                .eq('id', user_id)
                .single()
                .execute()
            )
        except Exception as e:
            if e.code == '22P02' :
                self.logger.error(f"Failed incrementing credits : {str(e)}")
                raise SyntaxError(f"Failed incrementing credits : {str(e)}") from e
            if e.code == 'PGRST116' :
                self.logger.error(f"User not found : {str(e)}")
                raise ValueError(f"User not found : {str(e)}") from e
            else :
                self.logger.error(f"An unexpected error occurred while incrementing user balance : {str(e)}")
                raise Exception(f"An unexpected error occurred while incrementing user balance : {str(e)}") from e

        current_credits = response.data['credit_amount']
        updated_credits = current_credits + amount

        # Update the user's credits in the table
        update_response = (
            self.supabase.table('credits')
            .update({'credit_amount': updated_credits})
            .eq('id', user_id)
            .execute()
        )

        # Check if the update was successful
        if update_response.data:
            self.logger.debug('Credit balance successfully incremented.')
            return True
        else:
            raise Exception("Error incrementing credit balance.")
52  backend/src/payments/supabase_routes.py    Normal file
@@ -0,0 +1,52 @@
|
||||
"""Endpoints for supabase user handling."""
|
||||
import logging
|
||||
from fastapi import APIRouter, HTTPException
|
||||
|
||||
from .supabase import Supabase
|
||||
|
||||
|
||||
# Set up logging and supabase.
|
||||
logger = logging.getLogger(__name__)
|
||||
supabase = Supabase()
|
||||
|
||||
# Create fastapi router
|
||||
router = APIRouter()
|
||||
|
||||
|
||||
@router.post("/user/create/{email}/{password}")
|
||||
def register_user(email: str, password: str) -> str:
|
||||
try:
|
||||
response = supabase.supabase.auth.admin.create_user({
|
||||
"email": email,
|
||||
"password": password
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
if e.code == 'email_exists' :
|
||||
logger.error(f"Failed to create user : {str(e.code)}")
|
||||
raise HTTPException(status_code=422, detail=str(e)) from e
|
||||
logger.error(f"Failed to create user : {str(e.code)}")
|
||||
raise HTTPException(status_code=500, detail=str(e)) from e
|
||||
|
||||
# Extract the identity_id and user_id
|
||||
user_id = response.user.id
|
||||
|
||||
logger.info(f"User created successfully, ID: {user_id}")
|
||||
return user_id
|
||||
|
||||
|
||||
|
||||
@router.post("/user/delete/{user_id}")
|
||||
def delete_user(user_id: str):
|
||||
|
||||
try:
|
||||
response = supabase.supabase.auth.admin.delete_user(user_id)
|
||||
logger.debug(response)
|
||||
except Exception as e:
|
||||
if e.code == 'user_not_found' :
|
||||
logger.error(f"Failed to delete user : {str(e.code)}")
|
||||
raise HTTPException(status_code=404, detail=str(e)) from e
|
||||
logger.error(f"Failed to create user : {str(e.code)}")
|
||||
raise HTTPException(status_code=500, detail=str(e)) from e
|
||||
|
||||
logger.info(f"User with ID {user_id} deleted successfully")
|
File diff suppressed because it is too large
@@ -1,698 +0,0 @@
{
  "type": "FeatureCollection",
  "generator": "overpass-turbo",
  "copyright": "The data included in this document is from www.openstreetmap.org. The data is made available under ODbL.",
  "timestamp": "2024-12-02T21:14:59Z",
  "features": [
    { "type": "Feature",
      "properties": { "@id": "node/1345741798", "name": "Cordonnerie Saint-Joseph", "shop": "shoes" },
      "geometry": { "type": "Point", "coordinates": [7.3481705, 48.0816462] },
      "id": "node/1345741798" },
    { "type": "Feature",
      "properties": { "@id": "node/2659184738", "brand": "Armand Thiery", "brand:wikidata": "Q2861975", "brand:wikipedia": "fr:Armand Thiery", "name": "Armand Thiery", "opening_hours": "Mo-Sa 09:30-19:00", "shop": "clothes", "wheelchair": "limited" },
      "geometry": { "type": "Point", "coordinates": [7.3594454, 48.0785574] },
      "id": "node/2659184738" },
    { "type": "Feature",
      "properties": { "@id": "node/3618136290", "name": "Chez Dominique", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3362362, 48.0712174] },
      "id": "node/3618136290" },
    { "type": "Feature",
      "properties": { "@id": "node/3618136605", "name": "Divamod", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3304253, 48.0782989] },
      "id": "node/3618136605" },
    { "type": "Feature",
      "properties": { "@id": "node/3618284507", "name": "Star tendances et voyages", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3474029, 48.0830993] },
      "id": "node/3618284507" },
    { "type": "Feature",
      "properties": { "@id": "node/3619696125", "brand": "Zeeman", "brand:wikidata": "Q184399", "name": "Zeeman", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3413834, 48.0638444] },
      "id": "node/3619696125" },
    { "type": "Feature",
      "properties": { "@id": "node/4594398129", "name": "Miss et Mister", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3308309, 48.0779118] },
      "id": "node/4594398129" },
    { "type": "Feature",
      "properties": { "@id": "node/4907320441", "brand": "Sergent Major", "brand:wikidata": "Q62521738", "clothes": "babies;children", "name": "Sergent Major", "opening_hours": "Mo-Sa 09:30-19:00", "shop": "clothes", "wheelchair": "no" },
      "geometry": { "type": "Point", "coordinates": [7.359116, 48.0787229] },
      "id": "node/4907320441" },
    { "type": "Feature",
      "properties": { "@id": "node/4907364791", "brand": "Armand Thiery", "brand:wikidata": "Q2861975", "brand:wikipedia": "fr:Armand Thiery", "clothes": "women", "name": "Armand Thiery", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3601857, 48.0783373] },
      "id": "node/4907364791" },
    { "type": "Feature",
      "properties": { "@id": "node/4907385675", "check_date": "2024-05-19", "clothes": "children", "name": "Du Pareil...au même", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3604521, 48.0779726] },
      "id": "node/4907385675" },
    { "type": "Feature",
      "properties": { "@id": "node/4922191645", "name": "Abilos", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3566167, 48.0794136] },
      "id": "node/4922191645" },
    { "type": "Feature",
      "properties": { "@id": "node/4922191648", "brand": "Esprit", "brand:wikidata": "Q532746", "brand:wikipedia": "en:Esprit Holdings", "name": "Esprit", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3554004, 48.0787549] },
      "id": "node/4922191648" },
    { "type": "Feature",
      "properties": { "@id": "node/4922191972", "brand": "Guess", "brand:wikidata": "Q2470307", "brand:wikipedia": "en:Guess (clothing)", "name": "Guess", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.355273, 48.0788003] },
      "id": "node/4922191972" },
    { "type": "Feature",
      "properties": { "@id": "node/4922192001", "name": "Lingerie", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3575453, 48.0779317] },
      "id": "node/4922192001" },
    { "type": "Feature",
      "properties": { "@id": "node/5359915869", "name": "Al Assil", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3305665, 48.0780902] },
      "id": "node/5359915869" },
    { "type": "Feature",
      "properties": { "@id": "node/9089360040", "brand": "Grain de Malice", "brand:wikidata": "Q66757157", "clothes": "women", "name": "Grain de Malice", "shop": "clothes", "short_name": "GDM" },
      "geometry": { "type": "Point", "coordinates": [7.3593125, 48.0786234] },
      "id": "node/9089360040" },
    { "type": "Feature",
      "properties": { "@id": "node/9095193153", "brand": "Undiz", "brand:wikidata": "Q105306275", "clothes": "underwear", "name": "Undiz", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3599579, 48.0782846] },
      "id": "node/9095193153" },
    { "type": "Feature",
      "properties": { "@id": "node/9095193154", "branch": "Lingerie", "brand": "RougeGorge", "brand:wikidata": "Q104600739", "clothes": "underwear", "name": "RougeGorge", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3604883, 48.0781607] },
      "id": "node/9095193154" },
    { "type": "Feature",
      "properties": { "@id": "node/9095212690", "alt_name": "North Face", "brand": "The North Face", "brand:wikidata": "Q152784", "brand:wikipedia": "en:The North Face", "check_date": "2024-05-19", "name": "The North Face", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3603923, 48.0773727] },
      "id": "node/9095212690" },
    { "type": "Feature",
      "properties": { "@id": "node/9095270059", "air_conditioning": "no", "clothes": "men", "level": "0", "name": "Maison Aume", "second_hand": "no", "shop": "clothes", "wheelchair": "no" },
      "geometry": { "type": "Point", "coordinates": [7.361364, 48.0799999] },
      "id": "node/9095270059" },
    { "type": "Feature",
      "properties": { "@id": "node/9098624272", "name": "Destock Place", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3575161, 48.0793009] },
      "id": "node/9098624272" },
    { "type": "Feature",
      "properties": { "@id": "node/9123861652", "name": "Weackers", "shop": "shoes" },
      "geometry": { "type": "Point", "coordinates": [7.361329, 48.0785972] },
      "id": "node/9123861652" },
    { "type": "Feature",
      "properties": { "@id": "node/9162179887", "brand": "Calzedonia", "brand:wikidata": "Q1027874", "brand:wikipedia": "en:Calzedonia", "name": "Calzedonia", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3606374, 48.0780809] },
      "id": "node/9162179887" },
    { "type": "Feature",
      "properties": { "@id": "node/9162206449", "clothes": "women", "name": "Cop. Copine", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3600947, 48.078399] },
      "id": "node/9162206449" },
    { "type": "Feature",
      "properties": { "@id": "node/9162226360", "brand": "Okaïdi", "brand:wikidata": "Q3350027", "brand:wikipedia": "fr:Okaïdi", "name": "Okaïdi", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3596986, 48.078428] },
      "id": "node/9162226360" },
    { "type": "Feature",
      "properties": { "@id": "node/9162227010", "brand": "Jules", "brand:wikidata": "Q3188386", "brand:wikipedia": "fr:Jules (enseigne)", "clothes": "men", "name": "Jules", "opening_hours": "Mo-Sa 09:30-19:00", "phone": "+33 3 89 41 03 62", "shop": "clothes", "website": "https://www.jules.com/fr-fr/magasins/1600133/" },
      "geometry": { "type": "Point", "coordinates": [7.3600323, 48.0782229] },
      "id": "node/9162227010" },
    { "type": "Feature",
      "properties": { "@id": "node/10151865029", "name": "Atelier Cinq", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3571756, 48.0772657] },
      "id": "node/10151865029" },
    { "type": "Feature",
      "properties": { "@id": "node/10862176110", "name": "L'hexagone", "shop": "bag" },
      "geometry": { "type": "Point", "coordinates": [7.3808571, 48.0814138] },
      "id": "node/10862176110" },
    { "type": "Feature",
      "properties": { "@id": "node/11150877331", "brand": "Punt Roma", "brand:wikidata": "Q101423290", "clothes": "women", "name": "Punt Roma", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3571859, 48.0779406] },
      "id": "node/11150877331" },
    { "type": "Feature",
      "properties": { "@id": "node/11150959880", "name": "Caroll", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3579354, 48.0779291] },
      "id": "node/11150959880" },
    { "type": "Feature",
      "properties": { "@id": "node/11302242094", "branch": "Wintzenheim", "name": "Label Fripe", "opening_hours": "Mo-Sa 09:00-18:45", "phone": "+33 3 89 27 39 25", "second_hand": "only", "shop": "clothes", "website": "https://labelfripe.fr/label-fripe-wintzenheim/" },
      "geometry": { "type": "Point", "coordinates": [7.3109899, 48.0850362] },
      "id": "node/11302242094" },
    { "type": "Feature",
      "properties": { "@id": "node/11392247003", "name": "Lingerie Sipp", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3111507, 48.0841835] },
      "id": "node/11392247003" },
    { "type": "Feature",
      "properties": { "@id": "node/11778819781", "addr:city": "Colmar", "addr:housenumber": "10", "addr:postcode": "68000", "addr:street": "Rue des Têtes", "clothes": "suits;hats;men", "name": "Phillipe", "phone": "0389411983", "shop": "clothes" },
      "geometry": { "type": "Point", "coordinates": [7.3559389, 48.0789064] },
      "id": "node/11778819781"
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"properties": {
|
||||
"@id": "node/11799215969",
|
||||
"brand": "Petit Bateau",
|
||||
"brand:wikidata": "Q3377090",
|
||||
"name": "Petit Bateau",
|
||||
"opening_hours": "Mo-Sa 10:00-19:00; Su 10:00-18:00",
|
||||
"phone": "+33 3 89 24 97 85",
|
||||
"shop": "clothes",
|
||||
"website": "https://stores.petit-bateau.com/france/colmar/9-rue-des-boulangers"
|
||||
},
|
||||
"geometry": {
|
||||
"type": "Point",
|
||||
"coordinates": [
|
||||
7.355149,
|
||||
48.0780213
|
||||
]
|
||||
},
|
||||
"id": "node/11799215969"
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"properties": {
|
||||
"@id": "node/11816704669",
|
||||
"addr:housenumber": "10",
|
||||
"addr:street": "Rue des Boulangers",
|
||||
"name": "des petits hauts",
|
||||
"shop": "clothes"
|
||||
},
|
||||
"geometry": {
|
||||
"type": "Point",
|
||||
"coordinates": [
|
||||
7.3555001,
|
||||
48.0780768
|
||||
]
|
||||
},
|
||||
"id": "node/11816704669"
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"properties": {
|
||||
"@id": "node/12320343534",
|
||||
"addr:city": "Colmar",
|
||||
"addr:housenumber": "44",
|
||||
"addr:postcode": "68000",
|
||||
"addr:street": "Rue des Clefs",
|
||||
"brand": "Un Jour Ailleurs",
|
||||
"brand:wikidata": "Q105106211",
|
||||
"clothes": "women",
|
||||
"name": "Un Jour Ailleurs",
|
||||
"opening_hours": "Mo-Fr 10:00-19:00; Sa 10:00-18:30",
|
||||
"phone": "+33368318572",
|
||||
"shop": "clothes",
|
||||
"website": "https://boutique.unjourailleurs.com/fr/mode-femme/boutique-colmar-76"
|
||||
},
|
||||
"geometry": {
|
||||
"type": "Point",
|
||||
"coordinates": [
|
||||
7.35897,
|
||||
48.0789807
|
||||
]
|
||||
},
|
||||
"id": "node/12320343534"
|
||||
},
|
||||
{
|
||||
"type": "Feature",
|
||||
"properties": {
|
||||
"@id": "node/12320343536",
|
||||
"addr:city": "Colmar",
|
||||
"addr:housenumber": "38",
|
||||
"addr:postcode": "68000",
|
||||
"addr:street": "Rue des Clefs",
|
||||
"brand": "Timberland",
|
||||
"brand:wikidata": "Q1539185",
|
||||
"name": "Timberland",
|
||||
"opening_hours": "Mo-Sa 10:00-19:00",
|
||||
"phone": "+33389298650",
|
||||
"shop": "clothes"
|
||||
},
|
||||
"geometry": {
|
||||
"type": "Point",
|
||||
"coordinates": [
|
||||
7.3592409,
|
||||
48.0788785
|
||||
]
|
||||
},
|
||||
"id": "node/12320343536"
|
||||
}
|
||||
]
|
||||
}
|
@@ -1,350 +0,0 @@
# pylint: skip-file

import numpy as np
import json
import os
from typing import Optional, Literal
from sklearn.cluster import DBSCAN
from sklearn.decomposition import PCA
import matplotlib.pyplot as plt
from pydantic import BaseModel
from OSMPythonTools.overpass import Overpass, overpassQueryBuilder
from OSMPythonTools.cachingStrategy import CachingStrategy, JSON
from math import sin, cos, sqrt, atan2, radians


EARTH_RADIUS_KM = 6373


class ShoppingLocation(BaseModel):
    type: Literal['street', 'area']
    importance: int
    centroid: tuple
    start: Optional[list] = None
    end: Optional[list] = None


# Output to frontend
class Landmark(BaseModel) :
    # Properties of the landmark
    name : str
    type: Literal['sightseeing', 'nature', 'shopping', 'start', 'finish']
    location : tuple
    osm_type : str
    osm_id : int
    attractiveness : int
    n_tags : int
    image_url : Optional[str] = None
    website_url : Optional[str] = None
    description : Optional[str] = None    # TODO future
    duration : Optional[int] = 0
    name_en : Optional[str] = None

    # Additional properties depending on specific tour
    must_do : Optional[bool] = False
    must_avoid : Optional[bool] = False
    is_secondary : Optional[bool] = False

    time_to_reach_next : Optional[int] = 0
    next_uuid : Optional[str] = None

def extract_points(filestr: str) :
    """
    Extract points from geojson file.

    Returns :
        np.array containing the points
    """
    points = []

    with open(os.path.dirname(__file__) + '/' + filestr, 'r') as f:
        geojson = json.load(f)

    for feature in geojson['features']:
        if feature['geometry']['type'] == 'Point':
            centroid = feature['geometry']['coordinates']
            points.append(centroid)

        elif feature['geometry']['type'] == 'Polygon':
            centroid = np.array(feature['geometry']['coordinates'][0][0])
            points.append(centroid)

    # Convert the list of points to a NumPy array
    return np.array(points)

def get_distance(p1: tuple[float, float], p2: tuple[float, float]) -> float:
    """
    Calculate the great-circle distance between two locations.

    Args:
        p1 (tuple[float, float]): Coordinates of the starting location.
        p2 (tuple[float, float]): Coordinates of the destination.

    Returns:
        float: Distance from p1 to p2 in kilometers.
    """
    if p1 == p2:
        return 0
    else:
        # Compute the distance in km along the surface of the Earth
        # (assume spherical Earth)
        # this is the haversine formula, stolen from stackoverflow
        # in order to not use any external libraries
        lat1, lon1 = radians(p1[0]), radians(p1[1])
        lat2, lon2 = radians(p2[0]), radians(p2[1])

        dlon = lon2 - lon1
        dlat = lat2 - lat1

        a = sin(dlat / 2)**2 + cos(lat1) * cos(lat2) * sin(dlon / 2)**2
        c = 2 * atan2(sqrt(a), sqrt(1 - a))

        return EARTH_RADIUS_KM * c

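# Aside (not in the original script): a quick sanity check of the haversine
# helper above. Both pairs are (lat, lon) coordinates of shops taken from the
# Colmar dataset earlier in this diff; the two stores sit on the same street,
# so the printed distance should be on the order of ten metres.
undiz = (48.0782846, 7.3599579)
jules = (48.0782229, 7.3600323)
print(f"Distance between the two shops: {get_distance(undiz, jules) * 1000:.0f} m")
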
def filter_clusters(cluster_points, cluster_labels):
    """
    Remove clusters of less importance.
    """
    label_counts = np.bincount(cluster_labels)

    # Step 3: Get the indices (labels) of the 5 largest clusters
    top_5_labels = np.argsort(label_counts)[-5:]    # Get the largest 5 clusters

    # Step 4: Filter points to keep only the points in the top 5 clusters
    filtered_cluster_points = []
    filtered_cluster_labels = []

    for label in top_5_labels:
        filtered_cluster_points.append(cluster_points[cluster_labels == label])
        filtered_cluster_labels.append(np.full((label_counts[label],), label))    # Replicate the label

    # Concatenate filtered clusters into a single array
    return np.vstack(filtered_cluster_points), np.concatenate(filtered_cluster_labels)

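# Aside (not in the original script): the top-k selection used in
# filter_clusters(), in miniature. np.bincount counts how many points carry
# each label, and np.argsort picks the labels of the largest clusters.
toy_labels = np.array([0, 0, 0, 1, 1, 2, 2, 2, 2])
toy_counts = np.bincount(toy_labels)    # -> [3, 2, 4]
print(np.argsort(toy_counts)[-2:])      # labels of the 2 biggest clusters -> [0 2]
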
def fit_lines(points, labels):
    """
    Fit lines to identified clusters.
    """
    all_x = []
    all_y = []
    lines = []
    locations = []

    for label in set(labels):
        cluster_points = points[labels == label]

        # If there's not enough points, skip
        if len(cluster_points) < 2:
            continue

        # Apply PCA to find the principal component (i.e., the line of best fit)
        pca = PCA(n_components=1)
        pca.fit(cluster_points)

        direction = pca.components_[0]
        centroid = pca.mean_

        # Project the cluster points onto the principal direction (line direction)
        projections = np.dot(cluster_points - centroid, direction)

        # Get the range of the projections to find the approximate length of the cluster
        cluster_length = projections.max() - projections.min()

        # Now adjust `t` so that it scales with the cluster length
        t = np.linspace(-cluster_length / 2.75, cluster_length / 2.75, 10)

        # Calculate the start and end of the line based on min/max projections
        # (despite their names, these two arrays hold the x and y coordinates of
        # sample points along the fitted line, not two endpoints)
        start_point = centroid[0] + t*direction[0]
        end_point = centroid[1] + t*direction[1]

        # Store the line
        lines.append((start_point, end_point))

        # For visualization, store the points
        all_x.append(min(start_point))
        all_x.append(max(start_point))
        all_y.append(min(end_point))
        all_y.append(max(end_point))

        if np.linalg.norm(t) <= 0.0045 :
            loc = ShoppingLocation(
                type='area',
                centroid=tuple((centroid[1], centroid[0])),
                importance = len(cluster_points),
            )
        else :
            loc = ShoppingLocation(
                type='street',
                centroid=tuple((centroid[1], centroid[0])),
                importance = len(cluster_points),
                start=start_point,
                end=end_point
            )

        locations.append(loc)

    xmin = min(all_x)
    xmax = max(all_x)
    ymin = min(all_y)
    ymax = max(all_y)
    corners = (xmin, xmax, ymin, ymax)

    return corners, locations

def create_landmark(shopping_location: ShoppingLocation):

    # Define the bounding box for a given radius around the coordinates
    lat, lon = shopping_location.centroid
    bbox = ("around:1000", str(lat), str(lon))

    overpass = Overpass()
    # CachingStrategy.use(JSON, cacheDir=OSM_CACHE_DIR)

    # Query neighborhoods and shopping malls
    selectors = ['"place"~"^(suburb|neighborhood|neighbourhood|quarter|city_block)$"', '"shop"="mall"']

    min_dist = float('inf')
    new_name = 'Shopping Area'
    new_name_en = None
    osm_id = 0
    osm_type = 'node'

    for sel in selectors :
        query = overpassQueryBuilder(
            bbox = bbox,
            elementType = ['node', 'way', 'relation'],
            selector = sel,
            includeCenter = True,
            out = 'center'
        )

        try:
            result = overpass.query(query)
        except Exception as e:
            raise Exception("query unsuccessful") from e

        for elem in result.elements():

            location = (elem.centerLat(), elem.centerLon())

            if location[0] is None :
                location = (elem.lat(), elem.lon())
                if location[0] is None :
                    # print(f"Fetching coordinates failed with {elem.type()}/{elem.id()}")
                    continue

            # print(f"Distance : {get_distance(shopping_location.centroid, location)}")
            d = get_distance(shopping_location.centroid, location)
            if d < min_dist :
                min_dist = d
                new_name = elem.tag('name')
                osm_type = elem.type()    # Add type: 'way' or 'relation'
                osm_id = elem.id()        # Add OSM id

                # add english name if it exists
                try :
                    new_name_en = elem.tag('name:en')
                except:
                    pass

    return Landmark(
        name=new_name,
        type='shopping',
        location=shopping_location.centroid,    # TODO: use the fact the we can also recognize streets.
        attractiveness=shopping_location.importance,
        n_tags=0,
        osm_id=osm_id,
        osm_type=osm_type,
        name_en=new_name_en
    )

# Extract points
points = extract_points('vienna_data.json')

# print(len(points))

######## Create a figure with 1 row and 3 columns for side-by-side plots
fig, axes = plt.subplots(1, 3, figsize=(15, 5))
# Plot Raw data points
axes[0].set_title('Raw Data')
axes[0].scatter(points[:, 0], points[:, 1], color='blue', s=20)


# Apply DBSCAN to find clusters. Choose different settings for different cities.
if len(points) > 400 :
    dbscan = DBSCAN(eps=0.00118, min_samples=15, algorithm='kd_tree')    # for large cities
else :
    dbscan = DBSCAN(eps=0.00075, min_samples=10, algorithm='kd_tree')    # for small cities

labels = dbscan.fit_predict(points)

# Separate clustered points and noise points
clustered_points = points[labels != -1]
clustered_labels = labels[labels != -1]
noise_points = points[labels == -1]

######## Plot n°1: DBSCAN Clustering Results
axes[1].set_title('DBSCAN Clusters')
axes[1].scatter(clustered_points[:, 0], clustered_points[:, 1], c=clustered_labels, cmap='rainbow', s=20)
axes[1].scatter(noise_points[:, 0], noise_points[:, 1], c='blue', s=7, label='Noise')

# Keep the 5 biggest clusters
clustered_points, clustered_labels = filter_clusters(clustered_points, clustered_labels)

# Fit lines
corners, locations = fit_lines(clustered_points, clustered_labels)
(xmin, xmax, ymin, ymax) = corners


######## Plot clustered points in normal size and noise points separately
axes[2].scatter(clustered_points[:, 0], clustered_points[:, 1], c=clustered_labels, cmap='rainbow', s=30)
axes[2].set_title('PCA Fitted Lines on Clusters')

# Create a list of Landmarks for the shopping things
shopping_landmarks = []
for loc in locations :
    axes[2].scatter(loc.centroid[1], loc.centroid[0], color='red', marker='x', s=200, linewidth=3)
    landmark = create_landmark(loc)
    shopping_landmarks.append(landmark)
    axes[2].text(loc.centroid[1], loc.centroid[0], landmark.name,
                 ha='center', va='top', fontsize=6,
                 bbox=dict(facecolor='white', edgecolor='black', boxstyle='round,pad=0.2'),
                 zorder=3)



####### Plot the detected lines in the final plot #######
# for loc in locations:
#     if loc.type == 'street' :
#         line_x = loc.start
#         line_y = loc.end
#         axes[2].plot(line_x, line_y, color='lime', linewidth=3)
#     else :



axes[0].set_xlim(xmin-0.01, xmax+0.01)
axes[0].set_ylim(ymin-0.01, ymax+0.01)

axes[1].set_xlim(xmin-0.01, xmax+0.01)
axes[1].set_ylim(ymin-0.01, ymax+0.01)

axes[2].set_xlim(xmin-0.01, xmax+0.01)
axes[2].set_ylim(ymin-0.01, ymax+0.01)


print("\n\n\n")
for landmark in shopping_landmarks :
    print(f"{landmark.name} is a shopping area with a score of {landmark.attractiveness}")


plt.tight_layout()
plt.show()
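For readers unfamiliar with DBSCAN, the toy run below (an illustration added here, not part of any commit) shows how eps and min_samples decide what counts as a cluster; the label -1 marks noise. eps is expressed in the same units as the input, i.e. degrees of latitude/longitude in the script above.

import numpy as np
from sklearn.cluster import DBSCAN

# Two tight groups of three points each, plus one far-away outlier.
pts = np.array([[0.000, 0.000], [0.001, 0.000], [0.000, 0.001],
                [1.000, 1.000], [1.001, 1.000], [1.000, 1.001],
                [5.000, 5.000]])
labels = DBSCAN(eps=0.01, min_samples=2).fit_predict(pts)
print(labels)    # [0 0 0 1 1 1 -1] -- the lone point is classified as noise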
File diff suppressed because it is too large (7 files)
@@ -1,8 +1,8 @@
"""Definition of the Landmark class to handle visitable objects across the world."""

from typing import Optional, Literal
from uuid import uuid4
from pydantic import BaseModel, Field
from uuid import uuid4, UUID
from pydantic import BaseModel, ConfigDict, Field


# Output to frontend
@@ -29,12 +29,12 @@ class Landmark(BaseModel) :
        description (Optional[str]): A text description of the landmark.
        duration (Optional[int]): The estimated time to visit the landmark (in minutes).
        name_en (Optional[str]): The English name of the landmark.
        uuid (str): A unique identifier for the landmark, generated by default using uuid4.
        uuid (UUID): A unique identifier for the landmark, generated by default using uuid4.
        must_do (Optional[bool]): Whether the landmark is a "must-do" attraction.
        must_avoid (Optional[bool]): Whether the landmark should be avoided.
        is_secondary (Optional[bool]): Whether the landmark is secondary or less important.
        time_to_reach_next (Optional[int]): Estimated time (in minutes) to reach the next landmark.
        next_uuid (Optional[str]): UUID of the next landmark in sequence (if applicable).
        next_uuid (Optional[UUID]): UUID of the next landmark in sequence (if applicable).
    """

    # Properties of the landmark
@@ -45,14 +45,17 @@ class Landmark(BaseModel) :
    osm_id : int
    attractiveness : int
    n_tags : int

    # Optional properties to gather more information.
    image_url : Optional[str] = None
    website_url : Optional[str] = None
    wiki_url : Optional[str] = None
    description : Optional[str] = None    # TODO future
    duration : Optional[int] = 0
    duration : Optional[int] = 5
    name_en : Optional[str] = None

    # Unique ID of a given landmark
    uuid: str = Field(default_factory=uuid4)
    uuid: UUID = Field(default_factory=uuid4)

    # Additional properties depending on specific tour
    must_do : Optional[bool] = False
@@ -60,7 +63,11 @@ class Landmark(BaseModel) :
    is_secondary : Optional[bool] = False

    time_to_reach_next : Optional[int] = 0
    next_uuid : Optional[str] = None
    next_uuid : Optional[UUID] = None

    # More properties to define the score
    is_viewpoint : Optional[bool] = False
    is_place_of_worship : Optional[bool] = False

    def __str__(self) -> str:
        """
@@ -136,7 +143,5 @@ class Toilets(BaseModel) :
        str: A formatted string with the toilets location.
    """
    return f'Toilets @{self.location}'

    class Config:
        # This allows us to easily convert the model to and from dictionaries
        orm_mode = True

    model_config = ConfigDict(from_attributes=True)
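The hunk above tracks the pydantic v1 to v2 migration: `class Config` with `orm_mode = True` becomes `model_config = ConfigDict(from_attributes=True)`, and the UUID fields move from plain `str` to real `UUID` objects. A minimal standalone sketch of that pattern (illustrative, not code from the repository):

from typing import Optional
from uuid import UUID, uuid4
from pydantic import BaseModel, ConfigDict, Field

class Item(BaseModel):
    # pydantic v2 replaces `class Config: orm_mode = True` with model_config;
    # from_attributes allows building models directly from ORM objects.
    model_config = ConfigDict(from_attributes=True)

    uuid: UUID = Field(default_factory=uuid4)
    next_uuid: Optional[UUID] = None

print(Item().model_dump_json())    # UUIDs are serialized to strings automatically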
@@ -1,7 +1,7 @@
"""Linked and ordered list of Landmarks that represents the visiting order."""

from .landmark import Landmark
from ..utils.get_time_separation import get_time
from ..utils.get_time_distance import get_time

class LinkedLandmarks:
    """
@@ -1,6 +1,6 @@
"""Definition of the Trip class."""

import uuid
from uuid import uuid4, UUID
from pydantic import BaseModel, Field
from pymemcache.client.base import Client

@@ -19,9 +19,9 @@ class Trip(BaseModel):
    Methods:
        from_linked_landmarks: create a Trip from LinkedLandmarks object.
    """
    uuid: str = Field(default_factory=uuid.uuid4)
    uuid: UUID = Field(default_factory=uuid4)
    total_time: int
    first_landmark_uuid: str
    first_landmark_uuid: UUID


    @classmethod
@@ -31,7 +31,7 @@ class Trip(BaseModel):
        """
        trip = Trip(
            total_time = landmarks.total_time,
            first_landmark_uuid = str(landmarks[0].uuid)
            first_landmark_uuid = landmarks[0].uuid
        )

        # Store the trip in the cache
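The `# Store the trip in the cache` context line refers to the pymemcache client imported above. A minimal sketch of that set/get round trip (the server address, key naming, and payload here are assumptions for illustration, not taken from the repository):

from pymemcache.client.base import Client

cache = Client(('localhost', 11211))    # assumed memcached address
cache.set('trip_00000000', b'{"total_time": 120}', expire=3600)
print(cache.get('trip_00000000'))       # b'{"total_time": 120}'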
@@ -1,42 +0,0 @@
"""Collection of tests to ensure correct handling of invalid input."""

from fastapi.testclient import TestClient
import pytest

from .test_utils import load_trip_landmarks
from ..main import app


@pytest.fixture(scope="module")
def client():
    """Client used to call the app."""
    return TestClient(app)


def test_cache(client, request):   # pylint: disable=redefined-outer-name
    """
    Test n°1 : Custom test in Turckheim to ensure small villages are also supported.

    Args:
        client:
        request:
    """
    duration_minutes = 15
    response = client.post(
        "/trip/new",
        json={
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [48.084588, 7.280405]
        }
    )
    result = response.json()
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])
    landmarks_cached = load_trip_landmarks(client, result['first_landmark_uuid'], True)

    # checks :
    assert response.status_code == 200    # check for successful planning
    assert landmarks_cached == landmarks
@@ -4,6 +4,7 @@ from fastapi.testclient import TestClient
import pytest

from ..main import app
from ..constants import SUPABASE_TEST_USER_ID


@pytest.fixture(scope="module")
@@ -55,8 +56,38 @@ def test_input(invalid_client, start, preferences, status_code):   # pylint: dis
    response = invalid_client.post(
        "/trip/new",
        json={
            "user_id": SUPABASE_TEST_USER_ID,
            "preferences": preferences,
            "start": start
        }
    )
    assert response.status_code == status_code



@pytest.mark.parametrize(
    "user_id,status_code",
    [
        # No user id :
        ({}, 422),
        ("invalid_user_id", 400),
        # ("12345678-1234-5678-1234-567812345678", 406)
    ]
)
def test_input(invalid_client, user_id, status_code):   # pylint: disable=redefined-outer-name
    """
    Test new trip creation with invalid user ID.
    """
    response = invalid_client.post(
        "/trip/new",
        json={
            "user_id": user_id,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 0},
                            "shopping": {"type": "shopping", "score": 0},
                            "max_time_minute": 20,
                            "detour_tolerance_minute": 0},
            "start": [48.084588, 7.280405]
        }
    )
    assert response.status_code == status_code

@@ -1,10 +1,17 @@
"""Collection of tests to ensure correct implementation and track progress. """

import time
import logging
from fastapi.testclient import TestClient
import pytest

from .test_utils import landmarks_to_osmid, load_trip_landmarks, log_trip_details
from .test_utils import load_trip_landmarks, log_trip_details
from ..main import app
from ..payments.supabase import Supabase

supabase = Supabase()
logger = logging.getLogger(__name__)
USER_ID = supabase.SUPABASE_TEST_USER_ID


@pytest.fixture(scope="module")
def client():
@@ -20,29 +27,47 @@ def test_turckheim(client, request):   # pylint: disable=redefined-outer-name
        client:
        request:
    """
    duration_minutes = 15
    start_time = time.time()    # Start timer
    duration_minutes = 20
    logger.debug('Running test in Turckheim')

    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
                            "nature": {"type": "nature", "score": 0},
                            "shopping": {"type": "shopping", "score": 0},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [48.084588, 7.280405]
            # "start": [45.74445023349939, 4.8222687890538865]
            # "start": [45.75156398104873, 4.827154464827647]
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])


    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200    # check for successful planning
    assert isinstance(landmarks, list)    # check that the return type is a list
    assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
    assert len(landmarks) > 2    # check that there is something to visit
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"
    # assert 2!= 3



def test_bellecour(client, request) :   # pylint: disable=redefined-outer-name
@@ -53,10 +78,14 @@ def test_bellecour(client, request) :   # pylint: disable=redefined-outer-name
        client:
        request:
    """
    duration_minutes = 30
    start_time = time.time()    # Start timer
    duration_minutes = 120


    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
@@ -66,30 +95,254 @@ def test_bellecour(client, request) :   # pylint: disable=redefined-outer-name
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])
    osm_ids = landmarks_to_osmid(landmarks)

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200    # check for successful planning
    assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
    assert 136200148 in osm_ids    # check for Cathédrale St. Jean in trip
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"


def test_shopping(client, request) :   # pylint: disable=redefined-outer-name
def test_cologne(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°3 : Custom test in Lyon centre to ensure shopping clusters are found.
    Test n°3 : Custom test in Cologne to ensure proper decision making in crowded area.

    Args:
        client:
        request:
    """
    duration_minutes = 600
    start_time = time.time()    # Start timer
    duration_minutes = 240

    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [50.942352665, 6.957777972392]
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200    # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"


def test_strasbourg(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°4 : Custom test in Strasbourg to ensure proper decision making in crowded area.

    Args:
        client:
        request:
    """
    start_time = time.time()    # Start timer
    duration_minutes = 180

    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [48.5846589226, 7.74078715721]
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200    # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"


def test_zurich(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°5 : Custom test in Zurich to ensure proper decision making in crowded area.

    Args:
        client:
        request:
    """
    start_time = time.time()    # Start timer
    duration_minutes = 180

    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [47.377884227, 8.5395114066]
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200    # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"


def test_paris(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°6 : Custom test in Paris (les Halles) centre to ensure proper decision making in crowded area.

    Args:
        client:
        request:
    """
    start_time = time.time()    # Start timer
    duration_minutes = 200

    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 0},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [48.85468881798671, 2.3423925755998374]
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200    # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"


def test_new_york(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°7 : Custom test in New York to ensure proper decision making in crowded area.

    Args:
        client:
        request:
    """
    start_time = time.time()    # Start timer
    duration_minutes = 600

    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [40.72592726802, -73.9920434795]
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200    # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"


def test_shopping(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°8 : Custom test in Lyon centre to ensure shopping clusters are found.

    Args:
        client:
        request:
    """
    start_time = time.time()    # Start timer
    duration_minutes = 240

    response = client.post(
        "/trip/new",
        json={
            "user_id": USER_ID,
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 0},
                            "nature": {"type": "nature", "score": 0},
                            "shopping": {"type": "shopping", "score": 5},
@@ -99,30 +352,20 @@ def test_shopping(client, request) :   # pylint: disable=redefined-outer-name
        }
    )
    result = response.json()
    supabase.increment_credit_balance(user_id=USER_ID)
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])
    # osm_ids = landmarks_to_osmid(landmarks)

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200    # check for successful planning
    assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2

# def test_new_trip_single_prefs(client):
#     response = client.post(
#         "/trip/new",
#         json={
#             "preferences": {"sightseeing": {"type": "sightseeing", "score": 1},
#                             "nature": {"type": "nature", "score": 1},
#                             "shopping": {"type": "shopping", "score": 1},
#                             "max_time_minute": 360,
#                             "detour_tolerance_minute": 0},
#             "start": [48.8566, 2.3522]
#         }
#     )
#     assert response.status_code == 200


# def test_new_trip_matches_prefs(client):
#     pass
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"

@@ -6,11 +6,13 @@ import pytest
from ..structs.landmark import Toilets
from ..main import app


@pytest.fixture(scope="module")
def client():
    """Client used to call the app."""
    return TestClient(app)


@pytest.mark.parametrize(
    "location,radius,status_code",
    [
@@ -39,8 +41,6 @@ def test_invalid_input(client, location, radius, status_code):    # pylint: disa
    assert response.status_code == status_code




@pytest.mark.parametrize(
    "location,status_code",
    [
@@ -66,11 +66,10 @@ def test_no_toilets(client, location, status_code):    # pylint: disable=redefin
    toilets_list = [Toilets.model_validate(toilet) for toilet in response.json()]

    # checks :
    assert response.status_code == 200    # check for successful planning
    assert response.status_code == status_code    # check for successful planning
    assert isinstance(toilets_list, list)    # check that the return type is a list



@pytest.mark.parametrize(
    "location,status_code",
    [
@@ -97,6 +96,6 @@ def test_toilets(client, location, status_code):    # pylint: disable=redefined-
    toilets_list = [Toilets.model_validate(toilet) for toilet in response.json()]

    # checks :
    assert response.status_code == 200    # check for successful planning
    assert response.status_code == status_code    # check for successful planning
    assert isinstance(toilets_list, list)    # check that the return type is a list
    assert len(toilets_list) > 0
    assert len(toilets_list) > 0
48 backend/src/tests/test_user.py Normal file
@@ -0,0 +1,48 @@
"""Collection of tests to ensure correct handling of user data."""

from fastapi.testclient import TestClient
import pytest

from ..main import app

TEST_EMAIL = "dummy@example.com"
TEST_PW = "DummyPassword123"

@pytest.fixture(scope="module")
def client():
    """Client used to call the app."""
    return TestClient(app)


def test_user_handling(client) :
    """
    Test the creation of a new user.
    """
    # Create a new user
    response = client.post(f"/user/create/{TEST_EMAIL}/{TEST_PW}")

    # Verify user has been created
    assert response.status_code == 200, "Failed to create dummy user"
    user_id = response.json()


    # Create same user again to raise an error
    response = client.post(f"/user/create/{TEST_EMAIL}/{TEST_PW}")
    # Verify user already exists
    assert response.status_code == 422, "Failed to simulate dummy user already created."


    # Delete the user.
    response = client.post(f"/user/delete/{user_id}")

    # Verify user has been deleted
    assert response.status_code == 200, "Failed to delete dummy user."


    # Delete the user again to raise an error
    response = client.post(f"/user/delete/{user_id}")
    # Verify the user can no longer be found
    assert response.status_code == 404, "Failed to simulate dummy user already deleted."

@@ -4,7 +4,7 @@ from fastapi import HTTPException
from pydantic import ValidationError

from ..structs.landmark import Landmark
from ..persistence import client as cache_client
from ..cache import client as cache_client


def landmarks_to_osmid(landmarks: list[Landmark]) -> list[int] :
@@ -23,45 +23,7 @@ def landmarks_to_osmid(landmarks: list[Landmark]) -> list[int] :

    return ids

def fetch_landmark(client, landmark_uuid: str):
    """
    Fetch landmark data from the API based on the landmark UUID.

    Args:
        landmark_uuid (str): The UUID of the landmark.

    Returns:
        dict: Landmark data fetched from the API.
    """
    logger = logging.getLogger(__name__)
    response = client.get(f"/landmark/{landmark_uuid}")

    if response.status_code != 200:
        raise HTTPException(status_code=500,
                            detail=f"Failed to fetch landmark with UUID {landmark_uuid}: {response.status_code}")

    try:
        json_data = response.json()
        logger.info(f"API Response: {json_data}")
    except ValueError as e:
        logger.error(f"Failed to parse response as JSON: {response.text}")
        raise HTTPException(status_code=500, detail="Invalid response format from API")

    # Try validating against the Landmark model here to ensure consistency
    try:
        landmark = Landmark(**json_data)
    except ValidationError as ve:
        logging.error(f"Validation error: {ve}")
        raise HTTPException(status_code=500, detail="Invalid data format received from API")


    if "detail" in json_data:
        raise HTTPException(status_code=500, detail=json_data["detail"])

    return Landmark(**json_data)


def fetch_landmark_cache(landmark_uuid: str):
def fetch_landmark(landmark_uuid: str):
    """
    Fetch landmark data from the cache based on the landmark UUID.

@@ -75,26 +37,24 @@ def fetch_landmark_cache(landmark_uuid: str):

    # Try to fetch the landmark data from the cache
    try:
        landmark = cache_client.get(f"landmark_{landmark_uuid}")
        landmark = cache_client.get(f'landmark_{landmark_uuid}')
        if not landmark :
            logger.warning(f"Cache miss for landmark UUID: {landmark_uuid}")
            raise HTTPException(status_code=404, detail=f"Landmark with UUID {landmark_uuid} not found in cache.")

            logger.warning(f'Cache miss for landmark UUID: {landmark_uuid}')
            raise HTTPException(status_code=404, detail=f'Landmark with UUID {landmark_uuid} not found in cache.')

        # Validate that the fetched data is a dictionary
        if not isinstance(landmark, Landmark):
            logger.error(f"Invalid cache data format for landmark UUID: {landmark_uuid}. Expected dict, got {type(landmark).__name__}.")
            logger.error(f'Invalid cache data format for landmark UUID: {landmark_uuid}. Expected dict, got {type(landmark).__name__}.')
            raise HTTPException(status_code=500, detail="Invalid cache data format.")

        return landmark


    except Exception as exc:
        logger.error(f"Unexpected error occurred while fetching landmark UUID {landmark_uuid}: {exc}")
        logger.error(f'Unexpected error occurred while fetching landmark UUID {landmark_uuid}: {exc}')
        raise HTTPException(status_code=500, detail="An unexpected error occurred while fetching the landmark from the cache") from exc




def load_trip_landmarks(client, first_uuid: str, from_cache=None) -> list[Landmark]:
def load_trip_landmarks(client, first_uuid: str) -> list[Landmark]:
    """
    Load all landmarks for a trip using the response from the API.

@@ -108,10 +68,7 @@ def load_trip_landmarks(client, first_uuid: str, from_cache=None) -> list[Landma
    next_uuid = first_uuid

    while next_uuid is not None:
        if from_cache :
            landmark = fetch_landmark_cache(next_uuid)
        else :
            landmark = fetch_landmark(client, next_uuid)
        landmark = fetch_landmark(next_uuid)

        landmarks.append(landmark)
        next_uuid = landmark.next_uuid    # Prepare for the next iteration
@@ -122,14 +79,14 @@ def load_trip_landmarks(client, first_uuid: str, from_cache=None) -> list[Landma
def log_trip_details(request, landmarks: list[Landmark], duration: int, target_duration: int) :
    """
    Allows to show the detailed trip in the html test report.


    Args:
        request:
        landmarks (list): the ordered list of visited landmarks
        duration (int): the total duration of this trip
        target_duration(int): the target duration of this trip
    """
    trip_string = [f"{landmark.name} ({landmark.attractiveness} | {landmark.duration}) - {landmark.time_to_reach_next}" for landmark in landmarks]
    trip_string = [f'{landmark.name} ({landmark.attractiveness} | {landmark.duration}) - {landmark.time_to_reach_next}' for landmark in landmarks]

    # Pass additional info to pytest for reporting
    request.node.trip_details = trip_string

0 backend/src/utils/__init__.py Normal file
302 backend/src/utils/cluster_manager.py Normal file
@@ -0,0 +1,302 @@
"""Find clusters of interest to add more general areas of visit to the tour."""
import logging
from typing import Literal, Tuple

import numpy as np
from sklearn.cluster import DBSCAN
from pydantic import BaseModel

from ..overpass.overpass import Overpass, get_base_info
from ..structs.landmark import Landmark
from .get_time_distance import get_distance
from .utils import create_bbox



# silence the overpass logger
logging.getLogger('Overpass').setLevel(level=logging.CRITICAL)


class Cluster(BaseModel):
    """
    A class representing an interesting area for shopping or sightseeing.

    It can represent either a general area or a specific route with start and end point.
    The importance represents the number of shops found in this cluster.

    Attributes:
        type : either a 'street' or 'area' (representing a denser field of shops).
        importance : size of the cluster (number of points).
        centroid : center of the cluster.
        start : if the type is a street it goes from here...
        end : ...to here
    """
    type: Literal['street', 'area']
    importance: int
    centroid: Tuple[float, float]
    # start: Optional[list] = None    # for later use if we want to have streets as well
    # end: Optional[list] = None

class ClusterManager:
    """
    A manager responsible for clustering points of interest, such as shops or historic sites,
    to identify areas worth visiting. It uses the DBSCAN algorithm to detect clusters
    based on a set of points retrieved from OpenStreetMap (OSM).

    Attributes:
        logger (logging.Logger): Logger for capturing relevant events and errors.
        valid (bool): Indicates whether clusters were successfully identified.
        all_points (list): All points retrieved from OSM, representing locations of interest.
        cluster_points (list): Points identified as part of a cluster.
        cluster_labels (list): Labels corresponding to the clusters each point belongs to.
        cluster_type (Literal['sightseeing', 'shopping']): Type of clustering, either for sightseeing
            landmarks or shopping areas.
    """
    logger = logging.getLogger(__name__)

    # NOTE: all points are in (lat, lon) format
    valid: bool    # Ensure the manager is valid (ie there are some clusters to be found)
    all_points: list
    cluster_points: list
    cluster_labels: list
    cluster_type: Literal['sightseeing', 'shopping']

    def __init__(self, bbox: tuple, cluster_type: Literal['sightseeing', 'shopping']) -> None:
        """
        Upon initialization, generate the point cloud used for cluster detection.
        The points represent bag/clothes shops and general boutiques.
        If the first step is successful, it applies the DBSCAN clustering algorithm with different
        parameters depending on the size of the city (number of points).
        It filters out noise points and keeps only the largest clusters.

        A successful initialization updates:
            - `self.cluster_points`: The points belonging to clusters.
            - `self.cluster_labels`: The labels for the points in clusters.

        The method also calls `filter_clusters()` to retain only the largest clusters.

        Args:
            bbox: The bounding box coordinates (around:radius, center_lat, center_lon).
        """
        # Setup the caching in the Overpass class.
        self.overpass = Overpass()

        self.cluster_type = cluster_type
        if cluster_type == 'shopping' :
            osm_types = ['node']
            sel = '"shop"~"^(bag|boutique|clothes)$"'
            out = 'ids center'
        elif cluster_type == 'sightseeing' :
            osm_types = ['way']
            sel = '"historic"~"^(monument|building|yes)$"'
            out = 'ids center'
        else :
            raise NotImplementedError("Please choose only an available option for cluster detection")

        # Initialize the points for cluster detection
        try:
            result = self.overpass.send_query(
                bbox = bbox,
                osm_types = osm_types,
                selector = sel,
                out = out
            )
        except Exception as e:
            self.logger.error(f"Error fetching clusters: {e}")

        if result is None :
            self.logger.debug(f"Found no {cluster_type} clusters, overpass query returned no datapoints.")
            self.valid = False

        else :
            points = []
            for elem in result:
                osm_type = elem.get('type')

                # Get coordinates and append them to the points list
                _, coords = get_base_info(elem, osm_type)
                if coords is not None :
                    points.append(coords)

            if points :
                self.all_points = np.array(points)

                # Apply DBSCAN to find clusters. Choose different settings for different cities.
                if self.cluster_type == 'shopping' and len(self.all_points) > 200 :
                    dbscan = DBSCAN(eps=0.00118, min_samples=15, algorithm='kd_tree')    # for large cities
                elif self.cluster_type == 'sightseeing' :
                    dbscan = DBSCAN(eps=0.0025, min_samples=15, algorithm='kd_tree')     # for historic neighborhoods
                else :
                    dbscan = DBSCAN(eps=0.00075, min_samples=10, algorithm='kd_tree')    # for small cities

                labels = dbscan.fit_predict(self.all_points)

                # Check that there is at least 1 cluster
                if len(set(labels)) > 1 :
                    self.logger.info(f"Found {len(set(labels))} {cluster_type} clusters.")
                    # Separate clustered points and noise points
                    self.cluster_points = self.all_points[labels != -1]
                    self.cluster_labels = labels[labels != -1]
                    self.filter_clusters()    # ValueError here sometimes. I dont know why. # Filter the clusters to keep only the largest ones.
                    self.valid = True

                else :
                    self.logger.info(f"Found 0 {cluster_type} clusters.")
                    self.valid = False

            else :
                self.logger.debug(f"Detected 0 {cluster_type} clusters.")
                self.valid = False

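# Aside (not part of the file): eps above is in degrees of latitude/longitude,
# since the points are raw (lat, lon) pairs. One degree of latitude is ~111 km,
# so eps=0.00118 groups points within roughly 130 m, eps=0.0025 reaches ~280 m
# for the sparser 'sightseeing' ways, and eps=0.00075 is ~85 m for small cities.
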
    def generate_clusters(self) -> list[Landmark]:
        """
        Generate a list of landmarks based on identified clusters.

        This method iterates over the different clusters, calculates the centroid
        (as the mean of the points within each cluster), and assigns an importance
        based on the size of the cluster.

        The generated shopping locations are stored in `self.clusters`
        as a list of `Cluster` objects, each with:
            - `type`: Set to 'area'.
            - `centroid`: The calculated centroid of the cluster.
            - `importance`: The number of points in the cluster.
        """

        if not self.valid :
            return []    # Return empty list if no clusters were found

        locations = []

        # loop through the different clusters
        for label in set(self.cluster_labels):

            # Extract points belonging to the current cluster
            current_cluster = self.cluster_points[self.cluster_labels == label]

            # Calculate the centroid as the mean of the points
            centroid = np.mean(current_cluster, axis=0)
            centroid = tuple((round(centroid[0], 7), round(centroid[1], 7)))

            if self.cluster_type == 'shopping' :
                score = len(current_cluster)*3
            else :
                score = len(current_cluster)*15
            locations.append(Cluster(
                type='area',
                centroid=centroid,
                importance = score
            ))

        # Transform the locations into landmarks and return the list
        cluster_landmarks = []
        for cluster in locations :
            cluster_landmarks.append(self.create_landmark(cluster))

        return cluster_landmarks

    def create_landmark(self, cluster: Cluster) -> Landmark:
        """
        Create a Landmark object based on the given cluster.

        This method queries the Overpass API for nearby neighborhoods and shopping malls
        within a 300 m radius around the cluster centroid. It selects the closest
        result and creates a landmark with the associated details such as name, type, and OSM ID.

        Parameters:
            cluster (Cluster): A Cluster object containing
                               the centroid and importance of the area.

        Returns:
            Landmark: A Landmark object containing details such as the name, type,
                      location, attractiveness, and OSM details.
        """

        # Define the bounding box for a given radius around the coordinates
        bbox = create_bbox(cluster.centroid, 300)

        # Query neighborhoods and shopping malls
        selectors = ['"place"~"^(suburb|neighborhood|neighbourhood|quarter|city_block)$"']

        if self.cluster_type == 'shopping' :
            selectors.append('"shop"="mall"')
            new_name = 'Shopping Area'
            t = 30
        else :
            new_name = 'Neighborhood'
            t = 20

        min_dist = float('inf')
        osm_id = 0
        osm_type = 'node'
        osm_types = ['node', 'way', 'relation']

        for sel in selectors :
            try:
                result = self.overpass.send_query(bbox = bbox,
                                                  osm_types = osm_types,
                                                  selector = sel,
                                                  out = 'ids center tags'
                                                  )
            except Exception as e:
                self.logger.error(f"Error fetching clusters: {e}")
                continue

            if result is None :
                self.logger.error(f"No result for selector '{sel}', skipping it.")
                continue

            for elem in result:
                osm_type = elem.get('type')

                id, coords, name = get_base_info(elem, osm_type, with_name=True)

                if name is None or coords is None :
                    continue

                d = get_distance(cluster.centroid, coords)
                if d < min_dist :
                    min_dist = d
                    new_name = name          # add name
                    osm_type = osm_type      # add type: 'way' or 'relation'
                    osm_id = id              # add OSM id

        return Landmark(
            name=new_name,
            type=self.cluster_type,
            location=cluster.centroid,          # later: use the fact that we can also recognize streets.
            attractiveness=cluster.importance,
            n_tags=0,
            osm_id=osm_id,
            osm_type=osm_type,
            duration=t
        )

    def filter_clusters(self):
        """
        Filter clusters to retain only the 5 largest clusters by point count.

        This method calculates the size of each cluster and filters out all but the
        5 largest clusters. It then updates the cluster points and labels to reflect
        only those from the top 5 clusters.
        """
        label_counts = np.bincount(self.cluster_labels)

        # Get the indices (labels) of the 5 largest clusters
        top_5_labels = np.argsort(label_counts)[-5:]

        # Filter points to keep only the points in the top 5 clusters
        filtered_cluster_points = []
        filtered_cluster_labels = []

        for label in top_5_labels:
            filtered_cluster_points.append(self.cluster_points[self.cluster_labels == label])
            filtered_cluster_labels.append(np.full((label_counts[label],), label))    # Replicate the label

        # update the cluster points and labels with the filtered data
        self.cluster_points = np.vstack(filtered_cluster_points)    # may raise ValueError when a selected label has no points
        self.cluster_labels = np.concatenate(filtered_cluster_labels)
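A note on the DBSCAN settings in this new ClusterManager: the points fed to fit_predict are raw (lat, lon) pairs, so the eps radii are expressed in degrees, not meters. One degree of latitude is roughly 111 km, so 0.00118 deg is about 131 m, 0.0025 deg about 278 m, and 0.00075 deg about 83 m (longitude degrees shrink by cos(latitude), which this tuning ignores). A minimal standalone sketch of the same clustering call, with made-up coordinates:

    import numpy as np
    from sklearn.cluster import DBSCAN

    # Two tight groups of shops plus one stray point (coordinates are made up)
    points = np.array([
        [48.8584, 2.2945], [48.8585, 2.2946], [48.8586, 2.2944],   # group 1
        [48.8600, 2.3000], [48.8601, 2.3001], [48.8602, 2.2999],   # group 2
        [48.9000, 2.4000],                                          # stray
    ])
    labels = DBSCAN(eps=0.00075, min_samples=3, algorithm='kd_tree').fit_predict(points)
    print(labels)    # [0 0 0 1 1 1 -1] -- DBSCAN marks noise points with label -1
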
@@ -1,283 +0,0 @@
import logging
from typing import Literal

import numpy as np
from sklearn.cluster import DBSCAN
from pydantic import BaseModel
from OSMPythonTools.overpass import Overpass, overpassQueryBuilder
from OSMPythonTools.cachingStrategy import CachingStrategy, JSON

from ..structs.landmark import Landmark
from ..utils.get_time_separation import get_distance
from ..constants import AMENITY_SELECTORS_PATH, LANDMARK_PARAMETERS_PATH, OPTIMIZER_PARAMETERS_PATH, OSM_CACHE_DIR


class ShoppingLocation(BaseModel):
    """
    A class representing an interesting area for shopping.

    It can represent either a general area or a specific route with start and end point.
    The importance represents the number of shops found in this cluster.

    Attributes:
        type : either a 'street' or 'area' (representing a denser field of shops).
        importance : size of the cluster (number of points).
        centroid : center of the cluster.
        start : if the type is a street it goes from here...
        end : ...to here
    """
    type: Literal['street', 'area']
    importance: int
    centroid: tuple
    # start: Optional[list] = None    # for later use if we want to have streets as well
    # end: Optional[list] = None


class ShoppingManager:

    logger = logging.getLogger(__name__)

    # NOTE: all points are in (lat, lon) format
    valid: bool               # Ensure the manager is valid (ie there are some clusters to be found)
    all_points: list
    cluster_points: list
    cluster_labels: list
    shopping_locations: list[ShoppingLocation]

    def __init__(self, bbox: tuple) -> None:
        """
        Upon initialization, generate the point cloud used for cluster detection.
        The points represent bag/clothes shops and general boutiques.

        Args:
            bbox: The bounding box coordinates (around:radius, center_lat, center_lon).
        """

        # Initialize overpass and cache
        self.overpass = Overpass()
        CachingStrategy.use(JSON, cacheDir=OSM_CACHE_DIR)

        # Initialize the points for cluster detection
        query = overpassQueryBuilder(
            bbox = bbox,
            elementType = ['node'],
            selector = ['"shop"~"^(bag|boutique|clothes)$"'],
            includeCenter = True,
            out = 'skel'
        )

        try:
            result = self.overpass.query(query)
        except Exception as e:
            self.logger.error(f"Error fetching landmarks: {e}")

        if len(result.elements()) == 0 :
            self.valid = False

        else :
            points = []
            for elem in result.elements() :
                points.append(tuple((elem.lat(), elem.lon())))

            self.all_points = np.array(points)
            self.valid = True


    def generate_shopping_landmarks(self) -> list[Landmark]:
        """
        Generate shopping landmarks based on clustered locations.

        This method first generates clusters of locations and then extracts shopping-related
        locations from these clusters. It transforms each shopping location into a `Landmark` object.

        Returns:
            list[Landmark]: A list of `Landmark` objects representing shopping locations.
                            Returns an empty list if no clusters are found.
        """

        self.generate_clusters()

        if len(set(self.cluster_labels)) == 0 :
            return []    # Return empty list if no clusters were found

        # Then generate the shopping locations
        self.generate_shopping_locations()

        # Transform the locations in landmarks and return the list
        shopping_landmarks = []
        for location in self.shopping_locations :
            shopping_landmarks.append(self.create_landmark(location))

        return shopping_landmarks


    def generate_clusters(self) :
        """
        Generate clusters of points using DBSCAN.

        This method applies the DBSCAN clustering algorithm with different
        parameters depending on the size of the city (number of points).
        It filters out noise points and keeps only the largest clusters.

        The method updates:
            - `self.cluster_points`: The points belonging to clusters.
            - `self.cluster_labels`: The labels for the points in clusters.

        The method also calls `filter_clusters()` to retain only the largest clusters.
        """

        # Apply DBSCAN to find clusters. Choose different settings for different cities.
        if len(self.all_points) > 200 :
            dbscan = DBSCAN(eps=0.00118, min_samples=15, algorithm='kd_tree')    # for large cities
        else :
            dbscan = DBSCAN(eps=0.00075, min_samples=10, algorithm='kd_tree')    # for small cities

        labels = dbscan.fit_predict(self.all_points)

        # Separate clustered points and noise points
        self.cluster_points = self.all_points[labels != -1]
        self.cluster_labels = labels[labels != -1]

        # filter the clusters to keep only the largest ones
        self.filter_clusters()


    def generate_shopping_locations(self) :
        """
        Generate shopping locations based on clustered points.

        This method iterates over the different clusters, calculates the centroid
        (as the mean of the points within each cluster), and assigns an importance
        based on the size of the cluster.

        The generated shopping locations are stored in `self.shopping_locations`
        as a list of `ShoppingLocation` objects, each with:
            - `type`: Set to 'area'.
            - `centroid`: The calculated centroid of the cluster.
            - `importance`: The number of points in the cluster.
        """

        locations = []

        # loop through the different clusters
        for label in set(self.cluster_labels):

            # Extract points belonging to the current cluster
            current_cluster = self.cluster_points[self.cluster_labels == label]

            # Calculate the centroid as the mean of the points
            centroid = np.mean(current_cluster, axis=0)

            locations.append(ShoppingLocation(
                type='area',
                centroid=centroid,
                importance = len(current_cluster)
            ))

        self.shopping_locations = locations


    def create_landmark(self, shopping_location: ShoppingLocation) -> Landmark:
        """
        Create a Landmark object based on the given shopping location.

        This method queries the Overpass API for nearby neighborhoods and shopping malls
        within a 1000m radius around the shopping location centroid. It selects the closest
        result and creates a landmark with the associated details such as name, type, and OSM ID.

        Parameters:
            shopping_location (ShoppingLocation): A ShoppingLocation object containing
                                                  the centroid and importance of the area.

        Returns:
            Landmark: A Landmark object containing details such as the name, type,
                      location, attractiveness, and OSM details.
        """

        # Define the bounding box for a given radius around the coordinates
        lat, lon = shopping_location.centroid
        bbox = ("around:1000", str(lat), str(lon))

        # Query neighborhoods and shopping malls
        selectors = ['"place"~"^(suburb|neighborhood|neighbourhood|quarter|city_block)$"', '"shop"="mall"']

        min_dist = float('inf')
        new_name = 'Shopping Area'
        new_name_en = None
        osm_id = 0
        osm_type = 'node'

        for sel in selectors :
            query = overpassQueryBuilder(
                bbox = bbox,
                elementType = ['node', 'way', 'relation'],
                selector = sel,
                includeCenter = True,
                out = 'center'
            )

            try:
                result = self.overpass.query(query)
            except Exception as e:
                self.logger.error(f"Error fetching landmarks: {e}")
                continue

            for elem in result.elements():
                location = (elem.centerLat(), elem.centerLon())

                if location[0] is None :
                    location = (elem.lat(), elem.lon())
                    if location[0] is None :
                        continue

                d = get_distance(shopping_location.centroid, location)
                if d < min_dist :
                    min_dist = d
                    new_name = elem.tag('name')
                    osm_type = elem.type()    # Add type: 'way' or 'relation'
                    osm_id = elem.id()        # Add OSM id

                    # Add english name if it exists
                    try :
                        new_name_en = elem.tag('name:en')
                    except:
                        pass

        return Landmark(
            name=new_name,
            type='shopping',
            location=shopping_location.centroid,    # TODO: use the fact that we can also recognize streets.
            attractiveness=shopping_location.importance,
            n_tags=0,
            osm_id=osm_id,
            osm_type=osm_type,
            name_en=new_name_en
        )


    def filter_clusters(self):
        """
        Filter clusters to retain only the 5 largest clusters by point count.

        This method calculates the size of each cluster and filters out all but the
        5 largest clusters. It then updates the cluster points and labels to reflect
        only those from the top 5 clusters.
        """
        label_counts = np.bincount(self.cluster_labels)

        # Get the indices (labels) of the 5 largest clusters
        top_5_labels = np.argsort(label_counts)[-5:]    # Get the largest 5 clusters

        # Filter points to keep only the points in the top 5 clusters
        filtered_cluster_points = []
        filtered_cluster_labels = []

        for label in top_5_labels:
            filtered_cluster_points.append(self.cluster_points[self.cluster_labels == label])
            filtered_cluster_labels.append(np.full((label_counts[label],), label))    # Replicate the label

        # update the cluster points and labels with the filtered data
        self.cluster_points = np.vstack(filtered_cluster_points)
        self.cluster_labels = np.concatenate(filtered_cluster_labels)

@@ -1,8 +1,10 @@
import yaml
"""Contains various helper functions to help with distance or score computations."""
from math import sin, cos, sqrt, atan2, radians
import yaml

from ..constants import OPTIMIZER_PARAMETERS_PATH


with OPTIMIZER_PARAMETERS_PATH.open('r') as f:
    parameters = yaml.safe_load(f)
    DETOUR_FACTOR = parameters['detour_factor']
@@ -10,6 +12,7 @@ with OPTIMIZER_PARAMETERS_PATH.open('r') as f:

EARTH_RADIUS_KM = 6373


def get_time(p1: tuple[float, float], p2: tuple[float, float]) -> int:
    """
    Calculate the time in minutes to travel from one location to another.
@@ -21,25 +24,23 @@ def get_time(p1: tuple[float, float], p2: tuple[float, float]) -> int:
    Returns:
        int: Time to travel from p1 to p2 in minutes.
    """
    # if p1 == p2:
    #     return 0
    # else:
    # Compute the distance in km along the surface of the Earth
    # (assume spherical Earth)
    # this is the haversine formula, stolen from stackoverflow
    # in order to not use any external libraries
    lat1, lon1 = radians(p1[0]), radians(p1[1])
    lat2, lon2 = radians(p2[0]), radians(p2[1])

    dlon = lon2 - lon1
    dlat = lat2 - lat1

    if p1 == p2:
        return 0
    else:
        # Compute the distance in km along the surface of the Earth
        # (assume spherical Earth)
        # this is the haversine formula, stolen from stackoverflow
        # in order to not use any external libraries
        lat1, lon1 = radians(p1[0]), radians(p1[1])
        lat2, lon2 = radians(p2[0]), radians(p2[1])
    a = sin(dlat / 2)**2 + cos(lat1) * cos(lat2) * sin(dlon / 2)**2
    c = 2 * atan2(sqrt(a), sqrt(1 - a))

        dlon = lon2 - lon1
        dlat = lat2 - lat1

        a = sin(dlat / 2)**2 + cos(lat1) * cos(lat2) * sin(dlon / 2)**2
        c = 2 * atan2(sqrt(a), sqrt(1 - a))

        distance = EARTH_RADIUS_KM * c
    distance = EARTH_RADIUS_KM * c

    # Consider the detour factor for an average city
    walk_distance = distance * DETOUR_FACTOR
@@ -47,7 +48,7 @@ def get_time(p1: tuple[float, float], p2: tuple[float, float]) -> int:
    # Time to walk this distance (in minutes)
    walk_time = walk_distance / AVERAGE_WALKING_SPEED * 60

    return round(walk_time)
    return min(round(walk_time), 32765)


def get_distance(p1: tuple[float, float], p2: tuple[float, float]) -> int:
@@ -61,22 +62,19 @@ def get_distance(p1: tuple[float, float], p2: tuple[float, float]) -> int:
    Returns:
        int: Distance from p1 to p2 in kilometers.
    """


    if p1 == p2:
        return 0
    else:
        # Compute the distance in km along the surface of the Earth
        # (assume spherical Earth)
        # this is the haversine formula, stolen from stackoverflow
        # in order to not use any external libraries
        lat1, lon1 = radians(p1[0]), radians(p1[1])
        lat2, lon2 = radians(p2[0]), radians(p2[1])
    # Compute the distance in km along the surface of the Earth
    # (assume spherical Earth)
    # this is the haversine formula, stolen from stackoverflow
    # in order to not use any external libraries
    lat1, lon1 = radians(p1[0]), radians(p1[1])
    lat2, lon2 = radians(p2[0]), radians(p2[1])

        dlon = lon2 - lon1
        dlat = lat2 - lat1
    dlon = lon2 - lon1
    dlat = lat2 - lat1

        a = sin(dlat / 2)**2 + cos(lat1) * cos(lat2) * sin(dlon / 2)**2
        c = 2 * atan2(sqrt(a), sqrt(1 - a))
    a = sin(dlat / 2)**2 + cos(lat1) * cos(lat2) * sin(dlon / 2)**2
    c = 2 * atan2(sqrt(a), sqrt(1 - a))

        return EARTH_RADIUS_KM * c
    return EARTH_RADIUS_KM * c
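As a sanity check on the haversine arithmetic above, here is a self-contained walk-time computation. The detour factor and walking speed below are stand-ins (the real values come from the optimizer parameters YAML, which this diff does not show); the new 32765 cap presumably keeps travel times inside the int16 constraint matrices the optimizer builds.

    from math import sin, cos, sqrt, atan2, radians

    EARTH_RADIUS_KM = 6373
    DETOUR_FACTOR = 1.4              # stand-in value, not from the repo's YAML
    AVERAGE_WALKING_SPEED = 4.8      # km/h, also a stand-in

    def walk_minutes(p1, p2):
        # same haversine as get_time above
        lat1, lon1 = radians(p1[0]), radians(p1[1])
        lat2, lon2 = radians(p2[0]), radians(p2[1])
        a = sin((lat2 - lat1) / 2)**2 + cos(lat1) * cos(lat2) * sin((lon2 - lon1) / 2)**2
        distance = EARTH_RADIUS_KM * 2 * atan2(sqrt(a), sqrt(1 - a))
        return min(round(distance * DETOUR_FACTOR / AVERAGE_WALKING_SPEED * 60), 32765)

    # ~1.7 km apart as the crow flies -> about 1.7 * 1.4 / 4.8 * 60 ~ 30 minutes
    print(walk_minutes((48.8584, 2.2945), (48.8738, 2.2950)))
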
@@ -1,27 +1,29 @@
import math, yaml, logging
from OSMPythonTools.overpass import Overpass, overpassQueryBuilder
from OSMPythonTools.cachingStrategy import CachingStrategy, JSON
"""Module used to import data from OSM and arrange them in categories."""
import logging
import yaml

from ..structs.preferences import Preferences
from ..structs.landmark import Landmark
from .take_most_important import take_most_important
from .cluster_processing import ShoppingManager
from .cluster_manager import ClusterManager
from ..overpass.overpass import Overpass, get_base_info
from .utils import create_bbox

from ..constants import AMENITY_SELECTORS_PATH, LANDMARK_PARAMETERS_PATH, OPTIMIZER_PARAMETERS_PATH, OSM_CACHE_DIR

# silence the overpass logger
logging.getLogger('OSMPythonTools').setLevel(level=logging.CRITICAL)
from ..constants import AMENITY_SELECTORS_PATH, LANDMARK_PARAMETERS_PATH, OPTIMIZER_PARAMETERS_PATH


class LandmarkManager:

    """
    Use this to manage landmarks.
    Uses the overpass api to fetch landmarks and classify them.
    """
    logger = logging.getLogger(__name__)

    radius_close_to: int    # radius in meters
    church_coeff: float     # coeff to adjust score of churches
    nature_coeff: float     # coeff to adjust score of parks
    overall_coeff: float    # coeff to adjust weight of tags
    N_important: int        # number of important landmarks to consider
    n_important: int        # number of important landmarks to consider


    def __init__(self) -> None:
@@ -31,26 +33,26 @@ class LandmarkManager:

        with LANDMARK_PARAMETERS_PATH.open('r') as f:
            parameters = yaml.safe_load(f)
            self.max_bbox_side = parameters['city_bbox_side']
            self.radius_close_to = parameters['radius_close_to']
            self.max_bbox_side = parameters['max_bbox_side']
            self.church_coeff = parameters['church_coeff']
            self.nature_coeff = parameters['nature_coeff']
            self.overall_coeff = parameters['overall_coeff']
            self.tag_exponent = parameters['tag_exponent']
            self.image_bonus = parameters['image_bonus']
            self.name_bonus = parameters['name_bonus']
            self.wikipedia_bonus = parameters['wikipedia_bonus']
            self.viewpoint_bonus = parameters['viewpoint_bonus']
            self.pay_bonus = parameters['pay_bonus']
            self.N_important = parameters['N_important']
            self.n_important = parameters['N_important']

        with OPTIMIZER_PARAMETERS_PATH.open('r') as f:
            parameters = yaml.safe_load(f)
            self.walking_speed = parameters['average_walking_speed']
            self.detour_factor = parameters['detour_factor']

        # Setup the caching in the Overpass class.
        self.overpass = Overpass()
        CachingStrategy.use(JSON, cacheDir=OSM_CACHE_DIR)

        self.logger.info('LandmarkManager successfully initialized.')


    def generate_landmarks_list(self, center_coordinates: tuple[float, float], preferences: Preferences) -> tuple[list[Landmark], list[Landmark]]:
@@ -70,117 +72,89 @@ class LandmarkManager:
            - A list of all existing landmarks.
            - A list of the most important landmarks based on the user's preferences.
        """

        max_walk_dist = (preferences.max_time_minute/2)/60*self.walking_speed*1000/self.detour_factor
        reachable_bbox_side = min(max_walk_dist, self.max_bbox_side)
        self.logger.debug('Starting to fetch landmarks...')
        max_walk_dist = int((preferences.max_time_minute/2)/60*self.walking_speed*1000/self.detour_factor)
        radius = min(max_walk_dist, int(self.max_bbox_side/2))

        # use set to avoid duplicates, this requires some __methods__ to be set in Landmark
        all_landmarks = set()

        # Create a bbox using the around technique
        bbox = tuple((f"around:{reachable_bbox_side/2}", str(center_coordinates[0]), str(center_coordinates[1])))

        # Create a bbox using the around technique, tuple of strings
        bbox = create_bbox(center_coordinates, radius)

        # list for sightseeing
        if preferences.sightseeing.score != 0:
            score_function = lambda score: score * 10 * preferences.sightseeing.score / 5
            current_landmarks = self.fetch_landmarks(bbox, self.amenity_selectors['sightseeing'], preferences.sightseeing.type, score_function)
            self.logger.debug('Fetching sightseeing landmarks...')
            current_landmarks = self.fetch_landmarks(bbox, self.amenity_selectors['sightseeing'], preferences.sightseeing.type, preferences.sightseeing.score)
            all_landmarks.update(current_landmarks)
            self.logger.info(f'Found {len(current_landmarks)} sightseeing landmarks')

            # special pipeline for historic neighborhoods
            neighborhood_manager = ClusterManager(bbox, 'sightseeing')
            historic_clusters = neighborhood_manager.generate_clusters()
            all_landmarks.update(historic_clusters)

        # list for nature
        if preferences.nature.score != 0:
            score_function = lambda score: score * 10 * self.nature_coeff * preferences.nature.score / 5
            current_landmarks = self.fetch_landmarks(bbox, self.amenity_selectors['nature'], preferences.nature.type, score_function)
            self.logger.debug('Fetching nature landmarks...')
            current_landmarks = self.fetch_landmarks(bbox, self.amenity_selectors['nature'], preferences.nature.type, preferences.nature.score)
            all_landmarks.update(current_landmarks)
            self.logger.info(f'Found {len(current_landmarks)} nature landmarks')


        # list for shopping
        if preferences.shopping.score != 0:
            score_function = lambda score: score * 10 * preferences.shopping.score / 5
            current_landmarks = self.fetch_landmarks(bbox, self.amenity_selectors['shopping'], preferences.shopping.type, score_function)

            self.logger.debug('Fetching shopping landmarks...')
            current_landmarks = self.fetch_landmarks(bbox, self.amenity_selectors['shopping'], preferences.shopping.type, preferences.shopping.score)
            self.logger.info(f'Found {len(current_landmarks)} shopping landmarks')

            # set time for all shopping activities :
            for landmark in current_landmarks : landmark.duration = 30
            for landmark in current_landmarks :
                landmark.duration = 30
            all_landmarks.update(current_landmarks)

            # special pipeline for shopping malls
            shopping_manager = ShoppingManager(bbox)
            if shopping_manager.valid :
                shopping_clusters = shopping_manager.generate_shopping_landmarks()
                for landmark in shopping_clusters : landmark.duration = 45
                all_landmarks.update(shopping_clusters)

            shopping_manager = ClusterManager(bbox, 'shopping')
            shopping_clusters = shopping_manager.generate_clusters()
            all_landmarks.update(shopping_clusters)


        landmarks_constrained = take_most_important(all_landmarks, self.N_important)
        self.logger.info(f'Generated {len(all_landmarks)} landmarks around {center_coordinates}, and constrained to {len(landmarks_constrained)} most important ones.')
        landmarks_constrained = take_most_important(all_landmarks, self.n_important)
        # self.logger.info(f'All landmarks generated : {len(all_landmarks)} landmarks around {center_coordinates}, and constrained to {len(landmarks_constrained)} most important ones.')

        return all_landmarks, landmarks_constrained


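For orientation, this is roughly how the reworked pipeline is driven end to end. The Preferences construction below is a sketch: only max_time_minute and the per-category type/score fields are visible in this diff, so the exact model layout (including the Preference class name) is an assumption.

    from .structs.preferences import Preferences, Preference   # Preference name is assumed

    prefs = Preferences(
        sightseeing=Preference(type='sightseeing', score=5),
        nature=Preference(type='nature', score=3),
        shopping=Preference(type='shopping', score=0),         # score 0 skips a category
        max_time_minute=180,
    )
    manager = LandmarkManager()
    all_lm, top_lm = manager.generate_landmarks_list((48.8584, 2.2945), prefs)
    print(f"{len(all_lm)} landmarks fetched, keeping the {len(top_lm)} most important")
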
    def count_elements_close_to(self, coordinates: tuple[float, float]) -> int:
    def set_landmark_score(self, landmark: Landmark, landmarktype: str, preference_level: int) :
        """
        Count the number of OpenStreetMap elements (nodes, ways, relations) within a specified radius of the given location.
        Calculate and set the attractiveness score for a given landmark.

        This function constructs a bounding box around the specified coordinates based on the radius. It then queries
        OpenStreetMap data to count the number of elements within that bounding box.
        This method evaluates the landmark's attractiveness based on its properties
        (number of tags, presence of Wikipedia URL, image, website, and whether it's
        a place of worship) and adjusts the score using the user's preference level.

        Args:
            coordinates (tuple[float, float]): The latitude and longitude of the location to search around.

        Returns:
            int: The number of elements (nodes, ways, relations) within the specified radius. Returns 0 if no elements
                 are found or if an error occurs during the query.
            landmark (Landmark): The landmark object to score.
            landmarktype (str): The type of the landmark (currently unused).
            preference_level (int): The user's preference level for this landmark type.
        """

        lat = coordinates[0]
        lon = coordinates[1]
        score = landmark.n_tags**self.tag_exponent
        if landmark.wiki_url :
            score *= self.wikipedia_bonus
        if landmark.image_url :
            score *= self.image_bonus
        if landmark.website_url :
            score *= self.wikipedia_bonus
        if landmark.is_place_of_worship :
            score *= self.church_coeff
        if landmark.is_viewpoint :
            score *= self.viewpoint_bonus
        if landmarktype == 'nature' :
            score *= self.nature_coeff

        radius = self.radius_close_to

        alpha = (180 * radius) / (6371000 * math.pi)
        bbox = {'latLower':lat-alpha,'lonLower':lon-alpha,'latHigher':lat+alpha,'lonHigher': lon+alpha}

        # Build the query to find elements within the radius
        radius_query = overpassQueryBuilder(
            bbox=[bbox['latLower'],
                  bbox['lonLower'],
                  bbox['latHigher'],
                  bbox['lonHigher']],
            elementType=['node', 'way', 'relation']
        )

        try:
            radius_result = self.overpass.query(radius_query)
            N_elem = radius_result.countWays() + radius_result.countRelations()
            self.logger.debug(f"There are {N_elem} ways/relations within 50m")
            if N_elem is None:
                return 0
            return N_elem
        except:
            return 0
        landmark.attractiveness = int(score * preference_level * 2)

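To make the new multiplicative scoring concrete, here is a small worked example of what set_landmark_score computes. The coefficient values are invented for illustration; the real ones live in the parameters file at LANDMARK_PARAMETERS_PATH.

    # Invented coefficients -- the repo reads these from its parameters YAML
    tag_exponent, wikipedia_bonus, image_bonus, church_coeff = 1.15, 1.3, 1.1, 0.6

    n_tags, preference_level = 20, 5
    score = n_tags**tag_exponent            # 20**1.15 ~ 31.3
    score *= wikipedia_bonus                # has a wiki_url         -> ~40.7
    score *= image_bonus                    # has an image_url       -> ~44.8
    score *= church_coeff                   # is a place of worship  -> ~26.9
    attractiveness = int(score * preference_level * 2)    # -> 268
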
    # def create_bbox(self, coordinates: tuple[float, float], reachable_bbox_side: int) -> tuple[float, float, float, float]:
    #     """
    #     Create a bounding box around the given coordinates.
    #
    #     Args:
    #         coordinates (tuple[float, float]): The latitude and longitude of the center of the bounding box.
    #         reachable_bbox_side (int): The side length of the bounding box in meters.
    #
    #     Returns:
    #         tuple[float, float, float, float]: The minimum latitude, minimum longitude, maximum latitude, and maximum longitude
    #                                            defining the bounding box.
    #     """
    #
    #     # Half the side length in m (since it's a square bbox)
    #     half_side_length_m = reachable_bbox_side / 2
    #
    #     return tuple((f"around:{half_side_length_m}", str(coordinates[0]), str(coordinates[1])))


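The commented-out helper above moved to .utils as create_bbox, now taking a radius directly instead of a full side length (the call sites pass create_bbox(center, radius)). The actual utils module is not part of this diff, so the following reconstruction is an educated guess based on the old body:

    def create_bbox(coordinates: tuple[float, float], radius: int) -> tuple:
        """Build the Overpass 'around' pseudo-bbox: (around:<radius_m>, lat, lon)."""
        return tuple((f"around:{radius}", str(coordinates[0]), str(coordinates[1])))

    # create_bbox((48.8584, 2.2945), 300) -> ('around:300', '48.8584', '2.2945')
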
    def fetch_landmarks(self, bbox: tuple, amenity_selector: dict, landmarktype: str, score_function: callable) -> list[Landmark]:
    def fetch_landmarks(self, bbox: tuple, amenity_selector: dict, landmarktype: str, preference_level: int) -> list[Landmark]:
        """
        Fetches landmarks of a specified type from OpenStreetMap (OSM) within a bounding box centered on given coordinates.

@@ -188,7 +162,6 @@ class LandmarkManager:
            bbox (tuple[float, float, float, float]): The bounding box coordinates (around:radius, center_lat, center_lon).
            amenity_selector (dict): The Overpass API query selector for the desired landmark type.
            landmarktype (str): The type of the landmark (e.g., 'sightseeing', 'nature', 'shopping').
            score_function (callable): The function to compute the score of the landmark based on its attributes.

        Returns:
            list[Landmark]: A list of Landmark objects that were fetched and filtered based on the provided criteria.
@@ -197,173 +170,131 @@ class LandmarkManager:
            - Landmarks are fetched using Overpass API queries.
            - Selectors are translated from the dictionary to the Overpass query format. (e.g., 'amenity'='place_of_worship')
            - Landmarks are filtered based on various conditions including tags and type.
            - Scores are assigned to landmarks based on their attributes and surrounding elements.
        """
        return_list = []

        if landmarktype == 'nature' : query_conditions = []
        if landmarktype == 'nature' : query_conditions = None
        else : query_conditions = ['count_tags()>5']

        # caution, when applying a list of selectors, overpass will search for elements that match ALL selectors simultaneously
        # we need to split the selectors into separate queries and merge the results
        for sel in dict_to_selector_list(amenity_selector):
            self.logger.debug(f"Current selector: {sel}")
            # self.logger.debug(f"Current selector: {sel}")

            # query_conditions = ['count_tags()>5']
            # if landmarktype == 'shopping' : # use this later for shopping clusters
            #     element_types = ['node']
            element_types = ['way', 'relation']
            osm_types = ['way', 'relation']

            if 'viewpoint' in sel :
                query_conditions = []
                element_types.append('node')

            query = overpassQueryBuilder(
                bbox = bbox,
                elementType = element_types,
                # selector can in principle be a list already,
                # but it generates the intersection of the queries
                # we want the union
                selector = sel,
                conditions = query_conditions, # except for nature....
                includeCenter = True,
                out = 'center'
            )
            self.logger.debug(f"Query: {query}")
                query_conditions = None
                osm_types.append('node')

            # Send the overpass query
            try:
                result = self.overpass.query(query)
                result = self.overpass.send_query(
                    bbox = bbox,
                    osm_types = osm_types,
                    selector = sel,
                    conditions = query_conditions, # except for nature....
                    out = 'ids center tags'
                )
            except Exception as e:
                self.logger.error(f"Error fetching landmarks: {e}")
                continue

            for elem in result.elements():
            return_list += self._to_landmarks(result, landmarktype, preference_level)

                name = elem.tag('name')
                location = (elem.centerLat(), elem.centerLon())
                osm_type = elem.type()    # Add type: 'way' or 'relation'
                osm_id = elem.id()        # Add OSM id

                # TODO: exclude these from the get go
                # handle imprecise and no-name locations
                if name is None or location[0] is None:
                    if osm_type == 'node' and 'viewpoint' in elem.tags().values():
                        name = 'Viewpoint'
                        name_en = 'Viewpoint'
                        location = (elem.lat(), elem.lon())
                    else :
                        continue

                # skip if part of another building
                if 'building:part' in elem.tags().keys() and elem.tag('building:part') == 'yes':
                    continue

                elem_type = landmarktype    # Add the landmark type, e.g. 'sightseeing'
                n_tags = len(elem.tags().keys())    # Add number of tags
                score = n_tags**self.tag_exponent   # Add score
                website_url = None
                image_url = None
                name_en = None

                # Adjust scoring, browse through tag keys
                skip = False
                for tag_key in elem.tags().keys():
                    if "pay" in tag_key:
                        # payment options are misleading and should not count for the scoring.
                        score += self.pay_bonus

                    if "disused" in tag_key:
                        # skip disused amenities
                        skip = True
                        break

                    if "boundary" in tag_key:
                        # skip "areas" like administrative boundaries and stuff
                        skip = True
                        break

                    if "historic" in tag_key and elem.tag('historic') in ['manor', 'optical_telegraph', 'pound', 'shieling', 'wayside_cross']:
                        # skip useless amenities
                        skip = True
                        break

                    if "name" in tag_key :
                        score += self.name_bonus

                    if "wiki" in tag_key:
                        # wikipedia entries count more
                        score += self.wikipedia_bonus

                    if "image" in tag_key:
                        # images must count more
                        score += self.image_bonus

                    if elem_type != "nature":
                        if "leisure" in tag_key and elem.tag('leisure') == "park":
                            elem_type = "nature"

                    if landmarktype != "shopping":
                        if "shop" in tag_key:
                            skip = True
                            break

                        if tag_key == "building" and elem.tag('building') in ['retail', 'supermarket', 'parking']:
                            skip = True
                            break

                    # Extract image, website and english name
                    if tag_key in ['website', 'contact:website']:
                        website_url = elem.tag(tag_key)
                    if tag_key == 'image':
                        image_url = elem.tag('image')
                    if tag_key =='name:en':
                        name_en = elem.tag('name:en')

                if skip:
                    continue

                # Don't visit random apartments
                if 'apartments' in elem.tags().values():
                    continue

                score = score_function(score)
                if "place_of_worship" in elem.tags().values():
                    score = score * self.church_coeff
                    duration = 10

                if 'viewpoint' in elem.tags().values() :
                    # viewpoints must count more
                    score += self.viewpoint_bonus
                    duration = 10

                elif "museum" in elem.tags().values() or "aquarium" in elem.tags().values() or "planetarium" in elem.tags().values():
                    duration = 60

                else:
                    duration = 5

                # finally create our own landmark object
                landmark = Landmark(
                    name = name,
                    type = elem_type,
                    location = location,
                    osm_type = osm_type,
                    osm_id = osm_id,
                    attractiveness = int(score),
                    must_do = False,
                    n_tags = int(n_tags),
                    duration = int(duration),
                    name_en = name_en,
                    image_url = image_url,
                    website_url = website_url
                )
                return_list.append(landmark)

        self.logger.debug(f"Fetched {len(return_list)} landmarks of type {landmarktype} in {bbox}")
        # self.logger.debug(f"Fetched {len(return_list)} landmarks of type {landmarktype} in {bbox}")

        return return_list


    def _to_landmarks(self, elements: list, landmarktype, preference_level) -> list[Landmark]:
        """
        Parse the Overpass API result and extract landmarks.

        This method processes the JSON elements returned by the Overpass API and
        extracts landmarks of types 'node', 'way', and 'relation'. It retrieves
        relevant information such as name, coordinates, and tags, and converts them
        into Landmark objects.

        Args:
            elements (list): The elements of the JSON response from the Overpass API.
            landmarktype (str): The type of landmark (e.g., 'sightseeing', 'nature', 'shopping').
            preference_level (int): The user's preference level for this landmark type.

        Returns:
            list[Landmark]: A list of Landmark objects extracted from the JSON data.
        """
        if elements is None :
            return []

        landmarks = []
        for elem in elements:
            osm_type = elem.get('type')

            id, coords, name = get_base_info(elem, osm_type, with_name=True)

            if name is None or coords is None :
                continue

            tags = elem.get('tags')

            # Convert this to Landmark object
            landmark = Landmark(name=name,
                                type=landmarktype,
                                location=coords,
                                osm_id=id,
                                osm_type=osm_type,
                                attractiveness=0,
                                n_tags=len(tags))

            # self.logger.debug('added landmark.')

            # Browse through tags to add information to landmark.
            for key, value in tags.items():

                # Skip this landmark if not suitable.
                if key == 'building:part' and value == 'yes' :
                    break
                if 'disused:' in key :
                    break
                if 'boundary:' in key :
                    break
                if 'shop' in key and landmarktype != 'shopping' :
                    break
                # if value == 'apartments' :
                #     break

                # Fill in the other attributes.
                if key == 'image' :
                    landmark.image_url = value
                if key == 'website' :
                    landmark.website_url = value
                if value == 'place_of_worship' :
                    landmark.is_place_of_worship = True
                if key == 'wikipedia' :
                    landmark.wiki_url = value
                if key == 'name:en' :
                    landmark.name_en = value
                if 'building:' in key or 'pay' in key :
                    landmark.n_tags -= 1

                # Set the duration.
                if value in ['museum', 'aquarium', 'planetarium'] :
                    landmark.duration = 60
                elif value == 'viewpoint' :
                    landmark.is_viewpoint = True
                    landmark.duration = 10
                elif value == 'cathedral' :
                    landmark.is_place_of_worship = False
                    landmark.duration = 10

            else:
                # for/else: this branch only runs when the tag loop finished without a break,
                # so landmarks rejected above are never scored or appended.
                self.set_landmark_score(landmark, landmarktype, preference_level)
                landmarks.append(landmark)

            continue

        return landmarks

def dict_to_selector_list(d: dict) -> list:
    """
    Convert a dictionary of key-value pairs to a list of Overpass query strings.
@@ -376,10 +307,10 @@ def dict_to_selector_list(d: dict) -> list:
    """
    return_list = []
    for key, value in d.items():
        if type(value) == list:
        if isinstance(value, list):
            val = '|'.join(value)
            return_list.append(f'{key}~"^({val})$"')
        elif type(value) == str and len(value) == 0:
        elif isinstance(value, str) and len(value) == 0:
            return_list.append(f'{key}')
        else:
            return_list.append(f'{key}={value}')
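Since Overpass ANDs a list of selectors together (hence the query-per-selector loop above), it helps to see what dict_to_selector_list actually emits. With an illustrative amenity dictionary:

    amenity_selector = {
        'amenity': ['place_of_worship', 'fountain'],   # illustrative entries
        'tourism': 'viewpoint',
        'historic': '',
    }
    print(dict_to_selector_list(amenity_selector))
    # ['amenity~"^(place_of_worship|fountain)$"', 'tourism=viewpoint', 'historic']
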
@@ -1,524 +0,0 @@
import yaml, logging
import numpy as np

from scipy.optimize import linprog
from collections import defaultdict, deque

from ..structs.landmark import Landmark
from .get_time_separation import get_time
from ..constants import OPTIMIZER_PARAMETERS_PATH




class Optimizer:

    logger = logging.getLogger(__name__)

    detour: int = None              # accepted max detour time (in minutes)
    detour_factor: float            # detour factor of straight line vs real distance in cities
    average_walking_speed: float    # average walking speed of adult
    max_landmarks: int              # max number of landmarks to visit
    overshoot: float                # overshoot to allow maxtime to overflow. Optimizer is a bit restrictive


    def __init__(self) :

        # load parameters from file
        with OPTIMIZER_PARAMETERS_PATH.open('r') as f:
            parameters = yaml.safe_load(f)
            self.detour_factor = parameters['detour_factor']
            self.average_walking_speed = parameters['average_walking_speed']
            self.max_landmarks = parameters['max_landmarks']
            self.overshoot = parameters['overshoot']



    # Prevent the use of a particular solution
    def prevent_config(self, resx):
        """
        Prevent the use of a particular solution by adding constraints to the optimization.

        Args:
            resx (list[float]): List of edge weights.

        Returns:
            tuple[list[int], list[int]]: A tuple containing a new row for the constraint matrix and a new value for the upper bound vector.
        """

        for i, elem in enumerate(resx):
            resx[i] = round(elem)

        N = len(resx)          # Number of edges
        L = int(np.sqrt(N))    # Number of landmarks

        nonzeroind = np.nonzero(resx)[0]    # the return is a little funky so I use the [0]
        nonzero_tup = np.unravel_index(nonzeroind, (L,L))

        ind_a = nonzero_tup[0].tolist()
        vertices_visited = ind_a
        vertices_visited.remove(0)

        ones = [1]*L
        h = [0]*N
        for i in range(L) :
            if i in vertices_visited :
                h[i*L:i*L+L] = ones

        return h, [len(vertices_visited)-1]


    # Prevents the creation of the same circle (both directions)
    def prevent_circle(self, circle_vertices: list, L: int) :
        """
        Prevent circular paths by adding constraints to the optimization.

        Args:
            circle_vertices (list): List of vertices forming a circle.
            L (int): Number of landmarks.

        Returns:
            tuple[np.ndarray, list[int]]: A tuple containing a new row for the constraint matrix and a new value for the upper bound vector.
        """

        l1 = [0]*L*L
        l2 = [0]*L*L
        for i, node in enumerate(circle_vertices[:-1]) :
            next = circle_vertices[i+1]

            l1[node*L + next] = 1
            l2[next*L + node] = 1

        s = circle_vertices[0]
        g = circle_vertices[-1]

        l1[g*L + s] = 1
        l2[s*L + g] = 1

        return np.vstack((l1, l2)), [0, 0]


    def is_connected(self, resx) :
        """
        Determine the order of visits and detect any circular paths in the given configuration.

        Args:
            resx (list): List of edge weights.

        Returns:
            tuple[list[int], Optional[list[list[int]]]]: A tuple containing the visit order and a list of any detected circles.
        """

        # first round the results to have only 0-1 values
        for i, elem in enumerate(resx):
            resx[i] = round(elem)

        N = len(resx)          # length of res
        L = int(np.sqrt(N))    # number of landmarks. CAST INTO INT but should not be a problem because N = L**2 by def.

        nonzeroind = np.nonzero(resx)[0]    # the return is a little funny so I use the [0]
        nonzero_tup = np.unravel_index(nonzeroind, (L,L))

        ind_a = nonzero_tup[0].tolist()
        ind_b = nonzero_tup[1].tolist()

        # Step 1: Create a graph representation
        graph = defaultdict(list)
        for a, b in zip(ind_a, ind_b):
            graph[a].append(b)

        # Step 2: Function to perform BFS/DFS to extract journeys
        def get_journey(start):
            journey_nodes = []
            visited = set()
            stack = deque([start])

            while stack:
                node = stack.pop()
                if node not in visited:
                    visited.add(node)
                    journey_nodes.append(node)
                    for neighbor in graph[node]:
                        if neighbor not in visited:
                            stack.append(neighbor)

            return journey_nodes

        # Step 3: Extract all journeys
        all_journeys_nodes = []
        visited_nodes = set()

        for node in ind_a:
            if node not in visited_nodes:
                journey_nodes = get_journey(node)
                all_journeys_nodes.append(journey_nodes)
                visited_nodes.update(journey_nodes)

        for l in all_journeys_nodes :
            if 0 in l :
                order = l
                all_journeys_nodes.remove(l)
                break

        if len(all_journeys_nodes) == 0 :
            return order, None

        return order, all_journeys_nodes


    def init_ub_dist(self, landmarks: list[Landmark], max_time: int):
        """
        Initialize the objective function coefficients and inequality constraints for the optimization problem.

        This function computes the distances between all landmarks and stores their attractiveness to maximize sightseeing.
        The goal is to maximize the objective function subject to the constraints A*x < b and A_eq*x = b_eq.

        Args:
            landmarks (list[Landmark]): List of landmarks.
            max_time (int): Maximum time of visit allowed.

        Returns:
            tuple[list[float], list[float], list[int]]: Objective function coefficients, inequality constraint coefficients, and the right-hand side of the inequality constraint.
        """

        # Objective function coefficients. a*x1 + b*x2 + c*x3 + ...
        c = []
        # Coefficients of inequality constraints (left-hand side)
        A_ub = []

        for spot1 in landmarks :
            dist_table = [0]*len(landmarks)
            c.append(-spot1.attractiveness)
            for j, spot2 in enumerate(landmarks) :
                t = get_time(spot1.location, spot2.location) + spot1.duration
                dist_table[j] = t
            closest = sorted(dist_table)[:25]
            for i, dist in enumerate(dist_table) :
                if dist not in closest :
                    dist_table[i] = 32700
            A_ub += dist_table
        c = c*len(landmarks)

        return c, A_ub, [max_time*self.overshoot]


    def respect_number(self, L, max_landmarks: int):
        """
        Generate constraints to ensure each landmark is visited only once and cap the total number of visited landmarks.

        Args:
            L (int): Number of landmarks.

        Returns:
            tuple[np.ndarray, list[int]]: Inequality constraint coefficients and the right-hand side of the inequality constraints.
        """

        ones = [1]*L
        zeros = [0]*L
        A = ones + zeros*(L-1)
        b = [1]
        for i in range(L-1) :
            h_new = zeros*i + ones + zeros*(L-1-i)
            A = np.vstack((A, h_new))
            b.append(1)

        A = np.vstack((A, ones*L))
        b.append(max_landmarks+1)

        return A, b


    # Constraint to not have d14 and d41 simultaneously. Does not prevent cyclic paths with more elements
    def break_sym(self, L):
        """
        Generate constraints to prevent simultaneous travel between two landmarks in both directions.

        Args:
            L (int): Number of landmarks.

        Returns:
            tuple[np.ndarray, list[int]]: Inequality constraint coefficients and the right-hand side of the inequality constraints.
        """

        upper_ind = np.triu_indices(L,0,L)

        up_ind_x = upper_ind[0]
        up_ind_y = upper_ind[1]

        A = [0]*L*L
        b = [1]

        for i, _ in enumerate(up_ind_x[1:]) :
            l = [0]*L*L
            if up_ind_x[i] != up_ind_y[i] :
                l[up_ind_x[i]*L + up_ind_y[i]] = 1
                l[up_ind_y[i]*L + up_ind_x[i]] = 1

                A = np.vstack((A,l))
                b.append(1)

        return A, b


    def init_eq_not_stay(self, L: int):
        """
        Generate constraints to prevent staying in the same position (e.g., removing d11, d22, d33, etc.).

        Args:
            L (int): Number of landmarks.

        Returns:
            tuple[list[np.ndarray], list[int]]: Equality constraint coefficients and the right-hand side of the equality constraints.
        """

        l = [0]*L*L

        for i in range(L) :
            for j in range(L) :
                if j == i :
                    l[j + i*L] = 1

        l = np.array(np.array(l), dtype=np.int8)

        return [l], [0]


    def respect_user_must_do(self, landmarks: list[Landmark]) :
        """
        Generate constraints to ensure that landmarks marked as 'must_do' are included in the optimization.

        Args:
            landmarks (list[Landmark]): List of landmarks, where some are marked as 'must_do'.

        Returns:
            tuple[np.ndarray, list[int]]: Inequality constraint coefficients and the right-hand side of the inequality constraints.
        """

        L = len(landmarks)
        A = [0]*L*L
        b = [0]

        for i, elem in enumerate(landmarks[1:]) :
            if elem.must_do is True and elem.name not in ['finish', 'start']:
                l = [0]*L*L
                l[i*L:i*L+L] = [1]*L    # set mandatory departures from landmarks tagged as 'must_do'

                A = np.vstack((A,l))
                b.append(1)

        return A, b


    def respect_user_must_avoid(self, landmarks: list[Landmark]) :
        """
        Generate constraints to ensure that landmarks marked as 'must_avoid' are skipped in the optimization.

        Args:
            landmarks (list[Landmark]): List of landmarks, where some are marked as 'must_avoid'.

        Returns:
            tuple[np.ndarray, list[int]]: Inequality constraint coefficients and the right-hand side of the inequality constraints.
        """

        L = len(landmarks)
        A = [0]*L*L
        b = [0]

        for i, elem in enumerate(landmarks[1:]) :
            if elem.must_avoid is True and elem.name not in ['finish', 'start']:
                l = [0]*L*L
                l[i*L:i*L+L] = [1]*L

                A = np.vstack((A,l))
                b.append(0)    # prevent departures from landmarks tagged as 'must_avoid'

        return A, b


    # Constraint to ensure start at start and finish at goal
    def respect_start_finish(self, L: int):
        """
        Generate constraints to ensure that the optimization starts at the designated start landmark and finishes at the goal landmark.

        Args:
            L (int): Number of landmarks.

        Returns:
            tuple[np.ndarray, list[int]]: Inequality constraint coefficients and the right-hand side of the inequality constraints.
        """

        l_start = [1]*L + [0]*L*(L-1)    # sets departures only for start (horizontal ones)
        l_start[L-1] = 0                 # prevents the jump from start to finish
        l_goal = [0]*L*L                 # sets arrivals only for finish (vertical ones)
        l_L = [0]*L*(L-1) + [1]*L        # prevents arrivals at start and departures from goal
        for k in range(L-1) :            # sets only vertical ones for goal (go to)
            l_L[k*L] = 1
            if k != 0 :
                l_goal[k*L+L-1] = 1

        A = np.vstack((l_start, l_goal))
        b = [1, 1]
        A = np.vstack((A,l_L))
        b.append(0)

        return A, b


    def respect_order(self, L: int):
        """
        Generate constraints to tie the optimization problem together and prevent stacked ones, although this does not fully prevent circles.

        Args:
            L (int): Number of landmarks.

        Returns:
            tuple[np.ndarray, list[int]]: Inequality constraint coefficients and the right-hand side of the inequality constraints.
        """

        A = [0]*L*L
        b = [0]
        for i in range(L-1) :    # Prevent stacked ones
            if i == 0 or i == L-1:    # Don't touch start or finish
                continue
            else :
                l = [0]*L
                l[i] = -1
                l = l*L
                for j in range(L) :
                    l[i*L + j] = 1

                A = np.vstack((A,l))
                b.append(0)

        return A, b


    def link_list(self, order: list[int], landmarks: list[Landmark])->list[Landmark] :
        """
        Compute the time to reach from each landmark to the next and create a list of landmarks with updated travel times.

        Args:
            order (list[int]): List of indices representing the order of landmarks to visit.
            landmarks (list[Landmark]): List of all landmarks.

        Returns:
            list[Landmark]: The updated linked list of landmarks with travel times.
        """

        L = []
        j = 0
        while j < len(order)-1 :
            # get landmarks involved
            elem = landmarks[order[j]]
            next = landmarks[order[j+1]]

            # get attributes
            elem.time_to_reach_next = get_time(elem.location, next.location)
            elem.must_do = True
            elem.location = (round(elem.location[0], 5), round(elem.location[1], 5))
            elem.next_uuid = next.uuid
            L.append(elem)
            j += 1

        next.location = (round(next.location[0], 5), round(next.location[1], 5))
        next.must_do = True
        L.append(next)

        return L


    # Main optimization pipeline
def solve_optimization(
|
||||
self,
|
||||
max_time: int,
|
||||
landmarks: list[Landmark],
|
||||
max_landmarks: int = None
|
||||
) -> list[Landmark]:
|
||||
"""
|
||||
Main optimization pipeline to solve the landmark visiting problem.
|
||||
|
||||
This method sets up and solves a linear programming problem with constraints to find an optimal tour of landmarks,
|
||||
considering user-defined must-visit landmarks, start and finish points, and ensuring no cycles are present.
|
||||
|
||||
Args:
|
||||
max_time (int): Maximum time allowed for the tour in minutes.
|
||||
landmarks (list[Landmark]): List of landmarks to visit.
|
||||
max_landmarks (int): Maximum number of landmarks visited
|
||||
Returns:
|
||||
list[Landmark]: The optimized tour of landmarks with updated travel times, or None if no valid solution is found.
|
||||
"""
|
||||
if max_landmarks is None :
|
||||
max_landmarks = self.max_landmarks
|
||||
|
||||
L = len(landmarks)
|
||||
|
||||
# SET CONSTRAINTS FOR INEQUALITY
|
||||
c, A_ub, b_ub = self.init_ub_dist(landmarks, max_time) # Add the distances from each landmark to the other
|
||||
A, b = self.respect_number(L, max_landmarks) # Respect max number of visits (no more possible stops than landmarks).
|
||||
A_ub = np.vstack((A_ub, A), dtype=np.int16)
|
||||
b_ub += b
|
||||
A, b = self.break_sym(L) # break the 'zig-zag' symmetry
|
||||
A_ub = np.vstack((A_ub, A), dtype=np.int16)
|
||||
b_ub += b
|
||||
|
||||
|
||||
# SET CONSTRAINTS FOR EQUALITY
|
||||
A_eq, b_eq = self.init_eq_not_stay(L) # Force solution not to stay in same place
|
||||
A, b = self.respect_user_must_do(landmarks) # Check if there are user_defined must_see. Also takes care of start/goal
|
||||
A_eq = np.vstack((A_eq, A), dtype=np.int8)
|
||||
b_eq += b
|
||||
A, b = self.respect_user_must_avoid(landmarks) # Check if there are user_defined must_see. Also takes care of start/goal
|
||||
A_eq = np.vstack((A_eq, A), dtype=np.int8)
|
||||
b_eq += b
|
||||
A, b = self.respect_start_finish(L) # Force start and finish positions
|
||||
A_eq = np.vstack((A_eq, A), dtype=np.int8)
|
||||
b_eq += b
|
||||
A, b = self.respect_order(L) # Respect order of visit (only works when max_time is limiting factor)
|
||||
A_eq = np.vstack((A_eq, A), dtype=np.int8)
|
||||
b_eq += b
|
||||
|
||||
# SET BOUNDS FOR DECISION VARIABLE (x can only be 0 or 1)
|
||||
x_bounds = [(0, 1)]*L*L
|
||||
|
||||
# Solve linear programming problem
|
||||
res = linprog(c, A_ub=A_ub, b_ub=b_ub, A_eq=A_eq, b_eq = b_eq, bounds=x_bounds, method='highs', integrality=3)
|
||||
|
||||
# Raise error if no solution is found
|
||||
if not res.success :
|
||||
raise ArithmeticError("No solution could be found, the problem is overconstrained. Try with a longer trip (>30 minutes).")
|
||||
|
||||
# If there is a solution, we're good to go, just check for connectiveness
|
||||
order, circles = self.is_connected(res.x)
|
||||
#nodes, edges = is_connected(res.x)
|
||||
i = 0
|
||||
timeout = 80
|
||||
while circles is not None and i < timeout:
|
||||
A, b = self.prevent_config(res.x)
|
||||
A_ub = np.vstack((A_ub, A))
|
||||
b_ub += b
|
||||
#A_ub, b_ub = prevent_circle(order, len(landmarks), A_ub, b_ub)
|
||||
for circle in circles :
|
||||
A, b = self.prevent_circle(circle, L)
|
||||
A_eq = np.vstack((A_eq, A))
|
||||
b_eq += b
|
||||
res = linprog(c, A_ub=A_ub, b_ub=b_ub, A_eq=A_eq, b_eq = b_eq, bounds=x_bounds, method='highs', integrality=3)
|
||||
if not res.success :
|
||||
raise ArithmeticError("Solving failed because of overconstrained problem")
|
||||
return None
|
||||
order, circles = self.is_connected(res.x)
|
||||
#nodes, edges = is_connected(res.x)
|
||||
if circles is None :
|
||||
break
|
||||
# print(i)
|
||||
i += 1
|
||||
|
||||
if i == timeout :
|
||||
raise TimeoutError(f"Optimization took too long. No solution found after {timeout} iterations.")
|
||||
|
||||
#sort the landmarks in the order of the solution
|
||||
tour = [landmarks[i] for i in order]
|
||||
|
||||
self.logger.debug(f"Re-optimized {i} times, score: {int(-res.fun)}")
|
||||
return tour
|
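For readers unfamiliar with this SciPy call pattern: linprog with method='highs' and an integrality specification solves a mixed-integer LP, and the (0, 1) bounds make each decision variable effectively binary; the loop above then adds cuts whenever is_connected reports disconnected circles, which is the classic iterative subtour-elimination approach. A minimal, self-contained sketch of the call pattern (illustrative numbers only, not the project's constraint matrices):

import numpy as np
from scipy.optimize import linprog

c = np.array([-3, -2, -1])       # maximize 3*x0 + 2*x1 + 1*x2, negated because linprog minimizes
A_ub = np.array([[1, 1, 1]])     # pick at most two items
b_ub = [2]
res = linprog(c, A_ub=A_ub, b_ub=b_ub,
              bounds=[(0, 1)] * 3,              # each variable confined to [0, 1]
              method='highs', integrality=1)    # integrality=1: plain integer variables
print(res.x, -res.fun)           # -> [1. 1. 0.] 5.0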
@@ -1,3 +1,4 @@
"""Helper function to return only the major landmarks from a large list."""
from ..structs.landmark import Landmark


def take_most_important(landmarks: list[Landmark], n_important) -> list[Landmark]:
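Only the signature survives in this hunk; for context, a minimal sketch of what such a helper might do (the attractiveness sorting key is an assumption, the real scoring may differ):

def take_most_important_sketch(landmarks, n_important):
    # keep the n_important highest-scoring landmarks (scoring attribute assumed)
    return sorted(landmarks, key=lambda lm: lm.attractiveness, reverse=True)[:n_important]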
@@ -1,16 +1,33 @@
import logging, yaml
from OSMPythonTools.overpass import Overpass, overpassQueryBuilder
from OSMPythonTools.cachingStrategy import CachingStrategy, JSON
"""Module for finding public toilets around given coordinates."""
import logging

from ..overpass.overpass import Overpass, get_base_info
from ..structs.landmark import Toilets
from ..constants import LANDMARK_PARAMETERS_PATH, OSM_CACHE_DIR
from .utils import create_bbox


# silence the overpass logger
logging.getLogger('OSMPythonTools').setLevel(level=logging.CRITICAL)
logging.getLogger('Overpass').setLevel(level=logging.CRITICAL)


class ToiletsManager:
    """
    Manages the process of fetching and caching toilet information from
    OpenStreetMap (OSM) based on a specified location and radius.

    This class is responsible for:
    - Fetching toilet data from OSM using Overpass API around a given set of
      coordinates (latitude, longitude).
    - Using a caching strategy to optimize requests by saving and retrieving
      data from a local cache.
    - Logging important events and errors related to data fetching.

    Attributes:
        logger (logging.Logger): Logger for the class to capture events.
        location (tuple[float, float]): Latitude and longitude representing the
            location to search around.
        radius (int): The search radius in meters for finding nearby toilets.
        overpass (Overpass): The Overpass API instance used to query OSM.
    """
    logger = logging.getLogger(__name__)

    location: tuple[float, float]
@@ -21,57 +38,84 @@ class ToiletsManager:
        self.radius = radius
        self.location = location

        # Setup the caching in the Overpass class.
        self.overpass = Overpass()
        CachingStrategy.use(JSON, cacheDir=OSM_CACHE_DIR)


    def generate_toilet_list(self) -> list[Toilets] :
        """
        Generates a list of toilet locations by fetching data from OpenStreetMap (OSM)
        around the given coordinates stored in `self.location`.

        # Create a bbox using the around technique
        bbox = tuple((f"around:{self.radius}", str(self.location[0]), str(self.location[1])))
        Returns:
            list[Toilets]: A list of `Toilets` objects containing detailed information
            about the toilets found around the given coordinates.
        """
        bbox = create_bbox(self.location, self.radius)
        osm_types = ['node', 'way', 'relation']
        toilets_list = []

        query = overpassQueryBuilder(
            bbox = bbox,
            elementType = ['node', 'way', 'relation'],
            # selector can in principle be a list already,
            # but it generates the intersection of the queries
            # we want the union
            selector = ['"amenity"="toilets"'],
            includeCenter = True,
            out = 'center'
        )
        self.logger.debug(f"Query: {query}")

        query = Overpass.build_query(
            bbox = bbox,
            osm_types = osm_types,
            selector = '"amenity"="toilets"',
            out = 'ids center tags'
        )
        try:
            result = self.overpass.query(query)
            result = self.overpass.fetch_data_from_api(query_str=query)
        except Exception as e:
            self.logger.error(f"Error fetching landmarks: {e}")
            return None
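For orientation, a raw Overpass QL request with the same intent as the builder call above might look like the following; the exact string emitted by the project's build_query helper is an assumption here, as is the bounding box:

# Assumed shape of the generated request (Overpass QL), shown as a Python string:
query = """
[out:json];
(
  node["amenity"="toilets"](48.8521,2.3454,48.8611,2.3590);
  way["amenity"="toilets"](48.8521,2.3454,48.8611,2.3590);
  relation["amenity"="toilets"](48.8521,2.3454,48.8611,2.3590);
);
out tags center;
"""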
        for elem in result.elements():
            location = (elem.centerLat(), elem.centerLon())
        toilets_list = self.to_toilets(result)

            # handle imprecise and no-name locations
            if location[0] is None:
                location = (elem.lat(), elem.lon())
            else :
        return toilets_list


    def to_toilets(self, elements: list) -> list[Toilets]:
        """
        Parse the Overpass API result and extract toilets.

        This method processes the JSON elements returned by the Overpass API and
        extracts toilets of types 'node', 'way', and 'relation'. It retrieves
        relevant information such as coordinates and tags, and converts them
        into Toilets objects.

        Args:
            elements (list): The OSM elements from the JSON response of the Overpass API.

        Returns:
            list[Toilets]: A list of Toilets objects extracted from the JSON data.
        """
        if elements is None :
            return []

        toilets_list = []
        for elem in elements:
            osm_type = elem.get('type')
            # Get coordinates and append them to the points list
            _, coords = get_base_info(elem, osm_type)
            if coords is None :
                continue

            toilets = Toilets(location=location)
            if 'wheelchair' in elem.tags().keys() and elem.tag('wheelchair') == 'yes':
            toilets = Toilets(location=coords)

            # Extract tags as a dictionary
            tags = elem.get('tags')

            if 'wheelchair' in tags.keys() and tags['wheelchair'] == 'yes':
                toilets.wheelchair = True

            if 'changing_table' in elem.tags().keys() and elem.tag('changing_table') == 'yes':
            if 'changing_table' in tags.keys() and tags['changing_table'] == 'yes':
                toilets.changing_table = True

            if 'fee' in elem.tags().keys() and elem.tag('fee') == 'yes':
            if 'fee' in tags.keys() and tags['fee'] == 'yes':
                toilets.fee = True

            if 'opening_hours' in elem.tags().keys() :
                toilets.opening_hours = elem.tag('opening_hours')
            if 'opening_hours' in tags.keys() :
                toilets.opening_hours = tags['opening_hours']

            toilets_list.append(toilets)
backend/src/utils/utils.py (new file, 27 lines)
@@ -0,0 +1,27 @@
"""Various helper functions"""
import math as m


def create_bbox(coords: tuple[float, float], radius: int):
    """
    Create a bounding box around the given coordinates.

    Args:
        coords (tuple[float, float]): The latitude and longitude of the center of the bounding box.
        radius (int): The half-side length of the bounding box in meters.

    Returns:
        tuple[float, float, float, float]: The minimum latitude, minimum longitude, maximum latitude, and maximum longitude
        defining the bounding box.
    """
    # Earth's radius in meters
    R = 6378137
    lat, lon = coords
    d_lat = radius / R
    d_lon = radius / (R * m.cos(m.pi * lat / 180))

    lat_min = lat - d_lat * 180 / m.pi
    lat_max = lat + d_lat * 180 / m.pi
    lon_min = lon - d_lon * 180 / m.pi
    lon_max = lon + d_lon * 180 / m.pi

    return (lat_min, lon_min, lat_max, lon_max)
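The conversion divides the metre radius by Earth's radius to get an angle in radians, widening the longitude span by 1/cos(latitude) to compensate for meridians converging towards the poles. A quick numeric check of the function above:

# 500 m half-side box around central Paris
print(create_bbox((48.8566, 2.3522), 500))
# -> approximately (48.8521, 2.3454, 48.8611, 2.3590)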
@@ -8,7 +8,7 @@ jobs:
    runs-on: macos-latest
    env:
      # $BUNDLE_GEMFILE must be set at the job level, so it is set for all steps
      BUNDLE_GEMFILE: ios/Gemfile
      BUNDLE_GEMFILE: ${{ github.workspace }}/ios/Gemfile
    steps:
      - uses: actions/checkout@v4
frontend/assets/confused.svg (new file, 427 lines; SVG image diff suppressed, 40 KiB)
@@ -1,10 +1,12 @@
import 'package:anyway/utils/get_first_page.dart';
import 'package:anyway/utils/load_trips.dart';
import 'package:flutter/material.dart';
import 'package:anyway/constants.dart';
import 'package:anyway/layout.dart';

void main() => runApp(const App());

final GlobalKey<ScaffoldMessengerState> rootScaffoldMessengerKey = GlobalKey<ScaffoldMessengerState>();
final SavedTrips savedTrips = SavedTrips();

class App extends StatelessWidget {
  const App({super.key});
@@ -14,7 +16,7 @@ class App extends StatelessWidget {
  Widget build(BuildContext context) {
    return MaterialApp(
      title: APP_NAME,
      home: BasePage(mainScreen: "map"),
      home: getFirstPage(),
      theme: APP_THEME,
      scaffoldMessengerKey: rootScaffoldMessengerKey
    );
@@ -5,7 +5,6 @@ import 'package:flutter/material.dart';
import 'package:anyway/modules/landmark_card.dart';
import 'package:anyway/structs/landmark.dart';
import 'package:anyway/structs/trip.dart';
import 'package:anyway/main.dart';
@@ -25,30 +24,7 @@ List<Widget> landmarksList(Trip trip) {

  for (Landmark landmark in trip.landmarks) {
    children.add(
      Dismissible(
        key: ValueKey<int>(landmark.hashCode),
        child: LandmarkCard(landmark),
        dismissThresholds: {DismissDirection.endToStart: 0.95, DismissDirection.startToEnd: 0.95},
        onDismissed: (direction) {
          log('Removing ${landmark.name}');
          trip.removeLandmark(landmark);

          rootScaffoldMessengerKey.currentState!.showSnackBar(
            SnackBar(content: Text("We won't show ${landmark.name} again"))
          );
        },

        background: Container(color: Colors.red),
        secondaryBackground: Container(
          color: Colors.red,
          child: Icon(
            Icons.delete,
            color: Colors.white,
          ),
          padding: EdgeInsets.all(15),
          alignment: Alignment.centerRight,
        ),
      )
      LandmarkCard(landmark, trip),
    );

    if (landmark.next != null) {
@@ -1,9 +1,20 @@
import 'package:anyway/constants.dart';
import 'package:flutter/material.dart';
import 'package:auto_size_text/auto_size_text.dart';

import 'package:anyway/structs/trip.dart';
import 'package:anyway/pages/current_trip.dart';


final List<String> statusTexts = [
  'Parsing your preferences...',
  'Finding the best places...',
  'Crunching the numbers...',
  'Calculating the best route...',
  'Making sure you have a great time...',
];


class CurrentTripLoadingIndicator extends StatefulWidget {
  final Trip trip;
  const CurrentTripLoadingIndicator({
@@ -15,46 +26,137 @@ class CurrentTripLoadingIndicator extends StatefulWidget {
  State<CurrentTripLoadingIndicator> createState() => _CurrentTripLoadingIndicatorState();
}


class _CurrentTripLoadingIndicatorState extends State<CurrentTripLoadingIndicator> {
  @override
  Widget build(BuildContext context) => Center(
    child: FutureBuilder(
      future: widget.trip.cityName,
      builder: (BuildContext context, AsyncSnapshot<String> snapshot) {
        Widget greeter;
        Widget loadingIndicator = const Padding(
          padding: EdgeInsets.only(top: 10),
          child: CircularProgressIndicator()
        );

        if (snapshot.hasData) {
          greeter = AutoSizeText(
            maxLines: 1,
            'Generating your trip to ${snapshot.data}...',
            style: greeterStyle,
          );
        } else if (snapshot.hasError) {
          // the exact error is shown in the central part of the trip overview. No need to show it here
          greeter = AutoSizeText(
            maxLines: 1,
            'Error while loading trip.',
            style: greeterStyle,
          );
        } else {
          greeter = AutoSizeText(
            maxLines: 1,
            'Generating your trip...',
            style: greeterStyle,
          );
        }
        return Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            greeter,
            loadingIndicator,
          ],
        );
      }
    )
  Widget build(BuildContext context) => Stack(
    fit: StackFit.expand,
    children: [
      // In the very center of the panel, show the greeter which tells the user that the trip is being generated
      Center(child: loadingText(widget.trip)),
      // As a gimmick, and a way to show that the app is still working, show a few loading dots
      Align(
        alignment: Alignment.bottomCenter,
        child: statusText(),
      )
    ],
  );
}
}

// automatically cycle through the greeter texts
class statusText extends StatefulWidget {
  const statusText({Key? key}) : super(key: key);

  @override
  _statusTextState createState() => _statusTextState();
}

class _statusTextState extends State<statusText> {
  int statusIndex = 0;

  @override
  void initState() {
    super.initState();
    Future.delayed(Duration(seconds: 5), () {
      setState(() {
        statusIndex = (statusIndex + 1) % statusTexts.length;
      });
    });
  }

  @override
  Widget build(BuildContext context) {
    return AutoSizeText(
      statusTexts[statusIndex],
      style: Theme.of(context).textTheme.labelSmall,
    );
  }
}


Widget loadingText(Trip trip) => FutureBuilder(
  future: trip.cityName,
  builder: (BuildContext context, AsyncSnapshot<String> snapshot) {
    Widget greeter;

    if (snapshot.hasData) {
      greeter = AnimatedGradientText(
        text: 'Creating your trip to ${snapshot.data}...',
        style: greeterStyle,
      );
    } else if (snapshot.hasError) {
      // the exact error is shown in the central part of the trip overview. No need to show it here
      greeter = AnimatedGradientText(
        text: 'Error while loading trip.',
        style: greeterStyle,
      );
    } else {
      greeter = AnimatedGradientText(
        text: 'Creating your trip...',
        style: greeterStyle,
      );
    }
    return greeter;
  }
);

class AnimatedGradientText extends StatefulWidget {
  final String text;
  final TextStyle style;

  const AnimatedGradientText({
    Key? key,
    required this.text,
    required this.style,
  }) : super(key: key);

  @override
  _AnimatedGradientTextState createState() => _AnimatedGradientTextState();
}

class _AnimatedGradientTextState extends State<AnimatedGradientText> with SingleTickerProviderStateMixin {
  late AnimationController _controller;

  @override
  void initState() {
    super.initState();
    _controller = AnimationController(
      duration: const Duration(seconds: 1),
      vsync: this,
    )..repeat();
  }

  @override
  void dispose() {
    _controller.dispose();
    super.dispose();
  }

  @override
  Widget build(BuildContext context) {
    return AnimatedBuilder(
      animation: _controller,
      builder: (context, child) {
        return ShaderMask(
          shaderCallback: (bounds) {
            return LinearGradient(
              colors: [GRADIENT_START, GRADIENT_END, GRADIENT_START],
              stops: [
                _controller.value - 1.0,
                _controller.value,
                _controller.value + 1.0,
              ],
              tileMode: TileMode.mirror,
            ).createShader(bounds);
          },
          child: Text(
            widget.text,
            style: widget.style,
          ),
        );
      },
    );
  }
}
@@ -36,7 +36,7 @@ class _CurrentTripPanelState extends State<CurrentTripPanel> {
        child: SizedBox(
          // reuse the exact same height as the panel has when collapsed
          // this way the greeter will be centered when the panel is collapsed
          height: MediaQuery.of(context).size.height * TRIP_PANEL_MIN_HEIGHT - 20,
          height: MediaQuery.of(context).size.height * TRIP_PANEL_MIN_HEIGHT,
          child: CurrentTripErrorMessage(trip: widget.trip)
        ),
      );
@@ -46,19 +46,20 @@ class _CurrentTripPanelState extends State<CurrentTripPanel> {
        child: SizedBox(
          // reuse the exact same height as the panel has when collapsed
          // this way the greeter will be centered when the panel is collapsed
          height: MediaQuery.of(context).size.height * TRIP_PANEL_MIN_HEIGHT - 20,
          height: MediaQuery.of(context).size.height * TRIP_PANEL_MIN_HEIGHT,
          child: CurrentTripLoadingIndicator(trip: widget.trip),
        ),
      );
    } else {
      return ListView(
        controller: widget.controller,
        padding: const EdgeInsets.only(bottom: 30),
        padding: const EdgeInsets.only(top: 10, left: 10, right: 10, bottom: 30),
        children: [
          SizedBox(
            // reuse the exact same height as the panel has when collapsed
            // this way the greeter will be centered when the panel is collapsed
            height: MediaQuery.of(context).size.height * TRIP_PANEL_MIN_HEIGHT - 20,
            // note that we need to account for the padding above
            height: MediaQuery.of(context).size.height * TRIP_PANEL_MIN_HEIGHT - 10,
            child: CurrentTripGreeter(trip: widget.trip),
          ),
@@ -72,7 +73,7 @@ class _CurrentTripPanelState extends State<CurrentTripPanel> {

          const Padding(padding: EdgeInsets.only(top: 10)),

          Center(child: saveButton(widget.trip)),
          Center(child: saveButton(trip: widget.trip)),
        ],
      );
    }
@@ -3,39 +3,53 @@ import 'package:anyway/main.dart';
import 'package:anyway/structs/trip.dart';
import 'package:auto_size_text/auto_size_text.dart';
import 'package:flutter/material.dart';
import 'package:shared_preferences/shared_preferences.dart';

Widget saveButton(Trip trip) => ElevatedButton(
  onPressed: () async {
    SharedPreferences prefs = await SharedPreferences.getInstance();
    trip.toPrefs(prefs);
    rootScaffoldMessengerKey.currentState!.showSnackBar(
      SnackBar(
        content: Text('Trip saved'),
        duration: Duration(seconds: 2),
        dismissDirection: DismissDirection.horizontal

class saveButton extends StatefulWidget {
  Trip trip;
  saveButton({super.key, required this.trip});

  @override
  State<saveButton> createState() => _saveButtonState();
}

class _saveButtonState extends State<saveButton> {
  @override
  Widget build(BuildContext context) {
    return ElevatedButton(
      onPressed: () async {
        savedTrips.addTrip(widget.trip);
        // SharedPreferences prefs = await SharedPreferences.getInstance();
        // setState(() => widget.trip.toPrefs(prefs));
        rootScaffoldMessengerKey.currentState!.showSnackBar(
          SnackBar(
            content: Text('Trip saved'),
            duration: Duration(seconds: 2),
            dismissDirection: DismissDirection.horizontal
          )
        );
      },
      child: SizedBox(
        width: 100,
        child: Row(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            Icon(
              Icons.save,
            ),
            Expanded(
              child: Padding(
                padding: EdgeInsets.only(left: 10, top: 5, bottom: 5, right: 5),
                child: AutoSizeText(
                  'Save trip',
                  maxLines: 2,
                ),
              ),
            ),
          ],
        ),
      )
    );
  },
  child: SizedBox(
    width: 100,
    child: Row(
      mainAxisAlignment: MainAxisAlignment.center,
      children: [
        Icon(
          Icons.save,
        ),
        Expanded(
          child: Padding(
            padding: EdgeInsets.only(left: 10, top: 5, bottom: 5, right: 5),
            child: AutoSizeText(
              'Save trip',
              maxLines: 2,
            ),
          ),
        ),
      ],
    ),
  )
);
  }
}
frontend/lib/modules/help_dialog.dart (new file, 25 lines)
@@ -0,0 +1,25 @@
import 'package:flutter/material.dart';

Future<void> helpDialog(BuildContext context, String title, String content) {
  return showDialog<void>(
    context: context,
    builder: (BuildContext context) {
      return AlertDialog(
        title: Text(title),
        content: Text(content),
        actions: <Widget>[
          TextButton(
            style: TextButton.styleFrom(
              textStyle: Theme.of(context).textTheme.labelLarge,
            ),
            child: const Text('Got it!'),
            onPressed: () {
              Navigator.of(context).pop();
            },
          ),
        ],
      );
    },
  );
}
@@ -1,3 +1,5 @@
import 'package:anyway/main.dart';
import 'package:anyway/structs/trip.dart';
import 'package:flutter/material.dart';
import 'package:cached_network_image/cached_network_image.dart';
import 'package:url_launcher/url_launcher.dart';
@@ -6,8 +8,12 @@ import 'package:anyway/structs/landmark.dart';

class LandmarkCard extends StatefulWidget {
  final Landmark landmark;
  final Trip parentTrip;

  LandmarkCard(this.landmark);
  LandmarkCard(
    this.landmark,
    this.parentTrip,
  );

  @override
  _LandmarkCardState createState() => _LandmarkCardState();
@@ -17,110 +23,149 @@ class LandmarkCard extends StatefulWidget {
class _LandmarkCardState extends State<LandmarkCard> {
  @override
  Widget build(BuildContext context) {
    ThemeData theme = Theme.of(context);
    if (widget.landmark.type == typeStart || widget.landmark.type == typeFinish) {
      return TextButton.icon(
        onPressed: () {},
        icon: widget.landmark.type.icon,
        label: Text(widget.landmark.name),
      );
    }
    // else:
    return Container(
      height: 160,
      child: Card(
        shape: RoundedRectangleBorder(
          borderRadius: BorderRadius.circular(15.0),
        ),
        elevation: 5,
        clipBehavior: Clip.antiAliasWithSaveLayer,
        child: Row(
          crossAxisAlignment: CrossAxisAlignment.start,
          children: [
            Container( // the image on the left
              // inherit the height of the parent container
              height: double.infinity,
              // force a fixed width
              width: 160,
              child: CachedNetworkImage(
                imageUrl: widget.landmark.imageURL ?? '',
                placeholder: (context, url) => Center(child: CircularProgressIndicator()),
                errorWidget: (context, error, stackTrace) => Icon(Icons.question_mark_outlined),
                // TODO: make this a switch statement to load a placeholder if null
                // cover the whole container meaning the image will be cropped
                fit: BoxFit.cover,
              ),
            ),
            Flexible(
              child: Padding(
                padding: EdgeInsets.all(10),
                child: Column(
                  children: [
                    Row(
                      children: [
                        Flexible(
                          child: Text(
                            widget.landmark.name,
                            style: const TextStyle(
                              fontSize: 18,
                              fontWeight: FontWeight.bold,
                            ),
                            maxLines: 2,
                          ),
                        )
                      ],
                    ),
                    if (widget.landmark.nameEN != null)
                      Row(
                        children: [
                          Flexible(
                            child: Text(
                              widget.landmark.nameEN!,
                              style: const TextStyle(
                                fontSize: 16,
                              ),
                              maxLines: 1,
                            ),
                          )
                        ],
                      ),
                    SingleChildScrollView(
                      // allows the buttons to be scrolled
                      scrollDirection: Axis.horizontal,
                      child: Wrap(
                        spacing: 10,
                        // show the type, the website, and the wikipedia link as buttons/labels in a row
                        children: [
                          TextButton.icon(
                            onPressed: () {},
                            icon: widget.landmark.type.icon,
                            label: Text(widget.landmark.type.name),
                          ),
                          if (widget.landmark.duration != null && widget.landmark.duration!.inMinutes > 0)
                            TextButton.icon(
                              onPressed: () {},
                              icon: Icon(Icons.hourglass_bottom),
                              label: Text('${widget.landmark.duration!.inMinutes} minutes'),
                            ),
                          if (widget.landmark.websiteURL != null)
                            TextButton.icon(
                              onPressed: () async {
                                // open a browser with the website link
                                await launchUrl(Uri.parse(widget.landmark.websiteURL!));
                              },
                              icon: Icon(Icons.link),
                              label: Text('Website'),
                            ),
                          if (widget.landmark.wikipediaURL != null)
                            TextButton.icon(
                              onPressed: () async {
                                // open a browser with the wikipedia link
                                await launchUrl(Uri.parse(widget.landmark.wikipediaURL!));
                              },
                              icon: Icon(Icons.book),
                              label: Text('Wikipedia'),
                            ),
                        ],
                      ),
                    ),
                  ],
                ),
              ),
            ),
          ],
        // if the image is available, display it on the left side of the card, otherwise only display the text
        child: widget.landmark.imageURL != null ? splitLayout() : textLayout(),
      ),
    );
  }

  Widget splitLayout() {
    // If an image is available, display it on the left side of the card
    return Row(
      crossAxisAlignment: CrossAxisAlignment.start,
      children: [
        Container(
          // the image on the left
          width: 160,
          height: 160,

          child: CachedNetworkImage(
            imageUrl: widget.landmark.imageURL ?? '',
            placeholder: (context, url) => Center(child: CircularProgressIndicator()),
            errorWidget: (context, error, stackTrace) => Icon(Icons.question_mark_outlined),
            fit: BoxFit.cover,
          ),
        ),
        Flexible(
          child: textLayout(),
        ),
      ],
    );
  }

  Widget textLayout() {
    return Padding(
      padding: EdgeInsets.all(10),
      child: Column(
        children: [
          Row(
            children: [
              Flexible(
                child: Text(
                  widget.landmark.name,
                  style: const TextStyle(
                    fontSize: 18,
                    fontWeight: FontWeight.bold,
                  ),
                  maxLines: 2,
                ),
              )
            ],
          ),
          if (widget.landmark.nameEN != null)
            Row(
              children: [
                Flexible(
                  child: Text(
                    widget.landmark.nameEN!,
                    style: const TextStyle(
                      fontSize: 16,
                    ),
                    maxLines: 1,
                  ),
                )
              ],
            ),
          Padding(padding: EdgeInsets.only(top: 10)),
          Align(
            alignment: Alignment.centerLeft,
            child: SingleChildScrollView(
              // allows the buttons to be scrolled
              scrollDirection: Axis.horizontal,
              child: Wrap(
                spacing: 10,
                // show the type, the website, and the wikipedia link as buttons/labels in a row
                children: [
                  TextButton.icon(
                    onPressed: () {},
                    icon: widget.landmark.type.icon,
                    label: Text(widget.landmark.type.name),
                  ),
                  if (widget.landmark.duration != null && widget.landmark.duration!.inMinutes > 0)
                    TextButton.icon(
                      onPressed: () {},
                      icon: Icon(Icons.hourglass_bottom),
                      label: Text('${widget.landmark.duration!.inMinutes} minutes'),
                    ),
                  if (widget.landmark.websiteURL != null)
                    TextButton.icon(
                      onPressed: () async {
                        // open a browser with the website link
                        await launchUrl(Uri.parse(widget.landmark.websiteURL!));
                      },
                      icon: Icon(Icons.link),
                      label: Text('Website'),
                    ),
                  PopupMenuButton(
                    icon: Icon(Icons.settings),
                    style: TextButtonTheme.of(context).style,
                    itemBuilder: (context) => [
                      PopupMenuItem(
                        child: ListTile(
                          leading: Icon(Icons.delete),
                          title: Text('Delete'),
                          onTap: () async {
                            widget.parentTrip.removeLandmark(widget.landmark);
                            rootScaffoldMessengerKey.currentState!.showSnackBar(
                              SnackBar(content: Text("We won't show ${widget.landmark.name} again"))
                            );
                          },
                        ),
                      ),
                      PopupMenuItem(
                        child: ListTile(
                          leading: Icon(Icons.star),
                          title: Text('Favorite'),
                          onTap: () async {
                            // delete the landmark
                            // await deleteLandmark(widget.landmark);
                          },
                        ),
                      ),
                    ],
                  )

                ],
              ),
            ),
          ),
        ],
      ),
    );
  }
@@ -1,5 +1,5 @@
import 'package:anyway/layout.dart';
import 'package:anyway/main.dart';
import 'package:anyway/pages/current_trip.dart';
import 'package:anyway/structs/preferences.dart';
import 'package:anyway/structs/trip.dart';
import 'package:anyway/utils/fetch_trip.dart';
@@ -57,7 +57,7 @@ class _NewTripButtonState extends State<NewTripButton> {
    fetchTrip(trip, widget.preferences);
    Navigator.of(context).push(
      MaterialPageRoute(
        builder: (context) => BasePage(mainScreen: "map", trip: trip)
        builder: (context) => TripPage(trip: trip)
      )
    );
  }
@@ -9,6 +9,15 @@ import 'package:flutter/material.dart';
import 'package:geolocator/geolocator.dart';
import 'package:shared_preferences/shared_preferences.dart';

const Map<String, List> debugLocations = {
  'paris': [48.8575, 2.3514],
  'london': [51.5074, -0.1278],
  'new york': [40.7128, -74.0060],
  'tokyo': [35.6895, 139.6917],
};


class NewTripLocationSearch extends StatefulWidget {
  Future<SharedPreferences> prefs = SharedPreferences.getInstance();
  Trip trip;
@@ -27,26 +36,35 @@ class _NewTripLocationSearchState extends State<NewTripLocationSearch> {

  setTripLocation (String query) async {
    List<Location> locations = [];
    Location startLocation;
    log('Searching for: $query');

    try{
      locations = await locationFromAddress(query);
    } catch (e) {
      log('No results found for: $query : $e');
    if (GeocodingPlatform.instance != null) {
      locations.addAll(await locationFromAddress(query));
    }

    if (locations.isNotEmpty) {
      Location location = locations.first;
      widget.trip.landmarks.clear();
      widget.trip.addLandmark(
        Landmark(
          uuid: 'pending',
          name: query,
          location: [location.latitude, location.longitude],
          type: typeStart
        )
      startLocation = locations.first;
    } else {
      log('No results found for: $query. Is geocoding available?');
      log('Setting Fallback location');
      List coordinates = debugLocations[query.toLowerCase()] ?? [48.8575, 2.3514];
      startLocation = Location(
        latitude: coordinates[0],
        longitude: coordinates[1],
        timestamp: DateTime.now(),
      );
    }

    widget.trip.landmarks.clear();
    widget.trip.addLandmark(
      Landmark(
        uuid: 'pending',
        name: query,
        location: [startLocation.latitude, startLocation.longitude],
        type: typeStart
      )
    );
  }

  late Widget locationSearchBar = SearchBar(
@@ -26,7 +26,7 @@ class _NewTripMapState extends State<NewTripMap> {
    target: LatLng(48.8566, 2.3522),
    zoom: 11.0,
  );
  late GoogleMapController _mapController;
  GoogleMapController? _mapController;
  final Set<Marker> _markers = <Marker>{};

  _onLongPress(LatLng location) {
@@ -56,11 +56,15 @@ class _NewTripMapState extends State<NewTripMap> {
        ),
      )
    );
    _mapController.moveCamera(
      CameraUpdate.newLatLng(
        LatLng(landmark.location[0], landmark.location[1])
      )
    );
    // check if the controller is ready
    if (_mapController != null) {
      _mapController!.animateCamera(
        CameraUpdate.newLatLng(
          LatLng(landmark.location[0], landmark.location[1])
        )
      );
    }
    setState(() {});
  }
}
@@ -2,13 +2,11 @@ import 'package:flutter/material.dart';
import 'package:flutter_svg/flutter_svg.dart';

class OnboardingCard extends StatelessWidget {
  int index;
  String title;
  String description;
  String imagePath;
  final String title;
  final String description;
  final String imagePath;

  OnboardingCard({
    required this.index,
  const OnboardingCard({
    required this.title,
    required this.description,
    required this.imagePath,
@@ -16,41 +14,35 @@ class OnboardingCard extends StatelessWidget {

  @override
  Widget build(BuildContext context) {
    Color baseColor = Theme.of(context).colorScheme.secondary;
    // have a different color for each card, incrementing the hue
    Color currentColor = baseColor.withAlpha(baseColor.alpha - index * 30);
    return Container(
      color: currentColor,
      alignment: Alignment.center,
      child: Padding(
        padding: EdgeInsets.all(20),
        child: Column(
          mainAxisAlignment: MainAxisAlignment.center,
          children: [
            Text(
              title,
              style: TextStyle(
                fontSize: 24,
                fontWeight: FontWeight.bold,
                color: Colors.white,
              ),

    return Padding(
      padding: EdgeInsets.all(20),
      child: Column(
        mainAxisAlignment: MainAxisAlignment.center,
        children: [
          Text(
            title,
            style: TextStyle(
              fontSize: 24,
              fontWeight: FontWeight.bold,
              color: Colors.white,
            ),
            Padding(padding: EdgeInsets.only(top: 20)),
            SvgPicture.asset(
              imagePath,
              height: 200,
            ),
            Padding(padding: EdgeInsets.only(top: 20)),
            Text(
              description,
              style: TextStyle(
                fontSize: 16,
              ),
            ),
          Padding(padding: EdgeInsets.only(top: 20)),
          SvgPicture.asset(
            imagePath,
            height: 200,
          ),
          Padding(padding: EdgeInsets.only(top: 20)),
          Text(
            description,
            style: TextStyle(
              fontSize: 16,
            ),
          ),

          ]
        ),
      )
        ]
      ),
    );
  }
}
@@ -19,8 +19,7 @@ class StepBetweenLandmarks extends StatefulWidget {
class _StepBetweenLandmarksState extends State<StepBetweenLandmarks> {
  @override
  Widget build(BuildContext context) {
    int timeRounded = 5 * ((widget.current.tripTime?.inMinutes ?? 0) ~/ 5);
    // ~/ is integer division (rounds down)
    int time = widget.current.tripTime?.inMinutes ?? 0;
    return Container(
      margin: EdgeInsets.all(10),
      padding: EdgeInsets.all(10),
@@ -34,7 +33,7 @@ class _StepBetweenLandmarksState extends State<StepBetweenLandmarks> {
        Column(
          children: [
            Icon(Icons.directions_walk),
            Text("~$timeRounded min", style: TextStyle(fontSize: 10)),
            Text("$time min", style: TextStyle(fontSize: 10)),
          ],
        ),
        Spacer(),
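The rounding idiom in this hunk uses Dart's truncating integer division `~/` to floor a duration to the nearest multiple of five minutes; Python's `//` plays the same role, so a one-line check looks like this:

time = 17
rounded = 5 * (time // 5)   # -> 15: floors to the nearest lower multiple of 5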
@@ -1,11 +1,12 @@
import 'package:anyway/pages/current_trip.dart';
import 'package:anyway/utils/load_trips.dart';
import 'package:flutter/material.dart';

import 'package:anyway/layout.dart';
import 'package:anyway/structs/trip.dart';


class TripsOverview extends StatefulWidget {
  final Future<List<Trip>> trips;
  final SavedTrips trips;
  const TripsOverview({
    super.key,
    required this.trips,
@@ -16,50 +17,34 @@ class TripsOverview extends StatefulWidget {
}

class _TripsOverviewState extends State<TripsOverview> {

  Widget listBuild (BuildContext context, AsyncSnapshot<List<Trip>> snapshot) {
  Widget listBuild (BuildContext context, SavedTrips trips) {
    List<Widget> children;
    if (snapshot.hasData) {
      children = List<Widget>.generate(snapshot.data!.length, (index) {
        Trip trip = snapshot.data![index];
        return ListTile(
          title: FutureBuilder(
            future: trip.cityName,
            builder: (BuildContext context, AsyncSnapshot<String> snapshot) {
              if (snapshot.hasData) {
                return Text("Trip to ${snapshot.data}");
              } else if (snapshot.hasError) {
                return Text("Error: ${snapshot.error}");
              } else {
                return const Text("Trip to ...");
              }
            },
          ),
          leading: Icon(Icons.pin_drop),
          onTap: () {
            Navigator.of(context).push(
              MaterialPageRoute(
                builder: (context) => BasePage(mainScreen: "map", trip: trip)
              )
            );
    List<Trip> items = trips.trips;
    children = List<Widget>.generate(items.length, (index) {
      Trip trip = items[index];
      return ListTile(
        title: FutureBuilder(
          future: trip.cityName,
          builder: (BuildContext context, AsyncSnapshot<String> snapshot) {
            if (snapshot.hasData) {
              return Text("Trip to ${snapshot.data}");
            } else if (snapshot.hasError) {
              return Text("Error: ${snapshot.error}");
            } else {
              return const Text("Trip to ...");
            }
          },
        );
      });
    } else if (snapshot.hasError) {
      children = [
        const Icon(
          Icons.error_outline,
          color: Colors.red,
          size: 60,
        ),
        Padding(
          padding: const EdgeInsets.only(top: 16),
          child: Text('Error: ${snapshot.error}'),
        ),
      ];
    } else {
      children = [Center(child: CircularProgressIndicator())];
    }
        leading: Icon(Icons.pin_drop),
        onTap: () {
          Navigator.of(context).push(
            MaterialPageRoute(
              builder: (context) => TripPage(trip: trip)
            )
          );
        },
      );
    });

    return ListView(
      children: children,
@@ -69,9 +54,11 @@ class _TripsOverviewState extends State<TripsOverview> {

  @override
  Widget build(BuildContext context) {
    return FutureBuilder(
      future: widget.trips,
      builder: listBuild,
    return ListenableBuilder(
      listenable: widget.trips,
      builder: (BuildContext context, Widget? child) {
        return listBuild(context, widget.trips);
      }
    );
  }
}
}
@@ -1,3 +1,6 @@
import 'package:anyway/main.dart';
import 'package:anyway/modules/help_dialog.dart';
import 'package:anyway/pages/current_trip.dart';
import 'package:anyway/pages/settings.dart';
import 'package:flutter/material.dart';
@@ -8,22 +11,24 @@ import 'package:anyway/modules/trips_saved_list.dart';
import 'package:anyway/utils/load_trips.dart';

import 'package:anyway/pages/new_trip_location.dart';
import 'package:anyway/pages/current_trip.dart';
import 'package:anyway/pages/onboarding.dart';


// BasePage is the scaffold that holds all other pages
// A side drawer is used to switch between pages
// BasePage is the scaffold that holds a child page and a side drawer
// The side drawer is the main way to switch between pages

class BasePage extends StatefulWidget {
  final String mainScreen;
  final Trip? trip;
  final Widget mainScreen;
  final Widget title;
  final List<String> helpTexts;

  const BasePage({
    super.key,
    required this.mainScreen,
    this.trip,
    this.title = const Text(APP_NAME),
    this.helpTexts = const [],
  });

  @override
@@ -34,53 +39,25 @@ class _BasePageState extends State<BasePage> {

  @override
  Widget build(BuildContext context) {
    Widget currentView = const Text("loading...");
    Future<List<Trip>> trips = loadTrips();

    if (widget.mainScreen == "map") {
      if (widget.trip != null) {
        currentView = TripPage(trip: widget.trip!);
      } else {
        currentView = FutureBuilder(
          future: trips,
          builder: (context, snapshot) {
            if (snapshot.hasData) {
              List<Trip> availableTrips = snapshot.data!;
              if (availableTrips.isNotEmpty) {
                return TripPage(trip: availableTrips[0]);
              } else {
                return Scaffold(
                  body: Center(
                    child: Text("Wow, so empty!"),
                  ),
                  floatingActionButton: FloatingActionButton.extended(
                    onPressed: () {
                      Navigator.of(context).push(
                        MaterialPageRoute(
                          builder: (context) => const NewTripPage()
                        )
                      );
                    },
                    label: Text("Plan a trip"),
                  ),
                );
              }
            } else {
              return const Text("loading...");
            }
          },
        );
      }
    } else if (widget.mainScreen == "tutorial") {
      currentView = OnboardingPage();
    } else if (widget.mainScreen == "settings") {
      currentView = SettingsPage();
    }
    savedTrips.loadTrips();

    return Scaffold(
      appBar: AppBar(title: Text(APP_NAME)),
      body: Center(child: currentView),
      appBar: AppBar(
        title: widget.title,
        actions: [
          IconButton(
            icon: const Icon(Icons.help),
            tooltip: 'Help',
            onPressed: () {
              if (widget.helpTexts.isNotEmpty) {
                helpDialog(context, widget.helpTexts[0], widget.helpTexts[1]);
              }
            }
          ),
        ],
      ),
      body: Center(child: widget.mainScreen),
      drawer: Drawer(
        child: Column(
          children: [
@@ -104,7 +81,8 @@ class _BasePageState extends State<BasePage> {
            ListTile(
              title: const Text('Your Trips'),
              leading: const Icon(Icons.map),
              selected: widget.mainScreen == "map",
              // TODO: this is not working!
              selected: widget.mainScreen is TripPage,
              onTap: () {},
              trailing: ElevatedButton(
                onPressed: () {
@@ -122,11 +100,11 @@ class _BasePageState extends State<BasePage> {
            // through the options in the drawer if there isn't enough vertical
            // space to fit everything.
            Expanded(
              child: TripsOverview(trips: trips),
              child: TripsOverview(trips: savedTrips),
            ),
            ElevatedButton(
              onPressed: () async {
                removeAllTripsFromPrefs();
                savedTrips.clearTrips();
              },
              child: const Text('Clear trips'),
            ),
@@ -134,11 +112,12 @@ class _BasePageState extends State<BasePage> {
            ListTile(
              title: const Text('How to use'),
              leading: Icon(Icons.help),
              selected: widget.mainScreen == "tutorial",
              // TODO: this is not working!
              selected: widget.mainScreen is OnboardingPage,
              onTap: () {
                Navigator.of(context).push(
                  MaterialPageRoute(
                    builder: (context) => BasePage(mainScreen: "tutorial")
                    builder: (context) => OnboardingPage()
                  )
                );
              },
@@ -148,11 +127,12 @@ class _BasePageState extends State<BasePage> {
            ListTile(
              title: const Text('Settings'),
              leading: const Icon(Icons.settings),
              selected: widget.mainScreen == "settings",
              // TODO: this is not working!
              selected: widget.mainScreen is SettingsPage,
              onTap: () {
                Navigator.of(context).push(
                  MaterialPageRoute(
                    builder: (context) => BasePage(mainScreen: "settings")
                    builder: (context) => SettingsPage()
                  )
                );
              },
@@ -1,4 +1,5 @@
import 'package:anyway/constants.dart';
import 'package:anyway/pages/base_page.dart';
import 'package:flutter/material.dart';
import 'package:sliding_up_panel/sliding_up_panel.dart';
@@ -10,7 +11,7 @@ final Shader textGradient = APP_GRADIENT.createShader(Rect.fromLTWH(0.0, 0.0, 20
TextStyle greeterStyle = TextStyle(
  foreground: Paint()..shader = textGradient,
  fontWeight: FontWeight.bold,
  fontSize: 26
  fontSize: 25
);
@@ -31,7 +32,8 @@ class _TripPageState extends State<TripPage> {

  @override
  Widget build(BuildContext context) {
    return SlidingUpPanel(
    return BasePage(
      mainScreen: SlidingUpPanel(
        // use panelBuilder instead of panel so that we can reuse the scrollcontroller for the listview
        panelBuilder: (scrollcontroller) => CurrentTripPanel(controller: scrollcontroller, trip: widget.trip),
        // using collapsed and panelBuilder seems to show both at the same time, so we include the greeter in the panelBuilder
@@ -41,7 +43,7 @@ class _TripPageState extends State<TripPage> {
        maxHeight: MediaQuery.of(context).size.height * TRIP_PANEL_MAX_HEIGHT,
        // padding in this context is annoying: it offsets the notion of vertical alignment.
        // children that want to be centered vertically need to have their size adjusted by 2x the padding
        padding: const EdgeInsets.all(10.0),
        // padding: const EdgeInsets.all(10.0),
        // Panel snapping should not be disabled because it significantly improves the user experience
        // panelSnapping: false
        borderRadius: const BorderRadius.only(topLeft: Radius.circular(25), topRight: Radius.circular(25)),
@@ -52,6 +54,13 @@ class _TripPageState extends State<TripPage> {
            color: Colors.black,
          )
        ],
      ),
      title: FutureBuilder(
        future: widget.trip.cityName,
        builder: (context, snapshot) => Text(
          'Your trip to ${snapshot.hasData ? snapshot.data! : "..."}',
        )
      ),
    );
  }
}
@@ -1,5 +1,5 @@
import 'package:anyway/modules/new_trip_button.dart';
import 'package:anyway/modules/new_trip_options_button.dart';
import 'package:anyway/pages/base_page.dart';
import 'package:flutter/material.dart';

import "package:anyway/structs/trip.dart";
@@ -19,23 +19,28 @@ class _NewTripPageState extends State<NewTripPage> {
  final TextEditingController lonController = TextEditingController();
  Trip trip = Trip();

  @override
  Widget build(BuildContext context) {
    // floating search bar and map as a background
    return Scaffold(
      appBar: AppBar(
        title: const Text('New Trip'),
    return BasePage(
      mainScreen: Scaffold(
        body: Stack(
          children: [
            NewTripMap(trip),
            Padding(
              padding: EdgeInsets.all(15),
              child: NewTripLocationSearch(trip),
            ),
          ],
        ),
        floatingActionButton: NewTripOptionsButton(trip: trip),
      ),
      body: Stack(
        children: [
          NewTripMap(trip),
          Padding(
            padding: EdgeInsets.all(15),
            child: NewTripLocationSearch(trip),
          ),
        ],
      ),
      floatingActionButton: NewTripOptionsButton(trip: trip),
      title: Text("New Trip"),
      helpTexts: [
        "Setting the start location",
        "To set the starting point, type a city name in the search bar. You can also navigate the map like you're used to and long press anywhere to set a starting point."
      ],
    );
  }
}
@@ -1,4 +1,5 @@
import 'package:anyway/modules/new_trip_button.dart';
import 'package:anyway/pages/base_page.dart';
import 'package:anyway/structs/preferences.dart';
import 'package:anyway/structs/trip.dart';
import 'package:flutter/cupertino.dart';
@@ -19,41 +20,54 @@ class _NewTripPreferencesPageState extends State<NewTripPreferencesPage> {

  @override
  Widget build(BuildContext context) {
    return Scaffold(
      body: ListView(
        children: [
          // Center(
          //   child: CircleAvatar(
          //     radius: 100,
          //     child: Icon(Icons.person, size: 100),
          //   )
          // ),
          Padding(padding: EdgeInsets.only(top: 30)),
          Center(
            child: FutureBuilder(
              future: widget.trip.cityName,
              builder: (context, snapshot) => Text(
                'Your trip to ${snapshot.hasData ? snapshot.data! : "..."}',
                style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold)
              )
            )
          ),
    return BasePage(
      mainScreen: Scaffold(
        body: ListView(
          children: [
            // Center(
            //   child: CircleAvatar(
            //     radius: 100,
            //     child: Icon(Icons.person, size: 100),
            //   )
            // ),
            // Padding(padding: EdgeInsets.only(top: 30)),
            // Center(
            //   child: FutureBuilder(
            //     future: widget.trip.cityName,
            //     builder: (context, snapshot) => Text(
            //       'Your trip to ${snapshot.hasData ? snapshot.data! : "..."}',
            //       style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold)
            //     )
            //   )
            // ),

          Center(
            child: Padding(
              padding: EdgeInsets.only(left: 10, right: 10, top: 20, bottom: 0),
              child: Text('Tell us about your ideal trip.', style: TextStyle(fontSize: 18))
            Center(
              child: Padding(
                padding: EdgeInsets.only(left: 10, right: 10, top: 20, bottom: 0),
                child: Text('Tell us about your ideal trip.', style: TextStyle(fontSize: 18))
              ),
            ),
          ),

          Divider(indent: 25, endIndent: 25, height: 50),
            Divider(indent: 25, endIndent: 25, height: 50),

          durationPicker(preferences.maxTime),
            durationPicker(preferences.maxTime),

          preferenceSliders([preferences.sightseeing, preferences.shopping, preferences.nature]),
        ]
            preferenceSliders([preferences.sightseeing, preferences.shopping, preferences.nature]),
          ]
        ),
        floatingActionButton: NewTripButton(trip: widget.trip, preferences: preferences),
      ),
      floatingActionButton: NewTripButton(trip: widget.trip, preferences: preferences),

      title: FutureBuilder(
        future: widget.trip.cityName,
        builder: (context, snapshot) => Text(
          'Your trip to ${snapshot.hasData ? snapshot.data! : "..."}',
        )
      ),
      helpTexts: [
        'Trip preferences',
        'Set your preferences for this trip. These will be used to generate a custom itinerary.'
      ],
    );
  }
@@ -1,7 +1,33 @@
import 'dart:ui';

import 'package:anyway/constants.dart';
import 'package:anyway/modules/onboarding_card.dart';
import 'package:anyway/pages/new_trip_location.dart';
import 'package:flutter/material.dart';

const List<Widget> onboardingCards = [
  OnboardingCard(
    title: "Welcome to anyway!",
    description: "Anyway helps you plan a city trip that suits your wishes.",
    imagePath: "assets/city.svg"
  ),
  OnboardingCard(
    title: "Find your way",
    description: "Bored by churches? No problem! Hate shopping? No worries! Instead of suggesting the generic trips that bore you, anyway will try to give you recommendations that really suit you.",
    imagePath: "assets/plan.svg"
  ),
  OnboardingCard(
    title: "Change your mind",
    description: "Feet get sore, the weather changes. Anyway understands that! Move or remove destinations, visit hidden gems along your journey, do your own thing. Anyway adapts to your spontaneous decisions.",
    imagePath: "assets/cat.svg"
  ),
  OnboardingCard(
    title: "Feeling lost?",
    description: "Whenever you are confused or need help with the app, look out for the question mark in the top right corner. Help is just a tap away!",
    imagePath: "assets/confused.svg"
  ),
];

class OnboardingPage extends StatefulWidget {
  const OnboardingPage({super.key});
@@ -10,37 +36,83 @@ class OnboardingPage extends StatefulWidget {
}

class _OnboardingPageState extends State<OnboardingPage> {
  final PageController _controller = PageController();

  @override
  Widget build(BuildContext context) {
    final PageController _controller = PageController();
    return Scaffold(
      body: Stack(
        children: [
          AnimatedBuilder(
            animation: _controller,
            builder: (context, child) {
              return Stack(
                children: [
                  Container(
                    decoration: BoxDecoration(
                      gradient: LinearGradient(
                        begin: Alignment.topLeft,
                        end: Alignment.bottomRight,
                        colors: APP_GRADIENT.colors,
                        stops: [
                          (_controller.hasClients ? _controller.page ?? _controller.initialPage : _controller.initialPage) / onboardingCards.length,
                          (_controller.hasClients ? _controller.page ?? _controller.initialPage + 1 : _controller.initialPage + 1) / onboardingCards.length,
                        ],
                      ),
                    ),
                  ),
                  BackdropFilter(
                    filter: ImageFilter.blur(sigmaX: 100, sigmaY: 100),
                    child: Container(
                      color: Colors.black.withOpacity(0),
                    ),
                  ),
                ],
              );
            },
          ),
          PageView(
            // horizontally scrollable list of pages
            controller: _controller,

            children: [
              OnboardingCard(index: 1, title: "Welcome to anyway!", description: "Anyway helps you plan a city trip that suits your wishes.", imagePath: "assets/city.svg"),
              OnboardingCard(index: 2, title: "Find your way", description: "Bored by churches? No problem! Hate shopping? No worries! More than showing you the typical 'must-sees' of a city, anyway will try to give you recommendations that really suit you.", imagePath: "assets/plan.svg"),
              OnboardingCard(index: 3, title: "Change your mind", description: "Life happens when you're busy making plans. Anyway understands that! Move or remove destinations, visit hidden gems along your journey, do your own thing. Anyway adapts to your spontaneous decisions.", imagePath: "assets/cat.svg"),
            ],
            children: List.generate(
              onboardingCards.length,
              (index) {
                return Container(
                  alignment: Alignment.center,
                  child: onboardingCards[index],
                );
              }
            ),
          ),
        ],
      ),
      floatingActionButton: FloatingActionButton(
        onPressed: () {
          if (_controller.page == 2) {
            Navigator.of(context).push(
              MaterialPageRoute(
                builder: (context) => const NewTripPage()
              )
      floatingActionButton: FloatingActionButton.extended(
        onPressed: () {
          if (_controller.page == onboardingCards.length - 1) {
            Navigator.of(context).push(
              MaterialPageRoute(
                builder: (context) => const NewTripPage()
              )
            );
          } else {
            _controller.nextPage(duration: Duration(milliseconds: 500), curve: Curves.ease);
          }
        },
        label: AnimatedBuilder(
          animation: _controller,
          builder: (context, child) {
            if ((_controller.page ?? _controller.initialPage) == onboardingCards.length - 1) {
              return Row(
                children: [
                  const Text("Start planning!"),
                  Padding(padding: const EdgeInsets.only(right: 8.0)),
                  const Icon(Icons.map_outlined)
                ],
              );
            } else {
              _controller.nextPage(duration: Duration(milliseconds: 500), curve: Curves.ease);
              return const Icon(Icons.arrow_forward);
            }
          },
          child: Icon(Icons.arrow_forward),
        }
      )
      ),
    );
  }
@@ -1,5 +1,6 @@
import 'package:anyway/constants.dart';
import 'package:anyway/main.dart';
import 'package:anyway/pages/base_page.dart';
import 'package:flutter/material.dart';
import 'package:permission_handler/permission_handler.dart';
import 'package:shared_preferences/shared_preferences.dart';
@@ -16,30 +17,37 @@ class SettingsPage extends StatefulWidget {
|
||||
class _SettingsPageState extends State<SettingsPage> {
|
||||
@override
|
||||
Widget build(BuildContext context) {
|
||||
return ListView(
|
||||
padding: EdgeInsets.all(15),
|
||||
children: [
|
||||
// First a round, centered image
|
||||
Center(
|
||||
child: CircleAvatar(
|
||||
radius: 75,
|
||||
child: Icon(Icons.settings, size: 100),
|
||||
)
|
||||
),
|
||||
Center(
|
||||
child: Text('Global settings', style: TextStyle(fontSize: 24))
|
||||
),
|
||||
return BasePage(
|
||||
mainScreen: ListView(
|
||||
padding: EdgeInsets.all(15),
|
||||
children: [
|
||||
// First a round, centered image
|
||||
Center(
|
||||
child: CircleAvatar(
|
||||
radius: 75,
|
||||
child: Icon(Icons.settings, size: 100),
|
||||
)
|
||||
),
|
||||
Center(
|
||||
child: Text('Global settings', style: TextStyle(fontSize: 24))
|
||||
),
|
||||
|
||||
Divider(indent: 25, endIndent: 25, height: 50),
|
||||
Divider(indent: 25, endIndent: 25, height: 50),
|
||||
|
||||
darkMode(),
|
||||
setLocationUsage(),
|
||||
setDebugMode(),
|
||||
darkMode(),
|
||||
setLocationUsage(),
|
||||
setDebugMode(),
|
||||
|
||||
Divider(indent: 25, endIndent: 25, height: 50),
|
||||
Divider(indent: 25, endIndent: 25, height: 50),
|
||||
|
||||
privacyInfo(),
|
||||
]
|
||||
privacyInfo(),
|
||||
]
|
||||
),
|
||||
title: Text('Settings'),
|
||||
helpTexts: [
|
||||
'Settings',
|
||||
'Preferences set in this page are global and will affect the entire application.'
|
||||
],
|
||||
);
|
||||
}
|
||||
|
||||
@@ -169,7 +177,9 @@ class _SettingsPageState extends State<SettingsPage> {
|
||||
return Center(
|
||||
child: Column(
|
||||
children: [
|
||||
Text('Our privacy policy is available under:'),
|
||||
Text('AnyWay does not collect or store any of the data that is submitted via the app. The location of your trip is not stored. The location feature is only used to show your current location on the map, it is not transmitted to our servers.', textAlign: TextAlign.center),
|
||||
Padding(padding: EdgeInsets.only(top: 3)),
|
||||
Text('Our full privacy policy is available under:', textAlign: TextAlign.center),
|
||||
|
||||
TextButton.icon(
|
||||
icon: Icon(Icons.info),
|
||||
|
@@ -24,8 +24,7 @@ final class Landmark extends LinkedListEntry<Landmark>{
  // description to be shown in the overview
  final String? nameEN;
  final String? websiteURL;
  final String? wikipediaURL;
  final String? imageURL;
  String? imageURL; // not final because it can be patched
  final String? description;
  final Duration? duration;
  final bool? visited;
@@ -44,7 +43,6 @@ final class Landmark extends LinkedListEntry<Landmark>{

    this.nameEN,
    this.websiteURL,
    this.wikipediaURL,
    this.imageURL,
    this.description,
    this.duration,
@@ -70,7 +68,6 @@ final class Landmark extends LinkedListEntry<Landmark>{
    final isSecondary = json['is_secondary'] as bool?;
    final nameEN = json['name_en'] as String?;
    final websiteURL = json['website_url'] as String?;
    final wikipediaURL = json['wikipedia_url'] as String?;
    final imageURL = json['image_url'] as String?;
    final description = json['description'] as String?;
    var duration = Duration(minutes: json['duration'] ?? 0) as Duration?;
@@ -85,7 +82,6 @@ final class Landmark extends LinkedListEntry<Landmark>{
      isSecondary: isSecondary,
      nameEN: nameEN,
      websiteURL: websiteURL,
      wikipediaURL: wikipediaURL,
      imageURL: imageURL,
      description: description,
      duration: duration,
@@ -112,7 +108,6 @@ final class Landmark extends LinkedListEntry<Landmark>{
      'is_secondary': isSecondary,
      'name_en': nameEN,
      'website_url': websiteURL,
      'wikipedia_url': wikipediaURL,
      'image_url': imageURL,
      'description': description,
      'duration': duration?.inMinutes,
@@ -130,7 +125,7 @@ class LandmarkType {
  LandmarkType({required this.name, this.icon = const Icon(Icons.location_on)}) {
    switch (name) {
      case 'sightseeing':
        icon = const Icon(Icons.church);
        icon = const Icon(Icons.castle);
        break;
      case 'nature':
        icon = const Icon(Icons.eco);

@@ -113,10 +113,3 @@ LinkedList<Landmark> readLandmarks(SharedPreferences prefs, String? firstUUID) {
  }
  return landmarks;
}



void removeAllTripsFromPrefs () async {
  SharedPreferences prefs = await SharedPreferences.getInstance();
  prefs.clear();
}

@@ -1,5 +1,6 @@
import "dart:convert";
import "dart:developer";
import "package:anyway/utils/load_landmark_image.dart";
import 'package:dio/dio.dart';

import 'package:anyway/constants.dart';
@@ -32,6 +33,7 @@ fetchTrip(
  UserPreferences preferences,
) async {
  Map<String, dynamic> data = {
    // Add user ID here for API request
    "preferences": preferences.toJson(),
    "start": trip.landmarks!.first.location,
  };
@@ -85,6 +87,20 @@ fetchTrip(
}


patchLandmarkImage(Landmark landmark) async {
  // patch the landmark to include an image from an external source
  if (landmark.imageURL == null) {
    String? newUrl = await getImageUrlFromName(landmark.name);
    if (newUrl != null) {
      landmark.imageURL = newUrl;
    }
  } else if (landmark.imageURL!.contains("photos.app.goo.gl")) {
    // the image is a google photos link, we should get the image behind the link
    String? newUrl = await getImageUrlFromGooglePhotos(landmark.imageURL!);
    // overwrite the url even if the lookup returned null, dropping the unusable link
    landmark.imageURL = newUrl;
  }
}

Future<(Landmark, String?)> fetchLandmark(String uuid) async {
  final response = await dio.get(
@@ -101,5 +117,7 @@ Future<(Landmark, String?)> fetchLandmark(String uuid) async {
  log(response.data.toString());
  Map<String, dynamic> json = response.data;
  String? nextUUID = json["next_uuid"];
  return (Landmark.fromJson(json), nextUUID);
  Landmark landmark = Landmark.fromJson(json);
  patchLandmarkImage(landmark);
  return (landmark, nextUUID);
}

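Note that fetchLandmark kicks off patchLandmarkImage without awaiting it, so the landmark can be returned (and rendered) before its imageURL has been patched; the UI presumably picks the new URL up on a later rebuild. A hedged sketch of an awaited variant, in case a caller needs the patched URL up front (fetchLandmarkPatched is a hypothetical name, not part of this diff):

// Sketch only: wait for the image patch before returning the landmark.
// Slower to resolve, but the returned object is complete.
Future<(Landmark, String?)> fetchLandmarkPatched(String uuid) async {
  final (Landmark landmark, String? nextUUID) = await fetchLandmark(uuid);
  await patchLandmarkImage(landmark);
  return (landmark, nextUUID);
}
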
frontend/lib/utils/get_first_page.dart (normal file, 41 lines)
@@ -0,0 +1,41 @@
import 'package:anyway/pages/current_trip.dart';
import 'package:anyway/pages/onboarding.dart';
import 'package:anyway/structs/trip.dart';
import 'package:anyway/utils/load_trips.dart';
import 'package:flutter/material.dart';

Widget getFirstPage() {
  SavedTrips trips = SavedTrips();
  trips.loadTrips();

  return ListenableBuilder(
    listenable: trips,
    builder: (BuildContext context, Widget? child) {
      List<Trip> items = trips.trips;
      if (items.isNotEmpty) {
        return TripPage(trip: items[0]);
      } else {
        return OnboardingPage();
      }
    }
  );
  // Future<List<Trip>> trips = loadTrips();
  // // test if there are any active trips
  // // if there are, return the trip list
  // // if there are not, return the onboarding page
  // return FutureBuilder(
  //   future: trips,
  //   builder: (context, snapshot) {
  //     if (snapshot.hasData) {
  //       List<Trip> availableTrips = snapshot.data!;
  //       if (availableTrips.isNotEmpty) {
  //         return TripPage(trip: availableTrips[0]);
  //       } else {
  //         return OnboardingPage();
  //       }
  //     } else {
  //       return CircularProgressIndicator();
  //     }
  //   }
  // );
}

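getFirstPage() decides between the saved-trip view and onboarding at startup, so the natural place to call it is the app's root widget. A sketch of that wiring, assuming main.dart builds a MaterialApp (this hookup is not shown in the diff):

import 'package:flutter/material.dart';
import 'package:anyway/utils/get_first_page.dart';

// Assumed wiring: returning users land on their most recent trip,
// first-time users on the onboarding flow.
void main() => runApp(MaterialApp(
  title: 'anyway',
  home: getFirstPage(),
));
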
frontend/lib/utils/load_landmark_image.dart (normal file, 71 lines)
@@ -0,0 +1,71 @@
import 'dart:developer';

import 'package:dio/dio.dart';
import 'package:fuzzywuzzy/fuzzywuzzy.dart';
import 'dart:convert';

import 'package:fuzzywuzzy/model/extracted_result.dart';

const String baseUrl = "https://en.wikipedia.org/w/api.php";
final Dio dio = Dio();

Future<int?> bestPageMatch(String title) async {
  final response = await dio.get(baseUrl, queryParameters: {
    "action": "query",
    "format": "json",
    "list": "prefixsearch",
    "pssearch": title,
  });

  final data = jsonDecode(response.toString());
  log(data.toString());
  // fall back to an empty list (not a map) when the key is missing
  final List<dynamic> results = data["query"]["prefixsearch"] ?? [];
  final Map<String, int> titlesAndIds = {
    for (var d in results) d["title"]: d["pageid"]
  };
  if (titlesAndIds.isEmpty) {
    log("No pages found for $title");
    return null;
  }

  // after the empty check, we can safely assume that there is a best match
  final ExtractedResult<String> bestMatch = extractOne(
    query: title,
    choices: titlesAndIds.keys.toList(),
    cutoff: 70,
  );
  return titlesAndIds[bestMatch.choice];
}

Future<String?> getImageUrl(int pageId) async {
  final response = await dio.get(baseUrl, queryParameters: {
    "action": "query",
    "format": "json",
    "prop": "pageimages",
    "pageids": pageId,
    "pithumbsize": 500,
  });

  final data = jsonDecode(response.toString());
  final pageData = data["query"]["pages"][pageId.toString()];
  return pageData["thumbnail"]?["source"];
}

Future<String?> getImageUrlFromName(String title) async {
  int? pageId = await bestPageMatch(title);
  if (pageId == null) {
    return null;
  }
  return await getImageUrl(pageId);
}


Future<String?> getImageUrlFromGooglePhotos(String url) async {
  // this is a very simple implementation that just gets the image behind the link
  // it is not guaranteed to work for all google photos links
  final response = await dio.get(url);
  final data = response.toString();
  final int start = data.indexOf("https://lh3.googleusercontent.com");
  final int end = data.indexOf('"', start);
  return data.substring(start, end);
}

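The lookup above chains three steps: a Wikipedia prefixsearch for candidate page titles, a fuzzywuzzy extractOne (cutoff 70) to pick the best-matching title, and a pageimages query for a 500px thumbnail. A short usage sketch (the landmark name is an arbitrary example):

// Sketch: resolve a thumbnail URL for a landmark name.
// Returns null when no page title scores above the fuzzy-match cutoff.
Future<void> demo() async {
  final String? url = await getImageUrlFromName("Eiffel Tower");
  print(url ?? "no image found");
}
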
@@ -1,19 +1,39 @@
import 'dart:collection';

import 'package:anyway/structs/trip.dart';
import 'package:anyway/structs/landmark.dart';
import 'package:shared_preferences/shared_preferences.dart';

Future<List<Trip>> loadTrips() async {
  SharedPreferences prefs = await SharedPreferences.getInstance();
import 'package:flutter/foundation.dart';

  List<Trip> trips = [];
  Set<String> keys = prefs.getKeys();
  for (String key in keys) {
    if (key.startsWith('trip_')) {
      String uuid = key.replaceFirst('trip_', '');
      trips.add(Trip.fromPrefs(prefs, uuid));
class SavedTrips extends ChangeNotifier {
  List<Trip> _trips = [];

  List<Trip> get trips => _trips;

  void loadTrips() async {
    SharedPreferences prefs = await SharedPreferences.getInstance();

    List<Trip> trips = [];
    Set<String> keys = prefs.getKeys();
    for (String key in keys) {
      if (key.startsWith('trip_')) {
        String uuid = key.replaceFirst('trip_', '');
        trips.add(Trip.fromPrefs(prefs, uuid));
      }
    }
    _trips = trips;
    notifyListeners();
  }

  void addTrip(Trip trip) async {
    SharedPreferences prefs = await SharedPreferences.getInstance();
    trip.toPrefs(prefs);
    _trips.add(trip);
    notifyListeners();
  }

  void clearTrips () async {
    SharedPreferences prefs = await SharedPreferences.getInstance();
    prefs.clear();
    _trips = [];
    notifyListeners();
  }
  return trips;
}

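With SavedTrips the trip list becomes a ChangeNotifier: every mutation persists to SharedPreferences and then calls notifyListeners(), so any ListenableBuilder attached to it rebuilds (getFirstPage above is one such consumer). A small usage sketch (someTrip stands in for a real Trip instance):

// Sketch: persist a trip and notify listeners in one call.
final SavedTrips savedTrips = SavedTrips();
savedTrips.loadTrips();       // async: hydrates _trips from prefs, then notifies
savedTrips.addTrip(someTrip); // writes the trip to prefs and notifies listeners
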
@@ -101,10 +101,10 @@ packages:
    dependency: transitive
    description:
      name: collection
      sha256: ee67cb0715911d28db6bf4af1026078bd6f0128b07a5f66fb2ed94ec6783c09a
      sha256: a1ace0a119f20aabc852d165077c036cd864315bd99b7eaa10a60100341941bf
      url: "https://pub.dev"
    source: hosted
    version: "1.18.0"
    version: "1.19.0"
  crypto:
    dependency: transitive
    description:
@@ -232,6 +232,14 @@ packages:
    description: flutter
    source: sdk
    version: "0.0.0"
  fuzzywuzzy:
    dependency: "direct main"
    description:
      name: fuzzywuzzy
      sha256: "3004379ffd6e7f476a0c2091f38f16588dc45f67de7adf7c41aa85dec06b432c"
      url: "https://pub.dev"
    source: hosted
    version: "1.2.0"
  geocoding:
    dependency: "direct main"
    description:
@@ -404,18 +412,18 @@ packages:
    dependency: transitive
    description:
      name: leak_tracker
      sha256: "3f87a60e8c63aecc975dda1ceedbc8f24de75f09e4856ea27daf8958f2f0ce05"
      sha256: "7bb2830ebd849694d1ec25bf1f44582d6ac531a57a365a803a6034ff751d2d06"
      url: "https://pub.dev"
    source: hosted
    version: "10.0.5"
    version: "10.0.7"
  leak_tracker_flutter_testing:
    dependency: transitive
    description:
      name: leak_tracker_flutter_testing
      sha256: "932549fb305594d82d7183ecd9fa93463e9914e1b67cacc34bc40906594a1806"
      sha256: "9491a714cca3667b60b5c420da8217e6de0d1ba7a5ec322fab01758f6998f379"
      url: "https://pub.dev"
    source: hosted
    version: "3.0.5"
    version: "3.0.8"
  leak_tracker_testing:
    dependency: transitive
    description:
@@ -700,7 +708,7 @@ packages:
    dependency: transitive
    description: flutter
    source: sdk
    version: "0.0.99"
    version: "0.0.0"
  sliding_up_panel:
    dependency: "direct main"
    description:
@@ -745,10 +753,10 @@ packages:
    dependency: transitive
    description:
      name: stack_trace
      sha256: "73713990125a6d93122541237550ee3352a2d84baad52d375a4cad2eb9b7ce0b"
      sha256: "9f47fd3630d76be3ab26f0ee06d213679aa425996925ff3feffdec504931c377"
      url: "https://pub.dev"
    source: hosted
    version: "1.11.1"
    version: "1.12.0"
  stream_channel:
    dependency: transitive
    description:
@@ -769,10 +777,10 @@ packages:
    dependency: transitive
    description:
      name: string_scanner
      sha256: "556692adab6cfa87322a115640c11f13cb77b3f076ddcc5d6ae3c20242bedcde"
      sha256: "688af5ed3402a4bde5b3a6c15fd768dbf2621a614950b17f04626c431ab3c4c3"
      url: "https://pub.dev"
    source: hosted
    version: "1.2.0"
    version: "1.3.0"
  synchronized:
    dependency: transitive
    description:
@@ -793,10 +801,10 @@ packages:
    dependency: transitive
    description:
      name: test_api
      sha256: "5b8a98dafc4d5c4c9c72d8b31ab2b23fc13422348d2997120294d3bac86b4ddb"
      sha256: "664d3a9a64782fcdeb83ce9c6b39e78fd2971d4e37827b9b06c3aa1edc5e760c"
      url: "https://pub.dev"
    source: hosted
    version: "0.7.2"
    version: "0.7.3"
  typed_data:
    dependency: transitive
    description:
@@ -913,10 +921,10 @@ packages:
    dependency: transitive
    description:
      name: vm_service
      sha256: "5c5f338a667b4c644744b661f309fb8080bb94b18a7e91ef1dbd343bed00ed6d"
      sha256: f6be3ed8bd01289b34d679c2b62226f63c0e69f9fd2e50a6b3c1c729a961041b
      url: "https://pub.dev"
    source: hosted
    version: "14.2.5"
    version: "14.3.0"
  web:
    dependency: transitive
    description:

@@ -51,6 +51,7 @@ dependencies:
  flutter_launcher_icons: ^0.13.1
  permission_handler: ^11.3.1
  geolocator: ^13.0.1
  fuzzywuzzy: ^1.2.0

dev_dependencies:
  flutter_test:

@@ -1,30 +0,0 @@
// This is a basic Flutter widget test.
//
// To perform an interaction with a widget in your test, use the WidgetTester
// utility in the flutter_test package. For example, you can send tap and scroll
// gestures. You can also use WidgetTester to find child widgets in the widget
// tree, read text, and verify that the values of widget properties are correct.

import 'package:flutter/material.dart';
import 'package:flutter_test/flutter_test.dart';

// import 'package:anyway/main.dart';
import 'package:anyway/layout.dart';

void main() {
  testWidgets('Counter increments smoke test', (WidgetTester tester) async {
    // Build our app and trigger a frame.
    await tester.pumpWidget(BasePage(mainScreen: "map",));

    // Verify that the title is displayed
    expect(find.text('City Nav'), findsOneWidget);

    // Tap the '+' icon and trigger a frame.
    await tester.tap(find.byIcon(Icons.add));
    await tester.pump();

    // Verify that our counter has incremented.
    expect(find.text('0'), findsNothing);
    expect(find.text('1'), findsOneWidget);
  });
}

report.html (normal file, 1091 lines)
File diff suppressed because it is too large