Merge pull request 'fix/backend/veiwpoint-nodes-and-churches' (#38) from fix/backend/veiwpoint-nodes-and-churches into main
Reviewed-on: #38
This commit is contained in: commit 881f6a901d
.gitea/workflows/backend_run_lint.yaml (new file, 34 lines)
@@ -0,0 +1,34 @@
+on:
+  pull_request:
+    branches:
+      - main
+    paths:
+      - backend/**
+
+name: Run linting on the backend code
+
+jobs:
+  build:
+    name: Build
+    runs-on: ubuntu-latest
+    steps:
+
+      - uses: https://gitea.com/actions/checkout@v4
+
+      - name: Install dependencies
+        run: |
+          apt-get update && apt-get install -y python3 python3-pip
+          pip install pipenv
+
+      - name: Install packages
+        run: |
+          ls -la
+          # only install dev-packages
+          pipenv install --categories=dev-packages
+          pipenv run pip freeze
+
+        working-directory: backend
+
+      - name: Run linter
+        run: pipenv run pylint src
+        working-directory: backend
.gitea/workflows/backend_run_test.yaml (new file, 33 lines)
@@ -0,0 +1,33 @@
+on:
+  pull_request:
+    branches:
+      - main
+    paths:
+      - backend/**
+
+name: Run testing on the backend code
+
+jobs:
+  build:
+    name: Build
+    runs-on: ubuntu-latest
+    steps:
+
+      - uses: https://gitea.com/actions/checkout@v4
+
+      - name: Install dependencies
+        run: |
+          apt-get update && apt-get install -y python3 python3-pip
+          pip install pipenv
+
+      - name: Install packages
+        run: |
+          ls -la
+          # install all packages, including dev-packages
+          pipenv install --dev
+          pipenv run pip freeze
+        working-directory: backend
+
+      - name: Run Tests
+        run: pipenv run pytest src
+        working-directory: backend
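Note: the two workflows only re-run what the dev packages from the Pipfile already provide. Below is a minimal sketch of the same checks driven from Python instead of the shell steps above; it is a hypothetical helper, not part of this PR, and assumes `pipenv install --dev` has been run and that it is started from backend/ (e.g. via `pipenv run python run_checks.py`).

# run_checks.py - hypothetical local equivalent of the two CI jobs above.
import sys
import pytest
from pylint.lint import Run

def main() -> int:
    lint = Run(["src"], exit=False)   # same target as `pipenv run pylint src`
    tests = pytest.main(["src"])      # same target as `pipenv run pytest src`
    return 1 if (lint.linter.msg_status != 0 or tests != 0) else 0

if __name__ == "__main__":
    sys.exit(main())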
.vscode/launch.json (vendored, 6 lines changed)
@@ -14,9 +14,9 @@
                 "DEBUG": "true"
             },
             "args": [
-                "--app-dir",
-                "src",
-                "main:app",
+                // "--app-dir",
+                // "src",
+                "src.main:app",
                 "--reload",
             ],
             "jinja": true,
backend/.pylintrc (new file, 2 lines)
@@ -0,0 +1,2 @@
+[MAIN]
+max-line-length=240
backend/Pipfile (modified)
@@ -4,6 +4,11 @@ verify_ssl = true
 name = "pypi"
 
 [dev-packages]
+pylint = "*"
+pytest = "*"
+tomli = "*"
+httpx = "*"
+exceptiongroup = "*"
 
 [packages]
 numpy = "*"
backend/Pipfile.lock (generated, 1038 lines): diff suppressed because the file is too large.
backend/src/__init__.py (new empty file)
backend/src/main.py (modified)
@@ -1,14 +1,14 @@
 import logging
 from fastapi import FastAPI, Query, Body, HTTPException
 
-from structs.landmark import Landmark
-from structs.preferences import Preferences
-from structs.linked_landmarks import LinkedLandmarks
-from structs.trip import Trip
-from utils.landmarks_manager import LandmarkManager
-from utils.optimizer import Optimizer
-from utils.refiner import Refiner
-from persistence import client as cache_client
+from .structs.landmark import Landmark
+from .structs.preferences import Preferences
+from .structs.linked_landmarks import LinkedLandmarks
+from .structs.trip import Trip
+from .utils.landmarks_manager import LandmarkManager
+from .utils.optimizer import Optimizer
+from .utils.refiner import Refiner
+from .persistence import client as cache_client
 
 
 logger = logging.getLogger(__name__)
@@ -1,11 +1,12 @@
 city_bbox_side: 7500 #m
 radius_close_to: 50
-church_coeff: 0.5
+church_coeff: 0.9
 nature_coeff: 1.25
 overall_coeff: 10
 tag_exponent: 1.15
 image_bonus: 10
 viewpoint_bonus: 15
-wikipedia_bonus: 6
+wikipedia_bonus: 4
+name_bonus: 3
 N_important: 40
 pay_bonus: -1
@@ -3,4 +3,4 @@ detour_corridor_width: 300
 average_walking_speed: 4.8
 max_landmarks: 10
 max_landmarks_refiner: 30
-overshoot: 1.8
+overshoot: 1.15
backend/src/persistence.py (modified)
@@ -1,7 +1,6 @@
 from pymemcache.client.base import Client
-from pymemcache import serde
 
-import constants
+from .constants import MEMCACHED_HOST_PATH
 
 
 class DummyClient:
@@ -16,13 +15,12 @@ class DummyClient:
         return self._data[key]
 
 
-if constants.MEMCACHED_HOST_PATH is None:
+if MEMCACHED_HOST_PATH is None:
     client = DummyClient()
 else:
     client = Client(
-        constants.MEMCACHED_HOST_PATH,
-        timeout = 1,
-        allow_unicode_keys = True,
-        encoding = 'utf-8',
-        serde = serde.pickle_serde
+        MEMCACHED_HOST_PATH,
+        timeout=1,
+        allow_unicode_keys=True,
+        encoding='utf-8'
     )
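For context, the module-level `client` selected above is what main.py now imports as `cache_client`. The following is a self-contained sketch of the same fallback pattern (not the project's code): use a real memcached client when a host is configured, otherwise an in-memory stand-in. It assumes both objects expose set()/get(); the diff only shows the DummyClient's get path.

# Hypothetical illustration of the fallback pattern, assuming pymemcache is installed.
from pymemcache.client.base import Client

MEMCACHED_HOST_PATH = None  # assumption: unset, as in a local run without memcached

class DummyClient:
    """Minimal in-memory stand-in mirroring the set/get calls the app needs."""
    _data = {}
    def set(self, key, value, **kwargs):
        self._data[key] = value
    def get(self, key, **kwargs):
        return self._data[key]

client = DummyClient() if MEMCACHED_HOST_PATH is None else Client(
    MEMCACHED_HOST_PATH,
    timeout=1,
    allow_unicode_keys=True,
    encoding='utf-8',
)

client.set('trip_abc123', {'first_landmark_uuid': 'assumed-uuid'})  # cache a computed trip
print(client.get('trip_abc123'))  # pymemcache returns None on a miss; this DummyClient would raise KeyError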
backend/src/structs/linked_landmarks.py (modified)
@@ -1,5 +1,5 @@
 from .landmark import Landmark
-from utils.get_time_separation import get_time
+from ..utils.get_time_separation import get_time
 
 class LinkedLandmarks:
     """
@@ -35,6 +35,7 @@ class LinkedLandmarks:
             time_to_next = get_time(landmark.location, self._landmarks[i + 1].location)
             landmark.time_to_reach_next = time_to_next
             self.total_time += time_to_next
+            self.total_time += landmark.duration
 
         self._landmarks[-1].next_uuid = None
         self._landmarks[-1].time_to_reach_next = 0
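The added line changes what total_time means: it now accumulates the visit duration of each landmark on top of the walking time to the next one. A small illustration with made-up numbers (not from the project):

# Hypothetical numbers, for illustration only.
times_to_next = [7, 12, 9]   # minutes walked between consecutive landmarks
durations = [10, 60, 10]     # minutes spent at the landmarks counted by the loop

walking_only = sum(times_to_next)                  # 28 -> the old total_time
with_visits = sum(times_to_next) + sum(durations)  # 28 + 80 = 108 -> the new total_time
print(walking_only, with_visits)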
@@ -1,12 +1,12 @@
 import logging
 import yaml
 
-from utils.landmarks_manager import LandmarkManager
-from utils.optimizer import Optimizer
-from utils.refiner import Refiner
-from structs.landmark import Landmark
-from structs.linked_landmarks import LinkedLandmarks
-from structs.preferences import Preferences, Preference
+from .utils.landmarks_manager import LandmarkManager
+from .utils.optimizer import Optimizer
+from .utils.refiner import Refiner
+from .structs.landmark import Landmark
+from .structs.linked_landmarks import LinkedLandmarks
+from .structs.preferences import Preferences, Preference
 
 
 logger = logging.getLogger(__name__)
@@ -22,8 +22,8 @@ def test(start_coords: tuple[float, float], finish_coords: tuple[float, float] =
     preferences = Preferences(
         sightseeing=Preference(type='sightseeing', score = 5),
         nature=Preference(type='nature', score = 5),
-        shopping=Preference(type='shopping', score = 5),
-        max_time_minute=15,
+        shopping=Preference(type='shopping', score = 0),
+        max_time_minute=30,
         detour_tolerance_minute=0
     )
 
@@ -74,7 +74,7 @@ def test(start_coords: tuple[float, float], finish_coords: tuple[float, float] =
 # test(tuple((48.8344400, 2.3220540))) # Café Chez César
 # test(tuple((48.8375946, 2.2949904))) # Point random
 # test(tuple((47.377859, 8.540585))) # Zurich HB
-# test(tuple((45.758217, 4.831814))) # Lyon Bellecour
+test(tuple((45.758217, 4.831814))) # Lyon Bellecour
 # test(tuple((48.5848435, 7.7332974))) # Strasbourg Gare
 # test(tuple((48.2067858, 16.3692340))) # Vienne
 test(tuple((48.084588, 7.280405))) # Turckheim
 # test(tuple((48.2432090, 7.3892691))) # Orschwiller
backend/src/tests/__init__.py (new empty file)
backend/src/tests/test_main.py (new file, 141 lines)
@@ -0,0 +1,141 @@
+from fastapi.testclient import TestClient
+from typing import List
+import pytest
+from ..main import app
+from ..structs.landmark import Landmark
+
+
+@pytest.fixture()
+def client():
+    return TestClient(app)
+
+
+# Base test for checking if the API returns correct error code when no preferences are specified.
+def test_new_trip_invalid_prefs(client):
+    response = client.post(
+        "/trip/new",
+        json={
+            "preferences": {},
+            "start": [48.8566, 2.3522]
+        }
+    )
+    assert response.status_code == 422
+
+
+# Test no. 1
+def test_turckheim(client):
+    duration_minutes = 15
+    response = client.post(
+        "/trip/new",
+        json={
+            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5}, "nature": {"type": "nature", "score": 5}, "shopping": {"type": "shopping", "score": 5}, "max_time_minute": duration_minutes, "detour_tolerance_minute": 0},
+            "start": [48.084588, 7.280405]
+        }
+    )
+    result = response.json()
+    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])
+
+    # checks :
+    assert response.status_code == 200  # check for successful planning
+    assert isinstance(landmarks, list)  # check that the return type is a list
+    assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
+    assert len(landmarks) > 2  # check that there is something to visit
+
+
+# Test no. 2
+def test_bellecour(client) :
+    duration_minutes = 35
+    response = client.post(
+        "/trip/new",
+        json={
+            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5}, "nature": {"type": "nature", "score": 5}, "shopping": {"type": "shopping", "score": 5}, "max_time_minute": duration_minutes, "detour_tolerance_minute": 0},
+            "start": [45.7576485, 4.8330241]
+        }
+    )
+    result = response.json()
+    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])
+    osm_ids = landmarks_to_osmid(landmarks)
+
+    # checks :
+    assert response.status_code == 200  # check for successful planning
+    assert duration_minutes*0.8 < int(result['total_time']) < duration_minutes*1.2
+    assert 136200148 in osm_ids  # check for Cathédrale St. Jean in trip
+
+
+
+def landmarks_to_osmid(landmarks: List[Landmark]) -> list :
+    """
+    Convert the list of landmarks into a list containing their osm ids for quick landmark checking.
+
+    Args :
+        landmarks (list): the list of landmarks
+
+    Returns :
+        ids (list) : the list of corresponding OSM ids
+    """
+    ids = []
+    for landmark in landmarks :
+        ids.append(landmark.osm_id)
+
+    return ids
+
+def fetch_landmark(client, landmark_uuid):
+    """
+    Fetch landmark data from the API based on the landmark UUID.
+
+    Args:
+        landmark_uuid (str): The UUID of the landmark.
+
+    Returns:
+        dict: Landmark data fetched from the API.
+    """
+    response = client.get(f"/landmark/{landmark_uuid}")
+
+    if response.status_code != 200:
+        raise Exception(f"Failed to fetch landmark with UUID {landmark_uuid}: {response.status_code}")
+
+    json_data = response.json()
+
+    if "detail" in json_data:
+        raise Exception(json_data["detail"])
+
+    return json_data
+
+
+def load_trip_landmarks(client, first_uuid):
+    """
+    Load all landmarks for a trip using the response from the API.
+
+    Args:
+        first_uuid (str) : The first UUID of the landmark.
+
+    Returns:
+        landmarks (list) : An list containing all landmarks for the trip.
+    """
+    landmarks = []
+    next_uuid = first_uuid
+
+    while next_uuid is not None:
+        landmark_data = fetch_landmark(client, next_uuid)
+        landmarks.append(Landmark(**landmark_data))  # Create Landmark objects
+        next_uuid = landmark_data.get('next_uuid')  # Prepare for the next iteration
+
+    return landmarks
+
+
+
+
+
+# def test_new_trip_single_prefs(client):
+#     response = client.post(
+#         "/trip/new",
+#         json={
+#             "preferences": {"sightseeing": {"type": "sightseeing", "score": 1}, "nature": {"type": "nature", "score": 1}, "shopping": {"type": "shopping", "score": 1}, "max_time_minute": 360, "detour_tolerance_minute": 0},
+#             "start": [48.8566, 2.3522]
+#         }
+#     )
+#     assert response.status_code == 200
+
+
+# def test_new_trip_matches_prefs(client):
+#     # todo
+#     pass
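Both new tests repeat the same 20 % tolerance check on result['total_time']. A possible shared helper is sketched below; it is hypothetical and not part of this PR, just a restatement of the assertion the two tests already make.

# Hypothetical helper mirroring the duration check in test_turckheim and test_bellecour.
def assert_total_time_in_bounds(total_time, requested_minutes, tolerance=0.2):
    lower = requested_minutes * (1 - tolerance)   # e.g. a 15 min request -> 12.0
    upper = requested_minutes * (1 + tolerance)   # e.g. a 15 min request -> 18.0
    assert lower < int(total_time) < upper, (
        f"trip takes {total_time} min, expected between {lower} and {upper}"
    )

# usage inside a test, equivalent to the existing assertion:
# assert_total_time_in_bounds(result['total_time'], duration_minutes)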
backend/src/utils/get_time_separation.py (modified)
@@ -1,9 +1,9 @@
 import yaml
 from math import sin, cos, sqrt, atan2, radians
 
-import constants
+from ..constants import OPTIMIZER_PARAMETERS_PATH
 
-with constants.OPTIMIZER_PARAMETERS_PATH.open('r') as f:
+with OPTIMIZER_PARAMETERS_PATH.open('r') as f:
    parameters = yaml.safe_load(f)
    DETOUR_FACTOR = parameters['detour_factor']
    AVERAGE_WALKING_SPEED = parameters['average_walking_speed']
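The body of get_time is not shown in this diff; the sketch below illustrates how a walking-time estimate is typically built from the two constants loaded above (haversine distance inflated by a detour factor). Names, the exact formula, and the detour value are assumptions, not the project's code.

# Hedged sketch of a walking-time estimate; not the project's implementation.
from math import sin, cos, sqrt, atan2, radians

DETOUR_FACTOR = 1.4            # assumed value; the real one comes from the optimizer parameters file
AVERAGE_WALKING_SPEED = 4.8    # km/h, matching the parameter shown elsewhere in this PR

def walking_time_minutes(p1: tuple[float, float], p2: tuple[float, float]) -> int:
    """Great-circle distance inflated by a detour factor, converted to minutes."""
    lat1, lon1, lat2, lon2 = radians(p1[0]), radians(p1[1]), radians(p2[0]), radians(p2[1])
    a = sin((lat2 - lat1) / 2)**2 + cos(lat1) * cos(lat2) * sin((lon2 - lon1) / 2)**2
    distance_km = 6371 * 2 * atan2(sqrt(a), sqrt(1 - a))
    walk_km = distance_km * DETOUR_FACTOR
    return round(walk_km / AVERAGE_WALKING_SPEED * 60)

print(walking_time_minutes((45.7576, 4.8330), (45.7640, 4.8357)))  # -> 13, two points ~750 m apart in Lyon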
backend/src/utils/landmarks_manager.py (modified)
@@ -5,10 +5,11 @@ import logging
 from OSMPythonTools.overpass import Overpass, overpassQueryBuilder
 from OSMPythonTools.cachingStrategy import CachingStrategy, JSON
 
-from structs.preferences import Preferences
-from structs.landmark import Landmark
+from ..structs.preferences import Preferences
+from ..structs.landmark import Landmark
 from .take_most_important import take_most_important
-import constants
+
+from ..constants import AMENITY_SELECTORS_PATH, LANDMARK_PARAMETERS_PATH, OPTIMIZER_PARAMETERS_PATH, OSM_CACHE_DIR
 
 # silence the overpass logger
 logging.getLogger('OSMPythonTools').setLevel(level=logging.CRITICAL)
@@ -27,10 +28,10 @@ class LandmarkManager:
 
     def __init__(self) -> None:
 
-        with constants.AMENITY_SELECTORS_PATH.open('r') as f:
+        with AMENITY_SELECTORS_PATH.open('r') as f:
            self.amenity_selectors = yaml.safe_load(f)
 
-        with constants.LANDMARK_PARAMETERS_PATH.open('r') as f:
+        with LANDMARK_PARAMETERS_PATH.open('r') as f:
            parameters = yaml.safe_load(f)
            self.max_bbox_side = parameters['city_bbox_side']
            self.radius_close_to = parameters['radius_close_to']
@@ -39,18 +40,19 @@ class LandmarkManager:
         self.overall_coeff = parameters['overall_coeff']
         self.tag_exponent = parameters['tag_exponent']
         self.image_bonus = parameters['image_bonus']
+        self.name_bonus = parameters['name_bonus']
         self.wikipedia_bonus = parameters['wikipedia_bonus']
         self.viewpoint_bonus = parameters['viewpoint_bonus']
         self.pay_bonus = parameters['pay_bonus']
         self.N_important = parameters['N_important']
 
-        with constants.OPTIMIZER_PARAMETERS_PATH.open('r') as f:
+        with OPTIMIZER_PARAMETERS_PATH.open('r') as f:
            parameters = yaml.safe_load(f)
            self.walking_speed = parameters['average_walking_speed']
            self.detour_factor = parameters['detour_factor']
 
         self.overpass = Overpass()
-        CachingStrategy.use(JSON, cacheDir=constants.OSM_CACHE_DIR)
+        CachingStrategy.use(JSON, cacheDir=OSM_CACHE_DIR)
 
 
     def generate_landmarks_list(self, center_coordinates: tuple[float, float], preferences: Preferences) -> tuple[list[Landmark], list[Landmark]]:
@@ -202,18 +204,29 @@ class LandmarkManager:
        """
        return_list = []
 
+       if landmarktype == 'nature' : query_conditions = []
+       else : query_conditions = ['count_tags()>5']
+
        # caution, when applying a list of selectors, overpass will search for elements that match ALL selectors simultaneously
        # we need to split the selectors into separate queries and merge the results
        for sel in dict_to_selector_list(amenity_selector):
            self.logger.debug(f"Current selector: {sel}")
+
+           query_conditions = ['count_tags()>5']
+           element_types = ['way', 'relation']
+
+           if 'viewpoint' in sel :
+               query_conditions = []
+               element_types.append('node')
+
            query = overpassQueryBuilder(
                bbox = bbox,
-               elementType = ['way', 'relation'],
+               elementType = element_types,
                # selector can in principle be a list already,
                # but it generates the intersection of the queries
                # we want the union
                selector = sel,
-               conditions = ['count_tags()>5'],
+               conditions = query_conditions, # except for nature....
                includeCenter = True,
                out = 'body'
            )
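The loop above issues one Overpass query per selector (a union of results rather than the intersection a single multi-selector query would give), and for viewpoint selectors it also asks for bare nodes and drops the tag-count filter. The standalone sketch below shows the same per-selector pattern; it is not the project's code, and the bbox and selector strings are made-up examples.

# Hedged sketch of the per-selector query pattern, using the same library calls as the diff.
from OSMPythonTools.overpass import Overpass, overpassQueryBuilder

overpass = Overpass()
bbox = [45.72, 4.78, 45.79, 4.88]          # south, west, north, east around Lyon (assumed)
selectors = ['"tourism"="museum"', '"tourism"="viewpoint"']

for sel in selectors:
    query_conditions = ['count_tags()>5']
    element_types = ['way', 'relation']
    if 'viewpoint' in sel:
        query_conditions = []              # viewpoints are often sparsely tagged
        element_types.append('node')       # many viewpoints exist only as nodes
    query = overpassQueryBuilder(
        bbox=bbox,
        elementType=element_types,
        selector=sel,
        conditions=query_conditions,
        includeCenter=True,
        out='body',
    )
    # results from the separate queries are merged afterwards (union of selectors)
    # elements = overpass.query(query).elements()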
@@ -229,18 +242,23 @@ class LandmarkManager:
 
            name = elem.tag('name')
            location = (elem.centerLat(), elem.centerLon())
-           osm_type = elem.type() # Add type: 'way' or 'relation'
-           osm_id = elem.id() # Add OSM id
 
            # TODO: exclude these from the get go
-           # skip if unprecise location
+           # handle unprecise and no-name locations
            if name is None or location[0] is None:
-               continue
+               if osm_type == 'node' and 'viewpoint' in elem.tags().values():
+                   name = 'Viewpoint'
+                   name_en = 'Viewpoint'
+                   location = (elem.lat(), elem.lon())
+               else :
+                   continue
 
            # skip if part of another building
            if 'building:part' in elem.tags().keys() and elem.tag('building:part') == 'yes':
                continue
 
+           osm_type = elem.type() # Add type: 'way' or 'relation'
+           osm_id = elem.id() # Add OSM id
            elem_type = landmarktype # Add the landmark type as 'sightseeing,
            n_tags = len(elem.tags().keys()) # Add number of tags
            score = n_tags**self.tag_exponent # Add score
@@ -248,59 +266,66 @@
            image_url = None
            name_en = None
 
-           # Adjust scoring
+           # Adjust scoring, browse through tag keys
            skip = False
-           for tag in elem.tags().keys():
-               if "pay" in tag:
+           for tag_key in elem.tags().keys():
+               if "pay" in tag_key:
                    # payment options are misleading and should not count for the scoring.
                    score += self.pay_bonus
 
-               if "disused" in tag:
+               if "disused" in tag_key:
                    # skip disused amenities
                    skip = True
                    break
 
-               if "wiki" in tag:
+               if "name" in tag_key :
+                   score += self.name_bonus
+
+               if "wiki" in tag_key:
                    # wikipedia entries count more
                    score += self.wikipedia_bonus
 
-               if "viewpoint" in tag:
-                   # viewpoints must count more
-                   score += self.viewpoint_bonus
-                   duration = 10
-
-               if "image" in tag:
+               if "image" in tag_key:
                    # images must count more
                    score += self.image_bonus
 
               if elem_type != "nature":
-                   if "leisure" in tag and elem.tag('leisure') == "park":
+                   if "leisure" in tag_key and elem.tag('leisure') == "park":
                        elem_type = "nature"
 
               if landmarktype != "shopping":
-                   if "shop" in tag:
+                   if "shop" in tag_key:
                        skip = True
                        break
 
-                   if tag == "building" and elem.tag('building') in ['retail', 'supermarket', 'parking']:
+                   if tag_key == "building" and elem.tag('building') in ['retail', 'supermarket', 'parking']:
                        skip = True
                        break
 
               # Extract image, website and english name
-               if tag in ['website', 'contact:website']:
-                   website_url = elem.tag(tag)
-               if tag == 'image':
+               if tag_key in ['website', 'contact:website']:
+                   website_url = elem.tag(tag_key)
+               if tag_key == 'image':
                    image_url = elem.tag('image')
-               if tag =='name:en':
+               if tag_key =='name:en':
                    name_en = elem.tag('name:en')
 
            if skip:
                continue
 
            # Don't visit random apartments
            if 'apartments' in elem.tags().values():
                continue
 
            score = score_function(score)
            if "place_of_worship" in elem.tags().values():
                score = score * self.church_coeff
+               duration = 10
+
+           if 'viewpoint' in elem.tags().values() :
+               # viewpoints must count more
+               score += self.viewpoint_bonus
+               duration = 10
 
            elif "museum" in elem.tags().values() or "aquarium" in elem.tags().values() or "planetarium" in elem.tags().values():
                duration = 60
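With the parameter changes in this PR (tag_exponent 1.15, name_bonus 3, wikipedia_bonus 4, image_bonus 10, viewpoint_bonus 15, church_coeff raised from 0.5 to 0.9), a rough worked example of how a landmark's score is assembled follows. The element and its tag count are hypothetical, and score_function is not shown in this hunk, so it is left symbolic.

# Worked illustration with the parameter values from this PR; not the project's code.
tag_exponent = 1.15
name_bonus, wikipedia_bonus, image_bonus = 3, 4, 10
viewpoint_bonus, church_coeff = 15, 0.9

n_tags = 20
score = n_tags ** tag_exponent          # ~31.3 base score from the number of tags
score += name_bonus                     # has a name tag
score += wikipedia_bonus                # has a wikipedia/wikidata tag
score += image_bonus                    # has an image tag
# score = score_function(score)         # project-specific rescaling, applied before the adjustments below

score_church = score * church_coeff         # a place_of_worship is now damped by 0.9 instead of 0.5
score_viewpoint = score + viewpoint_bonus   # a viewpoint gets a flat bonus and a 10 min visit duration
print(round(score, 1), round(score_church, 1), round(score_viewpoint, 1))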
backend/src/utils/optimizer.py (modified)
@@ -4,9 +4,9 @@ import numpy as np
 from scipy.optimize import linprog
 from collections import defaultdict, deque
 
-from structs.landmark import Landmark
+from ..structs.landmark import Landmark
 from .get_time_separation import get_time
-import constants
+from ..constants import OPTIMIZER_PARAMETERS_PATH
 
 
 
@@ -26,7 +26,7 @@ class Optimizer:
     def __init__(self) :
 
         # load parameters from file
-        with constants.OPTIMIZER_PARAMETERS_PATH.open('r') as f:
+        with OPTIMIZER_PARAMETERS_PATH.open('r') as f:
            parameters = yaml.safe_load(f)
            self.detour_factor = parameters['detour_factor']
            self.average_walking_speed = parameters['average_walking_speed']
backend/src/utils/refiner.py (modified)
@@ -4,10 +4,10 @@ from shapely import buffer, LineString, Point, Polygon, MultiPoint, concave_hull
 from math import pi
 from typing import List
 
-from structs.landmark import Landmark
+from ..structs.landmark import Landmark
 from . import take_most_important, get_time_separation
 from .optimizer import Optimizer
-import constants
+from ..constants import OPTIMIZER_PARAMETERS_PATH
 
 
 
@@ -25,7 +25,7 @@ class Refiner :
         self.optimizer = optimizer
 
         # load parameters from file
-        with constants.OPTIMIZER_PARAMETERS_PATH.open('r') as f:
+        with OPTIMIZER_PARAMETERS_PATH.open('r') as f:
            parameters = yaml.safe_load(f)
            self.detour_factor = parameters['detour_factor']
            self.detour_corridor_width = parameters['detour_corridor_width']
backend/src/utils/take_most_important.py (modified)
@@ -1,4 +1,4 @@
-from structs.landmark import Landmark
+from ..structs.landmark import Landmark
 
 def take_most_important(landmarks: list[Landmark], n_important) -> list[Landmark]:
     """