This commit is contained in:
Helldragon67 2024-07-05 17:21:47 +02:00
parent bec1827891
commit 006b80018a
9 changed files with 396 additions and 133 deletions

28
.vscode/launch.json vendored
View File

@ -1,28 +0,0 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "frontend",
"cwd": "frontend",
"request": "launch",
"type": "dart"
},
{
"name": "frontend (profile mode)",
"cwd": "frontend",
"request": "launch",
"type": "dart",
"flutterMode": "profile"
},
{
"name": "frontend (release mode)",
"cwd": "frontend",
"request": "launch",
"type": "dart",
"flutterMode": "release"
},
]
}

View File

@ -10,5 +10,7 @@ fastapi = "*"
osmpythontools = "*"
pydantic = "*"
shapely = "*"
networkx = "*"
geopy = "*"
[dev-packages]

82
backend/Pipfile.lock generated
View File

@ -1,7 +1,7 @@
{
"_meta": {
"hash": {
"sha256": "0f88c01cde3be9a6332acec33fa0ccf13b6e122a6df8ee5cfefa52ba1e98034f"
"sha256": "435b1baa4287d1a344b86a7aff2f9616ccc7a1a419ed9f01e7efc270ce968157"
},
"pipfile-spec": 6,
"requires": {},
@ -201,6 +201,14 @@
"markers": "python_version >= '3.8'",
"version": "==4.53.0"
},
"geographiclib": {
"hashes": [
"sha256:6b7225248e45ff7edcee32becc4e0a1504c606ac5ee163a5656d482e0cd38734",
"sha256:f7f41c85dc3e1c2d3d935ec86660dc3b2c848c83e17f9a9e51ba9d5146a15859"
],
"markers": "python_version >= '3.7'",
"version": "==2.0"
},
"geojson": {
"hashes": [
"sha256:58a7fa40727ea058efc28b0e9ff0099eadf6d0965e04690830208d3ef571adac",
@ -209,6 +217,15 @@
"markers": "python_version >= '3.7'",
"version": "==3.1.0"
},
"geopy": {
"hashes": [
"sha256:50283d8e7ad07d89be5cb027338c6365a32044df3ae2556ad3f52f4840b3d0d1",
"sha256:ae8b4bc5c1131820f4d75fce9d4aaaca0c85189b3aa5d64c3dcaf5e3b7b882a7"
],
"index": "pypi",
"markers": "python_version >= '3.7'",
"version": "==2.4.1"
},
"h11": {
"hashes": [
"sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d",
@ -665,6 +682,15 @@
"markers": "python_version >= '3.7'",
"version": "==0.1.2"
},
"networkx": {
"hashes": [
"sha256:0c127d8b2f4865f59ae9cb8aafcd60b5c70f3241ebd66f7defad7c4ab90126c9",
"sha256:28575580c6ebdaf4505b22c6256a2b9de86b316dc63ba9e93abde3d78dfdbcf2"
],
"index": "pypi",
"markers": "python_version >= '3.10'",
"version": "==3.3"
},
"numpy": {
"hashes": [
"sha256:04494f6ec467ccb5369d1808570ae55f6ed9b5809d7f035059000a37b8d7e86f",
@ -1100,35 +1126,35 @@
},
"scipy": {
"hashes": [
"sha256:017367484ce5498445aade74b1d5ab377acdc65e27095155e448c88497755a5d",
"sha256:095a87a0312b08dfd6a6155cbbd310a8c51800fc931b8c0b84003014b874ed3c",
"sha256:20335853b85e9a49ff7572ab453794298bcf0354d8068c5f6775a0eabf350aca",
"sha256:27e52b09c0d3a1d5b63e1105f24177e544a222b43611aaf5bc44d4a0979e32f9",
"sha256:2831f0dc9c5ea9edd6e51e6e769b655f08ec6db6e2e10f86ef39bd32eb11da54",
"sha256:2ac65fb503dad64218c228e2dc2d0a0193f7904747db43014645ae139c8fad16",
"sha256:392e4ec766654852c25ebad4f64e4e584cf19820b980bc04960bca0b0cd6eaa2",
"sha256:436bbb42a94a8aeef855d755ce5a465479c721e9d684de76bf61a62e7c2b81d5",
"sha256:45484bee6d65633752c490404513b9ef02475b4284c4cfab0ef946def50b3f59",
"sha256:54f430b00f0133e2224c3ba42b805bfd0086fe488835effa33fa291561932326",
"sha256:5713f62f781eebd8d597eb3f88b8bf9274e79eeabf63afb4a737abc6c84ad37b",
"sha256:5d72782f39716b2b3509cd7c33cdc08c96f2f4d2b06d51e52fb45a19ca0c86a1",
"sha256:637e98dcf185ba7f8e663e122ebf908c4702420477ae52a04f9908707456ba4d",
"sha256:8335549ebbca860c52bf3d02f80784e91a004b71b059e3eea9678ba994796a24",
"sha256:949ae67db5fa78a86e8fa644b9a6b07252f449dcf74247108c50e1d20d2b4627",
"sha256:a014c2b3697bde71724244f63de2476925596c24285c7a637364761f8710891c",
"sha256:a78b4b3345f1b6f68a763c6e25c0c9a23a9fd0f39f5f3d200efe8feda560a5fa",
"sha256:cdd7dacfb95fea358916410ec61bbc20440f7860333aee6d882bb8046264e949",
"sha256:cfa31f1def5c819b19ecc3a8b52d28ffdcc7ed52bb20c9a7589669dd3c250989",
"sha256:d533654b7d221a6a97304ab63c41c96473ff04459e404b83275b60aa8f4b7004",
"sha256:d605e9c23906d1994f55ace80e0125c587f96c020037ea6aa98d01b4bd2e222f",
"sha256:de3ade0e53bc1f21358aa74ff4830235d716211d7d077e340c7349bc3542e884",
"sha256:e89369d27f9e7b0884ae559a3a956e77c02114cc60a6058b4e5011572eea9299",
"sha256:eccfa1906eacc02de42d70ef4aecea45415f5be17e72b61bafcfd329bdc52e94",
"sha256:f26264b282b9da0952a024ae34710c2aff7d27480ee91a2e82b7b7073c24722f"
"sha256:076c27284c768b84a45dcf2e914d4000aac537da74236a0d45d82c6fa4b7b3c0",
"sha256:07e179dc0205a50721022344fb85074f772eadbda1e1b3eecdc483f8033709b7",
"sha256:176c6f0d0470a32f1b2efaf40c3d37a24876cebf447498a4cefb947a79c21e9d",
"sha256:42470ea0195336df319741e230626b6225a740fd9dce9642ca13e98f667047c0",
"sha256:4c4161597c75043f7154238ef419c29a64ac4a7c889d588ea77690ac4d0d9b20",
"sha256:5b083c8940028bb7e0b4172acafda6df762da1927b9091f9611b0bcd8676f2bc",
"sha256:64b2ff514a98cf2bb734a9f90d32dc89dc6ad4a4a36a312cd0d6327170339eb0",
"sha256:65df4da3c12a2bb9ad52b86b4dcf46813e869afb006e58be0f516bc370165159",
"sha256:687af0a35462402dd851726295c1a5ae5f987bd6e9026f52e9505994e2f84ef6",
"sha256:6a9c9a9b226d9a21e0a208bdb024c3982932e43811b62d202aaf1bb59af264b1",
"sha256:6d056a8709ccda6cf36cdd2eac597d13bc03dba38360f418560a93050c76a16e",
"sha256:7d3da42fbbbb860211a811782504f38ae7aaec9de8764a9bef6b262de7a2b50f",
"sha256:7e911933d54ead4d557c02402710c2396529540b81dd554fc1ba270eb7308484",
"sha256:94c164a9e2498e68308e6e148646e486d979f7fcdb8b4cf34b5441894bdb9caf",
"sha256:9e3154691b9f7ed73778d746da2df67a19d046a6c8087c8b385bc4cdb2cfca74",
"sha256:9eee2989868e274aae26125345584254d97c56194c072ed96cb433f32f692ed8",
"sha256:a01cc03bcdc777c9da3cfdcc74b5a75caffb48a6c39c8450a9a05f82c4250a14",
"sha256:a7d46c3e0aea5c064e734c3eac5cf9eb1f8c4ceee756262f2c7327c4c2691c86",
"sha256:ad36af9626d27a4326c8e884917b7ec321d8a1841cd6dacc67d2a9e90c2f0359",
"sha256:b5923f48cb840380f9854339176ef21763118a7300a88203ccd0bdd26e58527b",
"sha256:bbc0471b5f22c11c389075d091d3885693fd3f5e9a54ce051b46308bc787e5d4",
"sha256:bff2438ea1330e06e53c424893ec0072640dac00f29c6a43a575cbae4c99b2b9",
"sha256:c40003d880f39c11c1edbae8144e3813904b10514cd3d3d00c277ae996488cdb",
"sha256:d91db2c41dd6c20646af280355d41dfa1ec7eead235642178bd57635a3f82209",
"sha256:f0a50da861a7ec4573b7c716b2ebdcdf142b66b756a0d392c236ae568b3a93fb"
],
"index": "pypi",
"markers": "python_version >= '3.9'",
"version": "==1.13.1"
"markers": "python_version >= '3.10'",
"version": "==1.14.0"
},
"shapely": {
"hashes": [

View File

@ -344,6 +344,8 @@ def link_list_simple(ordered_visit: List[Landmark])-> List[Landmark] :
elem.next_uuid = next.uuid
d = get_distance(elem.location, next.location, detour_factor, speed)[1]
elem.time_to_reach_next = d
if elem.name not in ['start', 'finish'] :
elem.must_do = True
L.append(elem)
j += 1
total_dist += d

260
backend/src/optimizer_v2.py Normal file
View File

@ -0,0 +1,260 @@
import networkx as nx
from typing import List, Tuple
from geopy.distance import geodesic
from scipy.spatial import KDTree
import numpy as np
from itertools import combinations
from structs.landmarks import Landmark
from optimizer import print_res, link_list_simple
import os
import json
import heapq
# Define the get_distance function
def get_distance(loc1: Tuple[float, float], loc2: Tuple[float, float], detour: float, speed: float) -> Tuple[float, float]:
    """Return (geodesic distance in meters, estimated travel cost).

    Placeholder implementation (per original note): the travel cost is the
    straight-line geodesic distance scaled by the detour factor and divided
    by the walking speed.
    """
    straight_line = geodesic(loc1, loc2).meters
    travel_cost = straight_line * detour / speed
    return straight_line, travel_cost
# Heuristic function: distance to the goal
def heuristic(loc1: Tuple[float, float], loc2: Tuple[float, float]) -> float:
    """A* heuristic: straight-line geodesic distance (meters) to the goal."""
    meters_to_goal = geodesic(loc1, loc2).meters
    return meters_to_goal
def a_star(G, start_id, end_id, max_walking_time, must_do_nodes, max_landmarks, detour, speed):
    """Best-first search over G from start_id to end_id.

    Explores the whole priority queue (it does not stop at the first goal
    hit) and keeps the path with the highest total node 'weight'
    (attractiveness) among complete paths that visit every must_do node,
    stay within max_walking_time, and use at most max_landmarks + 1 nodes.

    Returns:
        (best_path, max_attractiveness) — (None, 0) if no valid path found.
    """
    # Heap entries: (estimated_cost, node, cost_so_far, path_list, visited_set).
    open_set = []
    heapq.heappush(open_set, (0, start_id, 0, [start_id], set([start_id])))
    best_path = None
    max_attractiveness = 0
    visited_must_do = set()
    while open_set:
        _, current_node, current_length, path, visited = heapq.heappop(open_set)
        # If current node is a must_do node and hasn't been visited yet, mark it as visited
        # NOTE(review): visited_must_do accumulates across ALL popped states,
        # not per-path — the final gate below can pass even though no single
        # path covered every must_do node; best_path itself is validated by
        # the per-path check just below, so that gate looks redundant/fragile.
        if current_node in must_do_nodes and current_node not in visited_must_do:
            visited_must_do.add(current_node)
        # Check if path includes all must_do nodes and reaches the end
        if current_node == end_id and all(node in visited for node in must_do_nodes):
            attractiveness = sum(G.nodes[node]['weight'] for node in path)
            if attractiveness > max_attractiveness:
                best_path = path
                max_attractiveness = attractiveness
            continue
        # Prune branches that already use more landmarks than allowed.
        if len(path) > max_landmarks + 1:
            continue
        for neighbor in G.neighbors(current_node):
            if neighbor not in visited:
                # Edge cost recomputed from coordinates; speed * 16.6666
                # presumably converts km/h to m/min so this is minutes — TODO confirm.
                distance = int(geodesic(G.nodes[current_node]['pos'], G.nodes[neighbor]['pos']).meters * detour / (speed * 16.6666))
                if current_length + distance <= max_walking_time:
                    new_path = path + [neighbor]
                    new_visited = visited | {neighbor}
                    # f = g (cost so far) + h (straight-line estimate to the goal).
                    estimated_cost = current_length + distance + heuristic(G.nodes[neighbor]['pos'], G.nodes[end_id]['pos'])
                    heapq.heappush(open_set, (estimated_cost, neighbor, current_length + distance, new_path, new_visited))
    # Check if all must_do_nodes have been visited
    if all(node in visited_must_do for node in must_do_nodes):
        return best_path, max_attractiveness
    else:
        return None, 0
def dfs(G, current_node, end_id, current_length, path, visited, max_walking_time, must_do_nodes, max_landmarks, detour, speed):
    """Exhaustive depth-first search for the most attractive valid path.

    A path is valid when it ends at end_id, contains every must_do node,
    stays within max_walking_time and uses at most max_landmarks + 1 nodes.

    Returns:
        (best_path, attractiveness) — (None, 0) when no valid path exists
        below this branch.
    """
    # Complete path: reached the goal and covered every mandatory node.
    if current_node == end_id and all(node in path for node in must_do_nodes):
        return path, sum(G.nodes[node]['weight'] for node in path)
    # Prune: this branch already uses more landmarks than allowed.
    if len(path) > max_landmarks + 1:
        return None, 0
    best_route = None
    best_score = 0
    for nxt in G.neighbors(current_node):
        if nxt in visited:
            continue
        # Step cost in the same units as max_walking_time (speed * 16.6666
        # converts the geodesic meters into walking minutes).
        step = int(geodesic(G.nodes[current_node]['pos'], G.nodes[nxt]['pos']).meters * detour / (speed * 16.6666))
        if current_length + step > max_walking_time:
            continue
        candidate, score = dfs(G, nxt, end_id, current_length + step, path + [nxt], visited | {nxt}, max_walking_time, must_do_nodes, max_landmarks, detour, speed)
        if score > best_score:
            best_route = candidate
            best_score = score
    return best_route, best_score
def find_path(G, start_id, finish_id, max_walking_time, must_do_nodes, max_landmarks) -> List[str]:
    """Run A* on G and return the best node-id sequence ([] when none found).

    Loads the detour factor and average walking speed from the optimizer
    parameters file next to this module.
    """
    params_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'parameters', 'optimizer.params')
    with open(params_path, "r") as f:
        parameters = json.load(f)
    detour = parameters['detour factor']
    speed = parameters['average walking speed']
    # (A DFS fallback for the start == finish case was sketched here in the
    # original and is disabled; A* is used unconditionally.)
    best_path, _ = a_star(G, start_id, finish_id, max_walking_time, must_do_nodes, max_landmarks, detour, speed)
    return best_path if best_path else []
# Function to dynamically adjust theta
def adjust_theta(num_nodes, theta_opt, target_ratio=2.0):
    """Scale the graph-threshold theta by the node count.

    Divides theta_opt by num_nodes ** (1 / target_ratio), aiming for the
    given target ratio of edges to nodes in the generated graph.
    """
    exponent = 1 / target_ratio
    return theta_opt / num_nodes ** exponent
# Create a graph using NetworkX and generate the path
def generate_path(landmarks: List[Landmark], max_walking_time: float, max_landmarks: int, theta_opt = 0.0008) -> List[List[Landmark]]:
    """Build a geographical-threshold graph over the landmarks and return the
    best tour between the 'start' and 'finish' landmarks found by find_path.

    NOTE(review): assumes `landmarks` contains entries named 'start' and
    'finish'; otherwise start_id / end_id are unbound and the find_path call
    raises — confirm callers guarantee this.
    """
    landmap = {}
    pos_dict = {}
    weight_dict = {}
    # Add nodes to the graph with attractiveness
    for i, landmark in enumerate(landmarks):
        #G.nodes[i]['attractiveness'] = landmark.attractiveness
        pos_dict[i] = landmark.location
        weight_dict[i] = landmark.attractiveness
        #G.nodes[i]['pos'] = landmark.location
        landmap[i] = landmark
        if landmark.name == 'start' :
            start_id = i
        elif landmark.name == 'finish' :
            end_id = i
    # Lambda version of get_distance
    # (+0.001 avoids a zero metric for coincident points)
    get_dist = lambda loc1, loc2: geodesic(loc1, loc2).meters + 0.001 #.meters*detour/speed +0.0000001
    theta = adjust_theta(len(landmarks), theta_opt)
    G = nx.geographical_threshold_graph(n=len(landmarks), theta=theta, pos=pos_dict, weight=weight_dict, metric=get_dist)
    # good theta : 0.000125
    # Define must_do nodes
    must_do_nodes = [i for i in G.nodes() if landmap[i].must_do]
    # Guarantee must_do nodes are mutually connected so a path can link them.
    for node1, node2 in combinations(must_do_nodes, 2):
        if not G.has_edge(node1, node2):
            distance = geodesic(G.nodes[node1]['pos'], G.nodes[node2]['pos']).meters + 0.001
            G.add_edge(node1, node2, weight=distance)
    print(f"Graph with {G.number_of_nodes()} nodes")
    print(f"Graph with {G.number_of_edges()} edges")
    print("Computing path...")
    # Find the valid path using the greedy algorithm
    valid_path = find_path(G, start_id, end_id, max_walking_time, must_do_nodes, max_landmarks)
    if not valid_path:
        return []  # No valid path found
    # Map node ids back to Landmark objects and link them into a tour.
    lis = [landmap[id] for id in valid_path]
    lis, tot_dist = link_list_simple(lis)
    print_res(lis, len(landmarks))
    return lis
# Create a graph using NetworkX and generate the path
def generate_path2(landmarks: List[Landmark], max_walking_time: float, max_landmarks: int) -> List[List[Landmark]]:
    """Build a k-nearest-neighbour walking graph over the landmarks and
    return the best tour between 'start' and 'finish' found by find_path.

    Args:
        landmarks: candidates; must contain one landmark named 'start' and
            one named 'finish'.
        max_walking_time: walking-time budget forwarded to the path search.
        max_landmarks: maximum number of landmarks allowed on the tour.

    Returns:
        Ordered, linked list of Landmarks — empty list when no path exists.

    Raises:
        ValueError: if the 'start' or 'finish' landmark is missing (the
            original would have died later with an UnboundLocalError).
    """
    # Read the parameters from the file
    with open(os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f:
        parameters = json.loads(f.read())
    detour = parameters['detour factor']
    speed = parameters['average walking speed']

    landmap = {}
    pos_dict = {}
    weight_dict = {}
    G = nx.Graph()

    # Add nodes to the graph with attractiveness
    start_id = None
    finish_id = None
    for i, landmark in enumerate(landmarks):
        pos_dict[i] = landmark.location
        weight_dict[i] = landmark.attractiveness
        landmap[i] = landmark
        G.add_node(i, pos=landmark.location, weight=landmark.attractiveness)
        if landmark.name == 'start':
            start_id = i
        elif landmark.name == 'finish':
            finish_id = i
    if start_id is None or finish_id is None:
        raise ValueError("landmarks must include one 'start' and one 'finish' entry")

    # Connect each node to its k nearest neighbours via a KD-tree on raw
    # coordinates (Euclidean in lat/lon space — an approximation).
    coords = np.array(list(pos_dict.values()))
    kdtree = KDTree(coords)
    node_ids = list(pos_dict.keys())  # hoisted out of the loop (was O(n) per edge)
    k = min(4, len(node_ids) - 1)  # cannot request more neighbours than other nodes exist
    if k > 0:
        for node, coord in pos_dict.items():
            # k+1 because the closest neighbor is the node itself;
            # atleast_1d guards the scalar result returned when k+1 == 1.
            indices = np.atleast_1d(kdtree.query(coord, k + 1)[1])
            for idx in indices[1:]:  # skip the first one (itself)
                neighbor = node_ids[idx]
                # BUG FIX: get_distance returns (meters, travel_time); the
                # original stored the whole tuple as the edge weight. Use the
                # travel-time component, consistent with the detour/speed args.
                time_cost = get_distance(coord, pos_dict[neighbor], detour, speed)[1]
                G.add_edge(node, neighbor, weight=time_cost)

    # Define must_do nodes
    must_do_nodes = [i for i in G.nodes() if landmap[i].must_do]

    # Add special edges between must_do nodes so a path can always link them.
    # (combinations() of an empty list is empty, so no guard is needed.)
    for node1, node2 in combinations(must_do_nodes, 2):
        if not G.has_edge(node1, node2):
            time_cost = get_distance(G.nodes[node1]['pos'], G.nodes[node2]['pos'], detour, speed)[1]
            G.add_edge(node1, node2, weight=time_cost)

    print(f"Graph with {G.number_of_nodes()} nodes")
    print(f"Graph with {G.number_of_edges()} edges")
    print("Computing path...")

    # Find the valid path using the greedy algorithm
    valid_path = find_path(G, start_id, finish_id, max_walking_time, must_do_nodes, max_landmarks)
    if not valid_path:
        return []  # No valid path found

    # Map node ids back to Landmark objects and link them into a tour.
    lis = [landmap[id] for id in valid_path]
    lis, tot_dist = link_list_simple(lis)
    print_res(lis, len(landmarks))
    return lis
def correct_path(tour: List[Landmark]) -> List[Landmark] :
    """Intended to re-route a tour along a shortest path between its first
    and last landmarks.

    NOTE(review): this function appears broken as written — see inline notes;
    it will raise at runtime and needs a redesign before use.
    """
    # Collect the tour coordinates in visiting order.
    coords = []
    for landmark in tour :
        coords.append(landmark.location)
    # NOTE(review): nx.circulant_graph expects `offsets` (a list of int
    # offsets) and `create_using` must be a graph class/instance, not a list
    # of coordinates — this call raises. Confirm intended graph construction.
    G = nx.circulant_graph(n=len(tour), create_using=coords)
    # NOTE(review): circulant_graph nodes are integers 0..n-1, so looking up
    # source/target by location will fail; the return type is also a list of
    # node ids, not Landmarks, contradicting the annotation.
    path = nx.shortest_path(G=G, source=tour[0].location, target=tour[-1].location)
    return path

View File

@ -1,7 +1,7 @@
{
"city bbox side" : 10,
"city bbox side" : 3,
"radius close to" : 27.5,
"church coeff" : 0.6,
"church coeff" : 0.7,
"park coeff" : 1.5,
"tag coeff" : 100,
"N important" : 40

View File

@ -1,5 +1,5 @@
{
"detour factor" : 1.4,
"average walking speed" : 4.8,
"max landmarks" : 10
"max landmarks" : 8
}

View File

@ -10,6 +10,7 @@ from math import pi
from structs.landmarks import Landmark
from landmarks_manager import take_most_important
from optimizer import solve_optimization, link_list_simple, print_res, get_distance
from optimizer_v2 import generate_path, generate_path2
def create_corridor(landmarks: List[Landmark], width: float) :
@ -62,65 +63,6 @@ def rearrange(landmarks: List[Landmark]) -> List[Landmark]:
return landmarks
"""
def find_shortest_path(landmarks: List[Landmark]) -> List[Landmark]:
# Read from data
with open (os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f :
parameters = json.loads(f.read())
detour = parameters['detour factor']
speed = parameters['average walking speed']
# Step 1: Build the graph
graph = defaultdict(list)
for i in range(len(landmarks)):
for j in range(len(landmarks)):
if i != j:
distance = get_distance(landmarks[i].location, landmarks[j].location, detour, speed)[1]
graph[i].append((distance, j))
# Step 2: Dijkstra's algorithm to find the shortest path from start to finish
start_idx = next(i for i, lm in enumerate(landmarks) if lm.name == 'start')
finish_idx = next(i for i, lm in enumerate(landmarks) if lm.name == 'finish')
distances = {i: float('inf') for i in range(len(landmarks))}
previous_nodes = {i: None for i in range(len(landmarks))}
distances[start_idx] = 0
priority_queue = [(0, start_idx)]
while priority_queue:
current_distance, current_index = heappop(priority_queue)
if current_distance > distances[current_index]:
continue
for neighbor_distance, neighbor_index in graph[current_index]:
distance = current_distance + neighbor_distance
if distance < distances[neighbor_index]:
distances[neighbor_index] = distance
previous_nodes[neighbor_index] = current_index
heappush(priority_queue, (distance, neighbor_index))
# Step 3: Backtrack from finish to start to find the path
path = []
current_index = finish_idx
while current_index is not None:
path.append(landmarks[current_index])
current_index = previous_nodes[current_index]
path.reverse()
return path
"""
"""
def total_path_distance(path: List[Landmark], detour, speed) -> float:
total_distance = 0
for i in range(len(path) - 1):
total_distance += get_distance(path[i].location, path[i + 1].location, detour, speed)[1]
return total_distance
"""
def find_shortest_path_through_all_landmarks(landmarks: List[Landmark]) -> List[Landmark]:
# Read from data
@ -178,6 +120,23 @@ def get_minor_landmarks(all_landmarks: List[Landmark], visited_landmarks: List[L
return take_most_important(second_order_landmarks, len(visited_landmarks))
def get_minor_landmarks2(all_landmarks: List[Landmark], visited_landmarks: List[Landmark], width: float) -> List[Landmark] :
second_order_landmarks = []
visited_names = []
area = create_corridor(visited_landmarks, width)
for visited in visited_landmarks :
visited_names.append(visited.name)
for landmark in all_landmarks :
if is_in_area(area, landmark.location) and landmark.name not in visited_names:
second_order_landmarks.append(landmark)
return take_most_important(second_order_landmarks, len(visited_landmarks))
"""def refine_optimization(landmarks: List[Landmark], base_tour: List[Landmark], max_time: int, print_infos: bool) -> List[Landmark] :
@ -198,7 +157,7 @@ def refine_optimization(landmarks: List[Landmark], base_tour: List[Landmark], ma
# Read from the file
with open (os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f :
parameters = json.loads(f.read())
max_landmarks = parameters['max landmarks']
max_landmarks = parameters['max landmarks'] + 4
if len(base_tour)-2 >= max_landmarks :
return base_tour
@ -284,10 +243,37 @@ def refine_optimization(landmarks: List[Landmark], base_tour: List[Landmark], ma
final_tour = better_tour
if print_infos :
print("\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n")
print("\nRefined tour (result of second stage optimization): ")
print("\n\n\nRefined tour (result of second stage optimization): ")
print_res(final_tour, len(full_set))
return final_tour
def refine_path(landmarks: List[Landmark], base_tour: List[Landmark], max_time: int, print_infos: bool) -> List[Landmark] :
print("\nRefining the base tour...")
# Read from the file
with open (os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f :
parameters = json.loads(f.read())
max_landmarks = parameters['max landmarks'] + 4
"""if len(base_tour)-2 >= max_landmarks :
return base_tour"""
minor_landmarks = get_minor_landmarks2(landmarks, base_tour, 200)
if print_infos : print("Using " + str(len(minor_landmarks)) + " minor landmarks around the predicted path")
full_set = base_tour + minor_landmarks # create full set of possible landmarks
print("\nRefined tour (result of second stage optimization): ")
new_path = generate_path2(full_set, max_time, max_landmarks)
return new_path

View File

@ -1,11 +1,14 @@
import pandas as pd
import os
import json
from typing import List
from landmarks_manager import generate_landmarks
from fastapi.encoders import jsonable_encoder
from optimizer import solve_optimization
from refiner import refine_optimization
from optimizer_v2 import generate_path, generate_path2
from refiner import refine_optimization, refine_path
from structs.landmarks import Landmark
from structs.landmarktype import LandmarkType
from structs.preferences import Preferences, Preference
@ -82,8 +85,8 @@ def test4(coordinates: tuple[float, float]) -> List[Landmark]:
# Create start and finish
start = Landmark(name='start', type=LandmarkType(landmark_type='start'), location=coordinates, osm_type='start', osm_id=0, attractiveness=0, must_do=True, n_tags = 0)
finish = Landmark(name='finish', type=LandmarkType(landmark_type='finish'), location=coordinates, osm_type='finish', osm_id=0, attractiveness=0, must_do=True, n_tags = 0)
start = Landmark(name='start', type=LandmarkType(landmark_type='start'), location=coordinates, osm_type='start', osm_id=0, attractiveness=0, must_do=False, n_tags = 0)
finish = Landmark(name='finish', type=LandmarkType(landmark_type='finish'), location=coordinates, osm_type='finish', osm_id=0, attractiveness=0, must_do=False, n_tags = 0)
#finish = Landmark(name='finish', type=LandmarkType(landmark_type='finish'), location=(48.8777055, 2.3640967), osm_type='finish', osm_id=0, attractiveness=0, must_do=True, n_tags = 0)
#start = Landmark(name='start', type=LandmarkType(landmark_type='start'), location=(48.847132, 2.312359), osm_type='start', osm_id=0, attractiveness=0, must_do=True, n_tags = 0)
#finish = Landmark(name='finish', type=LandmarkType(landmark_type='finish'), location=(48.843185, 2.344533), osm_type='finish', osm_id=0, attractiveness=0, must_do=True, n_tags = 0)
@ -98,19 +101,31 @@ def test4(coordinates: tuple[float, float]) -> List[Landmark]:
landmarks_short.append(finish)
# TODO use these parameters in another way
max_walking_time = 2 # hours
detour = 30 # minutes
with open (os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f :
parameters = json.loads(f.read())
max_landmarks = parameters['max landmarks']
max_walking_time = 45 # minutes
detour = 10 # minutes
# First stage optimization
base_tour = solve_optimization(landmarks_short, max_walking_time*60, True)
# Second stage optimization
refined_tour = refine_optimization(landmarks, base_tour, max_walking_time*60+detour, True)
return refined_tour
#base_tour = solve_optimization(landmarks_short, max_walking_time*60, True)
test4(tuple((48.8344400, 2.3220540))) # Café Chez César
# First stage using NetworkX
base_tour = generate_path2(landmarks_short, max_walking_time, max_landmarks)
# Second stage using linear optimization
#refined_tour = refine_optimization(landmarks, base_tour, max_walking_time+detour, True)
# Use NetworkX again to correct to shortest path
refined_tour = refine_path(landmarks, base_tour, max_walking_time+detour, True)
return base_tour
#test4(tuple((48.8344400, 2.3220540))) # Café Chez César
#test4(tuple((48.8375946, 2.2949904))) # Point random
#test4(tuple((47.377859, 8.540585))) # Zurich HB
test4(tuple((45.7576485, 4.8330241))) # Lyon Bellecour
#test3('Vienna, Austria')