From d4e964c5d4bc024533e2cc4c7a87e9374f6d522a Mon Sep 17 00:00:00 2001
From: Helldragon67
Date: Sun, 7 Jul 2024 16:24:15 +0200
Subject: [PATCH] Fix optimizer_v4 and add refiner_v2

Optimizer: keep only the closest travel times per landmark in the
inequality matrix, accept an optional max_landmarks argument in
solve_optimization()/respect_number(), and re-enable prevent_config()
when breaking circular sub-tours.

Refiner: pass the landmark cap to the second-stage optimization, fall
back to a concave hull when the tour polygon has no usable exterior,
and print the total attractiveness of the refined tour. The NetworkX
based refine_path()/correct_path() experiments are removed from
refiner.py and kept in the new refiner_v2.py.

Also widen the city bbox side, raise "N important" from 30 to 40, move
'tourism'='artwork' from nature to sightseeing, and switch tester.py
back to the linear first stage.
---
 backend/src/amenities/nature.am             |   1 +
 backend/src/amenities/sightseeing.am        |   1 +
 backend/src/optimizer_v4.py                 |  21 +-
 .../src/parameters/landmarks_manager.params |   4 +-
 backend/src/refiner.py                      | 121 ++-----
 backend/src/refiner_v2.py                   | 306 ++++++++++++++++++
 backend/src/tester.py                       |  17 +-
 7 files changed, 358 insertions(+), 113 deletions(-)
 create mode 100644 backend/src/refiner_v2.py

diff --git a/backend/src/amenities/nature.am b/backend/src/amenities/nature.am
index dcc4061..bcad9d1 100644
--- a/backend/src/amenities/nature.am
+++ b/backend/src/amenities/nature.am
@@ -8,4 +8,5 @@ geological
 'tourism'='alpine_hut'
 'tourism'='viewpoint'
 'tourism'='zoo'
+#'tourism'='artwork'
 'waterway'='waterfall'
\ No newline at end of file
diff --git a/backend/src/amenities/sightseeing.am b/backend/src/amenities/sightseeing.am
index f9a4eed..053f9c9 100644
--- a/backend/src/amenities/sightseeing.am
+++ b/backend/src/amenities/sightseeing.am
@@ -1,6 +1,7 @@
 'tourism'='museum'
 'tourism'='attraction'
 'tourism'='gallery'
+'tourism'='artwork'
 historic
 'amenity'='planetarium'
 'amenity'='place_of_worship'
diff --git a/backend/src/optimizer_v4.py b/backend/src/optimizer_v4.py
index acb2b87..b3c09ee 100644
--- a/backend/src/optimizer_v4.py
+++ b/backend/src/optimizer_v4.py
@@ -179,6 +179,10 @@ def init_ub_dist(landmarks: List[Landmark], max_steps: int):
         for j, spot2 in enumerate(landmarks) :
             t = get_time(spot1.location, spot2.location, detour, speed)
             dist_table[j] = t
+        closest = sorted(dist_table)[:22]
+        for i, dist in enumerate(dist_table) :
+            if dist not in closest :
+                dist_table[i] = 10000000
         A_ub += dist_table
     c = c*len(landmarks)
 
@@ -186,7 +190,7 @@ def init_ub_dist(landmarks: List[Landmark], max_steps: int):
 
 
 # Constraint to respect only one travel per landmark. Also caps the total number of visited landmarks
-def respect_number(L:int, A_ub, b_ub):
+def respect_number(L: int, A_ub, b_ub, max_landmarks):
 
     ones = [1]*L
     zeros = [0]*L
@@ -195,10 +199,11 @@ def respect_number(L:int, A_ub, b_ub):
         A_ub = np.vstack((A_ub, h))
         b_ub.append(1)
 
-    # Read the parameters from the file
-    with open (os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f :
-        parameters = json.loads(f.read())
-        max_landmarks = parameters['max landmarks']
+    if max_landmarks is None :
+        # Read the parameters from the file
+        with open (os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f :
+            parameters = json.loads(f.read())
+            max_landmarks = parameters['max landmarks']
 
     A_ub = np.vstack((A_ub, ones*L))
     b_ub.append(max_landmarks+1)
@@ -361,13 +366,13 @@ def link_list_simple(ordered_visit: List[Landmark])-> List[Landmark] :
 
 
 # Main optimization pipeline
-def solve_optimization (landmarks :List[Landmark], max_steps: int, printing_details: bool) :
+def solve_optimization (landmarks :List[Landmark], max_steps: int, printing_details: bool, max_landmarks = None) :
 
     L = len(landmarks)
 
     # SET CONSTRAINTS FOR INEQUALITY
     c, A_ub, b_ub = init_ub_dist(landmarks, max_steps)           # Add the distances from each landmark to the other
-    A_ub, b_ub = respect_number(L, A_ub, b_ub)                   # Respect max number of visits (no more possible stops than landmarks).
+    A_ub, b_ub = respect_number(L, A_ub, b_ub, max_landmarks)    # Respect max number of visits (no more possible stops than landmarks).
     A_ub, b_ub = break_sym(L, A_ub, b_ub)                        # break the 'zig-zag' symmetry
 
     # SET CONSTRAINTS FOR EQUALITY
@@ -392,7 +397,7 @@ def solve_optimization (landmarks :List[Landmark], max_steps: int, printing_deta
         i = 0
         timeout = 80
         while len(circle) != 0 and i < timeout:
-            #A_ub, b_ub = prevent_config(res.x, A_ub, b_ub)
+            A_ub, b_ub = prevent_config(res.x, A_ub, b_ub)
             A_ub, b_ub = break_cricle(order, len(landmarks), A_ub, b_ub)
             res = linprog(c, A_ub=A_ub, b_ub=b_ub, A_eq=A_eq, b_eq = b_eq, bounds=x_bounds, method='highs', integrality=3)
             order, circle = is_connected(res.x)
diff --git a/backend/src/parameters/landmarks_manager.params b/backend/src/parameters/landmarks_manager.params
index 17afc2b..54638b5 100644
--- a/backend/src/parameters/landmarks_manager.params
+++ b/backend/src/parameters/landmarks_manager.params
@@ -1,8 +1,8 @@
 {
-    "city bbox side" : 3,
+    "city bbox side" : 10,
     "radius close to" : 50,
     "church coeff" : 0.9,
     "park coeff" : 1.2,
     "tag coeff" : 10,
-    "N important" : 30
+    "N important" : 40
 }
\ No newline at end of file
diff --git a/backend/src/refiner.py b/backend/src/refiner.py
index 465de3e..9d18383 100644
--- a/backend/src/refiner.py
+++ b/backend/src/refiner.py
@@ -1,20 +1,15 @@
-from collections import defaultdict
-from heapq import heappop, heappush
-from itertools import permutations
 import os, json
 
-from shapely import buffer, LineString, Point, Polygon, MultiPoint, convex_hull, concave_hull, LinearRing
+from shapely import buffer, LineString, Point, Polygon, MultiPoint, concave_hull
 from typing import List, Tuple
-from scipy.spatial import KDTree
 from math import pi
-import networkx as nx
 
 from structs.landmarks import Landmark
 from landmarks_manager import take_most_important
 from optimizer_v4 import solve_optimization, link_list_simple, print_res, get_time
-from optimizer_v2 import generate_path, generate_path2
 
 
+# Create corridor from tour
 def create_corridor(landmarks: List[Landmark], width: float) :
 
     corrected_width = (180*width)/(6371000*pi)
@@ -25,6 +20,7 @@ def create_corridor(landmarks: List[Landmark], width: float) :
     return obj
 
 
+# Create linestring from tour
 def create_linestring(landmarks: List[Landmark])->List[Point] :
 
     points = []
@@ -35,11 +31,13 @@ def create_linestring(landmarks: List[Landmark])->List[Point] :
     return LineString(points)
 
 
+# Check if some coordinates are in area. Used for the corridor
 def is_in_area(area: Polygon, coordinates) -> bool :
     point = Point(coordinates)
     return point.within(area)
 
 
+# Function to determine if two landmarks are close to each other
 def is_close_to(location1: Tuple[float], location2: Tuple[float]):
     """Determine if two locations are close by rounding their coordinates to 3 decimals."""
     absx = abs(location1[0] - location2[0])
     absy = abs(location1[1] - location2[1])
 
     return absx < 0.001 and absy < 0.001
     #return (round(location1[0], 3), round(location1[1], 3)) == (round(location2[0], 3), round(location2[1], 3))
 
 
+# Rearrange some landmarks in the order of visit
 def rearrange(landmarks: List[Landmark]) -> List[Landmark]:
 
     i = 1
@@ -65,6 +64,8 @@ def rearrange(landmarks: List[Landmark]) -> List[Landmark]:
 
     return landmarks
 
+
+# Simple nearest neighbour planner to try to fix the path
 def find_shortest_path_through_all_landmarks(landmarks: List[Landmark]) -> Tuple[List[Landmark], Polygon]:
 
     # Read from data
@@ -106,6 +107,8 @@ def find_shortest_path_through_all_landmarks(landmarks: List[Landmark]) -> Tuple
 
     return path, path_poly
 
+
+# Returns a list of minor landmarks around the planned path to enhance experience
 def get_minor_landmarks(all_landmarks: List[Landmark], visited_landmarks: List[Landmark], width: float) -> List[Landmark] :
 
     second_order_landmarks = []
@@ -122,22 +125,7 @@ def get_minor_landmarks(all_landmarks: List[Landmark], visited_landmarks: List[L
 
     return take_most_important(second_order_landmarks, len(visited_landmarks))
 
-
-"""def refine_optimization(landmarks: List[Landmark], base_tour: List[Landmark], max_time: int, print_infos: bool) -> List[Landmark] :
-
-    minor_landmarks = get_minor_landmarks(landmarks, base_tour, 200)
-
-    if print_infos : print("There are " + str(len(minor_landmarks)) + " minor landmarks around the predicted path")
-
-    full_set = base_tour[:-1] + minor_landmarks        # create full set of possible landmarks (without finish)
-    full_set.append(base_tour[-1])                     # add finish back
-
-    new_tour = solve_optimization(full_set, max_time, print_infos)
-
-    return new_tour"""
-
-
-
+# Try to fix the shortest path using shapely
 def fix_using_polygon(tour: List[Landmark])-> List[Landmark] :
 
     coords = []
@@ -150,12 +138,18 @@ def fix_using_polygon(tour: List[Landmark])-> List[Landmark] :
     tour_poly = Polygon(coords)
 
     better_tour_poly = tour_poly.buffer(0)
-    xs, ys = better_tour_poly.exterior.xy
+    try :
+        xs, ys = better_tour_poly.exterior.xy
 
-    if len(xs) != len(tour) :
+        if len(xs) != len(tour) :
+            better_tour_poly = concave_hull(MultiPoint(coords))  # Create concave hull with "core" of tour leaving out start and finish
+            xs, ys = better_tour_poly.exterior.xy
+
+    except :
         better_tour_poly = concave_hull(MultiPoint(coords))  # Create concave hull with "core" of tour leaving out start and finish
         xs, ys = better_tour_poly.exterior.xy
 
+
     # reverse the xs and ys
     xs.reverse()
     ys.reverse()
@@ -183,6 +177,7 @@ def fix_using_polygon(tour: List[Landmark])-> List[Landmark] :
 
     return better_tour
 
+# Second stage of the optimization. Use linear programming again to refine the path
 def refine_optimization(landmarks: List[Landmark], base_tour: List[Landmark], max_time: int, print_infos: bool) -> List[Landmark] :
 
     # Read from the file
@@ -199,7 +194,7 @@ def refine_optimization(landmarks: List[Landmark], base_tour: List[Landmark], ma
     full_set.append(base_tour[-1])                     # add finish back
 
     # get a new tour
-    new_tour = solve_optimization(full_set, max_time, False)
+    new_tour = solve_optimization(full_set, max_time, False, max_landmarks)
     new_tour, new_dist = link_list_simple(new_tour)
 
     better_tour, better_poly = find_shortest_path_through_all_landmarks(new_tour)
@@ -218,78 +213,14 @@ def refine_optimization(landmarks: List[Landmark], base_tour: List[Landmark], ma
 
     if print_infos :
         print("\n\n\nRefined tour (result of second stage optimization): ")
         print_res(final_tour, len(full_set))
+        total_score = 0
+        for elem in final_tour :
+            total_score += elem.attractiveness
+
+        print("\nTotal score : " + str(total_score))
 
     return final_tour
 
-
-def refine_path(landmarks: List[Landmark], base_tour: List[Landmark], max_time: int, print_infos: bool) -> List[Landmark] :
-
-    print("\nRefining the base tour...")
-    # Read from the file
-    with open (os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f :
-        parameters = json.loads(f.read())
-        max_landmarks = parameters['max landmarks'] + 3
-
-    """if len(base_tour)-2 >= max_landmarks :
-        return base_tour"""
-
-    minor_landmarks = get_minor_landmarks(landmarks, base_tour, 200)
-
-    if print_infos : print("Using " + str(len(minor_landmarks)) + " minor landmarks around the predicted path")
-
-    full_set = base_tour + minor_landmarks      # create full set of possible landmarks
-
-    print("\nRefined tour (result of second stage optimization): ")
-
-    new_path = generate_path2(full_set, max_time, max_landmarks)
-
-    return new_path
-
-
-
-
-# If a tour is not connected
-def correct_path(tour: List[Landmark]) -> List[Landmark] :
-
-    # Read the parameters from the file
-    with open (os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f :
-        parameters = json.loads(f.read())
-        detour = parameters['detour factor']
-        speed = parameters['average walking speed']
-
-    G = nx.Graph()
-
-    coords = []
-    landmap = {}
-    for i, landmark in enumerate(tour) :
-        coords.append(landmark.location)
-        landmap[i] = landmark
-        G.add_node(i, pos=landmark.location, weight=landmark.attractiveness)
-
-    kdtree = KDTree(coords)
-
-    k = 3
-    for node, coord in coords:
-        indices = kdtree.query(coord, k + 1)[1]  # k+1 because the closest neighbor is the node itself
-        for idx in indices[1:]:  # skip the first one (itself)
-            neighbor = list(coords)[idx]
-            distance = get_time(coord, coords[neighbor], detour, speed)
-            G.add_edge(node, neighbor, weight=distance)
-
-    path = nx.approximation.traveling_salesman_problem(G, weight='weight', cycle=True)
-
-    if len(path) != len(tour) :
-        print("nope")
-
-    lis = [landmap[id] for id in path]
-
-    lis, tot_dist = link_list_simple(lis)
-
-    print_res(lis, len(tour))
-
-    return path
-
-
diff --git a/backend/src/refiner_v2.py b/backend/src/refiner_v2.py
new file mode 100644
index 0000000..236217f
--- /dev/null
+++ b/backend/src/refiner_v2.py
@@ -0,0 +1,306 @@
+from collections import defaultdict
+from heapq import heappop, heappush
+from itertools import permutations
+import os, json
+
+from shapely import buffer, LineString, Point, Polygon, MultiPoint, convex_hull, concave_hull, LinearRing
+from typing import List, Tuple
+from scipy.spatial import KDTree
+from math import pi
+import networkx as nx
+
+from structs.landmarks import Landmark
+from landmarks_manager import take_most_important
+from optimizer_v4 import solve_optimization, link_list_simple, print_res, get_time
+from optimizer_v2 import generate_path, generate_path2
+
+
+def create_corridor(landmarks: List[Landmark], width: float) :
+
+    corrected_width = (180*width)/(6371000*pi)
+
+    path = create_linestring(landmarks)
+    obj = buffer(path, corrected_width, join_style="mitre", cap_style="square", mitre_limit=2)
+
+    return obj
+
+
+def create_linestring(landmarks: List[Landmark])->List[Point] :
+
+    points = []
+
+    for landmark in landmarks :
+        points.append(Point(landmark.location))
+
+    return LineString(points)
+
+
+def is_in_area(area: Polygon, coordinates) -> bool :
+    point = Point(coordinates)
+    return point.within(area)
+
+
+def is_close_to(location1: Tuple[float], location2: Tuple[float]):
+    """Determine if two locations are close by rounding their coordinates to 3 decimals."""
+    absx = abs(location1[0] - location2[0])
+    absy = abs(location1[1] - location2[1])
+
+    return absx < 0.001 and absy < 0.001
+    #return (round(location1[0], 3), round(location1[1], 3)) == (round(location2[0], 3), round(location2[1], 3))
+
+
+def rearrange(landmarks: List[Landmark]) -> List[Landmark]:
+
+    i = 1
+    while i < len(landmarks):
+        j = i+1
+        while j < len(landmarks):
+            if is_close_to(landmarks[i].location, landmarks[j].location) and landmarks[i].name not in ['start', 'finish'] and landmarks[j].name not in ['start', 'finish']:
+                # If they are not adjacent, move the j-th element to be adjacent to the i-th element
+                if j != i + 1:
+                    landmarks.insert(i + 1, landmarks.pop(j))
+                break  # Move to the next i-th element after rearrangement
+            j += 1
+        i += 1
+
+    return landmarks
+
+def find_shortest_path_through_all_landmarks(landmarks: List[Landmark]) -> Tuple[List[Landmark], Polygon]:
+
+    # Read from data
+    with open (os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f :
+        parameters = json.loads(f.read())
+        detour = parameters['detour factor']
+        speed = parameters['average walking speed']
+
+    # Step 1: Find 'start' and 'finish' landmarks
+    start_idx = next(i for i, lm in enumerate(landmarks) if lm.name == 'start')
+    finish_idx = next(i for i, lm in enumerate(landmarks) if lm.name == 'finish')
+
+    start_landmark = landmarks[start_idx]
+    finish_landmark = landmarks[finish_idx]
+
+
+    # Step 2: Create a list of unvisited landmarks excluding 'start' and 'finish'
+    unvisited_landmarks = [lm for i, lm in enumerate(landmarks) if i not in [start_idx, finish_idx]]
+
+    # Step 3: Initialize the path with the 'start' landmark
+    path = [start_landmark]
+    coordinates = [landmarks[start_idx].location]
+
+    current_landmark = start_landmark
+
+    # Step 4: Use nearest neighbor heuristic to visit all landmarks
+    while unvisited_landmarks:
+        nearest_landmark = min(unvisited_landmarks, key=lambda lm: get_time(current_landmark.location, lm.location, detour, speed))
+        path.append(nearest_landmark)
+        coordinates.append(nearest_landmark.location)
+        current_landmark = nearest_landmark
+        unvisited_landmarks.remove(nearest_landmark)
+
+    # Step 5: Finally add the 'finish' landmark to the path
+    path.append(finish_landmark)
+    coordinates.append(landmarks[finish_idx].location)
+
+    path_poly = Polygon(coordinates)
+
+    return path, path_poly
+
+def get_minor_landmarks(all_landmarks: List[Landmark], visited_landmarks: List[Landmark], width: float) -> List[Landmark] :
+
+    second_order_landmarks = []
+    visited_names = []
+    area = create_corridor(visited_landmarks, width)
+
+    for visited in visited_landmarks :
+        visited_names.append(visited.name)
+
+    for landmark in all_landmarks :
+        if is_in_area(area, landmark.location) and landmark.name not in visited_names:
+            second_order_landmarks.append(landmark)
+
+    return take_most_important(second_order_landmarks, len(visited_landmarks))
+
+
+
+"""def refine_optimization(landmarks: List[Landmark], base_tour: List[Landmark], max_time: int, print_infos: bool) -> List[Landmark] :
+
+    minor_landmarks = get_minor_landmarks(landmarks, base_tour, 200)
+
+    if print_infos : print("There are " + str(len(minor_landmarks)) + " minor landmarks around the predicted path")
+
+    full_set = base_tour[:-1] + minor_landmarks        # create full set of possible landmarks (without finish)
+    full_set.append(base_tour[-1])                     # add finish back
+
+    new_tour = solve_optimization(full_set, max_time, print_infos)
+
+    return new_tour"""
+
+
+
+def fix_using_polygon(tour: List[Landmark])-> List[Landmark] :
+
+    coords = []
+    coords_dict = {}
+    for landmark in tour :
+        coords.append(landmark.location)
+        if landmark.name != 'finish' :
+            coords_dict[landmark.location] = landmark
+
+    tour_poly = Polygon(coords)
+
+    better_tour_poly = tour_poly.buffer(0)
+    try :
+        xs, ys = better_tour_poly.exterior.xy
+
+        if len(xs) != len(tour) :
+            better_tour_poly = concave_hull(MultiPoint(coords))  # Create concave hull with "core" of tour leaving out start and finish
+            xs, ys = better_tour_poly.exterior.xy
+
+    except :
+        better_tour_poly = concave_hull(MultiPoint(coords))  # Create concave hull with "core" of tour leaving out start and finish
+        xs, ys = better_tour_poly.exterior.xy
+
+
+    # reverse the xs and ys
+    xs.reverse()
+    ys.reverse()
+
+    better_tour = []    # List of ordered visit
+    name_index = {}     # Maps the name of a landmark to its index in the concave polygon
+
+    # Loop through the polygon and generate the better (ordered) tour
+    for i,x in enumerate(xs[:-1]) :
+        y = ys[i]
+        better_tour.append(coords_dict[tuple((x,y))])
+        name_index[coords_dict[tuple((x,y))].name] = i
+
+
+    # Scroll the list to have start in front again
+    start_index = name_index['start']
+    better_tour = better_tour[start_index:] + better_tour[:start_index]
+
+    # Append the finish back and correct the time to reach
+    better_tour.append(tour[-1])
+
+    # Rearrange only if polygon
+    better_tour = rearrange(better_tour)
+
+    return better_tour
+
+
+def refine_optimization(landmarks: List[Landmark], base_tour: List[Landmark], max_time: int, print_infos: bool) -> List[Landmark] :
+
+    # Read from the file
+    with open (os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f :
+        parameters = json.loads(f.read())
+        max_landmarks = parameters['max landmarks'] + 4
+
+    minor_landmarks = get_minor_landmarks(landmarks, base_tour, 200)
+
+    if print_infos : print("Using " + str(len(minor_landmarks)) + " minor landmarks around the predicted path")
+
+    # full set of visitable landmarks
+    full_set = base_tour[:-1] + minor_landmarks        # create full set of possible landmarks (without finish)
+    full_set.append(base_tour[-1])                     # add finish back
+
+    # get a new tour
+    new_tour = solve_optimization(full_set, max_time, False, max_landmarks)
+    new_tour, new_dist = link_list_simple(new_tour)
+
+    better_tour, better_poly = find_shortest_path_through_all_landmarks(new_tour)
+
+    if base_tour[0].location == base_tour[-1].location and not better_poly.is_valid :
+        better_tour = fix_using_polygon(better_tour)
+
+
+    better_tour, better_dist = link_list_simple(better_tour)
+
+    if new_dist < better_dist :
+        final_tour = new_tour
+    else :
+        final_tour = better_tour
+
+    if print_infos :
+        print("\n\n\nRefined tour (result of second stage optimization): ")
+        print_res(final_tour, len(full_set))
+        total_score = 0
+        for elem in final_tour :
+            total_score += elem.attractiveness
+
+        print("\nTotal score : " + str(total_score))
+
+
+
+    return final_tour
+
+
+
+def refine_path(landmarks: List[Landmark], base_tour: List[Landmark], max_time: int, print_infos: bool) -> List[Landmark] :
+
+    print("\nRefining the base tour...")
+    # Read from the file
+    with open (os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f :
+        parameters = json.loads(f.read())
+        max_landmarks = parameters['max landmarks'] + 3
+
+    """if len(base_tour)-2 >= max_landmarks :
+        return base_tour"""
+
+    minor_landmarks = get_minor_landmarks(landmarks, base_tour, 200)
+
+    if print_infos : print("Using " + str(len(minor_landmarks)) + " minor landmarks around the predicted path")
+
+    full_set = base_tour + minor_landmarks      # create full set of possible landmarks
+
+    print("\nRefined tour (result of second stage optimization): ")
+
+    new_path = generate_path2(full_set, max_time, max_landmarks)
+
+    return new_path
+
+
+
+
+# If a tour is not connected
+def correct_path(tour: List[Landmark]) -> List[Landmark] :
+
+    # Read the parameters from the file
+    with open (os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f :
+        parameters = json.loads(f.read())
+        detour = parameters['detour factor']
+        speed = parameters['average walking speed']
+
+    G = nx.Graph()
+
+    coords = []
+    landmap = {}
+    for i, landmark in enumerate(tour) :
+        coords.append(landmark.location)
+        landmap[i] = landmark
+        G.add_node(i, pos=landmark.location, weight=landmark.attractiveness)
+
+    kdtree = KDTree(coords)
+
+    k = 3
+    for node, coord in coords:
+        indices = kdtree.query(coord, k + 1)[1]  # k+1 because the closest neighbor is the node itself
+        for idx in indices[1:]:  # skip the first one (itself)
+            neighbor = list(coords)[idx]
+            distance = get_time(coord, coords[neighbor], detour, speed)
+            G.add_edge(node, neighbor, weight=distance)
+
+    path = nx.approximation.traveling_salesman_problem(G, weight='weight', cycle=True)
+
+    if len(path) != len(tour) :
+        print("nope")
+
+    lis = [landmap[id] for id in path]
+
+    lis, tot_dist = link_list_simple(lis)
+
+    print_res(lis, len(tour))
+
+    return path
+
+
diff --git a/backend/src/tester.py b/backend/src/tester.py
index 3215ffd..eef098a 100644
--- a/backend/src/tester.py
+++ b/backend/src/tester.py
@@ -73,7 +73,7 @@ def test4(coordinates: tuple[float, float]) -> List[Landmark]:
     sightseeing=Preference(
                             name='sightseeing',
                             type=LandmarkType(landmark_type='sightseeing'),
-                            score = 5),
+                            score = 0),
     nature=Preference(
                             name='nature',
                             type=LandmarkType(landmark_type='nature'),
@@ -105,21 +105,22 @@ def test4(coordinates: tuple[float, float]) -> List[Landmark]:
     with open (os.path.dirname(os.path.abspath(__file__)) + '/parameters/optimizer.params', "r") as f :
         parameters = json.loads(f.read())
         max_landmarks = parameters['max landmarks']
-    max_walking_time = 120   # minutes
-    detour = 30              # minutes
+    max_walking_time = 120  # minutes
+    detour = 10             # minutes
 
     # First stage optimization
-    #base_tour = solve_optimization(landmarks_short, max_walking_time, True)
+    base_tour = solve_optimization(landmarks_short, max_walking_time, True)
 
     #base_tour = solve_optimization(landmarks_short, max_walking_time, True)
 
 
    # First stage using NetworkX
-    base_tour = generate_path(landmarks_short, max_walking_time, max_landmarks)
+    #base_tour = generate_path(landmarks_short, max_walking_time, max_landmarks)
 
    # Second stage using linear optimization
-    refined_tour = refine_optimization(landmarks, base_tour, max_walking_time+detour, True)
+    if detour != 0 :
+        refined_tour = refine_optimization(landmarks, base_tour, max_walking_time+detour, True)
 
    # Second stage using NetworkX
    #refined_tour = refine_path(landmarks, base_tour, max_walking_time+detour, True)
@@ -130,8 +131,8 @@ def test4(coordinates: tuple[float, float]) -> List[Landmark]:
 
     return refined_tour
 
-#test4(tuple((48.8344400, 2.3220540))) # Café Chez César
+test4(tuple((48.8344400, 2.3220540))) # Café Chez César
 #test4(tuple((48.8375946, 2.2949904))) # Point random
 #test4(tuple((47.377859, 8.540585))) # Zurich HB
-test4(tuple((45.7576485, 4.8330241))) # Lyon Bellecour
+#test4(tuple((45.7576485, 4.8330241))) # Lyon Bellecour
 #test3('Vienna, Austria')
\ No newline at end of file
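
For context on how the pieces of this patch fit together, here is a minimal driver sketch in the spirit of test4() in tester.py. It assumes the backend modules are importable and that the landmark lists were already produced by the landmarks manager; the run_two_stage() name and the explicit fallback to the base tour when detour is 0 are illustrative additions, not part of the patch.

    from typing import List

    from structs.landmarks import Landmark
    from optimizer_v4 import solve_optimization
    from refiner import refine_optimization

    def run_two_stage(landmarks: List[Landmark], landmarks_short: List[Landmark],
                      max_walking_time: int = 120, detour: int = 10) -> List[Landmark]:
        # First stage: linear optimization over the pre-selected landmarks,
        # as in tester.py (solve_optimization reads its own landmark cap).
        base_tour = solve_optimization(landmarks_short, max_walking_time, True)

        # Second stage: re-optimize with the minor landmarks found around the
        # corridor, granting the detour budget on top of the walking time.
        if detour != 0:
            return refine_optimization(landmarks, base_tour, max_walking_time + detour, True)

        # Fallback not present in tester.py: without a detour budget, keep the base tour.
        return base_tour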
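The corridor used by get_minor_landmarks() is built by buffering the tour LineString with corrected_width = (180*width)/(6371000*pi), i.e. a width given in metres is converted to degrees of arc on a sphere of radius 6 371 000 m. A quick standalone check of that conversion (the helper name is mine, not from the repository):

    from math import pi

    def metres_to_degrees(width_m: float) -> float:
        # degrees = 180 * arc_length / (pi * R), with R = 6371000 m (mean Earth radius)
        return (180 * width_m) / (6371000 * pi)

    print(metres_to_degrees(200))   # ~0.0018 deg: the 200 m corridor passed by refine_optimization()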
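The new block in init_ub_dist() prunes each row of travel times so that only the 22 smallest values keep their real cost, while every other entry becomes 10000000, effectively forbidding direct links between far-apart landmarks. A small self-contained sketch of the same idea, assuming nothing beyond the patch itself (prune_row() and the keep=3 used in the demo are illustrative; only the 22/10000000 pair appears in the patch):

    def prune_row(dist_row: list, keep: int = 22, big: int = 10000000) -> list:
        # Values outside the `keep` smallest are replaced by a prohibitive travel
        # time; ties are all kept, like the `dist not in closest` test in the patch.
        closest = sorted(dist_row)[:keep]
        return [d if d in closest else big for d in dist_row]

    print(prune_row([3, 50, 7, 120, 1], keep=3))   # -> [3, 10000000, 7, 10000000, 1]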