From 0514fa063f1f640aff1122764016ed601b9058d2 Mon Sep 17 00:00:00 2001
From: kilian
Date: Thu, 20 Nov 2025 18:47:34 +0100
Subject: [PATCH] Suggested fix to avoid UnboundLocalError

---
 backend/src/landmarks/cluster_manager.py | 93 +++++++++++++-----------
 1 file changed, 49 insertions(+), 44 deletions(-)

diff --git a/backend/src/landmarks/cluster_manager.py b/backend/src/landmarks/cluster_manager.py
index 85f9af5..92a26a7 100644
--- a/backend/src/landmarks/cluster_manager.py
+++ b/backend/src/landmarks/cluster_manager.py
@@ -102,52 +102,57 @@ class ClusterManager:
                 selector = sel,
                 out = out
             )
-        except Exception as e:
-            self.logger.warning(f"Error fetching clusters: {e}")
-
-        if result is None :
-            self.logger.debug(f"Found no {cluster_type} clusters, overpass query returned no datapoints.")
-            self.valid = False
-
-        else :
-            points = []
-            for elem in result:
-                osm_type = elem.get('type')
-
-                # Get coordinates and append them to the points list
-                _, coords = get_base_info(elem, osm_type)
-                if coords is not None :
-                    points.append(coords)
-
-            if points :
-                self.all_points = np.array(points)
-
-                # Apply DBSCAN to find clusters. Choose different settings for different cities.
-                if self.cluster_type == 'shopping' and len(self.all_points) > 200 :
-                    dbscan = DBSCAN(eps=0.00118, min_samples=15, algorithm='kd_tree') # for large cities
-                elif self.cluster_type == 'sightseeing' :
-                    dbscan = DBSCAN(eps=0.0025, min_samples=15, algorithm='kd_tree') # for historic neighborhoods
-                else :
-                    dbscan = DBSCAN(eps=0.00075, min_samples=10, algorithm='kd_tree') # for small cities
-
-                labels = dbscan.fit_predict(self.all_points)
-
-                # Check that there are is least 1 cluster
-                if len(set(labels)) > 1 :
-                    self.logger.info(f"Found {len(set(labels))} different {cluster_type} clusters.")
-                    # Separate clustered points and noise points
-                    self.cluster_points = self.all_points[labels != -1]
-                    self.cluster_labels = labels[labels != -1]
-                    self.filter_clusters() # ValueError here sometimes. I dont know why. # Filter the clusters to keep only the largest ones.
-                    self.valid = True
-
-                else :
-                    self.logger.info(f"Found 0 {cluster_type} clusters.")
-                    self.valid = False
+            if result is None :
+                self.logger.debug(f"Found no {cluster_type} clusters, overpass query returned no datapoints.")
+                self.valid = False
 
             else :
-                self.logger.debug(f"Detected 0 {cluster_type} clusters.")
-                self.valid = False
+                points = []
+                for elem in result:
+                    osm_type = elem.get('type')
+
+                    # Get coordinates and append them to the points list
+                    _, coords = get_base_info(elem, osm_type)
+                    if coords is not None :
+                        points.append(coords)
+
+                if points :
+                    self.all_points = np.array(points)
+
+                    # Apply DBSCAN to find clusters. Choose different settings for different cities.
+                    if self.cluster_type == 'shopping' and len(self.all_points) > 200 :
+                        dbscan = DBSCAN(eps=0.00118, min_samples=15, algorithm='kd_tree') # for large cities
+                    elif self.cluster_type == 'sightseeing' :
+                        dbscan = DBSCAN(eps=0.0025, min_samples=15, algorithm='kd_tree') # for historic neighborhoods
+                    else :
+                        dbscan = DBSCAN(eps=0.00075, min_samples=10, algorithm='kd_tree') # for small cities
+
+                    labels = dbscan.fit_predict(self.all_points)
+
+                    # Check that there is at least 1 cluster
+                    if len(set(labels)) > 1 :
+                        self.logger.info(f"Found {len(set(labels))} different {cluster_type} clusters.")
+                        # Separate clustered points and noise points
+                        self.cluster_points = self.all_points[labels != -1]
+                        self.cluster_labels = labels[labels != -1]
+                        self.filter_clusters() # Filter the clusters to keep only the largest ones. ValueError here sometimes; cause unknown.
+                        self.valid = True
+
+                    else :
+                        self.logger.info(f"Found 0 {cluster_type} clusters.")
+                        self.valid = False
+
+                else :
+                    self.logger.debug(f"Detected 0 {cluster_type} clusters.")
+                    self.valid = False
+
+        except UnboundLocalError as ule:
+            self.logger.warning(f"Error fetching clusters (most likely due to overpass): {ule}")
+            self.valid = False
+        except Exception as e:
+            self.logger.warning(f"Error fetching clusters: {e}")
+            raise Exception from e
+
 
 
     def generate_clusters(self) -> list[Landmark]:
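Note on the failure mode (not part of the patch itself): in the old structure the `except Exception` handler only logged a warning, so when self.overpass.send_query raised, execution fell through to `if result is None:` with `result` never assigned, which is what produced the UnboundLocalError. The sketch below reproduces that outside the codebase and shows the restructured flow; `fetch`, `old_flow` and `new_flow` are illustrative placeholders, not project code.

# Minimal sketch, assuming the Overpass call can raise (e.g. a timeout).
def fetch():
    raise TimeoutError("overpass timed out")

def old_flow():
    try:
        result = fetch()
    except Exception as e:
        print(f"warning: {e}")      # logged, but execution continues
    if result is None:              # 'result' was never bound -> UnboundLocalError
        return []

def new_flow():
    try:
        result = fetch()
        if result is None:          # only reached when fetch() returned normally
            return []
        return list(result)
    except Exception as e:
        print(f"warning: {e}")      # the failed query is handled here instead
        return []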