now working
Some checks failed
Build and deploy the backend to staging / Build and push image (pull_request) Successful in 1m38s
Run linting on the backend code / Build (pull_request) Successful in 26s
Run testing on the backend code / Build (pull_request) Failing after 2m50s
Build and deploy the backend to staging / Deploy to staging (pull_request) Successful in 24s
@@ -30,7 +30,7 @@ class Overpass :


     def send_query(self, bbox: tuple, osm_types: OSM_TYPES,
-                   selector: str, conditions=[], out='center'):
+                   selector: str, conditions: list=None, out='center'):
         """
         Sends the Overpass QL query to the Overpass API and returns the parsed json response.

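Review note: swapping the mutable default `conditions=[]` for `conditions: list=None` avoids the classic shared-default pitfall, where a single list object created at function-definition time is reused across every call. A minimal standalone illustration (not project code):

# Illustrative only: why a mutable default argument is risky.
def append_bad(item, bucket=[]):        # one list object shared by all calls
    bucket.append(item)
    return bucket

def append_good(item, bucket=None):     # a fresh list per call unless one is passed in
    if bucket is None:
        bucket = []
    bucket.append(item)
    return bucket

print(append_bad('a'))    # ['a']
print(append_bad('b'))    # ['a', 'b']  <- state leaked from the first call
print(append_good('a'))   # ['a']
print(append_good('b'))   # ['b']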
@@ -52,24 +52,22 @@ class Overpass :

         # If there is no missing data, return the cached responses
         if not hollow_cache_keys :
-            self.logger.debug(f'Cache hit.')
+            self.logger.info('Cache hit.')
             return self._combine_cached_data(cached_responses)

         # TODO If there is SOME missing data : hybrid stuff with partial cache

         # Missing data: Make a query to Overpass API
         query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)
         return self.fetch_data_from_api(query_str)


-    def fetch_data_from_api(self, query_str: str, cache_key: str = None) -> dict:
+    def fetch_data_from_api(self, query_str: str) -> list:
         """
-        Fetch data from the Overpass API and update the cache.
+        Fetch data from the Overpass API and return the json data.

         Args:
             query_str (str): The Overpass query string.
-            cached_responses (list): Cached responses to combine with fetched data.
-            hollow_cache_keys (list): Cache keys for missing data to be updated.

         Returns:
             dict: Combined cached and fetched data.
@@ -83,32 +81,45 @@ class Overpass :
                 data = json.loads(response_data)  # Load the JSON from the string
                 elements = data.get('elements', [])

-                if cache_key is not None :
-                    self.caching_strategy.set(cache_key, elements)
-                    self.logger.debug(f'Cache set.')
-                else :
-                    self.logger.debug(f'Cache miss. Fetching data through Overpass\nQuery = {query_str}')
-                return elements
+                self.logger.info(f'Cache miss. Fetching data through Overpass.')
+                self.logger.debug(f'Query = {query_str}')
+                return elements

         except urllib.error.URLError as e:
-            self.logger.error(f"Error connecting to Overpass API: {e}")
-            raise ConnectionError(f"Error connecting to Overpass API: {e}")
+            raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
         except Exception as exc :
             raise Exception(f'An unexpected error occured: {str(exc)}') from exc


-    def fill_cache(self, json_data: list) :
+    def fill_cache(self, json_data: dict) :
         """
         Fill cache with data by using a hollow cache entry's information.
         """
         query_str, cache_key = Overpass._build_query_from_hollow(json_data)
-        self.fetch_data_from_api(query_str, cache_key)
+        try:
+            data = urllib.parse.urlencode({'data': query_str}).encode('utf-8')
+            request = urllib.request.Request(self.overpass_url, data=data, headers=self.headers)
+
+            with urllib.request.urlopen(request) as response:
+                # Convert the HTTPResponse to a string and load data
+                response_data = response.read().decode('utf-8')
+                data = json.loads(response_data)
+
+                # Get elements and set cache
+                elements = data.get('elements', [])
+                self.caching_strategy.set(cache_key, elements)
+                self.logger.debug(f'Cache set for {cache_key}')
+
+        except urllib.error.URLError as e:
+            raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
+        except Exception as exc :
+            raise Exception(f'An unexpected error occured: {str(exc)}') from exc


     @staticmethod
     def build_query(bbox: tuple, osm_types: OSM_TYPES,
-                    selector: str, conditions=[], out='center') -> str:
+                    selector: str, conditions: list=None, out='center') -> str:
         """
         Constructs a query string for the Overpass API to retrieve OpenStreetMap (OSM) data.

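Review note: re-raising with `raise ConnectionError(...) from e` keeps the original `URLError` attached as `__cause__`, so tracebacks still show the underlying network failure even though the dedicated `self.logger.error(...)` call was dropped. A small standalone sketch of the chaining behaviour (illustrative, not project code):

import urllib.error

def fetch():
    try:
        raise urllib.error.URLError('connection refused')   # simulate a network failure
    except urllib.error.URLError as e:
        # 'from e' records the original error as __cause__ instead of discarding it
        raise ConnectionError(f"Error connecting to Overpass API: {e}") from e

try:
    fetch()
except ConnectionError as err:
    print(err.__cause__)   # <urlopen error connection refused>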
@@ -132,17 +143,12 @@ class Overpass :
         - If no conditions are provided, the query will just use the `selector` to filter the OSM
           elements without additional constraints.
         """
-        if not isinstance(conditions, list) :
-            conditions = [conditions]
-        if not isinstance(osm_types, list) :
-            osm_types = [osm_types]
-
         query = '[out:json];('

         # convert the bbox to string.
         bbox_str = f"({','.join(map(str, bbox))})"

-        if conditions :
+        if conditions is not None and len(conditions) > 0:
             conditions = '(if: ' + ' && '.join(conditions) + ')'
         else :
             conditions = ''
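Review note: with the new guard, both the default `None` and an explicitly empty list collapse to an empty filter, while a non-empty list still becomes an Overpass `(if: ...)` clause. A simplified standalone sketch of that fragment of `build_query` (the condition strings below are made-up examples):

def render_conditions(conditions=None):
    # Mirrors the guard shown in the diff: only a non-empty list yields an '(if: ...)' filter.
    if conditions is not None and len(conditions) > 0:
        return '(if: ' + ' && '.join(conditions) + ')'
    return ''

print(repr(render_conditions()))                                    # ''
print(repr(render_conditions([])))                                  # ''
print(repr(render_conditions(['count_tags()>5', 't["wikipedia"]'])))  # '(if: count_tags()>5 && t["wikipedia"])'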
@@ -175,7 +181,7 @@ class Overpass :
         for cell in overlapping_cells :
             for elem in osm_types :
                 key_str = f"{elem}[{selector}]{conditions}({','.join(map(str, cell))})"

                 cell_key_dict[cell] = get_cache_key(key_str)

         cached_responses = []
@@ -194,24 +200,21 @@ class Overpass :


     @staticmethod
-    def _build_query_from_hollow(json_data: list):
+    def _build_query_from_hollow(json_data: dict):
         """
         Build query string using information from a hollow cache entry.
         """
-        # Parse the JSON string into a dictionary
-        data = json.loads(json_data)
-
-        # Extract values from the JSON object
-        key = data.get('key')
-        cell = tuple(data.get('cell'))
+        key = json_data.get('key')
+        cell = tuple(json_data.get('cell'))
         bbox = Overpass._get_bbox_from_grid_cell(cell[0], cell[1])
-        osm_types = data.get('osm_types')
-        selector = data.get('selector')
-        conditions = data.get('conditions') if data.get('conditions') != "none" else []
-        out = data.get('out')
+        osm_types = json_data.get('osm_types')
+        selector = json_data.get('selector')
+        conditions = json_data.get('conditions')
+        out = json_data.get('out')

         query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)

         return query_str, key
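Review note: `_build_query_from_hollow` now expects an already-parsed dict rather than a raw JSON string, which also removes the old `"none"` sentinel handling for missing conditions. For reference, a hypothetical hollow cache entry with the keys the method reads (every value below is invented for illustration):

# Hypothetical hollow cache entry; the keys match what _build_query_from_hollow reads,
# the values are purely illustrative.
hollow_entry = {
    'key': 'node["amenity"="restaurant"](96,4)',   # made-up cache key
    'cell': [96, 4],                               # grid cell indices
    'osm_types': ['node', 'way'],
    'selector': '"amenity"="restaurant"',
    'conditions': [],                              # empty list instead of the old "none" sentinel
    'out': 'center',
}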
@@ -230,7 +233,7 @@ class Overpass :
         for lat_idx in range(min_lat_cell, max_lat_cell + 1):
             for lon_idx in range(min_lon_cell, max_lon_cell + 1):
                 overlapping_cells.add((lat_idx, lon_idx))

         return overlapping_cells
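For context on the cells iterated here: the indices appear to come from mapping a latitude/longitude bounding box onto a fixed-size grid. A rough standalone sketch of that idea, assuming a hypothetical `CELL_SIZE` of 0.5 degrees and a `(min_lat, min_lon, max_lat, max_lon)` bbox layout (the project's actual constant, helpers, and bbox order may differ):

import math

CELL_SIZE = 0.5  # degrees; assumed value for illustration only

def overlapping_cells(bbox):
    min_lat, min_lon, max_lat, max_lon = bbox
    min_lat_cell, max_lat_cell = math.floor(min_lat / CELL_SIZE), math.floor(max_lat / CELL_SIZE)
    min_lon_cell, max_lon_cell = math.floor(min_lon / CELL_SIZE), math.floor(max_lon / CELL_SIZE)
    cells = set()
    for lat_idx in range(min_lat_cell, max_lat_cell + 1):
        for lon_idx in range(min_lon_cell, max_lon_cell + 1):
            cells.add((lat_idx, lon_idx))
    return cells

print(sorted(overlapping_cells((48.1, 2.1, 48.7, 2.9))))
# [(96, 4), (96, 5), (97, 4), (97, 5)]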
@@ -274,7 +277,7 @@ class Overpass :
         for element in cached_data:
             combined_data.append(element)
         return combined_data


 def get_base_info(elem: dict, osm_type: OSM_TYPES, with_name=False) :
     """
@@ -327,14 +330,13 @@ def fill_cache():
     with os.scandir(OSM_CACHE_DIR) as it:
         for entry in it:
             if entry.is_file() and entry.name.startswith('hollow_'):

-                # Read the whole file content as a string
                 with open(entry.path, 'r') as f:
-                    json_data = f.read()
+                    json_data = json.load(f)

                 # Fill the cache with the query and key
                 overpass.fill_cache(json_data)

                 # Now delete the file as the cache is filled
                 os.remove(entry.path)
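Review note: `json.load(f)` parses the open file straight into a dict, which is what the reworked `fill_cache(json_data: dict)` expects, whereas the old `f.read()` handed it a raw string. A tiny standalone comparison (the file name is made up):

import json

# Write a throwaway JSON file for the comparison.
with open('hollow_demo.json', 'w') as f:
    json.dump({'key': 'demo', 'out': 'center'}, f)

with open('hollow_demo.json', 'r') as f:
    as_text = f.read()        # str: what the old code passed along
with open('hollow_demo.json', 'r') as f:
    as_dict = json.load(f)    # dict: what fill_cache now receives

assert isinstance(as_text, str)
assert as_dict == json.loads(as_text) == {'key': 'demo', 'out': 'center'}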