Compare commits
7 Commits
main...2288a50b60

Commits:
2288a50b60
02133a4abe
03cf58ce43
c4fddc1a57
af5aa0097c
b82f9997a4
1d5553f7f2
.gitea/workflows/frontend_build-android.yaml (Normal file, 67 lines added)
@@ -0,0 +1,67 @@
on:
  pull_request:
    branches:
      - main
    paths:
      - frontend/**


name: Build and release debug APK

jobs:
  build:
    name: Build APK
    runs-on: ubuntu-latest
    steps:

      - name: Install prerequisites
        run: |
          apt-get update
          apt-get install -y jq

      - uses: https://gitea.com/actions/checkout@v4

      - uses: https://github.com/actions/setup-java@v4
        with:
          java-version: '17'
          distribution: 'zulu'

      - name: Fix flutter SDK folder permission
        run: git config --global --add safe.directory "*"

      - uses: https://github.com/subosito/flutter-action@v2
        with:
          channel: stable
          flutter-version: 3.22.0
          cache: true

      - name: Setup Android SDK
        uses: https://github.com/android-actions/setup-android@v3

      - run: flutter pub get
        working-directory: ./frontend

      - name: Add required secrets
        env:
          ANDROID_SECRETS_PROPERTIES: ${{ secrets.ANDROID_SECRETS_PROPERTIES }}
        run: |
          echo "$ANDROID_SECRETS_PROPERTIES" >> ./android/secrets.properties
        working-directory: ./frontend

      - name: Sanity check
        run: |
          ls
          ls -lah android
        working-directory: ./frontend

      - run: flutter build apk --debug --split-per-abi --build-number=${{ gitea.run_number }}
        working-directory: ./frontend

      - name: Upload APKs to artifacts
        uses: https://gitea.com/actions/upload-artifact@v3
        with:
          name: app-release
          path: frontend/build/app/outputs/flutter-apk/
          if-no-files-found: error
          retention-days: 15
.gitea/workflows/frontend_build-web.yaml (Normal file, 34 lines added)
@@ -0,0 +1,34 @@
# on:
#   pull_request:
#     branches:
#       - main
#     paths:
#       - frontend/**


# name: Build web

# jobs:
#   build:
#     name: Build Web
#     runs-on: ubuntu-latest
#     steps:

#       - name: Install prerequisites
#         run: |
#           sudo apt-get update
#           sudo apt-get install -y xz-utils

#       - uses: actions/checkout@v4

#       - uses: https://github.com/subosito/flutter-action@v2
#         with:
#           channel: stable
#           flutter-version: 3.19.6
#           cache: true

#       - run: flutter pub get
#         working-directory: ./frontend

#       - run: flutter build web
#         working-directory: ./frontend
@@ -1,59 +0,0 @@
on:
  pull_request:
    branches:
      - main
    paths:
      - frontend/**

name: Build and release apps to beta track

jobs:
  get-version:
    name: Get version
    runs-on: macos
    steps:
      - uses: https://gitea.com/actions/checkout@v4

      - name: Fetch tags from main branch
        # since this workflow is triggered by a pull request, we want to match the latest tag of the main branch
        id: version
        run: |
          git fetch origin main --tags
          LATEST_TAG=$(git describe --tags $(git rev-list --tags --max-count=1))
          # remove the 'v' prefix from the tag name
          echo "BUILD_NAME=${LATEST_TAG//v}" >> $GITHUB_OUTPUT

      - name: Output the version that is being used
        run: |
          echo "Building for version ${{ steps.version.outputs.BUILD_NAME }}"

    outputs:
      build_name: ${{ steps.version.outputs.BUILD_NAME }}

  build-android:
    name: Build and upload android app
    uses: ./.gitea/workflows/workflow_build-app-android.yaml
    with:
      build_type: beta
      build_name: ${{ needs.get-version.outputs.build_name }}
    secrets:
      ANDROID_SECRET_PROPERTIES_BASE64: ${{ secrets.ANDROID_SECRET_PROPERTIES_BASE64 }}
      ANDROID_GOOGLE_PLAY_JSON_BASE64: ${{ secrets.ANDROID_GOOGLE_PLAY_JSON_BASE64 }}
      ANDROID_KEYSTORE_BASE64: ${{ secrets.ANDROID_KEYSTORE_BASE64 }}
      ANDROID_GOOGLE_MAPS_API_KEY: ${{ secrets.ANDROID_GOOGLE_MAPS_API_KEY }}
    needs: get-version

  build-ios:
    name: Build and upload ios app
    uses: ./.gitea/workflows/workflow_build-app-ios.yaml
    with:
      build_type: beta
      build_name: ${{ needs.get-version.outputs.build_name }}
    secrets:
      IOS_ASC_KEY_ID: ${{ secrets.IOS_ASC_KEY_ID }}
      IOS_ASC_ISSUER_ID: ${{ secrets.IOS_ASC_ISSUER_ID }}
      IOS_ASC_KEY: ${{ secrets.IOS_ASC_KEY }}
      IOS_MATCH_REPO_SSH_KEY_BASE64: ${{ secrets.IOS_MATCH_REPO_SSH_KEY_BASE64 }}
      IOS_MATCH_PASSWORD: ${{ secrets.IOS_MATCH_PASSWORD }}
      IOS_GOOGLE_MAPS_API_KEY: ${{ secrets.IOS_GOOGLE_MAPS_API_KEY }}
    needs: build-android # technically not needed, but this prevents the builds from running in parallel
@@ -1,56 +0,0 @@
on:
  push:
    tags:
      - v*

name: Build and release apps to production track

jobs:
  get-version:
    name: Get version
    runs-on: macos
    steps:
      - uses: https://gitea.com/actions/checkout@v4

      - name: Get version from git tag
        id: version
        env:
          REF_NAME: ${{ gitea.ref_name }}
        # remove the 'v' prefix from the tag name
        run: |
          echo "BUILD_NAME=${REF_NAME//v}" >> $GITHUB_OUTPUT

      - name: Output the version that is being used
        run: |
          echo "Building for version ${{ steps.version.outputs.BUILD_NAME }}"

    outputs:
      build_name: ${{ steps.version.outputs.BUILD_NAME }}

  build-android:
    name: Build and upload android app
    uses: ./.gitea/workflows/workflow_build-app-android.yaml
    with:
      build_type: release
      build_name: ${{ needs.get-version.outputs.build_name }}
    secrets:
      ANDROID_SECRET_PROPERTIES_BASE64: ${{ secrets.ANDROID_SECRET_PROPERTIES_BASE64 }}
      ANDROID_GOOGLE_PLAY_JSON_BASE64: ${{ secrets.ANDROID_GOOGLE_PLAY_JSON_BASE64 }}
      ANDROID_KEYSTORE_BASE64: ${{ secrets.ANDROID_KEYSTORE_BASE64 }}
      ANDROID_GOOGLE_MAPS_API_KEY: ${{ secrets.ANDROID_GOOGLE_MAPS_API_KEY }}
    needs: get-version

  build-ios:
    name: Build and upload ios app
    uses: ./.gitea/workflows/workflow_build-app-ios.yaml
    with:
      build_type: release
      build_name: ${{ needs.get-version.outputs.build_name }}
    secrets:
      IOS_ASC_KEY_ID: ${{ secrets.IOS_ASC_KEY_ID }}
      IOS_ASC_ISSUER_ID: ${{ secrets.IOS_ASC_ISSUER_ID }}
      IOS_ASC_KEY: ${{ secrets.IOS_ASC_KEY }}
      IOS_MATCH_REPO_SSH_KEY_BASE64: ${{ secrets.IOS_MATCH_REPO_SSH_KEY_BASE64 }}
      IOS_MATCH_PASSWORD: ${{ secrets.IOS_MATCH_PASSWORD }}
      IOS_GOOGLE_MAPS_API_KEY: ${{ secrets.IOS_GOOGLE_MAPS_API_KEY }}
    needs: build-android # technically not needed, but this prevents the builds from running in parallel
.gitea/workflows/frontend_trigger_deployment.yaml (Normal file, 39 lines added)
@@ -0,0 +1,39 @@
on:
  push:
    tags:
      - v*

jobs:
  push-to-remote:
    # We want to use the macos runner provided by github actions. This requires to push to a remote first.
    # After the push we can use the action under frontend/.github/actions/ to deploy properly using fastlane on macos.
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3
        with:
          path: 'src'

      - name: Checkout remote repository
        uses: actions/checkout@v3
        with:
          path: 'dest'
          ref: 'main'
          github-server-url: 'https://github.com'
          repository: 'moll-re/anyway-frontend-builder'
          token: ${{ secrets.PUSH_GITHUB_API_TOKEN }}
          fetch-depth: 0
          persist-credentials: true

      - name: Copy files to remote repository
        run: cp -r src/frontend/. dest/

      - name: Commit and push changes
        run: |
          cd dest
          git config --global user.email "me@moll.re"
          git config --global user.name "[bot]"
          git add .
          git commit -m "Automatic code update for tag"
          git tag -a ${{ github.ref_name }} -m "mirrored tag"
          git push origin main --tags
@@ -1,78 +0,0 @@
on:
  workflow_call:
    inputs:
      build_type:
        description: 'Release type (release, beta)'
        required: true
        type: string
      build_name:
        description: 'Build name'
        required: true
        type: string
    secrets:
      ANDROID_SECRET_PROPERTIES_BASE64:
        required: true
      ANDROID_GOOGLE_PLAY_JSON_BASE64:
        required: true
      ANDROID_KEYSTORE_BASE64:
        required: true
      ANDROID_GOOGLE_MAPS_API_KEY:
        required: true

name: Build and release android appbundle to specified track

defaults:
  run:
    working-directory: frontend/android

jobs:
  build:
    runs-on: macos-14
    env:
      # $BUNDLE_GEMFILE must be set at the job level, so it is set for all steps
      BUNDLE_GEMFILE: ${{ gitea.workspace }}/frontend/android/Gemfile

    steps:
      - uses: https://gitea.com/actions/checkout@v4

      - uses: https://github.com/actions/setup-java@v4
        with:
          java-version: '17'
          distribution: 'zulu'

      - name: Setup Android SDK
        uses: https://github.com/android-actions/setup-android@v3

      - name: Fix flutter SDK folder permission
        run: git config --global --add safe.directory "*"

      - uses: https://github.com/subosito/flutter-action@v2
        with:
          channel: stable
          flutter-version-file: ${{ gitea.workspace }}/frontend/pubspec.yaml
          architecture: x64
          cache: true

      - name: Install dependencies and clean up
        run: |
          flutter pub get
          flutter clean

      - name: Set up ruby env and install fastlane
        uses: https://github.com/ruby/setup-ruby@v1
        with:
          ruby-version: 3.3
          bundler-cache: true # runs 'bundle install' and caches installed gems automatically

      - name: Add required secret files
        run: |
          echo "${{ secrets.ANDROID_SECRET_PROPERTIES_BASE64 }}" | base64 -d > secrets.properties
          echo "${{ secrets.ANDROID_GOOGLE_PLAY_JSON_BASE64 }}" | base64 -d > google-key.json
          echo "${{ secrets.ANDROID_KEYSTORE_BASE64 }}" | base64 -d > release.keystore

      - name: Run fastlane lane
        run: bundle exec fastlane deploy_${{ inputs.build_type }}
        env:
          BUILD_NUMBER: ${{ gitea.run_number }}
          BUILD_NAME: ${{ inputs.build_name }}
          ANDROID_GOOGLE_MAPS_API_KEY: ${{ secrets.ANDROID_GOOGLE_MAPS_API_KEY }}
@@ -1,90 +0,0 @@
on:
  workflow_call:
    inputs:
      build_type:
        description: 'Release type (release, beta)'
        required: true
        type: string
      build_name:
        description: 'Build name'
        required: true
        type: string
    secrets:
      IOS_ASC_KEY_ID:
        required: true
      IOS_ASC_ISSUER_ID:
        required: true
      IOS_ASC_KEY:
        required: true
      IOS_MATCH_REPO_SSH_KEY_BASE64:
        required: true
      IOS_MATCH_PASSWORD:
        required: true
      IOS_GOOGLE_MAPS_API_KEY:
        required: true

name: Build and release ipa to specified track

defaults:
  run:
    working-directory: frontend/ios

jobs:
  build:
    runs-on: macos-14
    env:
      # $BUNDLE_GEMFILE must be set at the job level, so it is set for all steps
      BUNDLE_GEMFILE: ${{ gitea.workspace }}/frontend/ios/Gemfile

    steps:
      - uses: https://gitea.com/actions/checkout@v4

      - name: Install Flutter
        uses: https://github.com/subosito/flutter-action@v2
        with:
          channel: stable
          flutter-version-file: ${{ gitea.workspace }}/frontend/pubspec.yaml
          architecture: x64
          cache: true

      - name: Set up ruby env
        uses: https://github.com/ruby/setup-ruby@v1
        with:
          ruby-version: 3.3
          bundler-cache: true # runs 'bundle install' and caches installed gems automatically

      - uses: GuillaumeFalourd/setup-rsync@v1.2
        # rsync is required by the google maps ios tools

      - name: Install dependencies and clean up
        run: |
          flutter pub get
          flutter precache --ios
          bundle exec pod install --allow-root
          flutter clean
          bundle exec pod cache clean --all --allow-root

      - name: Setup SSH key for match git repo
        # and mark the host as known
        run: |
          echo $MATCH_REPO_SSH_KEY | base64 --decode > ~/.ssh/id_rsa
          chmod 600 ~/.ssh/id_rsa
          ssh-keyscan -p 2222 git.kluster.moll.re > ~/.ssh/known_hosts
        env:
          MATCH_REPO_SSH_KEY: ${{ secrets.IOS_MATCH_REPO_SSH_KEY_BASE64 }}

      - name: Replace API Key from secret
        # on a macOS runner, sed requires a replacement suffix after the -i flag
        run: |
          sed -i '' -e "s/IOS_GOOGLE_MAPS_API_KEY/${{ secrets.IOS_GOOGLE_MAPS_API_KEY }}/g" Runner/AppDelegate.swift

      - name: Run fastlane lane
        run: bundle exec fastlane deploy_${{ inputs.build_type }}
        env:
          BUILD_NUMBER: ${{ gitea.run_number }}
          BUILD_NAME: ${{ inputs.build_name }}
          IOS_ASC_KEY_ID: ${{ secrets.IOS_ASC_KEY_ID }}
          IOS_ASC_ISSUER_ID: ${{ secrets.IOS_ASC_ISSUER_ID }}
          IOS_ASC_KEY: ${{ secrets.IOS_ASC_KEY }}
          MATCH_PASSWORD: ${{ secrets.IOS_MATCH_PASSWORD }}
          IOS_GOOGLE_MAPS_API_KEY: ${{ secrets.IOS_GOOGLE_MAPS_API_KEY }}
LICENSE.md (30 lines removed)
@@ -1,30 +0,0 @@
# License

## Proprietary License

All code and resources in this repository are the property of AnyDev. The software and related documentation are provided solely for use with services provided by AnyDev. Redistribution, modification, or use of this software outside of its intended service is strictly prohibited without explicit permission.

### Copyright © 2024 AnyDev

All rights reserved.

### Restrictions

- You may not modify, distribute, copy, or reverse engineer any part of this codebase.
- This software is licensed for use solely in conjunction with services provided by AnyDev.
- Any commercial use of this software is strictly prohibited without explicit written consent from AnyDev.

## Third-Party Dependencies

This project uses third-party dependencies, which are subject to their respective licenses.

- Python backend dependencies: fastapi, pydantic, numpy, shapely, etc. – Licensed under their respective licenses.
- Flutter frontend dependencies: Cupertino Icons, sliding_up_panel, http, etc. – Licensed under their respective licenses.

Please refer to each project's documentation for the specific terms and conditions.

## OpenStreetMap Data Usage

This project uses data derived from **OpenStreetMap**. OpenStreetMap data is available under the [Open Database License (ODbL)](https://www.openstreetmap.org/copyright). We comply with the ODbL license, and some of the data displayed in the service may be derived from OpenStreetMap sources. We do not redistribute raw OpenStreetMap data; instead, it is processed and transformed before being used in our services.

More information about OpenStreetMap data usage can be found [here](https://www.openstreetmap.org/copyright).
backend/.gitignore (vendored, 3 changes)
@@ -1,9 +1,6 @@
# osm-cache
cache_XML/

# secrets
*secrets.yaml

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
@@ -1,363 +0,0 @@
[
    {
        "name": "Chinatown",
        "type": "shopping",
        "location": [
            45.7554934,
            4.8444852
        ],
        "osm_type": "way",
        "osm_id": 996515596,
        "attractiveness": 129,
        "n_tags": 0,
        "image_url": null,
        "website_url": null,
        "wiki_url": null,
        "keywords": {},
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "285d159c-68ee-4b37-8d71-f27ee3d38b02",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Galeries Lafayette",
        "type": "shopping",
        "location": [
            45.7627107,
            4.8556833
        ],
        "osm_type": "way",
        "osm_id": 1069872743,
        "attractiveness": 197,
        "n_tags": 11,
        "image_url": null,
        "website_url": "http://www.galerieslafayette.com/",
        "wiki_url": null,
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "28f1bc30-10d3-4944-8861-0ed9abca012d",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Muji",
        "type": "shopping",
        "location": [
            45.7615971,
            4.8543781
        ],
        "osm_type": "way",
        "osm_id": 1044165817,
        "attractiveness": 259,
        "n_tags": 14,
        "image_url": null,
        "website_url": "https://www.muji.com/fr/",
        "wiki_url": null,
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": "Muji",
        "uuid": "957f86a5-6c00-41a2-815d-d6f739052be4",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "HEMA",
        "type": "shopping",
        "location": [
            45.7619133,
            4.8565239
        ],
        "osm_type": "way",
        "osm_id": 1069872750,
        "attractiveness": 156,
        "n_tags": 9,
        "image_url": null,
        "website_url": "https://fr.westfield.com/lapartdieu/store/HEMA/www.hema.fr",
        "wiki_url": null,
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "8dae9d3e-e4c4-4e80-941d-0b106e22c85b",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Cordeliers",
        "type": "shopping",
        "location": [
            45.7622752,
            4.8337998
        ],
        "osm_type": "node",
        "osm_id": 5545183519,
        "attractiveness": 813,
        "n_tags": 0,
        "image_url": null,
        "website_url": null,
        "wiki_url": null,
        "keywords": {},
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "ba02adb5-e28f-4645-8c2d-25ead6232379",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Halles de Lyon Paul Bocuse",
        "type": "shopping",
        "location": [
            45.7628282,
            4.8505601
        ],
        "osm_type": "relation",
        "osm_id": 971529,
        "attractiveness": 272,
        "n_tags": 12,
        "image_url": null,
        "website_url": "https://www.halles-de-lyon-paulbocuse.com/",
        "wiki_url": "fr:Halles de Lyon-Paul Bocuse",
        "keywords": {
            "importance": "national",
            "height": null,
            "place_type": "marketplace",
            "date": null
        },
        "description": "Halles de Lyon Paul Bocuse is a marketplace of national importance.",
        "duration": 30,
        "name_en": null,
        "uuid": "bbd50de3-aa91-425d-90c2-d4abfd1b4abe",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Grand Bazar",
        "type": "shopping",
        "location": [
            45.7632141,
            4.8361975
        ],
        "osm_type": "way",
        "osm_id": 82399951,
        "attractiveness": 93,
        "n_tags": 7,
        "image_url": null,
        "website_url": null,
        "wiki_url": null,
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "3de9131c-87c5-4efb-9fa8-064896fb8b29",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Shopping Area",
        "type": "shopping",
        "location": [
            45.7673452,
            4.8438683
        ],
        "osm_type": "node",
        "osm_id": 0,
        "attractiveness": 156,
        "n_tags": 0,
        "image_url": null,
        "website_url": null,
        "wiki_url": null,
        "keywords": {},
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "df2482a8-7e2e-4536-aad3-564899b2fa65",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Cour Oxyg\u00e8ne",
        "type": "shopping",
        "location": [
            45.7620905,
            4.8568873
        ],
        "osm_type": "way",
        "osm_id": 132673030,
        "attractiveness": 63,
        "n_tags": 5,
        "image_url": null,
        "website_url": null,
        "wiki_url": null,
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "ed134f76-9a02-4bee-9c10-78454f7bc4ce",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "P\u00f4le de Commerces et de Loisirs Confluence",
        "type": "shopping",
        "location": [
            45.7410414,
            4.8171031
        ],
        "osm_type": "way",
        "osm_id": 440270633,
        "attractiveness": 259,
        "n_tags": 14,
        "image_url": null,
        "website_url": "https://www.confluence.fr/",
        "wiki_url": null,
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "dd7e2f5f-0e60-4560-b903-e5ded4b6e36a",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Grand H\u00f4tel-Dieu",
        "type": "shopping",
        "location": [
            45.7586955,
            4.8364597
        ],
        "osm_type": "relation",
        "osm_id": 300128,
        "attractiveness": 546,
        "n_tags": 22,
        "image_url": null,
        "website_url": "https://grand-hotel-dieu.com",
        "wiki_url": "fr:H\u00f4tel-Dieu de Lyon",
        "keywords": {
            "importance": "international",
            "height": null,
            "place_type": "building",
            "date": "C17"
        },
        "description": "Grand H\u00f4tel-Dieu is an internationally famous building. It was constructed in C17.",
        "duration": 30,
        "name_en": null,
        "uuid": "a91265a8-ffbd-44f7-a7ab-3ff75f08fbab",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Westfield La Part-Dieu",
        "type": "shopping",
        "location": [
            45.761331,
            4.855676
        ],
        "osm_type": "way",
        "osm_id": 62338376,
        "attractiveness": 546,
        "n_tags": 22,
        "image_url": null,
        "website_url": "https://fr.westfield.com/lapartdieu",
        "wiki_url": "fr:La Part-Dieu (centre commercial)",
        "keywords": null,
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "7d60316f-d689-4fcf-be68-ffc09353b826",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    },
    {
        "name": "Ainay",
        "type": "shopping",
        "location": [
            45.7553105,
            4.8312084
        ],
        "osm_type": "node",
        "osm_id": 5545126047,
        "attractiveness": 132,
        "n_tags": 0,
        "image_url": null,
        "website_url": null,
        "wiki_url": null,
        "keywords": {},
        "description": null,
        "duration": 30,
        "name_en": null,
        "uuid": "ad214f3d-a4b9-4078-876a-446caa7ab01c",
        "must_do": false,
        "must_avoid": false,
        "is_secondary": false,
        "time_to_reach_next": 0,
        "next_uuid": null,
        "is_viewpoint": false,
        "is_place_of_worship": false
    }
]
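The deleted file above is a dump of serialized landmark records whose fields line up with the Landmark pydantic model that appears later in this diff. As a hedged illustration only (the exact import path and the model's required fields are assumptions, not confirmed by this diff), such a record could be re-validated against the model roughly like this:

# Sketch: validating one of the removed JSON records against the Landmark model
# shown later in this diff. Import path and field requirements are assumptions.
from backend.src.structs.landmark import Landmark  # hypothetical import path

record = {
    "name": "Galeries Lafayette",
    "type": "shopping",
    "location": [45.7627107, 4.8556833],
    "osm_type": "way",
    "osm_id": 1069872743,
    "attractiveness": 197,
    "n_tags": 11,
    "website_url": "http://www.galerieslafayette.com/",
    "duration": 30,
    "uuid": "28f1bc30-10d3-4944-8861-0ed9abca012d",
}

landmark = Landmark.model_validate(record)  # pydantic v2 API, matching ConfigDict usage below
print(landmark.name, landmark.attractiveness)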
@@ -1,16 +1,17 @@
"""Main app for backend api"""

import logging
import time
from contextlib import asynccontextmanager
from fastapi import FastAPI, HTTPException, BackgroundTasks
from fastapi import FastAPI, HTTPException, BackgroundTasks, Query

from .logging_config import configure_logging
from .structs.landmark import Landmark
from .structs.landmark import Landmark, Toilets
from .structs.preferences import Preferences
from .structs.linked_landmarks import LinkedLandmarks
from .structs.trip import Trip
from .landmarks.landmarks_manager import LandmarkManager
from .toilets.toilet_routes import router as toilets_router
from .utils.landmarks_manager import LandmarkManager
from .utils.toilets_manager import ToiletsManager
from .optimization.optimizer import Optimizer
from .optimization.refiner import Refiner
from .overpass.overpass import fill_cache

@@ -36,10 +37,6 @@ async def lifespan(app: FastAPI):
app = FastAPI(lifespan=lifespan)


app.include_router(toilets_router)


@app.post("/trip/new")
def new_trip(preferences: Preferences,
             start: tuple[float, float],

@@ -69,8 +66,6 @@ def new_trip(preferences: Preferences,
        end = start
        logger.info("No end coordinates provided. Using start=end.")

    logger.info(f"Requested new trip generation. Details:\n\tCoordinates: {start}\n\tTime: {preferences.max_time_minute}\n\tSightseeing: {preferences.sightseeing.score}\n\tNature: {preferences.nature.score}\n\tShopping: {preferences.shopping.score}")

    start_landmark = Landmark(name='start',
                              type='start',
                              location=(start[0], start[1]),

@@ -92,7 +87,6 @@ def new_trip(preferences: Preferences,
                            n_tags=0)

    start_time = time.time()

    # Generate the landmarks from the start location
    landmarks, landmarks_short = manager.generate_landmarks_list(
        center_coordinates = start,

@@ -114,7 +108,6 @@ def new_trip(preferences: Preferences,
    try:
        base_tour = optimizer.solve_optimization(preferences.max_time_minute, landmarks_short)
    except Exception as exc:
        logger.error(f"Trip generation failed: {str(exc)}")
        raise HTTPException(status_code=500, detail=f"Optimization failed: {str(exc)}") from exc

    t_first_stage = time.time() - start_time

@@ -126,21 +119,22 @@ def new_trip(preferences: Preferences,
        refined_tour = refiner.refine_optimization(landmarks, base_tour,
                                                   preferences.max_time_minute,
                                                   preferences.detour_tolerance_minute)
    except Exception as exc :
        logger.warning(f"Refiner failed. Proceeding with base trip {str(exc)}")
    except TimeoutError as te :
        logger.error(f'Refiner failed : {str(te)} Using base tour.')
        refined_tour = base_tour
    except Exception as exc :
        raise HTTPException(status_code=500, detail=f"An unexpected error occurred: {str(exc)}") from exc

    t_second_stage = time.time() - start_time

    logger.debug(f'First stage optimization\t: {round(t_first_stage,3)} seconds')
    logger.debug(f'Second stage optimization\t: {round(t_second_stage,3)} seconds')
    logger.info(f'Total computation time\t: {round(t_first_stage + t_second_stage,3)} seconds')
    linked_tour = LinkedLandmarks(refined_tour)

    linked_tour = LinkedLandmarks(refined_tour)
    # upon creation of the trip, persistence of both the trip and its landmarks is ensured.
    trip = Trip.from_linked_landmarks(linked_tour, cache_client)
    logger.info(f'Generated a trip of {trip.total_time} minutes with {len(refined_tour)} landmarks in {round(t_generate_landmarks + t_first_stage + t_second_stage,3)} seconds.')
    logger.debug('Detailed trip :\n\t' + '\n\t'.join(f'{landmark}' for landmark in refined_tour))

    background_tasks.add_task(fill_cache)

@@ -163,7 +157,6 @@ def get_trip(trip_uuid: str) -> Trip:
        trip = cache_client.get(f"trip_{trip_uuid}")
        return trip
    except KeyError as exc:
        logger.error(f"Failed to fetch trip with UUID {trip_uuid}: {str(exc)}")
        raise HTTPException(status_code=404, detail="Trip not found") from exc


@@ -182,45 +175,32 @@ def get_landmark(landmark_uuid: str) -> Landmark:
        landmark = cache_client.get(f"landmark_{landmark_uuid}")
        return landmark
    except KeyError as exc:
        logger.error(f"Failed to fetch landmark with UUID {landmark_uuid}: {str(exc)}")
        raise HTTPException(status_code=404, detail="Landmark not found") from exc


@app.post("/trip/recompute-time/{trip_uuid}/{removed_landmark_uuid}")
def update_trip_time(trip_uuid: str, removed_landmark_uuid: str) -> Trip:
@app.post("/toilets/new")
def get_toilets(location: tuple[float, float] = Query(...), radius: int = 500) -> list[Toilets] :
    """
    Updates the reaching times of a given trip when removing a landmark.
    Endpoint to find toilets within a specified radius from a given location.

    This endpoint expects the `location` and `radius` as **query parameters**, not in the request body.

    Args:
        landmark_uuid (str) : unique identifier for a Landmark.
        location (tuple[float, float]): The latitude and longitude of the location to search from.
        radius (int, optional): The radius (in meters) within which to search for toilets. Defaults to 500 meters.

    Returns:
        (Landmark) : the corresponding Landmark.
        list[Toilets]: A list of Toilets objects that meet the criteria.
    """
    # First, fetch the trip in the cache.
    try:
        trip = cache_client.get(f'trip_{trip_uuid}')
    except KeyError as exc:
        logger.error(f"Failed to update trip with UUID {trip_uuid} (trip not found): {str(exc)}")
        raise HTTPException(status_code=404, detail='Trip not found') from exc
    if location is None:
        raise HTTPException(status_code=406, detail="Coordinates not provided or invalid")
    if not (-90 <= location[0] <= 90 or -180 <= location[1] <= 180):
        raise HTTPException(status_code=422, detail="Start coordinates not in range")

    landmarks = []
    next_uuid = trip.first_landmark_uuid
    toilets_manager = ToiletsManager(location, radius)

    # Extract landmarks
    try :
        while next_uuid is not None:
            landmark = cache_client.get(f'landmark_{next_uuid}')
            # Filter out the removed landmark.
            if next_uuid != removed_landmark_uuid :
                landmarks.append(landmark)
            next_uuid = landmark.next_uuid # Prepare for the next iteration
        toilets_list = toilets_manager.generate_toilet_list()
        return toilets_list
    except KeyError as exc:
        logger.error(f"Failed to update trip with UUID {trip_uuid} : {str(exc)}")
        raise HTTPException(status_code=404, detail=f'landmark {next_uuid} not found') from exc

    # Re-link everything and compute times again
    linked_tour = LinkedLandmarks(landmarks)
    trip = Trip.from_linked_landmarks(linked_tour, cache_client)

    return trip
        raise HTTPException(status_code=404, detail="No toilets found") from exc
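The hunk above replaces the trip-recompute handler with the new `/toilets/new` endpoint, which reads `location` and `radius` from query parameters rather than a request body. A minimal sketch of exercising it with FastAPI's TestClient follows; the `app` import path is an assumption based on the test files later in this diff, and the call only succeeds if the backend's cache and Overpass dependencies are reachable.

# Sketch: calling /toilets/new with query parameters (tuple params are sent as repeated
# query parameters, i.e. ?location=45.75&location=4.83). Import path is hypothetical.
from fastapi.testclient import TestClient

from backend.src.main import app  # hypothetical import path; tests use `from ..main import app`

client = TestClient(app)

response = client.post(
    "/toilets/new",
    params={"location": [45.7517, 4.8271], "radius": 500},
)

if response.status_code == 200:
    for entry in response.json():
        # Each entry follows the Toilets model: location, wheelchair, changing_table, fee, opening_hours
        print(entry["location"], entry.get("wheelchair"))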
@@ -257,6 +257,7 @@ class Optimizer:
        Returns:
            None: This function modifies the `prob` object by adding L-2 equality constraints in-place.
        """
        # FIXME: weird 0 artifact in the coefficients popping up
        # Loop through rows 1 to L-2 to prevent stacked ones
        for i in range(1, L-1):
            # Add the constraint that sums across each "row" or "block" in the decision variables

@@ -589,15 +590,15 @@ class Optimizer:
        try :
            prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit+1, gapRel=self.gap_rel))
        except Exception as exc :
            raise Exception(f"No solution found: {str(exc)}") from exc
            raise Exception(f"No solution found: {exc}") from exc
        status = pl.LpStatus[prob.status]
        solution = [pl.value(var) for var in x]  # The values of the decision variables (will be 0 or 1)

        self.logger.debug("First results are out. Looking out for circles and correcting...")
        self.logger.debug("First results are out. Looking out for circles and correcting.")

        # Raise error if no solution is found. FIXME: for now this throws the internal server error
        if status != 'Optimal' :
            self.logger.warning("The problem is overconstrained, no solution on first try.")
            self.logger.error("The problem is overconstrained, no solution on first try.")
            raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.")

        # If there is a solution, we're good to go, just check for connectedness

@@ -607,7 +608,7 @@ class Optimizer:
        while circles is not None :
            i += 1
            if i == self.max_iter :
                self.logger.warning(f'Timeout: No solution found after {self.max_iter} iterations.')
                self.logger.error(f'Timeout: No solution found after {self.max_iter} iterations.')
                raise TimeoutError(f"Optimization took too long. No solution found after {self.max_iter} iterations.")

            for circle in circles :

@@ -617,13 +618,12 @@ class Optimizer:
            try :
                prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit, gapRel=self.gap_rel))
            except Exception as exc :
                self.logger.warning("No solution found: {str(exc)")
                raise Exception(f"No solution found: {str(exc)}") from exc
                raise Exception(f"No solution found: {exc}") from exc

            solution = [pl.value(var) for var in x]

            if pl.LpStatus[prob.status] != 'Optimal' :
                self.logger.warning("The problem is overconstrained, no solution after {i} cycles.")
                self.logger.error("The problem is overconstrained, no solution after {i} cycles.")
                raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.")

            circles = self.is_connected(solution)
@@ -278,7 +278,7 @@ class Refiner :
        better_tour_poly = concave_hull(MultiPoint(coords))  # Create concave hull with "core" of tour leaving out start and finish
        xs, ys = better_tour_poly.exterior.xy
        """
        FIXED : ERROR HERE :
        ERROR HERE :
        Exception has occurred: AttributeError
        'LineString' object has no attribute 'exterior'
        """

@@ -356,7 +356,7 @@ class Refiner :

        # If unsuccessful optimization, use the base_tour.
        if new_tour is None:
            self.logger.warning("Refiner failed: No solution found during second stage optimization.")
            self.logger.warning("No solution found for the refined tour. Returning the initial tour.")
            new_tour = base_tour

        # If only one landmark, return it.

@@ -369,7 +369,6 @@ class Refiner :
        # Fix the tour using Polygons if the path looks weird.
        # Conditions : circular trip and invalid polygon.
        if base_tour[0].location == base_tour[-1].location and not better_poly.is_valid :
            self.logger.debug("Tours might be funky, attempting to correct with polygons")
            better_tour = self.fix_using_polygon(better_tour)

        return better_tour
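The "FIXED : ERROR HERE" note above refers to `concave_hull` returning a geometry without an `exterior` attribute. A small, self-contained sketch of that failure mode and a type guard (illustration only, not the project's fix):

# Sketch: why better_tour_poly.exterior can raise AttributeError.
# concave_hull of (nearly) collinear points can degenerate to a LineString,
# and only Polygon exposes an exterior ring.
from shapely import concave_hull
from shapely.geometry import MultiPoint, Polygon

collinear = MultiPoint([(0.0, 0.0), (1.0, 1.0), (2.0, 2.0)])
spread = MultiPoint([(0.0, 0.0), (1.0, 0.2), (2.0, 1.5), (0.5, 1.0)])

for points in (collinear, spread):
    hull = concave_hull(points)
    if isinstance(hull, Polygon):
        xs, ys = hull.exterior.xy          # safe: Polygon has an exterior ring
        print("polygon with", len(xs), "exterior coordinates")
    else:
        print("no exterior available for", hull.geom_type)  # e.g. LineString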
@@ -1,4 +1,3 @@
"""Module defining the handling of cache data from Overpass requests."""
import os
import json
import hashlib

@@ -62,7 +61,7 @@ class JSONCache(CachingStrategyBase):
            return None

    def set(self, key, value):
        """Save the JSON data in the cache."""
        """Save the JSON data as an ElementTree to the cache."""
        filename = self._filename(key)
        try:
            # Write the JSON data to the cache file

@@ -95,7 +94,7 @@ class JSONCache(CachingStrategyBase):

    def close(self):
        """Cleanup method, if needed."""

        pass

class CachingStrategy:
    """

@@ -108,7 +107,6 @@ class CachingStrategy:

    @classmethod
    def use(cls, strategy_name='JSON', **kwargs):
        """Define the caching strategy to use."""
        if cls.__strategy:
            cls.__strategy.close()

@@ -121,12 +119,10 @@ class CachingStrategy:

    @classmethod
    def get(cls, key):
        """Get the data from the cache."""
        return cls.__strategy.get(key)

    @classmethod
    def set(cls, key, value):
        """Save the data in the cache."""
        cls.__strategy.set(key, value)

    @classmethod
@@ -1,6 +1,5 @@
"""Module allowing connexion to overpass api and fetch data from OSM."""
import os
import time
import urllib
import math
import logging

@@ -53,22 +52,22 @@ class Overpass :
        # Retrieve cached data and identify missing cache entries
        cached_responses, non_cached_cells = self._retrieve_cached_data(overlapping_cells, osm_types, selector, conditions, out)

        self.logger.debug(f'Cache hit for {len(overlapping_cells)-len(non_cached_cells)}/{len(overlapping_cells)} quadrants.')
        self.logger.info(f'Cache hit for {len(overlapping_cells)-len(non_cached_cells)}/{len(overlapping_cells)} quadrants.')

        # If there is no missing data, return the cached responses after filtering.
        if not non_cached_cells :
            return Overpass._filter_landmarks(cached_responses, bbox)

        # If there is no cached data, fetch all from Overpass.
        if not cached_responses :
        elif not cached_responses :
            query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)
            self.logger.debug(f'Query string: {query_str}')
            return self.fetch_data_from_api(query_str)

        # Hybrid cache: some data from Overpass, some data from cache.
        else :
            # Resize the bbox for smaller search area and build new query string.
            non_cached_bbox = Overpass._get_non_cached_bbox(non_cached_cells, bbox)
            query_str = Overpass.build_query(non_cached_bbox, osm_types, selector, conditions, out)
            self.logger.debug(f'Query string: {query_str}')
            non_cached_responses = self.fetch_data_from_api(query_str)
            return Overpass._filter_landmarks(cached_responses, bbox) + non_cached_responses

@@ -95,10 +94,9 @@ class Overpass :
            return elements

        except urllib.error.URLError as e:
            self.logger.error(f"Error connecting to Overpass API: {str(e)}")
            raise ConnectionError(f"Error connecting to Overpass API: {str(e)}") from e
            self.logger.error(f"Error connecting to Overpass API: {e}")
            raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
        except Exception as exc :
            self.logger.error(f"unexpected error while fetching data from Overpass: {str(exc)}")
            raise Exception(f'An unexpected error occurred: {str(exc)}') from exc


@@ -122,7 +120,7 @@ class Overpass :
            self.caching_strategy.set(cache_key, elements)
            self.logger.debug(f'Cache set for {cache_key}')
        except urllib.error.URLError as e:
            raise ConnectionError(f"Error connecting to Overpass API: {str(e)}") from e
            raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
        except Exception as exc :
            raise Exception(f'An unexpected error occurred: {str(exc)}') from exc

@@ -153,7 +151,7 @@ class Overpass :
            - If no conditions are provided, the query will just use the `selector` to filter the OSM
              elements without additional constraints.
        """
        query = '[out:json][timeout:20];('
        query = '[out:json];('

        # convert the bbox to string.
        bbox_str = f"({','.join(map(str, bbox))})"

@@ -388,7 +386,7 @@ def get_base_info(elem: dict, osm_type: OSM_TYPES, with_name=False) :
    if with_name :
        name = elem.get('tags', {}).get('name')
        return osm_id, coords, name

    else :
        return osm_id, coords


@@ -399,25 +397,18 @@ def fill_cache():
    """
    overpass = Overpass()

    n_files = 0
    total = 0

    with os.scandir(OSM_CACHE_DIR) as it:
        for entry in it:
            if entry.is_file() and entry.name.startswith('hollow_'):
                total += 1

                try :
                    # Read the whole file content as a string
                    with open(entry.path, 'r', encoding='utf-8') as f:
                    with open(entry.path, 'r') as f:
                        # load data and fill the cache with the query and key
                        json_data = json.load(f)
                        overpass.fill_cache(json_data)
                        n_files += 1
                        time.sleep(1)
                    # Now delete the file as the cache is filled
                    os.remove(entry.path)

                except Exception as exc :
                    overpass.logger.error(f'An error occurred while parsing file {entry.path} as .json file: {str(exc)}')

    overpass.logger.info(f"Successfully filled {n_files}/{total} cache files.")
                    overpass.logger.error(f'An error occurred while parsing file {entry.path} as .json file')
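For context, `build_query` above emits standard Overpass QL: a settings header, a union of per-type statements constrained to the bounding box, and an `out` statement. A simplified, hypothetical sketch of that shape (this is an illustration, not the project's actual `build_query`):

# Sketch: composing an Overpass QL string in the shape used by build_query above.
def build_query_sketch(bbox, osm_types, selector, out="center"):
    # bbox order follows Overpass QL: (south, west, north, east)
    bbox_str = f"({','.join(map(str, bbox))})"
    query = "[out:json];("
    for osm_type in osm_types:
        query += f"{osm_type}[{selector}]{bbox_str};"
    return query + f");out {out};"

print(build_query_sketch(
    bbox=(45.74, 4.81, 45.77, 4.86),
    osm_types=["node", "way"],
    selector='"shop"="mall"',
))
# -> [out:json];(node["shop"="mall"](45.74,4.81,45.77,4.86);way["shop"="mall"](45.74,4.81,45.77,4.86););out center;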
@@ -72,7 +72,6 @@ sightseeing:
    # - castle
    # - museum


museums:
  tourism:
    - museum
@@ -1,11 +1,12 @@
max_bbox_side: 4000 #m
radius_close_to: 50
church_coeff: 0.75
nature_coeff: 1.6
church_coeff: 0.55
nature_coeff: 1.4
overall_coeff: 10
tag_exponent: 1.15
image_bonus: 1.1
viewpoint_bonus: 10
viewpoint_bonus: 5
wikipedia_bonus: 1.25
name_bonus: 3
N_important: 60
pay_bonus: -1
@@ -5,5 +5,5 @@ max_landmarks: 10
max_landmarks_refiner: 20
overshoot: 0.0016
time_limit: 1
gap_rel: 0.025
max_iter: 80
gap_rel: 0.05
max_iter: 40
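These parameters feed the CBC solver calls shown in the optimizer hunks earlier (`pl.PULP_CBC_CMD(...)`): the relative gap is loosened to 0.05 and the circle-correction loop is capped at 40 iterations. A rough, self-contained sketch of how such values wire into PuLP, assuming a plain dict in place of the loaded YAML:

# Sketch: configuring PuLP's CBC solver with the parameter values above.
import pulp as pl

params = {"time_limit": 1, "gap_rel": 0.05, "max_iter": 40}  # values from the file above

solver = pl.PULP_CBC_CMD(
    msg=False,
    timeLimit=params["time_limit"] + 1,  # +1 as in the first-stage solve shown earlier
    gapRel=params["gap_rel"],
)

# Tiny stand-in problem, only to show the solver configuration end to end.
prob = pl.LpProblem("demo", pl.LpMaximize)
x = pl.LpVariable("x", lowBound=0, upBound=10)
prob += x
prob.solve(solver)
print(pl.LpStatus[prob.status], pl.value(x))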
@@ -1,7 +1,8 @@
"""Definition of the Landmark class to handle visitable objects across the world."""

from typing import Optional, Literal
from uuid import uuid4, UUID
from pydantic import BaseModel, Field
from pydantic import BaseModel, ConfigDict, Field


# Output to frontend

@@ -49,8 +50,7 @@ class Landmark(BaseModel) :
    image_url : Optional[str] = None
    website_url : Optional[str] = None
    wiki_url : Optional[str] = None
    keywords: Optional[dict] = {}
    description : Optional[str] = None
    description : Optional[str] = None      # TODO future
    duration : Optional[int] = 5
    name_en : Optional[str] = None

@@ -69,7 +69,6 @@ class Landmark(BaseModel) :
    is_viewpoint : Optional[bool] = False
    is_place_of_worship : Optional[bool] = False


    def __str__(self) -> str:
        """
        String representation of the Landmark object.

@@ -123,3 +122,26 @@ class Landmark(BaseModel) :
        return (self.uuid == value.uuid or
                self.osm_id == value.osm_id or
                (self.name == value.name and self.distance(value) < 0.001))


class Toilets(BaseModel) :
    """
    Model for toilets. When false/empty the information is either false or not known.
    """
    location : tuple
    wheelchair : Optional[bool] = False
    changing_table : Optional[bool] = False
    fee : Optional[bool] = False
    opening_hours : Optional[str] = ""


    def __str__(self) -> str:
        """
        String representation of the Toilets object.

        Returns:
            str: A formatted string with the toilets location.
        """
        return f'Toilets @{self.location}'

    model_config = ConfigDict(from_attributes=True)
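For reference, a short sketch of how the `Toilets` model added above can be instantiated and serialized (pydantic v2 API, matching the `ConfigDict` usage above; the import path is an assumption):

# Sketch: constructing and serializing the Toilets model defined above.
from backend.src.structs.landmark import Toilets  # hypothetical import path

toilet = Toilets(
    location=(45.7628, 4.8505),
    wheelchair=True,
    changing_table=False,
    fee=True,
    opening_hours="Mo-Su 08:00-20:00",
)

print(toilet)               # -> Toilets @(45.7628, 4.8505)
print(toilet.model_dump())  # plain dict, e.g. what the /toilets/new response serializes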
@@ -1,26 +0,0 @@
"""Definition of the Toilets class."""
from typing import Optional
from pydantic import BaseModel, ConfigDict


class Toilets(BaseModel) :
    """
    Model for toilets. When false/empty the information is either false or not known.
    """
    location : tuple
    wheelchair : Optional[bool] = False
    changing_table : Optional[bool] = False
    fee : Optional[bool] = False
    opening_hours : Optional[str] = ""


    def __str__(self) -> str:
        """
        String representation of the Toilets object.

        Returns:
            str: A formatted string with the toilets location.
        """
        return f'Toilets @{self.location}'

    model_config = ConfigDict(from_attributes=True)
@@ -31,9 +31,9 @@ def test_turckheim(client, request):   # pylint: disable=redefined-outer-name
            "shopping": {"type": "shopping", "score": 0},
            "max_time_minute": duration_minutes,
            "detour_tolerance_minute": 0},
            "start": [48.084588, 7.280405]
            # "start": [48.084588, 7.280405]
            # "start": [45.74445023349939, 4.8222687890538865]
            # "start": [45.75156398104873, 4.827154464827647]
            "start": [45.75156398104873, 4.827154464827647]
        }
    )
    result = response.json()

@@ -46,6 +46,8 @@ def test_turckheim(client, request):   # pylint: disable=redefined-outer-name
    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200  # check for successful planning
@@ -3,7 +3,7 @@
from fastapi.testclient import TestClient
import pytest

from ..structs.toilets import Toilets
from ..structs.landmark import Toilets
from ..main import app

@@ -1,6 +1,7 @@
"""Helper methods for testing."""
import logging
from fastapi import HTTPException
from pydantic import ValidationError

from ..structs.landmark import Landmark
from ..cache import client as cache_client

@@ -38,7 +39,7 @@ def fetch_landmark(landmark_uuid: str):
    try:
        landmark = cache_client.get(f'landmark_{landmark_uuid}')
        if not landmark :
            logger.error(f'Cache miss for landmark UUID: {landmark_uuid}')
            logger.warning(f'Cache miss for landmark UUID: {landmark_uuid}')
            raise HTTPException(status_code=404, detail=f'Landmark with UUID {landmark_uuid} not found in cache.')

        # Validate that the fetched data is a dictionary
@@ -1,38 +0,0 @@
"""Defines the endpoint for fetching toilet locations."""
from fastapi import HTTPException, APIRouter, Query

from ..structs.toilets import Toilets
from .toilets_manager import ToiletsManager


# Define the API router
router = APIRouter()


@router.post("/toilets/new")
def get_toilets(location: tuple[float, float] = Query(...), radius: int = 500) -> list[Toilets] :
    """
    Endpoint to find toilets within a specified radius from a given location.

    This endpoint expects the `location` and `radius` as **query parameters**, not in the request body.

    Args:
        location (tuple[float, float]): The latitude and longitude of the location to search from.
        radius (int, optional): The radius (in meters) within which to search for toilets. Defaults to 500 meters.

    Returns:
        list[Toilets]: A list of Toilets objects that meet the criteria.
    """
    if location is None:
        raise HTTPException(status_code=406, detail="Coordinates not provided or invalid")
    if not (-90 <= location[0] <= 90 or -180 <= location[1] <= 180):
        raise HTTPException(status_code=422, detail="Start coordinates not in range")

    toilets_manager = ToiletsManager(location, radius)

    try :
        toilets_list = toilets_manager.generate_toilet_list()
    except KeyError as exc:
        raise HTTPException(status_code=404, detail="No toilets found") from exc

    return toilets_list
@@ -1,6 +1,6 @@
"""Find clusters of interest to add more general areas of visit to the tour."""
import logging
from typing import Literal, Tuple
from typing import Literal

import numpy as np
from sklearn.cluster import DBSCAN

@@ -8,8 +8,8 @@ from pydantic import BaseModel

from ..overpass.overpass import Overpass, get_base_info
from ..structs.landmark import Landmark
from ..utils.get_time_distance import get_distance
from ..utils.bbox import create_bbox
from .get_time_distance import get_distance
from .utils import create_bbox


@@ -33,7 +33,7 @@ class Cluster(BaseModel):
    """
    type: Literal['street', 'area']
    importance: int
    centroid: Tuple[float, float]
    centroid: tuple
    # start: Optional[list] = None          # for later use if we want to have streets as well
    # end: Optional[list] = None

@@ -103,7 +103,7 @@ class ClusterManager:
                out = out
            )
        except Exception as e:
            self.logger.warning(f"Error fetching clusters: {e}")
            self.logger.error(f"Error fetching clusters: {e}")

        if result is None :
            self.logger.debug(f"Found no {cluster_type} clusters, overpass query returned no datapoints.")

@@ -178,12 +178,11 @@ class ClusterManager:

            # Calculate the centroid as the mean of the points
            centroid = np.mean(current_cluster, axis=0)
            centroid = tuple((round(centroid[0], 7), round(centroid[1], 7)))

            if self.cluster_type == 'shopping' :
                score = len(current_cluster)*3
                score = len(current_cluster)*2
            else :
                score = len(current_cluster)*15
                score = len(current_cluster)*8
            locations.append(Cluster(
                type='area',
                centroid=centroid,

@@ -216,7 +215,7 @@ class ClusterManager:
        """

        # Define the bounding box for a given radius around the coordinates
        bbox = create_bbox(cluster.centroid, 300)
        bbox = create_bbox(cluster.centroid, 1000)

        # Query neighborhoods and shopping malls
        selectors = ['"place"~"^(suburb|neighborhood|neighbourhood|quarter|city_block)$"']

@@ -224,10 +223,10 @@ class ClusterManager:
        if self.cluster_type == 'shopping' :
            selectors.append('"shop"="mall"')
            new_name = 'Shopping Area'
            t = 30
            t = 40
        else :
            new_name = 'Neighborhood'
            t = 20
            t = 15

        min_dist = float('inf')
        osm_id = 0

@@ -239,28 +238,30 @@ class ClusterManager:
                result = self.overpass.send_query(bbox = bbox,
                                                  osm_types = osm_types,
                                                  selector = sel,
                                                  out = 'ids center tags'
                                                  out = 'ids center'
                                                  )
            except Exception as e:
                self.logger.warning(f"Error fetching clusters: {e}")
                self.logger.error(f"Error fetching clusters: {e}")
                continue

            if result is None :
                self.logger.warning(f"Error fetching clusters: query result is None")
                self.logger.error(f"Error fetching clusters: {e}")
                continue

            for elem in result:
                # Get basic info
                id, coords, name = get_base_info(elem, elem.get('type'), with_name=True)
                osm_type = elem.get('type')

                id, coords, name = get_base_info(elem, osm_type, with_name=True)

                if name is None or coords is None :
                    continue

                d = get_distance(cluster.centroid, coords)
                if d < min_dist :
                    min_dist = d
                    new_name = name                 # add name
                    osm_type = elem.get('type')     # add type: 'way' or 'relation'
                    osm_id = id                     # add OSM id
                    new_name = name
                    osm_type = osm_type     # Add type: 'way' or 'relation'
                    osm_id = id             # Add OSM id

        return Landmark(
            name=new_name,
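The centroid and scoring changes above operate on DBSCAN cluster members. A small, self-contained sketch of that step, using the same numpy and scikit-learn calls as the file's imports (illustration only; the eps value and sample points are made up):

# Sketch: grouping points with DBSCAN, then computing a rounded centroid and a
# size-based score per cluster, mirroring the diff above (shopping: len * 2, others: len * 8).
import numpy as np
from sklearn.cluster import DBSCAN

points = np.array([
    [45.7627, 4.8557], [45.7626, 4.8555], [45.7625, 4.8556],   # dense group
    [45.7410, 4.8171],                                          # isolated point -> noise
])
labels = DBSCAN(eps=0.001, min_samples=2).fit(points).labels_

for label in set(labels) - {-1}:            # -1 marks noise
    current_cluster = points[labels == label]
    centroid = np.mean(current_cluster, axis=0)
    centroid = tuple((round(centroid[0], 7), round(centroid[1], 7)))
    score = len(current_cluster) * 2        # 'shopping' branch after this change
    print(label, centroid, score)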
@@ -4,10 +4,10 @@ import yaml
|
||||
|
||||
from ..structs.preferences import Preferences
|
||||
from ..structs.landmark import Landmark
|
||||
from ..utils.take_most_important import take_most_important
|
||||
from .take_most_important import take_most_important
|
||||
from .cluster_manager import ClusterManager
|
||||
from ..overpass.overpass import Overpass, get_base_info
|
||||
from ..utils.bbox import create_bbox
|
||||
from .utils import create_bbox
|
||||
|
||||
from ..constants import AMENITY_SELECTORS_PATH, LANDMARK_PARAMETERS_PATH, OPTIMIZER_PARAMETERS_PATH
|
||||
|
||||
@@ -39,6 +39,7 @@ class LandmarkManager:
|
||||
self.overall_coeff = parameters['overall_coeff']
|
||||
self.tag_exponent = parameters['tag_exponent']
|
||||
self.image_bonus = parameters['image_bonus']
|
||||
self.name_bonus = parameters['name_bonus']
|
||||
self.wikipedia_bonus = parameters['wikipedia_bonus']
|
||||
self.viewpoint_bonus = parameters['viewpoint_bonus']
|
||||
self.pay_bonus = parameters['pay_bonus']
|
||||
@@ -146,8 +147,6 @@ class LandmarkManager:
|
||||
score *= self.wikipedia_bonus
|
||||
if landmark.is_place_of_worship :
|
||||
score *= self.church_coeff
|
||||
if landmark.is_viewpoint :
|
||||
score *= self.viewpoint_bonus
|
||||
if landmarktype == 'nature' :
|
||||
score *= self.nature_coeff
|
||||
|
||||
@@ -197,12 +196,12 @@ class LandmarkManager:
|
||||
out = 'ids center tags'
|
||||
)
|
||||
except Exception as e:
|
||||
self.logger.debug(f"Failed to fetch landmarks, proceeding without: {str(e)}")
|
||||
self.logger.error(f"Error fetching landmarks: {e}")
|
||||
continue
|
||||
|
||||
return_list += self._to_landmarks(result, landmarktype, preference_level)
|
||||
|
||||
# self.logger.debug(f"Fetched {len(return_list)} landmarks of type {landmarktype} in {bbox}")
|
||||
self.logger.debug(f"Fetched {len(return_list)} landmarks of type {landmarktype} in {bbox}")
|
||||
|
||||
return return_list
|
||||
|
||||
@@ -246,6 +245,8 @@ class LandmarkManager:
|
||||
attractiveness=0,
|
||||
n_tags=len(tags))
|
||||
|
||||
# self.logger.debug('added landmark.')
|
||||
|
||||
# Browse through tags to add information to landmark.
|
||||
for key, value in tags.items():
|
||||
|
||||
@@ -266,7 +267,7 @@ class LandmarkManager:
|
||||
landmark.image_url = value
|
||||
if key == 'website' :
|
||||
landmark.website_url = value
|
||||
if value == 'place_of_worship' :
|
||||
if key == 'place_of_worship' :
|
||||
landmark.is_place_of_worship = True
|
||||
if key == 'wikipedia' :
|
||||
landmark.wiki_url = value
|
||||
@@ -275,7 +276,6 @@ class LandmarkManager:
|
||||
if 'building:' in key or 'pay' in key :
|
||||
landmark.n_tags -= 1
|
||||
|
||||
|
||||
# Set the duration.
|
||||
if value in ['museum', 'aquarium', 'planetarium'] :
|
||||
landmark.duration = 60
|
||||
@@ -286,138 +286,14 @@ class LandmarkManager:
|
||||
landmark.is_place_of_worship = False
|
||||
landmark.duration = 10
|
||||
|
||||
landmark.description, landmark.keywords = self.description_and_keywords(tags)
|
||||
else:
|
||||
self.set_landmark_score(landmark, landmarktype, preference_level)
|
||||
landmarks.append(landmark)
|
||||
|
||||
continue
|
||||
|
||||
|
||||
return landmarks
|
||||
|
||||
|
||||
def description_and_keywords(self, tags: dict):
|
||||
"""
|
||||
Generates a description and a set of keywords for a given landmark based on its tags.
|
||||
|
||||
Params:
|
||||
tags (dict): A dictionary containing metadata about the landmark, including its name,
|
||||
importance, height, date of construction, and visitor information.
|
||||
|
||||
Returns:
|
||||
description (str): A string description of the landmark.
|
||||
keywords (dict): A dictionary of keywords with fields such as 'importance', 'height',
|
||||
'place_type', and 'date'.
|
||||
"""
|
||||
# Extract relevant fields
|
||||
name = tags.get('name')
|
||||
importance = tags.get('importance', None)
|
||||
n_visitors = tags.get('tourism:visitors', None)
|
||||
height = tags.get('height')
|
||||
place_type = self.get_place_type(tags)
|
||||
date = self.get_date(tags)
|
||||
|
||||
if place_type is None :
|
||||
return None, None
|
||||
|
||||
# Start the description.
|
||||
if importance is None :
|
||||
if len(tags.keys()) < 5 :
|
||||
return None, None
|
||||
if len(tags.keys()) < 10 :
|
||||
description = f"{name} is a well known {place_type}."
|
||||
elif len(tags.keys()) < 17 :
|
||||
importance = 'national'
|
||||
description = f"{name} is a {place_type} of national importance."
|
||||
else :
|
||||
importance = 'international'
|
||||
description = f"{name} is an internationally famous {place_type}."
|
||||
else :
|
||||
description = f"{name} is a {place_type} of {importance} importance."
|
||||
|
||||
if height is not None and date is not None :
|
||||
description += f" This {place_type} was constructed in {date} and is ca. {height} meters high."
|
||||
elif height is not None :
|
||||
description += f" This {place_type} stands ca. {height} meters tall."
|
||||
elif date is not None:
|
||||
description += f" It was constructed in {date}."
|
||||
|
||||
# Format the visitor number
|
||||
if n_visitors is not None :
|
||||
n_visitors = int(n_visitors)
|
||||
if n_visitors < 1000000 :
|
||||
description += f" It welcomes {int(n_visitors/1000)} thousand visitors every year."
|
||||
else :
|
||||
description += f" It welcomes {round(n_visitors/1000000, 1)} million visitors every year."
|
||||
|
||||
# Set the keywords.
|
||||
keywords = {"importance": importance,
|
||||
"height": height,
|
||||
"place_type": place_type,
|
||||
"date": date}
|
||||
|
||||
return description, keywords
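The branching above drives the wording: the tag count stands in for importance when no explicit `importance` tag exists, and height, construction date and visitor numbers are appended only when present. A minimal usage sketch, assuming a constructed `LandmarkManager` instance named `manager` and an invented tag dict (neither is taken from the repository):

```python
# Invented OSM-style tags for illustration.
sample_tags = {
    "name": "Sagrada Familia",
    "building": "church",
    "height": "172",
    "start_date": "1882",
    "importance": "international",
    "tourism:visitors": "4500000",
}

description, keywords = manager.description_and_keywords(sample_tags)
# description -> "Sagrada Familia is a church of international importance. This
#                 church was constructed in 1882 and is ca. 172 meters high. It
#                 welcomes 4.5 million visitors every year."
# keywords    -> {"importance": "international", "height": "172",
#                 "place_type": "church", "date": "1882"}
```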
|
||||
|
||||
|
||||
def get_place_type(self, data):
|
||||
"""
|
||||
Determines the type of the place based on available tags such as 'amenity', 'building',
|
||||
'historic', and 'leisure'. The priority order is: 'historic' > 'building' (if not generic) >
|
||||
'amenity' > 'leisure'.
|
||||
|
||||
Params:
|
||||
data (dict): A dictionary containing metadata about the place.
|
||||
|
||||
Returns:
|
||||
place_type (str): The determined type of the place, or None if no relevant type is found.
|
||||
"""
|
||||
amenity = data.get('amenity', None)
|
||||
building = data.get('building', None)
|
||||
historic = data.get('historic', None)
|
||||
leisure = data.get('leisure')
|
||||
|
||||
if historic and historic != "yes":
|
||||
return historic
|
||||
if building and building not in ["yes", "civic", "government", "apartments", "residential", "commericial", "industrial", "retail", "religious", "public", "service"]:
|
||||
return building
|
||||
if amenity:
|
||||
return amenity
|
||||
if leisure:
|
||||
return leisure
|
||||
|
||||
|
||||
return None
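Because of the stated priority order, the same element can resolve to different place types depending on which tags it carries and whether its `building` value counts as generic. A few invented tag dicts illustrate this (assuming a `LandmarkManager` instance named `manager`):

```python
manager.get_place_type({"historic": "castle", "amenity": "restaurant"})  # -> "castle"
manager.get_place_type({"building": "yes", "amenity": "theatre"})        # -> "theatre" (generic building skipped)
manager.get_place_type({"leisure": "park"})                              # -> "park"
manager.get_place_type({"building": "residential"})                      # -> None (excluded value, nothing else set)
```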
|
||||
|
||||
|
||||
def get_date(self, data):
|
||||
"""
|
||||
Extracts the most relevant date from the available tags, prioritizing 'construction_date',
|
||||
'start_date', 'year_of_construction', and 'opening_date' in that order.
|
||||
|
||||
Params:
|
||||
data (dict): A dictionary containing metadata about the place.
|
||||
|
||||
Returns:
|
||||
date (str): The most relevant date found, or None if no date is available.
|
||||
"""
|
||||
construction_date = data.get('construction_date', None)
|
||||
opening_date = data.get('opening_date', None)
|
||||
start_date = data.get('start_date', None)
|
||||
year_of_construction = data.get('year_of_construction', None)
|
||||
|
||||
# Prioritize based on availability
|
||||
if construction_date:
|
||||
return construction_date
|
||||
if start_date:
|
||||
return start_date
|
||||
if year_of_construction:
|
||||
return year_of_construction
|
||||
if opening_date:
|
||||
return opening_date
|
||||
|
||||
return None
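The priority order means a construction or start date always wins over an opening date, even when both are present. Invented examples for illustration:

```python
manager.get_date({"start_date": "1887", "opening_date": "1889"})  # -> "1887"
manager.get_date({"opening_date": "2004"})                        # -> "2004"
manager.get_date({"name": "Some Place"})                          # -> None
```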
|
||||
|
||||
|
||||
def dict_to_selector_list(d: dict) -> list:
|
||||
"""
|
||||
Convert a dictionary of key-value pairs to a list of Overpass query strings.
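The hunk is cut off before the function body, so only the docstring survives here. Purely as an illustration of what such a conversion could look like — the selector string format below is an assumption, not the project's actual implementation:

```python
def dict_to_selector_list_sketch(d: dict) -> list:
    """Illustrative only: turn {"amenity": ["place_of_worship", "fountain"]}
    into ['"amenity"="place_of_worship"', '"amenity"="fountain"']."""
    selectors = []
    for key, value in d.items():
        # a value may be a single string or a list of alternatives
        values = value if isinstance(value, list) else [value]
        for v in values:
            selectors.append(f'"{key}"="{v}"')
    return selectors
```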
|
@@ -2,8 +2,8 @@
|
||||
import logging
|
||||
|
||||
from ..overpass.overpass import Overpass, get_base_info
|
||||
from ..structs.toilets import Toilets
|
||||
from ..utils.bbox import create_bbox
|
||||
from ..structs.landmark import Toilets
|
||||
from .utils import create_bbox
|
||||
|
||||
|
||||
# silence the overpass logger
|
||||
@@ -65,7 +65,7 @@ class ToiletsManager:
|
||||
try:
|
||||
result = self.overpass.fetch_data_from_api(query_str=query)
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error fetching toilets: {e}")
|
||||
self.logger.error(f"Error fetching landmarks: {e}")
|
||||
return None
|
||||
|
||||
toilets_list = self.to_toilets(result)
|
59
frontend/.github/workflows/build_app_android.yaml
vendored
Normal file
@@ -0,0 +1,59 @@
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: macos-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up ruby env
|
||||
uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
ruby-version: 3.2.1
|
||||
bundler-cache: true
|
||||
|
||||
- name: Setup java for android build
|
||||
uses: actions/setup-java@v4
|
||||
with:
|
||||
java-version: '17'
|
||||
distribution: 'zulu'
|
||||
|
||||
- name: Setup android SDK
|
||||
uses: android-actions/setup-android@v3
|
||||
|
||||
- name: Install Flutter
|
||||
uses: subosito/flutter-action@v2
|
||||
with:
|
||||
channel: stable
|
||||
flutter-version: 3.22.0
|
||||
cache: true
|
||||
|
||||
- name: Infer version number from git tag
|
||||
id: version
|
||||
env:
|
||||
REF_NAME: ${{ github.ref_name }}
|
||||
run:
|
||||
# remove the 'v' prefix from the tag name
|
||||
echo "BUILD_NAME=${REF_NAME//v}" >> $GITHUB_ENV
|
||||
|
||||
- name: Put selected secrets into files
|
||||
run: |
|
||||
echo "${{ secrets.ANDROID_SECRET_PROPERTIES_BASE64 }}" | base64 -d > secrets.properties
|
||||
echo "${{ secrets.ANDROID_GOOGLE_PLAY_JSON_BASE64 }}" | base64 -d > google-key.json
|
||||
echo "${{ secrets.ANDROID_KEYSTORE_BASE64 }}" | base64 -d > release.keystore
|
||||
working-directory: android
|
||||
|
||||
- name: Install fastlane
|
||||
run: bundle install
|
||||
working-directory: android
|
||||
|
||||
- name: Run fastlane lane
|
||||
run: bundle exec fastlane deploy_release
|
||||
working-directory: android
|
||||
env:
|
||||
BUILD_NUMBER: ${{ github.run_number }}
|
||||
# BUILD_NAME is implicitly available
|
||||
GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
|
64
frontend/.github/workflows/build_app_ios.yaml
vendored
Normal file
@@ -0,0 +1,64 @@
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- 'v*'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
runs-on: macos-latest
|
||||
env:
|
||||
# $BUNDLE_GEMFILE must be set at the job level, so it is set for all steps
|
||||
BUNDLE_GEMFILE: ${{ github.workspace }}/ios/Gemfile
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up ruby env
|
||||
uses: ruby/setup-ruby@v1
|
||||
with:
|
||||
ruby-version: 3.3
|
||||
bundler-cache: true # runs 'bundle install' and caches installed gems automatically
|
||||
|
||||
- name: Install Flutter
|
||||
uses: subosito/flutter-action@v2
|
||||
with:
|
||||
channel: stable
|
||||
flutter-version: 3.22.0
|
||||
cache: true
|
||||
|
||||
- name: Infer version number from git tag
|
||||
id: version
|
||||
env:
|
||||
REF_NAME: ${{ github.ref_name }}
|
||||
run:
|
||||
# remove the 'v' prefix from the tag name
|
||||
echo "BUILD_NAME=${REF_NAME//v}" >> $GITHUB_ENV
|
||||
|
||||
- name: Setup SSH key for match git repo
|
||||
# and mark the host as known
|
||||
run: |
|
||||
echo $MATCH_REPO_SSH_KEY | base64 --decode > ~/.ssh/id_rsa
|
||||
chmod 600 ~/.ssh/id_rsa
|
||||
ssh-keyscan -p 2222 git.kluster.moll.re > ~/.ssh/known_hosts
|
||||
env:
|
||||
MATCH_REPO_SSH_KEY: ${{ secrets.IOS_MATCH_REPO_SSH_KEY_BASE64 }}
|
||||
|
||||
- name: Install dependencies and clean up
|
||||
run: |
|
||||
flutter pub get
|
||||
bundle exec pod install
|
||||
flutter clean
|
||||
bundle exec pod cache clean --all
|
||||
working-directory: ios
|
||||
|
||||
- name: Run fastlane lane
|
||||
run: bundle exec fastlane deploy_release --verbose
|
||||
working-directory: ios
|
||||
env:
|
||||
BUILD_NUMBER: ${{ github.run_number }}
|
||||
# BUILD_NAME is implicitly available
|
||||
GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
|
||||
IOS_ASC_KEY_ID: ${{ secrets.IOS_ASC_KEY_ID }}
|
||||
IOS_ASC_ISSUER_ID: ${{ secrets.IOS_ASC_ISSUER_ID }}
|
||||
IOS_ASC_KEY: ${{ secrets.IOS_ASC_KEY }}
|
||||
MATCH_PASSWORD: ${{ secrets.IOS_MATCH_PASSWORD }}
|
||||
IOS_GOOGLE_MAPS_API_KEY: ${{ secrets.IOS_GOOGLE_MAPS_API_KEY }}
|
@@ -4,7 +4,7 @@
|
||||
# This file should be version controlled and should not be manually edited.
|
||||
|
||||
version:
|
||||
revision: "09de023485e95e6d1225c2baa44b8feb85e0d45f"
|
||||
revision: "54e66469a933b60ddf175f858f82eaeb97e48c8d"
|
||||
channel: "stable"
|
||||
|
||||
project_type: app
|
||||
@@ -13,11 +13,26 @@ project_type: app
|
||||
migration:
|
||||
platforms:
|
||||
- platform: root
|
||||
create_revision: 09de023485e95e6d1225c2baa44b8feb85e0d45f
|
||||
base_revision: 09de023485e95e6d1225c2baa44b8feb85e0d45f
|
||||
create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
- platform: android
|
||||
create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
- platform: ios
|
||||
create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
- platform: linux
|
||||
create_revision: 09de023485e95e6d1225c2baa44b8feb85e0d45f
|
||||
base_revision: 09de023485e95e6d1225c2baa44b8feb85e0d45f
|
||||
create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
- platform: macos
|
||||
create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
- platform: web
|
||||
create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
- platform: windows
|
||||
create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
|
||||
|
||||
# User provided section
|
||||
|
||||
|
@@ -17,17 +17,7 @@ flutter pub get
|
||||
```
|
||||
|
||||
## Development
|
||||
### TODO
|
||||
|
||||
## Deployment and metadata
|
||||
### Deploying a new version
|
||||
To truly deploy a new version of the application, i.e. to the official app stores, a special CI step is required. This step listens for new tags. To create a new tag, position yourself on the main branch and run
|
||||
```bash
|
||||
git tag -a v<name> -m "Release <name>"
|
||||
git push origin v<name>
|
||||
```
|
||||
We adhere to the [Semantic Versioning](https://semver.org/) standard, so the tag should be of the form `v0.1.8` for example.
|
||||
|
||||
### ...
|
||||
### Icons and logos
|
||||
The application uses a custom launcher icon and splash screen. These are managed platform-independently using the `flutter_launcher_icons` package.
|
||||
|
||||
@@ -35,10 +25,14 @@ To update the icons, change the `flutter_launcher_icons.yaml` configuration file
|
||||
```bash
|
||||
dart run flutter_launcher_icons
|
||||
```
|
||||
### Other metadata
|
||||
Fastlane provides mechanisms to update the metadata of the application. This includes the name, description, screenshots, etc. The metadata is stored in the `fastlane/metadata` directory of both the `android` and the `ios` version of the application. Both versions have different structures but **they should be kept in sync**. For more information see the [fastlane documentation](https://docs.fastlane.tools/):
|
||||
- https://docs.fastlane.tools/actions/deliver/
|
||||
- https://docs.fastlane.tools/actions/supply/
|
||||
|
||||
### Deploying a new version
|
||||
To truly deploy a new version of the application, i.e. to the official app stores, a special CI step is required. This step listens for new tags. To create a new tag, position yourself on the main branch and run
|
||||
```bash
|
||||
git tag -a v<name> -m "Release <name>"
|
||||
git push origin v<name>
|
||||
```
|
||||
We adhere to the [Semantic Versioning](https://semver.org/) standard, so the tag should be of the form `v0.1.8` for example.
|
||||
|
||||
|
||||
## Fastlane - in depth
|
||||
@@ -52,17 +46,16 @@ bundle exec fastlane <lane>
|
||||
```
|
||||
This is reused in the CI/CD pipeline to automate the deployment process.
|
||||
|
||||
Secrets used by fastlane are stored in HashiCorp Vault and are fetched by the CI/CD pipeline. See below.
|
||||
|
||||
## Secrets
|
||||
These are used by the CI/CD pipeline to deploy the application.
|
||||
These are mostly used by the CI/CD pipeline to deploy the application. The main usage for GitHub Actions is documented under [https://github.com/hashicorp/vault-action](https://github.com/hashicorp/vault-action).
|
||||
|
||||
**Platform-specific secrets** are used by the CI/CD pipeline to deploy to the respective app stores.
|
||||
- `ANDROID_GOOGLE_MAPS_API_KEY` is used to authenticate with the Google Maps API and is scoped to the android platform
|
||||
- `GOOGLE_MAPS_API_KEY` is used to authenticate with the Google Maps API and is scoped to the android platform
|
||||
- `ANDROID_KEYSTORE` is used to sign the android apk
|
||||
- `ANDROID_GOOGLE_KEY` is used to authenticate with the Google Play Store api
|
||||
- `IOS_GOOGLE_MAPS_API_KEY` is used to authenticate with the Google Maps API and is scoped to the ios platform
|
||||
- `IOS_ASC_ISSUER_ID` is used to authenticate with the App Store Connect API
|
||||
- `IOS_ASC_KEY` is used to authenticate with the App Store Connect API as well
|
||||
- `IOS_ASC_KEY_ID` is the ID of that key, used alongside it for authentication
|
||||
- `IOS_MATCH_PASSWORD` is used by fastlane match to download the certificates
|
||||
- `IOS_MATCH_REPO_SSH_KEY_BASE64` is used to authenticate with the git repository where the certificates are stored
|
||||
- `IOS_GOOGLE_...`
|
||||
- `IOS_GOOGLE_...`
|
||||
- `IOS_GOOGLE_...`
|
220
frontend/android/Gemfile.lock
Normal file
@@ -0,0 +1,220 @@
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
CFPropertyList (3.0.7)
|
||||
base64
|
||||
nkf
|
||||
rexml
|
||||
addressable (2.8.7)
|
||||
public_suffix (>= 2.0.2, < 7.0)
|
||||
artifactory (3.0.17)
|
||||
atomos (0.1.3)
|
||||
aws-eventstream (1.3.0)
|
||||
aws-partitions (1.970.0)
|
||||
aws-sdk-core (3.202.2)
|
||||
aws-eventstream (~> 1, >= 1.3.0)
|
||||
aws-partitions (~> 1, >= 1.651.0)
|
||||
aws-sigv4 (~> 1.9)
|
||||
jmespath (~> 1, >= 1.6.1)
|
||||
aws-sdk-kms (1.88.0)
|
||||
aws-sdk-core (~> 3, >= 3.201.0)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sdk-s3 (1.159.0)
|
||||
aws-sdk-core (~> 3, >= 3.201.0)
|
||||
aws-sdk-kms (~> 1)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sigv4 (1.9.1)
|
||||
aws-eventstream (~> 1, >= 1.0.2)
|
||||
babosa (1.0.4)
|
||||
base64 (0.2.0)
|
||||
claide (1.1.0)
|
||||
colored (1.2)
|
||||
colored2 (3.1.2)
|
||||
commander (4.6.0)
|
||||
highline (~> 2.0.0)
|
||||
declarative (0.0.20)
|
||||
digest-crc (0.6.5)
|
||||
rake (>= 12.0.0, < 14.0.0)
|
||||
domain_name (0.6.20240107)
|
||||
dotenv (2.8.1)
|
||||
emoji_regex (3.2.3)
|
||||
excon (0.111.0)
|
||||
faraday (1.10.3)
|
||||
faraday-em_http (~> 1.0)
|
||||
faraday-em_synchrony (~> 1.0)
|
||||
faraday-excon (~> 1.1)
|
||||
faraday-httpclient (~> 1.0)
|
||||
faraday-multipart (~> 1.0)
|
||||
faraday-net_http (~> 1.0)
|
||||
faraday-net_http_persistent (~> 1.0)
|
||||
faraday-patron (~> 1.0)
|
||||
faraday-rack (~> 1.0)
|
||||
faraday-retry (~> 1.0)
|
||||
ruby2_keywords (>= 0.0.4)
|
||||
faraday-cookie_jar (0.0.7)
|
||||
faraday (>= 0.8.0)
|
||||
http-cookie (~> 1.0.0)
|
||||
faraday-em_http (1.0.0)
|
||||
faraday-em_synchrony (1.0.0)
|
||||
faraday-excon (1.1.0)
|
||||
faraday-httpclient (1.0.1)
|
||||
faraday-multipart (1.0.4)
|
||||
multipart-post (~> 2)
|
||||
faraday-net_http (1.0.2)
|
||||
faraday-net_http_persistent (1.2.0)
|
||||
faraday-patron (1.0.0)
|
||||
faraday-rack (1.0.0)
|
||||
faraday-retry (1.0.3)
|
||||
faraday_middleware (1.2.0)
|
||||
faraday (~> 1.0)
|
||||
fastimage (2.3.1)
|
||||
fastlane (2.222.0)
|
||||
CFPropertyList (>= 2.3, < 4.0.0)
|
||||
addressable (>= 2.8, < 3.0.0)
|
||||
artifactory (~> 3.0)
|
||||
aws-sdk-s3 (~> 1.0)
|
||||
babosa (>= 1.0.3, < 2.0.0)
|
||||
bundler (>= 1.12.0, < 3.0.0)
|
||||
colored (~> 1.2)
|
||||
commander (~> 4.6)
|
||||
dotenv (>= 2.1.1, < 3.0.0)
|
||||
emoji_regex (>= 0.1, < 4.0)
|
||||
excon (>= 0.71.0, < 1.0.0)
|
||||
faraday (~> 1.0)
|
||||
faraday-cookie_jar (~> 0.0.6)
|
||||
faraday_middleware (~> 1.0)
|
||||
fastimage (>= 2.1.0, < 3.0.0)
|
||||
gh_inspector (>= 1.1.2, < 2.0.0)
|
||||
google-apis-androidpublisher_v3 (~> 0.3)
|
||||
google-apis-playcustomapp_v1 (~> 0.1)
|
||||
google-cloud-env (>= 1.6.0, < 2.0.0)
|
||||
google-cloud-storage (~> 1.31)
|
||||
highline (~> 2.0)
|
||||
http-cookie (~> 1.0.5)
|
||||
json (< 3.0.0)
|
||||
jwt (>= 2.1.0, < 3)
|
||||
mini_magick (>= 4.9.4, < 5.0.0)
|
||||
multipart-post (>= 2.0.0, < 3.0.0)
|
||||
naturally (~> 2.2)
|
||||
optparse (>= 0.1.1, < 1.0.0)
|
||||
plist (>= 3.1.0, < 4.0.0)
|
||||
rubyzip (>= 2.0.0, < 3.0.0)
|
||||
security (= 0.1.5)
|
||||
simctl (~> 1.6.3)
|
||||
terminal-notifier (>= 2.0.0, < 3.0.0)
|
||||
terminal-table (~> 3)
|
||||
tty-screen (>= 0.6.3, < 1.0.0)
|
||||
tty-spinner (>= 0.8.0, < 1.0.0)
|
||||
word_wrap (~> 1.0.0)
|
||||
xcodeproj (>= 1.13.0, < 2.0.0)
|
||||
xcpretty (~> 0.3.0)
|
||||
xcpretty-travis-formatter (>= 0.0.3, < 2.0.0)
|
||||
gh_inspector (1.1.3)
|
||||
google-apis-androidpublisher_v3 (0.54.0)
|
||||
google-apis-core (>= 0.11.0, < 2.a)
|
||||
google-apis-core (0.11.3)
|
||||
addressable (~> 2.5, >= 2.5.1)
|
||||
googleauth (>= 0.16.2, < 2.a)
|
||||
httpclient (>= 2.8.1, < 3.a)
|
||||
mini_mime (~> 1.0)
|
||||
representable (~> 3.0)
|
||||
retriable (>= 2.0, < 4.a)
|
||||
rexml
|
||||
google-apis-iamcredentials_v1 (0.17.0)
|
||||
google-apis-core (>= 0.11.0, < 2.a)
|
||||
google-apis-playcustomapp_v1 (0.13.0)
|
||||
google-apis-core (>= 0.11.0, < 2.a)
|
||||
google-apis-storage_v1 (0.31.0)
|
||||
google-apis-core (>= 0.11.0, < 2.a)
|
||||
google-cloud-core (1.7.1)
|
||||
google-cloud-env (>= 1.0, < 3.a)
|
||||
google-cloud-errors (~> 1.0)
|
||||
google-cloud-env (1.6.0)
|
||||
faraday (>= 0.17.3, < 3.0)
|
||||
google-cloud-errors (1.4.0)
|
||||
google-cloud-storage (1.47.0)
|
||||
addressable (~> 2.8)
|
||||
digest-crc (~> 0.4)
|
||||
google-apis-iamcredentials_v1 (~> 0.1)
|
||||
google-apis-storage_v1 (~> 0.31.0)
|
||||
google-cloud-core (~> 1.6)
|
||||
googleauth (>= 0.16.2, < 2.a)
|
||||
mini_mime (~> 1.0)
|
||||
googleauth (1.8.1)
|
||||
faraday (>= 0.17.3, < 3.a)
|
||||
jwt (>= 1.4, < 3.0)
|
||||
multi_json (~> 1.11)
|
||||
os (>= 0.9, < 2.0)
|
||||
signet (>= 0.16, < 2.a)
|
||||
highline (2.0.3)
|
||||
http-cookie (1.0.7)
|
||||
domain_name (~> 0.5)
|
||||
httpclient (2.8.3)
|
||||
jmespath (1.6.2)
|
||||
json (2.7.2)
|
||||
jwt (2.8.2)
|
||||
base64
|
||||
mini_magick (4.13.2)
|
||||
mini_mime (1.1.5)
|
||||
multi_json (1.15.0)
|
||||
multipart-post (2.4.1)
|
||||
nanaimo (0.3.0)
|
||||
naturally (2.2.1)
|
||||
nkf (0.2.0)
|
||||
optparse (0.5.0)
|
||||
os (1.1.4)
|
||||
plist (3.7.1)
|
||||
public_suffix (6.0.1)
|
||||
rake (13.2.1)
|
||||
representable (3.2.0)
|
||||
declarative (< 0.1.0)
|
||||
trailblazer-option (>= 0.1.1, < 0.2.0)
|
||||
uber (< 0.2.0)
|
||||
retriable (3.1.2)
|
||||
rexml (3.3.6)
|
||||
strscan
|
||||
rouge (2.0.7)
|
||||
ruby2_keywords (0.0.5)
|
||||
rubyzip (2.3.2)
|
||||
security (0.1.5)
|
||||
signet (0.19.0)
|
||||
addressable (~> 2.8)
|
||||
faraday (>= 0.17.5, < 3.a)
|
||||
jwt (>= 1.5, < 3.0)
|
||||
multi_json (~> 1.10)
|
||||
simctl (1.6.10)
|
||||
CFPropertyList
|
||||
naturally
|
||||
strscan (3.1.0)
|
||||
terminal-notifier (2.0.0)
|
||||
terminal-table (3.0.2)
|
||||
unicode-display_width (>= 1.1.1, < 3)
|
||||
trailblazer-option (0.1.2)
|
||||
tty-cursor (0.7.1)
|
||||
tty-screen (0.8.2)
|
||||
tty-spinner (0.9.3)
|
||||
tty-cursor (~> 0.7)
|
||||
uber (0.1.0)
|
||||
unicode-display_width (2.5.0)
|
||||
word_wrap (1.0.0)
|
||||
xcodeproj (1.25.0)
|
||||
CFPropertyList (>= 2.3.3, < 4.0)
|
||||
atomos (~> 0.1.3)
|
||||
claide (>= 1.0.2, < 2.0)
|
||||
colored2 (~> 3.1)
|
||||
nanaimo (~> 0.3.0)
|
||||
rexml (>= 3.3.2, < 4.0)
|
||||
xcpretty (0.3.0)
|
||||
rouge (~> 2.0.7)
|
||||
xcpretty-travis-formatter (1.0.1)
|
||||
xcpretty (~> 0.2, >= 0.0.7)
|
||||
|
||||
PLATFORMS
|
||||
ruby
|
||||
x86_64-linux
|
||||
|
||||
DEPENDENCIES
|
||||
fastlane
|
||||
|
||||
BUNDLED WITH
|
||||
2.5.18
|
@@ -77,7 +77,7 @@ android {
|
||||
versionCode flutterVersionCode.toInteger()
|
||||
versionName flutterVersionName
|
||||
// // Placeholders of keys that are replaced by the build system.
|
||||
manifestPlaceholders += ['MAPS_API_KEY': System.getenv('ANDROID_GOOGLE_MAPS_API_KEY')]
|
||||
manifestPlaceholders += ['MAPS_API_KEY': System.getenv('GOOGLE_MAPS_API_KEY')]
|
||||
|
||||
}
|
||||
|
||||
|
@@ -3,7 +3,7 @@ default_platform(:android)
|
||||
platform :android do
|
||||
|
||||
desc "Deploy a new version to closed testing (play store)"
|
||||
lane :deploy_beta do
|
||||
lane :deploy_testing do
|
||||
build_name = ENV["BUILD_NAME"]
|
||||
build_number = ENV["BUILD_NUMBER"]
|
||||
|
||||
@@ -17,8 +17,7 @@ platform :android do
|
||||
)
|
||||
|
||||
upload_to_play_store(
|
||||
track: 'beta',
|
||||
# upload aab files instead
|
||||
track: 'alpha',
|
||||
skip_upload_apk: true,
|
||||
skip_upload_changelogs: true,
|
||||
aab: "../build/app/outputs/bundle/release/app-release.aab",
|
||||
@@ -48,7 +47,6 @@ platform :android do
|
||||
skip_upload_apk: true,
|
||||
skip_upload_changelogs: true,
|
||||
aab: "../build/app/outputs/bundle/release/app-release.aab",
|
||||
metadata_path: "fastlane/metadata",
|
||||
)
|
||||
end
|
||||
end
|
||||
|
@@ -0,0 +1,7 @@
|
||||
AnyWay - plan city trips your way
|
||||
|
||||
AnyWay is a mobile application that helps users plan city trips. The app allows users to specify their preferences and constraints, and then generates a personalized itinerary for them. The planning follows some guiding principles:
|
||||
- **Personalization**: The user's preferences should be reflected in the choice of destinations.
|
||||
- **Efficiency**: The itinerary should be optimized for the user's constraints.
|
||||
- **Flexibility**: We acknowledge that tourism is a dynamic activity, and that users may want to change their plans on the go.
|
||||
- **Discoverability**: Tourism is an inherently exploratory activity. Once a rough itinerary is generated, detours and spontaneous decisions should be encouraged.
|
After Width: | Height: | Size: 106 KiB |
After Width: | Height: | Size: 1.3 MiB |
After Width: | Height: | Size: 637 KiB |
After Width: | Height: | Size: 573 KiB |
After Width: | Height: | Size: 175 KiB |
After Width: | Height: | Size: 360 KiB |
@@ -1,7 +0,0 @@
|
||||
AnyWay is an application that helps you plan truly unique city trips. When planning a new trip, you can specify your preferences and constraints and anyway generates a personalized itinerary just for you.
|
||||
|
||||
Anyway follows these core principles:
|
||||
- Personalization: Trips should match your interests - not just the most popular destinations.
|
||||
- Efficiency: Don't just walk in circles! Anyway creates the most efficient route for you.
|
||||
- Flexibility: Vacations are the time to be spontaneous. Anyway lets you update your plans on the go.
|
||||
- Discoverability: Tourism means exploration. Anyway encourages you to take detours and make spontaneous decisions.
|
Before Width: | Height: | Size: 3.0 MiB |
Before Width: | Height: | Size: 4.1 MiB |
Before Width: | Height: | Size: 1.1 MiB |
Before Width: | Height: | Size: 1.1 MiB |
288
frontend/ios/Gemfile.lock
Normal file
@@ -0,0 +1,288 @@
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
CFPropertyList (3.0.7)
|
||||
base64
|
||||
nkf
|
||||
rexml
|
||||
activesupport (5.2.8.1)
|
||||
concurrent-ruby (~> 1.0, >= 1.0.2)
|
||||
i18n (>= 0.7, < 2)
|
||||
minitest (~> 5.1)
|
||||
tzinfo (~> 1.1)
|
||||
addressable (2.8.7)
|
||||
public_suffix (>= 2.0.2, < 7.0)
|
||||
algoliasearch (1.27.5)
|
||||
httpclient (~> 2.8, >= 2.8.3)
|
||||
json (>= 1.5.1)
|
||||
artifactory (3.0.17)
|
||||
atomos (0.1.3)
|
||||
aws-eventstream (1.3.0)
|
||||
aws-partitions (1.1004.0)
|
||||
aws-sdk-core (3.212.0)
|
||||
aws-eventstream (~> 1, >= 1.3.0)
|
||||
aws-partitions (~> 1, >= 1.992.0)
|
||||
aws-sigv4 (~> 1.9)
|
||||
jmespath (~> 1, >= 1.6.1)
|
||||
aws-sdk-kms (1.95.0)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sdk-s3 (1.170.1)
|
||||
aws-sdk-core (~> 3, >= 3.210.0)
|
||||
aws-sdk-kms (~> 1)
|
||||
aws-sigv4 (~> 1.5)
|
||||
aws-sigv4 (1.10.1)
|
||||
aws-eventstream (~> 1, >= 1.0.2)
|
||||
babosa (1.0.4)
|
||||
base64 (0.2.0)
|
||||
claide (1.1.0)
|
||||
cocoapods (1.10.2)
|
||||
addressable (~> 2.6)
|
||||
claide (>= 1.0.2, < 2.0)
|
||||
cocoapods-core (= 1.10.2)
|
||||
cocoapods-deintegrate (>= 1.0.3, < 2.0)
|
||||
cocoapods-downloader (>= 1.4.0, < 2.0)
|
||||
cocoapods-plugins (>= 1.0.0, < 2.0)
|
||||
cocoapods-search (>= 1.0.0, < 2.0)
|
||||
cocoapods-trunk (>= 1.4.0, < 2.0)
|
||||
cocoapods-try (>= 1.1.0, < 2.0)
|
||||
colored2 (~> 3.1)
|
||||
escape (~> 0.0.4)
|
||||
fourflusher (>= 2.3.0, < 3.0)
|
||||
gh_inspector (~> 1.0)
|
||||
molinillo (~> 0.6.6)
|
||||
nap (~> 1.0)
|
||||
ruby-macho (~> 1.4)
|
||||
xcodeproj (>= 1.19.0, < 2.0)
|
||||
cocoapods-core (1.10.2)
|
||||
activesupport (> 5.0, < 6)
|
||||
addressable (~> 2.6)
|
||||
algoliasearch (~> 1.0)
|
||||
concurrent-ruby (~> 1.1)
|
||||
fuzzy_match (~> 2.0.4)
|
||||
nap (~> 1.0)
|
||||
netrc (~> 0.11)
|
||||
public_suffix
|
||||
typhoeus (~> 1.0)
|
||||
cocoapods-deintegrate (1.0.5)
|
||||
cocoapods-downloader (1.6.3)
|
||||
cocoapods-plugins (1.0.0)
|
||||
nap
|
||||
cocoapods-search (1.0.1)
|
||||
cocoapods-trunk (1.6.0)
|
||||
nap (>= 0.8, < 2.0)
|
||||
netrc (~> 0.11)
|
||||
cocoapods-try (1.2.0)
|
||||
colored (1.2)
|
||||
colored2 (3.1.2)
|
||||
commander (4.6.0)
|
||||
highline (~> 2.0.0)
|
||||
concurrent-ruby (1.3.4)
|
||||
declarative (0.0.20)
|
||||
digest-crc (0.6.5)
|
||||
rake (>= 12.0.0, < 14.0.0)
|
||||
domain_name (0.6.20240107)
|
||||
dotenv (2.8.1)
|
||||
emoji_regex (3.2.3)
|
||||
escape (0.0.4)
|
||||
ethon (0.16.0)
|
||||
ffi (>= 1.15.0)
|
||||
excon (0.112.0)
|
||||
faraday (1.10.4)
|
||||
faraday-em_http (~> 1.0)
|
||||
faraday-em_synchrony (~> 1.0)
|
||||
faraday-excon (~> 1.1)
|
||||
faraday-httpclient (~> 1.0)
|
||||
faraday-multipart (~> 1.0)
|
||||
faraday-net_http (~> 1.0)
|
||||
faraday-net_http_persistent (~> 1.0)
|
||||
faraday-patron (~> 1.0)
|
||||
faraday-rack (~> 1.0)
|
||||
faraday-retry (~> 1.0)
|
||||
ruby2_keywords (>= 0.0.4)
|
||||
faraday-cookie_jar (0.0.7)
|
||||
faraday (>= 0.8.0)
|
||||
http-cookie (~> 1.0.0)
|
||||
faraday-em_http (1.0.0)
|
||||
faraday-em_synchrony (1.0.0)
|
||||
faraday-excon (1.1.0)
|
||||
faraday-httpclient (1.0.1)
|
||||
faraday-multipart (1.0.4)
|
||||
multipart-post (~> 2)
|
||||
faraday-net_http (1.0.2)
|
||||
faraday-net_http_persistent (1.2.0)
|
||||
faraday-patron (1.0.0)
|
||||
faraday-rack (1.0.0)
|
||||
faraday-retry (1.0.3)
|
||||
faraday_middleware (1.2.1)
|
||||
faraday (~> 1.0)
|
||||
fastimage (2.3.1)
|
||||
fastlane (2.225.0)
|
||||
CFPropertyList (>= 2.3, < 4.0.0)
|
||||
addressable (>= 2.8, < 3.0.0)
|
||||
artifactory (~> 3.0)
|
||||
aws-sdk-s3 (~> 1.0)
|
||||
babosa (>= 1.0.3, < 2.0.0)
|
||||
bundler (>= 1.12.0, < 3.0.0)
|
||||
colored (~> 1.2)
|
||||
commander (~> 4.6)
|
||||
dotenv (>= 2.1.1, < 3.0.0)
|
||||
emoji_regex (>= 0.1, < 4.0)
|
||||
excon (>= 0.71.0, < 1.0.0)
|
||||
faraday (~> 1.0)
|
||||
faraday-cookie_jar (~> 0.0.6)
|
||||
faraday_middleware (~> 1.0)
|
||||
fastimage (>= 2.1.0, < 3.0.0)
|
||||
fastlane-sirp (>= 1.0.0)
|
||||
gh_inspector (>= 1.1.2, < 2.0.0)
|
||||
google-apis-androidpublisher_v3 (~> 0.3)
|
||||
google-apis-playcustomapp_v1 (~> 0.1)
|
||||
google-cloud-env (>= 1.6.0, < 2.0.0)
|
||||
google-cloud-storage (~> 1.31)
|
||||
highline (~> 2.0)
|
||||
http-cookie (~> 1.0.5)
|
||||
json (< 3.0.0)
|
||||
jwt (>= 2.1.0, < 3)
|
||||
mini_magick (>= 4.9.4, < 5.0.0)
|
||||
multipart-post (>= 2.0.0, < 3.0.0)
|
||||
naturally (~> 2.2)
|
||||
optparse (>= 0.1.1, < 1.0.0)
|
||||
plist (>= 3.1.0, < 4.0.0)
|
||||
rubyzip (>= 2.0.0, < 3.0.0)
|
||||
security (= 0.1.5)
|
||||
simctl (~> 1.6.3)
|
||||
terminal-notifier (>= 2.0.0, < 3.0.0)
|
||||
terminal-table (~> 3)
|
||||
tty-screen (>= 0.6.3, < 1.0.0)
|
||||
tty-spinner (>= 0.8.0, < 1.0.0)
|
||||
word_wrap (~> 1.0.0)
|
||||
xcodeproj (>= 1.13.0, < 2.0.0)
|
||||
xcpretty (~> 0.3.0)
|
||||
xcpretty-travis-formatter (>= 0.0.3, < 2.0.0)
|
||||
fastlane-sirp (1.0.0)
|
||||
sysrandom (~> 1.0)
|
||||
ffi (1.17.0)
|
||||
ffi (1.17.0-x86_64-darwin)
|
||||
fourflusher (2.3.1)
|
||||
fuzzy_match (2.0.4)
|
||||
gh_inspector (1.1.3)
|
||||
google-apis-androidpublisher_v3 (0.54.0)
|
||||
google-apis-core (>= 0.11.0, < 2.a)
|
||||
google-apis-core (0.11.3)
|
||||
addressable (~> 2.5, >= 2.5.1)
|
||||
googleauth (>= 0.16.2, < 2.a)
|
||||
httpclient (>= 2.8.1, < 3.a)
|
||||
mini_mime (~> 1.0)
|
||||
representable (~> 3.0)
|
||||
retriable (>= 2.0, < 4.a)
|
||||
rexml
|
||||
google-apis-iamcredentials_v1 (0.17.0)
|
||||
google-apis-core (>= 0.11.0, < 2.a)
|
||||
google-apis-playcustomapp_v1 (0.13.0)
|
||||
google-apis-core (>= 0.11.0, < 2.a)
|
||||
google-apis-storage_v1 (0.31.0)
|
||||
google-apis-core (>= 0.11.0, < 2.a)
|
||||
google-cloud-core (1.7.1)
|
||||
google-cloud-env (>= 1.0, < 3.a)
|
||||
google-cloud-errors (~> 1.0)
|
||||
google-cloud-env (1.6.0)
|
||||
faraday (>= 0.17.3, < 3.0)
|
||||
google-cloud-errors (1.4.0)
|
||||
google-cloud-storage (1.47.0)
|
||||
addressable (~> 2.8)
|
||||
digest-crc (~> 0.4)
|
||||
google-apis-iamcredentials_v1 (~> 0.1)
|
||||
google-apis-storage_v1 (~> 0.31.0)
|
||||
google-cloud-core (~> 1.6)
|
||||
googleauth (>= 0.16.2, < 2.a)
|
||||
mini_mime (~> 1.0)
|
||||
googleauth (1.8.1)
|
||||
faraday (>= 0.17.3, < 3.a)
|
||||
jwt (>= 1.4, < 3.0)
|
||||
multi_json (~> 1.11)
|
||||
os (>= 0.9, < 2.0)
|
||||
signet (>= 0.16, < 2.a)
|
||||
highline (2.0.3)
|
||||
http-cookie (1.0.7)
|
||||
domain_name (~> 0.5)
|
||||
httpclient (2.8.3)
|
||||
i18n (1.14.6)
|
||||
concurrent-ruby (~> 1.0)
|
||||
jmespath (1.6.2)
|
||||
json (2.8.1)
|
||||
jwt (2.9.3)
|
||||
base64
|
||||
mini_magick (4.13.2)
|
||||
mini_mime (1.1.5)
|
||||
minitest (5.25.1)
|
||||
molinillo (0.6.6)
|
||||
multi_json (1.15.0)
|
||||
multipart-post (2.4.1)
|
||||
nanaimo (0.4.0)
|
||||
nap (1.1.0)
|
||||
naturally (2.2.1)
|
||||
netrc (0.11.0)
|
||||
nkf (0.2.0)
|
||||
optparse (0.6.0)
|
||||
os (1.1.4)
|
||||
plist (3.7.1)
|
||||
public_suffix (6.0.1)
|
||||
rake (13.2.1)
|
||||
representable (3.2.0)
|
||||
declarative (< 0.1.0)
|
||||
trailblazer-option (>= 0.1.1, < 0.2.0)
|
||||
uber (< 0.2.0)
|
||||
retriable (3.1.2)
|
||||
rexml (3.3.9)
|
||||
rouge (2.0.7)
|
||||
ruby-macho (1.4.0)
|
||||
ruby2_keywords (0.0.5)
|
||||
rubyzip (2.3.2)
|
||||
security (0.1.5)
|
||||
signet (0.19.0)
|
||||
addressable (~> 2.8)
|
||||
faraday (>= 0.17.5, < 3.a)
|
||||
jwt (>= 1.5, < 3.0)
|
||||
multi_json (~> 1.10)
|
||||
simctl (1.6.10)
|
||||
CFPropertyList
|
||||
naturally
|
||||
sysrandom (1.0.5)
|
||||
terminal-notifier (2.0.0)
|
||||
terminal-table (3.0.2)
|
||||
unicode-display_width (>= 1.1.1, < 3)
|
||||
thread_safe (0.3.6)
|
||||
trailblazer-option (0.1.2)
|
||||
tty-cursor (0.7.1)
|
||||
tty-screen (0.8.2)
|
||||
tty-spinner (0.9.3)
|
||||
tty-cursor (~> 0.7)
|
||||
typhoeus (1.4.1)
|
||||
ethon (>= 0.9.0)
|
||||
tzinfo (1.2.11)
|
||||
thread_safe (~> 0.1)
|
||||
uber (0.1.0)
|
||||
unicode-display_width (2.6.0)
|
||||
word_wrap (1.0.0)
|
||||
xcodeproj (1.27.0)
|
||||
CFPropertyList (>= 2.3.3, < 4.0)
|
||||
atomos (~> 0.1.3)
|
||||
claide (>= 1.0.2, < 2.0)
|
||||
colored2 (~> 3.1)
|
||||
nanaimo (~> 0.4.0)
|
||||
rexml (>= 3.3.6, < 4.0)
|
||||
xcpretty (0.3.0)
|
||||
rouge (~> 2.0.7)
|
||||
xcpretty-travis-formatter (1.0.1)
|
||||
xcpretty (~> 0.2, >= 0.0.7)
|
||||
|
||||
PLATFORMS
|
||||
ruby
|
||||
x86_64-darwin-23
|
||||
|
||||
DEPENDENCIES
|
||||
cocoapods
|
||||
fastlane
|
||||
|
||||
BUNDLED WITH
|
||||
2.5.23
|
@@ -1,4 +1,4 @@
|
||||
app_identifier("info.anydev.anyway") # The bundle identifier of your app
|
||||
app_identifier("info.anydev.testing") # The bundle identifier of your app
|
||||
apple_id("me@moll.re") # Your Apple Developer Portal username
|
||||
|
||||
itc_team_id("127439860") # App Store Connect Team ID
|
||||
|
@@ -1,3 +0,0 @@
|
||||
# The Deliverfile allows you to store various App Store Connect metadata
|
||||
# For more information, check out the docs
|
||||
# https://docs.fastlane.tools/actions/deliver/
|
@@ -15,7 +15,7 @@ platform :ios do
|
||||
|
||||
|
||||
desc "Deploy a new version to closed testing (testflight)"
|
||||
lane :deploy_beta do
|
||||
lane :deploy_testing do
|
||||
build_name = ENV["BUILD_NAME"]
|
||||
build_number = ENV["BUILD_NUMBER"]
|
||||
|
||||
@@ -28,11 +28,12 @@ platform :ios do
|
||||
readonly: true,
|
||||
)
|
||||
|
||||
|
||||
sh(
|
||||
"flutter",
|
||||
"build",
|
||||
"ipa",
|
||||
"--release",
|
||||
"--debug",
|
||||
"--build-name=#{build_name}",
|
||||
"--build-number=#{build_number}",
|
||||
)
|
||||
@@ -63,6 +64,15 @@ platform :ios do
|
||||
readonly: true,
|
||||
)
|
||||
|
||||
# replace secrets by real values, the stupid way
|
||||
sh(
|
||||
"sed",
|
||||
"-i",
|
||||
"",
|
||||
"s/IOS_GOOGLE_MAPS_API_KEY/#{ENV["IOS_GOOGLE_MAPS_API_KEY"]}/g",
|
||||
"../Runner/AppDelegate.swift"
|
||||
)
|
||||
|
||||
sh(
|
||||
"flutter",
|
||||
"build",
|
||||
@@ -79,11 +89,10 @@ platform :ios do
|
||||
)
|
||||
|
||||
upload_to_app_store(
|
||||
overwrite_screenshots: true,
|
||||
metadata_path: "fastlane/metadata",
|
||||
screenshots_path: "fastlane/screenshots",
|
||||
skip_screenshots: true,
|
||||
skip_metadata: true,
|
||||
precheck_include_in_app_purchases: false,
|
||||
force: true, # Skip HTML report verification
|
||||
|
||||
submit_for_review: true,
|
||||
automatic_release: true,
|
||||
# automatically release the app after review
|
||||
|
@@ -1 +0,0 @@
|
||||
2025 anydev
|
@@ -1,7 +0,0 @@
|
||||
AnyWay is an application that helps you plan truly unique city trips. When planning a new trip, you can specify your preferences and constraints and anyway generates a personalized itinerary just for you.
|
||||
|
||||
Anyway follows these core principles:
|
||||
- Personalization: Trips should match your interests - not just the most popular destinations.
|
||||
- Efficiency: Don't just walk in circles! Anyway creates the most efficient route for you.
|
||||
- Flexibility: Vacations are the time to be spontaneous. Anyway lets you update your plans on the go.
|
||||
- Discoverability: Tourism means exploration. Anyway encourages you to take detours and make spontaneous decisions.
|
@@ -1 +0,0 @@
|
||||
tourism, cities, travel, guide
|
@@ -1 +0,0 @@
|
||||
https://anydev.info
|
@@ -1 +0,0 @@
|
||||
Any.Way
|
@@ -1 +0,0 @@
|
||||
https://anydev.info/privacy
|
@@ -1 +0,0 @@
|
||||
AnyWay - plan city trips your way!
|
@@ -1 +0,0 @@
|
||||
|
@@ -1 +0,0 @@
|
||||
Plan city trips your way!
|
@@ -1 +0,0 @@
|
||||
|
@@ -1 +0,0 @@
|
||||
TRAVEL
|
@@ -1 +0,0 @@
|
||||
anydev.anyway@gmail.com
|
@@ -1 +0,0 @@
|
||||
Remy
|
@@ -1 +0,0 @@
|
||||
Moll
|
@@ -1 +0,0 @@
|
||||
|
@@ -1 +0,0 @@
|
||||
+4915128785827
|
@@ -1 +0,0 @@
|
||||
|
@@ -1,30 +0,0 @@
|
||||
## Screenshots Naming Rules
|
||||
|
||||
Put all screenshots you want to use inside the folder of its language (e.g. `en-US`).
|
||||
The device type will automatically be recognized using the image resolution.
|
||||
|
||||
The screenshots can be named whatever you want, but keep in mind they are sorted
|
||||
alphabetically, in a human-friendly way. See https://github.com/fastlane/fastlane/pull/18200 for more details.
|
||||
|
||||
### Exceptions
|
||||
|
||||
#### iPad Pro (3rd Gen) 12.9"
|
||||
|
||||
Since iPad Pro (3rd Gen) 12.9" and iPad Pro (2nd Gen) 12.9" have the same image
|
||||
resolution, screenshots of the iPad Pro (3rd gen) 12.9" must contain either the
|
||||
string `iPad Pro (12.9-inch) (3rd generation)`, `IPAD_PRO_3GEN_129`, or `ipadPro129`
|
||||
(App Store Connect's internal naming of the display family for the 3rd generation iPad Pro)
|
||||
in its filename to be assigned the correct display family and to be uploaded to
|
||||
the correct screenshot slot in your app's metadata.
|
||||
|
||||
### Other Platforms
|
||||
|
||||
#### Apple TV
|
||||
|
||||
Apple TV screenshots should be stored in a subdirectory named `appleTV` with language
|
||||
folders inside of it.
|
||||
|
||||
#### iMessage
|
||||
|
||||
iMessage screenshots, like the Apple TV ones, should also be stored in a subdirectory
|
||||
named `iMessage`, with language folders inside of it.
|
Before Width: | Height: | Size: 1.6 MiB |
Before Width: | Height: | Size: 2.1 MiB |
Before Width: | Height: | Size: 626 KiB |
Before Width: | Height: | Size: 758 KiB |
Before Width: | Height: | Size: 2.2 MiB |
Before Width: | Height: | Size: 2.5 MiB |
Before Width: | Height: | Size: 574 KiB |
Before Width: | Height: | Size: 800 KiB |
@@ -16,27 +16,22 @@ class CurrentTripErrorMessage extends StatefulWidget {
|
||||
class _CurrentTripErrorMessageState extends State<CurrentTripErrorMessage> {
|
||||
@override
|
||||
Widget build(BuildContext context) => Center(
|
||||
child: Column(
|
||||
child: Row(
|
||||
mainAxisAlignment: MainAxisAlignment.center,
|
||||
crossAxisAlignment: CrossAxisAlignment.center,
|
||||
children: [
|
||||
Text(
|
||||
"😢",
|
||||
style: TextStyle(
|
||||
fontSize: 40,
|
||||
const Icon(
|
||||
Icons.error_outline,
|
||||
color: Colors.red,
|
||||
size: 50,
|
||||
),
|
||||
const Padding(
|
||||
padding: EdgeInsets.only(left: 10),
|
||||
),
|
||||
const SizedBox(height: 10),
|
||||
AutoSizeText(
|
||||
// at this point the trip is guaranteed to have an error message
|
||||
widget.trip.errorDescription!,
|
||||
maxLines: 30,
|
||||
style: Theme.of(context).textTheme.bodyMedium,
|
||||
textAlign: TextAlign.center,
|
||||
|
||||
'Error: ${widget.trip.errorDescription}',
|
||||
maxLines: 3,
|
||||
),
|
||||
],
|
||||
),
|
||||
)
|
||||
);
|
||||
|
||||
}
|
||||
|
@@ -1,5 +1,3 @@
|
||||
import 'package:anyway/pages/current_trip.dart';
|
||||
import 'package:auto_size_text/auto_size_text.dart';
|
||||
import 'package:flutter/material.dart';
|
||||
|
||||
import 'package:anyway/constants.dart';
|
||||
@@ -36,26 +34,14 @@ class _CurrentTripPanelState extends State<CurrentTripPanel> {
|
||||
listenable: widget.trip,
|
||||
builder: (context, child) {
|
||||
if (widget.trip.uuid == 'error') {
|
||||
return ListView(
|
||||
controller: widget.controller,
|
||||
padding: const EdgeInsets.only(top: 10, left: 10, right: 10, bottom: 30),
|
||||
children: [
|
||||
SizedBox(
|
||||
return Align(
|
||||
alignment: Alignment.topCenter,
|
||||
child: SizedBox(
|
||||
// reuse the exact same height as the panel has when collapsed
|
||||
// this way the greeter will be centered when the panel is collapsed
|
||||
// note that we need to account for the padding above
|
||||
height: MediaQuery.of(context).size.height * TRIP_PANEL_MIN_HEIGHT - 10,
|
||||
child: Center(child:
|
||||
AutoSizeText(
|
||||
maxLines: 1,
|
||||
'Error',
|
||||
style: greeterStyle
|
||||
)
|
||||
height: MediaQuery.of(context).size.height * TRIP_PANEL_MIN_HEIGHT,
|
||||
child: CurrentTripErrorMessage(trip: widget.trip)
|
||||
),
|
||||
),
|
||||
|
||||
CurrentTripErrorMessage(trip: widget.trip),
|
||||
],
|
||||
);
|
||||
} else if (widget.trip.uuid == 'pending') {
|
||||
return Align(
|
||||
|
@@ -34,6 +34,23 @@ class _NewTripPreferencesPageState extends State<NewTripPreferencesPage> with Sc
|
||||
child: Scaffold(
|
||||
body: ListView(
|
||||
children: [
|
||||
// Center(
|
||||
// child: CircleAvatar(
|
||||
// radius: 100,
|
||||
// child: Icon(Icons.person, size: 100),
|
||||
// )
|
||||
// ),
|
||||
// Padding(padding: EdgeInsets.only(top: 30)),
|
||||
// Center(
|
||||
// child: FutureBuilder(
|
||||
// future: widget.trip.cityName,
|
||||
// builder: (context, snapshot) => Text(
|
||||
// 'Your trip to ${snapshot.hasData ? snapshot.data! : "..."}',
|
||||
// style: TextStyle(fontSize: 24, fontWeight: FontWeight.bold)
|
||||
// )
|
||||
// )
|
||||
// ),
|
||||
|
||||
Center(
|
||||
child: Padding(
|
||||
padding: EdgeInsets.only(left: 10, right: 10, top: 20, bottom: 0),
|
||||
@@ -46,11 +63,6 @@ class _NewTripPreferencesPageState extends State<NewTripPreferencesPage> with Sc
|
||||
durationPicker(preferences.maxTime),
|
||||
|
||||
preferenceSliders([preferences.sightseeing, preferences.shopping, preferences.nature]),
|
||||
|
||||
// Add a conditional padding to avoid the floating button covering the last slider
|
||||
Padding(
|
||||
padding: EdgeInsets.only(bottom: MediaQuery.of(context).viewInsets.bottom + 80),
|
||||
),
|
||||
]
|
||||
),
|
||||
floatingActionButton: NewTripButton(trip: widget.trip, preferences: preferences),
|
||||
|
@@ -21,13 +21,10 @@ class _NoTripsPageState extends State<NoTripsPage> with ScaffoldLayout {
|
||||
Text(
|
||||
"No trips yet",
|
||||
style: Theme.of(context).textTheme.headlineMedium,
|
||||
textAlign: TextAlign.center,
|
||||
),
|
||||
Padding(padding: EdgeInsets.only(bottom: 10)),
|
||||
Text(
|
||||
"You can start a new trip by clicking the button below",
|
||||
style: Theme.of(context).textTheme.bodyMedium,
|
||||
textAlign: TextAlign.center,
|
||||
),
|
||||
],
|
||||
),
|
||||
|
@@ -177,7 +177,7 @@ class _SettingsPageState extends State<SettingsPage> with ScaffoldLayout {
|
||||
return Center(
|
||||
child: Column(
|
||||
children: [
|
||||
Text('AnyWay does not collect or store any of the data that is submitted via the app. The location of your trip is not stored. The location feature is only used to show your current location on the map.', textAlign: TextAlign.center),
|
||||
Text('AnyWay does not collect or store any of the data that is submitted via the app. The location of your trip is not stored. The location feature is only used to show your current location on the map, it is not transmitted to our servers.', textAlign: TextAlign.center),
|
||||
Padding(padding: EdgeInsets.only(top: 3)),
|
||||
Text('Our full privacy policy is available under:', textAlign: TextAlign.center),
|
||||
|
||||
|
@@ -1,7 +1,5 @@
|
||||
import "dart:async";
|
||||
import "dart:convert";
|
||||
import "dart:developer";
|
||||
import "dart:io";
|
||||
import "package:anyway/main.dart";
|
||||
import 'package:dio/dio.dart';
|
||||
|
||||
@@ -20,6 +18,11 @@ Dio dio = Dio(
|
||||
// also accept 500 errors, since we cannot rule out that the server is at fault. We still want to gracefully handle these errors
|
||||
validateStatus: (status) => status! <= 500,
|
||||
receiveDataWhenStatusError: true,
|
||||
// api is notoriously slow
|
||||
// headers: {
|
||||
// HttpHeaders.userAgentHeader: 'dio',
|
||||
// 'api': '1.0.0',
|
||||
// },
|
||||
contentType: Headers.jsonContentType,
|
||||
responseType: ResponseType.json,
|
||||
),
|
||||
@@ -45,70 +48,25 @@ fetchTrip(
|
||||
);
|
||||
} catch (e) {
|
||||
trip.updateUUID("error");
|
||||
|
||||
// Format the error message to be more user friendly
|
||||
String errorDescription;
|
||||
if (e is DioException) {
|
||||
errorDescription = e.message ?? "Unknown error";
|
||||
} else if (e is SocketException) {
|
||||
errorDescription = "No internet connection";
|
||||
} else if (e is TimeoutException) {
|
||||
errorDescription = "Request timed out";
|
||||
} else {
|
||||
errorDescription = "Unknown error";
|
||||
}
|
||||
|
||||
String errorMessage = """
|
||||
We're sorry, the following error was generated:
|
||||
|
||||
${errorDescription.trim()}
|
||||
""".trim();
|
||||
|
||||
trip.updateError(errorMessage);
|
||||
trip.updateError(e.toString());
|
||||
log(e.toString());
|
||||
log(errorMessage);
|
||||
return;
|
||||
}
|
||||
|
||||
// handle more specific errors
|
||||
// handle errors
|
||||
if (response.statusCode != 200) {
|
||||
trip.updateUUID("error");
|
||||
String errorDescription;
|
||||
String errorDetail;
|
||||
if (response.data.runtimeType == String) {
|
||||
errorDescription = response.data;
|
||||
} else if (response.data.runtimeType == Map<String, dynamic>) {
|
||||
errorDescription = response.data["detail"] ?? "Unknown error";
|
||||
errorDetail = response.data;
|
||||
} else {
|
||||
errorDescription = "Unknown error";
|
||||
errorDetail = response.data["detail"] ?? "Unknown error";
|
||||
}
|
||||
|
||||
String errorMessage = """
|
||||
We're sorry, our servers generated the following error:
|
||||
|
||||
${errorDescription.trim()}
|
||||
Please try again.
|
||||
""".trim();
|
||||
trip.updateError(errorMessage);
|
||||
log(errorMessage);
|
||||
trip.updateError(errorDetail);
|
||||
log(errorDetail);
|
||||
// Actually, no need to throw an exception; we can just log the error and let the user retry
|
||||
// throw Exception(errorDetail);
|
||||
} else {
|
||||
|
||||
// if the response data is not json, report the error and return early
|
||||
if (response.data is! Map<String, dynamic>) {
|
||||
log("${response.data.runtimeType}");
|
||||
trip.updateUUID("error");
|
||||
String errorMessage = """
|
||||
We're sorry, our servers generated the following error:
|
||||
|
||||
${response.data.trim()}
|
||||
Please try again.
|
||||
""".trim();
|
||||
trip.updateError(errorMessage);
|
||||
log(errorMessage);
|
||||
return;
|
||||
}
|
||||
|
||||
Map<String, dynamic> json = response.data;
|
||||
|
||||
// only fill in the trip "meta" data for now
|
||||
|
@@ -1,5 +1,5 @@
|
||||
# Project-level configuration.
|
||||
cmake_minimum_required(VERSION 3.13)
|
||||
cmake_minimum_required(VERSION 3.10)
|
||||
project(runner LANGUAGES CXX)
|
||||
|
||||
# The name of the executable created for the application. Change this to change
|
||||
@@ -7,7 +7,7 @@ project(runner LANGUAGES CXX)
|
||||
set(BINARY_NAME "anyway")
|
||||
# The unique GTK application identifier for this application. See:
|
||||
# https://wiki.gnome.org/HowDoI/ChooseApplicationID
|
||||
set(APPLICATION_ID "com.anydev.anyway")
|
||||
set(APPLICATION_ID "com.example.anyway")
|
||||
|
||||
# Explicitly opt in to modern CMake behaviors to avoid warnings with recent
|
||||
# versions of CMake.
|
||||
@@ -54,8 +54,25 @@ add_subdirectory(${FLUTTER_MANAGED_DIR})
|
||||
find_package(PkgConfig REQUIRED)
|
||||
pkg_check_modules(GTK REQUIRED IMPORTED_TARGET gtk+-3.0)
|
||||
|
||||
# Application build; see runner/CMakeLists.txt.
|
||||
add_subdirectory("runner")
|
||||
add_definitions(-DAPPLICATION_ID="${APPLICATION_ID}")
|
||||
|
||||
# Define the application target. To change its name, change BINARY_NAME above,
|
||||
# not the value here, or `flutter run` will no longer work.
|
||||
#
|
||||
# Any new source files that you add to the application should be added here.
|
||||
add_executable(${BINARY_NAME}
|
||||
"main.cc"
|
||||
"my_application.cc"
|
||||
"${FLUTTER_MANAGED_DIR}/generated_plugin_registrant.cc"
|
||||
)
|
||||
|
||||
# Apply the standard set of build settings. This can be removed for applications
|
||||
# that need different build settings.
|
||||
apply_standard_settings(${BINARY_NAME})
|
||||
|
||||
# Add dependency libraries. Add any application-specific dependencies here.
|
||||
target_link_libraries(${BINARY_NAME} PRIVATE flutter)
|
||||
target_link_libraries(${BINARY_NAME} PRIVATE PkgConfig::GTK)
|
||||
|
||||
# Run the Flutter tool portions of the build. This must not be removed.
|
||||
add_dependencies(${BINARY_NAME} flutter_assemble)
|
||||
|
@@ -117,12 +117,6 @@ static void my_application_class_init(MyApplicationClass* klass) {
|
||||
static void my_application_init(MyApplication* self) {}
|
||||
|
||||
MyApplication* my_application_new() {
|
||||
// Set the program name to the application ID, which helps various systems
|
||||
// like GTK and desktop environments map this running application to its
|
||||
// corresponding .desktop file. This ensures better integration by allowing
|
||||
// the application to be recognized beyond its binary name.
|
||||
g_set_prgname(APPLICATION_ID);
|
||||
|
||||
return MY_APPLICATION(g_object_new(my_application_get_type(),
|
||||
"application-id", APPLICATION_ID,
|
||||
"flags", G_APPLICATION_NON_UNIQUE,
|