Compare commits: 2288a50b60...v0.1.4 (104 commits)
| SHA1 |
|---|
| e5c57518e2 |
| c3e72001be |
| f09836c470 |
| 9e90aed957 |
| d176416b15 |
| 5063bc2b69 |
| e95088ad25 |
| 313bdede03 |
| 8ceaf549e4 |
| 63d3ff63e7 |
| e1dbbbb274 |
| 139555dc8e |
| 1785e125c7 |
| 82d37288d6 |
| 9b0821926c |
| 0514fa063f |
| 71c7325370 |
| aeed9c7dc9 |
| 89c5fc9370 |
| d8c6bfcda0 |
| ec3ed054fd |
| 219cfcf1a6 |
| 708c07cf49 |
| e14900e9f0 |
| d1cbf972fe |
| fe2a0cf1d5 |
| c11faee824 |
| 9ccf68d983 |
| 132aa5a19b |
| 19b0c37a97 |
| ecdef605a7 |
| e2a918112b |
| 96b0718081 |
| d9e5d9dac6 |
| b0f9d31ee2 |
| 54bc9028ad |
| 37926e68ec |
| e2d3d29956 |
| 6921ab57f8 |
| 97dacb1189 |
| 8e1e6ac33c |
| 8ad69f48ab |
| a22b7183b3 |
| 56a30a2eba |
| 4b708b74a3 |
| 4c66f3e124 |
| 1f1efec804 |
| 1e2690f8ce |
| 9fc70ef3d4 |
| 7967b96d4e |
| b05950d595 |
| df51a6473b |
| 0b8f2bc94f |
| 3c5485cda8 |
| 720e4d1c17 |
| c977e1bd08 |
| ac8bb3cbf4 |
| 0b45ccabf5 |
| 8ef60104f0 |
| efd332f8c5 |
| bda87859ee |
| a7e3553246 |
| 21f57f6929 |
| 86fd50e21d |
| 2df8a22239 |
| 6af74804ec |
| e3d2c51c6d |
| 0819b8b201 |
| 5bc2918a39 |
| 114acaf93d |
| 615f028f94 |
| 45c860329f |
| a676af3a67 |
| e148c851e1 |
| 4ad867e609 |
| 6f2f86f936 |
| 56c55883ea |
| 8f6dfd404d |
| aed407e2d0 |
| f6d0cd5360 |
| 7a18830e99 |
| ba14a0279e |
| 5a2c61d343 |
| 5e27dd9d79 |
| d92001faaf |
| 73f0dc8361 |
| 05092e55f1 |
| 83be4b7616 |
| 8a9ec6b4d8 |
| 8c3145dfc9 |
| 2bf38119d6 |
| ca711c614f |
| 357edf3000 |
| 444c47e3a4 |
| da6ab207d9 |
| c15e257dea |
| 5a698dd02c |
| 7e4a4b3dc7 |
| 84e5902436 |
| 81330e5eb3 |
| 9002483036 |
| 0271c3d7a7 |
| 4fd1272ea4 |
| 6bedd04a57 |
```diff
@@ -3,6 +3,9 @@ on:
     tags:
       - v*
 
+permissions:
+  pull-requests: write
+
 name: Build and deploy the backend to production
 
 jobs:
```
```diff
@@ -10,15 +13,7 @@ jobs:
     name: Build and push image
     uses: ./.gitea/workflows/workflow_build-image.yaml
     with:
-      tag: stable
+      # sets the tag to the git tag that triggered the workflow - the deployment (configured in a separate repository) will use this tag and be deployed to production by argocd
+      tag: ${{ github.ref_name }}
     secrets:
       PACKAGE_REGISTRY_ACCESS: ${{ secrets.PACKAGE_REGISTRY_ACCESS }}
-
-  deploy-prod:
-    name: Deploy to production
-    uses: ./.gitea/workflows/workflow_deploy-container.yaml
-    with:
-      overlay: prod
-    secrets:
-      KUBE_CONFIG: ${{ secrets.KUBE_CONFIG }}
-    needs: build-and-push
```
```diff
@@ -12,15 +12,32 @@ jobs:
     name: Build and push image
     uses: ./.gitea/workflows/workflow_build-image.yaml
     with:
-      tag: unstable
+      # sets a unique tag for each commit in the PR - this gets deployed to a separate application instance using argocd
+      tag: sha${{ github.sha }}
     secrets:
       PACKAGE_REGISTRY_ACCESS: ${{ secrets.PACKAGE_REGISTRY_ACCESS }}
-
-  deploy-prod:
-    name: Deploy to staging
-    uses: ./.gitea/workflows/workflow_deploy-container.yaml
-    with:
-      overlay: stg
-    secrets:
-      KUBE_CONFIG: ${{ secrets.KUBE_CONFIG }}
-    needs: build-and-push
+
+  notify:
+    runs-on: ubuntu-latest
+    name: Add a comment to the PR to notify about the deployment
+    steps:
+      - name: Download gitea client
+        run: |
+          curl -sSL -o tea https://dl.gitea.com/tea/0.11.0/tea-0.11.0-linux-amd64
+          chmod +x tea
+
+      - name: Login
+        run: |
+          ./tea login add --url git.kluster.moll.re --name bot --token ${{ secrets.GITEA_TOKEN }}
+          ./tea login default
+      - name: Post comment
+        run: |
+          ./tea comment --repo anydev/anyway --login bot ${{ github.event.number }} """
+          The backend has been deployed to staging with url https://pr-${{ github.event.number }}.anyway-stg.anydev.info. Check the deployment status in ArgoCD:
+
+          [](https://argocd.kluster.moll.re/applications/anydev-anyway-backend-stg-pr-${{ github.event.number }})
+          """
+        env:
+          GITEA_TOKEN: ${{ secrets.GITEA_TOKEN }}
+          GITEA_BASE_URL: ${{ secrets.GITEA_BASE_URL }}
+          GITEA_REPO: ${{ secrets.GITEA_REPO }}
```
```diff
@@ -15,18 +15,10 @@ jobs:
 
       - uses: https://gitea.com/actions/checkout@v4
 
-      - name: Install dependencies
+      - name: Install pylint
         run: |
-          apt-get update && apt-get install -y python3 python3-pip
-          pip install pipenv
-
-      - name: Install packages
-        run: |
-          ls -la
-          # only install dev-packages
-          pipenv install --categories=dev-packages
-        working-directory: backend
+          apt-get update && apt-get install -y python3 python3-pip pylint
 
       - name: Run linter
-        run: pipenv run pylint src --fail-under=9
+        run: pylint src --fail-under=9
         working-directory: backend
```
```diff
@@ -15,20 +15,18 @@ jobs:
 
       - uses: https://gitea.com/actions/checkout@v4
 
-      - name: Install dependencies
+      - name: Install uv (manually)
         run: |
-          apt-get update && apt-get install -y python3 python3-pip
-          pip install pipenv
+          curl -LsSf https://astral.sh/uv/install.sh | sh
+          echo "$HOME/.local/bin" >> $GITHUB_PATH
 
-      - name: Install packages
-        run: |
-          ls -la
-          # install all packages, including dev-packages
-          pipenv install --dev
+      - name: Install dependencies
         working-directory: backend
+        run: |
+          uv sync --frozen --no-cache --no-dev
 
       - name: Run Tests
-        run: pipenv run pytest src --html=report.html --self-contained-html --log-cli-level=DEBUG
+        run: uv run pytest src --html=report.html --self-contained-html --log-cli-level=DEBUG
         working-directory: backend
 
       - name: Upload HTML report
```
```diff
@@ -1,67 +0,0 @@
-on:
-  pull_request:
-    branches:
-      - main
-    paths:
-      - frontend/**
-
-
-name: Build and release debug APK
-
-jobs:
-  build:
-    name: Build APK
-    runs-on: ubuntu-latest
-    steps:
-
-      - name: Install prerequisites
-        run: |
-          apt-get update
-          apt-get install -y jq
-
-      - uses: https://gitea.com/actions/checkout@v4
-
-
-      - uses: https://github.com/actions/setup-java@v4
-        with:
-          java-version: '17'
-          distribution: 'zulu'
-
-      - name: Fix flutter SDK folder permission
-        run: git config --global --add safe.directory "*"
-
-      - uses: https://github.com/subosito/flutter-action@v2
-        with:
-          channel: stable
-          flutter-version: 3.22.0
-          cache: true
-
-      - name: Setup Android SDK
-        uses: https://github.com/android-actions/setup-android@v3
-
-      - run: flutter pub get
-        working-directory: ./frontend
-
-      - name: Add required secrets
-        env:
-          ANDROID_SECRETS_PROPERTIES: ${{ secrets.ANDROID_SECRETS_PROPERTIES }}
-        run: |
-          echo "$ANDROID_SECRETS_PROPERTIES" >> ./android/secrets.properties
-        working-directory: ./frontend
-
-      - name: Sanity check
-        run: |
-          ls
-          ls -lah android
-        working-directory: ./frontend
-
-      - run: flutter build apk --debug --split-per-abi --build-number=${{ gitea.run_number }}
-        working-directory: ./frontend
-
-      - name: Upload APKs to artifacts
-        uses: https://gitea.com/actions/upload-artifact@v3
-        with:
-          name: app-release
-          path: frontend/build/app/outputs/flutter-apk/
-          if-no-files-found: error
-          retention-days: 15
```
```diff
@@ -1,34 +0,0 @@
-# on:
-#   pull_request:
-#     branches:
-#       - main
-#     paths:
-#       - frontend/**
-
-
-# name: Build web
-
-# jobs:
-#   build:
-#     name: Build Web
-#     runs-on: ubuntu-latest
-#     steps:
-
-#       - name: Install prerequisites
-#         run: |
-#           sudo apt-get update
-#           sudo apt-get install -y xz-utils
-
-#       - uses: actions/checkout@v4
-
-#       - uses: https://github.com/subosito/flutter-action@v2
-#         with:
-#           channel: stable
-#           flutter-version: 3.19.6
-#           cache: true
-
-#       - run: flutter pub get
-#         working-directory: ./frontend
-
-#       - run: flutter build web
-#         working-directory: ./frontend
```
.gitea/workflows/frontend_deploy-beta.yaml (new file, 59 lines)

```yaml
on:
  pull_request:
    branches:
      - main
    paths:
      - frontend/**

name: Build and release apps to beta track

jobs:
  get-version:
    name: Get version
    runs-on: macos
    steps:
      - uses: https://gitea.com/actions/checkout@v4

      - name: Fetch tags from main branch
        # since this workflow is triggered by a pull request, we want to match the latest tag of the main branch
        id: version
        run: |
          git fetch origin main --tags
          LATEST_TAG=$(git describe --tags $(git rev-list --tags --max-count=1))
          # remove the 'v' prefix from the tag name
          echo "BUILD_NAME=${LATEST_TAG//v}" >> $GITHUB_OUTPUT

      - name: Output the version that is being used
        run: |
          echo "Building for version ${{ steps.version.outputs.BUILD_NAME }}"

    outputs:
      build_name: ${{ steps.version.outputs.BUILD_NAME }}

  build-android:
    name: Build and upload android app
    uses: ./.gitea/workflows/workflow_build-app-android.yaml
    with:
      build_type: beta
      build_name: ${{ needs.get-version.outputs.build_name }}
    secrets:
      ANDROID_SECRET_PROPERTIES_BASE64: ${{ secrets.ANDROID_SECRET_PROPERTIES_BASE64 }}
      ANDROID_GOOGLE_PLAY_JSON_BASE64: ${{ secrets.ANDROID_GOOGLE_PLAY_JSON_BASE64 }}
      ANDROID_KEYSTORE_BASE64: ${{ secrets.ANDROID_KEYSTORE_BASE64 }}
      ANDROID_GOOGLE_MAPS_API_KEY: ${{ secrets.ANDROID_GOOGLE_MAPS_API_KEY }}
    needs: get-version

  build-ios:
    name: Build and upload ios app
    uses: ./.gitea/workflows/workflow_build-app-ios.yaml
    with:
      build_type: beta
      build_name: ${{ needs.get-version.outputs.build_name }}
    secrets:
      IOS_ASC_KEY_ID: ${{ secrets.IOS_ASC_KEY_ID }}
      IOS_ASC_ISSUER_ID: ${{ secrets.IOS_ASC_ISSUER_ID }}
      IOS_ASC_KEY: ${{ secrets.IOS_ASC_KEY }}
      IOS_MATCH_REPO_SSH_KEY_BASE64: ${{ secrets.IOS_MATCH_REPO_SSH_KEY_BASE64 }}
      IOS_MATCH_PASSWORD: ${{ secrets.IOS_MATCH_PASSWORD }}
      IOS_GOOGLE_MAPS_API_KEY: ${{ secrets.IOS_GOOGLE_MAPS_API_KEY }}
    needs: build-android # technically not needed, but this prevents the builds from running in parallel
```
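A note on the `${LATEST_TAG//v}` expansion used in the version step above (the release workflow below uses the same `${REF_NAME//v}` form): bash's `${var//pattern}` deletes every occurrence of the pattern, not only a leading `v`. For plain `vX.Y.Z` tags the result is identical to stripping the prefix, but the two diverge for any tag with another `v` in it. A minimal Python sketch of the distinction (the tag values are made-up examples):

```python
# "${tag//v}" in bash removes every 'v'; "${tag#v}" removes only a leading one.
tag = "v0.1.4"  # hypothetical tag of the form these workflows expect

assert tag.replace("v", "") == "0.1.4"       # behaves like ${tag//v}
assert tag.removeprefix("v") == "0.1.4"      # behaves like ${tag#v}

# With a 'v' elsewhere in the tag, the blanket replacement corrupts the name:
tag = "v0.1.4-dev"
assert tag.replace("v", "") == "0.1.4-de"    # every 'v' stripped
assert tag.removeprefix("v") == "0.1.4-dev"  # only the prefix stripped
```

For the `v*` tag scheme used here, with no other letters expected in the version, both forms agree, so this is a robustness note rather than a bug report.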
.gitea/workflows/frontend_deploy-release.yaml (new file, 56 lines)

```yaml
on:
  push:
    tags:
      - v*

name: Build and release apps to production track

jobs:
  get-version:
    name: Get version
    runs-on: macos
    steps:
      - uses: https://gitea.com/actions/checkout@v4

      - name: Get version from git tag
        id: version
        env:
          REF_NAME: ${{ gitea.ref_name }}
        # remove the 'v' prefix from the tag name
        run: |
          echo "BUILD_NAME=${REF_NAME//v}" >> $GITHUB_OUTPUT

      - name: Output the version that is being used
        run: |
          echo "Building for version ${{ steps.version.outputs.BUILD_NAME }}"

    outputs:
      build_name: ${{ steps.version.outputs.BUILD_NAME }}

  build-android:
    name: Build and upload android app
    uses: ./.gitea/workflows/workflow_build-app-android.yaml
    with:
      build_type: release
      build_name: ${{ needs.get-version.outputs.build_name }}
    secrets:
      ANDROID_SECRET_PROPERTIES_BASE64: ${{ secrets.ANDROID_SECRET_PROPERTIES_BASE64 }}
      ANDROID_GOOGLE_PLAY_JSON_BASE64: ${{ secrets.ANDROID_GOOGLE_PLAY_JSON_BASE64 }}
      ANDROID_KEYSTORE_BASE64: ${{ secrets.ANDROID_KEYSTORE_BASE64 }}
      ANDROID_GOOGLE_MAPS_API_KEY: ${{ secrets.ANDROID_GOOGLE_MAPS_API_KEY }}
    needs: get-version

  build-ios:
    name: Build and upload ios app
    uses: ./.gitea/workflows/workflow_build-app-ios.yaml
    with:
      build_type: release
      build_name: ${{ needs.get-version.outputs.build_name }}
    secrets:
      IOS_ASC_KEY_ID: ${{ secrets.IOS_ASC_KEY_ID }}
      IOS_ASC_ISSUER_ID: ${{ secrets.IOS_ASC_ISSUER_ID }}
      IOS_ASC_KEY: ${{ secrets.IOS_ASC_KEY }}
      IOS_MATCH_REPO_SSH_KEY_BASE64: ${{ secrets.IOS_MATCH_REPO_SSH_KEY_BASE64 }}
      IOS_MATCH_PASSWORD: ${{ secrets.IOS_MATCH_PASSWORD }}
      IOS_GOOGLE_MAPS_API_KEY: ${{ secrets.IOS_GOOGLE_MAPS_API_KEY }}
    needs: build-android # technically not needed, but this prevents the builds from running in parallel
```
```diff
@@ -1,39 +0,0 @@
-on:
-  push:
-    tags:
-      - v*
-
-jobs:
-  push-to-remote:
-    # We want to use the macos runner provided by github actions. This requires to push to a remote first.
-    # After the push we can use the action under frontend/.github/actions/ to deploy properly using fastlane on macos.
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v3
-        with:
-          path: 'src'
-
-      - name: Checkout remote repository
-        uses: actions/checkout@v3
-        with:
-          path: 'dest'
-          ref: 'main'
-          github-server-url: 'https://github.com'
-          repository: 'moll-re/anyway-frontend-builder'
-          token: ${{ secrets.PUSH_GITHUB_API_TOKEN }}
-          fetch-depth: 0
-          persist-credentials: true
-
-      - name: Copy files to remote repository
-        run: cp -r src/frontend/. dest/
-
-      - name: Commit and push changes
-        run: |
-          cd dest
-          git config --global user.email "me@moll.re"
-          git config --global user.name "[bot]"
-          git add .
-          git commit -m "Automatic code update for tag"
-          git tag -a ${{ github.ref_name }} -m "mirrored tag"
-          git push origin main --tags
```
.gitea/workflows/workflow_build-app-android.yaml (new file, 78 lines)

```yaml
on:
  workflow_call:
    inputs:
      build_type:
        description: 'Release type (release, beta)'
        required: true
        type: string
      build_name:
        description: 'Build name'
        required: true
        type: string
    secrets:
      ANDROID_SECRET_PROPERTIES_BASE64:
        required: true
      ANDROID_GOOGLE_PLAY_JSON_BASE64:
        required: true
      ANDROID_KEYSTORE_BASE64:
        required: true
      ANDROID_GOOGLE_MAPS_API_KEY:
        required: true

name: Build and release android appbundle to specfied track

defaults:
  run:
    working-directory: frontend/android

jobs:
  build:
    runs-on: macos-14
    env:
      # $BUNDLE_GEMFILE must be set at the job level, so it is set for all steps
      BUNDLE_GEMFILE: ${{ gitea.workspace }}/frontend/android/Gemfile

    steps:
      - uses: https://gitea.com/actions/checkout@v4

      - uses: https://github.com/actions/setup-java@v4
        with:
          java-version: '17'
          distribution: 'zulu'

      - name: Setup Android SDK
        uses: https://github.com/android-actions/setup-android@v3

      - name: Fix flutter SDK folder permission
        run: git config --global --add safe.directory "*"

      - uses: https://github.com/subosito/flutter-action@v2
        with:
          channel: stable
          flutter-version-file: ${{ gitea.workspace }}/frontend/pubspec.yaml
          architecture: x64
          cache: true

      - name: Install dependencies and clean up
        run: |
          flutter pub get
          flutter clean

      - name: Set up ruby env and install fastlane
        uses: https://github.com/ruby/setup-ruby@v1
        with:
          ruby-version: 3.3
          bundler-cache: true # runs 'bundle install' and caches installed gems automatically

      - name: Add required secret files
        run: |
          echo "${{ secrets.ANDROID_SECRET_PROPERTIES_BASE64 }}" | base64 -d > secrets.properties
          echo "${{ secrets.ANDROID_GOOGLE_PLAY_JSON_BASE64 }}" | base64 -d > google-key.json
          echo "${{ secrets.ANDROID_KEYSTORE_BASE64 }}" | base64 -d > release.keystore

      - name: Run fastlane lane
        run: bundle exec fastlane deploy_${{ inputs.build_type }}
        env:
          BUILD_NUMBER: ${{ gitea.run_number }}
          BUILD_NAME: ${{ inputs.build_name }}
          ANDROID_GOOGLE_MAPS_API_KEY: ${{ secrets.ANDROID_GOOGLE_MAPS_API_KEY }}
```
.gitea/workflows/workflow_build-app-ios.yaml (new file, 90 lines)

```yaml
on:
  workflow_call:
    inputs:
      build_type:
        description: 'Release type (release, beta)'
        required: true
        type: string
      build_name:
        description: 'Build name'
        required: true
        type: string
    secrets:
      IOS_ASC_KEY_ID:
        required: true
      IOS_ASC_ISSUER_ID:
        required: true
      IOS_ASC_KEY:
        required: true
      IOS_MATCH_REPO_SSH_KEY_BASE64:
        required: true
      IOS_MATCH_PASSWORD:
        required: true
      IOS_GOOGLE_MAPS_API_KEY:
        required: true

name: Build and release ipa to specified track

defaults:
  run:
    working-directory: frontend/ios

jobs:
  build:
    runs-on: macos-14
    env:
      # $BUNDLE_GEMFILE must be set at the job level, so it is set for all steps
      BUNDLE_GEMFILE: ${{ gitea.workspace }}/frontend/ios/Gemfile

    steps:
      - uses: https://gitea.com/actions/checkout@v4

      - name: Install Flutter
        uses: https://github.com/subosito/flutter-action@v2
        with:
          channel: stable
          flutter-version-file: ${{ gitea.workspace }}/frontend/pubspec.yaml
          architecture: x64
          cache: true

      - name: Set up ruby env
        uses: https://github.com/ruby/setup-ruby@v1
        with:
          ruby-version: 3.3
          bundler-cache: true # runs 'bundle install' and caches installed gems automatically

      - uses: GuillaumeFalourd/setup-rsync@v1.2
        # rsync is required by the google maps ios tools

      - name: Install dependencies and clean up
        run: |
          flutter pub get
          flutter precache --ios
          bundle exec pod install --allow-root
          flutter clean
          bundle exec pod cache clean --all --allow-root

      - name: Setup SSH key for match git repo
        # and mark the host as known
        run: |
          echo $MATCH_REPO_SSH_KEY | base64 --decode > ~/.ssh/id_rsa
          chmod 600 ~/.ssh/id_rsa
          ssh-keyscan -p 2222 git.kluster.moll.re > ~/.ssh/known_hosts
        env:
          MATCH_REPO_SSH_KEY: ${{ secrets.IOS_MATCH_REPO_SSH_KEY_BASE64 }}

      - name: Replace API Key from secret
        # on a macOS runner, sed requires a replacement suffix after the -i flag
        run: |
          sed -i '' -e "s/IOS_GOOGLE_MAPS_API_KEY/${{ secrets.IOS_GOOGLE_MAPS_API_KEY }}/g" Runner/AppDelegate.swift

      - name: Run fastlane lane
        run: bundle exec fastlane deploy_${{ inputs.build_type }}
        env:
          BUILD_NUMBER: ${{ gitea.run_number }}
          BUILD_NAME: ${{ inputs.build_name }}
          IOS_ASC_KEY_ID: ${{ secrets.IOS_ASC_KEY_ID }}
          IOS_ASC_ISSUER_ID: ${{ secrets.IOS_ASC_ISSUER_ID }}
          IOS_ASC_KEY: ${{ secrets.IOS_ASC_KEY }}
          MATCH_PASSWORD: ${{ secrets.IOS_MATCH_PASSWORD }}
          IOS_GOOGLE_MAPS_API_KEY: ${{ secrets.IOS_GOOGLE_MAPS_API_KEY }}
```
```diff
@@ -1,35 +0,0 @@
-on:
-  workflow_call:
-    inputs:
-      overlay:
-        required: true
-        type: string
-    secrets:
-      KUBE_CONFIG:
-        required: true
-
-
-name: Deploy the newly built container
-
-
-jobs:
-  deploy:
-    name: Deploy
-    runs-on: ubuntu-latest
-    steps:
-
-      - uses: https://gitea.com/actions/checkout@v4
-        with:
-          submodules: true
-
-      - name: setup kubectl
-        uses: https://github.com/azure/setup-kubectl@v4
-
-      - name: Set kubeconfig
-        run: |
-          echo "${{ secrets.KUBE_CONFIG }}" > kubeconfig
-
-      - name: Deploy to k8s
-        run: |
-          kubectl apply -k backend/deployment/overlays/${{ inputs.overlay }} --kubeconfig=kubeconfig
-          kubectl -n anyway-backend rollout restart deployment/anyway-backend-${{ inputs.overlay }} --kubeconfig=kubeconfig
```
.gitignore (vendored)

```diff
@@ -1 +1,2 @@
 cache/
+.direnv/
```
.gitmodules (vendored)

```diff
@@ -1,3 +0,0 @@
-[submodule "backend/deployment"]
-	path = backend/deployment
-	url = https://git.kluster.moll.re/anydev/anyway-backend-deployment
```
.vscode/launch.json (vendored)

```diff
@@ -9,9 +9,7 @@
             "name": "Backend - debug",
             "type": "debugpy",
             "request": "launch",
-            "env": {
-                "DEBUG": "true"
-            },
+            "envFile": "${workspaceFolder}/backend/debug.env",
             "jinja": true,
             "cwd": "${workspaceFolder}/backend",
             "module": "fastapi",
@@ -25,9 +23,7 @@
             "type": "debugpy",
             "request": "launch",
             "program": "src/tester.py",
-            "env": {
-                "DEBUG": "true"
-            },
+            "envFile": "${workspaceFolder}/backend/debug.env",
             "cwd": "${workspaceFolder}/backend"
         },
         // frontend - flutter app
```
.vscode/settings.json (new file, 3 lines)

```json
{
    "nixEnvSelector.nixFile": "${workspaceFolder}/default.nix"
}
```
LICENSE.md (new file, 30 lines)

```markdown
# License

## Proprietary License

All code and resources in this repository are the property of AnyDev. The software and related documentation are provided solely for use with services provided by AnyDev. Redistribution, modification, or use of this software outside of its intended service is strictly prohibited without explicit permission.

### Copyright © 2024 AnyDev

All rights reserved.

### Restrictions

- You may not modify, distribute, copy, or reverse engineer any part of this codebase.
- This software is licensed for use solely in conjunction with services provided by AnyDev.
- Any commercial use of this software is strictly prohibited without explicit written consent from AnyDev.

## Third-Party Dependencies

This project uses third-party dependencies, which are subject to their respective licenses.

- Python backend dependencies: fastapi, pydantic, numpy, shapely, etc. – Licensed under their respective licenses.
- Flutter frontend dependencies: Cupertino Icons, sliding_up_panel, http, etc. – Licensed under their respective licenses.

Please refer to each project's documentation for the specific terms and conditions.

## OpenStreetMap Data Usage

This project uses data derived from **OpenStreetMap**. OpenStreetMap data is available under the [Open Database License (ODbL)](https://www.openstreetmap.org/copyright). We comply with the ODbL license, and some of the data displayed in the service may be derived from OpenStreetMap sources. We do not redistribute raw OpenStreetMap data; instead, it is processed and transformed before being used in our services.

More information about OpenStreetMap data usage can be found [here](https://www.openstreetmap.org/copyright).
```
backend/.gitignore (vendored)

```diff
@@ -1,6 +1,12 @@
 # all .env files
 *.env
 
+# osm-cache
+cache_XML/
+
+# secrets
+*secrets.yaml
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
@@ -9,6 +15,9 @@ __pycache__/
 # C extensions
 *.so
 
+# Pytest reports
+report.html
+
 # Distribution / packaging
 .Python
 build/
@@ -125,7 +134,7 @@ celerybeat.pid
 *.sage.py
 
 # Environments
-.env
+*.env
 .venv
 env/
 venv/
@@ -162,4 +171,4 @@ cython_debug/
 # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
 # and can be added to the global gitignore or merged into this file. For a more nuclear
 # option (not recommended) you can uncomment the following to ignore the entire idea folder.
-#.idea/
+#.idea/
```
backend/.python-version (new file, 1 line)

```
3.12.9
```
```diff
@@ -1,11 +1,29 @@
-FROM python:3.11-slim
+FROM python:3.12-slim-bookworm
 
+# The installer requires curl (and certificates) to download the release archive
+RUN apt-get update && apt-get install -y --no-install-recommends curl ca-certificates
+
+# Download the latest installer
+ADD https://astral.sh/uv/install.sh /uv-installer.sh
+
+# Run the installer then remove it
+RUN sh /uv-installer.sh && rm /uv-installer.sh
+
+# Ensure the installed binary is on the `PATH`
+ENV PATH="/root/.local/bin/:$PATH"
+
 # Set the working directory
 WORKDIR /app
-COPY Pipfile Pipfile.lock .
 
-RUN pip install pipenv
-RUN pipenv install --deploy --system
+# Copy uv files
+COPY pyproject.toml pyproject.toml
+COPY uv.lock uv.lock
+COPY .python-version .python-version
+
+# Sync the venv
+RUN uv sync --frozen --no-cache --no-dev
+
+# Copy application files
 COPY src src
 
 EXPOSE 8000
@@ -17,4 +35,4 @@ ENV MEMCACHED_HOST_PATH=none
 ENV LOKI_URL=none
 
 # explicitly use a string instead of an argument list to force a shell and variable expansion
-CMD fastapi run src/main.py --port 8000 --workers $NUM_WORKERS
+CMD uv run fastapi run src/main.py --port 8000 --workers $NUM_WORKERS
```
```diff
@@ -1,27 +0,0 @@
-[[source]]
-url = "https://pypi.org/simple"
-verify_ssl = true
-name = "pypi"
-
-[dev-packages]
-pylint = "*"
-pytest = "*"
-tomli = "*"
-httpx = "*"
-exceptiongroup = "*"
-pytest-html = "*"
-typing-extensions = "*"
-dill = "*"
-
-[packages]
-numpy = "*"
-fastapi = "*"
-pydantic = "*"
-shapely = "*"
-pymemcache = "*"
-fastapi-cli = "*"
-scikit-learn = "*"
-loki-logger-handler = "*"
-pulp = "*"
-scipy = "*"
-requests = "*"
```
backend/Pipfile.lock (generated, 1246 lines; diff not shown)
backend/main.py (new file, 6 lines)

```python
def main():
    print("Hello from backend!")


if __name__ == "__main__":
    main()
```
backend/pyproject.toml (new file, 55 lines)

```toml
[project]
name = "backend"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = [
    "annotated-types==0.7.0 ; python_full_version >= '3.8'",
    "anyio==4.8.0 ; python_full_version >= '3.9'",
    "certifi==2024.12.14 ; python_full_version >= '3.6'",
    "charset-normalizer==3.4.1 ; python_full_version >= '3.7'",
    "click==8.1.8 ; python_full_version >= '3.7'",
    "fastapi==0.115.7 ; python_full_version >= '3.8'",
    "fastapi-cli==0.0.7 ; python_full_version >= '3.8'",
    "h11==0.14.0 ; python_full_version >= '3.7'",
    "httptools==0.6.4",
    "idna==3.10 ; python_full_version >= '3.6'",
    "joblib==1.4.2 ; python_full_version >= '3.8'",
    "loki-logger-handler==1.1.0 ; python_full_version >= '2.7'",
    "markdown-it-py==3.0.0 ; python_full_version >= '3.8'",
    "mdurl==0.1.2 ; python_full_version >= '3.7'",
    "numpy==2.2.2 ; python_full_version >= '3.10'",
    "pulp==2.9.0 ; python_full_version >= '3.7'",
    "pydantic==2.10.6 ; python_full_version >= '3.8'",
    "pydantic-core==2.27.2 ; python_full_version >= '3.8'",
    "pygments==2.19.1 ; python_full_version >= '3.8'",
    "pymemcache==4.0.0 ; python_full_version >= '3.7'",
    "python-dotenv==1.0.1",
    "pyyaml==6.0.2",
    "requests==2.32.3 ; python_full_version >= '3.8'",
    "rich==13.9.4 ; python_full_version >= '3.8'",
    "rich-toolkit==0.13.2 ; python_full_version >= '3.8'",
    "scikit-learn==1.6.1 ; python_full_version >= '3.9'",
    "scipy==1.15.1 ; python_full_version >= '3.10'",
    "shapely==2.0.6 ; python_full_version >= '3.7'",
    "shellingham==1.5.4 ; python_full_version >= '3.7'",
    "sniffio==1.3.1 ; python_full_version >= '3.7'",
    "starlette==0.45.3 ; python_full_version >= '3.9'",
    "threadpoolctl==3.5.0 ; python_full_version >= '3.8'",
    "typer==0.15.1 ; python_full_version >= '3.7'",
    "typing-extensions==4.12.2 ; python_full_version >= '3.8'",
    "urllib3==2.3.0 ; python_full_version >= '3.9'",
    "uvicorn[standard]==0.34.0 ; python_full_version >= '3.9'",
    "uvloop==0.21.0",
    "watchfiles==1.0.4",
    "websockets==14.2",
]

[dependency-groups]
dev = [
    "httpx>=0.28.1",
    "ipykernel>=6.30.0",
    "pytest>=8.4.1",
    "pytest-html>=4.1.1",
]
```
```diff
@@ -1,6 +1,6 @@
 """Find clusters of interest to add more general areas of visit to the tour."""
 import logging
-from typing import Literal
+from typing import Literal, Tuple
 
 import numpy as np
 from sklearn.cluster import DBSCAN
@@ -8,8 +8,8 @@ from pydantic import BaseModel
 
 from ..overpass.overpass import Overpass, get_base_info
 from ..structs.landmark import Landmark
-from .get_time_distance import get_distance
-from .utils import create_bbox
+from ..utils.get_time_distance import get_distance
+from ..utils.bbox import create_bbox
 
 
@@ -33,7 +33,7 @@ class Cluster(BaseModel):
     """
     type: Literal['street', 'area']
     importance: int
-    centroid: tuple
+    centroid: Tuple[float, float]
     # start: Optional[list] = None # for later use if we want to have streets as well
     # end: Optional[list] = None
 
```
```diff
@@ -102,52 +102,54 @@ class ClusterManager:
                 selector = sel,
                 out = out
             )
-        except Exception as e:
-            self.logger.error(f"Error fetching clusters: {e}")
-
-        if result is None :
-            self.logger.debug(f"Found no {cluster_type} clusters, overpass query returned no datapoints.")
-            self.valid = False
-
-        else :
-            points = []
-            for elem in result:
-                osm_type = elem.get('type')
-
-                # Get coordinates and append them to the points list
-                _, coords = get_base_info(elem, osm_type)
-                if coords is not None :
-                    points.append(coords)
-
-            if points :
-                self.all_points = np.array(points)
-
-                # Apply DBSCAN to find clusters. Choose different settings for different cities.
-                if self.cluster_type == 'shopping' and len(self.all_points) > 200 :
-                    dbscan = DBSCAN(eps=0.00118, min_samples=15, algorithm='kd_tree') # for large cities
-                elif self.cluster_type == 'sightseeing' :
-                    dbscan = DBSCAN(eps=0.0025, min_samples=15, algorithm='kd_tree') # for historic neighborhoods
-                else :
-                    dbscan = DBSCAN(eps=0.00075, min_samples=10, algorithm='kd_tree') # for small cities
-
-                labels = dbscan.fit_predict(self.all_points)
-
-                # Check that there are is least 1 cluster
-                if len(set(labels)) > 1 :
-                    self.logger.info(f"Found {len(set(labels))} different {cluster_type} clusters.")
-                    # Separate clustered points and noise points
-                    self.cluster_points = self.all_points[labels != -1]
-                    self.cluster_labels = labels[labels != -1]
-                    self.filter_clusters() # ValueError here sometimes. I dont know why. # Filter the clusters to keep only the largest ones.
-                    self.valid = True
-
-                else :
-                    self.logger.info(f"Found 0 {cluster_type} clusters.")
-                    self.valid = False
-
-            else :
-                self.logger.debug(f"Detected 0 {cluster_type} clusters.")
-                self.valid = False
+            if result is None :
+                self.logger.debug(f"Found no {cluster_type} clusters, overpass query returned no datapoints.")
+                self.valid = False
+
+            else :
+                points = []
+                for elem in result:
+                    osm_type = elem.get('type')
+
+                    # Get coordinates and append them to the points list
+                    _, coords = get_base_info(elem, osm_type)
+                    if coords is not None :
+                        points.append(coords)
+
+                if points :
+                    self.all_points = np.array(points)
+
+                    # Apply DBSCAN to find clusters. Choose different settings for different cities.
+                    if self.cluster_type == 'shopping' and len(self.all_points) > 200 :
+                        dbscan = DBSCAN(eps=0.00118, min_samples=15, algorithm='kd_tree') # for large cities
+                    elif self.cluster_type == 'sightseeing' :
+                        dbscan = DBSCAN(eps=0.0025, min_samples=15, algorithm='kd_tree') # for historic neighborhoods
+                    else :
+                        dbscan = DBSCAN(eps=0.00075, min_samples=10, algorithm='kd_tree') # for small cities
+
+                    labels = dbscan.fit_predict(self.all_points)
+
+                    # Check that there are is least 1 cluster
+                    if len(set(labels)) > 1 :
+                        self.logger.info(f"Found {len(set(labels))} different {cluster_type} clusters.")
+                        # Separate clustered points and noise points
+                        self.cluster_points = self.all_points[labels != -1]
+                        self.cluster_labels = labels[labels != -1]
+                        self.filter_clusters() # ValueError here sometimes. I dont know why. # Filter the clusters to keep only the largest ones.
+                        self.valid = True
+
+                    else :
+                        self.logger.info(f"Found 0 {cluster_type} clusters.")
+                        self.valid = False
+
+                else :
+                    self.logger.debug(f"Found 0 {cluster_type} clusters.")
+                    self.valid = False
+
+        except Exception as e:
+            self.logger.warning(f"Could not fetch clusters: {e}")
+            self.valid = False
 
 
     def generate_clusters(self) -> list[Landmark]:
```
```diff
@@ -178,11 +180,12 @@ def generate_clusters(self) -> list[Landmark]:
 
             # Calculate the centroid as the mean of the points
             centroid = np.mean(current_cluster, axis=0)
+            centroid = tuple((round(centroid[0], 7), round(centroid[1], 7)))
 
             if self.cluster_type == 'shopping' :
-                score = len(current_cluster)*2
+                score = len(current_cluster)*3
             else :
-                score = len(current_cluster)*8
+                score = len(current_cluster)*15
             locations.append(Cluster(
                 type='area',
                 centroid=centroid,
@@ -215,18 +218,18 @@ class ClusterManager:
         """
 
         # Define the bounding box for a given radius around the coordinates
-        bbox = create_bbox(cluster.centroid, 1000)
-
+        bbox = create_bbox(cluster.centroid, 300)
 
         # Query neighborhoods and shopping malls
         selectors = ['"place"~"^(suburb|neighborhood|neighbourhood|quarter|city_block)$"']
 
         if self.cluster_type == 'shopping' :
             selectors.append('"shop"="mall"')
             new_name = 'Shopping Area'
-            t = 40
+            t = 30
         else :
             new_name = 'Neighborhood'
-            t = 15
+            t = 20
 
         min_dist = float('inf')
         osm_id = 0
@@ -238,30 +241,28 @@ class ClusterManager:
                 result = self.overpass.send_query(bbox = bbox,
                                                   osm_types = osm_types,
                                                   selector = sel,
-                                                  out = 'ids center'
+                                                  out = 'ids center tags'
                                                   )
             except Exception as e:
-                self.logger.error(f"Error fetching clusters: {e}")
+                self.logger.warning(f"Error fetching clusters: {e}")
                 continue
 
             if result is None :
-                self.logger.error(f"Error fetching clusters: {e}")
+                self.logger.warning(f"Error fetching clusters: query result is None")
                 continue
 
             for elem in result:
-                osm_type = elem.get('type')
-
-                id, coords, name = get_base_info(elem, osm_type, with_name=True)
-
+                # Get basic info
+                id, coords, name = get_base_info(elem, elem.get('type'), with_name=True)
                 if name is None or coords is None :
                     continue
 
                 d = get_distance(cluster.centroid, coords)
                 if d < min_dist :
                     min_dist = d
-                    new_name = name
-                    osm_type = osm_type # Add type: 'way' or 'relation'
-                    osm_id = id # Add OSM id
+                    new_name = name # add name
+                    osm_type = elem.get('type') # add type: 'way' or 'relation'
+                    osm_id = id # add OSM id
 
         return Landmark(
             name=new_name,
```
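For context on the DBSCAN parameters in the hunks above: the points are raw (lat, lon) pairs, so `eps` is measured in degrees (one degree of latitude is roughly 111 km, making `eps=0.00075` about 80 m). A small self-contained sketch of the same clustering call on synthetic coordinates (the blob centers are made-up values, not data from the project):

```python
import numpy as np
from sklearn.cluster import DBSCAN

rng = np.random.default_rng(0)

# Two tight synthetic blobs of 20 points each, plus one isolated point.
blob_a = rng.normal(loc=(48.8584, 2.2945), scale=0.0002, size=(20, 2))
blob_b = rng.normal(loc=(48.8606, 2.3376), scale=0.0002, size=(20, 2))
outlier = np.array([[48.9000, 2.4000]])
points = np.vstack([blob_a, blob_b, outlier])

# Same settings as the "small cities" branch above: ~80 m radius, 10 neighbors.
labels = DBSCAN(eps=0.00075, min_samples=10, algorithm='kd_tree').fit_predict(points)

print(set(labels))                 # e.g. {0, 1, -1}: two clusters plus noise
print(points[labels != -1].shape)  # the clustered points only
```

Note that a degree-based `eps` treats a degree of longitude as constant even though its ground distance shrinks toward the poles; at mid latitudes the approximation is acceptable for neighborhood-scale clustering.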
```diff
@@ -4,10 +4,9 @@ import yaml
 
 from ..structs.preferences import Preferences
 from ..structs.landmark import Landmark
-from .take_most_important import take_most_important
 from .cluster_manager import ClusterManager
 from ..overpass.overpass import Overpass, get_base_info
-from .utils import create_bbox
+from ..utils.bbox import create_bbox
 
 from ..constants import AMENITY_SELECTORS_PATH, LANDMARK_PARAMETERS_PATH, OPTIMIZER_PARAMETERS_PATH
 
@@ -23,7 +22,7 @@ class LandmarkManager:
     church_coeff: float   # coeff to adjsut score of churches
     nature_coeff: float   # coeff to adjust score of parks
     overall_coeff: float  # coeff to adjust weight of tags
-    n_important: int      # number of important landmarks to consider
+    # n_important: int      # number of important landmarks to consider
 
 
     def __init__(self) -> None:
@@ -39,11 +38,10 @@ class LandmarkManager:
             self.overall_coeff = parameters['overall_coeff']
             self.tag_exponent = parameters['tag_exponent']
             self.image_bonus = parameters['image_bonus']
-            self.name_bonus = parameters['name_bonus']
             self.wikipedia_bonus = parameters['wikipedia_bonus']
             self.viewpoint_bonus = parameters['viewpoint_bonus']
             self.pay_bonus = parameters['pay_bonus']
-            self.n_important = parameters['N_important']
+            # self.n_important = parameters['N_important']
 
         with OPTIMIZER_PARAMETERS_PATH.open('r') as f:
             parameters = yaml.safe_load(f)
@@ -56,7 +54,12 @@ class LandmarkManager:
         self.logger.info('LandmakManager successfully initialized.')
 
 
-    def generate_landmarks_list(self, center_coordinates: tuple[float, float], preferences: Preferences) -> tuple[list[Landmark], list[Landmark]]:
+    def generate_landmarks_list(
+            self,
+            center_coordinates: tuple[float, float],
+            preferences: Preferences,
+            allow_clusters: bool = True
+        ) -> list[Landmark] :
         """
         Generate and prioritize a list of landmarks based on user preferences.
 
```
```diff
@@ -64,16 +67,17 @@ class LandmarkManager:
         and current location. It scores and corrects these landmarks, removes duplicates, and then selects the most important
         landmarks based on a predefined criterion.
 
-        Args:
-            center_coordinates (tuple[float, float]): The latitude and longitude of the center location around which to search.
-            preferences (Preferences): The user's preference settings that influence the landmark selection.
+        Parameters :
+            center_coordinates (tuple[float, float]): The latitude and longitude of the center location around which to search.
+            preferences (Preferences): The user's preference settings that influence the landmark selection.
+            allow_clusters (bool, optional) : If set to False, no clusters will be fetched. Mainly used for the option to fetch landmarks nearby.
 
         Returns:
             tuple[list[Landmark], list[Landmark]]:
                 - A list of all existing landmarks.
                 - A list of the most important landmarks based on the user's preferences.
         """
-        self.logger.debug('Starting to fetch landmarks...')
+        self.logger.info(f'Starting to fetch landmarks around {center_coordinates}...')
         max_walk_dist = int((preferences.max_time_minute/2)/60*self.walking_speed*1000/self.detour_factor)
         radius = min(max_walk_dist, int(self.max_bbox_side/2))
@@ -90,10 +94,11 @@ class LandmarkManager:
             all_landmarks.update(current_landmarks)
             self.logger.info(f'Found {len(current_landmarks)} sightseeing landmarks')
 
+            if allow_clusters :
                 # special pipeline for historic neighborhoods
-            neighborhood_manager = ClusterManager(bbox, 'sightseeing')
-            historic_clusters = neighborhood_manager.generate_clusters()
-            all_landmarks.update(historic_clusters)
+                neighborhood_manager = ClusterManager(bbox, 'sightseeing')
+                historic_clusters = neighborhood_manager.generate_clusters()
+                all_landmarks.update(historic_clusters)
 
         # list for nature
         if preferences.nature.score != 0:
@@ -114,16 +119,19 @@ class LandmarkManager:
                     landmark.duration = 30
             all_landmarks.update(current_landmarks)
 
-            # special pipeline for shopping malls
-            shopping_manager = ClusterManager(bbox, 'shopping')
-            shopping_clusters = shopping_manager.generate_clusters()
-            all_landmarks.update(shopping_clusters)
+            if allow_clusters :
+                # special pipeline for shopping malls
+                shopping_manager = ClusterManager(bbox, 'shopping')
+                shopping_clusters = shopping_manager.generate_clusters()
+                all_landmarks.update(shopping_clusters)
 
 
-        landmarks_constrained = take_most_important(all_landmarks, self.n_important)
+        # DETAILS HERE
+        # self.logger.info(f'All landmarks generated : {len(all_landmarks)} landmarks around {center_coordinates}, and constrained to {len(landmarks_constrained)} most important ones.')
+        self.logger.info(f'Found {len(all_landmarks)} landmarks in total.')
 
-        return all_landmarks, landmarks_constrained
+        return sorted(all_landmarks, key=lambda x: x.attractiveness, reverse=True)
 
 
     def set_landmark_score(self, landmark: Landmark, landmarktype: str, preference_level: int) :
         """
@@ -147,6 +155,8 @@ class LandmarkManager:
                 score *= self.wikipedia_bonus
             if landmark.is_place_of_worship :
                 score *= self.church_coeff
+            if landmark.is_viewpoint :
+                score *= self.viewpoint_bonus
             if landmarktype == 'nature' :
                 score *= self.nature_coeff
 
@@ -196,12 +206,12 @@ class LandmarkManager:
                     out = 'ids center tags'
                 )
             except Exception as e:
-                self.logger.error(f"Error fetching landmarks: {e}")
+                self.logger.debug(f"Failed to fetch landmarks, proceeding without: {str(e)}")
                 continue
 
             return_list += self._to_landmarks(result, landmarktype, preference_level)
 
-        self.logger.debug(f"Fetched {len(return_list)} landmarks of type {landmarktype} in {bbox}")
+        # self.logger.debug(f"Fetched {len(return_list)} landmarks of type {landmarktype} in {bbox}")
 
         return return_list
 
```
```diff
@@ -235,6 +245,17 @@ class LandmarkManager:
                 continue
 
             tags = elem.get('tags')
+            n_tags=len(tags)
+
+            # Skip this landmark if not suitable
+            if tags.get('building:part') is not None :
+                continue
+            if tags.get('disused') is not None :
+                continue
+            if tags.get('boundary') is not None :
+                continue
+            if tags.get('shop') is not None and landmarktype != 'shopping' :
+                continue
 
             # Convert this to Landmark object
             landmark = Landmark(name=name,
```
```diff
@@ -243,57 +264,36 @@ class LandmarkManager:
                                 osm_id=id,
                                 osm_type=osm_type,
                                 attractiveness=0,
-                                n_tags=len(tags))
+                                n_tags=n_tags)
 
-            # self.logger.debug('added landmark.')
+            # Extract useful information for score calculation later down the road.
+            landmark.image_url = tags.get('image')
+            landmark.website_url = tags.get('website')
+            landmark.wiki_url = tags.get('wikipedia')
+            landmark.name_en = tags.get('name:en')
 
-            # Browse through tags to add information to landmark.
-            for key, value in tags.items():
-
-                # Skip this landmark if not suitable.
-                if key == 'building:part' and value == 'yes' :
-                    break
-                if 'disused:' in key :
-                    break
-                if 'boundary:' in key :
-                    break
-                if 'shop' in key and landmarktype != 'shopping' :
-                    break
-                # if value == 'apartments' :
-                #     break
-
-                # Fill in the other attributes.
-                if key == 'image' :
-                    landmark.image_url = value
-                if key == 'website' :
-                    landmark.website_url = value
-                if key == 'place_of_worship' :
-                    landmark.is_place_of_worship = True
-                if key == 'wikipedia' :
-                    landmark.wiki_url = value
-                if key == 'name:en' :
-                    landmark.name_en = value
-                if 'building:' in key or 'pay' in key :
-                    landmark.n_tags -= 1
+            # Check for place of worship
+            if tags.get('place_of_worship') is not None :
+                landmark.is_place_of_worship = True
+                landmark.name_en = tags.get('place_of_worship')
 
-                # Set the duration.
-                if value in ['museum', 'aquarium', 'planetarium'] :
-                    landmark.duration = 60
-                elif value == 'viewpoint' :
-                    landmark.is_viewpoint = True
-                    landmark.duration = 10
-                elif value == 'cathedral' :
-                    landmark.is_place_of_worship = False
-                    landmark.duration = 10
+            # Set the duration. Needed for the optimization.
+            if tags.get('amenity') in ['aquarium', 'planetarium'] or tags.get('tourism') in ['aquarium', 'museum', 'zoo']:
+                landmark.duration = 60
+            elif tags.get('tourism') == 'viewpoint' :
+                landmark.is_viewpoint = True
+                landmark.duration = 10
+            elif tags.get('building') == 'cathedral' :
+                landmark.is_place_of_worship = False
+                landmark.duration = 10
 
-            else:
-                self.set_landmark_score(landmark, landmarktype, preference_level)
-                landmarks.append(landmark)
-
-                continue
+            # Compute the score and add landmark to the list.
+            self.set_landmark_score(landmark, landmarktype, preference_level)
+            landmarks.append(landmark)
 
         return landmarks
 
 
 def dict_to_selector_list(d: dict) -> list:
     """
     Convert a dictionary of key-value pairs to a list of Overpass query strings.
```
backend/src/landmarks/landmarks_router.py (new file, 123 lines)

```python
"""Main app for backend api"""
import logging
import time
import random
from fastapi import HTTPException, APIRouter

from ..structs.landmark import Landmark
from ..structs.preferences import Preferences, Preference
from .landmarks_manager import LandmarkManager


# Setup the logger and the Landmarks Manager
logger = logging.getLogger(__name__)
manager = LandmarkManager()


# Initialize the API router
router = APIRouter()


@router.post("/get/landmarks")
def get_landmarks(
    preferences: Preferences,
    start: tuple[float, float],
    ) -> list[Landmark]:
    """
    Function that returns all available landmarks given some preferences and a start position.

    Args:
        preferences : the preferences specified by the user as the post body
        start : the coordinates of the starting point
    Returns:
        list[Landmark] : The full list of fetched landmarks
    """
    if preferences is None:
        raise HTTPException(status_code=406, detail="Preferences not provided or incomplete.")
    if (preferences.shopping.score == 0 and
        preferences.sightseeing.score == 0 and
        preferences.nature.score == 0) :
        raise HTTPException(status_code=406, detail="All preferences are 0.")
    if start is None:
        raise HTTPException(status_code=406, detail="Start coordinates not provided")
    if not (-90 <= start[0] <= 90 or -180 <= start[1] <= 180):
        raise HTTPException(status_code=422, detail="Start coordinates not in range")

    logger.info(f"Requested new trip generation. Details:\n\tCoordinates: {start}\n\tTime: {preferences.max_time_minute}\n\tSightseeing: {preferences.sightseeing.score}\n\tNature: {preferences.nature.score}\n\tShopping: {preferences.shopping.score}")

    start_time = time.time()

    # Generate the landmarks from the start location
    landmarks = manager.generate_landmarks_list(
        center_coordinates = start,
        preferences = preferences
    )

    if len(landmarks) == 0 :
        raise HTTPException(status_code=500, detail="No landmarks were found.")

    t_generate_landmarks = time.time() - start_time
    logger.info(f'Fetched {len(landmarks)} landmarks in \t: {round(t_generate_landmarks,3)} seconds')

    return landmarks


@router.post("/get-nearby/landmarks/{lat}/{lon}")
def get_landmarks_nearby(
    lat: float,
    lon: float
    ) -> list[Landmark] :
    """
    Suggests nearby landmarks based on a given latitude and longitude.

    This endpoint returns a curated list of up to 5 landmarks around the given geographical coordinates. It uses fixed preferences for
    sightseeing, shopping, and nature, with a maximum time constraint of 30 minutes to limit the number of landmarks returned.

    Args:
        lat (float): Latitude of the user's current location.
        lon (float): Longitude of the user's current location.

    Returns:
        list[Landmark]: A list of selected nearby landmarks.
    """
    logger.info(f'Fetching landmarks nearby ({lat}, {lon}).')

    # Define fixed preferences:
    prefs = Preferences(
        sightseeing = Preference(
            type='sightseeing',
            score=5
        ),
        shopping = Preference(
            type='shopping',
            score=2
        ),
        nature = Preference(
            type='nature',
            score=5
        ),
        max_time_minute=30,
        detour_tolerance_minute=0,
    )

    # Find the landmarks around the location
    landmarks_around = manager.generate_landmarks_list(
        center_coordinates = (lat, lon),
        preferences = prefs,
        allow_clusters=False,
    )

    if len(landmarks_around) == 0 :
        raise HTTPException(status_code=500, detail="No landmarks were found.")

    # select 8 - 12 landmarks from there
    if len(landmarks_around) > 8 :
        n_imp = random.randint(2,5)
        rest = random.randint(8 - n_imp, min(12, len(landmarks_around))-n_imp)

        print(f'len = {len(landmarks_around)}\nn_imp = {n_imp}\nrest = {rest}')
        landmarks_around = landmarks_around[:n_imp] + random.sample(landmarks_around[n_imp:], rest)

    logger.info(f'Found {len(landmarks_around)} landmarks to suggest nearby ({lat}, {lon}).')
    # logger.debug('Suggested landmarks :\n\t' + '\n\t'.join(f'{landmark}' for landmark in landmarks_around))
    return landmarks_around
```
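One detail worth flagging in the coordinate validation above: `if not (-90 <= start[0] <= 90 or -180 <= start[1] <= 180)` raises only when both the latitude and the longitude checks fail, because of the `or`. A point like `(95.0, 10.0)` therefore passes the 422 check. If the intent is that both components must be in range, the condition would use `and`; a minimal sketch contrasting the two (the stricter variant is an assumption about intent, not code from the repository):

```python
def in_range_as_written(lat: float, lon: float) -> bool:
    # Mirrors the router: accepts the point if EITHER component is in range.
    return -90 <= lat <= 90 or -180 <= lon <= 180

def in_range_strict(lat: float, lon: float) -> bool:
    # Presumed intent: BOTH components must be in range.
    return -90 <= lat <= 90 and -180 <= lon <= 180

assert in_range_as_written(95.0, 10.0)   # slips past the current check
assert not in_range_strict(95.0, 10.0)   # would be rejected with a 422
```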
```diff
@@ -33,14 +33,14 @@ def configure_logging():
         # silence the chatty logs loki generates itself
         logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING)
         # no need for time since it's added by loki or can be shown in kube logs
-        logging_format = '%(name)s - %(levelname)s - %(message)s'
+        logging_format = '%(name)-55s - %(levelname)-7s - %(message)s'
 
     else:
         # if we are in a debug (local) session, set verbose and rich logging
         from rich.logging import RichHandler
         logging_handlers = [RichHandler()]
         logging_level = logging.DEBUG if is_debug else logging.INFO
-        logging_format = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
+        logging_format = '%(asctime)s - %(name)-55s - %(levelname)-7s - %(message)s'
 
```
```diff
@@ -1,21 +1,24 @@
 """Main app for backend api"""
 
 import logging
+import yaml
 import time
 from contextlib import asynccontextmanager
-from fastapi import FastAPI, HTTPException, BackgroundTasks, Query
+from fastapi import FastAPI, HTTPException, BackgroundTasks
 
 from .logging_config import configure_logging
-from .structs.landmark import Landmark, Toilets
+from .structs.landmark import Landmark
 from .structs.preferences import Preferences
 from .structs.linked_landmarks import LinkedLandmarks
 from .structs.trip import Trip
-from .utils.landmarks_manager import LandmarkManager
-from .utils.toilets_manager import ToiletsManager
-from .overpass.overpass import fill_cache
+from .landmarks.landmarks_manager import LandmarkManager
+from .toilets.toilets_router import router as toilets_router
+from .optimization.optimization_router import router as optimization_router
+from .landmarks.landmarks_router import router as landmarks_router, get_landmarks_nearby
 from .optimization.optimizer import Optimizer
 from .optimization.refiner import Refiner
+from .overpass.overpass import fill_cache
 from .cache import client as cache_client
 from .constants import OPTIMIZER_PARAMETERS_PATH
 
 
 logger = logging.getLogger(__name__)
```
```diff
@@ -37,10 +40,29 @@ async def lifespan(app: FastAPI):
 app = FastAPI(lifespan=lifespan)
 
 
+# Fetches the global list of landmarks given preferences and start/end coordinates. Two routes
+# Call with "/get/landmarks/" for main entry point of the trip generation pipeline.
+# Call with "/get-nearby/landmarks/" for the NEARBY feature.
+app.include_router(landmarks_router)
+
+
+# Optimizes the trip given preferences. Second step in the main trip generation pipeline
+# Call with "/optimize/trip"
+app.include_router(optimization_router)
+
+
+# Fetches toilets near given coordinates.
+# Call with "/get/toilets" for fetching toilets around coordinates
+app.include_router(toilets_router)
+
+
+
+###### TO REMOVE ONCE THE FRONTEND IS UP TO DATE ######
 @app.post("/trip/new")
 def new_trip(preferences: Preferences,
              start: tuple[float, float],
-             end: tuple[float, float] | None = None,
+             end: tuple[float, float] | None = None,
              background_tasks: BackgroundTasks = None) -> Trip:
     """
     Main function to call the optimizer.
```
@@ -62,85 +84,101 @@ def new_trip(preferences: Preferences,
        raise HTTPException(status_code=406, detail="Start coordinates not provided")
    if not (-90 <= start[0] <= 90 and -180 <= start[1] <= 180):
        raise HTTPException(status_code=422, detail="Start coordinates not in range")
    if end is None:
        end = start
        logger.info("No end coordinates provided. Using start=end.")

    start_landmark = Landmark(name='start',
                              type='start',
                              location=(start[0], start[1]),
                              osm_type='start',
                              osm_id=0,
                              attractiveness=0,
                              duration=0,
                              must_do=True,
                              n_tags = 0)

    end_landmark = Landmark(name='finish',
                            type='finish',
                            location=(end[0], end[1]),
                            osm_type='end',
                            osm_id=0,
                            attractiveness=0,
                            duration=0,
                            must_do=True,
                            n_tags=0)
    logger.info(f"Requested new trip generation. Details:\n\tCoordinates: {start}\n\tTime: {preferences.max_time_minute}\n\tSightseeing: {preferences.sightseeing.score}\n\tNature: {preferences.nature.score}\n\tShopping: {preferences.shopping.score}")

    start_time = time.time()

    # Generate the landmarks from the start location
    landmarks, landmarks_short = manager.generate_landmarks_list(
    landmarks = manager.generate_landmarks_list(
        center_coordinates = start,
        preferences = preferences
    )

    if len(landmarks) == 0 :
        raise HTTPException(status_code=500, detail="No landmarks were found.")

    # insert start and finish to the landmarks list
    landmarks_short.insert(0, start_landmark)
    landmarks_short.append(end_landmark)

    t_generate_landmarks = time.time() - start_time
    logger.info(f'Fetched {len(landmarks)} landmarks in \t: {round(t_generate_landmarks,3)} seconds')


    start_time = time.time()

    logger.info(f"Requested new trip generation. Details:\n\tCoordinates: {start}\n\tTime: {preferences.max_time_minute}\n\tSightseeing: {preferences.sightseeing.score}\n\tNature: {preferences.nature.score}\n\tShopping: {preferences.shopping.score}")

    start_landmark = Landmark(
        name='start',
        type='start',
        location=(start[0], start[1]),
        osm_type='start',
        osm_id=0,
        attractiveness=0,
        duration=0,
        must_do=True,
        n_tags = 0
    )

    end_landmark = Landmark(
        name='finish',
        type='finish',
        location=(end[0], end[1]),
        osm_type='end',
        osm_id=0,
        attractiveness=0,
        duration=0,
        must_do=True,
        n_tags=0
    )

    # From the parameters load the length at which to truncate the landmarks list.
    with OPTIMIZER_PARAMETERS_PATH.open('r') as f:
        parameters = yaml.safe_load(f)
        n_important = parameters['N_important']

    # Truncate to the most important landmarks for a shorter list
    landmarks_short = landmarks[:n_important]

    # insert start and finish to the shorter landmarks list
    landmarks_short.insert(0, start_landmark)
    landmarks_short.append(end_landmark)

    # First stage optimization
    try:
        base_tour = optimizer.solve_optimization(preferences.max_time_minute, landmarks_short)
    except Exception as exc:
        logger.error(f"Trip generation failed: {str(exc)}")
        raise HTTPException(status_code=500, detail=f"Optimization failed: {str(exc)}") from exc

    t_first_stage = time.time() - start_time
    start_time = time.time()

    # Second stage optimization
    # TODO : only if necessary (not enough landmarks for ex.)
    try :
        refined_tour = refiner.refine_optimization(landmarks, base_tour,
                                                   preferences.max_time_minute,
                                                   preferences.detour_tolerance_minute)
    except TimeoutError as te :
        logger.error(f'Refiner failed : {str(te)} Using base tour.')
        refined_tour = base_tour
        refined_tour = refiner.refine_optimization(
            landmarks, base_tour,
            preferences.max_time_minute,
            preferences.detour_tolerance_minute
        )
    except Exception as exc :
        raise HTTPException(status_code=500, detail=f"An unexpected error occurred: {str(exc)}") from exc
        logger.warning(f"Refiner failed. Proceeding with base trip: {str(exc)}")
        refined_tour = base_tour

    t_second_stage = time.time() - start_time

    logger.debug(f'First stage optimization\t: {round(t_first_stage,3)} seconds')
    logger.debug(f'Second stage optimization\t: {round(t_second_stage,3)} seconds')
    logger.info(f'Total computation time\t: {round(t_first_stage + t_second_stage,3)} seconds')

    linked_tour = LinkedLandmarks(refined_tour)

    # upon creation of the trip, persistence of both the trip and its landmarks is ensured.
    trip = Trip.from_linked_landmarks(linked_tour, cache_client)
    logger.info(f'Generated a trip of {trip.total_time} minutes with {len(refined_tour)} landmarks in {round(t_generate_landmarks + t_first_stage + t_second_stage,3)} seconds.')
    logger.info(f'Optimized a trip of {trip.total_time} minutes with {len(refined_tour)} landmarks in {round(t_first_stage + t_second_stage,3)} seconds.')
    logger.info('Detailed trip :\n\t' + '\n\t'.join(f'{landmark}' for landmark in refined_tour))

    background_tasks.add_task(fill_cache)

    return trip



#### For already existing trips/landmarks
@app.get("/trip/{trip_uuid}")
def get_trip(trip_uuid: str) -> Trip:
@@ -157,6 +195,7 @@ def get_trip(trip_uuid: str) -> Trip:
        trip = cache_client.get(f"trip_{trip_uuid}")
        return trip
    except KeyError as exc:
        logger.error(f"Failed to fetch trip with UUID {trip_uuid}: {str(exc)}")
        raise HTTPException(status_code=404, detail="Trip not found") from exc


@@ -175,32 +214,46 @@ def get_landmark(landmark_uuid: str) -> Landmark:
        landmark = cache_client.get(f"landmark_{landmark_uuid}")
        return landmark
    except KeyError as exc:
        logger.error(f"Failed to fetch landmark with UUID {landmark_uuid}: {str(exc)}")
        raise HTTPException(status_code=404, detail="Landmark not found") from exc


@app.post("/toilets/new")
def get_toilets(location: tuple[float, float] = Query(...), radius: int = 500) -> list[Toilets] :
@app.post("/trip/recompute-time/{trip_uuid}/{removed_landmark_uuid}")
def update_trip_time(trip_uuid: str, removed_landmark_uuid: str) -> Trip:
    """
    Endpoint to find toilets within a specified radius from a given location.

    This endpoint expects the `location` and `radius` as **query parameters**, not in the request body.
    Updates the reaching times of a given trip when removing a landmark.

    Args:
        location (tuple[float, float]): The latitude and longitude of the location to search from.
        radius (int, optional): The radius (in meters) within which to search for toilets. Defaults to 500 meters.
        trip_uuid (str) : unique identifier of the trip to update.
        removed_landmark_uuid (str) : unique identifier of the Landmark to remove.

    Returns:
        list[Toilets]: A list of Toilets objects that meet the criteria.
        (Trip) : the updated Trip with recomputed reaching times.
    """
    if location is None:
        raise HTTPException(status_code=406, detail="Coordinates not provided or invalid")
    if not (-90 <= location[0] <= 90 and -180 <= location[1] <= 180):
        raise HTTPException(status_code=422, detail="Start coordinates not in range")

    toilets_manager = ToiletsManager(location, radius)

    try :
        toilets_list = toilets_manager.generate_toilet_list()
        return toilets_list
    # First, fetch the trip in the cache.
    try:
        trip = cache_client.get(f'trip_{trip_uuid}')
    except KeyError as exc:
        raise HTTPException(status_code=404, detail="No toilets found") from exc
        logger.error(f"Failed to update trip with UUID {trip_uuid} (trip not found): {str(exc)}")
        raise HTTPException(status_code=404, detail='Trip not found') from exc

    landmarks = []
    next_uuid = trip.first_landmark_uuid

    # Extract landmarks
    try :
        while next_uuid is not None:
            landmark = cache_client.get(f'landmark_{next_uuid}')
            # Filter out the removed landmark.
            if next_uuid != removed_landmark_uuid :
                landmarks.append(landmark)
            next_uuid = landmark.next_uuid  # Prepare for the next iteration
    except KeyError as exc:
        logger.error(f"Failed to update trip with UUID {trip_uuid} : {str(exc)}")
        raise HTTPException(status_code=404, detail=f'landmark {next_uuid} not found') from exc

    # Re-link everything and compute times again
    linked_tour = LinkedLandmarks(landmarks)
    trip = Trip.from_linked_landmarks(linked_tour, cache_client)

    return trip
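For context, the recompute endpoint above is essentially a singly-linked-list traversal over cache entries. A minimal standalone sketch of that walk; `CachedLandmark` and the dict-backed cache are illustrative stand-ins, not the project's real types:

```python
# Sketch of the linked-list walk used by /trip/recompute-time.
from dataclasses import dataclass
from typing import Optional

@dataclass
class CachedLandmark:
    uuid: str
    next_uuid: Optional[str]

def collect_landmarks(cache: dict, first_uuid: str, removed_uuid: str) -> list[CachedLandmark]:
    """Walk the singly linked list stored in the cache, skipping one landmark."""
    landmarks = []
    next_uuid = first_uuid
    while next_uuid is not None:
        landmark = cache[f'landmark_{next_uuid}']   # a KeyError becomes a 404 in the real endpoint
        if landmark.uuid != removed_uuid:
            landmarks.append(landmark)
        next_uuid = landmark.next_uuid
    return landmarks

cache = {
    'landmark_a': CachedLandmark('a', 'b'),
    'landmark_b': CachedLandmark('b', 'c'),
    'landmark_c': CachedLandmark('c', None),
}
print([lm.uuid for lm in collect_landmarks(cache, 'a', 'b')])  # ['a', 'c']
```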
141
backend/src/optimization/optimization_router.py
Normal file
@@ -0,0 +1,141 @@
"""API entry point for the trip optimization."""

import logging
import time
import yaml
from fastapi import HTTPException, APIRouter, BackgroundTasks

from .optimizer import Optimizer
from .refiner import Refiner
from ..structs.landmark import Landmark
from ..structs.preferences import Preferences
from ..structs.linked_landmarks import LinkedLandmarks
from ..structs.trip import Trip
from ..overpass.overpass import fill_cache
from ..cache import client as cache_client
from ..constants import OPTIMIZER_PARAMETERS_PATH


# Setup the Logger, Optimizer and Refiner
logger = logging.getLogger(__name__)
optimizer = Optimizer()
refiner = Refiner(optimizer=optimizer)


# Initialize the API router
router = APIRouter()


@router.post("/optimize/trip")
def optimize_trip(
    preferences: Preferences,
    landmarks: list[Landmark],
    start: tuple[float, float],
    end: tuple[float, float] | None = None,
    background_tasks: BackgroundTasks = None
) -> Trip:
    """
    Main function to call the optimizer.

    Args:
        preferences (Preferences) : the preferences specified by the user as the post body.
        landmarks (list[Landmark]) : the list of landmarks to optimize over.
        start (tuple[float, float]) : the coordinates of the starting point.
        end (tuple[float, float]) : the coordinates of the finishing point.
        background_tasks (BackgroundTasks) : necessary to fill the cache after the trip has been returned.
    Returns:
        (Trip) : the optimized trip.
    """
    if preferences is None:
        raise HTTPException(status_code=406, detail="Preferences not provided or incomplete.")
    if len(landmarks) == 0 :
        raise HTTPException(status_code=406, detail="No landmarks provided for computing the trip.")
    if (preferences.shopping.score == 0 and
        preferences.sightseeing.score == 0 and
        preferences.nature.score == 0) :
        raise HTTPException(status_code=406, detail="All preferences are 0.")
    if start is None:
        raise HTTPException(status_code=406, detail="Start coordinates not provided")
    if not (-90 <= start[0] <= 90 and -180 <= start[1] <= 180):
        raise HTTPException(status_code=422, detail="Start coordinates not in range")
    if end is None:
        end = start
        logger.info("No end coordinates provided. Using start=end.")

    # Start the timer
    start_time = time.time()

    logger.info(f"Requested new trip generation. Details:\n\tCoordinates: {start}\n\tTime: {preferences.max_time_minute}\n\tSightseeing: {preferences.sightseeing.score}\n\tNature: {preferences.nature.score}\n\tShopping: {preferences.shopping.score}")

    start_landmark = Landmark(
        name='start',
        type='start',
        location=(start[0], start[1]),
        osm_type='start',
        osm_id=0,
        attractiveness=0,
        duration=0,
        must_do=True,
        n_tags = 0
    )

    end_landmark = Landmark(
        name='finish',
        type='finish',
        location=(end[0], end[1]),
        osm_type='end',
        osm_id=0,
        attractiveness=0,
        duration=0,
        must_do=True,
        n_tags=0
    )

    # From the parameters load the length at which to truncate the landmarks list.
    with OPTIMIZER_PARAMETERS_PATH.open('r') as f:
        parameters = yaml.safe_load(f)
        n_important = parameters['N_important']

    # Truncate to the most important landmarks for a shorter list
    landmarks_short = landmarks[:n_important]

    # insert start and finish to the shorter landmarks list
    landmarks_short.insert(0, start_landmark)
    landmarks_short.append(end_landmark)

    # First stage optimization
    try:
        base_tour = optimizer.solve_optimization(preferences.max_time_minute, landmarks_short)
    except Exception as exc:
        logger.error(f"Trip generation failed: {str(exc)}")
        raise HTTPException(status_code=500, detail=f"Optimization failed: {str(exc)}") from exc

    t_first_stage = time.time() - start_time
    start_time = time.time()

    # Second stage optimization
    try :
        refined_tour = refiner.refine_optimization(
            landmarks, base_tour,
            preferences.max_time_minute,
            preferences.detour_tolerance_minute
        )
    except Exception as exc :
        logger.warning(f"Refiner failed. Proceeding with base trip: {str(exc)}")
        refined_tour = base_tour

    t_second_stage = time.time() - start_time

    logger.debug(f'First stage optimization\t: {round(t_first_stage,3)} seconds')
    logger.debug(f'Second stage optimization\t: {round(t_second_stage,3)} seconds')
    logger.info(f'Total computation time\t: {round(t_first_stage + t_second_stage,3)} seconds')
    linked_tour = LinkedLandmarks(refined_tour)

    # upon creation of the trip, persistence of both the trip and its landmarks is ensured.
    trip = Trip.from_linked_landmarks(linked_tour, cache_client)
    logger.info(f'Optimized a trip of {trip.total_time} minutes with {len(refined_tour)} landmarks in {round(t_first_stage + t_second_stage,3)} seconds.')
    logger.info('Detailed trip :\n\t' + '\n\t'.join(f'{landmark}' for landmark in refined_tour))

    background_tasks.add_task(fill_cache)

    return trip
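Taken together, the two routers split what the legacy `/trip/new` endpoint did in one shot. A minimal sketch of the pipeline from a client's perspective, mirroring the payloads used by the test-suite later in this changeset (the import path is an assumption about the package layout):

```python
# Two-step trip generation: fetch candidates, then optimize.
from fastapi.testclient import TestClient
from backend.src.main import app  # adjust to your layout

client = TestClient(app)
prefs = {
    "sightseeing": {"type": "sightseeing", "score": 5},
    "nature": {"type": "nature", "score": 0},
    "shopping": {"type": "shopping", "score": 0},
    "max_time_minute": 120,
    "detour_tolerance_minute": 0,
}
start = [45.7576485, 4.8330241]  # Lyon, Bellecour

# Step 1: candidate landmarks around the start point.
landmarks = client.post("/get/landmarks",
                        json={"preferences": prefs, "start": start}).json()

# Step 2: hand the candidates to the optimizer to get a Trip back.
trip = client.post("/optimize/trip",
                   json={"preferences": prefs, "landmarks": landmarks,
                         "start": start, "end": None}).json()
print(trip["total_time"], trip["first_landmark_uuid"])
```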
@@ -257,7 +257,6 @@ class Optimizer:
        Returns:
            None: This function modifies the `prob` object by adding L-2 equality constraints in-place.
        """
        # FIXME: weird 0 artifact in the coefficients popping up
        # Loop through rows 1 to L-2 to prevent stacked ones
        for i in range(1, L-1):
            # Add the constraint that sums across each "row" or "block" in the decision variables
@@ -590,15 +589,15 @@ class Optimizer:
        try :
            prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit+1, gapRel=self.gap_rel))
        except Exception as exc :
            raise Exception(f"No solution found: {exc}") from exc
            raise Exception(f"No solution found: {str(exc)}") from exc
        status = pl.LpStatus[prob.status]
        solution = [pl.value(var) for var in x]  # The values of the decision variables (will be 0 or 1)

        self.logger.debug("First results are out. Looking out for circles and correcting.")
        self.logger.debug("First results are out. Looking out for circles and correcting...")

        # Raise error if no solution is found. FIXME: for now this throws the internal server error
        if status != 'Optimal' :
            self.logger.error("The problem is overconstrained, no solution on first try.")
            self.logger.warning("The problem is overconstrained, no solution on first try.")
            raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.")

        # If there is a solution, we're good to go, just check for connectedness
@@ -608,7 +607,7 @@ class Optimizer:
        while circles is not None :
            i += 1
            if i == self.max_iter :
                self.logger.error(f'Timeout: No solution found after {self.max_iter} iterations.')
                self.logger.warning(f'Timeout: No solution found after {self.max_iter} iterations.')
                raise TimeoutError(f"Optimization took too long. No solution found after {self.max_iter} iterations.")

            for circle in circles :
@@ -618,12 +617,13 @@ class Optimizer:
            try :
                prob.solve(pl.PULP_CBC_CMD(msg=False, timeLimit=self.time_limit, gapRel=self.gap_rel))
            except Exception as exc :
                raise Exception(f"No solution found: {exc}") from exc
                self.logger.warning(f"No solution found: {str(exc)}")
                raise Exception(f"No solution found: {str(exc)}") from exc

            solution = [pl.value(var) for var in x]

            if pl.LpStatus[prob.status] != 'Optimal' :
                self.logger.error("The problem is overconstrained, no solution after {i} cycles.")
                self.logger.warning(f"The problem is overconstrained, no solution after {i} cycles.")
                raise ArithmeticError("No solution could be found. Please try again with more time or different preferences.")

            circles = self.is_connected(solution)
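The while-circles loop above is a lazy subtour-elimination scheme: solve, look for disconnected cycles, forbid them, and re-solve until the tour is connected or `max_iter` is hit. A self-contained toy illustration of the same pattern on a 4-city problem; this is generic PuLP code, not the project's actual model or constraints:

```python
# Toy solve -> detect circle -> add constraint -> resolve loop (4-city TSP).
import pulp as pl

n = 4
dist = [[0, 2, 9, 10], [2, 0, 6, 4], [9, 6, 0, 3], [10, 4, 3, 0]]
prob = pl.LpProblem("toy_tsp", pl.LpMinimize)
x = {(i, j): pl.LpVariable(f"x_{i}_{j}", cat="Binary")
     for i in range(n) for j in range(n) if i != j}
prob += pl.lpSum(dist[i][j] * x[i, j] for i, j in x)
for i in range(n):  # each city is entered and left exactly once
    prob += pl.lpSum(x[i, j] for j in range(n) if j != i) == 1
    prob += pl.lpSum(x[j, i] for j in range(n) if j != i) == 1

def subtour(sol):
    """Return the cycle through city 0 if it misses some cities, else None."""
    succ = {i: j for (i, j), v in sol.items() if v > 0.5}
    seen, node, cycle = set(), 0, []
    while node not in seen:
        seen.add(node)
        cycle.append(node)
        node = succ[node]
    return cycle if len(cycle) < n else None

max_iter = 10
for _ in range(max_iter):
    prob.solve(pl.PULP_CBC_CMD(msg=False))
    sol = {k: pl.value(v) for k, v in x.items()}
    cycle = subtour(sol)
    if cycle is None:
        break
    # Forbid this subtour and resolve, exactly like the Optimizer's circle loop.
    prob += pl.lpSum(x[i, j] for i in cycle for j in cycle if i != j) <= len(cycle) - 1
else:
    raise TimeoutError(f"No connected tour after {max_iter} iterations.")
print("tour edges:", [k for k, v in sol.items() if v > 0.5])
```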
@@ -6,7 +6,6 @@ from shapely import buffer, LineString, Point, Polygon, MultiPoint, concave_hull

from ..structs.landmark import Landmark
from ..utils.get_time_distance import get_time
from ..utils.take_most_important import take_most_important
from .optimizer import Optimizer
from ..constants import OPTIMIZER_PARAMETERS_PATH

@@ -238,7 +237,7 @@ class Refiner :
            if self.is_in_area(area, landmark.location) and landmark.name not in visited_names:
                second_order_landmarks.append(landmark)

        return take_most_important(second_order_landmarks, int(self.max_landmarks_refiner*0.75))
        return sorted(second_order_landmarks, key=lambda x: x.attractiveness, reverse=True)[:int(self.max_landmarks_refiner*0.75)]


    # Try to fix the shortest path using shapely
@@ -278,7 +277,7 @@ class Refiner :
        better_tour_poly = concave_hull(MultiPoint(coords))  # Create concave hull with "core" of tour leaving out start and finish
        xs, ys = better_tour_poly.exterior.xy
        """
        ERROR HERE :
        FIXED : ERROR HERE :
        Exception has occurred: AttributeError
        'LineString' object has no attribute 'exterior'
        """
@@ -356,7 +355,7 @@ class Refiner :

        # If unsuccessful optimization, use the base_tour.
        if new_tour is None:
            self.logger.warning("No solution found for the refined tour. Returning the initial tour.")
            self.logger.warning("Refiner failed: No solution found during second stage optimization.")
            new_tour = base_tour

        # If only one landmark, return it.
@@ -369,6 +368,7 @@ class Refiner :
        # Fix the tour using Polygons if the path looks weird.
        # Conditions : circular trip and invalid polygon.
        if base_tour[0].location == base_tour[-1].location and not better_poly.is_valid :
            self.logger.debug("Tours might be funky, attempting to correct with polygons")
            better_tour = self.fix_using_polygon(better_tour)

        return better_tour
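The AttributeError recorded in that docstring is a known shapely pitfall: `concave_hull` can return a LineString (or Point) when the input points are (near-)collinear, and only Polygon has `.exterior`. A standalone sketch of the guard one would use; this is generic shapely 2.x usage, independent of the Refiner:

```python
# Guarding against non-polygonal hulls before touching .exterior.
from shapely import MultiPoint, Polygon, concave_hull

def hull_coords(points: list[tuple[float, float]]):
    """Return the exterior coordinates of the concave hull, falling back to the
    raw geometry coordinates when the hull degenerates to a line or point."""
    hull = concave_hull(MultiPoint(points))
    if isinstance(hull, Polygon):
        return list(hull.exterior.coords)
    return list(hull.coords)  # LineString / Point fallback

print(hull_coords([(0, 0), (1, 1), (2, 2)]))    # collinear -> fallback branch
print(hull_coords([(0, 0), (1, 0), (0.5, 1)]))  # triangle -> Polygon exterior
```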
@@ -1,3 +1,4 @@
"""Module defining the handling of cache data from Overpass requests."""
import os
import json
import hashlib
@@ -61,7 +62,7 @@ class JSONCache(CachingStrategyBase):
            return None

    def set(self, key, value):
        """Save the JSON data as an ElementTree to the cache."""
        """Save the JSON data in the cache."""
        filename = self._filename(key)
        try:
            # Write the JSON data to the cache file
@@ -94,7 +95,7 @@ class JSONCache(CachingStrategyBase):

    def close(self):
        """Cleanup method, if needed."""
        pass


class CachingStrategy:
    """
@@ -107,6 +108,7 @@ class CachingStrategy:

    @classmethod
    def use(cls, strategy_name='JSON', **kwargs):
        """Define the caching strategy to use."""
        if cls.__strategy:
            cls.__strategy.close()

@@ -119,10 +121,12 @@ class CachingStrategy:

    @classmethod
    def get(cls, key):
        """Get the data from the cache."""
        return cls.__strategy.get(key)

    @classmethod
    def set(cls, key, value):
        """Save the data in the cache."""
        cls.__strategy.set(key, value)

    @classmethod
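For orientation, the facade above is meant to be used roughly as follows. This is a sketch only: the import path and the `cache_dir` keyword are assumptions based on the file-per-key design shown, not confirmed API:

```python
# Sketch of the CachingStrategy facade in use.
from backend.src.overpass.caching_strategy import CachingStrategy  # hypothetical path

CachingStrategy.use('JSON', cache_dir='/tmp/osm_cache')            # 'JSON' is the default above
CachingStrategy.set('overpass_node_abc123', [{"type": "node", "id": 1}])
elements = CachingStrategy.get('overpass_node_abc123')
print(elements)
```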
@@ -1,5 +1,6 @@
"""Module allowing connection to the Overpass API to fetch data from OSM."""
import os
import time
import urllib
import math
import logging
@@ -52,24 +53,24 @@ class Overpass :
        # Retrieve cached data and identify missing cache entries
        cached_responses, non_cached_cells = self._retrieve_cached_data(overlapping_cells, osm_types, selector, conditions, out)

        self.logger.info(f'Cache hit for {len(overlapping_cells)-len(non_cached_cells)}/{len(overlapping_cells)} quadrants.')
        self.logger.debug(f'Cache hit for {len(overlapping_cells)-len(non_cached_cells)}/{len(overlapping_cells)} quadrants.')

        # If there is no missing data, return the cached responses after filtering.
        if not non_cached_cells :
            return Overpass._filter_landmarks(cached_responses, bbox)

        # If there is no cached data, fetch all from Overpass.
        elif not cached_responses :
        if not cached_responses :
            query_str = Overpass.build_query(bbox, osm_types, selector, conditions, out)
            self.logger.debug(f'Query string: {query_str}')
            return self.fetch_data_from_api(query_str)

        # Hybrid cache: some data from Overpass, some data from cache.
        else :
            # Resize the bbox for smaller search area and build new query string.
            non_cached_bbox = Overpass._get_non_cached_bbox(non_cached_cells, bbox)
            query_str = Overpass.build_query(non_cached_bbox, osm_types, selector, conditions, out)
            non_cached_responses = self.fetch_data_from_api(query_str)
            return Overpass._filter_landmarks(cached_responses, bbox) + non_cached_responses
        # Resize the bbox for smaller search area and build new query string.
        non_cached_bbox = Overpass._get_non_cached_bbox(non_cached_cells, bbox)
        query_str = Overpass.build_query(non_cached_bbox, osm_types, selector, conditions, out)
        self.logger.debug(f'Query string: {query_str}')
        non_cached_responses = self.fetch_data_from_api(query_str)
        return Overpass._filter_landmarks(cached_responses, bbox) + non_cached_responses


    def fetch_data_from_api(self, query_str: str) -> List[dict]:
@@ -94,9 +95,10 @@ class Overpass :
            return elements

        except urllib.error.URLError as e:
            self.logger.error(f"Error connecting to Overpass API: {e}")
            raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
            self.logger.error(f"Error connecting to Overpass API: {str(e)}")
            raise ConnectionError(f"Error connecting to Overpass API: {str(e)}") from e
        except Exception as exc :
            self.logger.error(f"Unexpected error while fetching data from Overpass: {str(exc)}")
            raise Exception(f'An unexpected error occurred: {str(exc)}') from exc


@@ -112,7 +114,7 @@ class Overpass :
        with urllib.request.urlopen(request) as response:

            # Convert the HTTPResponse to a string and load data
            response_data = response.read().decode('utf-8')
            data = json.loads(response_data)

            # Get elements and set cache
@@ -120,7 +122,7 @@ class Overpass :
            self.caching_strategy.set(cache_key, elements)
            self.logger.debug(f'Cache set for {cache_key}')
        except urllib.error.URLError as e:
            raise ConnectionError(f"Error connecting to Overpass API: {e}") from e
            raise ConnectionError(f"Error connecting to Overpass API: {str(e)}") from e
        except Exception as exc :
            raise Exception(f'An unexpected error occurred: {str(exc)}') from exc

@@ -151,7 +153,7 @@ class Overpass :
        - If no conditions are provided, the query will just use the `selector` to filter the OSM
        elements without additional constraints.
        """
        query = '[out:json];('
        query = '[out:json][timeout:20];('

        # convert the bbox to string.
        bbox_str = f"({','.join(map(str, bbox))})"
@@ -307,9 +309,9 @@ class Overpass :
        if min_lat == float('inf') or min_lon == float('inf'):
            return None

        return (max(min_lat, original_bbox[0]),
                max(min_lon, original_bbox[1]),
                min(max_lat, original_bbox[2]),
        return (max(min_lat, original_bbox[0]),
                max(min_lon, original_bbox[1]),
                min(max_lat, original_bbox[2]),
                min(max_lon, original_bbox[3]))


@@ -386,8 +388,8 @@ def get_base_info(elem: dict, osm_type: OSM_TYPES, with_name=False) :
    if with_name :
        name = elem.get('tags', {}).get('name')
        return osm_id, coords, name
    else :
        return osm_id, coords

    return osm_id, coords


def fill_cache():
@@ -397,18 +399,27 @@ def fill_cache():
    """
    overpass = Overpass()

    n_files = 0
    total = 0

    overpass.logger.info('Trip successfully returned, starting to fill cache.')

    with os.scandir(OSM_CACHE_DIR) as it:
        for entry in it:
            if entry.is_file() and entry.name.startswith('hollow_'):

                total += 1
                try :
                    # Read the whole file content as a string
                    with open(entry.path, 'r') as f:
                    with open(entry.path, 'r', encoding='utf-8') as f:
                        # load data and fill the cache with the query and key
                        json_data = json.load(f)
                        overpass.fill_cache(json_data)
                        n_files += 1
                        time.sleep(1)
                    # Now delete the file as the cache is filled
                    os.remove(entry.path)

                except Exception as exc :
                    overpass.logger.error(f'An error occurred while parsing file {entry.path} as .json file')
                    overpass.logger.error(f'An error occurred while parsing file {entry.path} as .json file: {str(exc)}')

    overpass.logger.info(f"Successfully filled {n_files}/{total} cache files.")
@@ -72,6 +72,7 @@ sightseeing:
    # - castle
    # - museum


  museums:
    tourism:
      - museum
@@ -1,12 +1,10 @@
max_bbox_side: 4000 #m
radius_close_to: 50
church_coeff: 0.55
nature_coeff: 1.4
church_coeff: 0.75
nature_coeff: 1.6
overall_coeff: 10
tag_exponent: 1.15
image_bonus: 1.1
viewpoint_bonus: 5
viewpoint_bonus: 10
wikipedia_bonus: 1.25
name_bonus: 3
N_important: 60
pay_bonus: -1
@@ -5,5 +5,6 @@ max_landmarks: 10
max_landmarks_refiner: 20
overshoot: 0.0016
time_limit: 1
gap_rel: 0.05
max_iter: 40
gap_rel: 0.025
max_iter: 80
N_important: 60
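These knobs are read straight from the YAML and handed to the solver; a small sketch of the consumption pattern (the file path here is illustrative, the keys are the ones listed above):

```python
# Sketch of how the solver parameters are consumed, mirroring the pattern used
# in the routers above (which go through OPTIMIZER_PARAMETERS_PATH).
import yaml
from pathlib import Path

params_path = Path("backend/src/parameters/optimizer_parameters.yaml")  # illustrative path
with params_path.open('r') as f:
    params = yaml.safe_load(f)

time_limit = params['time_limit']    # seconds per CBC solve
gap_rel = params['gap_rel']          # relative MIP gap passed to PULP_CBC_CMD
max_iter = params['max_iter']        # cap on circle-elimination iterations
n_important = params['N_important']  # truncation length for the landmark list
print(time_limit, gap_rel, max_iter, n_important)
```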
@@ -1,8 +1,7 @@
"""Definition of the Landmark class to handle visitable objects across the world."""

from typing import Optional, Literal
from uuid import uuid4, UUID
from pydantic import BaseModel, ConfigDict, Field
from pydantic import BaseModel, Field


# Output to frontend
@@ -50,7 +49,8 @@ class Landmark(BaseModel) :
    image_url : Optional[str] = None
    website_url : Optional[str] = None
    wiki_url : Optional[str] = None
    description : Optional[str] = None  # TODO future
    # keywords: Optional[dict] = {}
    # description : Optional[str] = None
    duration : Optional[int] = 5
    name_en : Optional[str] = None

@@ -69,6 +69,7 @@ class Landmark(BaseModel) :
    is_viewpoint : Optional[bool] = False
    is_place_of_worship : Optional[bool] = False


    def __str__(self) -> str:
        """
        String representation of the Landmark object.
@@ -122,26 +123,3 @@ class Landmark(BaseModel) :
        return (self.uuid == value.uuid or
                self.osm_id == value.osm_id or
                (self.name == value.name and self.distance(value) < 0.001))


class Toilets(BaseModel) :
    """
    Model for toilets. When false/empty the information is either false or not known.
    """
    location : tuple
    wheelchair : Optional[bool] = False
    changing_table : Optional[bool] = False
    fee : Optional[bool] = False
    opening_hours : Optional[str] = ""


    def __str__(self) -> str:
        """
        String representation of the Toilets object.

        Returns:
            str: A formatted string with the toilets location.
        """
        return f'Toilets @{self.location}'

    model_config = ConfigDict(from_attributes=True)
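The `__eq__` shown above treats two landmarks as the same if their uuid or osm_id match, or if they share a name within roughly 100 m of each other. A sketch of that behaviour; the import path and the exact set of required constructor fields are assumptions based on the constructor calls earlier in this changeset:

```python
# Illustrative Landmark equality check (field values are made up).
from backend.src.structs.landmark import Landmark  # adjust to your layout

eiffel_a = Landmark(name='Eiffel Tower', type='sightseeing',
                    location=(48.8583, 2.2945), osm_type='way', osm_id=5013364,
                    attractiveness=900, n_tags=20)
eiffel_b = Landmark(name='Eiffel Tower', type='sightseeing',
                    location=(48.8584, 2.2946), osm_type='node', osm_id=0,
                    attractiveness=0, n_tags=0)

# Same name, distance below the 0.001 threshold -> considered equal.
print(eiffel_a == eiffel_b)
```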
@@ -2,6 +2,7 @@

from .landmark import Landmark
from ..utils.get_time_distance import get_time
from ..utils.description import description_and_keywords

class LinkedLandmarks:
    """
@@ -35,18 +36,23 @@ class LinkedLandmarks:
        Create the links between the landmarks in the list by setting their
        .next_uuid and the .time_to_next attributes.
        """

        # Mark secondary landmarks as such
        self.update_secondary_landmarks()

        for i, landmark in enumerate(self._landmarks[:-1]):
            # Set uuid of the next landmark
            landmark.next_uuid = self._landmarks[i + 1].uuid

            # Adjust time to reach and total time
            time_to_next = get_time(landmark.location, self._landmarks[i + 1].location)
            landmark.time_to_reach_next = time_to_next
            self.total_time += time_to_next
            self.total_time += landmark.duration

            # Fill in the keywords and description. GOOD IDEA, BAD EXECUTION, tags aren't available anymore at this stage
            # landmark.description, landmark.keywords = description_and_keywords(tags)

        self._landmarks[-1].next_uuid = None
        self._landmarks[-1].time_to_reach_next = 0
@@ -1,7 +1,7 @@
"""Defines the Preferences used as input for trip generation."""

from typing import Optional, Literal
from pydantic import BaseModel
from pydantic import BaseModel, field_validator


class Preference(BaseModel) :
@@ -15,6 +15,13 @@ class Preference(BaseModel) :
    type: Literal['sightseeing', 'nature', 'shopping', 'start', 'finish']
    score: int  # score could be from 1 to 5

    @field_validator("type")
    @classmethod
    def validate_type(cls, v):
        if v not in {'sightseeing', 'nature', 'shopping', 'start', 'finish'}:
            raise ValueError(f"Invalid type: {v}")
        return v


# Input for optimization
class Preferences(BaseModel) :
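With the `Literal` annotation plus this validator, a bad preference type is rejected at parse time rather than deep in the pipeline. A quick sketch of the behaviour (import path assumed):

```python
# Demonstrates the Preference type validation added above.
from pydantic import ValidationError
from backend.src.structs.preferences import Preference  # adjust to your layout

ok = Preference(type='nature', score=3)        # parses fine
try:
    Preference(type='doesnotexist', score=3)   # rejected by Literal + validator
except ValidationError as err:
    print(err.errors()[0]['msg'])
```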
26
backend/src/structs/toilets.py
Normal file
@@ -0,0 +1,26 @@
"""Definition of the Toilets class."""
from typing import Optional
from pydantic import BaseModel, ConfigDict


class Toilets(BaseModel) :
    """
    Model for toilets. When false/empty the information is either false or not known.
    """
    location : tuple
    wheelchair : Optional[bool] = False
    changing_table : Optional[bool] = False
    fee : Optional[bool] = False
    opening_hours : Optional[str] = ""


    def __str__(self) -> str:
        """
        String representation of the Toilets object.

        Returns:
            str: A formatted string with the toilets location.
        """
        return f'Toilets @{self.location}'

    model_config = ConfigDict(from_attributes=True)
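A quick sketch of the model in use, with illustrative values (import path assumed):

```python
from backend.src.structs.toilets import Toilets  # adjust to your layout

toilet = Toilets(location=(48.8566, 2.3522), wheelchair=True, opening_hours="24/7")
print(toilet)      # -> Toilets @(48.8566, 2.3522)
print(toilet.fee)  # -> False (free, or simply not known)
```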
@@ -19,30 +19,50 @@ def invalid_client():
        ([48.8566, 2.3522], {}, 422),

        # Invalid cases: incomplete preferences.
        ([48.084588, 7.280405], {"sightseeing": {"type": "nature", "score": 5},  # no shopping
        ([48.084588, 7.280405], {"sightseeing": {"type": "sightseeing", "score": 5},  # no shopping pref
                                 "nature": {"type": "nature", "score": 5},
                                 }, 422),
        ([48.084588, 7.280405], {"sightseeing": {"type": "nature", "score": 5},  # no nature
        ([48.084588, 7.280405], {"sightseeing": {"type": "sightseeing", "score": 5},  # no nature pref
                                 "shopping": {"type": "shopping", "score": 5},
                                 }, 422),
        ([48.084588, 7.280405], {"nature": {"type": "nature", "score": 5},  # no sightseeing
        ([48.084588, 7.280405], {"nature": {"type": "nature", "score": 5},  # no sightseeing pref
                                 "shopping": {"type": "shopping", "score": 5},
                                 }, 422),
        ([48.084588, 7.280405], {"sightseeing": {"type": "nature", "score": 1},  # mixed up preference types. TODO: i suggest reducing the complexity by removing the Preference object.
                                 "nature": {"type": "shopping", "score": 1},
                                 "shopping": {"type": "shopping", "score": 1},
                                 }, 422),
        ([48.084588, 7.280405], {"doesnotexist": {"type": "sightseeing", "score": 2},  # non-existing preference keys
                                 "nature": {"type": "nature", "score": 2},
                                 "shopping": {"type": "shopping", "score": 2},
                                 }, 422),
        ([48.084588, 7.280405], {"sightseeing": {"type": "sightseeing", "score": 3},  # non-existing preference types
                                 "nature": {"type": "doesntexisteither", "score": 3},
                                 "shopping": {"type": "shopping", "score": 3},
                                 }, 422),
        ([48.084588, 7.280405], {"sightseeing": {"type": "sightseeing", "score": -1},  # negative preference value
                                 "nature": {"type": "doesntexisteither", "score": 4},
                                 "shopping": {"type": "shopping", "score": 4},
                                 }, 422),
        ([48.084588, 7.280405], {"sightseeing": {"type": "sightseeing", "score": 10},  # too high preference value
                                 "nature": {"type": "doesntexisteither", "score": 4},
                                 "shopping": {"type": "shopping", "score": 4},
                                 }, 422),

        # Invalid cases: nonexistent coords
        ([91, 181], {"sightseeing": {"type": "nature", "score": 5},
        ([91, 181], {"sightseeing": {"type": "sightseeing", "score": 5},
                     "nature": {"type": "nature", "score": 5},
                     "shopping": {"type": "shopping", "score": 5},
                     }, 422),
        ([-91, 181], {"sightseeing": {"type": "nature", "score": 5},
        ([-91, 181], {"sightseeing": {"type": "sightseeing", "score": 5},
                      "nature": {"type": "nature", "score": 5},
                      "shopping": {"type": "shopping", "score": 5},
                      }, 422),
        ([91, -181], {"sightseeing": {"type": "nature", "score": 5},
        ([91, -181], {"sightseeing": {"type": "sightseeing", "score": 5},
                      "nature": {"type": "nature", "score": 5},
                      "shopping": {"type": "shopping", "score": 5},
                      }, 422),
        ([-91, -181], {"sightseeing": {"type": "nature", "score": 5},
        ([-91, -181], {"sightseeing": {"type": "sightseeing", "score": 5},
                       "nature": {"type": "nature", "score": 5},
                       "shopping": {"type": "shopping", "score": 5},
                       }, 422),
@@ -53,8 +73,8 @@ def test_input(invalid_client, start, preferences, status_code):   # pylint: dis
    """
    Test new trip creation with different sets of preferences and locations.
    """
    response = invalid_client.post(
        "/trip/new",
        "/get/landmarks",
        json={
            "preferences": preferences,
            "start": start
        }
@@ -1,345 +0,0 @@
"""Collection of tests to ensure correct implementation and track progress. """
import time
from fastapi.testclient import TestClient
import pytest

from .test_utils import load_trip_landmarks, log_trip_details
from ..main import app

@pytest.fixture(scope="module")
def client():
    """Client used to call the app."""
    return TestClient(app)


def test_turckheim(client, request):   # pylint: disable=redefined-outer-name
    """
    Test n°1 : Custom test in Turckheim to ensure small villages are also supported.

    Args:
        client:
        request:
    """
    start_time = time.time()  # Start timer
    duration_minutes = 20

    response = client.post(
        "/trip/new",
        json={
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 0},
                            "shopping": {"type": "shopping", "score": 0},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            # "start": [48.084588, 7.280405]
            # "start": [45.74445023349939, 4.8222687890538865]
            "start": [45.75156398104873, 4.827154464827647]
        }
    )
    result = response.json()
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])


    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200  # check for successful planning
    assert isinstance(landmarks, list)  # check that the return type is a list
    assert len(landmarks) > 2  # check that there is something to visit
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"
    # assert 2 != 3

def test_bellecour(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°2 : Custom test in Lyon centre to ensure proper decision making in crowded area.

    Args:
        client:
        request:
    """
    start_time = time.time()  # Start timer
    duration_minutes = 120


    response = client.post(
        "/trip/new",
        json={
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [45.7576485, 4.8330241]
        }
    )
    result = response.json()
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200  # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"

def test_cologne(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°3 : Custom test in Cologne to ensure proper decision making in crowded area.

    Args:
        client:
        request:
    """
    start_time = time.time()  # Start timer
    duration_minutes = 240

    response = client.post(
        "/trip/new",
        json={
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [50.942352665, 6.957777972392]
        }
    )
    result = response.json()
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200  # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"


def test_strasbourg(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°4 : Custom test in Strasbourg to ensure proper decision making in crowded area.

    Args:
        client:
        request:
    """
    start_time = time.time()  # Start timer
    duration_minutes = 180

    response = client.post(
        "/trip/new",
        json={
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [48.5846589226, 7.74078715721]
        }
    )
    result = response.json()
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200  # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"


def test_zurich(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°5 : Custom test in Zurich to ensure proper decision making in crowded area.

    Args:
        client:
        request:
    """
    start_time = time.time()  # Start timer
    duration_minutes = 180

    response = client.post(
        "/trip/new",
        json={
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [47.377884227, 8.5395114066]
        }
    )
    result = response.json()
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200  # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"


def test_paris(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°6 : Custom test in Paris (les Halles) centre to ensure proper decision making in crowded area.

    Args:
        client:
        request:
    """
    start_time = time.time()  # Start timer
    duration_minutes = 200

    response = client.post(
        "/trip/new",
        json={
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 0},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [48.85468881798671, 2.3423925755998374]
        }
    )
    result = response.json()
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200  # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"


def test_new_york(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°7 : Custom test in New York to ensure proper decision making in crowded area.

    Args:
        client:
        request:
    """
    start_time = time.time()  # Start timer
    duration_minutes = 600

    response = client.post(
        "/trip/new",
        json={
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 5},
                            "nature": {"type": "nature", "score": 5},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [40.72592726802, -73.9920434795]
        }
    )
    result = response.json()
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200  # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"


def test_shopping(client, request) :   # pylint: disable=redefined-outer-name
    """
    Test n°8 : Custom test in Lyon centre to ensure shopping clusters are found.

    Args:
        client:
        request:
    """
    start_time = time.time()  # Start timer
    duration_minutes = 240

    response = client.post(
        "/trip/new",
        json={
            "preferences": {"sightseeing": {"type": "sightseeing", "score": 0},
                            "nature": {"type": "nature", "score": 0},
                            "shopping": {"type": "shopping", "score": 5},
                            "max_time_minute": duration_minutes,
                            "detour_tolerance_minute": 0},
            "start": [45.7576485, 4.8330241]
        }
    )
    result = response.json()
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], duration_minutes)

    # for elem in landmarks :
    #     print(elem)

    # checks :
    assert response.status_code == 200  # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert duration_minutes*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {duration_minutes}"
    assert duration_minutes*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {duration_minutes}"
46
backend/src/tests/test_nearby.py
Normal file
@@ -0,0 +1,46 @@
"""Collection of tests to ensure correct implementation and track progress of the get_landmarks_nearby feature. """

from fastapi.testclient import TestClient
import pytest

from ..main import app


@pytest.fixture(scope="module")
def client():
    """Client used to call the app."""
    return TestClient(app)


@pytest.mark.parametrize(
    "location,status_code",
    [
        ([45.7576485, 4.8330241], 200),   # Lyon, France
        ([41.4020572, 2.1818985], 200),   # Barcelona, Spain
        ([59.3293, 18.0686], 200),        # Stockholm, Sweden
        ([43.6532, -79.3832], 200),       # Toronto, Canada
        ([38.7223, -9.1393], 200),        # Lisbon, Portugal
        ([6.5244, 3.3792], 200),          # Lagos, Nigeria
        ([17.3850, 78.4867], 200),        # Hyderabad, India
        ([30.0444, 31.2357], 200),        # Cairo, Egypt
        ([50.8503, 4.3517], 200),         # Brussels, Belgium
        ([35.2271, -80.8431], 200),       # Charlotte, USA
        ([10.4806, -66.9036], 200),       # Caracas, Venezuela
        ([9.51074, -13.71118], 200),      # Conakry, Guinea
    ]
)
def test_nearby(client, location, status_code):   # pylint: disable=redefined-outer-name
    """
    Test n°1 : Verify that nearby landmarks are returned for a range of valid locations.

    Args:
        client:
        location:
        status_code:
    """
    response = client.post(f"/get-nearby/landmarks/{location[0]}/{location[1]}")
    suggestions = response.json()

    # checks :
    assert response.status_code == status_code  # check for successful fetch
    assert isinstance(suggestions, list)  # check that the return type is a list
    assert len(suggestions) > 0
@@ -3,7 +3,7 @@

from fastapi.testclient import TestClient
import pytest

from ..structs.landmark import Toilets
from ..structs.toilets import Toilets
from ..main import app


@@ -18,7 +18,7 @@ def client():
    [
        ({}, None, 422),  # Invalid case: no location at all.
        ([443], None, 422),  # Invalid cases: invalid location.
        ([443, 433], None, 422),  # Invalid cases: invalid location.
    ]
)
def test_invalid_input(client, location, radius, status_code):   # pylint: disable=redefined-outer-name
@@ -30,7 +30,7 @@ def test_invalid_input(client, location, radius, status_code):   # pylint: disa
        request:
    """
    response = client.post(
        "/toilets/new",
        "/get/toilets",
        params={
            "location": location,
            "radius": radius
@@ -58,7 +58,7 @@ def test_no_toilets(client, location, status_code):   # pylint: disable=redefin
        request:
    """
    response = client.post(
        "/toilets/new",
        "/get/toilets",
        params={
            "location": location
        }
@@ -87,7 +87,7 @@ def test_toilets(client, location, status_code):   # pylint: disable=redefined-
        request:
    """
    response = client.post(
        "/toilets/new",
        "/get/toilets",
        params={
            "location": location,
            "radius" : 600
81
backend/src/tests/test_trip_generation.py
Normal file
@@ -0,0 +1,81 @@
"""Collection of tests to ensure correct implementation and track progress."""
import time
from fastapi.testclient import TestClient
import pytest

from .test_utils import load_trip_landmarks, log_trip_details
from ..structs.preferences import Preferences, Preference
from ..main import app


@pytest.fixture(scope="module")
def client():
    """Client used to call the app."""
    return TestClient(app)


@pytest.mark.parametrize(
    "sightseeing, shopping, nature, max_time_minute, start_coords, end_coords",
    [
        # Edge cases
        (0, 0, 5, 240, [45.7576485, 4.8330241], None),  # Lyon, Bellecour - test nature only

        # Realistic
        (5, 0, 0, 20, [48.0845881, 7.2804050], None),  # Turckheim
        (5, 5, 5, 120, [45.7576485, 4.8330241], None),  # Lyon, Bellecour
        (5, 2, 5, 240, [50.9423526, 6.9577780], None),  # Cologne, centre
        (3, 5, 0, 180, [48.5846589226, 7.74078715721], None),  # Strasbourg, centre
        (2, 4, 5, 180, [47.377884227, 8.5395114066], None),  # Zurich, centre
        (5, 0, 5, 200, [48.85468881798671, 2.3423925755998374], None),  # Paris, centre
        (5, 5, 5, 600, [40.72592726802, -73.9920434795], None),  # New York, Lower Manhattan
    ]
)
def test_trip(client, request, sightseeing, shopping, nature, max_time_minute, start_coords, end_coords):

    start_time = time.time()  # Start timer

    prefs = Preferences(
        sightseeing=Preference(type='sightseeing', score=sightseeing),
        shopping=Preference(type='shopping', score=shopping),
        nature=Preference(type='nature', score=nature),
        max_time_minute=max_time_minute,
        detour_tolerance_minute=0,
    )
    start = start_coords
    end = end_coords

    # Step 1: request the list of landmarks in the vicinity of the starting point
    response = client.post(
        "/get/landmarks",
        json={
            "preferences": prefs.model_dump(),
            "start": start_coords,
            "end": end_coords,
        }
    )
    landmarks = response.json()

    # Step 2: feed the landmarks to the optimizer to compute the trip
    response = client.post(
        "/optimize/trip",
        json={
            "preferences": prefs.model_dump(),
            "landmarks": landmarks,
            "start": start,
            "end": end,
        }
    )
    result = response.json()
    landmarks = load_trip_landmarks(client, result['first_landmark_uuid'])

    # Get computation time
    comp_time = time.time() - start_time

    # Add details to report
    log_trip_details(request, landmarks, result['total_time'], prefs.max_time_minute)

    # checks :
    assert response.status_code == 200  # check for successful planning
    assert comp_time < 30, f"Computation time exceeded 30 seconds: {comp_time:.2f} seconds"
    assert prefs.max_time_minute*0.8 < result['total_time'], f"Trip too short: {result['total_time']} instead of {prefs.max_time_minute}"
    assert prefs.max_time_minute*1.2 > result['total_time'], f"Trip too long: {result['total_time']} instead of {prefs.max_time_minute}"
@@ -1,10 +1,12 @@
"""Helper methods for testing."""
import time
import logging
from functools import wraps
from fastapi import HTTPException
from pydantic import ValidationError

from ..structs.landmark import Landmark
from ..cache import client as cache_client
from ..structs.landmark import Landmark
from ..structs.preferences import Preferences, Preference


def landmarks_to_osmid(landmarks: list[Landmark]) -> list[int] :
@@ -39,7 +41,7 @@ def fetch_landmark(landmark_uuid: str):
    try:
        landmark = cache_client.get(f'landmark_{landmark_uuid}')
        if not landmark :
            logger.warning(f'Cache miss for landmark UUID: {landmark_uuid}')
            logger.error(f'Cache miss for landmark UUID: {landmark_uuid}')
            raise HTTPException(status_code=404, detail=f'Landmark with UUID {landmark_uuid} not found in cache.')

    # Validate that the fetched data is a dictionary
@@ -92,3 +94,34 @@ def log_trip_details(request, landmarks: list[Landmark], duration: int, target_d
    request.node.trip_details = trip_string
    request.node.trip_duration = str(duration)  # result['total_time']
    request.node.target_duration = str(target_duration)




def trip_params(
    sightseeing: int,
    shopping: int,
    nature: int,
    max_time_minute: int,
    start_coords: tuple[float, float] = None,
    end_coords: tuple[float, float] = None,
):
    def decorator(test_func):
        @wraps(test_func)
        def wrapper(client, request):
            prefs = Preferences(
                sightseeing=Preference(type='sightseeing', score=sightseeing),
                shopping=Preference(type='shopping', score=shopping),
                nature=Preference(type='nature', score=nature),
                max_time_minute=max_time_minute,
                detour_tolerance_minute=0,
            )

            start = start_coords
            end = end_coords

            # Inject into test function
            return test_func(client, request, prefs, start, end)

        return wrapper
    return decorator
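The `trip_params` helper above is a parameter-injection decorator: it builds the `Preferences` object once and hands the test a ready-made (prefs, start, end) triple. A hypothetical usage sketch:

```python
# Hypothetical usage of the trip_params decorator defined above.
from .test_utils import trip_params

@trip_params(sightseeing=5, shopping=0, nature=0, max_time_minute=60,
             start_coords=(48.0845881, 7.2804050))
def test_quick_sightseeing(client, request, prefs, start, end):
    # The decorator builds `prefs` and passes the coordinates through,
    # so the test body only deals with the request itself.
    response = client.post("/get/landmarks",
                           json={"preferences": prefs.model_dump(), "start": list(start)})
    assert response.status_code == 200
```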
0
backend/src/toilets/__init__.py
Normal file
@@ -2,8 +2,8 @@
import logging

from ..overpass.overpass import Overpass, get_base_info
from ..structs.landmark import Toilets
from .utils import create_bbox
from ..structs.toilets import Toilets
from ..utils.bbox import create_bbox


# silence the overpass logger
@@ -65,11 +65,13 @@ class ToiletsManager:
        try:
            result = self.overpass.fetch_data_from_api(query_str=query)
        except Exception as e:
            self.logger.error(f"Error fetching landmarks: {e}")
            self.logger.error(f"Error fetching toilets: {e}")
            return None

        toilets_list = self.to_toilets(result)

        self.logger.debug(f'Found {len(toilets_list)} toilets around {self.location}')

        return toilets_list
43
backend/src/toilets/toilets_router.py
Normal file
@@ -0,0 +1,43 @@
"""API entry point for fetching toilet locations."""

from fastapi import HTTPException, APIRouter, Query

from .toilets_manager import ToiletsManager
from ..structs.toilets import Toilets


# Initialize the API router
router = APIRouter()


@router.post("/get/toilets")
def get_toilets(
    location: tuple[float, float] = Query(...),
    radius: int = 500
) -> list[Toilets] :
    """
    Endpoint to find toilets within a specified radius from a given location.

    This endpoint expects the `location` and `radius` as **query parameters**, not in the request body.

    Args:
        location (tuple[float, float]): The latitude and longitude of the location to search from.
        radius (int, optional): The radius (in meters) within which to search for toilets. Defaults to 500 meters.

    Returns:
        list[Toilets]: A list of Toilets objects that meet the criteria.
    """
    if location is None:
        raise HTTPException(status_code=406, detail="Coordinates not provided or invalid")
    if not (-90 <= location[0] <= 90 and -180 <= location[1] <= 180):
        raise HTTPException(status_code=422, detail="Start coordinates not in range")

    toilets_manager = ToiletsManager(location, radius)

    try :
        toilets_list = toilets_manager.generate_toilet_list()
    except KeyError as exc:
        raise HTTPException(status_code=404, detail="No toilets found") from exc

    return toilets_list
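Because the coordinates travel as query parameters rather than in a JSON body, calling the endpoint looks slightly unusual. A minimal sketch with FastAPI's TestClient (assuming `app` is the application instance that mounts this router, and that the tuple is parsed from a repeated query parameter):

```python
from fastapi.testclient import TestClient

from .main import app  # assumption: the FastAPI app that includes the router

client = TestClient(app)

# `location` is passed twice so it can be parsed into a (lat, lon) tuple;
# `radius` falls back to 500 m when omitted.
response = client.post("/get/toilets?location=48.8566&location=2.3522&radius=300")
assert response.status_code == 200
print(response.json())  # a list of serialized Toilets objects
```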
@@ -24,4 +24,4 @@ def create_bbox(coords: tuple[float, float], radius: int):
    lon_min = lon - d_lon * 180 / m.pi
    lon_max = lon + d_lon * 180 / m.pi

    return (lat_min, lon_min, lat_max, lon_max)
    return (lat_min, lon_min, lat_max, lon_max)
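The hunk above only shows the tail of `create_bbox`. For context, a sketch of what the full helper presumably computes, keeping the names visible in the diff; the earth-radius constant and the intermediate steps are assumptions, not the repository's actual code:

```python
import math as m

def create_bbox(coords: tuple[float, float], radius: int):
    """Return a (lat_min, lon_min, lat_max, lon_max) box around coords.

    Reconstructed sketch: converts a radius in meters into angular
    offsets and widens the longitude offset by the latitude's cosine.
    """
    lat, lon = coords
    earth_radius = 6378137  # meters (assumed value)

    d_lat = radius / earth_radius
    d_lon = radius / (earth_radius * m.cos(m.pi * lat / 180))

    lat_min = lat - d_lat * 180 / m.pi
    lat_max = lat + d_lat * 180 / m.pi
    lon_min = lon - d_lon * 180 / m.pi
    lon_max = lon + d_lon * 180 / m.pi

    return (lat_min, lon_min, lat_max, lon_max)
```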
123  backend/src/utils/description.py  Normal file
@@ -0,0 +1,123 @@
"""Add more information about the landmarks by writing a short description and keywords. """


def description_and_keywords(tags: dict):
    """
    Generates a description and a set of keywords for a given landmark based on its tags.

    Params:
        tags (dict): A dictionary containing metadata about the landmark, including its name,
                     importance, height, date of construction, and visitor information.

    Returns:
        description (str): A string description of the landmark.
        keywords (dict): A dictionary of keywords with fields such as 'importance', 'height',
                         'place_type', and 'date'.
    """
    # Extract relevant fields
    name = tags.get('name')
    importance = tags.get('importance', None)
    n_visitors = tags.get('tourism:visitors', None)
    height = tags.get('height')
    place_type = get_place_type(tags)
    date = get_date(tags)

    if place_type is None :
        return None, None

    # Start the description.
    if importance is None :
        if len(tags.keys()) < 5 :
            return None, None
        if len(tags.keys()) < 10 :
            description = f"{name} is a well known {place_type}."
        elif len(tags.keys()) < 17 :
            importance = 'national'
            description = f"{name} is a {place_type} of national importance."
        else :
            importance = 'international'
            description = f"{name} is an internationally famous {place_type}."
    else :
        description = f"{name} is a {place_type} of {importance} importance."

    if height is not None and date is not None :
        description += f" This {place_type} was constructed in {date} and is ca. {height} meters high."
    elif height is not None :
        description += f" This {place_type} stands ca. {height} meters tall."
    elif date is not None:
        description += f" It was constructed in {date}."

    # Format the visitor number
    if n_visitors is not None :
        n_visitors = int(n_visitors)
        if n_visitors < 1000000 :
            description += f" It welcomes {int(n_visitors/1000)} thousand visitors every year."
        else :
            description += f" It welcomes {round(n_visitors/1000000, 1)} million visitors every year."

    # Set the keywords.
    keywords = {"importance": importance,
                "height": height,
                "place_type": place_type,
                "date": date}

    return description, keywords


def get_place_type(tags):
    """
    Determines the type of the place based on available tags such as 'amenity', 'building',
    'historic', and 'leisure'. The priority order is: 'historic' > 'building' (if not generic) >
    'amenity' > 'leisure'.

    Params:
        tags (dict): A dictionary containing metadata about the place.

    Returns:
        place_type (str): The determined type of the place, or None if no relevant type is found.
    """
    amenity = tags.get('amenity', None)
    building = tags.get('building', None)
    historic = tags.get('historic', None)
    leisure = tags.get('leisure')

    if historic and historic != "yes":
        return historic
    if building and building not in ["yes", "civic", "government", "apartments", "residential", "commercial", "industrial", "retail", "religious", "public", "service"]:
        return building
    if amenity:
        return amenity
    if leisure:
        return leisure

    return None


def get_date(tags):
    """
    Extracts the most relevant date from the available tags, prioritizing 'construction_date',
    'start_date', 'year_of_construction', and 'opening_date' in that order.

    Params:
        tags (dict): A dictionary containing metadata about the place.

    Returns:
        date (str): The most relevant date found, or None if no date is available.
    """
    construction_date = tags.get('construction_date', None)
    opening_date = tags.get('opening_date', None)
    start_date = tags.get('start_date', None)
    year_of_construction = tags.get('year_of_construction', None)

    # Prioritize based on availability
    if construction_date:
        return construction_date
    if start_date:
        return start_date
    if year_of_construction:
        return year_of_construction
    if opening_date:
        return opening_date

    return None
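To illustrate the control flow above, a small hypothetical invocation with a hand-made, OSM-style tag dictionary (all values invented for the demo):

```python
tags = {
    'name': 'Eiffel Tower',
    'importance': 'international',
    'tourism:visitors': '6000000',
    'height': '330',
    'building': 'tower',   # picked up by get_place_type
    'start_date': '1889',  # picked up by get_date
}

description, keywords = description_and_keywords(tags)
print(description)
# "Eiffel Tower is a tower of international importance. This tower was
#  constructed in 1889 and is ca. 330 meters high. It welcomes 6.0 million
#  visitors every year."
print(keywords)
# {'importance': 'international', 'height': '330', 'place_type': 'tower', 'date': '1889'}
```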
@@ -1,17 +0,0 @@
"""Helper function to return only the major landmarks from a large list."""
from ..structs.landmark import Landmark

def take_most_important(landmarks: list[Landmark], n_important) -> list[Landmark]:
    """
    Given a list of landmarks, return the n_important most important landmarks
    Args:
        landmarks: list[Landmark] - list of landmarks
        n_important: int - number of most important landmarks to return
    Returns:
        list[Landmark] - list of the n_important most important landmarks
    """

    # Sort landmarks by attractiveness (descending)
    sorted_landmarks = sorted(landmarks, key=lambda x: x.attractiveness, reverse=True)

    return sorted_landmarks[:n_important]
1330  backend/uv.lock  generated  Normal file

17  default.nix  Normal file
@@ -0,0 +1,17 @@
{ pkgs ? import <nixpkgs> { config.android_sdk.accept_license = true; config.allowUnfree = true; } }:

pkgs.mkShell {
  buildInputs = [
    pkgs.flutter
    #pkgs.android-tools # for adb
    #pkgs.openjdk # required for Android builds
  ];

  # Set up Android SDK paths if needed
  shellHook = ''
    export ANDROID_SDK_ROOT=${pkgs.androidsdk}/libexec/android-sdk
    export PATH=$PATH:${pkgs.androidsdk}/libexec/android-sdk/platform-tools
    echo "Flutter dev environment ready. 'adb' and 'flutter' are available."
  '';
}
@@ -1,59 +0,0 @@
on:
  push:
    tags:
      - 'v*'

jobs:
  build:
    runs-on: macos-latest
    steps:
      - uses: actions/checkout@v4

      - name: Set up ruby env
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: 3.2.1
          bundler-cache: true

      - name: Setup java for android build
        uses: actions/setup-java@v4
        with:
          java-version: '17'
          distribution: 'zulu'

      - name: Setup android SDK
        uses: android-actions/setup-android@v3

      - name: Install Flutter
        uses: subosito/flutter-action@v2
        with:
          channel: stable
          flutter-version: 3.22.0
          cache: true

      - name: Infer version number from git tag
        id: version
        env:
          REF_NAME: ${{ github.ref_name }}
        run:
          # remove the 'v' prefix from the tag name
          echo "BUILD_NAME=${REF_NAME//v}" >> $GITHUB_ENV

      - name: Put selected secrets into files
        run: |
          echo "${{ secrets.ANDROID_SECRET_PROPERTIES_BASE64 }}" | base64 -d > secrets.properties
          echo "${{ secrets.ANDROID_GOOGLE_PLAY_JSON_BASE64 }}" | base64 -d > google-key.json
          echo "${{ secrets.ANDROID_KEYSTORE_BASE64 }}" | base64 -d > release.keystore
        working-directory: android

      - name: Install fastlane
        run: bundle install
        working-directory: android

      - name: Run fastlane lane
        run: bundle exec fastlane deploy_release
        working-directory: android
        env:
          BUILD_NUMBER: ${{ github.run_number }}
          # BUILD_NAME is implicitly available
          GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
64  frontend/.github/workflows/build_app_ios.yaml  vendored
@@ -1,64 +0,0 @@
on:
  push:
    tags:
      - 'v*'

jobs:
  build:
    runs-on: macos-latest
    env:
      # $BUNDLE_GEMFILE must be set at the job level, so it is set for all steps
      BUNDLE_GEMFILE: ${{ github.workspace }}/ios/Gemfile
    steps:
      - uses: actions/checkout@v4

      - name: Set up ruby env
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: 3.3
          bundler-cache: true # runs 'bundle install' and caches installed gems automatically

      - name: Install Flutter
        uses: subosito/flutter-action@v2
        with:
          channel: stable
          flutter-version: 3.22.0
          cache: true

      - name: Infer version number from git tag
        id: version
        env:
          REF_NAME: ${{ github.ref_name }}
        run:
          # remove the 'v' prefix from the tag name
          echo "BUILD_NAME=${REF_NAME//v}" >> $GITHUB_ENV

      - name: Setup SSH key for match git repo
        # and mark the host as known
        run: |
          echo $MATCH_REPO_SSH_KEY | base64 --decode > ~/.ssh/id_rsa
          chmod 600 ~/.ssh/id_rsa
          ssh-keyscan -p 2222 git.kluster.moll.re > ~/.ssh/known_hosts
        env:
          MATCH_REPO_SSH_KEY: ${{ secrets.IOS_MATCH_REPO_SSH_KEY_BASE64 }}

      - name: Install dependencies and clean up
        run: |
          flutter pub get
          bundle exec pod install
          flutter clean
          bundle exec pod cache clean --all
        working-directory: ios

      - name: Run fastlane lane
        run: bundle exec fastlane deploy_release --verbose
        working-directory: ios
        env:
          BUILD_NUMBER: ${{ github.run_number }}
          # BUILD_NAME is implicitly available
          GOOGLE_MAPS_API_KEY: ${{ secrets.GOOGLE_MAPS_API_KEY }}
          IOS_ASC_KEY_ID: ${{ secrets.IOS_ASC_KEY_ID }}
          IOS_ASC_ISSUER_ID: ${{ secrets.IOS_ASC_ISSUER_ID }}
          IOS_ASC_KEY: ${{ secrets.IOS_ASC_KEY }}
          MATCH_PASSWORD: ${{ secrets.IOS_MATCH_PASSWORD }}
          IOS_GOOGLE_MAPS_API_KEY: ${{ secrets.IOS_GOOGLE_MAPS_API_KEY }}
@@ -4,7 +4,7 @@
# This file should be version controlled and should not be manually edited.

version:
  revision: "54e66469a933b60ddf175f858f82eaeb97e48c8d"
  revision: "09de023485e95e6d1225c2baa44b8feb85e0d45f"
  channel: "stable"

project_type: app
@@ -13,26 +13,11 @@ project_type: app
migration:
  platforms:
    - platform: root
      create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
      base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
    - platform: android
      create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
      base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
    - platform: ios
      create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
      base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
      create_revision: 09de023485e95e6d1225c2baa44b8feb85e0d45f
      base_revision: 09de023485e95e6d1225c2baa44b8feb85e0d45f
    - platform: linux
      create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
      base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
    - platform: macos
      create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
      base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
    - platform: web
      create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
      base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
    - platform: windows
      create_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
      base_revision: 54e66469a933b60ddf175f858f82eaeb97e48c8d
      create_revision: 09de023485e95e6d1225c2baa44b8feb85e0d45f
      base_revision: 09de023485e95e6d1225c2baa44b8feb85e0d45f

# User provided section
@@ -17,15 +17,9 @@ flutter pub get
```

## Development
### ...
### Icons and logos
The application uses a custom launcher icon and splash screen. These are managed platform-independently using the `flutter_launcher_icons` package.

To update the icons, change the `flutter_launcher_icons.yaml` configuration file. Especially the `image_path` is relevant. Then run
```bash
dart run flutter_launcher_icons
```
### TODO

## Deployment and metadata
### Deploying a new version
To deploy a new version of the application to the official app stores, a dedicated CI step is required. It listens for new tags. To create a new tag, position yourself on the main branch and run
```bash
@@ -34,6 +28,18 @@ git push origin v<name>
```
We adhere to the [Semantic Versioning](https://semver.org/) standard, so the tag should be of the form `v0.1.8`, for example.

### Icons and logos
The application uses a custom launcher icon and splash screen. These are managed platform-independently using the `flutter_launcher_icons` package.

To update the icons, change the `flutter_launcher_icons.yaml` configuration file. Especially the `image_path` is relevant. Then run
```bash
dart run flutter_launcher_icons
```
### Other metadata
Fastlane provides mechanisms to update the metadata of the application. This includes the name, description, screenshots, etc. The metadata is stored in the `fastlane/metadata` directory of both the `android` and the `ios` version of the application. The two versions have different structures, but **they should be kept in sync**. For more information, see the [fastlane documentation](https://docs.fastlane.tools/):
- https://docs.fastlane.tools/actions/deliver/
- https://docs.fastlane.tools/actions/supply/


## Fastlane - in depth
The application is deployed to the Google Play Store and the Apple App Store using fastlane: [https://docs.fastlane.tools/](https://docs.fastlane.tools/)
@@ -46,16 +52,17 @@ bundle exec fastlane <lane>
```
This is reused in the CI/CD pipeline to automate the deployment process.

Secrets used by fastlane are stored in HashiCorp Vault and are fetched by the CI/CD pipeline. See below.

## Secrets
These are mostly used by the CI/CD pipeline to deploy the application. The main usage for GitHub Actions is documented under [https://github.com/hashicorp/vault-action](https://github.com/hashicorp/vault-action).
These are used by the CI/CD pipeline to deploy the application.

**Platform-specific secrets** are used by the CI/CD pipeline to deploy to the respective app stores.
- `GOOGLE_MAPS_API_KEY` is used to authenticate with the Google Maps API and is scoped to the android platform
- `ANDROID_GOOGLE_MAPS_API_KEY` is used to authenticate with the Google Maps API and is scoped to the android platform
- `ANDROID_KEYSTORE` is used to sign the android apk
- `ANDROID_GOOGLE_KEY` is used to authenticate with the Google Play Store API
- `IOS_GOOGLE_MAPS_API_KEY` is used to authenticate with the Google Maps API and is scoped to the ios platform
- `IOS_GOOGLE_...`
- `IOS_GOOGLE_...`
- `IOS_GOOGLE_...`
- `IOS_ASC_ISSUER_ID` is used to authenticate with the App Store Connect API
- `IOS_ASC_KEY` as well
- `IOS_ASC_KEY_ID` as well
- `IOS_MATCH_PASSWORD` is used by fastlane match to download the certificates
- `IOS_MATCH_REPO_SSH_KEY_BASE64` is used to authenticate with the git repository where the certificates are stored
@@ -1,220 +0,0 @@
GEM
  remote: https://rubygems.org/
  specs:
    CFPropertyList (3.0.7)
      base64
      nkf
      rexml
    addressable (2.8.7)
      public_suffix (>= 2.0.2, < 7.0)
    artifactory (3.0.17)
    atomos (0.1.3)
    aws-eventstream (1.3.0)
    aws-partitions (1.970.0)
    aws-sdk-core (3.202.2)
      aws-eventstream (~> 1, >= 1.3.0)
      aws-partitions (~> 1, >= 1.651.0)
      aws-sigv4 (~> 1.9)
      jmespath (~> 1, >= 1.6.1)
    aws-sdk-kms (1.88.0)
      aws-sdk-core (~> 3, >= 3.201.0)
      aws-sigv4 (~> 1.5)
    aws-sdk-s3 (1.159.0)
      aws-sdk-core (~> 3, >= 3.201.0)
      aws-sdk-kms (~> 1)
      aws-sigv4 (~> 1.5)
    aws-sigv4 (1.9.1)
      aws-eventstream (~> 1, >= 1.0.2)
    babosa (1.0.4)
    base64 (0.2.0)
    claide (1.1.0)
    colored (1.2)
    colored2 (3.1.2)
    commander (4.6.0)
      highline (~> 2.0.0)
    declarative (0.0.20)
    digest-crc (0.6.5)
      rake (>= 12.0.0, < 14.0.0)
    domain_name (0.6.20240107)
    dotenv (2.8.1)
    emoji_regex (3.2.3)
    excon (0.111.0)
    faraday (1.10.3)
      faraday-em_http (~> 1.0)
      faraday-em_synchrony (~> 1.0)
      faraday-excon (~> 1.1)
      faraday-httpclient (~> 1.0)
      faraday-multipart (~> 1.0)
      faraday-net_http (~> 1.0)
      faraday-net_http_persistent (~> 1.0)
      faraday-patron (~> 1.0)
      faraday-rack (~> 1.0)
      faraday-retry (~> 1.0)
      ruby2_keywords (>= 0.0.4)
    faraday-cookie_jar (0.0.7)
      faraday (>= 0.8.0)
      http-cookie (~> 1.0.0)
    faraday-em_http (1.0.0)
    faraday-em_synchrony (1.0.0)
    faraday-excon (1.1.0)
    faraday-httpclient (1.0.1)
    faraday-multipart (1.0.4)
      multipart-post (~> 2)
    faraday-net_http (1.0.2)
    faraday-net_http_persistent (1.2.0)
    faraday-patron (1.0.0)
    faraday-rack (1.0.0)
    faraday-retry (1.0.3)
    faraday_middleware (1.2.0)
      faraday (~> 1.0)
    fastimage (2.3.1)
    fastlane (2.222.0)
      CFPropertyList (>= 2.3, < 4.0.0)
      addressable (>= 2.8, < 3.0.0)
      artifactory (~> 3.0)
      aws-sdk-s3 (~> 1.0)
      babosa (>= 1.0.3, < 2.0.0)
      bundler (>= 1.12.0, < 3.0.0)
      colored (~> 1.2)
      commander (~> 4.6)
      dotenv (>= 2.1.1, < 3.0.0)
      emoji_regex (>= 0.1, < 4.0)
      excon (>= 0.71.0, < 1.0.0)
      faraday (~> 1.0)
      faraday-cookie_jar (~> 0.0.6)
      faraday_middleware (~> 1.0)
      fastimage (>= 2.1.0, < 3.0.0)
      gh_inspector (>= 1.1.2, < 2.0.0)
      google-apis-androidpublisher_v3 (~> 0.3)
      google-apis-playcustomapp_v1 (~> 0.1)
      google-cloud-env (>= 1.6.0, < 2.0.0)
      google-cloud-storage (~> 1.31)
      highline (~> 2.0)
      http-cookie (~> 1.0.5)
      json (< 3.0.0)
      jwt (>= 2.1.0, < 3)
      mini_magick (>= 4.9.4, < 5.0.0)
      multipart-post (>= 2.0.0, < 3.0.0)
      naturally (~> 2.2)
      optparse (>= 0.1.1, < 1.0.0)
      plist (>= 3.1.0, < 4.0.0)
      rubyzip (>= 2.0.0, < 3.0.0)
      security (= 0.1.5)
      simctl (~> 1.6.3)
      terminal-notifier (>= 2.0.0, < 3.0.0)
      terminal-table (~> 3)
      tty-screen (>= 0.6.3, < 1.0.0)
      tty-spinner (>= 0.8.0, < 1.0.0)
      word_wrap (~> 1.0.0)
      xcodeproj (>= 1.13.0, < 2.0.0)
      xcpretty (~> 0.3.0)
      xcpretty-travis-formatter (>= 0.0.3, < 2.0.0)
    gh_inspector (1.1.3)
    google-apis-androidpublisher_v3 (0.54.0)
      google-apis-core (>= 0.11.0, < 2.a)
    google-apis-core (0.11.3)
      addressable (~> 2.5, >= 2.5.1)
      googleauth (>= 0.16.2, < 2.a)
      httpclient (>= 2.8.1, < 3.a)
      mini_mime (~> 1.0)
      representable (~> 3.0)
      retriable (>= 2.0, < 4.a)
      rexml
    google-apis-iamcredentials_v1 (0.17.0)
      google-apis-core (>= 0.11.0, < 2.a)
    google-apis-playcustomapp_v1 (0.13.0)
      google-apis-core (>= 0.11.0, < 2.a)
    google-apis-storage_v1 (0.31.0)
      google-apis-core (>= 0.11.0, < 2.a)
    google-cloud-core (1.7.1)
      google-cloud-env (>= 1.0, < 3.a)
      google-cloud-errors (~> 1.0)
    google-cloud-env (1.6.0)
      faraday (>= 0.17.3, < 3.0)
    google-cloud-errors (1.4.0)
    google-cloud-storage (1.47.0)
      addressable (~> 2.8)
      digest-crc (~> 0.4)
      google-apis-iamcredentials_v1 (~> 0.1)
      google-apis-storage_v1 (~> 0.31.0)
      google-cloud-core (~> 1.6)
      googleauth (>= 0.16.2, < 2.a)
      mini_mime (~> 1.0)
    googleauth (1.8.1)
      faraday (>= 0.17.3, < 3.a)
      jwt (>= 1.4, < 3.0)
      multi_json (~> 1.11)
      os (>= 0.9, < 2.0)
      signet (>= 0.16, < 2.a)
    highline (2.0.3)
    http-cookie (1.0.7)
      domain_name (~> 0.5)
    httpclient (2.8.3)
    jmespath (1.6.2)
    json (2.7.2)
    jwt (2.8.2)
      base64
    mini_magick (4.13.2)
    mini_mime (1.1.5)
    multi_json (1.15.0)
    multipart-post (2.4.1)
    nanaimo (0.3.0)
    naturally (2.2.1)
    nkf (0.2.0)
    optparse (0.5.0)
    os (1.1.4)
    plist (3.7.1)
    public_suffix (6.0.1)
    rake (13.2.1)
    representable (3.2.0)
      declarative (< 0.1.0)
      trailblazer-option (>= 0.1.1, < 0.2.0)
      uber (< 0.2.0)
    retriable (3.1.2)
    rexml (3.3.6)
      strscan
    rouge (2.0.7)
    ruby2_keywords (0.0.5)
    rubyzip (2.3.2)
    security (0.1.5)
    signet (0.19.0)
      addressable (~> 2.8)
      faraday (>= 0.17.5, < 3.a)
      jwt (>= 1.5, < 3.0)
      multi_json (~> 1.10)
    simctl (1.6.10)
      CFPropertyList
      naturally
    strscan (3.1.0)
    terminal-notifier (2.0.0)
    terminal-table (3.0.2)
      unicode-display_width (>= 1.1.1, < 3)
    trailblazer-option (0.1.2)
    tty-cursor (0.7.1)
    tty-screen (0.8.2)
    tty-spinner (0.9.3)
      tty-cursor (~> 0.7)
    uber (0.1.0)
    unicode-display_width (2.5.0)
    word_wrap (1.0.0)
    xcodeproj (1.25.0)
      CFPropertyList (>= 2.3.3, < 4.0)
      atomos (~> 0.1.3)
      claide (>= 1.0.2, < 2.0)
      colored2 (~> 3.1)
      nanaimo (~> 0.3.0)
      rexml (>= 3.3.2, < 4.0)
    xcpretty (0.3.0)
      rouge (~> 2.0.7)
    xcpretty-travis-formatter (1.0.1)
      xcpretty (~> 0.2, >= 0.0.7)

PLATFORMS
  ruby
  x86_64-linux

DEPENDENCIES
  fastlane

BUNDLED WITH
   2.5.18
@@ -77,7 +77,7 @@ android {
        versionCode flutterVersionCode.toInteger()
        versionName flutterVersionName
        // Placeholders of keys that are replaced by the build system.
        manifestPlaceholders += ['MAPS_API_KEY': System.getenv('GOOGLE_MAPS_API_KEY')]
        manifestPlaceholders += ['MAPS_API_KEY': System.getenv('ANDROID_GOOGLE_MAPS_API_KEY')]

    }
@@ -3,7 +3,7 @@ default_platform(:android)
platform :android do

  desc "Deploy a new version to closed testing (play store)"
  lane :deploy_testing do
  lane :deploy_beta do
    build_name = ENV["BUILD_NAME"]
    build_number = ENV["BUILD_NUMBER"]

@@ -17,7 +17,8 @@ platform :android do
    )

    upload_to_play_store(
      track: 'alpha',
      track: 'beta',
      # upload aab files instead
      skip_upload_apk: true,
      skip_upload_changelogs: true,
      aab: "../build/app/outputs/bundle/release/app-release.aab",
@@ -47,6 +48,7 @@ platform :android do
      skip_upload_apk: true,
      skip_upload_changelogs: true,
      aab: "../build/app/outputs/bundle/release/app-release.aab",
      metadata_path: "fastlane/metadata",
    )
  end
end
@@ -1,7 +0,0 @@
AnyWay - plan city trips your way

AnyWay is a mobile application that helps users plan city trips. The app allows users to specify their preferences and constraints, and then generates a personalized itinerary for them. The planning follows some guiding principles:
- **Personalization**: The user's preferences should be reflected in the choice of destinations.
- **Efficiency**: The itinerary should be optimized for the user's constraints.
- **Flexibility**: We acknowledge that tourism is a dynamic activity, and that users may want to change their plans on the go.
- **Discoverability**: Tourism is an inherently exploratory activity. Once a rough itinerary is generated, detours and spontaneous decisions should be encouraged.
[Image diff: 6 screenshots removed (106 KiB, 1.3 MiB, 637 KiB, 573 KiB, 175 KiB, 360 KiB)]
@@ -0,0 +1,7 @@
AnyWay is an application that helps you plan truly unique city trips. When planning a new trip, you can specify your preferences and constraints and anyway generates a personalized itinerary just for you.

Anyway follows these core principles:
- Personalization: Trips should match your interests - not just the most popular destinations.
- Efficiency: Don't just walk in circles! Anyway creates the most efficient route for you.
- Flexibility: Vacations are the time to be spontaneous. Anyway lets you update your plans on the go.
- Discoverability: Tourism means exploration. Anyway encourages you to take detours and make spontaneous decisions.
[Image diff: 4 screenshots added (3.0 MiB, 4.1 MiB, 1.1 MiB, 1.1 MiB)]
0  frontend/android/fastlane/metadata/en-US/video.txt  Normal file
@@ -1,288 +0,0 @@
GEM
  remote: https://rubygems.org/
  specs:
    CFPropertyList (3.0.7)
      base64
      nkf
      rexml
    activesupport (5.2.8.1)
      concurrent-ruby (~> 1.0, >= 1.0.2)
      i18n (>= 0.7, < 2)
      minitest (~> 5.1)
      tzinfo (~> 1.1)
    addressable (2.8.7)
      public_suffix (>= 2.0.2, < 7.0)
    algoliasearch (1.27.5)
      httpclient (~> 2.8, >= 2.8.3)
      json (>= 1.5.1)
    artifactory (3.0.17)
    atomos (0.1.3)
    aws-eventstream (1.3.0)
    aws-partitions (1.1004.0)
    aws-sdk-core (3.212.0)
      aws-eventstream (~> 1, >= 1.3.0)
      aws-partitions (~> 1, >= 1.992.0)
      aws-sigv4 (~> 1.9)
      jmespath (~> 1, >= 1.6.1)
    aws-sdk-kms (1.95.0)
      aws-sdk-core (~> 3, >= 3.210.0)
      aws-sigv4 (~> 1.5)
    aws-sdk-s3 (1.170.1)
      aws-sdk-core (~> 3, >= 3.210.0)
      aws-sdk-kms (~> 1)
      aws-sigv4 (~> 1.5)
    aws-sigv4 (1.10.1)
      aws-eventstream (~> 1, >= 1.0.2)
    babosa (1.0.4)
    base64 (0.2.0)
    claide (1.1.0)
    cocoapods (1.10.2)
      addressable (~> 2.6)
      claide (>= 1.0.2, < 2.0)
      cocoapods-core (= 1.10.2)
      cocoapods-deintegrate (>= 1.0.3, < 2.0)
      cocoapods-downloader (>= 1.4.0, < 2.0)
      cocoapods-plugins (>= 1.0.0, < 2.0)
      cocoapods-search (>= 1.0.0, < 2.0)
      cocoapods-trunk (>= 1.4.0, < 2.0)
      cocoapods-try (>= 1.1.0, < 2.0)
      colored2 (~> 3.1)
      escape (~> 0.0.4)
      fourflusher (>= 2.3.0, < 3.0)
      gh_inspector (~> 1.0)
      molinillo (~> 0.6.6)
      nap (~> 1.0)
      ruby-macho (~> 1.4)
      xcodeproj (>= 1.19.0, < 2.0)
    cocoapods-core (1.10.2)
      activesupport (> 5.0, < 6)
      addressable (~> 2.6)
      algoliasearch (~> 1.0)
      concurrent-ruby (~> 1.1)
      fuzzy_match (~> 2.0.4)
      nap (~> 1.0)
      netrc (~> 0.11)
      public_suffix
      typhoeus (~> 1.0)
    cocoapods-deintegrate (1.0.5)
    cocoapods-downloader (1.6.3)
    cocoapods-plugins (1.0.0)
      nap
    cocoapods-search (1.0.1)
    cocoapods-trunk (1.6.0)
      nap (>= 0.8, < 2.0)
      netrc (~> 0.11)
    cocoapods-try (1.2.0)
    colored (1.2)
    colored2 (3.1.2)
    commander (4.6.0)
      highline (~> 2.0.0)
    concurrent-ruby (1.3.4)
    declarative (0.0.20)
    digest-crc (0.6.5)
      rake (>= 12.0.0, < 14.0.0)
    domain_name (0.6.20240107)
    dotenv (2.8.1)
    emoji_regex (3.2.3)
    escape (0.0.4)
    ethon (0.16.0)
      ffi (>= 1.15.0)
    excon (0.112.0)
    faraday (1.10.4)
      faraday-em_http (~> 1.0)
      faraday-em_synchrony (~> 1.0)
      faraday-excon (~> 1.1)
      faraday-httpclient (~> 1.0)
      faraday-multipart (~> 1.0)
      faraday-net_http (~> 1.0)
      faraday-net_http_persistent (~> 1.0)
      faraday-patron (~> 1.0)
      faraday-rack (~> 1.0)
      faraday-retry (~> 1.0)
      ruby2_keywords (>= 0.0.4)
    faraday-cookie_jar (0.0.7)
      faraday (>= 0.8.0)
      http-cookie (~> 1.0.0)
    faraday-em_http (1.0.0)
    faraday-em_synchrony (1.0.0)
    faraday-excon (1.1.0)
    faraday-httpclient (1.0.1)
    faraday-multipart (1.0.4)
      multipart-post (~> 2)
    faraday-net_http (1.0.2)
    faraday-net_http_persistent (1.2.0)
    faraday-patron (1.0.0)
    faraday-rack (1.0.0)
    faraday-retry (1.0.3)
    faraday_middleware (1.2.1)
      faraday (~> 1.0)
    fastimage (2.3.1)
    fastlane (2.225.0)
      CFPropertyList (>= 2.3, < 4.0.0)
      addressable (>= 2.8, < 3.0.0)
      artifactory (~> 3.0)
      aws-sdk-s3 (~> 1.0)
      babosa (>= 1.0.3, < 2.0.0)
      bundler (>= 1.12.0, < 3.0.0)
      colored (~> 1.2)
      commander (~> 4.6)
      dotenv (>= 2.1.1, < 3.0.0)
      emoji_regex (>= 0.1, < 4.0)
      excon (>= 0.71.0, < 1.0.0)
      faraday (~> 1.0)
      faraday-cookie_jar (~> 0.0.6)
      faraday_middleware (~> 1.0)
      fastimage (>= 2.1.0, < 3.0.0)
      fastlane-sirp (>= 1.0.0)
      gh_inspector (>= 1.1.2, < 2.0.0)
      google-apis-androidpublisher_v3 (~> 0.3)
      google-apis-playcustomapp_v1 (~> 0.1)
      google-cloud-env (>= 1.6.0, < 2.0.0)
      google-cloud-storage (~> 1.31)
      highline (~> 2.0)
      http-cookie (~> 1.0.5)
      json (< 3.0.0)
      jwt (>= 2.1.0, < 3)
      mini_magick (>= 4.9.4, < 5.0.0)
      multipart-post (>= 2.0.0, < 3.0.0)
      naturally (~> 2.2)
      optparse (>= 0.1.1, < 1.0.0)
      plist (>= 3.1.0, < 4.0.0)
      rubyzip (>= 2.0.0, < 3.0.0)
      security (= 0.1.5)
      simctl (~> 1.6.3)
      terminal-notifier (>= 2.0.0, < 3.0.0)
      terminal-table (~> 3)
      tty-screen (>= 0.6.3, < 1.0.0)
      tty-spinner (>= 0.8.0, < 1.0.0)
      word_wrap (~> 1.0.0)
      xcodeproj (>= 1.13.0, < 2.0.0)
      xcpretty (~> 0.3.0)
      xcpretty-travis-formatter (>= 0.0.3, < 2.0.0)
    fastlane-sirp (1.0.0)
      sysrandom (~> 1.0)
    ffi (1.17.0)
    ffi (1.17.0-x86_64-darwin)
    fourflusher (2.3.1)
    fuzzy_match (2.0.4)
    gh_inspector (1.1.3)
    google-apis-androidpublisher_v3 (0.54.0)
      google-apis-core (>= 0.11.0, < 2.a)
    google-apis-core (0.11.3)
      addressable (~> 2.5, >= 2.5.1)
      googleauth (>= 0.16.2, < 2.a)
      httpclient (>= 2.8.1, < 3.a)
      mini_mime (~> 1.0)
      representable (~> 3.0)
      retriable (>= 2.0, < 4.a)
      rexml
    google-apis-iamcredentials_v1 (0.17.0)
      google-apis-core (>= 0.11.0, < 2.a)
    google-apis-playcustomapp_v1 (0.13.0)
      google-apis-core (>= 0.11.0, < 2.a)
    google-apis-storage_v1 (0.31.0)
      google-apis-core (>= 0.11.0, < 2.a)
    google-cloud-core (1.7.1)
      google-cloud-env (>= 1.0, < 3.a)
      google-cloud-errors (~> 1.0)
    google-cloud-env (1.6.0)
      faraday (>= 0.17.3, < 3.0)
    google-cloud-errors (1.4.0)
    google-cloud-storage (1.47.0)
      addressable (~> 2.8)
      digest-crc (~> 0.4)
      google-apis-iamcredentials_v1 (~> 0.1)
      google-apis-storage_v1 (~> 0.31.0)
      google-cloud-core (~> 1.6)
      googleauth (>= 0.16.2, < 2.a)
      mini_mime (~> 1.0)
    googleauth (1.8.1)
      faraday (>= 0.17.3, < 3.a)
      jwt (>= 1.4, < 3.0)
      multi_json (~> 1.11)
      os (>= 0.9, < 2.0)
      signet (>= 0.16, < 2.a)
    highline (2.0.3)
    http-cookie (1.0.7)
      domain_name (~> 0.5)
    httpclient (2.8.3)
    i18n (1.14.6)
      concurrent-ruby (~> 1.0)
    jmespath (1.6.2)
    json (2.8.1)
    jwt (2.9.3)
      base64
    mini_magick (4.13.2)
    mini_mime (1.1.5)
    minitest (5.25.1)
    molinillo (0.6.6)
    multi_json (1.15.0)
    multipart-post (2.4.1)
    nanaimo (0.4.0)
    nap (1.1.0)
    naturally (2.2.1)
    netrc (0.11.0)
    nkf (0.2.0)
    optparse (0.6.0)
    os (1.1.4)
    plist (3.7.1)
    public_suffix (6.0.1)
    rake (13.2.1)
    representable (3.2.0)
      declarative (< 0.1.0)
      trailblazer-option (>= 0.1.1, < 0.2.0)
      uber (< 0.2.0)
    retriable (3.1.2)
    rexml (3.3.9)
    rouge (2.0.7)
    ruby-macho (1.4.0)
    ruby2_keywords (0.0.5)
    rubyzip (2.3.2)
    security (0.1.5)
    signet (0.19.0)
      addressable (~> 2.8)
      faraday (>= 0.17.5, < 3.a)
      jwt (>= 1.5, < 3.0)
      multi_json (~> 1.10)
    simctl (1.6.10)
      CFPropertyList
      naturally
    sysrandom (1.0.5)
    terminal-notifier (2.0.0)
    terminal-table (3.0.2)
      unicode-display_width (>= 1.1.1, < 3)
    thread_safe (0.3.6)
    trailblazer-option (0.1.2)
    tty-cursor (0.7.1)
    tty-screen (0.8.2)
    tty-spinner (0.9.3)
      tty-cursor (~> 0.7)
    typhoeus (1.4.1)
      ethon (>= 0.9.0)
    tzinfo (1.2.11)
      thread_safe (~> 0.1)
    uber (0.1.0)
    unicode-display_width (2.6.0)
    word_wrap (1.0.0)
    xcodeproj (1.27.0)
      CFPropertyList (>= 2.3.3, < 4.0)
      atomos (~> 0.1.3)
      claide (>= 1.0.2, < 2.0)
      colored2 (~> 3.1)
      nanaimo (~> 0.4.0)
      rexml (>= 3.3.6, < 4.0)
    xcpretty (0.3.0)
      rouge (~> 2.0.7)
    xcpretty-travis-formatter (1.0.1)
      xcpretty (~> 0.2, >= 0.0.7)

PLATFORMS
  ruby
  x86_64-darwin-23

DEPENDENCIES
  cocoapods
  fastlane

BUNDLED WITH
   2.5.23
@@ -1,4 +1,4 @@
app_identifier("info.anydev.testing") # The bundle identifier of your app
app_identifier("info.anydev.anyway") # The bundle identifier of your app
apple_id("me@moll.re") # Your Apple Developer Portal username

itc_team_id("127439860") # App Store Connect Team ID
3  frontend/ios/fastlane/Deliverfile  Normal file
@@ -0,0 +1,3 @@
# The Deliverfile allows you to store various App Store Connect metadata
# For more information, check out the docs
# https://docs.fastlane.tools/actions/deliver/
@@ -15,7 +15,7 @@ platform :ios do


  desc "Deploy a new version to closed testing (testflight)"
  lane :deploy_testing do
  lane :deploy_beta do
    build_name = ENV["BUILD_NAME"]
    build_number = ENV["BUILD_NUMBER"]

@@ -28,12 +28,11 @@ platform :ios do
      readonly: true,
    )


    sh(
      "flutter",
      "build",
      "ipa",
      "--debug",
      "--release",
      "--build-name=#{build_name}",
      "--build-number=#{build_number}",
    )
@@ -64,15 +63,6 @@ platform :ios do
      readonly: true,
    )

    # replace secrets by real values, the stupid way
    sh(
      "sed",
      "-i",
      "",
      "s/IOS_GOOGLE_MAPS_API_KEY/#{ENV["IOS_GOOGLE_MAPS_API_KEY"]}/g",
      "../Runner/AppDelegate.swift"
    )

    sh(
      "flutter",
      "build",
@@ -87,12 +77,13 @@ platform :ios do
      skip_build_archive: true,
      archive_path: "../build/ios/archive/Runner.xcarchive"
    )

    upload_to_app_store(
      skip_screenshots: true,
      skip_metadata: true,
      precheck_include_in_app_purchases: false,

    upload_to_app_store(
      overwrite_screenshots: true,
      metadata_path: "fastlane/metadata",
      screenshots_path: "fastlane/screenshots",
      precheck_include_in_app_purchases: false,
      force: true, # Skip HTML report verification
      submit_for_review: true,
      automatic_release: true,
      # automatically release the app after review
1  frontend/ios/fastlane/metadata/copyright.txt  Normal file
@@ -0,0 +1 @@
2025 anydev
@@ -0,0 +1 @@

7  frontend/ios/fastlane/metadata/en-US/description.txt  Normal file
@@ -0,0 +1,7 @@
AnyWay is an application that helps you plan truly unique city trips. When planning a new trip, you can specify your preferences and constraints and anyway generates a personalized itinerary just for you.

Anyway follows these core principles:
- Personalization: Trips should match your interests - not just the most popular destinations.
- Efficiency: Don't just walk in circles! Anyway creates the most efficient route for you.
- Flexibility: Vacations are the time to be spontaneous. Anyway lets you update your plans on the go.
- Discoverability: Tourism means exploration. Anyway encourages you to take detours and make spontaneous decisions.
1  frontend/ios/fastlane/metadata/en-US/keywords.txt  Normal file
@@ -0,0 +1 @@
tourism, cities, travel, guide
1  frontend/ios/fastlane/metadata/en-US/marketing_url.txt  Normal file
@@ -0,0 +1 @@
https://anydev.info
1  frontend/ios/fastlane/metadata/en-US/name.txt  Normal file
@@ -0,0 +1 @@
Any.Way
1  frontend/ios/fastlane/metadata/en-US/privacy_url.txt  Normal file
@@ -0,0 +1 @@
https://anydev.info/privacy
@@ -0,0 +1 @@
AnyWay - plan city trips your way!
1  frontend/ios/fastlane/metadata/en-US/release_notes.txt  Normal file
@@ -0,0 +1 @@

1  frontend/ios/fastlane/metadata/en-US/subtitle.txt  Normal file
@@ -0,0 +1 @@
Plan city trips your way!
1  frontend/ios/fastlane/metadata/en-US/support_url.txt  Normal file
@@ -0,0 +1 @@

1  frontend/ios/fastlane/metadata/primary_category.txt  Normal file
@@ -0,0 +1 @@
TRAVEL
@@ -0,0 +1 @@

@@ -0,0 +1 @@

@@ -0,0 +1 @@
