diff --git a/.github/workflows/ecs-deploy.yml b/.github/workflows/ecs-deploy.yml
index 25e355e557..2cc050ebbe 100644
--- a/.github/workflows/ecs-deploy.yml
+++ b/.github/workflows/ecs-deploy.yml
@@ -9,13 +9,13 @@ env:
   REGISTRY: ghcr.io
   AWS_REGION: us-east-1
   ECS_CLUSTER: tasking-manager
-  ECS_SERVICE: tasking-manager-fastAPI
+  ECS_SERVICE: backend-fastAPI
   CONTAINER_NAME: backend
   IMAGE_NAME: hotosm/tasking-manager-backend # was ${{ github.repository }}
 
 jobs:
-  build-push-image:
-    name: Build Images
+  image-build-and-push:
+    name: Build Container Images
     runs-on: ubuntu-latest
     environment: production
 
@@ -24,14 +24,11 @@ jobs:
       packages: write
 
     outputs:
-      imageid: steps.build-push-image.imageid
+      image_tags: ${{ steps.meta.outputs.tags }}
 
     steps:
-      - name: Setup QEMU
-        uses: docker/setup-qemu-action@v3
-
-      - name: Setup Buildx
-        uses: docker/setup-buildx-action@v3
+      - uses: docker/setup-qemu-action@v3
+      - uses: docker/setup-buildx-action@v3
 
       - name: Log in to the Container registry
        uses: docker/login-action@v3
@@ -45,31 +42,34 @@ jobs:
         uses: docker/metadata-action@v5
         with:
           images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
-          tags: |
-            type=ref,event=branch
+            type=raw,value=fastapi
 
       - name: Build and push container image
         id: build-push-image
         uses: docker/build-push-action@v5
         with:
           context: "{{defaultContext}}"
+          target: prod
           platforms: linux/amd64,linux/arm64
           push: true
           tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
+
   deploy:
-    name: Deploy
+    name: Deploy to ECS
     runs-on: ubuntu-latest
     environment: production
+    needs: image-build-and-push
+
     permissions:
       contents: read
       id-token: write
 
     steps:
-      - name: Checkout
-        uses: actions/checkout@v4
+      - uses: actions/checkout@v4
 
       - name: Configure AWS credentials
         uses: aws-actions/configure-aws-credentials@v4
@@ -80,7 +80,7 @@ jobs:
 
       - name: Download task definition
         run: |
-          aws ecs describe-task-definition --task-definition tasking-manager --query taskDefinition > task-definition.json
+          aws ecs describe-task-definition --task-definition tasking-manager-fastAPI --query taskDefinition > task-definition.json
 
       - name: Task definition rendition
         id: task-def
@@ -88,10 +88,10 @@ jobs:
         with:
           task-definition: task-definition.json
           container-name: ${{ env.CONTAINER_NAME }}
-          image: ${{ needs.build-push-image.outputs.imageid }}
+          image: ${{ needs.image-build-and-push.outputs.image_tags }}
 
       - name: Deploy task definition
-        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
+        uses: aws-actions/amazon-ecs-deploy-task-definition@v2
         with:
           task-definition: ${{ steps.task-def.outputs.task-definition }}
           service: ${{ env.ECS_SERVICE }}
diff --git a/.gitignore b/.gitignore
index 556a135d51..5586c60072 100644
--- a/.gitignore
+++ b/.gitignore
@@ -68,3 +68,4 @@ htmlcov/
 
 # Docker & Docker compose
 docker-compose.override.yml
+postgres_data/
diff --git a/Dockerfile b/Dockerfile
index 9b3a7ede9e..6c54a8d301 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,13 +1,10 @@
 ARG DEBIAN_IMG_TAG=slim-bookworm
 ARG PYTHON_IMG_TAG=3.10
-
-
 FROM docker.io/python:${PYTHON_IMG_TAG}-${DEBIAN_IMG_TAG} as base
 
 ARG APP_VERSION=0.1.0
 ARG DOCKERFILE_VERSION=0.5.0
 ARG ALPINE_IMG_TAG
-ARG DEBIAN_IMG_TAG
 ARG PYTHON_IMG_TAG
 ARG MAINTAINER=sysadmin@hotosm.org
 LABEL org.hotosm.tasks.app-version="${APP_VERSION}" \
@@ -28,7 +25,7 @@ FROM base as extract-deps
 RUN pip install --no-cache-dir --upgrade pip
 WORKDIR /opt/python
 COPY pyproject.toml pdm.lock README.md /opt/python/
-RUN pip install --no-cache-dir pdm==2.18.1
+RUN pip install --no-cache-dir pdm==2.8.0
 RUN pdm export --prod --without-hashes > requirements.txt
@@ -37,13 +34,14 @@ FROM base as build
 RUN pip install --no-cache-dir --upgrade pip
 WORKDIR /opt/python
 # Setup backend build-time dependencies
-RUN apt-get update && apt-get install --no-install-recommends -y \
-    build-essential \
-    libffi-dev \
-    libgeos-dev \
-    postgresql-server-dev-15 \
-    python3-dev \
-    && rm -rf /var/lib/apt/lists/*
+RUN apt-get update && \
+    DEBIAN_FRONTEND=noninteractive \
+    apt-get -q install --no-install-recommends -y \
+    build-essential \
+    postgresql-server-dev-15 \
+    python3-dev \
+    libffi-dev \
+    libgeos-dev
 # Setup backend Python dependencies
 COPY --from=extract-deps \
     /opt/python/requirements.txt /opt/python/
@@ -60,15 +58,15 @@ ENV PYTHONDONTWRITEBYTECODE=1 \
     PYTHONUNBUFFERED=1 \
     PYTHONFAULTHANDLER=1 \
     PATH="/home/appuser/.local/bin:$PATH" \
-    PYTHONPATH="/usr/src/app:$PYTHONPATH" \
     PYTHON_LIB="/home/appuser/.local/lib/python$PYTHON_IMG_TAG/site-packages" \
     SSL_CERT_FILE=/etc/ssl/certs/ca-certificates.crt \
     REQUESTS_CA_BUNDLE=/etc/ssl/certs/ca-certificates.crt
 # Setup backend runtime dependencies
 RUN apt-get update && \
-    apt-get install --no-install-recommends -y \
-    libgeos3.11.1 postgresql-client proj-bin && \
-    rm -rf /var/lib/apt/lists/*
+    DEBIAN_FRONTEND=noninteractive \
+    apt-get -q install --no-install-recommends -y \
+    postgresql-client libgeos3.11.1 proj-bin curl && \
+    apt-get clean && rm -rf /var/lib/apt/lists/*
 COPY --from=build \
     /home/appuser/.local \
     /home/appuser/.local
@@ -83,25 +81,19 @@ COPY manage.py .
 
 FROM runtime as debug
 RUN pip install --user --no-warn-script-location \
-    --no-cache-dir debugpy==1.8.5
+    --no-cache-dir debugpy==1.8.1
 EXPOSE 5678/tcp
 CMD ["python", "-m", "debugpy", "--wait-for-client", "--listen", "0.0.0.0:5678", \
-    "-m", "gunicorn", "-c", "python:backend.gunicorn", "manage:application", \
+    "-m", "uvicorn", "backend.main:api", "--host", "0.0.0.0", "--port", "5000", \
     "--reload", "--log-level", "error"]
-
 FROM runtime as prod
 USER root
-RUN apt-get update && \
-    apt-get install -y curl && \
-    rm -rf /var/lib/apt/lists/*
 
 # Pre-compile packages to .pyc (init speed gains)
 RUN python -c "import compileall; compileall.compile_path(maxlevels=10, quiet=1)"
 RUN python -m compileall .
 EXPOSE 5000/tcp
 USER appuser:appuser
-# Default gunicorn worker count is 1
-# For prod the WEB_CONCURRENCY env var can be used to set this
-CMD ["gunicorn", "-c", "python:backend.gunicorn", "manage:application", \
-    "--log-level", "error"]
+CMD ["uvicorn", "backend.main:api", "--host", "0.0.0.0", "--port", "5000", \
+    "--log-level", "error","--reload"]
diff --git a/backend/__init__.py b/backend/__init__.py
index 4e032534ae..453ffd1b32 100644
--- a/backend/__init__.py
+++ b/backend/__init__.py
@@ -17,15 +17,15 @@
 import json
 from logging.handlers import RotatingFileHandler
 
-from flask import Flask, redirect, request
-from flask_cors import CORS
-from flask_migrate import Migrate
+# from flask import Flask, redirect, request
+# from flask_cors import CORS
+# from flask_migrate import Migrate
 from requests_oauthlib import OAuth2Session
-from flask_restful import Api
-from flask_sqlalchemy import SQLAlchemy
-from flask_mail import Mail
-from backend.config import EnvironmentConfig
+# from flask_restful import Api
+from fastapi_mail import FastMail, ConnectionConfig
+
+from backend.config import settings
 
 # Load error_messages.json and store it so that it is loaded only once at startup (Used in exceptions.py)
@@ -50,8 +50,8 @@ def sentry_init():
     )
 
     sentry_sdk.init(
-        dsn=EnvironmentConfig.SENTRY_BACKEND_DSN,
-        environment=EnvironmentConfig.ENVIRONMENT,
+        dsn=settings.SENTRY_BACKEND_DSN,
+        environment=settings.ENVIRONMENT,
         integrations=[FlaskIntegration()],
         traces_sample_rate=0.1,
         ignore_errors=[
@@ -66,40 +66,53 @@ def format_url(endpoint):
     parts = endpoint.strip("/")
-    return "/api/{}/{}/".format(EnvironmentConfig.API_VERSION, parts)
-
-
-db = SQLAlchemy()
-migrate = Migrate()
+    return "/api/{}/{}/".format(settings.API_VERSION, parts)
+
+
+# db = sqlalchemy
+# migrate = Migrate()
+
+# Define the email configuration
+conf = ConnectionConfig(
+    MAIL_USERNAME=settings.MAIL_USERNAME,
+    MAIL_PASSWORD=settings.MAIL_PASSWORD,
+    MAIL_FROM=settings.MAIL_DEFAULT_SENDER,
+    MAIL_PORT=settings.MAIL_PORT,
+    MAIL_SERVER=settings.MAIL_SERVER,
+    MAIL_FROM_NAME=settings.ORG_NAME,
+    MAIL_SSL_TLS=False,
+    MAIL_STARTTLS=True,
+    VALIDATE_CERTS=True,
+)
 
-mail = Mail()
+mail = FastMail(conf)
 
 osm = OAuth2Session(
-    client_id=EnvironmentConfig.OAUTH_CLIENT_ID,
-    scope=EnvironmentConfig.OAUTH_SCOPE,
-    redirect_uri=EnvironmentConfig.OAUTH_REDIRECT_URI,
+    client_id=settings.OAUTH_CLIENT_ID,
+    scope=settings.OAUTH_SCOPE,
+    redirect_uri=settings.OAUTH_REDIRECT_URI,
 )
 
 # Import all models so that they are registered with SQLAlchemy
 from backend.models.postgis import *  # noqa
 
 
-def create_app(env="backend.config.EnvironmentConfig"):
+def create_app(env="backend.config.settings"):
     """
     Bootstrap function to initialise the Flask app and config
     :return: Initialised Flask app
     """
     # If SENTRY_BACKEND_DSN is configured, init sentry_sdk tracking
-    if EnvironmentConfig.SENTRY_BACKEND_DSN:
+    if settings.SENTRY_BACKEND_DSN:
         sentry_init()
 
     app = Flask(__name__, template_folder="services/messaging/templates/")
     # Load configuration options from environment
-    # Set env to TestEnvironmentConfig if TM_ENVIRONMENT is test
+    # Set env to Testsettings if TM_ENVIRONMENT is test
     if os.getenv("TM_ENVIRONMENT") == "test":
-        env = "backend.config.TestEnvironmentConfig"
+        env = "backend.config.Testsettings"
     app.config.from_object(env)
     # Enable logging to files
     initialise_logger(app)
@@ -154,7 +167,7 @@ def index_redirect():
     add_api_endpoints(app)
 
     # Enables CORS on all API routes, meaning API is callable from anywhere
-    CORS(app)
+    #
CORS(app) # Add basic oauth setup app.secret_key = app.config[ @@ -193,837 +206,3 @@ def initialise_counters(app): with app.app_context(): StatsService.get_homepage_stats() - - -def add_api_endpoints(app): - """ - Define the routes the API exposes using Flask-Restful. - """ - app.logger.debug("Adding routes to API endpoints") - api = Api(app) - - # Projects API import - from backend.api.projects.resources import ( - ProjectsRestAPI, - ProjectsAllAPI, - ProjectsQueriesBboxAPI, - ProjectsQueriesOwnerAPI, - ProjectsQueriesTouchedAPI, - ProjectsQueriesSummaryAPI, - ProjectsQueriesNoGeometriesAPI, - ProjectsQueriesNoTasksAPI, - ProjectsQueriesAoiAPI, - ProjectsQueriesPriorityAreasAPI, - ProjectsQueriesFeaturedAPI, - ProjectQueriesSimilarProjectsAPI, - ProjectQueriesActiveProjectsAPI, - ) - from backend.api.projects.activities import ( - ProjectsActivitiesAPI, - ProjectsLastActivitiesAPI, - ) - from backend.api.projects.contributions import ( - ProjectsContributionsAPI, - ProjectsContributionsQueriesDayAPI, - ) - from backend.api.projects.statistics import ( - ProjectsStatisticsAPI, - ProjectsStatisticsQueriesUsernameAPI, - ProjectsStatisticsQueriesPopularAPI, - ) - from backend.api.projects.teams import ProjectsTeamsAPI - from backend.api.projects.campaigns import ProjectsCampaignsAPI - from backend.api.projects.actions import ( - ProjectsActionsTransferAPI, - ProjectsActionsMessageContributorsAPI, - ProjectsActionsFeatureAPI, - ProjectsActionsUnFeatureAPI, - ProjectsActionsSetInterestsAPI, - ProjectActionsIntersectingTilesAPI, - ) - - from backend.api.projects.favorites import ProjectsFavoritesAPI - from backend.api.projects.partnerships import ( - ProjectPartnershipsRestApi, - PartnersByProjectAPI, - ) - - # Partner statistics API - from backend.api.partners.statistics import ( - GroupPartnerStatisticsAPI, - FilteredPartnerStatisticsAPI, - ) - - # Tasks API import - from backend.api.tasks.resources import ( - TasksRestAPI, - TasksQueriesJsonAPI, - TasksQueriesXmlAPI, - TasksQueriesGpxAPI, - TasksQueriesAoiAPI, - TasksQueriesMappedAPI, - TasksQueriesOwnInvalidatedAPI, - ) - from backend.api.tasks.actions import ( - TasksActionsMappingLockAPI, - TasksActionsMappingStopAPI, - TasksActionsMappingUnlockAPI, - TasksActionsMappingUndoAPI, - TasksActionsValidationLockAPI, - TasksActionsValidationStopAPI, - TasksActionsValidationUnlockAPI, - TasksActionsMapAllAPI, - TasksActionsValidateAllAPI, - TasksActionsInvalidateAllAPI, - TasksActionsResetBadImageryAllAPI, - TasksActionsResetAllAPI, - TasksActionsSplitAPI, - TasksActionsExtendAPI, - TasksActionsReverUserTaskstAPI, - ) - from backend.api.tasks.statistics import ( - TasksStatisticsAPI, - ) - - # Comments API impor - from backend.api.comments.resources import ( - CommentsProjectsRestAPI, - CommentsProjectsAllAPI, - CommentsTasksRestAPI, - ) - - # Annotations API import - from backend.api.annotations.resources import AnnotationsRestAPI - - # Issues API import - from backend.api.issues.resources import IssuesRestAPI, IssuesAllAPI - - # Interests API import - from backend.api.interests.resources import InterestsRestAPI, InterestsAllAPI - - # Licenses API import - from backend.api.licenses.resources import LicensesRestAPI, LicensesAllAPI - from backend.api.licenses.actions import LicensesActionsAcceptAPI - - # Campaigns API endpoint - from backend.api.campaigns.resources import CampaignsRestAPI, CampaignsAllAPI - - # Partners API import - from backend.api.partners.resources import ( - PartnerRestAPI, - PartnersAllRestAPI, - PartnerPermalinkRestAPI, - ) 
- - # Organisations API endpoint - from backend.api.organisations.resources import ( - OrganisationsStatsAPI, - OrganisationsRestAPI, - OrganisationsBySlugRestAPI, - OrganisationsAllAPI, - ) - from backend.api.organisations.campaigns import OrganisationsCampaignsAPI - - # Countries API endpoint - from backend.api.countries.resources import CountriesRestAPI - - # Teams API endpoint - from backend.api.teams.resources import TeamsRestAPI, TeamsAllAPI - from backend.api.teams.actions import ( - TeamsActionsJoinAPI, - TeamsActionsAddAPI, - TeamsActionsLeaveAPI, - TeamsActionsMessageMembersAPI, - ) - - # Notifications API endpoint - from backend.api.notifications.resources import ( - NotificationsRestAPI, - NotificationsAllAPI, - NotificationsQueriesCountUnreadAPI, - NotificationsQueriesPostUnreadAPI, - ) - from backend.api.notifications.actions import ( - NotificationsActionsDeleteMultipleAPI, - NotificationsActionsDeleteAllAPI, - NotificationsActionsMarkAsReadAllAPI, - NotificationsActionsMarkAsReadMultipleAPI, - ) - - # Users API endpoint - from backend.api.users.resources import ( - UsersRestAPI, - UsersAllAPI, - UsersQueriesUsernameAPI, - UsersQueriesUsernameFilterAPI, - UsersQueriesOwnLockedAPI, - UsersQueriesOwnLockedDetailsAPI, - UsersQueriesFavoritesAPI, - UsersQueriesInterestsAPI, - UsersRecommendedProjectsAPI, - ) - from backend.api.users.tasks import UsersTasksAPI - from backend.api.users.actions import ( - UsersActionsSetUsersAPI, - UsersActionsSetLevelAPI, - UsersActionsSetRoleAPI, - UsersActionsSetExpertModeAPI, - UsersActionsVerifyEmailAPI, - UsersActionsRegisterEmailAPI, - UsersActionsSetInterestsAPI, - ) - from backend.api.users.openstreetmap import UsersOpenStreetMapAPI - from backend.api.users.statistics import ( - UsersStatisticsAPI, - UsersStatisticsInterestsAPI, - UsersStatisticsAllAPI, - OhsomeProxyAPI, - ) - - # System API endpoint - from backend.api.system.general import ( - SystemDocsAPI, - SystemHeartbeatAPI, - SystemLanguagesAPI, - SystemContactAdminRestAPI, - SystemReleaseAPI, - ) - from backend.api.system.banner import SystemBannerAPI - from backend.api.system.statistics import SystemStatisticsAPI - from backend.api.system.authentication import ( - SystemAuthenticationEmailAPI, - SystemAuthenticationLoginAPI, - SystemAuthenticationCallbackAPI, - ) - from backend.api.system.applications import SystemApplicationsRestAPI - from backend.api.system.image_upload import SystemImageUploadRestAPI - - # Projects REST endpoint - api.add_resource(ProjectsAllAPI, format_url("projects/"), methods=["GET"]) - api.add_resource( - ProjectsRestAPI, - format_url("projects/"), - endpoint="create_project", - methods=["POST"], - ) - api.add_resource( - ProjectsRestAPI, - format_url("projects//"), - methods=["GET", "PATCH", "DELETE"], - ) - - # Projects queries endoints (TODO: Refactor them into the REST endpoints) - api.add_resource(ProjectsQueriesBboxAPI, format_url("projects/queries/bbox/")) - api.add_resource( - ProjectsQueriesOwnerAPI, format_url("projects/queries/myself/owner/") - ) - api.add_resource( - ProjectsQueriesTouchedAPI, - format_url("projects/queries//touched/"), - ) - api.add_resource( - ProjectsQueriesSummaryAPI, - format_url("projects//queries/summary/"), - ) - api.add_resource( - ProjectsQueriesNoGeometriesAPI, - format_url("projects//queries/nogeometries/"), - ) - api.add_resource( - ProjectsQueriesNoTasksAPI, - format_url("projects//queries/notasks/"), - ) - api.add_resource( - ProjectsQueriesAoiAPI, format_url("projects//queries/aoi/") - ) - api.add_resource( - 
ProjectsQueriesPriorityAreasAPI, - format_url("projects//queries/priority-areas/"), - ) - api.add_resource( - ProjectsQueriesFeaturedAPI, format_url("projects/queries/featured/") - ) - api.add_resource( - ProjectQueriesSimilarProjectsAPI, - format_url("projects/queries//similar-projects/"), - ) - api.add_resource( - ProjectQueriesActiveProjectsAPI, - format_url("projects/queries/active/"), - ) - - # Projects' addtional resources - api.add_resource( - ProjectsActivitiesAPI, format_url("projects//activities/") - ) - api.add_resource( - ProjectsLastActivitiesAPI, - format_url("projects//activities/latest/"), - ) - api.add_resource( - ProjectsContributionsAPI, format_url("projects//contributions/") - ) - api.add_resource( - ProjectsContributionsQueriesDayAPI, - format_url("projects//contributions/queries/day/"), - ) - api.add_resource( - ProjectsStatisticsAPI, format_url("projects//statistics/") - ) - - api.add_resource( - ProjectsStatisticsQueriesUsernameAPI, - format_url("projects//statistics/queries//"), - ) - - api.add_resource( - ProjectsStatisticsQueriesPopularAPI, format_url("projects/queries/popular/") - ) - - api.add_resource( - ProjectPartnershipsRestApi, - format_url("projects/partnerships//"), - methods=["GET", "PATCH", "DELETE"], - ) - - api.add_resource( - ProjectPartnershipsRestApi, - format_url("projects/partnerships/"), - endpoint="create_partnership", - methods=["POST"], - ) - - api.add_resource( - PartnersByProjectAPI, - format_url("/projects//partners"), - methods=["GET"], - ) - - api.add_resource( - ProjectsTeamsAPI, - format_url("projects//teams/"), - endpoint="get_all_project_teams", - methods=["GET"], - ) - api.add_resource( - ProjectsTeamsAPI, - format_url("projects//teams//"), - methods=["POST", "DELETE", "PATCH"], - ) - api.add_resource( - ProjectsCampaignsAPI, - format_url("projects//campaigns/"), - endpoint="get_all_project_campaigns", - methods=["GET"], - ) - api.add_resource( - ProjectsCampaignsAPI, - format_url("projects//campaigns//"), - endpoint="assign_remove_campaign_to_project", - methods=["POST", "DELETE"], - ) - - # Projects actions endoints - api.add_resource( - ProjectsActionsMessageContributorsAPI, - format_url("projects//actions/message-contributors/"), - ) - api.add_resource( - ProjectsActionsTransferAPI, - format_url("projects//actions/transfer-ownership/"), - ) - api.add_resource( - ProjectsActionsFeatureAPI, - format_url("projects//actions/feature/"), - ) - api.add_resource( - ProjectsActionsUnFeatureAPI, - format_url("projects//actions/remove-feature/"), - methods=["POST"], - ) - - api.add_resource( - ProjectsFavoritesAPI, - format_url("projects//favorite/"), - methods=["GET", "POST", "DELETE"], - ) - - api.add_resource( - ProjectsActionsSetInterestsAPI, - format_url("projects//actions/set-interests/"), - methods=["POST"], - ) - - api.add_resource( - ProjectActionsIntersectingTilesAPI, - format_url("projects/actions/intersecting-tiles/"), - methods=["POST"], - ) - - api.add_resource( - UsersActionsSetInterestsAPI, - format_url("users/me/actions/set-interests/"), - endpoint="create_user_interest", - methods=["POST"], - ) - - api.add_resource( - UsersStatisticsInterestsAPI, - format_url("users//statistics/interests/"), - methods=["GET"], - ) - - api.add_resource( - InterestsAllAPI, - format_url("interests/"), - endpoint="create_interest", - methods=["POST", "GET"], - ) - api.add_resource( - InterestsRestAPI, - format_url("interests//"), - methods=["GET", "PATCH", "DELETE"], - ) - - # Partners REST endoints - api.add_resource( - PartnersAllRestAPI, 
- format_url("partners/"), - methods=["GET", "POST"], - ) - api.add_resource( - PartnerRestAPI, - format_url("partners//"), - methods=["GET", "DELETE", "PUT"], - ) - api.add_resource( - GroupPartnerStatisticsAPI, - format_url("/partners//general-statistics"), - methods=["GET"], - ) - api.add_resource( - FilteredPartnerStatisticsAPI, - format_url("/partners//filtered-statistics"), - methods=["GET"], - ) - api.add_resource( - PartnerPermalinkRestAPI, - format_url("partners//"), - methods=["GET"], - ) - - # Tasks REST endpoint - api.add_resource( - TasksRestAPI, format_url("projects//tasks//") - ) - - # Tasks queries endoints (TODO: Refactor them into the REST endpoints) - api.add_resource( - TasksQueriesJsonAPI, - format_url("projects//tasks/"), - methods=["GET", "DELETE"], - ) - api.add_resource( - TasksQueriesXmlAPI, format_url("projects//tasks/queries/xml/") - ) - api.add_resource( - TasksQueriesGpxAPI, format_url("projects//tasks/queries/gpx/") - ) - api.add_resource( - TasksQueriesAoiAPI, format_url("projects//tasks/queries/aoi/") - ) - api.add_resource( - TasksQueriesMappedAPI, - format_url("projects//tasks/queries/mapped/"), - ) - api.add_resource( - TasksQueriesOwnInvalidatedAPI, - format_url("projects//tasks/queries/own/invalidated/"), - ) - - # Tasks actions endoints - api.add_resource( - TasksActionsMappingLockAPI, - format_url( - "projects//tasks/actions/lock-for-mapping//" - ), - ) - api.add_resource( - TasksActionsMappingStopAPI, - format_url( - "projects//tasks/actions/stop-mapping//" - ), - ) - api.add_resource( - TasksActionsMappingUnlockAPI, - format_url( - "projects//tasks/actions/unlock-after-mapping//" - ), - ) - api.add_resource( - TasksActionsMappingUndoAPI, - format_url( - "projects//tasks/actions/undo-last-action//" - ), - ) - api.add_resource( - TasksActionsExtendAPI, - format_url("projects//tasks/actions/extend/"), - ) - api.add_resource( - TasksActionsValidationLockAPI, - format_url("projects//tasks/actions/lock-for-validation/"), - ) - api.add_resource( - TasksActionsValidationStopAPI, - format_url("projects//tasks/actions/stop-validation/"), - ) - api.add_resource( - TasksActionsValidationUnlockAPI, - format_url("projects//tasks/actions/unlock-after-validation/"), - ) - api.add_resource( - TasksActionsMapAllAPI, - format_url("projects//tasks/actions/map-all/"), - ) - api.add_resource( - TasksActionsValidateAllAPI, - format_url("projects//tasks/actions/validate-all/"), - ) - api.add_resource( - TasksActionsInvalidateAllAPI, - format_url("projects//tasks/actions/invalidate-all/"), - ) - api.add_resource( - TasksActionsResetBadImageryAllAPI, - format_url("projects//tasks/actions/reset-all-badimagery/"), - ) - api.add_resource( - TasksActionsResetAllAPI, - format_url("projects//tasks/actions/reset-all/"), - ) - api.add_resource( - TasksActionsReverUserTaskstAPI, - format_url("projects//tasks/actions/reset-by-user/"), - ) - api.add_resource( - TasksActionsSplitAPI, - format_url("projects//tasks/actions/split//"), - ) - - # Tasks Statistics endpoint - api.add_resource( - TasksStatisticsAPI, - format_url("tasks/statistics/"), - methods=["GET"], - ) - - # Comments REST endoints - api.add_resource( - CommentsProjectsAllAPI, - format_url("projects//comments/"), - methods=["GET", "POST"], - ) - api.add_resource( - CommentsProjectsRestAPI, - format_url("projects//comments//"), - methods=["DELETE"], - ) - api.add_resource( - CommentsTasksRestAPI, - format_url("projects//comments/tasks//"), - methods=["GET", "POST"], - ) - - # Annotations REST endoints - api.add_resource( - 
AnnotationsRestAPI, - format_url("projects//annotations//"), - format_url("projects//annotations/"), - methods=["GET", "POST"], - ) - - # Issues REST endpoints - api.add_resource( - IssuesAllAPI, format_url("tasks/issues/categories/"), methods=["GET", "POST"] - ) - api.add_resource( - IssuesRestAPI, - format_url("tasks/issues/categories//"), - methods=["GET", "PATCH", "DELETE"], - ) - - # Licenses REST endpoints - api.add_resource(LicensesAllAPI, format_url("licenses/")) - api.add_resource( - LicensesRestAPI, - format_url("licenses/"), - endpoint="create_license", - methods=["POST"], - ) - api.add_resource( - LicensesRestAPI, - format_url("licenses//"), - methods=["GET", "PATCH", "DELETE"], - ) - - # Licenses actions endpoint - api.add_resource( - LicensesActionsAcceptAPI, - format_url("licenses//actions/accept-for-me/"), - ) - - # Countries REST endpoints - api.add_resource(CountriesRestAPI, format_url("countries/")) - - # Organisations REST endpoints - api.add_resource(OrganisationsAllAPI, format_url("organisations/")) - api.add_resource( - OrganisationsRestAPI, - format_url("organisations/"), - endpoint="create_organisation", - methods=["POST"], - ) - api.add_resource( - OrganisationsRestAPI, - format_url("organisations//"), - endpoint="get_organisation", - methods=["GET"], - ) - api.add_resource( - OrganisationsBySlugRestAPI, - format_url("organisations//"), - endpoint="get_organisation_by_slug", - methods=["GET"], - ) - api.add_resource( - OrganisationsRestAPI, - format_url("organisations//"), - methods=["PUT", "DELETE", "PATCH"], - ) - - # Organisations additional resources endpoints - api.add_resource( - OrganisationsStatsAPI, - format_url("organisations//statistics/"), - endpoint="get_organisation_stats", - methods=["GET"], - ) - api.add_resource( - OrganisationsCampaignsAPI, - format_url("organisations//campaigns/"), - endpoint="get_all_organisation_campaigns", - methods=["GET"], - ) - api.add_resource( - OrganisationsCampaignsAPI, - format_url("organisations//campaigns//"), - endpoint="assign_campaign_to_organisation", - methods=["POST", "DELETE"], - ) - - # Teams REST endpoints - api.add_resource(TeamsAllAPI, format_url("teams"), methods=["GET"]) - api.add_resource( - TeamsAllAPI, format_url("teams/"), endpoint="create_team", methods=["POST"] - ) - api.add_resource( - TeamsRestAPI, - format_url("teams//"), - methods=["GET", "DELETE", "PATCH"], - ) - - # Teams actions endpoints - api.add_resource( - TeamsActionsJoinAPI, - format_url("teams//actions/join/"), - methods=["POST", "PATCH"], - ) - api.add_resource( - TeamsActionsAddAPI, - format_url("teams//actions/add/"), - methods=["POST"], - ) - api.add_resource( - TeamsActionsLeaveAPI, - format_url("teams//actions/leave/"), - endpoint="leave_team", - methods=["POST"], - ) - api.add_resource( - TeamsActionsMessageMembersAPI, - format_url("teams//actions/message-members/"), - ) - - # Campaigns REST endpoints - api.add_resource( - CampaignsAllAPI, - format_url("campaigns/"), - endpoint="get_all_campaign", - methods=["GET"], - ) - api.add_resource( - CampaignsAllAPI, - format_url("campaigns/"), - endpoint="create_campaign", - methods=["POST"], - ) - api.add_resource( - CampaignsRestAPI, - format_url("campaigns//"), - methods=["GET", "PATCH", "DELETE"], - ) - - # Notifications REST endpoints - api.add_resource( - NotificationsRestAPI, format_url("notifications//") - ) - api.add_resource(NotificationsAllAPI, format_url("notifications/")) - api.add_resource( - NotificationsQueriesCountUnreadAPI, - 
format_url("notifications/queries/own/count-unread/"), - ) - api.add_resource( - NotificationsQueriesPostUnreadAPI, - format_url("notifications/queries/own/post-unread/"), - methods=["POST"], - ) - # Notifications Actions endpoints - api.add_resource( - NotificationsActionsDeleteMultipleAPI, - format_url("notifications/delete-multiple/"), - methods=["DELETE"], - ) - api.add_resource( - NotificationsActionsDeleteAllAPI, - format_url("notifications/delete-all/"), - methods=["DELETE"], - ) - api.add_resource( - NotificationsActionsMarkAsReadAllAPI, - format_url("notifications/mark-as-read-all/"), - methods=["POST"], - ) - api.add_resource( - NotificationsActionsMarkAsReadMultipleAPI, - format_url("notifications/mark-as-read-multiple/"), - methods=["POST"], - ) - - # Users REST endpoint - api.add_resource(UsersAllAPI, format_url("users/")) - api.add_resource(UsersRestAPI, format_url("users//")) - api.add_resource( - UsersQueriesUsernameFilterAPI, - format_url("users/queries/filter//"), - ) - api.add_resource( - UsersQueriesUsernameAPI, format_url("users/queries//") - ) - api.add_resource(UsersQueriesFavoritesAPI, format_url("users/queries/favorites/")) - api.add_resource( - UsersQueriesOwnLockedAPI, format_url("users/queries/tasks/locked/") - ) - api.add_resource( - UsersQueriesOwnLockedDetailsAPI, - format_url("users/queries/tasks/locked/details/"), - ) - - # Users Actions endpoint - api.add_resource(UsersActionsSetUsersAPI, format_url("users/me/actions/set-user/")) - - api.add_resource( - UsersActionsSetLevelAPI, - format_url("users//actions/set-level//"), - ) - api.add_resource( - UsersActionsSetRoleAPI, - format_url("users//actions/set-role//"), - ) - api.add_resource( - UsersActionsSetExpertModeAPI, - format_url( - "users//actions/set-expert-mode//" - ), - ) - - api.add_resource(UsersTasksAPI, format_url("users//tasks/")) - api.add_resource( - UsersActionsVerifyEmailAPI, format_url("users/me/actions/verify-email/") - ) - api.add_resource( - UsersActionsRegisterEmailAPI, format_url("users/actions/register/") - ) - - # Users Statistics endpoint - api.add_resource( - UsersStatisticsAPI, format_url("users//statistics/") - ) - - api.add_resource( - UsersStatisticsAllAPI, - format_url("users/statistics/"), - ) - api.add_resource( - OhsomeProxyAPI, format_url("users/statistics/ohsome/"), methods=["GET"] - ) - # User RecommendedProjects endpoint - api.add_resource( - UsersRecommendedProjectsAPI, - format_url("users//recommended-projects/"), - ) - - # User Interests endpoint - api.add_resource( - UsersQueriesInterestsAPI, - format_url("users//queries/interests/"), - ) - - # Users openstreetmap endpoint - api.add_resource( - UsersOpenStreetMapAPI, format_url("users//openstreetmap/") - ) - - # System endpoint - api.add_resource(SystemDocsAPI, format_url("system/docs/json/")) - api.add_resource( - SystemBannerAPI, format_url("system/banner/"), methods=["GET", "PATCH"] - ) - api.add_resource(SystemHeartbeatAPI, format_url("system/heartbeat/")) - api.add_resource(SystemLanguagesAPI, format_url("system/languages/")) - api.add_resource(SystemStatisticsAPI, format_url("system/statistics/")) - api.add_resource( - SystemAuthenticationLoginAPI, format_url("system/authentication/login/") - ) - api.add_resource( - SystemAuthenticationCallbackAPI, format_url("system/authentication/callback/") - ) - api.add_resource( - SystemAuthenticationEmailAPI, format_url("system/authentication/email/") - ) - api.add_resource( - SystemImageUploadRestAPI, - format_url("system/image-upload/"), - methods=["POST"], - ) - 
api.add_resource( - SystemApplicationsRestAPI, - format_url("system/authentication/applications/"), - methods=["POST", "GET"], - ) - api.add_resource( - SystemApplicationsRestAPI, - format_url("system/authentication/applications//"), - endpoint="delete_application", - methods=["DELETE"], - ) - api.add_resource( - SystemApplicationsRestAPI, - format_url("system/authentication/applications//"), - endpoint="check_application", - methods=["PATCH"], - ) - api.add_resource( - SystemContactAdminRestAPI, format_url("system/contact-admin/"), methods=["POST"] - ) - api.add_resource(SystemReleaseAPI, format_url("system/release/"), methods=["POST"]) diff --git a/backend/api/annotations/resources.py b/backend/api/annotations/resources.py index 9505b066c6..1a909771b1 100644 --- a/backend/api/annotations/resources.py +++ b/backend/api/annotations/resources.py @@ -1,153 +1,168 @@ -from flask_restful import Resource, current_app, request -from schematics.exceptions import DataError +from databases import Database +from fastapi import APIRouter, Depends, Request +from loguru import logger +from starlette.authentication import requires + +from backend.db import get_db +from backend.models.dtos.user_dto import AuthUserDTO from backend.models.postgis.task import Task from backend.models.postgis.task_annotation import TaskAnnotation from backend.services.project_service import ProjectService from backend.services.task_annotations_service import TaskAnnotationsService -from backend.services.application_service import ApplicationService +from backend.services.users.authentication_service import login_required +router = APIRouter( + prefix="/projects", + tags=["projects"], + responses={404: {"description": "Not found"}}, +) -class AnnotationsRestAPI(Resource): - def get(self, project_id: int, annotation_type: str = None): - """ - Get all task annotations for a project - --- - tags: - - annotations - produces: - - application/json - parameters: - - name: project_id - in: path - description: The ID of the project - required: true - type: integer - - name: annotation_type - in: path - description: The type of annotation to fetch - required: false - type: integer - responses: - 200: - description: Project Annotations - 404: - description: Project or annotations not found - 500: - description: Internal Server Error - """ - ProjectService.exists(project_id) - if annotation_type: - annotations = TaskAnnotation.get_task_annotations_by_project_id_type( - project_id, annotation_type - ) - else: - annotations = TaskAnnotation.get_task_annotations_by_project_id(project_id) - return annotations.to_primitive(), 200 - def post(self, project_id: int, annotation_type: str): - """ - Store new task annotations for tasks of a project - --- - tags: - - annotations - produces: - - application/json - parameters: - - in: header - name: Content-Type - description: Content type for post body - required: true - type: string - default: application/json - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - - name: annotation_type - in: path - description: Annotation type - required: true - type: string - - name: Application-Token - in: header - description: Application token registered with TM - required: true - type: string - - in: body - name: body - required: true - description: JSON object for creating draft project - schema: - projectId: - type: integer - required: true - annotationType: - type: string - required: true - tasks: - type: array - required: true - items: - schema: - taskId: - 
type: integer - required: true - annotationSource: - type: string - annotationMarkdown: - type: string - properties: - description: JSON object with properties - responses: - 200: - description: Project updated - 400: - description: Client Error - Invalid Request - 404: - description: Project or task not found - 500: - description: Internal Server Error - """ +@router.get("/{project_id}/annotations/{annotation_type}/") +@router.get("/{project_id}/annotations/") +async def get( + request: Request, + project_id: int, + annotation_type: str = None, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Get all task annotations for a project + --- + tags: + - annotations + produces: + - application/json + parameters: + - name: project_id + in: path + description: The ID of the project + required: true + type: integer + - name: annotation_type + in: path + description: The type of annotation to fetch + required: false + type: integer + responses: + 200: + description: Project Annotations + 404: + description: Project or annotations not found + 500: + description: Internal Server Error + """ + ProjectService.exists(project_id) + if annotation_type: + annotations = TaskAnnotation.get_task_annotations_by_project_id_type( + project_id, annotation_type + ) + else: + annotations = TaskAnnotation.get_task_annotations_by_project_id(project_id) + return annotations.model_dump(by_alias=True), 200 - if "Application-Token" in request.headers: - application_token = request.headers["Application-Token"] - is_valid_token = ApplicationService.check_token(application_token) # noqa - else: - current_app.logger.error("No token supplied") - return {"Error": "No token supplied", "SubCode": "NotFound"}, 500 - try: - annotations = request.get_json() or {} - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") +@router.post("/{project_id}/annotations/{annotation_type}/") +@requires("authenticated") +async def post(request: Request, project_id: int, annotation_type: str): + """ + Store new task annotations for tasks of a project + --- + tags: + - annotations + produces: + - application/json + parameters: + - in: header + name: Content-Type + description: Content type for post body + required: true + type: string + default: application/json + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + - name: annotation_type + in: path + description: Annotation type + required: true + type: string + - name: Application-Token + in: header + description: Application token registered with TM + required: true + type: string + - in: body + name: body + required: true + description: JSON object for creating draft project + schema: + projectId: + type: integer + required: true + annotationType: + type: string + required: true + tasks: + type: array + required: true + items: + schema: + taskId: + type: integer + required: true + annotationSource: + type: string + annotationMarkdown: + type: string + properties: + description: JSON object with properties + responses: + 200: + description: Project updated + 400: + description: Client Error - Invalid Request + 404: + description: Project or task not found + 500: + description: Internal Server Error + """ - ProjectService.exists(project_id) + # if "Application-Token" in request.headers: + # application_token = request.headers["Application-Token"] + # is_valid_token = ApplicationService.check_token(application_token) # noqa + # else: + # logger.error("No token supplied") + 
# return {"Error": "No token supplied", "SubCode": "NotFound"}, 500 - task_ids = [t["taskId"] for t in annotations["tasks"]] + try: + annotations = await request.json() or {} + except Exception as e: + logger.error(f"Error validating request: {str(e)}") - # check if task ids are valid - tasks = Task.get_tasks(project_id, task_ids) - tasks_ids_db = [t.id for t in tasks] - if len(task_ids) != len(tasks_ids_db): - return {"Error": "Invalid task id"}, 500 + ProjectService.exists(project_id) - for annotation in annotations["tasks"]: - try: - TaskAnnotationsService.add_or_update_annotation( - annotation, project_id, annotation_type - ) - except DataError as e: - current_app.logger.error(f"Error creating annotations: {str(e)}") - return { - "Error": "Error creating annotations", - "SubCode": "InvalidData", - }, 400 + task_ids = [t["taskId"] for t in annotations["tasks"]] - return project_id, 200 + # check if task ids are valid + tasks = Task.get_tasks(project_id, task_ids) + tasks_ids_db = [t.id for t in tasks] + if len(task_ids) != len(tasks_ids_db): + return {"Error": "Invalid task id"}, 500 + + for annotation in annotations["tasks"]: + try: + TaskAnnotationsService.add_or_update_annotation( + annotation, project_id, annotation_type + ) + except Exception as e: + logger.error(f"Error creating annotations: {str(e)}") + return { + "Error": "Error creating annotations", + "SubCode": "InvalidData", + }, 400 - def put(self, project_id: int, task_id: int): - """ - Update a single task's annotations - """ - pass + return project_id, 200 diff --git a/backend/api/campaigns/resources.py b/backend/api/campaigns/resources.py index 466090ce00..498bcc5d07 100644 --- a/backend/api/campaigns/resources.py +++ b/backend/api/campaigns/resources.py @@ -1,292 +1,322 @@ -from flask_restful import Resource, request, current_app -from schematics.exceptions import DataError +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse -from backend.models.dtos.campaign_dto import CampaignDTO, NewCampaignDTO +from backend.db import get_db +from backend.models.dtos.campaign_dto import ( + CampaignDTO, + CampaignListDTO, + NewCampaignDTO, +) +from backend.models.dtos.user_dto import AuthUserDTO from backend.services.campaign_service import CampaignService from backend.services.organisation_service import OrganisationService -from backend.services.users.authentication_service import token_auth +from backend.services.users.authentication_service import login_required +router = APIRouter( + prefix="/campaigns", + tags=["campaigns"], + responses={404: {"description": "Not found"}}, +) -class CampaignsRestAPI(Resource): - def get(self, campaign_id): - """ - Get an active campaign's information - --- - tags: - - campaigns - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: campaign_id - in: path - description: Campaign ID - required: true - type: integer - default: 1 - responses: - 200: - description: Campaign found - 404: - description: No Campaign found - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - if authenticated_user_id: - campaign = CampaignService.get_campaign_as_dto( - campaign_id, authenticated_user_id - ) - else: - campaign = 
CampaignService.get_campaign_as_dto(campaign_id, 0) - return campaign.to_primitive(), 200 - @token_auth.login_required - def patch(self, campaign_id): - """ - Updates an existing campaign - --- - tags: - - campaigns - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - type: string - required: true - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: campaign_id - in: path - description: Campaign ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for updating a Campaign - schema: - properties: - name: - type: string - example: HOT Campaign - logo: - type: string - example: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg - url: - type: string - example: https://hotosm.org - organisations: - type: array - items: - type: integer - default: [ - 1 - ] - responses: - 200: - description: Campaign updated successfully - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Campaign not found - 409: - description: Resource duplication - 500: - description: Internal Server Error - """ - try: - orgs_dto = OrganisationService.get_organisations_managed_by_user_as_dto( - token_auth.current_user() - ) - if len(orgs_dto.organisations) < 1: - raise ValueError("User not a Org Manager") - except ValueError as e: - error_msg = f"CampaignsRestAPI PATCH: {str(e)}" - return {"Error": error_msg, "SubCode": "UserNotPermitted"}, 403 +@router.get("/{campaign_id}/", response_model=CampaignDTO) +async def retrieve_campaign( + request: Request, campaign_id: int, db: Database = Depends(get_db) +): + """ + Get an active campaign's information + --- + tags: + - campaigns + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: campaign_id + in: path + description: Campaign ID + required: true + type: integer + default: 1 + responses: + 200: + description: Campaign found + 404: + description: No Campaign found + 500: + description: Internal Server Error + """ + campaign = await CampaignService.get_campaign_as_dto(campaign_id, db) + return campaign - try: - campaign_dto = CampaignDTO(request.get_json()) - campaign_dto.validate() - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 - try: - campaign = CampaignService.update_campaign(campaign_dto, campaign_id) - return {"Success": "Campaign {} updated".format(campaign.id)}, 200 - except ValueError: - error_msg = "Campaign PATCH - name already exists" - return {"Error": error_msg, "SubCode": "NameExists"}, 409 +@router.patch("/{campaign_id}/") +async def update_campaign( + campaign_dto: CampaignDTO, + request: Request, + campaign_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Updates an existing campaign + --- + tags: + - campaigns + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + type: string + required: true + default: Token sessionTokenHere== + - in: header + name: 
Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: campaign_id + in: path + description: Campaign ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for updating a Campaign + schema: + properties: + name: + type: string + example: HOT Campaign + logo: + type: string + example: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg + url: + type: string + example: https://hotosm.org + organisations: + type: array + items: + type: integer + default: [ + 1 + ] + responses: + 200: + description: Campaign updated successfully + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Campaign not found + 409: + description: Resource duplication + 500: + description: Internal Server Error + """ + try: + orgs_dto = await OrganisationService.get_organisations_managed_by_user_as_dto( + user.id, db + ) + if len(orgs_dto.organisations) < 1: + raise ValueError("User not a Org Manager") + except ValueError as e: + error_msg = f"CampaignsRestAPI PATCH: {str(e)}" + return JSONResponse( + content={"Error": error_msg, "SubCode": "UserNotPermitted"}, status_code=403 + ) + try: + campaign = await CampaignService.update_campaign(campaign_dto, campaign_id, db) + return JSONResponse( + content={"Success": "Campaign {} updated".format(campaign.id)}, + status_code=200, + ) + except ValueError: + error_msg = "Campaign PATCH - name already exists" + return JSONResponse( + content={"Error": error_msg, "SubCode": "NameExists"}, status_code=400 + ) - @token_auth.login_required - def delete(self, campaign_id): - """ - Deletes an existing campaign - --- - tags: - - campaigns - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - type: string - required: true - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: campaign_id - in: path - description: Campaign ID - required: true - type: integer - default: 1 - responses: - 200: - description: Campaign deleted successfully - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Campaign not found - 500: - description: Internal Server Error - """ - try: - orgs_dto = OrganisationService.get_organisations_managed_by_user_as_dto( - token_auth.current_user() - ) - if len(orgs_dto.organisations) < 1: - raise ValueError("User not a Org Manager") - except ValueError as e: - error_msg = f"CampaignsRestAPI DELETE: {str(e)}" - return {"Error": error_msg, "SubCode": "UserNotPermitted"}, 403 - campaign = CampaignService.get_campaign(campaign_id) - CampaignService.delete_campaign(campaign.id) - return {"Success": "Campaign deleted"}, 200 +@router.delete("/{campaign_id}/") +async def delete_campaign( + request: Request, + campaign_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Deletes an existing campaign + --- + tags: + - campaigns + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + type: string + required: true + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: campaign_id + in: path + description: Campaign ID + 
required: true + type: integer + default: 1 + responses: + 200: + description: Campaign deleted successfully + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Campaign not found + 500: + description: Internal Server Error + """ + try: + orgs_dto = await OrganisationService.get_organisations_managed_by_user_as_dto( + request.user.display_name, db + ) + if len(orgs_dto.organisations) < 1: + raise ValueError("User not a Org Manager") + except ValueError as e: + error_msg = f"CampaignsRestAPI DELETE: {str(e)}" + return JSONResponse( + content={"Error": error_msg, "SubCode": "UserNotPermitted"}, status_code=403 + ) + campaign = await CampaignService.get_campaign(campaign_id, db) + await CampaignService.delete_campaign(campaign.id, db) + return JSONResponse(content={"Success": "Campaign deleted"}, status_code=200) -class CampaignsAllAPI(Resource): - def get(self): - """ - Get all active campaigns - --- - tags: - - campaigns - produces: - - application/json - responses: - 200: - description: All Campaigns returned successfully - 500: - description: Internal Server Error - """ - campaigns = CampaignService.get_all_campaigns() - return campaigns.to_primitive(), 200 - @token_auth.login_required - def post(self): - """ - Creates a new campaign - --- - tags: - - campaigns - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - type: string - required: true - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - in: body - name: body - required: true - description: JSON object for creating a new Campaign - schema: - properties: - name: - type: string - example: HOT Campaign - logo: - type: string - example: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg - url: - type: string - example: https://hotosm.org - organisations: - type: array - items: - type: integer - default: [ - 1 - ] - responses: - 201: - description: New campaign created successfully - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 409: - description: Resource duplication - 500: - description: Internal Server Error - """ - try: - orgs_dto = OrganisationService.get_organisations_managed_by_user_as_dto( - token_auth.current_user() - ) - if len(orgs_dto.organisations) < 1: - raise ValueError("User not a Org Manager") - except ValueError as e: - error_msg = f"CampaignsAllAPI POST: {str(e)}" - return {"Error": error_msg, "SubCode": "UserNotPermitted"}, 403 +@router.get("/", response_model=CampaignListDTO) +async def list_campaigns( + request: Request, + db: Database = Depends(get_db), +): + """ + Get all active campaigns + --- + tags: + - campaigns + produces: + - application/json + responses: + 200: + description: All Campaigns returned successfully + 500: + description: Internal Server Error + """ + campaigns = await CampaignService.get_all_campaigns(db) + return campaigns - try: - campaign_dto = NewCampaignDTO(request.get_json()) - campaign_dto.validate() - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 - try: - campaign = CampaignService.create_campaign(campaign_dto) - return {"campaignId": campaign.id}, 201 - except ValueError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 409 +@router.post("/") +async def 
create_campaign( + campaign_dto: NewCampaignDTO, + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Creates a new campaign + --- + tags: + - campaigns + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + type: string + required: true + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - in: body + name: body + required: true + description: JSON object for creating a new Campaign + schema: + properties: + name: + type: string + example: HOT Campaign + logo: + type: string + example: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg + url: + type: string + example: https://hotosm.org + organisations: + type: array + items: + type: integer + default: [ + 1 + ] + responses: + 201: + description: New campaign created successfully + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 409: + description: Resource duplication + 500: + description: Internal Server Error + """ + try: + orgs_dto = await OrganisationService.get_organisations_managed_by_user_as_dto( + request.user.display_name, db + ) + if len(orgs_dto.organisations) < 1: + raise ValueError("User not a Org Manager") + except ValueError as e: + error_msg = f"CampaignsAllAPI POST: {str(e)}" + return JSONResponse( + content={"Error": error_msg, "SubCode": "UserNotPermitted"}, status_code=403 + ) + + try: + campaign_id = await CampaignService.create_campaign(campaign_dto, db) + return JSONResponse(content={"campaignId": campaign_id}, status_code=201) + except ValueError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=409, + ) diff --git a/backend/api/comments/resources.py b/backend/api/comments/resources.py index 0e031360ef..7cb8b38435 100644 --- a/backend/api/comments/resources.py +++ b/backend/api/comments/resources.py @@ -1,311 +1,357 @@ -from flask_restful import Resource, request, current_app -from schematics.exceptions import DataError +from databases import Database +from fastapi import APIRouter, BackgroundTasks, Depends, Request +from fastapi.responses import JSONResponse +from loguru import logger -from backend.models.dtos.message_dto import ChatMessageDTO +from backend.db import get_db from backend.models.dtos.mapping_dto import TaskCommentDTO +from backend.models.dtos.message_dto import ChatMessageDTO +from backend.models.dtos.user_dto import AuthUserDTO +from backend.models.postgis.utils import timestamp +from backend.services.mapping_service import MappingService, MappingServiceError from backend.services.messaging.chat_service import ChatService -from backend.services.users.user_service import UserService from backend.services.project_service import ProjectService -from backend.services.mapping_service import MappingService, MappingServiceError -from backend.services.users.authentication_service import token_auth, tm - +from backend.services.users.authentication_service import login_required +from backend.services.users.user_service import UserService -class CommentsProjectsAllAPI(Resource): - @tm.pm_only(False) - @token_auth.login_required - def post(self, project_id): - """ - Add a message to project chat - --- - tags: - - comments - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - 
required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Project ID to attach the chat message to - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for creating a new mapping license - schema: - properties: - message: - type: string - default: This is an awesome project - responses: - 201: - description: Message posted successfully - 400: - description: Invalid Request - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - if UserService.is_user_blocked(authenticated_user_id): - return {"Error": "User is on read only mode", "SubCode": "ReadOnly"}, 403 +router = APIRouter( + prefix="/projects", + tags=["projects"], + responses={404: {"description": "Not found"}}, +) - try: - chat_dto = ChatMessageDTO(request.get_json()) - chat_dto.user_id = authenticated_user_id - chat_dto.project_id = project_id - chat_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return { - "Error": "Unable to add chat message", - "SubCode": "InvalidData", - }, 400 - try: - project_messages = ChatService.post_message( - chat_dto, project_id, authenticated_user_id +@router.post("/{project_id}/comments/") +async def post( + project_id: int, + request: Request, + background_tasks: BackgroundTasks, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Add a message to project chat + --- + tags: + - comments + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Project ID to attach the chat message to + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for creating a new mapping license + schema: + properties: + message: + type: string + default: This is an awesome project + responses: + 201: + description: Message posted successfully + 400: + description: Invalid Request + 500: + description: Internal Server Error + """ + user = await UserService.get_user_by_id(user.id, db) + if await UserService.is_user_blocked(user.id, db): + return JSONResponse( + content={"Error": "User is on read only mode", "SubCode": "ReadOnly"}, + status_code=403, + ) + request_json = await request.json() + message = request_json.get("message") + chat_dto = ChatMessageDTO( + message=message, + user_id=user.id, + project_id=project_id, + timestamp=timestamp(), + username=user.username, + ) + try: + async with db.transaction(): + project_messages = await ChatService.post_message( + chat_dto, project_id, user.id, db, background_tasks ) - return project_messages.to_primitive(), 201 - except ValueError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 + return project_messages + except ValueError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) - def get(self, project_id): - """ - Get all chat messages for a project - --- - tags: - - comments - produces: - - application/json - parameters: - - name: project_id - in: path - description: Project ID to attach the chat message to - required: true - type: integer - default: 1 - - in: query - name: page - description: Page of results user requested - type: integer - default: 1 - - in: 
query - name: perPage - description: Number of elements per page. - type: integer - default: 20 - responses: - 200: - description: All messages - 404: - description: No chat messages on project - 500: - description: Internal Server Error - """ - ProjectService.exists(project_id) - page = int(request.args.get("page")) if request.args.get("page") else 1 - per_page = int(request.args.get("perPage", 20)) - project_messages = ChatService.get_messages(project_id, page, per_page) - return project_messages.to_primitive(), 200 +@router.get("/{project_id}/comments/") +async def get(request: Request, project_id: int, db: Database = Depends(get_db)): + """ + Get all chat messages for a project + --- + tags: + - comments + produces: + - application/json + parameters: + - name: project_id + in: path + description: Project ID to attach the chat message to + required: true + type: integer + default: 1 + - in: query + name: page + description: Page of results user requested + type: integer + default: 1 + - in: query + name: perPage + description: Number of elements per page. + type: integer + default: 20 + responses: + 200: + description: All messages + 404: + description: No chat messages on project + 500: + description: Internal Server Error + """ + await ProjectService.exists(project_id, db) + page = ( + int(request.query_params.get("page")) if request.query_params.get("page") else 1 + ) + per_page = int(request.query_params.get("perPage", 20)) + project_messages = await ChatService.get_messages(project_id, db, page, per_page) + return project_messages -class CommentsProjectsRestAPI(Resource): - @token_auth.login_required - def delete(self, project_id, comment_id): - """ - Delete a chat message - --- - tags: - - comments - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Project ID to attach the chat message to - required: true - type: integer - default: 1 - - name: comment_id - in: path - description: Comment ID to delete - required: true - type: integer - default: 1 - responses: - 200: - description: Comment deleted - 403: - description: User is not authorized to delete comment - 404: - description: Comment not found - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - try: - ChatService.delete_project_chat_by_id( - project_id, comment_id, authenticated_user_id + +@router.delete("/{project_id}/comments/{comment_id}/") +async def delete( + project_id: int, + comment_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Delete a chat message + --- + tags: + - comments + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Project ID to attach the chat message to + required: true + type: integer + default: 1 + - name: comment_id + in: path + description: Comment ID to delete + required: true + type: integer + default: 1 + responses: + 200: + description: Comment deleted + 403: + description: User is not authorized to delete comment + 404: + description: Comment not found + 500: + description: Internal Server Error + """ + authenticated_user_id = user.id + try: + async with db.transaction(): + await 
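The project-chat GET above reads page and perPage by hand from request.query_params. A hedged alternative, shown purely as a sketch, declares them as typed query parameters so FastAPI validates and documents them; the handler body is a placeholder rather than the real ChatService call.

from fastapi import APIRouter, Query

pagination_sketch = APIRouter()


@pagination_sketch.get("/projects/{project_id}/comments/")
async def get_project_comments(
    project_id: int,
    page: int = Query(1, ge=1, description="Page of results"),
    per_page: int = Query(20, alias="perPage", ge=1, le=100, description="Elements per page"),
):
    # A real handler would call ChatService.get_messages(project_id, db, page, per_page).
    return {"projectId": project_id, "page": page, "perPage": per_page}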
ChatService.delete_project_chat_by_id( + project_id, comment_id, authenticated_user_id, db ) - return {"Success": "Comment deleted"}, 200 - except ValueError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 + return JSONResponse(content={"Success": "Comment deleted"}, status_code=200) + except ValueError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) -class CommentsTasksRestAPI(Resource): - @tm.pm_only(False) - @token_auth.login_required - def post(self, project_id, task_id): - """ - Adds a comment to the task outside of mapping/validation - --- - tags: - - comments - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - name: task_id - in: path - description: Unique task ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object representing the comment - schema: - id: TaskComment - required: - - comment - properties: - comment: - type: string - description: user comment about the task - responses: - 200: - description: Comment added - 400: - description: Client Error - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Task not found - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - if UserService.is_user_blocked(authenticated_user_id): - return {"Error": "User is on read only mode", "SubCode": "ReadOnly"}, 403 +@router.post("/{project_id}/comments/tasks/{task_id}/") +# TODO Decorator +# @tm.pm_only(False) +async def post( + request: Request, + project_id: int, + task_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Adds a comment to the task outside of mapping/validation + --- + tags: + - comments + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: integer + default: 1 + - name: task_id + in: path + description: Unique task ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object representing the comment + schema: + id: TaskComment + required: + - comment + properties: + comment: + type: string + description: user comment about the task + responses: + 200: + description: Comment added + 400: + description: Client Error + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Task not found + 500: + description: Internal Server Error + """ + authenticated_user_id = request.user.display_name + if await UserService.is_user_blocked(authenticated_user_id, db): + return JSONResponse( + content={"Error": "User is on read only mode", "SubCode": "ReadOnly"}, + status_code=403, + ) - try: - task_comment = TaskCommentDTO(request.get_json()) - task_comment.user_id = token_auth.current_user() - task_comment.task_id = task_id - task_comment.project_id = project_id - task_comment.validate() - except DataError as e: - 
current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": "Unable to add comment", "SubCode": "InvalidData"}, 400 + try: + request_json = await request.json() + comment = request_json.get("comment") + task_comment = TaskCommentDTO( + user_id=user.id, task_id=task_id, project_id=project_id, comment=comment + ) + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={"Error": "Unable to add comment", "SubCode": "InvalidData"}, + status_code=400, + ) + try: + task = await MappingService.add_task_comment(task_comment, db) + return task + except MappingServiceError: + return JSONResponse(content={"Error": "Task update failed"}, status_code=403) - try: - task = MappingService.add_task_comment(task_comment) - return task.to_primitive(), 201 - except MappingServiceError: - return {"Error": "Task update failed"}, 403 - def get(self, project_id, task_id): - """ - Get comments for a task - --- - tags: - - comments - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - name: task_id - in: path - description: Unique task ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object representing the comment - schema: - id: TaskComment - required: - - comment - properties: - comment: - type: string - description: user comment about the task - responses: - 200: - description: Comment retrieved - 400: - description: Client Error - 404: - description: Task not found - 500: - description: Internal Server Error - """ - try: - task_comment = TaskCommentDTO(request.get_json()) - task_comment.user_id = token_auth.current_user() - task_comment.task_id = task_id - task_comment.project_id = project_id - task_comment.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return { +@router.get("/{project_id}/comments/tasks/{task_id}/") +async def get(request: Request, project_id, task_id): + """ + Get comments for a task + --- + tags: + - comments + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: integer + default: 1 + - name: task_id + in: path + description: Unique task ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object representing the comment + schema: + id: TaskComment + required: + - comment + properties: + comment: + type: string + description: user comment about the task + responses: + 200: + description: Comment retrieved + 400: + description: Client Error + 404: + description: Task not found + 500: + description: Internal Server Error + """ + try: + task_comment = TaskCommentDTO(request.json()) + task_comment.user_id = request.user.display_name + task_comment.task_id = task_id + task_comment.project_id = project_id + task_comment.validate() + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={ "Error": "Unable to fetch task comments", "SubCode": "InvalidData", - }, 400 + }, 
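The task-comment POST above pulls the comment out of request.json() inside a try/except. A hedged sketch of the same input expressed as a Pydantic body model follows; TaskCommentRequest is a hypothetical name and the handler body is illustrative only.

from fastapi import APIRouter
from pydantic import BaseModel, Field

comment_sketch = APIRouter()


class TaskCommentRequest(BaseModel):
    # Hypothetical request model; not a DTO that exists in the codebase.
    comment: str = Field(..., min_length=1, description="User comment about the task")


@comment_sketch.post("/projects/{project_id}/comments/tasks/{task_id}/")
async def add_task_comment(project_id: int, task_id: int, body: TaskCommentRequest):
    # A real handler would build TaskCommentDTO(...) and call MappingService.add_task_comment.
    return {"projectId": project_id, "taskId": task_id, "comment": body.comment}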
+ status_code=400, + ) - try: - # NEW FUNCTION HAS TO BE ADDED - # task = MappingService.add_task_comment(task_comment) - # return task.to_primitive(), 200 - return - except MappingServiceError as e: - return {"Error": str(e)}, 403 + try: + # NEW FUNCTION HAS TO BE ADDED + # task = MappingService.add_task_comment(task_comment) + # return task.model_dump(by_alias=True), 200 + return + except MappingServiceError as e: + return JSONResponse(content={"Error": str(e)}, status_code=403) diff --git a/backend/api/countries/resources.py b/backend/api/countries/resources.py index 6389343e99..6fb50752c8 100644 --- a/backend/api/countries/resources.py +++ b/backend/api/countries/resources.py @@ -1,21 +1,31 @@ -from flask_restful import Resource +from backend.models.dtos.tags_dto import TagsDTO from backend.services.tags_service import TagsService +from fastapi import APIRouter, Depends +from backend.db import get_db +from databases import Database -class CountriesRestAPI(Resource): - def get(self): - """ - Fetch all Country tags - --- - tags: - - countries - produces: - - application/json - responses: - 200: - description: All Country tags returned - 500: - description: Internal Server Error - """ - tags = TagsService.get_all_countries() - return tags.to_primitive(), 200 +router = APIRouter( + prefix="/countries", + tags=["countries"], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/", response_model=TagsDTO) +async def get(db: Database = Depends(get_db)): + """ + Fetch all Country tags + --- + tags: + - countries + produces: + - application/json + responses: + 200: + description: All Country tags returned + 500: + description: Internal Server Error + """ + tags = await TagsService.get_all_countries(db) + return tags diff --git a/backend/api/interests/resources.py b/backend/api/interests/resources.py index d1c0a0e1c6..531b7b478f 100644 --- a/backend/api/interests/resources.py +++ b/backend/api/interests/resources.py @@ -1,259 +1,281 @@ -from flask_restful import Resource, current_app, request -from schematics.exceptions import DataError +from asyncpg.exceptions import UniqueViolationError +from databases import Database +from fastapi import APIRouter, Depends +from fastapi.responses import JSONResponse +from backend.db import get_db from backend.models.dtos.interests_dto import InterestDTO +from backend.models.dtos.user_dto import AuthUserDTO from backend.services.interests_service import InterestService from backend.services.organisation_service import OrganisationService -from backend.services.users.authentication_service import token_auth +from backend.services.users.authentication_service import login_required -from sqlalchemy.exc import IntegrityError +router = APIRouter( + prefix="/interests", + tags=["interests"], + responses={404: {"description": "Not found"}}, +) INTEREST_NOT_FOUND = "Interest Not Found" -class InterestsAllAPI(Resource): - @token_auth.login_required - def post(self): - """ - Creates a new interest - --- - tags: - - interests - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object for creating a new interest - schema: - properties: - name: - type: string - default: Public Domain - responses: - 200: - description: New interest created - 400: - description: Invalid Request - 401: - description: Unauthorized - Invalid credentials - 403: - description: 
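For simple read-only routes like the countries endpoint, FastAPI's TestClient plus dependency_overrides makes unit testing straightforward. The sketch below is self-contained, so a local stub stands in for backend.db.get_db and a canned payload stands in for TagsService; both are assumptions for illustration.

from fastapi import APIRouter, Depends, FastAPI
from fastapi.testclient import TestClient


async def get_db_stub():
    # Local stand-in for backend.db.get_db; a real test would instead override that
    # dependency with app.dependency_overrides[get_db] = lambda: None
    return None


countries_sketch = APIRouter(prefix="/countries", tags=["countries"])


@countries_sketch.get("/")
async def get_countries(db=Depends(get_db_stub)):
    return {"tags": ["Nepal", "Philippines"]}  # canned data instead of TagsService


app = FastAPI()
app.include_router(countries_sketch)
client = TestClient(app)
assert client.get("/countries/").json() == {"tags": ["Nepal", "Philippines"]}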
Forbidden - 500: - description: Internal Server Error - """ - try: - orgs_dto = OrganisationService.get_organisations_managed_by_user_as_dto( - token_auth.current_user() - ) - if len(orgs_dto.organisations) < 1: - raise ValueError("User not a Org Manager") - except ValueError as e: - error_msg = f"InterestsAllAPI POST: {str(e)}" - return {"Error": error_msg, "SubCode": "UserNotPermitted"}, 403 +@router.post("/") +async def post( + interest_dto: InterestDTO, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Creates a new interest + --- + tags: + - interests + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object for creating a new interest + schema: + properties: + name: + type: string + default: Public Domain + responses: + 200: + description: New interest created + 400: + description: Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + try: + orgs_dto = await OrganisationService.get_organisations_managed_by_user_as_dto( + user_id=user.id, db=db + ) + if len(orgs_dto.organisations) < 1: + raise ValueError("User not a Org Manager") + except ValueError as e: + error_msg = f"InterestsAllAPI POST: {str(e)}" + return JSONResponse( + content={"Error": error_msg, "SubCode": "UserNotPermitted"}, status_code=403 + ) - try: - interest_dto = InterestDTO(request.get_json()) - interest_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 + try: + new_interest_dto = await InterestService.create(interest_dto.name, db) + return new_interest_dto - try: - new_interest = InterestService.create(interest_dto.name) - return new_interest.to_primitive(), 200 - except IntegrityError: - return ( - { - "Error": "Value '{0}' already exists".format(interest_dto.name), - "SubCode": "NameExists", - }, - 400, - ) + except UniqueViolationError: + return JSONResponse( + content={ + "Error": "Value '{0}' already exists".format(interest_dto.name), + "SubCode": "NameExists", + }, + status_code=400, + ) - def get(self): - """ - Get all interests - --- - tags: - - interests - produces: - - application/json - responses: - 200: - description: List of interests - 500: - description: Internal Server Error - """ - interests = InterestService.get_all_interests() - return interests.to_primitive(), 200 +@router.get("/") +async def get(db: Database = Depends(get_db)): + """ + Get all interests + --- + tags: + - interests + produces: + - application/json + responses: + 200: + description: List of interests + 500: + description: Internal Server Error + """ + interests_dto = await InterestService.get_all_interests(db) + return interests_dto -class InterestsRestAPI(Resource): - @token_auth.login_required - def get(self, interest_id): - """ - Get an existing interest - --- - tags: - - interests - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: interest_id - in: path - description: Interest ID - required: true - type: integer - default: 1 - responses: - 200: - description: Interest - 400: - description: Invalid Request - 401: - 
description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Interest not found - 500: - description: Internal Server Error - """ - try: - orgs_dto = OrganisationService.get_organisations_managed_by_user_as_dto( - token_auth.current_user() - ) - if len(orgs_dto.organisations) < 1: - raise ValueError("User not a Org Manager") - except ValueError as e: - error_msg = f"InterestsRestAPI GET: {str(e)}" - return {"Error": error_msg, "SubCode": "UserNotPermitted"}, 403 - interest = InterestService.get(interest_id) - return interest.to_primitive(), 200 +@router.get("/{interest_id}/") +async def get( + interest_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Get an existing interest + --- + tags: + - interests + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: interest_id + in: path + description: Interest ID + required: true + type: integer + default: 1 + responses: + 200: + description: Interest + 400: + description: Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Interest not found + 500: + description: Internal Server Error + """ + try: + orgs_dto = await OrganisationService.get_organisations_managed_by_user_as_dto( + user_id=user.id, db=db + ) + if len(orgs_dto.organisations) < 1: + raise ValueError("User not a Org Manager") + except ValueError as e: + error_msg = f"InterestsRestAPI GET: {str(e)}" + return JSONResponse( + content={"Error": error_msg, "SubCode": "UserNotPermitted"}, status_code=403 + ) - @token_auth.login_required - def patch(self, interest_id): - """ - Update an existing interest - --- - tags: - - interests - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: interest_id - in: path - description: Interest ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for creating a new interest - schema: - properties: - name: - type: string - default: Public Domain - responses: - 200: - description: Interest updated - 400: - description: Invalid Request - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Interest not found - 500: - description: Internal Server Error - """ - try: - orgs_dto = OrganisationService.get_organisations_managed_by_user_as_dto( - token_auth.current_user() - ) - if len(orgs_dto.organisations) < 1: - raise ValueError("User not a Org Manager") - except ValueError as e: - error_msg = f"InterestsRestAPI PATCH: {str(e)}" - return {"Error": error_msg, "SubCode": "UserNotPermitted"}, 403 + interest_dto = await InterestService.get(interest_id, db) + return interest_dto - try: - interest_dto = InterestDTO(request.get_json()) - interest_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 - update_interest = InterestService.update(interest_id, interest_dto) - return update_interest.to_primitive(), 200 +@router.patch("/{interest_id}/") +async def patch( + interest_id: int, + interest_dto: InterestDTO, + db: Database = Depends(get_db), + user: AuthUserDTO = 
Depends(login_required), +): + """ + Update an existing interest + --- + tags: + - interests + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: interest_id + in: path + description: Interest ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for creating a new interest + schema: + properties: + name: + type: string + default: Public Domain + responses: + 200: + description: Interest updated + 400: + description: Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Interest not found + 500: + description: Internal Server Error + """ + try: + orgs_dto = await OrganisationService.get_organisations_managed_by_user_as_dto( + user_id=user.id, db=db + ) + if len(orgs_dto.organisations) < 1: + raise ValueError("User not a Org Manager") + except ValueError as e: + error_msg = f"InterestsRestAPI PATCH: {str(e)}" + return JSONResponse( + content={"Error": error_msg, "SubCode": "UserNotPermitted"}, status_code=403 + ) - @token_auth.login_required - def delete(self, interest_id): - """ - Delete a specified interest - --- - tags: - - interests - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: interest_id - in: path - description: Unique interest ID - required: true - type: integer - default: 1 - responses: - 200: - description: Interest deleted - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Interest not found - 500: - description: Internal Server Error - """ - try: - orgs_dto = OrganisationService.get_organisations_managed_by_user_as_dto( - token_auth.current_user() - ) - if len(orgs_dto.organisations) < 1: - raise ValueError("User not a Org Manager") - except ValueError as e: - error_msg = f"InterestsRestAPI DELETE: {str(e)}" - return {"Error": error_msg, "SubCode": "UserNotPermitted"}, 403 + update_interest = await InterestService.update(interest_id, interest_dto, db) + return update_interest - InterestService.delete(interest_id) - return {"Success": "Interest deleted"}, 200 + +@router.delete("/{interest_id}/") +async def delete( + interest_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Delete a specified interest + --- + tags: + - interests + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: interest_id + in: path + description: Unique interest ID + required: true + type: integer + default: 1 + responses: + 200: + description: Interest deleted + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Interest not found + 500: + description: Internal Server Error + """ + try: + orgs_dto = await OrganisationService.get_organisations_managed_by_user_as_dto( + user_id=user.id, db=db + ) + if len(orgs_dto.organisations) < 1: + raise ValueError("User not a Org Manager") + except ValueError as e: + error_msg = f"InterestsRestAPI DELETE: {str(e)}" + return JSONResponse( + content={"Error": error_msg, "SubCode": "UserNotPermitted"}, 
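Each interests handler repeats the same organisation-manager guard inline. One hedged refactoring sketch is to express it once as a dependency that raises a 403; require_org_manager is a hypothetical helper, while the service call mirrors the signature used in the handlers above.

from databases import Database
from fastapi import Depends, HTTPException

from backend.db import get_db
from backend.models.dtos.user_dto import AuthUserDTO
from backend.services.organisation_service import OrganisationService
from backend.services.users.authentication_service import login_required


async def require_org_manager(
    user: AuthUserDTO = Depends(login_required),
    db: Database = Depends(get_db),
) -> AuthUserDTO:
    # Same check the handlers above perform inline, raised as an HTTP 403 instead.
    orgs_dto = await OrganisationService.get_organisations_managed_by_user_as_dto(
        user_id=user.id, db=db
    )
    if len(orgs_dto.organisations) < 1:
        raise HTTPException(
            status_code=403,
            detail={"Error": "User not a Org Manager", "SubCode": "UserNotPermitted"},
        )
    return user


# A route could then declare: user: AuthUserDTO = Depends(require_org_manager)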
status_code=403 + ) + + await InterestService.delete(interest_id, db) + return JSONResponse(content={"Success": "Interest deleted"}, status_code=200) diff --git a/backend/api/issues/resources.py b/backend/api/issues/resources.py index 3a60deadf6..9c0563c941 100644 --- a/backend/api/issues/resources.py +++ b/backend/api/issues/resources.py @@ -1,219 +1,255 @@ -from flask_restful import Resource, current_app, request -from schematics.exceptions import DataError +from databases import Database +from fastapi import APIRouter, Body, Depends, Request +from fastapi.responses import JSONResponse +from loguru import logger +from backend.db import get_db from backend.models.dtos.mapping_issues_dto import MappingIssueCategoryDTO +from backend.models.dtos.user_dto import AuthUserDTO from backend.services.mapping_issues_service import MappingIssueCategoryService -from backend.services.users.authentication_service import token_auth, tm +from backend.services.users.authentication_service import login_required + +router = APIRouter( + prefix="/tasks", + tags=["issues"], + responses={404: {"description": "Not found"}}, +) ISSUE_NOT_FOUND = "Mapping-issue category not found" -class IssuesRestAPI(Resource): - def get(self, category_id): - """ - Get specified mapping-issue category - --- - tags: - - issues - produces: - - application/json - parameters: - - name: category_id - in: path - description: The unique mapping-issue category ID - required: true - type: integer - default: 1 - responses: - 200: - description: Mapping-issue category found - 404: - description: Mapping-issue category not found - 500: - description: Internal Server Error - """ - category_dto = MappingIssueCategoryService.get_mapping_issue_category_as_dto( - category_id - ) - return category_dto.to_primitive(), 200 +# class IssuesRestAPI(Resource): +@router.get("/issues/categories/{category_id}/") +async def get(category_id: int, db: Database = Depends(get_db)): + """ + Get specified mapping-issue category + --- + tags: + - issues + produces: + - application/json + parameters: + - name: category_id + in: path + description: The unique mapping-issue category ID + required: true + type: integer + default: 1 + responses: + 200: + description: Mapping-issue category found + 404: + description: Mapping-issue category not found + 500: + description: Internal Server Error + """ + category_dto = await MappingIssueCategoryService.get_mapping_issue_category_as_dto( + category_id, db + ) + return category_dto.model_dump(by_alias=True) + - @tm.pm_only() - @token_auth.login_required - def patch(self, category_id): - """ - Update an existing mapping-issue category - --- - tags: - - issues - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: category_id - in: path - description: The unique mapping-issue category ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for updating a mapping-issue category - schema: - properties: - name: - type: string - description: - type: string - responses: - 200: - description: Mapping-issue category updated - 400: - description: Invalid Request - 401: - description: Unauthorized - Invalid credentials - 404: - description: Mapping-issue category not found - 500: - description: Internal Server Error - """ - try: - category_dto = MappingIssueCategoryDTO(request.get_json()) - category_dto.category_id = 
category_id - category_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return { +@router.patch("/issues/categories/{category_id}/") +# @tm.pm_only() +async def patch( + request: Request, + category_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + data: MappingIssueCategoryDTO = Body(...), +): + """ + Update an existing mapping-issue category + --- + tags: + - issues + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: category_id + in: path + description: The unique mapping-issue category ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for updating a mapping-issue category + schema: + properties: + name: + type: string + description: + type: string + responses: + 200: + description: Mapping-issue category updated + 400: + description: Invalid Request + 401: + description: Unauthorized - Invalid credentials + 404: + description: Mapping-issue category not found + 500: + description: Internal Server Error + """ + try: + category_dto = data + category_dto.category_id = category_id + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={ "Error": "Unable to update mapping issue category", "SubCode": "InvalidData", - }, 400 - - updated_category = MappingIssueCategoryService.update_mapping_issue_category( - category_dto + }, + status_code=400, ) - return updated_category.to_primitive(), 200 - @tm.pm_only() - @token_auth.login_required - def delete(self, category_id): - """ - Delete the specified mapping-issue category. - Note that categories can be deleted only if they have never been associated with a task.\ - To instead archive a used category that is no longer needed, \ - update the category with its archived flag set to true. 
- --- - tags: - - issues - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: category_id - in: path - description: The unique mapping-issue category ID - required: true - type: integer - default: 1 - responses: - 200: - description: Mapping-issue category deleted - 401: - description: Unauthorized - Invalid credentials - 404: - description: Mapping-issue category not found - 500: - description: Internal Server Error - """ - MappingIssueCategoryService.delete_mapping_issue_category(category_id) - return {"Success": "Mapping-issue category deleted"}, 200 + updated_category = await MappingIssueCategoryService.update_mapping_issue_category( + category_dto, db + ) + return updated_category.model_dump(by_alias=True) -class IssuesAllAPI(Resource): - def get(self): - """ - Gets all mapping issue categories - --- - tags: - - issues - produces: - - application/json - parameters: - - in: query - name: includeArchived - description: Optional filter to include archived categories - type: boolean - default: false - responses: - 200: - description: Mapping issue categories - 500: - description: Internal Server Error - """ - include_archived = request.args.get("includeArchived") == "true" - categories = MappingIssueCategoryService.get_all_mapping_issue_categories( - include_archived - ) - return categories.to_primitive(), 200 +@router.delete("/issues/categories/{category_id}/") +# @tm.pm_only() +async def delete( + request: Request, + category_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Delete the specified mapping-issue category. + Note that categories can be deleted only if they have never been associated with a task.\ + To instead archive a used category that is no longer needed, \ + update the category with its archived flag set to true. 
+ --- + tags: + - issues + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: category_id + in: path + description: The unique mapping-issue category ID + required: true + type: integer + default: 1 + responses: + 200: + description: Mapping-issue category deleted + 401: + description: Unauthorized - Invalid credentials + 404: + description: Mapping-issue category not found + 500: + description: Internal Server Error + """ + await MappingIssueCategoryService.delete_mapping_issue_category(category_id, db) + return JSONResponse( + content={"Success": "Mapping-issue category deleted"}, status_code=200 + ) - @tm.pm_only() - @token_auth.login_required - def post(self): - """ - Creates a new mapping-issue category - --- - tags: - - issues - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object for creating a new mapping-issue category - schema: - properties: - name: - type: string - required: true - description: - type: string - responses: - 200: - description: New mapping-issue category created - 400: - description: Invalid Request - 401: - description: Unauthorized - Invalid credentials - 500: - description: Internal Server Error - """ - try: - category_dto = MappingIssueCategoryDTO(request.get_json()) - category_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return { + +@router.get("/issues/categories/") +async def get(request: Request, db: Database = Depends(get_db)): + """ + Gets all mapping issue categories + --- + tags: + - issues + produces: + - application/json + parameters: + - in: query + name: includeArchived + description: Optional filter to include archived categories + type: boolean + default: false + responses: + 200: + description: Mapping issue categories + 500: + description: Internal Server Error + """ + include_archived = request.query_params.get("includeArchived") == "true" + categories = await MappingIssueCategoryService.get_all_mapping_issue_categories( + include_archived, db + ) + return categories.model_dump(by_alias=True) + + +@router.post("/issues/categories/", response_model=MappingIssueCategoryDTO) +# @tm.pm_only() +async def post( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + data: dict = Body(...), +): + """ + Creates a new mapping-issue category + --- + tags: + - issues + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object for creating a new mapping-issue category + schema: + properties: + name: + type: string + required: true + description: + type: string + responses: + 200: + description: New mapping-issue category created + 400: + description: Invalid Request + 401: + description: Unauthorized - Invalid credentials + 500: + description: Internal Server Error + """ + try: + category_dto = MappingIssueCategoryDTO(**data) + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={ "Error": "Unable to create a new mapping issue 
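The issues-category POST accepts data: dict = Body(...) and wraps DTO construction in a broad except. A hedged sketch that catches pydantic's ValidationError specifically follows; the handler body and the returned field are illustrative only, with the field name taken from the schema documented in the docstring.

from fastapi import APIRouter, Body
from fastapi.responses import JSONResponse
from pydantic import ValidationError

from backend.models.dtos.mapping_issues_dto import MappingIssueCategoryDTO

issues_sketch = APIRouter(prefix="/tasks")


@issues_sketch.post("/issues/categories/")
async def create_issue_category(data: dict = Body(...)):
    try:
        category_dto = MappingIssueCategoryDTO(**data)
    except ValidationError:
        return JSONResponse(
            content={
                "Error": "Unable to create a new mapping issue category",
                "SubCode": "InvalidData",
            },
            status_code=400,
        )
    # A real handler would call MappingIssueCategoryService.create_mapping_issue_category.
    return {"name": category_dto.name}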
category", "SubCode": "InvalidData", - }, 400 - - new_category_id = MappingIssueCategoryService.create_mapping_issue_category( - category_dto + }, + status_code=400, ) - return {"categoryId": new_category_id}, 200 + + new_category_id = await MappingIssueCategoryService.create_mapping_issue_category( + category_dto, db + ) + return JSONResponse(content={"categoryId": new_category_id}, status_code=200) diff --git a/backend/api/licenses/actions.py b/backend/api/licenses/actions.py index 86ec6f8e20..5d11278f4a 100644 --- a/backend/api/licenses/actions.py +++ b/backend/api/licenses/actions.py @@ -1,41 +1,55 @@ -from flask_restful import Resource +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse -from backend.services.users.authentication_service import token_auth +from backend.db import get_db +from backend.models.dtos.user_dto import AuthUserDTO +from backend.services.users.authentication_service import login_required from backend.services.users.user_service import UserService +router = APIRouter( + prefix="/licenses", + tags=["licenses"], + responses={404: {"description": "Not found"}}, +) -class LicensesActionsAcceptAPI(Resource): - @token_auth.login_required - def post(self, license_id): - """ - Capture user acceptance of license terms - --- - tags: - - licenses - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: license_id - in: path - description: License ID terms have been accepted for - required: true - type: integer - default: 1 - responses: - 200: - description: Terms accepted - 401: - description: Unauthorized - Invalid credentials - 404: - description: User or license not found - 500: - description: Internal Server Error - """ - UserService.accept_license_terms(token_auth.current_user(), license_id) - return {"Success": "Terms Accepted"}, 200 + +@router.post("/{license_id}/actions/accept-for-me/") +async def post( + request: Request, + license_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Capture user acceptance of license terms + --- + tags: + - licenses + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: license_id + in: path + description: License ID terms have been accepted for + required: true + type: integer + default: 1 + responses: + 200: + description: Terms accepted + 401: + description: Unauthorized - Invalid credentials + 404: + description: User or license not found + 500: + description: Internal Server Error + """ + await UserService.accept_license_terms(user.id, license_id, db) + return JSONResponse(content={"Success": "Terms Accepted"}, status_code=200) diff --git a/backend/api/licenses/resources.py b/backend/api/licenses/resources.py index 4b3e78066d..78e2a85490 100644 --- a/backend/api/licenses/resources.py +++ b/backend/api/licenses/resources.py @@ -1,205 +1,206 @@ -from flask_restful import Resource, current_app, request -from schematics.exceptions import DataError +from databases import Database +from fastapi import APIRouter, Depends +from fastapi.responses import JSONResponse +from backend.db import get_db from backend.models.dtos.licenses_dto import LicenseDTO from backend.services.license_service import LicenseService -from 
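Routes that depend on login_required, such as the license-acceptance action, can be exercised in tests by overriding that dependency with a stub object that only carries an id. The wiring below is a hedged sketch; the commented-out request would still need UserService or a test database behind it to succeed.

import types

from fastapi import FastAPI
from fastapi.testclient import TestClient

from backend.api.licenses.actions import router as licenses_actions_router
from backend.db import get_db
from backend.services.users.authentication_service import login_required

app = FastAPI()
app.include_router(licenses_actions_router)
# Replace authentication and the DB with stubs; the stub only needs an `id` attribute.
app.dependency_overrides[login_required] = lambda: types.SimpleNamespace(id=1)
app.dependency_overrides[get_db] = lambda: None

client = TestClient(app)
# response = client.post("/licenses/1/actions/accept-for-me/")
# assert response.status_code == 200  # still requires UserService stubbed or a test DB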
backend.services.users.authentication_service import token_auth, tm +router = APIRouter( + prefix="/licenses", + tags=["licenses"], + responses={404: {"description": "Not found"}}, +) -class LicensesRestAPI(Resource): - @tm.pm_only() - @token_auth.login_required - def post(self): - """ - Creates a new mapping license - --- - tags: - - licenses - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object for creating a new mapping license - schema: - properties: - name: - type: string - default: Public Domain - description: - type: string - default: This imagery is in the public domain. - plainText: - type: string - default: This imagery is in the public domain. - responses: - 201: - description: New license created - 400: - description: Invalid Request - 401: - description: Unauthorized - Invalid credentials - 500: - description: Internal Server Error - """ - try: - license_dto = LicenseDTO(request.get_json()) - license_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return { - "Error": "Unable to create new mapping license", - "SubCode": "InvalidData", - }, 400 - new_license_id = LicenseService.create_licence(license_dto) - return {"licenseId": new_license_id}, 201 +@router.post("/") +# TODO: refactor decorator functions +# @requires("authenticated") +# @tm.pm_only() +async def post(license_dto: LicenseDTO, db: Database = Depends(get_db)): + """ + Creates a new mapping license + --- + tags: + - licenses + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object for creating a new mapping license + schema: + properties: + name: + type: string + default: Public Domain + description: + type: string + default: This imagery is in the public domain. + plainText: + type: string + default: This imagery is in the public domain. 
+ responses: + 201: + description: New license created + 400: + description: Invalid Request + 401: + description: Unauthorized - Invalid credentials + 500: + description: Internal Server Error + """ + new_license_id = await LicenseService.create_license(license_dto, db) + return JSONResponse(content={"licenseId": new_license_id}, status_code=201) - def get(self, license_id): - """ - Get a specified mapping license - --- - tags: - - licenses - produces: - - application/json - parameters: - - name: license_id - in: path - description: Unique license ID - required: true - type: integer - default: 1 - responses: - 200: - description: License found - 404: - description: License not found - 500: - description: Internal Server Error - """ - license_dto = LicenseService.get_license_as_dto(license_id) - return license_dto.to_primitive(), 200 - @tm.pm_only() - @token_auth.login_required - def patch(self, license_id): - """ - Update a specified mapping license - --- - tags: - - licenses - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: license_id - in: path - description: Unique license ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for updating a specified mapping license - schema: - properties: - name: - type: string - default: Public Domain - description: - type: string - default: This imagery is in the public domain. - plainText: - type: string - default: This imagery is in the public domain. - responses: - 200: - description: License updated - 400: - description: Invalid Request - 401: - description: Unauthorized - Invalid credentials - 500: - description: Internal Server Error - """ - try: - license_dto = LicenseDTO(request.get_json()) - license_dto.license_id = license_id - license_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 +@router.get("/{license_id}/") +async def get( + license_id: int, + db: Database = Depends(get_db), +): + """ + Get a specified mapping license + --- + tags: + - licenses + produces: + - application/json + parameters: + - name: license_id + in: path + description: Unique license ID + required: true + type: integer + default: 1 + responses: + 200: + description: License found + 404: + description: License not found + 500: + description: Internal Server Error + """ + license_dto = await LicenseService.get_license_as_dto(license_id, db) + return license_dto - updated_license = LicenseService.update_licence(license_dto) - return updated_license.to_primitive(), 200 - @tm.pm_only() - @token_auth.login_required - def delete(self, license_id): - """ - Delete a specified mapping license - --- - tags: - - licenses - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: license_id - in: path - description: Unique license ID - required: true - type: integer - default: 1 - responses: - 200: - description: License deleted - 401: - description: Unauthorized - Invalid credentials - 404: - description: License not found - 500: - description: Internal Server Error - """ - LicenseService.delete_license(license_id) - return {"Success": "License deleted"}, 200 +@router.patch("/{license_id}/") +# 
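The single-license GET above returns the DTO directly. A hedged sketch of the same route with an explicit response_model follows, which also surfaces the schema in the generated OpenAPI docs; it is an illustration, not a drop-in replacement.

from databases import Database
from fastapi import APIRouter, Depends

from backend.db import get_db
from backend.models.dtos.licenses_dto import LicenseDTO
from backend.services.license_service import LicenseService

licenses_sketch = APIRouter(prefix="/licenses", tags=["licenses"])


@licenses_sketch.get("/{license_id}/", response_model=LicenseDTO)
async def get_license(license_id: int, db: Database = Depends(get_db)) -> LicenseDTO:
    return await LicenseService.get_license_as_dto(license_id, db)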
@requires("authenticated") +# @tm.pm_only() +async def patch( + license_dto: LicenseDTO, license_id: int, db: Database = Depends(get_db) +): + """ + Update a specified mapping license + --- + tags: + - licenses + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: license_id + in: path + description: Unique license ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for updating a specified mapping license + schema: + properties: + name: + type: string + default: Public Domain + description: + type: string + default: This imagery is in the public domain. + plainText: + type: string + default: This imagery is in the public domain. + responses: + 200: + description: License updated + 400: + description: Invalid Request + 401: + description: Unauthorized - Invalid credentials + 500: + description: Internal Server Error + """ + await LicenseService.update_license(license_dto, license_id, db) + return JSONResponse(content={"status": "Updated"}, status_code=200) -class LicensesAllAPI(Resource): - def get(self): - """ - Get all imagery licenses - --- - tags: - - licenses - produces: - - application/json - responses: - 200: - description: Licenses found - 404: - description: Licenses not found - 500: - description: Internal Server Error - """ - licenses_dto = LicenseService.get_all_licenses() - return licenses_dto.to_primitive(), 200 +@router.delete("/{license_id}/") +# @requires("authenticated") +# @tm.pm_only() +async def delete(license_id: int, db: Database = Depends(get_db)): + """ + Delete a specified mapping license + --- + tags: + - licenses + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: license_id + in: path + description: Unique license ID + required: true + type: integer + default: 1 + responses: + 200: + description: License deleted + 401: + description: Unauthorized - Invalid credentials + 404: + description: License not found + 500: + description: Internal Server Error + """ + await LicenseService.delete_license(license_id, db) + return JSONResponse(content={"Success": "License deleted"}, status_code=200) + + +@router.get("/") +async def get(db: Database = Depends(get_db)): + """ + Get all imagery licenses + --- + tags: + - licenses + produces: + - application/json + responses: + 200: + description: Licenses found + 404: + description: Licenses not found + 500: + description: Internal Server Error + """ + licenses_dto = await LicenseService.get_all_licenses(db) + return licenses_dto diff --git a/backend/api/notifications/actions.py b/backend/api/notifications/actions.py index a6fa74c109..beb5d50522 100644 --- a/backend/api/notifications/actions.py +++ b/backend/api/notifications/actions.py @@ -1,153 +1,173 @@ -from flask_restful import Resource, request +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse +from backend.db import get_db +from backend.models.dtos.user_dto import AuthUserDTO from backend.services.messaging.message_service import MessageService -from backend.services.users.authentication_service import token_auth, tm +from backend.services.users.authentication_service import login_required +router = APIRouter( + 
prefix="/notifications", + tags=["notifications"], + responses={404: {"description": "Not found"}}, +) -class NotificationsActionsDeleteMultipleAPI(Resource): - @tm.pm_only(False) - @token_auth.login_required - def delete(self): - """ - Delete specified messages for logged in user - --- - tags: - - notifications - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object containing message ids to delete - schema: - properties: - messageIds: - type: array - items: integer - required: true - responses: - 200: - description: Messages deleted - 500: - description: Internal Server Error - """ - message_ids = request.get_json()["messageIds"] - if message_ids: - MessageService.delete_multiple_messages( - message_ids, token_auth.current_user() - ) - return {"Success": "Messages deleted"}, 200 +@router.delete("/delete-multiple/") +async def delete( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Delete specified messages for logged in user + --- + tags: + - notifications + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object containing message ids to delete + schema: + properties: + messageIds: + type: array + items: integer + required: true + responses: + 200: + description: Messages deleted + 500: + description: Internal Server Error + """ + data = await request.json() + message_ids = data["messageIds"] + if message_ids: + async with db.transaction(): + await MessageService.delete_multiple_messages(message_ids, user.id, db) + return JSONResponse(content={"Success": "Messages deleted"}, status_code=200) -class NotificationsActionsDeleteAllAPI(Resource): - @token_auth.login_required - def delete(self): - """ - Delete all messages for logged in user - --- - tags: - - notifications - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: query - name: messageType - type: string - description: Optional message-type filter; leave blank to delete all - responses: - 200: - description: Messages deleted - 500: - description: Internal Server Error - """ - message_type = request.args.get("messageType") - MessageService.delete_all_messages(token_auth.current_user(), message_type) - return {"Success": "Messages deleted"}, 200 +@router.delete("/delete-all/") +async def delete( + request: Request, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Delete all messages for logged in user + --- + tags: + - notifications + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: query + name: messageType + type: string + description: Optional message-type filter; leave blank to delete all + responses: + 200: + description: Messages deleted + 500: + description: Internal Server Error + """ + message_type = request.query_params.get("messageType") + async with db.transaction(): + await 
MessageService.delete_all_messages(user.id, db, message_type) + return JSONResponse(content={"Success": "Messages deleted"}, status_code=200) -class NotificationsActionsMarkAsReadAllAPI(Resource): - @token_auth.login_required - def post(self): - """ - Mark all messages as read for logged in user - --- - tags: - - notifications - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: query - name: messageType - type: string - description: Optional message-type filter; leave blank to mark all as read - responses: - 200: - description: Messages marked as read - 500: - description: Internal Server Error - """ - message_type = request.args.get("messageType") - MessageService.mark_all_messages_read(token_auth.current_user(), message_type) - return {"Success": "Messages marked as read"}, 200 +@router.post("/mark-as-read-all/") +async def post( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Mark all messages as read for logged in user + --- + tags: + - notifications + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: query + name: messageType + type: string + description: Optional message-type filter; leave blank to mark all as read + responses: + 200: + description: Messages marked as read + 500: + description: Internal Server Error + """ + message_type = request.query_params.get("messageType") + await MessageService.mark_all_messages_read(user.id, db, message_type) + return JSONResponse(content={"Success": "Messages marked as read"}, status_code=200) -class NotificationsActionsMarkAsReadMultipleAPI(Resource): - @token_auth.login_required - def post(self): - """ - Mark specified messages as read for logged in user - --- - tags: - - notifications - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object containing message ids to mark as read - schema: - properties: - messageIds: - type: array - items: integer - required: true - responses: - 200: - description: Messages marked as read - 500: - description: Internal Server Error - """ - message_ids = request.get_json()["messageIds"] - if message_ids: - MessageService.mark_multiple_messages_read( - message_ids, token_auth.current_user() - ) +@router.post("/mark-as-read-multiple/") +async def post( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Mark specified messages as read for logged in user + --- + tags: + - notifications + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object containing message ids to mark as read + schema: + properties: + messageIds: + type: array + items: integer + required: true + responses: + 200: + description: Messages marked as read + 500: + description: Internal Server Error + """ + data = await request.json() + message_ids = data["messageIds"] + if message_ids: + await 
MessageService.mark_multiple_messages_read(message_ids, user.id, db) - return {"Success": "Messages marked as read"}, 200 + return JSONResponse(content={"Success": "Messages marked as read"}, status_code=200) diff --git a/backend/api/notifications/resources.py b/backend/api/notifications/resources.py index 6575fad152..f3d94b8c28 100644 --- a/backend/api/notifications/resources.py +++ b/backend/api/notifications/resources.py @@ -1,241 +1,267 @@ -from flask_restful import Resource, request +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse + + +from backend.db import get_db +from backend.models.dtos.message_dto import MessageDTO +from backend.models.dtos.user_dto import AuthUserDTO from backend.services.messaging.message_service import ( MessageService, MessageServiceError, ) from backend.services.notification_service import NotificationService -from backend.services.users.authentication_service import token_auth, tm +from backend.services.users.authentication_service import login_required +router = APIRouter( + prefix="/notifications", + tags=["notifications"], + responses={404: {"description": "Not found"}}, +) -class NotificationsRestAPI(Resource): - @tm.pm_only(False) - @token_auth.login_required - def get(self, message_id): - """ - Gets the specified message - --- - tags: - - notifications - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: message_id - in: path - description: The unique message - required: true - type: integer - default: 1 - responses: - 200: - description: Messages found - 403: - description: Forbidden, if user attempting to ready other messages - 404: - description: Not found - 500: - description: Internal Server Error - """ - try: - user_message = MessageService.get_message_as_dto( - message_id, token_auth.current_user() - ) - return user_message.to_primitive(), 200 - except MessageServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 - @tm.pm_only(False) - @token_auth.login_required - def delete(self, message_id): - """ - Deletes the specified message - --- - tags: - - notifications - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: message_id - in: path - description: The unique message - required: true - type: integer - default: 1 - responses: - 200: - description: Messages found - 403: - description: Forbidden, if user attempting to ready other messages - 404: - description: Not found - 500: - description: Internal Server Error - """ - try: - MessageService.delete_message(message_id, token_auth.current_user()) - return {"Success": "Message deleted"}, 200 - except MessageServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 +@router.get("/{message_id}/", response_model=MessageDTO) +async def get( + message_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Gets the specified message + --- + tags: + - notifications + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: message_id + in: path + 
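The notification actions above read messageIds straight out of request.json(), so a missing key surfaces as a server error. A hedged sketch of the same payload as a typed body model follows; MessageIdsRequest is a hypothetical model and the handler body is illustrative only.

from typing import List

from fastapi import APIRouter
from pydantic import BaseModel, Field

notifications_sketch = APIRouter(prefix="/notifications")


class MessageIdsRequest(BaseModel):
    # Hypothetical model; mirrors the {"messageIds": [...]} payload documented above.
    message_ids: List[int] = Field(..., alias="messageIds", min_length=1)


@notifications_sketch.post("/mark-as-read-multiple/")
async def mark_as_read_multiple(body: MessageIdsRequest):
    # A real handler would call MessageService.mark_multiple_messages_read with user.id and db.
    return {"Success": "Messages marked as read", "count": len(body.message_ids)}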
description: The unique message + required: true + type: integer + default: 1 + responses: + 200: + description: Messages found + 403: + description: Forbidden, if user attempting to ready other messages + 404: + description: Not found + 500: + description: Internal Server Error + """ + try: + user_message = await MessageService.get_message_as_dto(message_id, user.id, db) + return user_message + except MessageServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) -class NotificationsAllAPI(Resource): - @tm.pm_only(False) - @token_auth.login_required - def get(self): - """ - Get all messages for logged in user - --- - tags: - - notifications - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: query - name: messageType - type: string - description: Optional message-type filter; leave blank to retrieve all\n - Accepted values are 1 (System), 2 (Broadcast), 3 (Mention), 4 (Validation), - 5 (Invalidation), 6 (Request team), \n - 7 (Invitation), 8 (Task comment), 9 (Project chat), - 10 (Project Activity), and 11 (Team broadcast) - - in: query - name: from - description: Optional from username filter - type: string - - in: query - name: project - description: Optional project filter - type: string - - in: query - name: taskId - description: Optional task filter - type: integer - - in: query - name: status - description: Optional status filter (read or unread) - type: string - - in: query - name: sortBy - description: - field to sort by, defaults to 'date'. Other useful options are 'read', 'project_id' and 'message_type' - type: string - - in: query - name: sortDirection - description: sorting direction ('asc' or 'desc'), defaults to 'desc' - type: string - - in: query - name: page - description: Page of results - type: integer - - in: query - name: pageSize - description: Size of page, defaults to 10 - type: integer - responses: - 200: - description: Messages found - 404: - description: User has no messages - 500: - description: Internal Server Error - """ - preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - page = request.args.get("page", 1, int) - page_size = request.args.get("pageSize", 10, int) - sort_by = request.args.get("sortBy", "date") - sort_direction = request.args.get("sortDirection", "desc") - message_type = request.args.get("messageType", None) - from_username = request.args.get("from") - project = request.args.get("project", None, int) - task_id = request.args.get("taskId", None, int) - status = request.args.get("status", None, str) - user_messages = MessageService.get_all_messages( - token_auth.current_user(), - preferred_locale, - page, - page_size, - sort_by, - sort_direction, - message_type, - from_username, - project, - task_id, - status, +@router.delete("/{message_id}/") +async def delete( + message_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Deletes the specified message + --- + tags: + - notifications + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: message_id + in: path + description: The unique message + required: true + type: integer + default: 1 + responses: + 200: + description: Messages found + 403: + 
description: Forbidden, if user attempting to ready other messages + 404: + description: Not found + 500: + description: Internal Server Error + """ + try: + async with db.transaction(): + await MessageService.delete_message(message_id, user.id, db) + return JSONResponse(content={"Success": "Message deleted"}, status_code=200) + except MessageServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, ) - return user_messages.to_primitive(), 200 -class NotificationsQueriesCountUnreadAPI(Resource): - @tm.pm_only(False) - @token_auth.login_required - def get(self): - """ - Gets count of unread messages - --- - tags: - - notifications - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - responses: - 200: - description: Message info - 500: - description: Internal Server Error - """ - unread_count = MessageService.has_user_new_messages(token_auth.current_user()) - return unread_count, 200 +@router.get("/") +async def get( + request: Request, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Get all messages for logged in user + --- + tags: + - notifications + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: query + name: messageType + type: string + description: Optional message-type filter; leave blank to retrieve all\n + Accepted values are 1 (System), 2 (Broadcast), 3 (Mention), 4 (Validation), + 5 (Invalidation), 6 (Request team), \n + 7 (Invitation), 8 (Task comment), 9 (Project chat), + 10 (Project Activity), and 11 (Team broadcast) + - in: query + name: from + description: Optional from username filter + type: string + - in: query + name: project + description: Optional project filter + type: string + - in: query + name: taskId + description: Optional task filter + type: integer + - in: query + name: status + description: Optional status filter (read or unread) + type: string + - in: query + name: sortBy + description: + field to sort by, defaults to 'date'. 
Other useful options are 'read', 'project_id' and 'message_type' + type: string + - in: query + name: sortDirection + description: sorting direction ('asc' or 'desc'), defaults to 'desc' + type: string + - in: query + name: page + description: Page of results + type: integer + - in: query + name: pageSize + description: Size of page, defaults to 10 + type: integer + responses: + 200: + description: Messages found + 404: + description: User has no messages + 500: + description: Internal Server Error + """ + preferred_locale = request.headers.get("accept-language") + page = request.query_params.get("page", 1) + page_size = request.query_params.get("pageSize", 10) + sort_by = request.query_params.get("sortBy", "date") + sort_direction = request.query_params.get("sortDirection", "desc") + message_type = request.query_params.get("messageType", None) + from_username = request.query_params.get("from") + project = request.query_params.get("project", None) + task_id = request.query_params.get("taskId", None) + status = request.query_params.get("status", None) + user_messages = await MessageService.get_all_messages( + db, + user.id, + preferred_locale, + page, + page_size, + sort_by, + sort_direction, + message_type, + from_username, + project, + task_id, + status, + ) + return user_messages + + +@router.get("/queries/own/count-unread/") +async def get( + user: AuthUserDTO = Depends(login_required), db: Database = Depends(get_db) +): + """ + Gets count of unread messages + --- + tags: + - notifications + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + responses: + 200: + description: Message info + 500: + description: Internal Server Error + """ + unread_count = await MessageService.has_user_new_messages(user.id, db) + return unread_count -class NotificationsQueriesPostUnreadAPI(Resource): - @tm.pm_only(False) - @token_auth.login_required - def post(self): - """ - Updates notification datetime for user - --- - tags: - - notifications - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - responses: - 404: - description: Notification not found. - 200: - description: Message info - 500: - description: Internal Server Error - """ - user_id = token_auth.current_user() - unread_count = NotificationService.update(user_id) - return unread_count, 200 +@router.post("/queries/own/post-unread/") +async def post( + user: AuthUserDTO = Depends(login_required), db: Database = Depends(get_db) +): + """ + Updates notification datetime for user + --- + tags: + - notifications + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + responses: + 404: + description: Notification not found. 
+ 200: + description: Message info + 500: + description: Internal Server Error + """ + unread_count = await NotificationService.update(user.id, db) + return unread_count diff --git a/backend/api/organisations/campaigns.py b/backend/api/organisations/campaigns.py index 9abaf40440..2f5eba6751 100644 --- a/backend/api/organisations/campaigns.py +++ b/backend/api/organisations/campaigns.py @@ -1,155 +1,198 @@ -from flask_restful import Resource +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse +from backend.db import get_db +from backend.models.dtos.campaign_dto import CampaignListDTO +from backend.models.dtos.user_dto import AuthUserDTO from backend.services.campaign_service import CampaignService from backend.services.organisation_service import OrganisationService -from backend.services.users.authentication_service import token_auth +from backend.services.users.authentication_service import login_required +router = APIRouter( + prefix="/organisations", + tags=["organisations"], + responses={404: {"description": "Not found"}}, +) -class OrganisationsCampaignsAPI(Resource): - @token_auth.login_required - def post(self, organisation_id, campaign_id): - """ - Assigns a campaign to an organisation - --- - tags: - - campaigns - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: organisation_id - in: path - description: Unique organisation ID - required: true - type: integer - default: 1 - - name: campaign_id - in: path - description: Unique campaign ID - required: true - type: integer - default: 1 - responses: - 200: - description: Organisation and campaign assigned successfully - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - users have submitted mapping - 404: - description: Project not found - 500: - description: Internal Server Error - """ - if OrganisationService.can_user_manage_organisation( - organisation_id, token_auth.current_user() + +@router.post("/{organisation_id}/campaigns/{campaign_id}/") +async def post( + request: Request, + organisation_id: int, + campaign_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Assigns a campaign to an organisation + --- + tags: + - campaigns + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: organisation_id + in: path + description: Unique organisation ID + required: true + type: integer + default: 1 + - name: campaign_id + in: path + description: Unique campaign ID + required: true + type: integer + default: 1 + responses: + 200: + description: Organisation and campaign assigned successfully + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden - users have submitted mapping + 404: + description: Project not found + 500: + description: Internal Server Error + """ + if await OrganisationService.can_user_manage_organisation( + organisation_id, request.user.display_name, db + ): + if await CampaignService.campaign_organisation_exists( + campaign_id, organisation_id, db ): - if CampaignService.campaign_organisation_exists( + message = "Campaign {} is already assigned to organisation {}.".format( campaign_id, organisation_id - ): - message = "Campaign 
{} is already assigned to organisation {}.".format( - campaign_id, organisation_id - ) - return {"Error": message, "SubCode": "CampaignAlreadyAssigned"}, 400 - - CampaignService.create_campaign_organisation(organisation_id, campaign_id) + ) + return JSONResponse( + content={"Error": message, "SubCode": "CampaignAlreadyAssigned"}, + status_code=400, + ) + async with db.transaction(): + await CampaignService.create_campaign_organisation( + organisation_id, campaign_id, db + ) message = "campaign with id {} assigned for organisation with id {}".format( campaign_id, organisation_id ) - return {"Success": message}, 200 - else: - return { + return JSONResponse(content={"Success": message}, status_code=200) + else: + return JSONResponse( + content={ "Error": "User is not a manager of the organisation", "SubCode": "UserNotPermitted", - }, 403 + }, + status_code=403, + ) - def get(self, organisation_id): - """ - Returns all campaigns related to an organisation - --- - tags: - - campaigns - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: false - type: string - default: Token sessionTokenHere== - - name: organisation_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: Success - 404: - description: Organisation not found - 500: - description: Internal Server Error - """ - campaigns = CampaignService.get_organisation_campaigns_as_dto(organisation_id) - return campaigns.to_primitive(), 200 - @token_auth.login_required - def delete(self, organisation_id, campaign_id): - """ - Un-assigns an organization from an campaign - --- - tags: - - campaigns - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: organisation_id - in: path - description: Unique organisation ID - required: true - type: integer - default: 1 - - name: campaign_id - in: path - description: Unique campaign ID - required: true - type: integer - default: 1 - responses: - 200: - description: Organisation and campaign unassociated successfully - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - users have submitted mapping - 404: - description: Project not found - 500: - description: Internal Server Error - """ - if OrganisationService.can_user_manage_organisation( - organisation_id, token_auth.current_user() - ): - CampaignService.delete_organisation_campaign(organisation_id, campaign_id) - return ( - {"Success": "Organisation and campaign unassociated successfully"}, - 200, +@router.get("/{organisation_id}/campaigns/", response_model=CampaignListDTO) +async def get(organisation_id: int, db: Database = Depends(get_db)): + """ + Returns all campaigns related to an organisation + --- + tags: + - campaigns + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: false + type: string + default: Token sessionTokenHere== + - name: organisation_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: Success + 404: + description: Organisation not found + 500: + description: Internal Server Error + """ + campaigns = await CampaignService.get_organisation_campaigns_as_dto( + organisation_id, db + ) + return campaigns + + 
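For orientation, a minimal sketch of how per-module routers like the two defined above are typically mounted on the application; the import paths follow the files in this diff, while the `api` object, the module it lives in, its title, and the `/api/v2` prefix are assumptions for illustration only:

from fastapi import FastAPI

# Modules from this diff; each one exposes `router = APIRouter(prefix=..., tags=...)`.
from backend.api.notifications import resources as notifications
from backend.api.organisations import campaigns as organisation_campaigns

# Assumed application object and title.
api = FastAPI(title="Tasking Manager API")

# Mounting every router under a shared version prefix (assumed here to be /api/v2)
# yields URLs such as /api/v2/notifications/{message_id}/ and
# /api/v2/organisations/{organisation_id}/campaigns/{campaign_id}/.
API_PREFIX = "/api/v2"
for module in (notifications, organisation_campaigns):
    api.include_router(module.router, prefix=API_PREFIX)
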
+@router.delete("/{organisation_id}/campaigns/{campaign_id}/") +async def delete( + request: Request, + organisation_id: int, + campaign_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Un-assigns an organization from an campaign + --- + tags: + - campaigns + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: organisation_id + in: path + description: Unique organisation ID + required: true + type: integer + default: 1 + - name: campaign_id + in: path + description: Unique campaign ID + required: true + type: integer + default: 1 + responses: + 200: + description: Organisation and campaign unassociated successfully + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden - users have submitted mapping + 404: + description: Project not found + 500: + description: Internal Server Error + """ + if await OrganisationService.can_user_manage_organisation( + organisation_id, request.user.display_name, db + ): + async with db.transaction(): + await CampaignService.delete_organisation_campaign( + organisation_id, campaign_id, db + ) + return JSONResponse( + content={ + "Success": "Organisation and campaign unassociated successfully" + }, + status_code=200, ) - else: - return { + else: + return JSONResponse( + content={ "Error": "User is not a manager of the organisation", "SubCode": "UserNotPermitted", - }, 403 + }, + status_code=403, + ) diff --git a/backend/api/organisations/resources.py b/backend/api/organisations/resources.py index 86e2badb0b..ba45ff75db 100644 --- a/backend/api/organisations/resources.py +++ b/backend/api/organisations/resources.py @@ -1,436 +1,520 @@ -from distutils.util import strtobool -from flask_restful import Resource, request, current_app -from schematics.exceptions import DataError +from databases import Database +from fastapi import APIRouter, Depends, Query, Request +from fastapi.responses import JSONResponse, Response +from loguru import logger +from backend.db import get_db from backend.models.dtos.organisation_dto import ( + ListOrganisationsDTO, NewOrganisationDTO, + OrganisationDTO, UpdateOrganisationDTO, ) +from backend.models.dtos.stats_dto import OrganizationStatsDTO +from backend.models.dtos.user_dto import AuthUserDTO +from backend.models.postgis.statuses import OrganisationType from backend.models.postgis.user import User from backend.services.organisation_service import ( OrganisationService, OrganisationServiceError, ) -from backend.models.postgis.statuses import OrganisationType -from backend.services.users.authentication_service import token_auth +from backend.services.users.authentication_service import login_required +router = APIRouter( + prefix="/organisations", + tags=["organisations"], + responses={404: {"description": "Not found"}}, +) -class OrganisationsBySlugRestAPI(Resource): - @token_auth.login_required(optional=True) - def get(self, slug): - """ - Retrieves an organisation - --- - tags: - - organisations - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - type: string - default: Token sessionTokenHere== - - name: slug - in: path - description: The unique organisation slug - required: true - type: string - default: hot - - in: query - name: omitManagerList - type: boolean - description: Set it to true if you don't want the managers 
list on the response. - default: False - responses: - 200: - description: Organisation found - 404: - description: Organisation not found - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - if authenticated_user_id is None: - user_id = 0 - else: - user_id = authenticated_user_id - # Validate abbreviated. - omit_managers = strtobool(request.args.get("omitManagerList", "false")) - organisation_dto = OrganisationService.get_organisation_by_slug_as_dto( - slug, user_id, omit_managers - ) - return organisation_dto.to_primitive(), 200 +@router.get("/{organisation_id:int}/", response_model=OrganisationDTO) +async def retrieve_organisation( + request: Request, + organisation_id: int, + db: Database = Depends(get_db), + omit_managers: bool = Query( + False, + alias="omitManagerList", + description="Omit organization managers list from the response.", + ), +): + """ + Retrieves an organisation + --- + tags: + - organisations + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + type: string + default: Token sessionTokenHere== + - name: organisation_id + in: path + description: The unique organisation ID + required: true + type: integer + default: 1 + - in: query + name: omitManagerList + type: boolean + description: Set it to true if you don't want the managers list on the response. + default: False + responses: + 200: + description: Organisation found + 401: + description: Unauthorized - Invalid credentials + 404: + description: Organisation not found + 500: + description: Internal Server Error + """ + authenticated_user_id = ( + request.user.display_name + if request.user and request.user.display_name + else None + ) + if authenticated_user_id is None: + user_id = 0 + else: + user_id = authenticated_user_id + # Validate abbreviated. + organisation_dto = await OrganisationService.get_organisation_by_id_as_dto( + organisation_id, user_id, omit_managers, db + ) + return organisation_dto -class OrganisationsRestAPI(Resource): - @token_auth.login_required - def post(self): - """ - Creates a new organisation - --- - tags: - - organisations - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object for creating organisation - schema: - properties: - name: - type: string - default: HOT - slug: - type: string - default: hot - logo: - type: string - default: https://cdn.hotosm.org/tasking-manager/uploads/1588741335578_hot-logo.png - url: + +@router.get("/{slug:str}/", response_model=OrganisationDTO) +async def retrieve_organisation_by_slug( + request: Request, + slug: str, + db: Database = Depends(get_db), + omit_managers: bool = Query( + True, + alias="omitManagerList", + description="Omit organization managers list from the response.", + ), +): + """ + Retrieves an organisation + --- + tags: + - organisations + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + type: string + default: Token sessionTokenHere== + - name: slug + in: path + description: The unique organisation slug + required: true + type: string + default: hot + - in: query + name: omitManagerList + type: boolean + description: Set it to true if you don't want the managers list on the response. 
+ default: False + responses: + 200: + description: Organisation found + 404: + description: Organisation not found + 500: + description: Internal Server Error + """ + authenticated_user_id = ( + request.user.display_name + if request.user and request.user.display_name + else None + ) + if authenticated_user_id is None: + user_id = 0 + else: + user_id = authenticated_user_id + organisation_dto = await OrganisationService.get_organisation_by_slug_as_dto( + slug, user_id, omit_managers, db + ) + return organisation_dto + + +@router.post("/") +async def create_organisation( + organisation_dto: NewOrganisationDTO, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Creates a new organisation + --- + tags: + - organisations + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object for creating organisation + schema: + properties: + name: + type: string + default: HOT + slug: + type: string + default: hot + logo: + type: string + default: https://cdn.hotosm.org/tasking-manager/uploads/1588741335578_hot-logo.png + url: + type: string + default: https://hotosm.org + managers: + type: array + items: type: string - default: https://hotosm.org - managers: - type: array - items: - type: string - default: [ - user_1, - user_2 - ] - responses: - 201: - description: Organisation created successfully - 400: - description: Client Error - Invalid Request - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 402: - description: Duplicate Name - Organisation name already exists - 500: - description: Internal Server Error - """ - request_user = User.get_by_id(token_auth.current_user()) - if request_user.role != 1: - return { + default: [ + user_1, + user_2 + ] + responses: + 201: + description: Organisation created successfully + 400: + description: Client Error - Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 402: + description: Duplicate Name - Organisation name already exists + 500: + description: Internal Server Error + """ + request_user = await User.get_by_id(user.id, db) + if request_user.role != 1: + return JSONResponse( + content={ "Error": "Only admin users can create organisations.", "SubCode": "OnlyAdminAccess", - }, 403 + }, + status_code=403, + ) + try: + if request_user.username not in organisation_dto.managers: + organisation_dto.managers.append(request_user.username) - try: - organisation_dto = NewOrganisationDTO(request.get_json()) - if request_user.username not in organisation_dto.managers: - organisation_dto.managers.append(request_user.username) - organisation_dto.validate() - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 + except Exception as e: + logger.error(f"error validating request: {str(e)}") + return JSONResponse( + content={"Error": str(e), "SubCode": "InvalidData"}, status_code=400 + ) + + try: + async with db.transaction(): + org_id = await OrganisationService.create_organisation(organisation_dto, db) + return JSONResponse(content={"organisationId": org_id}, status_code=201) + except OrganisationServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=400, + ) - try: 
- org_id = OrganisationService.create_organisation(organisation_dto) - return {"organisationId": org_id}, 201 - except OrganisationServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 400 - @token_auth.login_required - def delete(self, organisation_id): - """ - Deletes an organisation - --- - tags: - - organisations - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: organisation_id - in: path - description: The unique organisation ID - required: true - type: integer - default: 1 - responses: - 200: - description: Organisation deleted - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Organisation not found - 500: - description: Internal Server Error - """ - if not OrganisationService.can_user_manage_organisation( - organisation_id, token_auth.current_user() - ): - return { +@router.delete("/{organisation_id}/") +async def delete_organisation( + request: Request, + organisation_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Deletes an organisation + --- + tags: + - organisations + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: organisation_id + in: path + description: The unique organisation ID + required: true + type: integer + default: 1 + responses: + 200: + description: Organisation deleted + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Organisation not found + 500: + description: Internal Server Error + """ + if not await OrganisationService.can_user_manage_organisation( + organisation_id, user.id, db + ): + return JSONResponse( + content={ "Error": "User is not an admin for the org", "SubCode": "UserNotOrgAdmin", - }, 403 - try: - OrganisationService.delete_organisation(organisation_id) - return {"Success": "Organisation deleted"}, 200 - except OrganisationServiceError: - return { + }, + status_code=403, + ) + try: + async with db.transaction(): + await OrganisationService.delete_organisation(organisation_id, db) + return JSONResponse( + content={"Success": "Organisation deleted"}, status_code=200 + ) + + except OrganisationServiceError: + return JSONResponse( + content={ "Error": "Organisation has some projects", "SubCode": "OrgHasProjects", - }, 403 - - @token_auth.login_required(optional=True) - def get(self, organisation_id): - """ - Retrieves an organisation - --- - tags: - - organisations - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - type: string - default: Token sessionTokenHere== - - name: organisation_id - in: path - description: The unique organisation ID - required: true - type: integer - default: 1 - - in: query - name: omitManagerList - type: boolean - description: Set it to true if you don't want the managers list on the response. 
- default: False - responses: - 200: - description: Organisation found - 401: - description: Unauthorized - Invalid credentials - 404: - description: Organisation not found - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - if authenticated_user_id is None: - user_id = 0 - else: - user_id = authenticated_user_id - # Validate abbreviated. - omit_managers = strtobool(request.args.get("omitManagerList", "false")) - organisation_dto = OrganisationService.get_organisation_by_id_as_dto( - organisation_id, user_id, omit_managers + }, + status_code=403, ) - return organisation_dto.to_primitive(), 200 - @token_auth.login_required - def patch(self, organisation_id): - """ - Updates an organisation - --- - tags: - - organisations - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: organisation_id - in: path - description: The unique organisation ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for updating an organisation - schema: - properties: - name: - type: string - default: HOT - slug: - type: string - default: HOT - logo: - type: string - default: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg - url: + +@router.patch("/{organisation_id}/") +async def update_organisation( + organisation_dto: UpdateOrganisationDTO, + request: Request, + organisation_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Updates an organisation + --- + tags: + - organisations + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: organisation_id + in: path + description: The unique organisation ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for updating an organisation + schema: + properties: + name: + type: string + default: HOT + slug: + type: string + default: HOT + logo: + type: string + default: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg + url: + type: string + default: https://hotosm.org + managers: + type: array + items: type: string - default: https://hotosm.org - managers: - type: array - items: - type: string - default: [ - user_1, - user_2 - ] - responses: - 201: - description: Organisation updated successfully - 400: - description: Client Error - Invalid Request - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - if not OrganisationService.can_user_manage_organisation( - organisation_id, token_auth.current_user() - ): - return { + default: [ + user_1, + user_2 + ] + responses: + 201: + description: Organisation updated successfully + 400: + description: Client Error - Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + if not await OrganisationService.can_user_manage_organisation( + organisation_id, user.id, db + ): + return JSONResponse( + content={ "Error": "User is not an admin for the org", "SubCode": "UserNotOrgAdmin", - }, 403 - try: - organisation_dto = UpdateOrganisationDTO(request.get_json()) - organisation_dto.organisation_id = organisation_id - # 
Don't update organisation type and subscription_tier if request user is not an admin - if User.get_by_id(token_auth.current_user()).role != 1: - org = OrganisationService.get_organisation_by_id(organisation_id) - organisation_dto.type = OrganisationType(org.type).name - organisation_dto.subscription_tier = org.subscription_tier - organisation_dto.validate() - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 - - try: - OrganisationService.update_organisation(organisation_dto) - return {"Status": "Updated"}, 200 - except OrganisationServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 402 - - -class OrganisationsStatsAPI(Resource): - def get(self, organisation_id): - """ - Return statistics about projects and active tasks of an organisation - --- - tags: - - organisations - produces: - - application/json - parameters: - - name: organisation_id - in: path - description: The unique organisation ID - required: true - type: integer - default: 1 - responses: - 200: - description: Organisation found - 404: - description: Organisation not found - 500: - description: Internal Server Error - """ - OrganisationService.get_organisation_by_id(organisation_id) - organisation_dto = OrganisationService.get_organisation_stats( - organisation_id, None + }, + status_code=403, + ) + try: + organisation_dto.organisation_id = organisation_id + # Don't update organisation type and subscription_tier if request user is not an admin + user = await User.get_by_id(user.id, db) + if user.role != 1: + org = await OrganisationService.get_organisation_by_id(organisation_id, db) + organisation_dto.type = OrganisationType(org.type).name + organisation_dto.subscription_tier = org.subscription_tier + except Exception as e: + logger.error(f"error validating request: {str(e)}") + return JSONResponse( + content={"Error": str(e), "SubCode": "InvalidData"}, status_code=400 + ) + try: + async with db.transaction(): + await OrganisationService.update_organisation(organisation_dto, db) + return JSONResponse(content={"Status": "Updated"}, status_code=200) + except OrganisationServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=402, ) - return organisation_dto.to_primitive(), 200 -class OrganisationsAllAPI(Resource): - @token_auth.login_required(optional=True) - def get(self): - """ - List all organisations - --- - tags: - - organisations - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - type: string - default: Token sessionTokenHere== - - name: manager_user_id - in: query - description: Filter projects on managers with this user_id - required: false - type: integer - - in: query - name: omitManagerList - type: boolean - description: Set it to true if you don't want the managers list on the response. - default: False - - in: query - name: omitOrgStats - type: boolean - description: Set it to true if you don't want organisation stats on the response. \n - \n - Adds year to date organisation stats to response if set false. 
- default: True +@router.get("/{organisation_id}/statistics/", response_model=OrganizationStatsDTO) +async def get_organisation_with_statistics( + request: Request, + organisation_id: int, + db: Database = Depends(get_db), +): + """ + Return statistics about projects and active tasks of an organisation + --- + tags: + - organisations + produces: + - application/json + parameters: + - name: organisation_id + in: path + description: The unique organisation ID + required: true + type: integer + default: 1 + responses: + 200: + description: Organisation found + 404: + description: Organisation not found + 500: + description: Internal Server Error + """ + await OrganisationService.get_organisation_by_id(organisation_id, db) + organisation_dto = await OrganisationService.get_organisation_stats( + organisation_id, db, None + ) + return organisation_dto - responses: - 200: - description: Organisations found - 400: - description: Client Error - Invalid Request - 401: - description: Unauthorized - Invalid credentials - 403: - description: Unauthorized - Not allowed - 404: - description: Organisations not found - 500: - description: Internal Server Error - """ - # Restrict some of the parameters to some permissions - authenticated_user_id = token_auth.current_user() - try: - manager_user_id = int(request.args.get("manager_user_id")) - except Exception: - manager_user_id = None +@router.get("/", response_model=ListOrganisationsDTO) +async def list_organisation( + request: Request, + db: Database = Depends(get_db), + omit_stats: bool = Query( + True, + alias="omitOrgStats", + description="Omit organization stats from the response.", + ), + omit_managers: bool = Query( + False, + alias="omitManagerList", + description="Omit organization managers list from the response.", + ), + manager_user_id: int = Query( + None, alias="manager_user_id", description="ID of the manager user." + ), +): + """ + List all organisations + --- + tags: + - organisations + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + type: string + default: Token sessionTokenHere== + - name: manager_user_id + in: query + description: Filter projects on managers with this user_id + required: false + type: integer + - in: query + name: omitManagerList + type: boolean + description: Set it to true if you don't want the managers list on the response. + default: False + - in: query + name: omitOrgStats + type: boolean + description: Set it to true if you don't want organisation stats on the response. \n + \n + Adds year to date organisation stats to response if set false. + default: True - if manager_user_id is not None and not authenticated_user_id: - return ( - { - "Error": "Unauthorized - Filter by manager_user_id is not allowed to unauthenticated requests", - "SubCode": "LoginToFilterManager", - }, - 403, - ) + responses: + 200: + description: Organisations found + 400: + description: Client Error - Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Unauthorized - Not allowed + 404: + description: Organisations not found + 500: + description: Internal Server Error + """ + authenticated_user_id = ( + request.user.display_name + if request.user and request.user.display_name + else None + ) - # Validate abbreviated. 
- omit_managers = bool(strtobool(request.args.get("omitManagerList", "false"))) - omit_stats = bool(strtobool(request.args.get("omitOrgStats", "true"))) - # Obtain organisations - results_dto = OrganisationService.get_organisations_as_dto( - manager_user_id, - authenticated_user_id, - omit_managers, - omit_stats, + if manager_user_id is not None and not authenticated_user_id: + return Response( + content={ + "Error": "Unauthorized - Filter by manager_user_id is not allowed to unauthenticated requests", + "SubCode": "LoginToFilterManager", + }, + status_code=403, ) - return results_dto.to_primitive(), 200 + results_dto = await OrganisationService.get_organisations_as_dto( + manager_user_id, authenticated_user_id, omit_managers, omit_stats, db + ) + return results_dto diff --git a/backend/api/partners/resources.py b/backend/api/partners/resources.py index bd3b4ff247..2fd34e527d 100644 --- a/backend/api/partners/resources.py +++ b/backend/api/partners/resources.py @@ -1,400 +1,475 @@ -from flask_restful import Resource, request +# from flask_restful import Resource, request +import json +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse -from backend.services.partner_service import PartnerService, PartnerServiceError -from backend.services.users.authentication_service import token_auth +from backend.db import get_db +from backend.models.dtos.partner_dto import PartnerDTO +from backend.models.dtos.user_dto import AuthUserDTO from backend.models.postgis.user import User +from backend.services.partner_service import PartnerService, PartnerServiceError +from backend.services.users.authentication_service import login_required + +router = APIRouter( + prefix="/partners", + tags=["partners"], + responses={404: {"description": "Not found"}}, +) -class PartnerRestAPI(Resource): - @token_auth.login_required - def get(self, partner_id): - """ - Get partner by id - --- - tags: - - partners - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: partner_id - in: path - description: The id of the partner - required: true - type: integer - default: 1 - responses: - 200: - description: Partner found - 401: - description: Unauthorized - Invalid credentials - 404: - description: Partner not found - 500: - description: Internal Server Error - """ +@router.get("/{partner_id:int}/") +async def retrieve_partner( + request: Request, + partner_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Get partner by id + --- + tags: + - partners + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: partner_id + in: path + description: The id of the partner + required: true + type: integer + default: 1 + responses: + 200: + description: Partner found + 401: + description: Unauthorized - Invalid credentials + 404: + description: Partner not found + 500: + description: Internal Server Error + """ - request_user = User.get_by_id(token_auth.current_user()) - if request_user.role != 1: - return { + request_user = await User.get_by_id(user.id, db) + if request_user.role != 1: + return JSONResponse( + content={ "Error": "Only admin users can manage partners.", "SubCode": "OnlyAdminAccess", - }, 403 + }, + 
status_code=403, + ) - partner = PartnerService.get_partner_by_id(partner_id) - if partner: - partner_dict = partner.as_dto().to_primitive() - website_links = partner_dict.pop("website_links", []) - for i, link in enumerate(website_links, start=1): - partner_dict[f"name_{i}"] = link["name"] - partner_dict[f"url_{i}"] = link["url"] - return partner_dict, 200 - else: - return {"message": "Partner not found"}, 404 + partner = await PartnerService.get_partner_by_id(partner_id, db) + if partner: + partner_dto = PartnerDTO.from_record(partner) + partner_dict = partner_dto.dict() + website_links = partner_dict.pop("website_links", []) + for i, link in enumerate(website_links, start=1): + partner_dict[f"name_{i}"] = link.get("name") + partner_dict[f"url_{i}"] = link.get("url") + + return partner_dict + else: + return JSONResponse(content={"message": "Partner not found"}, status_code=404) - @token_auth.login_required - def delete(self, partner_id): - """ - Deletes an existing partner - --- - tags: - - partners - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - type: string - required: true - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language partner is requesting - type: string - required: true - default: en - - name: partner_id - in: path - description: Partner ID - required: true - type: integer - default: 1 - responses: - 200: - description: Partner deleted successfully - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Partner not found - 500: - description: Internal Server Error - """ - request_user = User.get_by_id(token_auth.current_user()) - if request_user.role != 1: - return { + +@router.delete("/{partner_id}/") +async def delete_partner( + request: Request, + partner_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Deletes an existing partner + --- + tags: + - partners + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + type: string + required: true + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language partner is requesting + type: string + required: true + default: en + - name: partner_id + in: path + description: Partner ID + required: true + type: integer + default: 1 + responses: + 200: + description: Partner deleted successfully + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Partner not found + 500: + description: Internal Server Error + """ + request_user = await User.get_by_id(user.id, db) + if request_user.role != 1: + return JSONResponse( + content={ "Error": "Only admin users can manage partners.", "SubCode": "OnlyAdminAccess", - }, 403 + }, + status_code=403, + ) - try: - PartnerService.delete_partner(partner_id) - return {"Success": "Partner deleted"}, 200 - except PartnerServiceError as e: - return {"message": str(e)}, 404 + try: + async with db.transaction(): + await PartnerService.delete_partner(partner_id, db) + return JSONResponse(content={"Success": "Partner deleted"}, status_code=200) + except PartnerServiceError as e: + return JSONResponse(content={"message": str(e)}, status_code=404) - @token_auth.login_required - def put(self, partner_id): - """ - Updates an existing partner - --- - tags: - - partners - produces: - - application/json - parameters: - 
- in: header - name: Authorization - description: Base64 encoded session token - type: string - required: true - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language partner is requesting - type: string - required: true - default: en - - name: partner_id - in: path - description: Partner ID - required: true - type: integer - - in: body - name: body - required: true - description: JSON object for updating a Partner - schema: - properties: - name: - type: string - example: Cool Partner Inc. - primary_hashtag: - type: string - example: CoolPartner - secondary_hashtag: - type: string - example: CoolPartner,coolProject-* - link_x: - type: string - example: https://x.com/CoolPartner - link_meta: - type: string - example: https://facebook.com/CoolPartner - link_instagram: - type: string - example: https://instagram.com/CoolPartner - current_projects: - type: string - example: 3425,2134,2643 - permalink: - type: string - example: cool-partner - logo_url: - type: string - example: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg - website_links: - type: array - items: - type: string - mapswipe_group_id: + +@router.put("/{partner_id}/") +async def update_partner( + request: Request, + partner_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Updates an existing partner + --- + tags: + - partners + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + type: string + required: true + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language partner is requesting + type: string + required: true + default: en + - name: partner_id + in: path + description: Partner ID + required: true + type: integer + - in: body + name: body + required: true + description: JSON object for updating a Partner + schema: + properties: + name: + type: string + example: Cool Partner Inc. 
+ primary_hashtag: + type: string + example: CoolPartner + secondary_hashtag: + type: string + example: CoolPartner,coolProject-* + link_x: + type: string + example: https://x.com/CoolPartner + link_meta: + type: string + example: https://facebook.com/CoolPartner + link_instagram: + type: string + example: https://instagram.com/CoolPartner + current_projects: + type: string + example: 3425,2134,2643 + permalink: + type: string + example: cool-partner + logo_url: + type: string + example: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg + website_links: + type: array + items: type: string - example: -NL6WXPOdFyWACqwNU2O - responses: - 200: - description: Partner updated successfully - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Partner not found - 409: - description: Resource duplication - 500: - description: Internal Server Error - """ + mapswipe_group_id: + type: string + example: -NL6WXPOdFyWACqwNU2O + responses: + 200: + description: Partner updated successfully + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Partner not found + 409: + description: Resource duplication + 500: + description: Internal Server Error + """ - request_user = User.get_by_id(token_auth.current_user()) - if request_user.role != 1: - return { + request_user = await User.get_by_id(user.id, db) + if request_user.role != 1: + return JSONResponse( + content={ "Error": "Only admin users can manage partners.", "SubCode": "OnlyAdminAccess", - }, 403 + }, + status_code=403, + ) - try: - data = request.json - updated_partner = PartnerService.update_partner(partner_id, data) - updated_partner_dict = updated_partner.as_dto().to_primitive() - return updated_partner_dict, 200 - except PartnerServiceError as e: - return {"message": str(e)}, 404 + try: + data = await request.json() + async with db.transaction(): + updated_partner = await PartnerService.update_partner(partner_id, data, db) + return updated_partner + except PartnerServiceError as e: + return JSONResponse(content={"message": str(e)}, status_code=404) -class PartnersAllRestAPI(Resource): - @token_auth.login_required - def get(self): - """ - Get all active partners - --- - tags: - - partners - produces: - - application/json - responses: - 200: - description: All Partners returned successfully - 500: - description: Internal Server Error - """ +@router.get("/") +async def list_partners( + request: Request, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Get all active partners + --- + tags: + - partners + produces: + - application/json + responses: + 200: + description: All Partners returned successfully + 500: + description: Internal Server Error + """ - request_user = User.get_by_id(token_auth.current_user()) - if request_user.role != 1: - return { + request_user = await User.get_by_id(user.id, db) + if request_user.role != 1: + return JSONResponse( + content={ "Error": "Only admin users can manage partners.", "SubCode": "OnlyAdminAccess", - }, 403 + }, + status_code=403, + ) - partner_ids = PartnerService.get_all_partners() - partners = [] - for partner_id in partner_ids: - partner = PartnerService.get_partner_by_id(partner_id) - partner_dict = partner.as_dto().to_primitive() - website_links = partner_dict.pop("website_links", []) - for i, link in enumerate(website_links, start=1): - partner_dict[f"name_{i}"] = link["name"] - partner_dict[f"url_{i}"] = link["url"] - 
partners.append(partner_dict) - return partners, 200 + partner_ids = await PartnerService.get_all_partners(db) + partners = [] + for partner_id in partner_ids: + partner = await PartnerService.get_partner_by_id(partner_id, db) + partner_dict = PartnerDTO.from_record(partner).dict() + website_links = partner_dict.pop("website_links", []) + for i, link in enumerate(website_links, start=1): + partner_dict[f"name_{i}"] = link.get("name") + partner_dict[f"url_{i}"] = link.get("url") + partners.append(partner_dict) - @token_auth.login_required - def post(self): - """ - Creates a new partner - --- - tags: - - partners - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - type: string - required: true - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language partner is requesting - type: string - required: true - default: en - - in: body - name: body - required: true - description: JSON object for creating a new Partner - schema: - properties: - name: - type: string - required: true - example: "American red cross" - primary_hashtag: - type: string - required: true - example: "#americanredcross" - logo_url: - type: string - example: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg - name: - type: string - example: Cool Partner Inc. - primary_hashtag: - type: string - example: CoolPartner - secondary_hashtag: - type: string - example: CoolPartner,coolProject-* - link_x: - type: string - example: https://x.com/CoolPartner - link_meta: - type: string - example: https://facebook.com/CoolPartner - link_instagram: - type: string - example: https://instagram.com/CoolPartner - current_projects: - type: string - example: 3425,2134,2643 - permalink: - type: string - example: cool-partner - logo_url: - type: string - example: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg - website_links: - type: array - items: - type: string - default: [ - ] - mapswipe_group_id: + return partners + + +@router.post("/") +async def create_partner( + request: Request, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Creates a new partner + --- + tags: + - partners + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + type: string + required: true + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language partner is requesting + type: string + required: true + default: en + - in: body + name: body + required: true + description: JSON object for creating a new Partner + schema: + properties: + name: + type: string + required: true + example: "American red cross" + primary_hashtag: + type: string + required: true + example: "#americanredcross" + logo_url: + type: string + example: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg + name: + type: string + example: Cool Partner Inc. 
+ primary_hashtag: + type: string + example: CoolPartner + secondary_hashtag: + type: string + example: CoolPartner,coolProject-* + link_x: + type: string + example: https://x.com/CoolPartner + link_meta: + type: string + example: https://facebook.com/CoolPartner + link_instagram: + type: string + example: https://instagram.com/CoolPartner + current_projects: + type: string + example: 3425,2134,2643 + permalink: + type: string + example: cool-partner + logo_url: + type: string + example: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg + website_links: + type: array + items: type: string - example: -NL6WXPOdFyWACqwNU2O - responses: - 201: - description: New partner created successfully - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 409: - description: Resource duplication - 500: - description: Internal Server Error - """ + default: [ + ] + mapswipe_group_id: + type: string + example: -NL6WXPOdFyWACqwNU2O + responses: + 201: + description: New partner created successfully + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 409: + description: Resource duplication + 500: + description: Internal Server Error + """ - request_user = User.get_by_id(token_auth.current_user()) - if request_user.role != 1: - return { + request_user = await User.get_by_id(user.id, db) + if request_user.role != 1: + return JSONResponse( + content={ "Error": "Only admin users can manage partners.", "SubCode": "OnlyAdminAccess", - }, 403 + }, + status_code=403, + ) - try: - data = request.json - if data: - if data.get("name") is None: - return {"message": "Partner name is not provided"}, 400 + try: + data = await request.json() + if data: + if data.get("name") is None: + return JSONResponse( + content={"message": "Partner name is not provided"}, status_code=400 + ) - if data.get("primary_hashtag") is None: - return {"message": "Partner primary_hashtag is not provided"}, 400 + if data.get("primary_hashtag") is None: + return JSONResponse( + content={"message": "Partner primary_hashtag is not provided"}, + status_code=400, + ) + async with db.transaction(): + new_partner_id = await PartnerService.create_partner(data, db) + partner_data = await PartnerService.get_partner_by_id(new_partner_id, db) + return partner_data - new_partner = PartnerService.create_partner(data) - partner_dict = new_partner.as_dto().to_primitive() - return partner_dict, 201 - else: - return {"message": "Data not provided"}, 400 - except PartnerServiceError as e: - return {"message": str(e)}, 500 + else: + return JSONResponse( + content={"message": "Data not provided"}, status_code=400 + ) + except PartnerServiceError as e: + return JSONResponse(content={"message": str(e)}, status_code=500) -class PartnerPermalinkRestAPI(Resource): - def get(self, permalink): - """ - Get partner by permalink - --- - tags: - - partners - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: permalink - in: path - description: The permalink of the partner - required: true - type: string - responses: - 200: - description: Partner found - 401: - description: Unauthorized - Invalid credentials - 404: - description: Partner not found - 500: - description: Internal Server Error - """ - partner = PartnerService.get_partner_by_permalink(permalink) - if partner: - partner_dict = partner.as_dto().to_primitive() - website_links = 
partner_dict.pop("website_links", []) - for i, link in enumerate(website_links, start=1): - partner_dict[f"name_{i}"] = link["name"] - partner_dict[f"url_{i}"] = link["url"] - return partner_dict, 200 - else: - return {"message": "Partner not found"}, 404 +@router.get("/{permalink:str}/") +async def get_partner( + request: Request, + permalink: str, + db: Database = Depends(get_db), +): + """ + Get partner by permalink + --- + tags: + - partners + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: permalink + in: path + description: The permalink of the partner + required: true + type: string + responses: + 200: + description: Partner found + 401: + description: Unauthorized - Invalid credentials + 404: + description: Partner not found + 500: + description: Internal Server Error + """ + try: + partner_record = await PartnerService.get_partner_by_permalink(permalink, db) + if not partner_record: + return JSONResponse( + content={"message": "Partner not found"}, status_code=404 + ) + + partner = dict(partner_record) + website_links = json.loads(partner.get("website_links", "[]")) + for i, link in enumerate(website_links, start=1): + partner[f"name_{i}"] = link["name"] + partner[f"url_{i}"] = link["url"] + + partner.pop("website_links", None) + return partner + except Exception as e: + return JSONResponse(content={"message": str(e)}, status_code=500) diff --git a/backend/api/partners/statistics.py b/backend/api/partners/statistics.py index 6d661fec99..40179ffc44 100644 --- a/backend/api/partners/statistics.py +++ b/backend/api/partners/statistics.py @@ -1,15 +1,14 @@ import io -from flask import send_file -from flask_restful import Resource, request from typing import Optional +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import StreamingResponse -from backend.services.partner_service import PartnerService +from backend.db import get_db from backend.exceptions import BadRequest - -# Replaceable by another service which implements the method: -# fetch_partner_stats(id_inside_service, from_date, to_date) -> PartnerStatsDTO from backend.services.mapswipe_service import MapswipeService +from backend.services.partner_service import PartnerService MAPSWIPE_GROUP_EMPTY_SUBCODE = "EMPTY_MAPSWIPE_GROUP" MAPSWIPE_GROUP_EMPTY_MESSAGE = "Mapswipe group is not set for this partner." 
@@ -19,156 +18,169 @@ def is_valid_group_id(group_id: Optional[str]) -> bool: return group_id is not None and len(group_id) > 0 -class FilteredPartnerStatisticsAPI(Resource): - def get(self, permalink: str): - """ - Get partner statistics by id and time range - --- - tags: - - partners - produces: - - application/json - parameters: - - in: query - name: fromDate - type: string - description: Fetch partner statistics from date as yyyy-mm-dd - example: "2024-01-01" - - in: query - name: toDate - type: string - example: "2024-09-01" - description: Fetch partner statistics to date as yyyy-mm-dd - - name: partner_id - in: path - - name: permalink - in: path - description: The permalink of the partner - required: true - type: string - responses: - 200: - description: Partner found - 401: - description: Unauthorized - Invalid credentials - 404: - description: Partner not found - 500: - description: Internal Server Error - """ - mapswipe = MapswipeService() - from_date = request.args.get("fromDate") - to_date = request.args.get("toDate") - - if from_date is None: - raise BadRequest( - sub_code="INVALID_TIME_RANGE", - message="fromDate is missing", - from_date=from_date, - to_date=to_date, - ) - - if to_date is None: - raise BadRequest( - sub_code="INVALID_TIME_RANGE", - message="toDate is missing", - from_date=from_date, - to_date=to_date, - ) - - if from_date > to_date: - raise BadRequest( - sub_code="INVALID_TIME_RANGE", - message="fromDate should be less than toDate", - from_date=from_date, - to_date=to_date, - ) - - partner = PartnerService.get_partner_by_permalink(permalink) - - if not is_valid_group_id(partner.mapswipe_group_id): - raise BadRequest( - sub_code=MAPSWIPE_GROUP_EMPTY_SUBCODE, - message=MAPSWIPE_GROUP_EMPTY_MESSAGE, - ) - - return ( - mapswipe.fetch_filtered_partner_stats( - partner.id, partner.mapswipe_group_id, from_date, to_date - ).to_primitive(), - 200, +router = APIRouter( + prefix="/partners", + tags=["partners"], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/{permalink:str}/filtered-statistics/") +async def get_statistics( + request: Request, + permalink: str, + db: Database = Depends(get_db), +): + """ + Get partner statistics by id and time range + --- + tags: + - partners + produces: + - application/json + parameters: + - in: query + name: fromDate + type: string + description: Fetch partner statistics from date as yyyy-mm-dd + example: "2024-01-01" + - in: query + name: toDate + type: string + example: "2024-09-01" + description: Fetch partner statistics to date as yyyy-mm-dd + - name: partner_id + in: path + - name: permalink + in: path + description: The permalink of the partner + required: true + type: string + responses: + 200: + description: Partner found + 401: + description: Unauthorized - Invalid credentials + 404: + description: Partner not found + 500: + description: Internal Server Error + """ + mapswipe = MapswipeService() + from_date = request.query_params.get("fromDate") + to_date = request.query_params.get("toDate") + + if from_date is None: + raise BadRequest( + sub_code="INVALID_TIME_RANGE", + message="fromDate is missing", + from_date=from_date, + to_date=to_date, + ) + + if to_date is None: + raise BadRequest( + sub_code="INVALID_TIME_RANGE", + message="toDate is missing", + from_date=from_date, + to_date=to_date, + ) + + if from_date > to_date: + raise BadRequest( + sub_code="INVALID_TIME_RANGE", + message="fromDate should be less than toDate", + from_date=from_date, + to_date=to_date, ) + partner = await 
PartnerService.get_partner_by_permalink(permalink, db) -class GroupPartnerStatisticsAPI(Resource): - def get(self, permalink: str): - """ - Get partner statistics by id and broken down by each contributor. - This API is paginated with limit and offset query parameters. - --- - tags: - - partners - produces: - - application/json - parameters: - - in: query - name: limit - description: The number of partner members to fetch - type: integer - example: 10 - - in: query - name: offset - description: The starting index from which to fetch partner members - type: integer - example: 0 - - in: query - name: downloadAsCSV - description: Download users in this group as CSV - type: boolean - example: false - - name: permalink - in: path - description: The permalink of the partner - required: true - type: string - responses: - 200: - description: Partner found - 401: - description: Unauthorized - Invalid credentials - 404: - description: Partner not found - 500: - description: Internal Server Error - """ - - mapswipe = MapswipeService() - partner = PartnerService.get_partner_by_permalink(permalink) - - if not is_valid_group_id(partner.mapswipe_group_id): - raise BadRequest( - sub_code=MAPSWIPE_GROUP_EMPTY_SUBCODE, - message=MAPSWIPE_GROUP_EMPTY_MESSAGE, - ) - - limit = int(request.args.get("limit", 10)) - offset = int(request.args.get("offset", 0)) - download_as_csv = bool(request.args.get("downloadAsCSV", "false") == "true") - - group_dto = mapswipe.fetch_grouped_partner_stats( - partner.id, - partner.mapswipe_group_id, - limit, - offset, - download_as_csv, + if not is_valid_group_id(partner.mapswipe_group_id): + raise BadRequest( + sub_code=MAPSWIPE_GROUP_EMPTY_SUBCODE, + message=MAPSWIPE_GROUP_EMPTY_MESSAGE, ) - if download_as_csv: - return send_file( - io.BytesIO(group_dto.to_csv().encode()), - mimetype="text/csv", - as_attachment=True, - download_name="partner_members.csv", - ) + return mapswipe.fetch_filtered_partner_stats( + partner.id, partner.mapswipe_group_id, from_date, to_date + ) + + +@router.get("/{permalink:str}/general-statistics/") +async def get_statistics( + request: Request, + permalink: str, + db: Database = Depends(get_db), +): + """ + Get partner statistics by id and broken down by each contributor. + This API is paginated with limit and offset query parameters. 
+ --- + tags: + - partners + produces: + - application/json + parameters: + - in: query + name: limit + description: The number of partner members to fetch + type: integer + example: 10 + - in: query + name: offset + description: The starting index from which to fetch partner members + type: integer + example: 0 + - in: query + name: downloadAsCSV + description: Download users in this group as CSV + type: boolean + example: false + - name: permalink + in: path + description: The permalink of the partner + required: true + type: string + responses: + 200: + description: Partner found + 401: + description: Unauthorized - Invalid credentials + 404: + description: Partner not found + 500: + description: Internal Server Error + """ + + mapswipe = MapswipeService() + partner = await PartnerService.get_partner_by_permalink(permalink, db) + + if not is_valid_group_id(partner.mapswipe_group_id): + raise BadRequest( + sub_code=MAPSWIPE_GROUP_EMPTY_SUBCODE, + message=MAPSWIPE_GROUP_EMPTY_MESSAGE, + ) + + limit = int(request.query_params.get("limit", 10)) + offset = int(request.query_params.get("offset", 0)) + download_as_csv = bool(request.query_params.get("downloadAsCSV", "false") == "true") + + group_dto = mapswipe.fetch_grouped_partner_stats( + partner.id, + partner.mapswipe_group_id, + limit, + offset, + download_as_csv, + ) + + if download_as_csv: + csv_content = group_dto.to_csv() + csv_buffer = io.StringIO(csv_content) + return StreamingResponse( + content=csv_buffer, + media_type="text/csv", + headers={"Content-Disposition": "attachment; filename=partner_members.csv"}, + ) - return group_dto.to_primitive(), 200 + return group_dto diff --git a/backend/api/projects/actions.py b/backend/api/projects/actions.py index 23bcb73e6f..d0b41b35ec 100644 --- a/backend/api/projects/actions.py +++ b/backend/api/projects/actions.py @@ -1,406 +1,468 @@ -import threading - -from flask_restful import Resource, request, current_app -from schematics.exceptions import DataError +from databases import Database +from fastapi import APIRouter, BackgroundTasks, Body, Depends, Request +from fastapi.responses import JSONResponse +from loguru import logger +from shapely import GEOSException +from shapely.errors import TopologicalError -from backend.models.dtos.message_dto import MessageDTO +from backend.db import get_db from backend.models.dtos.grid_dto import GridDTO -from backend.services.project_service import ProjectService +from backend.models.dtos.message_dto import MessageDTO +from backend.models.dtos.user_dto import AuthUserDTO +from backend.models.postgis.utils import InvalidGeoJson +from backend.services.grid.grid_service import GridService +from backend.services.interests_service import InterestService +from backend.services.messaging.message_service import MessageService from backend.services.project_admin_service import ( ProjectAdminService, ProjectAdminServiceError, ) -from backend.services.grid.grid_service import GridService -from backend.services.messaging.message_service import MessageService -from backend.services.users.authentication_service import token_auth, tm -from backend.services.interests_service import InterestService -from backend.models.postgis.utils import InvalidGeoJson +from backend.services.project_service import ProjectService +from backend.services.users.authentication_service import login_required -from shapely import GEOSException -from shapely.errors import TopologicalError +router = APIRouter( + prefix="/projects", + tags=["projects"], + responses={404: {"description": "Not 
found"}}, +) -class ProjectsActionsTransferAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Transfers a project to a new user - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: username of the new owner - schema: - properties: - username: - type: string - responses: - 200: - description: Project ownership transferred successfully - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - try: - username = request.get_json()["username"] - except Exception: - return {"Error": "Username not provided", "SubCode": "InvalidData"}, 400 - try: - authenticated_user_id = token_auth.current_user() - ProjectAdminService.transfer_project_to( - project_id, authenticated_user_id, username - ) - return {"Success": "Project Transferred"}, 200 - except (ValueError, ProjectAdminServiceError) as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 +@router.post("/{project_id}/actions/transfer-ownership/") +async def post( + request: Request, + background_tasks: BackgroundTasks, + project_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + data: dict = Body(...), +): + """ + Transfers a project to a new user + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: username of the new owner + schema: + properties: + username: + type: string + responses: + 200: + description: Project ownership transferred successfully + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + try: + username = data["username"] + except Exception: + return JSONResponse( + content={"Error": "Username not provided", "SubCode": "InvalidData"}, + status_code=400, + ) + try: + await ProjectAdminService.transfer_project_to( + project_id, user.id, username, db, background_tasks + ) + return JSONResponse(content={"Success": "Project Transferred"}, status_code=200) + except (ValueError, ProjectAdminServiceError) as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) -class ProjectsActionsMessageContributorsAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Send message to all contributors of a project - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for creating message - schema: - properties: - subject: - type: string - default: Thanks - required: true - message: 
- type: string - default: Thanks for your contribution - required: true - responses: - 200: - description: Message sent successfully - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - try: - authenticated_user_id = token_auth.current_user() - message_dto = MessageDTO(request.get_json()) - message_dto.from_user_id = authenticated_user_id - message_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return { - "Error": "Unable to send message to mappers", +@router.post("/{project_id}/actions/message-contributors/") +async def post( + request: Request, + background_tasks: BackgroundTasks, + project_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Send message to all contributors of a project + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for creating message + schema: + properties: + subject: + type: string + default: Thanks + required: true + message: + type: string + default: Thanks for your contribution + required: true + responses: + 200: + description: Message sent successfully + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + try: + request_json = await request.json() + request_json["from_user_id"] = user.id + message_dto = MessageDTO(**request_json) + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={ + "Error": "Unable to send message to contributors", "SubCode": "InvalidData", - }, 400 - - if not ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id - ): - return { + }, + status_code=400, + ) + if not await ProjectAdminService.is_user_action_permitted_on_project( + user.id, project_id, db + ): + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 - threading.Thread( - target=MessageService.send_message_to_all_contributors, - args=(project_id, message_dto), - ).start() - return {"Success": "Messages started"}, 200 + }, + status_code=403, + ) + try: + background_tasks.add_task( + MessageService.send_message_to_all_contributors, + project_id, + message_dto, + ) + return JSONResponse(content={"Success": "Messages started"}, status_code=200) + except Exception as e: + logger.error(f"Error starting background task: {str(e)}") + return JSONResponse( + content={"Error": "Failed to send messages"}, status_code=500 + ) -class ProjectsActionsFeatureAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Set a project as featured - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: Featured projects - 400: - description: Bad request - 403: - description: 
Forbidden - 404: - description: Project not found - 500: - description: Internal Server Error - """ - try: - authenticated_user_id = token_auth.current_user() - if not ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id - ): - raise ValueError() - except ValueError: - return { +@router.post("/{project_id}/actions/feature/") +async def post( + request: Request, + project_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Set a project as featured + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: Featured projects + 400: + description: Bad request + 403: + description: Forbidden + 404: + description: Project not found + 500: + description: Internal Server Error + """ + try: + if not await ProjectAdminService.is_user_action_permitted_on_project( + user.id, project_id, db + ): + raise ValueError() + except ValueError: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 + }, + status_code=403, + ) - try: - ProjectService.set_project_as_featured(project_id) - return {"Success": True}, 200 - except ValueError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 + try: + await ProjectService.set_project_as_featured(project_id, db) + return JSONResponse(content={"Success": True}, status_code=200) + except ValueError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) -class ProjectsActionsUnFeatureAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Unset a project as featured - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: Project is no longer featured - 400: - description: Bad request - 403: - description: Forbidden - 404: - description: Project not found - 500: - description: Internal Server Error - """ - try: - authenticated_user_id = token_auth.current_user() - if not ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id - ): - raise ValueError() - except ValueError: - return { +@router.post("/{project_id}/actions/remove-feature/") +async def post( + request: Request, + project_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Unset a project as featured + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: Project is no longer featured + 400: + description: Bad request + 403: + description: Forbidden + 404: + description: Project not found + 500: + description: 
Internal Server Error + """ + try: + if not await ProjectAdminService.is_user_action_permitted_on_project( + user.id, project_id, db + ): + raise ValueError() + except ValueError: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 + }, + status_code=403, + ) - try: - ProjectService.unset_project_as_featured(project_id) - return {"Success": True}, 200 - except ValueError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 + try: + await ProjectService.unset_project_as_featured(project_id, db) + return JSONResponse(content={"Success": True}, status_code=200) + except ValueError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) -class ProjectsActionsSetInterestsAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Creates a relationship between project and interests - --- - tags: - - interests - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for creating/updating project and interests relationships - schema: - properties: - interests: - type: array - items: - type: integer - responses: - 200: - description: New project interest relationship created - 400: - description: Invalid Request - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - try: - authenticated_user_id = token_auth.current_user() - if not ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id - ): - raise ValueError() - except ValueError: - return { +@router.post("/{project_id}/actions/set-interests/") +async def post( + request: Request, + project_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + data: dict = Body(...), +): + """ + Creates a relationship between project and interests + --- + tags: + - interests + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for creating/updating project and interests relationships + schema: + properties: + interests: + type: array + items: + type: integer + responses: + 200: + description: New project interest relationship created + 400: + description: Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + try: + if not await ProjectAdminService.is_user_action_permitted_on_project( + user.id, project_id, db + ): + raise ValueError() + except ValueError: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 - - data = request.get_json() - project_interests = InterestService.create_or_update_project_interests( - project_id, data["interests"] + }, + status_code=403, ) - return 
project_interests.to_primitive(), 200 + project_interests = await InterestService.create_or_update_project_interests( + project_id, data["interests"], db + ) + return project_interests.model_dump(by_alias=True) -class ProjectActionsIntersectingTilesAPI(Resource): - @tm.pm_only() - @token_auth.login_required - def post(self): - """ - Gets the tiles intersecting the aoi - --- - tags: - - grid - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object containing aoi and tasks and bool flag for controlling clip grid to aoi - schema: - properties: - clipToAoi: - type: boolean - default: true - areaOfInterest: - schema: - properties: - type: - type: string - default: FeatureCollection - features: - type: array - items: - schema: - $ref: "#/definitions/GeoJsonFeature" - grid: - schema: - properties: - type: - type: string - default: FeatureCollection - features: - type: array - items: - schema: - $ref: "#/definitions/GeoJsonFeature" - responses: - 200: - description: Intersecting tasks found successfully - 400: - description: Client Error - Invalid Request - 500: - description: Internal Server Error - """ - try: - grid_dto = GridDTO(request.get_json()) - grid_dto.validate() - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 - try: - grid = GridService.trim_grid_to_aoi(grid_dto) - return grid, 200 - except InvalidGeoJson as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 400 - except TopologicalError: - return { - "error": "Invalid geometry. Polygon is self intersecting", +@router.post("/actions/intersecting-tiles/") +# @tm.pm_only() +async def post( + request: Request, + user: AuthUserDTO = Depends(login_required), + grid_dto: GridDTO = Body(...), +): + """ + Gets the tiles intersecting the aoi + --- + tags: + - grid + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object containing aoi and tasks and bool flag for controlling clip grid to aoi + schema: + properties: + clipToAoi: + type: boolean + default: true + areaOfInterest: + schema: + properties: + type: + type: string + default: FeatureCollection + features: + type: array + items: + schema: + $ref: "#/definitions/GeoJsonFeature" + grid: + schema: + properties: + type: + type: string + default: FeatureCollection + features: + type: array + items: + schema: + $ref: "#/definitions/GeoJsonFeature" + responses: + 200: + description: Intersecting tasks found successfully + 400: + description: Client Error - Invalid Request + 500: + description: Internal Server Error + """ + try: + grid = GridService.trim_grid_to_aoi(grid_dto) + return JSONResponse(content=grid, status_code=200) + except InvalidGeoJson as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=400, + ) + except TopologicalError: + return JSONResponse( + content={ + "Error": "Invalid geometry. 
Polygon is self intersecting", "SubCode": "SelfIntersectingAOI", - }, 400 - except GEOSException as wrapped: - if ( - isinstance(wrapped.args[0], str) - and "Self-intersection" in wrapped.args[0] - ): - return { + }, + status_code=400, + ) + except GEOSException as wrapped: + if isinstance(wrapped.args[0], str) and "Self-intersection" in wrapped.args[0]: + return JSONResponse( + content={ "error": "Invalid geometry. Polygon is self intersecting", "SubCode": "SelfIntersectingAOI", - }, 400 - return {"error": str(wrapped), "SubCode": "InternalServerError"} + }, + status_code=400, + ) + return JSONResponse( + content={"error": str(wrapped), "SubCode": "InternalServerError"} + ) diff --git a/backend/api/projects/activities.py b/backend/api/projects/activities.py index 9d829dfd76..94e2061331 100644 --- a/backend/api/projects/activities.py +++ b/backend/api/projects/activities.py @@ -1,66 +1,76 @@ -from flask_restful import Resource, request +from databases import Database +from fastapi import APIRouter, Depends, Request -from backend.services.stats_service import StatsService +from backend.db import get_db from backend.services.project_service import ProjectService +from backend.services.stats_service import StatsService + +router = APIRouter( + prefix="/projects", + tags=["projects"], + responses={404: {"description": "Not found"}}, +) -class ProjectsActivitiesAPI(Resource): - def get(self, project_id): - """ - Get all user activity on a project - --- - tags: - - projects - produces: - - application/json - parameters: - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - - in: query - name: page - description: Page of results user requested - type: integer - responses: - 200: - description: Project activity - 404: - description: No activity - 500: - description: Internal Server Error - """ - ProjectService.exists(project_id) - page = int(request.args.get("page")) if request.args.get("page") else 1 - activity = StatsService.get_latest_activity(project_id, page) - return activity.to_primitive(), 200 +@router.get("/{project_id}/activities/") +async def get(request: Request, project_id: int, db: Database = Depends(get_db)): + """ + Get all user activity on a project + --- + tags: + - projects + produces: + - application/json + parameters: + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + - in: query + name: page + description: Page of results user requested + type: integer + responses: + 200: + description: Project activity + 404: + description: No activity + 500: + description: Internal Server Error + """ + await ProjectService.exists(project_id, db) + page = ( + int(request.query_params.get("page")) if request.query_params.get("page") else 1 + ) + activity = await StatsService.get_latest_activity(project_id, page, db) + return activity -class ProjectsLastActivitiesAPI(Resource): - def get(self, project_id): - """ - Get latest user activity on all of project task - --- - tags: - - projects - produces: - - application/json - parameters: - - name: project_id - in: path - required: true - type: integer - default: 1 - responses: - 200: - description: Project activity - 404: - description: No activity - 500: - description: Internal Server Error - """ - ProjectService.exists(project_id) - activity = StatsService.get_last_activity(project_id) - return activity.to_primitive(), 200 +@router.get("/{project_id}/activities/latest/") +async def get(request: Request, project_id: int, db: 
Database = Depends(get_db)): + """ + Get latest user activity on all of project task + --- + tags: + - projects + produces: + - application/json + parameters: + - name: project_id + in: path + required: true + type: integer + default: 1 + responses: + 200: + description: Project activity + 404: + description: No activity + 500: + description: Internal Server Error + """ + await ProjectService.exists(project_id, db) + activity = await StatsService.get_last_activity(project_id, db) + return activity diff --git a/backend/api/projects/campaigns.py b/backend/api/projects/campaigns.py index 6d29087970..c6a5ef889e 100644 --- a/backend/api/projects/campaigns.py +++ b/backend/api/projects/campaigns.py @@ -1,154 +1,194 @@ -from flask_restful import Resource, current_app -from schematics.exceptions import DataError +# from flask_restful import Resource, current_app +# from schematics.exceptions import DataError +from databases import Database +from fastapi import APIRouter, Depends +from fastapi.responses import JSONResponse + +from backend.db import get_db from backend.models.dtos.campaign_dto import CampaignProjectDTO +from backend.models.dtos.user_dto import AuthUserDTO from backend.services.campaign_service import CampaignService from backend.services.project_admin_service import ProjectAdminService -from backend.services.users.authentication_service import token_auth +# from backend.services.users.authentication_service import token_auth +from backend.services.users.authentication_service import login_required -class ProjectsCampaignsAPI(Resource): - @token_auth.login_required - def post(self, project_id, campaign_id): - """ - Assign a campaign for a project - --- - tags: - - campaigns - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - - name: campaign_id - in: path - description: Unique campaign ID - required: true - type: integer - default: 1 - responses: - 201: - description: Campaign assigned successfully - 400: - description: Client Error - Invalid Request - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - if not ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id - ): - return { - "Error": "User is not a manager of the project", - "SubCode": "UserPermissionError", - }, 403 - try: - campaign_project_dto = CampaignProjectDTO() - campaign_project_dto.campaign_id = campaign_id - campaign_project_dto.project_id = project_id - campaign_project_dto.validate() - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 +router = APIRouter( + prefix="/projects", + tags=["projects"], + responses={404: {"description": "Not found"}}, +) + + +@router.post("/{project_id}/campaigns/{campaign_id}/") +async def post( + project_id: int, + campaign_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Assign a campaign for a project + --- + tags: + - campaigns + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token 
sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + - name: campaign_id + in: path + description: Unique campaign ID + required: true + type: integer + default: 1 + responses: + 201: + description: Campaign assigned successfully + 400: + description: Client Error - Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + if not await ProjectAdminService.is_user_action_permitted_on_project( + user.id, project_id, db + ): + return { + "Error": "User is not a manager of the project", + "SubCode": "UserPermissionError", + }, 403 + + # Check if the project is already assigned to the campaign + query = """ + SELECT COUNT(*) + FROM campaign_projects + WHERE project_id = :project_id AND campaign_id = :campaign_id + """ + result = await db.fetch_val( + query, values={"project_id": project_id, "campaign_id": campaign_id} + ) - CampaignService.create_campaign_project(campaign_project_dto) - message = ( - "campaign with id {} assigned successfully for project with id {}".format( - campaign_id, project_id - ) + if result > 0: + return JSONResponse( + content={ + "Error": "Project is already assigned to this campaign", + "SubCode": "CampaignAssignmentError", + }, + status_code=400, ) - return ({"Success": message}, 200) - def get(self, project_id): - """ - Gets all campaigns for a project - --- - tags: - - campaigns - produces: - - application/json - parameters: - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: Campaign list returned successfully - 400: - description: Client Error - Invalid Request - 401: - description: Unauthorized - Invalid credentials - 500: - description: Internal Server Error - """ - campaigns = CampaignService.get_project_campaigns_as_dto(project_id) - return campaigns.to_primitive(), 200 + campaign_project_dto = CampaignProjectDTO( + project_id=project_id, campaign_id=campaign_id + ) - @token_auth.login_required - def delete(self, project_id, campaign_id): - """ - Delete a campaign for a project - --- - tags: - - campaigns - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - - name: campaign_id - in: path - description: Unique campaign ID - required: true - type: integer - default: 1 - responses: - 200: - description: Campaign assigned successfully - 400: - description: Client Error - Invalid Request - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - if not ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id - ): - return { + await CampaignService.create_campaign_project(campaign_project_dto, db) + message = "campaign with id {} assigned successfully for project with id {}".format( + campaign_id, project_id + ) + return JSONResponse(content={"Success": message}, status_code=200) + + +@router.get("/{project_id}/campaigns/") +async def get(project_id: int, db: Database = Depends(get_db)): + """ + Gets all campaigns for a project + --- + tags: + - campaigns + produces: + - 
application/json + parameters: + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: Campaign list returned successfully + 400: + description: Client Error - Invalid Request + 401: + description: Unauthorized - Invalid credentials + 500: + description: Internal Server Error + """ + campaigns = await CampaignService.get_project_campaigns_as_dto(project_id, db) + return campaigns + + +@router.delete("/{project_id}/campaigns/{campaign_id}/") +async def delete( + project_id: int, + campaign_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Delete a campaign for a project + --- + tags: + - campaigns + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + - name: campaign_id + in: path + description: Unique campaign ID + required: true + type: integer + default: 1 + responses: + 200: + description: Campaign assigned successfully + 400: + description: Client Error - Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + if not await ProjectAdminService.is_user_action_permitted_on_project( + user.id, project_id, db + ): + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 + }, + status_code=403, + ) - CampaignService.delete_project_campaign(project_id, campaign_id) - return {"Success": "Campaigns Deleted"}, 200 + await CampaignService.delete_project_campaign(project_id, campaign_id, db) + return JSONResponse(content={"Success": "Campaigns Deleted"}, status_code=200) diff --git a/backend/api/projects/contributions.py b/backend/api/projects/contributions.py index 18bfb59146..913924de9f 100644 --- a/backend/api/projects/contributions.py +++ b/backend/api/projects/contributions.py @@ -1,61 +1,72 @@ -from flask_restful import Resource +# from flask_restful import Resource +from databases import Database +from fastapi import APIRouter, Depends + +from backend.db import get_db +from backend.models.postgis.project import Project from backend.services.project_service import ProjectService from backend.services.stats_service import StatsService +router = APIRouter( + prefix="/projects", + tags=["projects"], + responses={404: {"description": "Not found"}}, +) + -class ProjectsContributionsAPI(Resource): - def get(self, project_id): - """ - Get all user contributions on a project - --- - tags: - - projects - produces: - - application/json - parameters: - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: User contributions - 404: - description: No contributions - 500: - description: Internal Server Error - """ - ProjectService.exists(project_id) - contributions = StatsService.get_user_contributions(project_id) - return contributions.to_primitive(), 200 +@router.get("/{project_id}/contributions/") +async def get(project_id: int, db: Database = Depends(get_db)): + """ + Get all user contributions on a project + --- + tags: + - projects + produces: + - application/json + parameters: + - name: project_id + in: path + description: Unique project ID + required: 
true + type: integer + default: 1 + responses: + 200: + description: User contributions + 404: + description: No contributions + 500: + description: Internal Server Error + """ + await Project.exists(project_id, db) + contributions = await StatsService.get_user_contributions(project_id, db) + return contributions -class ProjectsContributionsQueriesDayAPI(Resource): - def get(self, project_id): - """ - Get contributions by day for a project - --- - tags: - - projects - produces: - - application/json - parameters: - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: Project contributions by day - 404: - description: Not found - 500: - description: Internal Server Error - """ - contribs = ProjectService.get_contribs_by_day(project_id) - return contribs.to_primitive(), 200 +@router.get("/{project_id}/contributions/queries/day/") +async def get(project_id: int, db: Database = Depends(get_db)): + """ + Get contributions by day for a project + --- + tags: + - projects + produces: + - application/json + parameters: + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: Project contributions by day + 404: + description: Not found + 500: + description: Internal Server Error + """ + contribs = await ProjectService.get_contribs_by_day(project_id, db) + return contribs diff --git a/backend/api/projects/favorites.py b/backend/api/projects/favorites.py index 5dc9ea9009..8ac1871786 100644 --- a/backend/api/projects/favorites.py +++ b/backend/api/projects/favorites.py @@ -1,122 +1,149 @@ -from flask_restful import Resource +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse -from backend.models.dtos.project_dto import ProjectFavoriteDTO +from backend.db import get_db +from backend.models.dtos.user_dto import AuthUserDTO from backend.services.project_service import ProjectService -from backend.services.users.authentication_service import token_auth +from backend.services.users.authentication_service import login_required +router = APIRouter( + prefix="/projects", + tags=["projects"], + responses={404: {"description": "Not found"}}, +) -class ProjectsFavoritesAPI(Resource): - @token_auth.login_required - def get(self, project_id: int): - """ - Validate that project is favorited - --- - tags: - - favorites - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - responses: - 200: - description: Project favorite - 400: - description: Invalid Request - 401: - description: Unauthorized - Invalid credentials - 500: - description: Internal Server Error - """ - user_id = token_auth.current_user() - favorited = ProjectService.is_favorited(project_id, user_id) - if favorited is True: - return {"favorited": True}, 200 - return {"favorited": False}, 200 +@router.get("/{project_id}/favorite/") +async def get( + request: Request, + project_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Validate that project is favorited + --- + tags: + - favorites + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + 
required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + responses: + 200: + description: Project favorite + 400: + description: Invalid Request + 401: + description: Unauthorized - Invalid credentials + 500: + description: Internal Server Error + """ + user_id = ( + request.user.display_name + if request.user and request.user.display_name + else None + ) - @token_auth.login_required - def post(self, project_id: int): - """ - Set a project as favorite - --- - tags: - - favorites - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - responses: - 200: - description: New favorite created - 400: - description: Invalid Request - 401: - description: Unauthorized - Invalid credentials - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - favorite_dto = ProjectFavoriteDTO() - favorite_dto.project_id = project_id - favorite_dto.user_id = authenticated_user_id + favorited = await ProjectService.is_favorited(project_id, user_id, db) + if favorited is True: + return JSONResponse(content={"favorited": True}, status_code=200) + return JSONResponse(content={"favorited": False}, status_code=200) - ProjectService.favorite(project_id, authenticated_user_id) - return {"project_id": project_id}, 200 - @token_auth.login_required - def delete(self, project_id: int): - """ - Unsets a project as favorite - --- - tags: - - favorites - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - responses: - 200: - description: New favorite created - 400: - description: Invalid Request - 401: - description: Unauthorized - Invalid credentials - 500: - description: Internal Server Error - """ - try: - ProjectService.unfavorite(project_id, token_auth.current_user()) - except ValueError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 400 +@router.post("/{project_id}/favorite/") +async def post( + request: Request, + project_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Set a project as favorite + --- + tags: + - favorites + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + responses: + 200: + description: New favorite created + 400: + description: Invalid Request + 401: + description: Unauthorized - Invalid credentials + 500: + description: Internal Server Error + """ - return {"project_id": project_id}, 200 + await ProjectService.favorite(project_id, user.id, db) + return JSONResponse(content={"project_id": project_id}, status_code=201) + + +@router.delete("/{project_id}/favorite/") +async def delete( + request: Request, + project_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Unsets a project as favorite + --- + 
tags: + - favorites + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + responses: + 200: + description: New favorite created + 400: + description: Invalid Request + 401: + description: Unauthorized - Invalid credentials + 500: + description: Internal Server Error + """ + try: + await ProjectService.unfavorite(project_id, user.id, db) + except ValueError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=400, + ) + return JSONResponse(content={"project_id": project_id}, status_code=200) diff --git a/backend/api/projects/partnerships.py b/backend/api/projects/partnerships.py index 6a5152996d..1bca1c7ab0 100644 --- a/backend/api/projects/partnerships.py +++ b/backend/api/projects/partnerships.py @@ -1,293 +1,342 @@ -from flask_restful import Resource, request -from backend.services.project_partnership_service import ProjectPartnershipService -from backend.services.users.authentication_service import token_auth -from backend.services.project_admin_service import ProjectAdminService +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse + +from backend.db import get_db from backend.models.dtos.project_partner_dto import ( ProjectPartnershipDTO, ProjectPartnershipUpdateDTO, ) +from backend.models.dtos.user_dto import AuthUserDTO from backend.models.postgis.utils import timestamp +from backend.services.project_admin_service import ProjectAdminService +from backend.services.project_partnership_service import ProjectPartnershipService +from backend.services.users.authentication_service import login_required + +router = APIRouter( + prefix="/projects", + tags=["projects"], + responses={404: {"description": "Not found"}}, +) -def check_if_manager(partnership_dto: ProjectPartnershipDTO): - if not ProjectAdminService.is_user_action_permitted_on_project( - token_auth.current_user(), partnership_dto.project_id +@staticmethod +async def check_if_manager( + partnership_dto: ProjectPartnershipDTO, user_id: int, db: Database +): + if not await ProjectAdminService.is_user_action_permitted_on_project( + user_id, partnership_dto.project_id, db ): - return { - "Error": "User is not a manager of the project", - "SubCode": "UserPermissionError", - }, 401 + return JSONResponse( + content={ + "Error": "User is not a manager of the project", + "SubCode": "UserPermissionError", + }, + status_code=401, + ) -class ProjectPartnershipsRestApi(Resource): - @staticmethod - def get(partnership_id: int): - """ - Retrieves a Partnership by id - --- - tags: - - projects - - partners - - partnerships - produces: - - application/json - parameters: - - name: partnership_id - in: path - description: Unique partnership ID - required: true - type: integer - default: 1 - responses: - 200: - description: Partnership found - 404: - description: Partnership not found - 500: - description: Internal Server Error - """ +@router.get("/partnerships/{partnership_id}/") +async def retrieve_partnership( + request: Request, + partnership_id: int, + db: Database = Depends(get_db), +): + """ + Retrieves a Partnership by id + --- + tags: + - projects + - partners + - partnerships + produces: + - application/json + parameters: + - name: partnership_id + in: path + 
description: Unique partnership ID + required: true + type: integer + default: 1 + responses: + 200: + description: Partnership found + 404: + description: Partnership not found + 500: + description: Internal Server Error + """ - partnership_dto = ProjectPartnershipService.get_partnership_as_dto( - partnership_id - ) - return partnership_dto.to_primitive(), 200 + partnership_dto = await ProjectPartnershipService.get_partnership_as_dto( + partnership_id, db + ) + return partnership_dto - @token_auth.login_required - def post(self): - """Assign a partner to a project - --- - tags: - - projects - - partners - - partnerships - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object for creating a partnership - schema: - properties: - projectId: - required: true - type: int - description: Unique project ID - default: 1 - partnerId: - required: true - type: int - description: Unique partner ID - default: 1 - startedOn: - type: date - description: The timestamp when the partner is added to a project. Defaults to current time. - default: "2017-04-11T12:38:49" - endedOn: - type: date - description: The timestamp when the partner ended their work on a project. - default: "2018-04-11T12:38:49" - responses: - 201: - description: Partner project association created - 400: - description: Ivalid dates or started_on was after ended_on - 401: - description: Forbidden, if user is not a manager of this project - 403: - description: Forbidden, if user is not authenticated - 404: - description: Not found - 500: - description: Internal Server Error - """ - partnership_dto = ProjectPartnershipDTO(request.get_json()) - is_not_manager_error = check_if_manager(partnership_dto) - if is_not_manager_error is not None: - return is_not_manager_error +@router.post("/partnerships/") +async def create_partnership( + request: Request, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """Assign a partner to a project + --- + tags: + - projects + - partners + - partnerships + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object for creating a partnership + schema: + properties: + projectId: + required: true + type: int + description: Unique project ID + default: 1 + partnerId: + required: true + type: int + description: Unique partner ID + default: 1 + startedOn: + type: date + description: The timestamp when the partner is added to a project. Defaults to current time. + default: "2017-04-11T12:38:49" + endedOn: + type: date + description: The timestamp when the partner ended their work on a project. 
+ default: "2018-04-11T12:38:49" + responses: + 201: + description: Partner project association created + 400: + description: Ivalid dates or started_on was after ended_on + 401: + description: Forbidden, if user is not a manager of this project + 403: + description: Forbidden, if user is not authenticated + 404: + description: Not found + 500: + description: Internal Server Error + """ + request_data = await request.json() - if partnership_dto.started_on is None: - partnership_dto.started_on = timestamp() + partnership_dto = ProjectPartnershipDTO(**request_data) + is_not_manager_error = await check_if_manager(partnership_dto, user.id, db) + if is_not_manager_error is not None: + return is_not_manager_error - partnership_dto = ProjectPartnershipDTO(request.get_json()) - partnership_id = ProjectPartnershipService.create_partnership( + if partnership_dto.started_on is None: + partnership_dto.started_on = timestamp() + + async with db.transaction(): + partnership_id = await ProjectPartnershipService.create_partnership( + db, partnership_dto.project_id, partnership_dto.partner_id, partnership_dto.started_on, partnership_dto.ended_on, ) - return ( - { + return ( + JSONResponse( + content={ "Success": "Partner {} assigned to project {}".format( partnership_dto.partner_id, partnership_dto.project_id ), "partnershipId": partnership_id, }, - 201, - ) + status_code=201, + ), + ) - @staticmethod - @token_auth.login_required - def patch(partnership_id: int): - """Update the time range for a partner project link - --- - tags: - - projects - - partners - - partnerships - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: partnership_id - in: path - description: Unique partnership ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for creating a partnership - schema: - properties: - startedOn: - type: date - description: The timestamp when the partner is added to a project. Defaults to current time. - default: "2017-04-11T12:38:49" - endedOn: - type: date - description: The timestamp when the partner ended their work on a project. 
- default: "2018-04-11T12:38:49" - responses: - 201: - description: Partner project association created - 400: - description: Ivalid dates or started_on was after ended_on - 401: - description: Forbidden, if user is not a manager of this project - 403: - description: Forbidden, if user is not authenticated - 404: - description: Not found - 500: - description: Internal Server Error - """ - partnership_updates = ProjectPartnershipUpdateDTO(request.get_json()) - partnership_dto = ProjectPartnershipService.get_partnership_as_dto( - partnership_id - ) - is_not_manager_error = check_if_manager(partnership_dto) - if is_not_manager_error is not None: - return is_not_manager_error +@router.patch("/partnerships/{partnership_id}/") +async def patch_partnership( + request: Request, + partnership_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """Update the time range for a partner project link + --- + tags: + - projects + - partners + - partnerships + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: partnership_id + in: path + description: Unique partnership ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for creating a partnership + schema: + properties: + startedOn: + type: date + description: The timestamp when the partner is added to a project. Defaults to current time. + default: "2017-04-11T12:38:49" + endedOn: + type: date + description: The timestamp when the partner ended their work on a project. + default: "2018-04-11T12:38:49" + responses: + 201: + description: Partner project association created + 400: + description: Ivalid dates or started_on was after ended_on + 401: + description: Forbidden, if user is not a manager of this project + 403: + description: Forbidden, if user is not authenticated + 404: + description: Not found + 500: + description: Internal Server Error + """ + request_data = await request.json() + partnership_updates = ProjectPartnershipUpdateDTO(**request_data) + partnership_dto = await ProjectPartnershipService.get_partnership_as_dto( + partnership_id, db + ) - partnership = ProjectPartnershipService.update_partnership_time_range( + is_not_manager_error = await check_if_manager(partnership_dto, user.id, db) + if is_not_manager_error is not None: + return is_not_manager_error + + async with db.transaction(): + partnership = await ProjectPartnershipService.update_partnership_time_range( + db, partnership_id, partnership_updates.started_on, partnership_updates.ended_on, ) - - return ( - { + return ( + JSONResponse( + content={ "Success": "Updated time range. 
startedOn: {}, endedOn: {}".format( partnership.started_on, partnership.ended_on ), "startedOn": f"{partnership.started_on}", "endedOn": f"{partnership.ended_on}", }, - 200, - ) - - @staticmethod - @token_auth.login_required - def delete(partnership_id: int): - """Deletes a link between a project and a partner - --- - tags: - - projects - - partners - - partnerships - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: partnership_id - in: path - description: Unique partnership ID - required: true - type: integer - default: 1 - responses: - 201: - description: Partner project association created - 401: - description: Forbidden, if user is not a manager of this project - 403: - description: Forbidden, if user is not authenticated - 404: - description: Not found - 500: - description: Internal Server Error - """ - partnership_dto = ProjectPartnershipService.get_partnership_as_dto( - partnership_id - ) + status_code=200, + ), + ) - is_not_manager_error = check_if_manager(partnership_dto) - if is_not_manager_error is not None: - return is_not_manager_error - ProjectPartnershipService.delete_partnership(partnership_id) - return ( - { +@router.delete("/partnerships/{partnership_id}/") +async def delete_partnership( + request: Request, + partnership_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """Deletes a link between a project and a partner + --- + tags: + - projects + - partners + - partnerships + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: partnership_id + in: path + description: Unique partnership ID + required: true + type: integer + default: 1 + responses: + 201: + description: Partner project association created + 401: + description: Forbidden, if user is not a manager of this project + 403: + description: Forbidden, if user is not authenticated + 404: + description: Not found + 500: + description: Internal Server Error + """ + partnership_dto = await ProjectPartnershipService.get_partnership_as_dto( + partnership_id, db + ) + is_not_manager_error = await check_if_manager(partnership_dto, user.id, db) + if is_not_manager_error is not None: + return is_not_manager_error + async with db.transaction(): + await ProjectPartnershipService.delete_partnership(partnership_id, db) + return ( + JSONResponse( + content={ "Success": "Partnership ID {} deleted".format(partnership_id), }, - 200, - ) + status_code=200, + ), + ) -class PartnersByProjectAPI(Resource): - @staticmethod - def get(project_id: int): - """ - Retrieves the list of partners associated with a project - --- - tags: - - projects - - partners - - partnerships - produces: - - application/json - parameters: - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: List (possibly empty) of partners associated with this project_id - 500: - description: Internal Server Error - """ - partnerships = ProjectPartnershipService.get_partnerships_by_project(project_id) - return {"partnerships": partnerships}, 200 +@router.get("/{project_id}/partners/") +async def get_partners( + request: Request, + project_id: int, + db: Database = Depends(get_db), +): + """ + Retrieves the list of partners associated 
with a project + --- + tags: + - projects + - partners + - partnerships + produces: + - application/json + parameters: + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: List (possibly empty) of partners associated with this project_id + 500: + description: Internal Server Error + """ + partnerships = await ProjectPartnershipService.get_partnerships_by_project( + project_id, db + ) + return {"partnerships": partnerships} diff --git a/backend/api/projects/resources.py b/backend/api/projects/resources.py index 334bcd1520..b648a419d1 100644 --- a/backend/api/projects/resources.py +++ b/backend/api/projects/resources.py @@ -1,161 +1,199 @@ -import geojson import io -from flask import send_file -from flask_restful import Resource, current_app, request -from schematics.exceptions import DataError +import json from distutils.util import strtobool +from typing import Optional + +import geojson +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import FileResponse, JSONResponse, StreamingResponse +from loguru import logger + +from backend.db import get_db from backend.models.dtos.project_dto import ( DraftProjectDTO, ProjectDTO, - ProjectSearchDTO, ProjectSearchBBoxDTO, + ProjectSearchDTO, ) +from backend.models.dtos.user_dto import AuthUserDTO from backend.models.postgis.statuses import UserRole +from backend.services.organisation_service import OrganisationService +from backend.services.project_admin_service import ( + InvalidData, + InvalidGeoJson, + ProjectAdminService, + ProjectAdminServiceError, +) from backend.services.project_search_service import ( + BBoxTooBigError, ProjectSearchService, ProjectSearchServiceError, - BBoxTooBigError, ) from backend.services.project_service import ( + NotFound, ProjectService, ProjectServiceError, - NotFound, +) +from backend.services.recommendation_service import ProjectRecommendationService +from backend.services.users.authentication_service import ( + login_required, + login_required_optional, ) from backend.services.users.user_service import UserService -from backend.services.organisation_service import OrganisationService -from backend.services.users.authentication_service import token_auth -from backend.services.project_admin_service import ( - ProjectAdminService, - ProjectAdminServiceError, - InvalidGeoJson, - InvalidData, + +router = APIRouter( + prefix="/projects", + tags=["projects"], + responses={404: {"description": "Not found"}}, ) -from backend.services.recommendation_service import ProjectRecommendationService -class ProjectsRestAPI(Resource): - @token_auth.login_required(optional=True) - def get(self, project_id): - """ - Get a specified project including it's area - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: false - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - - in: query - name: as_file - type: boolean - description: Set to true if file download is preferred - default: False - - in: query - name: abbreviated - type: boolean - description: Set to true if only state information is desired - default: False - responses: - 200: - 
description: Project found - 403: - description: Forbidden - 404: - description: Project not found - 500: - description: Internal Server Error - """ - try: - authenticated_user_id = token_auth.current_user() - as_file = bool( - strtobool(request.args.get("as_file")) - if request.args.get("as_file") - else False - ) - abbreviated = bool( - strtobool(request.args.get("abbreviated")) - if request.args.get("abbreviated") - else False - ) - project_dto = ProjectService.get_project_dto_for_mapper( - project_id, - authenticated_user_id, - request.environ.get("HTTP_ACCEPT_LANGUAGE"), - abbreviated, - ) +@router.get("/{project_id}/") +async def get_project( + request: Request, + project_id: int, + as_file: str = "False", + abbreviated: bool = False, + db: Database = Depends(get_db), + user: Optional[AuthUserDTO] = Depends(login_required_optional), +): + """ + Get a specified project including it's area + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: false + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + - in: query + name: as_file + type: boolean + description: Set to true if file download is preferred + default: False + - in: query + name: abbreviated + type: boolean + description: Set to true if only state information is desired + default: False + responses: + 200: + description: Project found + 403: + description: Forbidden + 404: + description: Project not found + 500: + description: Internal Server Error + """ + try: + user_id = user.id if user else None + as_file = bool(strtobool(as_file) if as_file else False) + abbreviated = bool(strtobool(abbreviated) if abbreviated else False) + project_dto = await ProjectService.get_project_dto_for_mapper( + project_id, + user_id, + db, + request.headers.get("accept-language"), + abbreviated, + ) + if project_dto: + if as_file: + project_dto = json.dumps(project_dto, default=str) + return FileResponse( + geojson.dumps(project_dto).encode("utf-8"), + media_type="application/json", + content_disposition_type="attachment", + filename=f"project_{str(project_id)}.json", + ) + return project_dto - if project_dto: - project_dto = project_dto.to_primitive() - if as_file: - return send_file( - io.BytesIO(geojson.dumps(project_dto).encode("utf-8")), - mimetype="application/json", - as_attachment=True, - download_name=f"project_{str(project_id)}.json", - ) - - return project_dto, 200 - else: - return { + else: + return JSONResponse( + content={ "Error": "User not permitted: Private Project", "SubCode": "PrivateProject", - }, 403 - except ProjectServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 - finally: - # this will try to unlock tasks that have been locked too long - try: - ProjectService.auto_unlock_tasks(project_id) - except Exception as e: - current_app.logger.critical(str(e)) - - @token_auth.login_required - def post(self): - """ - Creates a tasking-manager project - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object for 
creating draft project - schema: - properties: - cloneFromProjectId: - type: int - default: 1 - description: Specify this value if you want to clone a project, otherwise avoid information - projectName: - type: string - default: HOT Project - areaOfInterest: + }, + status_code=403, + ) + + except ProjectServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) + finally: + # this will try to unlock tasks that have been locked too long + try: + await ProjectService.auto_unlock_tasks(project_id, db) + except Exception as e: + logger.critical(str(e)) + + +@router.post("/") +async def post( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + draft_project_dto: DraftProjectDTO = None, +): + """ + Creates a tasking-manager project + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object for creating draft project + schema: + properties: + cloneFromProjectId: + type: int + default: 1 + description: Specify this value if you want to clone a project, otherwise avoid information + projectName: + type: string + default: HOT Project + areaOfInterest: + schema: + properties: + type: + type: string + default: FeatureCollection + features: + type: array + items: + schema: + $ref: "#/definitions/GeoJsonFeature" + tasks: schema: properties: type: @@ -166,1125 +204,1199 @@ def post(self): items: schema: $ref: "#/definitions/GeoJsonFeature" - tasks: - schema: - properties: - type: - type: string - default: FeatureCollection - features: - type: array - items: - schema: - $ref: "#/definitions/GeoJsonFeature" - arbitraryTasks: - type: boolean - default: false - responses: - 201: - description: Draft project created successfully - 400: - description: Client Error - Invalid Request - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - try: - draft_project_dto = DraftProjectDTO(request.get_json()) - draft_project_dto.user_id = token_auth.current_user() - draft_project_dto.validate() - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return {"Error": "Unable to create project", "SubCode": "InvalidData"}, 400 + arbitraryTasks: + type: boolean + default: false + responses: + 201: + description: Draft project created successfully + 400: + description: Client Error - Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + try: + draft_project_dto.user_id = user.id + except Exception as e: + logger.error(f"error validating request: {str(e)}") + return JSONResponse( + content={"Error": "Unable to create project", "SubCode": "InvalidData"}, + status_code=400, + ) - try: - draft_project_id = ProjectAdminService.create_draft_project( - draft_project_dto + try: + async with db.transaction(): + draft_project_id = await ProjectAdminService.create_draft_project( + draft_project_dto, db ) - return {"projectId": draft_project_id}, 201 - except ProjectAdminServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 - except (InvalidGeoJson, InvalidData) as e: - return {"Error": str(e).split("-")[1], 
"SubCode": str(e).split("-")[0]}, 400 - - @token_auth.login_required - def head(self, project_id): - """ - Retrieves a Tasking-Manager project - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: Project found - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Project not found - 500: - description: Internal Server Error - """ - try: - ProjectAdminService.is_user_action_permitted_on_project( - token_auth.current_user(), project_id + return JSONResponse( + content={"projectId": draft_project_id}, status_code=201 ) - except ValueError: - return { + except ProjectAdminServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) + + except (InvalidGeoJson, InvalidData) as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=400, + ) + + except Exception as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=400, + ) + + +def head(request: Request, project_id): + """ + Retrieves a Tasking-Manager project + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: Project found + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Project not found + 500: + description: Internal Server Error + """ + try: + ProjectAdminService.is_user_action_permitted_on_project( + request.user.display_name, project_id + ) + except ValueError: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 - - project_dto = ProjectAdminService.get_project_dto_for_admin(project_id) - return project_dto.to_primitive(), 200 - - @token_auth.login_required - def patch(self, project_id): - """ - Updates a Tasking-Manager project - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for updating an existing project - schema: - properties: - projectStatus: - type: string - default: DRAFT - projectPriority: - type: string - default: MEDIUM - defaultLocale: - type: string - default: en - difficulty: - type: string - default: EASY - validation_permission: - type: string - default: ANY - mapping_permission: - type: string - default: ANY - private: - type: boolean - default: false - changesetComment: + }, + status_code=403, + ) + + project_dto = ProjectAdminService.get_project_dto_for_admin(project_id) + return project_dto.model_dump(by_alias=True), 200 + + +@router.patch("/{project_id}/") 
+async def patch( + request: Request, + project_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + project_dto: dict = None, +): + """ + Updates a Tasking-Manager project + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for updating an existing project + schema: + properties: + projectStatus: + type: string + default: DRAFT + projectPriority: + type: string + default: MEDIUM + defaultLocale: + type: string + default: en + difficulty: + type: string + default: EASY + validation_permission: + type: string + default: ANY + mapping_permission: + type: string + default: ANY + private: + type: boolean + default: false + changesetComment: + type: string + default: hotosm-project-1 + dueDate: + type: date + default: "2017-04-11T12:38:49" + imagery: + type: string + default: http//www.bing.com/maps/ + josmPreset: + type: string + default: josm preset goes here + mappingTypes: + type: array + items: type: string - default: hotosm-project-1 - dueDate: - type: date - default: "2017-04-11T12:38:49" - imagery: + default: [BUILDINGS, ROADS] + mappingEditors: + type: array + items: type: string - default: http//www.bing.com/maps/ - josmPreset: + default: [ID, JOSM, POTLATCH_2, FIELD_PAPERS] + validationEditors: + type: array + items: type: string - default: josm preset goes here - mappingTypes: - type: array - items: - type: string - default: [BUILDINGS, ROADS] - mappingEditors: + default: [ID, JOSM, POTLATCH_2, FIELD_PAPERS] + campaign: + type: string + default: malaria + organisation: + type: integer + default: 1 + countryTag: type: array items: type: string - default: [ID, JOSM, POTLATCH_2, FIELD_PAPERS] - validationEditors: - type: array - items: - type: string - default: [ID, JOSM, POTLATCH_2, FIELD_PAPERS] - campaign: + default: [] + licenseId: + type: integer + default: 1 + description: Id of imagery license associated with the project + allowedUsernames: + type: array + items: type: string - default: malaria - organisation: - type: integer - default: 1 - countryTag: - type: array - items: - type: string - default: [] - licenseId: - type: integer - default: 1 - description: Id of imagery license associated with the project - allowedUsernames: - type: array - items: - type: string - default: ["Iain Hunter", LindaA1] - priorityAreas: - type: array - items: - schema: - $ref: "#/definitions/GeoJsonPolygon" - projectInfoLocales: - type: array - items: - schema: - $ref: "#/definitions/ProjectInfo" - taskCreationMode: - type: integer - default: GRID - responses: - 200: - description: Project updated - 400: - description: Client Error - Invalid Request - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Project not found - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - if not ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id - ): - return { + default: ["Iain Hunter", LindaA1] + priorityAreas: + type: array + items: + schema: + $ref: "#/definitions/GeoJsonPolygon" + projectInfoLocales: + type: array + items: + schema: + $ref: "#/definitions/ProjectInfo" + 
taskCreationMode: + type: integer + default: GRID + responses: + 200: + description: Project updated + 400: + description: Client Error - Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Project not found + 500: + description: Internal Server Error + """ + if not await ProjectAdminService.is_user_action_permitted_on_project( + user.id, project_id, db + ): + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 - try: - project_dto = ProjectDTO(request.get_json()) - project_dto.project_id = project_id - project_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": "Unable to update project", "SubCode": "InvalidData"}, 400 + }, + status_code=403, + ) + project_dto = ProjectDTO(**project_dto) + project_dto.project_id = project_id - try: - ProjectAdminService.update_project(project_dto, authenticated_user_id) - return {"Status": "Updated"}, 200 - except InvalidGeoJson as e: - return {"Invalid GeoJson": str(e)}, 400 - except ProjectAdminServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 - - @token_auth.login_required - def delete(self, project_id): - """ - Deletes a Tasking-Manager project - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: Project deleted - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Project not found - 500: - description: Internal Server Error - """ - try: - authenticated_user_id = token_auth.current_user() - if not ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id - ): - raise ValueError() - except ValueError: - return { + try: + async with db.transaction(): + await ProjectAdminService.update_project(project_dto, user.id, db) + return JSONResponse(content={"Status": "Updated"}, status_code=200) + except InvalidGeoJson as e: + return JSONResponse(content={"Invalid GeoJson": str(e)}, status_code=400) + except ProjectAdminServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) + + +@router.delete("/{project_id}/") +async def delete( + request: Request, + project_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Deletes a Tasking-Manager project + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: Project deleted + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Project not found + 500: + description: Internal Server Error + """ + try: + if not await ProjectAdminService.is_user_action_permitted_on_project( + user.id, project_id, db + ): + raise ValueError() + except ValueError: + return 
JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 - - try: - ProjectAdminService.delete_project(project_id, authenticated_user_id) - return {"Success": "Project deleted"}, 200 - except ProjectAdminServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 - - -class ProjectSearchBase(Resource): - @token_auth.login_required(optional=True) - def setup_search_dto(self) -> ProjectSearchDTO: - search_dto = ProjectSearchDTO() - search_dto.preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - search_dto.difficulty = request.args.get("difficulty") - search_dto.action = request.args.get("action") - search_dto.organisation_name = request.args.get("organisationName") - search_dto.organisation_id = request.args.get("organisationId") - search_dto.team_id = request.args.get("teamId") - search_dto.campaign = request.args.get("campaign") - search_dto.order_by = request.args.get("orderBy", "priority") - search_dto.country = request.args.get("country") - search_dto.order_by_type = request.args.get("orderByType", "ASC") - search_dto.page = ( - int(request.args.get("page")) if request.args.get("page") else 1 + }, + status_code=403, ) - search_dto.text_search = request.args.get("textSearch") - search_dto.omit_map_results = strtobool( - request.args.get("omitMapResults", "false") + try: + async with db.transaction(): + await ProjectAdminService.delete_project(project_id, user.id, db) + return JSONResponse(content={"Success": "Project deleted"}, status_code=200) + except ProjectAdminServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, ) - search_dto.last_updated_gte = request.args.get("lastUpdatedFrom") - search_dto.last_updated_lte = request.args.get("lastUpdatedTo") - search_dto.created_gte = request.args.get("createdFrom") - search_dto.created_lte = request.args.get("createdTo") - search_dto.partner_id = request.args.get("partnerId") - search_dto.partnership_from = request.args.get("partnershipFrom") - search_dto.partnership_to = request.args.get("partnershipTo") - search_dto.download_as_csv = request.args.get("downloadAsCSV") - - # See https://github.com/hotosm/tasking-manager/pull/922 for more info - try: - authenticated_user_id = token_auth.current_user() - if request.args.get("createdByMe") == "true": - search_dto.created_by = authenticated_user_id - - if request.args.get("mappedByMe") == "true": - search_dto.mapped_by = authenticated_user_id - - if request.args.get("favoritedByMe") == "true": - search_dto.favorited_by = authenticated_user_id - - if request.args.get("managedByMe") == "true": - search_dto.managed_by = authenticated_user_id - if request.args.get("basedOnMyInterests") == "true": - search_dto.based_on_user_interests = authenticated_user_id - - except Exception: - pass - - mapping_types_str = request.args.get("mappingTypes") - if mapping_types_str: - search_dto.mapping_types = map( - str, mapping_types_str.split(",") - ) # Extract list from string - search_dto.mapping_types_exact = strtobool( - request.args.get("mappingTypesExact", "false") + + +def setup_search_dto(request) -> ProjectSearchDTO: + search_dto = ProjectSearchDTO() + search_dto.preferred_locale = request.headers.get("accept-language") + search_dto.difficulty = request.query_params.get("difficulty") + search_dto.action = request.query_params.get("action") + search_dto.organisation_name = request.query_params.get("organisationName") 
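The setup_search_dto helper added here reads each filter as a raw string from request.query_params and coerces it explicitly: distutils.util.strtobool for booleans, int() for the page number, and split(",") for comma-separated lists. A small illustrative sketch of that coercion, with a plain dict standing in for request.query_params (the parameter names mirror the ones used in this file; the values are made up):

    from distutils.util import strtobool  # same helper this module relies on

    query_params = {"page": "2", "omitMapResults": "false", "mappingTypes": "BUILDINGS,ROADS"}

    # Page defaults to 1 when absent, booleans default to "false", and
    # list-valued filters arrive as a single comma-separated string.
    page = int(query_params.get("page")) if query_params.get("page") else 1
    omit_map_results = bool(strtobool(query_params.get("omitMapResults", "false")))
    mapping_types = list(map(str, query_params["mappingTypes"].split(",")))

    print(page, omit_map_results, mapping_types)  # -> 2 False ['BUILDINGS', 'ROADS']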
+ search_dto.organisation_id = request.query_params.get("organisationId") + search_dto.team_id = request.query_params.get("teamId") + search_dto.campaign = request.query_params.get("campaign") + search_dto.order_by = request.query_params.get("orderBy", "priority") + search_dto.country = request.query_params.get("country") + search_dto.order_by_type = request.query_params.get("orderByType", "ASC") + search_dto.page = ( + int(request.query_params.get("page")) if request.query_params.get("page") else 1 + ) + search_dto.text_search = request.query_params.get("textSearch") + search_dto.omit_map_results = strtobool( + request.query_params.get("omitMapResults", "false") + ) + search_dto.last_updated_gte = request.query_params.get("lastUpdatedFrom") + search_dto.last_updated_lte = request.query_params.get("lastUpdatedTo") + search_dto.created_gte = request.query_params.get("createdFrom") + search_dto.created_lte = request.query_params.get("createdTo") + search_dto.partner_id = request.query_params.get("partnerId") + search_dto.partnership_from = request.query_params.get("partnershipFrom") + search_dto.partnership_to = request.query_params.get("partnershipTo") + search_dto.download_as_csv = request.query_params.get("downloadAsCSV") + + # See https://github.com/hotosm/tasking-manager/pull/922 for more info + try: + authenticated_user_id = ( + request.user.display_name + if request.user and request.user.display_name + else None ) - project_statuses_str = request.args.get("projectStatuses") - if project_statuses_str: - search_dto.project_statuses = map(str, project_statuses_str.split(",")) - interests_str = request.args.get("interests") - if interests_str: - search_dto.interests = map(int, interests_str.split(",")) - search_dto.validate() - - return search_dto - - -class ProjectsAllAPI(ProjectSearchBase): - @token_auth.login_required(optional=True) - def get(self): - """ - List and search for projects - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - in: query - name: difficulty - type: string - - in: query - name: orderBy - type: string - default: priority - enum: [id,difficulty,priority,status,last_updated,due_date,percent_mapped,percent_validated] - - in: query - name: orderByType - type: string - default: ASC - enum: [ASC, DESC] - - in: query - name: mappingTypes - type: string - - in: query - name: mappingTypesExact - type: boolean - default: false - description: if true, limits projects to match the exact mapping types requested - - in: query - name: organisationName - description: Organisation name to search for - type: string - - in: query - name: organisationId - description: Organisation ID to search for - type: integer - - in: query - name: campaign - description: Campaign name to search for - type: string - - in: query - name: page - description: Page of results user requested - type: integer - default: 1 - - in: query - name: textSearch - description: Text to search - type: string - - in: query - name: country - description: Project country - type: string - - in: query - name: action - description: Filter projects by possible actions - enum: [map, validate, any] - type: string - - in: query - name: projectStatuses - description: Authenticated PMs can search for archived or draft statuses - type: string - - 
in: query - name: lastUpdatedFrom - description: Filter projects whose last update date is equal or greater than a date - type: string - - in: query - name: lastUpdatedTo - description: Filter projects whose last update date is equal or lower than a date - type: string - - in: query - name: createdFrom - description: Filter projects whose creation date is equal or greater than a date - type: string - - in: query - name: createdTo - description: Filter projects whose creation date is equal or lower than a date - type: string - - in: query - name: interests - type: string - description: Filter by interest on project - default: null - - in: query - name: createdByMe - description: Limit to projects created by the authenticated user - type: boolean - default: false - - in: query - name: mappedByMe - description: Limit to projects mapped/validated by the authenticated user - type: boolean - default: false - - in: query - name: favoritedByMe - description: Limit to projects favorited by the authenticated user - type: boolean - default: false - - in: query - name: managedByMe - description: - Limit to projects that can be managed by the authenticated user, - excluding the ones created by them - type: boolean - default: false - - in: query - name: basedOnMyInterests - type: boolean - description: Filter projects based on user interests - default: false - - in: query - name: teamId - type: string - description: Filter by team on project - default: null - name: omitMapResults - type: boolean - description: If true, it will not return the project centroid's geometries. - default: false - - in: query - name: partnerId - type: int - description: Limit to projects currently linked to a specific partner ID - default: 1 - - in: query - name: partnershipFrom - type: date - description: Limit to projects with partners that began greater than or equal to a date - default: "2017-04-11" - - in: query - name: partnershipTo - type: date - description: Limit to projects with partners that ended less than or equal to a date - default: "2018-04-11" - - in: query - name: downloadAsCSV - type: boolean - description: Set to true to download search results as a CSV - default: false - responses: - 200: - description: Projects found - 400: - description: Bad input. - 401: - description: Search parameters partnerId, partnershipFrom, partnershipTo are not allowed for this user. 
- 404: - description: No projects found - 500: - description: Internal Server Error - """ - try: - user = None - user_id = token_auth.current_user() - if user_id: - user = UserService.get_user_by_id(user_id) - search_dto = self.setup_search_dto() + if request.query_params.get("createdByMe") == "true": + search_dto.created_by = authenticated_user_id + + if request.query_params.get("mappedByMe") == "true": + search_dto.mapped_by = authenticated_user_id + + if request.query_params.get("favoritedByMe") == "true": + search_dto.favorited_by = authenticated_user_id + + if request.query_params.get("managedByMe") == "true": + search_dto.managed_by = authenticated_user_id + + if request.query_params.get("basedOnMyInterests") == "true": + search_dto.based_on_user_interests = authenticated_user_id + + except Exception: + pass + + mapping_types_str = request.query_params.get("mappingTypes") + if mapping_types_str: + search_dto.mapping_types = list( + map(str, mapping_types_str.split(",")) + ) # Extract list from string + search_dto.mapping_types_exact = strtobool( + request.query_params.get("mappingTypesExact", "false") + ) + project_statuses_str = request.query_params.get("projectStatuses") + if project_statuses_str: + search_dto.project_statuses = list(map(str, project_statuses_str.split(","))) + interests_str = request.query_params.get("interests") + if interests_str: + search_dto.interests = map(int, interests_str.split(",")) + + return search_dto + - if search_dto.omit_map_results and search_dto.download_as_csv: - return { +@router.get("/") +async def get( + request: Request, + user: Optional[AuthUserDTO] = Depends(login_required_optional), + db: Database = Depends(get_db), +): + """ + List and search for projects + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - in: query + name: difficulty + type: string + - in: query + name: orderBy + type: string + default: priority + enum: [id,difficulty,priority,status,last_updated,due_date] + - in: query + name: orderByType + type: string + default: ASC + enum: [ASC, DESC] + - in: query + name: mappingTypes + type: string + - in: query + name: mappingTypesExact + type: boolean + default: false + description: if true, limits projects to match the exact mapping types requested + - in: query + name: organisationName + description: Organisation name to search for + type: string + - in: query + name: organisationId + description: Organisation ID to search for + type: integer + - in: query + name: campaign + description: Campaign name to search for + type: string + - in: query + name: page + description: Page of results user requested + type: integer + default: 1 + - in: query + name: textSearch + description: Text to search + type: string + - in: query + name: country + description: Project country + type: string + - in: query + name: action + description: Filter projects by possible actions + enum: [map, validate, any] + type: string + - in: query + name: projectStatuses + description: Authenticated PMs can search for archived or draft statuses + type: string + - in: query + name: lastUpdatedFrom + description: Filter projects whose last update date is equal or greater than a date + type: string + - in: query + name: lastUpdatedTo + description: Filter projects whose last update 
date is equal or lower than a date + type: string + - in: query + name: createdFrom + description: Filter projects whose creation date is equal or greater than a date + type: string + - in: query + name: createdTo + description: Filter projects whose creation date is equal or lower than a date + type: string + - in: query + name: interests + type: string + description: Filter by interest on project + default: null + - in: query + name: createdByMe + description: Limit to projects created by the authenticated user + type: boolean + default: false + - in: query + name: mappedByMe + description: Limit to projects mapped/validated by the authenticated user + type: boolean + default: false + - in: query + name: favoritedByMe + description: Limit to projects favorited by the authenticated user + type: boolean + default: false + - in: query + name: managedByMe + description: + Limit to projects that can be managed by the authenticated user, + excluding the ones created by them + type: boolean + default: false + - in: query + name: basedOnMyInterests + type: boolean + description: Filter projects based on user interests + default: false + - in: query + name: teamId + type: string + description: Filter by team on project + default: null + name: omitMapResults + type: boolean + description: If true, it will not return the project centroid's geometries. + default: false + responses: + 200: + description: Projects found + 404: + description: No projects found + 500: + description: Internal Server Error + """ + try: + user_id = user.id if user else None + user = None + if user_id: + user = await UserService.get_user_by_id(user_id, db) + search_dto = setup_search_dto(request) + + if search_dto.omit_map_results and search_dto.download_as_csv: + return JSONResponse( + content={ "Error": "omitMapResults and downloadAsCSV cannot be both set to true" - }, 400 + }, + status_code=400, + ) - if ( - search_dto.partnership_from is not None - or search_dto.partnership_to is not None - ) and search_dto.partner_id is None: - return { + if ( + search_dto.partnership_from is not None + or search_dto.partnership_to is not None + ) and search_dto.partner_id is None: + return JSONResponse( + content={ "Error": "partnershipFrom or partnershipTo cannot be provided without partnerId" - }, 400 - - if ( - search_dto.partner_id is not None - and search_dto.partnership_from is not None - and search_dto.partnership_to is not None - and search_dto.partnership_from > search_dto.partnership_to - ): - return { + }, + status_code=400, + ) + + if ( + search_dto.partner_id is not None + and search_dto.partnership_from is not None + and search_dto.partnership_to is not None + and search_dto.partnership_from > search_dto.partnership_to + ): + return JSONResponse( + content={ "Error": "partnershipFrom cannot be greater than partnershipTo" - }, 400 - - if any( - map( - lambda x: x is not None, - [ - search_dto.partner_id, - search_dto.partnership_from, - search_dto.partnership_to, - ], - ) - ) and (user is None or not user.role == UserRole.ADMIN.value): - error_msg = "Only admins can search projects by partnerId, partnershipFrom, partnershipTo" - return {"Error": error_msg}, 401 + }, + status_code=400, + ) - if search_dto.download_as_csv: - all_results_csv = ProjectSearchService.search_projects_as_csv( - search_dto, user - ) + if any( + map( + lambda x: x is not None, + [ + search_dto.partner_id, + search_dto.partnership_from, + search_dto.partnership_to, + ], + ) + ) and (user is None or not user.role == UserRole.ADMIN.value): + error_msg 
= "Only admins can search projects by partnerId, partnershipFrom, partnershipTo" + return JSONResponse(content={"Error": error_msg}, status_code=401) - return send_file( - io.BytesIO(all_results_csv.encode()), - mimetype="text/csv", - as_attachment=True, - download_name="projects_search_result.csv", - ) + if search_dto.download_as_csv: + all_results_csv = await ProjectSearchService.search_projects_as_csv( + search_dto, user, db, True + ) + return StreamingResponse( + iter([all_results_csv]), + media_type="text/csv", + headers={"Content-Disposition": "attachment; filename=data.csv"}, + ) + results_dto = await ProjectSearchService.search_projects(search_dto, user, db) + return results_dto + except NotFound: + return JSONResponse(content={"mapResults": {}, "results": []}, status_code=200) + except (KeyError, ValueError) as e: + error_msg = f"Projects GET - {str(e)}" + return JSONResponse(content={"Error": error_msg}, status_code=400) - results_dto = ProjectSearchService.search_projects(search_dto, user) - return results_dto.to_primitive(), 200 - except NotFound: - return {"mapResults": {}, "results": []}, 200 - except (KeyError, ValueError) as e: - error_msg = f"Projects GET - {str(e)}" - return {"Error": error_msg}, 400 - - -class ProjectsQueriesBboxAPI(Resource): - @token_auth.login_required - def get(self): - """ - List and search projects by bounding box - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - default: en - - in: query - name: bbox - description: comma separated list xmin, ymin, xmax, ymax - type: string - required: true - default: 34.404,-1.034, 34.717,-0.624 - - in: query - name: srid - description: srid of bbox coords - type: integer - default: 4326 - - in: query - name: createdByMe - description: limit to projects created by authenticated user - type: boolean - required: true - default: false - - responses: - 200: - description: ok - 400: - description: Client Error - Invalid Request - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - orgs_dto = OrganisationService.get_organisations_managed_by_user_as_dto( - authenticated_user_id - ) - if len(orgs_dto.organisations) < 1: - return { + +@router.get("/queries/bbox/") +async def get( + request: Request, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + List and search projects by bounding box + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + default: en + - in: query + name: bbox + description: comma separated list xmin, ymin, xmax, ymax + type: string + required: true + default: 34.404,-1.034, 34.717,-0.624 + - in: query + name: srid + description: srid of bbox coords + type: integer + default: 4326 + - in: query + name: createdByMe + description: limit to projects created by authenticated user + type: boolean + required: true + default: false + + responses: + 200: + description: ok + 400: + description: Client Error - Invalid Request + 403: + description: Forbidden + 
500: + description: Internal Server Error + """ + authenticated_user_id = ( + request.user.display_name + if request.user and request.user.display_name + else None + ) + orgs_dto = await OrganisationService.get_organisations_managed_by_user_as_dto( + authenticated_user_id, db + ) + if len(orgs_dto.organisations) < 1: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 + }, + status_code=403, + ) - try: - search_dto = ProjectSearchBBoxDTO() - search_dto.bbox = map(float, request.args.get("bbox").split(",")) - search_dto.input_srid = request.args.get("srid") - search_dto.preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - created_by_me = ( - strtobool(request.args.get("createdByMe")) - if request.args.get("createdByMe") - else False - ) - if created_by_me: - search_dto.project_author = authenticated_user_id - search_dto.validate() - except Exception as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return { + try: + bbox = map(float, request.query_params.get("bbox").split(",")) + input_srid = request.query_params.get("srid") + search_dto = ProjectSearchBBoxDTO( + bbox=bbox, + input_srid=input_srid, + preferred_locale=request.headers.get("accept-language", "en"), + ) + created_by_me = ( + strtobool(request.query_params.get("createdByMe")) + if request.query_params.get("createdByMe") + else False + ) + if created_by_me: + search_dto.project_author = authenticated_user_id + # search_dto.validate() + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={ "Error": f"Error validating request: {str(e)}", "SubCode": "InvalidData", - }, 400 - try: - geojson = ProjectSearchService.get_projects_geojson(search_dto) - return geojson, 200 - except BBoxTooBigError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 400 - except ProjectSearchServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 400 - - -class ProjectsQueriesOwnerAPI(ProjectSearchBase): - @token_auth.login_required - def get(self): - """ - Get all projects for logged in admin - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - responses: - 200: - description: All mapped tasks validated - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Admin has no projects - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - orgs_dto = OrganisationService.get_organisations_managed_by_user_as_dto( - authenticated_user_id + }, + status_code=400, + ) + try: + geojson = await ProjectSearchService.get_projects_geojson(search_dto, db) + return JSONResponse(content=geojson, status_code=200) + except BBoxTooBigError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=400, + ) + except ProjectSearchServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=400, ) - if len(orgs_dto.organisations) < 1: - return { + + +@router.get("/queries/myself/owner/") +async def get( + request: Request, + 
db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Get all projects for logged in admin + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + responses: + 200: + description: All mapped tasks validated + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Admin has no projects + 500: + description: Internal Server Error + """ + authenticated_user_id = ( + request.user.display_name + if request.user and request.user.display_name + else None + ) + orgs_dto = await OrganisationService.get_organisations_managed_by_user_as_dto( + authenticated_user_id, db + ) + if len(orgs_dto.organisations) < 1: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 + }, + status_code=403, + ) + + search_dto = setup_search_dto(request) + preferred_locale = request.headers.get("accept-language", "en") + admin_projects = await ProjectAdminService.get_projects_for_admin( + authenticated_user_id, preferred_locale, search_dto, db + ) + return admin_projects + + +@router.get("/queries/{username}/touched/") +async def get(request: Request, username, db: Database = Depends(get_db)): + """ + Gets projects user has mapped + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: username + in: path + description: The users username + required: true + type: string + default: Thinkwhere + responses: + 200: + description: Mapped projects found + 404: + description: User not found + 500: + description: Internal Server Error + """ + locale = ( + request.headers.get("accept-language") + if request.headers.get("accept-language") + else "en" + ) + user_dto = await UserService.get_mapped_projects(username, locale, db) + return user_dto + + +@router.get("/{project_id}/queries/summary/") +async def get(request: Request, project_id: int, db: Database = Depends(get_db)): + """ + Gets project summary + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: project_id + in: path + description: The ID of the project + required: true + type: integer + default: 1 + responses: + 200: + description: Project Summary + 404: + description: Project not found + 500: + description: Internal Server Error + """ + preferred_locale = request.headers.get("accept-language") + summary = await ProjectService.get_project_summary(project_id, db, preferred_locale) + return summary - search_dto = self.setup_search_dto() - admin_projects = ProjectAdminService.get_projects_for_admin( - authenticated_user_id, - request.environ.get("HTTP_ACCEPT_LANGUAGE"), - search_dto, + +@router.get("/{project_id}/queries/nogeometries/") +async def get(request: Request, project_id: int, db: Database = Depends(get_db)): + """ + Get HOT Project for mapping + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Accept-Language + description: Language user is requesting + 
type: string + required: true + default: en + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + - in: query + name: as_file + type: boolean + description: Set to true if file download is preferred + default: False + responses: + 200: + description: Project found + 403: + description: Forbidden + 404: + description: Project not found + 500: + description: Internal Server Error + """ + try: + as_file = ( + strtobool(request.query_params.get("as_file")) + if request.query_params.get("as_file") + else False ) - return admin_projects.to_primitive(), 200 - - -class ProjectsQueriesTouchedAPI(Resource): - def get(self, username): - """ - Gets projects user has mapped - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: username - in: path - description: The users username - required: true - type: string - default: Thinkwhere - responses: - 200: - description: Mapped projects found - 404: - description: User not found - 500: - description: Internal Server Error - """ - locale = ( - request.environ.get("HTTP_ACCEPT_LANGUAGE") - if request.environ.get("HTTP_ACCEPT_LANGUAGE") - else "en" + locale = request.headers.get("accept-language") + project_dto = await ProjectService.get_project_dto_for_mapper( + project_id, None, db, locale, True ) - user_dto = UserService.get_mapped_projects(username, locale) - return user_dto.to_primitive(), 200 - - -class ProjectsQueriesSummaryAPI(Resource): - def get(self, project_id: int): - """ - Gets project summary - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: project_id - in: path - description: The ID of the project - required: true - type: integer - default: 1 - responses: - 200: - description: Project Summary - 404: - description: Project not found - 500: - description: Internal Server Error - """ - preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - summary = ProjectService.get_project_summary(project_id, preferred_locale) - return summary.to_primitive(), 200 - - -class ProjectsQueriesNoGeometriesAPI(Resource): - def get(self, project_id): - """ - Get HOT Project for mapping - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - - in: query - name: as_file - type: boolean - description: Set to true if file download is preferred - default: False - responses: - 200: - description: Project found - 403: - description: Forbidden - 404: - description: Project not found - 500: - description: Internal Server Error - """ - try: - as_file = ( - strtobool(request.args.get("as_file")) - if request.args.get("as_file") - else False - ) - locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - project_dto = ProjectService.get_project_dto_for_mapper( - project_id, None, locale, True + # Handle file download if requested + if as_file: + project_dto_str = geojson.dumps( + project_dto, indent=4 + ) # Convert to GeoJSON string + file_bytes = io.BytesIO(project_dto_str.encode("utf-8")) + file_bytes.seek(0) # Reset stream position + + return 
StreamingResponse( + file_bytes, + media_type="application/geo+json", + headers={ + "Content-Disposition": f'attachment; filename="project_{project_id}.geojson"' + }, ) - project_dto = project_dto.to_primitive() - if as_file: - return send_file( - io.BytesIO(geojson.dumps(project_dto).encode("utf-8")), - mimetype="application/json", - as_attachment=True, - download_name=f"project_{str(project_id)}.json", - ) + return project_dto + except ProjectServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) + finally: + # this will try to unlock tasks that have been locked too long + try: + ProjectService.auto_unlock_tasks(project_id) + except Exception as e: + logger.critical(str(e)) - return project_dto, 200 - except ProjectServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 - finally: - # this will try to unlock tasks that have been locked too long - try: - ProjectService.auto_unlock_tasks(project_id) - except Exception as e: - current_app.logger.critical(str(e)) - - -class ProjectsQueriesNoTasksAPI(Resource): - @token_auth.login_required - def get(self, project_id): - """ - Retrieves a Tasking-Manager project - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: Project found - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Project not found - 500: - description: Internal Server Error - """ - if not ProjectAdminService.is_user_action_permitted_on_project( - token_auth.current_user(), project_id - ): - return { + +@router.get("/{project_id}/queries/notasks/") +async def get( + request: Request, + project_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Retrieves a Tasking-Manager project + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: Project found + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Project not found + 500: + description: Internal Server Error + """ + if not await ProjectAdminService.is_user_action_permitted_on_project( + request.user.display_name, project_id, db + ): + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 - - project_dto = ProjectAdminService.get_project_dto_for_admin(project_id) - return project_dto.to_primitive(), 200 - - -class ProjectsQueriesAoiAPI(Resource): - def get(self, project_id): - """ - Get AOI of Project - --- - tags: - - projects - produces: - - application/json - parameters: - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - - in: query - name: as_file - type: boolean - description: Set to false if file download not preferred - default: True - responses: - 200: - description: Project found - 
403: - description: Forbidden - 404: - description: Project not found - 500: - description: Internal Server Error - """ - as_file = ( - strtobool(request.args.get("as_file")) - if request.args.get("as_file") - else True + }, + status_code=403, ) - project_aoi = ProjectService.get_project_aoi(project_id) + project_dto = await ProjectAdminService.get_project_dto_for_admin(project_id, db) + return project_dto - if as_file: - return send_file( - io.BytesIO(geojson.dumps(project_aoi).encode("utf-8")), - mimetype="application/json", - as_attachment=True, - download_name=f"{str(project_id)}.geojson", - ) - return project_aoi, 200 - - -class ProjectsQueriesPriorityAreasAPI(Resource): - def get(self, project_id): - """ - Get Priority Areas of a project - --- - tags: - - projects - produces: - - application/json - parameters: - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: Project found - 403: - description: Forbidden - 404: - description: Project not found - 500: - description: Internal Server Error - """ - try: - priority_areas = ProjectService.get_project_priority_areas(project_id) - return priority_areas, 200 - except ProjectServiceError: - return {"Error": "Unable to fetch project"}, 403 - - -class ProjectsQueriesFeaturedAPI(Resource): - def get(self): - """ - Get featured projects - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: false - type: string - default: Token sessionTokenHere== - responses: - 200: - description: Featured projects - 500: - description: Internal Server Error - """ - preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - projects_dto = ProjectService.get_featured_projects(preferred_locale) - return projects_dto.to_primitive(), 200 - - -class ProjectQueriesSimilarProjectsAPI(Resource): - @token_auth.login_required(optional=True) - def get(self, project_id): - """ - Get similar projects - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: false - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Project ID to get similar projects for - required: true - type: integer - default: 1 - - in: query - name: limit - type: integer - description: Number of similar projects to return - default: 4 - responses: - 200: - description: Similar projects - 404: - description: Project not found or project is not published - 500: - description: Internal Server Error - """ - authenticated_user_id = ( - token_auth.current_user() if token_auth.current_user() else None +@router.get("/{project_id}/queries/aoi/") +async def get(request: Request, project_id: int, db: Database = Depends(get_db)): + """ + Get AOI of Project + --- + tags: + - projects + produces: + - application/json + parameters: + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + - in: query + name: as_file + type: boolean + description: Set to false if file download not preferred + default: True + responses: + 200: + description: Project found + 403: + description: Forbidden + 404: + description: Project not found + 500: + description: Internal Server Error + """ + as_file = ( + strtobool(request.query_params.get("as_file")) + if request.query_params.get("as_file") + else False + ) + + 
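# NOTE: the removed Flask handler above fell back to as_file=True when the
# query parameter was absent, and the docstring kept in this hunk still
# advertises "default: True", while the rewritten block now falls back to
# False; if that flip is unintentional, the else branch or the docstring
# should be updated so the two agree.
# Assuming standard FastAPI behaviour (not something this patch relies on),
# the manual strtobool() parsing could also be replaced with a typed query
# parameter in the route signature, e.g. "as_file: bool = Query(False)" with
# Query imported from fastapi, which lets the framework coerce true/false
# style values itself.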
project_aoi = await ProjectService.get_project_aoi(project_id, db) + + if as_file: + aoi_str = geojson.dumps(project_aoi, indent=4) # Convert AOI to GeoJSON string + file_bytes = io.BytesIO(aoi_str.encode("utf-8")) + file_bytes.seek(0) # Reset stream position + + return StreamingResponse( + file_bytes, + media_type="application/geo+json", + headers={ + "Content-Disposition": f'attachment; filename="{project_id}.geojson"' + }, ) - limit = int(request.args.get("limit", 4)) - preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE", "en") - projects_dto = ProjectRecommendationService.get_similar_projects( - project_id, authenticated_user_id, preferred_locale, limit + return project_aoi + + +@router.get("/{project_id}/queries/priority-areas/") +async def get(project_id: int, db: Database = Depends(get_db)): + """ + Get Priority Areas of a project + --- + tags: + - projects + produces: + - application/json + parameters: + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: Project found + 403: + description: Forbidden + 404: + description: Project not found + 500: + description: Internal Server Error + """ + try: + priority_areas = await ProjectService.get_project_priority_areas(project_id, db) + return priority_areas + except ProjectServiceError: + return JSONResponse( + content={"Error": "Unable to fetch project"}, status_code=403 ) - return projects_dto.to_primitive(), 200 - - -class ProjectQueriesActiveProjectsAPI(Resource): - @token_auth.login_required(optional=True) - def get(self): - """ - Get active projects - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: false - type: string - default: Token sessionTokenHere== - - name: interval - in: path - description: Time interval in hours to get active project - required: false - type: integer - default: 24 - responses: - 200: - description: Active projects geojson - 404: - description: Project not found or project is not published - 500: - description: Internal Server Error - """ - interval = request.args.get("interval", "24") - if not interval.isdigit(): - return { + + +@router.get("/queries/featured/") +async def get(request: Request, db: Database = Depends(get_db)): + """ + Get featured projects + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: false + type: string + default: Token sessionTokenHere== + responses: + 200: + description: Featured projects + 500: + description: Internal Server Error + """ + preferred_locale = request.headers.get("accept-language") + projects_dto = await ProjectService.get_featured_projects(preferred_locale, db) + return projects_dto + + +@router.get("/queries/{project_id}/similar-projects/") +async def get(request: Request, project_id: int, db: Database = Depends(get_db)): + """ + Get similar projects + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: false + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Project ID to get similar projects for + required: true + type: integer + default: 1 + - in: query + name: limit + type: integer + description: Number of similar projects to return + default: 4 + responses: + 200: + 
description: Similar projects + 404: + description: Project not found or project is not published + 500: + description: Internal Server Error + """ + authenticated_user_id = ( + request.user.display_name + if request.user and request.user.display_name + else None + ) + limit = int(request.query_params.get("limit", 4)) + preferred_locale = request.headers.get("accept-language", "en") + projects_dto = await ProjectRecommendationService.get_similar_projects( + db, project_id, authenticated_user_id, preferred_locale, limit + ) + return projects_dto + + +@router.get("/queries/active/") +async def get(request: Request, db: Database = Depends(get_db)): + """ + Get active projects + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: false + type: string + default: Token sessionTokenHere== + - name: interval + in: path + description: Time interval in hours to get active project + required: false + type: integer + default: 24 + responses: + 200: + description: Active projects geojson + 404: + description: Project not found or project is not published + 500: + description: Internal Server Error + """ + interval = request.query_params.get("interval", "24") + if not interval.isdigit(): + return JSONResponse( + content={ "Error": "Interval must be a number greater than 0 and less than or equal to 24" - }, 400 - interval = int(interval) - if interval <= 0 or interval > 24: - return { + }, + status_code=400, + ) + interval = int(interval) + if interval <= 0 or interval > 24: + return JSONResponse( + content={ "Error": "Interval must be a number greater than 0 and less than or equal to 24" - }, 400 - projects_dto = ProjectService.get_active_projects(interval) - return projects_dto, 200 + }, + status_code=400, + ) + projects_dto = await ProjectService.get_active_projects(interval, db) + return projects_dto diff --git a/backend/api/projects/statistics.py b/backend/api/projects/statistics.py index afe0bd02b9..5861a9bd79 100644 --- a/backend/api/projects/statistics.py +++ b/backend/api/projects/statistics.py @@ -1,91 +1,100 @@ -from flask_restful import Resource -from backend.services.stats_service import StatsService +# from flask_restful import Resource +from databases import Database +from fastapi import APIRouter, Depends + +from backend.db import get_db from backend.services.project_service import ProjectService +from backend.services.stats_service import StatsService + +router = APIRouter( + prefix="/projects", + tags=["projects"], + responses={404: {"description": "Not found"}}, +) -class ProjectsStatisticsQueriesPopularAPI(Resource): - def get(self): - """ - Get popular projects - --- - tags: - - projects - produces: - - application/json - responses: - 200: - description: Popular Projects stats - 500: - description: Internal Server Error - """ - stats = StatsService.get_popular_projects() - return stats.to_primitive(), 200 +@router.get("/queries/popular/") +async def get(db: Database = Depends(get_db)): + """ + Get popular projects + --- + tags: + - projects + produces: + - application/json + responses: + 200: + description: Popular Projects stats + 500: + description: Internal Server Error + """ + stats = await StatsService.get_popular_projects(db) + return stats -class ProjectsStatisticsAPI(Resource): - def get(self, project_id): - """ - Get Project Stats - --- - tags: - - projects - produces: - - application/json - parameters: - - in: header - name: Accept-Language - description: 
Language user is requesting - type: string - required: true - default: en - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: Project stats - 404: - description: Not found - 500: - description: Internal Server Error - """ - # preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - summary = ProjectService.get_project_stats(project_id) - return summary.to_primitive(), 200 +@router.get("/{project_id}/statistics/") +async def get(project_id: int, db: Database = Depends(get_db)): + """ + Get Project Stats + --- + tags: + - projects + produces: + - application/json + parameters: + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: Project stats + 404: + description: Not found + 500: + description: Internal Server Error + """ + summary = await ProjectService.get_project_stats(project_id, db) + return summary -class ProjectsStatisticsQueriesUsernameAPI(Resource): - def get(self, project_id, username): - """ - Get detailed stats about user - --- - tags: - - projects - produces: - - application/json - parameters: - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - - name: username - in: path - description: Mapper's OpenStreetMap username - required: true - type: string - default: Thinkwhere - responses: - 200: - description: User found - 404: - description: User not found - 500: - description: Internal Server Error - """ - stats_dto = ProjectService.get_project_user_stats(project_id, username) - return stats_dto.to_primitive(), 200 +@router.get("/{project_id}/statistics/queries/{username}/") +async def get(project_id: int, username: str, db: Database = Depends(get_db)): + """ + Get detailed stats about user + --- + tags: + - projects + produces: + - application/json + parameters: + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + - name: username + in: path + description: Mapper's OpenStreetMap username + required: true + type: string + default: Thinkwhere + responses: + 200: + description: User found + 404: + description: User not found + 500: + description: Internal Server Error + """ + stats_dto = await ProjectService.get_project_user_stats(project_id, username, db) + return stats_dto diff --git a/backend/api/projects/teams.py b/backend/api/projects/teams.py index 3dbb12a337..38ad48385c 100644 --- a/backend/api/projects/teams.py +++ b/backend/api/projects/teams.py @@ -1,239 +1,294 @@ -from flask_restful import Resource, request, current_app -from schematics.exceptions import DataError +from databases import Database +from fastapi.responses import JSONResponse +from fastapi import APIRouter, Depends, Request, Body +from loguru import logger -from backend.services.team_service import TeamService, TeamServiceError +from backend.db import get_db +from backend.models.dtos.user_dto import AuthUserDTO from backend.services.project_admin_service import ProjectAdminService from backend.services.project_service import ProjectService -from backend.services.users.authentication_service import token_auth +from backend.services.team_service import TeamService, TeamServiceError +from backend.services.users.authentication_service import login_required + +router = 
APIRouter( + prefix="/projects", + tags=["projects"], + responses={404: {"description": "Not found"}}, +) -class ProjectsTeamsAPI(Resource): - @token_auth.login_required - def get(self, project_id): - """Get teams assigned with a project - --- - tags: - - teams - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: Teams listed successfully - 403: - description: Forbidden, if user is not authenticated - 404: - description: Not found - 500: - description: Internal Server Error - """ - # Check if project exists - ProjectService.exists(project_id) - teams_dto = TeamService.get_project_teams_as_dto(project_id) - return teams_dto.to_primitive(), 200 +@router.get("/{project_id}/teams/") +async def get( + request: Request, + project_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """Get teams assigned with a project + --- + tags: + - teams + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: Teams listed successfully + 403: + description: Forbidden, if user is not authenticated + 404: + description: Not found + 500: + description: Internal Server Error + """ + # Check if project exists + await ProjectService.exists(project_id, db) + teams_dto = await TeamService.get_project_teams_as_dto(project_id, db) + return teams_dto - @token_auth.login_required - def post(self, team_id, project_id): - """Assign a team to a project - --- - tags: - - teams - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - - name: team_id - in: path - description: Unique team ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: The role that the team will have on the project - schema: - properties: - role: - type: string - responses: - 201: - description: Team project assignment created - 401: - description: Forbidden, if user is not a manager of the project - 403: - description: Forbidden, if user is not authenticated - 404: - description: Not found - 500: - description: Internal Server Error - """ - if not TeamService.is_user_team_manager(team_id, token_auth.current_user()): - return { + +@router.post("/{project_id}/teams/{team_id}/") +async def post( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + team_id: int = None, + project_id: int = None, + data: dict = Body(...), +): + """Assign a team to a project + --- + tags: + - teams + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + - name: team_id + in: 
path + description: Unique team ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: The role that the team will have on the project + schema: + properties: + role: + type: string + responses: + 201: + description: Team project assignment created + 401: + description: Forbidden, if user is not a manager of the project + 403: + description: Forbidden, if user is not authenticated + 404: + description: Not found + 500: + description: Internal Server Error + """ + if not await TeamService.is_user_team_manager(team_id, user.id, db): + return JSONResponse( + content={ "Error": "User is not an admin or a manager for the team", "SubCode": "UserPermissionError", - }, 401 + }, + status_code=403, + ) - try: - role = request.get_json(force=True)["role"] - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 + try: + role = data["role"] + except ValueError as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={"Error": str(e), "SubCode": "InvalidData"}, status_code=400 + ) - try: - if not ProjectAdminService.is_user_action_permitted_on_project( - token_auth.current_user, project_id - ): - raise ValueError() - TeamService.add_team_project(team_id, project_id, role) - return ( - { - "Success": "Team {} assigned to project {} with role {}".format( - team_id, project_id, role - ) - }, - 201, - ) - except ValueError: - return { + try: + if not await ProjectAdminService.is_user_action_permitted_on_project( + user.id, project_id, db + ): + raise ValueError() + await TeamService.add_team_project(team_id, project_id, role, db) + return JSONResponse( + content={ + "Success": "Team {} assigned to project {} with role {}".format( + team_id, project_id, role + ) + }, + status_code=201, + ) + except ValueError: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 + }, + status_code=403, + ) + - @token_auth.login_required - def patch(self, team_id, project_id): - """Update role of a team on a project - --- - tags: - - teams - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - - name: team_id - in: path - description: Unique team ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: The role that the team will have on the project - schema: - properties: - role: - type: string - responses: - 201: - description: Team project assignment created - 401: - description: Forbidden, if user is not a manager of the project - 403: - description: Forbidden, if user is not authenticated - 404: - description: Not found - 500: - description: Internal Server Error - """ - try: - role = request.get_json(force=True)["role"] - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 +@router.patch("/{team_id}/projects/{project_id}/") +async def patch( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + team_id: int = None, + project_id: int = None, + data: dict = Body(...), +): + """Update role of a team on a project + --- + tags: + - 
teams + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + - name: team_id + in: path + description: Unique team ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: The role that the team will have on the project + schema: + properties: + role: + type: string + responses: + 201: + description: Team project assignment created + 401: + description: Forbidden, if user is not a manager of the project + 403: + description: Forbidden, if user is not authenticated + 404: + description: Not found + 500: + description: Internal Server Error + """ + try: + role = data["role"] + except ValueError as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={"Error": str(e), "SubCode": "InvalidData"}, status_code=400 + ) - try: - if not ProjectAdminService.is_user_action_permitted_on_project( - token_auth.current_user, project_id - ): - raise ValueError() - TeamService.change_team_role(team_id, project_id, role) - return {"Status": "Team role updated successfully."}, 200 - except ValueError: - return { + try: + if not await ProjectAdminService.is_user_action_permitted_on_project( + user.id, project_id, db + ): + raise ValueError() + await TeamService.change_team_role(team_id, project_id, role, db) + return JSONResponse( + content={"Status": "Team role updated successfully."}, status_code=201 + ) + except ValueError: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 - except TeamServiceError as e: - return str(e), 402 + }, + status_code=403, + ) + except TeamServiceError as e: + return JSONResponse(content={"Error": str(e)}, status_code=402) + - @token_auth.login_required - def delete(self, team_id, project_id): - """ - Deletes the specified team project assignment - --- - tags: - - teams - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: message_id - in: path - description: Unique message ID - required: true - type: integer - default: 1 - responses: - 200: - description: Team unassigned of the project - 401: - description: Forbidden, if user is not a manager of the project - 403: - description: Forbidden, if user is not authenticated - 404: - description: Not found - 500: - description: Internal Server Error - """ - try: - if not ProjectAdminService.is_user_action_permitted_on_project( - token_auth.current_user, project_id - ): - raise ValueError() - TeamService.delete_team_project(team_id, project_id) - return {"Success": True}, 200 - except ValueError: - return { +@router.delete("/{team_id}/projects/{project_id}/") +async def delete( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + team_id: int = None, + project_id: int = None, +): + """ + Deletes the specified team project assignment + --- + tags: + - teams + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: message_id + in: path + description: Unique message ID + required: true + 
type: integer + default: 1 + responses: + 200: + description: Team unassigned of the project + 401: + description: Forbidden, if user is not a manager of the project + 403: + description: Forbidden, if user is not authenticated + 404: + description: Not found + 500: + description: Internal Server Error + """ + try: + if not await ProjectAdminService.is_user_action_permitted_on_project( + user.id, project_id, db + ): + raise ValueError() + await TeamService.delete_team_project(team_id, project_id, db) + return JSONResponse(content={"Success": True}, status_code=200) + except ValueError: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 + }, + status_code=403, + ) diff --git a/backend/api/system/applications.py b/backend/api/system/applications.py index 13820c349c..dab728776d 100644 --- a/backend/api/system/applications.py +++ b/backend/api/system/applications.py @@ -1,132 +1,154 @@ -from flask_restful import Resource +from databases import Database +from fastapi import APIRouter, Depends, Request, Response +from backend.db import get_db +from backend.models.dtos.user_dto import AuthUserDTO +from backend.models.postgis.application import Application from backend.services.application_service import ApplicationService -from backend.services.users.authentication_service import token_auth +from backend.services.users.authentication_service import login_required +router = APIRouter( + prefix="/system", + tags=["system"], + responses={404: {"description": "Not found"}}, +) -class SystemApplicationsRestAPI(Resource): - @token_auth.login_required - def get(self): - """ - Gets application keys for a user - --- - tags: - - system - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - responses: - 200: - description: User keys retrieved - 404: - description: User has no keys - 500: - description: A problem occurred - """ - tokens = ApplicationService.get_all_tokens_for_logged_in_user( - token_auth.current_user() - ) - if len(tokens) == 0: - return 400 - return tokens.to_primitive(), 200 - @token_auth.login_required - def post(self): - """ - Creates an application key for the user - --- - tags: - - system - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - responses: - 200: - description: Key generated successfully - 302: - description: User is not authorized to create a key - 500: - description: A problem occurred - """ - token = ApplicationService.create_token(token_auth.current_user()) - return token.to_primitive(), 200 +@router.get("/authentication/applications/") +async def get( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Gets application keys for a user + --- + tags: + - system + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + responses: + 200: + description: User keys retrieved + 404: + description: User has no keys + 500: + description: A problem occurred + """ + tokens = await ApplicationService.get_all_tokens_for_logged_in_user(user.id, db) + return tokens.model_dump(by_alias=True) - def patch(self, application_key): - 
""" - Checks the validity of an application key - --- - tags: - - system - produces: - - application/json - parameters: - - in: path - name: application_key - description: Application key to test - type: string - required: true - default: 1 - responses: - 200: - description: Key is valid - 302: - description: Key is not valid - 500: - description: A problem occurred - """ - is_valid = ApplicationService.check_token(application_key) - if is_valid: - return 200 - else: - return 302 - @token_auth.login_required - def delete(self, application_key): - """ - Deletes an application key for a user - --- - tags: - - system - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: path - name: application_key - description: Application key to remove - type: string - required: true - default: 1 - responses: - 200: - description: Key deleted successfully - 302: - description: User is not authorized to delete the key - 404: - description: Key not found - 500: - description: A problem occurred - """ - token = ApplicationService.get_token(application_key) - if token.user == token_auth.current_user(): - token.delete() - return 200 - else: - return 302 +@router.post("/authentication/applications/") +async def post( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Creates an application key for the user + --- + tags: + - system + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + responses: + 200: + description: Key generated successfully + 302: + description: User is not authorized to create a key + 500: + description: A problem occurred + """ + token = await ApplicationService.create_token(user.id, db) + return token.model_dump(by_alias=True) + + +@router.patch("/authentication/applications/{application_key}/") +async def patch(request: Request, application_key: str, db: Database = Depends(get_db)): + """ + Checks the validity of an application key + --- + tags: + - system + produces: + - application/json + parameters: + - in: path + name: application_key + description: Application key to test + type: string + required: true + default: 1 + responses: + 200: + description: Key is valid + 302: + description: Key is not valid + 500: + description: A problem occurred + """ + is_valid = await ApplicationService.check_token(application_key, db) + if is_valid: + return Response(status_code=200) + else: + return Response(status_code=302) + + +@router.delete("/authentication/applications/{application_key}/") +async def delete( + request: Request, + application_key: str, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Deletes an application key for a user + --- + tags: + - system + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: path + name: application_key + description: Application key to remove + type: string + required: true + default: 1 + responses: + 200: + description: Key deleted successfully + 302: + description: User is not authorized to delete the key + 404: + description: Key not found + 500: + description: A problem occurred + """ + token = await 
ApplicationService.get_token(application_key, db) + if token.user == user.id: + await Application.delete(token, db) + return Response(status_code=200) + else: + return Response(status_code=302) diff --git a/backend/api/system/authentication.py b/backend/api/system/authentication.py index 58453ddad6..c335c6681c 100644 --- a/backend/api/system/authentication.py +++ b/backend/api/system/authentication.py @@ -1,164 +1,187 @@ -from flask import current_app, request -from flask_restful import Resource +# from flask import current_app, request +# from flask_restful import +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.logger import logger +from fastapi.responses import JSONResponse from oauthlib.oauth2.rfc6749.errors import InvalidGrantError from backend import osm -from backend.config import EnvironmentConfig +from backend.config import settings +from backend.db import get_db from backend.services.users.authentication_service import ( AuthenticationService, AuthServiceError, ) +router = APIRouter( + prefix="/system", + tags=["system"], + responses={404: {"description": "Not found"}}, +) -class SystemAuthenticationLoginAPI(Resource): - def get(self): - """ - Redirects user to OSM to authenticate - --- - tags: - - system - produces: - - application/json - parameters: - - in: query - name: redirect_uri - description: Route to redirect user once authenticated - type: string - default: /take/me/here - responses: - 200: - description: oauth2 params - """ - redirect_uri = request.args.get( - "redirect_uri", EnvironmentConfig.OAUTH_REDIRECT_URI - ) - authorize_url = f"{EnvironmentConfig.OSM_SERVER_URL}/oauth2/authorize" - state = AuthenticationService.generate_random_state() - - osm.redirect_uri = redirect_uri - osm.state = state - - login_url, state = osm.authorization_url(authorize_url) - return {"auth_url": login_url, "state": state}, 200 - - -class SystemAuthenticationCallbackAPI(Resource): - def get(self): - """ - Handles the OSM OAuth callback - --- - tags: - - system - produces: - - application/json - parameters: - - in: query - name: redirect_uri - description: Route to redirect user once authenticated - type: string - default: /take/me/here - required: false - - in: query - name: code - description: Code obtained after user authorization - type: string - required: true - - in: query - name: email_address - description: Email address to used for email notifications from TM. 
- type: string - required: false - responses: - 302: - description: Redirects to login page, or login failed page - 400: - description: Missing/Invalid code parameter - 500: - description: A problem occurred authenticating the user - 502: - description: A problem occurred negotiating with the OSM API - """ - - token_url = f"{EnvironmentConfig.OSM_SERVER_URL}/oauth2/token" - authorization_code = request.args.get("code", None) - if authorization_code is None: - return {"SubCode": "InvalidData", "Error": "Missing code parameter"}, 400 - - email = request.args.get("email_address", None) - redirect_uri = request.args.get( - "redirect_uri", EnvironmentConfig.OAUTH_REDIRECT_URI + +# class SystemAuthenticationLoginAPI(): +@router.get("/authentication/login/") +async def get(request: Request): + """ + Redirects user to OSM to authenticate + --- + tags: + - system + produces: + - application/json + parameters: + - in: query + name: redirect_uri + description: Route to redirect user once authenticated + type: string + default: /take/me/here + responses: + 200: + description: oauth2 params + """ + redirect_uri = request.query_params.get("redirect_uri", settings.OAUTH_REDIRECT_URI) + authorize_url = f"{settings.OSM_SERVER_URL}/oauth2/authorize" + state = AuthenticationService.generate_random_state() + + osm.redirect_uri = redirect_uri + osm.state = state + + login_url, state = osm.authorization_url(authorize_url) + return {"auth_url": login_url, "state": state} + + +# class SystemAuthenticationCallbackAPI(): +@router.get("/authentication/callback/") +async def get(request: Request, db: Database = Depends(get_db)): + """ + Handles the OSM OAuth callback + --- + tags: + - system + produces: + - application/json + parameters: + - in: query + name: redirect_uri + description: Route to redirect user once authenticated + type: string + default: /take/me/here + required: false + - in: query + name: code + description: Code obtained after user authorization + type: string + required: true + - in: query + name: email_address + description: Email address to used for email notifications from TM. 
+          type: string
+          required: false
+    responses:
+        302:
+            description: Redirects to login page, or login failed page
+        400:
+            description: Missing/Invalid code parameter
+        500:
+            description: A problem occurred authenticating the user
+        502:
+            description: A problem occurred negotiating with the OSM API
+    """
+
+    token_url = f"{settings.OSM_SERVER_URL}/oauth2/token"
+    authorization_code = request.query_params.get("code", None)
+    if authorization_code is None:
+        return JSONResponse(
+            content={"SubCode": "InvalidData", "Error": "Missing code parameter"},
+            status_code=400,
+        )
+
+    email = request.query_params.get("email_address", None)
+    redirect_uri = request.query_params.get("redirect_uri", settings.OAUTH_REDIRECT_URI)
+    osm.redirect_uri = redirect_uri
+    try:
+        osm_resp = osm.fetch_token(
+            token_url=token_url,
+            client_secret=settings.OAUTH_CLIENT_SECRET,
+            code=authorization_code,
        )
-        osm.redirect_uri = redirect_uri
-        try:
-            osm_resp = osm.fetch_token(
-                token_url=token_url,
-                client_secret=EnvironmentConfig.OAUTH_CLIENT_SECRET,
-                code=authorization_code,
-            )
-        except InvalidGrantError:
-            return {
+    except InvalidGrantError:
+        return JSONResponse(
+            content={
                "Error": "The provided authorization grant is invalid, expired or revoked",
                "SubCode": "InvalidGrantError",
-            }, 400
-        if osm_resp is None:
-            current_app.logger.critical("Couldn't obtain token from OSM.")
-            return {
+            },
+            status_code=400,
+        )
+    if osm_resp is None:
+        logger.critical("Couldn't obtain token from OSM.")
+        return JSONResponse(
+            content={
                "SubCode": "TokenFetchError",
                "Error": "Couldn't fetch token from OSM.",
-            }, 502
+            },
+            status_code=502,
+        )
-        user_info_url = f"{EnvironmentConfig.OAUTH_API_URL}/user/details.json"
-        osm_response = osm.get(user_info_url)  # Get details for the authenticating user
+    user_info_url = f"{settings.OAUTH_API_URL}/user/details.json"
-        if osm_response.status_code != 200:
-            current_app.logger.critical("Error response from OSM")
-            return {
+    osm_response = osm.get(user_info_url)  # Get details for the authenticating user
+    if osm_response.status_code != 200:
+        logger.critical("Error response from OSM")
+        return JSONResponse(
+            content={
                "SubCode": "OSMServiceError",
                "Error": "Couldn't fetch user details from OSM.",
-            }, 502
-
-        try:
-            user_params = AuthenticationService.login_user(osm_response.json(), email)
-            user_params["session"] = osm_resp
-            return user_params, 200
-        except AuthServiceError:
-            return {"Error": "Unable to authenticate", "SubCode": "AuthError"}, 500
-
-
-class SystemAuthenticationEmailAPI(Resource):
-    def get(self):
-        """
-        Authenticates user owns email address
-        ---
-        tags:
-          - system
-        produces:
-          - application/json
-        parameters:
-            - in: query
-              name: username
-              type: string
-              default: thinkwhere
-            - in: query
-              name: token
-              type: string
-              default: 1234dvsdf
-        responses:
-            301:
-                description: Will redirect to email validation page
-            403:
-                description: Forbidden
-            404:
-                description: User not found
-            500:
-                description: Internal Server Error
-        """
-        try:
-            username = request.args.get("username")
-            token = request.args.get("token")
-            AuthenticationService.authenticate_email_token(username, token)
-
-            return {"Status": "OK"}, 200
-
-        except AuthServiceError as e:
-            return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403
+            },
+            status_code=502,
+        )
+
+    try:
+        user_params = await AuthenticationService.login_user(
+            osm_response.json(), email, db
+        )
+        user_params["session"] = osm_resp
+        return user_params
+    except AuthServiceError:
+        return JSONResponse(
+            content={"Error": "Unable to authenticate", "SubCode": "AuthError"},
+            status_code=500,
+        )
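For readers tracing the OAuth flow above: these handlers drive a module-level osm client imported from backend, which is used like a requests-oauthlib OAuth2Session (it exposes authorization_url, fetch_token and get). The sketch below is a minimal illustration of how such a session could be assembled from the same settings; the helper name, the OAUTH_CLIENT_ID attribute and the read_prefs scope are assumptions made for the example, not something taken from this patch.

    # Illustrative sketch only; the real osm object is constructed inside the
    # backend package, and the names flagged below are assumptions.
    from requests_oauthlib import OAuth2Session

    from backend.config import settings


    def build_osm_session(redirect_uri: str | None = None) -> OAuth2Session:
        """Assemble an OAuth2 session equivalent to the osm client used above."""
        return OAuth2Session(
            client_id=settings.OAUTH_CLIENT_ID,  # assumed settings attribute
            redirect_uri=redirect_uri or settings.OAUTH_REDIRECT_URI,
            scope=["read_prefs"],  # assumed minimal scope for /user/details.json
        )

With a session built this way, authorization_url(f"{settings.OSM_SERVER_URL}/oauth2/authorize") yields the auth_url/state pair returned by the login endpoint, while fetch_token(...) followed by get(f"{settings.OAUTH_API_URL}/user/details.json") mirrors the token exchange and user lookup performed in the callback handler.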
+ + +@router.get("/authentication/email/") +async def get(request: Request, db: Database = Depends(get_db)): + """ + Authenticates user owns email address + --- + tags: + - system + produces: + - application/json + parameters: + - in: query + name: username + type: string + default: thinkwhere + - in: query + name: token + type: string + default: 1234dvsdf + responses: + 301: + description: Will redirect to email validation page + 403: + description: Forbidden + 404: + description: User not found + 500: + description: Internal Server Error + """ + try: + username = request.query_params.get("username") + token = request.query_params.get("token") + await AuthenticationService.authenticate_email_token(username, token, db) + + return JSONResponse(content={"Status": "OK"}, status_code=200) + + except AuthServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) diff --git a/backend/api/system/banner.py b/backend/api/system/banner.py index 272bc3e12d..7f73285b52 100644 --- a/backend/api/system/banner.py +++ b/backend/api/system/banner.py @@ -1,96 +1,109 @@ -from flask import current_app -from flask_restful import Resource, request -from schematics.exceptions import DataError +from databases import Database +from fastapi import APIRouter, Body, Depends, Request +from fastapi.logger import logger +from pydantic import ValidationError -from backend.models.postgis.banner import Banner +from backend.db import get_db +from backend.models.dtos.user_dto import AuthUserDTO from backend.models.dtos.banner_dto import BannerDTO +from backend.models.postgis.banner import Banner from backend.models.postgis.statuses import UserRole -from backend.services.users.authentication_service import token_auth +from backend.services.users.authentication_service import login_required from backend.services.users.user_service import UserService +router = APIRouter( + prefix="/system", + tags=["system"], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/banner/", response_model=BannerDTO) +async def get(db: Database = Depends(get_db)): + """ + Returns a banner + --- + tags: + - system + produces: + - application/json + responses: + 200: + description: Fetched banner successfully + 500: + description: Internal Server Error + """ -class SystemBannerAPI(Resource): - def get(self): - """ - Returns a banner - --- - tags: - - system - produces: - - application/json - responses: - 200: - description: Fetched banner successfully - 500: - description: Internal Server Error - """ + banner = await Banner.get(db) + return banner - banner = Banner.get() - return banner.as_dto().to_primitive(), 200 - @token_auth.login_required - def patch(self): - """ - Updates the current banner in the DB - --- - tags: - - system - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object for updating the banner. Message can be written in markdown (max 255 chars) \n - \n - Allowed tags are `a`, `b`, `i`, `h3`, `h4`, `h5`, `h6`, `p`, `pre`, `strong` - schema: - properties: - message: - description: The message to display on the banner. Max 255 characters allowed. 
- required: true - type: string - default: Welcome to the Tasking Manager - visible: - description: Whether the banner is visible or not - type: boolean - default: false - responses: - 201: - description: Banner updated successfully - 400: - description: Client Error - Invalid Request - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - """ +@router.patch("/banner/", response_model=BannerDTO) +async def patch( + request: Request, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), + banner: BannerDTO = Body(...), +): + """ + Updates the current banner in the DB + --- + tags: + - system + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object for updating the banner. Message can be written in markdown (max 255 chars) \n + \n + Allowed tags are `a`, `b`, `i`, `h3`, `h4`, `h5`, `h6`, `p`, `pre`, `strong` + schema: + properties: + message: + description: The message to display on the banner. Max 255 characters allowed. + required: true + type: string + default: Welcome to the Tasking Manager + visible: + description: Whether the banner is visible or not + type: boolean + default: false + responses: + 201: + description: Banner updated successfully + 400: + description: Client Error - Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + """ - try: - banner_dto = BannerDTO(request.get_json()) - banner_dto.validate() - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return {"Error": "Unable to create project", "SubCode": "InvalidData"}, 400 + try: + banner_dto = banner + except ValidationError as e: + logger.error(f"error validating request: {str(e)}") + return {"Error": "Unable to create project", "SubCode": "InvalidData"}, 400 - # Check user permission for this action - authenticated_user_id = token_auth.current_user() - authenticated_user = UserService.get_user_by_id(authenticated_user_id) - if authenticated_user.role != UserRole.ADMIN.value: - return { - "Error": "Banner can only be updated by system admins", - "SubCode": "OnlyAdminAccess", - }, 403 + # Check user permission for this action + authenticated_user = await UserService.get_user_by_id(user.id, db) + if authenticated_user.role != UserRole.ADMIN.value: + return { + "Error": "Banner can only be updated by system admins", + "SubCode": "OnlyAdminAccess", + }, 403 - banner_dto.message = Banner.to_html( - banner_dto.message - ) # Convert the markdown message to html - banner = Banner.get() - banner.update_from_dto(banner_dto) - return banner.as_dto().to_primitive(), 200 + banner_dto.message = Banner.to_html( + banner_dto.message if banner_dto.message is not None else "" + ) # Convert the markdown message to html + banner = await Banner.get(db) + updated_banner = await Banner.update_from_dto(banner, db, banner_dto) + return updated_banner diff --git a/backend/api/system/general.py b/backend/api/system/general.py index 968c787548..ba2845e827 100644 --- a/backend/api/system/general.py +++ b/backend/api/system/general.py @@ -1,267 +1,283 @@ +from databases import Database +from datetime import datetime +from fastapi import APIRouter, Depends, Request, Body +from fastapi.responses import JSONResponse import requests -from flask import jsonify -from flask_restful import 
Resource, request, current_app -from flask_swagger import swagger -from backend.services.settings_service import SettingsService -from backend.services.messaging.smtp_service import SMTPService +from backend.db import get_db from backend.models.postgis.release_version import ReleaseVersion +from backend.services.messaging.smtp_service import SMTPService +from backend.services.settings_service import SettingsService +router = APIRouter( + prefix="/system", + tags=["system"], + responses={404: {"description": "Not found"}}, +) -class SystemDocsAPI(Resource): - """ - This Resource provides a simple endpoint for flask-swagger to generate the API docs, - https://github.com/gangverk/flask-swagger - """ - def get(self): - """ - Generates Swagger UI readable JSON - --- - tags: - - system - definitions: - - schema: - id: GeoJsonPolygon - properties: - type: - type: string - default: Polygon - coordinates: - type: array - items: - type: number - default: [[-4.0237,56.0904],[-3.9111,56.1715],[-3.8122,56.0980],[-4.0237,56.0904]] - - schema: - id: GeoJsonMultiPolygon - properties: - type: - type: string - default: MultiPolygon - coordinates: - type: array - items: - type: number - default: [[[-4.0237,56.0904],[-3.9111,56.1715],[-3.8122,56.0980],[-4.0237,56.0904]]] - - schema: - id: ProjectInfo - properties: - locale: - type: string - default: en - name: - type: string - default: Thinkwhere Project - shortDescription: - type: string - default: Awesome little project - description: - type: string - default: Awesome little project and a little bit more - instructions: - type: string - default: Complete the tasks - perTaskInstructions: - type: string - default: Use Thinkwhere Imagery Only - - schema: - id: GeoJsonFeature +@router.get("/docs/json/", response_class=JSONResponse) +async def get(request: Request): + """ + Generates Swagger UI readable JSON + --- + tags: + - system + definitions: + - schema: + id: GeoJsonPolygon + properties: + type: + type: string + default: Polygon + coordinates: + type: array + items: + type: number + default: [[-4.0237,56.0904],[-3.9111,56.1715],[-3.8122,56.0980],[-4.0237,56.0904]] + - schema: + id: GeoJsonMultiPolygon + properties: + type: + type: string + default: MultiPolygon + coordinates: + type: array + items: + type: number + default: [[[-4.0237,56.0904],[-3.9111,56.1715],[-3.8122,56.0980],[-4.0237,56.0904]]] + - schema: + id: ProjectInfo + properties: + locale: + type: string + default: en + name: + type: string + default: Thinkwhere Project + shortDescription: + type: string + default: Awesome little project + description: + type: string + default: Awesome little project and a little bit more + instructions: + type: string + default: Complete the tasks + perTaskInstructions: + type: string + default: Use Thinkwhere Imagery Only + - schema: + id: GeoJsonFeature + properties: + type: + type: string + default: Feature + geometry: + schema: + $ref: "#/definitions/GeoJsonMultiPolygon" properties: - type: - type: string - default: Feature - geometry: - schema: - $ref: "#/definitions/GeoJsonMultiPolygon" + type: object properties: - type: object - properties: - x: - type: integer - default: 2402 - y: - type: integer - default: 1736 - zoom: - type: integer - default: 12 - isSquare: - type: boolean - default: true - - schema: - id: ValidatedTask - properties: - taskId: - type: integer - default: 1 - status: - type: string - default: VALIDATED - comment: - type: string - default: Nice work :) - - schema: - id: ResetTask - properties: - taskId: - type: integer - default: 1 - 
comment: - type: string - default: Work in progress - - schema: - id: ProjectTeams - properties: - teamId: - type: integer - default: 1 - role: - type: string - default: MAPPER - - schema: - id: TeamMembers - properties: - userName: - type: string - default: user_1 - function: - type: string - default: MANAGER + x: + type: integer + default: 2402 + y: + type: integer + default: 1736 + zoom: + type: integer + default: 12 + isSquare: + type: boolean + default: true + - schema: + id: ValidatedTask + properties: + taskId: + type: integer + default: 1 + status: + type: string + default: VALIDATED + comment: + type: string + default: Nice work :) + - schema: + id: ResetTask + properties: + taskId: + type: integer + default: 1 + comment: + type: string + default: Work in progress + - schema: + id: ProjectTeams + properties: + teamId: + type: integer + default: 1 + role: + type: string + default: MAPPER + - schema: + id: TeamMembers + properties: + userName: + type: string + default: user_1 + function: + type: string + default: MANAGER - """ - swag = swagger(current_app) - swag["info"]["title"] = "Tasking Manager backend API" - swag["info"]["description"] = "API endpoints for the backend" - swag["info"]["version"] = "2.0.0" + """ + swag = request.app.openapi() + swag["info"]["title"] = "Tasking Manager backend API" + swag["info"]["description"] = "API endpoints for the backend" + swag["info"]["version"] = "2.0.0" - return jsonify(swag) + return JSONResponse(content=swag, status_code=200) -class SystemHeartbeatAPI(Resource): +@router.get("/languages/") +async def get(): """ - /api/health-check + Gets all supported languages + --- + tags: + - system + produces: + - application/json + responses: + 200: + description: Supported Languages + 500: + description: Internal Server Error """ - - def get(self): - """ - Simple health-check, if this is unreachable load balancers should be configures to raise an alert - --- - tags: - - system - produces: - - application/json - responses: - 200: - description: Service is Healthy - """ - release = ReleaseVersion.get() - if release is not None: - release = { - "version": release.tag_name, - "published_at": str(release.published_at), - } - return {"status": "healthy", "release": release}, 200 + languages = SettingsService.get_settings() + return languages.model_dump(by_alias=True) -class SystemLanguagesAPI(Resource): - def get(self): - """ - Gets all supported languages - --- - tags: - - system - produces: - - application/json - responses: - 200: - description: Supported Languages - 500: - description: Internal Server Error - """ - languages = SettingsService.get_settings() - return languages.to_primitive(), 200 +@router.get("/heartbeat/") +async def get(db: Database = Depends(get_db)): + """ + Simple health-check, if this is unreachable load balancers should be configured to raise an alert + --- + tags: + - system + produces: + - application/json + responses: + 200: + description: Service is Healthy + """ + query = """ + SELECT tag_name, published_at + FROM release_version + ORDER BY published_at DESC + LIMIT 1 + """ + release = await db.fetch_one(query) + if release: + release_info = { + "version": release["tag_name"], + "published_at": release["published_at"].isoformat(), + } + else: + release_info = None -class SystemContactAdminRestAPI(Resource): - def post(self): - """ - Send an email to the system admin - --- - tags: - - system - produces: - - application/json - parameters: - - in: body - name: body - required: true - description: JSON object with the data of 
the message to send to the system admin - schema: - properties: - name: - type: string - default: The name of the sender - email: - type: string - default: The email of the sender - content: - type: string - default: The content of the message - responses: - 201: - description: Email sent successfully - 400: - description: Invalid Request - 501: - description: Not Implemented - 500: - description: A problem occurred - """ - try: - data = request.get_json() - SMTPService.send_contact_admin_email(data) - return {"Success": "Email sent"}, 201 - except ValueError as e: - return {"Error": str(e), "SubCode": "NotImplemented"}, 501 + return {"status": "Fastapi healthy", "release": release_info} -class SystemReleaseAPI(Resource): - def post(self): - """ - Fetch latest release version form github and save to database. - --- - tags: - - system - produces: - - application/json - responses: - 201: - description: Saved version successfully to database - 502: - description: Couldn't fetch latest release from github - 500: - description: Internal server error - """ - response = requests.get( - "https://api.github.com/repos/hotosm/tasking-manager/releases/latest" +@router.post("/contact-admin/") +async def post(request: Request, data: dict = Body(...)): + """ + Send an email to the system admin + --- + tags: + - system + produces: + - application/json + parameters: + - in: body + name: body + required: true + description: JSON object with the data of the message to send to the system admin + schema: + properties: + name: + type: string + default: The name of the sender + email: + type: string + default: The email of the sender + content: + type: string + default: The content of the message + responses: + 201: + description: Email sent successfully + 400: + description: Invalid Request + 501: + description: Not Implemented + 500: + description: A problem occurred + """ + try: + await SMTPService.send_contact_admin_email(data) + return JSONResponse(content={"Success": "Email sent"}, status_code=201) + except ValueError as e: + return JSONResponse( + content={"Error": str(e), "SubCode": "NotImplemented"}, status_code=501 ) - try: - tag_name = response.json()["tag_name"] - published_date = response.json()["published_at"] - release = ReleaseVersion.get() - if release is None: - release = ReleaseVersion() - if tag_name != release.tag_name: - release.tag_name = tag_name - release.published_at = published_date - release.save() - return { - "release_version": release.tag_name, - "published_at": str(release.published_at), - }, 201 - except KeyError: - return { + + +@router.post("/release/") +async def post(db: Database = Depends(get_db)): + """ + Fetch latest release version from GitHub and save to database.
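# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of this diff): the migrated
# handlers above and below take `db: Database = Depends(get_db)`. One plausible
# way such a dependency is wired with the `databases` package is sketched here;
# DATABASE_URL, `database`, and the lifespan hooks are assumptions for
# illustration, not the project's actual backend.db module.
# ---------------------------------------------------------------------------
from contextlib import asynccontextmanager

from databases import Database
from fastapi import FastAPI

DATABASE_URL = "postgresql://tm:tm@localhost:5432/tasking_manager"  # assumed

database = Database(DATABASE_URL)


@asynccontextmanager
async def lifespan(app: FastAPI):
    # Open the connection pool once per process; close it on shutdown.
    await database.connect()
    yield
    await database.disconnect()


app = FastAPI(lifespan=lifespan)


async def get_db() -> Database:
    # Resolved per request by FastAPI; the same shared pool is returned each time.
    return database
# ---------------------------------------------------------------------------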
+ --- + tags: + - system + produces: + - application/json + responses: + 201: + description: Saved version successfully to database + 502: + description: Couldn't fetch latest release from github + 500: + description: Internal server error + """ + response = requests.get( + "https://api.github.com/repos/hotosm/tasking-manager/releases/latest" + ) + try: + tag_name = response.json()["tag_name"] + published_date = response.json()["published_at"] + published_date = datetime.strptime( + published_date, "%Y-%m-%dT%H:%M:%SZ" + ).replace(tzinfo=None) + release = await ReleaseVersion.get(db) + if release is None: + release = ReleaseVersion() + if tag_name != release.tag_name: + release.tag_name = tag_name + release.published_at = published_date + await release.save(db) + return { + "release_version": release.tag_name, + "published_at": str(release.published_at), + } + except KeyError: + return JSONResponse( + content={ "Error": "Couldn't fetch latest release from github", "SubCode": "GithubFetchError", - }, 502 + }, + status_code=502, + ) diff --git a/backend/api/system/image_upload.py b/backend/api/system/image_upload.py index 2dee13dcf9..43484649b3 100644 --- a/backend/api/system/image_upload.py +++ b/backend/api/system/image_upload.py @@ -1,100 +1,115 @@ -import requests import json -from flask_restful import Resource, request, current_app +import requests +from fastapi import APIRouter, Body, Depends, Request +from fastapi.responses import JSONResponse + +from backend.config import settings +from backend.models.dtos.user_dto import AuthUserDTO +from backend.services.users.authentication_service import login_required -from backend.services.users.authentication_service import token_auth +router = APIRouter( + prefix="/system", + tags=["system"], + responses={404: {"description": "Not found"}}, +) -class SystemImageUploadRestAPI(Resource): - @token_auth.login_required - def post(self): - """ - Uploads an image using the image upload service - --- - tags: - - system - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object containing image data that will be uploaded - schema: - properties: - data: - type: string - default: base64 encoded image data - mime: - type: string - default: file mime/type - filename: - type: string - default: filename - responses: - 200: - description: Image uploaded successfully - 400: - description: Input parameter error - 403: - description: User is not authorized to upload images - 500: - description: A problem occurred - 501: - description: Image upload service not defined - """ - if ( - current_app.config["IMAGE_UPLOAD_API_URL"] is None - or current_app.config["IMAGE_UPLOAD_API_KEY"] is None - ): - return { +@router.post("/image-upload/") +async def post( + request: Request, + user: AuthUserDTO = Depends(login_required), + data: dict = Body(...), +): + """ + Uploads an image using the image upload service + --- + tags: + - system + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object containing image data that will be uploaded + schema: + properties: + data: + type: string + default: base64 encoded image data + mime: + type: string + default: file 
mime/type + filename: + type: string + default: filename + responses: + 200: + description: Image uploaded successfully + 400: + description: Input parameter error + 403: + description: User is not authorized to upload images + 500: + description: A problem occurred + 501: + description: Image upload service not defined + """ + if settings.IMAGE_UPLOAD_API_URL is None or settings.IMAGE_UPLOAD_API_KEY is None: + return JSONResponse( + content={ "Error": "Image upload service not defined", "SubCode": "UndefinedImageService", - }, 501 + }, + status_code=501, + ) - data = request.get_json() - if data.get("filename") is None: - return { + if data.get("filename") is None: + return JSONResponse( + content={ "Error": "Missing filename parameter", "SubCode": "MissingFilename", - }, 400 - if data.get("mime") in [ - "image/png", - "image/jpeg", - "image/webp", - "image/gif", - ]: - headers = { - "x-api-key": current_app.config["IMAGE_UPLOAD_API_KEY"], - "Content-Type": "application/json", - } - url = "{}?filename={}".format( - current_app.config["IMAGE_UPLOAD_API_URL"], data.get("filename") - ) - result = requests.post( - url, headers=headers, data=json.dumps({"image": data}) - ) - if result.ok: - return result.json(), 201 - else: - return result.json(), 400 - elif data.get("mime") is None: - return { + }, + status_code=400, + ) + if data.get("mime") in [ + "image/png", + "image/jpeg", + "image/webp", + "image/gif", + ]: + headers = { + "x-api-key": settings.IMAGE_UPLOAD_API_KEY, + "Content-Type": "application/json", + } + url = "{}?filename={}".format( + settings.IMAGE_UPLOAD_API_URL, data.get("filename") + ) + result = requests.post(url, headers=headers, data=json.dumps({"image": data})) + if result.ok: + return JSONResponse(content=result.json(), status_code=201) + else: + return JSONResponse(content=result.json(), status_code=400) + elif data.get("mime") is None: + return JSONResponse( + content={ "Error": "Missing mime parameter", "SubCode": "MissingMime", - }, 400 - else: - return ( - { - "Error": "Mimetype is not allowed. The supported formats are: png, jpeg, webp and gif.", - "SubCode": "UnsupportedFile", - }, - 400, - ) + }, + status_code=400, + ) + else: + return JSONResponse( + content={ + "Error": "Mimetype is not allowed. 
The supported formats are: png, jpeg, webp and gif.", + "SubCode": "UnsupportedFile", + }, + status_code=400, + ) diff --git a/backend/api/system/statistics.py b/backend/api/system/statistics.py index 5ee43e418b..61bf08c912 100644 --- a/backend/api/system/statistics.py +++ b/backend/api/system/statistics.py @@ -1,35 +1,43 @@ -from flask_restful import Resource -from backend.services.stats_service import StatsService -from flask_restful import request +from databases import Database from distutils.util import strtobool +from fastapi import APIRouter, Depends, Request + +from backend.db import get_db +from backend.services.stats_service import StatsService + +router = APIRouter( + prefix="/system", + tags=["system"], + responses={404: {"description": "Not found"}}, +) -class SystemStatisticsAPI(Resource): - def get(self): - """ - Get HomePage Stats - --- - tags: - - system - produces: - - application/json - parameters: - - in: query - name: abbreviated - type: boolean - description: Set to false if complete details on projects including total area, campaigns, orgs are required - default: True - responses: - 200: - description: Project stats - 500: - description: Internal Server Error - """ - abbreviated = ( - strtobool(request.args.get("abbreviated")) - if request.args.get("abbreviated") - else True - ) +@router.get("/statistics/") +async def get(request: Request, db: Database = Depends(get_db)): + """ + Get HomePage Stats + --- + tags: + - system + produces: + - application/json + parameters: + - in: query + name: abbreviated + type: boolean + description: Set to false if complete details on projects including total area, campaigns, orgs are required + default: True + responses: + 200: + description: Project stats + 500: + description: Internal Server Error + """ + abbreviated = ( + strtobool(request.query_params.get("abbreviated")) + if request.query_params.get("abbreviated") + else True + ) - stats = StatsService.get_homepage_stats(abbreviated) - return stats.to_primitive(), 200 + stats = await StatsService.get_homepage_stats(abbreviated, db) + return stats.model_dump(by_alias=True) diff --git a/backend/api/tasks/actions.py b/backend/api/tasks/actions.py index 10878ecffc..78ea9d5d90 100644 --- a/backend/api/tasks/actions.py +++ b/backend/api/tasks/actions.py @@ -1,1024 +1,1242 @@ -from flask_restful import Resource, current_app, request -from schematics.exceptions import DataError +from databases import Database +from fastapi import APIRouter, BackgroundTasks, Depends, Query, Request +from fastapi.responses import JSONResponse +from loguru import logger +from backend.db import get_db from backend.exceptions import NotFound from backend.models.dtos.grid_dto import SplitTaskDTO -from backend.models.postgis.utils import InvalidGeoJson -from backend.services.grid.split_service import SplitService, SplitServiceError -from backend.services.users.user_service import UserService -from backend.services.project_admin_service import ProjectAdminService -from backend.services.project_service import ProjectService -from backend.services.users.authentication_service import token_auth, tm +from backend.models.dtos.mapping_dto import ( + ExtendLockTimeDTO, + LockTaskDTO, + MappedTaskDTO, + StopMappingTaskDTO, +) +from backend.models.dtos.user_dto import AuthUserDTO from backend.models.dtos.validator_dto import ( LockForValidationDTO, - UnlockAfterValidationDTO, - StopValidationDTO, RevertUserTasksDTO, + StopValidationDTO, + UnlockAfterValidationDTO, ) +from backend.models.postgis.utils import 
InvalidGeoJson +from backend.services.grid.split_service import SplitService, SplitServiceError +from backend.services.mapping_service import MappingService, MappingServiceError +from backend.services.project_admin_service import ProjectAdminService +from backend.services.project_service import ProjectService +from backend.services.users.authentication_service import login_required +from backend.services.users.user_service import UserService from backend.services.validator_service import ( + UserLicenseError, ValidatorService, ValidatorServiceError, - UserLicenseError, ) -from backend.models.dtos.mapping_dto import ( - LockTaskDTO, - StopMappingTaskDTO, - MappedTaskDTO, - ExtendLockTimeDTO, -) -from backend.services.mapping_service import MappingService, MappingServiceError +router = APIRouter( + prefix="/projects", + tags=["tasks"], + responses={404: {"description": "Not found"}}, +) -class TasksActionsMappingLockAPI(Resource): - @token_auth.login_required - def post(self, project_id, task_id): - """ - Locks a task for mapping - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - name: task_id - in: path - description: Unique task ID - required: true - type: integer - default: 1 - responses: - 200: - description: Task locked - 400: - description: Client Error - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Task not found - 409: - description: User has not accepted license terms of project - 500: - description: Internal Server Error - """ - try: - lock_task_dto = LockTaskDTO() - lock_task_dto.user_id = token_auth.current_user() - lock_task_dto.project_id = project_id - lock_task_dto.task_id = task_id - lock_task_dto.preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - lock_task_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": "Unable to lock task", "SubCode": "InvalidData"}, 400 - try: - ProjectService.exists(project_id) # Check if project exists - task = MappingService.lock_task_for_mapping(lock_task_dto) - return task.to_primitive(), 200 - except MappingServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 - except UserLicenseError: - return { +@router.post("/{project_id}/tasks/actions/lock-for-mapping/{task_id}/") +async def post( + request: Request, + project_id: int, + task_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Locks a task for mapping + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: integer + default: 1 + - name: task_id + in: path + description: Unique task ID + required: true + type: integer + default: 1 
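# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of this diff): the DTOs built in
# the handlers in this file are Pydantic models, so invalid input now raises at
# construction time instead of at an explicit .validate() call as with
# schematics. The field list below is an assumed stand-in, not the real
# backend.models.dtos.mapping_dto.LockTaskDTO.
# ---------------------------------------------------------------------------
from pydantic import BaseModel, ValidationError


class LockTaskDTOSketch(BaseModel):
    user_id: int
    project_id: int
    task_id: int
    preferred_locale: str = "en"


try:
    LockTaskDTOSketch(user_id=123, project_id=1, task_id="not-a-number")
except ValidationError as exc:
    # The handlers catch this, log it, and answer 400 with SubCode "InvalidData".
    print(exc.errors())
# ---------------------------------------------------------------------------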
+ responses: + 200: + description: Task locked + 400: + description: Client Error + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Task not found + 409: + description: User has not accepted license terms of project + 500: + description: Internal Server Error + """ + try: + lock_task_dto = LockTaskDTO( + user_id=user.id, + project_id=project_id, + task_id=task_id, + preferred_locale=request.headers.get("accept-language"), + ) + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={"Error": "Unable to lock task", "SubCode": "InvalidData"}, + status_code=400, + ) + try: + await ProjectService.exists(project_id, db) + async with db.transaction(): + task = await MappingService.lock_task_for_mapping(lock_task_dto, db) + return task + except MappingServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) + except UserLicenseError: + return JSONResponse( + { "Error": "User not accepted license terms", "SubCode": "UserLicenseError", - }, 409 + }, + status_code=409, + ) -class TasksActionsMappingStopAPI(Resource): - @token_auth.login_required - def post(self, project_id, task_id): - """ - Unlock a task that is locked for mapping resetting it to its last status - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - name: task_id - in: path - description: Unique task ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for unlocking a task - schema: - id: TaskUpdateStop - properties: - comment: - type: string - description: Optional user comment about the task - default: Comment about mapping done before stop - responses: - 200: - description: Task unlocked - 400: - description: Client Error - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Task not found - 500: - description: Internal Server Error - """ - try: - stop_task = StopMappingTaskDTO( - request.get_json() if request.is_json else {} - ) - stop_task.user_id = token_auth.current_user() - stop_task.task_id = task_id - stop_task.project_id = project_id - stop_task.preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - stop_task.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": "Task unlock failed", "SubCode": "InvalidData"}, 400 +@router.post("/{project_id}/tasks/actions/stop-mapping/{task_id}/") +async def post( + request: Request, + project_id: int, + task_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Unlock a task that is locked for mapping resetting it to its last status + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is 
requesting + type: string + required: true + default: en + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: integer + default: 1 + - name: task_id + in: path + description: Unique task ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for unlocking a task + schema: + id: TaskUpdateStop + properties: + comment: + type: string + description: Optional user comment about the task + default: Comment about mapping done before stop + responses: + 200: + description: Task unlocked + 400: + description: Client Error + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Task not found + 500: + description: Internal Server Error + """ + try: + request_data = await request.json() + preferred_locale = request.headers.get("accept-language", None) + stop_task = StopMappingTaskDTO( + project_id=project_id, + task_id=task_id, + user_id=user.id, + comment=request_data.get("comment", None), + ) + if preferred_locale: + stop_task.preferred_locale = preferred_locale - try: - ProjectService.exists(project_id) # Check if project exists - task = MappingService.stop_mapping_task(stop_task) - return task.to_primitive(), 200 - except MappingServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={"Error": "Task unlock failed", "SubCode": "InvalidData"}, + status_code=400, + ) + try: + await ProjectService.exists(project_id, db) + async with db.transaction(): + task = await MappingService.stop_mapping_task(stop_task, db) + return task + except MappingServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) -class TasksActionsMappingUnlockAPI(Resource): - @token_auth.login_required - def post(self, project_id, task_id): - """ - Set a task as mapped - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - name: task_id - in: path - description: Unique task ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for unlocking a task - schema: - id: TaskUpdateUnlock - required: - - status - properties: - status: - type: string - description: The new status for the task - default: MAPPED - comment: - type: string - description: Optional user comment about the task - default: Comment about the mapping - responses: - 200: - description: Task unlocked - 400: - description: Client Error - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Task not found - 500: - description: Internal Server Error - """ - try: - authenticated_user_id = token_auth.current_user() - mapped_task = MappedTaskDTO(request.get_json()) - mapped_task.user_id = authenticated_user_id - mapped_task.task_id = task_id - mapped_task.project_id = project_id - mapped_task.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": "Task unlock failed", "SubCode": 
"InvalidData"}, 400 +@router.post("/{project_id}/tasks/actions/unlock-after-mapping/{task_id}/") +async def post( + request: Request, + project_id: int, + task_id: int, + background_tasks: BackgroundTasks, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Set a task as mapped + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: integer + default: 1 + - name: task_id + in: path + description: Unique task ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for unlocking a task + schema: + id: TaskUpdateUnlock + required: + - status + properties: + status: + type: string + description: The new status for the task + default: MAPPED + comment: + type: string + description: Optional user comment about the task + default: Comment about the mapping + responses: + 200: + description: Task unlocked + 400: + description: Client Error + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Task not found + 500: + description: Internal Server Error + """ + try: + request_data = await request.json() + mapped_task = MappedTaskDTO( + user_id=user.id, + project_id=project_id, + task_id=task_id, + status=request_data.get("status"), + comment=request_data.get("comment", None), + ) + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={"Error": "Task unlock failed", "SubCode": "InvalidData"}, + status_code=400, + ) + try: + await ProjectService.exists(project_id, db) + async with db.transaction(): + task = await MappingService.unlock_task_after_mapping( + mapped_task, db, background_tasks + ) + return task - try: - ProjectService.exists(project_id) # Check if project exists - task = MappingService.unlock_task_after_mapping(mapped_task) - return task.to_primitive(), 200 - except MappingServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 - except NotFound as e: - return e.to_dict() - except Exception as e: - error_msg = f"Task Lock API - unhandled error: {str(e)}" - current_app.logger.critical(error_msg) - return { - "Error": "Task unlock failed", - "SubCode": "InternalServerError", - }, 500 - finally: - # Refresh mapper level after mapping - UserService.check_and_update_mapper_level(authenticated_user_id) + except MappingServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) + except NotFound as e: + return JSONResponse( + content=e.to_dict(), + status_code=404, + ) + except Exception as e: + logger.critical(f"Task Unlock API - unhandled error: {str(e)}") + return JSONResponse( + content={"Error": "Task unlock failed", "SubCode": "InternalServerError"}, + status_code=500, + ) + finally: + await UserService.check_and_update_mapper_level(user.id, db) -class TasksActionsMappingUndoAPI(Resource): - @token_auth.login_required - def post(self, project_id, task_id): - """ - Undo a task's mapping status - --- - tags: - - tasks - produces: - - application/json - 
parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - name: task_id - in: path - description: Unique task ID - required: true - type: integer - default: 1 - responses: - 200: - description: Task found - 403: - description: Forbidden - 404: - description: Task not found - 500: - description: Internal Server Error - """ - try: - preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - ProjectService.exists(project_id) # Check if project exists - task = MappingService.undo_mapping( - project_id, task_id, token_auth.current_user(), preferred_locale - ) - return task.to_primitive(), 200 - except MappingServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 +@router.post("/{project_id}/tasks/actions/undo-last-action/{task_id}/") +async def post( + request: Request, + project_id: int, + task_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Undo a task's mapping status + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: integer + default: 1 + - name: task_id + in: path + description: Unique task ID + required: true + type: integer + default: 1 + responses: + 200: + description: Task found + 403: + description: Forbidden + 404: + description: Task not found + 500: + description: Internal Server Error + """ + try: + preferred_locale = request.headers.get("accept-language", None) + await ProjectService.exists(project_id, db) + async with db.transaction(): + if preferred_locale: + task = await MappingService.undo_mapping( + project_id, task_id, user.id, db, preferred_locale + ) + else: + task = await MappingService.undo_mapping( + project_id, task_id, user.id, db + ) + return task + except MappingServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) -class TasksActionsValidationLockAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Lock tasks for validation - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: project_id - in: path - description: Project ID the tasks are associated with - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for locking task(s) - schema: - properties: - taskIds: - type: array - items: - type: integer - description: Array of taskIds for locking - default: [1,2] - responses: - 200: - description: Task(s) locked for validation - 400: 
- description: Client Error - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Task not found - 409: - description: User has not accepted license terms of project - 500: - description: Internal Server Error - """ - try: - validator_dto = LockForValidationDTO(request.get_json()) - validator_dto.project_id = project_id - validator_dto.user_id = token_auth.current_user() - validator_dto.preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - validator_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": "Unable to lock task", "SubCode": "InvalidData"}, 400 +@router.post("/{project_id}/tasks/actions/lock-for-validation/") +async def post( + request: Request, + project_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Lock tasks for validation + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: project_id + in: path + description: Project ID the tasks are associated with + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for locking task(s) + schema: + properties: + taskIds: + type: array + items: + type: integer + description: Array of taskIds for locking + default: [1,2] + responses: + 200: + description: Task(s) locked for validation + 400: + description: Client Error + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Task not found + 409: + description: User has not accepted license terms of project + 500: + description: Internal Server Error + """ + try: + request_data = await request.json() + task_ids = request_data.get("taskIds") + preferred_locale = request.headers.get("accept-language", None) + validator_dto = LockForValidationDTO( + project_id=project_id, task_ids=task_ids, user_id=user.id + ) + if preferred_locale: + validator_dto.preferred_locale = preferred_locale + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={"Error": "Unable to lock task", "SubCode": "InvalidData"}, + status_code=400, + ) - try: - ProjectService.exists(project_id) # Check if project exists - tasks = ValidatorService.lock_tasks_for_validation(validator_dto) - return tasks.to_primitive(), 200 - except ValidatorServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 - except UserLicenseError: - return { + try: + await ProjectService.exists(project_id, db) + async with db.transaction(): + tasks = await ValidatorService.lock_tasks_for_validation(validator_dto, db) + return tasks + except ValidatorServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) + except UserLicenseError: + return JSONResponse( + content={ "Error": "User not accepted license terms", "SubCode": "UserLicenseError", - }, 409 - - -class TasksActionsValidationStopAPI(Resource): - @tm.pm_only(False) - @token_auth.login_required - def post(self, project_id): - """ - Unlock tasks that are locked for validation resetting them to their last status - --- 
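# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of this diff): a rough example of
# exercising the new lock-for-validation route with FastAPI's TestClient. The
# application import path and the assumption that the router is mounted at the
# application root (no extra /api/... prefix) are illustrative only.
# ---------------------------------------------------------------------------
from fastapi.testclient import TestClient

from backend.main import api  # assumed application import path

client = TestClient(api)
response = client.post(
    "/projects/1/tasks/actions/lock-for-validation/",
    json={"taskIds": [1, 2]},
    headers={
        "Authorization": "Token sessionTokenHere==",
        "Accept-Language": "en",
    },
)
print(response.status_code, response.json())
# ---------------------------------------------------------------------------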
- tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for unlocking a task - schema: - properties: - resetTasks: - type: array - items: - schema: - $ref: "#/definitions/ResetTask" - responses: - 200: - description: Task unlocked - 400: - description: Client Error - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Task not found - 500: - description: Internal Server Error - """ - try: - validated_dto = StopValidationDTO(request.get_json()) - validated_dto.project_id = project_id - validated_dto.user_id = token_auth.current_user() - validated_dto.preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - validated_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": "Task unlock failed", "SubCode": "InvalidData"}, 400 + }, + status_code=409, + ) - try: - ProjectService.exists(project_id) # Check if project exists - tasks = ValidatorService.stop_validating_tasks(validated_dto) - return tasks.to_primitive(), 200 - except ValidatorServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 +@router.post("/{project_id}/tasks/actions/stop-validation/") +async def post( + request: Request, + project_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Unlock tasks that are locked for validation resetting them to their last status + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for unlocking a task + schema: + properties: + resetTasks: + type: array + items: + schema: + $ref: "#/definitions/ResetTask" + responses: + 200: + description: Task unlocked + 400: + description: Client Error + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Task not found + 500: + description: Internal Server Error + """ + try: + request_data = await request.json() + reset_tasks = request_data.get("resetTasks") + preferred_locale = request.headers.get("accept-language", None) + validated_dto = StopValidationDTO( + project_id=project_id, user_id=user.id, reset_tasks=reset_tasks + ) + if preferred_locale: + validated_dto.preferred_locale = preferred_locale + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={"Error": "Task unlock failed", "SubCode": "InvalidData"}, + status_code=400, + ) + try: + await ProjectService.exists(project_id, db) + async with db.transaction(): + tasks = await 
ValidatorService.stop_validating_tasks(validated_dto, db) + return tasks + except ValidatorServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) -class TasksActionsValidationUnlockAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Set tasks as validated - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for unlocking a task - schema: - properties: - validatedTasks: - type: array - items: - schema: - $ref: "#/definitions/ValidatedTask" - responses: - 200: - description: Task unlocked - 400: - description: Client Error - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Task not found - 500: - description: Internal Server Error - """ - try: - validated_dto = UnlockAfterValidationDTO(request.get_json()) - validated_dto.project_id = project_id - validated_dto.user_id = token_auth.current_user() - validated_dto.preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - validated_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": "Task unlock failed", "SubCode": "InvalidData"}, 400 - try: - ProjectService.exists(project_id) # Check if project exists - tasks = ValidatorService.unlock_tasks_after_validation(validated_dto) - return tasks.to_primitive(), 200 - except ValidatorServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 +@router.post("/{project_id}/tasks/actions/unlock-after-validation/") +async def post( + request: Request, + project_id: int, + background_tasks: BackgroundTasks, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Set tasks as validated + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for unlocking a task + schema: + properties: + validatedTasks: + type: array + items: + schema: + $ref: "#/definitions/ValidatedTask" + responses: + 200: + description: Task unlocked + 400: + description: Client Error + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Task not found + 500: + description: Internal Server Error + """ + try: + request_data = await request.json() + validated_tasks = request_data.get("validatedTasks") + preferred_locale = request.headers.get("accept-language", None) + validated_dto = UnlockAfterValidationDTO( + project_id=project_id, user_id=user.id, validated_tasks=validated_tasks + ) + if 
preferred_locale: + validated_dto.preferred_locale = preferred_locale + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={"Error": "Task unlock failed", "SubCode": "InvalidData"}, + status_code=400, + ) + try: + await ProjectService.exists(project_id, db) + tasks = await ValidatorService.unlock_tasks_after_validation( + validated_dto, db, background_tasks + ) + return tasks + except ValidatorServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) -class TasksActionsMapAllAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Map all tasks on a project - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: All tasks mapped - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - try: - authenticated_user_id = token_auth.current_user() - if not ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id - ): - raise ValueError() - except ValueError: - return { +@router.post("/{project_id}/tasks/actions/map-all/") +async def post( + request: Request, + project_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Map all tasks on a project + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: All tasks mapped + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + try: + authenticated_user_id = user.id + if not await ProjectAdminService.is_user_action_permitted_on_project( + authenticated_user_id, project_id, db + ): + raise ValueError() + except ValueError: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 + }, + status_code=403, + ) - MappingService.map_all_tasks(project_id, authenticated_user_id) - return {"Success": "All tasks mapped"}, 200 + async with db.transaction(): + await MappingService.map_all_tasks(project_id, authenticated_user_id, db) + return JSONResponse(content={"Success": "All tasks mapped"}, status_code=200) -class TasksActionsValidateAllAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Validate all mapped tasks on a project - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: All mapped tasks validated - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error 
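# ---------------------------------------------------------------------------
# Editor's note (illustrative sketch, not part of this diff): most write
# handlers in this file wrap the service call in `async with db.transaction():`,
# so everything the service executes on that connection commits or rolls back
# as a unit. Table and column names below are placeholders, not the project's
# schema.
# ---------------------------------------------------------------------------
from databases import Database


async def set_task_status(db: Database, project_id: int, task_id: int, status: int) -> None:
    async with db.transaction():
        await db.execute(
            "UPDATE tasks SET task_status = :status "
            "WHERE project_id = :project_id AND id = :task_id",
            values={"status": status, "project_id": project_id, "task_id": task_id},
        )
        # Any exception raised before the block exits rolls the UPDATE back.
# ---------------------------------------------------------------------------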
- """ - try: - authenticated_user_id = token_auth.current_user() - if not ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id - ): - raise ValueError() - except ValueError: - return { +@router.post("/{project_id}/tasks/actions/validate-all/") +async def post( + request: Request, + project_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Validate all mapped tasks on a project + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: All mapped tasks validated + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + try: + authenticated_user_id = user.id + if not await ProjectAdminService.is_user_action_permitted_on_project( + authenticated_user_id, project_id, db + ): + raise ValueError() + except ValueError: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 + }, + status_code=403, + ) - ValidatorService.validate_all_tasks(project_id, authenticated_user_id) - return {"Success": "All tasks validated"}, 200 + async with db.transaction(): + await ValidatorService.validate_all_tasks(project_id, authenticated_user_id, db) + return JSONResponse(content={"Success": "All tasks validated"}, status_code=200) -class TasksActionsInvalidateAllAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Invalidate all validated tasks on a project - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: All validated tasks invalidated - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - try: - authenticated_user_id = token_auth.current_user() - if not ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id - ): - raise ValueError() - except ValueError: - return { +@router.post("/{project_id}/tasks/actions/invalidate-all/") +async def post( + request: Request, + project_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Invalidate all validated tasks on a project + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: All validated tasks invalidated + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + try: + authenticated_user_id = user.id + if not await ProjectAdminService.is_user_action_permitted_on_project( + authenticated_user_id, project_id, db + ): + raise 
ValueError() + except ValueError: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 + }, + status_code=403, + ) + async with db.transaction(): + await ValidatorService.invalidate_all_tasks( + project_id, authenticated_user_id, db + ) + return JSONResponse( + content={"Success": "All tasks invalidated"}, status_code=200 + ) - ValidatorService.invalidate_all_tasks(project_id, authenticated_user_id) - return {"Success": "All tasks invalidated"}, 200 - -class TasksActionsResetBadImageryAllAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Set all bad imagery tasks as ready for mapping - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: All bad imagery tasks marked ready for mapping - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - try: - authenticated_user_id = token_auth.current_user() - if not ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id - ): - raise ValueError() - except ValueError: - return { +@router.post("/{project_id}/tasks/actions/reset-all-badimagery/") +async def post( + request: Request, + project_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Set all bad imagery tasks as ready for mapping + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: All bad imagery tasks marked ready for mapping + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + try: + authenticated_user_id = user.id + if not await ProjectAdminService.is_user_action_permitted_on_project( + authenticated_user_id, project_id, db + ): + raise ValueError() + except ValueError: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 - - MappingService.reset_all_badimagery(project_id, authenticated_user_id) - return {"Success": "All bad imagery tasks marked ready for mapping"}, 200 + }, + status_code=403, + ) + async with db.transaction(): + await MappingService.reset_all_badimagery(project_id, authenticated_user_id, db) + return JSONResponse( + content={"Success": "All bad imagery tasks marked ready for mapping"}, + status_code=200, + ) -class TasksActionsResetAllAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Reset all tasks on project back to ready, preserving history - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - 
description: All tasks reset - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - try: - authenticated_user_id = token_auth.current_user() - authenticated_user_id = token_auth.current_user() - if not ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id - ): - raise ValueError() - except ValueError: - return { +@router.post("/{project_id}/tasks/actions/reset-all/") +async def post( + request: Request, + project_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Reset all tasks on project back to ready, preserving history + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: All tasks reset + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + try: + authenticated_user_id = user.id + if not await ProjectAdminService.is_user_action_permitted_on_project( + authenticated_user_id, project_id, db + ): + raise ValueError() + except ValueError: + return JSONResponse( + content={ "Error": "User is not a manager of the project", "SubCode": "UserPermissionError", - }, 403 + }, + status_code=403, + ) + async with db.transaction(): + await ProjectAdminService.reset_all_tasks(project_id, authenticated_user_id, db) + return JSONResponse(content={"Success": "All tasks reset"}, status_code=200) - ProjectAdminService.reset_all_tasks(project_id, authenticated_user_id) - return {"Success": "All tasks reset"}, 200 - -class TasksActionsSplitAPI(Resource): - @token_auth.login_required - def post(self, project_id, task_id): - """ - Split a task - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - name: task_id - in: path - description: Unique task ID - required: true - type: integer - default: 1 - responses: - 200: - description: Task split OK - 400: - description: Client Error - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Task not found - 500: - description: Internal Server Error - """ - try: - split_task_dto = SplitTaskDTO() - split_task_dto.user_id = token_auth.current_user() - split_task_dto.project_id = project_id - split_task_dto.task_id = task_id - split_task_dto.preferred_locale = request.environ.get( - "HTTP_ACCEPT_LANGUAGE" - ) - split_task_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": "Unable to split task", "SubCode": "InvalidData"}, 400 - try: - ProjectService.exists(project_id) # Check if project exists - tasks = SplitService.split_task(split_task_dto) - return tasks.to_primitive(), 200 - except SplitServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": 
str(e).split("-")[0]}, 403 - except InvalidGeoJson as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 +@router.post("/{project_id}/tasks/actions/split/{task_id}/") +async def post( + request: Request, + project_id: int, + task_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Split a task + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: integer + default: 1 + - name: task_id + in: path + description: Unique task ID + required: true + type: integer + default: 1 + responses: + 200: + description: Task split OK + 400: + description: Client Error + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Task not found + 500: + description: Internal Server Error + """ + try: + preferred_locale = request.headers.get("accept-language", None) + split_task_dto = SplitTaskDTO( + user_id=user.id, project_id=project_id, task_id=task_id + ) + if preferred_locale: + split_task_dto.preferred_locale = preferred_locale + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={"Error": "Unable to split task", "SubCode": "InvalidData"}, + status_code=400, + ) + try: + await ProjectService.exists(project_id, db) + async with db.transaction(): + tasks = await SplitService.split_task(split_task_dto, db) + return tasks + except SplitServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) + except InvalidGeoJson as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) -class TasksActionsExtendAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Extends duration of locked tasks - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: project_id - in: path - description: Project ID the tasks are associated with - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for locking task(s) - schema: - properties: - taskIds: - type: array - items: - type: integer - description: Array of taskIds to extend time for - default: [1,2] - responses: - 200: - description: Task(s) locked for validation - 400: - description: Client Error - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Task not found - 500: - description: Internal Server Error - """ - try: - extend_dto = ExtendLockTimeDTO(request.get_json()) - extend_dto.project_id = project_id - extend_dto.user_id = token_auth.current_user() - extend_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return { 
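# Illustrative sketch only, not taken from this changeset: a minimal FastAPI
# handler showing the conventions the migrated endpoints around here follow --
# request bodies are read with `await request.json()` (a coroutine), query
# strings come from `request.query_params` instead of Flask's `request.args`,
# and errors are returned as JSONResponse objects with an explicit status_code
# rather than Flask-style `return {...}, 400` tuples. The router name and path
# below are hypothetical placeholders, not real Tasking Manager routes.
from fastapi import APIRouter, Request
from fastapi.responses import JSONResponse

sketch_router = APIRouter()


@sketch_router.post("/sketch/{project_id}/extend/")
async def sketch_extend(request: Request, project_id: int):
    payload = await request.json()  # json() must be awaited on a Starlette Request
    task_ids = payload.get("taskIds")
    if not task_ids:
        return JSONResponse(
            content={"Error": "Unable to extend lock time", "SubCode": "InvalidData"},
            status_code=400,
        )
    # Real handlers would open `async with db.transaction():` here and call the
    # relevant service, e.g. MappingService.extend_task_lock_time(extend_dto, db).
    return JSONResponse(
        content={"Success": "Successfully extended task expiry"}, status_code=200
    )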
+@router.post("/{project_id}/tasks/actions/extend/") +async def post( + request: Request, + project_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Extends duration of locked tasks + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: project_id + in: path + description: Project ID the tasks are associated with + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for locking task(s) + schema: + properties: + taskIds: + type: array + items: + type: integer + description: Array of taskIds to extend time for + default: [1,2] + responses: + 200: + description: Task(s) locked for validation + 400: + description: Client Error + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Task not found + 500: + description: Internal Server Error + """ + try: + request_data = await request.json() + task_ids = request_data.get("taskIds", None) + extend_dto = ExtendLockTimeDTO( + project_id=project_id, task_ids=task_ids, user_id=user.id + ) + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={ "Error": "Unable to extend lock time", "SubCode": "InvalidData", - }, 400 + }, + status_code=400, + ) + try: + await ProjectService.exists(project_id, db) + async with db.transaction(): + await MappingService.extend_task_lock_time(extend_dto, db) + return JSONResponse( + content={"Success": "Successfully extended task expiry"}, + status_code=200, + ) + except MappingServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) - try: - ProjectService.exists(project_id) # Check if project exists - MappingService.extend_task_lock_time(extend_dto) - return {"Success": "Successfully extended task expiry"}, 200 - except MappingServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 +@router.post("/{project_id}/tasks/actions/reset-by-user/") +async def post( + request: Request, + project_id: int, + username: str | None = Query( + None, description="Username to revert tasks for", example="test" + ), + action: str | None = Query( + None, + description="Action to revert tasks for. Can be BADIMAGERY or VALIDATED", + example="BADIMAGERY", + ), + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Revert tasks by a specific user in a project + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token session + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - in: path + name: project_id + description: Project ID the tasks are associated with + required: true + type: integer + default: 1 + - in: query + name: username + description: Username to revert tasks for + required: true + type: string + default: test + - in: query + name: action + description: Action to revert tasks for. 
Can be BADIMAGERY or VALIDATED + required: true + type: string + responses: + 200: + description: Tasks reverted + 400: + description: Client Error + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: Task not found + 500: + description: Internal Server Error + """ + try: + if not (username or action): + return JSONResponse( + content={ + "Error": "Unable to revert tasks", + "SubCode": "InvalidData", + }, + status_code=400, + ) -class TasksActionsReverUserTaskstAPI(Resource): - @token_auth.login_required - def post(self, project_id): - """ - Revert tasks by a specific user in a project - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token session - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - in: path - name: project_id - description: Project ID the tasks are associated with - required: true - type: integer - default: 1 - - in: query - name: username - description: Username to revert tasks for - required: true - type: string - default: test - - in: query - name: action - description: Action to revert tasks for. Can be BADIMAGERY or VALIDATED - required: true - type: string - responses: - 200: - description: Tasks reverted - 400: - description: Client Error - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: Task not found - 500: - description: Internal Server Error - """ - try: - revert_dto = RevertUserTasksDTO() - revert_dto.project_id = project_id - revert_dto.action = request.args.get("action") - user = UserService.get_user_by_username(request.args.get("username")) - revert_dto.user_id = user.id - revert_dto.action_by = token_auth.current_user() - revert_dto.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return { + if username: + user = await UserService.get_user_by_username(username, db) + revert_dto = RevertUserTasksDTO( + project_id=project_id, user_id=user.id, action_by=user.id, action=action + ) + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={ "Error": "Unable to revert tasks", "SubCode": "InvalidData", - }, 400 - try: - ValidatorService.revert_user_tasks(revert_dto) - return {"Success": "Successfully reverted tasks"}, 200 - except ValidatorServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 + }, + status_code=400, + ) + try: + async with db.transaction(): + await ValidatorService.revert_user_tasks(revert_dto, db) + return JSONResponse( + content={"Success": "Successfully reverted tasks"}, status_code=200 + ) + except ValidatorServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) diff --git a/backend/api/tasks/resources.py b/backend/api/tasks/resources.py index 90a04905cd..e223eea1f7 100644 --- a/backend/api/tasks/resources.py +++ b/backend/api/tasks/resources.py @@ -1,497 +1,518 @@ import io from distutils.util import strtobool -from flask import send_file, Response -from flask_restful import Resource, current_app, request -from schematics.exceptions import DataError +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import Response, 
JSONResponse, StreamingResponse +from loguru import logger +from starlette.authentication import requires -from backend.services.mapping_service import MappingService +from backend.db import get_db from backend.models.dtos.grid_dto import GridDTO - -from backend.services.users.authentication_service import token_auth, tm -from backend.services.users.user_service import UserService -from backend.services.validator_service import ValidatorService - -from backend.services.project_service import ProjectService, ProjectServiceError -from backend.services.grid.grid_service import GridService from backend.models.postgis.statuses import UserRole from backend.models.postgis.utils import InvalidGeoJson - - -class TasksRestAPI(Resource): - def get(self, project_id, task_id): - """ - Get a task's metadata - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - name: task_id - in: path - description: Unique task ID - required: true - type: integer - default: 1 - responses: - 200: - description: Task found - 404: - description: Task not found - 500: - description: Internal Server Error - """ - preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - - task = MappingService.get_task_as_dto(task_id, project_id, preferred_locale) - return task.to_primitive(), 200 - - -class TasksQueriesJsonAPI(Resource): - def get(self, project_id): - """ - Get all tasks for a project as JSON - --- - tags: - - tasks - produces: - - application/json - parameters: - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - in: query - name: tasks - type: string - description: List of tasks; leave blank to retrieve all - default: 1,2 - - in: query - name: as_file - type: boolean - description: Set to true if file download preferred - default: True - responses: - 200: - description: Project found - 403: - description: Forbidden - 404: - description: Project not found - 500: - description: Internal Server Error - """ - try: - tasks = request.args.get("tasks") if request.args.get("tasks") else None - as_file = ( - strtobool(request.args.get("as_file")) - if request.args.get("as_file") - else True - ) - - tasks_json = ProjectService.get_project_tasks(int(project_id), tasks) - - if as_file: - tasks_json = str(tasks_json).encode("utf-8") - return send_file( - io.BytesIO(tasks_json), - mimetype="application/json", - as_attachment=True, - download_name=f"{str(project_id)}-tasks.geojson", - ) - - return tasks_json, 200 - except ProjectServiceError as e: - return {"Error": str(e)}, 403 - - @token_auth.login_required - def delete(self, project_id): - """ - Delete a list of tasks from a project - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object with a list of tasks to delete - schema: - properties: - tasks: - type: array - items: - type: integer - default: [ 1, 2 ] - responses: - 200: - description: Task(s) deleted - 
400: - description: Bad request - 403: - description: Forbidden - 404: - description: Project or Task Not Found - 500: - description: Internal Server Error - """ - user_id = token_auth.current_user() - user = UserService.get_user_by_id(user_id) - if user.role != UserRole.ADMIN.value: - return { - "Error": "This endpoint action is restricted to ADMIN users.", - "SubCode": "OnlyAdminAccess", - }, 403 - - tasks_ids = request.get_json().get("tasks") - if tasks_ids is None: - return {"Error": "Tasks ids not provided", "SubCode": "InvalidData"}, 400 - if isinstance(tasks_ids, list) is False: - return { - "Error": "Tasks were not provided as a list", - "SubCode": "InvalidData", - }, 400 - - try: - ProjectService.delete_tasks(project_id, tasks_ids) - return {"Success": "Task(s) deleted"}, 200 - except ProjectServiceError as e: - return {"Error": str(e)}, 403 - - -class TasksQueriesXmlAPI(Resource): - def get(self, project_id): - """ - Get all tasks for a project as OSM XML - --- - tags: - - tasks - produces: - - application/xml - parameters: - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - in: query - name: tasks - type: string - description: List of tasks; leave blank to retrieve all - default: 1,2 - - in: query - name: as_file - type: boolean - description: Set to true if file download preferred - default: False - responses: - 200: - description: OSM XML - 400: - description: Client Error - 404: - description: No mapped tasks - 500: - description: Internal Server Error - """ - tasks = request.args.get("tasks") if request.args.get("tasks") else None - as_file = ( - strtobool(request.args.get("as_file")) - if request.args.get("as_file") - else False +from backend.services.grid.grid_service import GridService +from backend.services.mapping_service import MappingService +from backend.services.project_service import ProjectService, ProjectServiceError +from backend.services.users.authentication_service import tm +from backend.services.users.user_service import UserService +from backend.services.validator_service import ValidatorService +import json + +router = APIRouter( + prefix="/projects", + tags=["projects"], + responses={404: {"description": "Not found"}}, +) + + +@router.get("/{project_id}/tasks/{task_id}/") +async def get( + request: Request, project_id: int, task_id: int, db: Database = Depends(get_db) +): + """ + Get a task's metadata + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: integer + default: 1 + - name: task_id + in: path + description: Unique task ID + required: true + type: integer + default: 1 + responses: + 200: + description: Task found + 404: + description: Task not found + 500: + description: Internal Server Error + """ + preferred_locale = request.headers.get("accept-language") + task = await MappingService.get_task_as_dto( + task_id, project_id, db, preferred_locale + ) + return task + + +@router.get("/{project_id}/tasks/") +async def get(request: Request, project_id: int, db: Database = Depends(get_db)): + """ + Get all tasks for a project as JSON + --- + tags: + - tasks + produces: + - application/json + parameters: + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: 
integer + default: 1 + - in: query + name: tasks + type: string + description: List of tasks; leave blank to retrieve all + default: 1,2 + - in: query + name: as_file + type: boolean + description: Set to true if file download preferred + default: True + responses: + 200: + description: Project found + 403: + description: Forbidden + 404: + description: Project not found + 500: + description: Internal Server Error + """ + try: + tasks = ( + request.query_params.get("tasks") + if request.query_params.get("tasks") + else None ) - - xml = MappingService.generate_osm_xml(project_id, tasks) - - if as_file: - return send_file( - io.BytesIO(xml), - mimetype="text.xml", - as_attachment=True, - download_name=f"HOT-project-{project_id}.osm", - ) - - return Response(xml, mimetype="text/xml", status=200) - - -class TasksQueriesGpxAPI(Resource): - def get(self, project_id): - """ - Get all tasks for a project as GPX - --- - tags: - - tasks - produces: - - application/xml - parameters: - - name: project_id - in: path - description: Project ID the task is associated with - required: true - type: integer - default: 1 - - in: query - name: tasks - type: string - description: List of tasks; leave blank for all - default: 1,2 - - in: query - name: as_file - type: boolean - description: Set to true if file download preferred - default: False - responses: - 200: - description: GPX XML - 400: - description: Client error - 404: - description: No mapped tasks - 500: - description: Internal Server Error - """ - current_app.logger.debug("GPX Called") - tasks = request.args.get("tasks") as_file = ( - strtobool(request.args.get("as_file")) - if request.args.get("as_file") + strtobool(request.query_params.get("as_file")) + if request.query_params.get("as_file") else False ) - xml = MappingService.generate_gpx(project_id, tasks) - + tasks_json = await ProjectService.get_project_tasks(db, int(project_id), tasks) if as_file: - return send_file( - io.BytesIO(xml), - mimetype="text.xml", - as_attachment=True, - download_name=f"HOT-project-{project_id}.gpx", + tasks_str = json.dumps(tasks_json, indent=4) # Pretty-printed GeoJSON + file_bytes = io.BytesIO(tasks_str.encode("utf-8")) + file_bytes.seek(0) # Reset stream position + + # Return the GeoJSON file response for download + return StreamingResponse( + file_bytes, + media_type="application/geo+json", + headers={ + "Content-Disposition": f'attachment; filename="{project_id}-tasks.geojson"' + }, ) - return Response(xml, mimetype="text/xml", status=200) - - -class TasksQueriesAoiAPI(Resource): - @tm.pm_only() - @token_auth.login_required - def put(self): - """ - Get task tiles intersecting with the aoi provided - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object containing aoi and tasks and bool flag for controlling clip grid to aoi - schema: - properties: - clipToAoi: - type: boolean - default: true - areaOfInterest: - schema: - properties: - type: - type: string - default: FeatureCollection - features: - type: array - items: - schema: - $ref: "#/definitions/GeoJsonFeature" - grid: - schema: - properties: - type: - type: string - default: FeatureCollection - features: - type: array - items: - schema: - $ref: "#/definitions/GeoJsonFeature" - responses: - 200: - description: Intersecting tasks found successfully - 400: - description: Client 
Error - Invalid Request - 500: - description: Internal Server Error - """ - try: - grid_dto = GridDTO(request.get_json()) - grid_dto.validate() - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return { - "Error": "Unable to fetch tiles interesecting AOI", - "SubCode": "InvalidData", - }, 400 - - try: - grid = GridService.trim_grid_to_aoi(grid_dto) - return grid, 200 - except InvalidGeoJson as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 400 - - -class TasksQueriesMappedAPI(Resource): - def get(self, project_id): - """ - Get all mapped tasks for a project grouped by username - --- - tags: - - tasks - produces: - - application/json - parameters: - - name: project_id - in: path - description: Unique project ID - required: true - type: integer - default: 1 - responses: - 200: - description: Mapped tasks returned - 500: - description: Internal Server Error - """ - ProjectService.get_project_by_id(project_id) - mapped_tasks = ValidatorService.get_mapped_tasks_by_user(project_id) - return mapped_tasks.to_primitive(), 200 - + return tasks_json + except ProjectServiceError as e: + return JSONResponse(content={"Error": str(e)}, status_code=403) + + +@router.delete("/{project_id}/tasks/") +@requires("authenticated") +async def delete(request: Request, project_id): + """ + Delete a list of tasks from a project + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object with a list of tasks to delete + schema: + properties: + tasks: + type: array + items: + type: integer + default: [ 1, 2 ] + responses: + 200: + description: Task(s) deleted + 400: + description: Bad request + 403: + description: Forbidden + 404: + description: Project or Task Not Found + 500: + description: Internal Server Error + """ + user_id = request.user.display_name + user = UserService.get_user_by_id(user_id) + if user.role != UserRole.ADMIN.value: + return { + "Error": "This endpoint action is restricted to ADMIN users.", + "SubCode": "OnlyAdminAccess", + }, 403 + + tasks_ids = await request.json().get("tasks") + if tasks_ids is None: + return {"Error": "Tasks ids not provided", "SubCode": "InvalidData"}, 400 + if isinstance(tasks_ids, list) is False: + return { + "Error": "Tasks were not provided as a list", + "SubCode": "InvalidData", + }, 400 + + try: + ProjectService.delete_tasks(project_id, tasks_ids) + return {"Success": "Task(s) deleted"}, 200 + except ProjectServiceError as e: + return {"Error": str(e)}, 403 + + +@router.get("/{project_id}/tasks/queries/xml/") +async def get(request: Request, project_id: int, db: Database = Depends(get_db)): + """ + Get all tasks for a project as OSM XML + --- + tags: + - tasks + produces: + - application/xml + parameters: + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: integer + default: 1 + - in: query + name: tasks + type: string + description: List of tasks; leave blank to retrieve all + default: 1,2 + - in: query + name: as_file + type: boolean + description: Set to true if file download preferred + default: False + responses: + 200: + description: OSM XML + 400: + description: Client 
Error + 404: + description: No mapped tasks + 500: + description: Internal Server Error + """ + tasks = request.query_params.get("tasks") + as_file = ( + strtobool(request.query_params.get("as_file")) + if request.query_params.get("as_file") + else False + ) + + xml = await MappingService.generate_osm_xml(project_id, tasks, db) + + if as_file: + return StreamingResponse( + io.BytesIO(xml), + media_type="text/xml", + headers={ + "Content-Disposition": f"attachment; filename=HOT-project-{project_id}.osm" + }, + ) -class TasksQueriesOwnInvalidatedAPI(Resource): - @tm.pm_only(False) - @token_auth.login_required - def get(self, username): - """ - Get invalidated tasks either mapped by user or invalidated by user - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - name: username - in: path - description: The users username - required: true - type: string - - in: query - name: asValidator - description: treats user as validator, rather than mapper, if true - type: string - - in: query - name: sortBy - description: field to sort by, defaults to action_date - type: string - - in: query - name: sortDirection - description: direction of sort, defaults to desc - type: string - - in: query - name: page - description: Page of results user requested - type: integer - - in: query - name: pageSize - description: Size of page, defaults to 10 - type: integer - - in: query - name: project - description: Optional project filter - type: integer - - in: query - name: closed - description: Optional filter for open/closed invalidations - type: boolean - responses: - 200: - description: Invalidated tasks user has invalidated - 404: - description: No invalidated tasks - 500: - description: Internal Server Error - """ - sort_column = {"updatedDate": "updated_date", "projectId": "project_id"} - if request.args.get("sortBy", "updatedDate") in sort_column: - sort_column = sort_column[request.args.get("sortBy", "updatedDate")] - else: - sort_column = sort_column["updatedDate"] - # closed needs to be set to True, False, or None - closed = None - if request.args.get("closed") == "true": - closed = True - elif request.args.get("closed") == "false": - closed = False - # sort direction should only be desc or asc - if request.args.get("sortDirection") in ["asc", "desc"]: - sort_direction = request.args.get("sortDirection") - else: - sort_direction = "desc" - invalidated_tasks = ValidatorService.get_user_invalidated_tasks( - request.args.get("asValidator") == "true", - username, - request.environ.get("HTTP_ACCEPT_LANGUAGE"), - closed, - request.args.get("project", None, type=int), - request.args.get("page", None, type=int), - request.args.get("pageSize", None, type=int), - sort_column, - sort_direction, + return Response(content=xml, media_type="text/xml", status_code=200) + + +@router.get("/{project_id}/tasks/queries/gpx/") +async def get(request: Request, project_id: int, db: Database = Depends(get_db)): + """ + Get all tasks for a project as GPX + --- + tags: + - tasks + produces: + - application/xml + parameters: + - name: project_id + in: path + description: Project ID the task is associated with + required: true + type: integer + default: 1 + - in: query + name: tasks + type: string + description: List of tasks; leave blank for 
all + default: 1,2 + - in: query + name: as_file + type: boolean + description: Set to true if file download preferred + default: False + responses: + 200: + description: GPX XML + 400: + description: Client error + 404: + description: No mapped tasks + 500: + description: Internal Server Error + """ + tasks = request.query_params.get("tasks") + as_file = ( + strtobool(request.query_params.get("as_file")) + if request.query_params.get("as_file") + else False + ) + + xml = await MappingService.generate_gpx(project_id, tasks, db) + + if as_file: + return StreamingResponse( + io.BytesIO(xml), + media_type="text/xml", + headers={ + "Content-Disposition": f"attachment; filename=HOT-project-{project_id}.gpx" + }, ) - return invalidated_tasks.to_primitive(), 200 + + return Response(content=xml, media_type="text/xml", status_code=200) + + +@router.put("/{project_id}/tasks/queries/aoi/") +@requires("authenticated") +@tm.pm_only() +async def put(request: Request, project_id: int): + """ + Get task tiles intersecting with the aoi provided + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object containing aoi and tasks and bool flag for controlling clip grid to aoi + schema: + properties: + clipToAoi: + type: boolean + default: true + areaOfInterest: + schema: + properties: + type: + type: string + default: FeatureCollection + features: + type: array + items: + schema: + $ref: "#/definitions/GeoJsonFeature" + grid: + schema: + properties: + type: + type: string + default: FeatureCollection + features: + type: array + items: + schema: + $ref: "#/definitions/GeoJsonFeature" + responses: + 200: + description: Intersecting tasks found successfully + 400: + description: Client Error - Invalid Request + 500: + description: Internal Server Error + """ + try: + grid_dto = GridDTO(request.get_json()) + grid_dto.validate() + except Exception as e: + logger.error(f"error validating request: {str(e)}") + return { + "Error": "Unable to fetch tiles interesecting AOI", + "SubCode": "InvalidData", + }, 400 + + try: + grid = GridService.trim_grid_to_aoi(grid_dto) + return grid, 200 + except InvalidGeoJson as e: + return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 400 + + +@router.get("/{project_id}/tasks/queries/mapped/") +async def get(project_id: int): + """ + Get all mapped tasks for a project grouped by username + --- + tags: + - tasks + produces: + - application/json + parameters: + - name: project_id + in: path + description: Unique project ID + required: true + type: integer + default: 1 + responses: + 200: + description: Mapped tasks returned + 500: + description: Internal Server Error + """ + ProjectService.get_project_by_id(project_id) + mapped_tasks = ValidatorService.get_mapped_tasks_by_user(project_id) + return mapped_tasks.model_dump(by_alias=True), 200 + + +@router.get("/{username}/tasks/queries/own/invalidated/") +@requires("authenticated") +async def get(request: Request, username: str): + """ + Get invalidated tasks either mapped by user or invalidated by user + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is 
requesting + type: string + required: true + default: en + - name: username + in: path + description: The users username + required: true + type: string + - in: query + name: asValidator + description: treats user as validator, rather than mapper, if true + type: string + - in: query + name: sortBy + description: field to sort by, defaults to action_date + type: string + - in: query + name: sortDirection + description: direction of sort, defaults to desc + type: string + - in: query + name: page + description: Page of results user requested + type: integer + - in: query + name: pageSize + description: Size of page, defaults to 10 + type: integer + - in: query + name: project + description: Optional project filter + type: integer + - in: query + name: closed + description: Optional filter for open/closed invalidations + type: boolean + responses: + 200: + description: Invalidated tasks user has invalidated + 404: + description: No invalidated tasks + 500: + description: Internal Server Error + """ + sort_column = {"updatedDate": "updated_date", "projectId": "project_id"} + if request.query_params.get("sortBy", "updatedDate") in sort_column: + sort_column = sort_column[request.query_params.get("sortBy", "updatedDate")] + else: + sort_column = sort_column["updatedDate"] + # closed needs to be set to True, False, or None + closed = None + if request.query_params.get("closed") == "true": + closed = True + elif request.query_params.get("closed") == "false": + closed = False + # sort direction should only be desc or asc + if request.query_params.get("sortDirection") in ["asc", "desc"]: + sort_direction = request.query_params.get("sortDirection") + else: + sort_direction = "desc" + invalidated_tasks = ValidatorService.get_user_invalidated_tasks( + request.query_params.get("asValidator") == "true", + username, + request.environ.get("HTTP_ACCEPT_LANGUAGE"), + closed, + request.query_params.get("project", None), + request.query_params.get("page", None), + request.query_params.get("pageSize", None), + sort_column, + sort_direction, + ) + return invalidated_tasks.model_dump(by_alias=True), 200 diff --git a/backend/api/tasks/statistics.py b/backend/api/tasks/statistics.py index f51b5a00cb..c00459accc 100644 --- a/backend/api/tasks/statistics.py +++ b/backend/api/tasks/statistics.py @@ -1,101 +1,124 @@ from datetime import date, timedelta -from flask_restful import Resource, request -from backend.services.users.authentication_service import token_auth -from backend.services.stats_service import StatsService +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse + from backend.api.utils import validate_date_input +from backend.db import get_db +from backend.models.dtos.user_dto import AuthUserDTO +from backend.services.stats_service import StatsService +from backend.services.users.authentication_service import login_required +router = APIRouter( + prefix="/tasks", + tags=["tasks"], + responses={404: {"description": "Not found"}}, +) -class TasksStatisticsAPI(Resource): - @token_auth.login_required - def get(self): - """ - Get Task Stats - --- - tags: - - tasks - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - type: string - required: true - default: Token sessionTokenHere== - - in: query - name: startDate - description: Date to filter as minimum - required: true - type: string - - in: query - name: endDate - description: Date to filter as maximum. 
Default value is the current date. - required: false - type: string - - in: query - name: organisationName - description: Organisation name to filter by - required: false - - in: query - name: organisationId - description: Organisation ID to filter by - required: false - - in: query - name: campaign - description: Campaign name to filter by - required: false - - in: query - name: projectId - description: Project IDs to filter by - required: false - - in: query - name: country - description: Country name to filter by - required: false - responses: - 200: - description: Task statistics - 400: - description: Bad Request - 401: - description: Request is not authenticated - 500: - description: Internal Server Error - """ - try: - if request.args.get("startDate"): - start_date = validate_date_input(request.args.get("startDate")) - else: - return { + +@router.get("/statistics/") +async def get( + request: Request, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Get Task Stats + --- + tags: + - tasks + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + type: string + required: true + default: Token sessionTokenHere== + - in: query + name: startDate + description: Date to filter as minimum + required: true + type: string + - in: query + name: endDate + description: Date to filter as maximum. Default value is the current date. + required: false + type: string + - in: query + name: organisationName + description: Organisation name to filter by + required: false + - in: query + name: organisationId + description: Organisation ID to filter by + required: false + - in: query + name: campaign + description: Campaign name to filter by + required: false + - in: query + name: projectId + description: Project IDs to filter by + required: false + - in: query + name: country + description: Country name to filter by + required: false + responses: + 200: + description: Task statistics + 400: + description: Bad Request + 401: + description: Request is not authenticated + 500: + description: Internal Server Error + """ + try: + if request.query_params.get("startDate"): + start_date = validate_date_input(request.query_params.get("startDate")) + else: + return JSONResponse( + content={ "Error": "Start date is required", "SubCode": "MissingDate", - }, 400 - end_date = validate_date_input(request.args.get("endDate", date.today())) - if end_date < start_date: - raise ValueError( - "InvalidDateRange- Start date must be earlier than end date" - ) - if (end_date - start_date) > timedelta(days=366): - raise ValueError( - "InvalidDateRange- Date range can not be bigger than 1 year" - ) - organisation_id = request.args.get("organisationId", None, int) - organisation_name = request.args.get("organisationName", None, str) - campaign = request.args.get("campaign", None, str) - project_id = request.args.get("projectId") - if project_id: - project_id = map(str, project_id.split(",")) - country = request.args.get("country", None, str) - task_stats = StatsService.get_task_stats( - start_date, - end_date, - organisation_id, - organisation_name, - campaign, - project_id, - country, + }, + status_code=400, + ) + end_date = validate_date_input( + request.query_params.get("endDate", date.today()) + ) + if end_date < start_date: + raise ValueError( + "InvalidDateRange- Start date must be earlier than end date" + ) + if (end_date - start_date) > timedelta(days=366): + raise ValueError( + "InvalidDateRange- Date range 
can not be bigger than 1 year" ) - return task_stats.to_primitive(), 200 - except (KeyError, ValueError) as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 400 + organisation_id = request.query_params.get("organisationId", None) + organisation_name = request.query_params.get("organisationName", None) + campaign = request.query_params.get("campaign", None) + project_id = request.query_params.get("projectId") + if project_id: + project_id = map(str, project_id.split(",")) + country = request.query_params.get("country", None) + task_stats = await StatsService.get_task_stats( + db, + start_date, + end_date, + organisation_id, + organisation_name, + campaign, + project_id, + country, + ) + return task_stats + except (KeyError, ValueError) as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=400, + ) diff --git a/backend/api/teams/actions.py b/backend/api/teams/actions.py index 27a5799afd..a28326d697 100644 --- a/backend/api/teams/actions.py +++ b/backend/api/teams/actions.py @@ -1,359 +1,411 @@ -from flask_restful import Resource, request, current_app -from schematics.exceptions import DataError -import threading +from databases import Database +from fastapi import APIRouter, BackgroundTasks, Body, Depends, Request +from fastapi.responses import JSONResponse +from loguru import logger +from backend.db import get_db from backend.models.dtos.message_dto import MessageDTO +from backend.models.dtos.user_dto import AuthUserDTO +from backend.models.postgis.user import User from backend.services.team_service import ( - TeamService, TeamJoinNotAllowed, + TeamService, TeamServiceError, ) -from backend.services.users.authentication_service import token_auth, tm -from backend.models.postgis.user import User +from backend.services.users.authentication_service import login_required + +router = APIRouter( + prefix="/teams", + tags=["teams"], + responses={404: {"description": "Not found"}}, +) TEAM_NOT_FOUND = "Team not found" -class TeamsActionsJoinAPI(Resource): - @token_auth.login_required - def post(self, team_id): - """ - Request to join a team - --- - tags: - - teams - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: team_id - in: path - description: Unique team ID - required: true - type: integer - default: 1 - responses: - 200: - description: Member added - 403: - description: Forbidden - 404: - description: Not found - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - try: - TeamService.request_to_join_team(team_id, authenticated_user_id) - return {"Success": "Join request successful"}, 200 - except TeamServiceError as e: - return {"Error": str(e), "SubCode": "InvalidRequest"}, 400 +@router.post("/{team_id}/actions/join/") +async def post( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + team_id: int = None, +): + """ + Request to join a team + --- + tags: + - teams + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: team_id + in: path + description: Unique team ID + required: true + type: integer + default: 1 + responses: + 200: + description: Member added + 403: + description: Forbidden + 404: + 
description: Not found + 500: + description: Internal Server Error + """ + try: + async with db.transaction(): + await TeamService.request_to_join_team(team_id, user.id, db) + return JSONResponse( + content={"Success": "Join request successful"}, status_code=200 + ) + except TeamServiceError as e: + return JSONResponse( + content={"Error": str(e), "SubCode": "InvalidRequest"}, status_code=400 + ) + - @tm.pm_only(False) - @token_auth.login_required - def patch(self, team_id): - """ - Take action on a team invite - --- - tags: - - teams - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: team_id - in: path - description: Unique team ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object to accept or reject a request to join team - schema: - properties: - username: - type: string - required: true - type: - type: string - default: join-response - required: true - role: - type: string - default: member - required: false - action: - type: string - default: accept - required: true - responses: - 200: - description: Member added - 403: - description: Forbidden - 404: - description: Not found - 500: - description: Internal Server Error - """ - try: - json_data = request.get_json(force=True) - username = json_data["username"] - request_type = json_data.get("type", "join-response") - action = json_data["action"] - role = json_data.get("role", "member") - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return { +@router.patch("/{team_id}/actions/join/") +# @tm.pm_only(False) +async def patch( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + team_id: int = None, + data: dict = Body(...), +): + """ + Take action on a team invite + --- + tags: + - teams + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: team_id + in: path + description: Unique team ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object to accept or reject a request to join team + schema: + properties: + username: + type: string + required: true + type: + type: string + default: join-response + required: true + role: + type: string + default: member + required: false + action: + type: string + default: accept + required: true + responses: + 200: + description: Member added + 403: + description: Forbidden + 404: + description: Not found + 500: + description: Internal Server Error + """ + try: + username = data["username"] + request_type = data.get("type", "join-response") + action = data["action"] + role = data.get("role", "member") + except Exception as e: + logger.error(f"error validating request: {str(e)}") + return JSONResponse( + content={ "Error": str(e), "SubCode": "InvalidData", - }, 400 + }, + status_code=400, + ) - authenticated_user_id = token_auth.current_user() - if request_type == "join-response": - if TeamService.is_user_team_manager(team_id, authenticated_user_id): - TeamService.accept_reject_join_request( - team_id, authenticated_user_id, username, role, action - ) - return {"Success": "True"}, 200 - else: - return ( - { - "Error": "You don't have permissions to approve this join team request", - 
"SubCode": "ApproveJoinError", - }, - 403, - ) - elif request_type == "invite-response": - TeamService.accept_reject_invitation_request( - team_id, authenticated_user_id, username, role, action + if request_type == "join-response": + if await TeamService.is_user_team_manager(team_id, user.id, db): + await TeamService.accept_reject_join_request( + team_id, user.id, username, role, action, db ) - return {"Success": "True"}, 200 + return JSONResponse(content={"Success": "True"}, status_code=200) + else: + return JSONResponse( + content={ + "Error": "You don't have permissions to approve this join team request", + "SubCode": "ApproveJoinError", + }, + status_code=403, + ) + elif request_type == "invite-response": + await TeamService.accept_reject_invitation_request( + team_id, user.id, username, role, action, db + ) + return JSONResponse(content={"Success": "True"}, status_code=200) -class TeamsActionsAddAPI(Resource): - @token_auth.login_required - def post(self, team_id): - """ - Add members to the team - --- - tags: - - teams - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: team_id - in: path - description: Unique team ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object to join team - schema: - properties: - username: - type: string - required: true - role: - type: string - required: false - responses: - 200: - description: Member added - 403: - description: Forbidden - 404: - description: Not found - 500: - description: Internal Server Error - """ - try: - post_data = request.get_json(force=True) - username = post_data["username"] - role = post_data.get("role", None) - except (DataError, KeyError) as e: - current_app.logger.error(f"error validating request: {str(e)}") - return { +@router.post("/{team_id}/actions/add/") +async def post( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + team_id: int = None, + data: dict = Body(...), +): + """ + Add members to the team + --- + tags: + - teams + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: team_id + in: path + description: Unique team ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object to join team + schema: + properties: + username: + type: string + required: true + role: + type: string + required: false + responses: + 200: + description: Member added + 403: + description: Forbidden + 404: + description: Not found + 500: + description: Internal Server Error + """ + try: + username = data["username"] + role = data.get("role", None) + except (Exception, KeyError) as e: + logger.error(f"error validating request: {str(e)}") + return JSONResponse( + content={ "Error": str(e), "SubCode": "InvalidData", - }, 400 - - try: - authenticated_user_id = token_auth.current_user() - TeamService.add_user_to_team(team_id, authenticated_user_id, username, role) - return {"Success": "User added to the team"}, 200 - except TeamJoinNotAllowed as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 + }, + status_code=400, + ) + try: + await TeamService.add_user_to_team(team_id, user.id, username, role, db) + return JSONResponse( + 
content={"Success": "User added to the team"}, status_code=200 + ) + except TeamJoinNotAllowed as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) -class TeamsActionsLeaveAPI(Resource): - @tm.pm_only(False) - @token_auth.login_required - def post(self, team_id): - """ - Removes a user from a team - --- - tags: - - teams - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: team_id - in: path - description: Unique team ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object to remove user from team - schema: - properties: - username: - type: string - default: 1 - required: true - responses: - 200: - description: Member deleted - 403: - description: Forbidden, if user attempting to ready other messages - 404: - description: Not found - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - username = request.get_json(force=True)["username"] - request_user = User.get_by_id(authenticated_user_id) - if ( - TeamService.is_user_team_manager(team_id, authenticated_user_id) - or request_user.username == username - ): - TeamService.leave_team(team_id, username) - return {"Success": "User removed from the team"}, 200 - else: - return ( - { - "Error": "You don't have permissions to remove {} from this team.".format( - username - ), - "SubCode": "RemoveUserError", - }, - 403, - ) +@router.post("/{team_id}/actions/leave/") +async def post( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + team_id: int = None, + data: dict = Body(...), +): + """ + Removes a user from a team + --- + tags: + - teams + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: team_id + in: path + description: Unique team ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object to remove user from team + schema: + properties: + username: + type: string + default: 1 + required: true + responses: + 200: + description: Member deleted + 403: + description: Forbidden, if user attempting to ready other messages + 404: + description: Not found + 500: + description: Internal Server Error + """ + username = data["username"] + request_user = await User.get_by_id(user.id, db) + if ( + await TeamService.is_user_team_manager(team_id, user.id, db) + or request_user.username == username + ): + await TeamService.leave_team(team_id, username, db) + return JSONResponse( + content={"Success": "User removed from the team"}, status_code=200 + ) + else: + return JSONResponse( + content={ + "Error": "You don't have permissions to remove {} from this team.".format( + username + ), + "SubCode": "RemoveUserError", + }, + status_code=403, + ) -class TeamsActionsMessageMembersAPI(Resource): - @token_auth.login_required - def post(self, team_id): - """ - Message all team members - --- - tags: - - teams - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: team_id - in: path - description: Unique team ID - required: 
true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for creating message - schema: - properties: - subject: - type: string - default: Thanks - required: true - message: - type: string - default: Thanks for your contribution - required: true - responses: - 200: - description: Message sent successfully - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - try: - authenticated_user_id = token_auth.current_user() - message_dto = MessageDTO(request.get_json()) - # Validate if team is present - team = TeamService.get_team_by_id(team_id) - is_manager = TeamService.is_user_team_manager( - team_id, authenticated_user_id +@router.post("/{team_id}/actions/message-members/") +async def post( + request: Request, + background_tasks: BackgroundTasks, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + team_id: int = None, +): + """ + Message all team members + --- + tags: + - teams + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: team_id + in: path + description: Unique team ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for creating message + schema: + properties: + subject: + type: string + default: Thanks + required: true + message: + type: string + default: Thanks for your contribution + required: true + responses: + 200: + description: Message sent successfully + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + try: + request_json = await request.json() + request_json["from_user_id"] = user.id + message_dto = MessageDTO(**request_json) + # Validate if team is present + team = await TeamService.get_team_by_id(team_id, db) + is_manager = await TeamService.is_user_team_manager(team_id, user.id, db) + if not is_manager: + raise ValueError + if not message_dto.message.strip() or not message_dto.subject.strip(): + raise Exception( + {"Error": "Empty message not allowed", "SubCode": "EmptyMessage"} ) - if not is_manager: - raise ValueError - message_dto.from_user_id = authenticated_user_id - message_dto.validate() - if not message_dto.message.strip() or not message_dto.subject.strip(): - raise DataError( - {"Error": "Empty message not allowed", "SubCode": "EmptyMessage"} - ) - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return { + except Exception as e: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={ "Error": "Request payload did not match validation", "SubCode": "InvalidData", - }, 400 - except ValueError: - return { + }, + status_code=400, + ) + except ValueError: + return JSONResponse( + content={ "Error": "Unauthorised to send message to team members", "SubCode": "UserNotPermitted", - }, 403 - - try: - threading.Thread( - target=TeamService.send_message_to_all_team_members, - args=(team_id, team.name, message_dto), - ).start() + }, + status_code=403, + ) - return {"Success": "Message sent successfully"}, 200 - except ValueError as e: - return {"Error": str(e)}, 403 + try: + background_tasks.add_task( + TeamService.send_message_to_all_team_members, + team_id, + team.name, + message_dto, + user.id, + ) + return JSONResponse( + 
content={"Success": "Message sent successfully"}, status_code=200 + ) + except ValueError as e: + return JSONResponse(content={"Error": str(e)}, status_code=400) diff --git a/backend/api/teams/resources.py b/backend/api/teams/resources.py index 06df030d27..7f4071b803 100644 --- a/backend/api/teams/resources.py +++ b/backend/api/teams/resources.py @@ -1,370 +1,415 @@ -from flask_restful import Resource, request, current_app -from schematics.exceptions import DataError +from distutils.util import strtobool -from backend.models.dtos.team_dto import ( - NewTeamDTO, - UpdateTeamDTO, - TeamSearchDTO, -) -from backend.services.team_service import TeamService, TeamServiceError -from backend.services.users.authentication_service import token_auth +from databases import Database +from fastapi import APIRouter, Body, Depends, Request +from fastapi.responses import JSONResponse +from loguru import logger + +from backend.db import get_db +from backend.models.dtos.team_dto import NewTeamDTO, TeamSearchDTO, UpdateTeamDTO +from backend.models.dtos.user_dto import AuthUserDTO +from backend.models.postgis.team import Team from backend.services.organisation_service import OrganisationService +from backend.services.team_service import TeamService, TeamServiceError +from backend.services.users.authentication_service import login_required from backend.services.users.user_service import UserService -from distutils.util import strtobool +router = APIRouter( + prefix="/teams", + tags=["teams"], + responses={404: {"description": "Not found"}}, +) -class TeamsRestAPI(Resource): - @token_auth.login_required - def patch(self, team_id): - """ - Updates a team - --- - tags: - - teams - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: team_id - in: path - description: The unique team ID - required: true - type: integer - default: 1 - - in: body - name: body - required: true - description: JSON object for updating a team - schema: - properties: - name: - type: string - default: HOT - Mappers - logo: - type: string - default: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg - members: - type: array - items: - schema: - $ref: "#/definitions/TeamMembers" - organisation: - type: string - default: HOT - description: - type: string - default: HOT's mapping editors - inviteOnly: - type: boolean - default: false - responses: - 200: - description: Team updated successfully - 400: - description: Client Error - Invalid Request - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 500: - description: Internal Server Error - """ - try: - team = TeamService.get_team_by_id(team_id) - team_dto = UpdateTeamDTO(request.get_json()) - team_dto.team_id = team_id - team_dto.validate() - authenticated_user_id = token_auth.current_user() - if not TeamService.is_user_team_manager( - team_id, authenticated_user_id - ) and not OrganisationService.can_user_manage_organisation( - team.organisation_id, authenticated_user_id - ): - return { +@router.patch("/{team_id}/") +async def patch( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + team_id: int = None, + team_dto: UpdateTeamDTO = Body(...), +): + """ + Updates a team + --- + tags: + - teams + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + 
default: Token sessionTokenHere== + - name: team_id + in: path + description: The unique team ID + required: true + type: integer + default: 1 + - in: body + name: body + required: true + description: JSON object for updating a team + schema: + properties: + name: + type: string + default: HOT - Mappers + logo: + type: string + default: https://tasks.hotosm.org/assets/img/hot-tm-logo.svg + members: + type: array + items: + schema: + $ref: "#/definitions/TeamMembers" + organisation: + type: string + default: HOT + description: + type: string + default: HOT's mapping editors + inviteOnly: + type: boolean + default: false + responses: + 200: + description: Team updated successfully + 400: + description: Client Error - Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 500: + description: Internal Server Error + """ + try: + team = await TeamService.get_team_by_id(team_id, db) + team_dto.team_id = team_id + data = await request.json() + if not await TeamService.is_user_team_manager( + team_id, user.id, db + ) and not await OrganisationService.can_user_manage_organisation( + team.organisation_id, user.id, db + ): + return JSONResponse( + content={ "Error": "User is not a admin or a manager for the team", "SubCode": "UserNotTeamManager", - }, 403 - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 + }, + status_code=403, + ) + except Exception as e: + logger.error(f"error validating request: {str(e)}") + return JSONResponse( + content={"Error": str(e), "SubCode": "InvalidData"}, status_code=400 + ) + try: + if ("joinMethod" or "organisations_id") not in data.keys(): + await Team.update_team_members(team, team_dto, db) + else: + await TeamService.update_team(team_dto, db) + return JSONResponse(content={"Status": "Updated"}, status_code=200) + except TeamServiceError as e: + return JSONResponse(content={"Error": str(e)}, status_code=402) - try: - TeamService.update_team(team_dto) - return {"Status": "Updated"}, 200 - except TeamServiceError as e: - return str(e), 402 - def get(self, team_id): - """ - Retrieves a Team - --- - tags: - - teams - produces: - - application/json - parameters: - - name: team_id - in: path - description: Unique team ID - required: true - type: integer - default: 1 - - in: query - name: omitMemberList - type: boolean - description: Set it to true if you don't want the members list on the response. - default: False - responses: - 200: - description: Team found - 401: - description: Unauthorized - Invalid credentials - 404: - description: Team not found - 500: - description: Internal Server Error - """ - authenticated_user_id = token_auth.current_user() - omit_members = strtobool(request.args.get("omitMemberList", "false")) - if authenticated_user_id is None: - user_id = 0 - else: - user_id = authenticated_user_id - team_dto = TeamService.get_team_as_dto(team_id, user_id, omit_members) - return team_dto.to_primitive(), 200 +@router.get("/{team_id}/") +async def retrieve_team( + request: Request, + team_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Retrieves a Team + --- + tags: + - teams + produces: + - application/json + parameters: + - name: team_id + in: path + description: Unique team ID + required: true + type: integer + default: 1 + - in: query + name: omitMemberList + type: boolean + description: Set it to true if you don't want the members list on the response. 
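One expression in the PATCH body above deserves a second look: ("joinMethod" or "organisations_id") not in data.keys() short-circuits to "joinMethod" (a non-empty string is truthy), so the organisations_id key is never tested. If the intent is to call Team.update_team_members only when neither key is present, both keys have to be named in the test. A small, self-contained illustration of the difference, with the corrected branch shown as comments (names as in the diff; the "neither key present" reading is an assumption about intent):

data = {"organisations_id": 23}

("joinMethod" or "organisations_id") not in data                # True  - only "joinMethod" is checked
not any(k in data for k in ("joinMethod", "organisations_id"))  # False - both keys are checked

# Corrected branch under that assumption:
# if not any(k in data for k in ("joinMethod", "organisations_id")):
#     await Team.update_team_members(team, team_dto, db)
# else:
#     await TeamService.update_team(team_dto, db)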
+ default: False + responses: + 200: + description: Team found + 401: + description: Unauthorized - Invalid credentials + 404: + description: Team not found + 500: + description: Internal Server Error + """ + authenticated_user_id = user.id + omit_members = strtobool(request.query_params.get("omitMemberList", "false")) + if authenticated_user_id is None: + user_id = 0 + else: + user_id = authenticated_user_id + team_dto = await TeamService.get_team_as_dto(team_id, user_id, omit_members, db) + return team_dto # TODO: Add delete API then do front end services and ui work - @token_auth.login_required - def delete(self, team_id): - """ - Deletes a Team - --- - tags: - - teams - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: team_id - in: path - description: The unique team ID - required: true - type: integer - default: 1 - responses: - 200: - description: Team deleted - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - Team has associated projects - 404: - description: Team not found - 500: - description: Internal Server Error - """ - if not TeamService.is_user_team_manager(team_id, token_auth.current_user()): - return { + +@router.delete("/{team_id}/") +async def delete( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + team_id: int = None, +): + """ + Deletes a Team + --- + tags: + - teams + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: team_id + in: path + description: The unique team ID + required: true + type: integer + default: 1 + responses: + 200: + description: Team deleted + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden - Team has associated projects + 404: + description: Team not found + 500: + description: Internal Server Error + """ + if not await TeamService.is_user_team_manager(team_id, user.id, db): + return JSONResponse( + content={ "Error": "User is not a manager for the team", "SubCode": "UserNotTeamManager", - }, 401 + }, + status_code=403, + ) - return TeamService.delete_team(team_id) + return await TeamService.delete_team(team_id, db) -class TeamsAllAPI(Resource): - @token_auth.login_required - def get(self): - """ - Gets all teams - --- - tags: - - teams - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: query - name: team_name - description: name of the team to filter by - type: str - default: null - - in: query - name: member - description: user ID to filter teams that the users belongs to, user must be active. 
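Several handlers in this diff, including retrieve_team above, parse boolean query strings with distutils.util.strtobool. Two caveats apply: strtobool raises ValueError for anything outside its recognised set (so omitMemberList=maybe becomes an unhandled error), and distutils is removed from the standard library in Python 3.12. A small helper one could substitute, shown only as a sketch (the name to_bool is not part of the diff):

def to_bool(value, default=False):
    """Parse truthy query-string values without distutils."""
    if value is None:
        return default
    return str(value).strip().lower() in ("1", "true", "yes", "y", "on")

# Usage mirroring the handler above:
# omit_members = to_bool(request.query_params.get("omitMemberList"), default=False)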
- type: str - default: null - - in: query - name: manager - description: user ID to filter teams that the users has MANAGER role - type: str - default: null - - in: query - name: member_request - description: user ID to filter teams that the user has send invite request to - type: str - default: null - - in: query - name: team_role - description: team role for project - type: str - default: null - - in: query - name: organisation - description: organisation ID to filter teams - type: integer - default: null - - in: query - name: omitMemberList - type: boolean - description: Set it to true if you don't want the members list on the response. - default: False - - in: query - name: fullMemberList - type: boolean - description: Set it to true if you want full members list otherwise it will be limited to 10 per role. - default: True - - in: query - name: paginate - type: boolean - description: Set it to true if you want to paginate the results. - default: False - - in: query - name: page - type: integer - description: Page number to return. - default: 1 - - in: query - name: perPage - type: integer - description: Number of results per page. - default: 10 +@router.get("/") +async def list_teams( + request: Request, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Gets all teams + --- + tags: + - teams + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: query + name: team_name + description: name of the team to filter by + type: str + default: null + - in: query + name: member + description: user ID to filter teams that the users belongs to, user must be active. + type: str + default: null + - in: query + name: manager + description: user ID to filter teams that the users has MANAGER role + type: str + default: null + - in: query + name: member_request + description: user ID to filter teams that the user has send invite request to + type: str + default: null + - in: query + name: team_role + description: team role for project + type: str + default: null + - in: query + name: organisation + description: organisation ID to filter teams + type: integer + default: null + - in: query + name: omitMemberList + type: boolean + description: Set it to true if you don't want the members list on the response. + default: False + - in: query + name: fullMemberList + type: boolean + description: Set it to true if you want full members list otherwise it will be limited to 10 per role. + default: True + - in: query + name: paginate + type: boolean + description: Set it to true if you want to paginate the results. + default: False + - in: query + name: page + type: integer + description: Page number to return. + default: 1 + - in: query + name: perPage + type: integer + description: Number of results per page. 
+ default: 10 - responses: - 201: - description: Team list returned successfully - 400: - description: Client Error - Invalid Request - 401: - description: Unauthorized - Invalid credentials - 500: - description: Internal Server Error - """ - user_id = token_auth.current_user() - search_dto = TeamSearchDTO() - search_dto.team_name = request.args.get("team_name", None) - search_dto.member = request.args.get("member", None) - search_dto.manager = request.args.get("manager", None) - search_dto.member_request = request.args.get("member_request", None) - search_dto.team_role = request.args.get("team_role", None) - search_dto.organisation = request.args.get("organisation", None) - search_dto.omit_member_list = strtobool( - request.args.get("omitMemberList", "false") - ) - search_dto.full_member_list = strtobool( - request.args.get("fullMemberList", "true") - ) - search_dto.paginate = strtobool(request.args.get("paginate", "false")) - search_dto.page = request.args.get("page", 1) - search_dto.per_page = request.args.get("perPage", 10) - search_dto.user_id = user_id - search_dto.validate() + responses: + 201: + description: Team list returned successfully + 400: + description: Client Error - Invalid Request + 401: + description: Unauthorized - Invalid credentials + 500: + description: Internal Server Error + """ + search_dto = TeamSearchDTO() + search_dto.team_name = request.query_params.get("team_name", None) + search_dto.member = ( + int(request.query_params.get("member")) + if request.query_params.get("member") + else None + ) + search_dto.manager = request.query_params.get("manager", None) + search_dto.member_request = request.query_params.get("member_request", None) + search_dto.team_role = request.query_params.get("team_role", None) + search_dto.organisation = request.query_params.get("organisation", None) + search_dto.omit_members = strtobool( + request.query_params.get("omitMemberList", "false") + ) + search_dto.full_members_list = strtobool( + request.query_params.get("fullMemberList", "true") + ) + search_dto.paginate = strtobool(request.query_params.get("paginate", "false")) + search_dto.page = int(request.query_params.get("page", 1)) + search_dto.per_page = int(request.query_params.get("perPage", 10)) + search_dto.user_id = user.id + teams = await TeamService.get_all_teams(search_dto, db) + return teams - teams = TeamService.get_all_teams(search_dto) - return teams.to_primitive(), 200 - @token_auth.login_required - def post(self): - """ - Creates a new team - --- - tags: - - teams - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object for creating team - schema: - properties: - name: - type: string - default: HOT - Mappers - organisation_id: - type: integer - default: 1 - description: - type: string - visibility: - type: string - enum: - - "PUBLIC" - - "PRIVATE" - joinMethod: - type: string - enum: - - "ANY" - - "BY_REQUEST" - - "BY_INVITE" - responses: - 201: - description: Team created successfully - 400: - description: Client Error - Invalid Request - 401: - description: Unauthorized - Invalid credentials - 403: - description: Unauthorized - Forbidden - 500: - description: Internal Server Error - """ - user_id = token_auth.current_user() +@router.post("/") +async def post( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + 
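list_teams above pulls every filter out of request.query_params by hand and converts types manually. FastAPI can perform that conversion and validation itself when the parameters are declared in the signature. The sketch below shows the shape of such a signature; it is an alternative pattern only, not what this diff implements, it would replace rather than sit beside the route above, and the function name is illustrative (other names and dependencies are the ones already imported at the top of teams/resources.py):

from typing import Optional

from fastapi import Depends, Query

@router.get("/")
async def list_teams_typed(
    team_name: Optional[str] = None,
    member: Optional[int] = None,
    manager: Optional[str] = None,
    member_request: Optional[str] = None,
    team_role: Optional[str] = None,
    organisation: Optional[int] = None,
    omitMemberList: bool = Query(False),
    fullMemberList: bool = Query(True),
    paginate: bool = Query(False),
    page: int = Query(1, ge=1),
    perPage: int = Query(10, ge=1),
    user: AuthUserDTO = Depends(login_required),
    db: Database = Depends(get_db),
):
    # Populate TeamSearchDTO from the already-typed values, then call
    # TeamService.get_all_teams(search_dto, db) exactly as in the handler above.
    ...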
team_dto: NewTeamDTO = Body(...), +): + """ + Creates a new team + --- + tags: + - teams + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object for creating team + schema: + properties: + name: + type: string + default: HOT - Mappers + organisation_id: + type: integer + default: 1 + description: + type: string + visibility: + type: string + enum: + - "PUBLIC" + - "PRIVATE" + joinMethod: + type: string + enum: + - "ANY" + - "BY_REQUEST" + - "BY_INVITE" + responses: + 201: + description: Team created successfully + 400: + description: Client Error - Invalid Request + 401: + description: Unauthorized - Invalid credentials + 403: + description: Unauthorized - Forbidden + 500: + description: Internal Server Error + """ - try: - team_dto = NewTeamDTO(request.get_json()) - team_dto.creator = user_id - team_dto.validate() - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 + try: + team_dto.creator = user.id + except Exception as e: + logger.error(f"error validating request: {str(e)}") + return JSONResponse( + content={"Error": str(e), "SubCode": "InvalidData"}, status_code=400 + ) - try: - organisation_id = team_dto.organisation_id + try: + organisation_id = team_dto.organisation_id - is_org_manager = OrganisationService.is_user_an_org_manager( - organisation_id, user_id + is_org_manager = await OrganisationService.is_user_an_org_manager( + organisation_id, user.id, db + ) + is_admin = await UserService.is_user_an_admin(user.id, db) + if is_admin or is_org_manager: + team_id = await TeamService.create_team(team_dto, db) + return JSONResponse(content={"teamId": team_id}, status_code=201) + else: + error_msg = "User not permitted to create team for the Organisation" + return JSONResponse( + content={"Error": error_msg, "SubCode": "CreateTeamNotPermitted"}, + status_code=403, ) - is_admin = UserService.is_user_an_admin(user_id) - if is_admin or is_org_manager: - team_id = TeamService.create_team(team_dto) - return {"teamId": team_id}, 201 - else: - error_msg = "User not permitted to create team for the Organisation" - return {"Error": error_msg, "SubCode": "CreateTeamNotPermitted"}, 403 - except TeamServiceError as e: - return str(e), 400 + except TeamServiceError as e: + return JSONResponse(content={"Error": str(e)}, status_code=400) diff --git a/backend/api/users/actions.py b/backend/api/users/actions.py index 0bd77f8c9d..b46905f78a 100644 --- a/backend/api/users/actions.py +++ b/backend/api/users/actions.py @@ -1,371 +1,411 @@ -from flask_restful import Resource, current_app, request -from schematics.exceptions import DataError +from databases import Database +from fastapi import APIRouter, Body, Depends, Request +from fastapi.responses import JSONResponse +from loguru import logger -from backend.models.dtos.user_dto import UserDTO, UserRegisterEmailDTO +from backend.db import get_db +from backend.models.dtos.user_dto import AuthUserDTO, UserDTO, UserRegisterEmailDTO +from backend.services.interests_service import InterestService from backend.services.messaging.message_service import MessageService -from backend.services.users.authentication_service import token_auth, tm +from backend.services.users.authentication_service import login_required from backend.services.users.user_service import 
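Worth noting for the create-team endpoint above: with team_dto declared as NewTeamDTO = Body(...), FastAPI validates the payload before the handler runs and answers malformed bodies with its default 422 response, so the try/except that used to map schematics DataError onto a 400 "InvalidData" payload no longer sees those errors. If the legacy response shape matters to API consumers, one option is a RequestValidationError handler registered on the application; the diff does not show backend/main.py, so the wiring below is purely illustrative:

from fastapi import FastAPI, Request, status
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse

def register_validation_handler(app: FastAPI) -> None:
    @app.exception_handler(RequestValidationError)
    async def handle_validation_error(request: Request, exc: RequestValidationError):
        # Keep the historical {"Error": ..., "SubCode": "InvalidData"} contract.
        return JSONResponse(
            content={"Error": str(exc), "SubCode": "InvalidData"},
            status_code=status.HTTP_400_BAD_REQUEST,
        )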
UserService, UserServiceError -from backend.services.interests_service import InterestService +router = APIRouter( + prefix="/users", + tags=["users"], + responses={404: {"description": "Not found"}}, +) -class UsersActionsSetUsersAPI(Resource): - @tm.pm_only(False) - @token_auth.login_required - def patch(self): - """ - Updates user info - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object to update a user - schema: - properties: - id: - type: integer - example: 1 - name: - type: string - example: Your Name - city: - type: string - example: Your City - country: - type: string - example: Your Country - emailAddress: - type: string - example: test@test.com - twitterId: - type: string - example: twitter handle without @ - facebookId: - type: string - example: facebook username - linkedinId: - type: string - example: linkedin username - gender: - type: string - description: gender - selfDescriptionGender: - type: string - description: gender self-description - responses: - 200: - description: Details saved - 400: - description: Client Error - Invalid Request - 401: - description: Unauthorized - Invalid credentials - 500: - description: Internal Server Error - """ - try: - user_dto = UserDTO(request.get_json()) - if user_dto.email_address == "": - user_dto.email_address = ( - None # Replace empty string with None so validation doesn't break - ) - user_dto.validate() - authenticated_user_id = token_auth.current_user() - if authenticated_user_id != user_dto.id: - return { +@router.patch("/me/actions/set-user/") +# @tm.pm_only(False) +async def patch( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + data: dict = Body(...), +): + """ + Updates user info + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object to update a user + schema: + properties: + id: + type: integer + example: 1 + name: + type: string + example: Your Name + city: + type: string + example: Your City + country: + type: string + example: Your Country + emailAddress: + type: string + example: test@test.com + twitterId: + type: string + example: twitter handle without @ + facebookId: + type: string + example: facebook username + linkedinId: + type: string + example: linkedin username + gender: + type: string + description: gender + selfDescriptionGender: + type: string + description: gender self-description + responses: + 200: + description: Details saved + 400: + description: Client Error - Invalid Request + 401: + description: Unauthorized - Invalid credentials + 500: + description: Internal Server Error + """ + try: + user_dto = UserDTO(**data) + if user_dto.email_address == "": + user_dto.email_address = ( + None # Replace empty string with None so validation doesn't break + ) + if user.id != user_dto.id: + return JSONResponse( + content={ "Error": "Unable to authenticate", "SubCode": "UnableToAuth", - }, 401 - except ValueError as e: - return {"Error": str(e)}, 400 - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return { + }, + status_code=401, + ) + except ValueError 
as e: + return JSONResponse(content={"Error": str(e)}, status_code=400) + except ValueError as e: + logger.error(f"error validating request: {str(e)}") + return JSONResponse( + content={ "Error": "Unable to update user details", "SubCode": "InvalidData", - }, 400 - - verification_sent = UserService.update_user_details( - authenticated_user_id, user_dto + }, + status_code=400, ) - return verification_sent, 200 + verification_sent = await UserService.update_user_details(user.id, user_dto, db) + return verification_sent -class UsersActionsSetLevelAPI(Resource): - @tm.pm_only() - @token_auth.login_required - def patch(self, username, level): - """ - Allows PMs to set a user's mapping level - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: username - in: path - description: Mapper's OpenStreetMap username - required: true - type: string - default: Thinkwhere - - name: level - in: path - description: The mapping level that should be set - required: true - type: string - default: ADVANCED - responses: - 200: - description: Level set - 400: - description: Bad Request - Client Error - 401: - description: Unauthorized - Invalid credentials - 404: - description: User not found - 500: - description: Internal Server Error - """ - try: - UserService.set_user_mapping_level(username, level) - return {"Success": "Level set"}, 200 - except UserServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 400 +# class UsersActionsSetLevelAPI(Resource): +# @token_auth.login_required +@router.patch("/{username}/actions/set-level/{level}/") +# @tm.pm_only() +async def patch( + request: Request, + username, + level, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Allows PMs to set a user's mapping level + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: username + in: path + description: Mapper's OpenStreetMap username + required: true + type: string + default: Thinkwhere + - name: level + in: path + description: The mapping level that should be set + required: true + type: string + default: ADVANCED + responses: + 200: + description: Level set + 400: + description: Bad Request - Client Error + 401: + description: Unauthorized - Invalid credentials + 404: + description: User not found + 500: + description: Internal Server Error + """ + try: + await UserService.set_user_mapping_level(username, level, db) + return JSONResponse(content={"Success": "Level set"}, status_code=200) + except UserServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=400, + ) -class UsersActionsSetRoleAPI(Resource): - @tm.pm_only() - @token_auth.login_required - def patch(self, username, role): - """ - Allows PMs to set a user's role - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: username - in: path - description: Mapper's OpenStreetMap username - required: true - type: string - default: Thinkwhere - - name: role - in: path - description: The role to add 
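In the set-user handler above, both clauses now read "except ValueError as e", so the second one (the logger.error branch returning "Unable to update user details") can never execute. The original code distinguished schematics DataError from ValueError; with the DTOs moving to Pydantic, the closest equivalent is ValidationError. A minimal sketch of that distinction, assuming UserDTO is a Pydantic model (the model_dump calls elsewhere in this diff suggest it is); other names come from the handler above:

from pydantic import ValidationError

try:
    user_dto = UserDTO(**data)
    if user_dto.email_address == "":
        user_dto.email_address = None
    if user.id != user_dto.id:
        return JSONResponse(
            content={"Error": "Unable to authenticate", "SubCode": "UnableToAuth"},
            status_code=401,
        )
except ValidationError as e:
    # Replaces the second, unreachable "except ValueError" branch.
    logger.error(f"error validating request: {str(e)}")
    return JSONResponse(
        content={"Error": "Unable to update user details", "SubCode": "InvalidData"},
        status_code=400,
    )
except ValueError as e:
    return JSONResponse(content={"Error": str(e)}, status_code=400)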
- required: true - type: string - default: ADMIN - responses: - 200: - description: Role set - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: User not found - 500: - description: Internal Server Error - """ - try: - UserService.add_role_to_user(token_auth.current_user(), username, role) - return {"Success": "Role Added"}, 200 - except UserServiceError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 403 +@router.patch("/{username}/actions/set-role/{role}/") +# @tm.pm_only() +async def patch( + request: Request, + username: str, + role: str, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Allows PMs to set a user's role + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: username + in: path + description: Mapper's OpenStreetMap username + required: true + type: string + default: Thinkwhere + - name: role + in: path + description: The role to add + required: true + type: string + default: ADMIN + responses: + 200: + description: Role set + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: User not found + 500: + description: Internal Server Error + """ + try: + await UserService.add_role_to_user(user.id, username, role, db) + return JSONResponse(content={"Success": "Role Added"}, status_code=200) + except UserServiceError as e: + return JSONResponse( + content={"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, + status_code=403, + ) -class UsersActionsSetExpertModeAPI(Resource): - @tm.pm_only(False) - @token_auth.login_required - def patch(self, is_expert): - """ - Allows user to enable or disable expert mode - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: is_expert - in: path - description: true to enable expert mode, false to disable - required: true - type: string - responses: - 200: - description: Mode set - 400: - description: Bad Request - Client Error - 401: - description: Unauthorized - Invalid credentials - 404: - description: User not found - 500: - description: Internal Server Error - """ - try: - UserService.set_user_is_expert( - token_auth.current_user(), is_expert == "true" - ) - return {"Success": "Expert mode updated"}, 200 - except UserServiceError: - return {"Error": "Not allowed"}, 400 +@router.patch("/{user_name}/actions/set-expert-mode/{is_expert}/") +# @tm.pm_only() +async def patch( + request: Request, + user_name, + is_expert, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Allows user to enable or disable expert mode + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: is_expert + in: path + description: true to enable expert mode, false to disable + required: true + type: string + responses: + 200: + description: Mode set + 400: + description: Bad Request - Client Error + 401: + description: Unauthorized - Invalid credentials + 404: + description: User not found + 500: + description: 
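The set-level and set-role handlers above both build their error payloads with str(e).split("-")[1] and str(e).split("-")[0]. That works for messages written as "SubCode- human readable text", but any UserServiceError raised without a hyphen makes the [1] index raise IndexError inside the except block, turning a 400 or 403 into a 500. A small helper is one way to make that parsing defensive; the name split_error is illustrative and not part of the diff:

def split_error(message: str, default_subcode: str = "InternalServerError") -> dict:
    """Split 'SubCode- details' messages into the legacy error payload."""
    subcode, sep, details = message.partition("-")
    if not sep:
        return {"Error": message, "SubCode": default_subcode}
    return {"Error": details.strip(), "SubCode": subcode.strip()}

# split_error("UserNotFound- User 'foo' not found")
# -> {"Error": "User 'foo' not found", "SubCode": "UserNotFound"}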
Internal Server Error + """ + try: + await UserService.set_user_is_expert(user.id, is_expert == "true", db) + return JSONResponse(content={"Success": "Expert mode updated"}, status_code=200) + except UserServiceError: + return JSONResponse(content={"Error": "Not allowed"}, status_code=400) -class UsersActionsVerifyEmailAPI(Resource): - @tm.pm_only(False) - @token_auth.login_required - def patch(self): - """ - Resends the verification email token to the logged in user - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - responses: - 200: - description: Resends the user their email verification email - 500: - description: Internal Server Error - """ - try: - MessageService.resend_email_validation(token_auth.current_user()) - return {"Success": "Verification email resent"}, 200 - except ValueError as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 400 +@router.patch("/me/actions/verify-email/") +# @tm.pm_only() +async def patch( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Resends the verification email token to the logged in user + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + responses: + 200: + description: Resends the user their email verification email + 500: + description: Internal Server Error + """ + try: + await MessageService.resend_email_validation(user.id, db) + return JSONResponse( + content={"Success": "Verification email resent"}, status_code=200 + ) + except ValueError as e: + return JSONResponse( + content={"Error": str(e), "SubCode": str(e).split("-")[0]}, status_code=400 + ) -class UsersActionsRegisterEmailAPI(Resource): - def post(self): - """ - Registers users without OpenStreetMap account - --- - tags: - - users - produces: - - application/json - parameters: - - in: body - name: body - required: true - description: JSON object to update a user - schema: - properties: - email: - type: string - example: test@test.com - responses: - 200: - description: User registered - 400: - description: Client Error - Invalid Request - 500: - description: Internal Server Error - """ - try: - user_dto = UserRegisterEmailDTO(request.get_json()) - user_dto.validate() - except DataError as e: - current_app.logger.error(f"error validating request: {str(e)}") - return {"Error": str(e), "SubCode": "InvalidData"}, 400 - try: - user = UserService.register_user_with_email(user_dto) - user_dto = UserRegisterEmailDTO( - dict( - success=True, - email=user_dto.email, - details="User created successfully", - id=user.id, - ) - ) - return user_dto.to_primitive(), 200 - except ValueError as e: - user_dto = UserRegisterEmailDTO(dict(email=user_dto.email, details=str(e))) - return user_dto.to_primitive(), 400 +@router.post("/actions/register/") +async def post( + request: Request, + db: Database = Depends(get_db), + user_dto: UserRegisterEmailDTO = Body(...), +): + """ + Registers users without OpenStreetMap account + --- + tags: + - users + produces: + - application/json + parameters: + - in: body + name: body + required: true + description: JSON object to update a user + schema: + properties: + email: + type: string + example: test@test.com + responses: + 200: + 
description: User registered + 400: + description: Client Error - Invalid Request + 500: + description: Internal Server Error + """ + try: + user = await UserService.register_user_with_email(user_dto, db) + result = { + "email": user_dto.email, + "success": True, + "details": "User created successfully", + "id": user, + } + user_dto = UserRegisterEmailDTO(**result) + return user_dto.model_dump(by_alias=True) + except ValueError as e: + return JSONResponse(content={"Error": str(e)}, status_code=400) -class UsersActionsSetInterestsAPI(Resource): - @token_auth.login_required - def post(self): - """ - Creates a relationship between user and interests - --- - tags: - - interests - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: body - name: body - required: true - description: JSON object for creating/updating user and interests relationships - schema: - properties: - interests: - type: array - items: - type: integer - responses: - 200: - description: New user interest relationship created - 400: - description: Invalid Request - 401: - description: Unauthorized - Invalid credentials - 500: - description: Internal Server Error - """ - try: - data = request.get_json() - user_interests = InterestService.create_or_update_user_interests( - token_auth.current_user(), data["interests"] - ) - return user_interests.to_primitive(), 200 - except (ValueError, KeyError) as e: - return {"Error": str(e)}, 400 +@router.post("/me/actions/set-interests/") +async def post( + request: Request, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), + data: dict = Body(...), +): + """ + Creates a relationship between user and interests + --- + tags: + - interests + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: body + name: body + required: true + description: JSON object for creating/updating user and interests relationships + schema: + properties: + interests: + type: array + items: + type: integer + responses: + 200: + description: New user interest relationship created + 400: + description: Invalid Request + 401: + description: Unauthorized - Invalid credentials + 500: + description: Internal Server Error + """ + try: + user_interests = await InterestService.create_or_update_user_interests( + user.id, data["interests"], db + ) + return user_interests + except (ValueError, KeyError) as e: + return JSONResponse(content={"Error": str(e)}, status_code=400) diff --git a/backend/api/users/openstreetmap.py b/backend/api/users/openstreetmap.py index 7ab26c4bb7..9c448ddbc3 100644 --- a/backend/api/users/openstreetmap.py +++ b/backend/api/users/openstreetmap.py @@ -1,46 +1,60 @@ -from flask_restful import Resource +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse -from backend.services.users.authentication_service import token_auth -from backend.services.users.user_service import UserService, OSMServiceError +from backend.db import get_db +from backend.models.dtos.user_dto import AuthUserDTO +from backend.services.users.authentication_service import login_required +from backend.services.users.user_service import OSMServiceError, UserService +router = APIRouter( + prefix="/users", + tags=["users"], + responses={404: 
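The set-interests endpoint above accepts data: dict = Body(...) and reaches into data["interests"], handling the KeyError by hand. Declaring a small Pydantic model instead lets FastAPI reject malformed payloads before the handler runs. The sketch below is an alternative shape only, would replace rather than duplicate the route above, and InterestsPayload is a hypothetical name that does not appear in the diff (the remaining names are imported in users/actions.py as shown):

from typing import List

from pydantic import BaseModel

class InterestsPayload(BaseModel):
    interests: List[int]

@router.post("/me/actions/set-interests/")
async def set_interests(
    payload: InterestsPayload,
    user: AuthUserDTO = Depends(login_required),
    db: Database = Depends(get_db),
):
    user_interests = await InterestService.create_or_update_user_interests(
        user.id, payload.interests, db
    )
    return user_interests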
{"description": "Not found"}}, +) -class UsersOpenStreetMapAPI(Resource): - @token_auth.login_required - def get(self, username): - """ - Get details from OpenStreetMap for a specified username - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded sesesion token - required: true - type: string - default: Token sessionTokenHere== - - name: username - in: path - description: Mapper's OpenStreetMap username - required: true - type: string - default: Thinkwhere - responses: - 200: - description: User found - 401: - description: Unauthorized - Invalid credentials - 404: - description: User not found - 500: - description: Internal Server Error - 502: - description: Bad response from OSM - """ - try: - osm_dto = UserService.get_osm_details_for_user(username) - return osm_dto.to_primitive(), 200 - except OSMServiceError as e: - return {"Error": str(e)}, 502 + +@router.get("/{username}/openstreetmap/") +async def get( + request: Request, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), + username: str = None, +): + """ + Get details from OpenStreetMap for a specified username + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded sesesion token + required: true + type: string + default: Token sessionTokenHere== + - name: username + in: path + description: Mapper's OpenStreetMap username + required: true + type: string + default: Thinkwhere + responses: + 200: + description: User found + 401: + description: Unauthorized - Invalid credentials + 404: + description: User not found + 500: + description: Internal Server Error + 502: + description: Bad response from OSM + """ + try: + osm_dto = await UserService.get_osm_details_for_user(username, db) + return osm_dto.model_dump(by_alias=True) + except OSMServiceError as e: + return JSONResponse(content={"Error": str(e)}, status_code=502) diff --git a/backend/api/users/resources.py b/backend/api/users/resources.py index ab08d78fd9..59dccdc8db 100644 --- a/backend/api/users/resources.py +++ b/backend/api/users/resources.py @@ -1,388 +1,447 @@ from distutils.util import strtobool -from flask_restful import Resource, current_app, request -from schematics.exceptions import DataError -from backend.models.dtos.user_dto import UserSearchQuery -from backend.services.users.authentication_service import token_auth -from backend.services.users.user_service import UserService -from backend.services.project_service import ProjectService +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse +from loguru import logger -class UsersRestAPI(Resource): - @token_auth.login_required - def get(self, user_id): - """ - Get user information by id - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: user_id - in: path - description: The id of the user - required: true - type: integer - default: 1 - responses: - 200: - description: User found - 401: - description: Unauthorized - Invalid credentials - 404: - description: User not found - 500: - description: Internal Server Error - """ - user_dto = UserService.get_user_dto_by_id(user_id, token_auth.current_user()) - return user_dto.to_primitive(), 200 +from backend.db import get_db +from 
backend.models.dtos.user_dto import AuthUserDTO, UserSearchQuery +from backend.services.project_service import ProjectService +from backend.services.users.authentication_service import login_required +from backend.services.users.user_service import UserService -class UsersAllAPI(Resource): - @token_auth.login_required - def get(self): - """ - Get paged list of all usernames - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded sesesion token - required: true - type: string - default: Token sessionTokenHere== - - in: query - name: page - description: Page of results user requested - type: integer - - in: query - name: pagination - description: Whether to return paginated results - type: boolean - default: true - - in: query - name: per_page - description: Number of results per page - type: integer - default: 20 - - in: query - name: username - description: Full or part username - type: string - - in: query - name: role - description: Role of User, eg ADMIN, PROJECT_MANAGER - type: string - - in: query - name: level - description: Level of User, eg BEGINNER - type: string - responses: - 200: - description: Users found - 401: - description: Unauthorized - Invalid credentials - 500: - description: Internal Server Error - """ - try: - query = UserSearchQuery() - query.pagination = strtobool(request.args.get("pagination", "True")) - if query.pagination: - query.page = ( - int(request.args.get("page")) if request.args.get("page") else 1 - ) - query.per_page = request.args.get("perPage", 20) - query.username = request.args.get("username") - query.mapping_level = request.args.get("level") - query.role = request.args.get("role") - query.validate() - except DataError as e: - current_app.logger.error(f"Error validating request: {str(e)}") - return {"Error": "Unable to fetch user list", "SubCode": "InvalidData"}, 400 +router = APIRouter( + prefix="/users", + tags=["users"], + responses={404: {"description": "Not found"}}, +) - users_dto = UserService.get_all_users(query) - return users_dto.to_primitive(), 200 +@router.get("/{user_id}/") +async def get( + request: Request, + user_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Get user information by id + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: user_id + in: path + description: The id of the user + required: true + type: integer + default: 1 + responses: + 200: + description: User found + 401: + description: Unauthorized - Invalid credentials + 404: + description: User not found + 500: + description: Internal Server Error + """ + user_dto = await UserService.get_user_dto_by_id(user_id, user.id, db) + return user_dto -class UsersQueriesUsernameAPI(Resource): - @token_auth.login_required - def get(self, username): - """ - Get user information by OpenStreetMap username - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: username - in: path - description: Mapper's OpenStreetMap username - required: true - type: string - default: Thinkwhere - responses: - 200: - description: User found - 401: - description: Unauthorized - Invalid credentials - 404: - description: 
User not found - 500: - description: Internal Server Error - """ - user_dto = UserService.get_user_dto_by_username( - username, token_auth.current_user() + +@router.get("/") +async def get( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Get paged list of all usernames + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded sesesion token + required: true + type: string + default: Token sessionTokenHere== + - in: query + name: page + description: Page of results user requested + type: integer + - in: query + name: pagination + description: Whether to return paginated results + type: boolean + default: true + - in: query + name: per_page + description: Number of results per page + type: integer + default: 20 + - in: query + name: username + description: Full or part username + type: string + - in: query + name: role + description: Role of User, eg ADMIN, PROJECT_MANAGER + type: string + - in: query + name: level + description: Level of User, eg BEGINNER + type: string + responses: + 200: + description: Users found + 401: + description: Unauthorized - Invalid credentials + 500: + description: Internal Server Error + """ + try: + query = UserSearchQuery() + query.pagination = strtobool(request.query_params.get("pagination", "True")) + if query.pagination: + query.page = ( + int(request.query_params.get("page")) + if request.query_params.get("page") + else 1 + ) + query.per_page = int(request.query_params.get("perPage", 20)) + query.username = request.query_params.get("username") + query.mapping_level = request.query_params.get("level") + query.role = request.query_params.get("role") + except Exception: + logger.error(f"Error validating request: {str(e)}") + return JSONResponse( + content={"Error": "Unable to fetch user list", "SubCode": "InvalidData"}, + status_code=400, ) - return user_dto.to_primitive(), 200 + users_dto = await UserService.get_all_users(query, db) + return users_dto -class UsersQueriesUsernameFilterAPI(Resource): - @token_auth.login_required - def get(self, username): - """ - Get paged lists of users matching OpenStreetMap username filter - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: username - in: path - description: Mapper's partial or full OpenStreetMap username - type: string - default: ab - - in: query - name: page - description: Page of results user requested - type: integer - - in: query - name: projectId - description: Optional, promote project participants to head of results - type: integer - responses: - 200: - description: Users found - 401: - description: Unauthorized - Invalid credentials - 404: - description: User not found - 500: - description: Internal Server Error - """ - page = int(request.args.get("page")) if request.args.get("page") else 1 - project_id = request.args.get("projectId", None, int) - users_dto = UserService.filter_users(username, project_id, page) - return users_dto.to_primitive(), 200 +@router.get("/queries/favorites/") +async def get( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Get projects favorited by a user + --- + tags: + - favorites + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 
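In the user-list handler above, the clause reads bare "except Exception:" with no "as e", yet the next line formats str(e); when a parsing error is actually raised, that line fails with NameError and the request ends as a 500 instead of the intended 400. Binding the exception restores the original behaviour; a minimal sketch of just that clause, with the unchanged parsing elided:

try:
    query = UserSearchQuery()
    query.pagination = strtobool(request.query_params.get("pagination", "True"))
    # ... remaining query-parameter parsing as in the handler above ...
except Exception as e:
    logger.error(f"Error validating request: {str(e)}")
    return JSONResponse(
        content={"Error": "Unable to fetch user list", "SubCode": "InvalidData"},
        status_code=400,
    )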
encoded session token + required: true + type: string + default: Token sessionTokenHere== + responses: + 200: + description: Projects favorited by user + 404: + description: User not found + 500: + description: Internal Server Error + """ + favs_dto = await UserService.get_projects_favorited(user.id, db) + return favs_dto -class UsersQueriesOwnLockedAPI(Resource): - @token_auth.login_required - def get(self): - """ - Gets any locked task on the project for the logged in user - --- - tags: - - mapping - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - responses: - 200: - description: Task user is working on - 401: - description: Unauthorized - Invalid credentials - 404: - description: User is not working on any tasks - 500: - description: Internal Server Error - """ - locked_tasks = ProjectService.get_task_for_logged_in_user( - token_auth.current_user() - ) - return locked_tasks.to_primitive(), 200 +# class UsersQueriesUsernameAPI(): +# @token_auth.login_required +@router.get("/queries/{username}/") +async def get( + request: Request, + username: str, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Get user information by OpenStreetMap username + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: username + in: path + description: Mapper's OpenStreetMap username + required: true + type: string + default: Thinkwhere + responses: + 200: + description: User found + 401: + description: Unauthorized - Invalid credentials + 404: + description: User not found + 500: + description: Internal Server Error + """ + user_dto = await UserService.get_user_dto_by_username(username, user.id, db) + return user_dto -class UsersQueriesOwnLockedDetailsAPI(Resource): - @token_auth.login_required - def get(self): - """ - Gets details of any locked task for the logged in user - --- - tags: - - mapping - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - responses: - 200: - description: Task user is working on - 401: - description: Unauthorized - Invalid credentials - 404: - description: User is not working on any tasks - 500: - description: Internal Server Error - """ - preferred_locale = request.environ.get("HTTP_ACCEPT_LANGUAGE") - locked_tasks = ProjectService.get_task_details_for_logged_in_user( - token_auth.current_user(), preferred_locale - ) - return locked_tasks.to_primitive(), 200 +# class UsersQueriesUsernameFilterAPI(): +# @token_auth.login_required +@router.get("/queries/filter/{username}/") +async def get( + request: Request, + username, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Get paged lists of users matching OpenStreetMap username filter + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: username + in: path + description: Mapper's partial or 
full OpenStreetMap username + type: string + default: ab + - in: query + name: page + description: Page of results user requested + type: integer + - in: query + name: projectId + description: Optional, promote project participants to head of results + type: integer + responses: + 200: + description: Users found + 401: + description: Unauthorized - Invalid credentials + 404: + description: User not found + 500: + description: Internal Server Error + """ + page = ( + int(request.query_params.get("page")) if request.query_params.get("page") else 1 + ) + project_id = request.query_params.get("projectId", None) + if project_id: + project_id = int(project_id) + users_dto = await UserService.filter_users(username, project_id, page, db) + return users_dto -class UsersQueriesFavoritesAPI(Resource): - @token_auth.login_required - def get(self): - """ - Get projects favorited by a user - --- - tags: - - favorites - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - responses: - 200: - description: Projects favorited by user - 404: - description: User not found - 500: - description: Internal Server Error - """ - favs_dto = UserService.get_projects_favorited(token_auth.current_user()) - return favs_dto.to_primitive(), 200 +@router.get("/queries/tasks/locked/") +async def get( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Gets any locked task on the project for the logged in user + --- + tags: + - mapping + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + responses: + 200: + description: Task user is working on + 401: + description: Unauthorized - Invalid credentials + 404: + description: User is not working on any tasks + 500: + description: Internal Server Error + """ + locked_tasks = await ProjectService.get_task_for_logged_in_user(user.id, db) + return locked_tasks.model_dump(by_alias=True) -class UsersQueriesInterestsAPI(Resource): - @token_auth.login_required - def get(self, username): - """ - Get interests by username - --- - tags: - - interests - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: username - in: path - description: Mapper's OpenStreetMap username - required: true - type: string - responses: - 200: - description: User interests returned - 404: - description: User not found - 500: - description: Internal Server Error - """ - user = UserService.get_user_by_username(username) - interests_dto = UserService.get_interests(user) - return interests_dto.to_primitive(), 200 +@router.get("/queries/tasks/locked/details/") +async def get( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Gets details of any locked task for the logged in user + --- + tags: + - mapping + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + responses: + 200: + description: Task user 
is working on + 401: + description: Unauthorized - Invalid credentials + 404: + description: User is not working on any tasks + 500: + description: Internal Server Error + """ + preferred_locale = request.headers.get("accept-language") + locked_tasks = await ProjectService.get_task_details_for_logged_in_user( + user.id, preferred_locale, db + ) + return locked_tasks.model_dump(by_alias=True) -class UsersRecommendedProjectsAPI(Resource): - @token_auth.login_required - def get(self, username): - """ - Get recommended projects for a user - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Accept-Language - description: Language user is requesting - type: string - required: true - default: en - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: username - in: path - description: Mapper's OpenStreetMap username - required: true - type: string - default: Thinkwhere - responses: - 200: - description: Recommended projects found - 401: - description: Unauthorized - Invalid credentials - 403: - description: Forbidden - 404: - description: No recommended projects found - 500: - description: Internal Server Error +@router.get("/{username}/queries/interests/") +async def get( + request: Request, + username: str, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Get interests by username + --- + tags: + - interests + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: username + in: path + description: Mapper's OpenStreetMap username + required: true + type: string + responses: + 200: + description: User interests returned + 404: + description: User not found + 500: + description: Internal Server Error + """ + query = """ + SELECT u.id, u.username, array_agg(i.name) AS interests + FROM users u + LEFT JOIN user_interests ui ON u.id = ui.user_id + LEFT JOIN interests i ON ui.interest_id = i.id + WHERE u.username = :username + GROUP BY u.id, u.username """ - locale = ( - request.environ.get("HTTP_ACCEPT_LANGUAGE") - if request.environ.get("HTTP_ACCEPT_LANGUAGE") - else "en" - ) - user_dto = UserService.get_recommended_projects(username, locale) - return user_dto.to_primitive(), 200 + user = await db.fetch_one(query, {"username": username}) + interests_dto = await UserService.get_interests(user, db) + return interests_dto + + +@router.get("/{username}/recommended-projects/") +async def get( + request: Request, + username, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Get recommended projects for a user + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Accept-Language + description: Language user is requesting + type: string + required: true + default: en + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: username + in: path + description: Mapper's OpenStreetMap username + required: true + type: string + default: Thinkwhere + responses: + 200: + description: Recommended projects found + 401: + description: Unauthorized - Invalid credentials + 403: + description: Forbidden + 404: + description: No recommended projects found + 500: + description: Internal Server Error + """ + 
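The interests-by-username endpoint above now runs a raw SQL query through the databases driver and then rebinds the result to the name user, shadowing the user: AuthUserDTO dependency in the same scope. The query itself is a straightforward use of named-parameter binding with db.fetch_one; renaming the fetched row keeps both values available. A sketch of the same lines with the rename (row is an illustrative name):

# fetch_one(query, values) binds :username safely; no string interpolation needed.
row = await db.fetch_one(query, {"username": username})
interests_dto = await UserService.get_interests(row, db)
return interests_dto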
locale = ( + request.headers.get("accept-language") + if request.headers.get("accept-language") + else "en" + ) + user_dto = await UserService.get_recommended_projects(username, locale, db) + return user_dto.model_dump(by_alias=True) diff --git a/backend/api/users/statistics.py b/backend/api/users/statistics.py index e3e6f9dd8c..712284a21d 100644 --- a/backend/api/users/statistics.py +++ b/backend/api/users/statistics.py @@ -1,182 +1,217 @@ -from json import JSONEncoder from datetime import date, timedelta -from flask_restful import Resource, request + import requests +from databases import Database +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse -from backend.services.users.user_service import UserService -from backend.services.stats_service import StatsService -from backend.services.interests_service import InterestService -from backend.services.users.authentication_service import token_auth from backend.api.utils import validate_date_input -from backend.config import EnvironmentConfig +from backend.config import settings +from backend.db import get_db +from backend.models.dtos.user_dto import AuthUserDTO +from backend.services.interests_service import InterestService +from backend.services.stats_service import StatsService +from backend.services.users.authentication_service import login_required +from backend.services.users.user_service import UserService +router = APIRouter( + prefix="/users", + tags=["users"], + responses={404: {"description": "Not found"}}, +) -class UsersStatisticsAPI(Resource, JSONEncoder): - @token_auth.login_required - def get(self, username): - """ - Get detailed stats about a user by OpenStreetMap username - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: username - in: path - description: Mapper's OpenStreetMap username - required: true - type: string - default: Thinkwhere - responses: - 200: - description: User found - 401: - description: Unauthorized - Invalid credentials - 404: - description: User not found - 500: - description: Internal Server Error - """ - stats_dto = UserService.get_detailed_stats(username) - return stats_dto.to_primitive(), 200 +@router.get("/{username}/statistics/") +async def get( + request: Request, + username: str, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), +): + """ + Get detailed stats about a user by OpenStreetMap username + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: username + in: path + description: Mapper's OpenStreetMap username + required: true + type: string + default: Thinkwhere + responses: + 200: + description: User found + 401: + description: Unauthorized - Invalid credentials + 404: + description: User not found + 500: + description: Internal Server Error + """ + stats_dto = await UserService.get_detailed_stats(username, db) + return stats_dto -class UsersStatisticsInterestsAPI(Resource): - @token_auth.login_required - def get(self, user_id): - """ - Get rate of contributions from a user given their interests - --- - tags: - - interests - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true 
- type: string - default: Token sessionTokenHere== - - name: user_id - in: path - description: User ID - required: true - type: integer - responses: - 200: - description: Interest found - 401: - description: Unauthorized - Invalid credentials - 500: - description: Internal Server Error - """ - rate = InterestService.compute_contributions_rate(user_id) - return rate.to_primitive(), 200 + +@router.get("/{user_id}/statistics/interests/") +async def get( + user_id: int, + db: Database = Depends(get_db), + user: AuthUserDTO = Depends(login_required), +): + """ + Get rate of contributions from a user given their interests + --- + tags: + - interests + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: user_id + in: path + description: User ID + required: true + type: integer + responses: + 200: + description: Interest found + 401: + description: Unauthorized - Invalid credentials + 500: + description: Internal Server Error + """ + rate = await InterestService.compute_contributions_rate(user_id, db) + return rate -class UsersStatisticsAllAPI(Resource): - @token_auth.login_required - def get(self): - """ - Get stats about users registered within a period of time - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - type: string - required: true - default: Token sessionTokenHere== - - in: query - name: startDate - description: Initial date - required: true - type: string - - in: query - name: endDate - description: Final date. - type: string - responses: - 200: - description: User statistics - 400: - description: Bad Request - 401: - description: Request is not authenticated - 500: - description: Internal Server Error - """ - try: - if request.args.get("startDate"): - start_date = validate_date_input(request.args.get("startDate")) - else: - return { +@router.get("/statistics/") +async def get( + request: Request, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + start_date: str = None, + end_date: str = date.today(), +): + """ + Get stats about users registered within a period of time + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + type: string + required: true + default: Token sessionTokenHere== + - in: query + name: startDate + description: Initial date + required: true + type: string + - in: query + name: endDate + description: Final date. 
+ type: string + responses: + 200: + description: User statistics + 400: + description: Bad Request + 401: + description: Request is not authenticated + 500: + description: Internal Server Error + """ + try: + if request.query_params.get("startDate"): + start_date = validate_date_input(request.query_params.get("startDate")) + else: + return JSONResponse( + content={ "Error": "Start date is required", "SubCode": "MissingDate", - }, 400 - end_date = validate_date_input(request.args.get("endDate", date.today())) - if end_date < start_date: - raise ValueError( - "InvalidDateRange- Start date must be earlier than end date" - ) - if (end_date - start_date) > timedelta(days=366 * 3): - raise ValueError( - "InvalidDateRange- Date range can not be bigger than 1 year" - ) + }, + status_code=400, + ) + if request.query_params.get("endDate"): + end_date = validate_date_input(request.query_params.get("endDate")) + else: + end_date: str = date.today() + if end_date < start_date: + raise ValueError( + "InvalidDateRange- Start date must be earlier than end date" + ) + if (end_date - start_date) > timedelta(days=366 * 3): + raise ValueError( + "InvalidDateRange- Date range can not be bigger than 1 year" + ) - stats = StatsService.get_all_users_statistics(start_date, end_date) - return stats.to_primitive(), 200 - except (KeyError, ValueError) as e: - return {"Error": str(e).split("-")[1], "SubCode": str(e).split("-")[0]}, 400 + stats = await StatsService.get_all_users_statistics(start_date, end_date, db) + return stats.model_dump(by_alias=True) + except (KeyError, ValueError) as e: + return JSONResponse( + content={"Error": str(e), "SubCode": str(e).split("-")[0]}, + status_code=400, + ) -class OhsomeProxyAPI(Resource): - @token_auth.login_required - def get(self): - """ - Get HomePage Stats - --- - tags: - - system - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - in: query - name: url - type: string - description: get user stats for osm contributions - responses: - 200: - description: User stats - 500: - description: Internal Server Error - """ - url = request.args.get("url") - if not url: - return {"Error": "URL is None", "SubCode": "URL not provided"}, 400 - try: - headers = {"Authorization": f"Basic {EnvironmentConfig.OHSOME_STATS_TOKEN}"} +@router.get("/statistics/ohsome/") +async def get(request: Request, user: AuthUserDTO = Depends(login_required)): + """ + Get HomePage Stats + --- + tags: + - system + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - in: query + name: url + type: string + description: get user stats for osm contributions + responses: + 200: + description: User stats + 500: + description: Internal Server Error + """ + url = request.query_params.get("url") + if not url: + return JSONResponse( + content={"Error": "URL is None", "SubCode": "URL not provided"}, + status_code=400, + ) + try: + headers = {"Authorization": f"Basic {settings.OHSOME_STATS_TOKEN}"} - # Make the GET request with headers - response = requests.get(url, headers=headers) - return response.json(), 200 - except Exception as e: - return {"Error": str(e), "SubCode": "Error fetching data"}, 400 + # Make the GET request with headers + response = requests.get(url, headers=headers) + return response.json() + except Exception as 
e: + return JSONResponse( + content={"Error": str(e), "SubCode": "Error fetching data"}, status_code=400 + ) diff --git a/backend/api/users/tasks.py b/backend/api/users/tasks.py index e0375024eb..20e506cec7 100644 --- a/backend/api/users/tasks.py +++ b/backend/api/users/tasks.py @@ -1,113 +1,139 @@ -from flask_restful import Resource, request -from dateutil.parser import parse as date_parse +from datetime import datetime -from backend.services.users.authentication_service import token_auth +from databases import Database +from dateutil.parser import parse as date_parse +from fastapi import APIRouter, Depends, Request +from fastapi.responses import JSONResponse + +from backend.db import get_db +from backend.models.dtos.user_dto import AuthUserDTO +from backend.services.users.authentication_service import login_required from backend.services.users.user_service import UserService +router = APIRouter( + prefix="/users", + tags=["users"], + responses={404: {"description": "Not found"}}, +) + -class UsersTasksAPI(Resource): - @token_auth.login_required - def get(self, user_id): - """ - Get a list of tasks a user has interacted with - --- - tags: - - users - produces: - - application/json - parameters: - - in: header - name: Authorization - description: Base64 encoded session token - required: true - type: string - default: Token sessionTokenHere== - - name: user_id - in: path - description: Mapper's OpenStreetMap ID - required: true - type: integer - - in: query - name: status - description: Task Status filter - required: false - type: string - default: null - - in: query - name: project_status - description: Project Status filter - required: false - type: string - default: null - - in: query - name: project_id - description: Project id - required: false - type: integer - default: null - - in: query - name: start_date - description: Date to filter as minimum - required: false - type: string - default: null - - in: query - name: end_date - description: Date to filter as maximum - required: false - type: string - default: null - - in: query - name: sort_by - description: - criteria to sort by. The supported options are action_date, -action_date, project_id, -project_id. - The default value is -action_date.
- required: false - type: string - - in: query - name: page - description: Page of results user requested - type: integer - - in: query - name: page_size - description: Size of page, defaults to 10 - type: integer - responses: - 200: - description: Mapped projects found - 404: - description: No mapped projects found - 500: - description: Internal Server Error - """ - try: - user = UserService.get_user_by_id(user_id) - status = request.args.get("status") - project_status = request.args.get("project_status") - project_id = int(request.args.get("project_id", 0)) - start_date = ( - date_parse(request.args.get("start_date")) - if request.args.get("start_date") - else None - ) - end_date = ( - date_parse(request.args.get("end_date")) - if request.args.get("end_date") - else None - ) - sort_by = request.args.get("sort_by", "-action_date") +@router.get("/{user_id}/tasks/") +async def get( + request: Request, + user_id: int, + user: AuthUserDTO = Depends(login_required), + db: Database = Depends(get_db), + status: str = None, + project_status: str = None, + project_id: int = None, + start_date: datetime = None, + end_date: datetime = None, + sort_by: str = "-action_date", +): + """ + Get a list of tasks a user has interacted with + --- + tags: + - users + produces: + - application/json + parameters: + - in: header + name: Authorization + description: Base64 encoded session token + required: true + type: string + default: Token sessionTokenHere== + - name: user_id + in: path + description: Mapper's OpenStreetMap ID + required: true + type: integer + - in: query + name: status + description: Task Status filter + required: false + type: string + default: null + - in: query + name: project_status + description: Project Status filter + required: false + type: string + default: null + - in: query + name: project_id + description: Project id + required: false + type: integer + default: null + - in: query + name: start_date + description: Date to filter as minimum + required: false + type: string + default: null + - in: query + name: end_date + description: Date to filter as maximum + required: false + type: string + default: null + - in: query + name: sort_by + description: + criteria to sort by. The supported options are action_date, -action_date, project_id, -project_id. + The default value is -action_date. 
+ required: false + type: string + - in: query + name: page + description: Page of results user requested + type: integer + - in: query + name: page_size + description: Size of page, defaults to 10 + type: integer + responses: + 200: + description: Mapped projects found + 404: + description: No mapped projects found + 500: + description: Internal Server Error + """ + try: + user = await UserService.get_user_by_id(user_id, db) + status = request.query_params.get("status") + project_status = request.query_params.get("project_status") + project_id = int(request.query_params.get("project_id", 0)) + start_date = ( + date_parse(request.query_params.get("start_date")) + if request.query_params.get("start_date") + else None + ) + end_date = ( + date_parse(request.query_params.get("end_date")) + if request.query_params.get("end_date") + else None + ) + sort_by = request.query_params.get("sort_by", "-action_date") - tasks = UserService.get_tasks_dto( - user.id, - project_id=project_id, - project_status=project_status, - task_status=status, - start_date=start_date, - end_date=end_date, - page=request.args.get("page", None, type=int), - page_size=request.args.get("page_size", 10, type=int), - sort_by=sort_by, - ) - return tasks.to_primitive(), 200 - except ValueError: - return {"tasks": [], "pagination": {"total": 0}}, 200 + tasks = await UserService.get_tasks_dto( + user.id, + project_id=project_id, + project_status=project_status, + task_status=status, + start_date=start_date, + end_date=end_date, + page=int(request.query_params.get("page", 1)), + page_size=int(request.query_params.get("page_size", 10)), + sort_by=sort_by, + db=db, + ) + return tasks + except ValueError: + print("InvalidDateRange- Date range can not be bigger than 1 year") + return JSONResponse( + content={"tasks": [], "pagination": {"total": 0}}, status_code=200 + ) diff --git a/backend/config.py b/backend/config.py index fe3b7ae40a..f0731def8b 100644 --- a/backend/config.py +++ b/backend/config.py @@ -1,128 +1,153 @@ +import json import logging import os +from functools import lru_cache +from typing import Optional + from dotenv import load_dotenv +from pydantic import PostgresDsn, ValidationInfo, field_validator +from pydantic_settings import BaseSettings -class EnvironmentConfig: +class Settings(BaseSettings): """Base class for configuration.""" """ Most settings can be defined through environment variables. 
""" + class Config: + ignored_types = (type(json),) + # Load configuration from file load_dotenv( os.path.normpath( os.path.join(os.path.dirname(__file__), "..", "tasking-manager.env") ) ) + APP_NAME: str = "Tasking Manager" + DEBUG: bool = False + EXTRA_CORS_ORIGINS: list = [] # The base url the application is reachable - APP_BASE_URL = os.getenv("TM_APP_BASE_URL", "http://127.0.0.1:5000/").rstrip("/") + APP_BASE_URL: str = os.getenv("TM_APP_BASE_URL", "http://127.0.0.1:5000/").rstrip( + "/" + ) - API_VERSION = os.getenv("TM_APP_API_VERSION", "v2") - ORG_CODE = os.getenv("TM_ORG_CODE", "HOT") - ORG_NAME = os.getenv("TM_ORG_NAME", "Humanitarian OpenStreetMap Team") - ORG_LOGO = os.getenv( + API_VERSION: str = os.getenv("TM_APP_API_VERSION", "v2") + ORG_CODE: str = os.getenv("TM_ORG_CODE", "HOT") + ORG_NAME: str = os.getenv("TM_ORG_NAME", "Humanitarian OpenStreetMap Team") + ORG_LOGO: str = os.getenv( "TM_ORG_LOGO", "https://cdn.hotosm.org/tasking-manager/uploads/1588741335578_hot-logo.png", ) - ENVIRONMENT = os.getenv("TM_ENVIRONMENT", "") + ENVIRONMENT: str = os.getenv("TM_ENVIRONMENT", "") # The default tag used in the OSM changeset comment - DEFAULT_CHANGESET_COMMENT = os.getenv( + DEFAULT_CHANGESET_COMMENT: str = os.getenv( "TM_DEFAULT_CHANGESET_COMMENT", "#hot-tm-stage-project" ) # The address to use as the sender on auto generated emails - EMAIL_FROM_ADDRESS = os.getenv("TM_EMAIL_FROM_ADDRESS", "noreply@hotosmmail.org") + EMAIL_FROM_ADDRESS: str = os.getenv( + "TM_EMAIL_FROM_ADDRESS", "noreply@hotosmmail.org" + ) # The address to use as the receiver in contact form. - EMAIL_CONTACT_ADDRESS = os.getenv("TM_EMAIL_CONTACT_ADDRESS", "sysadmin@hotosm.org") + EMAIL_CONTACT_ADDRESS: str = os.getenv( + "TM_EMAIL_CONTACT_ADDRESS", "sysadmin@hotosm.org" + ) # A freely definable secret key for connecting the front end with the back end - SECRET_KEY = os.getenv("TM_SECRET", None) + SECRET_KEY: str = os.getenv("TM_SECRET", None) # OSM API, Nomimatim URLs - OSM_SERVER_URL = os.getenv("OSM_SERVER_URL", "https://www.openstreetmap.org") - OSM_NOMINATIM_SERVER_URL = os.getenv( + OSM_SERVER_URL: str = os.getenv("OSM_SERVER_URL", "https://www.openstreetmap.org") + OSM_NOMINATIM_SERVER_URL: str = os.getenv( "OSM_NOMINATIM_SERVER_URL", "https://nominatim.openstreetmap.org" ) - # Database connection - POSTGRES_USER = os.getenv("POSTGRES_USER", "postgres") - POSTGRES_PASSWORD = os.getenv("POSTGRES_PASSWORD", None) - POSTGRES_ENDPOINT = os.getenv("POSTGRES_ENDPOINT", "localhost") - POSTGRES_DB = os.getenv("POSTGRES_DB", "postgres") - POSTGRES_PORT = os.getenv("POSTGRES_PORT", "5432") - - # Assamble the database uri - if os.getenv("TM_DB", False): - SQLALCHEMY_DATABASE_URI = os.getenv("TM_DB", None) - elif os.getenv("DB_CONNECT_PARAM_JSON", False): - """ - This section reads JSON formatted Database connection parameters passed - from AWS Secrets Manager with the ENVVAR key `DB_CONNECT_PARAM_JSON` - and forms a valid SQLALCHEMY DATABASE URI - """ - import json - - _params = json.loads(os.getenv("DB_CONNECT_PARAM_JSON", None)) - SQLALCHEMY_DATABASE_URI = ( - f"postgresql://{_params.get('username')}" - + f":{_params.get('password')}" - + f"@{_params.get('host')}" - + f":{_params.get('port')}" - + f"/{_params.get('dbname')}" - ) - else: - SQLALCHEMY_DATABASE_URI = ( - f"postgresql://{POSTGRES_USER}" - + f":{POSTGRES_PASSWORD}" - + f"@{POSTGRES_ENDPOINT}:" - + f"{POSTGRES_PORT}" - + f"/{POSTGRES_DB}" + POSTGRES_USER: str = os.getenv("POSTGRES_USER", "postgres") + POSTGRES_PASSWORD: str = 
os.getenv("POSTGRES_PASSWORD", None) + POSTGRES_ENDPOINT: str = os.getenv("POSTGRES_ENDPOINT", "localhost") + POSTGRES_DB: str = os.getenv("POSTGRES_DB", "postgres") + POSTGRES_PORT: str = os.getenv("POSTGRES_PORT", "5432") + + SQLALCHEMY_DATABASE_URI: Optional[PostgresDsn] = None + + @field_validator("SQLALCHEMY_DATABASE_URI", mode="before") + @classmethod + def assemble_db_connection( + cls, v: Optional[str], info: ValidationInfo + ) -> Optional[str]: + """Build Postgres connection from environment variables or JSON config.""" + if v: + return v + + if os.getenv("TM_DB"): + return os.getenv("TM_DB") + + if os.getenv("DB_CONNECT_PARAM_JSON"): + params = json.loads(os.getenv("DB_CONNECT_PARAM_JSON")) + return PostgresDsn.build( + scheme="postgresql+asyncpg", + username=params.get("username"), + password=params.get("password"), + host=params.get("host"), + port=int(params.get("port")), + path=f"{params.get('dbname')}", + ) + return PostgresDsn.build( + scheme="postgresql+asyncpg", + username=info.data.get("POSTGRES_USER"), + password=info.data.get("POSTGRES_PASSWORD"), + host=info.data.get("POSTGRES_ENDPOINT"), + port=int(info.data.get("POSTGRES_PORT")), + path=f"{info.data.get('POSTGRES_DB')}", ) # Logging settings - LOG_LEVEL = os.getenv("TM_LOG_LEVEL", logging.DEBUG) - LOG_DIR = os.getenv("TM_LOG_DIR", "/home/appuser/logs") + LOG_LEVEL: int = os.getenv("TM_LOG_LEVEL", logging.DEBUG) + LOG_DIR: str = os.getenv("TM_LOG_DIR", "/home/appuser/logs") # Mapper Level values represent number of OSM changesets - MAPPER_LEVEL_INTERMEDIATE = int(os.getenv("TM_MAPPER_LEVEL_INTERMEDIATE", 250)) - MAPPER_LEVEL_ADVANCED = int(os.getenv("TM_MAPPER_LEVEL_ADVANCED", 500)) + MAPPER_LEVEL_INTERMEDIATE: int = int(os.getenv("TM_MAPPER_LEVEL_INTERMEDIATE", 250)) + MAPPER_LEVEL_ADVANCED: int = int(os.getenv("TM_MAPPER_LEVEL_ADVANCED", 500)) # Time to wait until task auto-unlock (e.g. '2h' or '7d' or '30m' or '1h30m') - TASK_AUTOUNLOCK_AFTER = os.getenv("TM_TASK_AUTOUNLOCK_AFTER", "2h") + TASK_AUTOUNLOCK_AFTER: str = os.getenv("TM_TASK_AUTOUNLOCK_AFTER", "2h") # Configuration for sending emails - MAIL_SERVER = os.getenv("TM_SMTP_HOST", None) - MAIL_PORT = os.getenv("TM_SMTP_PORT", "587") - MAIL_USE_TLS = bool(int(os.getenv("TM_SMTP_USE_TLS", True))) - MAIL_USE_SSL = bool(int(os.getenv("TM_SMTP_USE_SSL", False))) - MAIL_USERNAME = os.getenv("TM_SMTP_USER", None) - MAIL_PASSWORD = os.getenv("TM_SMTP_PASSWORD", None) - MAIL_DEFAULT_SENDER = os.getenv("TM_EMAIL_FROM_ADDRESS", "noreply@hotosmmail.org") - MAIL_DEBUG = True if LOG_LEVEL == "DEBUG" else False + MAIL_SERVER: Optional[str] = os.getenv("TM_SMTP_HOST", None) + MAIL_PORT: str = os.getenv("TM_SMTP_PORT", "587") + MAIL_USE_TLS: bool = bool(int(os.getenv("TM_SMTP_USE_TLS", True))) + MAIL_USE_SSL: bool = bool(int(os.getenv("TM_SMTP_USE_SSL", False))) + MAIL_USERNAME: Optional[str] = os.getenv("TM_SMTP_USER", None) + MAIL_PASSWORD: Optional[str] = os.getenv("TM_SMTP_PASSWORD", None) + MAIL_DEFAULT_SENDER: str = os.getenv( + "TM_EMAIL_FROM_ADDRESS", "noreply@hotosmmail.org" + ) + MAIL_DEBUG: bool = True if LOG_LEVEL == "DEBUG" else False if os.getenv("SMTP_CREDENTIALS", False): """ This section reads JSON formatted SMTP connection parameters passed from AWS Secrets Manager with the ENVVAR key `SMTP_CREDENTIALS`. 
""" - import json - _params = json.loads(os.getenv("SMTP_CREDENTIALS", None)) - MAIL_SERVER = _params.get("SMTP_HOST", None) - MAIL_PORT = _params.get("SMTP_PORT", "587") - MAIL_USE_TLS = bool(int(_params.get("SMTP_USE_TLS", True))) - MAIL_USE_SSL = bool(int(_params.get("SMTP_USE_SSL", False))) - MAIL_USERNAME = _params.get("SMTP_USER", None) - MAIL_PASSWORD = _params.get("SMTP_PASSWORD", None) + _params: dict = json.loads(os.getenv("SMTP_CREDENTIALS", None)) + MAIL_SERVER: str = _params.get("SMTP_HOST", None) + MAIL_PORT: str = _params.get("SMTP_PORT", "587") + MAIL_USE_TLS: bool = bool(int(_params.get("SMTP_USE_TLS", True))) + MAIL_USE_SSL: bool = bool(int(_params.get("SMTP_USE_SSL", False))) + MAIL_USERNAME: str = _params.get("SMTP_USER", None) + MAIL_PASSWORD: str = _params.get("SMTP_PASSWORD", None) # If disabled project update emails will not be sent. - SEND_PROJECT_EMAIL_UPDATES = bool(os.getenv("TM_SEND_PROJECT_EMAIL_UPDATES", True)) + SEND_PROJECT_EMAIL_UPDATES: bool = bool( + os.getenv("TM_SEND_PROJECT_EMAIL_UPDATES", True) + ) # Languages offered by the Tasking Manager # Please note that there must be exactly the same number of Codes as languages. - SUPPORTED_LANGUAGES = { + SUPPORTED_LANGUAGES: dict = { "codes": os.getenv( "TM_SUPPORTED_LANGUAGES_CODES", ", ".join( @@ -192,65 +217,75 @@ class EnvironmentConfig: } # Connection to OSM authentification system - OAUTH_API_URL = "{}/api/0.6/".format(OSM_SERVER_URL) - OAUTH_CLIENT_ID = os.getenv("TM_CLIENT_ID", None) - OAUTH_CLIENT_SECRET = os.getenv("TM_CLIENT_SECRET", None) - OAUTH_SCOPE = os.getenv("TM_SCOPE", "read_prefs write_api") - OAUTH_REDIRECT_URI = os.getenv("TM_REDIRECT_URI", None) + OAUTH_API_URL: str = "{}/api/0.6/".format(OSM_SERVER_URL) + OAUTH_CLIENT_ID: str = os.getenv("TM_CLIENT_ID", None) + OAUTH_CLIENT_SECRET: str = os.getenv("TM_CLIENT_SECRET", None) + OAUTH_SCOPE: str = os.getenv("TM_SCOPE", "read_prefs write_api") + OAUTH_REDIRECT_URI: str = os.getenv("TM_REDIRECT_URI", None) if os.getenv("OAUTH2_APP_CREDENTIALS", False): """ This section reads JSON formatted OAuth2 app credentials passed from AWS Secrets Manager with the ENVVAR key `OAUTH2_APP_CREDENTIALS`. 
""" - import json - _params = json.loads(os.getenv("OAUTH2_APP_CREDENTIALS", None)) - OAUTH_CLIENT_ID = _params.get("CLIENT_ID", None) - OAUTH_CLIENT_SECRET = _params.get("CLIENT_SECRET", None) - OAUTH_REDIRECT_URI = _params.get("REDIRECT_URI", None) - OAUTH_SCOPE = _params.get("ACCESS_SCOPE", "read_prefs write_api") + _params: dict = json.loads(os.getenv("OAUTH2_APP_CREDENTIALS", None)) + OAUTH_CLIENT_ID: str = _params.get("CLIENT_ID", None) + OAUTH_CLIENT_SECRET: str = _params.get("CLIENT_SECRET", None) + OAUTH_REDIRECT_URI: str = _params.get("REDIRECT_URI", None) + OAUTH_SCOPE: str = _params.get("ACCESS_SCOPE", "read_prefs write_api") # Some more definitions (not overridable) - SQLALCHEMY_ENGINE_OPTIONS = { + SQLALCHEMY_ENGINE_OPTIONS: dict = { "pool_size": 10, "max_overflow": 10, } - SEND_FILE_MAX_AGE_DEFAULT = 0 - SQLALCHEMY_TRACK_MODIFICATIONS = False + SEND_FILE_MAX_AGE_DEFAULT: int = 0 + SQLALCHEMY_TRACK_MODIFICATIONS: bool = False # Image upload Api - IMAGE_UPLOAD_API_KEY = os.getenv("TM_IMAGE_UPLOAD_API_KEY", None) - IMAGE_UPLOAD_API_URL = os.getenv("TM_IMAGE_UPLOAD_API_URL", None) + IMAGE_UPLOAD_API_KEY: Optional[str] = os.getenv("TM_IMAGE_UPLOAD_API_KEY", None) + IMAGE_UPLOAD_API_URL: Optional[str] = os.getenv("TM_IMAGE_UPLOAD_API_URL", None) if os.getenv("IMAGE_UPLOAD_CREDENTIALS", False): """ This section reads JSON formatted Image Upload credentials passed from AWS Secrets Manager with the ENVVAR key `IMAGE_UPLOAD_CREDENTIALS`. """ - import json - _params = json.loads(os.getenv("IMAGE_UPLOAD_CREDENTIALS"), None) - IMAGE_UPLOAD_API_KEY = _params.get("IMAGE_UPLOAD_API_KEY", None) - IMAGE_UPLOAD_API_URL = _params.get("IMAGE_UPLOAD_API_URL", None) + _params: dict = json.loads(os.getenv("IMAGE_UPLOAD_CREDENTIALS"), None) + IMAGE_UPLOAD_API_KEY: str = _params.get("IMAGE_UPLOAD_API_KEY", None) + IMAGE_UPLOAD_API_URL: str = _params.get("IMAGE_UPLOAD_API_URL", None) # Sentry backend DSN - SENTRY_BACKEND_DSN = os.getenv("TM_SENTRY_BACKEND_DSN", None) + SENTRY_BACKEND_DSN: str = os.getenv("TM_SENTRY_BACKEND_DSN", None) # Ohsome Stats Token - OHSOME_STATS_TOKEN = os.getenv("OHSOME_STATS_TOKEN", None) + OHSOME_STATS_TOKEN: str = os.getenv("OHSOME_STATS_TOKEN", None) + + +@lru_cache +def get_settings(): + """Cache settings when accessed throughout app.""" + _settings = Settings() + if _settings.DEBUG: + print(f"Loaded settings: {_settings.model_dump()}") + return _settings + + +settings = get_settings() -class TestEnvironmentConfig(EnvironmentConfig): - POSTGRES_TEST_DB = os.getenv("POSTGRES_TEST_DB", None) +class TestEnvironmentConfig(Settings): + POSTGRES_TEST_DB: str = os.getenv("POSTGRES_TEST_DB", None) - ENVIRONMENT = "test" + ENVIRONMENT: str = "test" - SQLALCHEMY_DATABASE_URI = ( - f"postgresql://{EnvironmentConfig.POSTGRES_USER}" - + f":{EnvironmentConfig.POSTGRES_PASSWORD}" - + f"@{EnvironmentConfig.POSTGRES_ENDPOINT}:" - + f"{EnvironmentConfig.POSTGRES_PORT}" + SQLALCHEMY_DATABASE_URI: str = ( + f"postgresql://{settings.POSTGRES_USER}" + + f":{settings.POSTGRES_PASSWORD}" + + f"@{settings.POSTGRES_ENDPOINT}:" + + f"{settings.POSTGRES_PORT}" + f"/{POSTGRES_TEST_DB}" ) - LOG_LEVEL = "DEBUG" + LOG_LEVEL: str = "DEBUG" diff --git a/backend/cron.py b/backend/cron.py new file mode 100644 index 0000000000..a087178c65 --- /dev/null +++ b/backend/cron.py @@ -0,0 +1,154 @@ +import datetime + +from apscheduler.schedulers.asyncio import AsyncIOScheduler +from apscheduler.triggers.cron import CronTrigger +from apscheduler.triggers.interval import IntervalTrigger +from loguru import logger + 
+from backend.db import db_connection +from backend.models.postgis.task import Task + + +async def auto_unlock_tasks(): + async with db_connection.database.connection() as conn: + # Identify distinct project IDs that were touched in the last 2 hours + two_hours_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=120) + projects_query = """ + SELECT DISTINCT project_id + FROM task_history + WHERE action_date > :two_hours_ago + """ + projects = await conn.fetch_all( + query=projects_query, values={"two_hours_ago": two_hours_ago} + ) + for project in projects: + project_id = project["project_id"] + logger.info(f"Processing project_id: {project_id}") + await Task.auto_unlock_tasks(project_id, conn) + + +async def update_all_project_stats(): + """ + Async function to update project statistics in the database. + """ + async with db_connection.database.connection() as conn: + logger.info("Started updating project stats.") + await conn.execute("UPDATE users SET projects_mapped = NULL;") + projects_query = "SELECT DISTINCT id FROM projects;" + projects = await conn.fetch_all(query=projects_query) + for project in projects: + project_id = project["id"] + logger.info(f"Processing project ID: {project_id}") + + # Update project statistics + await conn.execute( + """ + UPDATE projects + SET total_tasks = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id), + tasks_mapped = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id AND task_status = 2), + tasks_validated = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id AND task_status = 4), + tasks_bad_imagery = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id AND task_status = 6) + WHERE id = :project_id; + """, + {"project_id": project_id}, + ) + + # Update user stats + await conn.execute( + """ + UPDATE users + SET projects_mapped = array_append(projects_mapped, :project_id) + WHERE id IN ( + SELECT DISTINCT user_id + FROM task_history + WHERE action = 'STATE_CHANGE' AND project_id = :project_id + ); + """, + {"project_id": project_id}, + ) + + logger.info("Finished updating project stats.") + + +async def update_recent_updated_project_stats(): + """ + Async function to update project statistics for the recently updated projects in the database. 
+ """ + async with db_connection.database.connection() as conn: + logger.info("Started updating recently updated projects' project stats.") + + # Calculate the cutoff date for the past week + one_week_ago = datetime.datetime.utcnow() - datetime.timedelta(days=7) + + # Fetch projects updated in the past week + projects_query = """ + SELECT DISTINCT id + FROM projects + WHERE last_updated > :one_week_ago; + """ + projects = await conn.fetch_all( + query=projects_query, values={"one_week_ago": one_week_ago} + ) + for project in projects: + project_id = project["id"] + logger.info(f"Processing project ID: {project_id}") + + # Update project statistics + await conn.execute( + """ + UPDATE projects + SET total_tasks = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id), + tasks_mapped = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id AND task_status = 2), + tasks_validated = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id AND task_status = 4), + tasks_bad_imagery = (SELECT COUNT(*) FROM tasks WHERE project_id = :project_id AND task_status = 6) + WHERE id = :project_id; + """, + {"project_id": project_id}, + ) + + # Update user stats + await conn.execute( + """ + UPDATE users + SET projects_mapped = + CASE + WHEN :project_id = ANY(projects_mapped) THEN projects_mapped + ELSE array_append(projects_mapped, :project_id) + END + WHERE id IN ( + SELECT DISTINCT user_id + FROM task_history + WHERE action = 'STATE_CHANGE' AND project_id = :project_id + ); + """, + {"project_id": project_id}, + ) + + logger.info("Finished updating project stats.") + + +def setup_cron_jobs(): + scheduler = AsyncIOScheduler() + + scheduler.add_job( + auto_unlock_tasks, + IntervalTrigger(minutes=120), + id="auto_unlock_tasks", + replace_existing=True, + ) + + scheduler.add_job( + update_all_project_stats, + CronTrigger(hour=0, minute=0), # Cron trigger for 12:00 AM + id="update_project_stats", + replace_existing=True, + ) + + scheduler.add_job( + update_recent_updated_project_stats, + CronTrigger(minute=0), # Cron trigger for every hour + id="update_recent_updated_project_stats", + replace_existing=True, + ) + scheduler.start() + logger.info("Scheduler initialized: auto_unlock_tasks runs every 2 hours.") diff --git a/backend/db.py b/backend/db.py new file mode 100644 index 0000000000..fdaaa4b825 --- /dev/null +++ b/backend/db.py @@ -0,0 +1,31 @@ +from databases import Database +from sqlalchemy.orm import declarative_base +from backend.config import settings + +Base = declarative_base() + + +class DatabaseConnection: + """Manages database connection (encode databases)""" + + def __init__(self): + self.database = Database( + settings.SQLALCHEMY_DATABASE_URI.unicode_string(), min_size=5, max_size=20 + ) + + async def connect(self): + """Connect to the database.""" + await self.database.connect() + + async def disconnect(self): + """Disconnect from the database.""" + await self.database.disconnect() + + +db_connection = DatabaseConnection() + + +async def get_db(): + """Get the database connection from the pool.""" + async with db_connection.database.connection() as connection: + yield connection diff --git a/backend/gunicorn.py b/backend/gunicorn.py deleted file mode 100644 index def4d6e5b1..0000000000 --- a/backend/gunicorn.py +++ /dev/null @@ -1,8 +0,0 @@ -import os - -bind = "0.0.0.0:5000" -worker_class = "gevent" -workers = (os.cpu_count() or 1) * 2 + 1 -threads = (os.cpu_count() or 1) * 2 + 1 -preload = True -timeout = 180 diff --git a/backend/main.py b/backend/main.py new file mode 100644 
index 0000000000..090f79b2d9 --- /dev/null +++ b/backend/main.py @@ -0,0 +1,164 @@ +import logging +import sys +from contextlib import asynccontextmanager + +from fastapi import FastAPI, HTTPException, Request +from fastapi.middleware.cors import CORSMiddleware +from fastapi.responses import HTMLResponse, JSONResponse, RedirectResponse +from loguru import logger as log +from pyinstrument import Profiler +from starlette.middleware.authentication import AuthenticationMiddleware + +from backend.config import settings +from backend.cron import setup_cron_jobs +from backend.db import db_connection +from backend.routes import add_api_end_points +from backend.services.users.authentication_service import TokenAuthBackend + + +def get_application() -> FastAPI: + """Get the FastAPI app instance, with settings.""" + + @asynccontextmanager + async def lifespan(app): + await db_connection.connect() + yield + await db_connection.disconnect() + + _app = FastAPI( + lifespan=lifespan, + title=settings.APP_NAME, + description="HOTOSM Tasking Manager", + version="0.1.0", + license_info={ + "name": "BSD 2-Clause", + "url": "https://raw.githubusercontent.com/hotosm/tasking-manager/develop/LICENSE.md", + }, + debug=settings.DEBUG, + root_path=settings.APP_BASE_URL, + openapi_url="/api/openapi.json", + docs_url="/api/docs", + ) + + # Set custom logger + # _app.logger = get_logger() + + # Custom exception handler for 401 errors + @_app.exception_handler(HTTPException) + async def custom_http_exception_handler(request: Request, exc: HTTPException): + if exc.status_code == 401 and "InvalidToken" in exc.detail.get("SubCode", ""): + return JSONResponse( + content={ + "Error": exc.detail["Error"], + "SubCode": exc.detail["SubCode"], + }, + status_code=exc.status_code, + headers={"WWW-Authenticate": "Bearer"}, + ) + return JSONResponse( + status_code=exc.status_code, + content={"detail": exc.detail}, + ) + + PROFILING = True # Set this from a settings model + + if PROFILING: + + @_app.middleware("http") + async def pyinstrument_middleware(request, call_next): + profiling = request.query_params.get("profile", False) + if profiling: + profiler = Profiler(async_mode=True) + profiler.start() + await call_next(request) + profiler.stop() + return HTMLResponse(profiler.output_html()) + else: + return await call_next(request) + + _app.add_middleware( + CORSMiddleware, + allow_origins=settings.EXTRA_CORS_ORIGINS, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], + expose_headers=["Content-Disposition"], + ) + + _app.add_middleware( + AuthenticationMiddleware, backend=TokenAuthBackend(), on_error=None + ) + setup_cron_jobs() + add_api_end_points(_app) + return _app + + +class InterceptHandler(logging.Handler): + """Intercept python standard lib logging.""" + + def emit(self, record): + """Retrieve context where the logging call occurred. + + This happens to be in the 6th frame upward. + """ + logger_opt = log.opt(depth=6, exception=record.exc_info) + logger_opt.log(record.levelno, record.getMessage()) + + +def get_logger(): + """Override FastAPI logger with custom loguru.""" + # Hook all other loggers into ours + logger_name_list = [name for name in logging.root.manager.loggerDict] + for logger_name in logger_name_list: + logging.getLogger(logger_name).setLevel(10) + logging.getLogger(logger_name).handlers = [] + if logger_name == "sqlalchemy": + # Don't hook sqlalchemy, very verbose + continue + if "." 
not in logger_name: + logging.getLogger(logger_name).addHandler(InterceptHandler()) + + log.remove() + log.add( + sys.stderr, + level=settings.LOG_LEVEL, + format=( + "{time:YYYY-MM-DD HH:mm:ss.SSS} | {level: <8} " + "| {name}:{function}:{line} | {message}" + ), + enqueue=True, # Run async / non-blocking + colorize=True, + backtrace=True, # More detailed tracebacks + catch=True, # Prevent app crashes + ) + + # Only log to file in production + if not settings.DEBUG: + log.add( + "/opt/logs/tm.json", + level=settings.LOG_LEVEL, + enqueue=True, + serialize=True, # JSON format + rotation="00:00", # New file at midnight + retention="10 days", + # format=log_json_format, # JSON format func + ) + + log.add( + "/opt/logs/create_project.json", + level=settings.LOG_LEVEL, + enqueue=True, + serialize=True, + rotation="00:00", + retention="10 days", + filter=lambda record: record["extra"].get("task") == "create_project", + ) + + +api = get_application() + + +@api.get("/") +async def home(): + """Redirect home to docs.""" + return RedirectResponse("/api/docs") diff --git a/backend/models/dtos/__init__.py b/backend/models/dtos/__init__.py index 457fd2410a..f1c84a7d22 100644 --- a/backend/models/dtos/__init__.py +++ b/backend/models/dtos/__init__.py @@ -1,6 +1,7 @@ from functools import wraps -from flask import request -from schematics.exceptions import DataError + +# from flask import request +# from schematics.exceptions import DataError from backend.exceptions import BadRequest diff --git a/backend/models/dtos/application_dto.py b/backend/models/dtos/application_dto.py index 0fe23a2900..b3c2be555a 100644 --- a/backend/models/dtos/application_dto.py +++ b/backend/models/dtos/application_dto.py @@ -1,21 +1,18 @@ -from schematics.types import IntType, ListType, ModelType, StringType, UTCDateTimeType -from schematics import Model +from pydantic import BaseModel, Field +from datetime import datetime +from typing import List, Optional -class ApplicationDTO(Model): +class ApplicationDTO(BaseModel): """Describes JSON model used for creating grids""" - id = IntType(required=True, serialized_name="keyId") - user = IntType(required=True, serialized_name="userId") - app_key = StringType(required=True, serialized_name="applicationkey") - created = UTCDateTimeType(required=True, serialized_name="createdDate") + id: Optional[int] = Field(None, alias="keyId") + user: Optional[int] = Field(None, alias="userId") + app_key: Optional[str] = Field(None, alias="applicationkey") + created: Optional[datetime] = Field(None, alias="createdDate") -class ApplicationsDTO(Model): +class ApplicationsDTO(BaseModel): """Describes an array of Application DTOs""" - def __init__(self): - super().__init__() - self.applications = [] - - applications = ListType(ModelType(ApplicationDTO)) + applications: List[ApplicationDTO] = Field([], alias="applications") diff --git a/backend/models/dtos/banner_dto.py b/backend/models/dtos/banner_dto.py index aed6a27b3c..5061795bd3 100644 --- a/backend/models/dtos/banner_dto.py +++ b/backend/models/dtos/banner_dto.py @@ -1,12 +1,9 @@ -from schematics import Model -from schematics.types import ( - BooleanType, - StringType, -) +from pydantic import BaseModel, Field +from typing import Optional -class BannerDTO(Model): +class BannerDTO(BaseModel): """Describes a JSON model for a banner""" - message = StringType(required=True, max_length=255) - visible = BooleanType() + message: str = Field(max_length=255) + visible: Optional[bool] = True diff --git a/backend/models/dtos/campaign_dto.py 
b/backend/models/dtos/campaign_dto.py index b4837b0b2d..78e9621afc 100644 --- a/backend/models/dtos/campaign_dto.py +++ b/backend/models/dtos/campaign_dto.py @@ -1,51 +1,65 @@ -from schematics import Model -from schematics.types import StringType, IntType, ListType, ModelType from backend.models.dtos.organisation_dto import OrganisationDTO -from schematics.exceptions import ValidationError + +# from schematics.exceptions import ValidationError +from pydantic import BaseModel, Field +from typing import List, Optional +from pydantic.functional_validators import field_validator def is_existent(value): if value.strip() == "": - raise ValidationError("Empty campaign name string") + raise ValueError("Empty campaign name string") return value -class NewCampaignDTO(Model): +class NewCampaignDTO(BaseModel): """Describes JSON model to create a campaign""" - name = StringType(serialize_when_none=False, validators=[is_existent]) - logo = StringType(serialize_when_none=False) - url = StringType(serialize_when_none=False) - description = StringType(serialize_when_none=False) - organisations = ListType(IntType, serialize_when_none=False) + name: str = Field(serialize_when_none=False) + logo: Optional[str] = Field(None, serialize_when_none=False) + url: Optional[str] = Field(None, serialize_when_none=False) + description: Optional[str] = Field(None, serialize_when_none=False) + organisations: Optional[List[int]] = Field(None, serialize_when_none=False) + + @field_validator("name", mode="before") + def validate_type(cls, value): + if value is None: + return value + return is_existent(value) -class CampaignDTO(Model): - """Describes JSON model for an existing campaign""" +class CampaignDTO(BaseModel): + id: Optional[int] = None + name: Optional[str] = None + logo: Optional[str] = None + url: Optional[str] = None + description: Optional[str] = None + organisations: List[OrganisationDTO] = Field(default=None, alias="organisations") - id = IntType(serialize_when_none=False) - name = StringType(serialize_when_none=False) - logo = StringType(serialize_when_none=False) - url = StringType(serialize_when_none=False) - description = StringType(serialize_when_none=False) - organisations = ListType(ModelType(OrganisationDTO), serialize_when_none=False) + class Config: + populate_by_name = True -class CampaignProjectDTO(Model): +class CampaignProjectDTO(BaseModel): """DTO used to define available campaign connected projects""" - project_id = IntType() - campaign_id = IntType() + project_id: int + campaign_id: int -class CampaignOrganisationDTO(Model): +class CampaignOrganisationDTO(BaseModel): """DTO used to define available campaign connected projects""" - organisation_id = IntType() - campaign_id = IntType() + organisation_id: int + campaign_id: int + + +class ListCampaignDTO(BaseModel): + id: Optional[int] = None + name: Optional[str] = None -class CampaignListDTO(Model): +class CampaignListDTO(BaseModel): """DTO used to define available campaigns""" def __init__(self): @@ -53,4 +67,4 @@ def __init__(self): super().__init__() self.campaigns = [] - campaigns = ListType(ModelType(CampaignDTO)) + campaigns: Optional[List[ListCampaignDTO]] = None diff --git a/backend/models/dtos/grid_dto.py b/backend/models/dtos/grid_dto.py index 12dcb73db9..ddc3fac713 100644 --- a/backend/models/dtos/grid_dto.py +++ b/backend/models/dtos/grid_dto.py @@ -1,19 +1,24 @@ -from schematics.types import BaseType, BooleanType, IntType, StringType -from schematics import Model +from pydantic import BaseModel, Field -class GridDTO(Model): +class 
GridDTO(BaseModel): """Describes JSON model used for creating grids""" - area_of_interest = BaseType(required=True, serialized_name="areaOfInterest") - grid = BaseType(required=True) - clip_to_aoi = BooleanType(required=True, serialized_name="clipToAoi") + area_of_interest: dict = Field(..., alias="areaOfInterest") + grid: dict = Field(..., alias="grid") + clip_to_aoi: bool = Field(..., alias="clipToAoi") + class Config: + populate_by_name = True -class SplitTaskDTO(Model): + +class SplitTaskDTO(BaseModel): """DTO used to split a task""" - user_id = IntType(required=True) - task_id = IntType(required=True) - project_id = IntType(required=True) - preferred_locale = StringType(default="en") + user_id: int = Field(alias="userId") + task_id: int = Field(alias="taskId") + project_id: int = Field(alias="projectId") + preferred_locale: str = "en" + + class Config: + populate_by_name = True diff --git a/backend/models/dtos/interests_dto.py b/backend/models/dtos/interests_dto.py index fd60f73b07..d43c1f32f9 100644 --- a/backend/models/dtos/interests_dto.py +++ b/backend/models/dtos/interests_dto.py @@ -1,42 +1,46 @@ -from schematics import Model -from schematics.types import IntType, StringType, FloatType, BooleanType -from schematics.types.compound import ListType, ModelType +from pydantic import BaseModel, Field +from typing import Optional, List -class InterestDTO(Model): - """DTO for a interest.""" - - id = IntType() - name = StringType(required=True, min_length=1) - user_selected = BooleanType( - serialized_name="userSelected", serialize_when_none=False +class InterestDTO(BaseModel): + id: Optional[int] = None + name: Optional[str] = Field(default=None, min_length=1) + user_selected: Optional[bool] = Field( + alias="userSelected", default=None, none_if_default=True + ) + count_projects: Optional[int] = Field( + serialize=False, alias="countProjects", default=None + ) + count_users: Optional[int] = Field( + serialize=False, alias="countUsers", default=None ) - count_projects = IntType(serialize_when_none=False, serialized_name="countProjects") - count_users = IntType(serialize_when_none=False, serialized_name="countUsers") + class Config: + populate_by_name = True -class InterestsListDTO(Model): + +class InterestsListDTO(BaseModel): """DTO for a list of interests.""" def __init__(self): super().__init__() self.interests = [] - interests = ListType(ModelType(InterestDTO)) + interests: Optional[List[InterestDTO]] = None -class InterestRateDTO(Model): +class InterestRateDTO(BaseModel): """DTO for a interest rate.""" - name = StringType() - rate = FloatType() + name: str + rate: float -class InterestRateListDTO(Model): +class InterestRateListDTO(BaseModel): """DTO for a list of interests rates.""" def __init__(self): super().__init__() - self.interests = [] + self.rates = [] - rates = ListType(ModelType(InterestRateDTO)) + rates: Optional[List[InterestRateDTO]] = None diff --git a/backend/models/dtos/licenses_dto.py b/backend/models/dtos/licenses_dto.py index addd20aecb..a4140a5d0c 100644 --- a/backend/models/dtos/licenses_dto.py +++ b/backend/models/dtos/licenses_dto.py @@ -1,22 +1,21 @@ -from schematics import Model -from schematics.types import StringType, IntType -from schematics.types.compound import ListType, ModelType +from pydantic import BaseModel, Field +from typing import List, Optional -class LicenseDTO(Model): +class LicenseDTO(BaseModel): """DTO used to define a mapping license""" - license_id = IntType(serialized_name="licenseId") - name = StringType(required=True) - description = 
StringType(required=True) - plain_text = StringType(required=True, serialized_name="plainText") + license_id: Optional[int] = Field(None, alias="licenseId") + name: Optional[str] = None + description: Optional[str] = None + plain_text: Optional[str] = Field(None, alias="plainText") -class LicenseListDTO(Model): +class LicenseListDTO(BaseModel): """DTO for all mapping licenses""" def __init__(self): super().__init__() self.licenses = [] - licenses = ListType(ModelType(LicenseDTO)) + licenses: Optional[List[LicenseDTO]] = None diff --git a/backend/models/dtos/mapping_dto.py b/backend/models/dtos/mapping_dto.py index d394c0117f..ceca066abc 100644 --- a/backend/models/dtos/mapping_dto.py +++ b/backend/models/dtos/mapping_dto.py @@ -1,10 +1,11 @@ -from schematics import Model -from schematics.exceptions import ValidationError -from schematics.types import StringType, IntType, UTCDateTimeType -from schematics.types.compound import ListType, ModelType -from backend.models.postgis.statuses import TaskStatus +from datetime import datetime +from typing import List, Optional + +from pydantic import BaseModel, Field, ValidationError, validator + from backend.models.dtos.mapping_issues_dto import TaskMappingIssueDTO from backend.models.dtos.task_annotation_dto import TaskAnnotationDTO +from backend.models.postgis.statuses import TaskStatus def is_valid_mapped_status(value): @@ -20,98 +21,122 @@ def is_valid_mapped_status(value): raise ValidationError(f"Invalid task Status. Valid values are {valid_values}") -class LockTaskDTO(Model): +class LockTaskDTO(BaseModel): """DTO used to lock a task for mapping""" - user_id = IntType(required=True) - task_id = IntType(required=True) - project_id = IntType(required=True) - preferred_locale = StringType(default="en") + user_id: int + task_id: int + project_id: int + preferred_locale: str = "en" + @validator("preferred_locale", pre=True, always=True) + def set_default_preferred_locale(cls, v): + return v or "en" -class MappedTaskDTO(Model): - """Describes the model used to update the status of one task after mapping""" - user_id = IntType(required=True) - status = StringType(required=True, validators=[is_valid_mapped_status]) - comment = StringType() - task_id = IntType(required=True) - project_id = IntType(required=True) - preferred_locale = StringType(default="en") +class MappedTaskDTO(BaseModel): + """Describes the model used to update the status of one task after mapping""" + user_id: int + status: str = Field(required=True, validators=[is_valid_mapped_status]) + comment: Optional[str] = None + task_id: int + project_id: int + preferred_locale: str = "en" -class StopMappingTaskDTO(Model): - """Describes the model used to stop mapping and reset the status of one task""" - user_id = IntType(required=True) - comment = StringType() - task_id = IntType(required=True) - project_id = IntType(required=True) - preferred_locale = StringType(default="en") +class StopMappingTaskDTO(BaseModel): + user_id: int + comment: Optional[str] = None + task_id: int + project_id: int + preferred_locale: str = Field(default="en") -class TaskHistoryDTO(Model): +class TaskHistoryDTO(BaseModel): """Describes an individual action that was performed on a mapping task""" - history_id = IntType(serialized_name="historyId") - task_id = StringType(serialized_name="taskId") - action = StringType() - action_text = StringType(serialized_name="actionText") - action_date = UTCDateTimeType(serialized_name="actionDate") - action_by = StringType(serialized_name="actionBy") - picture_url = 
StringType(serialized_name="pictureUrl") - issues = ListType(ModelType(TaskMappingIssueDTO)) + history_id: Optional[int] = Field(alias="historyId", default=None) + task_id: Optional[int] = Field(alias="taskId", default=None) + action: Optional[str] = None + action_text: Optional[str] = Field(alias="actionText", default=None) + action_date: datetime = Field(alias="actionDate", default=None) + action_by: Optional[str] = Field(alias="actionBy", default=None) + picture_url: Optional[str] = Field(alias="pictureUrl", default=None) + issues: Optional[List[TaskMappingIssueDTO]] = None + + class Config: + populate_by_name = True + json_encoders = {datetime: lambda v: v.isoformat() + "Z" if v else None} -class TaskStatusDTO(Model): + +class TaskStatusDTO(BaseModel): """Describes a DTO for the current status of the task""" - task_id = IntType(serialized_name="taskId") - task_status = StringType(serialized_name="taskStatus") - action_date = UTCDateTimeType(serialized_name="actionDate") - action_by = StringType(serialized_name="actionBy") + task_id: Optional[int] = Field(alias="taskId", default=None) + task_status: Optional[str] = Field(alias="taskStatus", default=None) + action_date: Optional[datetime] = Field(alias="actionDate", default=None) + action_by: Optional[str] = Field(alias="actionBy", default=None) + + class Config: + populate_by_name = True + json_encoders = {datetime: lambda v: v.isoformat() + "Z" if v else None} -class TaskDTO(Model): + +class TaskDTO(BaseModel): """Describes a Task DTO""" - task_id = IntType(serialized_name="taskId") - project_id = IntType(serialized_name="projectId") - task_status = StringType(serialized_name="taskStatus") - lock_holder = StringType(serialized_name="lockHolder", serialize_when_none=False) - task_history = ListType(ModelType(TaskHistoryDTO), serialized_name="taskHistory") - task_annotations = ListType( - ModelType(TaskAnnotationDTO), serialized_name="taskAnnotation" + task_id: Optional[int] = Field(None, alias="taskId") + project_id: Optional[int] = Field(None, alias="projectId") + task_status: Optional[str] = Field(None, alias="taskStatus") + lock_holder: Optional[str] = Field( + None, alias="lockHolder", serialize_when_none=False + ) + task_history: Optional[List[TaskHistoryDTO]] = Field(None, alias="taskHistory") + task_annotations: Optional[List[TaskAnnotationDTO]] = Field( + None, alias="taskAnnotation" ) - per_task_instructions = StringType( - serialized_name="perTaskInstructions", serialize_when_none=False + per_task_instructions: Optional[str] = Field( + None, alias="perTaskInstructions", serialize_when_none=False ) - auto_unlock_seconds = IntType(serialized_name="autoUnlockSeconds") - last_updated = UTCDateTimeType( - serialized_name="lastUpdated", serialize_when_none=False + auto_unlock_seconds: Optional[int] = Field(None, alias="autoUnlockSeconds") + last_updated: Optional[datetime] = Field( + None, alias="lastUpdated", serialize_when_none=False ) - comments_number = IntType(serialized_name="numberOfComments") + comments_number: Optional[int] = Field(None, alias="numberOfComments") + class Config: + populate_by_name = True + json_encoders = {datetime: lambda v: v.isoformat() + "Z" if v else None} -class TaskDTOs(Model): + +class TaskDTOs(BaseModel): """Describes an array of Task DTOs""" - tasks = ListType(ModelType(TaskDTO)) + tasks: Optional[List[TaskDTO]] = None -class TaskCommentDTO(Model): +class TaskCommentDTO(BaseModel): """Describes the model used to add a standalone comment to a task outside of mapping/validation""" - user_id = 
IntType(required=True) - comment = StringType(required=True) - task_id = IntType(required=True) - project_id = IntType(required=True) - preferred_locale = StringType(default="en") + user_id: int = Field(..., alias="userId") + comment: str + task_id: int = Field(..., alias="taskId") + project_id: int = Field(..., alias="projectId") + preferred_locale: str = Field("en") + + class Config: + populate_by_name = True -class ExtendLockTimeDTO(Model): +class ExtendLockTimeDTO(BaseModel): """DTO used to extend expiry time of tasks""" - project_id = IntType(required=True) - task_ids = ListType(IntType, required=True, serialized_name="taskIds") - user_id = IntType(required=True) + project_id: int + task_ids: List[int] = Field(alias="taskIds") + user_id: int + + class Config: + populate_by_name = True diff --git a/backend/models/dtos/mapping_issues_dto.py b/backend/models/dtos/mapping_issues_dto.py index e718017c9d..a8d695b59f 100644 --- a/backend/models/dtos/mapping_issues_dto.py +++ b/backend/models/dtos/mapping_issues_dto.py @@ -1,30 +1,28 @@ -from schematics import Model -from schematics.types import IntType, StringType, BooleanType, ModelType -from schematics.types.compound import ListType +from pydantic import BaseModel, Field +from typing import List, Optional -class MappingIssueCategoryDTO(Model): +class MappingIssueCategoryDTO(BaseModel): """DTO used to define a mapping-issue category""" - category_id = IntType(serialized_name="categoryId") - name = StringType(required=True) - description = StringType(required=False) - archived = BooleanType(required=False) + category_id: int = Field(None, alias="categoryId") + name: str = Field(None, alias="name") + description: str = Field(None, alias="description") + archived: bool = Field(False, alias="archived") -class MappingIssueCategoriesDTO(Model): +class MappingIssueCategoriesDTO(BaseModel): """DTO for all mapping-issue categories""" - def __init__(self): - super().__init__() - self.categories = [] + categories: List[MappingIssueCategoryDTO] = Field([], alias="categories") - categories = ListType(ModelType(MappingIssueCategoryDTO)) - -class TaskMappingIssueDTO(Model): +class TaskMappingIssueDTO(BaseModel): """DTO used to define a single mapping issue recorded with a task invalidation""" - category_id = IntType(serialized_name="categoryId") - name = StringType(required=True) - count = IntType(required=True) + category_id: Optional[int] = Field(alias="categoryId", default=None) + name: Optional[str] = None + count: Optional[int] = None + + class Config: + populate_by_name = True diff --git a/backend/models/dtos/message_dto.py b/backend/models/dtos/message_dto.py index a9dbee231c..4a57557202 100644 --- a/backend/models/dtos/message_dto.py +++ b/backend/models/dtos/message_dto.py @@ -1,37 +1,34 @@ -from schematics import Model -from schematics.types import StringType, IntType, BooleanType, UTCDateTimeType -from schematics.types.compound import ListType, ModelType +from datetime import datetime +from typing import List, Optional + +from pydantic import BaseModel, Field + from backend.models.dtos.stats_dto import Pagination -class MessageDTO(Model): +class MessageDTO(BaseModel): """DTO used to define a message that will be sent to a user""" - message_id = IntType(serialized_name="messageId") - subject = StringType( - serialized_name="subject", - required=True, - serialize_when_none=False, - min_length=1, - ) - message = StringType( - serialized_name="message", - required=True, - serialize_when_none=False, - min_length=1, - ) - from_user_id = 
IntType(required=True, serialize_when_none=False) - from_username = StringType(serialized_name="fromUsername", default="") - display_picture_url = StringType(serialized_name="displayPictureUrl", default="") - project_id = IntType(serialized_name="projectId") - project_title = StringType(serialized_name="projectTitle") - task_id = IntType(serialized_name="taskId") - message_type = StringType(serialized_name="messageType") - sent_date = UTCDateTimeType(serialized_name="sentDate") - read = BooleanType() - - -class MessagesDTO(Model): + message_id: Optional[int] = Field(None, alias="messageId") + subject: Optional[str] = Field(min_length=1, alias="subject") + message: Optional[str] = Field(min_length=1, alias="message") + from_user_id: Optional[int] = Field(alias="fromUserId") + from_username: Optional[str] = Field("", alias="fromUsername") + display_picture_url: Optional[str] = Field("", alias="displayPictureUrl") + project_id: Optional[int] = Field(None, alias="projectId") + project_title: Optional[str] = Field(None, alias="projectTitle") + task_id: Optional[int] = Field(None, alias="taskId") + message_type: Optional[str] = Field(None, alias="messageType") + sent_date: Optional[datetime] = Field(None, alias="sentDate") + read: Optional[bool] = None + + class Config: + populate_by_name = True + + json_encoders = {datetime: lambda v: v.isoformat() + "Z" if v else None} + + +class MessagesDTO(BaseModel): """DTO used to return all user messages""" def __init__(self): @@ -39,23 +36,48 @@ def __init__(self): super().__init__() self.user_messages = [] - pagination = ModelType(Pagination) - user_messages = ListType(ModelType(MessageDTO), serialized_name="userMessages") + pagination: Optional[Pagination] = None + user_messages: Optional[List[MessageDTO]] = Field([], alias="userMessages") + class Config: + populate_by_name = True -class ChatMessageDTO(Model): + +class ChatMessageDTO(BaseModel): """DTO describing an individual project chat message""" - id = IntType(required=False, serialize_when_none=False) - message = StringType(required=True) - user_id = IntType(required=True, serialize_when_none=False) - project_id = IntType(required=True, serialize_when_none=False) - picture_url = StringType(default=None, serialized_name="pictureUrl") - timestamp = UTCDateTimeType() - username = StringType() + id: Optional[int] = Field(None, alias="id") + message: str = Field(required=True) + user_id: int = Field(required=True) + project_id: int = Field(required=True) + picture_url: str = Field(default=None, alias="pictureUrl") + timestamp: datetime + username: str + + class Config: + populate_by_name = True + + # json_encoders = { + # datetime: lambda v: v.isoformat() + "Z" if v else None + # } + + +class ListChatMessageDTO(BaseModel): + """DTO describing an individual project chat message""" + + id: Optional[int] = Field(None, alias="id") + message: str = Field(required=True) + picture_url: Optional[str] = Field(None, alias="pictureUrl") + timestamp: datetime + username: str + + class Config: + populate_by_name = True + + json_encoders = {datetime: lambda v: v.isoformat() + "Z" if v else None} -class ProjectChatDTO(Model): +class ProjectChatDTO(BaseModel): """DTO describing all chat messages on one project""" def __init__(self): @@ -63,5 +85,5 @@ def __init__(self): super().__init__() self.chat = [] - chat = ListType(ModelType(ChatMessageDTO)) - pagination = ModelType(Pagination) + chat: Optional[List[ListChatMessageDTO]] = None + pagination: Optional[Pagination] = None diff --git 
a/backend/models/dtos/notification_dto.py b/backend/models/dtos/notification_dto.py index c2c35e4b16..f7a3ec9e17 100644 --- a/backend/models/dtos/notification_dto.py +++ b/backend/models/dtos/notification_dto.py @@ -1,10 +1,10 @@ -from schematics import Model -from schematics.types import IntType, UTCDateTimeType +from pydantic import BaseModel, Field +from datetime import datetime -class NotificationDTO(Model): +class NotificationDTO(BaseModel): """DTO used to define a notification count that will be sent to a user""" - user_id = IntType(serialized_name="userId") - date = UTCDateTimeType(serialized_name="date") - unread_count = IntType(serialized_name="unreadCount") + user_id: int = Field(alias="userId") + date: datetime = Field(alias="date") + unread_count: int = Field(alias="unreadCount") diff --git a/backend/models/dtos/organisation_dto.py b/backend/models/dtos/organisation_dto.py index fef05b01d0..e4ea920d0c 100644 --- a/backend/models/dtos/organisation_dto.py +++ b/backend/models/dtos/organisation_dto.py @@ -1,16 +1,9 @@ -from schematics import Model -from schematics.exceptions import ValidationError -from schematics.types import ( - StringType, - IntType, - ListType, - ModelType, - BooleanType, - DictType, -) - from backend.models.dtos.stats_dto import OrganizationStatsDTO from backend.models.postgis.statuses import OrganisationType +from pydantic import BaseModel, Field +from typing import List, Dict, Optional +from fastapi import HTTPException +from pydantic.functional_validators import field_validator def is_known_organisation_type(value): @@ -18,77 +11,120 @@ def is_known_organisation_type(value): try: OrganisationType[value.upper()] except (AttributeError, KeyError): - raise ValidationError( + raise HTTPException(status_code=400, detail= f"Unknown organisationType: {value}. Valid values are {OrganisationType.FREE.name}, " f"{OrganisationType.DISCOUNTED.name}, {OrganisationType.FULL_FEE.name}" ) + return value -class OrganisationManagerDTO(Model): - """Describes JSON model for a organisation manager""" +class OrganisationManagerDTO(BaseModel): + username: Optional[str] = None + picture_url: Optional[str] = Field(None, alias="pictureUrl") + + class Config: + populate_by_name = True - username = StringType(required=True) - picture_url = StringType(serialized_name="pictureUrl") +class OrganisationTeamsDTO(BaseModel): + team_id: Optional[int] = Field(None, alias="teamId") + name: Optional[str] = None + description: Optional[str] = None + join_method: Optional[str] = Field(None, alias="joinMethod") + visibility: Optional[str] = None + members: List[Dict[str, Optional[str]]] = Field(default=[]) -class OrganisationTeamsDTO(Model): - """Describes JSON model for a team.
To be used in the Organisations endpoints.""" + class Config: + populate_by_name = True - team_id = IntType(serialized_name="teamId") - name = StringType(required=True) - description = StringType() - join_method = StringType(required=True, serialized_name="joinMethod") - visibility = StringType() - members = ListType(DictType(StringType, serialize_when_none=False)) +class OrganisationDTO(BaseModel): + organisation_id: Optional[int] = Field(None, alias="organisationId") + managers: Optional[List[OrganisationManagerDTO]] = None + name: Optional[str] = None + slug: Optional[str] = None + logo: Optional[str] = None + description: Optional[str] = None + url: Optional[str] = None + is_manager: Optional[bool] = Field(None, alias="isManager") + projects: Optional[List[str]] = Field(default=[], alias="projects") + teams: List[OrganisationTeamsDTO] = None + campaigns: Optional[List[List[str]]] = None + stats: Optional[OrganizationStatsDTO] = None + type: Optional[str] = Field(None) + subscription_tier: Optional[int] = Field(None, alias="subscriptionTier") -class OrganisationDTO(Model): - """Describes JSON model for an organisation""" + class Config: + populate_by_name = True - organisation_id = IntType(serialized_name="organisationId") - managers = ListType(ModelType(OrganisationManagerDTO), min_size=1, required=True) - name = StringType(required=True) - slug = StringType() - logo = StringType() - description = StringType() - url = StringType() - is_manager = BooleanType(serialized_name="isManager") - projects = ListType(StringType, serialize_when_none=False) - teams = ListType(ModelType(OrganisationTeamsDTO)) - campaigns = ListType(ListType(StringType)) - stats = ModelType(OrganizationStatsDTO, serialize_when_none=False) - type = StringType(validators=[is_known_organisation_type]) - subscription_tier = IntType(serialized_name="subscriptionTier") + @field_validator("type", mode="before") + def validate_type(cls, value): + if value is None: + return value + return is_known_organisation_type(value) -class ListOrganisationsDTO(Model): +class ListOrganisationsDTO(BaseModel): def __init__(self): super().__init__() self.organisations = [] - organisations = ListType(ModelType(OrganisationDTO)) + organisations: Optional[List[OrganisationDTO]] = None -class NewOrganisationDTO(Model): +class NewOrganisationDTO(BaseModel): """Describes a JSON model to create a new organisation""" - organisation_id = IntType(serialized_name="organisationId", required=False) - managers = ListType(StringType(), required=True) - name = StringType(required=True) - slug = StringType() - logo = StringType() - description = StringType() - url = StringType() - type = StringType(validators=[is_known_organisation_type]) - subscription_tier = IntType(serialized_name="subscriptionTier") + organisation_id: Optional[int] = Field(None, alias="organisationId") + managers: List[str] + name: str + slug: Optional[str] = None + logo: Optional[str] = None + description: Optional[str] = None + url: Optional[str] = None + type: str + subscription_tier: Optional[int] = Field(None, alias="subscriptionTier") + + class Config: + populate_by_name = True + + @field_validator("type", mode="before") + @classmethod + def validate_type(cls, value: Optional[str]) -> Optional[str]: + """Validates organisation subscription type string""" + try: + OrganisationType[value.upper()] + except (AttributeError, KeyError): + raise ValueError( + f"Unknown organisationType: {value}. 
Valid values are {OrganisationType.FREE.name}, " + f"{OrganisationType.DISCOUNTED.name}, {OrganisationType.FULL_FEE.name}" + ) + return value class UpdateOrganisationDTO(OrganisationDTO): - organisation_id = IntType(serialized_name="organisationId", required=False) - managers = ListType(StringType()) - name = StringType() - slug = StringType() - logo = StringType() - description = StringType() - url = StringType() - type = StringType(validators=[is_known_organisation_type]) + organisation_id: Optional[int] = Field(None, alias="organisationId") + managers: List[str] = Field(default=[]) + name: Optional[str] = None + slug: Optional[str] = None + logo: Optional[str] = None + description: Optional[str] = None + url: Optional[str] = None + type: Optional[str] = None + + class Config: + populate_by_name = True + + @field_validator("type", mode="before") + @classmethod + def validate_type(cls, value: Optional[str]) -> Optional[str]: + """Validates organisation subscription type string""" + if value is None: + return value + try: + OrganisationType[value.upper()] + except (AttributeError, KeyError): + raise ValueError( + f"Unknown organisationType: {value}. Valid values are {OrganisationType.FREE.name}, " + f"{OrganisationType.DISCOUNTED.name}, {OrganisationType.FULL_FEE.name}" + ) + return value diff --git a/backend/models/dtos/partner_dto.py b/backend/models/dtos/partner_dto.py index bf9b2087df..b296b38690 100644 --- a/backend/models/dtos/partner_dto.py +++ b/backend/models/dtos/partner_dto.py @@ -1,19 +1,30 @@ -from schematics import Model -from schematics.types import StringType, ListType, LongType +# from schematics import Model +# from schematics.types import StringType, ListType, LongType +import json +from typing import Dict, List, Optional +from pydantic import BaseModel, HttpUrl -class PartnerDTO(Model): + +class PartnerDTO(BaseModel): """DTO for Partner""" - id = LongType() - name = StringType(serialized_name="name") - primary_hashtag = StringType(serialized_name="primary_hashtag") - secondary_hashtag = StringType(serialized_name="secondary_hashtag") - link_x = StringType(serialized_name="link_x") - link_meta = StringType(serialized_name="link_meta") - link_instagram = StringType(serialized_name="link_instagram") - logo_url = StringType(serialized_name="logo_url") - current_projects = StringType(serialized_name="current_projects") - permalink = StringType(serialized_name="permalink") - website_links = ListType(StringType, serialized_name="website_links") - mapswipe_group_id = StringType() + id: Optional[int] = None + name: str + primary_hashtag: str + secondary_hashtag: Optional[str] = None + link_x: Optional[str] = None + link_meta: Optional[str] = None + link_instagram: Optional[str] = None + logo_url: Optional[HttpUrl] = None # Ensures it's a valid URL + current_projects: Optional[str] = None + permalink: Optional[str] = None + website_links: Optional[List[Dict]] = None + mapswipe_group_id: Optional[str] = None + + @classmethod + def from_record(cls, record): + record_dict = dict(record) + if record_dict.get("website_links"): + record_dict["website_links"] = json.loads(record_dict["website_links"]) + return cls(**record_dict) diff --git a/backend/models/dtos/partner_stats_dto.py b/backend/models/dtos/partner_stats_dto.py index 59b75430c2..20bba2179e 100644 --- a/backend/models/dtos/partner_stats_dto.py +++ b/backend/models/dtos/partner_stats_dto.py @@ -1,98 +1,122 @@ +from typing import List, Optional + import pandas as pd -from schematics import Model -from schematics.types import ( 
- StringType, - LongType, - IntType, - ListType, - ModelType, - FloatType, - BooleanType, -) +from pydantic import BaseModel, Field + + +class UserGroupMemberDTO(BaseModel): + """Describes a JSON model for a user group member.""" + + id: Optional[str] = None + user_id: Optional[str] = Field(None, alias="userId") + username: Optional[str] = None + is_active: Optional[bool] = Field(None, alias="isActive") + total_mapping_projects: Optional[int] = Field(None, alias="totalMappingProjects") + total_contribution_time: Optional[int] = Field(None, alias="totalcontributionTime") + total_contributions: Optional[int] = Field(None, alias="totalcontributions") + + class Config: + populate_by_name = True + + +class OrganizationContributionsDTO(BaseModel): + """Describes a JSON model for organization contributions.""" + + organization_name: Optional[str] = Field(None, alias="organizationName") + total_contributions: Optional[int] = Field(None, alias="totalcontributions") + class Config: + populate_by_name = True -class UserGroupMemberDTO(Model): - id = StringType() - user_id = StringType(serialized_name="userId") - username = StringType() - is_active = BooleanType(serialized_name="isActive") - total_mapping_projects = IntType(serialized_name="totalMappingProjects") - total_contribution_time = IntType(serialized_name="totalcontributionTime") - total_contributions = IntType(serialized_name="totalcontributions") +class UserContributionsDTO(BaseModel): + """Describes a JSON model for user contributions.""" -class OrganizationContributionsDTO(Model): - organization_name = StringType(serialized_name="organizationName") - total_contributions = IntType(serialized_name="totalcontributions") + total_mapping_projects: Optional[int] = Field(None, alias="totalMappingProjects") + total_contribution_time: Optional[int] = Field(None, alias="totalcontributionTime") + total_contributions: Optional[int] = Field(None, alias="totalcontributions") + username: Optional[str] = None + user_id: Optional[str] = Field(None, alias="userId") + class Config: + populate_by_name = True -class UserContributionsDTO(Model): - total_mapping_projects = IntType(serialized_name="totalMappingProjects") - total_contribution_time = IntType(serialized_name="totalcontributionTime") - total_contributions = IntType(serialized_name="totalcontributions") - username = StringType() - user_id = StringType(serialized_name="userId") +class GeojsonDTO(BaseModel): + type: Optional[str] = None + coordinates: Optional[List[float]] = None -class GeojsonDTO(Model): - type = StringType() - coordinates = ListType(FloatType) +class GeoContributionsDTO(BaseModel): + geojson: Optional[GeojsonDTO] = None + total_contributions: Optional[int] = Field(None, alias="totalcontributions") -class GeoContributionsDTO(Model): - geojson = ModelType(GeojsonDTO) - total_contributions = IntType(serialized_name="totalcontributions") + class Config: + populate_by_name = True -class ContributionsByDateDTO(Model): - task_date = StringType(serialized_name="taskDate") - total_contributions = IntType(serialized_name="totalcontributions") +class ContributionsByDateDTO(BaseModel): + task_date: str = Field(None, alias="taskDate") + total_contributions: int = Field(None, alias="totalcontributions") -class ContributionTimeByDateDTO(Model): - date = StringType(serialized_name="date") - total_contribution_time = IntType(serialized_name="totalcontributionTime") +class ContributionTimeByDateDTO(BaseModel): + date: str = Field(None, alias="date") + total_contribution_time: int = Field(None, 
alias="totalcontributionTime") + class Config: + populate_by_name = True -class ContributionsByProjectTypeDTO(Model): - project_type = StringType(serialized_name="projectType") - project_type_display = StringType(serialized_name="projectTypeDisplay") - total_contributions = IntType(serialized_name="totalcontributions") +class ContributionsByProjectTypeDTO(BaseModel): + project_type: str = Field(None, alias="projectType") + project_type_display: str = Field(None, alias="projectTypeDisplay") + total_contributions: int = Field(None, alias="totalcontributions") -class AreaSwipedByProjectTypeDTO(Model): - total_area = FloatType(serialized_name="totalArea") - project_type = StringType(serialized_name="projectType") - project_type_display = StringType(serialized_name="projectTypeDisplay") + class Config: + populate_by_name = True -class GroupedPartnerStatsDTO(Model): +class AreaSwipedByProjectTypeDTO(BaseModel): + total_area: Optional[float] = Field(None, alias="totalArea") + project_type: str = Field(None, alias="projectType") + project_type_display: str = Field(None, alias="projectTypeDisplay") + + class Config: + populate_by_name = True + + +class GroupedPartnerStatsDTO(BaseModel): """General statistics of a partner and its members.""" - id = LongType() - provider = StringType() - id_inside_provider = StringType(serialized_name="idInsideProvider") - name_inside_provider = StringType(serialized_name="nameInsideProvider") - description_inside_provider = StringType( - serialized_name="descriptionInsideProvider" + id: Optional[int] = None + provider: str + id_inside_provider: Optional[str] = Field(None, alias="idInsideProvider") + name_inside_provider: Optional[str] = Field(None, alias="nameInsideProvider") + description_inside_provider: Optional[str] = Field( + None, alias="descriptionInsideProvider" ) - members_count = IntType(serialized_name="membersCount") - members = ListType(ModelType(UserGroupMemberDTO)) + members_count: Optional[int] = Field(None, alias="membersCount") + members: List[UserGroupMemberDTO] = None # General stats of partner - total_contributors = IntType(serialized_name="totalContributors") - total_contributions = IntType(serialized_name="totalcontributions") - total_contribution_time = IntType(serialized_name="totalcontributionTime") + total_contributors: Optional[int] = Field(None, alias="totalContributors") + total_contributions: Optional[int] = Field(None, alias="totalcontributions") + total_contribution_time: Optional[int] = Field(None, alias="totalcontributionTime") # Recent contributions during the last 1 month - total_recent_contributors = IntType(serialized_name="totalRecentContributors") - total_recent_contributions = IntType(serialized_name="totalRecentcontributions") - total_recent_contribution_time = IntType( - serialized_name="totalRecentcontributionTime" + total_recent_contributors: Optional[int] = Field( + None, alias="totalRecentContributors" + ) + total_recent_contributions: Optional[int] = Field( + None, alias="totalRecentcontributions" + ) + total_recent_contribution_time: Optional[int] = Field( + None, alias="totalRecentcontributionTime" ) def to_csv(self): - df = pd.json_normalize(self.to_primitive()["members"]) + df = pd.json_normalize(self.dict(by_alias=True)["members"]) df.drop( columns=["id"], @@ -109,37 +133,40 @@ def to_csv(self): return df.to_csv(index=False) + class Config: + populate_by_name = True + -class FilteredPartnerStatsDTO(Model): +class FilteredPartnerStatsDTO(BaseModel): """Statistics of a partner contributions filtered by time 
range.""" - id = LongType() - provider = StringType() - id_inside_provider = StringType(serialized_name="idInsideProvider") + id: Optional[int] = None + provider: str + id_inside_provider: Optional[str] = Field(None, alias="idInsideProvider") - from_date = StringType(serialized_name="fromDate") - to_date = StringType(serialized_name="toDate") - contributions_by_user = ListType( - ModelType(UserContributionsDTO), serialized_name="contributionsByUser" + from_date: Optional[str] = Field(None, alias="fromDate") + to_date: Optional[str] = Field(None, alias="toDate") + contributions_by_user: List[UserContributionsDTO] = Field( + [], alias="contributionsByUser" ) - contributions_by_geo = ListType( - ModelType(GeoContributionsDTO), serialized_name="contributionsByGeo" + contributions_by_geo: List[GeoContributionsDTO] = Field( + [], alias="contributionsByGeo" ) - area_swiped_by_project_type = ListType( - ModelType(AreaSwipedByProjectTypeDTO), serialized_name="areaSwipedByProjectType" + area_swiped_by_project_type: List[AreaSwipedByProjectTypeDTO] = Field( + [], alias="areaSwipedByProjectType" ) - - contributions_by_project_type = ListType( - ModelType(ContributionsByProjectTypeDTO), - serialized_name="contributionsByProjectType", + contributions_by_project_type: List[ContributionsByProjectTypeDTO] = Field( + [], alias="contributionsByProjectType" ) - contributions_by_date = ListType( - ModelType(ContributionsByDateDTO), serialized_name="contributionsByDate" + contributions_by_date: List[ContributionsByDateDTO] = Field( + [], alias="contributionsByDate" ) - contributions_by_organization_name = ListType( - ModelType(OrganizationContributionsDTO), - serialized_name="contributionsByorganizationName", + contributions_by_organization_name: List[OrganizationContributionsDTO] = Field( + [], alias="contributionsByorganizationName" ) - contribution_time_by_date = ListType( - ModelType(ContributionTimeByDateDTO), serialized_name="contributionTimeByDate" + contribution_time_by_date: List[ContributionTimeByDateDTO] = Field( + [], alias="contributionTimeByDate" ) + + class Config: + populate_by_name = True diff --git a/backend/models/dtos/project_dto.py b/backend/models/dtos/project_dto.py index 419d397e7f..8fe71c0f1f 100644 --- a/backend/models/dtos/project_dto.py +++ b/backend/models/dtos/project_dto.py @@ -1,339 +1,377 @@ -from schematics import Model -from schematics.exceptions import ValidationError -from schematics.types import ( - StringType, - BaseType, - IntType, - BooleanType, - FloatType, - UTCDateTimeType, - DateType, -) -from schematics.types.compound import ListType, ModelType -from backend.models.dtos.task_annotation_dto import TaskAnnotationDTO +from datetime import date, datetime +from typing import Any, Dict, List, Optional, Union + +from fastapi import HTTPException +from pydantic import BaseModel, Field, root_validator + +from backend.models.dtos.campaign_dto import CampaignDTO +from backend.models.dtos.interests_dto import InterestDTO from backend.models.dtos.stats_dto import Pagination +from backend.models.dtos.task_annotation_dto import TaskAnnotationDTO from backend.models.dtos.team_dto import ProjectTeamDTO -from backend.models.dtos.interests_dto import InterestDTO from backend.models.postgis.statuses import ( - ProjectStatus, - ProjectPriority, - MappingTypes, - TaskCreationMode, Editors, MappingPermission, - ValidationPermission, + MappingTypes, ProjectDifficulty, + ProjectPriority, + ProjectStatus, + TaskCreationMode, + ValidationPermission, ) -from backend.models.dtos.campaign_dto 
import CampaignDTO - -def is_known_project_status(value): - """Validates that Project Status is known value""" - if isinstance(value, list): - return # Don't validate the entire list, just the individual values +def is_known_project_status(value: str) -> str: + """Validates that Project Status is a known value.""" try: ProjectStatus[value.upper()] except KeyError: - raise ValidationError( - f"Unknown projectStatus: {value} Valid values are {ProjectStatus.DRAFT.name}, " - f"{ProjectStatus.PUBLISHED.name}, {ProjectStatus.ARCHIVED.name}" + raise HTTPException( + status_code=400, + detail=( + f"Unknown projectStatus: {value}. Valid values are: " + f"{ProjectStatus.DRAFT.name}, " + f"{ProjectStatus.PUBLISHED.name}, " + f"{ProjectStatus.ARCHIVED.name}." + ), ) + return value -def is_known_project_priority(value): - """Validates Project priority is known value""" +def is_known_project_priority(value: str) -> str: + """Validates that Project Priority is a known value.""" try: ProjectPriority[value.upper()] except KeyError: - raise ValidationError( - f"Unknown projectStatus: {value} Valid values are {ProjectPriority.LOW.name}, " - f"{ProjectPriority.MEDIUM.name}, {ProjectPriority.HIGH.name}, " - f"{ProjectPriority.URGENT.name}" + raise HTTPException( + status_code=400, + detail=( + f"Unknown projectPriority: {value}. Valid values are: " + f"{ProjectPriority.LOW.name}, " + f"{ProjectPriority.MEDIUM.name}, " + f"{ProjectPriority.HIGH.name}, " + f"{ProjectPriority.URGENT.name}." + ), ) + return value -def is_known_mapping_type(value): - """Validates Mapping Type is known value""" - if isinstance(value, list): - return # Don't validate the entire list, just the individual values - +def is_known_mapping_type(value: str) -> str: + """Validates that Mapping Type is a known value.""" try: MappingTypes[value.upper()] except KeyError: - raise ValidationError( - f"Unknown mappingType: {value} Valid values are {MappingTypes.ROADS.name}, " - f"{MappingTypes.BUILDINGS.name}, {MappingTypes.WATERWAYS.name}, " - f"{MappingTypes.LAND_USE.name}, {MappingTypes.OTHER.name}" + raise HTTPException( + status_code=400, + detail=( + f"Unknown mappingType: {value}. Valid values are: " + f"{MappingTypes.ROADS.name}, " + f"{MappingTypes.BUILDINGS.name}, " + f"{MappingTypes.WATERWAYS.name}, " + f"{MappingTypes.LAND_USE.name}, " + f"{MappingTypes.OTHER.name}." + ), ) + return value -def is_known_editor(value): - """Validates Editor is known value""" - if isinstance(value, list): - return # Don't validate the entire list, just the individual values - +def is_known_editor(value: str) -> str: + """Validates that Editor is a known value.""" try: Editors[value.upper()] except KeyError: - raise ValidationError( - f"Unknown editor: {value} Valid values are {Editors.ID.name}, " - f"{Editors.JOSM.name}, {Editors.POTLATCH_2.name}, " - f"{Editors.FIELD_PAPERS.name}, " - f"{Editors.RAPID.name} " + raise HTTPException( + status_code=400, + detail=( + f"Unknown editor: {value}. Valid values are: " + f"{Editors.ID.name}, " + f"{Editors.JOSM.name}, " + f"{Editors.POTLATCH_2.name}, " + f"{Editors.FIELD_PAPERS.name}, " + f"{Editors.RAPID.name}." 
+ ), ) + return value -def is_known_task_creation_mode(value): - """Validates Task Creation Mode is known value""" +def is_known_task_creation_mode(value: str) -> str: + """Validates that Task Creation Mode is a known value.""" try: TaskCreationMode[value.upper()] except KeyError: - raise ValidationError( - f"Unknown taskCreationMode: {value} Valid values are {TaskCreationMode.GRID.name}, " - f"{TaskCreationMode.ARBITRARY.name}" + raise HTTPException( + status_code=400, + detail=( + f"Unknown taskCreationMode: {value}. Valid values are: " + f"{TaskCreationMode.GRID.name}, " + f"{TaskCreationMode.ARBITRARY.name}." + ), ) + return value -def is_known_mapping_permission(value): - """Validates Mapping Permission String""" +def is_known_mapping_permission(value: str) -> str: + """Validates that Mapping Permission is a known value.""" try: MappingPermission[value.upper()] except KeyError: - raise ValidationError( - f"Unknown mappingPermission: {value} Valid values are {MappingPermission.ANY.name}, " - f"{MappingPermission.LEVEL.name}" + raise HTTPException( + status_code=400, + detail=( + f"Unknown mappingPermission: {value}. Valid values are: " + f"{MappingPermission.ANY.name}, " + f"{MappingPermission.LEVEL.name}." + ), ) + return value -def is_known_validation_permission(value): - """Validates Validation Permission String""" +def is_known_validation_permission(value: str) -> str: + """Validates that Validation Permission is a known value.""" try: ValidationPermission[value.upper()] except KeyError: - raise ValidationError( - f"Unknown validationPermission: {value} Valid values are {ValidationPermission.ANY.name}, " - f"{ValidationPermission.LEVEL.name}, {ValidationPermission.TEAMS.name}, " - f"{ValidationPermission.TEAMS_LEVEL.name}" + raise HTTPException( + status_code=400, + detail=( + f"Unknown validationPermission: {value}. Valid values are: " + f"{ValidationPermission.ANY.name}, " + f"{ValidationPermission.LEVEL.name}, " + f"{ValidationPermission.TEAMS.name}, " + f"{ValidationPermission.TEAMS_LEVEL.name}." + ), ) + return value -def is_known_project_difficulty(value): - """Validates that supplied project difficulty is known value""" +def is_known_project_difficulty(value: str) -> str: + """Validates that Project Difficulty is a known value.""" if value.upper() == "ALL": - return True + return value try: - value = value.split(",") - for difficulty in value: + value_list = value.split(",") + for difficulty in value_list: ProjectDifficulty[difficulty.upper()] except KeyError: - raise ValidationError( - f"Unknown projectDifficulty: {value} Valid values are {ProjectDifficulty.EASY.name}, " - f"{ProjectDifficulty.MODERATE.name}, {ProjectDifficulty.CHALLENGING.name} and ALL." + raise HTTPException( + status_code=400, + detail=( + f"Unknown projectDifficulty: {value}. Valid values are: " + f"{ProjectDifficulty.EASY.name}, " + f"{ProjectDifficulty.MODERATE.name}, " + f"{ProjectDifficulty.CHALLENGING.name}, and ALL." 
+ ), ) + return value -class DraftProjectDTO(Model): +class DraftProjectDTO(BaseModel): """Describes JSON model used for creating draft project""" - cloneFromProjectId = IntType(serialized_name="cloneFromProjectId") - project_name = StringType(required=True, serialized_name="projectName") - organisation = IntType(required=True) - area_of_interest = BaseType(required=True, serialized_name="areaOfInterest") - tasks = BaseType(required=False) - has_arbitrary_tasks = BooleanType(required=True, serialized_name="arbitraryTasks") - user_id = IntType(required=True) + cloneFromProjectId: int = Field(None, alias="cloneFromProjectId") + project_name: str = Field(..., alias="projectName") + organisation: int = Field(None) + area_of_interest: dict = Field({}, alias="areaOfInterest") + tasks: Optional[dict] = Field({}) + has_arbitrary_tasks: bool = Field(False, alias="arbitraryTasks") + user_id: int = Field(None) + + class Config: + populate_by_name = True -class ProjectInfoDTO(Model): +class ProjectInfoDTO(BaseModel): """Contains the localized project info""" - locale = StringType(required=True) - name = StringType(default="") - short_description = StringType(serialized_name="shortDescription", default="") - description = StringType(default="") - instructions = StringType(default="") - per_task_instructions = StringType( - default="", serialized_name="perTaskInstructions" + locale: str + name: Optional[str] = "" + short_description: Optional[str] = Field(default="", alias="shortDescription") + description: Optional[str] = "" + instructions: Optional[str] = "" + per_task_instructions: Optional[str] = Field( + default="", alias="perTaskInstructions" ) + class Config: + populate_by_name = True -class CustomEditorDTO(Model): - """DTO to define a custom editor""" + @root_validator(pre=True) + def replace_none_with_empty_string(cls, values): + return { + key: (value if value is not None or key == "locale" else "") + for key, value in values.items() + } - name = StringType(required=True) - description = StringType() - url = StringType(required=True) +class CustomEditorDTO(BaseModel): + """DTO to define a custom editor""" -class ProjectDTO(Model): - """Describes JSON model for a tasking manager project""" + name: str = Field(None) + description: Optional[str] = Field(None) + url: str = Field(None) - project_id = IntType(serialized_name="projectId") - project_status = StringType( - required=True, - serialized_name="status", - validators=[is_known_project_status], - serialize_when_none=False, - ) - project_priority = StringType( - required=True, - serialized_name="projectPriority", - validators=[is_known_project_priority], - serialize_when_none=False, - ) - area_of_interest = BaseType(serialized_name="areaOfInterest") - aoi_bbox = ListType(FloatType, serialized_name="aoiBBOX") - tasks = BaseType(serialize_when_none=False) - default_locale = StringType( - required=True, serialized_name="defaultLocale", serialize_when_none=False - ) - project_info = ModelType( - ProjectInfoDTO, serialized_name="projectInfo", serialize_when_none=False - ) - project_info_locales = ListType( - ModelType(ProjectInfoDTO), - serialized_name="projectInfoLocales", - serialize_when_none=False, - ) - difficulty = StringType( - required=True, - serialized_name="difficulty", - validators=[is_known_project_difficulty], - ) - mapping_permission = StringType( - required=True, - serialized_name="mappingPermission", - validators=[is_known_mapping_permission], - ) - validation_permission = StringType( - required=True, - 
serialized_name="validationPermission", - validators=[is_known_validation_permission], - ) - enforce_random_task_selection = BooleanType( - required=False, default=False, serialized_name="enforceRandomTaskSelection" - ) - - private = BooleanType(required=True) - changeset_comment = StringType(serialized_name="changesetComment") - osmcha_filter_id = StringType(serialized_name="osmchaFilterId") - due_date = UTCDateTimeType(serialized_name="dueDate") - imagery = StringType() - josm_preset = StringType(serialized_name="josmPreset", serialize_when_none=False) - id_presets = ListType(StringType, serialized_name="idPresets", default=[]) - extra_id_params = StringType(serialized_name="extraIdParams") - rapid_power_user = BooleanType( - serialized_name="rapidPowerUser", default=False, required=False - ) - mapping_types = ListType( - StringType, - serialized_name="mappingTypes", - default=[], - validators=[is_known_mapping_type], - ) - campaigns = ListType(ModelType(CampaignDTO), default=[]) - organisation = IntType(required=True) - organisation_name = StringType(serialized_name="organisationName") - organisation_slug = StringType(serialized_name="organisationSlug") - organisation_logo = StringType(serialized_name="organisationLogo") - country_tag = ListType(StringType, serialized_name="countryTag") - - license_id = IntType(serialized_name="licenseId") - allowed_usernames = ListType( - StringType(), serialized_name="allowedUsernames", default=[] - ) - priority_areas = BaseType(serialized_name="priorityAreas") - created = UTCDateTimeType() - last_updated = UTCDateTimeType(serialized_name="lastUpdated") - author = StringType() - active_mappers = IntType(serialized_name="activeMappers") - percent_mapped = IntType(serialized_name="percentMapped") - percent_validated = IntType(serialized_name="percentValidated") - percent_bad_imagery = IntType(serialized_name="percentBadImagery") - task_creation_mode = StringType( - required=True, - serialized_name="taskCreationMode", - validators=[is_known_task_creation_mode], - serialize_when_none=False, - ) - project_teams = ListType(ModelType(ProjectTeamDTO), serialized_name="teams") - mapping_editors = ListType( - StringType, - min_size=1, - required=True, - serialized_name="mappingEditors", - validators=[is_known_editor], - ) - validation_editors = ListType( - StringType, - min_size=1, - required=True, - serialized_name="validationEditors", - validators=[is_known_editor], - ) - custom_editor = ModelType( - CustomEditorDTO, serialized_name="customEditor", serialize_when_none=False - ) - interests = ListType(ModelType(InterestDTO)) +class ProjectDTO(BaseModel): + """Describes JSON model for a tasking manager project""" -class ProjectFavoriteDTO(Model): + project_id: Optional[int] = Field(None, alias="projectId") + project_status: str = Field(alias="status") + project_priority: str = Field(alias="projectPriority") + area_of_interest: Optional[dict] = Field(None, alias="areaOfInterest") + aoi_bbox: Optional[List[float]] = Field(None, alias="aoiBBOX") + tasks: Optional[dict] = None + default_locale: str = Field(alias="defaultLocale") + project_info: Optional[ProjectInfoDTO] = Field(None, alias="projectInfo") + project_info_locales: Optional[List[ProjectInfoDTO]] = Field( + None, alias="projectInfoLocales" + ) + difficulty: str = Field(alias="difficulty") + mapping_permission: str = Field(alias="mappingPermission") + validation_permission: str = Field(alias="validationPermission") + enforce_random_task_selection: Optional[bool] = Field( + False, 
alias="enforceRandomTaskSelection" + ) + private: bool + changeset_comment: Optional[str] = Field(None, alias="changesetComment") + osmcha_filter_id: Optional[str] = Field(None, alias="osmchaFilterId") + due_date: Optional[datetime] = Field(None, alias="dueDate") + imagery: Optional[str] = None + josm_preset: Optional[str] = Field(None, alias="josmPreset") + id_presets: Optional[List[str]] = Field(default=[], alias="idPresets") + extra_id_params: Optional[str] = Field(None, alias="extraIdParams") + rapid_power_user: Optional[bool] = Field(False, alias="rapidPowerUser") + mapping_types: List[str] = Field(default=[], alias="mappingTypes") + campaigns: List[CampaignDTO] = Field(default=[]) + organisation: int + organisation_name: Optional[str] = Field(None, alias="organisationName") + organisation_slug: Optional[str] = Field(None, alias="organisationSlug") + organisation_logo: Optional[str] = Field(None, alias="organisationLogo") + country_tag: Optional[List[str]] = Field(None, alias="countryTag") + license_id: Optional[int] = Field(None, alias="licenseId") + allowed_usernames: Optional[List[str]] = Field(default=[], alias="allowedUsernames") + priority_areas: Optional[List[Dict]] = Field(None, alias="priorityAreas") + created: Optional[datetime] = None + last_updated: Optional[datetime] = Field(None, alias="lastUpdated") + author: Optional[str] = None + active_mappers: Optional[int] = Field(None, alias="activeMappers") + percent_mapped: Optional[int] = Field(None, alias="percentMapped") + percent_validated: Optional[int] = Field(None, alias="percentValidated") + percent_bad_imagery: Optional[int] = Field(None, alias="percentBadImagery") + task_creation_mode: str = Field(alias="taskCreationMode") + project_teams: Optional[List[ProjectTeamDTO]] = Field(None, alias="teams") + mapping_editors: List[str] = Field(alias="mappingEditors") + validation_editors: List[str] = Field(alias="validationEditors") + custom_editor: Optional[CustomEditorDTO] = Field(None, alias="customEditor") + interests: Optional[List[InterestDTO]] = None + + class Config: + populate_by_name = True + + # TODO CHeck validators. 
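# A minimal sketch, kept as comments, of how the disabled checks below could be
# wired up with Pydantic v2's field_validator, reusing this module's is_known_*
# helpers; the validator names here are illustrative assumptions only:
#
#     from pydantic import field_validator
#
#     @field_validator("project_status", mode="before")
#     @classmethod
#     def _check_project_status(cls, value):
#         return is_known_project_status(value)
#
#     @field_validator("mapping_editors", "validation_editors", mode="before")
#     @classmethod
#     def _check_editors(cls, values):
#         return [is_known_editor(v) for v in values]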
+ # @validator('project_status') + # def validate_project_status(cls, value): + # if not is_known_project_status(value): + # raise ValueError('Invalid project status') + # return value + + # @validator('project_priority') + # def validate_project_priority(cls, value): + # if not is_known_project_priority(value): + # raise ValueError('Invalid project priority') + # return value + + # @validator('difficulty') + # def validate_difficulty(cls, value): + # if not is_known_project_difficulty(value): + # raise ValueError('Invalid project difficulty') + # return value + + # @validator('mapping_permission') + # def validate_mapping_permission(cls, value): + # if not is_known_mapping_permission(value): + # raise ValueError('Invalid mapping permission') + # return value + + # @validator('validation_permission') + # def validate_validation_permission(cls, value): + # if not is_known_validation_permission(value): + # raise ValueError('Invalid validation permission') + # return value + + # @validator('mapping_types', each_item=True) + # def validate_mapping_types(cls, value): + # if not is_known_mapping_type(value): + # raise ValueError('Invalid mapping type') + # return value + + # @validator('task_creation_mode') + # def validate_task_creation_mode(cls, value): + # if not is_known_task_creation_mode(value): + # raise ValueError('Invalid task creation mode') + # return value + + # @validator('mapping_editors', 'validation_editors', each_item=True) + # def validate_editors(cls, value): + # if not is_known_editor(value): + # raise ValueError('Invalid editor') + # return value + + +class ProjectFavoriteDTO(BaseModel): """DTO used to favorite a project""" - project_id = IntType(required=True) - user_id = IntType(required=True) + project_id: int + user_id: int -class ProjectFavoritesDTO(Model): - """DTO to retrieve favorited projects""" +class ProjectFavoritesDTO(BaseModel): + def __init__(self, favorited_projects: List[ProjectDTO] = None, **kwargs): + super().__init__(**kwargs) + self.favorited_projects = favorited_projects or [] - def __init__(self): - super().__init__() - self.favorited_projects = [] + favorited_projects: List[ProjectDTO] = Field(default=[], alias="favoritedProjects") - favorited_projects = ListType( - ModelType(ProjectDTO), serialized_name="favoritedProjects" - ) + class Config: + populate_by_name = True -class ProjectSearchDTO(Model): +class ProjectSearchDTO(BaseModel): """Describes the criteria users use to filter active projects""" - preferred_locale = StringType(default="en") - difficulty = StringType(validators=[is_known_project_difficulty]) - action = StringType() - mapping_types = ListType(StringType, validators=[is_known_mapping_type]) - mapping_types_exact = BooleanType(required=False) - project_statuses = ListType(StringType, validators=[is_known_project_status]) - organisation_name = StringType() - organisation_id = IntType() - team_id = IntType() - campaign = StringType() - order_by = StringType() - order_by_type = StringType() - country = StringType() - page = IntType(required=True) - text_search = StringType() - mapping_editors = ListType(StringType, validators=[is_known_editor]) - validation_editors = ListType(StringType, validators=[is_known_editor]) - teams = ListType(StringType()) - interests = ListType(IntType()) - created_by = IntType(required=False) - mapped_by = IntType(required=False) - favorited_by = IntType(required=False) - managed_by = IntType(required=False) - based_on_user_interests = IntType(required=False) - omit_map_results = BooleanType(required=False) - 
last_updated_lte = StringType(required=False) - last_updated_gte = StringType(required=False) - created_lte = StringType(required=False) - created_gte = StringType(required=False) - partner_id = IntType(required=False) - partnership_from = StringType(required=False) - partnership_to = StringType(required=False) - download_as_csv = BooleanType(required=False) + preferred_locale: Optional[str] = "en" + difficulty: Optional[str] = Field(None, validators=[is_known_project_difficulty]) + action: Optional[str] = None + mapping_types: List[str] = Field(None, validators=[is_known_mapping_type]) + mapping_types_exact: Optional[bool] = None + project_statuses: List[str] = Field(None, validators=[is_known_project_status]) + organisation_name: Optional[str] = None + organisation_id: Optional[int] = None + team_id: Optional[int] = None + campaign: Optional[str] = None + order_by: Optional[str] = None + order_by_type: Optional[str] = None + country: Optional[str] = None + page: Optional[int] = None + text_search: Optional[str] = None + mapping_editors: Optional[List] = Field(None, validators=[is_known_editor]) + validation_editors: Optional[List] = Field(None, validators=[is_known_editor]) + teams: List[str] = None + interests: List[int] = None + created_by: Optional[int] = None + mapped_by: Optional[int] = None + favorited_by: Optional[int] = None + managed_by: Optional[int] = None + based_on_user_interests: Optional[int] = None + omit_map_results: Optional[bool] = None + last_updated_lte: Optional[str] = None + last_updated_gte: Optional[str] = None + created_lte: Optional[str] = None + created_gte: Optional[str] = None + partner_id: Optional[int] = None + partnership_from: Optional[str] = None + partnership_to: Optional[str] = None + download_as_csv: Optional[bool] = None def __hash__(self): """Make object hashable so we can cache user searches""" @@ -378,77 +416,82 @@ def __hash__(self): ) -class ProjectSearchBBoxDTO(Model): - bbox = ListType(FloatType, required=True, min_size=4, max_size=4) - input_srid = IntType(required=True, choices=[4326]) - preferred_locale = StringType(required=False, default="en") - project_author = IntType(required=False, serialized_name="projectAuthor") - - -class ListSearchResultDTO(Model): - """Describes one search result""" - - project_id = IntType(required=True, serialized_name="projectId") - locale = StringType(required=True) - name = StringType(default="") - short_description = StringType(serialized_name="shortDescription", default="") - difficulty = StringType(required=True, serialized_name="difficulty") - priority = StringType(required=True) - organisation_name = StringType(serialized_name="organisationName") - organisation_logo = StringType(serialized_name="organisationLogo") - campaigns = ListType(ModelType(CampaignDTO), default=[]) - percent_mapped = IntType(serialized_name="percentMapped") - percent_validated = IntType(serialized_name="percentValidated") - status = StringType(serialized_name="status") - active_mappers = IntType(serialized_name="activeMappers") - last_updated = UTCDateTimeType(serialized_name="lastUpdated") - due_date = UTCDateTimeType(serialized_name="dueDate") - total_contributors = IntType(serialized_name="totalContributors") - country = StringType(serialize_when_none=False) - - creation_date = UTCDateTimeType(serialized_name="creationDate", required=True) - author = StringType(serialize_when_none=False) - partner_names = ListType(StringType, serialized_name="partnerNames") - total_area = FloatType(required=True, 
serialized_name="totalAreaSquareKilometers") - - -class ProjectSearchResultsDTO(Model): +class ProjectSearchBBoxDTO(BaseModel): + bbox: List[float] = Field(..., min_items=4, max_items=4) + input_srid: int = Field(..., choices=[4326]) + preferred_locale: Optional[str] = Field(default="en") + project_author: Optional[int] = Field(default=None, alias="projectAuthor") + + class Config: + populate_by_name = True + + +class ListSearchResultDTO(BaseModel): + project_id: Optional[int] = Field(alias="projectId", default=None) + locale: Optional[str] = None + name: Optional[str] = Field(default="") + short_description: str = Field(default="", alias="shortDescription") + difficulty: Optional[str] = None + priority: Optional[str] = None + organisation_name: Optional[str] = Field(alias="organisationName", default=None) + organisation_logo: Optional[str] = Field(alias="organisationLogo", default=None) + campaigns: Optional[List[CampaignDTO]] = Field(default=[]) + percent_mapped: Optional[int] = Field(alias="percentMapped", default=None) + percent_validated: Optional[int] = Field(alias="percentValidated", default=None) + status: Optional[str] = None + active_mappers: Optional[int] = Field(alias="activeMappers", default=None) + last_updated: Optional[datetime] = Field(alias="lastUpdated", default=None) + due_date: Optional[datetime] = Field(alias="dueDate", default=None) + total_contributors: Optional[int] = Field(alias="totalContributors", default=None) + country: Optional[List[str]] = Field(default=None) + + # csv fields + creation_date: Optional[datetime] = Field(alias="creationDate", default=None) + author: Optional[str] = None + partner_names: Optional[List[str]] = Field(default=None, alias="partnerNames") + total_area: Optional[float] = Field(None, alias="totalAreaSquareKilometers") + + class Config: + populate_by_name = True + + json_encoders = {datetime: lambda v: v.isoformat() + "Z" if v else None} + + +class ProjectSearchResultsDTO(BaseModel): """Contains all results for the search criteria""" - def __init__(self): - """DTO constructor initialise all arrays to empty""" - super().__init__() - self.results = [] - self.map_results = [] + map_results: Optional[Any] = Field(default_factory=list, alias="mapResults") + results: Optional[List["ListSearchResultDTO"]] = Field(default_factory=list) + pagination: Optional["Pagination"] = Field(default_factory=dict) - map_results = BaseType(serialized_name="mapResults") - results = ListType(ModelType(ListSearchResultDTO)) - pagination = ModelType(Pagination) + class Config: + populate_by_name = True -class LockedTasksForUser(Model): +class LockedTasksForUser(BaseModel): """Describes all tasks locked by an individual user""" - def __init__(self): - """DTO constructor initialise all arrays to empty""" - super().__init__() - self.locked_tasks = [] + locked_tasks: Optional[List[int]] = Field([], alias="lockedTasks") + project: Optional[int] = Field(None, alias="projectId") + task_status: Optional[str] = Field(None, alias="taskStatus") - locked_tasks = ListType(IntType, serialized_name="lockedTasks") - project = IntType(serialized_name="projectId") - task_status = StringType(serialized_name="taskStatus") + class Config: + populate_by_name = True -class ProjectComment(Model): +class ProjectComment(BaseModel): """Describes an individual user comment on a project task""" - comment = StringType() - comment_date = UTCDateTimeType(serialized_name="commentDate") - user_name = StringType(serialized_name="userName") - task_id = IntType(serialized_name="taskId") + comment: 
str + comment_date: datetime = Field(alias="commentDate") + user_name: str = Field(alias="userName") + task_id: int = Field(alias="taskId") + + class Config: + populate_by_name = True -class ProjectCommentsDTO(Model): +class ProjectCommentsDTO(BaseModel): """Contains all comments on a project""" def __init__(self): @@ -456,126 +499,109 @@ def __init__(self): super().__init__() self.comments = [] - comments = ListType(ModelType(ProjectComment)) + comments: List[ProjectComment] -class ProjectContribDTO(Model): - date = DateType(required=True) - mapped = IntType(required=True) - validated = IntType(required=True) - cumulative_mapped = IntType(required=False) - cumulative_validated = IntType(required=False) - total_tasks = IntType(required=False) +class ProjectContribDTO(BaseModel): + date: date + mapped: int + validated: int + cumulative_mapped: Optional[int] = None + cumulative_validated: Optional[int] = None + total_tasks: Optional[int] = None -class ProjectContribsDTO(Model): +class ProjectContribsDTO(BaseModel): """Contains all contributions on a project by day""" - def __init__(self): - """DTO constructor initialise all arrays to empty""" - super().__init__() - self.mapping = [] - self.validation = [] - - stats = ListType(ModelType(ProjectContribDTO)) - - -class ProjectSummary(Model): - """Model used for PM dashboard""" - - project_id = IntType(required=True, serialized_name="projectId") - default_locale = StringType(serialized_name="defaultLocale") - author = StringType() - created = UTCDateTimeType() - due_date = UTCDateTimeType(serialized_name="dueDate") - last_updated = UTCDateTimeType(serialized_name="lastUpdated") - priority = StringType(serialized_name="projectPriority") - campaigns = ListType(ModelType(CampaignDTO), default=[]) - organisation = IntType() - organisation_name = StringType(serialized_name="organisationName") - organisation_slug = StringType(serialized_name="organisationSlug") - organisation_logo = StringType(serialized_name="organisationLogo") - country_tag = ListType(StringType, serialized_name="countryTag") - osmcha_filter_id = StringType(serialized_name="osmchaFilterId") - mapping_types = ListType( - StringType, serialized_name="mappingTypes", validators=[is_known_mapping_type] - ) - - changeset_comment = StringType(serialized_name="changesetComment") - percent_mapped = IntType(serialized_name="percentMapped") - percent_validated = IntType(serialized_name="percentValidated") - percent_bad_imagery = IntType(serialized_name="percentBadImagery") - aoi_centroid = BaseType(serialized_name="aoiCentroid") - difficulty = StringType(serialized_name="difficulty") - mapping_permission = IntType( - serialized_name="mappingPermission", validators=[is_known_mapping_permission] - ) - validation_permission = IntType( - serialized_name="validationPermission", - validators=[is_known_validation_permission], - ) - allowed_usernames = ListType( - StringType(), serialized_name="allowedUsernames", default=[] - ) - random_task_selection_enforced = BooleanType( - required=False, default=False, serialized_name="enforceRandomTaskSelection" - ) - private = BooleanType(serialized_name="private") - allowed_users = ListType(StringType, serialized_name="allowedUsernames", default=[]) - project_teams = ListType(ModelType(ProjectTeamDTO), serialized_name="teams") - project_info = ModelType( - ProjectInfoDTO, serialized_name="projectInfo", serialize_when_none=False - ) - short_description = StringType(serialized_name="shortDescription") - status = StringType() - imagery = StringType() - license_id 
= IntType(serialized_name="licenseId") - id_presets = ListType(StringType, serialized_name="idPresets", default=[]) - extra_id_params = StringType(serialized_name="extraIdParams") - rapid_power_user = BooleanType( - serialized_name="rapidPowerUser", default=False, required=False - ) - mapping_editors = ListType( - StringType, - min_size=1, - required=True, - serialized_name="mappingEditors", - validators=[is_known_editor], - ) - validation_editors = ListType( - StringType, - min_size=1, - required=True, - serialized_name="validationEditors", - validators=[is_known_editor], - ) - custom_editor = ModelType( - CustomEditorDTO, serialized_name="customEditor", serialize_when_none=False - ) - - -class PMDashboardDTO(Model): + stats: Optional[List[ProjectContribDTO]] = None + + +class ProjectSummary(BaseModel): + project_id: int = Field(..., alias="projectId") + default_locale: Optional[str] = Field(None, alias="defaultLocale") + author: Optional[str] = None + created: Optional[datetime] = None + due_date: Optional[datetime] = Field(None, alias="dueDate") + last_updated: Optional[datetime] = Field(None, alias="lastUpdated") + priority: Optional[str] = Field(None, alias="projectPriority") + campaigns: List[CampaignDTO] = Field(default_factory=list) + organisation: Optional[int] = None + organisation_name: Optional[str] = Field(None, alias="organisationName") + organisation_slug: Optional[str] = Field(None, alias="organisationSlug") + organisation_logo: Optional[str] = Field(None, alias="organisationLogo") + country_tag: List[str] = Field(default_factory=list, alias="countryTag") + osmcha_filter_id: Optional[str] = Field(None, alias="osmchaFilterId") + mapping_types: List[str] = Field(default_factory=list, alias="mappingTypes") + changeset_comment: Optional[str] = Field(None, alias="changesetComment") + percent_mapped: Optional[int] = Field(None, alias="percentMapped") + percent_validated: Optional[int] = Field(None, alias="percentValidated") + percent_bad_imagery: Optional[int] = Field(None, alias="percentBadImagery") + aoi_centroid: Optional[Union[dict, None]] = Field(None, alias="aoiCentroid") + difficulty: Optional[str] = Field(None, alias="difficulty") + mapping_permission: Optional[int] = Field(None, alias="mappingPermission") + validation_permission: Optional[int] = Field(None, alias="validationPermission") + allowed_usernames: List[str] = Field(default_factory=list, alias="allowedUsernames") + random_task_selection_enforced: bool = Field( + default=False, alias="enforceRandomTaskSelection" + ) + private: Optional[bool] = Field(None, alias="private") + allowed_users: List[str] = Field(default_factory=list, alias="allowedUsernames") + project_teams: List[ProjectTeamDTO] = Field(default_factory=list, alias="teams") + project_info: Optional[ProjectInfoDTO] = Field(None, alias="projectInfo") + short_description: Optional[str] = Field(None, alias="shortDescription") + status: Optional[str] = None + imagery: Optional[str] = None + license_id: Optional[int] = Field(None, alias="licenseId") + id_presets: List[str] = Field(default_factory=list, alias="idPresets") + extra_id_params: Optional[str] = Field(None, alias="extraIdParams") + rapid_power_user: bool = Field(default=False, alias="rapidPowerUser") + mapping_editors: List[str] = Field(..., min_items=1, alias="mappingEditors") + validation_editors: List[str] = Field(..., min_items=1, alias="validationEditors") + custom_editor: Optional[CustomEditorDTO] = Field(None, alias="customEditor") + + class Config: + populate_by_name = True + + # TODO: Make 
Validators work. + + # @field_validator('mapping_types', 'mapping_editors', 'validation_editors', mode='plain') + # def validate_list_fields(cls, v, field): + # print(field,'-----') + # field_name = field.field_name + # if field_name == 'mapping_types' and not is_known_mapping_type(v): + # raise ValueError(f"Invalid value in {field_name}") + # if field_name in ['mapping_editors', 'validation_editors'] and not is_known_editor(v): + # raise ValueError(f"Invalid value in {field_name}") + # return v + + # @field_validator('mapping_permission', 'validation_permission', mode='plain') + # def validate_permissions(cls, v, field): + # if field.name == 'mapping_permission' and not is_known_mapping_permission(v): + # raise ValueError(f"Invalid value in {field.name}") + # if field.name == 'validation_permission' and not is_known_validation_permission(v): + # raise ValueError(f"Invalid value in {field.name}") + # return v + + +class PMDashboardDTO(BaseModel): """DTO for constructing the PM Dashboard""" - def __init__(self): - """DTO constructor initialise all arrays to empty""" - super().__init__() - self.draft_projects = [] - self.archived_projects = [] - self.active_projects = [] - - draft_projects = ListType( - ModelType(ProjectSummary), serialized_name="draftProjects" + draft_projects: Optional[List[ProjectSummary]] = Field( + default_factory=list, alias="draftProjects" ) - active_projects = ListType( - ModelType(ProjectSummary), serialized_name="activeProjects" + active_projects: Optional[List[ProjectSummary]] = Field( + default_factory=list, alias="activeProjects" ) - archived_projects = ListType( - ModelType(ProjectSummary), serialized_name="archivedProjects" + archived_projects: Optional[List[ProjectSummary]] = Field( + default_factory=list, alias="archivedProjects" ) + class Config: + populate_by_name = True + -class ProjectTaskAnnotationsDTO(Model): +class ProjectTaskAnnotationsDTO(BaseModel): """DTO for task annotations of a project""" def __init__(self): @@ -583,36 +609,42 @@ def __init__(self): super().__init__() self.tasks = [] - project_id = IntType(required=True, serialized_name="projectId") - tasks = ListType( - ModelType(TaskAnnotationDTO), required=True, serialized_name="tasks" - ) + project_id: Optional[int] = Field(None, alias="projectId") + tasks: Optional[List[TaskAnnotationDTO]] = Field(None, alias="tasks") -class ProjectStatsDTO(Model): +class ProjectStatsDTO(BaseModel): """DTO for detailed stats on a project""" - project_id = IntType(required=True, serialized_name="projectId") - area = FloatType(serialized_name="projectArea(in sq.km)") - total_mappers = IntType(serialized_name="totalMappers") - total_tasks = IntType(serialized_name="totalTasks") - total_comments = IntType(serialized_name="totalComments") - total_mapping_time = IntType(serialized_name="totalMappingTime") - total_validation_time = IntType(serialized_name="totalValidationTime") - total_time_spent = IntType(serialized_name="totalTimeSpent") - average_mapping_time = IntType(serialized_name="averageMappingTime") - average_validation_time = IntType(serialized_name="averageValidationTime") - percent_mapped = IntType(serialized_name="percentMapped") - percent_validated = IntType(serialized_name="percentValidated") - percent_bad_imagery = IntType(serialized_name="percentBadImagery") - aoi_centroid = BaseType(serialized_name="aoiCentroid") - time_to_finish_mapping = IntType(serialized_name="timeToFinishMapping") - time_to_finish_validating = IntType(serialized_name="timeToFinishValidating") - - -class 
ProjectUserStatsDTO(Model): + project_id: Optional[int] = Field(None, alias="projectId") + area: Optional[float] = Field(None, alias="projectArea(in sq.km)") + total_mappers: Optional[int] = Field(None, alias="totalMappers") + total_tasks: Optional[int] = Field(None, alias="totalTasks") + total_comments: Optional[int] = Field(None, alias="totalComments") + total_mapping_time: Optional[int] = Field(None, alias="totalMappingTime") + total_validation_time: Optional[int] = Field(None, alias="totalValidationTime") + total_time_spent: Optional[int] = Field(None, alias="totalTimeSpent") + average_mapping_time: Optional[int] = Field(None, alias="averageMappingTime") + average_validation_time: Optional[int] = Field(None, alias="averageValidationTime") + percent_mapped: Optional[int] = Field(None, alias="percentMapped") + percent_validated: Optional[int] = Field(None, alias="percentValidated") + percent_bad_imagery: Optional[int] = Field(None, alias="percentBadImagery") + aoi_centroid: Optional[str] = Field(None, alias="aoiCentroid") + time_to_finish_mapping: Optional[int] = Field(None, alias="timeToFinishMapping") + time_to_finish_validating: Optional[int] = Field( + None, alias="timeToFinishValidating" + ) + + class Config: + populate_by_name = True + + +class ProjectUserStatsDTO(BaseModel): """DTO for time spent by users on a project""" - time_spent_mapping = IntType(serialized_name="timeSpentMapping") - time_spent_validating = IntType(serialized_name="timeSpentValidating") - total_time_spent = IntType(serialized_name="totalTimeSpent") + time_spent_mapping: Optional[int] = Field(default=0, alias="timeSpentMapping") + time_spent_validating: Optional[int] = Field(default=0, alias="timeSpentValidating") + total_time_spent: Optional[int] = Field(default=0, alias="totalTimeSpent") + + class Config: + populate_by_name = True diff --git a/backend/models/dtos/project_partner_dto.py b/backend/models/dtos/project_partner_dto.py index 3b068be389..8b1d202d56 100644 --- a/backend/models/dtos/project_partner_dto.py +++ b/backend/models/dtos/project_partner_dto.py @@ -1,6 +1,6 @@ -from schematics import Model -from schematics.types import LongType, UTCDateTimeType, StringType -from schematics.exceptions import ValidationError +from pydantic import BaseModel, Field +from datetime import datetime +from typing import Optional from enum import Enum @@ -20,45 +20,86 @@ def is_known_action(value): ) -class ProjectPartnershipDTO(Model): +# class ProjectPartnershipDTO(Model): +# """DTO for the link between a Partner and a Project""" + +# id = LongType(required=True) +# project_id = LongType(required=True, serialized_name="projectId") +# partner_id = LongType(required=True, serialized_name="partnerId") +# started_on = UTCDateTimeType(required=True, serialized_name="startedOn") +# ended_on = UTCDateTimeType(serialized_name="endedOn") + + +class ProjectPartnershipDTO(BaseModel): """DTO for the link between a Partner and a Project""" - id = LongType(required=True) - project_id = LongType(required=True, serialized_name="projectId") - partner_id = LongType(required=True, serialized_name="partnerId") - started_on = UTCDateTimeType(required=True, serialized_name="startedOn") - ended_on = UTCDateTimeType(serialized_name="endedOn") + id: Optional[int] = None + project_id: int = Field(..., alias="projectId") + partner_id: int = Field(..., alias="partnerId") + started_on: datetime = Field(..., alias="startedOn") + ended_on: Optional[datetime] = Field(None, alias="endedOn") + class Config: + populate_by_name = True + json_encoders 
= {datetime: lambda v: v.isoformat() + "Z" if v else None} -class ProjectPartnershipUpdateDTO(Model): - """DTO for updating the time range of the link between a Partner and a Project""" - started_on = UTCDateTimeType(serialized_name="startedOn") - ended_on = UTCDateTimeType(serialized_name="endedOn") +# class ProjectPartnershipUpdateDTO(Model): +# """DTO for updating the time range of the link between a Partner and a Project""" +# started_on = UTCDateTimeType(serialized_name="startedOn") +# ended_on = UTCDateTimeType(serialized_name="endedOn") -class ProjectPartnershipHistoryDTO(Model): - """DTO for Logs of changes to all Project-Partner links""" - id = LongType(required=True) - partnership_id = LongType(required=True, serialized_name="partnershipId") - project_id = LongType(required=True, serialized_name="projectId") - partner_id = LongType(required=True, serialized_name="partnerId") - started_on_old = UTCDateTimeType( - serialized_name="startedOnOld", serialize_when_none=False - ) - ended_on_old = UTCDateTimeType( - serialized_name="endedOnOld", serialize_when_none=False - ) - started_on_new = UTCDateTimeType( - serialized_name="startedOnNew", serialize_when_none=False - ) - ended_on_new = UTCDateTimeType( - serialized_name="endedOnNew", serialize_when_none=False - ) +class ProjectPartnershipUpdateDTO(BaseModel): + """DTO for updating the time range of the link between a Partner and a Project""" + + started_on: Optional[datetime] = Field(None, alias="startedOn") + ended_on: Optional[datetime] = Field(None, alias="endedOn") + + class Config: + populate_by_name = True + + +# class ProjectPartnershipHistoryDTO(Model): +# """DTO for Logs of changes to all Project-Partner links""" + +# id = LongType(required=True) +# partnership_id = LongType(required=True, serialized_name="partnershipId") +# project_id = LongType(required=True, serialized_name="projectId") +# partner_id = LongType(required=True, serialized_name="partnerId") +# started_on_old = UTCDateTimeType( +# serialized_name="startedOnOld", serialize_when_none=False +# ) +# ended_on_old = UTCDateTimeType( +# serialized_name="endedOnOld", serialize_when_none=False +# ) +# started_on_new = UTCDateTimeType( +# serialized_name="startedOnNew", serialize_when_none=False +# ) +# ended_on_new = UTCDateTimeType( +# serialized_name="endedOnNew", serialize_when_none=False +# ) + + +# action = StringType(validators=[is_known_action]) +# actionDate = UTCDateTimeType(serialized_name="actionDate") +class ProjectPartnershipHistoryDTO(BaseModel): + """DTO for Logs of changes to all Project-Partner links""" - action = StringType(validators=[is_known_action]) - actionDate = UTCDateTimeType(serialized_name="actionDate") + id: int + partnership_id: int = Field(..., alias="partnershipId") + project_id: int = Field(..., alias="projectId") + partner_id: int = Field(..., alias="partnerId") + started_on_old: Optional[datetime] = Field(None, alias="startedOnOld") + ended_on_old: Optional[datetime] = Field(None, alias="endedOnOld") + started_on_new: Optional[datetime] = Field(None, alias="startedOnNew") + ended_on_new: Optional[datetime] = Field(None, alias="endedOnNew") + action: str + action_date: Optional[datetime] = Field(None, alias="actionDate") + + class Config: + populate_by_name = True class ProjectPartnerAction(Enum): diff --git a/backend/models/dtos/settings_dto.py b/backend/models/dtos/settings_dto.py index 78f58af433..3b9f11cb13 100644 --- a/backend/models/dtos/settings_dto.py +++ b/backend/models/dtos/settings_dto.py @@ -1,20 +1,21 @@ -from schematics 
import Model -from schematics.types import StringType -from schematics.types.compound import ListType, ModelType +from pydantic import BaseModel, Field +from typing import List, Optional -class SupportedLanguage(Model): +class SupportedLanguage(BaseModel): """Model representing language that Tasking Manager supports""" - code = StringType() - language = StringType() + code: Optional[str] = None + language: Optional[str] = None -class SettingsDTO(Model): +class SettingsDTO(BaseModel): """DTO used to define available tags""" - mapper_level_intermediate = StringType(serialized_name="mapperLevelIntermediate") - mapper_level_advanced = StringType(serialized_name="mapperLevelAdvanced") - supported_languages = ListType( - ModelType(SupportedLanguage), serialized_name="supportedLanguages" + mapper_level_intermediate: Optional[str] = Field( + None, alias="mapperLevelIntermediate" + ) + mapper_level_advanced: Optional[str] = Field(None, alias="mapperLevelAdvanced") + supported_languages: Optional[List[SupportedLanguage]] = Field( + None, alias="supportedLanguages" ) diff --git a/backend/models/dtos/stats_dto.py b/backend/models/dtos/stats_dto.py index ae16880cab..156c95ee30 100644 --- a/backend/models/dtos/stats_dto.py +++ b/backend/models/dtos/stats_dto.py @@ -1,129 +1,166 @@ -from schematics import Model -from schematics.types import StringType, IntType, FloatType, BooleanType, DateType -from schematics.types.compound import ListType, ModelType from backend.models.dtos.mapping_dto import TaskHistoryDTO, TaskStatusDTO +from pydantic import BaseModel, Field +from typing import Optional, List +from datetime import datetime -class UserContribution(Model): +class UserContribution(BaseModel): """User contribution for a project""" - username = StringType() - mapping_level = StringType(serialized_name="mappingLevel") - picture_url = StringType(serialized_name="pictureUrl") - mapped = IntType() - validated = IntType() - bad_imagery = IntType(serialized_name="badImagery") - total = IntType() - mapped_tasks = ListType(IntType, serialized_name="mappedTasks") - validated_tasks = ListType(IntType, serialized_name="validatedTasks") - bad_imagery_tasks = ListType(IntType, serialized_name="badImageryTasks") - name = StringType() - date_registered = DateType(serialized_name="dateRegistered") - - -class ProjectContributionsDTO(Model): + def __init__(self, UserContribution): + super().__init__() + self.username = UserContribution["username"] + self.mapping_level = UserContribution["mapping_level"] + self.picture_url = UserContribution["picture_url"] + self.mapped = UserContribution["mapped"] + self.validated = UserContribution["validated"] + self.bad_imagery = UserContribution["bad_imagery"] + self.total = UserContribution["total"] + self.mapped_tasks = UserContribution["mapped_tasks"] + self.validated_tasks = UserContribution["validated_tasks"] + self.bad_imagery_tasks = UserContribution["bad_imagery_tasks"] + self.name = UserContribution["name"] + self.date_registered = UserContribution["date_registered"] + + username: Optional[str] = None + mapping_level: Optional[str] = Field(alias="mappingLevel", default=None) + picture_url: Optional[str] = Field(alias="pictureUrl", default=None) + mapped: Optional[int] = None + validated: Optional[int] = None + bad_imagery: Optional[int] = Field(alias="badImagery", default=None) + total: Optional[int] = None + mapped_tasks: Optional[List[int]] = Field(alias="mappedTasks", default=None) + validated_tasks: Optional[List[int]] = Field(alias="validatedTasks", default=None) + 
bad_imagery_tasks: Optional[List[int]] = Field( + alias="badImageryTasks", default=None + ) + name: Optional[str] = None + date_registered: Optional[datetime] = Field(alias="dateRegistered", default=None) + + +# class UserContribution(BaseModel): +# """User contribution for a project""" + +# username: Optional[str] = None +# mapping_level: Optional[str] = Field(None, alias="mappingLevel") +# picture_url: Optional[str] = Field(None, alias="pictureUrl") +# mapped: Optional[int] = None +# validated: Optional[int] = None +# bad_imagery: Optional[int] = Field(None, alias="badImagery") +# total: Optional[int] = None +# mapped_tasks: Optional[List[int]] = Field(default_factory=list, alias="mappedTasks") +# validated_tasks: Optional[List[int]] = Field(default_factory=list, alias="validatedTasks") +# bad_imagery_tasks: Optional[List[int]] = Field(default_factory=list, alias="badImageryTasks") +# name: Optional[str] = None +# date_registered: Optional[datetime] = Field(None, alias="dateRegistered") + +# class Config: +# allow_population_by_field_name = True + + +class ProjectContributionsDTO(BaseModel): """DTO for all user contributions on a project""" def __init__(self): super().__init__() self.user_contributions = [] - user_contributions = ListType( - ModelType(UserContribution), serialized_name="userContributions" + user_contributions: Optional[List[UserContribution]] = Field( + alias="userContributions", default=None ) -class Pagination(Model): - """Properties for paginating results""" - - def __init__(self, paginated_result): - """Instantiate from a Flask-SQLAlchemy paginated result""" - super().__init__() - - self.has_next = paginated_result.has_next - self.has_prev = paginated_result.has_prev - self.next_num = paginated_result.next_num - self.page = paginated_result.page - self.pages = paginated_result.pages - self.prev_num = paginated_result.prev_num - self.per_page = paginated_result.per_page - self.total = paginated_result.total - - has_next = BooleanType(serialized_name="hasNext") - has_prev = BooleanType(serialized_name="hasPrev") - next_num = IntType(serialized_name="nextNum") - page = IntType() - pages = IntType() - prev_num = IntType(serialized_name="prevNum") - per_page = IntType(serialized_name="perPage") - total = IntType() - - -class ProjectActivityDTO(Model): +class Pagination(BaseModel): + has_next: Optional[bool] = Field(serialization_alias="hasNext", default=False) + has_prev: Optional[bool] = Field(serialization_alias="hasPrev", default=False) + next_num: Optional[int] = Field(serialization_alias="nextNum", default=None) + page: Optional[int] = None + pages: Optional[int] = None + prev_num: Optional[int] = Field(serialization_alias="prevNum", default=None) + per_page: Optional[int] = Field(serialization_alias="perPage", default=None) + total: Optional[int] = None + + @staticmethod + def from_total_count(page: int, per_page: int, total: int) -> "Pagination": + pages = (total + per_page - 1) // per_page + has_next = page < pages + has_prev = page > 1 + next_num = page + 1 if has_next else None + prev_num = page - 1 if has_prev else None + + return Pagination( + has_next=has_next, + has_prev=has_prev, + next_num=next_num, + page=page, + pages=pages, + prev_num=prev_num, + per_page=per_page, + total=total, + ) + + +class ProjectActivityDTO(BaseModel): """DTO to hold all project activity""" - def __init__(self): - super().__init__() - self.activity = [] - - pagination = ModelType(Pagination) - activity = ListType(ModelType(TaskHistoryDTO)) + pagination: Optional[Pagination] = None 
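#     Illustrative usage of the new pagination helper (assumes Pydantic v2; this
#     snippet is an annotation, not part of the patched module). from_total_count
#     replaces the old constructor that wrapped a Flask-SQLAlchemy paginated
#     result, deriving the same flags from a plain page/per_page/total triple:
#
#         p = Pagination.from_total_count(page=2, per_page=10, total=45)
#         assert p.pages == 5
#         assert p.has_prev and p.has_next
#         assert (p.prev_num, p.next_num) == (1, 3)
#         p.model_dump(by_alias=True)  # -> {"hasNext": True, "hasPrev": True, ...}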
+ activity: Optional[List[TaskHistoryDTO]] = None -class ProjectLastActivityDTO(Model): +class ProjectLastActivityDTO(BaseModel): """DTO to hold latest status from project activity""" - def __init__(self): - super().__init__() - self.activity = [] - - activity = ListType(ModelType(TaskStatusDTO)) + activity: Optional[List[TaskStatusDTO]] = Field(default_factory=list) -class OrganizationProjectsStatsDTO(Model): - draft = IntType() - published = IntType() - archived = IntType() - recent = IntType() # projects created in the current year - stale = IntType() # project without any activity in the last 6 months +class OrganizationProjectsStatsDTO(BaseModel): + draft: Optional[int] = None + published: Optional[int] = None + archived: Optional[int] = None + recent: Optional[int] = None + stale: Optional[int] = None -class OrganizationTasksStatsDTO(Model): - ready = IntType() - locked_for_mapping = IntType(serialized_name="lockedForMapping") - locked_for_validation = IntType(serialized_name="lockedForValidation") - mapped = IntType() - validated = IntType() - invalidated = IntType() - badimagery = IntType(serialized_name="badImagery") +class OrganizationTasksStatsDTO(BaseModel): + ready: Optional[int] = 0 + locked_for_mapping: Optional[int] = Field(0, serialization_alias="lockedForMapping") + locked_for_validation: Optional[int] = Field( + 0, serialization_alias="lockedForValidation" + ) + mapped: Optional[int] = 0 + validated: Optional[int] = 0 + invalidated: Optional[int] = 0 + badimagery: Optional[int] = Field(0, serialization_alias="badImagery") -class OrganizationStatsDTO(Model): - projects = ModelType(OrganizationProjectsStatsDTO) - active_tasks = ModelType(OrganizationTasksStatsDTO, serialized_name="activeTasks") +class OrganizationStatsDTO(BaseModel): + projects: Optional[OrganizationProjectsStatsDTO] = None + active_tasks: Optional[OrganizationTasksStatsDTO] = Field( + None, serialization_alias="activeTasks" + ) -class OrganizationListStatsDTO(Model): +class OrganizationListStatsDTO(BaseModel): def __init__(self, row): super().__init__() self.organisation = row[0] self.projects_created = row[1] - organisation = StringType() - projects_created = IntType(serialized_name="projectsCreated") + organisation: str + projects_created: int = Field(alias="projectsCreated") -class CampaignStatsDTO(Model): +class CampaignStatsDTO(BaseModel): def __init__(self, row): super().__init__() self.campaign = row[0] self.projects_created = row[1] - campaign = StringType() - projects_created = IntType(serialized_name="projectsCreated") + campaign: str + projects_created: int = Field(alias="projectsCreated") -class HomePageStatsDTO(Model): +class HomePageStatsDTO(BaseModel): """DTO for stats we want to display on the homepage""" def __init__(self): @@ -131,57 +168,62 @@ def __init__(self): self.organisations = [] self.campaigns = [] - mappers_online = IntType(serialized_name="mappersOnline") - total_area = IntType(serialized_name="totalArea") - tasks_mapped = IntType(serialized_name="tasksMapped") - tasks_validated = IntType(serialized_name="tasksValidated") - total_mappers = IntType(serialized_name="totalMappers") - total_validators = IntType(serialized_name="totalValidators") - total_projects = IntType(serialized_name="totalProjects") - total_mapped_area = FloatType(serialized_name="totalMappedArea") - total_validated_area = FloatType(serialized_name="totalValidatedArea") - total_organisations = IntType(serialized_name="totalOrganisations") - total_campaigns = IntType(serialized_name="totalCampaigns") - # 
avg_completion_time = IntType(serialized_name='averageCompletionTime') - organisations = ListType(ModelType(OrganizationListStatsDTO)) - campaigns = ListType(ModelType(CampaignStatsDTO)) - - -class TaskStats(Model): + mappers_online: Optional[int] = Field(None, alias="mappersOnline") + total_area: Optional[int] = Field(None, alias="totalArea") + tasks_mapped: Optional[int] = Field(None, alias="tasksMapped") + tasks_validated: Optional[int] = Field(None, alias="tasksValidated") + total_mappers: Optional[int] = Field(None, alias="totalMappers") + total_validators: Optional[int] = Field(None, alias="totalValidators") + total_projects: Optional[int] = Field(None, alias="totalProjects") + total_mapped_area: Optional[float] = Field(None, alias="totalMappedArea") + total_validated_area: Optional[float] = Field(None, alias="totalValidatedArea") + total_organisations: Optional[int] = Field(None, alias="totalOrganisations") + total_campaigns: Optional[int] = Field(None, alias="totalCampaigns") + avg_completion_time: Optional[int] = Field(None, alias="averageCompletionTime") + organisations: Optional[List[OrganizationListStatsDTO]] = None + campaigns: Optional[List[CampaignStatsDTO]] = None + + class Config: + populate_by_name = True + + +class TaskStats(BaseModel): """DTO for tasks stats for a single day""" - date = DateType(required=True) - mapped = IntType(serialized_name="mapped") - validated = IntType(serialized_name="validated") - bad_imagery = IntType(serialized_name="badImagery") + date: str + mapped: int = Field(alias="mapped") + validated: int = Field(alias="validated") + bad_imagery: int = Field(alias="badImagery") + + class Config: + populate_by_name = True -class GenderStatsDTO(Model): +class GenderStatsDTO(BaseModel): """DTO for genre stats of users.""" - male = IntType() - female = IntType() - prefer_not = IntType(serialized_name="preferNotIdentify") - self_describe = IntType(serialized_name="selfDescribe") + male: int = Field(None, alias="male") + female: int = Field(None, alias="female") + prefer_not: int = Field(None, alias="preferNotIdentify") + self_describe: int = Field(None, alias="selfDescribe") -class UserStatsDTO(Model): +class UserStatsDTO(BaseModel): """DTO for user stats.""" - total = IntType() - beginner = IntType() - intermediate = IntType() - advanced = IntType() - contributed = IntType() - email_verified = IntType(serialized_name="emailVerified") - genders = ModelType(GenderStatsDTO) + total: int = Field(None, alias="total") + beginner: int = Field(None, alias="beginner") + intermediate: int = Field(None, alias="intermediate") + advanced: int = Field(None, alias="advanced") + contributed: int = Field(None, alias="contributed") + email_verified: int = Field(None, alias="emailVerified") + genders: GenderStatsDTO = Field(None, alias="genders") -class TaskStatsDTO(Model): +class TaskStatsDTO(BaseModel): """Contains all tasks stats broken down by day""" - def __init__(self): - super().__init__() - self.stats = [] + stats: List[TaskStats] = Field([], alias="taskStats") - stats = ListType(ModelType(TaskStats), serialized_name="taskStats") + class Config: + populate_by_name = True diff --git a/backend/models/dtos/tags_dto.py b/backend/models/dtos/tags_dto.py index ba3dabd1c4..97cd4b6c60 100644 --- a/backend/models/dtos/tags_dto.py +++ b/backend/models/dtos/tags_dto.py @@ -1,9 +1,8 @@ -from schematics import Model -from schematics.types import StringType -from schematics.types.compound import ListType +from pydantic import BaseModel +from typing import List, Optional -class 
TagsDTO(Model): +class TagsDTO(BaseModel): """DTO used to define available tags""" - tags = ListType(StringType) + tags: Optional[List[str]] = None diff --git a/backend/models/dtos/task_annotation_dto.py b/backend/models/dtos/task_annotation_dto.py index 67f6cd3fd7..4c8b446b7c 100644 --- a/backend/models/dtos/task_annotation_dto.py +++ b/backend/models/dtos/task_annotation_dto.py @@ -1,13 +1,15 @@ -from schematics import Model -from schematics.types import StringType, IntType -from schematics.types.compound import DictType +from pydantic import BaseModel, Field +from typing import Optional -class TaskAnnotationDTO(Model): +class TaskAnnotationDTO(BaseModel): """Model for a single task annotation""" - task_id = IntType(required=True, serialized_name="taskId") - annotation_type = StringType(required=True, serialized_name="annotationType") - annotation_source = StringType(serialized_name="annotationSource") - annotation_markdown = StringType(serialized_name="annotationMarkdown") - properties = DictType(StringType, serialized_name="properties") + task_id: Optional[int] = Field(None, alias="taskId") + annotation_type: Optional[str] = Field(None, alias="annotationType") + annotation_source: Optional[str] = Field(None, alias="annotationSource") + annotation_markdown: Optional[str] = Field(None, alias="annotationMarkdown") + properties: Optional[dict] = Field(None, alias="properties") + + class Config: + populate_by_name = True diff --git a/backend/models/dtos/team_dto.py b/backend/models/dtos/team_dto.py index 58f2ee692b..3fd53c4964 100644 --- a/backend/models/dtos/team_dto.py +++ b/backend/models/dtos/team_dto.py @@ -1,197 +1,246 @@ -from schematics import Model -from schematics.exceptions import ValidationError -from schematics.types import ( - BooleanType, - IntType, - StringType, - LongType, - ListType, - ModelType, -) +from typing import List, Optional + +from fastapi import HTTPException +from pydantic import BaseModel, Field, field_validator from backend.models.dtos.stats_dto import Pagination from backend.models.postgis.statuses import ( + TeamJoinMethod, TeamMemberFunctions, TeamVisibility, - TeamJoinMethod, ) -def validate_team_visibility(value): - """Validates that value is a known Team Visibility""" +def validate_team_visibility(value: str) -> str: + """Validates that value is a known Team Visibility.""" try: TeamVisibility[value.upper()] except KeyError: - raise ValidationError( - f"Unknown teamVisibility: {value} Valid values are " - f"{TeamVisibility.PUBLIC.name}, " - f"{TeamVisibility.PRIVATE.name}" + raise HTTPException( + status_code=400, + detail=( + f"Unknown teamVisibility: {value}. Valid values are: " + f"{TeamVisibility.PUBLIC.name}, " + f"{TeamVisibility.PRIVATE.name}." + ), ) + return value -def validate_team_join_method(value): - """Validates join method value and its visibility""" +def validate_team_join_method(value: str): + """Validates join method value and its visibility.""" try: TeamJoinMethod[value.upper()] except KeyError: - raise ValidationError( - f"Unknown teamJoinMethod: {value} Valid values are " - f"{TeamJoinMethod.ANY.name}, " - f"{TeamJoinMethod.BY_INVITE.name}, " - f"{TeamJoinMethod.BY_REQUEST.name}" + raise HTTPException( + status_code=400, + detail=( + f"Unknown teamJoinMethod: {value}. " + f"Valid values are: {TeamJoinMethod.ANY.name}, " + f"{TeamJoinMethod.BY_INVITE.name}, " + f"{TeamJoinMethod.BY_REQUEST.name}." 
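#     Illustrative behaviour of the reworked validators (assumes FastAPI +
#     Pydantic v2; example only, not part of the patched module). They now raise
#     HTTPException(400) instead of schematics.ValidationError, and return the
#     value unchanged when it is a known enum member:
#
#         assert validate_team_join_method("BY_REQUEST") == "BY_REQUEST"
#         try:
#             validate_team_join_method("open")
#         except HTTPException as exc:
#             assert exc.status_code == 400
#             assert "Unknown teamJoinMethod" in exc.detail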
+ ), ) + return value -def validate_team_member_function(value): - """Validates that value is a known Team Member Function""" +def validate_team_member_function(value: str): + """Validates that value is a known Team Member Function.""" try: TeamMemberFunctions[value.upper()] except KeyError: - raise ValidationError( - f"Unknown teamMemberFunction: {value} Valid values are " - f"{TeamMemberFunctions.MEMBER.name}, " - f"{TeamMemberFunctions.MANAGER.name}" + raise HTTPException( + status_code=400, + detail=( + f"Unknown teamMemberFunction: {value}. " + f"Valid values are: {TeamMemberFunctions.MEMBER.name}, " + f"{TeamMemberFunctions.MANAGER.name}." + ), ) + return value -class TeamMembersDTO(Model): - """Describe a JSON model for team members""" - - username = StringType(required=True) - function = StringType(required=True, validators=[validate_team_member_function]) - active = BooleanType() - join_request_notifications = BooleanType( - default=False, serialized_name="joinRequestNotifications" +class TeamMembersDTO(BaseModel): + username: str + function: str + active: bool + join_request_notifications: bool = Field( + default=False, alias="joinRequestNotifications" ) - picture_url = StringType(serialized_name="pictureUrl") + picture_url: Optional[str] = Field(None, alias="pictureUrl") + @field_validator("function") + def validate_function(cls, value): + return validate_team_member_function(value) -class TeamProjectDTO(Model): + class Config: + populate_by_name = True + + +class TeamProjectDTO(BaseModel): """Describes a JSON model to create a project team""" - project_name = StringType(required=True) - project_id = IntType(required=True) - role = StringType(required=True) + project_name: str = Field(None) + project_id: int = Field(None) + role: str = Field(None) -class ProjectTeamDTO(Model): +class ProjectTeamDTO(BaseModel): """Describes a JSON model to create a project team""" - team_id = IntType(required=True, serialized_name="teamId") - team_name = StringType(serialized_name="name") - role = StringType(required=True) + team_id: int = Field(alias="teamId") + team_name: str = Field(default=None, alias="name") + role: str = Field() + class Config: + populate_by_name = True + use_enum_values = True -class TeamDetailsDTO(Model): - def __init__(self): - """DTO constructor initialise all arrays to empty""" - super().__init__() - self.members = [] - self.team_projects = [] - - """ Describes JSON model for a team """ - team_id = IntType(serialized_name="teamId") - organisation_id = IntType(required=True) - organisation = StringType(required=True) - organisation_slug = StringType(serialized_name="organisationSlug") - name = StringType(required=True) - logo = StringType() - description = StringType() - join_method = StringType( - required=True, - validators=[validate_team_join_method], - serialized_name="joinMethod", - ) - visibility = StringType( - required=True, validators=[validate_team_visibility], serialize_when_none=False - ) - is_org_admin = BooleanType(default=False) - is_general_admin = BooleanType(default=False) - members = ListType(ModelType(TeamMembersDTO)) - team_projects = ListType(ModelType(ProjectTeamDTO)) +class TeamDetailsDTO(BaseModel): + """Pydantic model equivalent of the original TeamDetailsDTO""" + + team_id: Optional[int] = Field(None, alias="teamId") + organisation_id: int + organisation: str + organisation_slug: Optional[str] = Field(None, alias="organisationSlug") + name: str + logo: Optional[str] = None + description: Optional[str] = None + join_method: str = 
Field(alias="joinMethod") + visibility: str + is_org_admin: bool = Field(False) + is_general_admin: bool = Field(False) + members: List[TeamMembersDTO] = Field([], alias="members") + team_projects: List[TeamProjectDTO] = Field([], alias="team_projects") + + @field_validator("join_method") + def validate_join_method(cls, value): + return validate_team_join_method(value) + + @field_validator("visibility") + def validate_visibility(cls, value): + return validate_team_visibility(value) -class TeamDTO(Model): + class Config: + populate_by_name = True + + +class TeamDTO(BaseModel): """Describes JSON model for a team""" - team_id = IntType(serialized_name="teamId") - organisation_id = IntType(required=True, serialized_name="organisationId") - organisation = StringType(required=True) - name = StringType(required=True) - logo = StringType() - description = StringType() - join_method = StringType( - required=True, - validators=[validate_team_join_method], - serialized_name="joinMethod", - ) - visibility = StringType( - required=True, validators=[validate_team_visibility], serialize_when_none=False - ) - members = ListType(ModelType(TeamMembersDTO)) - members_count = IntType(serialized_name="membersCount", required=False) - managers_count = IntType(serialized_name="managersCount", required=False) + team_id: Optional[int] = Field(None, alias="teamId") + organisation_id: int = Field(None, alias="organisationId") + organisation: str = Field(None, alias="organisation") + name: str = Field(None, alias="name") + logo: Optional[str] = None + description: Optional[str] = None + join_method: str = Field(None, alias="joinMethod") + visibility: str = Field(None, alias="visibility") + members: Optional[List[TeamMembersDTO]] = None + members_count: Optional[int] = Field(None, alias="membersCount") + managers_count: Optional[int] = Field(None, alias="managersCount") + + @field_validator("join_method") + def validate_join_method(cls, value): + return validate_team_join_method(value) + @field_validator("visibility") + def validate_visibility(cls, value): + return validate_team_visibility(value) -class TeamsListDTO(Model): + class Config: + populate_by_name = True + + +class TeamsListDTO(BaseModel): + def __init__(self): + """DTO constructor initialise all arrays to empty""" + super().__init__() + self.teams = [] + + """ Returns List of all teams""" + teams: List[TeamDTO] = [] + pagination: Optional[Pagination] = None + + +class ListTeamsDTO(BaseModel): def __init__(self): """DTO constructor initialise all arrays to empty""" super().__init__() self.teams = [] """ Returns List of all teams""" - teams = ListType(ModelType(TeamDTO)) - pagination = ModelType(Pagination) + teams: List[ProjectTeamDTO] = [] + pagination: Optional[Pagination] = None -class NewTeamDTO(Model): +class NewTeamDTO(BaseModel): """Describes a JSON model to create a new team""" - creator = LongType(required=True) - organisation_id = IntType(required=True) - name = StringType(required=True) - description = StringType() - join_method = StringType( - required=True, - validators=[validate_team_join_method], - serialized_name="joinMethod", - ) - visibility = StringType( - required=True, validators=[validate_team_visibility], serialize_when_none=False + creator: float = Field(None, alias="creator") + organisation_id: int = Field(..., alias="organisation_id") + name: str = Field(..., alias="name") + description: Optional[str] = Field(None, alias="description") + join_method: str = Field( + ..., + alias="joinMethod", ) + visibility: str = Field(..., 
serialize_when_none=False) + + @field_validator("join_method") + def validate_join_method(cls, value): + return validate_team_join_method(value) + + @field_validator("visibility") + def validate_visibility(cls, value): + return validate_team_visibility(value) + class Config: + populate_by_name = True -class UpdateTeamDTO(Model): + +class UpdateTeamDTO(BaseModel): """Describes a JSON model to update a team""" - creator = LongType() - team_id = IntType() - organisation = StringType() - organisation_id = IntType() - name = StringType() - logo = StringType() - description = StringType() - join_method = StringType( - validators=[validate_team_join_method], serialized_name="joinMethod" - ) - visibility = StringType( - validators=[validate_team_visibility], serialize_when_none=False - ) - members = ListType(ModelType(TeamMembersDTO), serialize_when_none=False) + creator: Optional[int] = Field(None, alias="creator") + team_id: Optional[int] = Field(None, alias="team_id") + organisation: Optional[str] = Field(None, alias="organisation") + organisation_id: Optional[int] = Field(None, alias="organisation_id") + name: Optional[str] = Field(None, alias="name") + logo: Optional[str] = Field(None, alias="logo") + description: Optional[str] = Field(None, alias="description") + join_method: Optional[str] = Field(None, alias="joinMethod") + visibility: Optional[str] = Field(None, serialize_when_none=False) + members: Optional[List[TeamMembersDTO]] = Field([], serialize_when_none=False) + + @field_validator("join_method") + def validate_join_method(cls, value): + return validate_team_join_method(value) + + @field_validator("visibility") + def validate_visibility(cls, value): + return validate_team_visibility(value) + + class Config: + populate_by_name = True -class TeamSearchDTO(Model): +class TeamSearchDTO(BaseModel): """Describes a JSON model to search for a team""" - user_id = LongType(serialized_name="userId") - organisation = IntType(serialized_name="organisation") - team_name = StringType(serialized_name="team_name") - omit_members = BooleanType(serialized_name="omitMemberList", default=False) - full_members_list = BooleanType(serialized_name="fullMemberList", default=True) - member = LongType(serialized_name="member") - manager = LongType(serialized_name="manager") - team_role = StringType(serialized_name="team_role") - member_request = LongType(serialized_name="member_request") - paginate = BooleanType(serialized_name="paginate", default=False) - page = IntType(serialized_name="page", default=1) - per_page = IntType(serialized_name="perPage", default=10) + user_id: Optional[float] = Field(None, alias="userId") + organisation: Optional[int] = Field(None, alias="organisation") + team_name: Optional[str] = Field(None, alias="team_name") + omit_members: Optional[bool] = Field(False, alias="omitMemberList") + full_members_list: Optional[bool] = Field(True, alias="fullMemberList") + member: Optional[float] = Field(None, alias="member") + manager: Optional[float] = Field(None, alias="manager") + team_role: Optional[str] = Field(None, alias="team_role") + member_request: Optional[float] = Field(None, alias="member_request") + paginate: Optional[bool] = Field(False, alias="paginate") + page: Optional[int] = Field(1, alias="page") + per_page: Optional[int] = Field(10, alias="perPage") + + class Config: + populate_by_name = True diff --git a/backend/models/dtos/user_dto.py b/backend/models/dtos/user_dto.py index 2308ceff8c..9b13ccbc14 100644 --- a/backend/models/dtos/user_dto.py +++ 
b/backend/models/dtos/user_dto.py @@ -1,33 +1,13 @@ -from schematics import Model -from schematics.exceptions import ValidationError -from schematics.types import ( - StringType, - IntType, - EmailType, - LongType, - BooleanType, -) -from schematics.types.compound import ListType, ModelType, BaseType -from backend.models.dtos.stats_dto import Pagination -from backend.models.dtos.mapping_dto import TaskDTO -from backend.models.dtos.interests_dto import InterestDTO -from backend.models.postgis.statuses import MappingLevel, UserRole +from datetime import datetime +from typing import List, Optional, Dict +from pydantic import BaseModel, Field +from pydantic.functional_validators import field_validator -def is_known_mapping_level(value): - """Validates that supplied mapping level is known value""" - if value.upper() == "ALL": - return True - - try: - value = value.split(",") - for level in value: - MappingLevel[level.upper()] - except KeyError: - raise ValidationError( - f"Unknown mappingLevel: {value} Valid values are {MappingLevel.BEGINNER.name}, " - f"{MappingLevel.INTERMEDIATE.name}, {MappingLevel.ADVANCED.name}, ALL" - ) +from backend.models.dtos.interests_dto import InterestDTO +from backend.models.dtos.mapping_dto import TaskDTO +from backend.models.dtos.stats_dto import Pagination +from backend.models.postgis.statuses import MappingLevel, UserRole def is_known_role(value): @@ -37,60 +17,70 @@ def is_known_role(value): for role in value: UserRole[role.upper()] except KeyError: - raise ValidationError( + raise ValueError( f"Unknown mappingRole: {value} Valid values are {UserRole.ADMIN.name}, " f"{UserRole.READ_ONLY.name}, {UserRole.MAPPER.name}" ) -class UserDTO(Model): +class UserDTO(BaseModel): """DTO for User""" - id = LongType() - username = StringType() - role = StringType() - mapping_level = StringType( - serialized_name="mappingLevel", validators=[is_known_mapping_level] - ) - projects_mapped = IntType(serialized_name="projectsMapped") - email_address = EmailType(serialized_name="emailAddress") - - is_email_verified = EmailType( - serialized_name="isEmailVerified", serialize_when_none=False + id: Optional[int] = None + username: Optional[str] = None + role: Optional[str] = None + mapping_level: Optional[str] = Field(None, alias="mappingLevel") + projects_mapped: Optional[int] = Field(None, alias="projectsMapped") + email_address: Optional[str] = Field(None, alias="emailAddress") + is_email_verified: Optional[bool] = Field( + None, alias="isEmailVerified", serialize_when_none=False ) - is_expert = BooleanType(serialized_name="isExpert", serialize_when_none=False) - twitter_id = StringType(serialized_name="twitterId") - facebook_id = StringType(serialized_name="facebookId") - linkedin_id = StringType(serialized_name="linkedinId") - slack_id = StringType(serialized_name="slackId") - irc_id = StringType(serialized_name="ircId") - skype_id = StringType(serialized_name="skypeId") - city = StringType(serialized_name="city") - country = StringType(serialized_name="country") - name = StringType(serialized_name="name") - picture_url = StringType(serialized_name="pictureUrl") - default_editor = StringType(serialized_name="defaultEditor") - mentions_notifications = BooleanType(serialized_name="mentionsNotifications") - projects_comments_notifications = BooleanType( - serialized_name="questionsAndCommentsNotifications" + is_expert: bool = Field(None, alias="isExpert", serialize_when_none=False) + twitter_id: Optional[str] = Field(None, alias="twitterId") + facebook_id: Optional[str] = 
Field(None, alias="facebookId") + linkedin_id: Optional[str] = Field(None, alias="linkedinId") + slack_id: Optional[str] = Field(None, alias="slackId") + irc_id: Optional[str] = Field(None, alias="ircId") + skype_id: Optional[str] = Field(None, alias="skypeId") + city: Optional[str] = Field(None, alias="city") + country: Optional[str] = Field(None, alias="country") + name: Optional[str] = Field(None, alias="name") + picture_url: Optional[str] = Field(None, alias="pictureUrl") + default_editor: Optional[str] = Field(None, alias="defaultEditor") + mentions_notifications: bool = Field(None, alias="mentionsNotifications") + projects_comments_notifications: bool = Field( + None, alias="questionsAndCommentsNotifications" ) - projects_notifications = BooleanType(serialized_name="projectsNotifications") - tasks_notifications = BooleanType(serialized_name="tasksNotifications") - tasks_comments_notifications = BooleanType( - serialized_name="taskCommentsNotifications" - ) - teams_announcement_notifications = BooleanType( - serialized_name="teamsAnnouncementNotifications" + projects_notifications: bool = Field(None, alias="projectsNotifications") + tasks_notifications: bool = Field(None, alias="tasksNotifications") + tasks_comments_notifications: bool = Field(None, alias="taskCommentsNotifications") + teams_announcement_notifications: bool = Field( + None, alias="teamsAnnouncementNotifications" ) # these are read only - gender = StringType( - serialized_name="gender", + gender: Optional[str] = Field( + None, + alias="gender", choices=("MALE", "FEMALE", "SELF_DESCRIBE", "PREFER_NOT"), ) - self_description_gender = StringType( - serialized_name="selfDescriptionGender", default=None - ) + self_description_gender: Optional[str] = Field(None, alias="selfDescriptionGender") + + @field_validator("mapping_level", mode="before") + def is_known_mapping_level(value): + """Validates that supplied mapping level is known value""" + if value.upper() == "ALL": + return True + + try: + value = value.split(",") + for level in value: + MappingLevel[level.upper()] + except KeyError: + raise ValueError( + f"Unknown mappingLevel: {value} Valid values are {MappingLevel.BEGINNER.name}, " + f"{MappingLevel.INTERMEDIATE.name}, {MappingLevel.ADVANCED.name}, ALL" + ) def validate_self_description(self, data, value): if ( @@ -101,142 +91,154 @@ def validate_self_description(self, data, value): return value -class UserCountryContributed(Model): +class UserCountryContributed(BaseModel): """DTO for country a user has contributed""" - name = StringType(required=True) - mapped = IntType(required=True) - validated = IntType(required=True) - total = IntType(required=True) + name: str = Field(None) + mapped: int = Field(None, alias="mapped") + validated: int = Field(None, alias="validated") + total: int = Field(None) -class UserCountriesContributed(Model): +class UserCountriesContributed(BaseModel): """DTO for countries a user has contributed""" - def __init__(self): - super().__init__() - self.countries_contributed = [] + countries_contributed: List[UserCountryContributed] = Field([], alias="countries") + total: int = Field(None) - countries_contributed = ListType( - ModelType(UserCountryContributed), serialized_name="countries" - ) - total = IntType() + class Config: + populate_by_name = True -class UserContributionDTO(Model): - date = StringType() - count = IntType() +class UserContributionDTO(BaseModel): + date: datetime + count: int -class UserStatsDTO(Model): +class UserStatsDTO(BaseModel): """DTO containing statistics about the 
user""" - total_time_spent = IntType(serialized_name="totalTimeSpent") - time_spent_mapping = IntType(serialized_name="timeSpentMapping") - time_spent_validating = IntType(serialized_name="timeSpentValidating") - projects_mapped = IntType(serialized_name="projectsMapped") - countries_contributed = ModelType( - UserCountriesContributed, serialized_name="countriesContributed" + total_time_spent: int = Field(None, alias="totalTimeSpent") + time_spent_mapping: int = Field(None, alias="timeSpentMapping") + time_spent_validating: int = Field(None, alias="timeSpentValidating") + projects_mapped: int = Field(None, alias="projectsMapped") + countries_contributed: UserCountriesContributed = Field( + None, alias="countriesContributed" ) - contributions_by_day = ListType( - ModelType(UserContributionDTO), serialized_name="contributionsByDay" + contributions_by_day: List[UserContributionDTO] = Field( + [], alias="contributionsByDay" ) - tasks_mapped = IntType(serialized_name="tasksMapped") - tasks_validated = IntType(serialized_name="tasksValidated") - tasks_invalidated = IntType(serialized_name="tasksInvalidated") - tasks_invalidated_by_others = IntType(serialized_name="tasksInvalidatedByOthers") - tasks_validated_by_others = IntType(serialized_name="tasksValidatedByOthers") - contributions_interest = ListType( - ModelType(InterestDTO), serialized_name="ContributionsByInterest" + tasks_mapped: int = Field(None, alias="tasksMapped") + tasks_validated: int = Field(None, alias="tasksValidated") + tasks_invalidated: int = Field(None, alias="tasksInvalidated") + tasks_invalidated_by_others: int = Field(None, alias="tasksInvalidatedByOthers") + tasks_validated_by_others: int = Field(None, alias="tasksValidatedByOthers") + contributions_interest: List[InterestDTO] = Field( + [], alias="ContributionsByInterest" ) -class UserOSMDTO(Model): +class UserOSMDTO(BaseModel): """DTO containing OSM details for the user""" - account_created = StringType(required=True, serialized_name="accountCreated") - changeset_count = IntType(required=True, serialized_name="changesetCount") + account_created: Optional[str] = Field(None, alias="accountCreated") + changeset_count: Optional[int] = Field(None, alias="changesetCount") -class MappedProject(Model): +class MappedProject(BaseModel): """Describes a single project a user has mapped""" - project_id = IntType(serialized_name="projectId") - name = StringType() - tasks_mapped = IntType(serialized_name="tasksMapped") - tasks_validated = IntType(serialized_name="tasksValidated") - status = StringType() - centroid = BaseType() + project_id: Optional[int] = Field(None, alias="projectId") + name: Optional[str] = None + tasks_mapped: Optional[int] = Field(None, alias="tasksMapped") + tasks_validated: Optional[int] = Field(None, alias="tasksValidated") + status: Optional[str] = None + centroid: Optional[Dict] = None + class Config: + populate_by_name = True -class UserMappedProjectsDTO(Model): - """DTO for projects a user has mapped""" - def __init__(self): - super().__init__() - self.mapped_projects = [] +class UserMappedProjectsDTO(BaseModel): + """DTO for projects a user has mapped""" - mapped_projects = ListType( - ModelType(MappedProject), serialized_name="mappedProjects" + mapped_projects: Optional[List[MappedProject]] = Field( + default_factory=list, alias="mappedProjects" ) + class Config: + populate_by_name = True + -class UserSearchQuery(Model): +class UserSearchQuery(BaseModel): """Describes a user search query, that a user may submit to filter the list of users""" - username 
= StringType() - role = StringType(validators=[is_known_role]) - mapping_level = StringType( - serialized_name="mappingLevel", validators=[is_known_mapping_level] - ) - page = IntType() - pagination = BooleanType(default=True) - per_page = IntType(default=20, serialized_name="perPage") + username: Optional[str] = None + role: Optional[str] = Field(None) + mapping_level: Optional[str] = Field(None, alias="mappingLevel") + page: Optional[int] = None + pagination: bool = True + per_page: Optional[int] = Field(default=20, alias="perPage") + + @field_validator("username", mode="before") + def validate_username(cls, v): + if v is None: + return None + return v.strip() + + @field_validator("role", mode="before") + def validate_role(cls, v): + if v is None: + return None + return v.strip() def __hash__(self): """Make object hashable so we can cache user searches""" return hash((self.username, self.role, self.mapping_level, self.page)) -class ListedUser(Model): +class ListedUser(BaseModel): """Describes a user within the User List""" - id = LongType() - username = StringType() - role = StringType() - mapping_level = StringType(serialized_name="mappingLevel") - picture_url = StringType(serialized_name="pictureUrl") + id: Optional[float] = None + username: Optional[str] = None + role: Optional[str] = None + mapping_level: Optional[str] = Field(None, alias="mappingLevel") + picture_url: Optional[str] = Field(None, alias="pictureUrl") -class UserRegisterEmailDTO(Model): +class UserRegisterEmailDTO(BaseModel): """DTO containing data for user registration with email model""" - id = IntType(serialize_when_none=False) - email = StringType(required=True) - success = BooleanType(default=False) - details = StringType() + id: int = Field(None, serialize_when_none=False) + email: str + success: bool = False + details: str = None -class ProjectParticipantUser(Model): +class ProjectParticipantUser(BaseModel): """Describes a user who has participated in a project""" - username = StringType() - project_id = LongType(serialized_name="projectId") - is_participant = BooleanType(serialized_name="isParticipant") + username: str + project_id: float = Field(alias="projectId") + is_participant: bool = Field(alias="isParticipant") + + class Config: + populate_by_name = True -class UserSearchDTO(Model): +class UserSearchDTO(BaseModel): """Paginated list of TM users""" def __init__(self): super().__init__() self.users = [] - pagination = ModelType(Pagination) - users = ListType(ModelType(ListedUser)) + pagination: Optional[Pagination] = None + users: Optional[List[ListedUser]] = None -class UserFilterDTO(Model): +class UserFilterDTO(BaseModel): """DTO to hold all Tasking Manager users""" def __init__(self): @@ -244,12 +246,12 @@ def __init__(self): self.usernames = [] self.users = [] - pagination = ModelType(Pagination) - usernames = ListType(StringType) - users = ListType(ModelType(ProjectParticipantUser)) + pagination: Optional[Pagination] = None + usernames: Optional[List[str]] = None + users: Optional[List[ProjectParticipantUser]] = None -class UserTaskDTOs(Model): +class UserTaskDTOs(BaseModel): """Describes an array of Task DTOs""" def __init__(self): @@ -257,5 +259,11 @@ def __init__(self): super().__init__() self.user_tasks = [] - user_tasks = ListType(ModelType(TaskDTO), serialized_name="tasks") - pagination = ModelType(Pagination) + user_tasks: List[TaskDTO] = Field([], alias="tasks") + pagination: Pagination = Field(None, alias="pagination") + + +class AuthUserDTO(BaseModel): + """A minimal user model with only 
id.""" + + id: int diff --git a/backend/models/dtos/validator_dto.py b/backend/models/dtos/validator_dto.py index f3a5d72bfc..97f1235edc 100644 --- a/backend/models/dtos/validator_dto.py +++ b/backend/models/dtos/validator_dto.py @@ -1,12 +1,11 @@ -from schematics import Model -from schematics.exceptions import ValidationError -from schematics.types import StringType, IntType, BooleanType, UTCDateTimeType -from schematics.types.compound import ListType, ModelType from backend.models.postgis.statuses import TaskStatus from backend.models.dtos.stats_dto import Pagination +from pydantic import BaseModel, Field +from typing import List, Optional +from datetime import datetime -class ExtendedStringType(StringType): +class ExtendedStringType(str): converters = [] def __init__(self, **kwargs): @@ -66,104 +65,123 @@ def is_valid_revert_status(value): raise ValidationError(f"Invalid status. Valid values are {valid_values}") -class LockForValidationDTO(Model): +class LockForValidationDTO(BaseModel): """DTO used to lock multiple tasks for validation""" - project_id = IntType(required=True) - task_ids = ListType(IntType, required=True, serialized_name="taskIds") - user_id = IntType(required=True) - preferred_locale = StringType(default="en") + project_id: int + task_ids: List[int] = Field(None, alias="taskIds") + user_id: int + preferred_locale: str = "en" + class Config: + populate_by_name = True -class ValidationMappingIssue(Model): + +class ValidationMappingIssue(BaseModel): """Describes one or more occurrences of an identified mapping problem during validation""" - mapping_issue_category_id = IntType( - required=True, serialized_name="mappingIssueCategoryId" - ) - issue = StringType(required=True) - count = IntType(required=True) + mapping_issue_category_id: int = Field(None, alias="mappingIssueCategoryId") + issue: str + count: int + class Config: + populate_by_name = True -class ValidatedTask(Model): + +class ValidatedTask(BaseModel): """Describes the model used to update the status of one task after validation""" - task_id = IntType(required=True, serialized_name="taskId") - status = StringType(required=True, validators=[is_valid_validated_status]) - comment = StringType() - issues = ListType( - ModelType(ValidationMappingIssue), serialized_name="validationIssues" + task_id: int = Field(None, alias="taskId") + status: str = Field(None, validators=[is_valid_validated_status]) + comment: Optional[str] = None + issues: Optional[List[ValidationMappingIssue]] = Field( + None, alias="validationIssues" ) + class Config: + populate_by_name = True + -class ResetValidatingTask(Model): - """Describes the model used to stop validating and reset the status of one task""" +class ResetValidatingTask(BaseModel): + """Model used to stop validating and reset the status of one task""" - task_id = IntType(required=True, serialized_name="taskId") - comment = StringType() - issues = ListType( - ModelType(ValidationMappingIssue), serialized_name="validationIssues" + task_id: int = Field(alias="taskId") + comment: Optional[str] = None + issues: Optional[List[ValidationMappingIssue]] = Field( + None, alias="validationIssues" ) + class Config: + populate_by_name = True -class UnlockAfterValidationDTO(Model): + +class UnlockAfterValidationDTO(BaseModel): """DTO used to transmit the status of multiple tasks after validation""" - project_id = IntType(required=True) - validated_tasks = ListType( - ModelType(ValidatedTask), required=True, serialized_name="validatedTasks" - ) - user_id = IntType(required=True) - 
preferred_locale = StringType(default="en") + project_id: int + validated_tasks: List[ValidatedTask] = Field(None, alias="validatedTasks") + user_id: int + preferred_locale: str = Field(default="en") + class Config: + populate_by_name = True -class StopValidationDTO(Model): + +class StopValidationDTO(BaseModel): """DTO used to transmit the the request to stop validating multiple tasks""" - project_id = IntType(required=True) - reset_tasks = ListType( - ModelType(ResetValidatingTask), required=True, serialized_name="resetTasks" - ) - user_id = IntType(required=True) - preferred_locale = StringType(default="en") + project_id: int + reset_tasks: List[ResetValidatingTask] = Field(None, alias="resetTasks") + user_id: int + preferred_locale: str = Field(default="en") + class Config: + populate_by_name = True -class MappedTasksByUser(Model): + +class MappedTasksByUser(BaseModel): """Describes number of tasks user has mapped on a project""" - username = StringType(required=True) - mapped_task_count = IntType(required=True, serialized_name="mappedTaskCount") - tasks_mapped = ListType(IntType, required=True, serialized_name="tasksMapped") - last_seen = UTCDateTimeType(required=True, serialized_name="lastSeen") - mapping_level = StringType(required=True, serialized_name="mappingLevel") - date_registered = UTCDateTimeType(serialized_name="dateRegistered") - last_validation_date = UTCDateTimeType(serialized_name="lastValidationDate") + username: str = Field(None) + mapped_task_count: int = Field(None, alias="mappedTaskCount") + tasks_mapped: List[int] = Field(None, alias="tasksMapped") + last_seen: datetime = Field(None, alias="lastSeen") + mapping_level: str = Field(None, alias="mappingLevel") + date_registered: datetime = Field(alias="dateRegistered") + last_validation_date: datetime = Field(alias="lastValidationDate") + + class Config: + populate_by_name = True -class InvalidatedTask(Model): +class InvalidatedTask(BaseModel): """Describes invalidated tasks with which user is involved""" - task_id = IntType(required=True, serialized_name="taskId") - project_id = IntType(required=True, serialized_name="projectId") - project_name = StringType(serialized_name="projectName") - history_id = IntType(serialized_name="historyId") - closed = BooleanType() - updated_date = UTCDateTimeType(serialized_name="updatedDate") + task_id: int = Field(None, alias="taskId") + project_id: int = Field(None, alias="projectId") + project_name: str = Field(alias="projectName") + history_id: int = Field(alias="historyId") + closed: bool + updated_date: datetime = Field(alias="updatedDate") + class Config: + populate_by_name = True -class InvalidatedTasks(Model): + +class InvalidatedTasks(BaseModel): def __init__(self): """DTO constructor initialise all arrays to empty""" super().__init__() self.invalidated_tasks = [] - invalidated_tasks = ListType( - ModelType(InvalidatedTask), serialized_name="invalidatedTasks" - ) - pagination = ModelType(Pagination) + invalidated_tasks: List[InvalidatedTask] = Field(alias="invalidatedTasks") + pagination: Pagination + class Config: + populate_by_name = True -class MappedTasks(Model): + +class MappedTasks(BaseModel): """Describes all tasks currently mapped on a project""" def __init__(self): @@ -171,16 +189,24 @@ def __init__(self): super().__init__() self.mapped_tasks = [] - mapped_tasks = ListType(ModelType(MappedTasksByUser), serialized_name="mappedTasks") + mapped_tasks: List[MappedTasksByUser] = Field(alias="mappedTasks") + + class Config: + populate_by_name = True -class 
RevertUserTasksDTO(Model): +class RevertUserTasksDTO(BaseModel): """DTO used to revert all tasks to a given status""" - preferred_locale = StringType(default="en") - project_id = IntType(required=True) - user_id = IntType(required=True) - action_by = IntType(required=True) - action = ExtendedStringType( - required=True, validators=[is_valid_revert_status], converters=[str.upper] - ) + preferred_locale: str = "en" + project_id: int + user_id: int + action_by: int + action: str + # TODO: Incorporate this validator. + # action: ExtendedStringType = Field( + # validators=[is_valid_revert_status], converters=[str.upper] + # ) + + class Config: + populate_by_name = True diff --git a/backend/models/postgis/application.py b/backend/models/postgis/application.py index 75fb8c913d..6922354bd1 100644 --- a/backend/models/postgis/application.py +++ b/backend/models/postgis/application.py @@ -1,20 +1,30 @@ -from backend import db +from databases import Database +from sqlalchemy import ( + BigInteger, + Column, + DateTime, + ForeignKey, + String, + delete, + insert, + select, +) + +from backend.db import Base from backend.models.dtos.application_dto import ApplicationDTO, ApplicationsDTO from backend.models.postgis.utils import timestamp from backend.services.users.authentication_service import AuthenticationService -class Application(db.Model): +class Application(Base): """Describes an application that is authorized to access the TM""" __tablename__ = "application_keys" - id = db.Column(db.BigInteger, primary_key=True) - user = db.Column( - db.BigInteger, db.ForeignKey("users.id", name="fk_users"), nullable=False - ) - app_key = db.Column(db.String, nullable=False) - created = db.Column(db.DateTime, default=timestamp) + id = Column(BigInteger, primary_key=True) + user = Column(BigInteger, ForeignKey("users.id", name="fk_users"), nullable=False) + app_key = Column(String, nullable=False) + created = Column(DateTime, default=timestamp) def generate_application_key(self, user_id): """ @@ -23,33 +33,31 @@ def generate_application_key(self, user_id): token = AuthenticationService.generate_session_token_for_user(user_id) return token - def create(self, user_id): + async def create(self, user_id, db: Database): application = Application() application.app_key = self.generate_application_key(user_id) application.user = user_id - db.session.add(application) - db.session.commit() - + query = insert(Application.__table__).values( + app_key=application.app_key, user=application.user + ) + await db.execute(query) return application - def save(self): - db.session.commit() - - def delete(self): - db.session.delete(self) - db.session.commit() + async def delete(self, db: Database): + query = delete(Application).where(Application.id == self.id) + await db.execute(query) @staticmethod - def get_token(appkey: str): - return ( - db.session.query(Application) - .filter(Application.app_key == appkey) - .one_or_none() - ) + async def get_token(appkey: str, db: Database): + query = select(Application).where(Application.app_key == appkey) + result = await db.fetch_one(query) + return result @staticmethod - def get_all_for_user(user: int): - query = db.session.query(Application).filter(Application.user == user) + async def get_all_for_user(user: int, db: Database): + # query = session.query(Application).filter(Application.user == user) + query = select(Application).where(Application.user == user) + query = await db.fetch_all(query=query) applications_dto = ApplicationsDTO() for r in query: application_dto = ApplicationDTO() diff 
--git a/backend/models/postgis/banner.py b/backend/models/postgis/banner.py index 297deb4ec1..16190eb0c8 100644 --- a/backend/models/postgis/banner.py +++ b/backend/models/postgis/banner.py @@ -1,34 +1,40 @@ import bleach +from databases import Database from markdown import markdown +from sqlalchemy import Boolean, Column, Integer, String, insert, update -from backend import db +from backend.db import Base from backend.models.dtos.banner_dto import BannerDTO -class Banner(db.Model): +class Banner(Base): """Model for Banners""" __tablename__ = "banner" # Columns - id = db.Column(db.Integer, primary_key=True) - message = db.Column(db.String(255), nullable=False) - visible = db.Column(db.Boolean, default=False, nullable=False) + id = Column(Integer, primary_key=True) + message = Column(String(255), nullable=False) + visible = Column(Boolean, default=False, nullable=False) - def create(self): + async def create(self, db: Database): """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - - def update(self): - """Updates the current model in the DB""" - db.session.commit() + query = insert(Banner.__table__).values( + message=self.message, visible=self.visible + ) + await db.execute(query) - def update_from_dto(self, dto: BannerDTO): + async def update_from_dto(self, db: Database, dto: BannerDTO): """Updates the current model in the DB""" self.message = dto.message self.visible = dto.visible - db.session.commit() + query = ( + update(Banner.__table__) + .where(Banner.id == self.id) + .values(message=self.message, visible=self.visible) + ) + await db.execute(query) + return self def as_dto(self): """Returns a dto for the banner""" @@ -38,14 +44,15 @@ def as_dto(self): return banner_dto @staticmethod - def get(): + async def get(db: Database): """Returns a banner and creates one if it doesn't exist""" - banner = Banner.query.first() + query = """SELECT * FROM banner LIMIT 1""" + banner = await db.fetch_one(query=query) if banner is None: banner = Banner() banner.message = "Welcome to the API" banner.visible = True - banner.create() + await banner.create(db) return banner @staticmethod diff --git a/backend/models/postgis/campaign.py b/backend/models/postgis/campaign.py index 9e734d7e88..6d6c706fd8 100644 --- a/backend/models/postgis/campaign.py +++ b/backend/models/postgis/campaign.py @@ -1,56 +1,37 @@ -from backend import db +from sqlalchemy import Column, ForeignKey, Integer, String, Table, UniqueConstraint + +from backend.db import Base from backend.models.dtos.campaign_dto import CampaignDTO, CampaignListDTO -campaign_projects = db.Table( +campaign_projects = Table( "campaign_projects", - db.metadata, - db.Column("campaign_id", db.Integer, db.ForeignKey("campaigns.id")), - db.Column("project_id", db.Integer, db.ForeignKey("projects.id")), + Base.metadata, + Column("campaign_id", Integer, ForeignKey("campaigns.id")), + Column("project_id", Integer, ForeignKey("projects.id")), ) -campaign_organisations = db.Table( +campaign_organisations = Table( "campaign_organisations", - db.metadata, - db.Column("campaign_id", db.Integer, db.ForeignKey("campaigns.id")), - db.Column("organisation_id", db.Integer, db.ForeignKey("organisations.id")), - db.UniqueConstraint( + Base.metadata, + Column("campaign_id", Integer, ForeignKey("campaigns.id")), + Column("organisation_id", Integer, ForeignKey("organisations.id")), + UniqueConstraint( "campaign_id", "organisation_id", name="campaign_organisation_key" ), ) -class Campaign(db.Model): +class Campaign(Base): """Describes 
an Campaign""" __tablename__ = "campaigns" - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String, nullable=False, unique=True) - logo = db.Column(db.String) - url = db.Column(db.String) - description = db.Column(db.String) - - def create(self): - """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() - - def save(self): - db.session.commit() - - def update(self, dto: CampaignDTO): - """Update the user details""" - self.name = dto.name if dto.name else self.name - self.logo = dto.logo if dto.logo else self.logo - self.url = dto.url if dto.url else self.url - self.description = dto.description if dto.description else self.description - db.session.commit() + id = Column(Integer, primary_key=True) + name = Column(String, nullable=False, unique=True) + logo = Column(String) + url = Column(String) + description = Column(String) @classmethod def from_dto(cls, dto: CampaignDTO): @@ -79,10 +60,6 @@ def campaign_list_as_dto(campaigns: list) -> CampaignListDTO: """Converts a collection of campaigns into DTO""" campaign_list_dto = CampaignListDTO() for campaign in campaigns: - campaign_dto = CampaignDTO() - campaign_dto.id = campaign.id - campaign_dto.name = campaign.name - + campaign_dto = CampaignDTO(**campaign) campaign_list_dto.campaigns.append(campaign_dto) - return campaign_list_dto diff --git a/backend/models/postgis/custom_editors.py b/backend/models/postgis/custom_editors.py index 5c74cfb4dd..6fb1031f47 100644 --- a/backend/models/postgis/custom_editors.py +++ b/backend/models/postgis/custom_editors.py @@ -1,49 +1,45 @@ -from backend import db +from databases import Database +from sqlalchemy import Column, ForeignKey, Integer, String, delete, update + +from backend.db import Base from backend.models.dtos.project_dto import CustomEditorDTO -class CustomEditor(db.Model): +class CustomEditor(Base): """Model for user defined editors for a project""" __tablename__ = "project_custom_editors" - project_id = db.Column(db.Integer, db.ForeignKey("projects.id"), primary_key=True) - name = db.Column(db.String(50), nullable=False) - description = db.Column(db.String) - url = db.Column(db.String, nullable=False) - - def create(self): - """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - - def save(self): - """Save changes to db""" - db.session.commit() - - @staticmethod - def get_by_project_id(project_id: int): - """Get custom editor by it's project id""" - return db.session.get(CustomEditor, project_id) + project_id = Column(Integer, ForeignKey("projects.id"), primary_key=True) + name = Column(String(50), nullable=False) + description = Column(String) + url = Column(String, nullable=False) @classmethod - def create_from_dto(cls, project_id: int, dto: CustomEditorDTO): + async def create_from_dto(cls, project_id: int, dto: CustomEditorDTO, db: Database): """Creates a new CustomEditor from dto, used in project edit""" new_editor = cls() new_editor.project_id = project_id - new_editor.update_editor(dto) + new_editor = await new_editor.update_editor(dto, db) return new_editor - def update_editor(self, dto: CustomEditorDTO): + async def update_editor(self, dto: CustomEditorDTO, db: Database): """Upates existing CustomEditor form DTO""" self.name = dto.name self.description = dto.description self.url = dto.url - self.save() - def delete(self): + query = ( + update(CustomEditor.__table__) 
+ .where(CustomEditor.id == self.id) + .values(name=self.name, description=self.description, url=self.url) + ) + await db.execute(query) + + async def delete(self, db: Database): """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() + await db.execute( + delete(CustomEditor.__table__).where(CustomEditor.id == self.id) + ) def as_dto(self) -> CustomEditorDTO: """Returns the CustomEditor as a DTO""" diff --git a/backend/models/postgis/interests.py b/backend/models/postgis/interests.py index d9c6e0213c..3a7a31e7b4 100644 --- a/backend/models/postgis/interests.py +++ b/backend/models/postgis/interests.py @@ -1,68 +1,45 @@ -from backend import db -from backend.exceptions import NotFound -from backend.models.dtos.interests_dto import InterestDTO, InterestsListDTO +from databases import Database +from sqlalchemy import BigInteger, Column, ForeignKey, Integer, String, Table, select + +from backend.db import Base +from backend.models.dtos.interests_dto import InterestDTO + # Secondary table defining many-to-many join for interests of a user. -user_interests = db.Table( +user_interests = Table( "user_interests", - db.metadata, - db.Column("interest_id", db.Integer, db.ForeignKey("interests.id")), - db.Column("user_id", db.BigInteger, db.ForeignKey("users.id")), + Base.metadata, + Column("interest_id", Integer, ForeignKey("interests.id")), + Column("user_id", BigInteger, ForeignKey("users.id")), ) # Secondary table defining many-to-many join for interests of a project. -project_interests = db.Table( +project_interests = Table( "project_interests", - db.metadata, - db.Column("interest_id", db.Integer, db.ForeignKey("interests.id")), - db.Column("project_id", db.BigInteger, db.ForeignKey("projects.id")), + Base.metadata, + Column("interest_id", Integer, ForeignKey("interests.id")), + Column("project_id", BigInteger, ForeignKey("projects.id")), ) -class Interest(db.Model): +class Interest(Base): """Describes an interest for projects and users""" __tablename__ = "interests" - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String, unique=True) + id = Column(Integer, primary_key=True) + name = Column(String, unique=True) @staticmethod - def get_by_id(interest_id: int): + async def get_by_id(interest_id: int, db: Database): """Get interest by id""" - interest = db.session.get(Interest, interest_id) - if interest is None: - raise NotFound(sub_code="INTEREST_NOT_FOUND", interest_id=interest_id) - - return interest - - @staticmethod - def get_by_name(name: str): - """Get interest by name""" - interest = Interest.query.filter(Interest.name == name).first() - if interest is None: - raise NotFound(sub_code="INTEREST_NOT_FOUND", interest_name=name) - - return interest + query = select(Interest).where(Interest.id == interest_id) + result = await db.fetch_one(query) - def update(self, dto): - """Update existing interest""" - self.name = dto.name - db.session.commit() - - def create(self): - """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - - def save(self): - """Save changes to db""" - db.session.commit() - - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() + if result: + # If Interest is a Pydantic model or class, you can instantiate it + return Interest(**result) + return None def as_dto(self) -> InterestDTO: """Get the interest from the DB""" @@ -71,12 +48,3 @@ def as_dto(self) -> InterestDTO: dto.name = self.name return dto - - @staticmethod - 
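# Editor's note: illustrative sketch, not part of the diff. The synchronous
# Interest.get_all_interests() removed just below has no async counterpart in this
# hunk; a replacement in the raw-SQL style used elsewhere in this PR might look
# roughly like the following, assuming InterestDTO and InterestsListDTO keep the
# `id`, `name` and `interests` fields used by the removed implementation.
from databases import Database

from backend.models.dtos.interests_dto import InterestDTO, InterestsListDTO


async def get_all_interests_sketch(db: Database) -> InterestsListDTO:
    rows = await db.fetch_all("SELECT id, name FROM interests ORDER BY name")
    dto = InterestsListDTO()
    dto.interests = [InterestDTO(id=row["id"], name=row["name"]) for row in rows]
    return dto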
def get_all_interests(): - """Get all interests""" - query = Interest.query.all() - interest_list_dto = InterestsListDTO() - interest_list_dto.interests = [interest.as_dto() for interest in query] - - return interest_list_dto diff --git a/backend/models/postgis/licenses.py b/backend/models/postgis/licenses.py index 091e2767cb..11cbfbdee2 100644 --- a/backend/models/postgis/licenses.py +++ b/backend/models/postgis/licenses.py @@ -1,81 +1,67 @@ -from backend import db +from databases import Database +from sqlalchemy import BigInteger, Column, ForeignKey, Integer, String, Table +from sqlalchemy.orm import relationship + +from backend.db import Base from backend.exceptions import NotFound -from backend.models.dtos.licenses_dto import LicenseDTO, LicenseListDTO +from backend.models.dtos.licenses_dto import LicenseDTO + # Secondary table defining the many-to-many join -user_licenses_table = db.Table( +user_licenses_table = Table( "user_licenses", - db.metadata, - db.Column("user", db.BigInteger, db.ForeignKey("users.id")), - db.Column("license", db.Integer, db.ForeignKey("licenses.id")), + Base.metadata, + Column("user", BigInteger, ForeignKey("users.id")), + Column("license", Integer, ForeignKey("licenses.id")), ) -class License(db.Model): +class License(Base): """Describes an individual license""" __tablename__ = "licenses" - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String, unique=True) - description = db.Column(db.String) - plain_text = db.Column(db.String) + id = Column(Integer, primary_key=True) + name = Column(String, unique=True) + description = Column(String) + plain_text = Column(String) - projects = db.relationship("Project", backref="license") - users = db.relationship( + projects = relationship("Project", backref="license") + users = relationship( "License", secondary=user_licenses_table ) # Many to Many relationship @staticmethod - def get_by_id(license_id: int): + async def get_by_id(license_id: int, db: Database): """Get license by id""" - map_license = db.session.get(License, license_id) + query = """ + SELECT id AS "licenseId", name, description, plain_text AS "plainText" + FROM licenses + WHERE id = :license_id + """ + map_license = await db.fetch_one(query, {"license_id": license_id}) if map_license is None: raise NotFound(sub_code="LICENSE_NOT_FOUND", license_id=license_id) return map_license - @classmethod - def create_from_dto(cls, dto: LicenseDTO) -> int: + async def create_from_dto(license_dto: LicenseDTO, db: Database) -> int: """Creates a new License class from dto""" - new_license = cls() - new_license.name = dto.name - new_license.description = dto.description - new_license.plain_text = dto.plain_text - - db.session.add(new_license) - db.session.commit() - - return new_license.id - - def update_license(self, dto: LicenseDTO): - """Update existing license""" - self.name = dto.name - self.description = dto.description - self.plain_text = dto.plain_text - db.session.commit() - - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() - - @staticmethod - def get_all() -> LicenseListDTO: - """Gets all licenses currently stored""" - results = License.query.all() - - dto = LicenseListDTO() - for result in results: - imagery_license = LicenseDTO() - imagery_license.license_id = result.id - imagery_license.name = result.name - imagery_license.description = result.description - imagery_license.plain_text = result.plain_text - dto.licenses.append(imagery_license) - - return dto + query = """ + 
INSERT INTO licenses (name, description, plain_text) + VALUES (:name, :description, :plain_text) + RETURNING id + """ + values = { + "name": license_dto.name, + "description": license_dto.description, + "plain_text": license_dto.plain_text, + } + + async with db.transaction(): + new_license_id = await db.execute(query, values) + return new_license_id def as_dto(self) -> LicenseDTO: """Get the license from the DB""" diff --git a/backend/models/postgis/mapping_issues.py b/backend/models/postgis/mapping_issues.py index ef7bc8a3d5..424fcbb5e6 100644 --- a/backend/models/postgis/mapping_issues.py +++ b/backend/models/postgis/mapping_issues.py @@ -1,58 +1,80 @@ -from backend import db +from databases import Database +from sqlalchemy import Boolean, Column, Integer, String, delete, insert, select, update + +from backend.db import Base from backend.models.dtos.mapping_issues_dto import ( - MappingIssueCategoryDTO, MappingIssueCategoriesDTO, + MappingIssueCategoryDTO, ) -class MappingIssueCategory(db.Model): +class MappingIssueCategory(Base): """Represents a category of task mapping issues identified during validaton""" __tablename__ = "mapping_issue_categories" - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String, nullable=False, unique=True) - description = db.Column(db.String, nullable=True) - archived = db.Column(db.Boolean, default=False, nullable=False) + id = Column(Integer, primary_key=True) + name = Column(String, nullable=False, unique=True) + description = Column(String, nullable=True) + archived = Column(Boolean, default=False, nullable=False) def __init__(self, name): self.name = name @staticmethod - def get_by_id(category_id: int): + async def get_by_id(category_id: int, db: Database): """Get category by id""" - return db.session.get(MappingIssueCategory, category_id) + query = select(MappingIssueCategory).where( + MappingIssueCategory.id == category_id + ) + return await db.fetch_one(query) @classmethod - def create_from_dto(cls, dto: MappingIssueCategoryDTO) -> int: + async def create_from_dto(cls, dto: MappingIssueCategoryDTO, db: Database) -> int: """Creates a new MappingIssueCategory class from dto""" new_category = cls(dto.name) new_category.description = dto.description - db.session.add(new_category) - db.session.commit() + query = insert(MappingIssueCategory.__table__).values( + name=new_category.name, + description=new_category.description, + archived=dto.archived, + ) + result = await db.execute(query) + return result - return new_category.id - - def update_category(self, dto: MappingIssueCategoryDTO): + async def update_category(self, dto: MappingIssueCategoryDTO, db: Database): """Update existing category""" self.name = dto.name self.description = dto.description if dto.archived is not None: self.archived = dto.archived - db.session.commit() - - def delete(self): + query = ( + update(MappingIssueCategory.__table__) + .where( + MappingIssueCategory.id == self.id, + ) + .values( + name=self.name, description=self.description, archived=self.archived + ) + ) + await db.execute(query) + + async def delete(self, db: Database): """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() + query = delete(MappingIssueCategory.__table__).where( + MappingIssueCategory.id == self.id + ) + await db.execute(query) @staticmethod - def get_all_categories(include_archived): - category_query = MappingIssueCategory.query.order_by(MappingIssueCategory.name) + async def get_all_categories(include_archived, db): + query = 
select(MappingIssueCategory).order_by(MappingIssueCategory.name) + + # Apply condition if archived records are to be excluded if not include_archived: - category_query = category_query.filter_by(archived=False) + query = query.where(MappingIssueCategory.archived == False) - results = category_query.all() + results = await db.fetch_all(query) dto = MappingIssueCategoriesDTO() for result in results: diff --git a/backend/models/postgis/message.py b/backend/models/postgis/message.py index 32516d5a57..95447c8ecc 100644 --- a/backend/models/postgis/message.py +++ b/backend/models/postgis/message.py @@ -1,14 +1,26 @@ -from sqlalchemy.sql.expression import false - -from backend import db -from flask import current_app from enum import Enum +from databases import Database +from loguru import logger +from sqlalchemy import ( + BigInteger, + Boolean, + Column, + DateTime, + ForeignKey, + ForeignKeyConstraint, + Integer, + String, +) +from sqlalchemy.orm import relationship +from sqlalchemy.sql.expression import false + +from backend.db import Base from backend.exceptions import NotFound from backend.models.dtos.message_dto import MessageDTO, MessagesDTO -from backend.models.postgis.user import User -from backend.models.postgis.task import Task, TaskHistory, TaskAction from backend.models.postgis.project import Project +from backend.models.postgis.task import Task, TaskAction +from backend.models.postgis.user import User from backend.models.postgis.utils import timestamp @@ -30,33 +42,33 @@ class MessageType(Enum): TEAM_BROADCAST = 11 # Broadcast message from a team manager -class Message(db.Model): +class Message(Base): """Describes an individual Message a user can send""" __tablename__ = "messages" __table_args__ = ( - db.ForeignKeyConstraint( + ForeignKeyConstraint( ["task_id", "project_id"], ["tasks.id", "tasks.project_id"] ), ) - id = db.Column(db.Integer, primary_key=True) - message = db.Column(db.String) - subject = db.Column(db.String) - from_user_id = db.Column(db.BigInteger, db.ForeignKey("users.id")) - to_user_id = db.Column(db.BigInteger, db.ForeignKey("users.id"), index=True) - project_id = db.Column(db.Integer, db.ForeignKey("projects.id"), index=True) - task_id = db.Column(db.Integer, index=True) - message_type = db.Column(db.Integer, index=True) - date = db.Column(db.DateTime, default=timestamp) - read = db.Column(db.Boolean, default=False) + id = Column(Integer, primary_key=True) + message = Column(String) + subject = Column(String) + from_user_id = Column(BigInteger, ForeignKey("users.id")) + to_user_id = Column(BigInteger, ForeignKey("users.id"), index=True) + project_id = Column(Integer, ForeignKey("projects.id"), index=True) + task_id = Column(Integer, index=True) + message_type = Column(Integer, index=True) + date = Column(DateTime, default=timestamp) + read = Column(Boolean, default=False) # Relationships - from_user = db.relationship(User, foreign_keys=[from_user_id]) - to_user = db.relationship(User, foreign_keys=[to_user_id], backref="messages") - project = db.relationship(Project, foreign_keys=[project_id], backref="messages") - task = db.relationship( + from_user = relationship(User, foreign_keys=[from_user_id]) + to_user = relationship(User, foreign_keys=[to_user_id], backref="messages") + project = relationship(Project, foreign_keys=[project_id], backref="messages") + task = relationship( Task, primaryjoin="and_(Task.id == foreign(Message.task_id), Task.project_id == Message.project_id)", backref="messages", @@ -72,6 +84,8 @@ def from_dto(cls, to_user_id: int, dto: 
MessageDTO): message.to_user_id = to_user_id message.project_id = dto.project_id message.task_id = dto.task_id + message.date = timestamp() + message.read = False if dto.message_type is not None: message.message_type = MessageType(dto.message_type) @@ -98,52 +112,72 @@ def as_dto(self) -> MessageDTO: return dto - def add_message(self): + async def add_message(self, db: Database): """Add message into current transaction - DO NOT COMMIT HERE AS MESSAGES ARE PART OF LARGER TRANSACTIONS""" - current_app.logger.debug("Adding message to session") - db.session.add(self) + logger.debug("Adding message to session") + session.add(self) - def save(self): + async def save(self, db: Database): """Save""" - db.session.add(self) - db.session.commit() + await db.execute( + Message.__table__.insert().values( + subject=self.subject, + message=self.message, + from_user_id=self.from_user_id, + to_user_id=self.to_user_id, + project_id=self.project_id, + task_id=self.task_id, + message_type=self.message_type, + read=self.read, + date=self.date, + ) + ) @staticmethod - def get_all_contributors(project_id: int): - """Get all contributors to a project""" - - contributors = ( - db.session.query(Task.mapped_by) - .filter(Task.project_id == project_id) - .filter(Task.mapped_by.isnot(None)) - .union( - db.session.query(Task.validated_by) - .filter(Task.project_id == project_id) - .filter(Task.validated_by.isnot(None)) - ) - .distinct() - ).all() + async def get_all_contributors(project_id: int, db: Database): + """Get all contributors to a project using async raw SQL""" + + query = """ + SELECT DISTINCT contributor + FROM ( + SELECT mapped_by AS contributor + FROM tasks + WHERE project_id = :project_id + AND mapped_by IS NOT NULL + UNION + SELECT validated_by AS contributor + FROM tasks + WHERE project_id = :project_id + AND validated_by IS NOT NULL + ) AS contributors + """ + + rows = await db.fetch_all(query=query, values={"project_id": project_id}) + + contributors = [row["contributor"] for row in rows] return contributors @staticmethod - def get_all_tasks_contributors(project_id: int, task_id: int): + async def get_all_tasks_contributors(project_id: int, task_id: int, db: Database): """Get all contributors of a task""" - contributors = ( - TaskHistory.query.distinct(TaskHistory.user_id) - .filter(TaskHistory.project_id == project_id) - .filter(TaskHistory.task_id == task_id) - .filter(TaskHistory.action != TaskAction.COMMENT.name) - .all() + query = """ + SELECT DISTINCT u.username + FROM task_history th + JOIN users u ON th.user_id = u.id + WHERE th.project_id = :project_id + AND th.task_id = :task_id + AND th.action != :comment_action + """ + contributors = await db.fetch_all( + query, + { + "project_id": project_id, + "task_id": task_id, + "comment_action": TaskAction.COMMENT.name, + }, ) - contributors = [ - contributor.actioned_by.username for contributor in contributors - ] - return contributors - def mark_as_read(self): - """Mark the message in scope as Read""" - self.read = True - db.session.commit() + return [contributor["username"] for contributor in contributors] @staticmethod def get_unread_message_count(user_id: int): @@ -170,56 +204,75 @@ def get_all_messages(user_id: int) -> MessagesDTO: return messages_dto @staticmethod - def delete_multiple_messages(message_ids: list, user_id: int): + async def delete_multiple_messages(message_ids: list, user_id: int, db: Database): """Deletes the specified messages to the user""" - Message.query.filter( - Message.to_user_id == user_id, Message.id.in_(message_ids) 
- ).delete(synchronize_session=False) - db.session.commit() + delete_query = """ + DELETE FROM messages + WHERE to_user_id = :user_id AND id = ANY(:message_ids) + """ + await db.execute(delete_query, {"user_id": user_id, "message_ids": message_ids}) @staticmethod - def delete_all_messages(user_id: int, message_type_filters: list = None): + async def delete_all_messages( + user_id: int, db: Database, message_type_filters: list = None + ): """Deletes all messages to the user ----------------------------------- :param user_id: user id of the user whose messages are to be deleted :param message_type_filters: list of message types to filter by returns: None """ - query = Message.query.filter(Message.to_user_id == user_id) - if message_type_filters: - query = query.filter(Message.message_type.in_(message_type_filters)) - query.delete(synchronize_session=False) - db.session.commit() + delete_query = """ + DELETE FROM messages + WHERE to_user_id = :user_id + """ + params = {"user_id": user_id} - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() + if message_type_filters: + delete_query += " AND message_type = ANY(:message_type_filters)" + params["message_type_filters"] = message_type_filters + await db.execute(delete_query, params) @staticmethod - def mark_multiple_messages_read(message_ids: list, user_id: int): + async def mark_multiple_messages_read( + message_ids: list, user_id: int, db: Database + ): """Marks the specified messages as read ---------------------------------------- :param message_ids: list of message ids to mark as read :param user_id: user id of the user who is marking the messages as read + :param db: database connection """ - Message.query.filter( - Message.to_user_id == user_id, Message.id.in_(message_ids) - ).update({Message.read: True}, synchronize_session=False) - db.session.commit() + async with db.transaction(): + query = """ + UPDATE messages + SET read = True + WHERE to_user_id = :user_id AND id = ANY(:message_ids) + """ + await db.execute(query, {"user_id": user_id, "message_ids": message_ids}) @staticmethod - def mark_all_messages_read(user_id: int, message_type_filters: list = None): + async def mark_all_messages_read( + user_id: int, db: Database, message_type_filters: list = None + ): """Marks all messages as read ---------------------------------------- :param user_id: user id of the user who is marking the messages as read + :param db: database connection :param message_type_filters: list of message types to filter by """ - # https://docs.sqlalchemy.org/en/13/orm/query.html#sqlalchemy.orm.query.Query.update - query = Message.query.filter( - Message.to_user_id == user_id, Message.read == false() - ) - if message_type_filters: - query = query.filter(Message.message_type.in_(message_type_filters)) - query.update({Message.read: True}, synchronize_session=False) - db.session.commit() + async with db.transaction(): + query = """ + UPDATE messages + SET read = TRUE + WHERE to_user_id = :user_id + AND read = FALSE + """ + + params = {"user_id": user_id} + + if message_type_filters: + query += " AND message_type = ANY(:message_type_filters)" + params["message_type_filters"] = message_type_filters + + await db.execute(query, params) diff --git a/backend/models/postgis/notification.py b/backend/models/postgis/notification.py index 66a8fcf5ea..0ffa718022 100644 --- a/backend/models/postgis/notification.py +++ b/backend/models/postgis/notification.py @@ -1,25 +1,36 @@ -from backend import db +from datetime import 
datetime, timedelta + +from databases import Database +from sqlalchemy import ( + BigInteger, + Column, + DateTime, + ForeignKey, + ForeignKeyConstraint, + Integer, +) +from sqlalchemy.orm import relationship + +from backend.db import Base +from backend.models.dtos.notification_dto import NotificationDTO from backend.models.postgis.user import User -from backend.models.postgis.message import Message from backend.models.postgis.utils import timestamp -from backend.models.dtos.notification_dto import NotificationDTO -from datetime import datetime, timedelta -class Notification(db.Model): +class Notification(Base): """Describes a Notification for a user""" __tablename__ = "notifications" - __table_args__ = (db.ForeignKeyConstraint(["user_id"], ["users.id"]),) + __table_args__ = (ForeignKeyConstraint(["user_id"], ["users.id"]),) - id = db.Column(db.Integer, primary_key=True) - user_id = db.Column(db.BigInteger, db.ForeignKey("users.id"), index=True) - unread_count = db.Column(db.Integer) - date = db.Column(db.DateTime, default=timestamp) + id = Column(Integer, primary_key=True) + user_id = Column(BigInteger, ForeignKey("users.id"), index=True) + unread_count = Column(Integer) + date = Column(DateTime, default=timestamp) # Relationships - user = db.relationship(User, foreign_keys=[user_id], backref="notifications") + user = relationship(User, foreign_keys=[user_id], backref="notifications") def as_dto(self) -> NotificationDTO: """Casts notification object to DTO""" @@ -30,35 +41,38 @@ def as_dto(self) -> NotificationDTO: return dto - def save(self): - db.session.add(self) - db.session.commit() - - def update(self): - self.date = timestamp() - db.session.commit() - @staticmethod - def get_unread_message_count(user_id: int) -> int: + async def get_unread_message_count(user_id: int, db: Database) -> int: """Get count of unread messages for user""" - notifications = Notification.query.filter( - Notification.user_id == user_id - ).first() + query = """ + SELECT unread_count, date + FROM notifications + WHERE user_id = :user_id + ORDER BY id + LIMIT 1 + """ + notification = await db.fetch_one(query, {"user_id": user_id}) - # Create if does not exist. - if notifications is None: - # In case users are new but have not logged in previously. - date_value = datetime.today() - timedelta(days=30) - notifications = Notification( - user_id=user_id, unread_count=0, date=date_value + if notification is None: + date_value = datetime.utcnow() - timedelta(days=30) + insert_query = """ + INSERT INTO notifications (user_id, unread_count, date) + VALUES (:user_id, :unread_count, :date) + """ + await db.execute( + insert_query, + {"user_id": user_id, "unread_count": 0, "date": date_value}, ) - notifications.save() + else: + date_value = notification["date"] - # Count messages that the user has received after last check. 
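# Editor's note: illustrative usage sketch, not part of the diff. Model methods in
# this PR now take an explicit `db: Database` handle instead of using a global
# session; a FastAPI route calling the async rewrite of
# Notification.get_unread_message_count might look like this. The route path, the
# get_db dependency, and the response shape are assumptions, not code from the PR.
from databases import Database
from fastapi import APIRouter, Depends

from backend.db import get_db  # assumed dependency provider
from backend.models.postgis.notification import Notification

router = APIRouter()


@router.get("/notifications/queries/own/count-unread/")
async def get_unread_count(user_id: int, db: Database = Depends(get_db)):
    unread_count = await Notification.get_unread_message_count(user_id, db)
    return {"newMessages": unread_count > 0, "unread": unread_count}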
- count = ( - Message.query.filter_by(to_user_id=user_id, read=False) - .filter(Message.date > notifications.date) - .count() + message_query = """ + SELECT COUNT(*) + FROM messages + WHERE to_user_id = :user_id AND read = False AND date > :date_value + """ + count = await db.fetch_val( + message_query, {"user_id": user_id, "date_value": date_value} ) return count diff --git a/backend/models/postgis/organisation.py b/backend/models/postgis/organisation.py index c19a54a4d8..abc3c431d8 100644 --- a/backend/models/postgis/organisation.py +++ b/backend/models/postgis/organisation.py @@ -1,26 +1,36 @@ +from databases import Database +from fastapi import HTTPException from slugify import slugify +from sqlalchemy import ( + BigInteger, + Column, + ForeignKey, + Integer, + String, + Table, + UniqueConstraint, +) +from sqlalchemy.orm import backref, relationship -from backend import db +from backend.db import Base from backend.exceptions import NotFound from backend.models.dtos.organisation_dto import ( - OrganisationDTO, NewOrganisationDTO, + OrganisationDTO, OrganisationManagerDTO, + UpdateOrganisationDTO, ) -from backend.models.postgis.user import User from backend.models.postgis.campaign import Campaign, campaign_organisations from backend.models.postgis.statuses import OrganisationType - +from backend.models.postgis.user import User # Secondary table defining many-to-many relationship between organisations and managers -organisation_managers = db.Table( +organisation_managers = Table( "organisation_managers", - db.metadata, - db.Column( - "organisation_id", db.Integer, db.ForeignKey("organisations.id"), nullable=False - ), - db.Column("user_id", db.BigInteger, db.ForeignKey("users.id"), nullable=False), - db.UniqueConstraint("organisation_id", "user_id", name="organisation_user_key"), + Base.metadata, + Column("organisation_id", Integer, ForeignKey("organisations.id"), nullable=False), + Column("user_id", BigInteger, ForeignKey("users.id"), nullable=False), + UniqueConstraint("organisation_id", "user_id", name="organisation_user_key"), ) @@ -28,118 +38,169 @@ class InvalidRoleException(Exception): pass -class Organisation(db.Model): +class Organisation(Base): """Describes an Organisation""" __tablename__ = "organisations" # Columns - id = db.Column(db.Integer, primary_key=True) - name = db.Column(db.String(512), nullable=False, unique=True) - slug = db.Column(db.String(255), nullable=False, unique=True) - logo = db.Column(db.String) # URL of a logo - description = db.Column(db.String) - url = db.Column(db.String) - type = db.Column(db.Integer, default=OrganisationType.FREE.value, nullable=False) - subscription_tier = db.Column(db.Integer) - - managers = db.relationship( + id = Column(Integer, primary_key=True) + name = Column(String(512), nullable=False, unique=True) + slug = Column(String(255), nullable=False, unique=True) + logo = Column(String) # URL of a logo + description = Column(String) + url = Column(String) + type = Column(Integer, default=OrganisationType.FREE.value, nullable=False) + subscription_tier = Column(Integer) + + managers = relationship( User, secondary=organisation_managers, - backref=db.backref("organisations", lazy="joined"), + backref=backref("organisations"), ) - campaign = db.relationship( + campaign = relationship( Campaign, secondary=campaign_organisations, backref="organisation" ) - def create(self): - """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - - def save(self): - db.session.commit() - - @classmethod - def 
create_from_dto(cls, new_organisation_dto: NewOrganisationDTO): - """Creates a new organisation from a DTO""" - new_org = cls() - - new_org.name = new_organisation_dto.name - new_org.slug = new_organisation_dto.slug or slugify(new_organisation_dto.name) - new_org.logo = new_organisation_dto.logo - new_org.description = new_organisation_dto.description - new_org.url = new_organisation_dto.url - new_org.type = OrganisationType[new_organisation_dto.type].value - new_org.subscription_tier = new_organisation_dto.subscription_tier - - for manager in new_organisation_dto.managers: - user = User.get_by_username(manager) - - if user is None: - raise NotFound(sub_code="USER_NOT_FOUND", username=manager) - - new_org.managers.append(user) - - new_org.create() - return new_org - - def update(self, organisation_dto: OrganisationDTO): - """Updates Organisation from DTO""" - - for attr, value in organisation_dto.items(): - if attr == "type" and value is not None: - value = OrganisationType[organisation_dto.type].value - if attr == "managers": - continue - - try: - is_field_nullable = self.__table__.columns[attr].nullable - if is_field_nullable and value is not None: - setattr(self, attr, value) - elif value is not None: - setattr(self, attr, value) - except KeyError: - continue - - if organisation_dto.managers: - self.managers = [] - # Need to handle this in the loop so we can take care of NotFound users - for manager in organisation_dto.managers: - new_manager = User.get_by_username(manager) - - if new_manager is None: + async def create_from_dto(new_organisation_dto: NewOrganisationDTO, db: Database): + """Creates a new organisation from a DTO and associates managers""" + slug = new_organisation_dto.slug or slugify(new_organisation_dto.name) + query = """ + INSERT INTO organisations (name, slug, logo, description, url, type, subscription_tier) + VALUES (:name, :slug, :logo, :description, :url, :type, :subscription_tier) + RETURNING id + """ + values = { + "name": new_organisation_dto.name, + "slug": slug, + "logo": new_organisation_dto.logo, + "description": new_organisation_dto.description, + "url": new_organisation_dto.url, + "type": OrganisationType[new_organisation_dto.type].value, + "subscription_tier": new_organisation_dto.subscription_tier, + } + + try: + organisation_id = await db.execute(query, values) + + for manager in new_organisation_dto.managers: + user_query = "SELECT id FROM users WHERE username = :username" + user = await db.fetch_one(user_query, {"username": manager}) + + if not user: raise NotFound(sub_code="USER_NOT_FOUND", username=manager) - self.managers.append(new_manager) + manager_query = """ + INSERT INTO organisation_managers (organisation_id, user_id) + VALUES (:organisation_id, :user_id) + """ + await db.execute( + manager_query, + {"organisation_id": organisation_id, "user_id": user.id}, + ) - db.session.commit() + return organisation_id - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e - def can_be_deleted(self) -> bool: - """An Organisation can be deleted if it doesn't have any projects or teams""" - return len(self.projects) == 0 and len(self.teams) == 0 + async def update(organisation_dto: UpdateOrganisationDTO, db: Database): + """Updates Organisation from DTO""" + try: + org_id = organisation_dto.organisation_id + org_dict = organisation_dto.dict(exclude_unset=True) + if "type" in org_dict and org_dict["type"] is not 
None: + org_dict["type"] = OrganisationType[org_dict["type"].upper()].value + + update_keys = { + key: org_dict[key] + for key in org_dict.keys() + if key not in ["organisation_id", "managers"] + } + set_clause = ", ".join(f"{key} = :{key}" for key in update_keys.keys()) + if set_clause: + update_query = f""" + UPDATE organisations + SET {set_clause} + WHERE id = :id + """ + await db.execute(update_query, values={**update_keys, "id": org_id}) + + if organisation_dto.managers: + clear_managers_query = """ + DELETE FROM organisation_managers + WHERE organisation_id = :id + """ + await db.execute(clear_managers_query, values={"id": org_id}) + for manager_username in organisation_dto.managers: + user_query = "SELECT id FROM users WHERE username = :username" + user = await db.fetch_one( + user_query, {"username": manager_username} + ) + + if not user: + raise NotFound( + sub_code="USER_NOT_FOUND", username=manager_username + ) + + insert_manager_query = """ + INSERT INTO organisation_managers (organisation_id, user_id) + VALUES (:organisation_id, :user_id) + """ + await db.execute( + insert_manager_query, + {"organisation_id": org_id, "user_id": user.id}, + ) + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e + + async def can_be_deleted(organisation_id: int, db) -> bool: + # Check if the organization has any projects + projects_query = """ + SELECT COUNT(*) + FROM projects + WHERE organisation_id = :organisation_id + """ + projects_count = await db.fetch_val( + projects_query, values={"organisation_id": organisation_id} + ) + # Check if the organization has any teams + teams_query = """ + SELECT COUNT(*) + FROM teams + WHERE organisation_id = :organisation_id + """ + teams_count = await db.fetch_val( + teams_query, values={"organisation_id": organisation_id} + ) + # Organisation can be deleted if it has no projects and no teams + return projects_count == 0 and teams_count == 0 @staticmethod - def get(organisation_id: int): + async def get(organisation_id: int, db: Database): """ Gets specified organisation by id :param organisation_id: organisation ID in scope :return: Organisation if found otherwise None """ - return db.session.get(Organisation, organisation_id) + organization = await db.fetch_one( + "SELECT * FROM organisations WHERE id = :id", values={"id": organisation_id} + ) + return organization["id"] if organization else None @staticmethod - def get_organisation_by_name(organisation_name: str): + async def get_organisation_by_name(organisation_name: str, db: Database): """Get organisation by name :param organisation_name: name of organisation :return: Organisation if found else None """ - return Organisation.query.filter_by(name=organisation_name).first() + query = """ + SELECT * FROM organisations + WHERE name = :name + """ + + result = await db.fetch_one(query, values={"name": organisation_name}) + return result if result else None @staticmethod def get_organisation_name_by_id(organisation_id: int): @@ -147,44 +208,126 @@ def get_organisation_name_by_id(organisation_id: int): :param organisation_id: :return: Organisation name """ - return Organisation.query.get(organisation_id).name + return session.query(Organisation).get(organisation_id).name @staticmethod - def get_all_organisations(): + async def get_all_organisations(db: Database): """Gets all organisations""" - return Organisation.query.order_by(Organisation.name).all() + query = """ + SELECT + o.id AS organisation_id, + o.name, + o.slug, + o.logo, + o.description, + o.url, + CASE + WHEN 
o.type = 1 THEN 'FREE' + WHEN o.type = 2 THEN 'DISCOUNTED' + WHEN o.type = 3 THEN 'FULL_FEE' + ELSE 'UNKNOWN' + END AS type, + o.subscription_tier, + COALESCE( + json_agg( + json_build_object( + 'id', u.id, + 'username', u.username, + 'picture_url', u.picture_url + ) + ) FILTER (WHERE u.id IS NOT NULL), '[]' + ) AS managers + FROM organisations o + LEFT JOIN organisation_managers om ON o.id = om.organisation_id + LEFT JOIN users u ON om.user_id = u.id + GROUP BY o.id + """ + result = await db.fetch_all(query) + return result @staticmethod - def get_organisations_managed_by_user(user_id: int): + async def get_organisations_managed_by_user(user_id: int, db: Database): """Gets organisations a user can manage""" - query_results = ( - Organisation.query.join(organisation_managers) - .filter( - (organisation_managers.c.organisation_id == Organisation.id) - & (organisation_managers.c.user_id == user_id) - ) - .order_by(Organisation.name) - .all() - ) - return query_results - - def as_dto(self, omit_managers=False): + query = f""" + SELECT + o.id AS organisation_id, + o.name, + o.slug, + o.logo, + o.description, + o.url, + CASE + WHEN o.type = {OrganisationType.FREE.value} THEN 'FREE' + WHEN o.type = {OrganisationType.DISCOUNTED.value} THEN 'DISCOUNTED' + WHEN o.type = {OrganisationType.FULL_FEE.value} THEN 'FULL_FEE' + ELSE 'UNKNOWN' + END AS type, + o.subscription_tier, + COALESCE( + json_agg( + json_build_object( + 'id', u.id, + 'username', u.username, + 'picture_url', u.picture_url + ) + ) FILTER (WHERE u.id IS NOT NULL), '[]' + ) AS managers + FROM organisations o + LEFT JOIN organisation_managers om ON o.id = om.organisation_id + LEFT JOIN users u ON om.user_id = u.id + WHERE om.user_id = :user_id -- Filter organisations by the user who manages them + GROUP BY o.id + ORDER BY o.name + """ + params = {"user_id": user_id} + result = await db.fetch_all(query, values=params) + return result + + async def fetch_managers(self, session): + """Fetch managers asynchronously""" + await session.refresh(self, ["managers"]) + + # def as_dto(self, omit_managers=False): + # """Returns a dto for an organisation""" + # organisation_dto = OrganisationDTO() + # organisation_dto.organisation_id = self.id + # organisation_dto.name = self.name + # organisation_dto.slug = self.slug + # organisation_dto.logo = self.logo + # organisation_dto.description = self.description + # organisation_dto.url = self.url + # organisation_dto.managers = [] + # organisation_dto.type = OrganisationType(self.type).name + # organisation_dto.subscription_tier = self.subscription_tier + + # if omit_managers: + # return organisation_dto + + # for manager in self.managers: + # org_manager_dto = OrganisationManagerDTO() + # org_manager_dto.username = manager.username + # org_manager_dto.picture_url = manager.picture_url + # organisation_dto.managers.append(org_manager_dto) + + # return organisation_dto + + def as_dto(org, omit_managers=False): """Returns a dto for an organisation""" organisation_dto = OrganisationDTO() - organisation_dto.organisation_id = self.id - organisation_dto.name = self.name - organisation_dto.slug = self.slug - organisation_dto.logo = self.logo - organisation_dto.description = self.description - organisation_dto.url = self.url + organisation_dto.organisation_id = org.organisation_id + organisation_dto.name = org.name + organisation_dto.slug = org.slug + organisation_dto.logo = org.logo + organisation_dto.description = org.description + organisation_dto.url = org.url organisation_dto.managers = [] - 
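# Editor's note: annotation plus sketch, not part of the diff. The
# get_all_organisations()/get_organisations_managed_by_user() queries above
# aggregate managers with json_agg, and the `databases`/asyncpg stack commonly
# returns that column as a JSON string rather than a list of objects. If so, the
# managers loop below would need the column decoded first, roughly as sketched
# here (assuming OrganisationManagerDTO accepts these fields as keyword arguments).
import json

from backend.models.dtos.organisation_dto import OrganisationManagerDTO


def parse_managers(row) -> list:
    """Decode the json_agg(...) managers column from an organisation row."""
    raw = row["managers"]
    managers = json.loads(raw) if isinstance(raw, str) else raw
    return [
        OrganisationManagerDTO(username=m["username"], picture_url=m["picture_url"])
        for m in managers
    ]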
organisation_dto.type = OrganisationType(self.type).name - organisation_dto.subscription_tier = self.subscription_tier + organisation_dto.type = org.type + organisation_dto.subscription_tier = org.subscription_tier if omit_managers: return organisation_dto - for manager in self.managers: + for manager in org.managers: org_manager_dto = OrganisationManagerDTO() org_manager_dto.username = manager.username org_manager_dto.picture_url = manager.picture_url diff --git a/backend/models/postgis/partner.py b/backend/models/postgis/partner.py index 507f0fb4df..56b865d8e0 100644 --- a/backend/models/postgis/partner.py +++ b/backend/models/postgis/partner.py @@ -1,58 +1,57 @@ -from backend import db import json + +from databases import Database +from sqlalchemy import Column, Integer, String + +from backend.db import Base from backend.exceptions import NotFound from backend.models.dtos.partner_dto import PartnerDTO +from typing import Optional -class Partner(db.Model): - """Model for Partners""" +class Partner(Base): + """Describes a Partner""" __tablename__ = "partners" - id = db.Column(db.Integer, primary_key=True, autoincrement=True) - name = db.Column(db.String(150), nullable=False, unique=True) - primary_hashtag = db.Column(db.String(200), nullable=False) - secondary_hashtag = db.Column(db.String(200)) - logo_url = db.Column(db.String(500)) - link_meta = db.Column(db.String(300)) - link_x = db.Column(db.String(300)) - link_instagram = db.Column(db.String(300)) - current_projects = db.Column(db.String) - permalink = db.Column(db.String(500), unique=True) - website_links = db.Column(db.String) - mapswipe_group_id = db.Column(db.String, nullable=True) - - def create(self): - """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - - def save(self): - """Save changes to DB""" - db.session.commit() - - def delete(self): - """Deletes from the DB""" - db.session.delete(self) - db.session.commit() + id = Column(Integer, primary_key=True, autoincrement=True) + name = Column(String(150), nullable=False, unique=True) + primary_hashtag = Column(String(200), nullable=False) + secondary_hashtag = Column(String(200), nullable=True) + logo_url = Column(String(500), nullable=True) + link_meta = Column(String(300), nullable=True) + link_x = Column(String(300), nullable=True) # Formerly link_twitter + link_instagram = Column(String(300), nullable=True) + current_projects = Column(String, nullable=True) + permalink = Column(String(500), unique=True, nullable=True) + website_links = Column(String, nullable=True) + mapswipe_group_id = Column(String, nullable=True) @staticmethod - def get_all_partners(): - """Get all partners in DB""" - return db.session.query(Partner.id).all() + async def get_all_partners(db: Database): + """ + Retrieve all partner IDs + """ + query = "SELECT id FROM partners" + results = await db.fetch_all(query) + return [row["id"] for row in results] @staticmethod - def get_by_permalink(permalink: str): - """Get partner by permalink""" - return Partner.query.filter_by(permalink=permalink).one_or_none() + async def get_by_permalink(permalink: str, db: Database) -> Optional[PartnerDTO]: + """Get partner by permalink using raw SQL.""" + query = "SELECT * FROM partners WHERE permalink = :permalink" + result = await db.fetch_one(query, values={"permalink": permalink}) + if result is None: + raise NotFound(sub_code="PARTNER_NOT_FOUND", permalink=permalink) + return result @staticmethod - def get_by_id(partner_id: int): - """Get partner by id""" - partner = 
db.session.get(Partner, partner_id) - if partner is None: + async def get_by_id(partner_id: int, db: Database) -> PartnerDTO: + query = "SELECT * FROM partners WHERE id = :partner_id" + result = await db.fetch_one(query, values={"partner_id": partner_id}) + if result is None: raise NotFound(sub_code="PARTNER_NOT_FOUND", partner_id=partner_id) - return partner + return result def as_dto(self) -> PartnerDTO: """Creates partner from DTO""" diff --git a/backend/models/postgis/priority_area.py b/backend/models/postgis/priority_area.py index c38a950de7..9afc62210c 100644 --- a/backend/models/postgis/priority_area.py +++ b/backend/models/postgis/priority_area.py @@ -1,32 +1,39 @@ -import geojson import json -from backend import db + +import geojson +from databases import Database from geoalchemy2 import Geometry -from backend.models.postgis.utils import InvalidGeoJson, ST_SetSRID, ST_GeomFromGeoJSON +from sqlalchemy import Column, ForeignKey, Integer, Table + +from backend.db import Base +from backend.models.postgis.utils import InvalidGeoJson # Priority areas aren't shared, however, this arch was taken from TM2 to ease data migration -project_priority_areas = db.Table( +project_priority_areas = Table( "project_priority_areas", - db.metadata, - db.Column("project_id", db.Integer, db.ForeignKey("projects.id")), - db.Column("priority_area_id", db.Integer, db.ForeignKey("priority_areas.id")), + Base.metadata, + Column("project_id", Integer, ForeignKey("projects.id")), + Column("priority_area_id", Integer, ForeignKey("priority_areas.id")), ) -class PriorityArea(db.Model): +class PriorityArea(Base): """Describes an individual priority area""" __tablename__ = "priority_areas" - id = db.Column(db.Integer, primary_key=True) - geometry = db.Column(Geometry("POLYGON", srid=4326)) + id = Column(Integer, primary_key=True) + geometry = Column(Geometry("POLYGON", srid=4326)) @classmethod - def from_dict(cls, area_poly: dict): - """Create a new Priority Area from dictionary""" + async def from_dict(cls, area_poly: dict, db: Database): + """Create a new Priority Area from dictionary and insert into the database.""" + + # Load GeoJSON from the dictionary pa_geojson = geojson.loads(json.dumps(area_poly)) - if type(pa_geojson) is not geojson.Polygon: + # Ensure it's a valid Polygon + if not isinstance(pa_geojson, geojson.Polygon): raise InvalidGeoJson("Priority Areas must be supplied as Polygons") if not pa_geojson.is_valid: @@ -34,13 +41,36 @@ def from_dict(cls, area_poly: dict): "Priority Area: Invalid Polygon - " + ", ".join(pa_geojson.errors()) ) - pa = cls() + # Convert the GeoJSON into WKT format using a raw SQL query valid_geojson = geojson.dumps(pa_geojson) - pa.geometry = ST_SetSRID(ST_GeomFromGeoJSON(valid_geojson), 4326) - return pa - - def get_as_geojson(self): - """Helper to translate geometry back to a GEOJson Poly""" - with db.engine.connect() as conn: - pa_geojson = conn.execute(self.geometry.ST_AsGeoJSON()).scalar() - return geojson.loads(pa_geojson) + geo_query = """ + SELECT ST_AsText( + ST_SetSRID( + ST_GeomFromGeoJSON(:geojson), 4326 + ) + ) AS geometry_wkt; + """ + result = await db.fetch_one(query=geo_query, values={"geojson": valid_geojson}) + geometry_wkt = result["geometry_wkt"] if result else None + + if not geometry_wkt: + raise InvalidGeoJson("Failed to create geometry from the given GeoJSON") + + # Insert the new Priority Area into the database and return the inserted ID + insert_query = """ + INSERT INTO priority_areas (geometry) + VALUES (ST_GeomFromText(:geometry, 4326)) + RETURNING 
id; + """ + insert_result = await db.fetch_one( + query=insert_query, values={"geometry": geometry_wkt} + ) + + if insert_result: + # Assign the ID and geometry to the PriorityArea object + pa = cls() + pa.id = insert_result["id"] + pa.geometry = geometry_wkt + return pa + else: + raise Exception("Failed to insert Priority Area") diff --git a/backend/models/postgis/project.py b/backend/models/postgis/project.py index 440edff4cf..14ff1a1713 100644 --- a/backend/models/postgis/project.py +++ b/backend/models/postgis/project.py @@ -1,181 +1,197 @@ import json +import os import re from typing import Optional -from cachetools import TTLCache, cached import geojson -import datetime -from flask import current_app -from geoalchemy2 import Geometry +import requests +from cachetools import TTLCache +from databases import Database +from fastapi import HTTPException +from geoalchemy2 import Geometry, WKTElement from geoalchemy2.shape import to_shape -from sqlalchemy.sql.expression import cast, or_ -from sqlalchemy import desc, func, Time, orm, literal +from loguru import logger from shapely.geometry import shape +from sqlalchemy import ( + BigInteger, + Boolean, + Column, + DateTime, + ForeignKey, + Index, + Integer, + String, + Table, + delete, + func, + inspect, + orm, + select, + update, +) from sqlalchemy.dialects.postgresql import ARRAY from sqlalchemy.ext.hybrid import hybrid_property +from sqlalchemy.orm import backref, relationship -import requests - -from backend import db +from backend.config import settings +from backend.db import Base from backend.exceptions import NotFound -from backend.models.dtos.campaign_dto import CampaignDTO +from backend.models.dtos.campaign_dto import CampaignDTO, ListCampaignDTO +from backend.models.dtos.interests_dto import InterestDTO from backend.models.dtos.project_dto import ( - ProjectDTO, + CustomEditorDTO, DraftProjectDTO, - ProjectSummary, PMDashboardDTO, - ProjectStatsDTO, - ProjectUserStatsDTO, + ProjectDTO, + ProjectInfoDTO, ProjectSearchDTO, + ProjectStatsDTO, + ProjectSummary, ProjectTeamDTO, - ProjectInfoDTO, + ProjectUserStatsDTO, ) -from backend.models.dtos.interests_dto import InterestDTO - from backend.models.dtos.tags_dto import TagsDTO -from backend.models.postgis.organisation import Organisation +from backend.models.postgis.campaign import Campaign, campaign_projects from backend.models.postgis.custom_editors import CustomEditor +from backend.models.postgis.interests import Interest, project_interests +from backend.models.postgis.organisation import Organisation from backend.models.postgis.priority_area import PriorityArea, project_priority_areas -from backend.models.postgis.project_info import ProjectInfo from backend.models.postgis.project_chat import ProjectChat +from backend.models.postgis.project_info import ProjectInfo from backend.models.postgis.statuses import ( - ProjectStatus, - ProjectPriority, - TaskStatus, + Editors, + MappingPermission, MappingTypes, + ProjectDifficulty, + ProjectPriority, + ProjectStatus, TaskCreationMode, - Editors, + TaskStatus, TeamRoles, - MappingPermission, ValidationPermission, - ProjectDifficulty, ) -from backend.models.postgis.task import Task, TaskHistory +from backend.models.postgis.task import Task from backend.models.postgis.team import Team from backend.models.postgis.user import User -from backend.models.postgis.campaign import Campaign, campaign_projects - -from backend.models.postgis.utils import ( - ST_SetSRID, - ST_GeomFromGeoJSON, - timestamp, - ST_Centroid, -) +from 
backend.models.postgis.utils import timestamp from backend.services.grid.grid_service import GridService -from backend.models.postgis.interests import Interest, project_interests -import os # Secondary table defining many-to-many join for projects that were favorited by users. -project_favorites = db.Table( +project_favorites = Table( "project_favorites", - db.metadata, - db.Column("project_id", db.Integer, db.ForeignKey("projects.id")), - db.Column("user_id", db.BigInteger, db.ForeignKey("users.id")), + Base.metadata, + Column("project_id", Integer, ForeignKey("projects.id")), + Column("user_id", BigInteger, ForeignKey("users.id")), ) # Secondary table defining many-to-many join for private projects that only defined users can map on -project_allowed_users = db.Table( +project_allowed_users = Table( "project_allowed_users", - db.metadata, - db.Column("project_id", db.Integer, db.ForeignKey("projects.id")), - db.Column("user_id", db.BigInteger, db.ForeignKey("users.id")), + Base.metadata, + Column("project_id", Integer, ForeignKey("projects.id")), + Column("user_id", BigInteger, ForeignKey("users.id")), ) -class ProjectTeams(db.Model): +class ProjectTeams(Base): __tablename__ = "project_teams" - team_id = db.Column(db.Integer, db.ForeignKey("teams.id"), primary_key=True) - project_id = db.Column(db.Integer, db.ForeignKey("projects.id"), primary_key=True) - role = db.Column(db.Integer, nullable=False) + team_id = Column(Integer, ForeignKey("teams.id"), primary_key=True) + project_id = Column(Integer, ForeignKey("projects.id"), primary_key=True) + role = Column(Integer, nullable=False) - project = db.relationship( - "Project", backref=db.backref("teams", cascade="all, delete-orphan") - ) - team = db.relationship( - Team, backref=db.backref("projects", cascade="all, delete-orphan") + project = relationship( + "Project", backref=backref("teams", cascade="all, delete-orphan") ) + team = relationship(Team, backref=backref("projects", cascade="all, delete-orphan")) - def create(self): + async def create(self, db: Database): """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - - def save(self): - """Save changes to db""" - db.session.commit() - - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() + await db.execute( + self.__table__.insert().values( + team_id=self.team_id, project_id=self.project_id, role=self.role + ) + ) # cache mapper counts for 30 seconds active_mappers_cache = TTLCache(maxsize=1024, ttl=30) -class Project(db.Model): +class Project(Base): """Describes a HOT Mapping Project""" __tablename__ = "projects" + def __init__(self, **kwargs): + # First, initialize with provided kwargs + super().__init__(**kwargs) + + # Then dynamically set defaults for any fields that are None + for column in self.__table__.columns: + if getattr(self, column.name) is None and column.default is not None: + # Retrieve the default value from the column + default_value = ( + column.default.arg + if callable(column.default.arg) + else column.default.arg + ) + setattr(self, column.name, default_value) + # Columns - id = db.Column(db.Integer, primary_key=True) - status = db.Column(db.Integer, default=ProjectStatus.DRAFT.value, nullable=False) - created = db.Column(db.DateTime, default=timestamp, nullable=False) - priority = db.Column(db.Integer, default=ProjectPriority.MEDIUM.value) - default_locale = db.Column( - db.String(10), default="en" + id = Column(Integer, primary_key=True) + status = 
Column(Integer, default=ProjectStatus.DRAFT.value, nullable=False) + created = Column(DateTime, default=timestamp(), nullable=False) + priority = Column(Integer, default=ProjectPriority.MEDIUM.value) + default_locale = Column( + String(10), default="en" ) # The locale that is returned if requested locale not available - author_id = db.Column( - db.BigInteger, db.ForeignKey("users.id", name="fk_users"), nullable=False + author_id = Column( + BigInteger, ForeignKey("users.id", name="fk_users"), nullable=False ) - difficulty = db.Column( - db.Integer, default=2, nullable=False, index=True + difficulty = Column( + Integer, default=2, nullable=False, index=True ) # Mapper level project is suitable for - mapping_permission = db.Column(db.Integer, default=MappingPermission.ANY.value) - validation_permission = db.Column( - db.Integer, default=ValidationPermission.LEVEL.value + mapping_permission = Column(Integer, default=MappingPermission.ANY.value) + validation_permission = Column( + Integer, default=ValidationPermission.LEVEL.value ) # Means only users with validator role can validate - enforce_random_task_selection = db.Column( - db.Boolean, default=False + enforce_random_task_selection = Column( + Boolean, default=False ) # Force users to edit at random to avoid mapping "easy" tasks - private = db.Column(db.Boolean, default=False) # Only allowed users can validate - featured = db.Column( - db.Boolean, default=False - ) # Only PMs can set a project as featured - changeset_comment = db.Column(db.String) - osmcha_filter_id = db.Column( - db.String + private = Column(Boolean, default=False) # Only allowed users can validate + featured = Column(Boolean, default=False) # Only PMs can set a project as featured + changeset_comment = Column(String) + osmcha_filter_id = Column( + String ) # Optional custom filter id for filtering on OSMCha - due_date = db.Column(db.DateTime) - imagery = db.Column(db.String) - josm_preset = db.Column(db.String) - id_presets = db.Column(ARRAY(db.String)) - extra_id_params = db.Column(db.String) - rapid_power_user = db.Column(db.Boolean, default=False) - last_updated = db.Column(db.DateTime, default=timestamp) - progress_email_sent = db.Column(db.Boolean, default=False) - license_id = db.Column(db.Integer, db.ForeignKey("licenses.id", name="fk_licenses")) - geometry = db.Column(Geometry("MULTIPOLYGON", srid=4326), nullable=False) - centroid = db.Column(Geometry("POINT", srid=4326), nullable=False) - country = db.Column(ARRAY(db.String), default=[]) - task_creation_mode = db.Column( - db.Integer, default=TaskCreationMode.GRID.value, nullable=False + due_date = Column(DateTime) + imagery = Column(String) + josm_preset = Column(String) + id_presets = Column(ARRAY(String)) + extra_id_params = Column(String) + rapid_power_user = Column(Boolean, default=False) + last_updated = Column(DateTime, default=timestamp()) + progress_email_sent = Column(Boolean, default=False) + license_id = Column(Integer, ForeignKey("licenses.id", name="fk_licenses")) + geometry = Column(Geometry("MULTIPOLYGON", srid=4326), nullable=False) + centroid = Column(Geometry("POINT", srid=4326), nullable=False) + country = Column(ARRAY(String), default=[]) + task_creation_mode = Column( + Integer, default=TaskCreationMode.GRID.value, nullable=False ) - organisation_id = db.Column( - db.Integer, - db.ForeignKey("organisations.id", name="fk_organisations"), + organisation_id = Column( + Integer, + ForeignKey("organisations.id", name="fk_organisations"), index=True, ) # Tags - mapping_types = 
db.Column(ARRAY(db.Integer), index=True) + mapping_types = Column(ARRAY(Integer), index=True) # Editors - mapping_editors = db.Column( - ARRAY(db.Integer), + mapping_editors = Column( + ARRAY(Integer), default=[ Editors.ID.value, Editors.JOSM.value, @@ -184,8 +200,8 @@ class Project(db.Model): index=True, nullable=False, ) - validation_editors = db.Column( - ARRAY(db.Integer), + validation_editors = Column( + ARRAY(Integer), default=[ Editors.ID.value, Editors.JOSM.value, @@ -196,10 +212,10 @@ class Project(db.Model): ) # Stats - total_tasks = db.Column(db.Integer, nullable=False) - tasks_mapped = db.Column(db.Integer, default=0, nullable=False) - tasks_validated = db.Column(db.Integer, default=0, nullable=False) - tasks_bad_imagery = db.Column(db.Integer, default=0, nullable=False) + total_tasks = Column(Integer, nullable=False) + tasks_mapped = Column(Integer, default=0, nullable=False) + tasks_validated = Column(Integer, default=0, nullable=False) + tasks_bad_imagery = Column(Integer, default=0, nullable=False) # Total tasks are always >= 1 @hybrid_property @@ -215,31 +231,32 @@ def percent_validated(self): return self.tasks_validated * 100 // (self.total_tasks - self.tasks_bad_imagery) # Mapped Objects - tasks = db.relationship( + tasks = orm.relationship( Task, backref="projects", cascade="all, delete, delete-orphan", lazy="dynamic" ) - project_info = db.relationship(ProjectInfo, lazy="dynamic", cascade="all") - project_chat = db.relationship(ProjectChat, lazy="dynamic", cascade="all") - author = db.relationship(User) - allowed_users = db.relationship(User, secondary=project_allowed_users) - priority_areas = db.relationship( + project_info = orm.relationship(ProjectInfo, lazy="dynamic", cascade="all") + project_chat = orm.relationship(ProjectChat, lazy="dynamic", cascade="all") + author = orm.relationship(User) + allowed_users = orm.relationship(User, secondary=project_allowed_users) + priority_areas = orm.relationship( PriorityArea, secondary=project_priority_areas, cascade="all, delete-orphan", single_parent=True, ) - custom_editor = db.relationship( + custom_editor = orm.relationship( CustomEditor, cascade="all, delete-orphan", uselist=False ) - favorited = db.relationship(User, secondary=project_favorites, backref="favorites") - organisation = db.relationship(Organisation, backref="projects") - campaign = db.relationship( + favorited = orm.relationship(User, secondary=project_favorites, backref="favorites") + # organisation = orm.relationship(Organisation, backref="projects", lazy="joined") + organisation = orm.relationship(Organisation, backref="projects") + campaign = orm.relationship( Campaign, secondary=campaign_projects, backref="projects" ) - interests = db.relationship( + interests = orm.relationship( Interest, secondary=project_interests, backref="projects" ) - partnerships = db.relationship("ProjectPartnership", backref="project") + partnerships = orm.relationship("ProjectPartnership", backref="project") def create_draft_project(self, draft_project_dto: DraftProjectDTO): """ @@ -247,41 +264,59 @@ def create_draft_project(self, draft_project_dto: DraftProjectDTO): :param draft_project_dto: DTO containing draft project details :param aoi: Area of Interest for the project (eg boundary of project) """ - self.project_info.append( - ProjectInfo.create_from_name(draft_project_dto.project_name) - ) - self.organisation = draft_project_dto.organisation + organisation = dict(draft_project_dto.organisation) + organisation["id"] = organisation.pop("organisation_id") + 
self.organisation = Organisation(**organisation) + self.organisation_id = self.organisation.id self.status = ProjectStatus.DRAFT.value self.author_id = draft_project_dto.user_id + self.created = timestamp() self.last_updated = timestamp() - def set_project_aoi(self, draft_project_dto: DraftProjectDTO): + async def set_project_aoi(self, draft_project_dto: DraftProjectDTO, db: Database): """Sets the AOI for the supplied project""" aoi_geojson = geojson.loads(json.dumps(draft_project_dto.area_of_interest)) aoi_geometry = GridService.merge_to_multi_polygon(aoi_geojson, dissolve=True) valid_geojson = geojson.dumps(aoi_geometry) - self.geometry = ST_SetSRID(ST_GeomFromGeoJSON(valid_geojson), 4326) - self.centroid = ST_Centroid(self.geometry) + + query = """ + SELECT ST_AsText( + ST_SetSRID( + ST_GeomFromGeoJSON(:geojson), 4326 + ) + ) AS geometry_wkt; + """ + # Execute the query with the GeoJSON value passed in as a parameter + result = await db.fetch_one(query=query, values={"geojson": valid_geojson}) + self.geometry = result["geometry_wkt"] if result else None + + query = """ + SELECT ST_AsText(ST_Centroid(ST_SetSRID(ST_GeomFromGeoJSON(:geometry), 4326))) AS centroid + """ + + # Execute the query and pass the GeoJSON as a parameter + result = await db.fetch_one(query=query, values={"geometry": valid_geojson}) + self.centroid = result["centroid"] if result else None def set_default_changeset_comment(self): """Sets the default changeset comment""" - default_comment = current_app.config["DEFAULT_CHANGESET_COMMENT"] + default_comment = settings.DEFAULT_CHANGESET_COMMENT self.changeset_comment = ( f"{default_comment}-{self.id} {self.changeset_comment}" if self.changeset_comment is not None else f"{default_comment}-{self.id}" ) - self.save() def set_country_info(self): """Sets the default country based on centroid""" + centroid = WKTElement(self.centroid, srid=4326) - centroid = to_shape(self.centroid) + centroid = to_shape(centroid) lat, lng = (centroid.y, centroid.x) url = "{0}/reverse?format=jsonv2&lat={1}&lon={2}&accept-language=en".format( - current_app.config["OSM_NOMINATIM_SERVER_URL"], lat, lng + settings.OSM_NOMINATIM_SERVER_URL, lat, lng ) headers = { "User-Agent": ( @@ -303,35 +338,95 @@ def set_country_info(self): requests.exceptions.ConnectionError, requests.exceptions.HTTPError, ) as e: - current_app.logger.debug(e, exc_info=True) + logger.debug(e, exc_info=True) - self.save() - - def create(self): + async def create(self, project_name: str, db: Database): """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() + values = {} + for column in Project.__table__.columns: + # Get attribute value from the instance + attribute_value = getattr(self, column.name, None) + values[column.name] = attribute_value + + values.pop("id", None) + + project = await db.execute(Project.__table__.insert().values(**values)) + await db.execute( + ProjectInfo.__table__.insert().values( + project_id=project, locale="en", name=project_name + ) + ) + # Set the default changeset comment + default_comment = settings.DEFAULT_CHANGESET_COMMENT + self.changeset_comment = ( + f"{default_comment}-{project} {self.changeset_comment}" + if self.changeset_comment is not None + else f"{default_comment}-{project}" + ) + # Update the changeset comment in the database + await db.execute( + Project.__table__.update() + .where(Project.__table__.c.id == project) + .values(changeset_comment=self.changeset_comment) + ) - def save(self): + for task in self.tasks: + await db.execute( + 
Task.__table__.insert().values( + id=task.id, + project_id=project, + x=task.x, + y=task.y, + zoom=task.zoom, + is_square=task.is_square, + task_status=TaskStatus.READY.value, + extra_properties=task.extra_properties, + geometry=task.geometry, + ) + ) + + return project + + async def save(self, db: Database): """Save changes to db""" - db.session.commit() + columns = { + c.key: getattr(self, c.key) for c in inspect(self).mapper.column_attrs + } + await db.execute( + Project.__table__.update().where(Project.id == self.id).values(**columns) + ) + + for task in self.tasks: + await db.execute( + Task.__table__.insert().values( + id=task.id, + project_id=self.id, + x=task.x, + y=task.y, + zoom=task.zoom, + is_square=task.is_square, + task_status=TaskStatus.READY.value, + extra_properties=task.extra_properties, + geometry=task.geometry, + ) + ) @staticmethod - def clone(project_id: int, author_id: int): - """Clone project""" + async def clone(project_id: int, author_id: int, db: Database): + """Clone a project using encode databases and raw SQL.""" + # Fetch the original project data + orig_query = "SELECT * FROM projects WHERE id = :project_id" + orig = await db.fetch_one(orig_query, {"project_id": project_id}) - orig = db.session.get(Project, project_id) - if orig is None: + if not orig: raise NotFound(sub_code="PROJECT_NOT_FOUND", project_id=project_id) - # Transform into dictionary. - orig_metadata = orig.__dict__.copy() + orig_metadata = dict(orig) + items_to_remove = ["id", "allowed_users"] + for item in items_to_remove: + orig_metadata.pop(item, None) - # Remove unneeded data. - items_to_remove = ["_sa_instance_state", "id", "allowed_users"] - [orig_metadata.pop(i, None) for i in items_to_remove] - - # Remove clone from session so we can reinsert it as a new object + # Update metadata for the new project orig_metadata.update( { "total_tasks": 0, @@ -345,64 +440,145 @@ def clone(project_id: int, author_id: int): } ) - new_proj = Project(**orig_metadata) - db.session.add(new_proj) + # Construct the INSERT query for the new project + columns = ", ".join(orig_metadata.keys()) + values = ", ".join([f":{key}" for key in orig_metadata.keys()]) + insert_project_query = ( + f"INSERT INTO projects ({columns}) VALUES ({values}) RETURNING id" + ) + new_project_id = await db.execute(insert_project_query, orig_metadata) - proj_info = [] - for info in orig.project_info.all(): - info_data = info.__dict__.copy() - info_data.pop("_sa_instance_state") - info_data.update( - {"project_id": new_proj.id, "project_id_str": str(new_proj.id)} - ) - proj_info.append(ProjectInfo(**info_data)) + # Clone project_info data + project_info_query = "SELECT * FROM project_info WHERE project_id = :project_id" + project_info_records = await db.fetch_all( + project_info_query, {"project_id": project_id} + ) - new_proj.project_info = proj_info + for info in project_info_records: + info_data = dict(info) + info_data.pop("id", None) + info_data.update({"project_id": new_project_id}) + columns_info = ", ".join(info_data.keys()) + values_info = ", ".join([f":{key}" for key in info_data.keys()]) + insert_info_query = ( + f"INSERT INTO project_info ({columns_info}) VALUES ({values_info})" + ) + await db.execute(insert_info_query, info_data) + + # Clone teams data + teams_query = "SELECT * FROM project_teams WHERE project_id = :project_id" + team_records = await db.fetch_all(teams_query, {"project_id": project_id}) + + for team in team_records: + team_data = dict(team) + team_data.pop("id", None) + team_data.update({"project_id": 
new_project_id}) + columns_team = ", ".join(team_data.keys()) + values_team = ", ".join([f":{key}" for key in team_data.keys()]) + insert_team_query = ( + f"INSERT INTO project_teams ({columns_team}) VALUES ({values_team})" + ) + await db.execute(insert_team_query, team_data) - # Replace changeset comment. - default_comment = current_app.config["DEFAULT_CHANGESET_COMMENT"] + # Clone campaigns associated with the original project + campaign_query = ( + "SELECT campaign_id FROM campaign_projects WHERE project_id = :project_id" + ) + campaign_ids = await db.fetch_all(campaign_query, {"project_id": project_id}) + + for campaign in campaign_ids: + clone_campaign_query = """ + INSERT INTO campaign_projects (campaign_id, project_id) + VALUES (:campaign_id, :new_project_id) + """ + await db.execute( + clone_campaign_query, + { + "campaign_id": campaign["campaign_id"], + "new_project_id": new_project_id, + }, + ) - if default_comment is not None: - orig_changeset = f"{default_comment}-{orig.id}" # Preserve space - new_proj.changeset_comment = orig.changeset_comment.replace( - orig_changeset, "" - ).strip() + # Clone interests associated with the original project + interest_query = ( + "SELECT interest_id FROM project_interests WHERE project_id = :project_id" + ) + interest_ids = await db.fetch_all(interest_query, {"project_id": project_id}) + + for interest in interest_ids: + clone_interest_query = """ + INSERT INTO project_interests (interest_id, project_id) + VALUES (:interest_id, :new_project_id) + """ + await db.execute( + clone_interest_query, + { + "interest_id": interest["interest_id"], + "new_project_id": new_project_id, + }, + ) - # Populate teams, interests and campaigns - teams = [] - for team in orig.teams: - team_data = team.__dict__.copy() - team_data.pop("_sa_instance_state") - team_data.update({"project_id": new_proj.id}) - teams.append(ProjectTeams(**team_data)) - new_proj.teams = teams + # Clone CustomEditor associated with the original project + custom_editor_query = """ + SELECT name, description, url FROM project_custom_editors WHERE project_id = :project_id + """ + custom_editor = await db.fetch_one( + custom_editor_query, {"project_id": project_id} + ) - for field in ["interests", "campaign"]: - value = getattr(orig, field) - setattr(new_proj, field, value) - if orig.custom_editor: - new_proj.custom_editor = orig.custom_editor.clone_to_project(new_proj.id) + if custom_editor: + clone_custom_editor_query = """ + INSERT INTO project_custom_editors (project_id, name, description, url) + VALUES (:new_project_id, :name, :description, :url) + """ + await db.execute( + clone_custom_editor_query, + { + "new_project_id": new_project_id, + "name": custom_editor["name"], + "description": custom_editor["description"], + "url": custom_editor["url"], + }, + ) - return new_proj + # Return the new project data + new_project_query = "SELECT * FROM projects WHERE id = :new_project_id" + new_project = await db.fetch_one( + new_project_query, {"new_project_id": new_project_id} + ) + return Project(**new_project) @staticmethod - def get(project_id: int) -> Optional["Project"]: + async def get(project_id: int, db: Database) -> Optional["Project"]: """ Gets specified project :param project_id: project ID in scope + :param db: Instance of `databases.Database` for querying :return: Project if found otherwise None """ - return db.session.get( - Project, - project_id, - options=[ + # Construct the SQLAlchemy select statement + query = ( + select(Project) + .where(Project.id == project_id) + .options( 
orm.noload(Project.tasks), orm.noload(Project.messages), orm.noload(Project.project_chat), - ], + ) ) - def update(self, project_dto: ProjectDTO): + # Execute the query using the `fetch_one` method of `db` + result = await db.fetch_one(query) + + # If a result is found, map it back to the Project ORM class + # (If `Project` is a Core table, you can directly return `result`) + if result: + project = Project(**result) + return project + + return None + + async def update(self, project_dto: ProjectDTO, db: Database): """Updates project from DTO""" self.status = ProjectStatus[project_dto.project_status].value self.priority = ProjectPriority[project_dto.project_priority].value @@ -416,7 +592,9 @@ def update(self, project_dto: ProjectDTO): self.private = project_dto.private self.difficulty = ProjectDifficulty[project_dto.difficulty.upper()].value self.changeset_comment = project_dto.changeset_comment - self.due_date = project_dto.due_date + self.due_date = ( + project_dto.due_date.replace(tzinfo=None) if project_dto.due_date else None + ) self.imagery = project_dto.imagery self.josm_preset = project_dto.josm_preset self.id_presets = project_dto.id_presets @@ -435,13 +613,29 @@ def update(self, project_dto: ProjectDTO): self.osmcha_filter_id = None if project_dto.organisation: - org = Organisation.get(project_dto.organisation) - if org is None: + organisation_query = "SELECT * FROM organisations WHERE id = :id" + organization = await db.fetch_one( + organisation_query, values={"id": project_dto.organisation} + ) + + if organization is None: raise NotFound( sub_code="ORGANISATION_NOT_FOUND", organisation_id=project_dto.organisation, ) - self.organisation = org + + update_organisation_query = """ + UPDATE projects + SET organisation_id = :organisation_id + WHERE id = :project_id + """ + await db.execute( + update_organisation_query, + values={ + "organisation_id": project_dto.organisation, + "project_id": project_dto.project_id, + }, + ) # Cast MappingType strings to int array type_array = [] @@ -468,57 +662,110 @@ def update(self, project_dto: ProjectDTO): self.allowed_users.append(user) # Update teams and projects relationship. 
- self.teams = [] + await db.execute(delete(ProjectTeams).where(ProjectTeams.project_id == self.id)) if hasattr(project_dto, "project_teams") and project_dto.project_teams: for team_dto in project_dto.project_teams: - team = Team.get(team_dto.team_id) - + team = await Team.get(team_dto.team_id, db) if team is None: raise NotFound(sub_code="TEAM_NOT_FOUND", team_id=team_dto.team_id) - role = TeamRoles[team_dto.role].value - project_team = ProjectTeams(project=self, team=team, role=role) - db.session.add(project_team) + project_team = ProjectTeams( + project_id=self.id, team_id=team.id, role=role + ) + await project_team.create(db) # Set Project Info for all returned locales for dto in project_dto.project_info_locales: - project_info = self.project_info.filter_by(locale=dto.locale).one_or_none() + project_info = await db.fetch_one( + select(ProjectInfo).where( + ProjectInfo.project_id == self.id, ProjectInfo.locale == dto.locale + ) + ) if project_info is None: - new_info = ProjectInfo.create_from_dto( - dto + new_info = await ProjectInfo.create_from_dto( + dto, self.id, db ) # Can't find info so must be new locale - self.project_info.append(new_info) else: - project_info.update_from_dto(dto) + await ProjectInfo.update_from_dto(ProjectInfo(**project_info), dto, db) - self.priority_areas = [] # Always clear Priority Area prior to updating + # Always clear Priority Area prior to updating + await Project.clear_existing_priority_areas(db, self.id) if project_dto.priority_areas: for priority_area in project_dto.priority_areas: - pa = PriorityArea.from_dict(priority_area) - self.priority_areas.append(pa) + pa = await PriorityArea.from_dict(priority_area, db) + # Link project and priority area in the database + if pa and pa.id: + link_query = """ + INSERT INTO project_priority_areas (project_id, priority_area_id) + VALUES (:project_id, :priority_area_id) + """ + await db.execute( + query=link_query, + values={"project_id": self.id, "priority_area_id": pa.id}, + ) if project_dto.custom_editor: if not self.custom_editor: - new_editor = CustomEditor.create_from_dto( - self.id, project_dto.custom_editor + new_editor = await CustomEditor.create_from_dto( + self.id, project_dto.custom_editor, db ) self.custom_editor = new_editor else: - self.custom_editor.update_editor(project_dto.custom_editor) + await CustomEditor.update_editor( + self.custom_editor, project_dto.custom_editor, db + ) else: if self.custom_editor: - self.custom_editor.delete() + await CustomEditor.delete(self.custom_editor, db) # handle campaign update try: new_ids = [c.id for c in project_dto.campaigns] - new_ids.sort() except TypeError: new_ids = [] - current_ids = [c.id for c in self.campaign] - current_ids.sort() - if new_ids != current_ids: - self.campaign = Campaign.query.filter(Campaign.id.in_(new_ids)).all() + + query = """ + SELECT campaign_id + FROM campaign_projects + WHERE project_id = :project_id + """ + campaign_results = await db.fetch_all( + query, values={"project_id": project_dto.project_id} + ) + current_ids = [c.campaign_id for c in campaign_results] + + new_set = set(new_ids) + current_set = set(current_ids) + + if new_set != current_set: + to_add = new_set - current_set + to_remove = current_set - new_set + if to_remove: + await db.execute( + """ + DELETE FROM campaign_projects + WHERE project_id = :project_id + AND campaign_id = ANY(:to_remove) + """, + values={ + "project_id": project_dto.project_id, + "to_remove": list(to_remove), + }, + ) + + if to_add: + insert_query = """ + INSERT INTO campaign_projects 
(project_id, campaign_id) + VALUES (:project_id, :campaign_id) + """ + for campaign_id in to_add: + await db.execute( + insert_query, + values={ + "project_id": project_dto.project_id, + "campaign_id": campaign_id, + }, + ) if project_dto.mapping_permission: self.mapping_permission = MappingPermission[ @@ -532,96 +779,281 @@ def update(self, project_dto: ProjectDTO): # handle interests update try: - new_ids = [c.id for c in project_dto.interests] - new_ids.sort() + new_interest_ids = [i.id for i in project_dto.interests] except TypeError: - new_ids = [] - current_ids = [c.id for c in self.interests] - current_ids.sort() - if new_ids != current_ids: - self.interests = Interest.query.filter(Interest.id.in_(new_ids)).all() + new_interest_ids = [] + + interest_query = """ + SELECT interest_id + FROM project_interests + WHERE project_id = :project_id + """ + interest_results = await db.fetch_all( + interest_query, values={"project_id": project_dto.project_id} + ) + current_interest_ids = [i.interest_id for i in interest_results] + + new_interest_set = set(new_interest_ids) + current_interest_set = set(current_interest_ids) + + if new_interest_set != current_interest_set: + to_add_interests = new_interest_set - current_interest_set + to_remove_interests = current_interest_set - new_interest_set + + if to_remove_interests: + await db.execute( + """ + DELETE FROM project_interests + WHERE project_id = :project_id + AND interest_id = ANY(:to_remove) + """, + values={ + "project_id": project_dto.project_id, + "to_remove": list(to_remove_interests), + }, + ) + + if to_add_interests: + insert_interest_query = """ + INSERT INTO project_interests (project_id, interest_id) + VALUES (:project_id, :interest_id) + """ + for interest_id in to_add_interests: + await db.execute( + insert_interest_query, + values={ + "project_id": project_dto.project_id, + "interest_id": interest_id, + }, + ) # try to update country info if that information is not present if not self.country: self.set_country_info() - db.session.commit() + columns = { + c.key: getattr(self, c.key) for c in inspect(self).mapper.column_attrs + } + columns.pop("geometry", None) + columns.pop("centroid", None) + columns.pop("id", None) + columns.pop("organisation_id", None) + # Update the project in the database + await db.execute( + self.__table__.update().where(Project.id == self.id).values(**columns) + ) + + async def delete(self, db: Database): + """Deletes the current project and related records from the database using raw SQL.""" + # List of tables to delete from, in the order required to satisfy foreign key constraints + related_tables = [ + "project_favorites", + "campaign_projects", + "project_custom_editors", + "project_interests", + "project_priority_areas", + "project_allowed_users", + "project_teams", + "task_invalidation_history", + "task_history", + "tasks", + "project_info", + "project_chat", + "project_partnerships_history", + "project_partnerships", + ] - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() + # Start a transaction to ensure atomic deletion + async with db.transaction(): + # Loop through each table and execute the delete query + for table in related_tables: + await db.execute( + f"DELETE FROM {table} WHERE project_id = :project_id", + {"project_id": self.id}, + ) + + # Finally, delete the project itself + await db.execute( + "DELETE FROM projects WHERE id = :project_id", {"project_id": self.id} + ) @staticmethod - def exists(project_id): - query = 
Project.query.filter(Project.id == project_id).exists() + async def exists(project_id: int, db: Database) -> bool: + query = """ + SELECT 1 + FROM projects + WHERE id = :project_id + """ - return db.session.query(literal(True)).filter(query).scalar() + # Execute the query + result = await db.fetch_one(query=query, values={"project_id": project_id}) - def is_favorited(self, user_id: int) -> bool: - user = db.session.get(User, user_id) - if user not in self.favorited: - return False + if result is None: + raise NotFound(sub_code="PROJECT_NOT_FOUND", project_id=project_id) return True - def favorite(self, user_id: int): - user = db.session.get(User, user_id) - self.favorited.append(user) - db.session.commit() + @staticmethod + async def is_favorited(project_id: int, user_id: int, db: Database) -> bool: + query = """ + SELECT 1 + FROM project_favorites + WHERE user_id = :user_id + AND project_id = :project_id + LIMIT 1 + """ - def unfavorite(self, user_id: int): - user = db.session.get(User, user_id) - if user not in self.favorited: - raise ValueError("NotFeatured- Project not been favorited by user") - self.favorited.remove(user) - db.session.commit() + result = await db.fetch_one( + query, values={"user_id": user_id, "project_id": project_id} + ) + return result is not None - def set_as_featured(self): + @staticmethod + async def favorite(project_id: int, user_id: int, db: Database): + check_query = """ + SELECT 1 FROM project_favorites WHERE project_id = :project_id AND user_id = :user_id + """ + exists = await db.fetch_one( + check_query, {"project_id": project_id, "user_id": user_id} + ) + + if not exists: + insert_query = """ + INSERT INTO project_favorites (project_id, user_id) + VALUES (:project_id, :user_id) + """ + await db.execute( + insert_query, {"project_id": project_id, "user_id": user_id} + ) + + @staticmethod + async def unfavorite(project_id: int, user_id: int, db: Database): + check_query = """ + SELECT 1 FROM project_favorites + WHERE project_id = :project_id AND user_id = :user_id + """ + exists = await db.fetch_one( + check_query, {"project_id": project_id, "user_id": user_id} + ) + + if not exists: + raise ValueError("NotFeatured - Project has not been favorited by user") + + delete_query = """ + DELETE FROM project_favorites + WHERE project_id = :project_id AND user_id = :user_id + """ + await db.execute(delete_query, {"project_id": project_id, "user_id": user_id}) + + async def set_as_featured(self, db: Database): + """ + Sets the project as featured. + :param db: Instance of `databases.Database` for querying + """ if self.featured is True: raise ValueError("AlreadyFeatured- Project is already featured") - self.featured = True - db.session.commit() - def unset_as_featured(self): + query = update(Project).where(Project.id == self.id).values(featured=True) + + # Execute the update query using the async `db.execute` + await db.execute(query) + + async def unset_as_featured(self, db: Database): + """ + Unsets the project as featured. 
+ :param db: Instance of `databases.Database` for querying + """ + # Check if the project is already not featured if self.featured is False: - raise ValueError("NotFeatured- Project is not featured") - self.featured = False - db.session.commit() - - def can_be_deleted(self) -> bool: - """Projects can be deleted if they have no mapped work""" - task_count = self.tasks.filter( - Task.task_status != TaskStatus.READY.value - ).count() + raise ValueError("NotFeatured - Project is not featured") + + query = update(Project).where(Project.id == self.id).values(featured=False) + + # Execute the update query using the async `db.execute` + await db.execute(query) + + async def can_be_deleted(self, db: Database) -> bool: + """Projects can be deleted if they have no mapped work.""" + # Build a query to count tasks associated with the project + query = ( + select(func.count()) + .select_from(Task) + .where( + Task.project_id + == self.id, # Assuming `self.id` refers to the project instance ID + Task.task_status != TaskStatus.READY.value, + ) + ) + + # Execute the query + task_count = await db.fetch_val(query) if task_count == 0: return True else: return False @staticmethod - def get_projects_for_admin( - admin_id: int, preferred_locale: str, search_dto: ProjectSearchDTO + async def get_projects_for_admin( + admin_id: int, preferred_locale: str, search_dto: ProjectSearchDTO, db: Database ) -> PMDashboardDTO: - """Get projects for admin""" - query = Project.query.filter(Project.author_id == admin_id) - # Do Filtering Here + """Get all projects for provided admin.""" + + query = """ + SELECT + p.id AS id, + p.difficulty, + p.priority, + p.default_locale, + ST_AsGeoJSON(p.centroid) AS centroid, + p.organisation_id, + p.tasks_bad_imagery, + p.tasks_mapped, + p.tasks_validated, + p.status, + p.mapping_types, + p.total_tasks, + p.last_updated, + p.due_date, + p.country, + p.changeset_comment, + p.created, + p.osmcha_filter_id, + p.mapping_permission, + p.validation_permission, + p.enforce_random_task_selection, + p.private, + p.license_id, + p.id_presets, + p.extra_id_params, + p.rapid_power_user, + p.imagery, + p.mapping_editors, + p.validation_editors, + u.username AS author, + o.name AS organisation_name, + o.slug AS organisation_slug, + o.logo AS organisation_logo, + ARRAY(SELECT user_id FROM project_allowed_users WHERE project_id = p.id) AS allowed_users + FROM projects p + LEFT JOIN organisations o ON o.id = p.organisation_id + LEFT JOIN users u ON u.id = p.author_id + WHERE p.author_id = :admin_id + """ + + params = {"admin_id": admin_id} if search_dto.order_by: + query += f" ORDER BY p.{search_dto.order_by} " if search_dto.order_by_type == "DESC": - query = query.order_by(desc(search_dto.order_by)) - else: - query = query.order_by(search_dto.order_by) - - admins_projects = query.all() - - if admins_projects is None: - raise NotFound(sub_code="PROJECTS_NOT_FOUND") + query += "DESC" + # Execute query + rows = await db.fetch_all(query, params) + # Process results admin_projects_dto = PMDashboardDTO() - for project in admins_projects: - pm_project = project.get_project_summary(preferred_locale) - project_status = ProjectStatus(project.status) + for row in rows: + pm_project = await Project.get_project_summary(row, preferred_locale, db) + project_status = ProjectStatus(row["status"]) if project_status == ProjectStatus.DRAFT: admin_projects_dto.draft_projects.append(pm_project) @@ -630,107 +1062,154 @@ def get_projects_for_admin( elif project_status == ProjectStatus.ARCHIVED: 
admin_projects_dto.archived_projects.append(pm_project) else: - current_app.logger.error(f"Unexpected state project {project.id}") + raise HTTPException( + status_code=500, detail=f"Unexpected state project {row['id']}" + ) return admin_projects_dto - def get_project_user_stats(self, user_id: int) -> ProjectUserStatsDTO: - """Compute project specific stats for a given user""" + @staticmethod + async def get_project_user_stats( + project_id: int, user_id: int, db: Database + ) -> ProjectUserStatsDTO: + """Compute project-specific stats for a given user""" stats_dto = ProjectUserStatsDTO() - stats_dto.time_spent_mapping = 0 - stats_dto.time_spent_validating = 0 - stats_dto.total_time_spent = 0 - total_mapping_time = ( - db.session.query( - func.sum( - cast(func.to_timestamp(TaskHistory.action_text, "HH24:MI:SS"), Time) - ) - ) - .filter( - or_( - TaskHistory.action == "LOCKED_FOR_MAPPING", - TaskHistory.action == "AUTO_UNLOCKED_FOR_MAPPING", - ) - ) - .filter(TaskHistory.user_id == user_id) - .filter(TaskHistory.project_id == self.id) + total_mapping_query = """ + SELECT + SUM(TO_TIMESTAMP(action_text, 'HH24:MI:SS')::TIME - '00:00:00'::TIME) AS total_time + FROM task_history + WHERE action IN ('LOCKED_FOR_MAPPING', 'AUTO_UNLOCKED_FOR_MAPPING') + AND project_id = :project_id + AND user_id = :user_id + """ + total_mapping_result = await db.fetch_one( + total_mapping_query, {"project_id": project_id, "user_id": user_id} ) - for time in total_mapping_time: - total_mapping_time = time[0] - if total_mapping_time: - stats_dto.time_spent_mapping = total_mapping_time.total_seconds() - stats_dto.total_time_spent += stats_dto.time_spent_mapping - query = ( - TaskHistory.query.with_entities( - func.date_trunc("minute", TaskHistory.action_date).label("trn"), - func.max(TaskHistory.action_text).label("tm"), - ) - .filter(TaskHistory.user_id == user_id) - .filter(TaskHistory.project_id == self.id) - .filter(TaskHistory.action == "LOCKED_FOR_VALIDATION") - .group_by("trn") - .subquery() + total_mapping_time = ( + total_mapping_result["total_time"].total_seconds() + if total_mapping_result and total_mapping_result["total_time"] + else 0 + ) + stats_dto.time_spent_mapping = total_mapping_time + stats_dto.total_time_spent += total_mapping_time + + total_validation_query = """ + SELECT + SUM(TO_TIMESTAMP(action_text, 'HH24:MI:SS')::TIME - '00:00:00'::TIME) AS total_time + FROM task_history + WHERE action IN ('LOCKED_FOR_VALIDATION', 'AUTO_UNLOCKED_FOR_VALIDATION') + AND project_id = :project_id + AND user_id = :user_id + """ + total_validation_result = await db.fetch_one( + total_validation_query, {"project_id": project_id, "user_id": user_id} ) - total_validation_time = db.session.query( - func.sum(cast(func.to_timestamp(query.c.tm, "HH24:MI:SS"), Time)) - ).all() - - for time in total_validation_time: - total_validation_time = time[0] - if total_validation_time: - stats_dto.time_spent_validating = total_validation_time.total_seconds() - stats_dto.total_time_spent += stats_dto.time_spent_validating + total_validation_time = ( + total_validation_result["total_time"].total_seconds() + if total_validation_result and total_validation_result["total_time"] + else 0 + ) + stats_dto.time_spent_validating = total_validation_time + stats_dto.total_time_spent += total_validation_time return stats_dto - def get_project_stats(self) -> ProjectStatsDTO: - """Create Project Stats model for postgis project object""" + @staticmethod + async def get_project_stats(project_id: int, database: Database) -> ProjectStatsDTO: + 
"""Create Project Stats model for postgis project object.""" project_stats = ProjectStatsDTO() - project_stats.project_id = self.id - project_stats.area = ( - db.session.query(func.ST_Area(Project.geometry, True)) - .where(Project.id == self.id) - .first()[0] - / 1000000 + project_stats.project_id = project_id + project_query = """ + SELECT + ST_Area(geometry, TRUE) / 1000000 AS area, + ST_AsGeoJSON(centroid) AS centroid_geojson, + tasks_mapped, + tasks_validated, + total_tasks, + tasks_bad_imagery + FROM projects + WHERE id = :project_id + """ + + result = await database.fetch_one( + project_query, values={"project_id": project_id} + ) + + project_stats.area = result["area"] + project_stats.aoi_centroid = ( + geojson.loads(result["centroid_geojson"]) + if result["centroid_geojson"] + else None + ) + tasks_mapped = result["tasks_mapped"] + tasks_validated = result["tasks_validated"] + total_tasks = result["total_tasks"] + tasks_bad_imagery = result["tasks_bad_imagery"] + + # Calculate task percentages + project_stats.total_tasks = total_tasks + + project_stats.percent_mapped = Project.calculate_tasks_percent( + "mapped", tasks_mapped, tasks_validated, total_tasks, tasks_bad_imagery + ) + project_stats.percent_validated = Project.calculate_tasks_percent( + "validated", tasks_mapped, tasks_validated, total_tasks, tasks_bad_imagery + ) + project_stats.percent_bad_imagery = Project.calculate_tasks_percent( + "bad_imagery", tasks_mapped, tasks_validated, total_tasks, tasks_bad_imagery ) + # Query for total mappers + total_mappers_query = """ + SELECT COUNT(*) + FROM users + WHERE :project_id = ANY(projects_mapped) + """ + total_mappers_result = await database.fetch_one( + total_mappers_query, values={"project_id": project_id} + ) project_stats.total_mappers = ( - db.session.query(User).filter(User.projects_mapped.any(self.id)).count() + total_mappers_result[0] if total_mappers_result else 0 + ) + + # Query for total comments + total_comments_query = """ + SELECT COUNT(*) + FROM project_chat + WHERE project_id = :project_id + """ + total_comments_result = await database.fetch_one( + total_comments_query, values={"project_id": project_id} ) - project_stats.total_tasks = self.total_tasks project_stats.total_comments = ( - db.session.query(ProjectChat) - .filter(ProjectChat.project_id == self.id) - .count() + total_comments_result[0] if total_comments_result else 0 ) - project_stats.percent_mapped = self.calculate_tasks_percent("mapped") - project_stats.percent_validated = self.calculate_tasks_percent("validated") - project_stats.percent_bad_imagery = self.calculate_tasks_percent("bad_imagery") - centroid_geojson = db.session.scalar(self.centroid.ST_AsGeoJSON()) - project_stats.aoi_centroid = geojson.loads(centroid_geojson) + + # Initialize time stats project_stats.total_time_spent = 0 project_stats.total_mapping_time = 0 project_stats.total_validation_time = 0 project_stats.average_mapping_time = 0 project_stats.average_validation_time = 0 + # Query total mapping time and tasks + total_mapping_query = """ + SELECT + SUM(TO_TIMESTAMP(action_text, 'HH24:MI:SS')::TIME - '00:00:00'::TIME) AS total_time, + COUNT(action) AS total_tasks + FROM task_history + WHERE action IN ('LOCKED_FOR_MAPPING', 'AUTO_UNLOCKED_FOR_MAPPING') + AND project_id = :project_id + """ + total_mapping_result = await database.fetch_one( + total_mapping_query, values={"project_id": project_id} + ) total_mapping_time, total_mapping_tasks = ( - db.session.query( - func.sum( - cast(func.to_timestamp(TaskHistory.action_text, 
"HH24:MI:SS"), Time) - ), - func.count(TaskHistory.action), - ) - .filter( - or_( - TaskHistory.action == "LOCKED_FOR_MAPPING", - TaskHistory.action == "AUTO_UNLOCKED_FOR_MAPPING", - ) - ) - .filter(TaskHistory.project_id == self.id) - .one() + (total_mapping_result["total_time"], total_mapping_result["total_tasks"]) + if total_mapping_result + else (0, 0) ) if total_mapping_tasks > 0: @@ -741,23 +1220,30 @@ def get_project_stats(self) -> ProjectStatsDTO: ) project_stats.total_time_spent += total_mapping_time + # Query total validation time and tasks + total_validation_query = """ + SELECT + SUM(TO_TIMESTAMP(action_text, 'HH24:MI:SS')::TIME - '00:00:00'::TIME) AS total_time, + COUNT(action) AS total_tasks + FROM task_history + WHERE action IN ('LOCKED_FOR_VALIDATION', 'AUTO_UNLOCKED_FOR_VALIDATION') + AND project_id = :project_id + """ + total_validation_result = await database.fetch_one( + total_validation_query, values={"project_id": project_id} + ) + + # Safely unpack the results, or default to (0, 0) if the query returns no results total_validation_time, total_validation_tasks = ( - db.session.query( - func.sum( - cast(func.to_timestamp(TaskHistory.action_text, "HH24:MI:SS"), Time) - ), - func.count(TaskHistory.action), - ) - .filter( - or_( - TaskHistory.action == "LOCKED_FOR_VALIDATION", - TaskHistory.action == "AUTO_UNLOCKED_FOR_VALIDATION", - ) + ( + total_validation_result["total_time"], + total_validation_result["total_tasks"], ) - .filter(TaskHistory.project_id == self.id) - .one() + if total_validation_result + else (0, 0) ) + # If there are validation tasks, convert the time to total seconds and update project stats if total_validation_tasks > 0: total_validation_time = total_validation_time.total_seconds() project_stats.total_validation_time = total_validation_time @@ -766,214 +1252,339 @@ def get_project_stats(self) -> ProjectStatsDTO: ) project_stats.total_time_spent += total_validation_time - actions = [] - if project_stats.average_mapping_time <= 0: - actions.append(TaskStatus.LOCKED_FOR_MAPPING.name) - if project_stats.average_validation_time <= 0: - actions.append(TaskStatus.LOCKED_FOR_VALIDATION.name) - - zoom_levels = [] - # Check that averages are non-zero. - if len(actions) != 0: - zoom_levels = ( - Task.query.with_entities(Task.zoom.distinct()) - .filter(Task.project_id == self.id) - .all() + # TODO: Understand the functionality of subquery used and incorporate this part. 
+ + # actions = [] + # if project_stats.average_mapping_time <= 0: + # actions.append(TaskStatus.LOCKED_FOR_MAPPING.name) + # if project_stats.average_validation_time <= 0: + # actions.append(TaskStatus.LOCKED_FOR_VALIDATION.name) + + # zoom_levels = [] + # if actions: + # # Query for distinct zoom levels + # zoom_levels_query = """ + # SELECT DISTINCT zoom + # FROM tasks + # WHERE project_id = :project_id + # """ + # zoom_levels_result = await database.fetch_all(zoom_levels_query, values={"project_id": project_id}) + # zoom_levels = [row['zoom'] for row in zoom_levels_result] + + # is_square = None not in zoom_levels + + # subquery = f""" + # SELECT + # t.zoom, + # th.action, + # EXTRACT(EPOCH FROM TO_TIMESTAMP(th.action_text, 'HH24:MI:SS')) AS ts + # FROM task_history th + # JOIN tasks t ON th.task_id = t.id + # WHERE th.action IN :actions + # AND th.project_id = :project_id + # AND t.is_square = :is_square + # AND (t.zoom IN :zoom_levels OR :is_square IS FALSE) + # """ + # subquery_params = { + # "project_id": project_id, + # "actions": tuple(actions), + # "is_square": is_square, + # "zoom_levels": tuple(zoom_levels) if zoom_levels else (None,) + # } + # subquery_result = await database.fetch_all(subquery, values=subquery_params) + + # # Query for average mapping time + # if project_stats.average_mapping_time <= 0: + # mapping_avg_query = """ + # SELECT zoom, AVG(ts) AS avg + # FROM ( + # SELECT zoom, ts + # FROM subquery_result + # WHERE action = 'LOCKED_FOR_MAPPING' + # ) AS mapping_times + # GROUP BY zoom + # """ + # mapping_avg_result = await database.fetch_all(mapping_avg_query) + # if mapping_avg_result: + # mapping_time = sum(row['avg'].total_seconds() for row in mapping_avg_result) / len(mapping_avg_result) + # project_stats.average_mapping_time = mapping_time + + # # Query for average validation time + # if project_stats.average_validation_time <= 0: + # validation_avg_query = """ + # SELECT zoom, AVG(ts) AS avg + # FROM ( + # SELECT zoom, ts + # FROM subquery_result + # WHERE action = 'LOCKED_FOR_VALIDATION' + # ) AS validation_times + # GROUP BY zoom + # """ + # validation_avg_result = await database.fetch_all(validation_avg_query) + # if validation_avg_result: + # validation_time = sum(row['avg'].total_seconds() for row in validation_avg_result) / len(validation_avg_result) + # project_stats.average_validation_time = validation_time + + # Calculate time to finish mapping and validation + project_stats.time_to_finish_mapping = ( + total_tasks - (tasks_mapped + tasks_bad_imagery + tasks_validated) + ) * project_stats.average_mapping_time + project_stats.time_to_finish_validating = ( + total_tasks - (tasks_validated + tasks_bad_imagery) + ) * project_stats.average_validation_time + + return project_stats + + @staticmethod + async def get_project_summary( + project_row, preferred_locale: str, db: Database, calculate_completion=True + ) -> ProjectSummary: + """Create Project Summary model for a project.""" + + project_id = project_row["id"] + + # Mapping editors + if project_row.mapping_editors: + mapping_editors = ( + [ + Editors(mapping_editor).name + for mapping_editor in project_row.mapping_editors + ] + if project_row["mapping_editors"] + else [] ) - zoom_levels = [z[0] for z in zoom_levels] - - # Validate project has arbitrary tasks. 
- is_square = True - if None in zoom_levels: - is_square = False - sq = ( - TaskHistory.query.with_entities( - Task.zoom, - TaskHistory.action, - ( - cast(func.to_timestamp(TaskHistory.action_text, "HH24:MI:SS"), Time) - ).label("ts"), + # Validation editors + if project_row.validation_editors: + validation_editors = ( + [ + Editors(validation_editor).name + for validation_editor in project_row["validation_editors"] + ] + if project_row["validation_editors"] + else [] ) - .filter(Task.is_square == is_square) - .filter(TaskHistory.project_id == Task.project_id) - .filter(TaskHistory.task_id == Task.id) - .filter(TaskHistory.action.in_(actions)) + summary = ProjectSummary( + project_id=project_id, + mapping_editors=mapping_editors, + validation_editors=validation_editors, ) - if is_square is True: - sq = sq.filter(Task.zoom.in_(zoom_levels)) - sq = sq.subquery() - - nz = ( - db.session.query(sq.c.zoom, sq.c.action, sq.c.ts) - .filter(sq.c.ts > datetime.time(0)) - .limit(10000) - .subquery() + # Set priority + priority_map = {0: "URGENT", 1: "HIGH", 2: "MEDIUM"} + summary.priority = priority_map.get(project_row["priority"], "LOW") + + summary.author = project_row.author + + # Set other fields directly from project_row + summary.default_locale = project_row.default_locale + summary.country_tag = project_row.country + summary.changeset_comment = project_row.changeset_comment + summary.due_date = project_row.due_date + summary.created = project_row.created + summary.last_updated = project_row.last_updated + summary.osmcha_filter_id = project_row.osmcha_filter_id + summary.difficulty = ProjectDifficulty(project_row["difficulty"]).name + summary.mapping_permission = MappingPermission( + project_row["mapping_permission"] + ).name + summary.validation_permission = ValidationPermission( + project_row["validation_permission"] + ).name + summary.random_task_selection_enforced = ( + project_row.enforce_random_task_selection ) + summary.private = project_row.private + summary.license_id = project_row.license_id + summary.status = ProjectStatus(project_row["status"]).name + summary.id_presets = project_row.id_presets + summary.extra_id_params = project_row.extra_id_params + summary.rapid_power_user = project_row.rapid_power_user + summary.imagery = project_row.imagery + + # Handle organisation details if available + if project_row.organisation_id: + summary.organisation = project_row.organisation_id + summary.organisation_name = project_row.organisation_name + summary.organisation_slug = project_row.organisation_slug + summary.organisation_logo = project_row.organisation_logo + + # Mapping types + if project_row.mapping_types: + summary.mapping_types = ( + [ + MappingTypes(mapping_type).name + for mapping_type in project_row.mapping_types + ] + if project_row.mapping_types + else [] + ) - if project_stats.average_mapping_time <= 0: - mapped_avg = ( - db.session.query(nz.c.zoom, (func.avg(nz.c.ts)).label("avg")) - .filter(nz.c.action == TaskStatus.LOCKED_FOR_MAPPING.name) - .group_by(nz.c.zoom) - .all() + # Custom editor + custom_editor_query = """ + SELECT name, description, url + FROM project_custom_editors + WHERE project_id = :project_id + """ + custom_editor_row = await db.fetch_one( + custom_editor_query, {"project_id": project_id} + ) + if custom_editor_row: + summary.custom_editor = CustomEditorDTO( + name=custom_editor_row.name, + description=custom_editor_row.description, + url=custom_editor_row.url, ) - if len(mapped_avg) != 0: - mapping_time = sum([t.avg.total_seconds() for t in mapped_avg]) / 
len( - mapped_avg - ) - project_stats.average_mapping_time = mapping_time - - if project_stats.average_validation_time <= 0: - val_avg = ( - db.session.query(nz.c.zoom, (func.avg(nz.c.ts)).label("avg")) - .filter(nz.c.action == TaskStatus.LOCKED_FOR_VALIDATION.name) - .group_by(nz.c.zoom) - .all() + + if summary.private: + allowed_user_ids = ( + project_row.allowed_users if project_row.allowed_users else [] ) - if len(val_avg) != 0: - validation_time = sum([t.avg.total_seconds() for t in val_avg]) / len( - val_avg + if allowed_user_ids: + query = "SELECT username FROM users WHERE id = ANY(:allowed_user_ids)" + allowed_users = await db.fetch_all( + query, {"allowed_user_ids": allowed_user_ids} ) - project_stats.average_validation_time = validation_time - - time_to_finish_mapping = ( - self.total_tasks - - (self.tasks_mapped + self.tasks_bad_imagery + self.tasks_validated) - ) * project_stats.average_mapping_time - project_stats.time_to_finish_mapping = time_to_finish_mapping - project_stats.time_to_finish_validating = ( - self.total_tasks - (self.tasks_validated + self.tasks_bad_imagery) - ) * project_stats.average_validation_time - - return project_stats + summary.allowed_users = [user["username"] for user in allowed_users] + else: + summary.allowed_users = [] - def get_project_summary( - self, preferred_locale, calculate_completion=True - ) -> ProjectSummary: - """Create Project Summary model for postgis project object""" - summary = ProjectSummary() - summary.project_id = self.id - priority = self.priority - if priority == 0: - summary.priority = "URGENT" - elif priority == 1: - summary.priority = "HIGH" - elif priority == 2: - summary.priority = "MEDIUM" - else: - summary.priority = "LOW" - summary.author = User.get_by_id(self.author_id).username - summary.default_locale = self.default_locale - summary.country_tag = self.country - summary.changeset_comment = self.changeset_comment - summary.due_date = self.due_date - summary.created = self.created - summary.last_updated = self.last_updated - summary.osmcha_filter_id = self.osmcha_filter_id - summary.difficulty = ProjectDifficulty(self.difficulty).name - summary.mapping_permission = MappingPermission(self.mapping_permission).name - summary.validation_permission = ValidationPermission( - self.validation_permission - ).name - summary.random_task_selection_enforced = self.enforce_random_task_selection - summary.private = self.private - summary.license_id = self.license_id - summary.status = ProjectStatus(self.status).name - summary.id_presets = self.id_presets - summary.extra_id_params = self.extra_id_params - summary.rapid_power_user = self.rapid_power_user - summary.imagery = self.imagery - if self.organisation_id: - summary.organisation = self.organisation_id - summary.organisation_name = self.organisation.name - summary.organisation_slug = self.organisation.slug - summary.organisation_logo = self.organisation.logo - - if self.campaign: - summary.campaigns = [i.as_dto() for i in self.campaign] - - # Cast MappingType values to related string array - mapping_types_array = [] - if self.mapping_types: - for mapping_type in self.mapping_types: - mapping_types_array.append(MappingTypes(mapping_type).name) - summary.mapping_types = mapping_types_array - - if self.mapping_editors: - mapping_editors = [] - for mapping_editor in self.mapping_editors: - mapping_editors.append(Editors(mapping_editor).name) - - summary.mapping_editors = mapping_editors - - if self.validation_editors: - validation_editors = [] - for validation_editor in 
self.validation_editors: - validation_editors.append(Editors(validation_editor).name) - - summary.validation_editors = validation_editors - - if self.custom_editor: - summary.custom_editor = self.custom_editor.as_dto() - - # If project is private, fetch list of allowed users - if self.private: - allowed_users = [] - for user in self.allowed_users: - allowed_users.append(user.username) - summary.allowed_users = allowed_users - - centroid_geojson = db.session.scalar(self.centroid.ST_AsGeoJSON()) - summary.aoi_centroid = geojson.loads(centroid_geojson) + # AOI centroid + summary.aoi_centroid = geojson.loads(project_row.centroid) + # Calculate completion percentages if requested if calculate_completion: - summary.percent_mapped = self.calculate_tasks_percent("mapped") - summary.percent_validated = self.calculate_tasks_percent("validated") - summary.percent_bad_imagery = self.calculate_tasks_percent("bad_imagery") + summary.percent_mapped = Project.calculate_tasks_percent( + "mapped", + project_row.tasks_mapped, + project_row.tasks_validated, + project_row.total_tasks, + project_row.tasks_bad_imagery, + ) + summary.percent_validated = Project.calculate_tasks_percent( + "validated", + project_row.tasks_validated, + project_row.tasks_validated, + project_row.total_tasks, + project_row.tasks_bad_imagery, + ) + summary.percent_bad_imagery = Project.calculate_tasks_percent( + "bad_imagery", + project_row.tasks_mapped, + project_row.tasks_validated, + project_row.total_tasks, + project_row.tasks_bad_imagery, + ) + # Project campaigns + query = """ + SELECT c.* + FROM campaigns c + INNER JOIN campaign_projects cp ON c.id = cp.campaign_id + WHERE cp.project_id = :project_id + """ + + campaigns = await db.fetch_all(query=query, values={"project_id": project_id}) + campaigns_dto = ( + [CampaignDTO(**campaign) for campaign in campaigns] if campaigns else [] + ) + summary.campaigns = campaigns_dto + + # Project teams + query = """ + SELECT + pt.team_id, + t.name AS team_name, + pt.role + FROM project_teams pt + JOIN teams t ON pt.team_id = t.id + WHERE pt.project_id = :project_id + """ + teams = await db.fetch_all(query, {"project_id": project_row["id"]}) summary.project_teams = [ ProjectTeamDTO( - dict( - team_id=t.team.id, - team_name=t.team.name, - role=TeamRoles(t.role).name, - ) + team_id=team["team_id"], + team_name=team["team_name"], + role=TeamRoles(team["role"]), ) - for t in self.teams + for team in teams ] - - project_info = ProjectInfo.get_dto_for_locale( - self.id, preferred_locale, self.default_locale + # Project info for the preferred locale + project_info = await Project.get_dto_for_locale( + project_row["id"], preferred_locale, project_row["default_locale"], db ) summary.project_info = project_info return summary - def get_project_title(self, preferred_locale): - project_info = ProjectInfo.get_dto_for_locale( - self.id, preferred_locale, self.default_locale - ) - return project_info.name + # TODO Remove if not used. 
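+    # Note on the TODO above: this commented-out helper would recount task rows on
+    # every call, while the calculate_tasks_percent used by get_project_stats,
+    # get_project_summary and get_project_and_base_dto works from the counters
+    # already stored on the projects row (tasks_mapped, tasks_validated,
+    # total_tasks, tasks_bad_imagery), which appears to make this variant redundant.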
+ # @staticmethod + # async def calculate_tasks_percent(status: str, project_id: int, db: Database) -> float: + # """Calculate the percentage of tasks with a given status for a project.""" + # query = f""" + # SELECT COUNT(*) + # FROM tasks + # WHERE project_id = :project_id AND status = :status + # """ + # total_tasks_query = "SELECT COUNT(*) FROM tasks WHERE project_id = :project_id" + + # total_tasks = await db.fetch_val(total_tasks_query, {"project_id": project_id}) + # status_tasks = await db.fetch_val(query, {"project_id": project_id, "status": status}) + # return (status_tasks / total_tasks) * 100 if total_tasks > 0 else 0.0 @staticmethod - def get_project_total_contributions(project_id: int) -> int: - project_contributors_count = ( - TaskHistory.query.with_entities(TaskHistory.user_id) - .filter( - TaskHistory.project_id == project_id, TaskHistory.action != "COMMENT" - ) - .distinct(TaskHistory.user_id) - .count() + async def get_dto_for_locale( + project_id: int, preferred_locale: str, default_locale: str, db: Database + ) -> ProjectInfoDTO: + """Get project info for the preferred locale.""" + query = """ + SELECT + name, + locale, + short_description, + description, + instructions + FROM project_info + WHERE project_id = :project_id AND locale = :preferred_locale + """ + project_info = await db.fetch_one( + query, {"project_id": project_id, "preferred_locale": preferred_locale} ) - return project_contributors_count + if not project_info: + # Fallback to default locale if preferred locale is not available + project_info = await db.fetch_one( + query, {"project_id": project_id, "preferred_locale": default_locale} + ) + return ProjectInfoDTO(**project_info) if project_info else None + + @staticmethod + async def get_project_total_contributions(project_id: int, db) -> int: + query = """ + SELECT COUNT(DISTINCT user_id) + FROM task_history + WHERE project_id = :project_id AND action != 'COMMENT' + """ + + result = await db.fetch_one(query=query, values={"project_id": project_id}) + + # fetch_one returns a single record, use index [0] to get the first column value + return result[0] if result else 0 - def get_aoi_geometry_as_geojson(self): + @staticmethod + async def get_aoi_geometry_as_geojson(project_id: int, db: Database) -> dict: """Helper which returns the AOI geometry as a geojson object""" - with db.engine.connect() as conn: - aoi_geojson = conn.execute(self.geometry.ST_AsGeoJSON()).scalar() - return geojson.loads(aoi_geojson) + + query = """ + SELECT ST_AsGeoJSON(geometry) AS aoi_geojson + FROM projects + WHERE id = :project_id + """ + + result = await db.fetch_one(query, {"project_id": project_id}) + if not result: + raise ValueError("Project not found or geometry is missing") + aoi_geojson = geojson.loads(result["aoi_geojson"]) + return aoi_geojson def get_project_teams(self): """Helper to return teams with members so we can handle permissions""" @@ -989,231 +1600,421 @@ def get_project_teams(self): return project_teams - @staticmethod - @cached(active_mappers_cache) - def get_active_mappers(project_id) -> int: - """Get count of Locked tasks as a proxy for users who are currently active on the project""" + # def get_project_title(self, preferred_locale): + # project_info = ProjectInfo.get_dto_for_locale( + # self.id, preferred_locale, self.default_locale + # ) + # return project_info.name - return ( - Task.query.filter( - Task.task_status.in_( - ( - TaskStatus.LOCKED_FOR_MAPPING.value, - TaskStatus.LOCKED_FOR_VALIDATION.value, - ) - ) - ) - .filter(Task.project_id == 
project_id) - .distinct(Task.locked_by) - .count() + @staticmethod + async def get_project_title(db: Database, project_id: int, preferred_locale): + project_info = await ProjectInfo.get_dto_for_locale( + db, project_id, preferred_locale ) + return project_info.name - def _get_project_and_base_dto(self): - """Populates a project DTO with properties common to all roles""" - base_dto = ProjectDTO() - base_dto.project_id = self.id - base_dto.project_status = ProjectStatus(self.status).name - base_dto.default_locale = self.default_locale - base_dto.project_priority = ProjectPriority(self.priority).name - base_dto.area_of_interest = self.get_aoi_geometry_as_geojson() - base_dto.aoi_bbox = shape(base_dto.area_of_interest).bounds - base_dto.mapping_permission = MappingPermission(self.mapping_permission).name - base_dto.validation_permission = ValidationPermission( - self.validation_permission - ).name - base_dto.enforce_random_task_selection = self.enforce_random_task_selection - base_dto.private = self.private - base_dto.difficulty = ProjectDifficulty(self.difficulty).name - base_dto.changeset_comment = self.changeset_comment - base_dto.osmcha_filter_id = self.osmcha_filter_id - base_dto.due_date = self.due_date - base_dto.imagery = self.imagery - base_dto.josm_preset = self.josm_preset - base_dto.id_presets = self.id_presets - base_dto.extra_id_params = self.extra_id_params - base_dto.rapid_power_user = self.rapid_power_user - base_dto.country_tag = self.country - base_dto.organisation = self.organisation_id - base_dto.license_id = self.license_id - base_dto.created = self.created - base_dto.last_updated = self.last_updated - base_dto.author = User.get_by_id(self.author_id).username - base_dto.active_mappers = Project.get_active_mappers(self.id) - base_dto.task_creation_mode = TaskCreationMode(self.task_creation_mode).name - - base_dto.percent_mapped = self.calculate_tasks_percent("mapped") - base_dto.percent_validated = self.calculate_tasks_percent("validated") - base_dto.percent_bad_imagery = self.calculate_tasks_percent("bad_imagery") - - base_dto.project_teams = [ - ProjectTeamDTO( - dict( - team_id=t.team.id, - team_name=t.team.name, - role=TeamRoles(t.role).name, - ) - ) - for t in self.teams - ] + @staticmethod + async def get_active_mappers(project_id: int, database: Database) -> int: + """Get count of Locked tasks as a proxy for users who are currently active on the project""" + query = """ + SELECT COUNT(*) + FROM ( + SELECT DISTINCT locked_by + FROM tasks + WHERE task_status IN (:locked_for_mapping, :locked_for_validation) + AND project_id = :project_id + ) AS active_mappers + """ - if self.custom_editor: - base_dto.custom_editor = self.custom_editor.as_dto() + values = { + "project_id": project_id, + "locked_for_mapping": TaskStatus.LOCKED_FOR_MAPPING.value, + "locked_for_validation": TaskStatus.LOCKED_FOR_VALIDATION.value, + } - if self.private: - # If project is private it should have a list of allowed users - allowed_usernames = [] - for user in self.allowed_users: - allowed_usernames.append(user.username) - base_dto.allowed_usernames = allowed_usernames + count = await database.fetch_val(query, values) + # Handle the case where count might be None + return count or 0 - if self.mapping_types: - mapping_types = [] - for mapping_type in self.mapping_types: - mapping_types.append(MappingTypes(mapping_type).name) + @staticmethod + async def get_project_and_base_dto(project_id: int, db: Database) -> ProjectDTO: + """Populates a project DTO with properties common to all roles""" - 
base_dto.mapping_types = mapping_types + # Raw SQL query to fetch project data with date formatting + query = """ + SELECT p.id as project_id, p.status as project_status, p.default_locale, p.priority as project_priority, + p.mapping_permission, p.validation_permission, p.enforce_random_task_selection, p.private, + p.difficulty, p.changeset_comment, p.osmcha_filter_id, + TO_CHAR(COALESCE(p.due_date, NULL), 'YYYY-MM-DD"T"HH24:MI:SS.MS"Z"') as due_date, + p.imagery, p.josm_preset, p.id_presets, p.extra_id_params, p.rapid_power_user, p.country, + p.organisation_id, p.license_id, + TO_CHAR(p.created, 'YYYY-MM-DD"T"HH24:MI:SS.MS"Z"') as created, + TO_CHAR(p.last_updated, 'YYYY-MM-DD"T"HH24:MI:SS.MS"Z"') as last_updated, + u.username as author, + p.total_tasks, p.tasks_mapped, p.tasks_validated, p.tasks_bad_imagery, p.task_creation_mode, p.mapping_types, p.mapping_editors, p.validation_editors, p.organisation_id + FROM projects p + LEFT JOIN users u ON p.author_id = u.id + WHERE p.id = :project_id + """ + # Execute query and fetch the result + record = await db.fetch_one(query, {"project_id": project_id}) - if self.campaign: - base_dto.campaigns = [i.as_dto() for i in self.campaign] + if not record: + raise ValueError("Project not found") - if self.mapping_editors: - mapping_editors = [] - for mapping_editor in self.mapping_editors: - mapping_editors.append(Editors(mapping_editor).name) + area_of_interest = await Project.get_aoi_geometry_as_geojson(project_id, db) + aoi_bbox = shape(area_of_interest).bounds + active_mappers = await Project.get_active_mappers(project_id, db) - base_dto.mapping_editors = mapping_editors + # stats + tasks_mapped = record.tasks_mapped + tasks_validated = record.tasks_validated + total_tasks = record.total_tasks + tasks_bad_imagery = record.tasks_bad_imagery - if self.validation_editors: - validation_editors = [] - for validation_editor in self.validation_editors: - validation_editors.append(Editors(validation_editor).name) + percent_mapped = Project.calculate_tasks_percent( + "mapped", tasks_mapped, tasks_validated, total_tasks, tasks_bad_imagery + ) + percent_validated = Project.calculate_tasks_percent( + "validated", + tasks_validated, + tasks_validated, + total_tasks, + tasks_bad_imagery, + ) + percent_bad_imagery = Project.calculate_tasks_percent( + "bad_imagery", tasks_mapped, tasks_validated, total_tasks, tasks_bad_imagery + ) - base_dto.validation_editors = validation_editors + # Convert record to DTO + project_dto = ProjectDTO( + project_id=record.project_id, + project_status=ProjectStatus(record.project_status).name, + default_locale=record.default_locale, + project_priority=ProjectPriority(record.project_priority).name, + area_of_interest=area_of_interest, + aoi_bbox=aoi_bbox, + mapping_permission=MappingPermission(record.mapping_permission).name, + validation_permission=ValidationPermission( + record.validation_permission + ).name, + enforce_random_task_selection=record.enforce_random_task_selection, + private=record.private, + difficulty=ProjectDifficulty(record.difficulty).name, + changeset_comment=record.changeset_comment, + osmcha_filter_id=record.osmcha_filter_id, + due_date=record.due_date, + imagery=record.imagery, + josm_preset=record.josm_preset, + id_presets=record.id_presets, + extra_id_params=record.extra_id_params, + rapid_power_user=record.rapid_power_user, + country_tag=record.country, + organisation=record.organisation_id, + license_id=record.license_id, + created=record.created, + last_updated=record.last_updated, + author=record.author, + 
active_mappers=active_mappers, + task_creation_mode=TaskCreationMode(record.task_creation_mode).name, + mapping_types=( + [ + MappingTypes(mapping_type).name + for mapping_type in record.mapping_types + ] + if record.mapping_types is not None + else [] + ), + mapping_editors=( + [Editors(editor).name for editor in record.mapping_editors] + if record.mapping_editors + else [] + ), + validation_editors=( + [Editors(editor).name for editor in record.validation_editors] + if record.validation_editors + else [] + ), + percent_mapped=percent_mapped, + percent_validated=percent_validated, + percent_bad_imagery=percent_bad_imagery, + ) + # Fetch project teams + teams_query = """ + SELECT + t.id AS team_id, + t.name AS team_name, + pt.role + FROM + project_teams pt + JOIN + teams t ON t.id = pt.team_id + WHERE + pt.project_id = :project_id + """ + teams = await db.fetch_all(teams_query, {"project_id": project_id}) + project_dto.project_teams = ( + [ + ProjectTeamDTO(**{**team, "role": TeamRoles(team["role"]).name}) + for team in teams + ] + if teams + else [] + ) - if self.priority_areas: - geojson_areas = [] - for priority_area in self.priority_areas: - geojson_areas.append(priority_area.get_as_geojson()) + custom_editor = await db.fetch_one( + """ + SELECT project_id, name, description, url + FROM project_custom_editors + WHERE project_id = :project_id + """, + {"project_id": project_id}, + ) - base_dto.priority_areas = geojson_areas + if custom_editor: + project_dto.custom_editor = CustomEditorDTO(**custom_editor) + + if project_dto.private: + # Fetch allowed usernames using the intermediate table + allowed_users_query = """ + SELECT u.username + FROM project_allowed_users pau + JOIN users u ON pau.user_id = u.id + WHERE pau.project_id = :project_id + """ + allowed_usernames = await db.fetch_all( + allowed_users_query, {"project_id": project_id} + ) + project_dto.allowed_usernames = ( + [user.username for user in allowed_usernames] + if allowed_usernames + else [] + ) - base_dto.interests = [ - InterestDTO(dict(id=i.id, name=i.name)) for i in self.interests - ] + campaigns_query = """ + SELECT c.id, c.name + FROM campaigns c + JOIN campaign_projects cp ON c.id = cp.campaign_id + WHERE cp.project_id = :project_id + """ + campaigns = await db.fetch_all(campaigns_query, {"project_id": project_id}) + project_dto.campaigns = [ListCampaignDTO(**c) for c in campaigns] + + priority_areas_query = """ + SELECT ST_AsGeoJSON(pa.geometry) as geojson + FROM priority_areas pa + JOIN project_priority_areas ppa ON pa.id = ppa.priority_area_id + WHERE ppa.project_id = :project_id + """ + priority_areas = await db.fetch_all( + priority_areas_query, {"project_id": project_id} + ) + project_dto.priority_areas = ( + [geojson.loads(area["geojson"]) for area in priority_areas] + if priority_areas + else None + ) - return self, base_dto + interests_query = """ + SELECT i.id, i.name + FROM interests i + JOIN project_interests pi ON i.id = pi.interest_id + WHERE pi.project_id = :project_id + """ + interests = await db.fetch_all(interests_query, {"project_id": project_id}) + project_dto.interests = [InterestDTO(**i) for i in interests] + return project_dto - def as_dto_for_mapping( - self, authenticated_user_id: int = None, locale: str = "en", abbrev: bool = True + @staticmethod + async def as_dto_for_mapping( + project_id: int, + db: Database, + authenticated_user_id: int = None, + locale: str = "en", + abbrev: bool = True, ) -> Optional[ProjectDTO]: """Creates a Project DTO suitable for transmitting to mapper users""" - 
project, project_dto = self._get_project_and_base_dto() + project_dto = await Project.get_project_and_base_dto(project_id, db) + if abbrev is False: - project_dto.tasks = Task.get_tasks_as_geojson_feature_collection( - self.id, None + project_dto.tasks = await Task.get_tasks_as_geojson_feature_collection( + db, project_id, None ) else: - project_dto.tasks = Task.get_tasks_as_geojson_feature_collection_no_geom( - self.id + project_dto.tasks = ( + await Task.get_tasks_as_geojson_feature_collection_no_geom( + db, project_id + ) ) - project_dto.project_info = ProjectInfo.get_dto_for_locale( - self.id, locale, project.default_locale + + project_dto.project_info = await ProjectInfo.get_dto_for_locale( + db, project_id, locale, project_dto.default_locale ) - if project.organisation_id: - project_dto.organisation = project.organisation.id - project_dto.organisation_name = project.organisation.name - project_dto.organisation_logo = project.organisation.logo - project_dto.organisation_slug = project.organisation.slug - project_dto.project_info_locales = ProjectInfo.get_dto_for_all_locales(self.id) - return project_dto + if project_dto.organisation: + # Fetch organisation details + org_query = """ + SELECT + id AS "organisation_id", + name, + slug, + logo + FROM organisations + WHERE id = :organisation_id + """ + org_record = await db.fetch_one( + org_query, values={"organisation_id": project_dto.organisation} + ) + if org_record: + project_dto.organisation_name = org_record.name + project_dto.organisation_logo = org_record.logo + project_dto.organisation_slug = org_record.slug - def tasks_as_geojson( - self, task_ids_str: str, order_by=None, order_by_type="ASC", status=None - ): - """Creates a geojson of all areas""" - project_tasks = Task.get_tasks_as_geojson_feature_collection( - self.id, task_ids_str, order_by, order_by_type, status + project_dto.project_info_locales = await ProjectInfo.get_dto_for_all_locales( + db, project_id ) - return project_tasks + return project_dto @staticmethod - def get_all_countries(): - query = ( - db.session.query(func.unnest(Project.country).label("country")) - .distinct() - .order_by("country") - .all() + async def tasks_as_geojson( + db: Database, + project_id: int, + task_ids_str: Optional[str], + order_by: Optional[str] = None, + order_by_type: str = "ASC", + status: Optional[int] = None, + ): + return await Task.get_tasks_as_geojson_feature_collection( + db, project_id, task_ids_str, order_by, order_by_type, status ) - tags_dto = TagsDTO() - tags_dto.tags = [r[0] for r in query] + + @staticmethod + async def get_all_countries(database: Database) -> TagsDTO: + # Raw SQL query to unnest the country field, select distinct values, and order by country + query = """ + SELECT DISTINCT UNNEST(country) AS country + FROM projects + ORDER BY country + """ + rows = await database.fetch_all(query=query) + countries = [row["country"] for row in rows] + tags_dto = TagsDTO(tags=countries) return tags_dto - def calculate_tasks_percent(self, target): - """Calculates percentages of contributions""" + @staticmethod + def calculate_tasks_percent( + target: str, + tasks_mapped: int, + tasks_validated: int, + total_tasks: int, + tasks_bad_imagery: int, + ) -> int: + """Calculates percentages of contributions based on provided statistics.""" try: if target == "mapped": return int( - (self.tasks_mapped + self.tasks_validated) - / (self.total_tasks - self.tasks_bad_imagery) + (tasks_mapped + tasks_validated) + / (total_tasks - tasks_bad_imagery) * 100 ) elif target == "validated": - 
return int( - self.tasks_validated - / (self.total_tasks - self.tasks_bad_imagery) - * 100 - ) + return int(tasks_validated / (total_tasks - tasks_bad_imagery) * 100) elif target == "bad_imagery": - return int((self.tasks_bad_imagery / self.total_tasks) * 100) + return int((tasks_bad_imagery / total_tasks) * 100) elif target == "project_completion": # To calculate project completion we assign 2 points to each task # one for mapping and one for validation return int( - (self.tasks_mapped + (self.tasks_validated * 2)) - / ((self.total_tasks - self.tasks_bad_imagery) * 2) + (tasks_mapped + (tasks_validated * 2)) + / ((total_tasks - tasks_bad_imagery) * 2) * 100 ) except ZeroDivisionError: return 0 - def as_dto_for_admin(self, project_id): + @staticmethod + async def as_dto_for_admin(project_id: int, db: Database): """Creates a Project DTO suitable for transmitting to project admins""" - project, project_dto = self._get_project_and_base_dto() + project_dto = await Project.get_project_and_base_dto(project_id, db) - if project is None: - return None - - project_dto.project_info_locales = ProjectInfo.get_dto_for_all_locales( - project_id + project_dto.project_info_locales = await ProjectInfo.get_dto_for_all_locales( + db, project_id ) return project_dto - def create_or_update_interests(self, interests_ids): + async def create_or_update_interests(self, interests_ids, db): self.interests = [] - objs = [Interest.get_by_id(i) for i in interests_ids] + objs = [Interest.get_by_id(i, db) for i in interests_ids] self.interests.extend(objs) - db.session.commit() - - @staticmethod - def get_project_campaigns(project_id: int): query = ( - Campaign.query.join(campaign_projects) - .filter(campaign_projects.c.project_id == project_id) - .all() + update(Project) + .where(Project.id == self.id) + .values(interests=self.interests) ) - campaign_list = [] - for campaign in query: - campaign_dto = CampaignDTO() - campaign_dto.id = campaign.id - campaign_dto.name = campaign.name - campaign_list.append(campaign_dto) + # Execute the update query using the async `db.execute` + project = await db.execute(query) + + return project + @staticmethod + async def get_project_campaigns(project_id: int, db: Database): + query = """ + SELECT c.id, c.name + FROM campaign_projects cp + JOIN campaigns c ON cp.campaign_id = c.id + WHERE cp.project_id = :project_id + """ + rows = await db.fetch_all(query=query, values={"project_id": project_id}) + + campaign_list = [ListCampaignDTO(**row) for row in rows] return campaign_list + @staticmethod + async def clear_existing_priority_areas(db: Database, project_id: int): + """Clear existing priority area links and delete the corresponding priority areas for the given project ID.""" + + existing_priority_area_ids_query = """ + SELECT priority_area_id + FROM project_priority_areas + WHERE project_id = :project_id; + """ + existing_priority_area_ids = await db.fetch_all( + query=existing_priority_area_ids_query, values={"project_id": project_id} + ) + existing_ids = [ + record["priority_area_id"] for record in existing_priority_area_ids + ] + + clear_links_query = """ + DELETE FROM project_priority_areas + WHERE project_id = :project_id; + """ + await db.execute(query=clear_links_query, values={"project_id": project_id}) + + if existing_ids: + delete_priority_areas_query = """ + DELETE FROM priority_areas + WHERE id = ANY(:ids); + """ + # Pass the list as an array using PostgreSQL's array syntax + await db.execute( + query=delete_priority_areas_query, values={"ids": existing_ids} + ) + + async 
def update_project_author(project_id: int, new_author_id: int, db: Database): + query = """ + UPDATE projects + SET author_id = :new_author_id + WHERE id = :project_id + """ + values = {"new_author_id": new_author_id, "project_id": project_id} + + # Execute the query + await db.execute(query=query, values=values) + # Add index on project geometry -db.Index("idx_geometry", Project.geometry, postgresql_using="gist") +Index("idx_geometry", Project.geometry, postgresql_using="gist") diff --git a/backend/models/postgis/project_chat.py b/backend/models/postgis/project_chat.py index 9070bd917c..c6e150390b 100644 --- a/backend/models/postgis/project_chat.py +++ b/backend/models/postgis/project_chat.py @@ -1,34 +1,38 @@ import bleach +from databases import Database +from loguru import logger from markdown import markdown -from flask import current_app -from backend import db +from sqlalchemy import BigInteger, Column, DateTime, ForeignKey, Integer, String +from sqlalchemy.orm import relationship + +from backend.db import Base +from backend.models.dtos.message_dto import ( + ChatMessageDTO, + ListChatMessageDTO, + Pagination, + ProjectChatDTO, +) from backend.models.postgis.user import User from backend.models.postgis.utils import timestamp -from backend.models.dtos.message_dto import ChatMessageDTO, ProjectChatDTO, Pagination -class ProjectChat(db.Model): +class ProjectChat(Base): """Contains all project info localized into supported languages""" __tablename__ = "project_chat" - id = db.Column(db.BigInteger, primary_key=True) - project_id = db.Column( - db.Integer, db.ForeignKey("projects.id"), index=True, nullable=False - ) - user_id = db.Column(db.Integer, db.ForeignKey("users.id"), nullable=False) - time_stamp = db.Column(db.DateTime, nullable=False, default=timestamp) - message = db.Column(db.String, nullable=False) + id = Column(BigInteger, primary_key=True) + project_id = Column(Integer, ForeignKey("projects.id"), index=True, nullable=False) + user_id = Column(Integer, ForeignKey("users.id"), nullable=False) + time_stamp = Column(DateTime, nullable=False, default=timestamp) + message = Column(String, nullable=False) # Relationships - posted_by = db.relationship(User, foreign_keys=[user_id]) + posted_by = relationship(User, foreign_keys=[user_id]) @classmethod - def create_from_dto(cls, dto: ChatMessageDTO): + async def create_from_dto(cls, dto: ChatMessageDTO, db: Database): """Creates a new ProjectInfo class from dto, used from project edit""" - current_app.logger.debug("Create chat message from DTO") - new_message = cls() - new_message.project_id = dto.project_id - new_message.user_id = dto.user_id + logger.debug("Create chat message from DTO") # Use bleach to remove any potential mischief allowed_tags = [ @@ -57,36 +61,83 @@ def create_from_dto(cls, dto: ChatMessageDTO): attributes=allowed_atrributes, ) clean_message = bleach.linkify(clean_message) - new_message.message = clean_message - db.session.add(new_message) - return new_message + query = """ + INSERT INTO project_chat (project_id, user_id, message, time_stamp) + VALUES (:project_id, :user_id, :message, :time_stamp) + RETURNING id, project_id, user_id, message, time_stamp + """ + + values = { + "project_id": dto.project_id, + "user_id": dto.user_id, + "time_stamp": dto.timestamp, + "message": clean_message, + } + + new_message_id = await db.execute(query=query, values=values) + new_message = await db.fetch_one( + """ + SELECT pc.id, pc.message, pc.project_id, pc.time_stamp, u.id AS user_id, u.username, u.picture_url + FROM 
project_chat pc + JOIN users u ON u.id = pc.user_id + WHERE pc.id = :message_id + """, + {"message_id": new_message_id}, + ) + return ListChatMessageDTO( + id=new_message["id"], + message=new_message["message"], + picture_url=new_message["picture_url"], + timestamp=new_message["time_stamp"], + username=new_message["username"], + ) @staticmethod - def get_messages(project_id: int, page: int, per_page: int = 20) -> ProjectChatDTO: + async def get_messages( + project_id: int, db: Database, page: int, per_page: int = 20 + ) -> ProjectChatDTO: """Get all messages on the project""" - project_messages = ( - ProjectChat.query.filter_by(project_id=project_id) - .order_by(ProjectChat.time_stamp.desc()) - .paginate(page=page, per_page=per_page, error_out=True) - ) + offset = (page - 1) * per_page + count_query = """ + SELECT COUNT(*) + FROM project_chat + WHERE project_id = :project_id + """ + messages_query = """ + SELECT pc.id, pc.message, pc.project_id, pc.time_stamp, u.id AS user_id, u.username, u.picture_url + FROM project_chat pc + JOIN users u ON u.id = pc.user_id + WHERE pc.project_id = :project_id + ORDER BY pc.time_stamp DESC + LIMIT :limit OFFSET :offset + """ - dto = ProjectChatDTO() + total_count = await db.fetch_val(count_query, {"project_id": project_id}) - if project_messages.total == 0: - return dto + if total_count == 0: + return ProjectChatDTO() # Return empty DTO if no messages - for message in project_messages.items: - chat_dto = ChatMessageDTO() - chat_dto.message = message.message - chat_dto.username = message.posted_by.username - chat_dto.picture_url = message.posted_by.picture_url - chat_dto.timestamp = message.time_stamp - chat_dto.id = message.id + messages = await db.fetch_all( + messages_query, + {"project_id": project_id, "limit": per_page, "offset": offset}, + ) + dto = ProjectChatDTO() + + for message in messages: + chat_dto = ListChatMessageDTO( + id=message["id"], + message=message["message"], + picture_url=message["picture_url"], + timestamp=message["time_stamp"], + username=message["username"], + ) dto.chat.append(chat_dto) - dto.pagination = Pagination(project_messages) + dto.pagination = Pagination.from_total_count( + page=page, per_page=per_page, total=total_count + ) return dto diff --git a/backend/models/postgis/project_info.py b/backend/models/postgis/project_info.py index d28253b12f..e7a04bc95b 100644 --- a/backend/models/postgis/project_info.py +++ b/backend/models/postgis/project_info.py @@ -1,30 +1,42 @@ -from flask import current_app -from sqlalchemy.dialects.postgresql import TSVECTOR from typing import List -from backend import db + +from databases import Database +from sqlalchemy import ( + Column, + ForeignKey, + Index, + Integer, + String, + insert, + inspect, + update, +) +from sqlalchemy.dialects.postgresql import TSVECTOR + +from backend.db import Base from backend.models.dtos.project_dto import ProjectInfoDTO -class ProjectInfo(db.Model): +class ProjectInfo(Base): """Contains all project info localized into supported languages""" __tablename__ = "project_info" - project_id = db.Column(db.Integer, db.ForeignKey("projects.id"), primary_key=True) - locale = db.Column(db.String(10), primary_key=True) - name = db.Column(db.String(512)) - short_description = db.Column(db.String) - description = db.Column(db.String) - instructions = db.Column(db.String) - project_id_str = db.Column(db.String) - text_searchable = db.Column( + project_id = Column(Integer, ForeignKey("projects.id"), primary_key=True) + locale = Column(String(10), primary_key=True) + name 
= Column(String(512)) + short_description = Column(String) + description = Column(String) + instructions = Column(String) + project_id_str = Column(String) + text_searchable = Column( TSVECTOR ) # This contains searchable text and is populated by a DB Trigger - per_task_instructions = db.Column(db.String) + per_task_instructions = Column(String) __table_args__ = ( - db.Index("idx_project_info_composite", "locale", "project_id"), - db.Index("textsearch_idx", "text_searchable"), + Index("idx_project_info_composite", "locale", "project_id"), + Index("textsearch_idx", "text_searchable"), {}, ) @@ -37,15 +49,29 @@ def create_from_name(cls, name: str): return new_info @classmethod - def create_from_dto(cls, dto: ProjectInfoDTO): + async def create_from_dto(cls, dto: ProjectInfoDTO, project_id: int, db: Database): """Creates a new ProjectInfo class from dto, used from project edit""" - new_info = cls() - new_info.update_from_dto(dto) - return new_info + self = cls() + self.locale = dto.locale + self.name = dto.name + self.project_id = project_id + self.project_id_str = str(project_id) # Allows project_id to be searched - def update_from_dto(self, dto: ProjectInfoDTO): + # Note project info not bleached on basis that admins are trusted users and shouldn't be doing anything bad + self.short_description = dto.short_description + self.description = dto.description + self.instructions = dto.instructions + self.per_task_instructions = dto.per_task_instructions + columns = { + c.key: getattr(self, c.key) for c in inspect(self).mapper.column_attrs + } + query = insert(ProjectInfo.__table__).values(**columns) + result = await db.execute(query) + return result + + async def update_from_dto(self, dto: ProjectInfoDTO, db: Database): """Updates existing ProjectInfo from supplied DTO""" - self.locale = dto.locale + # self.locale = dto.locale self.name = dto.name self.project_id_str = str(self.project_id) # Allows project_id to be searched @@ -54,77 +80,148 @@ def update_from_dto(self, dto: ProjectInfoDTO): self.description = dto.description self.instructions = dto.instructions self.per_task_instructions = dto.per_task_instructions + columns = { + c.key: getattr(self, c.key) for c in inspect(self).mapper.column_attrs + } + columns.pop("project_id", None) + columns.pop("locale", None) + query = ( + update(ProjectInfo.__table__) + .where( + ProjectInfo.project_id == self.project_id, + ProjectInfo.locale == self.locale, + ) + .values(**columns) + ) + result = await db.execute(query) + return result @staticmethod - def get_dto_for_locale(project_id, locale, default_locale="en") -> ProjectInfoDTO: + async def get_dto_for_locale( + db: Database, project_id: int, locale: str, default_locale: str = "en" + ) -> ProjectInfoDTO: """ - Gets the projectInfoDTO for the project for the requested locale. If not found, then the default locale is used + Gets the ProjectInfoDTO for the project for the requested locale. If not found, then the default locale is used. 
+ :param db: The async database connection :param project_id: ProjectID in scope - :param locale: locale requested by user - :param default_locale: default locale of project + :param locale: Locale requested by user + :param default_locale: Default locale of project + :return: ProjectInfoDTO :raises: ValueError if no info found for Default Locale """ - project_info = ProjectInfo.query.filter_by( - project_id=project_id, locale=locale - ).one_or_none() - + query = """ + SELECT * FROM project_info + WHERE project_id = :project_id AND locale = :locale + """ + # Execute the query for the requested locale + project_info = await db.fetch_one( + query, values={"project_id": project_id, "locale": locale} + ) if project_info is None: - # If project is none, get default locale and don't worry about empty translations - project_info = ProjectInfo.query.filter_by( - project_id=project_id, locale=default_locale - ).one_or_none() - return project_info.get_dto() + # Define the SQL query to get project info by default locale + query_default = """ + SELECT * FROM project_info + WHERE project_id = :project_id AND locale = :default_locale + """ + + # Execute the query for the default locale + project_info = await db.fetch_one( + query_default, + values={"project_id": project_id, "default_locale": default_locale}, + ) + + if project_info is None: + error_message = f"BAD DATA: no info for project {project_id}, locale: {locale}, default {default_locale}" + raise ValueError(error_message) + + return ProjectInfoDTO(**project_info) if locale == default_locale: - # If locale == default_locale don't need to worry about empty translations - return project_info.get_dto() + # Return the DTO for the default locale + return ProjectInfoDTO(**project_info) + # Define the SQL query to get project info by default locale for partial translations + query_default = """ + SELECT * FROM project_info + WHERE project_id = :project_id AND locale = :default_locale + """ - default_locale = ProjectInfo.query.filter_by( - project_id=project_id, locale=default_locale - ).one_or_none() + # Execute the query for the default locale + default_locale_info = await db.fetch_one( + query_default, + values={"project_id": project_id, "default_locale": default_locale}, + ) - if default_locale is None: + if default_locale_info is None: error_message = f"BAD DATA: no info for project {project_id}, locale: {locale}, default {default_locale}" - current_app.logger.critical(error_message) raise ValueError(error_message) - # Pass thru default_locale in case of partial translation - return project_info.get_dto(default_locale) - - def get_dto(self, default_locale=ProjectInfoDTO()) -> ProjectInfoDTO: - """ - Get DTO for current ProjectInfo - :param default_locale: The default locale string for any empty fields - """ - project_info_dto = ProjectInfoDTO() - project_info_dto.locale = self.locale - project_info_dto.name = self.name if self.name else default_locale.name - project_info_dto.description = ( - self.description if self.description else default_locale.description + combined_info = ProjectInfoDTO(locale=project_info.locale) + combined_info.name = ( + project_info.name if project_info.name else default_locale_info.name ) - project_info_dto.short_description = ( - self.short_description - if self.short_description - else default_locale.short_description + combined_info.description = ( + project_info.description + if project_info.description + else default_locale_info.description ) - project_info_dto.instructions = ( - self.instructions if 
self.instructions else default_locale.instructions + combined_info.short_description = ( + project_info.short_description + if project_info.short_description + else default_locale_info.short_description ) - project_info_dto.per_task_instructions = ( - self.per_task_instructions - if self.per_task_instructions - else default_locale.per_task_instructions + combined_info.instructions = ( + project_info.instructions + if project_info.instructions + else default_locale_info.instructions ) + combined_info.per_task_instructions = ( + project_info.per_task_instructions + if project_info.per_task_instructions + else default_locale_info.per_task_instructions + ) + return combined_info - return project_info_dto - - @staticmethod - def get_dto_for_all_locales(project_id) -> List[ProjectInfoDTO]: - locales = ProjectInfo.query.filter_by(project_id=project_id).all() + # Function to get a single ProjectInfoDTO + async def get_project_info_dto(locale_record) -> ProjectInfoDTO: + """ + Get DTO for the current ProjectInfo + :param locale_record: The record from the database for the locale + :param default_locale: The default locale DTO for any empty fields + :return: ProjectInfoDTO + """ + return ProjectInfoDTO( + locale=locale_record["locale"], + name=locale_record["name"] or "", + description=locale_record["description"] or "", + short_description=locale_record["short_description"] or "", + instructions=locale_record["instructions"] or "", + per_task_instructions=locale_record["per_task_instructions"] or "", + ) - project_info_dtos = [] - for locale in locales: - project_info_dto = locale.get_dto() - project_info_dtos.append(project_info_dto) + # Function to get DTOs for all locales of a project + async def get_dto_for_all_locales( + db: Database, project_id: int + ) -> List[ProjectInfoDTO]: + """ + Get DTOs for all locales associated with a project + :param database: The database connection + :param project_id: The project ID to filter locales + :return: List of ProjectInfoDTO + """ + query = """ + SELECT locale, name, description, short_description, instructions, per_task_instructions + FROM project_info + WHERE project_id = :project_id + """ + locales = await db.fetch_all(query=query, values={"project_id": project_id}) + + project_info_dtos = ( + [ + await ProjectInfo.get_project_info_dto(locale_record) + for locale_record in locales + ] + if locales + else [] + ) return project_info_dtos diff --git a/backend/models/postgis/project_partner.py b/backend/models/postgis/project_partner.py index b8f8726179..5b99008b6b 100644 --- a/backend/models/postgis/project_partner.py +++ b/backend/models/postgis/project_partner.py @@ -1,86 +1,202 @@ -from backend import db -from backend.models.postgis.utils import timestamp +from datetime import datetime, timezone + +from databases import Database +from sqlalchemy import Column, DateTime, ForeignKey, Integer +from backend.db import Base from backend.models.dtos.project_partner_dto import ( - ProjectPartnershipDTO, ProjectPartnerAction, + ProjectPartnershipDTO, ) +from backend.models.postgis.utils import timestamp + +class ProjectPartnershipHistory(Base): + """Logs changes to the Project-Partnership links""" -class ProjectPartnershipHistory(db.Model): __tablename__ = "project_partnerships_history" - id = db.Column(db.Integer, primary_key=True, autoincrement=True) - partnership_id = db.Column( - db.Integer, - db.ForeignKey("project_partnerships.id", ondelete="SET NULL"), + id = Column(Integer, primary_key=True, autoincrement=True) + partnership_id = Column( + Integer, + 
ForeignKey("project_partnerships.id", ondelete="SET NULL"), nullable=True, index=True, ) - project_id = db.Column( - db.Integer, - db.ForeignKey("projects.id", ondelete="CASCADE"), + project_id = Column( + Integer, + ForeignKey("projects.id", ondelete="CASCADE"), nullable=False, index=True, ) - partner_id = db.Column( - db.Integer, - db.ForeignKey("partners.id", ondelete="CASCADE"), + partner_id = Column( + Integer, + ForeignKey("partners.id", ondelete="CASCADE"), nullable=False, index=True, ) - action = db.Column(db.Integer, default=ProjectPartnerAction.CREATE.value) - action_date = db.Column(db.DateTime, nullable=False, default=timestamp) - - started_on_old = db.Column(db.DateTime) - ended_on_old = db.Column(db.DateTime) - started_on_new = db.Column(db.DateTime) - ended_on_new = db.Column(db.DateTime) + action = Column(Integer, nullable=False, default=ProjectPartnerAction.CREATE.value) + action_date = Column(DateTime, nullable=False, default=timestamp) + + started_on_old = Column(DateTime, nullable=True) + ended_on_old = Column(DateTime, nullable=True) + started_on_new = Column(DateTime, nullable=True) + ended_on_new = Column(DateTime, nullable=True) + + def convert_to_utc_naive(self, dt: datetime) -> datetime: + """Converts a timezone-aware datetime to a UTC timezone-naive datetime.""" + if dt.tzinfo is not None: + # return dt.astimezone(datetime.timezone.utc).replace(tzinfo=None) + return dt.astimezone(timezone.utc).replace(tzinfo=None) + return dt + + async def create(self, db: Database) -> int: + """ + Inserts the current object as a record in the database and returns its ID. + """ + + if self.started_on_old: + self.started_on_old = self.convert_to_utc_naive(self.started_on_old) + if self.ended_on_old: + self.ended_on_old = self.convert_to_utc_naive(self.ended_on_old) + if self.started_on_new: + self.started_on_new = self.convert_to_utc_naive(self.started_on_new) + if self.ended_on_new: + self.ended_on_new = self.convert_to_utc_naive(self.ended_on_new) + + query = """ + INSERT INTO project_partnerships_history ( + partnership_id, + project_id, + partner_id, + action, + action_date, + started_on_old, + ended_on_old, + started_on_new, + ended_on_new + ) + VALUES ( + :partnership_id, + :project_id, + :partner_id, + :action, + :action_date, + :started_on_old, + :ended_on_old, + :started_on_new, + :ended_on_new + ) + RETURNING id + """ + values = { + "partnership_id": self.partnership_id, + "project_id": self.project_id, + "partner_id": self.partner_id, + "action": self.action if self.action else ProjectPartnerAction.CREATE.value, + "action_date": timestamp(), + "started_on_old": self.started_on_old if self.started_on_old else None, + "ended_on_old": self.ended_on_old if self.ended_on_old else None, + "started_on_new": self.started_on_new if self.started_on_new else None, + "ended_on_new": self.ended_on_new if self.ended_on_new else None, + } + result = await db.fetch_one(query, values=values) + return result["id"] + + +class ProjectPartnership(Base): + """Describes the relationship between a Project and a Partner""" - def create(self): - """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - - def save(self): - """Save changes to db""" - db.session.commit() - - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() - - -class ProjectPartnership(db.Model): __tablename__ = "project_partnerships" - id = db.Column(db.Integer, primary_key=True, autoincrement=True) - project_id = 
db.Column(db.Integer, db.ForeignKey("projects.id", ondelete="CASCADE")) - partner_id = db.Column(db.Integer, db.ForeignKey("partners.id", ondelete="CASCADE")) - started_on = db.Column(db.DateTime, default=timestamp, nullable=False) - ended_on = db.Column(db.DateTime, nullable=True) + id = Column(Integer, primary_key=True, autoincrement=True) + project_id = Column( + Integer, ForeignKey("projects.id", ondelete="CASCADE"), nullable=False + ) + partner_id = Column( + Integer, ForeignKey("partners.id", ondelete="CASCADE"), nullable=False + ) + started_on = Column(DateTime, nullable=False, default=timestamp) + ended_on = Column(DateTime, nullable=True) + + def convert_to_utc_naive(self, dt: datetime) -> datetime: + """Converts a timezone-aware datetime to a UTC timezone-naive datetime.""" + if dt.tzinfo is not None: + return dt.astimezone(timezone.utc).replace(tzinfo=None) + return dt @staticmethod - def get_by_id(partnership_id: int): + async def get_by_id(partnership_id: int, db: Database): """Return the user for the specified id, or None if not found""" - return db.session.get(ProjectPartnership, partnership_id) - - def create(self): - """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - return self.id - - def save(self): - """Save changes to db""" - db.session.commit() - - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() + query = """ + SELECT * + FROM project_partnerships + WHERE id = :partnership_id + """ + result = await db.fetch_one(query, values={"partnership_id": partnership_id}) + return result if result else None + + async def create(self, db: Database) -> int: + """ + Inserts the current object as a record in the database and returns its ID. + """ + + self.started_on = self.convert_to_utc_naive(self.started_on) + self.ended_on = ( + self.convert_to_utc_naive(self.ended_on) if self.ended_on else None + ) + + query = """ + INSERT INTO project_partnerships (project_id, partner_id, started_on, ended_on) + VALUES (:project_id, :partner_id, :started_on, :ended_on) + RETURNING id + """ + values = { + "project_id": self.project_id, + "partner_id": self.partner_id, + "started_on": self.started_on, + "ended_on": self.ended_on if self.ended_on else None, + } + result = await db.fetch_one(query, values=values) + return result["id"] + + async def save(self, db: Database) -> None: + """ + Updates the current object in the database. + """ + self.started_on = self.convert_to_utc_naive(self.started_on) + self.ended_on = ( + self.convert_to_utc_naive(self.ended_on) if self.ended_on else None + ) + + query = """ + UPDATE project_partnerships + SET + project_id = :project_id, + partner_id = :partner_id, + started_on = :started_on, + ended_on = :ended_on + WHERE id = :id + """ + values = { + "id": self.id, + "project_id": self.project_id, + "partner_id": self.partner_id, + "started_on": self.started_on, + "ended_on": self.ended_on if self.ended_on else None, + } + await db.execute(query, values=values) + + async def delete(self, db: Database) -> None: + """ + Deletes the current object from the database. 
+ """ + query = """ + DELETE FROM project_partnerships + WHERE id = :id + """ + await db.execute(query, values={"id": self.id}) def as_dto(self) -> ProjectPartnershipDTO: """Creates a Partnership DTO""" diff --git a/backend/models/postgis/release_version.py b/backend/models/postgis/release_version.py index 6c5bd99af2..26a34493af 100644 --- a/backend/models/postgis/release_version.py +++ b/backend/models/postgis/release_version.py @@ -1,20 +1,24 @@ -from backend import db +from databases import Database +from sqlalchemy import Column, String, DateTime, insert +from backend.db import Base -class ReleaseVersion(db.Model): + +class ReleaseVersion(Base): """Describes an current release version of TM (i.e. github.com/hotosm/tasking-manager)""" __tablename__ = "release_version" - tag_name = db.Column(db.String(64), nullable=False, primary_key=True) - published_at = db.Column(db.DateTime, nullable=False) - - def update(self): - db.session.commit() + tag_name = Column(String(64), nullable=False, primary_key=True) + published_at = Column(DateTime, nullable=False) - def save(self): - db.session.add(self) - db.session.commit() + async def save(self, db: Database): + query = insert(ReleaseVersion.__table__).values( + tag_name=self.tag_name, published_at=self.published_at + ) + await db.execute(query) @staticmethod - def get(): - return ReleaseVersion.query.first() + async def get(db: Database): + """Get the latest release version""" + query = """SELECT * FROM release_version LIMIT 1""" + return await db.fetch_one(query=query) diff --git a/backend/models/postgis/tags.py b/backend/models/postgis/tags.py index 7d4d6a0b44..740c126062 100644 --- a/backend/models/postgis/tags.py +++ b/backend/models/postgis/tags.py @@ -1,15 +1,16 @@ -from backend import db +from sqlalchemy import Column, String, Integer from backend.models.dtos.tags_dto import TagsDTO +from backend.db import Base -class Tags(db.Model): +class Tags(Base): """Describes an individual mapping Task""" __tablename__ = "tags" - id = db.Column(db.Integer, primary_key=True) - organisations = db.Column(db.String, unique=True) - campaigns = db.Column(db.String, unique=True) + id = Column(Integer, primary_key=True) + organisations = Column(String, unique=True) + campaigns = Column(String, unique=True) @staticmethod def upsert_organisation_tag(organisation_tag: str) -> str: @@ -21,7 +22,7 @@ def upsert_organisation_tag(organisation_tag: str) -> str: tag = Tags() tag.organisations = organisation_tag - db.session.add( + session.add( tag ) # Note no commit here, done as part of project update transaction return organisation_tag @@ -36,7 +37,7 @@ def upsert_campaign_tag(campaign_tag: str) -> str: tag = Tags() tag.campaigns = campaign_tag - db.session.add( + session.add( tag ) # Note no commit here, done as part of project update transaction return campaign_tag @@ -44,7 +45,7 @@ def upsert_campaign_tag(campaign_tag: str) -> str: @staticmethod def get_all_organisations(): """Get all org tags in DB""" - result = db.session.query(Tags.organisations).filter( + result = session.query(Tags.organisations).filter( Tags.organisations.isnot(None) ) @@ -55,7 +56,7 @@ def get_all_organisations(): @staticmethod def get_all_campaigns(): """Get all campaign tags in DB""" - result = db.session.query(Tags.campaigns).filter(Tags.campaigns.isnot(None)) + result = session.query(Tags.campaigns).filter(Tags.campaigns.isnot(None)) dto = TagsDTO() dto.tags = [r for (r,) in result] diff --git a/backend/models/postgis/task.py b/backend/models/postgis/task.py index 
6708dde2ee..8d0ff166ec 100644 --- a/backend/models/postgis/task.py +++ b/backend/models/postgis/task.py @@ -1,37 +1,56 @@ -import bleach import datetime -import geojson import json +from datetime import timezone from enum import Enum -from flask import current_app -from sqlalchemy.types import Float, Text -from sqlalchemy import desc, cast, func, distinct -from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound -from sqlalchemy.orm.session import make_transient +from typing import Any, Dict, List, Optional + +import bleach +import geojson +from databases import Database from geoalchemy2 import Geometry -from typing import List +from shapely.geometry import shape + +# # from flask import current_app +from sqlalchemy import ( + BigInteger, + Boolean, + Column, + DateTime, + ForeignKey, + ForeignKeyConstraint, + Index, + Integer, + String, + Unicode, + desc, + distinct, + func, + select, +) +from sqlalchemy.orm import relationship +from sqlalchemy.orm.exc import MultipleResultsFound -from backend import db +from backend.config import settings +from backend.db import Base from backend.exceptions import NotFound from backend.models.dtos.mapping_dto import TaskDTO, TaskHistoryDTO -from backend.models.dtos.validator_dto import MappedTasksByUser, MappedTasks +from backend.models.dtos.mapping_issues_dto import TaskMappingIssueDTO from backend.models.dtos.project_dto import ( + LockedTasksForUser, ProjectComment, ProjectCommentsDTO, - LockedTasksForUser, ) -from backend.models.dtos.mapping_issues_dto import TaskMappingIssueDTO -from backend.models.postgis.statuses import TaskStatus, MappingLevel +from backend.models.dtos.task_annotation_dto import TaskAnnotationDTO +from backend.models.dtos.validator_dto import MappedTasks, MappedTasksByUser +from backend.models.postgis.statuses import MappingLevel, TaskStatus +from backend.models.postgis.task_annotation import TaskAnnotation from backend.models.postgis.user import User from backend.models.postgis.utils import ( InvalidData, InvalidGeoJson, - ST_GeomFromGeoJSON, - ST_SetSRID, - timestamp, parse_duration, + timestamp, ) -from backend.models.postgis.task_annotation import TaskAnnotation class TaskAction(Enum): @@ -47,42 +66,36 @@ class TaskAction(Enum): EXTENDED_FOR_VALIDATION = 8 -class TaskInvalidationHistory(db.Model): +class TaskInvalidationHistory(Base): """Describes the most recent history of task invalidation and subsequent validation""" __tablename__ = "task_invalidation_history" - id = db.Column(db.Integer, primary_key=True) - project_id = db.Column(db.Integer, db.ForeignKey("projects.id"), nullable=False) - task_id = db.Column(db.Integer, nullable=False) - is_closed = db.Column(db.Boolean, default=False) - mapper_id = db.Column(db.BigInteger, db.ForeignKey("users.id", name="fk_mappers")) - mapped_date = db.Column(db.DateTime) - invalidator_id = db.Column( - db.BigInteger, db.ForeignKey("users.id", name="fk_invalidators") - ) - invalidated_date = db.Column(db.DateTime) - invalidation_history_id = db.Column( - db.Integer, db.ForeignKey("task_history.id", name="fk_invalidation_history") - ) - validator_id = db.Column( - db.BigInteger, db.ForeignKey("users.id", name="fk_validators") + id = Column(Integer, primary_key=True) + project_id = Column(Integer, ForeignKey("projects.id"), nullable=False) + task_id = Column(Integer, nullable=False) + is_closed = Column(Boolean, default=False) + mapper_id = Column(BigInteger, ForeignKey("users.id", name="fk_mappers")) + mapped_date = Column(DateTime) + invalidator_id = Column(BigInteger, 
ForeignKey("users.id", name="fk_invalidators")) + invalidated_date = Column(DateTime) + invalidation_history_id = Column( + Integer, ForeignKey("task_history.id", name="fk_invalidation_history") ) - validated_date = db.Column(db.DateTime) - updated_date = db.Column(db.DateTime, default=timestamp) + validator_id = Column(BigInteger, ForeignKey("users.id", name="fk_validators")) + validated_date = Column(DateTime) + updated_date = Column(DateTime, default=timestamp) __table_args__ = ( - db.ForeignKeyConstraint( + ForeignKeyConstraint( [task_id, project_id], ["tasks.id", "tasks.project_id"], name="fk_tasks" ), - db.Index("idx_task_validation_history_composite", "task_id", "project_id"), - db.Index( + Index("idx_task_validation_history_composite", "task_id", "project_id"), + Index( "idx_task_validation_validator_status_composite", "invalidator_id", "is_closed", ), - db.Index( - "idx_task_validation_mapper_status_composite", "mapper_id", "is_closed" - ), + Index("idx_task_validation_mapper_status_composite", "mapper_id", "is_closed"), {}, ) @@ -91,13 +104,8 @@ def __init__(self, project_id, task_id): self.task_id = task_id self.is_closed = False - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() - @staticmethod - def get_open_for_task(project_id, task_id, local_session=None): + async def get_open_for_task(project_id: int, task_id: int, db: Database): """ Retrieve the open TaskInvalidationHistory entry for the given project and task. @@ -120,135 +128,159 @@ def get_open_for_task(project_id, task_id, local_session=None): None: This method handles the MultipleResultsFound exception internally. """ try: - if local_session: - return ( - local_session.query(TaskInvalidationHistory) - .filter_by(task_id=task_id, project_id=project_id, is_closed=False) - .one_or_none() - ) - return TaskInvalidationHistory.query.filter_by( - task_id=task_id, project_id=project_id, is_closed=False - ).one_or_none() + # Fetch open entry + query = """ + SELECT * FROM task_invalidation_history + WHERE task_id = :task_id + AND project_id = :project_id + AND is_closed = FALSE + """ + entry = await db.fetch_one( + query=query, values={"task_id": task_id, "project_id": project_id} + ) + return entry except MultipleResultsFound: - TaskInvalidationHistory.close_duplicate_invalidation_history_rows( - project_id, task_id, local_session + await TaskInvalidationHistory.close_duplicate_invalidation_history_rows( + project_id, task_id, db ) - - return TaskInvalidationHistory.get_open_for_task( - project_id, task_id, local_session + return await TaskInvalidationHistory.get_open_for_task( + project_id, task_id, db ) @staticmethod - def close_duplicate_invalidation_history_rows( - project_id: int, task_id: int, local_session=None + async def close_duplicate_invalidation_history_rows( + project_id: int, task_id: int, db: Database ): """ Closes duplicate TaskInvalidationHistory entries except for the latest one for the given project and task. + """ - Args: - project_id (int): The ID of the project. - task_id (int): The ID of the task. - local_session (Session, optional): The SQLAlchemy session to use for the query. - If not provided, a default session is used. 
+ # Fetch the oldest duplicate + query = """ + SELECT id FROM task_invalidation_history + WHERE task_id = :task_id + AND project_id = :project_id + AND is_closed = FALSE + ORDER BY id ASC + LIMIT 1 """ - if local_session: - oldest_dupe = ( - local_session.query(TaskInvalidationHistory) - .filter_by(task_id=task_id, project_id=project_id, is_closed=False) - .order_by(TaskInvalidationHistory.id.asc()) - .first() - ) - else: - oldest_dupe = ( - TaskInvalidationHistory.query.filter_by( - task_id=task_id, project_id=project_id, is_closed=False - ) - .order_by(TaskInvalidationHistory.id.asc()) - .first() - ) + oldest_dupe = await db.fetch_one( + query=query, values={"task_id": task_id, "project_id": project_id} + ) if oldest_dupe: - oldest_dupe.is_closed = True - if local_session: - local_session.commit() - else: - db.session.commit() + update_query = """ + UPDATE task_invalidation_history + SET is_closed = TRUE + WHERE id = :id + """ + await db.execute(query=update_query, values={"id": oldest_dupe["id"]}) @staticmethod - def close_all_for_task(project_id, task_id, local_session=None): - if local_session: - return ( - local_session.query(TaskInvalidationHistory) - .filter_by(task_id=task_id, project_id=project_id, is_closed=False) - .update({"is_closed": True}) - ) - TaskInvalidationHistory.query.filter_by( - task_id=task_id, project_id=project_id, is_closed=False - ).update({"is_closed": True}) + async def close_all_for_task(project_id: int, task_id: int, db: Database): + """ + Closes all open invalidation history entries for the specified task. + """ + update_query = """ + UPDATE task_invalidation_history + SET is_closed = TRUE, updated_date = :updated_date + WHERE project_id = :project_id AND task_id = :task_id AND is_closed = FALSE + """ + values = { + "project_id": project_id, + "task_id": task_id, + "updated_date": datetime.datetime.utcnow(), + } + + await db.execute(query=update_query, values=values) @staticmethod - def record_invalidation( - project_id, task_id, invalidator_id, history, local_session=None + async def record_invalidation( + project_id: int, task_id: int, invalidator_id: int, history, db: Database ): # Invalidation always kicks off a new entry for a task, so close any existing ones. 
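Illustrative aside (outside the patch): record_invalidation and record_validation together form the invalidation bookkeeping — the first closes any open history row and inserts a fresh one, the second closes that row once the task passes validation. A hedged usage sketch with a hypothetical wrapper function, assuming `db` is a connected databases.Database and the two history arguments are the TaskHistory rows created for the respective state changes:

async def invalidate_then_validate(db, project_id, task_id, invalidator_id, validator_id,
                                   invalidation_history, validation_history):
    # Validator invalidates the task: any open entry is closed and a new one is inserted.
    await TaskInvalidationHistory.record_invalidation(
        project_id, task_id, invalidator_id, invalidation_history, db
    )
    # Later, after re-mapping, a successful validation closes that open entry again.
    await TaskInvalidationHistory.record_validation(
        project_id, task_id, validator_id, validation_history, db
    )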
- TaskInvalidationHistory.close_all_for_task( - project_id, task_id, local_session=local_session - ) + await TaskInvalidationHistory.close_all_for_task(project_id, task_id, db) - last_mapped = TaskHistory.get_last_mapped_action(project_id, task_id) - if last_mapped is None: + last_mapped = await TaskHistory.get_last_mapped_action(project_id, task_id, db) + if not last_mapped: return - entry = TaskInvalidationHistory(project_id, task_id) - entry.invalidation_history_id = history.id - entry.mapper_id = last_mapped.user_id - entry.mapped_date = last_mapped.action_date - entry.invalidator_id = invalidator_id - entry.invalidated_date = history.action_date - entry.updated_date = timestamp() - if local_session: - local_session.add(entry) - else: - db.session.add(entry) + # Insert a new TaskInvalidationHistory entry + insert_query = """ + INSERT INTO task_invalidation_history + (project_id, task_id, invalidation_history_id, mapper_id, mapped_date, invalidator_id, invalidated_date, updated_date) + VALUES (:project_id, :task_id, :invalidation_history_id, :mapper_id, :mapped_date, :invalidator_id, :invalidated_date, :updated_date) + """ + values = { + "project_id": project_id, + "task_id": task_id, + "invalidation_history_id": history.id, + "mapper_id": last_mapped["user_id"], + "mapped_date": last_mapped["action_date"], + "invalidator_id": invalidator_id, + "invalidated_date": history.action_date, + "updated_date": datetime.datetime.utcnow(), + } + + await db.execute(query=insert_query, values=values) @staticmethod - def record_validation( - project_id, task_id, validator_id, history, local_session=None + async def record_validation( + project_id: int, + task_id: int, + validator_id: int, + history: TaskHistoryDTO, + db: Database, ): - entry = TaskInvalidationHistory.get_open_for_task( - project_id, task_id, local_session=local_session - ) + entry = await TaskInvalidationHistory.get_open_for_task(project_id, task_id, db) # If no open invalidation to update, then nothing to do if entry is None: return - last_mapped = TaskHistory.get_last_mapped_action(project_id, task_id) - entry.mapper_id = last_mapped.user_id - entry.mapped_date = last_mapped.action_date - entry.validator_id = validator_id - entry.validated_date = history.action_date - entry.is_closed = True - entry.updated_date = timestamp() + last_mapped = await TaskHistory.get_last_mapped_action(project_id, task_id, db) + + # Update entry with validation details + update_query = """ + UPDATE task_invalidation_history + SET mapper_id = :mapper_id, + mapped_date = :mapped_date, + validator_id = :validator_id, + validated_date = :validated_date, + is_closed = TRUE, + updated_date = :updated_date + WHERE id = :entry_id + """ + await db.execute( + query=update_query, + values={ + "mapper_id": last_mapped["user_id"], + "mapped_date": last_mapped["action_date"], + "validator_id": validator_id, + "validated_date": history.action_date, + "updated_date": timestamp(), + "entry_id": entry["id"], + }, + ) -class TaskMappingIssue(db.Model): +class TaskMappingIssue(Base): """Describes an issue (along with an occurrence count) with a task mapping that contributed to invalidation of the task""" __tablename__ = "task_mapping_issues" - id = db.Column(db.Integer, primary_key=True) - task_history_id = db.Column( - db.Integer, db.ForeignKey("task_history.id"), nullable=False, index=True + id = Column(Integer, primary_key=True) + task_history_id = Column( + Integer, ForeignKey("task_history.id"), nullable=False, index=True ) - issue = db.Column(db.String, 
nullable=False) - mapping_issue_category_id = db.Column( - db.Integer, - db.ForeignKey("mapping_issue_categories.id", name="fk_issue_category"), + issue = Column(String, nullable=False) + mapping_issue_category_id = Column( + Integer, + ForeignKey("mapping_issue_categories.id", name="fk_issue_category"), nullable=False, ) - count = db.Column(db.Integer, nullable=False) + count = Column(Integer, nullable=False) def __init__(self, issue, count, mapping_issue_category_id, task_history_id=None): self.task_history_id = task_history_id @@ -256,11 +288,6 @@ def __init__(self, issue, count, mapping_issue_category_id, task_history_id=None self.count = count self.mapping_issue_category_id = mapping_issue_category_id - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() - def as_dto(self): issue_dto = TaskMappingIssueDTO() issue_dto.category_id = self.mapping_issue_category_id @@ -272,36 +299,36 @@ def __repr__(self): return "{0}: {1}".format(self.issue, self.count) -class TaskHistory(db.Model): +class TaskHistory(Base): """Describes the history associated with a task""" __tablename__ = "task_history" - id = db.Column(db.Integer, primary_key=True) - project_id = db.Column(db.Integer, db.ForeignKey("projects.id"), index=True) - task_id = db.Column(db.Integer, nullable=False) - action = db.Column(db.String, nullable=False) - action_text = db.Column(db.String) - action_date = db.Column(db.DateTime, nullable=False, default=timestamp) - user_id = db.Column( - db.BigInteger, - db.ForeignKey("users.id", name="fk_users"), + id = Column(Integer, primary_key=True) + project_id = Column(Integer, ForeignKey("projects.id"), index=True) + task_id = Column(Integer, nullable=False) + action = Column(String, nullable=False) + action_text = Column(String) + action_date = Column(DateTime, nullable=False, default=timestamp) + user_id = Column( + BigInteger, + ForeignKey("users.id", name="fk_users"), index=True, nullable=False, ) - invalidation_history = db.relationship( + invalidation_history = relationship( TaskInvalidationHistory, lazy="dynamic", cascade="all" ) - actioned_by = db.relationship(User) - task_mapping_issues = db.relationship(TaskMappingIssue, cascade="all") + actioned_by = relationship(User) + task_mapping_issues = relationship(TaskMappingIssue, cascade="all") __table_args__ = ( - db.ForeignKeyConstraint( + ForeignKeyConstraint( [task_id, project_id], ["tasks.id", "tasks.project_id"], name="fk_tasks" ), - db.Index("idx_task_history_composite", "task_id", "project_id"), - db.Index("idx_task_history_project_id_user_id", "user_id", "project_id"), + Index("idx_task_history_composite", "task_id", "project_id"), + Index("idx_task_history_project_id_user_id", "user_id", "project_id"), {}, ) @@ -310,173 +337,181 @@ def __init__(self, task_id, project_id, user_id): self.project_id = project_id self.user_id = user_id - def set_task_extend_action(self, task_action: TaskAction): + def set_task_extend_action(task_action: TaskAction) -> str: if task_action not in [ TaskAction.EXTENDED_FOR_MAPPING, TaskAction.EXTENDED_FOR_VALIDATION, ]: raise ValueError("Invalid Action") + return task_action.name, None - self.action = task_action.name - - def set_task_locked_action(self, task_action: TaskAction): + def set_task_locked_action(task_action: TaskAction) -> str: if task_action not in [ TaskAction.LOCKED_FOR_MAPPING, TaskAction.LOCKED_FOR_VALIDATION, ]: raise ValueError("Invalid Action") + return task_action.name, None - self.action = task_action.name + def 
set_comment_action(comment: str) -> str: + clean_comment = bleach.clean(comment) # Ensure no harmful scripts or tags + return TaskAction.COMMENT.name, clean_comment - def set_comment_action(self, comment): - self.action = TaskAction.COMMENT.name - clean_comment = bleach.clean( - comment - ) # Bleach input to ensure no nefarious script tags etc - self.action_text = clean_comment + def set_state_change_action(new_state: TaskStatus) -> str: + return TaskAction.STATE_CHANGE.name, new_state.name - def set_state_change_action(self, new_state): - self.action = TaskAction.STATE_CHANGE.name - self.action_text = new_state.name + def set_auto_unlock_action(task_action: TaskAction) -> str: + return task_action.name, None - def set_auto_unlock_action(self, task_action: TaskAction): - self.action = task_action.name - - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() - - @staticmethod - def update_task_locked_with_duration( - task_id: int, project_id: int, lock_action, user_id: int, local_session=None + async def update_task_locked_with_duration( + task_id: int, + project_id: int, + lock_action: TaskAction, + user_id: int, + db: Database, ): """ - Calculates the duration a task was locked for and sets it on the history record + Calculates the duration a task was locked for and sets it on the history record. :param task_id: Task in scope :param project_id: Project ID in scope :param lock_action: The lock action, either Mapping or Validation - :param user_id: Logged in user updating the task - :return: + :param user_id: Logged-in user updating the task. """ try: - if local_session: - last_locked = ( - local_session.query(TaskHistory) - .filter_by( - task_id=task_id, - project_id=project_id, - action=lock_action.name, - action_text=None, - user_id=user_id, - ) - .one() - ) - else: - last_locked = TaskHistory.query.filter_by( - task_id=task_id, - project_id=project_id, - action=lock_action.name, - action_text=None, - user_id=user_id, - ).one() - except NoResultFound: - # We suspect there's some kind or race condition that is occasionally deleting history records - # prior to user unlocking task. Most likely stemming from auto-unlock feature. However, given that - # we're trying to update a row that doesn't exist, it's better to return without doing anything - # rather than showing the user an error that they can't fix - return + # Fetch the last locked task history entry with raw SQL + query = """ + SELECT id, action_date + FROM task_history + WHERE task_id = :task_id + AND project_id = :project_id + AND action = :action + AND action_text IS NULL + AND user_id = :user_id + ORDER BY action_date DESC + LIMIT 1 + """ + values = { + "task_id": task_id, + "project_id": project_id, + "action": lock_action.name, + "user_id": user_id, + } + + last_locked = await db.fetch_one(query=query, values=values) + + if last_locked is None: + # We suspect there's some kind or race condition that is occasionally deleting history records + # prior to user unlocking task. Most likely stemming from auto-unlock feature. However, given that + # we're trying to update a row that doesn't exist, it's better to return without doing anything + # rather than showing the user an error that they can't fix. + # No record found, possibly a race condition or auto-unlock scenario. 
+ return + + # Calculate the duration the task was locked for + duration_task_locked = ( + datetime.datetime.utcnow() - last_locked["action_date"] + ) + + # Cast duration to ISO format + action_text = ( + (datetime.datetime.min + duration_task_locked).time().isoformat() + ) + + # Update the task history with the duration + update_query = """ + UPDATE task_history + SET action_text = :action_text + WHERE id = :id + """ + update_values = { + "action_text": action_text, + "id": last_locked["id"], + } + await db.execute(query=update_query, values=update_values) + except MultipleResultsFound: # Again race conditions may mean we have multiple rows within the Task History. Here we attempt to # remove the oldest duplicate rows, and update the newest on the basis that this was the last action # the user was attempting to make. - TaskHistory.remove_duplicate_task_history_rows( - task_id, project_id, lock_action, user_id + # Handle race conditions by removing duplicates. + await TaskHistory.remove_duplicate_task_history_rows( + task_id, project_id, lock_action, user_id, db ) - # Now duplicate is removed, we recursively call ourself to update the duration on the remaining row - TaskHistory.update_task_locked_with_duration( - task_id, project_id, lock_action, user_id + # Recursively call the method to update the remaining row + await TaskHistory.update_task_locked_with_duration( + task_id, project_id, lock_action, user_id, db ) - return - - duration_task_locked = datetime.datetime.utcnow() - last_locked.action_date - # Cast duration to isoformat for later transmission via api - last_locked.action_text = ( - (datetime.datetime.min + duration_task_locked).time().isoformat() - ) - if local_session: - local_session.commit() - else: - db.session.commit() - @staticmethod - def remove_duplicate_task_history_rows( - task_id: int, project_id: int, lock_action: TaskStatus, user_id: int + async def remove_duplicate_task_history_rows( + task_id: int, + project_id: int, + lock_action: TaskAction, + user_id: int, + db: Database, ): - """Method used in rare cases where we have duplicate task history records for a given action by a user - This method will remove the oldest duplicate record, on the basis that the newest record was the - last action the user was attempting to perform """ - dupe = ( - TaskHistory.query.filter( - TaskHistory.project_id == project_id, - TaskHistory.task_id == task_id, - TaskHistory.action == lock_action.name, - TaskHistory.user_id == user_id, + Removes duplicate task history rows for the specified task, project, and action. + Keeps the most recent entry and deletes the older ones. 
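+        Used in the rare race-condition case where duplicate lock records exist for the same user action.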
+        """
+        # Keep the newest row (skipped via OFFSET 1) and delete the older duplicates.
+        duplicate_query = """
+            DELETE FROM task_history
+            WHERE id IN (
+                SELECT id
+                FROM task_history
+                WHERE task_id = :task_id
+                AND project_id = :project_id
+                AND action = :action
+                AND user_id = :user_id
+                ORDER BY action_date DESC
+                OFFSET 1
             )
-            .order_by(TaskHistory.id.asc())
-            .first()
-        )
+        """
+        values = {
+            "task_id": task_id,
+            "project_id": project_id,
+            "action": lock_action.name,
+            "user_id": user_id,
+        }

-        dupe.delete()
+        await db.execute(query=duplicate_query, values=values)

     @staticmethod
-    def update_expired_and_locked_actions(
-        project_id: int, task_id: int, expiry_date: datetime, action_text: str
+    async def update_expired_and_locked_actions(
+        task_id: int,
+        project_id: int,
+        expiry_date: datetime,
+        action_text: str,
+        db: Database,
     ):
+        """Update expired actions with an auto-unlock state."""
+        query = """
+            UPDATE task_history
+            SET action = CASE
+                    WHEN action IN ('LOCKED_FOR_MAPPING', 'EXTENDED_FOR_MAPPING') THEN 'AUTO_UNLOCKED_FOR_MAPPING'
+                    WHEN action IN ('LOCKED_FOR_VALIDATION', 'EXTENDED_FOR_VALIDATION') THEN 'AUTO_UNLOCKED_FOR_VALIDATION'
+                END,
+                action_text = :action_text
+            WHERE task_id = :task_id
+            AND project_id = :project_id
+            AND action_text IS NULL
+            AND action IN ('LOCKED_FOR_MAPPING', 'LOCKED_FOR_VALIDATION', 'EXTENDED_FOR_MAPPING', 'EXTENDED_FOR_VALIDATION')
+            AND action_date <= :expiry_date
         """
-        Sets auto unlock state to all not finished actions, that are older then the expiry date.
-        Action is considered as a not finished, when it is in locked state and doesn't have action text
-        :param project_id: Project ID in scope
-        :param task_id: Task in scope
-        :param expiry_date: Action created before this date is treated as expired
-        :param action_text: Text which will be set for all changed actions
-        :return:
-        """
-        all_expired = TaskHistory.query.filter(
-            TaskHistory.task_id == task_id,
-            TaskHistory.project_id == project_id,
-            TaskHistory.action_text.is_(None),
-            TaskHistory.action.in_(
-                [
-                    TaskAction.LOCKED_FOR_VALIDATION.name,
-                    TaskAction.LOCKED_FOR_MAPPING.name,
-                    TaskAction.EXTENDED_FOR_MAPPING.name,
-                    TaskAction.EXTENDED_FOR_VALIDATION.name,
-                ]
-            ),
-            TaskHistory.action_date <= expiry_date,
-        ).all()
-
-        for task_history in all_expired:
-            unlock_action = (
-                TaskAction.AUTO_UNLOCKED_FOR_MAPPING
-                if task_history.action in ["LOCKED_FOR_MAPPING", "EXTENDED_FOR_MAPPING"]
-                else TaskAction.AUTO_UNLOCKED_FOR_VALIDATION
-            )
-
-            task_history.set_auto_unlock_action(unlock_action)
-            task_history.action_text = action_text
-
-        db.session.commit()
+        values = {
+            "action_text": action_text,
+            "task_id": task_id,
+            "project_id": project_id,
+            "expiry_date": expiry_date,
+        }
+        await db.execute(query=query, values=values)

     @staticmethod
     def get_all_comments(project_id: int) -> ProjectCommentsDTO:
         """Gets all comments for the supplied project_id"""
         comments = (
-            db.session.query(
+            session.query(
                 TaskHistory.task_id,
                 TaskHistory.action_date,
                 TaskHistory.action_text,
@@ -502,176 +537,186 @@ def get_all_comments(project_id: int) -> ProjectCommentsDTO:
         return comments_dto

     @staticmethod
-    def get_last_status(project_id: int, task_id: int, for_undo: bool = False):
-        """Get the status the task was set to the last time the task had a STATUS_CHANGE"""
-        result = (
-            db.session.query(TaskHistory.action_text)
-            .filter(
-                TaskHistory.project_id == project_id,
-                TaskHistory.task_id == task_id,
-                TaskHistory.action == TaskAction.STATE_CHANGE.name,
-            )
-            .order_by(TaskHistory.action_date.desc())
-            .all()
+    async def get_last_status(
+        project_id: int, task_id: int,
db: Database, for_undo: bool = False + ) -> TaskStatus: + """Get the status the task was set to the last time the task had a STATUS_CHANGE.""" + + query = """ + SELECT action_text + FROM task_history + WHERE project_id = :project_id + AND task_id = :task_id + AND action = 'STATE_CHANGE' + ORDER BY action_date DESC + """ + result = await db.fetch_all( + query, values={"project_id": project_id, "task_id": task_id} ) + # If no results, return READY status if not result: - return TaskStatus.READY # No result so default to ready status + return TaskStatus.READY + # If we only have one result and for_undo is True, return READY if len(result) == 1 and for_undo: - # We're looking for the previous status, however, there isn't any so we'll return Ready return TaskStatus.READY - if for_undo and result[0][0] in [ + # If the last status was MAPPED or BADIMAGERY and for_undo is True, return READY + if for_undo and result[0]["action_text"] in [ TaskStatus.MAPPED.name, TaskStatus.BADIMAGERY.name, ]: - # We need to return a READY when last status of the task is badimagery or mapped. return TaskStatus.READY + # If for_undo is True, return the second last status if for_undo: - # Return the second last status which was status the task was previously set to - return TaskStatus[result[1][0]] - else: - return TaskStatus[result[0][0]] + return TaskStatus[result[1]["action_text"]] + # Otherwise, return the last status + return TaskStatus[result[0]["action_text"]] @staticmethod - def get_last_action(project_id: int, task_id: int): + async def get_last_action(project_id: int, task_id: int, db: Database): """Gets the most recent task history record for the task""" - return ( - TaskHistory.query.filter( - TaskHistory.project_id == project_id, TaskHistory.task_id == task_id - ) - .order_by(TaskHistory.action_date.desc()) - .first() - ) + query = """ + SELECT * FROM task_history + WHERE project_id = :project_id AND task_id = :task_id + ORDER BY action_date DESC + LIMIT 1 + """ + return await db.fetch_one(query, {"project_id": project_id, "task_id": task_id}) @staticmethod - def get_last_action_of_type( - project_id: int, task_id: int, allowed_task_actions: list + async def get_last_action_of_type( + project_id: int, task_id: int, allowed_task_actions: list, db: Database ): """Gets the most recent task history record having provided TaskAction""" - return ( - TaskHistory.query.filter( - TaskHistory.project_id == project_id, - TaskHistory.task_id == task_id, - TaskHistory.action.in_(allowed_task_actions), - ) - .order_by(TaskHistory.action_date.desc()) - .first() - ) + query = """ + SELECT id, action, action_date + FROM task_history + WHERE project_id = :project_id + AND task_id = :task_id + AND action = ANY(:allowed_actions) + ORDER BY action_date DESC + LIMIT 1 + """ + values = { + "project_id": project_id, + "task_id": task_id, + "allowed_actions": tuple(allowed_task_actions), + } + result = await db.fetch_one(query=query, values=values) + return result @staticmethod - def get_last_locked_action(project_id: int, task_id: int): + async def get_last_locked_action(project_id: int, task_id: int, db: Database): """Gets the most recent task history record with locked action for the task""" - return TaskHistory.get_last_action_of_type( + return await TaskHistory.get_last_action_of_type( project_id, task_id, [ TaskAction.LOCKED_FOR_MAPPING.name, TaskAction.LOCKED_FOR_VALIDATION.name, ], + db, ) @staticmethod - def get_last_locked_or_auto_unlocked_action(project_id: int, task_id: int): - """Gets the most recent task history 
record with locked or auto unlocked action for the task""" - return TaskHistory.get_last_action_of_type( - project_id, - task_id, - [ - TaskAction.LOCKED_FOR_MAPPING.name, - TaskAction.LOCKED_FOR_VALIDATION.name, - TaskAction.AUTO_UNLOCKED_FOR_MAPPING.name, - TaskAction.AUTO_UNLOCKED_FOR_VALIDATION.name, - ], + async def get_last_locked_or_auto_unlocked_action( + task_id: int, project_id: int, db: Database + ): + """Fetch the last locked or auto-unlocked action for a task.""" + query = """ + SELECT action + FROM task_history + WHERE task_id = :task_id + AND project_id = :project_id + AND action IN ( + 'LOCKED_FOR_MAPPING', + 'LOCKED_FOR_VALIDATION', + 'AUTO_UNLOCKED_FOR_MAPPING', + 'AUTO_UNLOCKED_FOR_VALIDATION' + ) + ORDER BY action_date DESC + LIMIT 1 + """ + row = await db.fetch_one( + query=query, values={"task_id": task_id, "project_id": project_id} ) + return row["action"] if row else None - def get_last_mapped_action(project_id: int, task_id: int): - """Gets the most recent mapped action, if any, in the task history""" - return ( - db.session.query(TaskHistory) - .filter( - TaskHistory.project_id == project_id, - TaskHistory.task_id == task_id, - TaskHistory.action == TaskAction.STATE_CHANGE.name, - TaskHistory.action_text.in_( - [TaskStatus.BADIMAGERY.name, TaskStatus.MAPPED.name] - ), - ) - .order_by(TaskHistory.action_date.desc()) - .first() + @staticmethod + async def get_last_mapped_action(project_id: int, task_id: int, db: Database): + """ + Gets the most recent mapped action, if any, in the task history. + """ + + query = """ + SELECT * FROM task_history + WHERE project_id = :project_id + AND task_id = :task_id + AND action = 'STATE_CHANGE' + AND action_text IN ('BADIMAGERY', 'MAPPED') + ORDER BY action_date DESC + LIMIT 1 + """ + last_mapped = await db.fetch_one( + query=query, values={"project_id": project_id, "task_id": task_id} ) + return last_mapped -class Task(db.Model): +class Task(Base): """Describes an individual mapping Task""" __tablename__ = "tasks" # Table has composite PK on (id and project_id) - id = db.Column(db.Integer, primary_key=True) - project_id = db.Column( - db.Integer, db.ForeignKey("projects.id"), index=True, primary_key=True + id = Column(Integer, primary_key=True) + project_id = Column( + Integer, ForeignKey("projects.id"), index=True, primary_key=True ) - x = db.Column(db.Integer) - y = db.Column(db.Integer) - zoom = db.Column(db.Integer) - extra_properties = db.Column(db.Unicode) + x = Column(Integer) + y = Column(Integer) + zoom = Column(Integer) + extra_properties = Column(Unicode) # Tasks need to be split differently if created from an arbitrary grid or were clipped to the edge of the AOI - is_square = db.Column(db.Boolean, default=True) - geometry = db.Column(Geometry("MULTIPOLYGON", srid=4326)) - task_status = db.Column(db.Integer, default=TaskStatus.READY.value) - locked_by = db.Column( - db.BigInteger, db.ForeignKey("users.id", name="fk_users_locked"), index=True + is_square = Column(Boolean, default=True) + geometry = Column(Geometry("MULTIPOLYGON", srid=4326)) + task_status = Column(Integer, default=TaskStatus.READY.value) + locked_by = Column( + BigInteger, ForeignKey("users.id", name="fk_users_locked"), index=True ) - mapped_by = db.Column( - db.BigInteger, db.ForeignKey("users.id", name="fk_users_mapper"), index=True + mapped_by = Column( + BigInteger, ForeignKey("users.id", name="fk_users_mapper"), index=True ) - validated_by = db.Column( - db.BigInteger, db.ForeignKey("users.id", name="fk_users_validator"), index=True + validated_by 
= Column( + BigInteger, ForeignKey("users.id", name="fk_users_validator"), index=True ) # Mapped objects - task_history = db.relationship( + task_history = relationship( TaskHistory, cascade="all", order_by=desc(TaskHistory.action_date) ) - task_annotations = db.relationship(TaskAnnotation, cascade="all") - lock_holder = db.relationship(User, foreign_keys=[locked_by]) - mapper = db.relationship(User, foreign_keys=[mapped_by]) - - def create(self): - """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - - def update(self, local_session=None): - """Updates the DB with the current state of the Task""" - if local_session: - local_session.commit() - else: - db.session.commit() - - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() + task_annotations = relationship(TaskAnnotation, cascade="all") + lock_holder = relationship(User, foreign_keys=[locked_by]) + mapper = relationship(User, foreign_keys=[mapped_by]) @classmethod def from_geojson_feature(cls, task_id, task_feature): """ - Constructs and validates a task from a GeoJson feature object - :param task_id: Unique ID for the task - :param task_feature: A geojson feature object + Constructs and validates a task from a GeoJson feature object. + :param task_id: Unique ID for the task. + :param task_feature: A geojson feature object. :raises InvalidGeoJson, InvalidData """ if type(task_feature) is not geojson.Feature: - raise InvalidGeoJson("MustBeFeature- Invalid GeoJson should be a feature") + raise InvalidGeoJson("MustBeFeature - Invalid GeoJson should be a feature") task_geometry = task_feature.geometry if type(task_geometry) is not geojson.MultiPolygon: - raise InvalidGeoJson("MustBeMultiPloygon- Geometry must be a MultiPolygon") + raise InvalidGeoJson("MustBeMultiPolygon - Geometry must be a MultiPolygon") if not task_geometry.is_valid: raise InvalidGeoJson( @@ -684,6 +729,7 @@ def from_geojson_feature(cls, task_id, task_feature): task.y = task_feature.properties["y"] task.zoom = task_feature.properties["zoom"] task.is_square = task_feature.properties["isSquare"] + task.geometry = shape(task_feature.geometry).wkt except KeyError as e: raise InvalidData( f"PropertyNotFound: Expected property not found: {str(e)}" @@ -693,392 +739,710 @@ def from_geojson_feature(cls, task_id, task_feature): task.extra_properties = json.dumps( task_feature.properties["extra_properties"] ) - task.id = task_id - task_geojson = geojson.dumps(task_geometry) - task.geometry = ST_SetSRID(ST_GeomFromGeoJSON(task_geojson), 4326) - return task @staticmethod - def get(task_id: int, project_id: int, local_session=None): - """ - Gets specified task - :param task_id: task ID in scope - :param project_id: project ID in scope - :return: Task if found otherwise None - """ - # LIKELY PROBLEM AREA - if local_session: - return ( - local_session.query(Task) - .filter_by(id=task_id, project_id=project_id) - .one_or_none() - ) - return Task.query.filter_by(id=task_id, project_id=project_id).one_or_none() + async def get(task_id: int, project_id: int, db: Database) -> Optional[dict]: + """ + Gets the specified task. + :param db: The async database connection. + :param task_id: Task ID in scope. + :param project_id: Project ID in scope. + :return: A dictionary representing the Task if found, otherwise None. 
+ """ + query = """ + SELECT + id, project_id, x, y, zoom, is_square, task_status, locked_by, mapped_by, geometry + FROM + tasks + WHERE + id = :task_id AND project_id = :project_id + LIMIT 1 + """ + task = await db.fetch_one( + query, values={"task_id": task_id, "project_id": project_id} + ) + return task if task else None + + @staticmethod + async def exists(task_id: int, project_id: int, db: Database) -> bool: + """ + Checks if the specified task exists. + :param db: The async database connection. + :param task_id: Task ID in scope. + :param project_id: Project ID in scope. + :return: True if the task exists, otherwise False. + """ + query = """ + SELECT 1 + FROM tasks + WHERE id = :task_id AND project_id = :project_id + LIMIT 1 + """ + task = await db.fetch_one( + query, values={"task_id": task_id, "project_id": project_id} + ) + return task is not None @staticmethod - def get_tasks(project_id: int, task_ids: List[int]): - """Get all tasks that match supplied list""" - return Task.query.filter( - Task.project_id == project_id, Task.id.in_(task_ids) - ).all() + async def get_tasks(project_id: int, task_ids: List[int], db: Database): + """ + Get all tasks that match the supplied list of task_ids for a project. + """ + query = """ + SELECT id, geometry + FROM tasks + WHERE project_id = :project_id + AND id = ANY(:task_ids) + """ + values = {"project_id": project_id, "task_ids": task_ids} + rows = await db.fetch_all(query=query, values=values) + return rows @staticmethod - def get_all_tasks(project_id: int): - """Get all tasks for a given project""" - return Task.query.filter(Task.project_id == project_id).all() + async def get_all_tasks(project_id: int, db: Database): + """ + Get all tasks for a given project. + """ + query = """ + SELECT id, geometry + FROM tasks + WHERE project_id = :project_id + """ + values = {"project_id": project_id} + rows = await db.fetch_all(query=query, values=values) + return rows + + # @staticmethod + # def get_tasks_by_status(project_id: int, status: str): + # "Returns all tasks filtered by status in a project" + # return ( + # session.query(Task) + # .filter( + # Task.project_id == project_id, + # Task.task_status == TaskStatus[status].value, + # ) + # .all() + # ) @staticmethod - def get_tasks_by_status(project_id: int, status: str): - "Returns all tasks filtered by status in a project" - return Task.query.filter( - Task.project_id == project_id, Task.task_status == TaskStatus[status].value - ).all() + async def get_tasks_by_status(project_id: int, status: str, db: Database): + """ + Returns all tasks filtered by status in a project. + :param project_id: The ID of the project. + :param status: The status to filter tasks by. + :param db: The database connection. + :return: A list of tasks with the specified status in the given project. 
+ """ + query = """ + SELECT * + FROM tasks + WHERE project_id = :project_id + AND task_status = :task_status + """ + values = { + "project_id": project_id, + "task_status": TaskStatus[status].value, + } + tasks = await db.fetch_all(query=query, values=values) + return tasks @staticmethod - def auto_unlock_delta(): - return parse_duration(current_app.config["TASK_AUTOUNLOCK_AFTER"]) + async def auto_unlock_delta(): + return parse_duration(settings.TASK_AUTOUNLOCK_AFTER) @staticmethod - def auto_unlock_tasks(project_id: int): - """Unlock all tasks locked for longer than the auto-unlock delta""" - expiry_delta = Task.auto_unlock_delta() - lock_duration = (datetime.datetime.min + expiry_delta).time().isoformat() + async def auto_unlock_tasks(project_id: int, db: Database): + """Unlock all tasks locked for longer than the auto-unlock delta.""" + expiry_delta = await Task.auto_unlock_delta() expiry_date = datetime.datetime.utcnow() - expiry_delta - - old_tasks = ( - db.session.query(Task.id) - .filter(Task.id == TaskHistory.task_id) - .filter(Task.project_id == TaskHistory.project_id) - .filter(Task.task_status.in_([1, 3])) - .filter( - TaskHistory.action.in_( - [ - "EXTENDED_FOR_MAPPING", - "EXTENDED_FOR_VALIDATION", - "LOCKED_FOR_VALIDATION", - "LOCKED_FOR_MAPPING", - ] - ) - ) - .filter(TaskHistory.action_text.is_(None)) - .filter(Task.project_id == project_id) - .filter(TaskHistory.action_date <= str(expiry_date)) + lock_duration = ( + (datetime.datetime.min + await Task.auto_unlock_delta()).time().isoformat() ) - if old_tasks.count() == 0: - # no tasks older than the delta found, return without further processing - return + # Query for task IDs to unlock + query = """ + SELECT tasks.id + FROM tasks + JOIN task_history + ON tasks.id = task_history.task_id + AND tasks.project_id = task_history.project_id + WHERE tasks.task_status IN (1, 3) + AND task_history.action IN ( + 'EXTENDED_FOR_MAPPING', + 'EXTENDED_FOR_VALIDATION', + 'LOCKED_FOR_VALIDATION', + 'LOCKED_FOR_MAPPING' + ) + AND task_history.action_text IS NULL + AND tasks.project_id = :project_id + AND task_history.action_date <= :expiry_date + """ + old_task_ids = await db.fetch_all( + query=query, values={"project_id": project_id, "expiry_date": expiry_date} + ) + old_task_ids = [row["id"] for row in old_task_ids] + if not old_task_ids: + return # No tasks to unlock - for old_task in old_tasks: - task = Task.get(old_task[0], project_id) - task.auto_unlock_expired_tasks(expiry_date, lock_duration) + for task_id in old_task_ids: + await Task.auto_unlock_expired_tasks(task_id, project_id, expiry_date, db) - def auto_unlock_expired_tasks(self, expiry_date, lock_duration): - """Unlock all tasks locked before expiry date. Clears task lock if needed""" - TaskHistory.update_expired_and_locked_actions( - self.project_id, self.id, expiry_date, lock_duration + @staticmethod + async def auto_unlock_expired_tasks( + task_id: int, project_id: int, expiry_date: datetime, db: Database + ): + """Unlock all tasks locked before expiry date. 
Clears task lock if needed.""" + lock_duration = ( + (datetime.datetime.min + await Task.auto_unlock_delta()).time().isoformat() ) - last_action = TaskHistory.get_last_locked_or_auto_unlocked_action( - self.project_id, self.id + await TaskHistory.update_expired_and_locked_actions( + task_id, project_id, expiry_date, lock_duration, db + ) + last_action = await TaskHistory.get_last_locked_or_auto_unlocked_action( + task_id, project_id, db ) - if last_action.action in [ - "AUTO_UNLOCKED_FOR_MAPPING", - "AUTO_UNLOCKED_FOR_VALIDATION", - ]: - self.clear_lock() - def is_mappable(self): - """Determines if task in scope is in suitable state for mapping""" - if TaskStatus(self.task_status) not in [ + if last_action in ["AUTO_UNLOCKED_FOR_MAPPING", "AUTO_UNLOCKED_FOR_VALIDATION"]: + await Task.clear_lock(task_id, project_id, db) + + @staticmethod + def is_mappable(task: dict) -> bool: + """Determines if task in scope is in a suitable state for mapping.""" + if TaskStatus(task["task_status"]) not in [ TaskStatus.READY, TaskStatus.INVALIDATED, ]: return False - return True - def set_task_history( - self, action, user_id, comment=None, new_state=None, mapping_issues=None + @staticmethod + async def set_task_history( + task_id: int, + project_id: int, + user_id: int, + action: TaskAction, + db: Database, + comment: Optional[str] = None, + new_state: Optional[TaskStatus] = None, + mapping_issues: Optional[ + List[Dict[str, Any]] + ] = None, # Updated to accept a list of dictionaries ): - """ - Sets the task history for the action that the user has just performed - :param task: Task in scope - :param user_id: ID of user performing the action - :param action: Action the user has performed - :param comment: Comment user has added - :param new_state: New state of the task - :param mapping_issues: Identified issues leading to invalidation - """ - history = TaskHistory(self.id, self.project_id, user_id) + """Sets the task history for the action that the user has just performed.""" + # Determine action and action_text based on the task action if action in [TaskAction.LOCKED_FOR_MAPPING, TaskAction.LOCKED_FOR_VALIDATION]: - history.set_task_locked_action(action) + action_name, action_text = TaskHistory.set_task_locked_action(action) elif action in [ TaskAction.EXTENDED_FOR_MAPPING, TaskAction.EXTENDED_FOR_VALIDATION, ]: - history.set_task_extend_action(action) + action_name, action_text = TaskHistory.set_task_extend_action(action) elif action == TaskAction.COMMENT: - history.set_comment_action(comment) - elif action == TaskAction.STATE_CHANGE: - history.set_state_change_action(new_state) + action_name, action_text = TaskHistory.set_comment_action(comment) + elif action == TaskAction.STATE_CHANGE and new_state: + action_name, action_text = TaskHistory.set_state_change_action(new_state) elif action in [ TaskAction.AUTO_UNLOCKED_FOR_MAPPING, TaskAction.AUTO_UNLOCKED_FOR_VALIDATION, ]: - history.set_auto_unlock_action(action) + action_name, action_text = TaskHistory.set_auto_unlock_action(action) + else: + raise ValueError("Invalid Action") + + # Insert the task history into the task_history table + query = """ + INSERT INTO task_history (task_id, user_id, project_id, action, action_text, action_date) + VALUES (:task_id, :user_id, :project_id, :action, :action_text, :action_date) + RETURNING id, action, action_text, action_date + """ + values = { + "task_id": task_id, + "user_id": user_id, + "project_id": project_id, + "action": action_name, + "action_text": action_text, + "action_date": timestamp(), + } + 
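+        # fetch_one on an INSERT ... RETURNING statement returns the newly created
+        # history row, which is reused below to attach any task_mapping_issues to it.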
task_history = await db.fetch_one(query=query, values=values) + + # TODO Verify this. + # Insert any mapping issues into the task_mapping_issues table, building the query dynamically + if mapping_issues: + for issue in mapping_issues: + fields = {"task_history_id": task_history["id"]} + placeholders = [":task_history_id"] + + if "issue" in issue: + fields["issue"] = issue["issue"] + placeholders.append(":issue") + + if "mapping_issue_category_id" in issue: + fields["mapping_issue_category_id"] = issue[ + "mapping_issue_category_id" + ] + placeholders.append(":mapping_issue_category_id") + + if "count" in issue: + fields["count"] = issue["count"] + placeholders.append(":count") - if mapping_issues is not None: - history.task_mapping_issues = mapping_issues + columns = ", ".join(fields.keys()) + values_placeholders = ", ".join(placeholders) - self.task_history.append(history) - return history + mapping_issue_query = f""" + INSERT INTO task_mapping_issues ({columns}) + VALUES ({values_placeholders}) + """ - def lock_task_for_mapping(self, user_id: int): - self.set_task_history(TaskAction.LOCKED_FOR_MAPPING, user_id) - self.task_status = TaskStatus.LOCKED_FOR_MAPPING.value - self.locked_by = user_id - self.update() + await db.execute(query=mapping_issue_query, values=fields) - def lock_task_for_validating(self, user_id: int): - self.set_task_history(TaskAction.LOCKED_FOR_VALIDATION, user_id) - self.task_status = TaskStatus.LOCKED_FOR_VALIDATION.value - self.locked_by = user_id - self.update() + return task_history - def reset_task(self, user_id: int): - expiry_delta = Task.auto_unlock_delta() + @staticmethod + async def lock_task_for_mapping( + task_id: int, project_id: int, user_id: int, db: Database + ): + """Locks a task for mapping by a user.""" + # Insert a task history record for the action + await Task.set_task_history( + task_id, project_id, user_id, TaskAction.LOCKED_FOR_MAPPING, db + ) + + # Update the task's status and set it as locked by the user for the specific project_id + query = """ + UPDATE tasks + SET task_status = :task_status, locked_by = :user_id + WHERE id = :task_id AND project_id = :project_id + """ + values = { + "task_status": TaskStatus.LOCKED_FOR_MAPPING.value, + "user_id": user_id, + "task_id": task_id, + "project_id": project_id, + } + await db.execute(query=query, values=values) + + @staticmethod + async def lock_task_for_validating( + task_id: int, project_id: int, user_id: int, db: Database + ): + """Lock the task for validation.""" + # Insert a task history record for the action + await Task.set_task_history( + task_id, project_id, user_id, TaskAction.LOCKED_FOR_VALIDATION, db + ) + query = """ + UPDATE tasks + SET task_status = :status, locked_by = :user_id + WHERE id = :task_id AND project_id = :project_id + """ + values = { + "status": TaskStatus.LOCKED_FOR_VALIDATION.value, + "user_id": user_id, + "task_id": task_id, + "project_id": project_id, + } + await db.execute(query=query, values=values) + + @staticmethod + async def reset_task(task_id: int, project_id: int, user_id: int, db: Database): + """Resets the task with the provided task_id and updates its status""" + + # Fetch the auto-unlock duration + expiry_delta = await Task.auto_unlock_delta() lock_duration = (datetime.datetime.min + expiry_delta).time().isoformat() - if TaskStatus(self.task_status) in [ + + query = """ + SELECT task_status, mapped_by, validated_by, locked_by + FROM tasks + WHERE id = :task_id AND project_id = :project_id + """ + task = await db.fetch_one( + query=query, 
values={"task_id": task_id, "project_id": project_id}
+        )
+
+        if TaskStatus(task["task_status"]) in [
             TaskStatus.LOCKED_FOR_MAPPING,
             TaskStatus.LOCKED_FOR_VALIDATION,
         ]:
-            self.record_auto_unlock(lock_duration)
-
-        self.set_task_history(TaskAction.STATE_CHANGE, user_id, None, TaskStatus.READY)
-        self.mapped_by = None
-        self.validated_by = None
-        self.locked_by = None
-        self.task_status = TaskStatus.READY.value
-        self.update()
-
-    def clear_task_lock(self):
-        """
-        Unlocks task in scope in the database. Clears the lock as though it never happened.
-        No history of the unlock is recorded.
-        :return:
-        """
-        # clear the lock action for the task in the task history
-        last_action = TaskHistory.get_last_locked_action(self.project_id, self.id)
-        last_action.delete()
-
-        # Set locked_by to null and status to last status on task
-        self.clear_lock()
-
-    def record_auto_unlock(self, lock_duration):
-        locked_user = self.locked_by
-        last_action = TaskHistory.get_last_locked_action(self.project_id, self.id)
-        next_action = (
-            TaskAction.AUTO_UNLOCKED_FOR_MAPPING
-            if last_action.action == "LOCKED_FOR_MAPPING"
-            else TaskAction.AUTO_UNLOCKED_FOR_VALIDATION
+            await Task.record_auto_unlock(task_id, project_id, lock_duration, db)
+
+        update_task_query = """
+            UPDATE tasks
+            SET task_status = :ready_status,
+                mapped_by = NULL,
+                validated_by = NULL,
+                locked_by = NULL
+            WHERE id = :task_id AND project_id = :project_id
+        """
+        await db.execute(
+            query=update_task_query,
+            values={
+                "task_id": task_id,
+                "ready_status": TaskStatus.READY.value,
+                "project_id": project_id,
+            },
+        )
+
+        # Log the state change in the task history against the same project
+        await Task.set_task_history(
+            task_id=task_id,
+            project_id=project_id,
+            user_id=user_id,
+            action=TaskAction.STATE_CHANGE,
+            db=db,
+            new_state=TaskStatus.READY,
+        )
+
+    @staticmethod
+    async def clear_task_lock(task_id: int, project_id: int, db: Database):
+        """Unlocks task in scope, clears the lock as though it never happened."""
+
+        # Get the last locked action and delete it from the task history
+        last_action = await TaskHistory.get_last_locked_action(project_id, task_id, db)
+        if last_action:
+            delete_action_query = """
+                DELETE FROM task_history
+                WHERE id = :history_id
+            """
+            await db.execute(
+                query=delete_action_query, values={"history_id": last_action["id"]}
+            )
+
+        # Clear the lock from the task itself
+        await Task.clear_lock(task_id=task_id, project_id=project_id, db=db)
+
+    @staticmethod
+    async def record_auto_unlock(
+        task_id: int, project_id: int, lock_duration: str, db: Database
+    ):
+        """Automatically unlocks the task and records the auto-unlock action in task history"""

+        # Fetch the locked user and last locked action for the task
+        locked_user_query = """
+            SELECT locked_by
+            FROM tasks
+            WHERE id = :task_id AND project_id = :project_id
+        """
+        locked_user = await db.fetch_one(
+            query=locked_user_query,
+            values={"task_id": task_id, "project_id": project_id},
         )
-        self.clear_task_lock()
+        last_action = await TaskHistory.get_last_locked_action(project_id, task_id, db)
+
+        if last_action and last_action["action"] == "LOCKED_FOR_MAPPING":
+            next_action = TaskAction.AUTO_UNLOCKED_FOR_MAPPING
+        else:
+            next_action = TaskAction.AUTO_UNLOCKED_FOR_VALIDATION
+
+        # Clear the task lock (clear the lock and delete the last locked action)
+        await Task.clear_task_lock(task_id, project_id, db)

         # Add AUTO_UNLOCKED action in the task history
-        auto_unlocked = self.set_task_history(action=next_action,
user_id=locked_user) - auto_unlocked.action_text = lock_duration - self.update() + auto_unlocked = await Task.set_task_history( + task_id=task_id, + project_id=project_id, + user_id=locked_user["locked_by"], + action=next_action, + db=db, + ) - def unlock_task( - self, - user_id, - new_state=None, - comment=None, - undo=False, - issues=None, - local_session=None, + # Update the action_text with the lock duration + update_history_query = """ + UPDATE task_history + SET action_text = :lock_duration + WHERE id = :history_id + """ + await db.execute( + query=update_history_query, + values={"lock_duration": lock_duration, "history_id": auto_unlocked["id"]}, + ) + + @staticmethod + async def unlock_task( + task_id: int, + project_id: int, + user_id: int, + new_state: TaskStatus, + db: Database, + comment: Optional[str] = None, + undo: bool = False, + issues: Optional[List[Dict[str, Any]]] = None, ): - """Unlock task and ensure duration task locked is saved in History""" + """Unlock the task and change its state.""" + # Add task comment history if provided if comment: - self.set_task_history( - action=TaskAction.COMMENT, + await Task.set_task_history( + task_id, + project_id, + user_id, + TaskAction.COMMENT, + db, comment=comment, - user_id=user_id, mapping_issues=issues, ) - - history = self.set_task_history( - action=TaskAction.STATE_CHANGE, + # Record state change in history + history = await Task.set_task_history( + task_id, + project_id, + user_id, + TaskAction.STATE_CHANGE, + db, + comment=comment, new_state=new_state, - user_id=user_id, mapping_issues=issues, ) - # If undo, clear the mapped_by and validated_by fields if undo: if new_state == TaskStatus.MAPPED: - self.validated_by = None + update_query = """ + UPDATE tasks + SET validated_by = NULL + WHERE id = :task_id AND project_id = :project_id + """ + await db.execute( + query=update_query, + values={"task_id": task_id, "project_id": project_id}, + ) elif new_state == TaskStatus.READY: - self.mapped_by = None - elif ( - new_state in [TaskStatus.MAPPED, TaskStatus.BADIMAGERY] - and TaskStatus(self.task_status) != TaskStatus.LOCKED_FOR_VALIDATION - ): - # Don't set mapped if state being set back to mapped after validation - self.mapped_by = user_id - elif new_state == TaskStatus.VALIDATED: - TaskInvalidationHistory.record_validation( - self.project_id, self.id, user_id, history, local_session=local_session - ) - self.validated_by = user_id - elif new_state == TaskStatus.INVALIDATED: - TaskInvalidationHistory.record_invalidation( - self.project_id, self.id, user_id, history, local_session=local_session + update_query = """ + UPDATE tasks + SET mapped_by = NULL + WHERE id = :task_id AND project_id = :project_id + """ + await db.execute( + query=update_query, + values={"task_id": task_id, "project_id": project_id}, + ) + else: + current_status_query = """ + SELECT task_status FROM tasks WHERE id = :task_id AND project_id = :project_id + """ + current_status_result = await db.fetch_one( + query=current_status_query, + values={"task_id": task_id, "project_id": project_id}, ) - self.mapped_by = None - self.validated_by = None + current_status = TaskStatus(current_status_result["task_status"]) + # Handle specific state changes + if new_state == TaskStatus.VALIDATED: + await TaskInvalidationHistory.record_validation( + project_id, task_id, user_id, history, db + ) + update_query = """ + UPDATE tasks + SET validated_by = :user_id + WHERE id = :task_id AND project_id = :project_id + """ + await db.execute( + query=update_query, + values={ + 
"user_id": user_id, + "task_id": task_id, + "project_id": project_id, + }, + ) + + elif new_state == TaskStatus.INVALIDATED: + await TaskInvalidationHistory.record_invalidation( + project_id, task_id, user_id, history, db + ) + update_query = """ + UPDATE tasks + SET mapped_by = NULL, validated_by = NULL + WHERE id = :task_id AND project_id = :project_id + """ + await db.execute( + query=update_query, + values={"task_id": task_id, "project_id": project_id}, + ) - if not undo: + # Set `mapped_by` for MAPPED or BADIMAGERY states when not locked for validation + elif new_state in [TaskStatus.MAPPED, TaskStatus.BADIMAGERY]: + if current_status != TaskStatus.LOCKED_FOR_VALIDATION: + update_query = """ + UPDATE tasks + SET mapped_by = :user_id + WHERE id = :task_id AND project_id = :project_id + """ + await db.execute( + query=update_query, + values={ + "user_id": user_id, + "task_id": task_id, + "project_id": project_id, + }, + ) + + # Update task locked duration in the history when `undo` is False # Using a slightly evil side effect of Actions and Statuses having the same name here :) - TaskHistory.update_task_locked_with_duration( - self.id, - self.project_id, - TaskStatus(self.task_status), - user_id, - local_session=local_session, + await TaskHistory.update_task_locked_with_duration( + task_id, project_id, TaskStatus(current_status), user_id, db ) + # Final query for updating task status + final_update_query = """ + UPDATE tasks + SET task_status = :new_status, locked_by = NULL + WHERE id = :task_id AND project_id = :project_id + """ + await db.execute( + query=final_update_query, + values={ + "new_status": new_state.value, + "task_id": task_id, + "project_id": project_id, + }, + ) - self.task_status = new_state.value - self.locked_by = None - if local_session: - self.update(local_session=local_session) - else: - self.update() - - def reset_lock(self, user_id, comment=None): - """Removes a current lock from a task, resets to last status and - updates history with duration of lock""" + @staticmethod + async def reset_lock( + task_id: int, + project_id: int, + task_status: TaskStatus, + user_id: int, + comment: Optional[str], + db: Database, + ): + """ + Removes a current lock from a task, resets to the last status, and updates history with the lock duration. + :param task_id: The ID of the task to reset the lock for. + :param project_id: The project ID the task belongs to. + :param task_status: The current task status. + :param user_id: The ID of the user resetting the lock. + :param comment: Optional comment provided during the reset. + :param db: The database connection. 
+ """ + # If a comment is provided, set the task history with a comment action if comment: - self.set_task_history( - action=TaskAction.COMMENT, comment=comment, user_id=user_id + await Task.set_task_history( + task_id=task_id, + project_id=project_id, + user_id=user_id, + action=TaskAction.COMMENT, + comment=comment, + db=db, ) - - # Using a slightly evil side effect of Actions and Statuses having the same name here :) - TaskHistory.update_task_locked_with_duration( - self.id, self.project_id, TaskStatus(self.task_status), user_id + # Update task lock history with duration + await TaskHistory.update_task_locked_with_duration( + task_id=task_id, + project_id=project_id, + lock_action=TaskStatus(task_status), + user_id=user_id, + db=db, ) - self.clear_lock() - def clear_lock(self): - """Resets to last status and removes current lock from a task""" - self.task_status = TaskHistory.get_last_status(self.project_id, self.id).value - self.locked_by = None - self.update() + # Clear the lock on the task + await Task.clear_lock(task_id=task_id, project_id=project_id, db=db) + + @staticmethod + async def clear_lock(task_id: int, project_id: int, db: Database): + """ + Resets the task to its last status and removes the current lock from the task. + :param task_id: The ID of the task to clear the lock for. + :param project_id: The project ID the task belongs to. + :param db: The database connection. + """ + last_status = await TaskHistory.get_last_status(project_id, task_id, db) + # Clear the lock by updating the task's status and lock status + update_query = """ + UPDATE tasks + SET task_status = :task_status, locked_by = NULL + WHERE id = :task_id AND project_id = :project_id + """ + update_values = { + "task_status": last_status.value, + "task_id": task_id, + "project_id": project_id, + } + await db.execute(query=update_query, values=update_values) @staticmethod - def get_tasks_as_geojson_feature_collection( - project_id, - task_ids_str: str = None, - order_by: str = None, + async def get_tasks_as_geojson_feature_collection( + db: Database, + project_id: int, + task_ids_str: Optional[str] = None, + order_by: Optional[str] = None, order_by_type: str = "ASC", - status: int = None, - ): + status: Optional[int] = None, + ) -> geojson.FeatureCollection: """ - Creates a geoJson.FeatureCollection object for tasks related to the supplied project ID + Creates a geoJson.FeatureCollection object for tasks related to the supplied project ID. 
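+        Ordering by "effort_prediction" sorts tasks on the building_area_diff value from task_annotations.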
+        :param db: The async database connection
         :param project_id: Owning project ID
-        :order_by: sorting option: available values update_date and building_area_diff
-        :status: task status id to filter by
+        :param task_ids_str: Comma-separated task IDs to filter by
+        :param order_by: Sorting option: available values are 'effort_prediction'
+        :param order_by_type: Sorting order: 'ASC' or 'DESC'
+        :param status: Task status ID to filter by
         :return: geojson.FeatureCollection
         """
-        # subquery = (
-        #     db.session.query(func.max(TaskHistory.action_date))
-        #     .filter(
-        #         Task.id == TaskHistory.task_id,
-        #         Task.project_id == TaskHistory.project_id,
-        #     )
-        #     .correlate(Task)
-        #     .group_by(Task.id)
-        #     .label("update_date")
-        # )
-        query = db.session.query(
-            Task.id,
-            Task.x,
-            Task.y,
-            Task.zoom,
-            Task.is_square,
-            Task.task_status,
-            Task.geometry.ST_AsGeoJSON().label("geojson"),
-            Task.locked_by,
-            Task.mapped_by,
-            # subquery,
-        )
+        # Base query; any JOIN and the WHERE clause are appended below
+        query = """
+            SELECT
+                t.id,
+                t.x,
+                t.y,
+                t.zoom,
+                t.is_square,
+                t.task_status,
+                ST_AsGeoJSON(t.geometry) AS geojson,
+                t.locked_by,
+                t.mapped_by
+            FROM tasks t
+        """
+        # The task_annotations JOIN must precede the WHERE clause, so it is added here
+        # rather than after the filters below (a trailing JOIN would be invalid SQL).
+        if order_by == "effort_prediction":
+            query += " LEFT JOIN task_annotations ta ON ta.task_id = t.id "
+        query += " WHERE t.project_id = :project_id"

-        filters = [Task.project_id == project_id]
+        # Initialize query parameters
+        filters = {"project_id": project_id}

+        # Add task_id filter
         if task_ids_str:
-            task_ids = list(map(int, task_ids_str.split(",")))
-            tasks = Task.get_tasks(project_id, task_ids)
-            if not tasks or len(tasks) == 0:
-                raise NotFound(
-                    sub_code="TASKS_NOT_FOUND", tasks=task_ids, project_id=project_id
-                )
-            else:
-                tasks_filters = [task.id for task in tasks]
-            filters = [Task.project_id == project_id, Task.id.in_(tasks_filters)]
-        else:
-            tasks = Task.get_all_tasks(project_id)
-            if not tasks or len(tasks) == 0:
-                raise NotFound(sub_code="TASKS_NOT_FOUND", project_id=project_id)
+            task_ids = [int(task_id) for task_id in task_ids_str.split(",")]
+            query += " AND t.id = ANY(:task_ids)"
+            filters["task_ids"] = task_ids

-        if status:
-            filters.append(Task.task_status == status)
+        # Add status filter
+        if status is not None:
+            query += " AND t.task_status = :status"
+            filters["status"] = status

+        # Add ordering
         if order_by == "effort_prediction":
-            query = query.outerjoin(TaskAnnotation).filter(*filters)
             if order_by_type == "DESC":
-                query = query.order_by(
-                    desc(
-                        cast(
-                            cast(TaskAnnotation.properties["building_area_diff"], Text),
-                            Float,
-                        )
-                    )
-                )
+                query += " ORDER BY CAST(ta.properties->>'building_area_diff' AS FLOAT) DESC"
             else:
-                query = query.order_by(
-                    cast(
-                        cast(TaskAnnotation.properties["building_area_diff"], Text),
-                        Float,
-                    )
-                )
-        # elif order_by == "last_updated":
-        #     if order_by_type == "DESC":
-        #         query = query.filter(*filters).order_by(desc("update_date"))
-        #     else:
-        #         query = query.filter(*filters).order_by("update_date")
-        else:
-            query = query.filter(*filters)
+                query += " ORDER BY CAST(ta.properties->>'building_area_diff' AS FLOAT) ASC"
+        elif order_by:
+            # NOTE: order_by is interpolated into the SQL; only trusted column names should be passed.
+            if order_by_type == "DESC":
+                query += f" ORDER BY {order_by} DESC"
+            else:
+                query += f" ORDER BY {order_by} ASC"

-        project_tasks = query.all()
+        # Execute the query
+        rows = await db.fetch_all(query, values=filters)

+        # Process results into geojson.FeatureCollection
         tasks_features = []
-        for task in project_tasks:
-            task_geometry = geojson.loads(task.geojson)
+        for row in rows:
+            task_geometry = geojson.loads(row["geojson"])
             task_properties = dict(
-                taskId=task.id,
taskX=task.x, - taskY=task.y, - taskZoom=task.zoom, - taskIsSquare=task.is_square, - taskStatus=TaskStatus(task.task_status).name, - lockedBy=task.locked_by, - mappedBy=task.mapped_by, + taskId=row["id"], + taskX=row["x"], + taskY=row["y"], + taskZoom=row["zoom"], + taskIsSquare=row["is_square"], + taskStatus=TaskStatus(row["task_status"]).name, + lockedBy=row["locked_by"], + mappedBy=row["mapped_by"], ) - feature = geojson.Feature( geometry=task_geometry, properties=task_properties ) @@ -1087,31 +1451,42 @@ def get_tasks_as_geojson_feature_collection( return geojson.FeatureCollection(tasks_features) @staticmethod - def get_tasks_as_geojson_feature_collection_no_geom(project_id): + async def get_tasks_as_geojson_feature_collection_no_geom( + db: Database, project_id: int + ) -> geojson.FeatureCollection: """ - Creates a geoJson.FeatureCollection object for all tasks related to the supplied project ID without geometry + Creates a geoJson.FeatureCollection object for all tasks related to the supplied project ID without geometry. + :param db: The async database connection :param project_id: Owning project ID :return: geojson.FeatureCollection """ - project_tasks = ( - db.session.query( - Task.id, Task.x, Task.y, Task.zoom, Task.is_square, Task.task_status - ) - .filter(Task.project_id == project_id) - .all() - ) + # Define the SQL query + query = """ + SELECT + t.id, + t.x, + t.y, + t.zoom, + t.is_square, + t.task_status + FROM tasks t + WHERE t.project_id = :project_id + """ + # Execute the query + rows = await db.fetch_all(query, values={"project_id": project_id}) + + # Process results into geojson.FeatureCollection tasks_features = [] - for task in project_tasks: + for row in rows: task_properties = dict( - taskId=task.id, - taskX=task.x, - taskY=task.y, - taskZoom=task.zoom, - taskIsSquare=task.is_square, - taskStatus=TaskStatus(task.task_status).name, + taskId=row["id"], + taskX=row["x"], + taskY=row["y"], + taskZoom=row["zoom"], + taskIsSquare=row["is_square"], + taskStatus=TaskStatus(row["task_status"]).name, ) - feature = geojson.Feature(properties=task_properties) tasks_features.append(feature) @@ -1121,7 +1496,7 @@ def get_tasks_as_geojson_feature_collection_no_geom(project_id): def get_mapped_tasks_by_user(project_id: int): """Gets all mapped tasks for supplied project grouped by user""" results = ( - db.session.query( + session.query( User.username, User.mapping_level, func.count(distinct(Task.id)), @@ -1160,141 +1535,374 @@ def get_mapped_tasks_by_user(project_id: int): return mapped_tasks_dto @staticmethod - def get_max_task_id_for_project(project_id: int): - """Gets the nights task id currently in use on a project""" - result = ( - db.session.query(func.max(Task.id)) - .filter(Task.project_id == project_id) - .group_by(Task.project_id) - ) - if result.count() == 0: + async def get_max_task_id_for_project(project_id: int, db: Database): + """ + Gets the highest task id currently in use on a project using raw SQL with async db. 
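+        :raises NotFound: If the project has no tasks.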
+ """ + query = """ + SELECT MAX(id) + FROM tasks + WHERE project_id = :project_id + GROUP BY project_id + """ + + result = await db.fetch_val(query, values={"project_id": project_id}) + if not result: raise NotFound(sub_code="TASKS_NOT_FOUND", project_id=project_id) - for row in result: - return row[0] + return result + + @staticmethod + async def as_dto( + task_id: int, project_id: int, db: Database, last_updated: str + ) -> TaskDTO: + """Fetch basic TaskDTO details without history or instructions""" + query = """ + SELECT + t.id AS task_id, + t.project_id, + t.task_status, + u.username AS lock_holder + FROM + tasks t + LEFT JOIN + users u ON t.locked_by = u.id + WHERE + t.id = :task_id AND t.project_id = :project_id; + """ + + task = await db.fetch_one( + query=query, values={"task_id": task_id, "project_id": project_id} + ) + auto_unlock_seconds = await Task.auto_unlock_delta() + task_dto = TaskDTO( + task_id=task["task_id"], + project_id=task["project_id"], + task_status=TaskStatus(task["task_status"]).name, + lock_holder=task["lock_holder"], + last_updated=last_updated, + auto_unlock_seconds=auto_unlock_seconds.total_seconds(), + comments_number=None, # Placeholder, can be populated as needed + ) + return task_dto - def as_dto( + async def task_as_dto( self, task_history: List[TaskHistoryDTO] = [], last_updated: datetime.datetime = None, comments: int = None, + db: Database = None, ): + from backend.services.users.user_service import UserService + """Just converts to a TaskDTO""" task_dto = TaskDTO() task_dto.task_id = self.id task_dto.project_id = self.project_id task_dto.task_status = TaskStatus(self.task_status).name - task_dto.lock_holder = self.lock_holder.username if self.lock_holder else None + user = ( + await UserService.get_user_by_id(self.locked_by, db) + if self.locked_by + else None + ) + task_dto.lock_holder = user.username if user else None task_dto.task_history = task_history task_dto.last_updated = last_updated if last_updated else None - task_dto.auto_unlock_seconds = Task.auto_unlock_delta().total_seconds() + unlock_delta = await Task.auto_unlock_delta() + task_dto.auto_unlock_seconds = ( + unlock_delta.total_seconds() if unlock_delta else None + ) task_dto.comments_number = comments if isinstance(comments, int) else None return task_dto - def as_dto_with_instructions(self, preferred_locale: str = "en") -> TaskDTO: - """Get dto with any task instructions""" + @staticmethod + async def get_task_history( + task_id: int, project_id: int, db: Database + ) -> List[TaskHistoryDTO]: + """Get the task history""" + query = """ + SELECT id, action, action_text, action_date, user_id + FROM task_history + WHERE task_id = :task_id AND project_id = :project_id + ORDER BY action_date DESC + """ + task_history_records = await db.fetch_all( + query, values={"task_id": task_id, "project_id": project_id} + ) + task_history = [] - for action in self.task_history: + for record in task_history_records: history = TaskHistoryDTO() - history.history_id = action.id - history.action = action.action - history.action_text = action.action_text - history.action_date = action.action_date + history.history_id = record.id + history.action = record.action + history.action_text = record.action_text + history.action_date = record.action_date history.action_by = ( - action.actioned_by.username if action.actioned_by else None - ) + record.user_id + ) # Simplified to user_id, username lookup can be done separately history.picture_url = ( - action.actioned_by.picture_url if action.actioned_by else None + 
None # Add a separate query to fetch user picture if needed ) - if action.task_mapping_issues: - history.issues = [ - issue.as_dto() for issue in action.task_mapping_issues - ] - task_history.append(history) - last_updated = None - if len(task_history) > 0: - last_updated = task_history[0].action_date + return task_history + + @staticmethod + async def as_dto_with_instructions( + task_id: int, project_id: int, db: Database, preferred_locale: str = "en" + ) -> TaskDTO: + """Get DTO with any task instructions""" + + # Query to get task history and associated data + query = """ + SELECT + th.id AS history_id, + th.action, + th.action_text, + th.action_date, + u.username AS action_by, + u.picture_url, + tmi.issue, + tmi.count, + mic.id AS issue_category_id, + p.default_locale + FROM + task_history th + LEFT JOIN + users u ON th.user_id = u.id + LEFT JOIN + task_mapping_issues tmi ON th.id = tmi.task_history_id + LEFT JOIN + mapping_issue_categories mic ON tmi.mapping_issue_category_id = mic.id + LEFT JOIN + projects p ON th.project_id = p.id + WHERE + th.task_id = :task_id AND th.project_id = :project_id + ORDER BY + th.action_date DESC; + """ + rows = await db.fetch_all( + query=query, values={"task_id": task_id, "project_id": project_id} + ) + task_history = [] + + for row in rows: + history = TaskHistoryDTO( + history_id=row["history_id"], + action=row["action"], + action_text=row["action_text"], + action_date=row["action_date"], + action_by=row["action_by"], + picture_url=row["picture_url"], + ) - task_dto = self.as_dto(task_history, last_updated=last_updated) + if row["issue"]: + issues = [] + issue_dto = TaskMappingIssueDTO( + category_id=row["issue_category_id"], + count=row["count"], + name=row["issue"], + ) + issues.append(issue_dto) + history.issues = issues - per_task_instructions = self.get_per_task_instructions(preferred_locale) + task_history.append(history) - # If we don't have instructions in preferred locale try again for default locale - task_dto.per_task_instructions = ( - per_task_instructions - if per_task_instructions - else self.get_per_task_instructions(self.projects.default_locale) + last_updated = ( + task_history[0].action_date.replace(tzinfo=timezone.utc).isoformat() + if task_history + else None ) + task_dto = await Task.as_dto(task_id, project_id, db, last_updated) + per_task_instructions = await Task.get_per_task_instructions( + task_id, project_id, preferred_locale, db + ) + if not per_task_instructions: + query_locale = """ + SELECT + p.default_locale + FROM + projects p + WHERE + p.id = :project_id + """ + default_locale_row = await db.fetch_one( + query=query_locale, values={"project_id": project_id} + ) + default_locale = ( + default_locale_row["default_locale"] if default_locale_row else None + ) - annotations = self.get_per_task_annotations() - task_dto.task_annotations = annotations if annotations else [] + per_task_instructions = ( + await Task.get_per_task_instructions( + task_id, project_id, default_locale, db + ) + if default_locale + else None + ) + task_dto.per_task_instructions = per_task_instructions + task_dto.task_annotations = await Task.get_task_annotations(task_id, db) + task_dto.task_history = task_history return task_dto def get_per_task_annotations(self): result = [ta.get_dto() for ta in self.task_annotations] return result - def get_per_task_instructions(self, search_locale: str) -> str: + @staticmethod + async def get_per_task_instructions( + task_id: int, project_id: int, search_locale: str, db: Database + ) -> str: """Gets any per task 
instructions attached to the project""" - project_info = self.projects.project_info.all() + query = """ + SELECT + pi.per_task_instructions + FROM + project_info pi + WHERE + pi.project_id = :project_id AND pi.locale = :search_locale; + """ - for info in project_info: - if info.locale == search_locale: - return self.format_per_task_instructions(info.per_task_instructions) + result = await db.fetch_one( + query=query, + values={"project_id": project_id, "search_locale": search_locale}, + ) + return ( + await Task.format_per_task_instructions( + result["per_task_instructions"], task_id, project_id, db + ) + if result + else "" + ) - def format_per_task_instructions(self, instructions) -> str: + @staticmethod + async def format_per_task_instructions( + instructions: str, task_id: int, project_id: int, db: Database + ) -> str: """Format instructions by looking for X, Y, Z tokens and replacing them with the task values""" if not instructions: - return "" # No instructions so return empty string + return "" # No instructions, return empty string + # Query to get the necessary task details (x, y, zoom, etc.) + query = """ + SELECT + t.x, + t.y, + t.zoom, + t.extra_properties + FROM + tasks t + WHERE + t.id = :task_id AND t.project_id = :project_id; + """ + task = await db.fetch_one( + query=query, values={"task_id": task_id, "project_id": project_id} + ) properties = {} - if self.x: - properties["x"] = str(self.x) - if self.y: - properties["y"] = str(self.y) - if self.zoom: - properties["z"] = str(self.zoom) - if self.extra_properties: - properties.update(json.loads(self.extra_properties)) + if task["x"]: + properties["x"] = str(task["x"]) + if task["y"]: + properties["y"] = str(task["y"]) + if task["zoom"]: + properties["z"] = str(task["zoom"]) + if task["extra_properties"]: + properties.update(json.loads(task["extra_properties"])) try: instructions = instructions.format(**properties) except (KeyError, ValueError, IndexError): - # KeyError is raised if a format string contains a key that is not in the dictionary, e.g. {foo} - # ValueError is raised if a format string contains a single { or } - # IndexError is raised if a format string contains empty braces, e.g. {} + # Handle formatting errors pass return instructions - def copy_task_history(self) -> list: - copies = [] - for entry in self.task_history: - db.session.expunge(entry) - make_transient(entry) - entry.id = None - entry.task_id = None - db.session.add(entry) - copies.append(entry) + @staticmethod + async def get_task_annotations( + task_id: int, db: Database + ) -> List[TaskAnnotationDTO]: + """Fetch annotations related to the task""" + query = """ + SELECT + ta.task_id, + ta.annotation_type, + ta.annotation_source, + ta.annotation_markdown, + ta.properties + FROM + task_annotations ta + WHERE + ta.task_id = :task_id; + """ + rows = await db.fetch_all(query=query, values={"task_id": task_id}) + + # Map the query results to TaskAnnotationDTO + return [ + TaskAnnotationDTO( + task_id=row["task_id"], + annotation_type=row["annotation_type"], + annotation_source=row["annotation_source"], + annotation_markdown=row["annotation_markdown"], + properties=row["properties"], + ) + for row in rows + ] + + @staticmethod + async def copy_task_history( + original_task_id: int, new_task_id: int, project_id: int, db: Database + ) -> None: + """ + Copy all task history records from the original task to a new task. + + :param original_task_id: ID of the task whose history is to be copied. 
+ :param new_task_id: ID of the new task to which the history will be copied. + :param project_id: ID of the project associated with the task history. + :param db: Database connection instance. + """ + # Insert the task history with the new_task_id and provided project_id + insert_query = """ + INSERT INTO task_history (project_id, task_id, action, action_text, action_date, user_id) + SELECT :project_id, :new_task_id, action, action_text, action_date, user_id + FROM task_history + WHERE task_id = :original_task_id AND project_id = :project_id + """ + + await db.execute( + insert_query, + values={ + "project_id": project_id, + "new_task_id": new_task_id, + "original_task_id": original_task_id, + }, + ) - return copies + async def get_locked_tasks_for_user( + user_id: int, db: Database + ) -> LockedTasksForUser: + """Gets tasks on projects locked by the specified user id""" - def get_locked_tasks_for_user(user_id: int): - """Gets tasks on project owned by specified user id""" - tasks = Task.query.filter_by(locked_by=user_id) + query = """ + SELECT id, project_id, task_status + FROM tasks + WHERE locked_by = :user_id + """ + + rows = await db.fetch_all(query=query, values={"user_id": user_id}) tasks_dto = LockedTasksForUser() - for task in tasks: - tasks_dto.locked_tasks.append(task.id) - tasks_dto.project = task.project_id - tasks_dto.task_status = TaskStatus(task.task_status).name + + if rows: + tasks_dto.locked_tasks = [row["id"] for row in rows] + tasks_dto.project = rows[0][ + "project_id" + ] # Assuming all tasks belong to the same project + tasks_dto.task_status = TaskStatus(rows[0]["task_status"]).name return tasks_dto - def get_locked_tasks_details_for_user(user_id: int): + async def get_locked_tasks_details_for_user(user_id: int, db: Database) -> list: """Gets tasks on project owned by specified user id""" - tasks = Task.query.filter_by(locked_by=user_id) + query = select(Task).filter_by(locked_by=user_id) + tasks = await db.fetch_all(query) locked_tasks = [task for task in tasks] return locked_tasks diff --git a/backend/models/postgis/task_annotation.py b/backend/models/postgis/task_annotation.py index 155a24da96..522fab5f8b 100644 --- a/backend/models/postgis/task_annotation.py +++ b/backend/models/postgis/task_annotation.py @@ -1,30 +1,41 @@ -from backend.models.postgis.utils import timestamp -from backend import db -from backend.models.dtos.task_annotation_dto import TaskAnnotationDTO +from sqlalchemy import ( + JSON, + Column, + DateTime, + ForeignKey, + ForeignKeyConstraint, + Index, + Integer, + String, +) + +from backend.db import Base from backend.models.dtos.project_dto import ProjectTaskAnnotationsDTO +from backend.models.dtos.task_annotation_dto import TaskAnnotationDTO +from backend.models.postgis.utils import timestamp -class TaskAnnotation(db.Model): +class TaskAnnotation(Base): """Describes Task annotaions like derived ML attributes""" __tablename__ = "task_annotations" - id = db.Column(db.Integer, primary_key=True) - project_id = db.Column(db.Integer, db.ForeignKey("projects.id"), index=True) - task_id = db.Column(db.Integer, nullable=False) - annotation_type = db.Column(db.String, nullable=False) - annotation_source = db.Column(db.String) - annotation_markdown = db.Column(db.String) - updated_timestamp = db.Column(db.DateTime, nullable=False, default=timestamp) - properties = db.Column(db.JSON, nullable=False) + id = Column(Integer, primary_key=True) + project_id = Column(Integer, ForeignKey("projects.id"), index=True) + task_id = Column(Integer, nullable=False) + 
annotation_type = Column(String, nullable=False) + annotation_source = Column(String) + annotation_markdown = Column(String) + updated_timestamp = Column(DateTime, nullable=False, default=timestamp) + properties = Column(JSON, nullable=False) __table_args__ = ( - db.ForeignKeyConstraint( + ForeignKeyConstraint( [task_id, project_id], ["tasks.id", "tasks.project_id"], name="fk_task_annotations", ), - db.Index("idx_task_annotations_composite", "task_id", "project_id"), + Index("idx_task_annotations_composite", "task_id", "project_id"), {}, ) @@ -44,26 +55,16 @@ def __init__( self.annotation_markdown = annotation_markdown self.properties = properties - def create(self): - """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - - def update(self): - """Updates the DB with the current state of the Task Annotations""" - db.session.commit() - - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() - @staticmethod def get_task_annotation(task_id, project_id, annotation_type): """Get annotations for a task with supplied type""" - return TaskAnnotation.query.filter_by( - project_id=project_id, task_id=task_id, annotation_type=annotation_type - ).one_or_none() + return ( + session.query(TaskAnnotation) + .filter_by( + project_id=project_id, task_id=task_id, annotation_type=annotation_type + ) + .one_or_none() + ) def get_dto(self): task_annotation_dto = TaskAnnotationDTO() @@ -77,9 +78,11 @@ def get_dto(self): @staticmethod def get_task_annotations_by_project_id_type(project_id, annotation_type): """Get annotatiols for a project with the supplied type""" - project_task_annotations = TaskAnnotation.query.filter_by( - project_id=project_id, annotation_type=annotation_type - ).all() + project_task_annotations = ( + session.query(TaskAnnotation) + .filter_by(project_id=project_id, annotation_type=annotation_type) + .all() + ) project_task_annotations_dto = ProjectTaskAnnotationsDTO() project_task_annotations_dto.project_id = project_id @@ -100,9 +103,9 @@ def get_task_annotations_by_project_id_type(project_id, annotation_type): @staticmethod def get_task_annotations_by_project_id(project_id): """Get annotatiols for a project with the supplied type""" - project_task_annotations = TaskAnnotation.query.filter_by( - project_id=project_id - ).all() + project_task_annotations = ( + session.query(TaskAnnotation).filter_by(project_id=project_id).all() + ) project_task_annotations_dto = ProjectTaskAnnotationsDTO() project_task_annotations_dto.project_id = project_id diff --git a/backend/models/postgis/team.py b/backend/models/postgis/team.py index ca9ac2a8f9..a440942747 100644 --- a/backend/models/postgis/team.py +++ b/backend/models/postgis/team.py @@ -1,93 +1,155 @@ -from backend import db +from databases import Database +from sqlalchemy import ( + BigInteger, + Boolean, + Column, + ForeignKey, + Integer, + String, + insert, + select, +) +from sqlalchemy.orm import backref, relationship + +from backend.db import Base from backend.exceptions import NotFound +from backend.models.dtos.organisation_dto import OrganisationTeamsDTO from backend.models.dtos.team_dto import ( - TeamDTO, NewTeamDTO, + TeamDTO, TeamMembersDTO, TeamProjectDTO, ) -from backend.models.dtos.organisation_dto import OrganisationTeamsDTO from backend.models.postgis.organisation import Organisation from backend.models.postgis.statuses import ( TeamJoinMethod, - TeamVisibility, TeamMemberFunctions, TeamRoles, + TeamVisibility, ) from 
backend.models.postgis.user import User -class TeamMembers(db.Model): +class TeamMembers(Base): __tablename__ = "team_members" - team_id = db.Column( - db.Integer, db.ForeignKey("teams.id", name="fk_teams"), primary_key=True - ) - user_id = db.Column( - db.BigInteger, db.ForeignKey("users.id", name="fk_users"), primary_key=True + team_id = Column(Integer, ForeignKey("teams.id", name="fk_teams"), primary_key=True) + user_id = Column( + BigInteger, ForeignKey("users.id", name="fk_users"), primary_key=True ) - function = db.Column(db.Integer, nullable=False) # either 'editor' or 'manager' - active = db.Column(db.Boolean, default=False) - join_request_notifications = db.Column( - db.Boolean, nullable=False, default=False + function = Column(Integer, nullable=False) # either 'editor' or 'manager' + active = Column(Boolean, default=False) + join_request_notifications = Column( + Boolean, nullable=False, default=False ) # Managers can turn notifications on/off for team join requests - member = db.relationship( - User, backref=db.backref("teams", cascade="all, delete-orphan") + member = relationship( + User, backref=backref("teams", cascade="all, delete-orphan", lazy="joined") ) - team = db.relationship( - "Team", backref=db.backref("members", cascade="all, delete-orphan") + team = relationship( + "Team", backref=backref("members", cascade="all, delete-orphan", lazy="joined") ) - def create(self): + async def create(self, db: Database): """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() + team_member = await db.execute( + insert(TeamMembers.__table__).values( + team_id=self.team_id, + user_id=self.user_id, + function=self.function, + active=self.active, + join_request_notifications=False, + ) + ) + return team_member - def update(self): + async def update(self, db: Database): """Updates the current model in the DB""" - db.session.commit() + await db.execute( + TeamMembers.__table__.update() + .where(TeamMembers.team_id == self.team_id) + .where(TeamMembers.user_id == self.user_id) + .values( + function=self.function, + active=self.active, + join_request_notifications=self.join_request_notifications, + ) + ) @staticmethod - def get(team_id: int, user_id: int): - """Returns a team member by team_id and user_id""" - return TeamMembers.query.filter_by(team_id=team_id, user_id=user_id).first() + async def get(team_id: int, user_id: int, db: Database): + """ + Returns a team member by team_id and user_id + :param team_id: ID of the team + :param user_id: ID of the user + :param db: async database connection + :return: Team member if found, otherwise None + """ + query = """ + SELECT * FROM team_members + WHERE team_id = :team_id AND user_id = :user_id + """ + member = await db.fetch_one( + query, values={"team_id": team_id, "user_id": user_id} + ) + + return member # Returns the team member if found, otherwise None -class Team(db.Model): +class Team(Base): """Describes a team""" __tablename__ = "teams" # Columns - id = db.Column(db.Integer, primary_key=True) - organisation_id = db.Column( - db.Integer, - db.ForeignKey("organisations.id", name="fk_organisations"), + id = Column(Integer, primary_key=True) + organisation_id = Column( + Integer, + ForeignKey("organisations.id", name="fk_organisations"), nullable=False, ) - name = db.Column(db.String(512), nullable=False) - logo = db.Column(db.String) # URL of a logo - description = 
db.Column(db.String) - join_method = db.Column( - db.Integer, default=TeamJoinMethod.ANY.value, nullable=False - ) - visibility = db.Column( - db.Integer, default=TeamVisibility.PUBLIC.value, nullable=False - ) + name = Column(String(512), nullable=False) + logo = Column(String) # URL of a logo + description = Column(String) + join_method = Column(Integer, default=TeamJoinMethod.ANY.value, nullable=False) + visibility = Column(Integer, default=TeamVisibility.PUBLIC.value, nullable=False) + + # organisation = relationship(Organisation, backref="teams", lazy="joined") + organisation = relationship(Organisation, backref="teams") + + async def create(self, db: Database): + """Creates and saves the current model to the DB, including members if they exist.""" + + # Create the Team and get the generated team_id + team_id = await db.execute( + insert(Team.__table__) + .values( + organisation_id=self.organisation_id, + name=self.name, + logo=self.logo, + description=self.description, + join_method=self.join_method, + visibility=self.visibility, + ) + .returning(Team.__table__.c.id) + ) - organisation = db.relationship(Organisation, backref="teams") + if team_id and self.members: + members_to_insert = [ + { + "team_id": team_id, + "user_id": member.user_id, + "function": member.function, + "active": member.active, + "join_request_notifications": member.join_request_notifications, + } + for member in self.members + ] - def create(self): - """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() + await db.execute_many(insert(TeamMembers.__table__), members_to_insert) + + return team_id @classmethod - def create_from_dto(cls, new_team_dto: NewTeamDTO): + async def create_from_dto(cls, new_team_dto: NewTeamDTO, db: Database): """Creates a new team from a dto""" new_team = cls() @@ -96,8 +158,8 @@ def create_from_dto(cls, new_team_dto: NewTeamDTO): new_team.join_method = TeamJoinMethod[new_team_dto.join_method].value new_team.visibility = TeamVisibility[new_team_dto.visibility].value - org = Organisation.get(new_team_dto.organisation_id) - new_team.organisation = org + org = await Organisation.get(new_team_dto.organisation_id, db) + new_team.organisation_id = org # Create team member with creator as a manager new_member = TeamMembers() @@ -105,20 +167,21 @@ def create_from_dto(cls, new_team_dto: NewTeamDTO): new_member.user_id = new_team_dto.creator new_member.function = TeamMemberFunctions.MANAGER.value new_member.active = True + new_member.join_request_notifications = False - new_team.members.append(new_member) - - new_team.create() - return new_team + team = await Team.create(new_team, db) + return team - def update(self, team_dto: TeamDTO): + async def update(team, team_dto: TeamDTO, db: Database): """Updates Team from DTO""" if team_dto.organisation: - self.organisation = Organisation().get_organisation_by_name( - team_dto.organisation + team.organisation = Organisation.get_organisation_by_name( + team_dto.organisation, db ) - for attr, value in team_dto.items(): + # Build the update query for the team attributes + update_fields = {} + for attr, value in team_dto.dict().items(): if attr == "visibility" and value is not None: value = TeamVisibility[team_dto.visibility].value if attr == "join_method" and value is not None: @@ -127,73 +190,76 @@ def update(self, team_dto: TeamDTO): if attr in ("members", "organisation"): continue - try: - is_field_nullable = self.__table__.columns[attr].nullable - if is_field_nullable and value is not None: - setattr(self, attr, 
value) - elif value is not None: - setattr(self, attr, value) - except KeyError: - continue + if attr in Team.__table__.columns: + update_fields[attr] = value - if team_dto.members != self._get_team_members() and team_dto.members: - for member in self.members: - member_name = User.get_by_id(member.user_id).username - if member_name not in [i["username"] for i in team_dto.members]: - member.delete() - for member in team_dto.members: - user = User.get_by_username(member["username"]) - if user is None: - raise NotFound( - sub_code="USER_NOT_FOUND", username=member["username"] - ) - team_member = TeamMembers.get(self.id, user.id) - if team_member: - team_member.join_request_notifications = member[ - "join_request_notifications" - ] - else: - new_team_member = TeamMembers() - new_team_member.team = self - new_team_member.member = user - new_team_member.function = TeamMemberFunctions[ - member["function"] - ].value - - db.session.commit() - - def delete(self): - """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() - - def can_be_deleted(self) -> bool: - """A Team can be deleted if it doesn't have any projects""" - return len(self.projects) == 0 - - def get(team_id: int): + # Update the team in the database + if update_fields: + update_query = ( + "UPDATE teams SET " + + ", ".join([f"{k} = :{k}" for k in update_fields.keys()]) + + " WHERE id = :id" + ) + await db.execute(update_query, {**update_fields, "id": team.id}) + + # Update team members if they have changed + if team_dto.members: + await Team.update_team_members(team, team_dto, db) + + async def delete(self, db: Database): + """Deletes the current team and its members from the DB""" + + # Delete team members associated with this team + delete_team_members_query = """ + DELETE FROM team_members WHERE team_id = :team_id + """ + await db.execute(delete_team_members_query, values={"team_id": self.id}) + + # Delete the team + delete_team_query = """ + DELETE FROM teams WHERE id = :team_id + """ + await db.execute(delete_team_query, values={"team_id": self.id}) + + @staticmethod + async def can_be_deleted(team_id: int, db: Database) -> bool: + """Check if a Team can be deleted by querying for associated projects""" + query = "SELECT COUNT(*) FROM project_teams WHERE team_id = :team_id" + result = await db.fetch_one(query, {"team_id": team_id}) + return result[0] == 0 + + async def get(team_id: int, db: Database): """ Gets specified team by id :param team_id: team ID in scope :return: Team if found otherwise None """ - return db.session.get(Team, team_id) + query = select(Team).where(Team.id == team_id) + result = await db.fetch_one(query) + return result - def get_team_by_name(team_name: str): + async def get_team_by_name(team_name: str, db: Database): """ Gets specified team by name :param team_name: team name in scope - :return: Team if found otherwise None + :param db: async database connection + :return: Team if found, otherwise None + """ + query = """ + SELECT * FROM teams + WHERE name = :team_name """ - return Team.query.filter_by(name=team_name).one_or_none() + team = await db.fetch_one(query, values={"team_name": team_name}) + + return team # Returns the team if found, otherwise None - def as_dto(self): + async def as_dto(self, db: Database): """Returns a dto for the team""" team_dto = TeamDTO() team_dto.team_id = self.id team_dto.description = self.description team_dto.join_method = TeamJoinMethod(self.join_method).name - team_dto.members = self._get_team_members() + team_dto.members = 
self._get_team_members(db) team_dto.name = self.name team_dto.organisation = self.organisation.name team_dto.organisation_id = self.organisation.id @@ -201,89 +267,273 @@ def as_dto(self): team_dto.visibility = TeamVisibility(self.visibility).name return team_dto - def as_dto_inside_org(self): + async def as_dto_inside_org(self, db: Database): """Returns a dto for the team""" team_dto = OrganisationTeamsDTO() + team_dto.team_id = self.id team_dto.name = self.name team_dto.description = self.description team_dto.join_method = TeamJoinMethod(self.join_method).name - team_dto.members = self._get_team_members() + team_dto.members = self._get_team_members(db) team_dto.visibility = TeamVisibility(self.visibility).name + return team_dto - def as_dto_team_member(self, member) -> TeamMembersDTO: - """Returns a dto for the team member""" - member_dto = TeamMembersDTO() - user = User.get_by_id(member.user_id) - member_function = TeamMemberFunctions(member.function).name - member_dto.username = user.username - member_dto.function = member_function - member_dto.picture_url = user.picture_url - member_dto.active = member.active - member_dto.join_request_notifications = member.join_request_notifications - return member_dto - - def as_dto_team_project(self, project) -> TeamProjectDTO: + async def as_dto_team_member( + user_id: int, team_id: int, db: Database + ) -> TeamMembersDTO: + """Returns a DTO for the team member""" + user_query = """ + SELECT username, picture_url FROM users WHERE id = :user_id + """ + user = await db.fetch_one(query=user_query, values={"user_id": user_id}) + + if not user: + raise NotFound(sub_code="USER_NOT_FOUND", user_id=user_id) + member_query = """ + SELECT function, active, join_request_notifications + FROM team_members WHERE user_id = :user_id AND team_id = :team_id + """ + member = await db.fetch_one( + query=member_query, values={"user_id": user_id, "team_id": team_id} + ) + if not member: + raise NotFound(sub_code="MEMBER_NOT_FOUND", user_id=user_id) + + return TeamMembersDTO( + username=user["username"], + function=TeamMemberFunctions(member["function"]).name, + picture_url=user["picture_url"], + active=member["active"], + join_request_notifications=member["join_request_notifications"], + ) + + def as_dto_team_project(project) -> TeamProjectDTO: """Returns a dto for the team project""" - project_team_dto = TeamProjectDTO() - project_team_dto.project_name = project.name - project_team_dto.project_id = project.project_id - project_team_dto.role = TeamRoles(project.role).name - return project_team_dto - - def _get_team_members(self): - """Helper to get JSON serialized members""" - members = [] - for mem in self.members: - members.append( - { - "username": mem.member.username, - "pictureUrl": mem.member.picture_url, - "function": TeamMemberFunctions(mem.function).name, - "active": mem.active, - } - ) + + return TeamProjectDTO( + project_name=project.name, + project_id=project.project_id, + role=TeamRoles(project.role).name, + ) + + async def _get_team_members(self, db: Database): + """Helper to get JSON serialized members using raw SQL queries""" + + # SQL query to fetch all members of the team, including their username, picture_url, function, and active status + query = """ + SELECT u.username, u.picture_url, tm.function, tm.active + FROM team_members tm + JOIN users u ON tm.user_id = u.id + WHERE tm.team_id = :team_id + """ + + # Execute the query and fetch all team members + rows = await db.fetch_all(query, {"team_id": self.id}) + + # Convert the fetched rows into a list 
of dictionaries (JSON serialized format) + members = [ + { + "username": row["username"], + "pictureUrl": row["picture_url"], + "function": TeamMemberFunctions(row["function"]).name, + "active": row["active"], + } + for row in rows + ] return members - def get_team_managers(self, count: int = None): + async def get_all_members(db: Database, team_id: int, count: int = None): """ - Returns users with manager role in the team + Returns all users in the team regardless of their role (manager or member). -------------------------------- - :param count: number of managers to return - :return: list of team managers + :param db: Database session + :param team_id: ID of the team + :param count: Number of members to return + :return: List of team members with specified attributes """ - base_query = TeamMembers.query.filter_by( - team_id=self.id, function=TeamMemberFunctions.MANAGER.value, active=True - ) + + query = f""" + SELECT u.username, + CASE + WHEN tm.function = {TeamMemberFunctions.MANAGER.value} THEN '{TeamMemberFunctions.MANAGER.name}' + WHEN tm.function = {TeamMemberFunctions.MEMBER.value} THEN '{TeamMemberFunctions.MEMBER.name}' + ELSE 'UNKNOWN' + END as function, + tm.active, + tm.join_request_notifications, + u.picture_url + FROM team_members tm + JOIN users u ON tm.user_id = u.id + WHERE tm.team_id = :team_id AND tm.active = true + """ + + values = { + "team_id": team_id, + } + if count: - return base_query.limit(count).all() - else: - return base_query.all() + query += " LIMIT :count" + values["count"] = count + + results = await db.fetch_all(query=query, values=values) + return [TeamMembersDTO(**result) for result in results] - def get_team_members(self, count: int = None): + async def get_team_managers(db: Database, team_id: int, count: int = None): """ - Returns users with member role in the team + Returns users with manager role in the team. -------------------------------- - :param count: number of members to return - :return: list of members in the team + :param db: Database session + :param team_id: ID of the team + :param count: Number of managers to return + :return: List of team managers with specified attributes """ - base_query = TeamMembers.query.filter_by( - team_id=self.id, function=TeamMemberFunctions.MEMBER.value, active=True - ) + query = f""" + SELECT u.username, + CASE + WHEN tm.function = {TeamMemberFunctions.MANAGER.value} THEN '{TeamMemberFunctions.MANAGER.name}' + WHEN tm.function = {TeamMemberFunctions.MEMBER.value} THEN '{TeamMemberFunctions.MEMBER.name}' + ELSE 'UNKNOWN' + END as function, + tm.active, + tm.join_request_notifications, + u.picture_url + FROM team_members tm + JOIN users u ON tm.user_id = u.id + WHERE tm.team_id = :team_id AND tm.function = :function_value AND tm.active = true + """ + + values = { + "team_id": team_id, + "function_value": TeamMemberFunctions.MANAGER.value, + } + if count: - return base_query.limit(count).all() - else: - return base_query.all() + query += " LIMIT :count" + values["count"] = count - def get_members_count_by_role(self, role: TeamMemberFunctions): + results = await db.fetch_all(query=query, values=values) + return [TeamMembersDTO(**result) for result in results] + + async def get_team_members(db: Database, team_id: int, count: int = None): """ - Returns number of members with specified role in the team + Returns users with member role in the team. 
-------------------------------- - :param role: role to count - :return: number of members with specified role in the team + :param db: Database session + :param team_id: ID of the team + :param count: Number of members to return + :return: List of team members with specified attributes + """ + + query = f""" + SELECT u.username, + CASE + WHEN tm.function = {TeamMemberFunctions.MANAGER.value} THEN '{TeamMemberFunctions.MANAGER.name}' + WHEN tm.function = {TeamMemberFunctions.MEMBER.value} THEN '{TeamMemberFunctions.MEMBER.name}' + ELSE 'UNKNOWN' + END as function, + tm.active, + tm.join_request_notifications, + u.picture_url + FROM team_members tm + JOIN users u ON tm.user_id = u.id + WHERE tm.team_id = :team_id AND tm.function = :function_value AND tm.active = true """ - return TeamMembers.query.filter_by( - team_id=self.id, function=role.value, active=True - ).count() + + values = { + "team_id": team_id, + "function_value": TeamMemberFunctions.MEMBER.value, + } + + if count: + query += " LIMIT :count" + values["count"] = count + + results = await db.fetch_all(query=query, values=values) + return [TeamMembersDTO(**result) for result in results] + + async def get_members_count_by_role( + db: Database, team_id: int, role: TeamMemberFunctions + ): + """ + Returns the number of members with the specified role in the team. + -------------------------------- + :param db: Database session + :param team_id: ID of the team + :param role: Role to count + :return: Number of members with the specified role in the team + """ + query = """ + SELECT COUNT(*) + FROM team_members + WHERE team_id = :team_id AND function = :function AND active = true + """ + + values = {"team_id": team_id, "function": role.value} + + return await db.fetch_val(query=query, values=values) + + @staticmethod + async def update_team_members(team, team_dto: TeamDTO, db: Database): + # Get existing members from the team + existing_members = await db.fetch_all( + "SELECT user_id FROM team_members WHERE team_id = :team_id", + {"team_id": team.id}, + ) + existing_members_list = list( + set([existing_member.user_id for existing_member in existing_members]) + ) + + new_member_usernames = list( + set([member.username for member in team_dto.members]) + ) + new_members_records = await db.fetch_all( + "SELECT id FROM users WHERE username = ANY(:new_member_usernames)", + {"new_member_usernames": new_member_usernames}, + ) + new_member_list = list( + set([new_member.id for new_member in new_members_records]) + ) + if existing_members_list != new_member_list: + for member in existing_members_list: + if member not in new_member_list: + await db.execute( + "DELETE FROM team_members WHERE team_id = :team_id AND user_id = :user_id", + {"team_id": team.id, "user_id": member}, + ) + + # Add or update members from the new member list + for member in team_dto.members: + user = await db.fetch_one( + "SELECT id FROM users WHERE username = :username", + {"username": member.username}, + ) + if not user: + raise NotFound(sub_code="USER_NOT_FOUND", username=member.username) + # Check if the user is already a member of the team + team_member = await db.fetch_one( + "SELECT * FROM team_members WHERE team_id = :team_id AND user_id = :user_id", + {"team_id": team.id, "user_id": user["id"]}, + ) + + if team_member: + await db.execute( + "UPDATE team_members SET join_request_notifications = :join_request_notifications WHERE team_id = :team_id AND user_id = :user_id", + { + "join_request_notifications": member.join_request_notifications, + "team_id": team.id, + 
"user_id": user["id"], + }, + ) + else: + await db.execute( + "INSERT INTO team_members (team_id, user_id, function, join_request_notifications) VALUES (:team_id, :user_id, :function, :join_request_notifications)", + { + "team_id": team.id, + "user_id": user["id"], + "function": TeamMemberFunctions[member["function"]].value, + "join_request_notifications": member.join_request_notifications, + }, + ) diff --git a/backend/models/postgis/user.py b/backend/models/postgis/user.py index e07cb59e42..731ed1e024 100644 --- a/backend/models/postgis/user.py +++ b/backend/models/postgis/user.py @@ -1,112 +1,133 @@ import geojson -from backend import db -from sqlalchemy import desc, func -from geoalchemy2 import functions +from databases import Database +from sqlalchemy import ( + ARRAY, + BigInteger, + Boolean, + Column, + DateTime, + Integer, + String, + delete, + insert, + update, +) +from sqlalchemy.orm import relationship +from backend.db import Base from backend.exceptions import NotFound from backend.models.dtos.user_dto import ( - UserDTO, - UserMappedProjectsDTO, + ListedUser, MappedProject, - UserFilterDTO, Pagination, - UserSearchQuery, - UserSearchDTO, ProjectParticipantUser, - ListedUser, + UserDTO, + UserFilterDTO, + UserMappedProjectsDTO, + UserSearchDTO, + UserSearchQuery, ) +from backend.models.postgis.interests import Interest, user_interests from backend.models.postgis.licenses import License, user_licenses_table from backend.models.postgis.project_info import ProjectInfo from backend.models.postgis.statuses import ( MappingLevel, ProjectStatus, - UserRole, UserGender, + UserRole, ) from backend.models.postgis.utils import timestamp -from backend.models.postgis.interests import Interest, user_interests -class User(db.Model): +class User(Base): """Describes the history associated with a task""" __tablename__ = "users" - id = db.Column(db.BigInteger, primary_key=True, index=True) - username = db.Column(db.String, unique=True) - role = db.Column(db.Integer, default=0, nullable=False) - mapping_level = db.Column(db.Integer, default=1, nullable=False) - tasks_mapped = db.Column(db.Integer, default=0, nullable=False) - tasks_validated = db.Column(db.Integer, default=0, nullable=False) - tasks_invalidated = db.Column(db.Integer, default=0, nullable=False) - projects_mapped = db.Column(db.ARRAY(db.Integer)) - email_address = db.Column(db.String) - is_email_verified = db.Column(db.Boolean, default=False) - is_expert = db.Column(db.Boolean, default=False) - twitter_id = db.Column(db.String) - facebook_id = db.Column(db.String) - linkedin_id = db.Column(db.String) - slack_id = db.Column(db.String) - skype_id = db.Column(db.String) - irc_id = db.Column(db.String) - name = db.Column(db.String) - city = db.Column(db.String) - country = db.Column(db.String) - picture_url = db.Column(db.String) - gender = db.Column(db.Integer) - self_description_gender = db.Column(db.String) - default_editor = db.Column(db.String, default="ID", nullable=False) - mentions_notifications = db.Column(db.Boolean, default=True, nullable=False) - projects_comments_notifications = db.Column( - db.Boolean, default=False, nullable=False - ) - projects_notifications = db.Column(db.Boolean, default=True, nullable=False) - tasks_notifications = db.Column(db.Boolean, default=True, nullable=False) - tasks_comments_notifications = db.Column(db.Boolean, default=False, nullable=False) - teams_announcement_notifications = db.Column( - db.Boolean, default=True, nullable=False - ) - date_registered = db.Column(db.DateTime, 
default=timestamp) + id = Column(BigInteger, primary_key=True, index=True) + username = Column(String, unique=True) + role = Column(Integer, default=0, nullable=False) + mapping_level = Column(Integer, default=1, nullable=False) + tasks_mapped = Column(Integer, default=0, nullable=False) + tasks_validated = Column(Integer, default=0, nullable=False) + tasks_invalidated = Column(Integer, default=0, nullable=False) + projects_mapped = Column(ARRAY(Integer)) + email_address = Column(String) + is_email_verified = Column(Boolean, default=False) + is_expert = Column(Boolean, default=False) + twitter_id = Column(String) + facebook_id = Column(String) + linkedin_id = Column(String) + slack_id = Column(String) + skype_id = Column(String) + irc_id = Column(String) + name = Column(String) + city = Column(String) + country = Column(String) + picture_url = Column(String) + gender = Column(Integer) + self_description_gender = Column(String) + default_editor = Column(String, default="ID", nullable=False) + mentions_notifications = Column(Boolean, default=True, nullable=False) + projects_comments_notifications = Column(Boolean, default=False, nullable=False) + projects_notifications = Column(Boolean, default=True, nullable=False) + tasks_notifications = Column(Boolean, default=True, nullable=False) + tasks_comments_notifications = Column(Boolean, default=False, nullable=False) + teams_announcement_notifications = Column(Boolean, default=True, nullable=False) + date_registered = Column(DateTime, default=timestamp) # Represents the date the user last had one of their tasks validated - last_validation_date = db.Column(db.DateTime, default=timestamp) + last_validation_date = Column(DateTime, default=timestamp) # Relationships - accepted_licenses = db.relationship( + accepted_licenses = relationship( "License", secondary=user_licenses_table, overlaps="users" ) - interests = db.relationship(Interest, secondary=user_interests, backref="users") - - def create(self): - """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - - def save(self): - db.session.commit() + interests = relationship(Interest, secondary=user_interests, backref="users") @staticmethod - def get_by_id(user_id: int): - """Return the user for the specified id, or None if not found""" - return db.session.get(User, user_id) + async def get_by_id(user_id: int, db: Database): + """ + Return the user for the specified id, or None if not found. 
+ :param user_id: ID of the user to retrieve + :param db: Database connection + :return: User object or None + """ + query = "SELECT * FROM users WHERE id = :user_id" + result = await db.fetch_one(query, values={"user_id": user_id}) + if result is None: + return None + return User(**result) @staticmethod - def get_by_username(username: str): + async def get_by_username(username: str, db: Database): """Return the user for the specified username, or None if not found""" - return User.query.filter_by(username=username).one_or_none() + query = """ + SELECT * FROM users + WHERE username = :username + """ + # Execute the query and fetch the result + result = await db.fetch_one(query, values={"username": username}) + return result if result else None - def update_username(self, username: str): + async def update_username(self, username: str, db: Database): """Update the username""" self.username = username - db.session.commit() + await db.execute( + "UPDATE users SET username = :username WHERE id = :user_id", + values={"user_id": self.id, "username": username}, + ) - def update_picture_url(self, picture_url: str): + async def update_picture_url(self, picture_url: str, db: Database): """Update the profile picture""" self.picture_url = picture_url - db.session.commit() + await db.execute( + "UPDATE users SET picture_url = :picture_url WHERE id = :user_id", + values={"user_id": self.id, "picture_url": picture_url}, + ) - def update(self, user_dto: UserDTO): + async def update(self, user_dto: UserDTO, db: Database): """Update the user details""" - for attr, value in user_dto.items(): + for attr, value in user_dto.dict().items(): if attr == "gender" and value is not None: value = UserGender[value].value @@ -121,225 +142,278 @@ def update(self, user_dto: UserDTO): if user_dto.gender != UserGender.SELF_DESCRIBE.name: self.self_description_gender = None - db.session.commit() - def set_email_verified_status(self, is_verified: bool): + # Create a dictionary for updating fields in the database + update_fields = { + attr: getattr(self, attr) + for attr in self.__dict__ + if attr in self.__table__.columns + } + + await db.execute(update(User).where(User.id == self.id).values(**update_fields)) + + async def set_email_verified_status(self, is_verified: bool, db: Database): """Updates email verfied flag on successfully verified emails""" self.is_email_verified = is_verified - db.session.commit() + query = "UPDATE users SET is_email_verified = :is_email_verified WHERE id = :user_id" + await db.execute( + query, values={"is_email_verified": is_verified, "user_id": self.id} + ) - def set_is_expert(self, is_expert: bool): + async def set_is_expert(self, is_expert: bool, db: Database): """Enables or disables expert mode on the user""" self.is_expert = is_expert - db.session.commit() + query = "UPDATE users SET is_expert = :is_expert WHERE id = :user_id" + await db.execute(query, values={"is_expert": is_expert, "user_id": self.id}) @staticmethod - def get_all_users(query: UserSearchQuery) -> UserSearchDTO: + async def get_all_users(query: UserSearchQuery, db) -> UserSearchDTO: """Search and filter all users""" - # Base query that applies to all searches - base = db.session.query( - User.id, User.username, User.mapping_level, User.role, User.picture_url - ) + base_query = """ + SELECT id, username, mapping_level, role, picture_url FROM users + """ + filters = [] + params = {} - # Add filter to query as required if query.mapping_level: mapping_levels = query.mapping_level.split(",") mapping_level_array = [ 
MappingLevel[mapping_level].value for mapping_level in mapping_levels ] - base = base.filter(User.mapping_level.in_(mapping_level_array)) + filters.append("mapping_level = ANY(:mapping_levels)") + params["mapping_levels"] = tuple(mapping_level_array) + if query.username: - base = base.filter( - User.username.ilike(("%" + query.username + "%")) - ).order_by( - func.strpos(func.lower(User.username), func.lower(query.username)) - ) + filters.append("username ILIKE :username") + params["username"] = f"%{query.username}%" if query.role: roles = query.role.split(",") role_array = [UserRole[role].value for role in roles] - base = base.filter(User.role.in_(role_array)) + filters.append("role = ANY(:roles)") + params["roles"] = tuple(role_array) + + if filters: + base_query += " WHERE " + " AND ".join(filters) + + base_query += " ORDER BY username" + if query.pagination: - results = base.order_by(User.username).paginate( - page=query.page, per_page=query.per_page, error_out=True - ) + base_query += " LIMIT :limit OFFSET :offset" + base_params = params.copy() + base_params["limit"] = query.per_page + base_params["offset"] = (query.page - 1) * query.per_page + + results = await db.fetch_all(base_query, base_params) + else: - per_page = base.count() - results = base.order_by(User.username).paginate(per_page=per_page) + results = await db.fetch_all(base_query, params) + dto = UserSearchDTO() - for result in results.items: + for result in results: listed_user = ListedUser() - listed_user.id = result.id - listed_user.mapping_level = MappingLevel(result.mapping_level).name - listed_user.username = result.username - listed_user.picture_url = result.picture_url - listed_user.role = UserRole(result.role).name - + listed_user.id = result["id"] + listed_user.mapping_level = MappingLevel(result["mapping_level"]).name + listed_user.username = result["username"] + listed_user.picture_url = result["picture_url"] + listed_user.role = UserRole(result["role"]).name dto.users.append(listed_user) + if query.pagination: - dto.pagination = Pagination(results) + count_query = "SELECT COUNT(*) FROM users" + count_query += " WHERE " + " AND ".join(filters) if filters else "" + total_count = await db.fetch_val(count_query, params) + dto.pagination = Pagination.from_total_count( + query.page, query.per_page, total_count + ) + return dto @staticmethod def get_all_users_not_paginated(): """Get all users in DB""" - return db.session.query(User.id).all() + return session.query(User.id).all() @staticmethod - def filter_users(user_filter: str, project_id: int, page: int) -> UserFilterDTO: - """Finds users that matches first characters, for auto-complete. + async def filter_users( + username: str, project_id: int, page: int, db: Database + ) -> UserFilterDTO: + """Finds users that match the first characters, for auto-complete. Users who have participated (mapped or validated) in the project, if given, will be returned ahead of those who have not. """ - # Note that the projects_mapped column includes both mapped and validated projects. 
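The rewritten `filter_users` in this hunk pushes the prefix match into SQL (`username ILIKE :username || '%'`), ranks participants of the given project ahead of other matches, and pages 20 rows at a time via `LIMIT/OFFSET`. A rough caller sketch follows, assuming an already-connected `databases.Database`; the prefix and project id are placeholders, not values from this changeset.

```python
# Sketch under assumptions: `db` is an already-connected databases.Database;
# the prefix and project id are illustrative placeholders.
from typing import List

from databases import Database

from backend.exceptions import NotFound
from backend.models.postgis.user import User


async def username_suggestions(db: Database, prefix: str, project_id: int) -> List[str]:
    try:
        # Page 1 of matches; participants of project_id sort ahead of other users.
        dto = await User.filter_users(prefix, project_id, page=1, db=db)
    except NotFound:
        # The new implementation raises NotFound when nothing matches the prefix.
        return []
    return dto.usernames
```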
- query = ( - db.session.query( - User.username, User.projects_mapped.any(project_id).label("participant") - ) - .filter(User.username.ilike(user_filter.lower() + "%")) - .order_by(desc("participant").nullslast(), User.username) - ) + query = """ + SELECT u.username, :project_id = ANY(u.projects_mapped) AS participant + FROM users u + WHERE u.username ILIKE :username || '%' + ORDER BY participant DESC NULLS LAST, u.username + LIMIT 20 OFFSET :offset + """ - results = query.paginate(page=page, per_page=20, error_out=True) + offset = (page - 1) * 20 + values = { + "username": username.lower(), + "project_id": project_id, + "offset": offset, + } - if results.total == 0: - raise NotFound(sub_code="USER_NOT_FOUND", username=user_filter) + results = await db.fetch_all(query, values=values) + + if not results: + raise NotFound(sub_code="USER_NOT_FOUND", username=username) dto = UserFilterDTO() - for result in results.items: - dto.usernames.append(result.username) + for result in results: + dto.usernames.append(result["username"]) if project_id is not None: - participant = ProjectParticipantUser() - participant.username = result.username - participant.project_id = project_id - participant.is_participant = bool(result.participant) + participant = ProjectParticipantUser( + username=result["username"], + project_id=project_id, + is_participant=bool(result["participant"]), + ) dto.users.append(participant) - dto.pagination = Pagination(results) + total_query = """ + SELECT COUNT(*) FROM users u WHERE u.username ILIKE :username || '%' + """ + total = await db.fetch_val(total_query, values={"username": username.lower()}) + dto.pagination = Pagination.from_total_count( + page=page, per_page=20, total=total + ) + return dto @staticmethod - def upsert_mapped_projects(user_id: int, project_id: int, local_session=None): - """Adds projects to mapped_projects if it doesn't exist""" - if local_session: - query = local_session.query(User).filter_by(id=user_id) - else: - query = User.query.filter_by(id=user_id) - result = query.filter( - User.projects_mapped.op("@>")("{}".format("{" + str(project_id) + "}")) - ).count() + async def upsert_mapped_projects(user_id: int, project_id: int, db: Database): + """Add project to mapped projects if it doesn't exist""" + query = """ + SELECT COUNT(*) + FROM users + WHERE id = :user_id + AND projects_mapped @> ARRAY[:project_id]::integer[] + """ + result = await db.fetch_val( + query, values={"user_id": user_id, "project_id": project_id} + ) + if result > 0: - return # User has previously mapped this project so return - - user = query.one_or_none() - # Fix for new mappers. - if user.projects_mapped is None: - user.projects_mapped = [] - user.projects_mapped.append(project_id) - if local_session: - local_session.commit() - else: - db.session.commit() + return # Project already exists in mapped projects + + # Insert the project_id into the user's mapped projects array + query = """ + UPDATE users + SET projects_mapped = array_append(projects_mapped, :project_id) + WHERE id = :user_id + """ + await db.execute(query, values={"user_id": user_id, "project_id": project_id}) + # TODO Optimization: Get only project name instead of all the locale attributes. 
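The new `upsert_mapped_projects` keeps the write idempotent in SQL: a `projects_mapped @> ARRAY[:project_id]::integer[]` containment check followed by `array_append` in a single `UPDATE`, replacing the old `op("@>")` filter plus in-Python append and commit. A minimal usage sketch, assuming a `databases` connection string and placeholder ids that are not part of this changeset:

```python
# Sketch only: the connection string and ids are assumptions, not part of this PR.
import asyncio

from databases import Database

from backend.models.postgis.user import User


async def mark_project_mapped(user_id: int, project_id: int) -> None:
    db = Database("postgresql://tm:tm@localhost/tasking_manager")  # assumed DSN
    await db.connect()
    try:
        # No-op if projects_mapped already contains project_id (the @> check);
        # otherwise appends it with array_append in a single UPDATE.
        await User.upsert_mapped_projects(user_id, project_id, db)
        # A second call is harmless thanks to the containment check.
        await User.upsert_mapped_projects(user_id, project_id, db)
    finally:
        await db.disconnect()


if __name__ == "__main__":
    asyncio.run(mark_project_mapped(user_id=123, project_id=42))
```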
@staticmethod - def get_mapped_projects( - user_id: int, preferred_locale: str + async def get_mapped_projects( + user_id: int, preferred_locale: str, db: Database ) -> UserMappedProjectsDTO: """Get all projects a user has mapped on""" - from backend.models.postgis.task import Task - from backend.models.postgis.project import Project + # Subquery for validated tasks + query_validated = """ + SELECT project_id, COUNT(validated_by) AS validated + FROM tasks + WHERE project_id IN ( + SELECT unnest(projects_mapped) FROM users WHERE id = :user_id + ) AND validated_by = :user_id + GROUP BY project_id, validated_by + """ - query = db.session.query(func.unnest(User.projects_mapped)).filter_by( - id=user_id - ) - query_validated = ( - db.session.query( - Task.project_id.label("project_id"), - func.count(Task.validated_by).label("validated"), - ) - .filter(Task.project_id.in_(query)) - .filter_by(validated_by=user_id) - .group_by(Task.project_id, Task.validated_by) - .subquery() - ) + # Subquery for mapped tasks + query_mapped = """ + SELECT project_id, COUNT(mapped_by) AS mapped + FROM tasks + WHERE project_id IN ( + SELECT unnest(projects_mapped) FROM users WHERE id = :user_id + ) AND mapped_by = :user_id + GROUP BY project_id, mapped_by + """ - query_mapped = ( - db.session.query( - Task.project_id.label("project_id"), - func.count(Task.mapped_by).label("mapped"), - ) - .filter(Task.project_id.in_(query)) - .filter_by(mapped_by=user_id) - .group_by(Task.project_id, Task.mapped_by) - .subquery() - ) + # Union of validated and mapped tasks + query_union = f""" + SELECT COALESCE(v.project_id, m.project_id) AS project_id, + COALESCE(v.validated, 0) AS validated, + COALESCE(m.mapped, 0) AS mapped + FROM ({query_validated}) v + FULL OUTER JOIN ({query_mapped}) m + ON v.project_id = m.project_id + """ - query_union = ( - db.session.query( - func.coalesce( - query_validated.c.project_id, query_mapped.c.project_id - ).label("project_id"), - func.coalesce(query_validated.c.validated, 0).label("validated"), - func.coalesce(query_mapped.c.mapped, 0).label("mapped"), - ) - .join( - query_mapped, - query_validated.c.project_id == query_mapped.c.project_id, - full=True, - ) - .subquery() - ) + # Main query to get project details + query_projects = f""" + SELECT p.id, p.status, p.default_locale, u.mapped, u.validated, ST_AsGeoJSON(p.centroid) AS centroid + FROM projects p + JOIN ({query_union}) u ON p.id = u.project_id + ORDER BY p.id DESC + """ - results = ( - db.session.query( - Project.id, - Project.status, - Project.default_locale, - query_union.c.mapped, - query_union.c.validated, - functions.ST_AsGeoJSON(Project.centroid), - ) - .filter(Project.id == query_union.c.project_id) - .order_by(desc(Project.id)) - .all() - ) + results = await db.fetch_all(query_projects, {"user_id": user_id}) mapped_projects_dto = UserMappedProjectsDTO() for row in results: mapped_project = MappedProject() - mapped_project.project_id = row[0] - mapped_project.status = ProjectStatus(row[1]).name - mapped_project.tasks_mapped = row[3] - mapped_project.tasks_validated = row[4] - mapped_project.centroid = geojson.loads(row[5]) - - project_info = ProjectInfo.get_dto_for_locale( - row[0], preferred_locale, row[2] + mapped_project.project_id = row["id"] + mapped_project.status = ProjectStatus(row["status"]).name + mapped_project.tasks_mapped = row["mapped"] + mapped_project.tasks_validated = row["validated"] + mapped_project.centroid = geojson.loads(row["centroid"]) + project_info = await ProjectInfo.get_dto_for_locale( + db, row["id"], 
preferred_locale, row["default_locale"] ) mapped_project.name = project_info.name - mapped_projects_dto.mapped_projects.append(mapped_project) - return mapped_projects_dto - def set_user_role(self, role: UserRole): + async def set_user_role(self, role: UserRole, db: Database): """Sets the supplied role on the user""" self.role = role.value - db.session.commit() - def set_mapping_level(self, level: MappingLevel): + query = """ + UPDATE users + SET role = :role + WHERE id = :user_id + """ + await db.execute(query, values={"user_id": self.id, "role": role.value}) + + async def set_mapping_level(self, level: MappingLevel, db: Database): """Sets the supplied level on the user""" self.mapping_level = level.value - db.session.commit() - def accept_license_terms(self, license_id: int): + query = """ + UPDATE users + SET mapping_level = :mapping_level + WHERE id = :user_id + """ + await db.execute( + query, values={"user_id": self.id, "mapping_level": level.value} + ) + + async def accept_license_terms(self, user_id, license_id: int, db: Database): """Associate the user in scope with the supplied license""" - image_license = License.get_by_id(license_id) - self.accepted_licenses.append(image_license) - db.session.commit() + _ = await License.get_by_id(license_id, db) + + query_check = """ + SELECT 1 FROM user_licenses WHERE "user" = :user_id AND "license" = :license_id + """ + record = await db.fetch_one( + query_check, values={"user_id": user_id, "license_id": license_id} + ) + + if not record: + query = """ + INSERT INTO user_licenses ("user", "license") + VALUES (:user_id, :license_id) + """ + await db.execute( + query, values={"user_id": user_id, "license_id": license_id} + ) def has_user_accepted_licence(self, license_id: int): """Test to see if the user has accepted the terms of the specified license""" @@ -350,11 +424,6 @@ def has_user_accepted_licence(self, license_id: int): return False - def delete(self): - """Delete the user in scope from DB""" - db.session.delete(self) - db.session.commit() - def as_dto(self, logged_in_username: str) -> UserDTO: """Create DTO object from user in scope""" user_dto = UserDTO() @@ -366,7 +435,7 @@ def as_dto(self, logged_in_username: str) -> UserDTO: len(self.projects_mapped) if self.projects_mapped else None ) user_dto.is_expert = self.is_expert or False - user_dto.date_registered = self.date_registered + # user_dto.date_registered = self.date_registered user_dto.twitter_id = self.twitter_id user_dto.linkedin_id = self.linkedin_id user_dto.facebook_id = self.facebook_id @@ -398,31 +467,21 @@ def as_dto(self, logged_in_username: str) -> UserDTO: user_dto.self_description_gender = self.self_description_gender return user_dto - def create_or_update_interests(self, interests_ids): - self.interests = [] - objs = [Interest.get_by_id(i) for i in interests_ids] - self.interests.extend(objs) - db.session.commit() - -class UserEmail(db.Model): +class UserEmail(Base): __tablename__ = "users_with_email" - id = db.Column(db.BigInteger, primary_key=True, index=True) - email = db.Column(db.String, nullable=False, unique=True) + id = Column(BigInteger, primary_key=True, index=True) + email = Column(String, nullable=False, unique=True) - def create(self): + async def create(self, db: Database): """Creates and saves the current model to the DB""" - db.session.add(self) - db.session.commit() - - def save(self): - db.session.commit() + user = await db.execute(insert(UserEmail.__table__).values(email=self.email)) + return user - def delete(self): + async def delete(self, db: 
Database): """Deletes the current model from the DB""" - db.session.delete(self) - db.session.commit() + await db.execute(delete(UserEmail.__table__).where(UserEmail.id == self.id)) @staticmethod def get_by_email(email_address: str): diff --git a/backend/models/postgis/utils.py b/backend/models/postgis/utils.py index d2cfeef134..a54835a95c 100644 --- a/backend/models/postgis/utils.py +++ b/backend/models/postgis/utils.py @@ -1,37 +1,39 @@ import datetime import json import re -from flask import current_app + +# # from flask import current_app from geoalchemy2 import Geometry from geoalchemy2.functions import GenericFunction +from loguru import logger class NotFound(Exception): """Custom exception to indicate model not found in database""" - pass + def __init__(self, message): + logger.debug(message) class InvalidGeoJson(Exception): """Custom exception to notify caller they have supplied Invalid GeoJson""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class UserLicenseError(Exception): """Custom Exception to notify caller that the user attempting to map has not accepted the license""" - pass + def __init__(self, message): + logger.debug(message) class InvalidData(Exception): """Custom exception to notify caller they have supplied Invalid data to a model""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class ST_SetSRID(GenericFunction): diff --git a/backend/pagination.py b/backend/pagination.py new file mode 100644 index 0000000000..a04398c4ed --- /dev/null +++ b/backend/pagination.py @@ -0,0 +1,409 @@ +from __future__ import annotations + +import typing as t +from typing import Optional +from math import ceil + +import sqlalchemy as sa +import sqlalchemy.orm as sa_orm +from sqlalchemy.sql.selectable import Select +from sqlalchemy.ext.asyncio import AsyncSession + + +def abort(code): + raise Exception + + +class Pagination: + """Apply an offset and limit to the query based on the current page and number of + items per page. + + Don't create pagination objects manually. They are created by + :meth:`.SQLAlchemy.paginate` and :meth:`.Query.paginate`. + + This is a base class, a subclass must implement :meth:`_query_items` and + :meth:`_query_count`. Those methods will use arguments passed as ``kwargs`` to + perform the queries. + + :param page: The current page, used to calculate the offset. Defaults to the + ``page`` query arg during a request, or 1 otherwise. + :param per_page: The maximum number of items on a page, used to calculate the + offset and limit. Defaults to the ``per_page`` query arg during a request, + or 20 otherwise. + :param max_per_page: The maximum allowed value for ``per_page``, to limit a + user-provided value. Use ``None`` for no limit. Defaults to 100. + :param error_out: Abort with a ``404 Not Found`` error if no items are returned + and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if + either are not ints. + :param count: Calculate the total number of values by issuing an extra count + query. For very complex queries this may be inaccurate or slow, so it can be + disabled and set manually if necessary. + :param kwargs: Information about the query to paginate. Different subclasses will + require different arguments. + + .. versionchanged:: 3.0 + Iterating over a pagination object iterates over its items. + + .. versionchanged:: 3.0 + Creating instances manually is not a public API. 
+ """ + + def __init__( + self, + page: Optional[int] = None, + per_page: Optional[int] = None, + max_per_page: Optional[int] = 100, + error_out: bool = True, + count: bool = True, + **kwargs: t.Any, + ) -> None: + self._query_args = kwargs + page, per_page = self._prepare_page_args( + page=page, + per_page=per_page, + max_per_page=max_per_page, + error_out=error_out, + ) + + self.page: int = page + """The current page.""" + + self.per_page: int = per_page + """The maximum number of items on a page.""" + + self.max_per_page: int | None = max_per_page + """The maximum allowed value for ``per_page``.""" + + @staticmethod + async def get(self, count): + items = await self._query_items() + + if not items and self.page != 1 and self.error_out: + abort(404) + + self.items: list[t.Any] = items + """The items on the current page. Iterating over the pagination object is + equivalent to iterating over the items. + """ + + if count: + total = await self._query_count() + else: + total = None + + self.total: int | None = total + """The total number of items across all pages.""" + return self + + @staticmethod + def _prepare_page_args( + *, + page: Optional[int] = None, + per_page: Optional[int] = None, + max_per_page: Optional[int] = None, + error_out: bool = True, + ) -> tuple[int, int]: + if page is None: + page = 1 + + if per_page is None: + per_page = 20 + + if max_per_page is not None: + per_page = min(per_page, max_per_page) + + if page < 1: + if error_out: + abort(404) + else: + page = 1 + + if per_page < 1: + if error_out: + abort(404) + else: + per_page = 20 + + return page, per_page + + @property + def _query_offset(self) -> int: + """The index of the first item to query, passed to ``offset()``. + + :meta private: + + .. versionadded:: 3.0 + """ + return (self.page - 1) * self.per_page + + def _query_items(self) -> list[t.Any]: + """Execute the query to get the items on the current page. + + Uses init arguments stored in :attr:`_query_args`. + + :meta private: + + .. versionadded:: 3.0 + """ + raise NotImplementedError + + def _query_count(self) -> int: + """Execute the query to get the total number of items. + + Uses init arguments stored in :attr:`_query_args`. + + :meta private: + + .. versionadded:: 3.0 + """ + raise NotImplementedError + + @property + def first(self) -> int: + """The number of the first item on the page, starting from 1, or 0 if there are + no items. + + .. versionadded:: 3.0 + """ + if len(self.items) == 0: + return 0 + + return (self.page - 1) * self.per_page + 1 + + @property + def last(self) -> int: + """The number of the last item on the page, starting from 1, inclusive, or 0 if + there are no items. + + .. versionadded:: 3.0 + """ + first = self.first + return max(first, first + len(self.items) - 1) + + @property + def pages(self) -> int: + """The total number of pages.""" + if self.total == 0 or self.total is None: + return 0 + + return ceil(self.total / self.per_page) + + @property + def has_prev(self) -> bool: + """``True`` if this is not the first page.""" + return self.page > 1 + + @property + def prev_num(self) -> int | None: + """The previous page number, or ``None`` if this is the first page.""" + if not self.has_prev: + return None + + return self.page - 1 + + def prev(self, *, error_out: bool = False) -> Pagination: + """Query the :class:`Pagination` object for the previous page. 
+ + :param error_out: Abort with a ``404 Not Found`` error if no items are returned + and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if + either are not ints. + """ + p = type(self)( + page=self.page - 1, + per_page=self.per_page, + error_out=error_out, + count=False, + **self._query_args, + ) + p.total = self.total + return p + + @property + def has_next(self) -> bool: + """``True`` if this is not the last page.""" + return self.page < self.pages + + @property + def next_num(self) -> int | None: + """The next page number, or ``None`` if this is the last page.""" + if not self.has_next: + return None + + return self.page + 1 + + def next(self, *, error_out: bool = False) -> Pagination: + """Query the :class:`Pagination` object for the next page. + + :param error_out: Abort with a ``404 Not Found`` error if no items are returned + and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if + either are not ints. + """ + p = type(self)( + page=self.page + 1, + per_page=self.per_page, + max_per_page=self.max_per_page, + error_out=error_out, + count=False, + **self._query_args, + ) + p.total = self.total + return p + + def iter_pages( + self, + *, + left_edge: int = 2, + left_current: int = 2, + right_current: int = 4, + right_edge: int = 2, + ) -> t.Iterator[int | None]: + """Yield page numbers for a pagination widget. Skipped pages between the edges + and middle are represented by a ``None``. + + For example, if there are 20 pages and the current page is 7, the following + values are yielded. + + .. code-block:: python + + 1, 2, None, 5, 6, 7, 8, 9, 10, 11, None, 19, 20 + + :param left_edge: How many pages to show from the first page. + :param left_current: How many pages to show left of the current page. + :param right_current: How many pages to show right of the current page. + :param right_edge: How many pages to show from the last page. + + .. versionchanged:: 3.0 + Improved efficiency of calculating what to yield. + + .. versionchanged:: 3.0 + ``right_current`` boundary is inclusive. + + .. versionchanged:: 3.0 + All parameters are keyword-only. + """ + pages_end = self.pages + 1 + + if pages_end == 1: + return + + left_end = min(1 + left_edge, pages_end) + yield from range(1, left_end) + + if left_end == pages_end: + return + + mid_start = max(left_end, self.page - left_current) + mid_end = min(self.page + right_current + 1, pages_end) + + if mid_start - left_end > 0: + yield None + + yield from range(mid_start, mid_end) + + if mid_end == pages_end: + return + + right_start = max(mid_end, pages_end - right_edge) + + if right_start - mid_end > 0: + yield None + + yield from range(right_start, pages_end) + + def __iter__(self) -> t.Iterator[t.Any]: + yield from self.items + + +class SelectPagination(Pagination): + """Returned by :meth:`.SQLAlchemy.paginate`. Takes ``select`` and ``session`` + arguments in addition to the :class:`Pagination` arguments. + + .. 
versionadded:: 3.0 + """ + + def _query_items(self) -> list[t.Any]: + select = self._query_args["select"] + select = select.limit(self.per_page).offset(self._query_offset) + session = self._query_args["session"] + return list(session.execute(select).unique().scalars()) + + def _query_count(self) -> int: + select = self._query_args["select"] + sub = select.options(sa_orm.lazyload("*")).order_by(None).subquery() + session = self._query_args["session"] + out = session.execute(sa.select(sa.func.count()).select_from(sub)).scalar() + return out # type: ignore[no-any-return] + + +class QueryPagination(Pagination): + """Returned by :meth:`.Query.paginate`. Takes a ``query`` argument in addition to + the :class:`Pagination` arguments. + + .. versionadded:: 3.0 + """ + + async def _query_items(self) -> list[t.Any]: + query = self._query_args["query"] + session = self._query_args["session"] + out = await session.execute( + query.limit(self.per_page).offset(self._query_offset) + ) + return out # type: ignore[no-any-return] + + async def _query_count(self) -> int: + # Query.count automatically disables eager loads + session = self._query_args["session"] + out = await session.scalar( + sa.select(sa.func.count()).select_from(self._query_args["query"]) + ) + return out # type: ignore[no-any-return] + + +# @inherit_cache +class CustomQuery(Select): + async def paginate( + self, + *, + session: AsyncSession, + page: Optional[int] = None, + per_page: Optional[int] = None, + max_per_page: Optional[int] = None, + error_out: bool = True, + count: bool = True, + ) -> Pagination: + """Apply an offset and limit to the query based on the current page and number + of items per page, returning a :class:`.Pagination` object. + + :param page: The current page, used to calculate the offset. Defaults to the + ``page`` query arg during a request, or 1 otherwise. + :param per_page: The maximum number of items on a page, used to calculate the + offset and limit. Defaults to the ``per_page`` query arg during a request, + or 20 otherwise. + :param max_per_page: The maximum allowed value for ``per_page``, to limit a + user-provided value. Use ``None`` for no limit. Defaults to 100. + :param error_out: Abort with a ``404 Not Found`` error if no items are returned + and ``page`` is not 1, or if ``page`` or ``per_page`` is less than 1, or if + either are not ints. + :param count: Calculate the total number of values by issuing an extra count + query. For very complex queries this may be inaccurate or slow, so it can be + disabled and set manually if necessary. + + .. versionchanged:: 3.0 + All parameters are keyword-only. + + .. versionchanged:: 3.0 + The ``count`` query is more efficient. + + .. versionchanged:: 3.0 + ``max_per_page`` defaults to 100. 
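A hedged usage sketch of this method, assuming SQLAlchemy 2.x (so a Select subclass such as CustomQuery can be built directly from a mapped entity) and an application-side AsyncSession factory; the User model, async_session, and list_users_page names are illustrative only, not part of this module:

from backend.pagination import CustomQuery

async def list_users_page(async_session, page: int = 1, per_page: int = 20):
    async with async_session() as session:
        # CustomQuery is a Select subclass, so generative methods keep its type.
        query = CustomQuery(User).order_by(User.id)
        pagination = await query.paginate(session=session, page=page, per_page=per_page)
        return {
            "total": pagination.total,      # populated because count=True by default
            "pages": pagination.pages,
            "has_next": pagination.has_next,
            "items": pagination.items,      # the executed result for this page's window
        }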
+ """ + query = QueryPagination( + query=self, + session=session, + page=page, + per_page=per_page, + max_per_page=max_per_page, + error_out=error_out, + count=count, + ) + return await query.get(query, count) diff --git a/backend/routes.py b/backend/routes.py new file mode 100644 index 0000000000..c8a5fc3bca --- /dev/null +++ b/backend/routes.py @@ -0,0 +1,134 @@ +from fastapi import APIRouter +from backend.api.projects import ( + resources as project_resources, + activities as project_activities, + contributions as project_contributions, + statistics as project_statistics, + teams as project_teams, + campaigns as project_campaigns, + actions as project_actions, + favorites as project_favorites, + partnerships as project_partnerships, +) + +from backend.api.comments import resources as comment_resources +from backend.api.countries import resources as country_resources +from backend.api.campaigns import resources as campaign_resources +from backend.api.annotations import resources as annotation_resources +from backend.api.interests import resources as interest_resources +from backend.api.users import ( + resources as user_resources, + statistics as user_statistics, + openstreetmap as users_openstreetmap, + tasks as users_tasks, + actions as user_actions, +) +from backend.api.issues import resources as issue_resources +from backend.api.licenses import ( + resources as license_resources, + actions as license_actions, +) +from backend.api.organisations import ( + resources as organisation_resources, + campaigns as organisation_campaigns, +) +from backend.api.tasks import ( + resources as task_resources, + actions as task_actions, + statistics as task_statistics, +) +from backend.api.teams import ( + resources as teams_resources, + actions as teams_actions, +) +from backend.api.system import ( + applications as system_applications, + general as system_general, + banner as system_banner, + statistics as system_statistics, + authentication as system_authentication, + image_upload as system_image_upload, +) + +from backend.api.notifications import ( + resources as notification_resources, + actions as notification_actions, +) + +from backend.api.partners import ( + resources as partners_resources, + statistics as partners_statistics, +) + +v2 = APIRouter(prefix="/api/v2") + + +def add_api_end_points(api): + v2.include_router(project_resources.router) + v2.include_router(project_activities.router) + v2.include_router(project_contributions.router) + v2.include_router(project_statistics.router) + v2.include_router(project_teams.router) + v2.include_router(project_campaigns.router) + v2.include_router(project_actions.router) + v2.include_router(project_favorites.router) + v2.include_router(project_partnerships.router) + + # Comments REST endpoint + v2.include_router(comment_resources.router) + + # Teams REST endpoint + v2.include_router(teams_resources.router) + v2.include_router(teams_actions.router) + + # Countries REST endpoint + v2.include_router(country_resources.router) + + # Campaigns REST endpoint + v2.include_router(campaign_resources.router) + + # Annotations REST endpoint + v2.include_router(annotation_resources.router) + + # Interests REST endpoint + v2.include_router(interest_resources.router) + + # Users REST endpoint + v2.include_router(user_statistics.router) + v2.include_router(user_resources.router) + v2.include_router(users_openstreetmap.router) + v2.include_router(users_tasks.router) + v2.include_router(user_statistics.router) + v2.include_router(user_actions.router) + + # Licenses 
REST endpoint + v2.include_router(license_resources.router) + v2.include_router(license_actions.router) + + # Organisations REST endpoint + v2.include_router(organisation_resources.router) + v2.include_router(organisation_campaigns.router) + + # Tasks REST endpoint + v2.include_router(task_resources.router) + v2.include_router(task_actions.router) + v2.include_router(task_statistics.router) + + # System REST endpoint + v2.include_router(system_applications.router) + v2.include_router(system_general.router) + v2.include_router(system_banner.router) + v2.include_router(system_statistics.router) + v2.include_router(system_authentication.router) + v2.include_router(system_image_upload.router) + + # Notifications REST endpoint + v2.include_router(notification_actions.router) + v2.include_router(notification_resources.router) + + # Issues REST endpoint + v2.include_router(issue_resources.router) + v2.include_router(partners_resources.router) + v2.include_router(partners_statistics.router) + + api.include_router(v2) diff --git a/backend/services/application_service.py b/backend/services/application_service.py index b29eb2a4df..fa1ed52548 100644 --- a/backend/services/application_service.py +++ b/backend/services/application_service.py @@ -1,3 +1,5 @@ +from databases import Database + from backend.exceptions import NotFound from backend.models.postgis.application import Application from backend.services.users.authentication_service import AuthenticationService @@ -5,14 +7,14 @@ class ApplicationService: @staticmethod - def create_token(user_id: int) -> Application: - application = Application().create(user_id) + async def create_token(user_id: int, db: Database) -> Application: + application = await Application().create(user_id, db) return application.as_dto() @staticmethod - def get_token(token: str): - application = Application.get_token(token) + async def get_token(token: str, db: Database): + application = await Application.get_token(token, db) if application is None: raise NotFound(sub_code="APPLICATION_NOT_FOUND") @@ -20,14 +22,14 @@ def get_token(token: str): return application @staticmethod - def get_all_tokens_for_logged_in_user(user_id: int): - tokens = Application.get_all_for_user(user_id) + async def get_all_tokens_for_logged_in_user(user_id: int, db: Database): + tokens = await Application.get_all_for_user(user_id, db) return tokens @staticmethod - def check_token(token: str): - valid_token = ApplicationService.get_token(token) + async def check_token(token: str, db: Database): + valid_token = await ApplicationService.get_token(token, db) if not valid_token: return False diff --git a/backend/services/campaign_service.py b/backend/services/campaign_service.py index 9f00f6dded..796f19c0f1 100644 --- a/backend/services/campaign_service.py +++ b/backend/services/campaign_service.py @@ -1,211 +1,349 @@ -from backend import db -from flask import current_app +from databases import Database +from fastapi import HTTPException from sqlalchemy.exc import IntegrityError -from psycopg2.errors import UniqueViolation, NotNullViolation from backend.exceptions import NotFound from backend.models.dtos.campaign_dto import ( CampaignDTO, - NewCampaignDTO, - CampaignProjectDTO, CampaignListDTO, + CampaignProjectDTO, + NewCampaignDTO, ) -from backend.models.postgis.campaign import ( - Campaign, - campaign_projects, - campaign_organisations, -) -from backend.models.postgis.organisation import Organisation +from backend.models.postgis.campaign import Campaign from backend.services.organisation_service import 
OrganisationService from backend.services.project_service import ProjectService class CampaignService: @staticmethod - def get_campaign(campaign_id: int) -> Campaign: - """Gets the specified campaign""" - campaign = db.session.get(Campaign, campaign_id) + async def get_campaign(campaign_id: int, db: Database) -> CampaignDTO: + """Gets the specified campaign by its ID""" + query = """ + SELECT id, name, logo, url, description + FROM campaigns + WHERE id = :campaign_id + """ + row = await db.fetch_one(query=query, values={"campaign_id": campaign_id}) - if campaign is None: + if row is None: raise NotFound(sub_code="CAMPAIGN_NOT_FOUND", campaign_id=campaign_id) - return campaign + return CampaignDTO(**row) @staticmethod - def get_campaign_by_name(campaign_name: str) -> Campaign: - campaign = Campaign.query.filter_by(name=campaign_name).first() + async def get_campaign_by_name(campaign_name: str, db: Database) -> CampaignDTO: + """Gets the specified campaign by its name""" + query = """ + SELECT id, name, logo, url, description + FROM campaigns + WHERE name = :campaign_name + """ + row = await db.fetch_one(query=query, values={"campaign_name": campaign_name}) - if campaign is None: + if row is None: raise NotFound(sub_code="CAMPAIGN_NOT_FOUND", campaign_name=campaign_name) - return campaign + return CampaignDTO(**row) @staticmethod - def delete_campaign(campaign_id: int): - """Delete campaign for a project""" - campaign = db.session.get(Campaign, campaign_id) - campaign.delete() - campaign.save() + async def delete_campaign(campaign_id: int, db: Database): + """Delete a campaign and its related organizations by its ID""" + # Begin a transaction to ensure both deletions are handled together + async with db.transaction(): + query_delete_orgs = """ + DELETE FROM campaign_organisations + WHERE campaign_id = :campaign_id + """ + await db.execute( + query=query_delete_orgs, values={"campaign_id": campaign_id} + ) + + query_delete_campaign = """ + DELETE FROM campaigns + WHERE id = :campaign_id + """ + await db.execute( + query=query_delete_campaign, values={"campaign_id": campaign_id} + ) @staticmethod - def get_campaign_as_dto(campaign_id: int, user_id: int): + async def get_campaign_as_dto(campaign_id: int, db) -> CampaignDTO: """Gets the specified campaign""" - campaign = CampaignService.get_campaign(campaign_id) - - campaign_dto = CampaignDTO() - campaign_dto.id = campaign.id - campaign_dto.url = campaign.url - campaign_dto.name = campaign.name - campaign_dto.logo = campaign.logo - campaign_dto.description = campaign.description - - return campaign_dto + campaign = await CampaignService.get_campaign(campaign_id, db) + return campaign @staticmethod - def get_project_campaigns_as_dto(project_id: int) -> CampaignListDTO: + async def get_project_campaigns_as_dto( + project_id: int, db: Database + ) -> CampaignListDTO: """Gets all the campaigns for a specified project""" # Test if project exists - ProjectService.get_project_by_id(project_id) - query = ( - Campaign.query.join(campaign_projects) - .filter(campaign_projects.c.project_id == project_id) - .all() - ) + await ProjectService.get_project_by_id(project_id, db) - return Campaign.campaign_list_as_dto(query) + query = """ + SELECT c.* + FROM campaigns c + INNER JOIN campaign_projects cp ON c.id = cp.campaign_id + WHERE cp.project_id = :project_id + """ + + campaigns = await db.fetch_all(query=query, values={"project_id": project_id}) + return Campaign.campaign_list_as_dto(campaigns) @staticmethod - def delete_project_campaign(project_id: int, 
campaign_id: int): - """Delete campaign for a project""" - campaign = CampaignService.get_campaign(campaign_id) - project = ProjectService.get_project_by_id(project_id) - project_campaigns = CampaignService.get_project_campaigns_as_dto(project_id) - if campaign.id not in [i["id"] for i in project_campaigns["campaigns"]]: + async def delete_project_campaign(project_id: int, campaign_id: int, db: Database): + """Delete campaign from a project.""" + # Check if the campaign exists + await CampaignService.get_campaign(campaign_id, db) + + # Check if the project exists + await ProjectService.get_project_by_id(project_id, db) + + """Fetch all campaigns associated with a project.""" + query = """ + SELECT c.id + FROM campaigns c + JOIN campaign_projects pc ON c.id = pc.campaign_id + WHERE pc.project_id = :project_id + """ + project_campaigns = await db.fetch_all( + query=query, values={"project_id": project_id} + ) + + if campaign_id not in [campaign.id for campaign in project_campaigns]: raise NotFound( sub_code="PROJECT_CAMPAIGN_NOT_FOUND", campaign_id=campaign_id, project_id=project_id, ) - project.campaign.remove(campaign) - db.session.commit() - new_campaigns = CampaignService.get_project_campaigns_as_dto(project_id) - return new_campaigns + + # Delete the campaign from the project + delete_query = """ + DELETE FROM campaign_projects + WHERE project_id = :project_id + AND campaign_id = :campaign_id + """ + await db.execute( + delete_query, values={"project_id": project_id, "campaign_id": campaign_id} + ) + # Fetch the updated list of campaigns + updated_campaigns = await CampaignService.get_project_campaigns_as_dto( + project_id, db + ) + return updated_campaigns @staticmethod - def get_all_campaigns() -> CampaignListDTO: + async def get_all_campaigns(db: Database) -> CampaignListDTO: """Returns a list of all campaigns""" - query = Campaign.query.order_by(Campaign.name).distinct() - - return Campaign.campaign_list_as_dto(query) + # Define the raw SQL query + query = """ + SELECT DISTINCT id, name + FROM campaigns + ORDER BY name + """ + rows = await db.fetch_all(query) + return Campaign.campaign_list_as_dto(rows) @staticmethod - def create_campaign(campaign_dto: NewCampaignDTO): - """Creates a new campaign""" - campaign = Campaign.from_dto(campaign_dto) + async def create_campaign(campaign_dto: NewCampaignDTO, db: Database): + """Creates a new campaign asynchronously""" try: - campaign.create() - if campaign_dto.organisations: - for org_id in campaign_dto.organisations: - organisation = OrganisationService.get_organisation_by_id(org_id) - campaign.organisation.append(organisation) - db.session.commit() - except IntegrityError as e: - current_app.logger.info("Integrity error: {}".format(e.args[0])) - if isinstance(e.orig, UniqueViolation): - raise ValueError("NameExists- Campaign name already exists") from e - if isinstance(e.orig, NotNullViolation): - raise ValueError("NullName- Campaign name cannot be null") from e - return campaign + async with db.transaction(): + # Generate the base query and values + query = """ + INSERT INTO campaigns (name, logo, url, description) + VALUES (:name, :logo, :url, :description) + RETURNING id + """ + values = { + "name": campaign_dto.name, + "logo": campaign_dto.logo, + "url": campaign_dto.url, + "description": campaign_dto.description, + } + + campaign_id = await db.execute(query, values) + if campaign_dto.organisations: + for org_id in campaign_dto.organisations: + organisation = await OrganisationService.get_organisation_by_id( + org_id, db + ) + if 
organisation: + org_query = """ + INSERT INTO campaign_organisations (campaign_id, organisation_id) + VALUES (:campaign_id, :organisation_id) + """ + await db.execute( + org_query, + {"campaign_id": campaign_id, "organisation_id": org_id}, + ) + + return campaign_id + except Exception as e: + raise HTTPException( + status_code=500, detail="Failed to create campaign." + ) from e @staticmethod - def create_campaign_project(dto: CampaignProjectDTO): - """Assign a campaign with a project""" - ProjectService.get_project_by_id(dto.project_id) - CampaignService.get_campaign(dto.campaign_id) - statement = campaign_projects.insert().values( - campaign_id=dto.campaign_id, project_id=dto.project_id + async def create_campaign_project( + dto: CampaignProjectDTO, db: Database + ) -> CampaignListDTO: + """Assign a campaign to a project""" + + # Check if the project exists + await ProjectService.get_project_by_id(dto.project_id, db) + + # Check if the campaign exists + await CampaignService.get_campaign(dto.campaign_id, db) + + insert_query = """ + INSERT INTO campaign_projects (campaign_id, project_id) + VALUES (:campaign_id, :project_id) + """ + + await db.execute( + query=insert_query, + values={"campaign_id": dto.campaign_id, "project_id": dto.project_id}, + ) + new_campaigns = await CampaignService.get_project_campaigns_as_dto( + dto.project_id, db ) - db.session.execute(statement) - db.session.commit() - new_campaigns = CampaignService.get_project_campaigns_as_dto(dto.project_id) return new_campaigns @staticmethod - def create_campaign_organisation(organisation_id: int, campaign_id: int): - """Creates new campaign from DTO""" + async def create_campaign_organisation( + organisation_id: int, campaign_id: int, db: Database + ): + """Creates new campaign organisation from DTO""" # Check if campaign exists - CampaignService.get_campaign(campaign_id) + await CampaignService.get_campaign(campaign_id, db) # Check if organisation exists - OrganisationService.get_organisation_by_id(organisation_id) + await OrganisationService.get_organisation_by_id(organisation_id, db) - statement = campaign_organisations.insert().values( - campaign_id=campaign_id, organisation_id=organisation_id + query = """ + INSERT INTO campaign_organisations (campaign_id, organisation_id) + VALUES (:campaign_id, :organisation_id) + """ + await db.execute( + query=query, + values={"campaign_id": campaign_id, "organisation_id": organisation_id}, ) - db.session.execute(statement) - db.session.commit() - new_campaigns = CampaignService.get_organisation_campaigns_as_dto( - organisation_id - ) - return new_campaigns @staticmethod - def get_organisation_campaigns_as_dto(organisation_id: int) -> CampaignListDTO: - """Gets all the campaigns for a specified project""" + async def get_organisation_campaigns_as_dto( + organisation_id: int, database: Database + ) -> CampaignListDTO: + """Gets all the campaigns for a specified organisation""" + # Check if organisation exists - OrganisationService.get_organisation_by_id(organisation_id) - query = ( - Campaign.query.join(campaign_organisations) - .filter(campaign_organisations.c.organisation_id == organisation_id) - .all() + await OrganisationService.get_organisation_by_id(organisation_id, database) + + query = """ + SELECT c.* + FROM campaigns c + JOIN campaign_organisations co ON c.id = co.campaign_id + WHERE co.organisation_id = :organisation_id + """ + campaigns = await database.fetch_all( + query=query, values={"organisation_id": organisation_id} ) - return Campaign.campaign_list_as_dto(query) + 
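The data-access pattern used throughout this service (SQL with named :param placeholders bound from a values mapping, fetch_one/fetch_all for reads, and execute inside db.transaction() for grouped writes) is the standard encode/databases API. A minimal self-contained sketch; the SQLite DSN and campaigns table here are illustrative only, since the real services receive an already-connected Database:

import asyncio
from databases import Database

async def demo() -> None:
    # Illustrative in-memory database; requires the aiosqlite driver.
    db = Database("sqlite+aiosqlite:///:memory:")
    await db.connect()
    try:
        await db.execute("CREATE TABLE campaigns (id INTEGER PRIMARY KEY, name TEXT NOT NULL)")
        # Writes grouped in a transaction commit or roll back together.
        async with db.transaction():
            await db.execute(
                "INSERT INTO campaigns (id, name) VALUES (:id, :name)",
                values={"id": 1, "name": "Mapathon"},
            )
        row = await db.fetch_one(
            "SELECT id, name FROM campaigns WHERE id = :id", values={"id": 1}
        )
        print(row["name"])  # Mapathon
    finally:
        await db.disconnect()

if __name__ == "__main__":
    asyncio.run(demo())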
+ # Convert the result to a list of campaign DTOs + return Campaign.campaign_list_as_dto(campaigns) @staticmethod - def campaign_organisation_exists(campaign_id: int, org_id: int): - return ( - Campaign.query.join(campaign_organisations) - .filter( - campaign_organisations.c.organisation_id == org_id, - campaign_organisations.c.campaign_id == campaign_id, - ) - .one_or_none() + async def campaign_organisation_exists( + campaign_id: int, org_id: int, database: Database + ) -> bool: + query = """ + SELECT 1 + FROM campaign_organisations + WHERE organisation_id = :org_id + AND campaign_id = :campaign_id + LIMIT 1 + """ + result = await database.fetch_one( + query=query, values={"org_id": org_id, "campaign_id": campaign_id} ) + return result is not None @staticmethod - def delete_organisation_campaign(organisation_id: int, campaign_id: int): - """Delete campaign for a organisation""" - campaign = db.session.get(Campaign, campaign_id) - if not campaign: + async def delete_organisation_campaign( + organisation_id: int, campaign_id: int, db: Database + ): + """Delete campaign for an organisation""" + + # Check if campaign exists + query_campaign = "SELECT 1 FROM campaigns WHERE id = :campaign_id LIMIT 1" + campaign_exists = await db.fetch_one( + query=query_campaign, values={"campaign_id": campaign_id} + ) + if not campaign_exists: raise NotFound(sub_code="CAMPAIGN_NOT_FOUND", campaign_id=campaign_id) - org = db.session.get(Organisation, organisation_id) - if not org: + + # Check if organisation exists + query_org = "SELECT 1 FROM organisations WHERE id = :organisation_id LIMIT 1" + org_exists = await db.fetch_one( + query=query_org, values={"organisation_id": organisation_id} + ) + if not org_exists: raise NotFound( sub_code="ORGANISATION_NOT_FOUND", organisation_id=organisation_id ) - if not CampaignService.campaign_organisation_exists( - campaign_id, organisation_id - ): + + campaign_org_exists = await CampaignService.campaign_organisation_exists( + campaign_id, organisation_id, db + ) + if not campaign_org_exists: raise NotFound( sub_code="ORGANISATION_CAMPAIGN_NOT_FOUND", organisation_id=organisation_id, campaign_id=campaign_id, ) - org.campaign.remove(campaign) - db.session.commit() - new_campaigns = CampaignService.get_organisation_campaigns_as_dto( - organisation_id + + query_delete = """ + DELETE FROM campaign_organisations + WHERE campaign_id = :campaign_id + AND organisation_id = :organisation_id + """ + await db.execute( + query=query_delete, + values={"campaign_id": campaign_id, "organisation_id": organisation_id}, ) - return new_campaigns @staticmethod - def update_campaign(campaign_dto: CampaignDTO, campaign_id: int): - campaign = db.session.get(Campaign, campaign_id) + async def update_campaign( + campaign_dto: CampaignDTO, campaign_id: int, db: Database + ): + campaign_query = "SELECT * FROM campaigns WHERE id = :id" + campaign = await db.fetch_one(query=campaign_query, values={"id": campaign_id}) + if not campaign: raise NotFound(sub_code="CAMPAIGN_NOT_FOUND", campaign_id=campaign_id) try: - campaign.update(campaign_dto) - except IntegrityError as e: - current_app.logger.info("Integrity error: {}".format(e.args[0])) - raise ValueError() + # Convert the DTO to a dictionary, excluding unset fields + campaign_dict = campaign_dto.dict(exclude_unset=True) + # Remove 'organisation' key if it exists + if "organisations" in campaign_dict: + del campaign_dict["organisations"] - return campaign + set_clause = ", ".join(f"{key} = :{key}" for key in campaign_dict.keys()) + update_query = 
f""" + UPDATE campaigns + SET {set_clause} + WHERE id = :id + RETURNING id + """ + campaign = await db.fetch_one( + query=update_query, values={**campaign_dict, "id": campaign_id} + ) + if not campaign: + raise HTTPException(status_code=404, detail="Campaign not found") + + return campaign + + except IntegrityError: + raise HTTPException(status_code=409, detail="Campaign name already exists") + + except Exception as e: + raise HTTPException(status_code=500, detail=str(e)) from e diff --git a/backend/services/grid/grid_service.py b/backend/services/grid/grid_service.py index 7564f92d8b..7a634a42a0 100644 --- a/backend/services/grid/grid_service.py +++ b/backend/services/grid/grid_service.py @@ -1,9 +1,11 @@ -import geojson import json + +import geojson +import shapely.geometry +from loguru import logger from shapely.geometry import MultiPolygon, mapping from shapely.ops import unary_union -import shapely.geometry -from flask import current_app + from backend.models.dtos.grid_dto import GridDTO from backend.models.postgis.utils import InvalidGeoJson @@ -12,8 +14,7 @@ class GridServiceError(Exception): """Custom Exception to notify callers an error occurred when handling projects""" def __init__(self, message): - if current_app: - current_app.logger.error(message) + logger.error(message) class GridService: diff --git a/backend/services/grid/split_service.py b/backend/services/grid/split_service.py index fe57b04015..c9446bd99a 100644 --- a/backend/services/grid/split_service.py +++ b/backend/services/grid/split_service.py @@ -1,16 +1,17 @@ import geojson -from shapely.geometry import Polygon, MultiPolygon, LineString, shape as shapely_shape -from shapely.ops import split -from backend import db -from flask import current_app +from databases import Database + +# from flask import current_app from geoalchemy2 import shape +from geoalchemy2.elements import WKBElement +from loguru import logger +from shapely.geometry import LineString, MultiPolygon, Polygon +from shapely.geometry import shape as shapely_shape +from shapely.ops import split -from backend.exceptions import NotFound from backend.models.dtos.grid_dto import SplitTaskDTO from backend.models.dtos.mapping_dto import TaskDTOs -from backend.models.postgis.utils import ST_Transform, ST_Area, ST_GeogFromWKB -from backend.models.postgis.task import Task, TaskStatus, TaskAction -from backend.models.postgis.project import Project +from backend.models.postgis.task import Task, TaskAction, TaskStatus from backend.models.postgis.utils import InvalidGeoJson @@ -18,22 +19,17 @@ class SplitServiceError(Exception): """Custom Exception to notify callers an error occurred when handling splitting tasks""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class SplitService: @staticmethod - def _create_split_tasks(x, y, zoom, task) -> list: + async def _create_split_tasks(x, y, zoom, task, db) -> list: """ - function for splitting a task square geometry into 4 smaller squares - :param geom_to_split: {geojson.Feature} the geojson feature to b split - :return: list of {geojson.Feature} + Refactored function for splitting a task square geometry into 4 smaller squares using asyncpg and encode databases. 
""" - # If the task's geometry doesn't correspond to an OSM tile identified by an - # x, y, zoom then we need to take a different approach to splitting if x is None or y is None or zoom is None or not task.is_square: - return SplitService._create_split_tasks_from_geometry(task) + return await SplitService._create_split_tasks_from_geometry(task, db) try: split_geoms = [] @@ -42,7 +38,9 @@ def _create_split_tasks(x, y, zoom, task) -> list: new_x = x * 2 + i new_y = y * 2 + j new_zoom = zoom + 1 - new_square = SplitService._create_square(new_x, new_y, new_zoom) + new_square = await SplitService._create_square( + new_x, new_y, new_zoom, db + ) feature = geojson.Feature() feature.geometry = new_square feature.properties = { @@ -56,62 +54,64 @@ def _create_split_tasks(x, y, zoom, task) -> list: split_geoms.append(feature) return split_geoms + except Exception as e: raise SplitServiceError(f"unhandled error splitting tile: {str(e)}") @staticmethod - def _create_square(x, y, zoom) -> geojson.MultiPolygon: + async def _create_square(x, y, zoom, db) -> geojson.MultiPolygon: """ - Function for creating a geojson.MultiPolygon square representing a single OSM tile grid square - :param x: osm tile grid x - :param y: osm tile grid y - :param zoom: osm tile grid zoom level - :return: geojson.MultiPolygon in EPSG:4326 + Refactored function to create a geojson.MultiPolygon square using raw SQL with encode databases. """ - # Maximum resolution MAXRESOLUTION = 156543.0339 - - # X/Y axis limit max = MAXRESOLUTION * 256 / 2 - - # calculate extents step = max / (2 ** (zoom - 1)) + xmin = x * step - max ymin = y * step - max xmax = (x + 1) * step - max ymax = (y + 1) * step - max - # make a shapely multipolygon + # Create the MultiPolygon object multipolygon = MultiPolygon( [Polygon([(xmin, ymin), (xmax, ymin), (xmax, ymax), (xmin, ymax)])] ) - # use the database to transform the geometry from 3857 to 4326 - transformed_geometry = ST_Transform(shape.from_shape(multipolygon, 3857), 4326) + # Convert MultiPolygon to WKT + multipolygon_wkt = multipolygon.wkt - # use DB to get the geometry as geojson - with db.engine.connect() as conn: - return geojson.loads( - conn.execute(transformed_geometry.ST_AsGeoJSON()).scalar() - ) + # Query to transform and get the GeoJSON + create_square_query = """ + SELECT ST_AsGeoJSON(ST_Transform(ST_SetSRID(ST_Multi(ST_GeomFromText(:multipolygon_geometry)), 3857), 4326)) AS geojson + """ + + # Use the WKT version of the multipolygon in the SQL query + square_geojson_str = await db.fetch_val( + create_square_query, values={"multipolygon_geometry": multipolygon_wkt} + ) + + # Convert the result back to GeoJSON and return + return geojson.loads(square_geojson_str) @staticmethod - def _create_split_tasks_from_geometry(task) -> list: + async def _create_split_tasks_from_geometry(task, db) -> list: + """ + Splits a task into 4 smaller tasks based on its geometry (not OSM tile). Uses raw SQL with asyncpg/encode databases. 
""" - Splits a task into 4 smaller tasks based purely on the task's geometry rather than - an OSM tile identified by x, y, zoom - :return: list of {geojson.Feature} + task_query = """ + SELECT ST_AsGeoJSON(geometry) AS geometry + FROM tasks + WHERE id = :task_id AND project_id = :project_id """ - # Load the task's geometry and calculate its centroid and bbox - query = db.session.query( - Task.id, Task.geometry.ST_AsGeoJSON().label("geometry") - ).filter(Task.id == task.id, Task.project_id == task.project_id) - task_geojson = geojson.loads(query[0].geometry) + task_geojson_str = await db.fetch_val( + task_query, values={"task_id": task.id, "project_id": task.project_id} + ) + task_geojson = geojson.loads(task_geojson_str) geometry = shapely_shape(task_geojson) centroid = geometry.centroid minx, miny, maxx, maxy = geometry.bounds - # split geometry in half vertically, then split those halves in half horizontally + # split geometry in half vertically, then horizontally split_geometries = [] vertical_dividing_line = LineString([(centroid.x, miny), (centroid.x, maxy)]) horizontal_dividing_line = LineString([(minx, centroid.y), (maxx, centroid.y)]) @@ -127,18 +127,21 @@ def _create_split_tasks_from_geometry(task) -> list: # convert split geometries into GeoJSON features expected by Task split_features = [] for split_geometry in split_geometries: - feature = geojson.Feature() - # Tasks expect multipolygons. Convert and use the database to get as GeoJSON - multipolygon_geometry = shape.from_shape(split_geometry, 4326) - with db.engine.connect() as conn: - feature.geometry = geojson.loads( - conn.execute(multipolygon_geometry.ST_AsGeoJSON()).scalar() - ) + multipolygon_geometry_wkt = split_geometry.wkt + multipolygon_as_geojson_query = """ + SELECT ST_AsGeoJSON(ST_Transform(ST_SetSRID(ST_Multi(ST_GeomFromText(:multipolygon_geometry)), 4326), 4326)) AS geojson + """ + feature_geojson = await db.fetch_val( + multipolygon_as_geojson_query, + values={"multipolygon_geometry": multipolygon_geometry_wkt}, + ) + feature = geojson.Feature(geometry=geojson.loads(feature_geojson)) feature.properties["x"] = None feature.properties["y"] = None feature.properties["zoom"] = None feature.properties["isSquare"] = False split_features.append(feature) + return split_features @staticmethod @@ -162,106 +165,210 @@ def _as_halves(geometries, centroid, axis) -> list: return (MultiPolygon(first_half), MultiPolygon(second_half)) @staticmethod - def split_task(split_task_dto: SplitTaskDTO) -> TaskDTOs: + async def delete_task_and_related_records(task_id: int, project_id: int, db): """ - Replaces a task square with 4 smaller tasks at the next OSM tile grid zoom level - Validates that task is: - - locked for mapping by current user - :param split_task_dto: - :return: new tasks in a DTO + Deletes a task and all its related records (task_mapping_issues, task_invalidation_history, task_history) + by task_id and project_id. + + Args: + task_id (int): The ID of the task. + project_id (int): The ID of the project. + db: The database connection object (asyncpg, databases, etc.). 
""" - # get the task to be split - original_task = Task.get(split_task_dto.task_id, split_task_dto.project_id) - if original_task is None: - raise NotFound(sub_code="TASK_NOT_FOUND", task_id=split_task_dto.task_id) + # Delete related messages + await db.execute( + """ + DELETE FROM messages + WHERE task_id = :task_id AND project_id = :project_id + """, + values={"task_id": task_id, "project_id": project_id}, + ) - original_geometry = shape.to_shape(original_task.geometry) + # Delete related task_mapping_issues records + await db.execute( + """ + DELETE FROM task_mapping_issues + WHERE task_history_id IN ( + SELECT id FROM task_history WHERE task_id = :task_id AND project_id = :project_id + ) + """, + values={"task_id": task_id, "project_id": project_id}, + ) + + # Delete related task_invalidation_history records + await db.execute( + """ + DELETE FROM task_invalidation_history + WHERE task_id = :task_id AND project_id = :project_id + """, + values={"task_id": task_id, "project_id": project_id}, + ) - # Fetch the task geometry in meters - with db.engine.connect() as conn: - original_task_area_m = conn.execute( - ST_Area(ST_GeogFromWKB(original_task.geometry)) - ).scalar() + # Delete related task_history records + await db.execute( + """ + DELETE FROM task_history + WHERE task_id = :task_id AND project_id = :project_id + """, + values={"task_id": task_id, "project_id": project_id}, + ) + # Finally, delete the task itself + await db.execute( + """ + DELETE FROM tasks WHERE id = :task_id AND project_id = :project_id + """, + values={"task_id": task_id, "project_id": project_id}, + ) + + @staticmethod + async def split_task(split_task_dto: SplitTaskDTO, db: Database) -> list: + original_task = await Task.get( + split_task_dto.task_id, split_task_dto.project_id, db + ) + + if not original_task: + raise SplitServiceError("TASK_NOT_FOUND- Task not found") + original_geometry = shape.to_shape( + WKBElement(original_task["geometry"], srid=4326) + ) + + query = """ + SELECT ST_Area(ST_GeogFromWKB(geometry)) + FROM tasks + WHERE id = :task_id AND project_id = :project_id + """ + original_task_area_m = await db.fetch_val( + query, + values={ + "task_id": split_task_dto.task_id, + "project_id": split_task_dto.project_id, + }, + ) if ( - original_task.zoom and original_task.zoom >= 18 + original_task["zoom"] and original_task["zoom"] >= 18 ) or original_task_area_m < 25000: raise SplitServiceError("SmallToSplit- Task is too small to be split") - # check its locked for mapping by the current user - if TaskStatus(original_task.task_status) != TaskStatus.LOCKED_FOR_MAPPING: + if original_task["task_status"] != TaskStatus.LOCKED_FOR_MAPPING.value: raise SplitServiceError( "LockToSplit- Status must be LOCKED_FOR_MAPPING to split" ) - - if original_task.locked_by != split_task_dto.user_id: + if original_task["locked_by"] != split_task_dto.user_id: raise SplitServiceError( "SplitOtherUserTask- Attempting to split a task owned by another user" ) - # create new geometries from the task geometry - try: - new_tasks_geojson = SplitService._create_split_tasks( - original_task.x, original_task.y, original_task.zoom, original_task - ) - except Exception as e: - raise SplitServiceError(f"Error splitting task{str(e)}") + # Split the task geometry into smaller tasks + new_tasks_geojson = await SplitService._create_split_tasks( + original_task["x"], + original_task["y"], + original_task["zoom"], + original_task, + db, + ) - # create new tasks from the new geojson - i = 
Task.get_max_task_id_for_project(split_task_dto.project_id) + # Fetch the highest task ID for the project + i = await Task.get_max_task_id_for_project(split_task_dto.project_id, db) new_tasks = [] new_tasks_dto = [] + for new_task_geojson in new_tasks_geojson: - # Sanity check: ensure the new task geometry intersects the original task geometry - new_geometry = shapely_shape(new_task_geojson.geometry) + # Ensure the new task geometry intersects the original geometry + new_geometry = shapely_shape(new_task_geojson["geometry"]) if not new_geometry.intersects(original_geometry): raise InvalidGeoJson( "SplitGeoJsonError- New split task does not intersect original task" ) - # insert new tasks into database - i = i + 1 + # Insert new task into database + i += 1 new_task = Task.from_geojson_feature(i, new_task_geojson) - new_task.project_id = split_task_dto.project_id - new_task.task_status = TaskStatus.READY.value - new_task.create() - new_task.task_history.extend(original_task.copy_task_history()) - if new_task.task_history: - new_task.clear_task_lock() # since we just copied the lock - new_task.set_task_history( - TaskAction.STATE_CHANGE, split_task_dto.user_id, None, TaskStatus.SPLIT + task_geojson_str = geojson.dumps(new_geometry) + task_values = { + "id": new_task.id, + "project_id": split_task_dto.project_id, + "x": new_task.x, + "y": new_task.y, + "zoom": new_task.zoom, + "is_square": new_task.is_square, + "task_status": TaskStatus.READY.value, + "geojson": task_geojson_str, + } + + query = """ + INSERT INTO tasks (id, project_id, x, y, zoom, is_square, task_status, geometry) + VALUES (:id, :project_id, :x, :y, :zoom, :is_square, :task_status, ST_SetSRID(ST_GeomFromGeoJSON(:geojson), 4326)) + """ + await db.execute(query, values=task_values) + await Task.copy_task_history( + split_task_dto.task_id, new_task.id, split_task_dto.project_id, db + ) + await Task.clear_task_lock(new_task.id, new_task.project_id, db) + await Task.set_task_history( + task_id=new_task.id, + project_id=split_task_dto.project_id, + user_id=split_task_dto.user_id, + action=TaskAction.STATE_CHANGE, + db=db, + new_state=TaskStatus.SPLIT, ) - new_task.set_task_history( - TaskAction.STATE_CHANGE, split_task_dto.user_id, None, TaskStatus.READY + await Task.set_task_history( + task_id=new_task.id, + project_id=split_task_dto.project_id, + user_id=split_task_dto.user_id, + action=TaskAction.STATE_CHANGE, + db=db, + new_state=TaskStatus.READY, + ) + update_status_query = """ + UPDATE tasks + SET task_status = :task_status + WHERE id = :task_id AND project_id = :project_id + """ + await db.execute( + update_status_query, + values={ + "task_status": TaskStatus.READY.value, + "task_id": new_task.id, + "project_id": split_task_dto.project_id, + }, ) - new_task.task_status = TaskStatus.READY.value - new_tasks.append(new_task) - new_task.update() new_tasks_dto.append( - new_task.as_dto_with_instructions(split_task_dto.preferred_locale) + await Task.as_dto_with_instructions( + new_task.id, + split_task_dto.project_id, + db, + split_task_dto.preferred_locale, + ) ) - # delete original task from the database - try: - original_task.delete() - except Exception: - db.session.rollback() - # Ensure the new tasks are cleaned up - for new_task in new_tasks: - new_task.delete() - db.session.commit() - raise - - # update project task counts - project = Project.get(split_task_dto.project_id) - project.total_tasks = project.tasks.count() - # update bad imagery because we may have split a bad imagery tile - project.tasks_bad_imagery = 
project.tasks.filter( - Task.task_status == TaskStatus.BADIMAGERY.value - ).count() - project.save() - - # return the new tasks in a DTO + await SplitService.delete_task_and_related_records( + split_task_dto.task_id, split_task_dto.project_id, db + ) + + query = """ + UPDATE projects + SET total_tasks = ( + SELECT COUNT(*) + FROM tasks + WHERE project_id = :project_id + ), + tasks_bad_imagery = ( + SELECT COUNT(*) + FROM tasks + WHERE project_id = :project_id AND task_status = :bad_imagery_status + ) + WHERE id = :project_id + """ + await db.execute( + query, + values={ + "project_id": split_task_dto.project_id, + "bad_imagery_status": TaskStatus.BADIMAGERY.value, + }, + ) + task_dtos = TaskDTOs() task_dtos.tasks = new_tasks_dto return task_dtos diff --git a/backend/services/interests_service.py b/backend/services/interests_service.py index 1fe5a9dc95..392e75f7c0 100644 --- a/backend/services/interests_service.py +++ b/backend/services/interests_service.py @@ -1,26 +1,27 @@ -from backend import db - -from sqlalchemy import func +from databases import Database +from fastapi import HTTPException from backend.models.dtos.interests_dto import ( + InterestDTO, InterestRateDTO, InterestRateListDTO, InterestsListDTO, ) -from backend.models.postgis.task import TaskHistory -from backend.models.postgis.interests import ( - Interest, - project_interests, -) +from backend.models.postgis.interests import Interest +from backend.models.postgis.project import Project from backend.services.project_service import ProjectService -from backend.services.users.user_service import UserService class InterestService: @staticmethod - def get(interest_id): - interest = InterestService.get_by_id(interest_id) - return interest.as_dto() + async def get(interest_id: int, db: Database) -> InterestDTO: + query = """ + SELECT id, name + FROM interests + WHERE id = :interest_id + """ + interest_dto = await db.fetch_one(query, {"interest_id": interest_id}) + return interest_dto @staticmethod def get_by_id(interest_id): @@ -28,35 +29,71 @@ def get_by_id(interest_id): return interest @staticmethod - def get_by_name(name): - interest = Interest.get_by_name(name) - return interest - - @staticmethod - def create(interest_name): - interest_model = Interest(name=interest_name) - interest_model.create() - return interest_model.as_dto() + async def create(interest_name: str, db: Database) -> InterestDTO: + query = """ + INSERT INTO interests (name) + VALUES (:name) + RETURNING id; + """ + values = {"name": interest_name} + interest_id = await db.execute(query, values) + + query_select = """ + SELECT id, name + FROM interests + WHERE id = :id + """ + interest_dto = await db.fetch_one(query_select, {"id": interest_id}) + return interest_dto @staticmethod - def update(interest_id, new_interest_dto): - interest = InterestService.get_by_id(interest_id) - interest.update(new_interest_dto) - return interest.as_dto() + async def update(interest_id: int, interest_dto: InterestDTO, db: Database): + query = """ + UPDATE interests + SET name = :name + WHERE id = :interest_id + """ + values = {"name": interest_dto.name} + await db.execute(query, {**values, "interest_id": interest_id}) + + query_select = """ + SELECT id, name + FROM interests + WHERE id = :id + """ + updated_interest_dto = await db.fetch_one(query_select, {"id": interest_id}) + return updated_interest_dto @staticmethod - def get_all_interests() -> InterestsListDTO: - return Interest.get_all_interests() + async def get_all_interests(db: Database) -> InterestsListDTO: + query = """ 
+ SELECT id, name + FROM interests + """ + results = await db.fetch_all(query) + + interest_list_dto = InterestsListDTO() + for record in results: + interest_dto = InterestDTO(**record) + interest_list_dto.interests.append(interest_dto) + return interest_list_dto @staticmethod - def delete(interest_id): - interest = InterestService.get_by_id(interest_id) - interest.delete() + async def delete(interest_id: int, db: Database): + query = """ + DELETE FROM interests + WHERE id = :interest_id; + """ + try: + async with db.transaction(): + await db.execute(query, {"interest_id": interest_id}) + except Exception as e: + raise HTTPException(status_code=500, detail="Deletion failed") from e @staticmethod - def create_or_update_project_interests(project_id, interests): - project = ProjectService.get_project_by_id(project_id) - project.create_or_update_interests(interests) + async def create_or_update_project_interests(project_id, interests, db: Database): + project = await ProjectService.get_project_by_id(project_id, db) + project = await Project.create_or_update_interests(project, interests, db) # Return DTO. dto = InterestsListDTO() @@ -65,39 +102,68 @@ def create_or_update_project_interests(project_id, interests): return dto @staticmethod - def create_or_update_user_interests(user_id, interests): - user = UserService.get_user_by_id(user_id) - user.create_or_update_interests(interests) + async def create_or_update_user_interests(user_id, interests_ids, db: Database): + """ + Create or update the user's interests by directly interacting with the database. + """ + async with db.transaction(): + delete_query = """ + DELETE FROM user_interests WHERE user_id = :user_id + """ + await db.execute(delete_query, {"user_id": user_id}) + insert_query = """ + INSERT INTO user_interests (user_id, interest_id) + VALUES (:user_id, :interest_id) + """ + values = [ + {"user_id": user_id, "interest_id": interest_id} + for interest_id in interests_ids + ] + await db.execute_many(insert_query, values) + return await InterestService.get_user_interests(user_id, db) - # Return DTO. + @staticmethod + async def get_user_interests(user_id, db: Database) -> InterestsListDTO: + """ + Fetch the updated interests for the user and return the DTO. + """ + query = """ + SELECT i.id, i.name + FROM interests i + JOIN user_interests ui ON i.id = ui.interest_id + WHERE ui.user_id = :user_id + """ + rows = await db.fetch_all(query, {"user_id": user_id}) dto = InterestsListDTO() - dto.interests = [i.as_dto() for i in user.interests] - + dto.interests = [InterestDTO(id=row["id"], name=row["name"]) for row in rows] return dto @staticmethod - def compute_contributions_rate(user_id): - # 1. Get all projects that user has contributed. 
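Both the removed ORM query and the raw-SQL rewrite below compute each interest's share of all project-interest associations across the user's contributed projects; a small worked sketch of that arithmetic on made-up data:

from collections import Counter

# Hypothetical interests tagged on the projects a user has contributed to.
project_interests = {
    101: ["health", "roads"],
    102: ["health"],
    103: ["water", "health"],
}

counts = Counter(tag for tags in project_interests.values() for tag in tags)
total = sum(counts.values())
rates = {name: count / total for name, count in counts.items()}
print(rates)  # {'health': 0.6, 'roads': 0.2, 'water': 0.2}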
- stmt = ( - TaskHistory.query.with_entities(TaskHistory.project_id) - .distinct() - .filter(TaskHistory.user_id == user_id) - .subquery() - ) - - res = ( - db.session.query( - Interest.name, - func.count(project_interests.c.interest_id) - / func.sum(func.count(project_interests.c.interest_id)).over(), - ) - .group_by(project_interests.c.interest_id, Interest.name) - .filter(project_interests.c.project_id.in_(stmt)) - .join(Interest, Interest.id == project_interests.c.interest_id) - ) - - rates = [InterestRateDTO({"name": r[0], "rate": r[1]}) for r in res.all()] + async def compute_contributions_rate(user_id: int, db: Database): + stmt = """ + SELECT DISTINCT project_id + FROM task_history + WHERE user_id = :user_id + """ + project_ids = await db.fetch_all(stmt, values={"user_id": user_id}) + + if not project_ids: + return InterestRateListDTO() + + project_ids_list = [row["project_id"] for row in project_ids] + + query = """ + SELECT i.name, COUNT(pi.interest_id) / SUM(COUNT(pi.interest_id)) OVER() as rate + FROM project_interests pi + JOIN interests i ON i.id = pi.interest_id + WHERE pi.project_id = ANY(:project_ids) + GROUP BY pi.interest_id, i.name + """ + res = await db.fetch_all(query, values={"project_ids": project_ids_list}) + results = InterestRateListDTO() - results.rates = rates + + for r in res: + results.rates.append(InterestRateDTO(name=r["name"], rate=r["rate"])) return results diff --git a/backend/services/license_service.py b/backend/services/license_service.py index 307e3d6a71..87c521729b 100644 --- a/backend/services/license_service.py +++ b/backend/services/license_service.py @@ -1,10 +1,12 @@ from backend.models.dtos.licenses_dto import LicenseDTO, LicenseListDTO from backend.models.postgis.licenses import License +from databases import Database +from fastapi import HTTPException class LicenseService: @staticmethod - def get_license(license_id: int) -> License: + def get_license(license_id: int, db: Database) -> License: """ Get task from DB :raises: NotFound @@ -13,31 +15,65 @@ def get_license(license_id: int) -> License: return map_license @staticmethod - def get_license_as_dto(license_id: int) -> LicenseDTO: + async def get_license_as_dto(license_id: int, db: Database) -> LicenseDTO: """Get License from DB""" - map_license = LicenseService.get_license(license_id) - return map_license.as_dto() + query = """ + SELECT id AS "licenseId", name, description, plain_text AS "plainText" + FROM licenses + WHERE id = :license_id + """ + license_dto = await db.fetch_one(query, {"license_id": license_id}) + return LicenseDTO(**license_dto) @staticmethod - def create_licence(license_dto: LicenseDTO) -> int: + async def create_license(license_dto: LicenseDTO, db: Database) -> int: """Create License in DB""" - new_licence_id = License.create_from_dto(license_dto) - return new_licence_id + new_license_id = await License.create_from_dto(license_dto, db) + return new_license_id @staticmethod - def update_licence(license_dto: LicenseDTO) -> LicenseDTO: + async def update_license( + license_dto: LicenseDTO, license_id: int, db: Database + ) -> LicenseDTO: """Create License in DB""" - map_license = LicenseService.get_license(license_dto.license_id) - map_license.update_license(license_dto) - return map_license.as_dto() + + query = """ + UPDATE licenses + SET name = :name, description = :description, plain_text = :plain_text + WHERE id = :license_id + """ + + values = { + "name": license_dto.name, + "description": license_dto.description, + "plain_text": license_dto.plain_text, + } + await 
db.execute(query, values={**values, "license_id": license_id}) @staticmethod - def delete_license(license_id: int): + async def delete_license(license_id: int, db: Database): """Delete specified license""" - map_license = LicenseService.get_license(license_id) - map_license.delete() + query = """ + DELETE FROM licenses + WHERE id = :license_id; + """ + try: + async with db.transaction(): + await db.execute(query, {"license_id": license_id}) + except Exception as e: + raise HTTPException(status_code=500, detail="Deletion failed") from e @staticmethod - def get_all_licenses() -> LicenseListDTO: - """Get all licenses in DB""" - return License.get_all() + async def get_all_licenses(db: Database) -> LicenseListDTO: + """Gets all licenses currently stored""" + query = """ + SELECT id AS "licenseId", name, description, plain_text AS "plainText" + FROM licenses + """ + results = await db.fetch_all(query) + + lic_dto = LicenseListDTO() + for record in results: + l_dto = LicenseDTO(**record) + lic_dto.licenses.append(l_dto) + return lic_dto diff --git a/backend/services/mapping_issues_service.py b/backend/services/mapping_issues_service.py index 1644976f93..a5f8bd23c2 100644 --- a/backend/services/mapping_issues_service.py +++ b/backend/services/mapping_issues_service.py @@ -1,3 +1,5 @@ +from databases import Database + from backend.exceptions import NotFound from backend.models.postgis.mapping_issues import MappingIssueCategory from backend.models.dtos.mapping_issues_dto import MappingIssueCategoryDTO @@ -5,12 +7,14 @@ class MappingIssueCategoryService: @staticmethod - def get_mapping_issue_category(category_id: int) -> MappingIssueCategory: + async def get_mapping_issue_category( + category_id: int, db: Database + ) -> MappingIssueCategory: """ Get MappingIssueCategory from DB :raises: NotFound """ - category = MappingIssueCategory.get_by_id(category_id) + category = await MappingIssueCategory.get_by_id(category_id, db) if category is None: raise NotFound(sub_code="ISSUE_CATEGORY_NOT_FOUND", category_id=category_id) @@ -18,37 +22,45 @@ def get_mapping_issue_category(category_id: int) -> MappingIssueCategory: return category @staticmethod - def get_mapping_issue_category_as_dto(category_id: int) -> MappingIssueCategoryDTO: + async def get_mapping_issue_category_as_dto( + category_id: int, db: Database + ) -> MappingIssueCategoryDTO: """Get MappingIssueCategory from DB""" - category = MappingIssueCategoryService.get_mapping_issue_category(category_id) - return category.as_dto() + category = await MappingIssueCategoryService.get_mapping_issue_category( + category_id, db + ) + return MappingIssueCategory.as_dto(category) @staticmethod - def create_mapping_issue_category(category_dto: MappingIssueCategoryDTO) -> int: + async def create_mapping_issue_category( + category_dto: MappingIssueCategoryDTO, db: Database + ) -> int: """Create MappingIssueCategory in DB""" - new_mapping_issue_category_id = MappingIssueCategory.create_from_dto( - category_dto + new_mapping_issue_category_id = await MappingIssueCategory.create_from_dto( + category_dto, db ) return new_mapping_issue_category_id @staticmethod - def update_mapping_issue_category( - category_dto: MappingIssueCategoryDTO, + async def update_mapping_issue_category( + category_dto: MappingIssueCategoryDTO, db: Database ) -> MappingIssueCategoryDTO: """Create MappingIssueCategory in DB""" - category = MappingIssueCategoryService.get_mapping_issue_category( - category_dto.category_id + category = await 
MappingIssueCategoryService.get_mapping_issue_category( + category_dto.category_id, db ) - category.update_category(category_dto) - return category.as_dto() + await MappingIssueCategory.update_category(category, category_dto, db) + return MappingIssueCategory.as_dto(category) @staticmethod - def delete_mapping_issue_category(category_id: int): + async def delete_mapping_issue_category(category_id: int, db: Database): """Delete specified license""" - category = MappingIssueCategoryService.get_mapping_issue_category(category_id) - category.delete() + category = await MappingIssueCategoryService.get_mapping_issue_category( + category_id, db + ) + await MappingIssueCategory.delete(category, db) @staticmethod - def get_all_mapping_issue_categories(include_archived): + async def get_all_mapping_issue_categories(include_archived, db): """Get all mapping issue categories""" - return MappingIssueCategory.get_all_categories(include_archived) + return await MappingIssueCategory.get_all_categories(include_archived, db) diff --git a/backend/services/mapping_service.py b/backend/services/mapping_service.py index ca8c1a47bd..66c58af6dd 100644 --- a/backend/services/mapping_service.py +++ b/backend/services/mapping_service.py @@ -1,20 +1,25 @@ import datetime import xml.etree.ElementTree as ET -from flask import current_app -from geoalchemy2 import shape +from databases import Database +from fastapi import BackgroundTasks + +# from flask import current_app +from geoalchemy2 import WKBElement +from geoalchemy2.shape import to_shape +from loguru import logger from backend.exceptions import NotFound from backend.models.dtos.mapping_dto import ( ExtendLockTimeDTO, - TaskDTO, - MappedTaskDTO, LockTaskDTO, + MappedTaskDTO, StopMappingTaskDTO, TaskCommentDTO, + TaskDTO, ) from backend.models.postgis.statuses import MappingNotAllowed -from backend.models.postgis.task import Task, TaskStatus, TaskHistory, TaskAction +from backend.models.postgis.task import Task, TaskAction, TaskHistory, TaskStatus from backend.models.postgis.utils import UserLicenseError from backend.services.messaging.message_service import MessageService from backend.services.project_service import ProjectService @@ -25,76 +30,85 @@ class MappingServiceError(Exception): """Custom Exception to notify callers an error occurred when handling mapping""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class MappingService: @staticmethod - def get_task(task_id: int, project_id: int) -> Task: + async def get_task(task_id: int, project_id: int, db: Database) -> Task: """ Get task from DB :raises: NotFound """ - task = Task.get(task_id, project_id) - - if task is None: + task = await Task.get(task_id, project_id, db) + if not task: raise NotFound( sub_code="TASK_NOT_FOUND", project_id=project_id, task_id=task_id ) - return task @staticmethod - def get_task_as_dto( + async def get_task_as_dto( task_id: int, project_id: int, + db, preferred_local: str = "en", ) -> TaskDTO: """Get task as DTO for transmission over API""" - task = MappingService.get_task(task_id, project_id) - task_dto = task.as_dto_with_instructions(preferred_local) + task = await Task.exists(task_id, project_id, db) + if not task: + raise NotFound( + sub_code="TASK_NOT_FOUND", project_id=project_id, task_id=task_id + ) + task_dto = await Task.as_dto_with_instructions( + task_id, project_id, db, preferred_local + ) return task_dto @staticmethod - def _is_task_undoable(logged_in_user_id: int, task: Task) -> bool: + async def 
_is_task_undoable( + logged_in_user_id: int, task: dict, db: Database + ) -> bool: """Determines if the current task status can be undone by the logged in user""" - # Test to see if user can undo status on this task - if logged_in_user_id and TaskStatus(task.task_status) not in [ + if logged_in_user_id and TaskStatus(task["task_status"]) not in [ TaskStatus.LOCKED_FOR_MAPPING, TaskStatus.LOCKED_FOR_VALIDATION, TaskStatus.READY, ]: - last_action = TaskHistory.get_last_action(task.project_id, task.id) - + last_action = await TaskHistory.get_last_action( + task["project_id"], task["id"], db + ) # User requesting task made the last change, so they are allowed to undo it. - is_user_permitted, _ = ProjectService.is_user_permitted_to_validate( - task.project_id, logged_in_user_id + is_user_permitted, _ = await ProjectService.is_user_permitted_to_validate( + task["project_id"], logged_in_user_id, db ) - if last_action.user_id == int(logged_in_user_id) or is_user_permitted: + if last_action["user_id"] == logged_in_user_id or is_user_permitted: return True - return False @staticmethod - def lock_task_for_mapping(lock_task_dto: LockTaskDTO) -> TaskDTO: + async def lock_task_for_mapping( + lock_task_dto: LockTaskDTO, db: Database + ) -> TaskDTO: """ Sets the task_locked status to locked so no other user can work on it :param lock_task_dto: DTO with data needed to lock the task :raises TaskServiceError :return: Updated task, or None if not found """ - task = MappingService.get_task(lock_task_dto.task_id, lock_task_dto.project_id) - + task = await MappingService.get_task( + lock_task_dto.task_id, lock_task_dto.project_id, db + ) if task.locked_by != lock_task_dto.user_id: - if not task.is_mappable(): + if not Task.is_mappable(task): raise MappingServiceError( "InvalidTaskState- Task in invalid state for mapping" ) - user_can_map, error_reason = ProjectService.is_user_permitted_to_map( - lock_task_dto.project_id, lock_task_dto.user_id + user_can_map, error_reason = await ProjectService.is_user_permitted_to_map( + lock_task_dto.project_id, lock_task_dto.user_id, db ) + # TODO Handle error exceptions.. 
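Note on the TODO above: one way to resolve the pending error handling could be a small translator at the API layer that maps the `MappingNotAllowed` reason returned by `ProjectService.is_user_permitted_to_map` onto an HTTP response. The sketch below is only illustrative: the helper name and the 403/409 status codes are assumptions, and only `USER_NOT_ACCEPTED_LICENSE` is taken from the surrounding code.

```python
# Hypothetical helper, not part of this diff: convert a MappingNotAllowed reason
# into an HTTPException so the service layer can stay free of response details.
from fastapi import HTTPException

from backend.models.postgis.statuses import MappingNotAllowed


def raise_for_mapping_denial(error_reason: MappingNotAllowed) -> None:
    """Raise an HTTP error describing why mapping was refused (status codes assumed)."""
    if error_reason == MappingNotAllowed.USER_NOT_ACCEPTED_LICENSE:
        raise HTTPException(
            status_code=409,
            detail={"SubCode": "UserLicenseError", "Error": "User must accept license to map this task"},
        )
    raise HTTPException(
        status_code=403,
        detail={"SubCode": str(error_reason), "Error": f"Mapping not allowed because: {error_reason}"},
    )
```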
if not user_can_map: if error_reason == MappingNotAllowed.USER_NOT_ACCEPTED_LICENSE: raise UserLicenseError("User must accept license to map this task") @@ -115,93 +129,138 @@ def lock_task_for_mapping(lock_task_dto: LockTaskDTO) -> TaskDTO: f"{error_reason}- Mapping not allowed because: {error_reason}" ) - task.lock_task_for_mapping(lock_task_dto.user_id) - return task.as_dto_with_instructions(lock_task_dto.preferred_locale) + await Task.lock_task_for_mapping( + lock_task_dto.task_id, lock_task_dto.project_id, lock_task_dto.user_id, db + ) + return await Task.as_dto_with_instructions( + lock_task_dto.task_id, + lock_task_dto.project_id, + db, + lock_task_dto.preferred_locale, + ) @staticmethod - def unlock_task_after_mapping(mapped_task: MappedTaskDTO) -> TaskDTO: + async def unlock_task_after_mapping( + mapped_task: MappedTaskDTO, + db: Database, + background_tasks: BackgroundTasks, + ) -> TaskDTO: """Unlocks the task and sets the task history appropriately""" - task = MappingService.get_task_locked_by_user( - mapped_task.project_id, mapped_task.task_id, mapped_task.user_id + # Fetch the task locked by the user + task = await MappingService.get_task_locked_by_user( + mapped_task.project_id, mapped_task.task_id, mapped_task.user_id, db ) - + # Validate the new state new_state = TaskStatus[mapped_task.status.upper()] - if new_state not in [ TaskStatus.MAPPED, TaskStatus.BADIMAGERY, TaskStatus.READY, ]: raise MappingServiceError( - "InvalidUnlockState- Can only set status to MAPPED, BADIMAGERY, READY after mapping" + "InvalidUnlockState - Can only set status to MAPPED, BADIMAGERY, READY after mapping" ) - - # Update stats around the change of state - last_state = TaskHistory.get_last_status( - mapped_task.project_id, mapped_task.task_id + last_state = await TaskHistory.get_last_status( + mapped_task.project_id, mapped_task.task_id, db ) - StatsService.update_stats_after_task_state_change( - mapped_task.project_id, mapped_task.user_id, last_state, new_state + await StatsService.update_stats_after_task_state_change( + mapped_task.project_id, mapped_task.user_id, last_state, new_state, db ) if mapped_task.comment: - # Parses comment to see if any users have been @'d - MessageService.send_message_after_comment( + await MessageService.send_message_after_comment( mapped_task.user_id, mapped_task.comment, - task.id, + task["id"], mapped_task.project_id, + db, ) - task.unlock_task(mapped_task.user_id, new_state, mapped_task.comment) - ProjectService.send_email_on_project_progress(mapped_task.project_id) - return task.as_dto_with_instructions(mapped_task.preferred_locale) + # Unlock the task and change its state + await Task.unlock_task( + task_id=mapped_task.task_id, + project_id=mapped_task.project_id, + user_id=mapped_task.user_id, + new_state=new_state, + db=db, + comment=mapped_task.comment, + ) + # Send email on project progress + background_tasks.add_task( + ProjectService.send_email_on_project_progress, mapped_task.project_id + ) + + return await Task.as_dto_with_instructions( + task_id=mapped_task.task_id, + project_id=mapped_task.project_id, + db=db, + preferred_locale=mapped_task.preferred_locale, + ) @staticmethod - def stop_mapping_task(stop_task: StopMappingTaskDTO) -> TaskDTO: + async def stop_mapping_task(stop_task: StopMappingTaskDTO, db: Database) -> TaskDTO: """Unlocks the task and revert the task status to the last one""" - task = MappingService.get_task_locked_by_user( - stop_task.project_id, stop_task.task_id, stop_task.user_id + task = await 
MappingService.get_task_locked_by_user( + stop_task.project_id, stop_task.task_id, stop_task.user_id, db ) if stop_task.comment: # Parses comment to see if any users have been @'d - MessageService.send_message_after_comment( - stop_task.user_id, stop_task.comment, task.id, stop_task.project_id + await MessageService.send_message_after_comment( + stop_task.user_id, stop_task.comment, task.id, stop_task.project_id, db ) - - task.reset_lock(stop_task.user_id, stop_task.comment) - return task.as_dto_with_instructions(stop_task.preferred_locale) + await Task.reset_lock( + task.id, + stop_task.project_id, + task.task_status, + stop_task.user_id, + stop_task.comment, + db, + ) + return await Task.as_dto_with_instructions( + task.id, stop_task.project_id, db, stop_task.preferred_locale + ) @staticmethod - def get_task_locked_by_user(project_id: int, task_id: int, user_id: int) -> Task: + async def get_task_locked_by_user( + project_id: int, task_id: int, user_id: int, db: Database + ): + """Returns task specified by project id and task id if found and locked for mapping by user""" + query = """ + SELECT * FROM tasks + WHERE id = :task_id AND project_id = :project_id """ - Returns task specified by project id and task id if found and locked for mapping by user - :raises: MappingServiceError - """ - task = MappingService.get_task(task_id, project_id) + task = await db.fetch_one( + query, values={"task_id": task_id, "project_id": project_id} + ) + if task is None: raise NotFound( - sub_code="TASK_NOT_FOUND", project_id=project_id, task_id=task_id + status_code=404, + sub_code="TASK_NOT_FOUND", + project_id=project_id, + task_id=task_id, ) - current_state = TaskStatus(task.task_status) - if current_state != TaskStatus.LOCKED_FOR_MAPPING: + + if task["task_status"] != TaskStatus.LOCKED_FOR_MAPPING.value: raise MappingServiceError( "LockBeforeUnlocking- Status must be LOCKED_FOR_MAPPING to unlock" ) - if task.locked_by != user_id: + + if task["locked_by"] != user_id: raise MappingServiceError( "TaskNotOwned- Attempting to unlock a task owned by another user" ) + return task @staticmethod - def add_task_comment(task_comment: TaskCommentDTO) -> TaskDTO: + async def add_task_comment(task_comment: TaskCommentDTO, db: Database) -> TaskDTO: """Adds the comment to the task history""" # Check if project exists - ProjectService.exists(task_comment.project_id) + await ProjectService.exists(task_comment.project_id, db) - task = Task.get(task_comment.task_id, task_comment.project_id) + task = await Task.get(task_comment.task_id, task_comment.project_id, db) if task is None: raise NotFound( sub_code="TASK_NOT_FOUND", @@ -209,18 +268,33 @@ def add_task_comment(task_comment: TaskCommentDTO) -> TaskDTO: task_id=task_comment.task_id, ) - task.set_task_history( - TaskAction.COMMENT, task_comment.user_id, task_comment.comment + await Task.set_task_history( + task_id=task_comment.task_id, + project_id=task_comment.project_id, + user_id=task_comment.user_id, + action=TaskAction.COMMENT, + db=db, + comment=task_comment.comment, ) # Parse comment to see if any users have been @'d - MessageService.send_message_after_comment( - task_comment.user_id, task_comment.comment, task.id, task_comment.project_id + await MessageService.send_message_after_comment( + task_comment.user_id, + task_comment.comment, + task.id, + task_comment.project_id, + db, + ) + return await Task.as_dto_with_instructions( + task_comment.task_id, + task_comment.project_id, + db, + task_comment.preferred_locale, ) - task.update() - return 
task.as_dto_with_instructions(task_comment.preferred_locale) @staticmethod - def generate_gpx(project_id: int, task_ids_str: str, timestamp=None): + async def generate_gpx( + project_id: int, task_ids_str: str, db: Database, timestamp=None + ): """ Creates a GPX file for supplied tasks. Timestamp is for unit testing only. You can use the following URL to test locally: @@ -260,18 +334,22 @@ def generate_gpx(project_id: int, task_ids_str: str, timestamp=None): # Construct trkseg elements if task_ids_str is not None: task_ids = list(map(int, task_ids_str.split(","))) - tasks = Task.get_tasks(project_id, task_ids) + tasks = await Task.get_tasks(project_id, task_ids, db) if not tasks or len(tasks) == 0: raise NotFound( sub_code="TASKS_NOT_FOUND", project_id=project_id, task_ids=task_ids ) else: - tasks = Task.get_all_tasks(project_id) + tasks = await Task.get_all_tasks(project_id, db) if not tasks or len(tasks) == 0: raise NotFound(sub_code="TASKS_NOT_FOUND", project_id=project_id) for task in tasks: - task_geom = shape.to_shape(task.geometry) + # task_geom = shape.to_shape(task.geometry) + if isinstance(task["geometry"], (bytes, str)): + task_geom = to_shape(WKBElement(task["geometry"], srid=4326)) + else: + raise ValueError("Invalid geometry format") for poly in task_geom.geoms: trkseg = ET.SubElement(trk, "trkseg") for point in poly.exterior.coords: @@ -280,8 +358,6 @@ def generate_gpx(project_id: int, task_ids_str: str, timestamp=None): "trkpt", attrib=dict(lon=str(point[0]), lat=str(point[1])), ) - - # Append wpt elements to end of doc wpt = ET.Element( "wpt", attrib=dict(lon=str(point[0]), lat=str(point[1])) ) @@ -291,7 +367,7 @@ def generate_gpx(project_id: int, task_ids_str: str, timestamp=None): return xml_gpx @staticmethod - def generate_osm_xml(project_id: int, task_ids_str: str) -> str: + async def generate_osm_xml(project_id: int, task_ids_str: str, db: Database) -> str: """Generate xml response suitable for loading into JOSM. 
A sample output file is in /backend/helpers/testfiles/osm-sample.xml""" # Note XML created with upload No to ensure it will be rejected by OSM if uploaded by mistake @@ -299,22 +375,24 @@ def generate_osm_xml(project_id: int, task_ids_str: str) -> str: "osm", attrib=dict(version="0.6", upload="never", creator="HOT Tasking Manager"), ) - if task_ids_str: task_ids = list(map(int, task_ids_str.split(","))) - tasks = Task.get_tasks(project_id, task_ids) + tasks = await Task.get_tasks(project_id, task_ids, db) if not tasks or len(tasks) == 0: raise NotFound( sub_code="TASKS_NOT_FOUND", project_id=project_id, task_ids=task_ids ) else: - tasks = Task.get_all_tasks(project_id) + tasks = await Task.get_all_tasks(project_id, db) if not tasks or len(tasks) == 0: raise NotFound(sub_code="TASKS_NOT_FOUND", project_id=project_id) fake_id = -1 # We use fake-ids to ensure XML will not be validated by OSM for task in tasks: - task_geom = shape.to_shape(task.geometry) + if isinstance(task["geometry"], (bytes, str)): + task_geom = to_shape(WKBElement(task["geometry"], srid=4326)) + else: + raise ValueError("Invalid geometry format") way = ET.SubElement( root, "way", @@ -340,16 +418,19 @@ def generate_osm_xml(project_id: int, task_ids_str: str) -> str: return xml_gpx @staticmethod - def undo_mapping( - project_id: int, task_id: int, user_id: int, preferred_locale: str = "en" + async def undo_mapping( + project_id: int, + task_id: int, + user_id: int, + db: Database, + preferred_locale: str = "en", ) -> TaskDTO: """Allows a user to Undo the task state they updated""" - task = MappingService.get_task(task_id, project_id) - if not MappingService._is_task_undoable(user_id, task): + task = await MappingService.get_task(task_id, project_id, db) + if not await MappingService._is_task_undoable(user_id, task, db): raise MappingServiceError( "UndoPermissionError- Undo not allowed for this user" ) - current_state = TaskStatus(task.task_status) # Set the state to the previous state in the workflow if current_state == TaskStatus.VALIDATED: @@ -359,80 +440,110 @@ def undo_mapping( elif current_state == TaskStatus.MAPPED: undo_state = TaskStatus.READY else: - undo_state = TaskHistory.get_last_status(project_id, task_id, True) + undo_state = await TaskHistory.get_last_status( + project_id, task_id, db, True + ) # Refer to last action for user of it. 
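Note: a detail worth keeping in mind for the undo logic above and for the lock-extension hunk further down is that rows fetched through `databases` carry `task_status` as a plain integer, so code must either wrap it in `TaskStatus(...)` before comparing, or compare against `TaskStatus.<member>.value`. A tiny illustration follows (assuming, as in this codebase, that `TaskStatus` is a plain `Enum` rather than an `IntEnum`):

```python
# Illustration only: integer task_status from a fetched row vs. the TaskStatus enum.
from backend.models.postgis.task import TaskStatus

row = {"task_status": TaskStatus.LOCKED_FOR_MAPPING.value}  # shape of a db.fetch_one result

current_state = TaskStatus(row["task_status"])  # enum member, usable for .name, comparisons, etc.
assert current_state == TaskStatus.LOCKED_FOR_MAPPING
assert row["task_status"] == TaskStatus.LOCKED_FOR_MAPPING.value
# Comparing the raw integer directly to the enum member would silently be False,
# which is why comparisons in this diff should go through .value or TaskStatus(...).
```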
- last_action = TaskHistory.get_last_action(project_id, task_id) + last_action = await TaskHistory.get_last_action(project_id, task_id, db) - StatsService.update_stats_after_task_state_change( - project_id, last_action.user_id, current_state, undo_state, "undo" + await StatsService.update_stats_after_task_state_change( + project_id, last_action.user_id, current_state, undo_state, db, "undo" ) - - task.unlock_task( - user_id, - undo_state, - f"Undo state from {current_state.name} to {undo_state.name}", - True, + await Task.unlock_task( + task_id=task_id, + project_id=project_id, + user_id=user_id, + new_state=undo_state, + db=db, + comment=f"Undo state from {current_state.name} to {undo_state.name}", + undo=True, + ) + return await Task.as_dto_with_instructions( + task_id, project_id, db, preferred_locale ) - # Reset the user who mapped/validated the task - if current_state.name == "MAPPED": - task.mapped_by = None - elif current_state.name == "VALIDATED": - task.validated_by = None - task.update() - return task.as_dto_with_instructions(preferred_locale) @staticmethod - def map_all_tasks(project_id: int, user_id: int): - """Marks all tasks on a project as mapped""" - tasks_to_map = Task.query.filter( - Task.project_id == project_id, - Task.task_status.notin_( - [ - TaskStatus.BADIMAGERY.value, - TaskStatus.MAPPED.value, - TaskStatus.VALIDATED.value, - ] - ), - ).all() + async def map_all_tasks(project_id: int, user_id: int, db: Database): + """Marks all tasks on a project as mapped using raw SQL queries""" + + query = """ + SELECT id, task_status + FROM tasks + WHERE project_id = :project_id + AND task_status NOT IN (:bad_imagery, :mapped, :validated) + """ + tasks_to_map = await db.fetch_all( + query=query, + values={ + "project_id": project_id, + "bad_imagery": TaskStatus.BADIMAGERY.value, + "mapped": TaskStatus.MAPPED.value, + "validated": TaskStatus.VALIDATED.value, + }, + ) for task in tasks_to_map: - if TaskStatus(task.task_status) not in [ + task_id = task["id"] + current_status = TaskStatus(task["task_status"]) + + # Lock the task for mapping if it's not already locked + if current_status not in [ TaskStatus.LOCKED_FOR_MAPPING, TaskStatus.LOCKED_FOR_VALIDATION, ]: - # Only lock tasks that are not already locked to avoid double lock issue - task.lock_task_for_mapping(user_id) - - task.unlock_task(user_id, new_state=TaskStatus.MAPPED) + await Task.lock_task_for_mapping(task_id, project_id, user_id, db) + + # Unlock the task and set its status to MAPPED + await Task.unlock_task( + task_id=task_id, + project_id=project_id, + user_id=user_id, + new_state=TaskStatus.MAPPED, + db=db, + ) - # Set counters to fully mapped - project = ProjectService.get_project_by_id(project_id) - project.tasks_mapped = ( - project.total_tasks - project.tasks_bad_imagery - project.tasks_validated - ) - project.save() + project_update_query = """ + UPDATE projects + SET tasks_mapped = (total_tasks - tasks_bad_imagery - tasks_validated) + WHERE id = :project_id + """ + await db.execute(query=project_update_query, values={"project_id": project_id}) @staticmethod - def reset_all_badimagery(project_id: int, user_id: int): - """Marks all bad imagery tasks ready for mapping""" - badimagery_tasks = Task.query.filter( - Task.task_status == TaskStatus.BADIMAGERY.value, - Task.project_id == project_id, - ).all() + async def reset_all_badimagery(project_id: int, user_id: int, db: Database): + """Marks all bad imagery tasks as ready for mapping and resets the bad imagery counter""" + # Fetch all tasks with status 
BADIMAGERY for the given project + badimagery_query = """ + SELECT id FROM tasks + WHERE task_status = :task_status AND project_id = :project_id + """ + badimagery_tasks = await db.fetch_all( + query=badimagery_query, + values={ + "task_status": TaskStatus.BADIMAGERY.value, + "project_id": project_id, + }, + ) for task in badimagery_tasks: - task.lock_task_for_mapping(user_id) - task.unlock_task(user_id, new_state=TaskStatus.READY) - - # Reset bad imagery counter - project = ProjectService.get_project_by_id(project_id) - project.tasks_bad_imagery = 0 - project.save() + task_id = task["id"] + await Task.lock_task_for_mapping(task_id, project_id, user_id, db) + await Task.unlock_task(task_id, project_id, user_id, TaskStatus.READY, db) + + # Reset bad imagery counter in the project + reset_query = """ + UPDATE projects + SET tasks_bad_imagery = 0 + WHERE id = :project_id + """ + await db.execute(query=reset_query, values={"project_id": project_id}) @staticmethod - def lock_time_can_be_extended(project_id, task_id, user_id): - task = Task.get(task_id, project_id) + async def lock_time_can_be_extended( + project_id: int, task_id: int, user_id: int, db: Database + ): + task = await Task.get(task_id, project_id, db) if task is None: raise NotFound( sub_code="TASK_NOT_FOUND", project_id=project_id, task_id=task_id @@ -450,32 +561,62 @@ def lock_time_can_be_extended(project_id, task_id, user_id): "LockedByAnotherUser- Task is locked by another user." ) + # @staticmethod + # async def extend_task_lock_time(extend_dto: ExtendLockTimeDTO, db: Database): + # """ + # Extends expiry time of locked tasks + # :raises ValidatorServiceError + # """ + # # Loop supplied tasks to check they can all be locked for validation + # tasks_to_extend = [] + # for task_id in extend_dto.task_ids: + # await MappingService.lock_time_can_be_extended( + # extend_dto.project_id, task_id, extend_dto.user_id, db + # ) + # tasks_to_extend.append(task_id) + + # for task_id in tasks_to_extend: + # task = await Task.get(task_id, extend_dto.project_id, db) + # action = TaskAction.EXTENDED_FOR_MAPPING + # if task.task_status == TaskStatus.LOCKED_FOR_VALIDATION: + # action = TaskAction.EXTENDED_FOR_VALIDATION + + # await TaskHistory.update_task_locked_with_duration( + # task_id, + # extend_dto.project_id, + # TaskStatus(task.task_status), + # extend_dto.user_id, + # ) + # await Task.set_task_history(action, extend_dto.user_id) + @staticmethod - def extend_task_lock_time(extend_dto: ExtendLockTimeDTO): + async def extend_task_lock_time(extend_dto: ExtendLockTimeDTO, db: Database): """ - Extends expiry time of locked tasks + Extends expiry time of locked tasks. 
:raises ValidatorServiceError """ - # Loop supplied tasks to check they can all be locked for validation - tasks_to_extend = [] + # Validate each task before extending lock time for task_id in extend_dto.task_ids: - MappingService.lock_time_can_be_extended( - extend_dto.project_id, task_id, extend_dto.user_id + await MappingService.lock_time_can_be_extended( + extend_dto.project_id, task_id, extend_dto.user_id, db ) - tasks_to_extend.append(task_id) - # # Lock all tasks for validation - for task_id in tasks_to_extend: - task = Task.get(task_id, extend_dto.project_id) - action = TaskAction.EXTENDED_FOR_MAPPING - if task.task_status == TaskStatus.LOCKED_FOR_VALIDATION: - action = TaskAction.EXTENDED_FOR_VALIDATION + # Extend the lock time for the tasks validated above + for task_id in extend_dto.task_ids: + task = await Task.get(task_id, extend_dto.project_id, db) + action = ( + TaskAction.EXTENDED_FOR_MAPPING + if task["task_status"] == TaskStatus.LOCKED_FOR_MAPPING.value + else TaskAction.EXTENDED_FOR_VALIDATION + ) - TaskHistory.update_task_locked_with_duration( + await TaskHistory.update_task_locked_with_duration( task_id, extend_dto.project_id, - TaskStatus(task.task_status), + TaskStatus(task["task_status"]), extend_dto.user_id, + db, + ) + await Task.set_task_history( + task_id, extend_dto.project_id, extend_dto.user_id, action, db ) - task.set_task_history(action, extend_dto.user_id) - task.update() diff --git a/backend/services/mapswipe_service.py b/backend/services/mapswipe_service.py index b41552c3df..00ea242434 100644 --- a/backend/services/mapswipe_service.py +++ b/backend/services/mapswipe_service.py @@ -1,20 +1,22 @@ import json + +import requests +from cachetools import TTLCache, cached + from backend.exceptions import Conflict from backend.models.dtos.partner_stats_dto import ( - GroupedPartnerStatsDTO, - FilteredPartnerStatsDTO, - UserGroupMemberDTO, - UserContributionsDTO, - GeojsonDTO, - GeoContributionsDTO, AreaSwipedByProjectTypeDTO, ContributionsByDateDTO, - ContributionTimeByDateDTO, ContributionsByProjectTypeDTO, + ContributionTimeByDateDTO, + FilteredPartnerStatsDTO, + GeoContributionsDTO, + GeojsonDTO, + GroupedPartnerStatsDTO, OrganizationContributionsDTO, + UserContributionsDTO, + UserGroupMemberDTO, ) -from cachetools import TTLCache, cached -import requests grouped_partner_stats_cache = TTLCache(maxsize=128, ttl=60 * 60 * 24) filtered_partner_stats_cache = TTLCache(maxsize=128, ttl=60 * 60 * 24) @@ -138,9 +140,8 @@ def setup_group_dto( self, partner_id: str, group_id: str, resp_body: str ) -> GroupedPartnerStatsDTO: group_stats = json.loads(resp_body)["data"] - group_dto = GroupedPartnerStatsDTO() + group_dto = GroupedPartnerStatsDTO(provider="mapswipe") group_dto.id = partner_id - group_dto.provider = "mapswipe" group_dto.id_inside_provider = group_id if group_stats["userGroup"] is None: @@ -194,9 +195,8 @@ def setup_filtered_dto( to_date: str, resp_body: str, ): - filtered_stats_dto = FilteredPartnerStatsDTO() + filtered_stats_dto = FilteredPartnerStatsDTO(provider="mapswipe") filtered_stats_dto.id = partner_id - filtered_stats_dto.provider = "mapswipe" filtered_stats_dto.id_inside_provider = group_id filtered_stats_dto.from_date = from_date filtered_stats_dto.to_date = to_date diff --git a/backend/services/messaging/chat_service.py b/backend/services/messaging/chat_service.py index 825be2123f..6ec0d3e27a 100644 --- a/backend/services/messaging/chat_service.py +++ b/backend/services/messaging/chat_service.py @@ -1,34 +1,37 @@ -import threading -from flask import current_app
+from databases import Database +from fastapi import BackgroundTasks -from backend import db from backend.exceptions import NotFound from backend.models.dtos.message_dto import ChatMessageDTO, ProjectChatDTO +from backend.models.postgis.project import ProjectStatus from backend.models.postgis.project_chat import ProjectChat from backend.models.postgis.project_info import ProjectInfo +from backend.models.postgis.statuses import TeamRoles from backend.services.messaging.message_service import MessageService -from backend.services.project_service import ProjectService from backend.services.project_admin_service import ProjectAdminService +from backend.services.project_service import ProjectService from backend.services.team_service import TeamService -from backend.models.postgis.statuses import TeamRoles -from backend.models.postgis.project import ProjectStatus class ChatService: @staticmethod - def post_message( - chat_dto: ChatMessageDTO, project_id: int, authenticated_user_id: int + async def post_message( + chat_dto: ChatMessageDTO, + project_id: int, + authenticated_user_id: int, + db: Database, + background_tasks: BackgroundTasks, ) -> ProjectChatDTO: - """Save message to DB and return latest chat""" - current_app.logger.debug("Posting Chat Message") - - project = ProjectService.get_project_by_id(project_id) - project_name = ProjectInfo.get_dto_for_locale( - project_id, project.default_locale - ).name + project = await ProjectService.get_project_by_id(project_id, db) + project_info_dto = await ProjectInfo.get_dto_for_locale( + db, project_id, project.default_locale + ) + project_name = project_info_dto.name is_allowed_user = True - is_manager_permission = ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id + is_manager_permission = ( + await ProjectAdminService.is_user_action_permitted_on_project( + authenticated_user_id, project_id, db + ) ) is_team_member = False @@ -47,8 +50,8 @@ def post_message( TeamRoles.VALIDATOR.value, TeamRoles.MAPPER.value, ] - is_team_member = TeamService.check_team_membership( - project_id, allowed_roles, authenticated_user_id + is_team_member = await TeamService.check_team_membership( + project_id, allowed_roles, authenticated_user_id, db ) if not is_team_member: is_allowed_user = ( @@ -61,28 +64,25 @@ def post_message( ) > 0 ) - if is_manager_permission or is_team_member or is_allowed_user: - chat_message = ProjectChat.create_from_dto(chat_dto) - db.session.commit() - threading.Thread( - target=MessageService.send_message_after_chat, - args=( - chat_dto.user_id, - chat_message.message, - chat_dto.project_id, - project_name, - ), - ).start() - # Ensure we return latest messages after post - return ProjectChat.get_messages(chat_dto.project_id, 1, 5) + chat_message = await ProjectChat.create_from_dto(chat_dto, db) + background_tasks.add_task( + MessageService.send_message_after_chat, + chat_dto.user_id, + chat_message.message, + chat_dto.project_id, + project_name, + ) + return await ProjectChat.get_messages(chat_dto.project_id, db, 1, 5) else: raise ValueError("UserNotPermitted- User not permitted to post Comment") @staticmethod - def get_messages(project_id: int, page: int, per_page: int) -> ProjectChatDTO: + async def get_messages( + project_id: int, db: Database, page: int, per_page: int + ) -> ProjectChatDTO: """Get all messages attached to a project""" - return ProjectChat.get_messages(project_id, page, per_page) + return await ProjectChat.get_messages(project_id, db, page, per_page) @staticmethod def 
get_project_chat_by_id(project_id: int, comment_id: int) -> ProjectChat: @@ -109,8 +109,11 @@ def get_project_chat_by_id(project_id: int, comment_id: int) -> ProjectChat: return chat_message @staticmethod - def delete_project_chat_by_id(project_id: int, comment_id: int, user_id: int): - """Deletes a message from a project chat + async def delete_project_chat_by_id( + project_id: int, comment_id: int, user_id: int, db: Database + ): + """ + Deletes a message from a project chat ---------------------------------------- :param project_id: The id of the project the message belongs to :param message_id: The message id to delete @@ -122,12 +125,18 @@ def delete_project_chat_by_id(project_id: int, comment_id: int, user_id: int): returns: None """ # Check if project exists - ProjectService.exists(project_id) + await ProjectService.exists(project_id, db) + + # Fetch the chat message + query = """ + SELECT user_id + FROM project_chat + WHERE project_id = :project_id AND id = :comment_id + """ + chat_message = await db.fetch_one( + query, values={"project_id": project_id, "comment_id": comment_id} + ) - chat_message = ProjectChat.query.filter( - ProjectChat.project_id == project_id, - ProjectChat.id == comment_id, - ).one_or_none() if chat_message is None: raise NotFound( sub_code="MESSAGE_NOT_FOUND", @@ -135,15 +144,22 @@ def delete_project_chat_by_id(project_id: int, comment_id: int, user_id: int): project_id=project_id, ) - is_user_allowed = ( - chat_message.user_id == user_id - or ProjectAdminService.is_user_action_permitted_on_project( - user_id, project_id - ) + is_user_allowed = chat_message[ + "user_id" + ] == user_id or await ProjectAdminService.is_user_action_permitted_on_project( + user_id, project_id, db ) + if is_user_allowed: - db.session.delete(chat_message) - db.session.commit() + # Delete the chat message + delete_query = """ + DELETE FROM project_chat + WHERE project_id = :project_id AND id = :comment_id + """ + await db.execute( + delete_query, + values={"project_id": project_id, "comment_id": comment_id}, + ) else: raise ValueError( "DeletePermissionError- User not allowed to delete message" diff --git a/backend/services/messaging/message_service.py b/backend/services/messaging/message_service.py index ff002aa3e9..9a513430dc 100644 --- a/backend/services/messaging/message_service.py +++ b/backend/services/messaging/message_service.py @@ -1,33 +1,34 @@ +import datetime import re import time -import datetime -import bleach +from typing import List +import bleach from cachetools import TTLCache, cached -from typing import List -from flask import current_app -from sqlalchemy import text, func +from databases import Database +from loguru import logger from markdown import markdown +from sqlalchemy import func, insert, text -from backend import db, create_app +from backend.config import settings +from backend.db import db_connection from backend.exceptions import NotFound from backend.models.dtos.message_dto import MessageDTO, MessagesDTO from backend.models.dtos.stats_dto import Pagination from backend.models.postgis.message import Message, MessageType from backend.models.postgis.notification import Notification from backend.models.postgis.project import Project, ProjectInfo -from backend.models.postgis.task import TaskStatus, TaskAction, TaskHistory -from backend.models.postgis.statuses import TeamRoles +from backend.models.postgis.task import TaskAction, TaskStatus +from backend.models.postgis.utils import timestamp from backend.services.messaging.smtp_service import SMTPService from 
backend.services.messaging.template_service import ( + clean_html, get_template, get_txt_template, template_var_replacing, - clean_html, ) from backend.services.organisation_service import OrganisationService -from backend.services.users.user_service import UserService, User - +from backend.services.users.user_service import User, UserService message_cache = TTLCache(maxsize=512, ttl=30) @@ -36,21 +37,20 @@ class MessageServiceError(Exception): """Custom Exception to notify callers an error occurred when handling mapping""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class MessageService: @staticmethod - def send_welcome_message(user: User): + async def send_welcome_message(user: User, db: Database): """Sends welcome message to new user at Sign up""" - org_code = current_app.config["ORG_CODE"] + org_code = settings.ORG_CODE text_template = get_txt_template("welcome_message_en.txt") hot_welcome_section = get_txt_template("hot_welcome_section_en.txt") replace_list = [ ["[USERNAME]", user.username], ["[ORG_CODE]", org_code], - ["[ORG_NAME]", current_app.config["ORG_NAME"]], + ["[ORG_NAME]", settings.ORG_NAME], ["[SETTINGS_LINK]", MessageService.get_user_settings_link()], ["[HOT_WELCOME]", hot_welcome_section if org_code == "HOT" else ""], ] @@ -61,22 +61,33 @@ def send_welcome_message(user: User): welcome_message.to_user_id = user.id welcome_message.subject = "Welcome to the {} Tasking Manager".format(org_code) welcome_message.message = text_template - welcome_message.save() - - return welcome_message.id + welcome_message.date = timestamp() + welcome_message.read = False + await Message.save(welcome_message, db) @staticmethod - def send_message_after_validation( - status: int, validated_by: int, mapped_by: int, task_id: int, project_id: int + async def send_message_after_validation( + status: int, + validated_by: int, + mapped_by: int, + task_id: int, + project_id: int, + db: Database, ): """Sends mapper a notification after their task has been marked valid or invalid""" if validated_by == mapped_by: return # No need to send a notification if you've verified your own task - project = Project.get(project_id) - project_name = ProjectInfo.get_dto_for_locale( - project_id, project.default_locale - ).name - user = UserService.get_user_by_id(mapped_by) + project = await Project.get(project_id, db) + project_name_query = """ + SELECT name + FROM project_info + WHERE project_id = :project_id AND locale = :locale + """ + project_name = await db.fetch_val( + project_name_query, + values={"project_id": project_id, "locale": project.default_locale}, + ) + user = await UserService.get_user_by_id(mapped_by, db) text_template = get_txt_template( "invalidation_message_en.txt" if status == TaskStatus.INVALIDATED @@ -91,7 +102,7 @@ def send_message_after_validation( replace_list = [ ["[USERNAME]", user.username], ["[TASK_LINK]", task_link], - ["[ORG_NAME]", current_app.config["ORG_NAME"]], + ["[ORG_NAME]", settings.ORG_NAME], ] text_template = template_var_replacing(text_template, replace_list) @@ -114,101 +125,105 @@ def send_message_after_validation( messages.append( dict(message=validation_message, user=user, project_name=project_name) ) - - # For email alerts - MessageService._push_messages(messages) + await MessageService._push_messages(messages, db) @staticmethod - def send_message_to_all_contributors(project_id: int, message_dto: MessageDTO): + async def send_message_to_all_contributors( + project_id: int, message_dto: MessageDTO + ): 
"""Sends supplied message to all contributors on specified project. Message all contributors can take over a minute to run, so this method is expected to be called on its own thread """ - - app = ( - create_app() - ) # Because message-all run on background thread it needs it's own app context - - with app.app_context(): - contributors = Message.get_all_contributors(project_id) - project = Project.get(project_id) - project_name = ProjectInfo.get_dto_for_locale( - project_id, project.default_locale - ).name + async with db_connection.database.connection() as conn: + contributors = await Message.get_all_contributors(project_id, conn) + project = await Project.get(project_id, conn) + project_info = await ProjectInfo.get_dto_for_locale( + conn, project_id, project.default_locale + ) message_dto.message = "A message from {} managers:

{}".format( MessageService.get_project_link( - project_id, project_name, highlight=True + project_id, project_info.name, highlight=True ), markdown(message_dto.message, output_format="html"), ) - messages = [] for contributor in contributors: - message = Message.from_dto(contributor[0], message_dto) + message = Message.from_dto(contributor, message_dto) message.message_type = MessageType.BROADCAST.value message.project_id = project_id - user = UserService.get_user_by_id(contributor[0]) + user = await UserService.get_user_by_id(contributor, conn) messages.append( - dict(message=message, user=user, project_name=project_name) + dict(message=message, user=user, project_name=project_info.name) ) - - MessageService._push_messages(messages) + await MessageService._push_messages(messages, conn) @staticmethod - def _push_messages(messages): + async def _push_messages(messages: list, db: Database): if len(messages) == 0: return - messages_objs = [] for i, message in enumerate(messages): user = message.get("user") obj = message.get("message") project_name = message.get("project_name") - # Store message in the database only if mentions option are disabled. + + # Skipping message if certain notifications are disabled if ( user.mentions_notifications is False and obj.message_type == MessageType.MENTION_NOTIFICATION.value ): messages_objs.append(obj) continue + if ( user.projects_notifications is False and obj.message_type == MessageType.PROJECT_ACTIVITY_NOTIFICATION.value ): continue + if ( user.projects_notifications is False and obj.message_type == MessageType.BROADCAST.value ): continue + if ( user.teams_announcement_notifications is False and obj.message_type == MessageType.TEAM_BROADCAST.value ): messages_objs.append(obj) continue + if ( user.projects_comments_notifications is False and obj.message_type == MessageType.PROJECT_CHAT_NOTIFICATION.value ): continue + if ( user.tasks_comments_notifications is False and obj.message_type == MessageType.TASK_COMMENT_NOTIFICATION.value ): continue + if user.tasks_notifications is False and obj.message_type in ( MessageType.VALIDATION_NOTIFICATION.value, MessageType.INVALIDATION_NOTIFICATION.value, ): messages_objs.append(obj) continue + # If the notification is enabled, send an email messages_objs.append(obj) - SMTPService.send_email_alert( + await SMTPService.send_email_alert( user.email_address, user.username, user.is_email_verified, message["message"].id, - UserService.get_user_by_id(message["message"].from_user_id).username, + ( + await UserService.get_user_by_id( + message["message"].from_user_id, db + ) + ).username, message["message"].project_id, message["message"].task_id, clean_html(message["message"].subject), @@ -217,30 +232,56 @@ def _push_messages(messages): project_name, ) - if i + 1 % 10 == 0: + if (i + 1) % 10 == 0: time.sleep(0.5) - # Flush messages to the database. 
- if len(messages_objs) > 0: - db.session.add_all(messages_objs) - db.session.flush() - db.session.commit() + if messages_objs: + insert_values = [ + { + "message": msg.message, + "subject": msg.subject, + "from_user_id": msg.from_user_id, + "to_user_id": msg.to_user_id, + "project_id": msg.project_id, + "task_id": msg.task_id, + "message_type": msg.message_type, + "date": timestamp(), + "read": False, + } + for msg in messages_objs + ] + + # Insert the messages into the database + query = insert(Message).values(insert_values) + await db.execute(query) @staticmethod - def send_message_after_comment( - comment_from: int, comment: str, task_id: int, project_id: int + async def send_message_after_comment( + comment_from: int, comment: str, task_id: int, project_id: int, db: Database ): """Will send a canned message to anyone @'d in a comment""" - comment_from_user = UserService.get_user_by_id(comment_from) - usernames = MessageService._parse_message_for_username( - comment, project_id, task_id + # Fetch the user who made the comment + comment_from_user = await UserService.get_user_by_id(comment_from, db) + # Parse the comment for mentions + usernames = await MessageService._parse_message_for_username( + comment, project_id, task_id, db ) if comment_from_user.username in usernames: usernames.remove(comment_from_user.username) - project = Project.get(project_id) - default_locale = project.default_locale if project else "en" - project_name = ProjectInfo.get_dto_for_locale(project_id, default_locale).name - if len(usernames) != 0: + + # Fetch project details + project = await db.fetch_one( + "SELECT * FROM projects WHERE id = :project_id", {"project_id": project_id} + ) + default_locale = project["default_locale"] if project else "en" + + # Get the project info DTO using the get_dto_for_locale function + project_info_dto = await ProjectInfo.get_dto_for_locale( + db, project_id, default_locale + ) + project_name = project_info_dto.name # Use the `name` field from the DTO + + if usernames: task_link = MessageService.get_task_link(project_id, task_id) project_link = MessageService.get_project_link(project_id, project_name) @@ -264,107 +305,115 @@ def send_message_after_comment( "strong", "ul", ] - allowed_atrributes = {"a": ["href", "rel"], "img": ["src", "alt"]} + allowed_attributes = {"a": ["href", "rel"], "img": ["src", "alt"]} + + # Convert comment to HTML using markdown and sanitize it with bleach clean_comment = bleach.clean( markdown(comment, output_format="html"), tags=allowed_tags, - attributes=allowed_atrributes, - ) # Bleach input to ensure no nefarious script tags etc - clean_comment = bleach.linkify(clean_comment) + attributes=allowed_attributes, + ) + clean_comment = bleach.linkify(clean_comment) # Linkify URLs in the comment messages = [] for username in usernames: try: - user = UserService.get_user_by_username(username) + user = await UserService.get_user_by_username(username, db) except NotFound: - continue # If we can't find the user, keep going no need to fail + continue message = Message() message.message_type = MessageType.MENTION_NOTIFICATION.value message.project_id = project_id message.task_id = task_id message.from_user_id = comment_from - message.to_user_id = user.id - message.subject = ( - f"You were mentioned in a comment in {task_link} " - + f"of Project {project_link}" - ) + message.to_user_id = user["id"] + message.subject = f"You were mentioned in a comment in {task_link} of Project {project_link}" message.message = clean_comment + message.date = timestamp() + 
message.read = False messages.append( dict(message=message, user=user, project_name=project_name) ) - MessageService._push_messages(messages) - - # Notify all contributors except the user that created the comment. - results = ( - TaskHistory.query.with_entities(TaskHistory.user_id.distinct()) - .filter(TaskHistory.project_id == project_id) - .filter(TaskHistory.task_id == task_id) - .filter(TaskHistory.user_id != comment_from) - .filter(TaskHistory.action == TaskAction.STATE_CHANGE.name) - .all() + await MessageService._push_messages(messages, db) + + # Notify all contributors except the comment author + results = await db.fetch_all( + """ + SELECT DISTINCT user_id + FROM task_history + WHERE project_id = :project_id + AND task_id = :task_id + AND user_id != :comment_from + AND action = 'STATE_CHANGE' + """, + { + "project_id": project_id, + "task_id": task_id, + "comment_from": comment_from, + }, ) - contributed_users = [r[0] for r in results] - if len(contributed_users) != 0: - user_from = User.query.get(comment_from) - if user_from is None: - raise ValueError("Username not found") - user_link = MessageService.get_user_link(user_from.username) + contributed_users = [r["user_id"] for r in results] + if contributed_users: + user_from = await UserService.get_user_by_id(comment_from, db) + user_link = MessageService.get_user_link(user_from.username) task_link = MessageService.get_task_link(project_id, task_id) project_link = MessageService.get_project_link(project_id, project_name) messages = [] for user_id in contributed_users: try: - user = UserService.get_user_by_id(user_id) - # if user was mentioned, a message has already been sent to them, - # so we can skip + user = await UserService.get_user_by_id(user_id, db) if user.username in usernames: break except NotFound: - continue # If we can't find the user, keep going no need to fail + continue message = Message() message.message_type = MessageType.TASK_COMMENT_NOTIFICATION.value message.project_id = project_id - message.from_user_id = comment_from message.task_id = task_id + message.from_user_id = comment_from message.to_user_id = user.id message.subject = f"{user_link} left a comment in {task_link} of Project {project_link}" message.message = comment + message.date = timestamp() + message.read = False messages.append( dict(message=message, user=user, project_name=project_name) ) - - MessageService._push_messages(messages) + await MessageService._push_messages(messages, db) @staticmethod - def send_project_transfer_message( + async def send_project_transfer_message( project_id: int, transferred_to: str, transferred_by: str, ): """Will send a message to the manager of the organization after a project is transferred""" - app = ( - create_app() - ) # Because message-all run on background thread it needs it's own app context - - with app.app_context(): - project = Project.get(project_id) - project_name = project.get_project_title(project.default_locale) - + async with db_connection.database.connection() as db: + project = await Project.get(project_id, db) + project_name = await project.get_project_title( + db, project.id, project.default_locale + ) + from_user = await User.get_by_username(transferred_by, db) + organisation = await OrganisationService.get_organisation_by_id_as_dto( + project.organisation_id, from_user.id, False, db + ) message = Message() message.message_type = MessageType.SYSTEM.value + message.date = timestamp() + message.read = False message.subject = f"Project {project_name} #{project_id} was transferred to 
{transferred_to}" message.message = ( f"Project {project_name} #{project_id} associated with your" - + f"organisation {project.organisation.name} was transferred to {transferred_to} by {transferred_by}." + + f"organisation {organisation.name} was transferred to {transferred_to} by {transferred_by}." ) values = { - "PROJECT_ORG_NAME": project.organisation.name, + "PROJECT_ORG_NAME": organisation.name, "PROJECT_ORG_ID": project.organisation_id, "PROJECT_NAME": project_name, "PROJECT_ID": project_id, @@ -372,16 +421,13 @@ def send_project_transfer_message( "TRANSFERRED_BY": transferred_by, } html_template = get_template("project_transfer_alert_en.html", values) - - managers = OrganisationService.get_organisation_by_id_as_dto( - project.organisation_id, User.get_by_username(transferred_by).id, False - ).managers + managers = organisation.managers for manager in managers: - manager = UserService.get_user_by_username(manager.username) + manager = await UserService.get_user_by_username(manager.username, db) message.to_user_id = manager.id - message.save() + await message.save(db) if manager.email_address and manager.is_email_verified: - SMTPService._send_message( + await SMTPService._send_message( manager.email_address, message.subject, html_template, @@ -390,20 +436,25 @@ def send_project_transfer_message( @staticmethod def get_user_link(username: str): - base_url = current_app.config["APP_BASE_URL"] + base_url = settings.APP_BASE_URL return f'{username}' @staticmethod def get_team_link(team_name: str, team_id: int, management: bool): - base_url = current_app.config["APP_BASE_URL"] + base_url = settings.APP_BASE_URL if management is True: return f'{team_name}' else: return f'{team_name}' @staticmethod - def send_request_to_join_team( - from_user: int, from_username: str, to_user: int, team_name: str, team_id: int + async def send_request_to_join_team( + from_user: int, + from_username: str, + to_user: int, + team_name: str, + team_id: int, + db: Database, ): message = Message() message.message_type = MessageType.REQUEST_TEAM_NOTIFICATION.value @@ -414,34 +465,36 @@ def send_request_to_join_team( message.subject = f"{user_link} requested to join {team_link}" message.message = f"{user_link} has requested to join the {team_link} team.\ Access the team management page to accept or reject that request." - MessageService._push_messages( - [dict(message=message, user=db.session.get(User, to_user))] - ) + user = await UserService.get_user_by_id(to_user, db) + await MessageService._push_messages([dict(message=message, user=user)], db) @staticmethod - def accept_reject_request_to_join_team( + async def accept_reject_request_to_join_team( from_user: int, from_username: str, to_user: int, team_name: str, team_id: int, response: str, + db: Database, ): message = Message() message.message_type = MessageType.REQUEST_TEAM_NOTIFICATION.value message.from_user_id = from_user message.to_user_id = to_user + message.date = timestamp() + message.read = False team_link = MessageService.get_team_link(team_name, team_id, False) user_link = MessageService.get_user_link(from_username) message.subject = f"Your request to join team {team_link} has been {response}ed" message.message = ( f"{user_link} has {response}ed your request to join the {team_link} team." 
) - message.add_message() - message.save() + user = await UserService.get_user_by_id(to_user, db) + await MessageService._push_messages([dict(message=message, user=user)], db) @staticmethod - def accept_reject_invitation_request_for_team( + async def accept_reject_invitation_request_for_team( from_user: int, from_username: str, to_user: int, @@ -449,11 +502,14 @@ def accept_reject_invitation_request_for_team( team_name: str, team_id: int, response: str, + db: Database, ): message = Message() message.message_type = MessageType.INVITATION_NOTIFICATION.value message.from_user_id = from_user message.to_user_id = to_user + message.date = timestamp() + message.read = False message.subject = "{} {}ed to join {}".format( MessageService.get_user_link(from_username), response, @@ -465,17 +521,18 @@ def accept_reject_invitation_request_for_team( sending_member, MessageService.get_team_link(team_name, team_id, True), ) - message.add_message() - message.save() + user = await UserService.get_user_by_id(to_user, db) + await MessageService._push_messages([dict(message=message, user=user)], db) @staticmethod - def send_team_join_notification( + async def send_team_join_notification( from_user: int, from_username: str, to_user: int, team_name: str, team_id: int, role: str, + db: Database, ): message = Message() message.message_type = MessageType.INVITATION_NOTIFICATION.value @@ -486,35 +543,33 @@ def send_team_join_notification( message.subject = f"You have been added to team {team_link}" message.message = f"You have been added to the team {team_link} as {role} by {user_link}.\ Access the {team_link}'s page to view more info about this team." - - message.add_message() - message.save() + message.date = timestamp() + message.read = False + user = await UserService.get_user_by_id(to_user, db) + await MessageService._push_messages([dict(message=message, user=user)], db) @staticmethod - def send_message_after_chat( - chat_from: int, chat: str, project_id: int, project_name: str + async def send_message_after_chat( + chat_from: int, + chat: str, + project_id: int, + project_name: str, ): - """Send alert to user if they were @'d in a chat message""" - app = ( - create_app() - ) # Because message-all run on background thread it needs it's own app context - if ( - app.config["ENVIRONMENT"] == "test" - ): # Don't send in test mode as this will cause tests to fail. 
- return - with app.app_context(): - usernames = MessageService._parse_message_for_username(chat, project_id) + async with db_connection.database.connection() as db: + usernames = await MessageService._parse_message_for_username( + message=chat, project_id=project_id, db=db + ) if len(usernames) != 0: link = MessageService.get_project_link( project_id, project_name, include_chat_section=True ) messages = [] for username in usernames: - current_app.logger.debug(f"Searching for {username}") + logger.debug(f"Searching for {username}") try: - user = UserService.get_user_by_username(username) + user = await UserService.get_user_by_username(username, db) except NotFound: - current_app.logger.error(f"Username {username} not found") + logger.error(f"Username {username} not found") continue # If we can't find the user, keep going no need to fail message = Message() @@ -522,33 +577,45 @@ def send_message_after_chat( message.project_id = project_id message.from_user_id = chat_from message.to_user_id = user.id + message.date = timestamp() + message.read = False message.subject = f"You were mentioned in Project {link} chat" message.message = chat messages.append( dict(message=message, user=user, project_name=project_name) ) - MessageService._push_messages(messages) - - query = f""" select user_id from project_favorites where project_id ={project_id}""" - with db.engine.connect() as conn: - favorited_users_results = conn.execute(text(query)) - favorited_users = [r[0] for r in favorited_users_results] - - # Notify all contributors except the user that created the comment. - contributed_users_results = ( - TaskHistory.query.with_entities(TaskHistory.user_id.distinct()) - .filter(TaskHistory.project_id == project_id) - .filter(TaskHistory.user_id != chat_from) - .filter(TaskHistory.action == TaskAction.STATE_CHANGE.name) - .all() + await MessageService._push_messages(messages, db) + favorited_users_query = """ select user_id from project_favorites where project_id = :project_id""" + favorited_users_values = { + "project_id": project_id, + } + favorited_users_results = await db.fetch_all( + query=favorited_users_query, values=favorited_users_values ) - contributed_users = [r[0] for r in contributed_users_results] + favorited_users = [r.user_id for r in favorited_users_results] + # Notify all contributors except the user that created the comment. 
+ contributed_users_query = """ + SELECT DISTINCT user_id + FROM task_history + WHERE project_id = :project_id + AND user_id != :chat_from + AND action = :state_change_action + """ + values = { + "project_id": project_id, + "chat_from": chat_from, + "state_change_action": TaskAction.STATE_CHANGE.name, + } + contributed_users_results = await db.fetch_all( + query=contributed_users_query, values=values + ) + contributed_users = [r.user_id for r in contributed_users_results] users_to_notify = list(set(contributed_users + favorited_users)) if len(users_to_notify) != 0: - from_user = User.query.get(chat_from) + from_user = await UserService.get_user_by_id(chat_from, db) from_user_link = MessageService.get_user_link(from_user.username) project_link = MessageService.get_project_link( project_id, project_name, include_chat_section=True @@ -556,14 +623,16 @@ def send_message_after_chat( messages = [] for user_id in users_to_notify: try: - user = UserService.get_user_by_id(user_id) + user = await UserService.get_user_by_id(user_id, db) except NotFound: - continue # If we can't find the user, keep going no need to fail + continue message = Message() message.message_type = MessageType.PROJECT_CHAT_NOTIFICATION.value message.project_id = project_id message.from_user_id = chat_from message.to_user_id = user.id + message.date = timestamp() + message.read = False message.subject = ( f"{from_user_link} left a comment in project {project_link}" ) @@ -572,14 +641,13 @@ def send_message_after_chat( dict(message=message, user=user, project_name=project_name) ) - # it's important to keep that line inside the if to avoid duplicated emails - MessageService._push_messages(messages) + await MessageService._push_messages(messages, db) @staticmethod - def send_favorite_project_activities(user_id: int): - current_app.logger.debug("Sending Favorite Project Activities") + async def send_favorite_project_activities(user_id: int): + logger.debug("Sending Favorite Project Activities") favorited_projects = UserService.get_projects_favorited(user_id) - contributed_projects = UserService.get_projects_mapped(user_id) + contributed_projects = await UserService.get_projects_mapped(user_id, db) if contributed_projects is None: contributed_projects = [] @@ -624,6 +692,8 @@ def send_favorite_project_activities(user_id: int): message.message_type = MessageType.PROJECT_ACTIVITY_NOTIFICATION.value message.project_id = project.id message.to_user_id = user.id + message.date = timestamp() + message.read = False message.subject = ( "Recent activities from your contributed/favorited Projects" ) @@ -635,74 +705,89 @@ def send_favorite_project_activities(user_id: int): MessageService._push_messages(messages) @staticmethod - def resend_email_validation(user_id: int): + async def resend_email_validation(user_id: int, db: Database): """Resends the email validation email to the logged in user""" - user = UserService.get_user_by_id(user_id) + user = await UserService.get_user_by_id(user_id, db) if user.email_address is None: raise ValueError("EmailNotSet- User does not have an email address") - SMTPService.send_verification_email(user.email_address, user.username) + await SMTPService.send_verification_email(user.email_address, user.username) @staticmethod - def _parse_message_for_bulk_mentions( - message: str, project_id: int, task_id: int = None + async def _parse_message_for_bulk_mentions( + message: str, project_id: int, task_id: int = None, db: Database = None ) -> List[str]: parser = re.compile(r"((?<=#)\w+|\[.+?\])") parsed = 
parser.findall(message) usernames = [] - project = db.session.get(Project, project_id) + query = """ + SELECT * FROM projects + WHERE id = :project_id + """ + project = await db.fetch_one(query, values={"project_id": project_id}) + + # Fetch project details, including author username by joining users and projects + project_query = """ + SELECT p.*, u.username AS author_username + FROM projects p + JOIN users u ON p.author_id = u.id + WHERE p.id = :project_id + """ + project = await db.fetch_one(project_query, {"project_id": project_id}) - if project is None: + if not project: return usernames - if "author" in parsed or "managers" in parsed: - usernames.append(project.author.username) - if "managers" in parsed: - teams = [ - t - for t in project.teams - if t.role == TeamRoles.PROJECT_MANAGER.value - ] - team_members = [ - [u.member.username for u in t.team.members if u.active is True] - for t in teams - ] - - team_members = [item for sublist in team_members for item in sublist] - usernames.extend(team_members) + # Add author if mentioned + if "author" in parsed: + usernames.append(project["author_username"]) + + # Add project managers if mentioned + if "managers" in parsed: + team_members = await db.fetch_all( + """ + SELECT u.username + FROM users u + JOIN team_members tm ON u.id = tm.user_id + JOIN teams t ON tm.team_id = t.id + WHERE t.role = 'PROJECT_MANAGER' + AND t.project_id = :project_id + """, + {"project_id": project_id}, + ) + usernames.extend([member["username"] for member in team_members]) + + # Add contributors if task_id is provided and contributors are mentioned if task_id and "contributors" in parsed: - contributors = Message.get_all_tasks_contributors(project_id, task_id) + contributors = await Message.get_all_tasks_contributors( + project_id, task_id, db + ) usernames.extend(contributors) + return usernames @staticmethod - def _parse_message_for_username( - message: str, project_id: int, task_id: int = None + async def _parse_message_for_username( + message: str, project_id: int, task_id: int = None, db: Database = None ) -> List[str]: - """Extracts all usernames from a comment looks for format @[user name]""" - + """Extracts all usernames from a comment looking for format @[user name]""" parser = re.compile(r"((?<=@)\w+|\[.+?\])") - - usernames = [] - for username in parser.findall(message): - username = username.replace("[", "", 1) - index = username.rfind("]") - username = username.replace("]", "", index) - usernames.append(username) - + usernames = [ + username.replace("[", "", 1).replace("]", "", username.rfind("]")) + for username in parser.findall(message) + ] usernames.extend( - MessageService._parse_message_for_bulk_mentions( - message, project_id, task_id + await MessageService._parse_message_for_bulk_mentions( + message, project_id, task_id, db ) ) - usernames = list(set(usernames)) - return usernames + return list(set(usernames)) @staticmethod @cached(message_cache) - def has_user_new_messages(user_id: int) -> dict: + async def has_user_new_messages(user_id: int, db: Database) -> dict: """Determines if the user has any unread messages""" - count = Notification.get_unread_message_count(user_id) + count = await Notification.get_unread_message_count(user_id, db) new_messages = False if count > 0: @@ -711,7 +796,8 @@ def has_user_new_messages(user_id: int) -> dict: return dict(newMessages=new_messages, unread=count) @staticmethod - def get_all_messages( + async def get_all_messages( + db: Database, user_id: int, locale: str, page: int, @@ -725,59 +811,121 @@ def 
get_all_messages( status=None, ): """Get all messages for user""" - sort_column = Message.__table__.columns.get(sort_by) - if sort_column is None: - sort_column = Message.date + sort_column = ( - sort_column.asc() if sort_direction.lower() == "asc" else sort_column.desc() + sort_by + if sort_by in ["date", "message_type", "from_user_id", "project_id", "read"] + else "date" + ) + sort_direction = ( + "ASC" if sort_direction and sort_direction.lower() == "asc" else "DESC" ) - query = Message.query - if project is not None: - query = query.filter(Message.project_id == project) + query = """ + SELECT + m.id AS message_id, + m.subject, + m.message, + m.from_user_id, + m.to_user_id, + m.task_id, + m.message_type, + m.date AS sent_date, + m.read, + m.project_id, + u.username AS from_username, + u.picture_url AS display_picture_url + FROM + messages m + LEFT JOIN + users u ON m.from_user_id = u.id + WHERE + m.to_user_id = :user_id + """ + + filters = [] + params = {"user_id": user_id} - if task_id is not None: - query = query.filter(Message.task_id == task_id) + if project: + filters.append("m.project_id = :project") + params["project"] = int(project) + + if task_id: + filters.append("m.task_id = :task_id") + params["task_id"] = int(task_id) if status in ["read", "unread"]: - query = query.filter(Message.read == (True if status == "read" else False)) + filters.append("m.read = :read_status") + params["read_status"] = True if status == "read" else False if message_type: - message_type_filters = map(int, message_type.split(",")) - query = query.filter(Message.message_type.in_(message_type_filters)) + filters.append("m.message_type = ANY(:message_types)") + params["message_types"] = list(map(int, message_type.split(","))) - if from_username is not None: - query = query.join(Message.from_user).filter( - User.username.ilike(from_username + "%") - ) + if from_username: + filters.append("u.username ILIKE :from_username") + params["from_username"] = from_username + "%" - results = ( - query.filter(Message.to_user_id == user_id) - .order_by(sort_column) - .paginate(page=page, per_page=page_size, error_out=True) - ) - # if results.total == 0: - # raise NotFound() + if filters: + query += " AND " + " AND ".join(filters) + + query += f" ORDER BY {sort_column} {sort_direction} LIMIT :limit OFFSET :offset" + params["limit"] = int(page_size) + params["offset"] = (int(page) - 1) * int(page_size) + + messages = await db.fetch_all(query, params) messages_dto = MessagesDTO() - for item in results.items: - if isinstance(item, tuple): - message_dto = item[0].as_dto() - message_dto.project_title = item[1].name - else: - message_dto = item.as_dto() - if item.project_id is not None: - message_dto.project_title = item.project.get_project_title(locale) - - messages_dto.user_messages.append(message_dto) - - messages_dto.pagination = Pagination(results) + for msg in messages: + message_dict = dict(msg) + if message_dict["message_type"]: + message_dict["message_type"] = MessageType( + message_dict["message_type"] + ).name + if message_dict["project_id"]: + try: + message_dict["project_title"] = ( + await Project.get_project_title( + db, message_dict["project_id"], locale + ) + or "" + ) + except: + pass + msg_dto = MessageDTO(**message_dict).copy(exclude={"from_user_id"}) + messages_dto.user_messages.append(msg_dto) + + total_count_query = """ + SELECT COUNT(*) AS total_count + FROM messages m + WHERE m.to_user_id = :user_id + """ + if filters: + total_count_query += " AND " + " AND ".join(filters) + + total_count_params = 
{"user_id": params["user_id"]} + if "project" in params: + total_count_params["project"] = params["project"] + if "task_id" in params: + total_count_params["task_id"] = params["task_id"] + if "read_status" in params: + total_count_params["read_status"] = params["read_status"] + if "message_types" in params: + total_count_params["message_types"] = params["message_types"] + if "from_username" in params: + total_count_params["from_username"] = params["from_username"] + + total_count = await db.fetch_one(total_count_query, total_count_params) + + messages_dto.pagination = Pagination.from_total_count( + page=int(page), per_page=int(page_size), total=total_count["total_count"] + ) return messages_dto @staticmethod def get_message(message_id: int, user_id: int) -> Message: """Gets the specified message""" - message = db.session.get(Message, message_id) + message = session.get(Message, message_id) if message is None: raise NotFound(sub_code="MESSAGE_NOT_FOUND", message_id=message_id) @@ -791,58 +939,106 @@ def get_message(message_id: int, user_id: int) -> Message: return message @staticmethod - def mark_all_messages_read(user_id: int, message_type: str = None): + async def mark_all_messages_read( + user_id: int, db: Database, message_type: str = None + ): """Marks all messages as read for the user ----------------------------------------- :param user_id: The user id + :param db: Database connection :param message_type: The message types to mark as read returns: None """ if message_type is not None: - # Wrap in list for unit tests to work message_type = list(map(int, message_type.split(","))) - Message.mark_all_messages_read(user_id, message_type) + await Message.mark_all_messages_read(user_id, db, message_type) @staticmethod - def mark_multiple_messages_read(message_ids: list, user_id: int): + async def mark_multiple_messages_read( + message_ids: list, user_id: int, db: Database + ): """Marks the specified messages as read for the user --------------------------------------------------- :param message_ids: List of message ids to mark as read :param user_id: The user id + :param db: Database connection returns: None """ - Message.mark_multiple_messages_read(message_ids, user_id) + await Message.mark_multiple_messages_read(message_ids, user_id, db) @staticmethod - def get_message_as_dto(message_id: int, user_id: int): + async def get_message_as_dto(message_id: int, user_id: int, db: Database): """Gets the selected message and marks it as read""" - message = MessageService.get_message(message_id, user_id) - message.mark_as_read() - return message.as_dto() + query = """ + SELECT + m.id AS message_id, + m.subject, + m.message, + m.to_user_id, + m.from_user_id, + m.task_id, + m.message_type, + m.date AS sent_date, + m.read, + m.project_id, + u.username AS from_username, + u.picture_url AS display_picture_url, + pi.name AS project_title + FROM + messages m + LEFT JOIN + users u ON m.from_user_id = u.id + LEFT JOIN + project_info pi ON m.project_id = pi.project_id + WHERE + m.id = :message_id + """ + message = await db.fetch_one(query, {"message_id": message_id}) + + if message is None: + raise NotFound(sub_code="MESSAGE_NOT_FOUND", message_id=message_id) + + if message["to_user_id"] != user_id: + raise MessageServiceError( + "AccessOtherUserMessage- " + + f"User {user_id} attempting to access another user's message {message_id}" + ) + + update_query = """ + UPDATE messages SET read = TRUE WHERE id = :message_id + """ + await db.execute(update_query, {"message_id": message_id}) + + message_dict = 
dict(message) + message_dict["message_type"] = MessageType(message_dict["message_type"]).name + return message_dict @staticmethod - def delete_message(message_id: int, user_id: int): + async def delete_message(message_id: int, user_id: int, db: Database): """Deletes the specified message""" - message = MessageService.get_message(message_id, user_id) - message.delete() + delete_query = """ + DELETE FROM messages WHERE id = :message_id AND to_user_id = :user_id + """ + await db.execute(delete_query, {"message_id": message_id, "user_id": user_id}) @staticmethod - def delete_multiple_messages(message_ids: list, user_id: int): + async def delete_multiple_messages(message_ids: list, user_id: int, db: Database): """Deletes the specified messages to the user""" - Message.delete_multiple_messages(message_ids, user_id) + await Message.delete_multiple_messages(message_ids, user_id, db) @staticmethod - def delete_all_messages(user_id: int, message_type: str = None): + async def delete_all_messages(user_id: int, db: Database, message_type: str = None): """Deletes all messages to the user ---------------------------------- :param user_id: The user id + :param db: Database connection :param message_type: The message types to delete (comma separated) returns: None """ if message_type is not None: # Wrap in list for unit tests to work message_type = list(map(int, message_type.split(","))) - Message.delete_all_messages(user_id, message_type) + await Message.delete_all_messages(user_id, db, message_type) @staticmethod def get_task_link( @@ -850,7 +1046,7 @@ def get_task_link( ) -> str: """Helper method that generates a link to the task""" if not base_url: - base_url = current_app.config["APP_BASE_URL"] + base_url = settings.APP_BASE_URL style = "" if highlight: style = "color: #d73f3f" @@ -866,7 +1062,7 @@ def get_project_link( ) -> str: """Helper method to generate a link to project chat""" if not base_url: - base_url = current_app.config["APP_BASE_URL"] + base_url = settings.APP_BASE_URL if include_chat_section: section = "#questionsAndComments" else: @@ -881,7 +1077,7 @@ def get_project_link( def get_user_profile_link(user_name: str, base_url=None) -> str: """Helper method to generate a link to a user profile""" if not base_url: - base_url = current_app.config["APP_BASE_URL"] + base_url = settings.APP_BASE_URL return f'{user_name}' @@ -889,7 +1085,7 @@ def get_user_profile_link(user_name: str, base_url=None) -> str: def get_user_settings_link(section=None, base_url=None) -> str: """Helper method to generate a link to a user profile""" if not base_url: - base_url = current_app.config["APP_BASE_URL"] + base_url = settings.APP_BASE_URL return f'User Settings' @@ -899,6 +1095,6 @@ def get_organisation_link( ) -> str: """Helper method to generate a link to a user profile""" if not base_url: - base_url = current_app.config["APP_BASE_URL"] + base_url = settings.APP_BASE_URL return f'{organisation_name}' diff --git a/backend/services/messaging/smtp_service.py b/backend/services/messaging/smtp_service.py index 610e1fab18..f844a5ac69 100644 --- a/backend/services/messaging/smtp_service.py +++ b/backend/services/messaging/smtp_service.py @@ -1,20 +1,24 @@ import urllib.parse + +from fastapi_mail import MessageSchema, MessageType from itsdangerous import URLSafeTimedSerializer -from flask import current_app -from flask_mail import Message +from loguru import logger +from databases import Database -from backend import mail, create_app +# from backend import mail, create_app +from backend import mail +from 
backend.config import settings from backend.models.postgis.message import Message as PostgisMessage from backend.models.postgis.statuses import EncouragingEmailType from backend.services.messaging.template_service import ( - get_template, format_username_link, + get_template, ) class SMTPService: @staticmethod - def send_verification_email(to_address: str, username: str): + async def send_verification_email(to_address: str, username: str): """Sends a verification email with a unique token so we can verify user owns this email address""" # TODO these could be localised if needed, in the future verification_url = SMTPService._generate_email_verification_url( @@ -25,13 +29,12 @@ def send_verification_email(to_address: str, username: str): "VERIFICATION_LINK": verification_url, } html_template = get_template("email_verification_en.html", values) - subject = "Confirm your email address" - SMTPService._send_message(to_address, subject, html_template) + await SMTPService._send_message(to_address, subject, html_template) return True @staticmethod - def send_welcome_email(to_address: str, username: str): + async def send_welcome_email(to_address: str, username: str): """Sends email welcoming new user to tasking manager""" values = { "USERNAME": username, @@ -39,12 +42,12 @@ def send_welcome_email(to_address: str, username: str): html_template = get_template("welcome.html", values) subject = "Welcome to Tasking Manager" - SMTPService._send_message(to_address, subject, html_template) + await SMTPService._send_message(to_address, subject, html_template) return True @staticmethod - def send_contact_admin_email(data): - email_to = current_app.config["EMAIL_CONTACT_ADDRESS"] + async def send_contact_admin_email(data): + email_to = settings.EMAIL_CONTACT_ADDRESS if email_to is None: raise ValueError( "This feature is not implemented due to missing variable TM_EMAIL_CONTACT_ADDRESS." @@ -59,72 +62,69 @@ def send_contact_admin_email(data): ) subject = "New contact from {name}".format(name=data.get("name")) - SMTPService._send_message(email_to, subject, message, message) + await SMTPService._send_message(email_to, subject, message, message) @staticmethod - def send_email_to_contributors_on_project_progress( + async def send_email_to_contributors_on_project_progress( email_type: str, project_id: int = None, project_name: str = None, project_completion: int = None, + db: Database = None, ): """Sends an encouraging email to a users when a project they have contributed to make progress""" from backend.services.users.user_service import UserService - app = ( - create_app() - ) # Because message-all run on background thread it needs it's own app context - with app.app_context(): - if email_type == EncouragingEmailType.PROJECT_PROGRESS.value: - subject = "The project you have contributed to has made progress." - elif email_type == EncouragingEmailType.PROJECT_COMPLETE.value: - subject = "The project you have contributed to has been completed." 
- values = { - "EMAIL_TYPE": email_type, - "PROJECT_ID": project_id, - "PROJECT_NAME": project_name, - "PROJECT_COMPLETION": project_completion, - } - contributor_ids = PostgisMessage.get_all_contributors(project_id) - for contributor_id in contributor_ids: - contributor = UserService.get_user_by_id(contributor_id[0]) - values["USERNAME"] = contributor.username - if email_type == EncouragingEmailType.BEEN_SOME_TIME.value: - recommended_projects = UserService.get_recommended_projects( - contributor.username, "en" - ).results - projects = [] - for recommended_project in recommended_projects[:4]: - projects.append( - { - "org_logo": recommended_project.organisation_logo, - "priority": recommended_project.priority, - "name": recommended_project.name, - "id": recommended_project.project_id, - "description": recommended_project.short_description, - "total_contributors": recommended_project.total_contributors, - "difficulty": recommended_project.difficulty, - "progress": recommended_project.percent_mapped, - "due_date": recommended_project.due_date, - } - ) - - values["PROJECTS"] = projects - html_template = get_template("encourage_mapper_en.html", values) - if ( - contributor.email_address - and contributor.is_email_verified - and contributor.projects_notifications - ): - current_app.logger.debug( - f"Sending {email_type} email to {contributor.email_address} for project {project_id}" - ) - SMTPService._send_message( - contributor.email_address, subject, html_template + if email_type == EncouragingEmailType.PROJECT_PROGRESS.value: + subject = "The project you have contributed to has made progress." + elif email_type == EncouragingEmailType.PROJECT_COMPLETE.value: + subject = "The project you have contributed to has been completed." + values = { + "EMAIL_TYPE": email_type, + "PROJECT_ID": project_id, + "PROJECT_NAME": project_name, + "PROJECT_COMPLETION": project_completion, + } + contributor_ids = await PostgisMessage.get_all_contributors(project_id, db) + for contributor_id in contributor_ids: + contributor = await UserService.get_user_by_id(contributor_id, db) + values["USERNAME"] = contributor.username + if email_type == EncouragingEmailType.BEEN_SOME_TIME.value: + recommended_projects = ( + await UserService.get_recommended_projects(contributor.username, "en", db) + ).results + projects = [] + for recommended_project in recommended_projects[:4]: + projects.append( + { + "org_logo": recommended_project.organisation_logo, + "priority": recommended_project.priority, + "name": recommended_project.name, + "id": recommended_project.project_id, + "description": recommended_project.short_description, + "total_contributors": recommended_project.total_contributors, + "difficulty": recommended_project.difficulty, + "progress": recommended_project.percent_mapped, + "due_date": recommended_project.due_date, + } ) + values["PROJECTS"] = projects + html_template = get_template("encourage_mapper_en.html", values) + if ( + contributor.email_address + and contributor.is_email_verified + and contributor.projects_notifications + ): + logger.debug( + f"Sending {email_type} email to {contributor.email_address} for project {project_id}" + ) + await SMTPService._send_message( + contributor.email_address, subject, html_template + ) + @staticmethod - def send_email_alert( + async def send_email_alert( to_address: str, username: str, user_email_verified: bool, @@ -142,13 +142,13 @@ def send_email_alert( if not user_email_verified: return False - current_app.logger.debug(f"Test if email required {to_address}") -
from_user_link = f"{current_app.config['APP_BASE_URL']}/users/{from_username}" - project_link = f"{current_app.config['APP_BASE_URL']}/projects/{project_id}" - task_link = f"{current_app.config['APP_BASE_URL']}/projects/{project_id}/tasks/?search={task_id}" - settings_url = "{}/settings#notifications".format( - current_app.config["APP_BASE_URL"] + logger.debug(f"Test if email required {to_address}") + from_user_link = f"{settings.APP_BASE_URL}/users/{from_username}" + project_link = f"{settings.APP_BASE_URL}/projects/{project_id}" + task_link = ( + f"{settings.APP_BASE_URL}/projects/{project_id}/tasks/?search={task_id}" ) + settings_url = "{}/settings#notifications".format(settings.APP_BASE_URL) if not to_address: return False # Many users will not have supplied email address so return @@ -156,7 +156,7 @@ def send_email_alert( if message_id is not None: message_path = f"/message/{message_id}" - inbox_url = f"{current_app.config['APP_BASE_URL']}/inbox{message_path}" + inbox_url = f"{settings.APP_BASE_URL}/inbox{message_path}" values = { "FROM_USER_LINK": from_user_link, "FROM_USERNAME": from_username, @@ -171,50 +171,47 @@ def send_email_alert( "MESSAGE_TYPE": message_type, } html_template = get_template("message_alert_en.html", values) - SMTPService._send_message(to_address, subject, html_template) + await SMTPService._send_message(to_address, subject, html_template) return True @staticmethod - def _send_message( + async def _send_message( to_address: str, subject: str, html_message: str, text_message: str = None ): """Helper sends SMTP message""" - from_address = current_app.config["MAIL_DEFAULT_SENDER"] + from_address = settings.MAIL_DEFAULT_SENDER if from_address is None: raise ValueError("Missing TM_EMAIL_FROM_ADDRESS environment variable") - msg = Message() - msg.subject = subject - msg.sender = "{} Tasking Manager <{}>".format( - current_app.config["ORG_CODE"], from_address + msg = MessageSchema( + recipients=[to_address], + subject=subject, + body=html_message, + subtype=MessageType.html, ) - msg.add_recipient(to_address) - - msg.body = text_message - msg.html = html_message + logger.debug(f"Sending email via SMTP {to_address}") + if settings.LOG_LEVEL == "DEBUG": + logger.debug(msg.as_string()) - current_app.logger.debug(f"Sending email via SMTP {to_address}") - if current_app.config["LOG_LEVEL"] == "DEBUG": - current_app.logger.debug(msg.as_string()) else: try: - mail.send(msg) - current_app.logger.debug(f"Email sent {to_address}") + await mail.send_message(msg) + logger.debug(f"Email sent {to_address}") except Exception as e: # ERROR level logs are automatically captured by sentry so that admins are notified - current_app.logger.error( + logger.error( f"{e}: Sending email failed. 
Please check SMTP configuration" ) @staticmethod def _generate_email_verification_url(email_address: str, user_name: str): """Generate email verification url with unique token""" - entropy = current_app.secret_key if current_app.secret_key else "un1testingmode" + entropy = settings.SECRET_KEY if settings.SECRET_KEY else "un1testingmode" serializer = URLSafeTimedSerializer(entropy) token = serializer.dumps(email_address) - base_url = current_app.config["APP_BASE_URL"] + base_url = settings.APP_BASE_URL verification_params = {"token": token, "username": user_name} verification_url = "{0}/verify-email/?{1}".format( diff --git a/backend/services/messaging/template_service.py b/backend/services/messaging/template_service.py index 4696fa3a1c..46fcff9a75 100644 --- a/backend/services/messaging/template_service.py +++ b/backend/services/messaging/template_service.py @@ -1,7 +1,14 @@ import os import re +from jinja2 import Environment, FileSystemLoader +from loguru import logger -from flask import current_app, render_template +from backend.config import settings + +# Set up Jinja2 environment +env = Environment( + loader=FileSystemLoader(os.path.join(os.path.dirname(__file__), "templates")) +) def get_txt_template(template_name: str): @@ -17,7 +24,7 @@ def get_txt_template(template_name: str): with open(template_location, mode="r", encoding="utf-8") as template: return template.read() except FileNotFoundError: - current_app.logger.error("Unable open file {0}".format(template_location)) + logger.error("Unable open file {0}".format(template_location)) raise ValueError("Unable open file {0}".format(template_location)) @@ -29,13 +36,19 @@ def get_template(template_name: str, values: dict) -> str: :return: Template as a string """ try: - values["ORG_CODE"] = current_app.config["ORG_CODE"] - values["ORG_NAME"] = current_app.config["ORG_NAME"] - values["ORG_LOGO"] = current_app.config["ORG_LOGO"] - values["APP_BASE_URL"] = current_app.config["APP_BASE_URL"] - return render_template(template_name, values=values) + values["ORG_CODE"] = settings.ORG_CODE + values["ORG_NAME"] = settings.ORG_NAME + values["ORG_LOGO"] = settings.ORG_LOGO + values["APP_BASE_URL"] = settings.APP_BASE_URL + + # Load the template + template = env.get_template(template_name) + + # Render the template as a string + rendered_template = template.render({"values": values}) + return rendered_template except (FileNotFoundError, TypeError): - current_app.logger.error("Unable open file {0}".format(template_name)) + logger.error("Unable open file {0}".format(template_name)) raise ValueError("Unable open file {0}".format(template_name)) @@ -59,6 +72,6 @@ def format_username_link(content): username = name[2:-1] content = content.replace( name, - f'@{username}', + f'@{username}', ) return content diff --git a/backend/services/notification_service.py b/backend/services/notification_service.py index e4c5aeab04..95d0e77453 100644 --- a/backend/services/notification_service.py +++ b/backend/services/notification_service.py @@ -1,19 +1,32 @@ from backend.models.postgis.notification import Notification +from backend.models.postgis.utils import timestamp from backend.exceptions import NotFound +from databases import Database class NotificationService: @staticmethod - def update(user_id: int): - notifications = Notification.query.filter( - Notification.user_id == user_id - ).first() + async def update(user_id: int, db: Database): + async with db.transaction(): + query = """ + SELECT * FROM notifications WHERE user_id = :user_id ORDER BY id LIMIT 1 + 
""" + notifications = await db.fetch_one(query, {"user_id": user_id}) - if notifications is None: - raise NotFound(sub_code="NOTIFICATIONS_NOT_FOUND", user_id=user_id) + if notifications is None: + raise NotFound(sub_code="NOTIFICATIONS_NOT_FOUND", user_id=user_id) - notifications.update() - return notifications.unread_count + # Update the notification's date + update_query = """ + UPDATE notifications + SET date = :timestamp + WHERE user_id = :user_id + """ + await db.execute( + update_query, {"user_id": user_id, "timestamp": timestamp()} + ) + + return notifications["unread_count"] @staticmethod def get_unread_message_count(user_id: int): diff --git a/backend/services/organisation_service.py b/backend/services/organisation_service.py index e1eef65c9e..0edb8054ea 100644 --- a/backend/services/organisation_service.py +++ b/backend/services/organisation_service.py @@ -1,29 +1,34 @@ +import json from datetime import datetime -from flask import current_app + +from databases import Database +from fastapi import HTTPException +from loguru import logger from sqlalchemy.exc import IntegrityError -from sqlalchemy import func -from sqlalchemy.sql import extract -from dateutil.relativedelta import relativedelta -from backend import db from backend.exceptions import NotFound from backend.models.dtos.organisation_dto import ( - OrganisationDTO, - NewOrganisationDTO, ListOrganisationsDTO, + NewOrganisationDTO, + OrganisationDTO, + OrganisationTeamsDTO, UpdateOrganisationDTO, ) from backend.models.dtos.stats_dto import ( - OrganizationStatsDTO, OrganizationProjectsStatsDTO, + OrganizationStatsDTO, OrganizationTasksStatsDTO, ) from backend.models.postgis.campaign import campaign_organisations from backend.models.postgis.organisation import Organisation from backend.models.postgis.project import Project, ProjectInfo -from backend.models.postgis.task import Task +from backend.models.postgis.statuses import ( + ProjectStatus, + TaskStatus, + TeamJoinMethod, + TeamMemberFunctions, +) from backend.models.postgis.team import TeamVisibility -from backend.models.postgis.statuses import ProjectStatus, TaskStatus from backend.services.users.user_service import UserService @@ -31,45 +36,158 @@ class OrganisationServiceError(Exception): """Custom Exception to notify callers an error occurred when handling organisations""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class OrganisationService: @staticmethod - def get_organisation_by_id(organisation_id: int) -> Organisation: - org = Organisation.get(organisation_id) - - if org is None: + async def get_organisation_by_id(organisation_id: int, db: Database): + # Fetch organisation details + org_query = """ + SELECT + id AS "organisation_id", + name, + slug, + logo, + description, + url, + CASE + WHEN type = 1 THEN 'FREE' + WHEN type = 2 THEN 'DISCOUNTED' + WHEN type = 3 THEN 'FULL_FEE' + ELSE 'UNKNOWN' + END AS type, + subscription_tier + FROM organisations + WHERE id = :organisation_id + """ + org_record = await db.fetch_one( + org_query, values={"organisation_id": organisation_id} + ) + if not org_record: raise NotFound( sub_code="ORGANISATION_NOT_FOUND", organisation_id=organisation_id ) - return org + # Fetch organisation managers + managers_query = """ + SELECT + u.id, + u.username, + u.picture_url + FROM users u + JOIN organisation_managers om ON u.id = om.user_id + WHERE om.organisation_id = :organisation_id + """ + managers_records = await db.fetch_all( + managers_query, values={"organisation_id": 
organisation_id} + ) + # Assign manager records initially + org_record.managers = managers_records + return org_record @staticmethod - def get_organisation_by_id_as_dto( - organisation_id: int, user_id: int, abbreviated: bool - ): - org = OrganisationService.get_organisation_by_id(organisation_id) - return OrganisationService.get_organisation_dto(org, user_id, abbreviated) + async def get_organisation_by_id_as_dto( + organisation_id: int, user_id: int, abbreviated: bool, db: Database + ) -> OrganisationDTO: + org = await OrganisationService.get_organisation_by_id(organisation_id, db) + return await OrganisationService.get_organisation_dto( + org, user_id, abbreviated, db + ) @staticmethod - def get_organisation_by_slug_as_dto(slug: str, user_id: int, abbreviated: bool): - org = Organisation.query.filter_by(slug=slug).first() - if org is None: + async def get_organisation_by_slug_as_dto( + slug: str, user_id: int, abbreviated: bool, db: Database + ): + org_query = """ + SELECT + id AS "organisation_id", + name, + slug, + logo, + description, + url, + CASE + WHEN type = 1 THEN 'FREE' + WHEN type = 2 THEN 'DISCOUNTED' + WHEN type = 3 THEN 'FULL_FEE' + ELSE 'UNKNOWN' + END AS type, + subscription_tier + FROM organisations + WHERE slug = :slug + """ + org_record = await db.fetch_one(org_query, values={"slug": slug}) + if not org_record: raise NotFound(sub_code="ORGANISATION_NOT_FOUND", slug=slug) - return OrganisationService.get_organisation_dto(org, user_id, abbreviated) + + organisation_id = org_record["organisation_id"] + + # Fetch organisation managers + managers_query = """ + SELECT + u.id, + u.username, + u.picture_url + FROM users u + JOIN organisation_managers om ON u.id = om.user_id + WHERE om.organisation_id = :organisation_id + """ + managers_records = await db.fetch_all( + managers_query, values={"organisation_id": organisation_id} + ) + + org_record.managers = managers_records + return await OrganisationService.get_organisation_dto( + org_record, user_id, abbreviated, db + ) @staticmethod - def get_organisation_dto(org, user_id: int, abbreviated: bool): + def organisation_as_dto(org) -> OrganisationDTO: + org_dto = OrganisationDTO( + organisation_id=org.organisation_id, + name=org.name, + slug=org.slug, + logo=org.logo, + description=org.description, + url=org.url, + type=org.type, + subscription_tier=org.subscription_tier, + managers=json.loads(org["managers"]), + ) + return org_dto + + @staticmethod + def team_as_dto_inside_org(team) -> OrganisationTeamsDTO: + team_dto = OrganisationTeamsDTO( + team_id=team.team_id, + name=team.name, + description=team.description, + join_method=TeamJoinMethod(team.join_method).name, + members=[ + { + "username": member["username"], + "pictureUrl": member["pictureUrl"], + "function": TeamMemberFunctions(member["function"]).name, + "active": str(member["active"]), + } + for member in json.loads(team["members"]) + ], + visibility=TeamVisibility(team["visibility"]).name, + ) + return team_dto + + @staticmethod + async def get_organisation_dto(org, user_id: int, abbreviated: bool, db): if org is None: raise NotFound(sub_code="ORGANISATION_NOT_FOUND") - organisation_dto = org.as_dto(abbreviated) + organisation_dto = Organisation.as_dto(org, abbreviated) if user_id != 0: organisation_dto.is_manager = ( - OrganisationService.can_user_manage_organisation(org.id, user_id) + await OrganisationService.can_user_manage_organisation( + organisation_dto.organisation_id, user_id, db + ) ) else: organisation_dto.is_manager = False @@ -77,20 +195,47 @@ def 
get_organisation_dto(org, user_id: int, abbreviated: bool): if abbreviated: return organisation_dto + teams_query = """ + SELECT + t.id AS team_id, + t.name, + t.description, + t.join_method, + t.visibility, + COALESCE(json_agg(json_build_object( + 'username', u.username, + 'pictureUrl', u.picture_url, + 'function', tm.function, + 'active', tm.active::text + )) FILTER (WHERE u.id IS NOT NULL), '[]') AS members + FROM teams t + LEFT JOIN team_members tm ON t.id = tm.team_id + LEFT JOIN users u ON tm.user_id = u.id + WHERE t.organisation_id = :org_id + GROUP BY t.id + """ + teams_records = await db.fetch_all( + teams_query, values={"org_id": org.organisation_id} + ) + teams = [ + OrganisationService.team_as_dto_inside_org(record) + for record in teams_records + ] if organisation_dto.is_manager: - organisation_dto.teams = [team.as_dto_inside_org() for team in org.teams] + organisation_dto.teams = teams else: organisation_dto.teams = [ - team.as_dto_inside_org() - for team in org.teams - if team.visibility == TeamVisibility.PUBLIC.value + team for team in teams if team.visibility == "PUBLIC" ] - return organisation_dto @staticmethod - def get_organisation_by_name(organisation_name: str) -> Organisation: - organisation = Organisation.get_organisation_by_name(organisation_name) + async def get_organisation_by_name( + organisation_name: str, db: Database + ) -> Organisation: + organisation = await Organisation.get_organisation_by_name( + organisation_name, db + ) if organisation is None: raise NotFound( @@ -104,94 +249,127 @@ def get_organisation_name_by_id(organisation_id: int) -> str: return Organisation.get_organisation_name_by_id(organisation_id) @staticmethod - def create_organisation(new_organisation_dto: NewOrganisationDTO) -> int: + async def create_organisation( + new_organisation_dto: NewOrganisationDTO, db: Database + ) -> int: """ Creates a new organisation using an organisation dto :param new_organisation_dto: Organisation DTO :returns: ID of new Organisation """ try: - org = Organisation.create_from_dto(new_organisation_dto) - return org.id + org = await Organisation.create_from_dto(new_organisation_dto, db) + return org except IntegrityError: raise OrganisationServiceError( f"NameExists- Organisation name already exists: {new_organisation_dto.name}" ) @staticmethod - def update_organisation(organisation_dto: UpdateOrganisationDTO) -> Organisation: + async def update_organisation( + organisation_dto: UpdateOrganisationDTO, db: Database + ) -> int: """ Updates an organisation :param organisation_dto: DTO with updated info :returns updated Organisation """ - org = OrganisationService.get_organisation_by_id( - organisation_dto.organisation_id + org = await OrganisationService.get_organisation_by_id( + organisation_dto.organisation_id, db ) - OrganisationService.assert_validate_name(org, organisation_dto.name) - OrganisationService.assert_validate_users(organisation_dto) - org.update(organisation_dto) - return org + await OrganisationService.assert_validate_name(org, organisation_dto.name, db) + await OrganisationService.assert_validate_users(organisation_dto, db) + await Organisation.update(organisation_dto, db) + return org.organisation_id @staticmethod - def delete_organisation(organisation_id: int): + async def delete_organisation(organisation_id: int, db: Database): """Deletes an organisation if it has no projects""" - org = OrganisationService.get_organisation_by_id(organisation_id) - - if org.can_be_deleted(): - org.delete() + if await Organisation.can_be_deleted(organisation_id, 
db): + delete_organisation_managers_query = """ + DELETE FROM organisation_managers + WHERE organisation_id = :organisation_id + """ + delete_organisation_query = """ + DELETE FROM organisations + WHERE id = :organisation_id + """ + try: + async with db.transaction(): + await db.execute( + query=delete_organisation_managers_query, + values={"organisation_id": organisation_id}, + ) + await db.execute( + query=delete_organisation_query, + values={"organisation_id": organisation_id}, + ) + except Exception as e: + raise HTTPException(status_code=500, detail="Deletion failed") from e else: raise OrganisationServiceError( "Organisation has projects, cannot be deleted" ) @staticmethod - def get_organisations(manager_user_id: int): + async def get_organisations(manager_user_id: int, db: Database): if manager_user_id is None: """Get all organisations""" - return Organisation.get_all_organisations() + return await Organisation.get_all_organisations(db) else: - return Organisation.get_organisations_managed_by_user(manager_user_id) + return await Organisation.get_organisations_managed_by_user( + manager_user_id, db + ) @staticmethod - def get_organisations_as_dto( + async def get_organisations_as_dto( manager_user_id: int, authenticated_user_id: int, omit_managers: bool, omit_stats: bool, + db: Database, ): - orgs = OrganisationService.get_organisations(manager_user_id) + orgs = await OrganisationService.get_organisations(manager_user_id, db) orgs_dto = ListOrganisationsDTO() for org in orgs: - org_dto = org.as_dto(omit_managers) + org_dto = OrganisationService.organisation_as_dto(org) if not omit_stats: year = datetime.today().strftime("%Y") - org_dto.stats = OrganisationService.get_organisation_stats(org.id, year) - if not authenticated_user_id: + stats = await OrganisationService.get_organisation_stats( + org_dto.organisation_id, db, year + ) + org_dto.stats = stats + + if omit_managers or not authenticated_user_id: del org_dto.managers - orgs_dto.organisations.append(org_dto) + orgs_dto.organisations.append(org_dto) return orgs_dto @staticmethod - def get_organisations_managed_by_user(user_id: int): + async def get_organisations_managed_by_user(user_id: int, db): """Get all organisations a user manages""" - if UserService.is_user_an_admin(user_id): - return Organisation.get_all_organisations() + if await UserService.is_user_an_admin(user_id, db): + return await Organisation.get_all_organisations(db) - return Organisation.get_organisations_managed_by_user(user_id) + return await Organisation.get_organisations_managed_by_user(user_id, db) @staticmethod - def get_organisations_managed_by_user_as_dto(user_id: int) -> ListOrganisationsDTO: - orgs = OrganisationService.get_organisations_managed_by_user(user_id) + async def get_organisations_managed_by_user_as_dto( + user_id: int, db: Database + ) -> ListOrganisationsDTO: + orgs = await OrganisationService.get_organisations_managed_by_user(user_id, db) orgs_dto = ListOrganisationsDTO() - orgs_dto.organisations = [org.as_dto() for org in orgs] + + # Fetch managers asynchronously for each organisation + for org in orgs: + orgs_dto.organisations.append(OrganisationService.organisation_as_dto(org)) return orgs_dto @staticmethod def get_projects_by_organisation_id(organisation_id: int) -> Organisation: projects = ( - db.session.query(Project.id, ProjectInfo.name) + session.query(Project.id, ProjectInfo.name) .join(ProjectInfo) .filter(Project.organisation_id == organisation_id) .all() @@ -205,83 +383,81 @@ def get_projects_by_organisation_id(organisation_id: 
int) -> Organisation: return projects @staticmethod - def get_organisation_stats( - organisation_id: int, year: int = None + async def get_organisation_stats( + organisation_id: int, db: Database, year: int = None ) -> OrganizationStatsDTO: - projects = db.session.query( - Project.id, Project.status, Project.last_updated, Project.created - ).filter(Project.organisation_id == organisation_id) + # Prepare the base projects query + projects_query = f""" + SELECT + COUNT(CASE WHEN status = {ProjectStatus.DRAFT.value} THEN 1 END) AS draft, + COUNT(CASE WHEN status = {ProjectStatus.PUBLISHED.value} THEN 1 END) AS published, + COUNT(CASE WHEN status = {ProjectStatus.ARCHIVED.value} THEN 1 END) AS archived, + COUNT(CASE WHEN status IN ({ProjectStatus.ARCHIVED.value}, {ProjectStatus.PUBLISHED.value}) + AND EXTRACT(YEAR FROM created) = {datetime.now().year} THEN 1 END) AS recent, + COUNT(CASE WHEN status = {ProjectStatus.PUBLISHED.value} + AND last_updated < NOW() - INTERVAL '6 MONTH' THEN 1 END) AS stale + FROM projects + WHERE organisation_id = :organisation_id""" + + projects_values = {"organisation_id": organisation_id} + if year: - start_date = f"{year}/01/01" - projects = projects.filter(Project.created.between(start_date, func.now())) + start_date = datetime(int(year), 1, 1) + projects_query += " AND created BETWEEN :start_date AND NOW()" + projects_values["start_date"] = start_date + + project_stats = await db.fetch_one(query=projects_query, values=projects_values) + + projects_dto = OrganizationProjectsStatsDTO(**project_stats) + + active_tasks_query = f""" + SELECT + COUNT(CASE WHEN t.task_status = {TaskStatus.READY.value} THEN 1 END) AS ready, + COUNT(CASE WHEN t.task_status = {TaskStatus.LOCKED_FOR_MAPPING.value} THEN 1 END) AS locked_for_mapping, + COUNT(CASE WHEN t.task_status = {TaskStatus.MAPPED.value} THEN 1 END) AS mapped, + COUNT(CASE WHEN t.task_status = {TaskStatus.LOCKED_FOR_VALIDATION.value} THEN 1 END) AS locked_for_validation, + COUNT(CASE WHEN t.task_status = {TaskStatus.VALIDATED.value} THEN 1 END) AS validated, + COUNT(CASE WHEN t.task_status = {TaskStatus.INVALIDATED.value} THEN 1 END) AS invalidated, + COUNT(CASE WHEN t.task_status = {TaskStatus.BADIMAGERY.value} THEN 1 END) AS badimagery + FROM tasks t + WHERE t.project_id IN ( + SELECT p.id + FROM projects p + WHERE p.organisation_id = :organisation_id + AND p.status = {ProjectStatus.PUBLISHED.value} + """ - published_projects = projects.filter( - Project.status == ProjectStatus.PUBLISHED.value - ) - active_tasks = db.session.query( - Task.id, Task.project_id, Task.task_status - ).filter(Task.project_id.in_([i.id for i in published_projects.all()])) - - # populate projects stats - projects_dto = OrganizationProjectsStatsDTO() - projects_dto.draft = projects.filter( - Project.status == ProjectStatus.DRAFT.value - ).count() - projects_dto.published = published_projects.count() - projects_dto.archived = projects.filter( - Project.status == ProjectStatus.ARCHIVED.value - ).count() - projects_dto.recent = projects.filter( - Project.status.in_( - [ProjectStatus.ARCHIVED.value, ProjectStatus.PUBLISHED.value] - ), - extract("year", Project.created) == datetime.now().year, - ).count() - projects_dto.stale = projects.filter( - Project.status == ProjectStatus.PUBLISHED.value, - func.DATE(Project.last_updated) < datetime.now() + relativedelta(months=-6), - ).count() - - # populate tasks stats - tasks_dto = OrganizationTasksStatsDTO() - tasks_dto.ready = active_tasks.filter( - Task.task_status == TaskStatus.READY.value - ).count() 
- tasks_dto.locked_for_mapping = active_tasks.filter( - Task.task_status == TaskStatus.LOCKED_FOR_MAPPING.value - ).count() - tasks_dto.mapped = active_tasks.filter( - Task.task_status == TaskStatus.MAPPED.value - ).count() - tasks_dto.locked_for_validation = active_tasks.filter( - Task.task_status == TaskStatus.LOCKED_FOR_VALIDATION.value - ).count() - tasks_dto.validated = active_tasks.filter( - Task.task_status == TaskStatus.VALIDATED.value - ).count() - tasks_dto.invalidated = active_tasks.filter( - Task.task_status == TaskStatus.INVALIDATED.value - ).count() - tasks_dto.badimagery = active_tasks.filter( - Task.task_status == TaskStatus.BADIMAGERY.value - ).count() - - # populate and return main dto + task_values = {"organisation_id": organisation_id} + + if year: + start_date = datetime(int(year), 1, 1) + active_tasks_query += " AND p.created BETWEEN :start_date AND NOW()" + task_values["start_date"] = start_date + + active_tasks_query += ")" + task_stats = await db.fetch_one(query=active_tasks_query, values=task_values) + tasks_dto = OrganizationTasksStatsDTO(**task_stats) + + # Populate and return the main DTO stats_dto = OrganizationStatsDTO() stats_dto.projects = projects_dto stats_dto.active_tasks = tasks_dto + return stats_dto @staticmethod - def assert_validate_name(org: Organisation, name: str): + async def assert_validate_name(org: Organisation, name: str, db): """Validates that the organisation name doesn't exist""" - if org.name != name and Organisation.get_organisation_by_name(name) is not None: + if ( + org.name != name + and await Organisation.get_organisation_by_name(name, db) is not None + ): raise OrganisationServiceError( f"NameExists- Organisation name already exists: {name}" ) @staticmethod - def assert_validate_users(organisation_dto: OrganisationDTO): + async def assert_validate_users(organisation_dto: OrganisationDTO, db): """Validates that the users exist""" if organisation_dto.managers and len(organisation_dto.managers) == 0: raise OrganisationServiceError( @@ -292,35 +468,43 @@ def assert_validate_users(organisation_dto: OrganisationDTO): managers = [] for user in organisation_dto.managers: try: - admin = UserService.get_user_by_username(user) + admin = await UserService.get_user_by_username(user, db) except NotFound: raise NotFound(sub_code="USER_NOT_FOUND", username=user) managers.append(admin.username) - organisation_dto.managers = managers @staticmethod - def can_user_manage_organisation(organisation_id: int, user_id: int): + async def can_user_manage_organisation( + organisation_id: int, user_id: int, db: Database + ): """Check that the user is an admin for the org or a global admin""" - if UserService.is_user_an_admin(user_id): + if await UserService.is_user_an_admin(user_id, db): return True else: - return OrganisationService.is_user_an_org_manager(organisation_id, user_id) + return await OrganisationService.is_user_an_org_manager( + organisation_id, user_id, db + ) @staticmethod - def is_user_an_org_manager(organisation_id: int, user_id: int): + async def is_user_an_org_manager(organisation_id: int, user_id: int, db: Database): """Check that the user is an manager for the org""" - - org = Organisation.get(organisation_id) - - if org is None: - raise NotFound( - sub_code="ORGANISATION_NOT_FOUND", organisation_id=organisation_id - ) - user = UserService.get_user_by_id(user_id) - - return user in org.managers + # Fetch organisation managers' IDs + managers_query = """ + SELECT + u.id + FROM users u + JOIN organisation_managers om ON u.id = om.user_id + 
WHERE om.organisation_id = :organisation_id + """ + managers_records = await db.fetch_all( + managers_query, values={"organisation_id": organisation_id} + ) + # Extract the list of IDs from the records + managers_ids = [record.id for record in managers_records] + user = await UserService.get_user_by_id(user_id, db) + return user.id in managers_ids @staticmethod def get_campaign_organisations_as_dto(campaign_id: int, user_id: int): @@ -329,7 +513,8 @@ def get_campaign_organisations_as_dto(campaign_id: int, user_id: int): """ organisation_list_dto = ListOrganisationsDTO() orgs = ( - Organisation.query.join(campaign_organisations) + session.query(Organisation) + .join(campaign_organisations) .filter(campaign_organisations.c.campaign_id == campaign_id) .all() ) diff --git a/backend/services/partner_service.py b/backend/services/partner_service.py index cde96e6383..e1435bcd57 100644 --- a/backend/services/partner_service.py +++ b/backend/services/partner_service.py @@ -1,5 +1,10 @@ -from flask import current_app +# from flask import current_app import json + +from databases import Database +from fastapi.responses import JSONResponse +from loguru import logger + from backend.models.dtos.partner_dto import PartnerDTO from backend.models.postgis.partner import Partner @@ -8,22 +13,21 @@ class PartnerServiceError(Exception): """Custom Exception to notify callers an error occurred when handling partners""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class PartnerService: @staticmethod - def get_partner_by_id(partner_id: int) -> Partner: - return Partner.get_by_id(partner_id) + async def get_partner_by_id(partner_id: int, db: Database): + return await Partner.get_by_id(partner_id, db) @staticmethod - def get_partner_by_permalink(permalink: str) -> Partner: - return Partner.get_by_permalink(permalink) + async def get_partner_by_permalink(permalink: str, db: Database) -> Partner: + return await Partner.get_by_permalink(permalink, db) @staticmethod - def create_partner(data): - """Create a new partner in database""" + async def create_partner(data, db: Database) -> int: + """Create a new partner in the database""" website_links = [] for i in range(1, 6): name_key = f"name_{i}" @@ -32,34 +36,54 @@ def create_partner(data): url = data.get(url_key) if name and url: website_links.append({"name": name, "url": url}) - new_partner = Partner( - name=data.get("name"), - primary_hashtag=data.get("primary_hashtag"), - secondary_hashtag=data.get("secondary_hashtag"), - logo_url=data.get("logo_url"), - link_meta=data.get("link_meta"), - link_x=data.get("link_x"), - link_instagram=data.get("link_instagram"), - current_projects=data.get("current_projects"), - permalink=data.get("permalink"), - website_links=json.dumps(website_links), - mapswipe_group_id=data.get("mapswipe_group_id"), - ) - new_partner.create() - return new_partner + + query = """ + INSERT INTO partners ( + name, primary_hashtag, secondary_hashtag, logo_url, link_meta, + link_x, link_instagram, current_projects, permalink, + website_links, mapswipe_group_id + ) VALUES ( + :name, :primary_hashtag, :secondary_hashtag, :logo_url, :link_meta, + :link_x, :link_instagram, :current_projects, :permalink, + :website_links, :mapswipe_group_id + ) RETURNING id + """ + + values = { + "name": data.get("name"), + "primary_hashtag": data.get("primary_hashtag"), + "secondary_hashtag": data.get("secondary_hashtag"), + "logo_url": data.get("logo_url"), + "link_meta": data.get("link_meta"), + "link_x": 
data.get("link_x"), + "link_instagram": data.get("link_instagram"), + "current_projects": data.get("current_projects"), + "permalink": data.get("permalink"), + "website_links": json.dumps(website_links), + "mapswipe_group_id": data.get("mapswipe_group_id"), + } + + new_partner_id = await db.execute(query, values) + return new_partner_id @staticmethod - def delete_partner(partner_id: int): - partner = Partner.get_by_id(partner_id) + async def delete_partner(partner_id: int, db: Database): + partner = await Partner.get_by_id(partner_id, db) if partner: - partner.delete() - return {"Success": "Team deleted"}, 200 + delete_partner_query = """ + DELETE FROM partners WHERE id = :partner_id + """ + await db.execute(delete_partner_query, {"partner_id": partner_id}) + return JSONResponse(content={"Success": "Team deleted"}, status_code=200) else: - return {"Error": "Partner cannot be deleted"}, 400 + return JSONResponse( + content={"Error": "Partner cannot be deleted"}, status_code=400 + ) @staticmethod - def update_partner(partner_id: int, data: dict) -> Partner: - partner = Partner.get_by_id(partner_id) + async def update_partner(partner_id: int, data: dict, db: Database) -> dict: + partner = await Partner.get_by_id(partner_id, db) + # Handle dynamic website links from name_* and url_* website_links = [] for key, value in data.items(): if key.startswith("name_"): @@ -67,12 +91,36 @@ def update_partner(partner_id: int, data: dict) -> Partner: url_key = f"url_{index}" if url_key in data and value.strip(): website_links.append({"name": value, "url": data[url_key]}) + + set_clauses = [] + params = {"partner_id": partner_id} + for key, value in data.items(): - if hasattr(partner, key): - setattr(partner, key, value) - partner.website_links = json.dumps(website_links) - partner.save() - return partner + # Exclude name_* and url_* fields from direct update + if key.startswith("name_") or key.startswith("url_"): + continue + set_clauses.append(f"{key} = :{key}") + params[key] = value + + if website_links: + set_clauses.append("website_links = :website_links") + params["website_links"] = json.dumps(website_links) + + set_clause = ", ".join(set_clauses) + query = f""" + UPDATE partners + SET {set_clause} + WHERE id = :partner_id + RETURNING * + """ + + updated_partner = await db.fetch_one(query, params) + if not updated_partner: + raise PartnerServiceError(f"Failed to update Partner with ID {partner_id}.") + partner_dict = dict(updated_partner) + if "website_links" in partner_dict and partner_dict["website_links"]: + partner_dict["website_links"] = json.loads(partner_dict["website_links"]) + return partner_dict @staticmethod def get_partner_dto_by_id(partner: int, request_partner: int) -> PartnerDTO: @@ -83,6 +131,6 @@ def get_partner_dto_by_id(partner: int, request_partner: int) -> PartnerDTO: return partner.as_dto() @staticmethod - def get_all_partners(): + async def get_all_partners(db: Database): """Get all partners""" - return Partner.get_all_partners() + return await Partner.get_all_partners(db) diff --git a/backend/services/project_admin_service.py b/backend/services/project_admin_service.py index 13e34bb6fb..19eb6f4ad0 100644 --- a/backend/services/project_admin_service.py +++ b/backend/services/project_admin_service.py @@ -1,47 +1,50 @@ import json -import threading + import geojson -from flask import current_app +from databases import Database +from fastapi import BackgroundTasks +from loguru import logger +from backend.config import settings from backend.exceptions import NotFound from 
backend.models.dtos.project_dto import ( DraftProjectDTO, - ProjectDTO, ProjectCommentsDTO, + ProjectDTO, ProjectSearchDTO, ) -from backend.models.postgis.project import Project, Task, ProjectStatus +from backend.models.postgis.project import Project, ProjectStatus, Task from backend.models.postgis.statuses import TaskCreationMode, TeamRoles -from backend.models.postgis.task import TaskHistory, TaskStatus, TaskAction +from backend.models.postgis.task import TaskAction, TaskHistory, TaskStatus from backend.models.postgis.user import User from backend.models.postgis.utils import InvalidData, InvalidGeoJson from backend.services.grid.grid_service import GridService from backend.services.license_service import LicenseService from backend.services.messaging.message_service import MessageService -from backend.services.users.user_service import UserService from backend.services.organisation_service import OrganisationService from backend.services.team_service import TeamService +from backend.services.users.user_service import UserService class ProjectAdminServiceError(Exception): """Custom Exception to notify callers an error occurred when validating a Project""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class ProjectStoreError(Exception): """Custom Exception to notify callers an error occurred with database CRUD operations""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class ProjectAdminService: @staticmethod - def create_draft_project(draft_project_dto: DraftProjectDTO) -> int: + async def create_draft_project( + draft_project_dto: DraftProjectDTO, db: Database + ) -> int: """ Validates and then persists draft projects in the DB :param draft_project_dto: Draft Project DTO with data from API @@ -49,15 +52,15 @@ def create_draft_project(draft_project_dto: DraftProjectDTO) -> int: :returns ID of new draft project """ user_id = draft_project_dto.user_id - is_admin = UserService.is_user_an_admin(user_id) - user_orgs = OrganisationService.get_organisations_managed_by_user_as_dto( - user_id + is_admin = await UserService.is_user_an_admin(user_id, db) + user_orgs = await OrganisationService.get_organisations_managed_by_user_as_dto( + user_id, db ) is_org_manager = len(user_orgs.organisations) > 0 # First things first, we need to validate that the author_id is a PM. 
issue #1715 if not (is_admin or is_org_manager): - user = UserService.get_user_by_id(user_id) + user = await UserService.get_user_by_id(user_id, db) raise ( ProjectAdminServiceError( f"NotPermittedToCreate- User {user.username} is not permitted to create project" @@ -66,16 +69,19 @@ def create_draft_project(draft_project_dto: DraftProjectDTO) -> int: # If we're cloning we'll copy all the project details from the clone, otherwise create brand new project if draft_project_dto.cloneFromProjectId: - draft_project = Project.clone(draft_project_dto.cloneFromProjectId, user_id) + draft_project = await Project.clone( + draft_project_dto.cloneFromProjectId, user_id, db + ) else: draft_project = Project() - org = OrganisationService.get_organisation_by_id( - draft_project_dto.organisation + org = await OrganisationService.get_organisation_by_id( + draft_project_dto.organisation, db ) draft_project_dto.organisation = org + draft_project.create_draft_project(draft_project_dto) - draft_project.set_project_aoi(draft_project_dto) + await draft_project.set_project_aoi(draft_project_dto, db) # if arbitrary_tasks requested, create tasks from aoi otherwise use tasks in DTO if draft_project_dto.has_arbitrary_tasks: @@ -85,27 +91,30 @@ def create_draft_project(draft_project_dto: DraftProjectDTO) -> int: draft_project.task_creation_mode = TaskCreationMode.ARBITRARY.value else: tasks = draft_project_dto.tasks - ProjectAdminService._attach_tasks_to_project(draft_project, tasks) + + await ProjectAdminService._attach_tasks_to_project(draft_project, tasks, db) + draft_project.set_country_info() if draft_project_dto.cloneFromProjectId: - draft_project.save() # Update the clone + draft_project.set_default_changeset_comment() + await draft_project.save(db) # Update the clone + return draft_project.id else: - draft_project.create() # Create the new project - - draft_project.set_default_changeset_comment() - draft_project.set_country_info() - return draft_project.id + project_id = await Project.create( + draft_project, draft_project_dto.project_name, db + ) # Create the new project + return project_id @staticmethod def _set_default_changeset_comment(draft_project: Project): """Sets the default changesset comment when project created""" - default_comment = current_app.config["DEFAULT_CHANGESET_COMMENT"] + default_comment = settings.DEFAULT_CHANGESET_COMMENT draft_project.changeset_comment = f"{default_comment}-{draft_project.id}" draft_project.save() @staticmethod - def _get_project_by_id(project_id: int) -> Project: - project = Project.get(project_id) + async def _get_project_by_id(project_id: int, db: Database) -> Project: + project = await Project.get(project_id, db) if project is None: raise NotFound(sub_code="PROJECT_NOT_FOUND", project_id=project_id) @@ -113,13 +122,15 @@ def _get_project_by_id(project_id: int) -> Project: return project @staticmethod - def get_project_dto_for_admin(project_id: int) -> ProjectDTO: + async def get_project_dto_for_admin(project_id: int, db: Database) -> ProjectDTO: """Get the project as DTO for project managers""" - project = ProjectAdminService._get_project_by_id(project_id) - return project.as_dto_for_admin(project_id) + project = await Project.exists(project_id, db) + return await Project.as_dto_for_admin(project_id, db) @staticmethod - def update_project(project_dto: ProjectDTO, authenticated_user_id: int): + async def update_project( + project_dto: ProjectDTO, authenticated_user_id: int, db: Database + ): project_id = project_dto.project_id if project_dto.project_status == 
ProjectStatus.PUBLISHED.name: @@ -128,14 +139,16 @@ def update_project(project_dto: ProjectDTO, authenticated_user_id: int): ) if project_dto.license_id: - ProjectAdminService._validate_imagery_licence(project_dto.license_id) + await ProjectAdminService._validate_imagery_licence( + project_dto.license_id, db + ) # To be handled before reaching this function - if ProjectAdminService.is_user_action_permitted_on_project( - authenticated_user_id, project_id + if await ProjectAdminService.is_user_action_permitted_on_project( + authenticated_user_id, project_id, db ): - project = ProjectAdminService._get_project_by_id(project_id) - project.update(project_dto) + project = await ProjectAdminService._get_project_by_id(project_id, db) + await Project.update(project, project_dto, db) else: raise ValueError( str(project_id) @@ -145,29 +158,29 @@ def update_project(project_dto: ProjectDTO, authenticated_user_id: int): return project @staticmethod - def _validate_imagery_licence(license_id: int): + async def _validate_imagery_licence(license_id: int, db: Database): """Ensures that the suppliced license Id actually exists""" try: - LicenseService.get_license_as_dto(license_id) + await LicenseService.get_license_as_dto(license_id, db) except NotFound: raise ProjectAdminServiceError( f"RequireLicenseId- LicenseId {license_id} not found" ) @staticmethod - def delete_project(project_id: int, authenticated_user_id: int): + async def delete_project(project_id: int, authenticated_user_id: int, db: Database): """Deletes project if it has no completed tasks""" - project = ProjectAdminService._get_project_by_id(project_id) - is_admin = UserService.is_user_an_admin(authenticated_user_id) - user_orgs = OrganisationService.get_organisations_managed_by_user_as_dto( - authenticated_user_id + project = await ProjectAdminService._get_project_by_id(project_id, db) + is_admin = await UserService.is_user_an_admin(authenticated_user_id, db) + user_orgs = await OrganisationService.get_organisations_managed_by_user_as_dto( + authenticated_user_id, db ) is_org_manager = len(user_orgs.organisations) > 0 if is_admin or is_org_manager: - if project.can_be_deleted(): - project.delete() + if await Project.can_be_deleted(project, db): + await Project.delete(project, db) else: raise ProjectAdminServiceError( "HasMappedTasks- Project has mapped tasks, cannot be deleted" @@ -178,25 +191,53 @@ def delete_project(project_id: int, authenticated_user_id: int): ) @staticmethod - def reset_all_tasks(project_id: int, user_id: int): + async def reset_all_tasks(project_id: int, user_id: int, db: Database): """Resets all tasks on project, preserving history""" - tasks_to_reset = Task.query.filter( - Task.project_id == project_id, - Task.task_status != TaskStatus.READY.value, - ).all() + # Fetch tasks that are not in the READY state + query = """ + SELECT id, task_status + FROM tasks + WHERE project_id = :project_id + AND task_status != :ready_status + """ + tasks_to_reset = await db.fetch_all( + query=query, + values={ + "project_id": project_id, + "ready_status": TaskStatus.READY.value, + }, + ) + + # Reset each task and preserve history for task in tasks_to_reset: - task.set_task_history( - TaskAction.COMMENT, user_id, "Task reset", TaskStatus.READY + task_id = task["id"] + + # Add a history entry for the task reset + await Task.set_task_history( + task_id=task_id, + project_id=project_id, + user_id=user_id, + action=TaskAction.COMMENT, + db=db, + comment="Task reset", + new_state=TaskStatus.READY, ) - task.reset_task(user_id) - # Reset project 
counters - project = ProjectAdminService._get_project_by_id(project_id) - project.tasks_mapped = 0 - project.tasks_validated = 0 - project.tasks_bad_imagery = 0 - project.save() + # Reset the task's status to READY + await Task.reset_task( + task_id=task_id, project_id=project_id, user_id=user_id, db=db + ) + + # Reset project counters using raw SQL + project_update_query = """ + UPDATE projects + SET tasks_mapped = 0, + tasks_validated = 0, + tasks_bad_imagery = 0 + WHERE id = :project_id + """ + await db.execute(query=project_update_query, values={"project_id": project_id}) @staticmethod def get_all_comments(project_id: int) -> ProjectCommentsDTO: @@ -209,7 +250,9 @@ def get_all_comments(project_id: int) -> ProjectCommentsDTO: return comments @staticmethod - def _attach_tasks_to_project(draft_project: Project, tasks_geojson): + async def _attach_tasks_to_project( + draft_project: Project, tasks_geojson, db: Database + ): """ Validates then iterates over the array of tasks and attach them to the draft project :param draft_project: Draft project in scope @@ -260,8 +303,7 @@ def _validate_default_locale(default_locale, project_info_locales): raise ProjectAdminServiceError( "InfoForLocaleRequired- Project Info for Default Locale not provided" ) - - for attr, value in default_info.items(): + for attr, value in default_info.dict().items(): if attr == "per_task_instructions": continue # Not mandatory field @@ -275,81 +317,109 @@ def _validate_default_locale(default_locale, project_info_locales): return True # Indicates valid default locale for unit testing @staticmethod - def get_projects_for_admin( - admin_id: int, preferred_locale: str, search_dto: ProjectSearchDTO + async def get_projects_for_admin( + admin_id: int, preferred_locale: str, search_dto: ProjectSearchDTO, db: Database ): """Get all projects for provided admin""" - return Project.get_projects_for_admin(admin_id, preferred_locale, search_dto) + return await Project.get_projects_for_admin( + admin_id, preferred_locale, search_dto, db + ) @staticmethod - def transfer_project_to(project_id: int, transfering_user_id: int, username: str): + async def transfer_project_to( + project_id: int, + transfering_user_id: int, + username: str, + db: Database, + background_tasks: BackgroundTasks, + ): """Transfers project from old owner (transfering_user_id) to new owner (username)""" - project = ProjectAdminService._get_project_by_id(project_id) - new_owner = UserService.get_user_by_username(username) - # No operation is required if the new owner is same as old owner - if username == project.author.username: - return - - # Check permissions for the user (transferring_user_id) who initiatied the action - is_admin = UserService.is_user_an_admin(transfering_user_id) - is_author = UserService.is_user_the_project_author( - transfering_user_id, project.author_id - ) - is_org_manager = OrganisationService.is_user_an_org_manager( - project.organisation_id, transfering_user_id - ) - if not (is_admin or is_author or is_org_manager): - raise ProjectAdminServiceError( - "TransferPermissionError- User does not have permissions to transfer project" + async with db.transaction(): + project = await Project.get(project_id, db) + new_owner = await UserService.get_user_by_username(username, db) + author_id = project.author_id + if not author_id: + raise ProjectAdminServiceError( + "TransferPermissionError- User does not have permissions to transfer project" + ) + author = await User.get_by_id(author_id, db) + if username == author.username: + return + + is_admin = 
await UserService.is_user_an_admin(transfering_user_id, db) + + is_author = UserService.is_user_the_project_author( + transfering_user_id, project.author_id + ) + is_org_manager = await OrganisationService.is_user_an_org_manager( + project.organisation_id, transfering_user_id, db ) + if not (is_admin or is_author or is_org_manager): + raise ProjectAdminServiceError( + "TransferPermissionError- User does not have permissions to transfer project" + ) - # Check permissions for the new owner - must be project's org manager - is_new_owner_org_manager = OrganisationService.is_user_an_org_manager( - project.organisation_id, new_owner.id - ) - is_new_owner_admin = UserService.is_user_an_admin(new_owner.id) - if not (is_new_owner_org_manager or is_new_owner_admin): - error_message = ( - "InvalidNewOwner- New owner must be project's org manager or TM admin" + is_new_owner_org_manager = await OrganisationService.is_user_an_org_manager( + project.organisation_id, new_owner.id, db + ) + is_new_owner_admin = await UserService.is_user_an_admin(new_owner.id, db) + if not (is_new_owner_org_manager or is_new_owner_admin): + error_message = "InvalidNewOwner- New owner must be project's org manager or TM admin" + logger.debug(error_message) + raise ValueError(error_message) + else: + transferred_by = await User.get_by_id(transfering_user_id, db) + transferred_by = transferred_by.username + project.author_id = new_owner.id + await Project.update_project_author(project_id, new_owner.id, db) + + background_tasks.add_task( + MessageService.send_project_transfer_message, + project_id, + username, + transferred_by, ) - if current_app: - current_app.logger.debug(error_message) - raise ValueError(error_message) - else: - transferred_by = User.get_by_id(transfering_user_id).username - project.author_id = new_owner.id - project.save() - threading.Thread( - target=MessageService.send_project_transfer_message, - args=(project_id, username, transferred_by), - ).start() @staticmethod - def is_user_action_permitted_on_project( - authenticated_user_id: int, project_id: int + async def is_user_action_permitted_on_project( + authenticated_user_id: int, project_id: int, db: Database ) -> bool: """Is user action permitted on project""" - project = Project.get(project_id) - if project is None: + # Fetch the project details + project_query = """ + SELECT author_id, organisation_id + FROM projects + WHERE id = :project_id + """ + project = await db.fetch_one( + query=project_query, values={"project_id": project_id} + ) + if not project: raise NotFound(sub_code="PROJECT_NOT_FOUND", project_id=project_id) + author_id = project.author_id - allowed_roles = [TeamRoles.PROJECT_MANAGER.value] + organisation_id = project.organisation_id - is_admin = UserService.is_user_an_admin(authenticated_user_id) - is_author = UserService.is_user_the_project_author( - authenticated_user_id, author_id - ) + is_admin = await UserService.is_user_an_admin(authenticated_user_id, db) + + # Check if the user is the project author + is_author = authenticated_user_id == author_id is_org_manager = False is_manager_team = False + # If the user is neither an admin nor the author, check further permissions if not (is_admin or is_author): - if hasattr(project, "organisation_id") and project.organisation_id: - org_id = project.organisation_id - is_org_manager = OrganisationService.is_user_an_org_manager( - org_id, authenticated_user_id + if organisation_id: + # Check if the user is an organisation manager + is_org_manager = await 
OrganisationService.is_user_an_org_manager( + organisation_id, authenticated_user_id, db ) if not is_org_manager: - is_manager_team = TeamService.check_team_membership( - project_id, allowed_roles, authenticated_user_id + # Check if the user is a project manager in the team + is_manager_team = await TeamService.check_team_membership( + project_id, + [TeamRoles.PROJECT_MANAGER.value], + authenticated_user_id, + db, ) return is_admin or is_author or is_org_manager or is_manager_team diff --git a/backend/services/project_partnership_service.py b/backend/services/project_partnership_service.py index 1cc8ce5a94..28509a98a6 100644 --- a/backend/services/project_partnership_service.py +++ b/backend/services/project_partnership_service.py @@ -1,55 +1,80 @@ -from flask import current_app -from backend.exceptions import NotFound, BadRequest +# from flask import current_app +import datetime +from typing import List, Optional + +from databases import Database +from loguru import logger + +from backend.exceptions import BadRequest, NotFound +from backend.models.dtos.project_partner_dto import ProjectPartnershipDTO +from backend.models.postgis.partner import Partner from backend.models.postgis.project_partner import ( + ProjectPartnerAction, ProjectPartnership, ProjectPartnershipHistory, - ProjectPartnerAction, ) -from backend.models.dtos.project_partner_dto import ProjectPartnershipDTO - -from backend.models.postgis.partner import Partner - -from typing import List, Optional -import datetime class ProjectPartnershipServiceError(Exception): """Custom Exception to notify callers an error occurred when handling project partnerships""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class ProjectPartnershipService: @staticmethod - def get_partnership_as_dto(partnership_id: int) -> ProjectPartnershipDTO: - partnership = ProjectPartnership.get_by_id(partnership_id) + async def get_partnership_as_dto( + partnership_id: int, db: Database + ) -> ProjectPartnershipDTO: + partnership = await ProjectPartnership.get_by_id(partnership_id, db) if partnership is None: raise NotFound( sub_code="PARTNERSHIP_NOT_FOUND", partnership_id=partnership_id ) - partnership_dto = ProjectPartnershipDTO() - partnership_dto.id = partnership.id - partnership_dto.project_id = partnership.project_id - partnership_dto.partner_id = partnership.partner_id - partnership_dto.started_on = partnership.started_on - partnership_dto.ended_on = partnership.ended_on + partnership_dto = ProjectPartnershipDTO( + id=partnership.id, + project_id=partnership.project_id, + partner_id=partnership.partner_id, + started_on=partnership.started_on, + ended_on=partnership.ended_on, + ) + # partnership_dto.id = partnership.id + # partnership_dto.project_id = partnership.project_id + # partnership_dto.partner_id = partnership.partner_id + # partnership_dto.started_on = partnership.started_on + # partnership_dto.ended_on = partnership.ended_on return partnership_dto - @staticmethod - def get_partnerships_by_project(project_id: int) -> List[ProjectPartnershipDTO]: - partnerships = ProjectPartnership.query.filter( - ProjectPartnership.project_id == project_id - ).all() + # @staticmethod + # def get_partnerships_by_project(project_id: int) -> List[ProjectPartnershipDTO]: + # partnerships = ProjectPartnership.query.filter( + # ProjectPartnership.project_id == project_id + # ).all() - return list( - map(lambda partnership: partnership.as_dto().to_primitive(), partnerships) - ) + # return list( + # map(lambda 
partnership: partnership.as_dto().to_primitive(), partnerships) + # ) @staticmethod - def create_partnership( + async def get_partnerships_by_project( + project_id: int, db: Database + ) -> List[ProjectPartnershipDTO]: + """ + Retrieves all partnerships for a specific project ID. + """ + query = """ + SELECT id, project_id, partner_id, started_on, ended_on + FROM project_partnerships + WHERE project_id = :project_id + """ + rows = await db.fetch_all(query, values={"project_id": project_id}) + return [ProjectPartnershipDTO(**row) for row in rows] + + @staticmethod + async def create_partnership( + db: Database, project_id: int, partner_id: int, started_on: Optional[datetime.datetime], @@ -72,7 +97,7 @@ def create_partnership( ended_on=partnership.ended_on, ) - partnership_id = partnership.create() + partnership_id = await partnership.create(db) partnership_history = ProjectPartnershipHistory() partnership_history.partnership_id = partnership_id @@ -80,17 +105,19 @@ def create_partnership( partnership_history.partner_id = partner_id partnership_history.started_on_new = partnership.started_on partnership_history.ended_on_new = partnership.ended_on - partnership_history.create() + await partnership_history.create(db) return partnership_id @staticmethod - def update_partnership_time_range( + async def update_partnership_time_range( + db: Database, partnership_id: int, started_on: Optional[datetime.datetime], ended_on: Optional[datetime.datetime], ) -> ProjectPartnership: - partnership = ProjectPartnership.get_by_id(partnership_id) + partnership_record = await ProjectPartnership.get_by_id(partnership_id, db) + partnership = ProjectPartnership(**partnership_record) if partnership is None: raise NotFound( sub_code="PARTNERSHIP_NOT_FOUND", partnership_id=partnership_id @@ -126,14 +153,15 @@ def update_partnership_time_range( ended_on=partnership.ended_on, ) - partnership.save() - partnership_history.create() + await partnership.save(db) + await partnership_history.create(db) return partnership @staticmethod - def delete_partnership(partnership_id: int): - partnership = ProjectPartnership.get_by_id(partnership_id) + async def delete_partnership(partnership_id: int, db: Database): + partnership_record = await ProjectPartnership.get_by_id(partnership_id, db) + partnership = ProjectPartnership(**partnership_record) if partnership is None: raise NotFound( sub_code="PARTNERSHIP_NOT_FOUND", partnership_id=partnership_id @@ -146,9 +174,8 @@ def delete_partnership(partnership_id: int): partnership_history.started_on_old = partnership.started_on partnership_history.ended_on_old = partnership.ended_on partnership_history.action = ProjectPartnerAction.DELETE.value - partnership_history.create() - - partnership.delete() + await partnership_history.create(db) + await partnership.delete(db) @staticmethod def get_partners_by_project(project_id: int) -> List[Partner]: diff --git a/backend/services/project_search_service.py b/backend/services/project_search_service.py index bdddc463c9..b4b4f1aea1 100644 --- a/backend/services/project_search_service.py +++ b/backend/services/project_search_service.py @@ -1,46 +1,37 @@ -import pandas as pd -from flask import current_app +# # from flask import current_app import math +from typing import List + import geojson +import pandas as pd +from cachetools import TTLCache, cached +from databases import Database +from fastapi import HTTPException from geoalchemy2 import shape -from sqlalchemy import func, distinct, desc, or_, and_ +from loguru import logger from shapely.geometry 
import Polygon, box -from cachetools import TTLCache, cached -from backend import db -from backend.exceptions import NotFound from backend.api.utils import validate_date_input +from backend.exceptions import NotFound from backend.models.dtos.project_dto import ( - ProjectSearchDTO, - ProjectSearchResultsDTO, ListSearchResultDTO, Pagination, ProjectSearchBBoxDTO, + ProjectSearchDTO, + ProjectSearchResultsDTO, ) -from backend.models.postgis.project import Project, ProjectInfo, ProjectTeams -from backend.models.postgis.partner import Partner +from backend.models.postgis.project import Project, ProjectInfo from backend.models.postgis.statuses import ( - ProjectStatus, MappingLevel, + MappingPermission, MappingTypes, + ProjectDifficulty, ProjectPriority, - UserRole, + ProjectStatus, TeamRoles, + UserRole, ValidationPermission, - MappingPermission, - ProjectDifficulty, -) -from backend.models.postgis.project_partner import ProjectPartnership -from backend.models.postgis.campaign import Campaign -from backend.models.postgis.organisation import Organisation -from backend.models.postgis.task import TaskHistory -from backend.models.postgis.utils import ( - ST_Intersects, - ST_MakeEnvelope, - ST_Transform, - ST_Area, ) -from backend.models.postgis.interests import project_interests from backend.services.users.user_service import UserService search_cache = TTLCache(maxsize=128, ttl=300) @@ -55,113 +46,140 @@ class ProjectSearchServiceError(Exception): """Custom Exception to notify callers an error occurred when handling mapping""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class BBoxTooBigError(Exception): """Custom Exception to notify callers an error occurred when handling mapping""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class ProjectSearchService: @staticmethod - def create_search_query(user=None, as_csv: bool = False): + async def create_search_query(db, user=None, as_csv: bool = False): + # Base query for fetching project details if as_csv: - query = ( - db.session.query( - Project.id.label("id"), - ProjectInfo.name.label("project_name"), - Project.difficulty, - Project.priority, - Project.default_locale, - Project.centroid.ST_AsGeoJSON().label("centroid"), - Project.organisation_id, - Project.tasks_bad_imagery, - Project.tasks_mapped, - Project.tasks_validated, - Project.percent_mapped, - Project.percent_validated, - Project.status, - Project.total_tasks, - Project.last_updated, - Project.due_date, - Project.country, - Organisation.name.label("organisation_name"), - Organisation.logo.label("organisation_logo"), - Project.created.label("creation_date"), - func.coalesce( - func.sum(func.ST_Area(Project.geometry, True) / 1000000) - ).label("total_area"), - ) - .filter(Project.geometry is not None) - .outerjoin(Organisation, Organisation.id == Project.organisation_id) - .group_by(Organisation.id, Project.id, ProjectInfo.name) - ) - else: - query = ( - db.session.query( - Project.id.label("id"), - Project.difficulty, - Project.priority, - Project.default_locale, - Project.centroid.ST_AsGeoJSON().label("centroid"), - Project.organisation_id, - Project.tasks_bad_imagery, - Project.tasks_mapped, - Project.tasks_validated, - Project.status, - Project.total_tasks, - Project.last_updated, - Project.due_date, - Project.country, - Organisation.name.label("organisation_name"), - Organisation.logo.label("organisation_logo"), - ) - .filter(Project.geometry is not None) - 
.outerjoin(Organisation, Organisation.id == Project.organisation_id) - .group_by(Organisation.id, Project.id) - ) + query = """ + SELECT + p.id AS id, + p.priority, + p.difficulty, + p.default_locale, + p.status, + p.last_updated, + p.due_date, + p.total_tasks, + p.tasks_mapped, + p.tasks_validated, + p.tasks_bad_imagery, + u.name AS author_name, + u.username AS author_username, + o.name AS organisation_name, + ROUND(COALESCE( + (p.tasks_mapped + p.tasks_validated) * 100.0 / NULLIF(p.total_tasks - p.tasks_bad_imagery, 0), 0 + ), 2) AS percent_mapped, + ROUND(COALESCE( + p.tasks_validated * 100.0 / NULLIF(p.total_tasks - p.tasks_bad_imagery, 0), 0 + ), 2) AS percent_validated, + ROUND(CAST(COALESCE( + ST_Area(p.geometry::geography) / 1000000, 0 + ) AS numeric), 3) AS total_area, + p.country, + p.created AS creation_date + FROM projects p + LEFT JOIN organisations o ON o.id = p.organisation_id + LEFT JOIN users u ON u.id = p.author_id + WHERE p.geometry IS NOT NULL + """ - # Get public projects only for anonymous user. + else: + query = """ + SELECT + p.id AS id, + p.difficulty, + p.priority, + p.default_locale, + ST_AsGeoJSON(p.centroid) AS centroid, + p.organisation_id, + p.tasks_bad_imagery, + p.tasks_mapped, + p.tasks_validated, + p.status, + p.total_tasks, + p.last_updated, + p.due_date, + p.country, + p.mapping_types, + u.name AS author_name, + u.username AS author_username, + o.name AS organisation_name, + o.logo AS organisation_logo + FROM projects p + LEFT JOIN organisations o ON o.id = p.organisation_id + LEFT JOIN users u ON u.id = p.author_id + WHERE p.geometry IS NOT NULL + """ + + filters = [] + params = {} if user is None: - query = query.filter(Project.private.is_(False)) - - if user is not None and user.role != UserRole.ADMIN.value: - # Get also private projects of teams that the user is member. - project_ids = [[p.project_id for p in t.team.projects] for t in user.teams] - - # Get projects that belong to user organizations. - orgs_projects_ids = [[p.id for p in u.projects] for u in user.organisations] - - project_ids.extend(orgs_projects_ids) - - project_ids = tuple( - set([item for sublist in project_ids for item in sublist]) - ) - - query = query.filter( - or_(Project.private.is_(False), Project.id.in_(project_ids)) - ) + filters.append("p.private = :private") + params["private"] = False + if user is not None: + if user.role != UserRole.ADMIN.value: + # Fetch project_ids for user's teams + team_projects_query = """ + SELECT p.id + FROM projects p + JOIN project_teams pt ON pt.project_id = p.id + JOIN teams t ON t.id = pt.team_id + JOIN team_members tm ON tm.team_id = t.id + WHERE tm.user_id = :user_id + """ + team_projects = await db.fetch_all( + team_projects_query, {"user_id": user.id} + ) + # Fetch project_ids for user's organisations + org_projects_query = """ + SELECT p.id + FROM projects p + JOIN organisations o ON o.id = p.organisation_id + JOIN organisation_managers om ON om.organisation_id = o.id + WHERE om.user_id = :user_id + """ + org_projects = await db.fetch_all( + org_projects_query, {"user_id": user.id} + ) - # If the user is admin, no filter. 
- return query + # Combine and deduplicate project IDs + project_ids = tuple( + set( + [row["id"] for row in team_projects] + + [row["id"] for row in org_projects] + ) + ) + if project_ids: + filters.append("p.private = :private OR p.id = ANY(:project_ids)") + params["private"] = False + params["project_ids"] = list(project_ids) + else: + filters.append("p.private = :private") + params["private"] = False + if filters: + query += " AND (" + " AND ".join(filters) + ")" + + return query, params @staticmethod - def create_result_dto( - project: Project, - preferred_locale: str, - total_contributors: int, - with_partner_names: bool = False, - with_author_name: bool = True, - ) -> ListSearchResultDTO: - project_info_dto = ProjectInfo.get_dto_for_locale( - project.id, preferred_locale, project.default_locale + async def create_result_dto( + project, preferred_locale, total_contributors, db: Database + ): + project_info_dto = await ProjectInfo.get_dto_for_locale( + db, project.id, preferred_locale, project.default_locale ) - project_obj = Project.get(project.id) + list_dto = ListSearchResultDTO() list_dto.project_id = project.id list_dto.locale = project_info_dto.locale @@ -171,104 +189,114 @@ def create_result_dto( list_dto.short_description = project_info_dto.short_description list_dto.last_updated = project.last_updated list_dto.due_date = project.due_date - list_dto.percent_mapped = project_obj.calculate_tasks_percent( + list_dto.percent_mapped = Project.calculate_tasks_percent( "mapped", + project.tasks_mapped, + project.tasks_validated, + project.total_tasks, + project.tasks_bad_imagery, ) - list_dto.percent_validated = project_obj.calculate_tasks_percent( + list_dto.percent_validated = Project.calculate_tasks_percent( "validated", + project.tasks_mapped, + project.tasks_validated, + project.total_tasks, + project.tasks_bad_imagery, ) list_dto.status = ProjectStatus(project.status).name - list_dto.active_mappers = Project.get_active_mappers(project.id) + list_dto.active_mappers = await Project.get_active_mappers(project.id, db) list_dto.total_contributors = total_contributors list_dto.country = project.country + list_dto.author = project.author_name or project.author_username list_dto.organisation_name = project.organisation_name list_dto.organisation_logo = project.organisation_logo - list_dto.campaigns = Project.get_project_campaigns(project.id) - - list_dto.creation_date = project_obj.created - - if with_author_name: - list_dto.author = project_obj.author.name or project_obj.author.username - - if with_partner_names: - list_dto.partner_names = list( - set( - map( - lambda p: Partner.get_by_id(p.partner_id).name, - project_obj.partnerships, - ) - ) - ) - - # Use postgis to compute the total area of the geometry in square kilometers - list_dto.total_area = project_obj.query.with_entities( - func.coalesce(func.sum(func.ST_Area(project_obj.geometry, True) / 1000000)) - ).scalar() - list_dto.total_area = round(list_dto.total_area, 3) - + list_dto.campaigns = await Project.get_project_campaigns(project.id, db) return list_dto @staticmethod - def get_total_contributions(paginated_results): - paginated_projects_ids = [p.id for p in paginated_results] - - # We need to make a join to return projects without contributors. 
- project_contributors_count = ( - Project.query.with_entities( - Project.id, func.count(distinct(TaskHistory.user_id)).label("total") - ) - .filter(Project.id.in_(paginated_projects_ids)) - .outerjoin( - TaskHistory, - and_( - TaskHistory.project_id == Project.id, - TaskHistory.action != "COMMENT", - ), - ) - .group_by(Project.id) - .all() - ) - - return [p.total for p in project_contributors_count] + async def get_total_contributions( + project_ids: List[int], db: Database + ) -> List[int]: + """Fetch total contributions for given project IDs.""" + if not project_ids: + return [] + + query = """ + SELECT + p.id AS id, + COUNT(DISTINCT th.user_id) AS total + FROM projects p + LEFT JOIN task_history th ON th.project_id = p.id + AND th.action != 'COMMENT' + WHERE p.id = ANY(:project_ids) + GROUP BY p.id + """ + + params = {"project_ids": project_ids} + + result = await db.fetch_all(query, params) + + return [row["total"] for row in result] @staticmethod - @cached(csv_download_cache) - def search_projects_as_csv(search_dto: ProjectSearchDTO, user) -> str: - all_results, _ = ProjectSearchService._filter_projects(search_dto, user, True) - rows = [row._asdict() for row in all_results] + # @cached(csv_download_cache) + async def search_projects_as_csv( + search_dto: ProjectSearchDTO, user, db: Database, as_csv: bool = False + ) -> str: + all_results = await ProjectSearchService._filter_projects( + search_dto, user, db, as_csv + ) + rows = [dict(row) for row in all_results] is_user_admin = user is not None and user.role == UserRole.ADMIN.value - for row in rows: row["priority"] = ProjectPriority(row["priority"]).name row["difficulty"] = ProjectDifficulty(row["difficulty"]).name row["status"] = ProjectStatus(row["status"]).name row["total_area"] = round(row["total_area"], 3) - row["total_contributors"] = Project.get_project_total_contributions( - row["id"] + row["total_contributors"] = await Project.get_project_total_contributions( + row["id"], db ) + row["author"] = row["author_name"] or row["author_username"] + + project_name_query = """ + SELECT COALESCE( + (SELECT pi.name FROM project_info pi WHERE pi.project_id = :project_id AND pi.locale = :locale LIMIT 1), + (SELECT pi.name FROM project_info pi WHERE pi.project_id = :project_id AND pi.locale = 'en' LIMIT 1) + ) AS name + """ + + result = await db.fetch_one( + project_name_query, + { + "project_id": row["id"], + "locale": search_dto.preferred_locale or "en", + }, + ) + + row["project_name"] = result["name"] if result else None if is_user_admin: - partners_names = ( - ProjectPartnership.query.with_entities( - ProjectPartnership.project_id, Partner.name - ) - .join(Partner, ProjectPartnership.partner_id == Partner.id) - .filter(ProjectPartnership.project_id == row["id"]) - .group_by(ProjectPartnership.project_id, Partner.name) - .all() + query = """ + SELECT p.name + FROM project_partnerships pp + JOIN partners p ON pp.partner_id = p.id + WHERE pp.project_id = :project_id + GROUP BY pp.project_id, p.name + """ + partners_names = await db.fetch_all( + query=query, values={"project_id": row["id"]} ) - row["partner_names"] = [pn for (_, pn) in partners_names] + row["partner_names"] = [record["name"] for record in partners_names] df = pd.json_normalize(rows) columns_to_drop = [ "default_locale", - "organisation_id", - "organisation_logo", "tasks_bad_imagery", "tasks_mapped", "tasks_validated", "total_tasks", - "centroid", + "author_name", + "author_username", ] colummns_to_rename = { @@ -281,6 +309,7 @@ def search_projects_as_csv(search_dto: 
ProjectSearchDTO, user) -> str: "total_area": "totalArea", "total_contributors": "totalContributors", "partner_names": "partnerNames", + "author_name": "author", "project_name": "name", } @@ -290,32 +319,38 @@ def search_projects_as_csv(search_dto: ProjectSearchDTO, user) -> str: axis=1, ) df.rename(columns=colummns_to_rename, inplace=True) + cols_order = ["projectId", "name"] + [ + col for col in df.columns if col not in ["projectId", "name"] + ] + df = df[cols_order] return df.to_csv(index=False) @staticmethod @cached(search_cache) - def search_projects(search_dto: ProjectSearchDTO, user) -> ProjectSearchResultsDTO: + async def search_projects( + search_dto: ProjectSearchDTO, user, db + ) -> ProjectSearchResultsDTO: """Searches all projects for matches to the criteria provided by the user""" - all_results, paginated_results = ProjectSearchService._filter_projects( - search_dto, user - ) - if paginated_results.total == 0: + ( + all_results, + paginated_results, + pagination_dto, + ) = await ProjectSearchService._filter_projects(search_dto, user, db) + if pagination_dto.total == 0: raise NotFound(sub_code="PROJECTS_NOT_FOUND") dto = ProjectSearchResultsDTO() dto.results = [ - ProjectSearchService.create_result_dto( + await ProjectSearchService.create_result_dto( p, search_dto.preferred_locale, - Project.get_project_total_contributions(p[0]), - with_partner_names=( - user is not None and user.role == UserRole.ADMIN.value - ), - with_author_name=True, + await Project.get_project_total_contributions(p.id, db), + db, ) - for p in paginated_results.items + for p in paginated_results ] - dto.pagination = Pagination(paginated_results) + + dto.pagination = pagination_dto if search_dto.omit_map_results: return dto @@ -333,329 +368,442 @@ def search_projects(search_dto: ProjectSearchDTO, user) -> ProjectSearchResultsD features.append(feature) feature_collection = geojson.FeatureCollection(features) dto.map_results = feature_collection - return dto - @staticmethod - def _filter_projects(search_dto: ProjectSearchDTO, user, as_csv=False): - """Filters all projects based on criteria provided by user""" - - query = ProjectSearchService.create_search_query(user, as_csv) - - query = query.join(ProjectInfo).filter( - ProjectInfo.locale.in_([search_dto.preferred_locale, "en"]) + async def _filter_projects( + search_dto: ProjectSearchDTO, user, db: Database, as_csv: bool = False + ): + base_query, params = await ProjectSearchService.create_search_query( + db, user, as_csv ) - project_status_array = [] + # Initialize filter list and parameters dictionary + filters = [] + + if search_dto.preferred_locale or search_dto.text_search: + subquery_filters = [] + if search_dto.preferred_locale: + subquery_filters.append("locale IN (:preferred_locale, 'en')") + params["preferred_locale"] = search_dto.preferred_locale + + if search_dto.text_search: + search_text = "".join( + char for char in search_dto.text_search if char not in "@|&!><\\():" + ) + tsquery_search = " & ".join([x for x in search_text.split(" ") if x]) + ilike_search = f"%{search_text}%" + + subquery_filters.append( + """ + text_searchable @@ to_tsquery('english', :tsquery_search) + OR name ILIKE :text_search + """ + ) + params["tsquery_search"] = tsquery_search + params["text_search"] = ilike_search + + filters.append( + """ + p.id = ANY( + SELECT project_id + FROM project_info + WHERE {} + ) + """.format(" AND ".join(subquery_filters)) + ) + if search_dto.project_statuses: - project_status_array = [ - ProjectStatus[project_status].value - for 
project_status in search_dto.project_statuses + statuses = [ + ProjectStatus[status].value for status in search_dto.project_statuses ] - query = query.filter(Project.status.in_(project_status_array)) + filters.append("p.status = ANY(:statuses)") + params["statuses"] = tuple(statuses) else: if not search_dto.created_by: - project_status_array = [ProjectStatus.PUBLISHED.value] - query = query.filter(Project.status.in_(project_status_array)) + filters.append("p.status = :published_status") + params["published_status"] = ProjectStatus.PUBLISHED.value if not search_dto.based_on_user_interests: - # Only filter by interests if not based on user interests is provided if search_dto.interests: - query = query.join( - project_interests, project_interests.c.project_id == Project.id - ).filter(project_interests.c.interest_id.in_(search_dto.interests)) - else: - user = UserService.get_user_by_id(search_dto.based_on_user_interests) - query = query.join( - project_interests, project_interests.c.project_id == Project.id - ).filter( - project_interests.c.interest_id.in_( - [interest.id for interest in user.interests] + filters.append( + "p.id IN (SELECT project_id FROM project_interests WHERE interest_id = ANY(:interests))" ) + params["interests"] = tuple(search_dto.interests) + else: + user_interest_query = """ + SELECT interest_id + FROM user_interests + WHERE user_id = :user_id + """ + results = await db.fetch_all( + query=user_interest_query, values={"user_id": user.id} + ) + user_interests = ( + [record["interest_id"] for record in results] if results else [] ) + filters.append( + "p.id IN (SELECT project_id FROM project_interests WHERE interest_id = ANY(:user_interests))" + ) + params["user_interests"] = tuple(user_interests) + if search_dto.created_by: - query = query.filter(Project.author_id == search_dto.created_by) + filters.append("p.author_id = :created_by") + params["created_by"] = search_dto.created_by + if search_dto.mapped_by: - projects_mapped = UserService.get_projects_mapped(search_dto.mapped_by) - query = query.filter(Project.id.in_(projects_mapped)) - if search_dto.favorited_by: - projects_favorited = user.favorites - query = query.filter( - Project.id.in_([project.id for project in projects_favorited]) + mapped_projects = await UserService.get_projects_mapped( + search_dto.mapped_by, db ) + filters.append("p.id = ANY(:mapped_projects)") + params["mapped_projects"] = tuple(mapped_projects) + + if search_dto.favorited_by: + favorited_projects = [] + if user: + query = """ + SELECT project_id + FROM project_favorites + WHERE user_id = :user_id + """ + results = await db.fetch_all(query=query, values={"user_id": user.id}) + favorited_projects = [record["project_id"] for record in results] + filters.append("p.id = ANY(:favorited_projects)") + params["favorited_projects"] = tuple(favorited_projects) + if search_dto.difficulty and search_dto.difficulty.upper() != "ALL": - query = query.filter( - Project.difficulty == ProjectDifficulty[search_dto.difficulty].value - ) + filters.append("p.difficulty = :difficulty") + params["difficulty"] = ProjectDifficulty[search_dto.difficulty].value + if search_dto.action and search_dto.action != "any": if search_dto.action == "map": - query = ProjectSearchService.filter_projects_to_map(query, user) - if search_dto.action == "validate": - query = ProjectSearchService.filter_projects_to_validate(query, user) + mapping_project_ids = await ProjectSearchService.filter_projects_to_map( + user, db + ) + filters.append("p.id = ANY(:mapping_project_ids)") + 
params["mapping_project_ids"] = tuple(mapping_project_ids) + + elif search_dto.action == "validate": + validation_project_ids = ( + await ProjectSearchService.filter_projects_to_validate(user, db) + ) + filters.append("p.id = ANY(:validation_project_ids)") + params["validation_project_ids"] = tuple(validation_project_ids) if search_dto.organisation_name: - query = query.filter(Organisation.name == search_dto.organisation_name) + filters.append("o.name = :organisation_name") + params["organisation_name"] = search_dto.organisation_name if search_dto.organisation_id: - query = query.filter(Organisation.id == search_dto.organisation_id) + filters.append("o.id = :organisation_id") + params["organisation_id"] = int(search_dto.organisation_id) if search_dto.team_id: - query = query.join( - ProjectTeams, ProjectTeams.project_id == Project.id - ).filter(ProjectTeams.team_id == search_dto.team_id) + filters.append( + "p.id IN (SELECT project_id FROM project_teams WHERE team_id = :team_id)" + ) + params["team_id"] = int(search_dto.team_id) if search_dto.campaign: - query = query.join(Campaign, Project.campaign).group_by(Campaign.name) - query = query.filter(Campaign.name == search_dto.campaign) + filters.append( + "p.id IN (SELECT cp.project_id FROM campaign_projects cp " + "JOIN campaigns c ON c.id = cp.campaign_id WHERE c.name = :campaign_name)" + ) + params["campaign_name"] = search_dto.campaign if search_dto.mapping_types: - # Construct array of mapping types for query - mapping_type_array = [] - if search_dto.mapping_types_exact: - mapping_type_array = [ - { - MappingTypes[mapping_type].value - for mapping_type in search_dto.mapping_types - } - ] - query = query.filter(Project.mapping_types.in_(mapping_type_array)) + filters.append( + "p.mapping_types @> :mapping_types AND array_length(p.mapping_types, 1) = :mapping_length" + ) + params["mapping_types"] = tuple( + MappingTypes[mapping_type].value + for mapping_type in search_dto.mapping_types + ) + params["mapping_length"] = len(search_dto.mapping_types) else: - mapping_type_array = [ + filters.append("p.mapping_types && :mapping_types") + params["mapping_types"] = tuple( MappingTypes[mapping_type].value for mapping_type in search_dto.mapping_types - ] - query = query.filter(Project.mapping_types.overlap(mapping_type_array)) - - if search_dto.text_search: - # We construct an OR search, so any projects that contain or more of the search terms should be returned - invalid_ts_chars = "@|&!><\\():" - search_text = "".join( - char for char in search_dto.text_search if char not in invalid_ts_chars - ) - or_search = " | ".join([x for x in search_text.split(" ") if x != ""]) - opts = [ - ProjectInfo.text_searchable.match( - or_search, postgresql_regconfig="english" - ), - ProjectInfo.name.ilike(f"%{or_search}%"), - ] - try: - opts.append(Project.id == int(search_dto.text_search)) - except ValueError: - pass - - query = query.filter(or_(*opts)) + ) if search_dto.country: - # Unnest country column array. 
- sq = Project.query.with_entities( - Project.id, func.unnest(Project.country).label("country") - ).subquery() - query = query.filter( - func.lower(sq.c.country) == search_dto.country.lower() - ).filter(Project.id == sq.c.id) + filters.append( + "LOWER(:country) = ANY(ARRAY(SELECT LOWER(c) FROM unnest(p.country) AS c))" + ) + params["country"] = search_dto.country.lower() if search_dto.last_updated_gte: - last_updated_gte = validate_date_input(search_dto.last_updated_gte) - query = query.filter(Project.last_updated >= last_updated_gte) + filters.append("p.last_updated >= :last_updated_gte") + params["last_updated_gte"] = validate_date_input( + search_dto.last_updated_gte + ) if search_dto.last_updated_lte: - last_updated_lte = validate_date_input(search_dto.last_updated_lte) - query = query.filter(Project.last_updated <= last_updated_lte) + filters.append("p.last_updated <= :last_updated_lte") + params["last_updated_lte"] = validate_date_input( + search_dto.last_updated_lte + ) if search_dto.created_gte: - created_gte = validate_date_input(search_dto.created_gte) - query = query.filter(Project.created >= created_gte) + filters.append("p.created >= :created_gte") + params["created_gte"] = validate_date_input(search_dto.created_gte) if search_dto.created_lte: - created_lte = validate_date_input(search_dto.created_lte) - query = query.filter(Project.created <= created_lte) + filters.append("p.created <= :created_lte") + params["created_lte"] = validate_date_input(search_dto.created_lte) if search_dto.partner_id: - query = query.join( - ProjectPartnership, ProjectPartnership.project_id == Project.id - ).filter(ProjectPartnership.partner_id == search_dto.partner_id) + partner_conditions = ["pp.partner_id = :partner_id"] + params["partner_id"] = int(search_dto.partner_id) if search_dto.partnership_from: partnership_from = validate_date_input(search_dto.partnership_from) - query = query.filter(ProjectPartnership.started_on <= partnership_from) + partner_conditions.append("pp.started_on <= :partnership_from") + params["partnership_from"] = partnership_from if search_dto.partnership_to: partnership_to = validate_date_input(search_dto.partnership_to) - query = query.filter( - (ProjectPartnership.ended_on.is_(None)) - | (ProjectPartnership.ended_on >= partnership_to) + partner_conditions.append( + "(pp.ended_on IS NULL OR pp.ended_on >= :partnership_to)" + ) + params["partnership_to"] = partnership_to + + filters.append( + """ + p.id = ANY( + SELECT pp.project_id + FROM project_partnerships pp + WHERE {} ) + """.format(" AND ".join(partner_conditions)) + ) + + if search_dto.managed_by and user.role != UserRole.ADMIN.value: + # Fetch project IDs for user's organisations + org_projects_query = """ + SELECT p.id AS id + FROM projects p + JOIN organisations o ON o.id = p.organisation_id + JOIN organisation_managers om ON om.organisation_id = o.id + WHERE om.user_id = :user_id + """ + orgs_projects_ids = await db.fetch_all( + org_projects_query, {"user_id": user.id} + ) - order_by = search_dto.order_by + # Fetch project IDs for user's teams + team_projects_query = """ + SELECT pt.project_id AS id + FROM project_teams pt + JOIN team_members tm ON tm.team_id = pt.team_id + WHERE tm.user_id = :user_id + AND pt.role = :project_manager_role + AND tm.active = TRUE + """ + team_project_ids = await db.fetch_all( + team_projects_query, + { + "user_id": user.id, + "project_manager_role": TeamRoles.PROJECT_MANAGER.value, + }, + ) + + # Combine and flatten the project IDs from both queries + project_ids = tuple( + 
set( + [row["id"] for row in orgs_projects_ids] + + [row["id"] for row in team_project_ids] + ) + ) + if project_ids: + filters.append("p.id = ANY(:managed_projects)") + params["managed_projects"] = project_ids + + order_by_clause = "" + + if search_dto.order_by: + order_by = search_dto.order_by + + if order_by == "percent_mapped": + percent_mapped_sql = """ + (p.tasks_mapped + p.tasks_validated) * 100 + / NULLIF((p.total_tasks - p.tasks_bad_imagery), 0) + """ + if search_dto.order_by_type == "DESC": + order_by_clause = f" ORDER BY {percent_mapped_sql} DESC" + else: + order_by_clause = f" ORDER BY {percent_mapped_sql} ASC" + + elif order_by == "percent_validated": + percent_validated_sql = """ + p.tasks_validated * 100 + / NULLIF((p.total_tasks - p.tasks_bad_imagery), 0) + """ + if search_dto.order_by_type == "DESC": + order_by_clause = f" ORDER BY {percent_validated_sql} DESC" + else: + order_by_clause = f" ORDER BY {percent_validated_sql} ASC" - if search_dto.order_by == "percent_mapped": - if search_dto.order_by_type == "DESC": - order_by = Project.percent_mapped.desc() - else: - order_by = Project.percent_mapped.asc() - query = query.order_by(order_by) - elif search_dto.order_by == "percent_validated": - if search_dto.order_by_type == "DESC": - order_by = Project.percent_validated.desc() else: - order_by = Project.percent_validated.asc() - query = query.order_by(order_by) + order_by = f"p.{order_by}" + if search_dto.order_by_type == "DESC": + order_by += " DESC" + order_by_clause = f" ORDER BY {order_by}" + + if filters: + sql_query = base_query + " AND " + " AND ".join(filters) else: - if search_dto.order_by_type == "DESC": - order_by = desc(search_dto.order_by) - query = query.order_by(order_by).distinct(search_dto.order_by, Project.id) + sql_query = base_query - if search_dto.managed_by and user.role != UserRole.ADMIN.value: - # Get all the projects associated with the user and team. 
- orgs_projects_ids = [[p.id for p in u.projects] for u in user.organisations] - orgs_projects_ids = [ - item for sublist in orgs_projects_ids for item in sublist - ] + sql_query += order_by_clause + + all_results = await db.fetch_all(sql_query, values=params) + if as_csv: + return all_results + + page = search_dto.page + per_page = 14 + offset = (page - 1) * per_page + sql_query_paginated = sql_query + f" LIMIT {per_page} OFFSET {offset}" + # Get total count + count_query = f"SELECT COUNT(*) FROM ({sql_query}) AS count_subquery" + total_count = await db.fetch_val(count_query, values=params) + paginated_results = await db.fetch_all(sql_query_paginated, values=params) + pagination_dto = Pagination.from_total_count(page, per_page, total_count) + return all_results, paginated_results, pagination_dto - team_project_ids = [ - [ - p.project_id - for p in u.team.projects - if p.role == TeamRoles.PROJECT_MANAGER.value - ] - for u in user.teams + @staticmethod + async def filter_by_user_permission(db: Database, user, permission: str): + """Add permission filter to the project query based on user permissions.""" + + # Set the permission class and team roles based on the type of permission + if permission == "validation_permission": + permission_class = ValidationPermission + team_roles = [ + TeamRoles.VALIDATOR.value, + TeamRoles.PROJECT_MANAGER.value, ] - team_project_ids = [ - item for sublist in team_project_ids for item in sublist + else: + permission_class = MappingPermission + team_roles = [ + TeamRoles.MAPPER.value, + TeamRoles.VALIDATOR.value, + TeamRoles.PROJECT_MANAGER.value, ] - orgs_projects_ids.extend(team_project_ids) - ids = tuple(set(orgs_projects_ids)) - query = query.filter(Project.id.in_(ids)) - - all_results = [] - if not search_dto.omit_map_results: - query_result = query - query_result.column_descriptions.clear() - query_result.add_columns( - Project.id, - Project.centroid.ST_AsGeoJSON().label("centroid"), - Project.priority, + subquery = """ + AND EXISTS ( + SELECT 1 + FROM project_teams pt + JOIN teams t ON t.id = pt.team_id + WHERE pt.project_id = p.id + AND t.id IN ( + SELECT tm.team_id + FROM team_members tm + WHERE tm.user_id = :user_id AND tm.active = true + ) + AND pt.role = ANY(:team_roles) ) - all_results = query_result.all() - - paginated_results = query.paginate( - page=search_dto.page, per_page=14, error_out=True - ) - - return all_results, paginated_results + """ + + if user.mapping_level == MappingLevel.BEGINNER.value: + subquery += f""" + AND (p.{permission} IN (:teams_permission, :any_permission)) + """ + params = { + "user_id": user.id, + "team_roles": tuple(team_roles), + "teams_permission": permission_class.TEAMS.value, + "any_permission": permission_class.ANY.value, + } + else: + subquery += f""" + AND (p.{permission} IN (:any_permission, :level_permission)) + """ + params = { + "user_id": user.id, + "team_roles": tuple(team_roles), + "any_permission": permission_class.ANY.value, + "level_permission": permission_class.LEVEL.value, + } + return subquery, params @staticmethod - def filter_by_user_permission(query, user, permission: str): - """Filter projects a user can map or validate, based on their permissions.""" + async def filter_projects_to_map(user, db: Database): + """Filter projects that need mapping and can be mapped by the current user.""" + query = """ + SELECT DISTINCT p.id + FROM projects p + WHERE (p.tasks_mapped + p.tasks_validated) < (p.total_tasks - p.tasks_bad_imagery) + """ + params = {} if user and user.role != UserRole.ADMIN.value: - if 
permission == "validation_permission": - permission_class = ValidationPermission - team_roles = [ - TeamRoles.VALIDATOR.value, - TeamRoles.PROJECT_MANAGER.value, - ] - else: - permission_class = MappingPermission - team_roles = [ - TeamRoles.MAPPER.value, - TeamRoles.VALIDATOR.value, - TeamRoles.PROJECT_MANAGER.value, - ] - - selection = [] - # get ids of projects assigned to the user's teams - [ - [ - selection.append(team_project.project_id) - for team_project in user_team.team.projects - if team_project.project_id not in selection - and team_project.role in team_roles - ] - for user_team in user.teams - ] - if user.mapping_level == MappingLevel.BEGINNER.value: - # if user is beginner, get only projects with ANY or TEAMS mapping permission - # in the later case, only those that are associated with user teams - query = query.filter( - or_( - and_( - Project.id.in_(selection), - getattr(Project, permission) - == permission_class.TEAMS.value, - ), - getattr(Project, permission) == permission_class.ANY.value, - ) - ) - else: - # if user is intermediate or advanced, get projects with ANY or LEVEL permission - # and projects associated with user teams - query = query.filter( - or_( - Project.id.in_(selection), - getattr(Project, permission).in_( - [ - permission_class.ANY.value, - permission_class.LEVEL.value, - ] - ), - ) - ) + ( + subquery, + subquery_params, + ) = await ProjectSearchService.filter_by_user_permission( + db, user, "mapping_permission" + ) + query += subquery + params.update(subquery_params) - return query + project_records = await db.fetch_all(query, params) + return [record["id"] for record in project_records] if project_records else [] @staticmethod - def filter_projects_to_map(query, user): - """Filter projects that needs mapping and can be mapped by the current user.""" - query = query.filter( - Project.tasks_mapped + Project.tasks_validated - < Project.total_tasks - Project.tasks_bad_imagery - ) - return ProjectSearchService.filter_by_user_permission( - query, user, "mapping_permission" - ) + async def filter_projects_to_validate(user, db: Database): + """Filter projects that need validation and can be validated by the current user.""" + # Base query to get unique project IDs that need validation + query = """ + SELECT DISTINCT p.id + FROM projects p + WHERE p.tasks_validated < (p.total_tasks - p.tasks_bad_imagery) + """ + + params = {} + if user and user.role != UserRole.ADMIN.value: + ( + subquery, + subquery_params, + ) = await ProjectSearchService.filter_by_user_permission( + db, user, "validation_permission" + ) + query += subquery + params.update(subquery_params) - @staticmethod - def filter_projects_to_validate(query, user): - """Filter projects that needs validation and can be validated by the current user.""" - query = query.filter( - Project.tasks_validated < Project.total_tasks - Project.tasks_bad_imagery - ) - return ProjectSearchService.filter_by_user_permission( - query, user, "validation_permission" - ) + project_records = await db.fetch_all(query, params) + return [record["id"] for record in project_records] if project_records else [] @staticmethod - def get_projects_geojson( - search_bbox_dto: ProjectSearchBBoxDTO, + async def get_projects_geojson( + search_bbox_dto: ProjectSearchBBoxDTO, db: Database ) -> geojson.FeatureCollection: """Search for projects meeting the provided criteria. 
Returns a GeoJSON feature collection."""
-        # make a polygon from provided bounding box
-        polygon = ProjectSearchService._make_4326_polygon_from_bbox(
-            search_bbox_dto.bbox, search_bbox_dto.input_srid
+        polygon = await ProjectSearchService._make_4326_polygon_from_bbox(
+            search_bbox_dto.bbox, search_bbox_dto.input_srid, db
         )
-
         # validate the bbox area is less than or equal to the max area allowed to prevent
         # abuse of the api or performance issues from large requests
-        if not ProjectSearchService.validate_bbox_area(polygon):
-            raise BBoxTooBigError(
-                "BBoxTooBigError- Requested bounding box is too large"
+        if not await ProjectSearchService.validate_bbox_area(polygon, db):
+            raise HTTPException(
+                status_code=400,
+                detail="BBoxTooBigError- Requested bounding box is too large",
             )
-
         # get projects intersecting the polygon for created by the author_id
-        intersecting_projects = ProjectSearchService._get_intersecting_projects(
-            polygon, search_bbox_dto.project_author
+        intersecting_projects = await ProjectSearchService._get_intersecting_projects(
+            polygon, search_bbox_dto.project_author, db
        )
-
         # allow an empty feature collection to be returned if no intersecting features found, since this is primarily
         # for returning data to show on a map
         features = []
         for project in intersecting_projects:
             try:
-                localDTO = ProjectInfo.get_dto_for_locale(
-                    project.id, search_bbox_dto.preferred_locale, project.default_locale
+                localDTO = await ProjectInfo.get_dto_for_locale(
+                    db,
+                    project.id,
+                    search_bbox_dto.preferred_locale,
+                    project.default_locale,
                 )
             except Exception:
                 pass
@@ -673,57 +821,88 @@ def get_projects_geojson(
         return geojson.FeatureCollection(features)

     @staticmethod
-    def _get_intersecting_projects(search_polygon: Polygon, author_id: int):
+    async def _get_intersecting_projects(
+        search_polygon: Polygon, author_id: int, db: Database
+    ):
         """Executes a database query to get the intersecting projects created by the author if provided"""
+        try:
+            # Convert the search_polygon bounds to a bounding box (WKT)
+            bounds = search_polygon.bounds
+            envelope_wkt = f"ST_MakeEnvelope({bounds[0]}, {bounds[1]}, {bounds[2]}, {bounds[3]}, 4326)"
+
+            # Base SQL query with parameter placeholders
+            query_str = f"""
+                SELECT
+                    id,
+                    status,
+                    default_locale,
+                    ST_AsGeoJSON(geometry) AS geometry
+                FROM
+                    projects
+                WHERE
+                    ST_Intersects(geometry, {envelope_wkt})
+            """
+
+            # If an author_id is provided, append the AND condition
+            if author_id:
+                query_str += " AND author_id = :author_id"
+
+            # Execute the query asynchronously with the parameters
+            values = {"author_id": author_id} if author_id else {}
+            results = await db.fetch_all(query=query_str, values=values)
+
+            return results
-        query = db.session.query(
-            Project.id,
-            Project.status,
-            Project.default_locale,
-            Project.geometry.ST_AsGeoJSON().label("geometry"),
-        ).filter(
-            ST_Intersects(
-                Project.geometry,
-                ST_MakeEnvelope(
-                    search_polygon.bounds[0],
-                    search_polygon.bounds[1],
-                    search_polygon.bounds[2],
-                    search_polygon.bounds[3],
-                    4326,
-                ),
+        except Exception as e:
+            logger.error(f"Error fetching intersecting projects: {e}")
+            raise ProjectSearchServiceError(
+                f"Error fetching intersecting projects: {e}"
             )
-        )
-
-        if author_id:
-            query = query.filter(Project.author_id == author_id)
-
-        return query.all()

     @staticmethod
-    def _make_4326_polygon_from_bbox(bbox: list, srid: int) -> Polygon:
-        """make a shapely Polygon in SRID 4326 from bbox and srid"""
+    async def _make_4326_polygon_from_bbox(
+        bbox: list, srid: int, db: Database
+    ) -> Polygon:
+
"""Make a shapely Polygon in SRID 4326 from bbox and srid""" try: polygon = box(bbox[0], bbox[1], bbox[2], bbox[3]) - if not srid == 4326: + + # If the SRID is not 4326, transform it to 4326 + if srid != 4326: geometry = shape.from_shape(polygon, srid) - with db.engine.connect() as conn: - geom_4326 = conn.execute(ST_Transform(geometry, 4326)).scalar() + # Construct the raw SQL query to transform the geometry + query = "SELECT ST_Transform(ST_GeomFromText(:wkt, :srid), 4326) AS geom_4326" + values = {"wkt": geometry.wkt, "srid": srid} + + # Execute the SQL query using the encode databases instance + result = await db.fetch_one(query=query, values=values) + geom_4326 = result["geom_4326"] polygon = shape.to_shape(geom_4326) + except Exception as e: - current_app.logger.error(f"InvalidData- error making polygon: {e}") + logger.error(f"InvalidData- error making polygon: {e}") raise ProjectSearchServiceError(f"InvalidData- error making polygon: {e}") + return polygon @staticmethod - def _get_area_sqm(polygon: Polygon) -> float: - """get the area of the polygon in square metres""" - with db.engine.connect() as conn: - return conn.execute( - ST_Area(ST_Transform(shape.from_shape(polygon, 4326), 3857)) - ).scalar() + async def _get_area_sqm(polygon: Polygon, db: Database) -> float: + """Get the area of the polygon in square meters.""" + try: + geometry_wkt = polygon.wkt + + query = "SELECT ST_Area(ST_Transform(ST_GeomFromText(:wkt, 4326), 3857)) AS area" + values = {"wkt": geometry_wkt} + + result = await db.fetch_one(query=query, values=values) + return result["area"] + + except Exception as e: + logger.error(f"Error calculating area: {e}") + raise ProjectSearchServiceError(f"Error calculating area: {e}") @staticmethod - def validate_bbox_area(polygon: Polygon) -> bool: - """check polygon does not exceed maximim allowed area""" - area = ProjectSearchService._get_area_sqm(polygon) + async def validate_bbox_area(polygon: Polygon, db: Database) -> bool: + """Check if the polygon does not exceed the maximum allowed area.""" + area = await ProjectSearchService._get_area_sqm(polygon, db) return area <= MAX_AREA diff --git a/backend/services/project_service.py b/backend/services/project_service.py index 810d4c7edb..9acd065613 100644 --- a/backend/services/project_service.py +++ b/backend/services/project_service.py @@ -1,42 +1,43 @@ -import threading -from cachetools import TTLCache, cached -from flask import current_app +import json +from datetime import datetime, timedelta, timezone + import geojson -from datetime import datetime, timedelta +from cachetools import TTLCache, cached +from databases import Database +from fastapi import HTTPException +from loguru import logger +# # from flask import current_app +from backend.config import get_settings +from backend.db import db_connection from backend.exceptions import NotFound - from backend.models.dtos.mapping_dto import TaskDTOs from backend.models.dtos.project_dto import ( + ProjectContribDTO, + ProjectContribsDTO, ProjectDTO, - ProjectSummary, + ProjectSearchResultsDTO, ProjectStatsDTO, + ProjectSummary, ProjectUserStatsDTO, - ProjectContribsDTO, - ProjectContribDTO, - ProjectSearchResultsDTO, ) -from backend.models.postgis.project_chat import ProjectChat from backend.models.postgis.organisation import Organisation -from backend.models.postgis.project_info import ProjectInfo from backend.models.postgis.project import Project, ProjectStatus from backend.models.postgis.statuses import ( + EncouragingEmailType, + MappingLevel, MappingNotAllowed, - 
ValidatingNotAllowed, MappingPermission, - ValidationPermission, TeamRoles, - EncouragingEmailType, - MappingLevel, + ValidatingNotAllowed, + ValidationPermission, ) -from backend.models.postgis.task import Task, TaskHistory +from backend.models.postgis.task import Task from backend.services.messaging.smtp_service import SMTPService -from backend.services.users.user_service import UserService -from backend.services.project_search_service import ProjectSearchService from backend.services.project_admin_service import ProjectAdminService +from backend.services.project_search_service import ProjectSearchService from backend.services.team_service import TeamService -from sqlalchemy import func, or_ -from sqlalchemy.sql.expression import true +from backend.services.users.user_service import UserService summary_cache = TTLCache(maxsize=1024, ttl=600) @@ -45,24 +46,37 @@ class ProjectServiceError(Exception): """Custom Exception to notify callers an error occurred when handling projects""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class ProjectService: @staticmethod - def get_project_by_id(project_id: int) -> Project: - project = Project.get(project_id) - if project is None: - raise NotFound(sub_code="PROJECT_NOT_FOUND", project_id=project_id) + async def get_project_by_id(project_id: int, db: Database): + query = """ + SELECT * FROM projects WHERE id = :project_id + """ + project = await db.fetch_one(query=query, values={"project_id": project_id}) + + if not project: + raise HTTPException(status_code=404, detail="Project not found") return project @staticmethod - def exists(project_id: int) -> bool: - project = Project.exists(project_id) - if project is None: + async def exists(project_id: int, db: Database) -> bool: + # Query to check if the project exists + query = """ + SELECT 1 + FROM projects + WHERE id = :project_id + """ + + # Execute the query + result = await db.fetch_one(query=query, values={"project_id": project_id}) + + if result is None: raise NotFound(sub_code="PROJECT_NOT_FOUND", project_id=project_id) + return True @staticmethod @@ -74,8 +88,8 @@ def get_project_by_name(project_id: int) -> Project: return project @staticmethod - def auto_unlock_tasks(project_id: int): - Task.auto_unlock_tasks(project_id) + async def auto_unlock_tasks(project_id: int, db: Database): + await Task.auto_unlock_tasks(project_id, db) @staticmethod def delete_tasks(project_id: int, tasks_ids): @@ -95,63 +109,47 @@ def delete_tasks(project_id: int, tasks_ids): [t["obj"].delete() for t in tasks] @staticmethod - def get_contribs_by_day(project_id: int) -> ProjectContribsDTO: - # Validate that project exists - project = ProjectService.get_project_by_id(project_id) - - # Fetch all state change with date and task ID - stats = ( - TaskHistory.query.with_entities( - TaskHistory.action_text.label("action_text"), - func.DATE(TaskHistory.action_date).label("day"), - TaskHistory.task_id.label("task_id"), - ) - .filter(TaskHistory.project_id == project_id) - .filter( - TaskHistory.action == "STATE_CHANGE", - or_( - TaskHistory.action_text == "MAPPED", - TaskHistory.action_text == "VALIDATED", - TaskHistory.action_text == "INVALIDATED", - ), - ) - .group_by("action_text", "day", "task_id") - .order_by("day") - ).all() + async def get_contribs_by_day(project_id: int, db: Database) -> ProjectContribsDTO: + project = await ProjectService.get_project_by_id(project_id, db) + query = """ + SELECT + action_text, + DATE(action_date) AS day, + task_id + FROM 
task_history + WHERE project_id = :project_id + AND action = 'STATE_CHANGE' + AND action_text IN ('MAPPED', 'VALIDATED', 'INVALIDATED') + GROUP BY action_text, day, task_id + ORDER BY day ASC + """ + rows = await db.fetch_all(query=query, values={"project_id": project_id}) contribs_dto = ProjectContribsDTO() - # Filter and store unique dates - dates = list(set(r[1] for r in stats)) - dates.sort( - reverse=False - ) # Why was this reversed? To have the dates in ascending order - dates_list = [] + dates = sorted({row["day"] for row in rows}) + cumulative_mapped = 0 cumulative_validated = 0 - # A hashmap to track task state change updates tasks = { "MAPPED": {"total": 0}, "VALIDATED": {"total": 0}, "INVALIDATED": {"total": 0}, } + dates_list = [] for date in dates: dto = ProjectContribDTO( - { - "date": date, - "mapped": 0, - "validated": 0, - "total_tasks": project.total_tasks, - } + date=date, mapped=0, validated=0, total_tasks=project.total_tasks ) - # s -> ('LOCKED_FOR_MAPPING', datetime.date(2019, 4, 23), 1) - # s[0] -> action, s[1] -> date, s[2] -> task_id - values = [(s[0], s[2]) for s in stats if date == s[1]] - values.sort(reverse=True) # Most recent action comes first - for val in values: - task_id = val[1] - task_status = val[0] + values = [ + (row["action_text"], row["task_id"]) + for row in rows + if row["day"] == date + ] + values.sort(reverse=True) + + for task_status, task_id in values: if task_status == "MAPPED": if task_id not in tasks["MAPPED"]: tasks["MAPPED"][task_id] = 1 @@ -169,7 +167,7 @@ def get_contribs_by_day(project_id: int) -> ProjectContribsDTO: tasks["MAPPED"][task_id] = 1 tasks["MAPPED"]["total"] += 1 dto.mapped += 1 - else: + else: # "INVALIDATED" if task_id not in tasks["INVALIDATED"]: tasks["INVALIDATED"][task_id] = 1 tasks["INVALIDATED"]["total"] += 1 @@ -188,15 +186,15 @@ def get_contribs_by_day(project_id: int) -> ProjectContribsDTO: cumulative_validated = tasks["VALIDATED"]["total"] dto.cumulative_mapped = cumulative_mapped dto.cumulative_validated = cumulative_validated + dates_list.append(dto) contribs_dto.stats = dates_list - return contribs_dto @staticmethod - def get_project_dto_for_mapper( - project_id, current_user_id, locale="en", abbrev=False + async def get_project_dto_for_mapper( + project_id, current_user_id, db: Database, locale="en", abbrev=False ) -> ProjectDTO: """ Get the project DTO for mappers @@ -204,10 +202,12 @@ def get_project_dto_for_mapper( :param locale: Locale the mapper has requested :raises ProjectServiceError, NotFound """ - project = ProjectService.get_project_by_id(project_id) + project = await ProjectService.get_project_by_id(project_id, db) # if project is public and is not draft, we don't need to check permissions if not project.private and not project.status == ProjectStatus.DRAFT.value: - return project.as_dto_for_mapping(current_user_id, locale, abbrev) + return await Project.as_dto_for_mapping( + project.id, db, current_user_id, locale, abbrev + ) is_allowed_user = True is_team_member = None @@ -215,8 +215,8 @@ def get_project_dto_for_mapper( if current_user_id: is_manager_permission = ( - ProjectAdminService.is_user_action_permitted_on_project( - current_user_id, project_id + await ProjectAdminService.is_user_action_permitted_on_project( + current_user_id, project_id, db ) ) # Draft Projects - admins, authors, org admins & team managers permitted @@ -231,16 +231,17 @@ def get_project_dto_for_mapper( if project.private and not is_manager_permission: is_allowed_user = False if current_user_id: - is_allowed_user = ( - 
len( - [ - user - for user in project.allowed_users - if user.id == current_user_id - ] - ) - > 0 + # Query to check if the current user is an allowed user for the project + allowed_user_check_query = """ + SELECT 1 + FROM project_allowed_users pau + WHERE pau.project_id = :project_id AND pau.user_id = :user_id + """ + result = await db.fetch_one( + allowed_user_check_query, + {"project_id": project.id, "user_id": current_user_id}, ) + is_allowed_user = result is not None if not (is_allowed_user or is_manager_permission): if current_user_id: @@ -249,51 +250,64 @@ def get_project_dto_for_mapper( TeamRoles.VALIDATOR.value, TeamRoles.PROJECT_MANAGER.value, ] - is_team_member = TeamService.check_team_membership( - project_id, allowed_roles, current_user_id + is_team_member = await TeamService.check_team_membership( + project.id, allowed_roles, current_user_id, db ) if is_allowed_user or is_manager_permission or is_team_member: - return project.as_dto_for_mapping(current_user_id, locale, abbrev) + return await Project.as_dto_for_mapping( + project.id, db, current_user_id, locale, abbrev + ) else: return None @staticmethod - def get_project_tasks( - project_id, + async def get_project_tasks( + db: Database, + project_id: int, task_ids_str: str, order_by: str = None, order_by_type: str = "ASC", status: int = None, ): - project = ProjectService.get_project_by_id(project_id) - return project.tasks_as_geojson(task_ids_str, order_by, order_by_type, status) + await Project.exists(project_id, db) + return await Project.tasks_as_geojson( + db, project_id, task_ids_str, order_by, order_by_type, status + ) @staticmethod - def get_project_aoi(project_id): - project = ProjectService.get_project_by_id(project_id) - return project.get_aoi_geometry_as_geojson() + async def get_project_aoi(project_id, db: Database): + project = await Project.exists(project_id, db) + return await Project.get_aoi_geometry_as_geojson(project_id, db) @staticmethod - def get_project_priority_areas(project_id): - project = ProjectService.get_project_by_id(project_id) - geojson_areas = [] - for priority_area in project.priority_areas: - geojson_areas.append(priority_area.get_as_geojson()) + async def get_project_priority_areas(project_id: int, db: Database) -> list: + project = await Project.exists(project_id, db) + + # Fetch the priority areas' geometries as GeoJSON + query = """ + SELECT ST_AsGeoJSON(pa.geometry) AS geojson + FROM priority_areas pa + JOIN project_priority_areas ppa ON pa.id = ppa.priority_area_id + WHERE ppa.project_id = :project_id; + """ + rows = await db.fetch_all(query, values={"project_id": project_id}) + geojson_areas = [json.loads(row["geojson"]) for row in rows] if rows else [] return geojson_areas @staticmethod - def get_task_for_logged_in_user(user_id: int): + async def get_task_for_logged_in_user(user_id: int, db: Database): """if the user is working on a task in the project return it""" - tasks = Task.get_locked_tasks_for_user(user_id) + tasks = await Task.get_locked_tasks_for_user(user_id, db) - tasks_dto = tasks - return tasks_dto + return tasks @staticmethod - def get_task_details_for_logged_in_user(user_id: int, preferred_locale: str): + async def get_task_details_for_logged_in_user( + user_id: int, preferred_locale: str, db: Database + ): """if the user is working on a task in the project return it""" - tasks = Task.get_locked_tasks_details_for_user(user_id) + tasks = await Task.get_locked_tasks_details_for_user(user_id, db) if len(tasks) == 0: raise NotFound(sub_code="TASK_NOT_FOUND") @@ -301,7 
+315,11 @@ def get_task_details_for_logged_in_user(user_id: int, preferred_locale: str): # TODO put the task details in to a DTO dtos = [] for task in tasks: - dtos.append(task.as_dto_with_instructions(preferred_locale)) + dtos.append( + await Task.as_dto_with_instructions( + task.id, task.project_id, db, preferred_locale + ) + ) task_dtos = TaskDTOs() task_dtos.tasks = dtos @@ -309,23 +327,34 @@ def get_task_details_for_logged_in_user(user_id: int, preferred_locale: str): return task_dtos @staticmethod - def is_user_in_the_allowed_list(allowed_users: list, current_user_id: int): + async def is_user_in_the_allowed_list( + project_id: int, current_user_id: int, db: Database + ) -> bool: """For private projects, check if user is present in the allowed list""" - return ( - len([user.id for user in allowed_users if user.id == current_user_id]) > 0 + + query = """ + SELECT COUNT(1) + FROM project_allowed_users + WHERE project_id = :project_id AND user_id = :user_id + """ + + result = await db.fetch_val( + query, values={"project_id": project_id, "user_id": current_user_id} ) + # Return True if the user is in the allowed list, False otherwise + return result > 0 @staticmethod - def evaluate_mapping_permission( - project_id: int, user_id: int, mapping_permission: int + async def evaluate_mapping_permission( + project_id: int, user_id: int, mapping_permission: int, db: Database ): allowed_roles = [ TeamRoles.MAPPER.value, TeamRoles.VALIDATOR.value, TeamRoles.PROJECT_MANAGER.value, ] - is_team_member = TeamService.check_team_membership( - project_id, allowed_roles, user_id + is_team_member = await TeamService.check_team_membership( + project_id, allowed_roles, user_id, db ) # mapping_permission = 1(level),2(teams),3(teamsAndLevel) @@ -334,33 +363,37 @@ def evaluate_mapping_permission( return False, MappingNotAllowed.USER_NOT_TEAM_MEMBER elif mapping_permission == MappingPermission.LEVEL.value: - if not ProjectService._is_user_intermediate_or_advanced(user_id): + if not await ProjectService._is_user_intermediate_or_advanced(user_id, db): return False, MappingNotAllowed.USER_NOT_CORRECT_MAPPING_LEVEL elif mapping_permission == MappingPermission.TEAMS_LEVEL.value: - if not ProjectService._is_user_intermediate_or_advanced(user_id): + if not await ProjectService._is_user_intermediate_or_advanced(user_id, db): return False, MappingNotAllowed.USER_NOT_CORRECT_MAPPING_LEVEL if not is_team_member: return False, MappingNotAllowed.USER_NOT_TEAM_MEMBER @staticmethod - def is_user_permitted_to_map(project_id: int, user_id: int): + async def is_user_permitted_to_map(project_id: int, user_id: int, db: Database): """Check if the user is allowed to map the on the project in scope""" - if UserService.is_user_blocked(user_id): + + if await UserService.is_user_blocked(user_id, db): return False, MappingNotAllowed.USER_NOT_ON_ALLOWED_LIST - project = ProjectService.get_project_by_id(project_id) + project = await ProjectService.get_project_by_id(project_id, db) if project.license_id: - if not UserService.has_user_accepted_license(user_id, project.license_id): + if not await UserService.has_user_accepted_license( + user_id, project.license_id, db + ): return False, MappingNotAllowed.USER_NOT_ACCEPTED_LICENSE - mapping_permission = project.mapping_permission + mapping_permission = project.mapping_permission is_manager_permission = ( False # is_admin or is_author or is_org_manager or is_manager_team ) - if ProjectAdminService.is_user_action_permitted_on_project(user_id, project_id): + if await 
ProjectAdminService.is_user_action_permitted_on_project(
+            user_id, project_id, db
+        ):
             is_manager_permission = True
-
         # Draft (public/private) accessible only for is_manager_permission
         if (
             ProjectStatus(project.status) == ProjectStatus.DRAFT
@@ -370,48 +403,47 @@ def is_user_permitted_to_map(project_id: int, user_id: int):
         is_restriction = None
         if not is_manager_permission and mapping_permission:
-            is_restriction = ProjectService.evaluate_mapping_permission(
-                project_id, user_id, mapping_permission
+            is_restriction = await ProjectService.evaluate_mapping_permission(
+                project_id, user_id, mapping_permission, db
             )
-
-        tasks = Task.get_locked_tasks_for_user(user_id)
+        tasks = await Task.get_locked_tasks_for_user(user_id, db)
         if len(tasks.locked_tasks) > 0:
             return False, MappingNotAllowed.USER_ALREADY_HAS_TASK_LOCKED

         is_allowed_user = None
         if project.private and not is_manager_permission:
             # Check if user is in allowed user list
-            is_allowed_user = ProjectService.is_user_in_the_allowed_list(
-                project.allowed_users, user_id
+            is_allowed_user = await ProjectService.is_user_in_the_allowed_list(
+                project.id, user_id, db
             )
             if is_allowed_user:
                 return True, "User allowed to map"

         if not is_manager_permission and is_restriction:
             return is_restriction
+
         elif project.private and not (
             is_manager_permission or is_allowed_user or not is_restriction
         ):
             return False, MappingNotAllowed.USER_NOT_ON_ALLOWED_LIST
-
         return True, "User allowed to map"

     @staticmethod
-    def _is_user_intermediate_or_advanced(user_id):
+    async def _is_user_intermediate_or_advanced(user_id, db: Database):
         """Helper method to determine if user level is not beginner"""
-        user_mapping_level = UserService.get_mapping_level(user_id)
+        user_mapping_level = await UserService.get_mapping_level(user_id, db)
         if user_mapping_level not in [MappingLevel.INTERMEDIATE, MappingLevel.ADVANCED]:
             return False

         return True

     @staticmethod
-    def evaluate_validation_permission(
-        project_id: int, user_id: int, validation_permission: int
+    async def evaluate_validation_permission(
+        project_id: int, user_id: int, validation_permission: int, db: Database
     ):
         allowed_roles = [TeamRoles.VALIDATOR.value, TeamRoles.PROJECT_MANAGER.value]
-        is_team_member = TeamService.check_team_membership(
-            project_id, allowed_roles, user_id
+        is_team_member = await TeamService.check_team_membership(
+            project_id, allowed_roles, user_id, db
         )
         # validation_permission = 1(level),2(teams),3(teamsAndLevel)
         if validation_permission == ValidationPermission.TEAMS.value:
@@ -419,30 +451,37 @@ def evaluate_validation_permission(
             return False, ValidatingNotAllowed.USER_NOT_TEAM_MEMBER

         elif validation_permission == ValidationPermission.LEVEL.value:
-            if not ProjectService._is_user_intermediate_or_advanced(user_id):
+            if not await ProjectService._is_user_intermediate_or_advanced(user_id, db):
                 return False, ValidatingNotAllowed.USER_IS_BEGINNER

         elif validation_permission == ValidationPermission.TEAMS_LEVEL.value:
-            if not ProjectService._is_user_intermediate_or_advanced(user_id):
+            if not await ProjectService._is_user_intermediate_or_advanced(user_id, db):
                 return False, ValidatingNotAllowed.USER_IS_BEGINNER
             if not is_team_member:
                 return False, ValidatingNotAllowed.USER_NOT_TEAM_MEMBER

     @staticmethod
-    def is_user_permitted_to_validate(project_id, user_id):
+    async def is_user_permitted_to_validate(
+        project_id: int, user_id: int, db: Database
+    ):
         """Check if the user is allowed to validate on the project in scope"""
-        if UserService.is_user_blocked(user_id):
+        if await UserService.is_user_blocked(user_id, db):
             return False, ValidatingNotAllowed.USER_NOT_ON_ALLOWED_LIST

-        project = ProjectService.get_project_by_id(project_id)
+        project = await ProjectService.get_project_by_id(project_id, db)
         if project.license_id:
-            if not UserService.has_user_accepted_license(user_id, project.license_id):
+            if not await UserService.has_user_accepted_license(
+                user_id, project.license_id, db
+            ):
                 return False, ValidatingNotAllowed.USER_NOT_ACCEPTED_LICENSE
+
         validation_permission = project.validation_permission
         # is_admin or is_author or is_org_manager or is_manager_team
         is_manager_permission = False
-        if ProjectAdminService.is_user_action_permitted_on_project(user_id, project_id):
+        if await ProjectAdminService.is_user_action_permitted_on_project(
+            user_id, project_id, db
+        ):
             is_manager_permission = True

         # Draft (public/private) accessible only for is_manager_permission
@@ -454,19 +493,19 @@ def is_user_permitted_to_validate(project_id, user_id):
         is_restriction = None
         if not is_manager_permission and validation_permission:
-            is_restriction = ProjectService.evaluate_validation_permission(
-                project_id, user_id, validation_permission
+            is_restriction = await ProjectService.evaluate_validation_permission(
+                project_id, user_id, validation_permission, db
             )
-        tasks = Task.get_locked_tasks_for_user(user_id)
+        tasks = await Task.get_locked_tasks_for_user(user_id, db)
         if len(tasks.locked_tasks) > 0:
             return False, ValidatingNotAllowed.USER_ALREADY_HAS_TASK_LOCKED

         is_allowed_user = None
         if project.private and not is_manager_permission:
             # Check if user is in allowed user list
-            is_allowed_user = ProjectService.is_user_in_the_allowed_list(
-                project.allowed_users, user_id
+            is_allowed_user = await ProjectService.is_user_in_the_allowed_list(
+                project_id, user_id, db
             )

             if is_allowed_user:
@@ -492,65 +531,140 @@ def get_cached_project_summary(
         return project.get_project_summary(preferred_locale, calculate_completion=False)

     @staticmethod
-    def get_project_summary(
-        project_id: int, preferred_locale: str = "en"
+    async def get_project_summary(
+        project_id: int, db: Database, preferred_locale: str = "en"
     ) -> ProjectSummary:
+        query = """
+            SELECT
+                p.id AS id,
+                p.difficulty,
+                p.priority,
+                p.default_locale,
+                ST_AsGeoJSON(p.centroid) AS centroid,
+                p.organisation_id,
+                p.tasks_bad_imagery,
+                p.tasks_mapped,
+                p.tasks_validated,
+                p.status,
+                p.mapping_types,
+                p.total_tasks,
+                p.last_updated,
+                p.due_date,
+                p.country,
+                p.changeset_comment,
+                p.created,
+                p.osmcha_filter_id,
+                p.mapping_permission,
+                p.validation_permission,
+                p.enforce_random_task_selection,
+                p.private,
+                p.license_id,
+                p.id_presets,
+                p.extra_id_params,
+                p.rapid_power_user,
+                p.imagery,
+                p.mapping_editors,
+                p.validation_editors,
+                u.username AS author,
+                o.name AS organisation_name,
+                o.slug AS organisation_slug,
+                o.logo AS organisation_logo,
+                ARRAY(SELECT user_id FROM project_allowed_users WHERE project_id = p.id) AS allowed_users
+            FROM projects p
+            LEFT JOIN organisations o ON o.id = p.organisation_id
+            LEFT JOIN users u ON u.id = p.author_id
+            WHERE p.id = :id
+        """
+        params = {"id": project_id}
+        # Execute query
+        project = await db.fetch_one(query, params)
+
         """Gets the project summary DTO"""
-        project = ProjectService.get_project_by_id(project_id)
-        summary = ProjectService.get_cached_project_summary(
-            project_id, preferred_locale
+
+        summary = await Project.get_project_summary(
+            project, preferred_locale, db, calculate_completion=False
+        )
+        summary.percent_mapped = Project.calculate_tasks_percent(
+ "mapped", + project.tasks_mapped, + project.tasks_validated, + project.total_tasks, + project.tasks_bad_imagery, + ) + summary.percent_validated = Project.calculate_tasks_percent( + "validated", + project.tasks_validated, + project.tasks_validated, + project.total_tasks, + project.tasks_bad_imagery, + ) + summary.percent_bad_imagery = Project.calculate_tasks_percent( + "bad_imagery", + project.tasks_mapped, + project.tasks_validated, + project.total_tasks, + project.tasks_bad_imagery, ) - # Since we don't want to cache the project stats, we need to update them - summary.percent_mapped = project.calculate_tasks_percent("mapped") - summary.percent_validated = project.calculate_tasks_percent("validated") - summary.percent_bad_imagery = project.calculate_tasks_percent("bad_imagery") return summary @staticmethod - def set_project_as_featured(project_id: int): + async def set_project_as_featured(project_id: int, db: Database): """Sets project as featured""" - project = ProjectService.get_project_by_id(project_id) - project.set_as_featured() + project = await ProjectService.get_project_by_id(project_id, db) + await Project.set_as_featured(project, db) @staticmethod - def unset_project_as_featured(project_id: int): + async def unset_project_as_featured(project_id: int, db: Database): """Sets project as featured""" - project = ProjectService.get_project_by_id(project_id) - project.unset_as_featured() + project = await ProjectService.get_project_by_id(project_id, db) + await Project.unset_as_featured(project, db) @staticmethod - def get_featured_projects(preferred_locale): - """Sets project as featured""" - query = ProjectSearchService.create_search_query() - projects = query.filter(Project.featured == true()).group_by(Project.id).all() + async def get_featured_projects( + preferred_locale: str, db: Database + ) -> ProjectSearchResultsDTO: + """Fetch featured projects and return results.""" - # Get total contributors. - contrib_counts = ProjectSearchService.get_total_contributions(projects) + # Create the search query + query, params = await ProjectSearchService.create_search_query(db) + + # Append filtering for featured projects + query += " AND p.featured = TRUE" + + projects = await db.fetch_all(query, params) + project_ids = [project["id"] for project in projects] + + # Get total contributors + contrib_counts = await ProjectSearchService.get_total_contributions( + project_ids, db + ) zip_items = zip(projects, contrib_counts) dto = ProjectSearchResultsDTO() dto.results = [ - ProjectSearchService.create_result_dto(p, preferred_locale, t) - for p, t in zip_items + await ProjectSearchService.create_result_dto( + project, preferred_locale, total_contributors, db + ) + for project, total_contributors in zip_items ] - + # TODO Check if pagination needed. 
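As an aside on the pagination TODO above: the paginated search path earlier in this diff already pairs a LIMIT/OFFSET window with a COUNT(*) subquery and feeds the total into Pagination.from_total_count. A minimal sketch of that pattern as a standalone helper follows; the helper name, the per_page default of 14, and the Pagination import path are assumptions lifted from this diff, not an existing utility.

```python
# Sketch only: generalises the LIMIT/OFFSET + COUNT(*) pagination used by the
# rewritten project search in this diff. The helper name is hypothetical.
from databases import Database

from backend.models.dtos.stats_dto import Pagination  # assumed import path


async def paginate_query(
    db: Database, sql_query: str, params: dict, page: int, per_page: int = 14
):
    offset = (page - 1) * per_page
    paginated_sql = f"{sql_query} LIMIT {per_page} OFFSET {offset}"
    count_sql = f"SELECT COUNT(*) FROM ({sql_query}) AS count_subquery"

    # Count on the unpaginated SQL so the total and the page window stay consistent
    total_count = await db.fetch_val(count_sql, values=params)
    rows = await db.fetch_all(paginated_sql, values=params)
    pagination = Pagination.from_total_count(page, per_page, total_count)
    return rows, pagination
```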
+ dto.pagination = None return dto @staticmethod - def is_favorited(project_id: int, user_id: int) -> bool: - project = ProjectService.get_project_by_id(project_id) - - return project.is_favorited(user_id) + async def is_favorited(project_id: int, user_id: int, db: Database) -> bool: + await ProjectService.exists(project_id, db) + return await Project.is_favorited(project_id, user_id, db) @staticmethod - def favorite(project_id: int, user_id: int): - project = ProjectService.get_project_by_id(project_id) - project.favorite(user_id) + async def favorite(project_id: int, user_id: int, db: Database): + await ProjectService.exists(project_id, db) + await Project.favorite(project_id, user_id, db) @staticmethod - def unfavorite(project_id: int, user_id: int): - project = ProjectService.get_project_by_id(project_id) - project.unfavorite(user_id) + async def unfavorite(project_id: int, user_id: int, db: Database): + await ProjectService.exists(project_id, db) + await Project.unfavorite(project_id, user_id, db) @staticmethod def get_project_title(project_id: int, preferred_locale: str = "en") -> str: @@ -560,17 +674,19 @@ def get_project_title(project_id: int, preferred_locale: str = "en") -> str: @staticmethod @cached(TTLCache(maxsize=1024, ttl=600)) - def get_project_stats(project_id: int) -> ProjectStatsDTO: + async def get_project_stats(project_id: int, db: Database) -> ProjectStatsDTO: """Gets the project stats DTO""" - project = ProjectService.get_project_by_id(project_id) - return project.get_project_stats() + project = await ProjectService.exists(project_id, db) + return await Project.get_project_stats(project_id, db) @staticmethod - def get_project_user_stats(project_id: int, username: str) -> ProjectUserStatsDTO: + async def get_project_user_stats( + project_id: int, username: str, db: Database + ) -> ProjectUserStatsDTO: """Gets the user stats for a specific project""" - project = ProjectService.get_project_by_id(project_id) - user = UserService.get_user_by_username(username) - return project.get_project_user_stats(user.id) + await ProjectService.exists(project_id, db) + user = await UserService.get_user_by_username(username, db) + return await Project.get_project_user_stats(project_id, user.id, db) def get_project_teams(project_id: int): project = ProjectService.get_project_by_id(project_id) @@ -590,74 +706,101 @@ def get_project_organisation(project_id: int) -> Organisation: return project.organisation @staticmethod - def send_email_on_project_progress(project_id): + async def send_email_on_project_progress(project_id: int): """Send email to all contributors on project progress""" - if not current_app.config["SEND_PROJECT_EMAIL_UPDATES"]: - return - project = ProjectService.get_project_by_id(project_id) - - project_completion = project.calculate_tasks_percent("project_completion") - if project_completion == 50 and project.progress_email_sent: - return # Don't send progress email if it's already sent - if project_completion in [50, 100]: - email_type = ( - EncouragingEmailType.PROJECT_COMPLETE.value - if project_completion == 100 - else EncouragingEmailType.PROJECT_PROGRESS.value + async with db_connection.database.connection() as db: + current_settings = get_settings() + if not current_settings.SEND_PROJECT_EMAIL_UPDATES: + return + project = await ProjectService.get_project_by_id(project_id, db) + + project_completion = Project.calculate_tasks_percent( + "project_completion", + project.tasks_mapped, + project.tasks_validated, + project.total_tasks, + project.tasks_bad_imagery, ) - 
project_title = ProjectInfo.get_dto_for_locale( - project_id, project.default_locale - ).name - project.progress_email_sent = True - project.save() - threading.Thread( - target=SMTPService.send_email_to_contributors_on_project_progress, - args=( - email_type, - project_id, - project_title, - project_completion, - ), - ).start() - - @staticmethod - def get_active_projects(interval): - action_date = datetime.utcnow() - timedelta(hours=interval) - history_result = ( - TaskHistory.query.with_entities(TaskHistory.project_id) - .distinct() - .filter((TaskHistory.action_date) >= action_date) - .all() + if project_completion == 50 and project.progress_email_sent: + return # Don't send progress email if it's already sent + if project_completion in [50, 100]: + email_type = ( + EncouragingEmailType.PROJECT_COMPLETE.value + if project_completion == 100 + else EncouragingEmailType.PROJECT_PROGRESS.value + ) + project_title_query = """ + SELECT name + FROM project_info + WHERE project_id = :project_id AND locale = :locale + """ + project_title = await db.fetch_val( + project_title_query, + values={ + "project_id": project_id, + "locale": project["default_locale"], + }, + ) + + # Update progress_email_sent status + await db.execute( + """ + UPDATE projects + SET progress_email_sent = TRUE + WHERE id = :project_id + """, + values={"project_id": project_id}, + ) + await SMTPService.send_email_to_contributors_on_project_progress( + email_type, project_id, project_title, project_completion, db + ) + + @staticmethod + async def get_active_projects(interval: int, db: Database): + # Calculate the action_date and make it naive + action_date = (datetime.now(timezone.utc) - timedelta(hours=interval)).replace( + tzinfo=None ) - project_ids = [row.project_id for row in history_result] - chat_result = ( - ProjectChat.query.with_entities(ProjectChat.project_id) - .distinct() - .filter((ProjectChat.time_stamp) >= action_date) - .all() + # First query to get distinct project_ids + query_project_ids = """ + SELECT DISTINCT project_id + FROM task_history + WHERE action_date >= :action_date + """ + project_ids_result = await db.fetch_all( + query_project_ids, {"action_date": action_date} ) - chat_project_ids = [row.project_id for row in chat_result] - project_ids.extend(chat_project_ids) - project_ids = list(set(project_ids)) - projects = ( - Project.query.with_entities( - Project.id, - Project.mapping_types, - Project.geometry.ST_AsGeoJSON().label("geometry"), - ) - .filter( - Project.id.in_(project_ids), - ) - .all() + project_ids = [row["project_id"] for row in project_ids_result] + + # If there are no project IDs, return an empty FeatureCollection + if not project_ids: + return geojson.FeatureCollection([]) + + # Second query to get project details + query_projects = """ + SELECT + id, + mapping_types, + ST_AsGeoJSON(geometry) AS geometry + FROM projects + WHERE status = :status + AND id = ANY(:project_ids) + """ + project_result = await db.fetch_all( + query_projects, + {"status": ProjectStatus.PUBLISHED.value, "project_ids": project_ids}, ) - features = [] - for project in projects: - properties = { - "project_id": project.id, - "mapping_types": project.mapping_types, - } - feature = geojson.Feature( - geometry=geojson.loads(project.geometry), properties=properties + + # Building GeoJSON FeatureCollection + features = [ + geojson.Feature( + geometry=geojson.loads(project["geometry"]), + properties={ + "project_id": project["id"], + "mapping_types": project["mapping_types"], + }, ) - features.append(feature) + for 
project in project_result + ] + return geojson.FeatureCollection(features) diff --git a/backend/services/recommendation_service.py b/backend/services/recommendation_service.py index a53289744c..2ffc90adc5 100644 --- a/backend/services/recommendation_service.py +++ b/backend/services/recommendation_service.py @@ -1,15 +1,13 @@ import pandas as pd +from cachetools import TTLCache +from databases import Database from sklearn.metrics.pairwise import cosine_similarity from sklearn.preprocessing import MultiLabelBinarizer -from sqlalchemy.orm import joinedload -from sqlalchemy.sql.expression import func -from cachetools import TTLCache, cached -from backend import db from backend.exceptions import NotFound -from backend.models.postgis.project import Project, Interest, project_interests -from backend.models.postgis.statuses import ProjectStatus from backend.models.dtos.project_dto import ProjectSearchResultsDTO +from backend.models.postgis.project import Project +from backend.models.postgis.statuses import ProjectStatus from backend.services.project_search_service import ProjectSearchService from backend.services.users.user_service import UserService @@ -26,52 +24,6 @@ class ProjectRecommendationService: - @staticmethod - def to_dataframe(records, columns: list): - """Convert records fetched from sql execution into dataframe - :param records: records fetched from sql execution - :param columns: columns of the dataframe - :return: dataframe - """ - batch_rows = list() - for _, row in enumerate(records, start=0): - batch_rows.append(row) - table = pd.DataFrame(batch_rows, columns=columns) - return table - - @staticmethod - def get_all_published_projects(): - """Gets all published projects - :return: list of published projects - """ - # Create a subquery to fetch the interests of the projects - subquery = ( - db.session.query( - project_interests.c.project_id, Interest.id.label("interest_id") - ) - .join(Interest) - .subquery() - ) - - # Only fetch the columns required for recommendation - # Should be in order of the columns defined in the project_columns line 13 - query = Project.query.options(joinedload(Project.interests)).with_entities( - Project.id, - Project.default_locale, - Project.difficulty, - Project.mapping_types, - Project.country, - func.array_agg(subquery.c.interest_id).label("interests"), - ) - # Outerjoin so that projects without interests are also returned - query = ( - query.outerjoin(subquery, Project.id == subquery.c.project_id) - .filter(Project.status == ProjectStatus.PUBLISHED.value) - .group_by(Project.id) - ) - result = query.all() - return result - @staticmethod def mlb_transform(table, column, prefix): """Transforms multi label column into multiple columns and retruns the data frame with new columns @@ -165,89 +117,116 @@ def get_similar_project_ids(all_projects_df, target_project_df): return similar_projects + # TODO: Cache # This function is cached so that the matrix is not calculated every time # as it is expensive and not changing often + # # @cached(cache=similar_projects_cache) @staticmethod - @cached(cache=similar_projects_cache) - def create_project_matrix(target_project=None): - """Creates project matrix that is required to calculate the similarity - :param target_project: target project id (not used). 
- This is required to reset the cache when a new project is published - :return: project matrix data frame with encoded columns + async def create_project_matrix(db: Database) -> pd.DataFrame: + """Creates project matrix required to calculate similarity.""" + + # Query to fetch all published projects with their related data + query = """ + SELECT p.id, p.default_locale, p.difficulty, p.mapping_types, p.country, + COALESCE(ARRAY_AGG(pi.interest_id), ARRAY[]::INTEGER[]) AS categories + FROM projects p + LEFT JOIN ( + SELECT pi.project_id, i.id as interest_id + FROM project_interests pi + JOIN interests i ON pi.interest_id = i.id + ) pi ON p.id = pi.project_id + WHERE p.status = :status + GROUP BY p.id """ - all_projects = ProjectRecommendationService.get_all_published_projects() - all_projects_df = ProjectRecommendationService.to_dataframe( - all_projects, project_columns - ) - all_projects_df = ProjectRecommendationService.build_encoded_data_frame( - all_projects_df - ) - return all_projects_df + try: + # Execute the query and fetch results + result = await db.fetch_all( + query=query, values={"status": ProjectStatus.PUBLISHED.value} + ) + # Convert the result into a DataFrame + df = pd.DataFrame([dict(row) for row in result]) + # Optionally encode categorical data + df = ProjectRecommendationService.build_encoded_data_frame(df) + return df + + except Exception as e: + print(f"An error occurred: {e}") + return pd.DataFrame() @staticmethod - def get_similar_projects( - project_id, user_id=None, preferred_locale="en", limit=4 + async def get_similar_projects( + db: Database, + project_id: int, + user_id: str = None, + preferred_locale: str = "en", + limit: int = 4, ) -> ProjectSearchResultsDTO: - """Get similar projects based on the given project ID. - ---------------------------------------- - :param project_id: project id - :param preferred_locale: preferred locale - :return: list of similar projects in the order of similarity - """ - target_project = Project.query.get(project_id) - # Check if the project exists and is published - project_is_published = ( - target_project and target_project.status == ProjectStatus.PUBLISHED.value + """Get similar projects based on the given project ID.""" + + # Fetch the target project details + target_project_query = "SELECT * FROM projects WHERE id = :project_id" + target_project = await db.fetch_one( + query=target_project_query, values={"project_id": project_id} ) - if not project_is_published: + + if ( + not target_project + or target_project["status"] != ProjectStatus.PUBLISHED.value + ): raise NotFound(sub_code="PROJECT_NOT_FOUND", project_id=project_id) - projects_df = ProjectRecommendationService.create_project_matrix() + # Create the project similarity matrix + projects_df = await ProjectRecommendationService.create_project_matrix(db) target_project_df = projects_df[projects_df["id"] == project_id] + if target_project_df.empty: - # If the target project is not in the projects_df then it means it is published - # but not yet in the cache of create_project_matrix. So we need to update the cache. 
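For orientation, the matrix built by create_project_matrix above is consumed by a cosine-similarity lookup (get_similar_project_ids). A hedged sketch of that idea follows, assuming an "id" column plus already-encoded numeric feature columns; it is an illustration of the technique, not the module's actual implementation.

```python
# Illustration only: rank projects by cosine similarity to a target project.
# Assumes projects_df carries an integer "id" column and numeric encoded
# feature columns, as produced by build_encoded_data_frame above.
import pandas as pd
from sklearn.metrics.pairwise import cosine_similarity


def similar_project_ids_sketch(
    projects_df: pd.DataFrame, target_id: int, limit: int = 4
) -> list:
    features = projects_df.drop(columns=["id"])
    target = features[projects_df["id"] == target_id]
    if target.empty:
        return []
    # One similarity score per project against the single target row
    scores = cosine_similarity(features, target).ravel()
    ranked = projects_df.assign(score=scores).sort_values("score", ascending=False)
    ranked = ranked[ranked["id"] != target_id]
    return ranked["id"].head(limit).tolist()
```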
- projects_df = ProjectRecommendationService.create_project_matrix( - target_project=project_id + projects_df = await ProjectRecommendationService.create_project_matrix( + db, target_project=project_id ) target_project_df = projects_df[projects_df["id"] == project_id] dto = ProjectSearchResultsDTO() - # If there is only one project then return empty list as there is no other project to compare + dto.pagination = None if projects_df.shape[0] < 2: return dto + # Get IDs of similar projects similar_projects = ProjectRecommendationService.get_similar_project_ids( projects_df, target_project_df ) + user = await UserService.get_user_by_id(user_id, db) if user_id else None - user = UserService.get_user_by_id(user_id) if user_id else None + # Create the search query with filters applied based on user role + search_query, params = await ProjectSearchService.create_search_query(db, user) - query = ProjectSearchService.create_search_query(user) - # Only return projects which are not completed - query = query.filter( - Project.total_tasks != Project.tasks_validated + Project.tasks_bad_imagery - ) + # Filter out fully completed projects + search_query += """ + AND (p.total_tasks != p.tasks_validated + p.tasks_bad_imagery) + AND p.id = :project_id + """ - # Set the limit to the number of similar projects if it is less than the limit + # Limit the number of similar projects to fetch limit = min(limit, len(similar_projects)) if similar_projects else 0 - count = 0 while len(dto.results) < limit: - # In case the user is not authorized to view the project and similar projects are less than the limit - # then we need to break the loop and return the results try: - project_id = similar_projects[count] + similar_project_id = similar_projects[count] except IndexError: break - project = query.filter(Project.id == project_id).all() + project = await db.fetch_one( + query=search_query, values={**params, "project_id": similar_project_id} + ) if project: dto.results.append( - ProjectSearchService.create_result_dto( - project[0], + await ProjectSearchService.create_result_dto( + project, preferred_locale, - Project.get_project_total_contributions(project[0][0]), + await Project.get_project_total_contributions( + project["id"], db + ), + db, ) ) count += 1 + return dto diff --git a/backend/services/settings_service.py b/backend/services/settings_service.py index f4e52dd2b8..4d996f0423 100644 --- a/backend/services/settings_service.py +++ b/backend/services/settings_service.py @@ -1,6 +1,6 @@ from cachetools import TTLCache, cached -from flask import current_app from backend.models.dtos.settings_dto import SupportedLanguage, SettingsDTO +from backend.config import settings settings_cache = TTLCache(maxsize=4, ttl=300) @@ -11,17 +11,15 @@ class SettingsService: def get_settings(): """Gets all settings required by the client""" settings_dto = SettingsDTO() - settings_dto.mapper_level_advanced = current_app.config["MAPPER_LEVEL_ADVANCED"] - settings_dto.mapper_level_intermediate = current_app.config[ - "MAPPER_LEVEL_INTERMEDIATE" - ] + settings_dto.mapper_level_advanced = settings.MAPPER_LEVEL_ADVANCED + settings_dto.mapper_level_intermediate = settings.MAPPER_LEVEL_INTERMEDIATE settings_dto.supported_languages = SettingsService.get_supported_languages() return settings_dto @staticmethod def get_supported_languages(): """Gets all supported languages from the config""" - app_languages = current_app.config["SUPPORTED_LANGUAGES"] + app_languages = settings.SUPPORTED_LANGUAGES codes = [x.strip() for x in 
app_languages["codes"].split(",")] languages = [x.strip() for x in app_languages["languages"].split(",")] diff --git a/backend/services/stats_service.py b/backend/services/stats_service.py index 725ac2d05b..feb3cc7b44 100644 --- a/backend/services/stats_service.py +++ b/backend/services/stats_service.py @@ -1,97 +1,101 @@ -from cachetools import TTLCache, cached +import datetime from datetime import date, timedelta -from sqlalchemy import func, desc, cast, extract, or_ -from sqlalchemy.sql.functions import coalesce -from sqlalchemy.types import Time -from backend import db -from backend.exceptions import NotFound +from cachetools import TTLCache, cached +from databases import Database +from sqlalchemy import func, select + +from backend.models.dtos.project_dto import ProjectSearchResultsDTO from backend.models.dtos.stats_dto import ( - ProjectContributionsDTO, - UserContribution, + CampaignStatsDTO, + GenderStatsDTO, + HomePageStatsDTO, + OrganizationListStatsDTO, Pagination, - TaskHistoryDTO, - TaskStatusDTO, ProjectActivityDTO, + ProjectContributionsDTO, ProjectLastActivityDTO, - HomePageStatsDTO, - OrganizationListStatsDTO, - CampaignStatsDTO, - TaskStats, + TaskHistoryDTO, TaskStatsDTO, - GenderStatsDTO, + TaskStatusDTO, + UserContribution, UserStatsDTO, ) - -from backend.models.dtos.project_dto import ProjectSearchResultsDTO from backend.models.postgis.campaign import Campaign, campaign_projects from backend.models.postgis.organisation import Organisation from backend.models.postgis.project import Project -from backend.models.postgis.statuses import TaskStatus, MappingLevel, UserGender -from backend.models.postgis.task import TaskHistory, User, Task, TaskAction +from backend.models.postgis.statuses import MappingLevel, TaskStatus, UserGender +from backend.models.postgis.task import Task, TaskAction, User from backend.models.postgis.utils import timestamp # noqa: F401 -from backend.services.project_service import ProjectService from backend.services.project_search_service import ProjectSearchService +from backend.services.project_service import ProjectService from backend.services.users.user_service import UserService -from backend.services.organisation_service import OrganisationService -from backend.services.campaign_service import CampaignService homepage_stats_cache = TTLCache(maxsize=4, ttl=30) class StatsService: @staticmethod - def update_stats_after_task_state_change( + async def update_stats_after_task_state_change( project_id: int, user_id: int, last_state: TaskStatus, new_state: TaskStatus, - action="change", - local_session=None, + db: Database, + action: str = "change", ): """Update stats when a task has had a state change""" + # No stats to record for these states if new_state in [ TaskStatus.LOCKED_FOR_VALIDATION, TaskStatus.LOCKED_FOR_MAPPING, ]: - return # No stats to record for these states - - project = ProjectService.get_project_by_id(project_id) - user = UserService.get_user_by_id(user_id) - - project, user = StatsService._update_tasks_stats( - project, user, last_state, new_state, action + return + project = await ProjectService.get_project_by_id(project_id, db) + user = await UserService.get_user_by_id(user_id, db) + project, user = await StatsService._update_tasks_stats( + project, user, last_state, new_state, db, action ) - UserService.upsert_mapped_projects( - user_id, project_id, local_session=local_session + # Upsert mapped projects for the user + await UserService.upsert_mapped_projects(user_id, project_id, db) + query = """ + UPDATE projects + SET 
last_updated = :last_updated + WHERE id = :project_id + """ + await db.execute( + query, + values={ + "last_updated": datetime.datetime.utcnow(), + "project_id": project_id, + }, ) - project.last_updated = timestamp() - - # Transaction will be saved when task is saved return project, user @staticmethod - def _update_tasks_stats( - project: Project, - user: User, + async def _update_tasks_stats( + project: dict, + user: dict, last_state: TaskStatus, new_state: TaskStatus, + db: Database, action="change", ): - # Make sure you are aware that users table has it as incrementing counters, - # while projects table reflect the actual state, and both increment and decrement happens + project_stats = dict(project) # Mutable copy of the project dictionary if new_state == last_state: - return project, user + return project_stats, user - # Set counters for new state + # Increment counters for the new state if new_state == TaskStatus.MAPPED: - project.tasks_mapped += 1 + project_stats["tasks_mapped"] += 1 + elif new_state == TaskStatus.VALIDATED: - project.tasks_validated += 1 + project_stats["tasks_validated"] += 1 elif new_state == TaskStatus.BADIMAGERY: - project.tasks_bad_imagery += 1 + project_stats["tasks_bad_imagery"] += 1 + # Increment user stats if action is "change" if action == "change": if new_state == TaskStatus.MAPPED: user.tasks_mapped += 1 @@ -100,14 +104,16 @@ def _update_tasks_stats( elif new_state == TaskStatus.INVALIDATED: user.tasks_invalidated += 1 - # Remove counters for old state + # Decrement counters for the old state if last_state == TaskStatus.MAPPED: - project.tasks_mapped -= 1 + project_stats["tasks_mapped"] -= 1 elif last_state == TaskStatus.VALIDATED: - project.tasks_validated -= 1 + project_stats["tasks_validated"] -= 1 + elif last_state == TaskStatus.BADIMAGERY: - project.tasks_bad_imagery -= 1 + project_stats["tasks_bad_imagery"] -= 1 + # Undo user stats if action is "undo" if action == "undo": if last_state == TaskStatus.MAPPED: user.tasks_mapped -= 1 @@ -116,247 +122,320 @@ def _update_tasks_stats( elif last_state == TaskStatus.INVALIDATED: user.tasks_invalidated -= 1 - return project, user + # Update the project and user records in the database + await db.execute( + """ + UPDATE projects + SET tasks_mapped = :tasks_mapped, + tasks_validated = :tasks_validated, + tasks_bad_imagery = :tasks_bad_imagery + WHERE id = :project_id + """, + values={ + "tasks_mapped": project_stats["tasks_mapped"], + "tasks_validated": project_stats["tasks_validated"], + "tasks_bad_imagery": project_stats["tasks_bad_imagery"], + "project_id": project_stats["id"], + }, + ) + + await db.execute( + """ + UPDATE users + SET tasks_mapped = :tasks_mapped, + tasks_validated = :tasks_validated, + tasks_invalidated = :tasks_invalidated + WHERE id = :user_id + """, + values={ + "tasks_mapped": user.tasks_mapped, + "tasks_validated": user.tasks_validated, + "tasks_invalidated": user.tasks_invalidated, + "user_id": user.id, + }, + ) + return project_stats, user @staticmethod - def get_latest_activity(project_id: int, page: int) -> ProjectActivityDTO: + async def get_latest_activity( + project_id: int, page: int, db: Database + ) -> ProjectActivityDTO: """Gets all the activity on a project""" - if not ProjectService.exists(project_id): - raise NotFound(sub_code="PROJECT_NOT_FOUND", project_id=project_id) - - results = ( - db.session.query( - TaskHistory.id, - TaskHistory.task_id, - TaskHistory.action, - TaskHistory.action_date, - TaskHistory.action_text, - User.username, - ) - .join(User) - .filter( - 
TaskHistory.project_id == project_id, - TaskHistory.action != TaskAction.COMMENT.name, - ) - .order_by(TaskHistory.action_date.desc()) - .paginate(page=page, per_page=10, error_out=True) + # Pagination setup + page_size = 10 + offset = (page - 1) * page_size + + # Query to fetch task history + query = """ + SELECT + th.id, + th.task_id, + th.action, + th.action_date, + th.action_text, + u.username + FROM task_history th + JOIN users u ON th.user_id = u.id + WHERE + th.project_id = :project_id + AND th.action != :comment_action + ORDER BY th.action_date DESC + LIMIT :limit OFFSET :offset + """ + rows = await db.fetch_all( + query, + { + "project_id": project_id, + "comment_action": "COMMENT", + "limit": page_size, + "offset": offset, + }, ) - activity_dto = ProjectActivityDTO() - for item in results.items: - history = TaskHistoryDTO() - history.task_id = item.id - history.task_id = item.task_id - history.action = item.action - history.action_text = item.action_text - history.action_date = item.action_date - history.action_by = item.username + # Creating DTO + activity_dto = ProjectActivityDTO(activity=[]) + for row in rows: + history = TaskHistoryDTO( + history_id=row["id"], + task_id=row["task_id"], + action=row["action"], + action_text=row["action_text"], + action_date=row["action_date"], + action_by=row["username"], + ) activity_dto.activity.append(history) - activity_dto.pagination = Pagination(results) - return activity_dto + # Calculate total items for pagination + total_query = """ + SELECT COUNT(*) + FROM task_history th + WHERE + th.project_id = :project_id + AND th.action != :comment_action + """ + total_items_result = await db.fetch_one( + total_query, {"project_id": project_id, "comment_action": "COMMENT"} + ) - @staticmethod - def get_popular_projects() -> ProjectSearchResultsDTO: - """Get all projects ordered by task_history""" + total_items = total_items_result["count"] if total_items_result else 0 - rate_func = func.count(TaskHistory.user_id) / extract( - "epoch", func.sum(cast(TaskHistory.action_date, Time)) + # Use the from_total_count method to correctly initialize the Pagination DTO + activity_dto.pagination = Pagination.from_total_count( + page=page, per_page=page_size, total=total_items ) - query = ( - TaskHistory.query.with_entities( - TaskHistory.project_id.label("id"), rate_func.label("rate") - ) - .filter(TaskHistory.action_date >= date.today() - timedelta(days=90)) - .filter( - or_( - TaskHistory.action == TaskAction.LOCKED_FOR_MAPPING.name, - TaskHistory.action == TaskAction.LOCKED_FOR_VALIDATION.name, - ) - ) - .filter(TaskHistory.action_text is not None) - .filter(TaskHistory.action_text != "") - .group_by(TaskHistory.project_id) - .order_by(desc("rate")) - .limit(10) - .subquery() - ) + return activity_dto - projects_query = ProjectSearchService.create_search_query() - projects = projects_query.filter(Project.id == query.c.id).all() - # Get total contributors. 
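Aside for reviewers: the pagination in the new `get_latest_activity` is plain LIMIT/OFFSET plus a separate COUNT issued through the async `databases` driver, with the totals fed into `Pagination.from_total_count`. A minimal, self-contained sketch of that pattern (the helper name and the exact columns selected here are illustrative, not part of this change):

```python
from databases import Database


async def fetch_history_page(db: Database, project_id: int, page: int, per_page: int = 10):
    """Illustrative LIMIT/OFFSET pagination helper in the style used above."""
    offset = (page - 1) * per_page
    rows = await db.fetch_all(
        """
        SELECT id, action, action_date
        FROM task_history
        WHERE project_id = :project_id
        ORDER BY action_date DESC
        LIMIT :limit OFFSET :offset
        """,
        {"project_id": project_id, "limit": per_page, "offset": offset},
    )
    # A second query provides the total used for Pagination.from_total_count
    total = await db.fetch_val(
        "SELECT COUNT(*) FROM task_history WHERE project_id = :project_id",
        {"project_id": project_id},
    )
    return rows, total
```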
- contrib_counts = ProjectSearchService.get_total_contributions(projects) + @staticmethod + async def get_popular_projects(db: Database) -> ProjectSearchResultsDTO: + """Get all projects ordered by task history.""" + + # Query to calculate the "popularity" rate based on task history + popularity_query = """ + SELECT + th.project_id AS id, + COUNT(th.user_id) / EXTRACT(EPOCH FROM SUM(th.action_date::time)) AS rate + FROM task_history th + WHERE th.action_date >= :start_date + AND (th.action = :locked_for_mapping OR th.action = :locked_for_validation) + AND th.action_text IS NOT NULL + AND th.action_text != '' + GROUP BY th.project_id + ORDER BY rate DESC + LIMIT 10 + """ + + start_date = date.today() - timedelta(days=90) + params = { + "start_date": start_date, + "locked_for_mapping": TaskAction.LOCKED_FOR_MAPPING.name, + "locked_for_validation": TaskAction.LOCKED_FOR_VALIDATION.name, + } + + # Fetch the popular projects based on the rate calculated above + popular_projects = await db.fetch_all(popularity_query, params) + project_ids = [row["id"] for row in popular_projects] + + if not project_ids: + return ProjectSearchResultsDTO(results=[]) + + # Use the existing `create_search_query` function to fetch detailed project data + project_query, query_params = await ProjectSearchService.create_search_query(db) + project_query += " AND p.id = ANY(:project_ids)" + query_params["project_ids"] = project_ids + + projects = await db.fetch_all(project_query, query_params) + + # Get total contributors for each project + contrib_counts = await ProjectSearchService.get_total_contributions( + project_ids, db + ) zip_items = zip(projects, contrib_counts) + # Prepare the final DTO with all project details dto = ProjectSearchResultsDTO() dto.results = [ - ProjectSearchService.create_result_dto(p, "en", t) for p, t in zip_items + await ProjectSearchService.create_result_dto(p, "en", t, db) + for p, t in zip_items ] return dto @staticmethod - def get_last_activity(project_id: int) -> ProjectLastActivityDTO: + async def get_last_activity( + project_id: int, db: Database + ) -> ProjectLastActivityDTO: """Gets the last activity for a project's tasks""" - sq = ( - TaskHistory.query.with_entities( - TaskHistory.task_id, - TaskHistory.action_date, - TaskHistory.user_id, - ) - .filter(TaskHistory.project_id == project_id) - .filter(TaskHistory.action != TaskAction.COMMENT.name) - .order_by(TaskHistory.task_id, TaskHistory.action_date.desc()) - .distinct(TaskHistory.task_id) - .subquery() - ) - sq_statuses = ( - Task.query.with_entities(Task.id, Task.task_status) - .filter(Task.project_id == project_id) - .subquery() - ) - results = ( - db.session.query( - sq_statuses.c.id, - sq.c.action_date, - sq_statuses.c.task_status, - User.username, - ) - .outerjoin(sq, sq.c.task_id == sq_statuses.c.id) - .outerjoin(User, User.id == sq.c.user_id) - .order_by(sq_statuses.c.id) - .all() + # Subquery: Fetch latest actions for each task, excluding comments + subquery_latest_action = """ + SELECT DISTINCT ON (th.task_id) + th.task_id, + th.action_date, + th.user_id + FROM task_history th + WHERE th.project_id = :project_id + AND th.action != :comment_action + ORDER BY th.task_id, th.action_date DESC + """ + + # Main query: Join task statuses with latest actions and user details + query_task_statuses = f""" + SELECT + t.id AS task_id, + t.task_status, + la.action_date, + u.username AS action_by + FROM tasks t + LEFT JOIN ({subquery_latest_action}) la ON la.task_id = t.id + LEFT JOIN users u ON u.id = la.user_id + WHERE t.project_id = 
:project_id + ORDER BY t.id + """ + + # Execute the query + results = await db.fetch_all( + query_task_statuses, {"project_id": project_id, "comment_action": "COMMENT"} ) - dto = ProjectLastActivityDTO() - dto.activity = [ - TaskStatusDTO( - dict( - task_id=r.id, - task_status=TaskStatus(r.task_status).name, - action_date=r.action_date, - action_by=r.username, - ) + # Create DTO + dto = ProjectLastActivityDTO(activity=[]) + for row in results: + task_status_dto = TaskStatusDTO( + task_id=row["task_id"], + task_status=TaskStatus(row["task_status"]).name, + action_date=row["action_date"], + action_by=row["action_by"], ) - for r in results - ] + dto.activity.append(task_status_dto) return dto @staticmethod - def get_user_contributions(project_id: int) -> ProjectContributionsDTO: - """Get all user contributions on a project""" - - mapped_stmt = ( - Task.query.with_entities( - Task.mapped_by, - func.count(Task.mapped_by).label("count"), - func.array_agg(Task.id).label("task_ids"), - ) - .filter(Task.project_id == project_id) - .filter(Task.task_status != TaskStatus.BADIMAGERY.value) - .group_by(Task.mapped_by) - .subquery() - ) - badimagery_stmt = ( - Task.query.with_entities( - Task.mapped_by, - func.count(Task.mapped_by).label("count"), - func.array_agg(Task.id).label("task_ids"), - ) - .filter(Task.project_id == project_id) - .filter(Task.task_status == TaskStatus.BADIMAGERY.value) - .group_by(Task.mapped_by) - .subquery() - ) - validated_stmt = ( - Task.query.with_entities( - Task.validated_by, - func.count(Task.validated_by).label("count"), - func.array_agg(Task.id).label("task_ids"), - ) - .filter(Task.project_id == project_id) - .group_by(Task.validated_by) - .subquery() - ) - - project_contributions = ( - TaskHistory.query.with_entities(TaskHistory.user_id) - .filter( - TaskHistory.project_id == project_id, TaskHistory.action != "COMMENT" - ) - .distinct(TaskHistory.user_id) - .subquery() - ) - - results = ( - db.session.query( - User.id, - User.username, - User.name, - User.mapping_level, - User.picture_url, - User.date_registered, - coalesce(mapped_stmt.c.count, 0).label("mapped"), - coalesce(validated_stmt.c.count, 0).label("validated"), - coalesce(badimagery_stmt.c.count, 0).label("bad_imagery"), - ( - coalesce(mapped_stmt.c.count, 0) - + coalesce(validated_stmt.c.count, 0) - + coalesce(badimagery_stmt.c.count, 0) - ).label("total"), - mapped_stmt.c.task_ids.label("mapped_tasks"), - validated_stmt.c.task_ids.label("validated_tasks"), - badimagery_stmt.c.task_ids.label("bad_imagery_tasks"), - ) - .join(project_contributions, User.id == project_contributions.c.user_id) - .outerjoin(mapped_stmt, User.id == mapped_stmt.c.mapped_by) - .outerjoin(badimagery_stmt, User.id == badimagery_stmt.c.mapped_by) - .outerjoin(validated_stmt, User.id == validated_stmt.c.validated_by) - .group_by( - User.id, - User.username, - User.name, - User.mapping_level, - User.picture_url, - User.date_registered, - mapped_stmt.c.count, - mapped_stmt.c.task_ids, - badimagery_stmt.c.count, - badimagery_stmt.c.task_ids, - validated_stmt.c.count, - validated_stmt.c.task_ids, - ) - .order_by(desc("total")) - .all() + async def get_user_contributions( + project_id: int, db: Database + ) -> ProjectContributionsDTO: + # Query to get user contributions + query = """ + WITH mapped AS ( + SELECT + mapped_by AS user_id, + COUNT(mapped_by) AS count, + ARRAY_AGG(id) AS task_ids + FROM tasks + WHERE project_id = :project_id + AND task_status != :bad_imagery_status + GROUP BY mapped_by + ), + badimagery AS ( + SELECT + 
mapped_by AS user_id, + COUNT(mapped_by) AS count, + ARRAY_AGG(id) AS task_ids + FROM tasks + WHERE project_id = :project_id + AND task_status = :bad_imagery_status + GROUP BY mapped_by + ), + validated AS ( + SELECT + validated_by AS user_id, + COUNT(validated_by) AS count, + ARRAY_AGG(id) AS task_ids + FROM tasks + WHERE project_id = :project_id + GROUP BY validated_by + ), + project_contributions AS ( + SELECT DISTINCT user_id + FROM task_history + WHERE project_id = :project_id + AND action != 'COMMENT' + ) + SELECT + u.id, + u.username, + u.name, + u.mapping_level, + u.picture_url, + u.date_registered, + COALESCE(m.count, 0) AS mapped, + COALESCE(v.count, 0) AS validated, + COALESCE(b.count, 0) AS bad_imagery, + COALESCE(m.count, 0) + COALESCE(v.count, 0) + COALESCE(b.count, 0) AS total, + COALESCE(m.task_ids, '{}') AS mapped_tasks, + COALESCE(v.task_ids, '{}') AS validated_tasks, + COALESCE(b.task_ids, '{}') AS bad_imagery_tasks + FROM users u + JOIN project_contributions pc ON u.id = pc.user_id + LEFT JOIN mapped m ON u.id = m.user_id + LEFT JOIN badimagery b ON u.id = b.user_id + LEFT JOIN validated v ON u.id = v.user_id + ORDER BY total DESC; + """ + + # Execute the query + rows = await db.fetch_all( + query, + values={ + "project_id": project_id, + "bad_imagery_status": TaskStatus.BADIMAGERY.value, + }, ) + # Process the results into DTO contrib_dto = ProjectContributionsDTO() user_contributions = [ UserContribution( dict( - username=r.username, - name=r.name, - mapping_level=MappingLevel(r.mapping_level).name, - picture_url=r.picture_url, - mapped=r.mapped, - bad_imagery=r.bad_imagery, - validated=r.validated, - total=r.total, - mapped_tasks=r.mapped_tasks if r.mapped_tasks is not None else [], - bad_imagery_tasks=r.bad_imagery_tasks - if r.bad_imagery_tasks - else [], - validated_tasks=r.validated_tasks - if r.validated_tasks is not None - else [], - date_registered=r.date_registered.date(), + username=row["username"], + name=row["name"], + mapping_level=MappingLevel(row["mapping_level"]).name, + picture_url=row["picture_url"], + mapped=row["mapped"], + bad_imagery=row["bad_imagery"], + validated=row["validated"], + total=row["total"], + mapped_tasks=( + row["mapped_tasks"] if row["mapped_tasks"] is not None else [] + ), + bad_imagery_tasks=( + row["bad_imagery_tasks"] if row["bad_imagery_tasks"] else [] + ), + validated_tasks=( + row["validated_tasks"] + if row["validated_tasks"] is not None + else [] + ), + date_registered=( + row["date_registered"].date() + if isinstance(row["date_registered"], datetime.datetime) + else None + ), ) ) - for r in results + for row in rows ] contrib_dto.user_contributions = user_contributions @@ -364,82 +443,78 @@ def get_user_contributions(project_id: int) -> ProjectContributionsDTO: @staticmethod @cached(homepage_stats_cache) - def get_homepage_stats(abbrev=True) -> HomePageStatsDTO: + async def get_homepage_stats( + abbrev: bool = True, db: Database = None + ) -> HomePageStatsDTO: """Get overall TM stats to give community a feel for progress that's being made""" dto = HomePageStatsDTO() - dto.total_projects = Project.query.with_entities( - func.count(Project.id) - ).scalar() - dto.mappers_online = ( - Task.query.with_entities(func.count(Task.locked_by.distinct())) - .filter(Task.locked_by.isnot(None)) - .scalar() + + # Total Projects + query = select(func.count(Project.id)) + dto.total_projects = await db.fetch_val(query) + + # Mappers online (distinct users who locked tasks) + query = select(func.count(Task.locked_by.distinct())).where( + 
Task.locked_by.isnot(None) ) - dto.total_mappers = User.query.with_entities(func.count(User.id)).scalar() - dto.tasks_mapped = ( - Task.query.with_entities(func.count()) - .filter( - Task.task_status.in_( - (TaskStatus.MAPPED.value, TaskStatus.VALIDATED.value) - ) - ) - .scalar() + dto.mappers_online = await db.fetch_val(query) + + # Total Mappers + query = select(func.count(User.id)) + dto.total_mappers = await db.fetch_val(query) + + # Tasks mapped (status: MAPPED, VALIDATED) + query = select(func.count()).where( + Task.task_status.in_([TaskStatus.MAPPED.value, TaskStatus.VALIDATED.value]) ) + dto.tasks_mapped = await db.fetch_val(query) + if not abbrev: - dto.total_validators = ( - Task.query.filter(Task.task_status == TaskStatus.VALIDATED.value) - .distinct(Task.validated_by) - .count() + # Total Validators + query = select(func.count(Task.validated_by.distinct())).where( + Task.task_status == TaskStatus.VALIDATED.value ) - dto.tasks_validated = Task.query.filter( + dto.total_validators = await db.fetch_val(query) + + # Tasks Validated + query = select(func.count()).where( Task.task_status == TaskStatus.VALIDATED.value - ).count() + ) + dto.tasks_validated = await db.fetch_val(query) - dto.total_area = Project.query.with_entities( + # Total Area (sum of project areas in km²) + query = select( func.coalesce(func.sum(func.ST_Area(Project.geometry, True) / 1000000)) - ).scalar() - - dto.total_mapped_area = ( - Task.query.with_entities( - func.coalesce(func.sum(func.ST_Area(Task.geometry, True) / 1000000)) - ) - .filter(Task.task_status == TaskStatus.MAPPED.value) - .scalar() ) + dto.total_area = await db.fetch_val(query) - dto.total_validated_area = ( - Task.query.with_entities( - func.coalesce(func.sum(func.ST_Area(Task.geometry, True) / 1000000)) - ) - .filter(Task.task_status == TaskStatus.VALIDATED.value) - .scalar() - ) + # Total Mapped Area + query = select( + func.coalesce(func.sum(func.ST_Area(Task.geometry, True) / 1000000)) + ).where(Task.task_status == TaskStatus.MAPPED.value) + dto.total_mapped_area = await db.fetch_val(query) - unique_campaigns = Campaign.query.with_entities( - func.count(Campaign.id) - ).scalar() + # Total Validated Area + query = select( + func.coalesce(func.sum(func.ST_Area(Task.geometry, True) / 1000000)) + ).where(Task.task_status == TaskStatus.VALIDATED.value) + dto.total_validated_area = await db.fetch_val(query) - linked_campaigns_count = ( - Campaign.query.join( - campaign_projects, Campaign.id == campaign_projects.c.campaign_id - ) - .with_entities( - Campaign.name, func.count(campaign_projects.c.campaign_id) - ) + # Campaign Stats + query = select(func.count(Campaign.id)) + unique_campaigns = await db.fetch_val(query) + + query = ( + select([Campaign.name, func.count()]) + .select_from(Campaign.join(campaign_projects)) .group_by(Campaign.id) - .all() ) + linked_campaigns_count = await db.fetch_all(query) + + subquery = select(campaign_projects.c.project_id.distinct()).subquery() + query = select(func.count()).where(~Project.id.in_(subquery)) + no_campaign_count = await db.fetch_val(query) - subquery = ( - db.session.query(campaign_projects.c.project_id.distinct()) - .order_by(campaign_projects.c.project_id) - .subquery() - ) - no_campaign_count = ( - Project.query.with_entities(func.count()) - .filter(~Project.id.in_(subquery)) - .scalar() - ) dto.campaigns = [CampaignStatsDTO(row) for row in linked_campaigns_count] if no_campaign_count: dto.campaigns.append( @@ -447,27 +522,22 @@ def get_homepage_stats(abbrev=True) -> HomePageStatsDTO: ) 
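For context on the pattern above: the homepage counters now build SQLAlchemy Core `select()` expressions and hand them directly to the async `databases` connection, whose `fetch_val` returns the single aggregate value. A small self-contained sketch, assuming a standalone table definition (the real project uses its own models, not the metadata below):

```python
import sqlalchemy
from databases import Database

# Illustrative table definition only; the application uses its own SQLAlchemy models.
metadata = sqlalchemy.MetaData()
tasks_table = sqlalchemy.Table(
    "tasks",
    metadata,
    sqlalchemy.Column("id", sqlalchemy.Integer, primary_key=True),
    sqlalchemy.Column("locked_by", sqlalchemy.BigInteger, nullable=True),
)


async def mappers_online(db: Database) -> int:
    # A Core select() can be passed straight to the async driver;
    # fetch_val returns the first column of the first row (the aggregate).
    query = sqlalchemy.select(
        sqlalchemy.func.count(tasks_table.c.locked_by.distinct())
    ).where(tasks_table.c.locked_by.isnot(None))
    return await db.fetch_val(query)
```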
dto.total_campaigns = unique_campaigns - unique_orgs = Organisation.query.with_entities( - func.count(Organisation.id) - ).scalar() - linked_orgs_count = ( - db.session.query(Organisation.name, func.count(Project.organisation_id)) + # Organisation Stats + query = select(func.count(Organisation.id)) + unique_orgs = await db.fetch_val(query) + + query = ( + select([Organisation.name, func.count(Project.organisation_id)]) .join(Project.organisation) .group_by(Organisation.id) - .all() ) + linked_orgs_count = await db.fetch_all(query) + + subquery = select(Project.organisation_id.distinct()).subquery() + query = select(func.count()).where(~Organisation.id.in_(subquery)) + no_org_project_count = await db.fetch_val(query) - subquery = ( - db.session.query(Project.organisation_id.distinct()) - .order_by(Project.organisation_id) - .subquery() - ) - no_org_project_count = ( - Organisation.query.with_entities(func.count()) - .filter(~Organisation.id.in_(subquery)) - .scalar() - ) dto.organisations = [ OrganizationListStatsDTO(row) for row in linked_orgs_count ] @@ -500,14 +570,14 @@ def get_homepage_stats(abbrev=True) -> HomePageStatsDTO: @staticmethod def update_all_project_stats(): - projects = db.session.query(Project.id) + projects = session.query(Project.id) for project_id in projects.all(): StatsService.update_project_stats(project_id) @staticmethod def update_project_stats(project_id: int): project = ProjectService.get_project_by_id(project_id) - tasks = Task.query.filter(Task.project_id == project_id) + tasks = session.query(Task).filter(Task.project_id == project_id) project.total_tasks = tasks.count() project.tasks_mapped = tasks.filter( @@ -522,184 +592,201 @@ def update_project_stats(project_id: int): project.save() @staticmethod - def get_all_users_statistics(start_date: date, end_date: date): - users = User.query.filter( - User.date_registered >= start_date, - User.date_registered <= end_date, + async def get_all_users_statistics(start_date: date, end_date: date, db: Database): + # Base query for users within the date range + base_query = select(User).filter( + User.date_registered >= start_date, User.date_registered <= end_date ) + # Execute total user count stats_dto = UserStatsDTO() - stats_dto.total = users.count() - stats_dto.beginner = users.filter( - User.mapping_level == MappingLevel.BEGINNER.value - ).count() - stats_dto.intermediate = users.filter( - User.mapping_level == MappingLevel.INTERMEDIATE.value - ).count() - stats_dto.advanced = users.filter( - User.mapping_level == MappingLevel.ADVANCED.value - ).count() - stats_dto.contributed = users.filter(User.projects_mapped.isnot(None)).count() - stats_dto.email_verified = users.filter( - User.is_email_verified.is_(True) - ).count() + total_count_query = select(func.count()).select_from(base_query.subquery()) + result = await db.execute(total_count_query) + stats_dto.total = result + + # Beginner count + beginner_count_query = select(func.count()).select_from( + base_query.filter( + User.mapping_level == MappingLevel.BEGINNER.value + ).subquery() + ) + result = await db.execute(beginner_count_query) + stats_dto.beginner = result + + # Intermediate count + intermediate_count_query = select(func.count()).select_from( + base_query.filter( + User.mapping_level == MappingLevel.INTERMEDIATE.value + ).subquery() + ) + result = await db.execute(intermediate_count_query) + stats_dto.intermediate = result + # Advanced count + advanced_count_query = select(func.count()).select_from( + base_query.filter( + User.mapping_level == 
MappingLevel.ADVANCED.value + ).subquery() + ) + result = await db.execute(advanced_count_query) + stats_dto.advanced = result + + # Contributed count (those with projects mapped) + contributed_count_query = select(func.count()).select_from( + base_query.filter(User.projects_mapped.isnot(None)).subquery() + ) + result = await db.execute(contributed_count_query) + stats_dto.contributed = result + + # Email verified count + email_verified_count_query = select(func.count()).select_from( + base_query.filter(User.is_email_verified.is_(True)).subquery() + ) + result = await db.execute(email_verified_count_query) + stats_dto.email_verified = result + + # Gender stats gender_stats = GenderStatsDTO() - gender_stats.male = users.filter(User.gender == UserGender.MALE.value).count() - gender_stats.female = users.filter( - User.gender == UserGender.FEMALE.value - ).count() - gender_stats.self_describe = users.filter( - User.gender == UserGender.SELF_DESCRIBE.value - ).count() - gender_stats.prefer_not = users.filter( - User.gender == UserGender.PREFER_NOT.value - ).count() + # Male count + male_count_query = select(func.count()).select_from( + base_query.filter(User.gender == UserGender.MALE.value).subquery() + ) + result = await db.execute(male_count_query) + gender_stats.male = result + + # Female count + female_count_query = select(func.count()).select_from( + base_query.filter(User.gender == UserGender.FEMALE.value).subquery() + ) + result = await db.execute(female_count_query) + gender_stats.female = result + + # Self-describe count + self_describe_count_query = select(func.count()).select_from( + base_query.filter(User.gender == UserGender.SELF_DESCRIBE.value).subquery() + ) + result = await db.execute(self_describe_count_query) + gender_stats.self_describe = result + + # Prefer not to say count + prefer_not_count_query = select(func.count()).select_from( + base_query.filter(User.gender == UserGender.PREFER_NOT.value).subquery() + ) + result = await db.execute(prefer_not_count_query) + gender_stats.prefer_not = result + + # Set gender stats in the stats_dto stats_dto.genders = gender_stats + return stats_dto @staticmethod - def set_task_stats(result_row): - date_dto = TaskStats( - { - "date": result_row[0], - "mapped": result_row[1], - "validated": result_row[2], - "bad_imagery": result_row[3], - } - ) - return date_dto + def set_task_stats(row): + return { + "date": row["date"], + "mapped": row["mapped"], + "validated": row["validated"], + "bad_imagery": row["bad_imagery"], + } @staticmethod - def get_task_stats( - start_date, end_date, org_id, org_name, campaign, project_id, country + async def get_task_stats( + db: Database, + start_date, + end_date, + org_id=None, + org_name=None, + campaign=None, + project_id=None, + country=None, ): - """Creates tasks stats for a period using the TaskStatsDTO""" - - query = ( - db.session.query( - TaskHistory.task_id, - TaskHistory.project_id, - TaskHistory.action_text, - func.DATE(TaskHistory.action_date).label("day"), - ) - .distinct( - TaskHistory.project_id, TaskHistory.task_id, TaskHistory.action_text - ) - .filter( - TaskHistory.action == "STATE_CHANGE", - or_( - TaskHistory.action_text == "MAPPED", - TaskHistory.action_text == "VALIDATED", - TaskHistory.action_text == "BADIMAGERY", - ), - ) - .order_by( - TaskHistory.project_id, - TaskHistory.task_id, - TaskHistory.action_text, - TaskHistory.action_date, - ) - ) + """Creates task stats for a period using the TaskStatsDTO""" + + # Base query components + base_query = """ + WITH filtered_projects AS ( 
+ SELECT id FROM projects + WHERE 1 = 1 + {filters} + ), + aggregated_stats AS ( + SELECT + DATE(action_date) AS day, + action_text, + COUNT(*) AS count + FROM task_history + WHERE action = 'STATE_CHANGE' + AND action_text IN ('MAPPED', 'VALIDATED', 'BADIMAGERY') + AND project_id IN (SELECT id FROM filtered_projects) + GROUP BY DATE(action_date), action_text + ), + date_series AS ( + SELECT generate_series( + CAST(:start_date AS DATE), + CAST(:end_date AS DATE), + INTERVAL '1 day' + )::DATE AS date + ) + SELECT + TO_CHAR(ds.date, 'YYYY-MM-DD') AS date, -- Cast date to string + COALESCE(SUM(CASE WHEN ag.action_text = 'MAPPED' THEN ag.count END), 0) AS mapped, + COALESCE(SUM(CASE WHEN ag.action_text = 'VALIDATED' THEN ag.count END), 0) AS validated, + COALESCE(SUM(CASE WHEN ag.action_text = 'BADIMAGERY' THEN ag.count END), 0) AS bad_imagery + FROM date_series ds + LEFT JOIN aggregated_stats ag ON ds.date = ag.day + GROUP BY ds.date + HAVING + COALESCE(SUM(CASE WHEN ag.action_text = 'MAPPED' THEN ag.count END), 0) > 0 OR + COALESCE(SUM(CASE WHEN ag.action_text = 'VALIDATED' THEN ag.count END), 0) > 0 OR + COALESCE(SUM(CASE WHEN ag.action_text = 'BADIMAGERY' THEN ag.count END), 0) > 0 + ORDER BY ds.date; + """ + + filters = [] + values = {"start_date": start_date, "end_date": end_date} if org_id: - query = query.join(Project, Project.id == TaskHistory.project_id).filter( - Project.organisation_id == org_id - ) - if org_name: - try: - organisation_id = OrganisationService.get_organisation_by_name( - org_name - ).id - except NotFound: - organisation_id = None - query = query.join(Project, Project.id == TaskHistory.project_id).filter( - Project.organisation_id == organisation_id - ) - if campaign: - try: - campaign_id = CampaignService.get_campaign_by_name(campaign).id - except NotFound: - campaign_id = None - query = query.join( - campaign_projects, - campaign_projects.c.project_id == TaskHistory.project_id, - ).filter(campaign_projects.c.campaign_id == campaign_id) - if project_id: - query = query.filter(TaskHistory.project_id.in_(project_id)) - if country: - # Unnest country column array. 
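A note on the dynamic filter assembly in the new `get_task_stats`: only static SQL fragments are concatenated into the `{filters}` slot of the CTE, while every user-supplied value goes through a named bound parameter. A standalone sketch of that idea (the helper name and return shape are for illustration only):

```python
def build_task_stats_filters(org_id=None, campaign=None, country=None):
    """Sketch of the named-parameter filter assembly used by get_task_stats."""
    filters, values = [], {}
    if org_id:
        filters.append("AND organisation_id = :org_id")
        values["org_id"] = int(org_id)
    if campaign:
        filters.append(
            "AND id IN ("
            " SELECT project_id FROM campaign_projects"
            " WHERE campaign_id = (SELECT id FROM campaigns WHERE name = :campaign))"
        )
        values["campaign"] = campaign
    if country:
        # projects.country is an array column, so match against any element
        filters.append(
            "AND EXISTS (SELECT 1 FROM unnest(country) AS c WHERE c ILIKE :country)"
        )
        values["country"] = f"%{country}%"
    return " ".join(filters), values


# Example: build_task_stats_filters(org_id=1, country="Nepal")
# -> ("AND organisation_id = :org_id AND EXISTS (...)",
#     {"org_id": 1, "country": "%Nepal%"})
```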
- sq = Project.query.with_entities( - Project.id, func.unnest(Project.country).label("country") - ).subquery() + filters.append("AND organisation_id = :org_id") + values["org_id"] = int(org_id) - query = query.filter(sq.c.country.ilike("%{}%".format(country))).filter( - TaskHistory.project_id == sq.c.id - ) + if org_name: + filters.append(""" + AND organisation_id = ( + SELECT id FROM organisations WHERE name = :org_name + ) + """) + values["org_name"] = org_name - query = query.subquery() + if campaign: + filters.append(""" + AND id IN ( + SELECT project_id FROM campaign_projects + WHERE campaign_id = ( + SELECT id FROM campaigns WHERE name = :campaign + ) + ) + """) + values["campaign"] = campaign - date_query = db.session.query( - func.DATE( - func.generate_series(start_date, end_date, timedelta(days=1)) - ).label("d_day") - ).subquery() + if project_id: + filters.append("AND id = ANY(:project_id)") + values["project_id"] = project_id - grouped_dates = ( - db.session.query( - date_query.c.d_day, - query.c.action_text, - func.count(query.c.action_text).label("cnt"), - ) - .join(date_query, date_query.c.d_day == query.c.day) - .group_by(date_query.c.d_day, query.c.action_text) - .order_by(date_query.c.d_day) - ).subquery() - - mapped = ( - db.session.query( - grouped_dates.c.d_day, grouped_dates.c.action_text, grouped_dates.c.cnt - ) - .select_from(grouped_dates) - .filter(grouped_dates.c.action_text == "MAPPED") - .subquery() - ) - validated = ( - db.session.query( - grouped_dates.c.d_day, grouped_dates.c.action_text, grouped_dates.c.cnt - ) - .select_from(grouped_dates) - .filter(grouped_dates.c.action_text == "VALIDATED") - .subquery() - ) - badimagery = ( - db.session.query( - grouped_dates.c.d_day, grouped_dates.c.action_text, grouped_dates.c.cnt - ) - .select_from(grouped_dates) - .filter(grouped_dates.c.action_text == "BADIMAGERY") - .subquery() - ) + if country: + filters.append(""" + AND EXISTS ( + SELECT 1 + FROM unnest(country) AS c + WHERE c ILIKE :country + ) + """) + values["country"] = f"%{country}%" - result = ( - db.session.query( - func.to_char(grouped_dates.c.d_day, "YYYY-MM-DD"), - func.coalesce(mapped.c.cnt, 0).label("mapped"), - func.coalesce(validated.c.cnt, 0).label("validated"), - func.coalesce(badimagery.c.cnt, 0).label("badimagery"), - ) - .select_from(grouped_dates) - .distinct(grouped_dates.c.d_day) - .filter(grouped_dates.c.d_day is not None) - .outerjoin(mapped, mapped.c.d_day == grouped_dates.c.d_day) - .outerjoin(validated, validated.c.d_day == grouped_dates.c.d_day) - .outerjoin(badimagery, badimagery.c.d_day == grouped_dates.c.d_day) - ) + final_query = base_query.format(filters=" ".join(filters)) - day_stats_dto = list(map(StatsService.set_task_stats, result)) + results = await db.fetch_all(query=final_query, values=values) - results_dto = TaskStatsDTO() - results_dto.stats = day_stats_dto + stats_dicts = [dict(row) for row in results] - return results_dto + return TaskStatsDTO(stats=stats_dicts) diff --git a/backend/services/tags_service.py b/backend/services/tags_service.py index 07e8c34e12..80d664ac4b 100644 --- a/backend/services/tags_service.py +++ b/backend/services/tags_service.py @@ -3,6 +3,6 @@ class TagsService: @staticmethod - def get_all_countries(): + async def get_all_countries(db): """Get all countries""" - return Project.get_all_countries() + return await Project.get_all_countries(db) diff --git a/backend/services/team_service.py b/backend/services/team_service.py index 6eeaf7b5b3..2e5205020e 100644 --- a/backend/services/team_service.py 
+++ b/backend/services/team_service.py @@ -1,64 +1,73 @@ -from flask import current_app -from sqlalchemy import and_, or_ +from databases import Database +from fastapi.responses import JSONResponse +from loguru import logger from markdown import markdown -from backend import create_app, db +from backend.db import db_connection from backend.exceptions import NotFound +from backend.models.dtos.message_dto import MessageDTO +from backend.models.dtos.stats_dto import Pagination from backend.models.dtos.team_dto import ( - TeamDTO, + ListTeamsDTO, NewTeamDTO, - TeamsListDTO, ProjectTeamDTO, TeamDetailsDTO, + TeamDTO, TeamSearchDTO, + TeamsListDTO, ) - -from backend.models.dtos.message_dto import MessageDTO -from backend.models.dtos.stats_dto import Pagination from backend.models.postgis.message import Message, MessageType -from backend.models.postgis.team import Team, TeamMembers from backend.models.postgis.project import ProjectTeams -from backend.models.postgis.project_info import ProjectInfo from backend.models.postgis.statuses import ( TeamJoinMethod, TeamMemberFunctions, - TeamVisibility, TeamRoles, + TeamVisibility, UserRole, ) +from backend.models.postgis.team import Team, TeamMembers +from backend.services.messaging.message_service import MessageService from backend.services.organisation_service import OrganisationService from backend.services.users.user_service import UserService -from backend.services.messaging.message_service import MessageService class TeamServiceError(Exception): """Custom Exception to notify callers an error occurred when handling teams""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class TeamJoinNotAllowed(Exception): """Custom Exception to notify bad user level on joining team""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class TeamService: @staticmethod - def request_to_join_team(team_id: int, user_id: int): - team = TeamService.get_team_by_id(team_id) + async def get_team_by_id_user(team_id: int, user_id: int, db: Database): + query = """ + SELECT * FROM team_members + WHERE team_id = :team_id AND user_id = :user_id + """ + team_member = await db.fetch_one( + query, values={"team_id": team_id, "user_id": user_id} + ) + return team_member + + @staticmethod + async def request_to_join_team(team_id: int, user_id: int, db: Database): + team = await TeamService.get_team_by_id(team_id, db) # If user has team manager permission add directly to the team without request.E.G. Admins, Org managers - if TeamService.is_user_team_member(team_id, user_id): + if await TeamService.is_user_team_member(team_id, user_id, db): raise TeamServiceError( "The user is already a member of the team or has requested to join." ) - if TeamService.is_user_team_manager(team_id, user_id): - TeamService.add_team_member( - team_id, user_id, TeamMemberFunctions.MEMBER.value, True + if await TeamService.is_user_team_manager(team_id, user_id, db): + await TeamService.add_team_member( + team_id, user_id, TeamMemberFunctions.MEMBER.value, True, db ) return @@ -69,39 +78,49 @@ def request_to_join_team(team_id: int, user_id: int): ) role = TeamMemberFunctions.MEMBER.value - user = UserService.get_user_by_id(user_id) + user = await UserService.get_user_by_id(user_id, db) active = False # Set active=True for team with join method ANY as no approval is required to join this team type. 
if team.join_method == TeamJoinMethod.ANY.value: active = True - TeamService.add_team_member(team_id, user_id, role, active) - + await TeamService.add_team_member(team_id, user_id, role, active, db) # Notify team managers about a join request in BY_REQUEST team. if team.join_method == TeamJoinMethod.BY_REQUEST.value: - team_managers = team.get_team_managers() + team_managers = await Team.get_team_managers(db, team.id) for manager in team_managers: - # Only send notifications to team managers who have join request notification enabled. if manager.join_request_notifications: - MessageService.send_request_to_join_team( - user.id, user.username, manager.user_id, team.name, team_id + manager_obj = await UserService.get_user_by_username( + manager.username, db + ) + await MessageService.send_request_to_join_team( + user.id, user.username, manager_obj.id, team.name, team_id, db ) @staticmethod - def add_user_to_team( - team_id: int, requesting_user: int, username: str, role: str = None + async def add_user_to_team( + team_id: int, + requesting_user: int, + username: str, + role: str = None, + db: Database = None, ): - is_manager = TeamService.is_user_team_manager(team_id, requesting_user) + is_manager = await TeamService.is_user_team_manager( + team_id, requesting_user, db + ) if not is_manager: raise TeamServiceError("User is not allowed to add member to the team") - team = TeamService.get_team_by_id(team_id) - from_user = UserService.get_user_by_id(requesting_user) - to_user = UserService.get_user_by_username(username) - member = TeamMembers.get(team_id, to_user.id) + team = await TeamService.get_team_by_id(team_id, db) + from_user = await UserService.get_user_by_id(requesting_user, db) + to_user = await UserService.get_user_by_username(username, db) + member = await TeamMembers.get(team_id, to_user.id, db) if member: member.function = TeamMemberFunctions[role].value member.active = True - member.update() - return {"Success": "User role updated"} + await TeamMembers.update(member, db) + return JSONResponse( + content={"Success": "User role updated"}, status_code=200 + ) + else: if role: try: @@ -110,62 +129,68 @@ def add_user_to_team( raise Exception("Invalid TeamMemberFunction") else: role = TeamMemberFunctions.MEMBER.value - TeamService.add_team_member(team_id, to_user.id, role, True) - MessageService.send_team_join_notification( + await TeamService.add_team_member(team_id, to_user.id, role, True, db) + await MessageService.send_team_join_notification( requesting_user, from_user.username, to_user.id, team.name, team_id, TeamMemberFunctions(role).name, + db, ) @staticmethod - def add_team_member(team_id, user_id, function, active=False): + async def add_team_member( + team_id, user_id, function, active=False, db: Database = None + ): team_member = TeamMembers() team_member.team_id = team_id team_member.user_id = user_id team_member.function = function team_member.active = active - team_member.create() + await TeamMembers.create(team_member, db) @staticmethod - def send_invite(team_id, from_user_id, username): - to_user = UserService.get_user_by_username(username) - from_user = UserService.get_user_by_id(from_user_id) - team = TeamService.get_team_by_id(team_id) + async def send_invite(team_id, from_user_id, username, db: Database): + to_user = await UserService.get_user_by_username(username, db) + from_user = await UserService.get_user_by_id(from_user_id, db) + team = await TeamService.get_team_by_id(team_id, db) MessageService.send_invite_to_join_team( from_user_id, from_user.username, 
to_user.id, team.name, team_id ) @staticmethod - def accept_reject_join_request(team_id, from_user_id, username, function, action): - from_user = UserService.get_user_by_id(from_user_id) - to_user_id = UserService.get_user_by_username(username).id - team = TeamService.get_team_by_id(team_id) + async def accept_reject_join_request( + team_id, from_user_id, username, function, action, db: Database + ): + from_user = await UserService.get_user_by_id(from_user_id, db) + user = await UserService.get_user_by_username(username, db) + to_user_id = user.id + team = await TeamService.get_team_by_id(team_id, db) - if not TeamService.is_user_team_member(team_id, to_user_id): + if not await TeamService.is_user_team_member(team_id, to_user_id, db): raise NotFound(sub_code="JOIN_REQUEST_NOT_FOUND", username=username) if action not in ["accept", "reject"]: raise TeamServiceError("Invalid action type") if action == "accept": - TeamService.activate_team_member(team_id, to_user_id) + await TeamService.activate_team_member(team_id, to_user_id, db) elif action == "reject": - TeamService.delete_invite(team_id, to_user_id) + await TeamService.delete_invite(team_id, to_user_id, db) - MessageService.accept_reject_request_to_join_team( - from_user_id, from_user.username, to_user_id, team.name, team_id, action + await MessageService.accept_reject_request_to_join_team( + from_user_id, from_user.username, to_user_id, team.name, team_id, action, db ) @staticmethod - def accept_reject_invitation_request( - team_id, from_user_id, username, function, action + async def accept_reject_invitation_request( + team_id, from_user_id, username, function, action, db: Database ): - from_user = UserService.get_user_by_id(from_user_id) - to_user = UserService.get_user_by_username(username) - team = TeamService.get_team_by_id(team_id) - team_members = team.get_team_managers() + from_user = await UserService.get_user_by_id(from_user_id, db) + to_user = await UserService.get_user_by_username(username, db) + team = await TeamService.get_team_by_id(team_id, db) + team_members = await Team.get_team_managers(db, team.id) for member in team_members: MessageService.accept_reject_invitation_request_for_team( @@ -178,260 +203,361 @@ def accept_reject_invitation_request( action, ) if action == "accept": - TeamService.add_team_member( - team_id, from_user_id, TeamMemberFunctions[function.upper()].value + await TeamService.add_team_member( + team_id, from_user_id, TeamMemberFunctions[function.upper()].value, db ) @staticmethod - def leave_team(team_id, username): - user = UserService.get_user_by_username(username) - team_member = TeamMembers.query.filter( - TeamMembers.team_id == team_id, TeamMembers.user_id == user.id - ).one_or_none() + async def leave_team(team_id, username, db: Database = None): + user = await UserService.get_user_by_username(username, db) + team_member = await TeamService.get_team_by_id_user(team_id, user.id, db) + + # Raise an exception if the team member is not found if not team_member: raise NotFound( sub_code="USER_NOT_IN_TEAM", username=username, team_id=team_id ) - team_member.delete() + + # If found, delete the team member + delete_query = """ + DELETE FROM team_members + WHERE team_id = :team_id AND user_id = :user_id + """ + await db.execute(delete_query, values={"team_id": team_id, "user_id": user.id}) @staticmethod - def add_team_project(team_id, project_id, role): + async def add_team_project(team_id, project_id, role, db: Database): team_project = ProjectTeams() team_project.project_id = project_id 
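The rewritten membership helpers such as `leave_team` keep the service's `NotFound` contract, which makes the async code straightforward to exercise in tests. A rough sketch of such a test; the `test_db` fixture, its seeded data, and the use of pytest-asyncio are assumptions for illustration, not part of this change:

```python
import pytest
from databases import Database

from backend.exceptions import NotFound
from backend.services.team_service import TeamService


@pytest.mark.asyncio  # assumes pytest-asyncio is installed
async def test_leave_team_when_not_a_member(test_db: Database):
    # Assumes the fixture seeds a registered user "mapper_1" who is not in team 1,
    # so the membership lookup above raises NotFound(sub_code="USER_NOT_IN_TEAM").
    with pytest.raises(NotFound):
        await TeamService.leave_team(1, "mapper_1", db=test_db)
```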
team_project.team_id = team_id team_project.role = TeamRoles[role].value - team_project.create() + await ProjectTeams.create(team_project, db) @staticmethod - def delete_team_project(team_id, project_id): - project = ProjectTeams.query.filter( - and_(ProjectTeams.team_id == team_id, ProjectTeams.project_id == project_id) - ).one() - project.delete() + async def delete_team_project(team_id: int, project_id: int, db: Database): + """ + Deletes a project team by team_id and project_id. + :param team_id: ID of the team + :param project_id: ID of the project + :param db: async database connection + """ + # Query to find the project team + query = """ + SELECT * FROM project_teams + WHERE team_id = :team_id AND project_id = :project_id + """ + project_team = await db.fetch_one( + query, values={"team_id": team_id, "project_id": project_id} + ) + + # Check if the project team exists + if not project_team: + raise NotFound( + sub_code="PROJECT_TEAM_NOT_FOUND", + team_id=team_id, + project_id=project_id, + ) + + # If found, delete the project team + delete_query = """ + DELETE FROM project_teams + WHERE team_id = :team_id AND project_id = :project_id + """ + await db.execute( + delete_query, values={"team_id": team_id, "project_id": project_id} + ) @staticmethod - def get_all_teams(search_dto: TeamSearchDTO) -> TeamsListDTO: - query = db.session.query(Team) + async def get_all_teams(search_dto: TeamSearchDTO, db: Database) -> TeamsListDTO: + query_parts = [] + params = {} + + base_query = """ + SELECT t.id, t.name, t.join_method, t.visibility, t.description, + o.logo, o.name as organisation_name, o.id as organisation_id + FROM teams t + JOIN organisations o ON t.organisation_id = o.id + """ - orgs_query = None - user = UserService.get_user_by_id(search_dto.user_id) - is_admin = UserRole(user.role) == UserRole.ADMIN if search_dto.organisation: - orgs_query = query.filter(Team.organisation_id == search_dto.organisation) - if search_dto.manager and search_dto.manager == search_dto.user_id: - manager_teams = query.filter( - TeamMembers.user_id == search_dto.manager, - TeamMembers.active == True, # noqa - TeamMembers.function == TeamMemberFunctions.MANAGER.value, - Team.id == TeamMembers.team_id, - ) - - manager_orgs_teams = query.filter( - Team.organisation_id.in_( - [ - org.id - for org in OrganisationService.get_organisations( - search_dto.manager - ) - ] + query_parts.append("t.organisation_id = :organisation_id") + params["organisation_id"] = search_dto.organisation + + if search_dto.manager and int(search_dto.manager) == int(search_dto.user_id): + manager_teams_query = """ + SELECT t.id FROM teams t + JOIN team_members tm ON t.id = tm.team_id + WHERE tm.user_id = :manager_id AND tm.active = true AND tm.function = :manager_function + """ + params["manager_id"] = int(search_dto.manager) + params["manager_function"] = TeamMemberFunctions.MANAGER.value + + orgs_teams_query = """ + SELECT t.id FROM teams t + WHERE t.organisation_id = ANY( + SELECT organisation_id FROM organisation_managers WHERE user_id = :manager_id ) - ) + """ - query = manager_teams.union(manager_orgs_teams) + query_parts.append( + f"t.id IN ({manager_teams_query} UNION {orgs_teams_query})" + ) if search_dto.team_name: - query = query.filter( - Team.name.ilike("%" + search_dto.team_name + "%"), - ) + query_parts.append("t.name ILIKE :team_name") + params["team_name"] = f"%{search_dto.team_name}%" if search_dto.team_role: try: role = TeamRoles[search_dto.team_role.upper()].value - project_teams = ( - db.session.query(ProjectTeams) - 
.filter(ProjectTeams.role == role) - .subquery() - ) - query = query.join(project_teams) + project_teams_query = """ + SELECT pt.team_id FROM project_teams pt WHERE pt.role = :team_role + """ + query_parts.append(f"t.id IN ({project_teams_query})") + params["team_role"] = role except KeyError: pass if search_dto.member: - team_member = ( - db.session.query(TeamMembers) - .filter( - TeamMembers.user_id == search_dto.member, - TeamMembers.active.is_(True), - ) - .subquery() - ) - query = query.join(team_member) + team_member_query = """ + SELECT tm.team_id FROM team_members tm + WHERE tm.user_id = :member_id AND tm.active = true + """ + query_parts.append(f"t.id IN ({team_member_query})") + params["member_id"] = search_dto.member if search_dto.member_request: - team_member = ( - db.session.query(TeamMembers) - .filter( - TeamMembers.user_id == search_dto.member_request, - TeamMembers.active.is_(False), - ) - .subquery() - ) - query = query.join(team_member) - if orgs_query: - query = query.union(orgs_query) - - # Only show public teams and teams that the user is a member of + team_member_request_query = """ + SELECT tm.team_id FROM team_members tm + WHERE tm.user_id = :member_request_id AND tm.active = false + """ + query_parts.append(f"t.id IN ({team_member_request_query})") + params["member_request_id"] = search_dto.member_request + + user = await UserService.get_user_by_id(search_dto.user_id, db) + is_admin = UserRole(user.role) == UserRole.ADMIN if not is_admin: - query = query.filter( - or_( - Team.visibility == TeamVisibility.PUBLIC.value, - # Since user.teams returns TeamMembers, we need to get the team_id - Team.id.in_([team.team_id for team in user.teams]), + public_or_member_query = """ + t.visibility = :public_visibility OR t.id IN ( + SELECT tm.team_id FROM team_members tm WHERE tm.user_id = :user_id ) - ) - teams_list_dto = TeamsListDTO() + """ + query_parts.append(f"({public_or_member_query})") + params["public_visibility"] = TeamVisibility.PUBLIC.value + params["user_id"] = search_dto.user_id + + if query_parts: + final_query = f"{base_query} WHERE {' AND '.join(query_parts)}" + else: + final_query = base_query if search_dto.paginate: - paginated = query.paginate( - page=search_dto.page, per_page=search_dto.per_page, error_out=True - ) - teams_list_dto.pagination = Pagination(paginated) - teams_list = paginated.items + final_query_paginated = final_query + limit = search_dto.per_page + offset = (search_dto.page - 1) * search_dto.per_page + final_query_paginated += f" LIMIT {limit} OFFSET {offset}" + rows = await db.fetch_all(query=final_query_paginated, values=params) + else: - teams_list = query.all() - for team in teams_list: - team_dto = TeamDTO() - team_dto.team_id = team.id - team_dto.name = team.name - team_dto.join_method = TeamJoinMethod(team.join_method).name - team_dto.visibility = TeamVisibility(team.visibility).name - team_dto.description = team.description - team_dto.logo = team.organisation.logo - team_dto.organisation = team.organisation.name - team_dto.organisation_id = team.organisation.id - team_dto.members = [] - # Skip if members are not included + rows = await db.fetch_all(query=final_query, values=params) + + teams_list_dto = TeamsListDTO() + for row in rows: + team_dto = TeamDTO( + team_id=row["id"], + name=row["name"], + join_method=TeamJoinMethod(row["join_method"]).name, + visibility=TeamVisibility(row["visibility"]).name, + description=row["description"], + logo=row["logo"], + organisation=row["organisation_name"], + 
organisation_id=row["organisation_id"], + members=[], + ) + if not search_dto.omit_members: if search_dto.full_members_list: - team_members = team.members + team_dto.members = await Team.get_all_members(db, row["id"], None) else: - team_managers = team.get_team_managers(10) - team_members = team.get_team_members(10) + team_managers = await Team.get_team_managers(db, row["id"], 10) + team_members = await Team.get_team_members(db, row["id"], 10) team_members.extend(team_managers) - team_dto.members = [ - team.as_dto_team_member(member) for member in team_members - ] - team_dto.members_count = team.get_members_count_by_role( - TeamMemberFunctions.MEMBER + team_dto.members = team_members + + team_dto.members_count = await Team.get_members_count_by_role( + db, row["id"], TeamMemberFunctions.MEMBER ) - team_dto.managers_count = team.get_members_count_by_role( - TeamMemberFunctions.MANAGER + team_dto.managers_count = await Team.get_members_count_by_role( + db, row["id"], TeamMemberFunctions.MANAGER ) + teams_list_dto.teams.append(team_dto) + + if search_dto.paginate: + total_query = "SELECT COUNT(*) FROM (" + final_query + ") as total" + total = await db.fetch_val(query=total_query, values=params) + teams_list_dto.pagination = Pagination.from_total_count( + total=total, page=search_dto.page, per_page=search_dto.per_page + ) return teams_list_dto - @staticmethod - def get_team_as_dto( - team_id: int, user_id: int, abbreviated: bool + async def get_team_as_dto( + team_id: int, user_id: int, abbreviated: bool, db: Database ) -> TeamDetailsDTO: - team = TeamService.get_team_by_id(team_id) + # Query to fetch team and organisation details + team_query = """ + SELECT t.id as team_id, t.name as team_name, t.join_method, t.visibility, + t.description, o.logo as org_logo, o.name as org_name, + o.id as org_id, o.slug as org_slug + FROM teams t + JOIN organisations o ON t.organisation_id = o.id + WHERE t.id = :team_id + """ - if team is None: + # Fetch the team details + team_details = await db.fetch_one(query=team_query, values={"team_id": team_id}) + + if not team_details: raise NotFound(sub_code="TEAM_NOT_FOUND", team_id=team_id) - team_dto = TeamDetailsDTO() - team_dto.team_id = team.id - team_dto.name = team.name - team_dto.join_method = TeamJoinMethod(team.join_method).name - team_dto.visibility = TeamVisibility(team.visibility).name - team_dto.description = team.description - team_dto.logo = team.organisation.logo - team_dto.organisation = team.organisation.name - team_dto.organisation_id = team.organisation.id - team_dto.organisation_slug = team.organisation.slug + # Create the TeamDetailsDTO + team_dto = TeamDetailsDTO( + team_id=team_details["team_id"], + name=team_details["team_name"], + join_method=TeamJoinMethod(team_details["join_method"]).name, + visibility=TeamVisibility(team_details["visibility"]).name, + description=team_details["description"], + logo=team_details["org_logo"], + organisation=team_details["org_name"], + organisation_id=team_details["org_id"], + organisation_slug=team_details["org_slug"], + ) + # Check for admin roles if user_id is provided if user_id != 0: - if UserService.is_user_an_admin(user_id): - team_dto.is_general_admin = True - - if OrganisationService.is_user_an_org_manager( - team.organisation.id, user_id - ): - team_dto.is_org_admin = True - else: - team_dto.is_general_admin = False - team_dto.is_org_admin = False + team_dto.is_general_admin = await UserService.is_user_an_admin(user_id, db) + team_dto.is_org_admin = await 
OrganisationService.is_user_an_org_manager( + team_details["org_id"], user_id, db + ) if abbreviated: return team_dto - team_dto.members = [team.as_dto_team_member(member) for member in team.members] - - team_projects = TeamService.get_projects_by_team_id(team.id) - - team_dto.team_projects = [ - team.as_dto_team_project(project) for project in team_projects - ] - + # Fetch and add team members to the DTO + members_query = """ + SELECT user_id FROM team_members WHERE team_id = :team_id + """ + members = await db.fetch_all(query=members_query, values={"team_id": team_id}) + team_dto.members = ( + [ + await Team.as_dto_team_member(member.user_id, team_id, db) + for member in members + ] + if members + else [] + ) + team_projects = await TeamService.get_projects_by_team_id(team_id, db) + team_dto.team_projects = ( + [Team.as_dto_team_project(project) for project in team_projects] + if team_projects + else [] + ) return team_dto @staticmethod - def get_projects_by_team_id(team_id: int): - projects = ( - db.session.query( - ProjectInfo.name, ProjectTeams.project_id, ProjectTeams.role - ) - .join(ProjectTeams, ProjectInfo.project_id == ProjectTeams.project_id) - .filter(ProjectTeams.team_id == team_id) - .all() - ) + async def get_projects_by_team_id(team_id: int, db: Database): + # SQL query to fetch project details associated with the team + projects_query = """ + SELECT p.name, pt.project_id, pt.role + FROM project_teams pt + JOIN project_info p ON p.project_id = pt.project_id + WHERE pt.team_id = :team_id + """ - if projects is None: - raise NotFound(sub_code="PROJECTS_NOT_FOUND", team_id=team_id) + # Execute the query and fetch all results + projects = await db.fetch_all(query=projects_query, values={"team_id": team_id}) + + if not projects: + projects = [] return projects @staticmethod - def get_project_teams_as_dto(project_id: int) -> TeamsListDTO: - """Gets all the teams for a specified project""" - project_teams = ProjectTeams.query.filter( - ProjectTeams.project_id == project_id - ).all() - teams_list_dto = TeamsListDTO() + async def get_project_teams_as_dto(project_id: int, db: Database) -> TeamsListDTO: + """Gets all the teams for a specified project with their roles and names""" + # Raw SQL query to get project teams with team names + query = """ + SELECT pt.team_id, t.name AS team_name, pt.role + FROM project_teams pt + JOIN teams t ON pt.team_id = t.id + WHERE pt.project_id = :project_id + """ + project_teams = await db.fetch_all( + query=query, values={"project_id": project_id} + ) + # Initialize the DTO + teams_list_dto = ListTeamsDTO() + # Populate the DTO with team data for project_team in project_teams: - team = TeamService.get_team_by_id(project_team.team_id) - team_dto = ProjectTeamDTO() - team_dto.team_id = project_team.team_id - team_dto.team_name = team.name - team_dto.role = project_team.role - + team_dto = ProjectTeamDTO( + team_id=project_team["team_id"], + team_name=project_team["team_name"], + role=str(project_team["role"]), + ) teams_list_dto.teams.append(team_dto) return teams_list_dto @staticmethod - def change_team_role(team_id: int, project_id: int, role: str): - project = ProjectTeams.query.filter( - and_(ProjectTeams.team_id == team_id, ProjectTeams.project_id == project_id) - ).one() - project.role = TeamRoles[role].value - project.save() + async def change_team_role(team_id: int, project_id: int, role: str, db: Database): + """ + Change the role of a team in a project. 
+ :param team_id: ID of the team + :param project_id: ID of the project + :param role: New role to assign + :param db: Database instance for executing queries + """ + # Assuming `TeamRoles[role].value` gives the correct integer or string value for the role + new_role_value = TeamRoles[role].value + + # Write the raw SQL query to update the role in the `project_teams` table + query = """ + UPDATE project_teams + SET role = :new_role_value + WHERE team_id = :team_id AND project_id = :project_id + """ + + # Execute the query + await db.execute( + query, + { + "new_role_value": new_role_value, + "team_id": team_id, + "project_id": project_id, + }, + ) @staticmethod - def get_team_by_id(team_id: int) -> Team: + async def get_team_by_id(team_id: int, db: Database): """ Get team from DB :param team_id: ID of team to fetch :returns: Team :raises: Not Found """ - team = Team.get(team_id) - - if team is None: + # Raw SQL query to select the team by ID + query = """ + SELECT id, name, organisation_id, join_method, description, visibility + FROM teams + WHERE id = :team_id + """ + # Execute the query and fetch the team + team_record = await db.fetch_one(query=query, values={"team_id": team_id}) + if team_record is None: raise NotFound(sub_code="TEAM_NOT_FOUND", team_id=team_id) - return team + return team_record @staticmethod def get_team_by_name(team_name: str) -> Team: @@ -443,34 +569,34 @@ def get_team_by_name(team_name: str) -> Team: return team @staticmethod - def create_team(new_team_dto: NewTeamDTO) -> int: + async def create_team(new_team_dto: NewTeamDTO, db: Database) -> int: """ Creates a new team using a team dto :param new_team_dto: Team DTO :returns: ID of new Team """ - TeamService.assert_validate_organisation(new_team_dto.organisation_id) + await TeamService.assert_validate_organisation(new_team_dto.organisation_id, db) - team = Team.create_from_dto(new_team_dto) - return team.id + team = await Team.create_from_dto(new_team_dto, db) + return team @staticmethod - def update_team(team_dto: TeamDTO) -> Team: + async def update_team(team_dto: TeamDTO, db: Database) -> Team: """ Updates a team :param team_dto: DTO with updated info :returns updated Team """ - team = TeamService.get_team_by_id(team_dto.team_id) - team.update(team_dto) + team = await TeamService.get_team_by_id(team_dto.team_id, db) + team = await Team.update(team, team_dto, db) - return team + return team["id"] if team else None @staticmethod - def assert_validate_organisation(org_id: int): + async def assert_validate_organisation(org_id: int, db: Database): """Makes sure an organisation exists""" try: - OrganisationService.get_organisation_by_id(org_id) + await OrganisationService.get_organisation_by_id(org_id, db) except NotFound: raise TeamServiceError(f"Organisation {org_id} does not exist") @@ -498,61 +624,126 @@ def assert_validate_members(team_dto: TeamDTO): team_dto.members = members @staticmethod - def _get_team_members(team_id: int): - return TeamMembers.query.filter_by(team_id=team_id).all() + async def _get_team_members(team_id: int, db: Database): + # Asynchronous query to fetch team members by team_id + query = "SELECT * FROM team_members WHERE team_id = :team_id" + return await db.fetch_all(query, values={"team_id": team_id}) @staticmethod - def _get_active_team_members(team_id: int): - return TeamMembers.query.filter_by(team_id=team_id, active=True).all() + async def _get_active_team_members(team_id: int, db: Database): + try: + query = """ + SELECT * FROM team_members + WHERE team_id = :team_id AND active = 
TRUE + """ + return await db.fetch_all(query, values={"team_id": team_id}) + except Exception as e: + print(f"Error executing query: {str(e)}") + raise @staticmethod - def activate_team_member(team_id: int, user_id: int): - member = TeamMembers.query.filter( - TeamMembers.team_id == team_id, TeamMembers.user_id == user_id - ).first() - member.active = True - db.session.add(member) - db.session.commit() + async def activate_team_member(team_id: int, user_id: int, db: Database): + # Fetch the member by team_id and user_id + member = await TeamService.get_team_by_id_user(team_id, user_id, db) + + if member: + # Update the 'active' status of the member + update_query = """ + UPDATE team_members + SET active = TRUE + WHERE team_id = :team_id AND user_id = :user_id + """ + await db.execute( + update_query, values={"team_id": team_id, "user_id": user_id} + ) + else: + # Handle case where member is not found + raise ValueError( + f"No member found with team_id {team_id} and user_id {user_id}" + ) @staticmethod - def delete_invite(team_id: int, user_id: int): - member = TeamMembers.query.filter( - TeamMembers.team_id == team_id, TeamMembers.user_id == user_id - ).first() - member.delete() + async def delete_invite(team_id: int, user_id: int, db: Database): + # Fetch the member by team_id and user_id to check if it exists + member = await TeamService.get_team_by_id_user(team_id, user_id, db) + + if member: + # Delete the member from the database + delete_query = """ + DELETE FROM team_members + WHERE team_id = :team_id AND user_id = :user_id + """ + await db.execute( + delete_query, values={"team_id": team_id, "user_id": user_id} + ) + else: + # Handle case where member is not found + raise ValueError( + f"No member found with team_id {team_id} and user_id {user_id}" + ) @staticmethod - def is_user_team_member(team_id: int, user_id: int): - query = TeamMembers.query.filter( - TeamMembers.team_id == team_id, - TeamMembers.user_id == user_id, - ).exists() - return db.session.query(query).scalar() + async def is_user_team_member(team_id: int, user_id: int, db: Database) -> bool: + # Query to check if the user is a member of the team + query = """ + SELECT EXISTS ( + SELECT 1 FROM team_members + WHERE team_id = :team_id AND user_id = :user_id + ) AS is_member + """ + result = await db.fetch_one( + query, values={"team_id": team_id, "user_id": user_id} + ) + + # The result contains the 'is_member' field, which is a boolean + return result["is_member"] @staticmethod - def is_user_an_active_team_member(team_id: int, user_id: int): - query = TeamMembers.query.filter( - TeamMembers.team_id == team_id, - TeamMembers.user_id == user_id, - TeamMembers.active.is_(True), - ).exists() - return db.session.query(query).scalar() + async def is_user_an_active_team_member( + team_id: int, user_id: int, db: Database + ) -> bool: + """ + Check if a user is an active member of a team. 
+ :param team_id: ID of the team + :param user_id: ID of the user + :param db: Database connection + :returns: True if the user is an active member, False otherwise + """ + # Raw SQL query to check if the user is an active team member + query = """ + SELECT EXISTS( + SELECT 1 + FROM team_members + WHERE team_id = :team_id + AND user_id = :user_id + AND active = true + ) AS is_active + """ + + # Execute the query and fetch the result + result = await db.fetch_one( + query=query, values={"team_id": team_id, "user_id": user_id} + ) + # Return the boolean value indicating if the user is an active team member + return result["is_active"] @staticmethod - def is_user_team_manager(team_id: int, user_id: int): + async def is_user_team_manager(team_id: int, user_id: int, db: Database) -> bool: # Admin manages all teams - team = TeamService.get_team_by_id(team_id) - if UserService.is_user_an_admin(user_id): + team = await TeamService.get_team_by_id(team_id, db) + if await UserService.is_user_an_admin(user_id, db): return True - managers = team.get_team_managers() + managers = await Team.get_team_managers(db, team.id) for member in managers: - if member.user_id == user_id: + team_manager = await UserService.get_user_by_username(member.username, db) + if team_manager.id == user_id: return True # Org admin manages teams attached to their org user_managed_orgs = [ - org.id for org in OrganisationService.get_organisations(user_id) + org.organisation_id + for org in await OrganisationService.get_organisations(user_id, db) ] if team.organisation_id in user_managed_orgs: return True @@ -560,63 +751,68 @@ def is_user_team_manager(team_id: int, user_id: int): return False @staticmethod - def delete_team(team_id: int): + async def delete_team(team_id: int, db: Database): """Deletes a team""" - team = TeamService.get_team_by_id(team_id) - - if team.can_be_deleted(): - team.delete() - return {"Success": "Team deleted"}, 200 + team = await TeamService.get_team_by_id(team_id, db) + if await Team.can_be_deleted(team_id, db): + await Team.delete(team, db) + return JSONResponse(content={"Success": "Team deleted"}, status_code=200) else: - return { - "Error": "Team has projects, cannot be deleted", - "SubCode": "This team has projects associated. Before deleting team, unlink any associated projects.", - }, 400 + return JSONResponse( + content={ + "Error": "Team has projects, cannot be deleted", + "SubCode": "This team has projects associated. 
Before deleting team, unlink any associated projects.", + }, + status_code=400, + ) @staticmethod - def check_team_membership(project_id: int, allowed_roles: list, user_id: int): + async def check_team_membership( + project_id: int, allowed_roles: list, user_id: int, db + ): """Given a project and permitted team roles, check user's membership in the team list""" - teams_dto = TeamService.get_project_teams_as_dto(project_id) + teams_dto = await TeamService.get_project_teams_as_dto(project_id, db) teams_allowed = [ team_dto for team_dto in teams_dto.teams if team_dto.role in allowed_roles ] user_membership = [ team_dto.team_id for team_dto in teams_allowed - if TeamService.is_user_an_active_team_member(team_dto.team_id, user_id) + if await TeamService.is_user_an_active_team_member( + team_dto.team_id, user_id, db + ) ] return len(user_membership) > 0 @staticmethod - def send_message_to_all_team_members( - team_id: int, team_name: str, message_dto: MessageDTO + async def send_message_to_all_team_members( + team_id: int, + team_name: str, + message_dto: MessageDTO, + user_id: int, ): - """Sends supplied message to all contributors in a team. Message all team members can take - over a minute to run, so this method is expected to be called on its own thread - """ - app = ( - create_app() - ) # Because message-all run on background thread it needs it's own app context - - with app.app_context(): - team_members = TeamService._get_active_team_members(team_id) - sender = UserService.get_user_by_id(message_dto.from_user_id).username - - message_dto.message = ( - "A message from {}, manager of {} team:

{}".format( - MessageService.get_user_profile_link(sender), - MessageService.get_team_link(team_name, team_id, False), - markdown(message_dto.message, output_format="html"), + try: + async with db_connection.database.connection() as conn: + team_members = await TeamService._get_active_team_members(team_id, conn) + user = await UserService.get_user_by_id(user_id, conn) + sender = user.username + message_dto.message = ( + "A message from {}, manager of {} team:

{}".format( + MessageService.get_user_profile_link(sender), + MessageService.get_team_link(team_name, team_id, False), + markdown(message_dto.message, output_format="html"), + ) ) - ) - - messages = [] - for team_member in team_members: - if team_member.user_id != message_dto.from_user_id: - message = Message.from_dto(team_member.user_id, message_dto) - message.message_type = MessageType.TEAM_BROADCAST.value - message.save() - user = UserService.get_user_by_id(team_member.user_id) - messages.append(dict(message=message, user=user)) - - MessageService._push_messages(messages) + messages = [] + for team_member in team_members: + if team_member.user_id != user_id: + message = Message.from_dto(team_member.user_id, message_dto) + message.message_type = MessageType.TEAM_BROADCAST.value + user = await UserService.get_user_by_id( + team_member.user_id, conn + ) + messages.append(dict(message=message, user=user)) + await MessageService._push_messages(messages, conn) + logger.info("Messages sent successfully.") + except Exception as e: + logger.error(f"Error sending messages in background task: {str(e)}") diff --git a/backend/services/users/authentication_service.py b/backend/services/users/authentication_service.py index 93fbd3852b..a4e7e5937f 100644 --- a/backend/services/users/authentication_service.py +++ b/backend/services/users/authentication_service.py @@ -1,16 +1,30 @@ import base64 +import binascii import urllib.parse - -from flask import current_app, request -from flask_httpauth import HTTPTokenAuth -from itsdangerous import URLSafeTimedSerializer, BadSignature, SignatureExpired +from random import SystemRandom +from typing import Optional + +from databases import Database +from fastapi import HTTPException, Security, status +from fastapi.responses import JSONResponse +from fastapi.security.api_key import APIKeyHeader +from itsdangerous import BadSignature, SignatureExpired, URLSafeTimedSerializer +from loguru import logger +from starlette.authentication import ( + AuthCredentials, + AuthenticationBackend, + AuthenticationError, + SimpleUser, +) from backend.api.utils import TMAPIDecorators +from backend.config import settings +from backend.models.dtos.user_dto import AuthUserDTO +from backend.models.postgis.user import User from backend.services.messaging.message_service import MessageService -from backend.services.users.user_service import UserService, NotFound -from random import SystemRandom +from backend.services.users.user_service import NotFound, UserService -token_auth = HTTPTokenAuth(scheme="Token") +# token_auth = HTTPTokenAuth(scheme="Token") tm = TMAPIDecorators() UNICODE_ASCII_CHARACTER_SET = ( @@ -18,13 +32,16 @@ ) -@token_auth.error_handler +# @token_auth.error_handler def handle_unauthorized_token(): - current_app.logger.debug("Token not valid") - return {"Error": "Token is expired or invalid", "SubCode": "InvalidToken"}, 401 + logger.debug("Token not valid") + return JSONResponse( + content={"Error": "Token is expired or invalid", "SubCode": "InvalidToken"}, + status_code=401, + ) -@token_auth.verify_token +# @token_auth.verify_token def verify_token(token): """Verify the supplied token and check user role is correct for the requested resource""" tm.authenticated_user_id = None @@ -34,12 +51,12 @@ def verify_token(token): try: decoded_token = base64.b64decode(token).decode("utf-8") except UnicodeDecodeError: - current_app.logger.debug(f"Unable to decode token {request.base_url}") + logger.debug("Unable to decode token") return False # Can't decode token, so fail login - 
valid_token, user_id = AuthenticationService.is_valid_token(decoded_token, 604800) + valid_token, user_id = AuthenticationService.is_valid_token(decoded_token, 120) if not valid_token: - current_app.logger.debug(f"Token not valid {request.base_url}") + logger.debug("Token not valid") return False tm.authenticated_user_id = ( @@ -48,17 +65,44 @@ def verify_token(token): return user_id # All tests passed token is good for the requested resource +class TokenAuthBackend(AuthenticationBackend): + async def authenticate(self, conn): + if "authorization" not in conn.headers: + return + + auth = conn.headers["authorization"] + try: + scheme, credentials = auth.split() + if scheme.lower() != "token": + return + try: + decoded_token = base64.b64decode(credentials).decode("ascii") + except UnicodeDecodeError: + logger.debug("Unable to decode token") + return False + except (ValueError, UnicodeDecodeError, binascii.Error): + raise AuthenticationError("Invalid auth credentials") + + valid_token, user_id = AuthenticationService.is_valid_token( + decoded_token, 604800 + ) + if not valid_token: + logger.debug("Token not valid.") + return + tm.authenticated_user_id = user_id + return AuthCredentials(["authenticated"]), SimpleUser(user_id) + + class AuthServiceError(Exception): """Custom Exception to notify callers an error occurred when authenticating""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class AuthenticationService: @staticmethod - def login_user(osm_user_details, email, user_element="user") -> dict: + async def login_user(osm_user_details, email, db, user_element="user") -> dict: """ Generates authentication details for user, creating in DB if user is unknown to us :param osm_user_details: XML response from OSM @@ -82,16 +126,17 @@ def login_user(osm_user_details, email, user_element="user") -> dict: user_picture = None try: - UserService.get_user_by_id(osm_id) - UserService.update_user(osm_id, username, user_picture) + await UserService.get_user_by_id(osm_id, db) + await UserService.update_user(osm_id, username, user_picture, db) except NotFound: # User not found, so must be new user changesets = osm_user.get("changesets") changeset_count = int(changesets.get("count")) - new_user = UserService.register_user( - osm_id, username, changeset_count, user_picture, email - ) - MessageService.send_welcome_message(new_user) + async with db.transaction(): + new_user = await UserService.register_user( + osm_id, username, changeset_count, user_picture, email, db + ) + await MessageService.send_welcome_message(new_user, db) session_token = AuthenticationService.generate_session_token_for_user(osm_id) return { @@ -101,9 +146,9 @@ def login_user(osm_user_details, email, user_element="user") -> dict: } @staticmethod - def authenticate_email_token(username: str, token: str): + async def authenticate_email_token(username: str, token: str, db: Database): """Validate that the email token is valid""" - user = UserService.get_user_by_username(username) + user = await UserService.get_user_by_username(username, db) is_valid, tokenised_email = AuthenticationService.is_valid_token(token, 86400) @@ -116,13 +161,13 @@ def authenticate_email_token(username: str, token: str): raise AuthServiceError("InvalidEmail- Email address does not match token") # Token is valid so update DB and return - user.set_email_verified_status(is_verified=True) + await User.set_email_verified_status(user, is_verified=True, db=db) return 
AuthenticationService._get_email_validated_url(True) @staticmethod def _get_email_validated_url(is_valid: bool) -> str: """Helper function to generate redirect url for email verification""" - base_url = current_app.config["APP_BASE_URL"] + base_url = settings.APP_BASE_URL verification_params = {"is_valid": is_valid} verification_url = "{0}/validate-email?{1}".format( @@ -133,7 +178,7 @@ def _get_email_validated_url(is_valid: bool) -> str: @staticmethod def get_authentication_failed_url(): """Generates the auth-failed URL for the running app""" - base_url = current_app.config["APP_BASE_URL"] + base_url = settings.APP_BASE_URL auth_failed_url = f"{base_url}/auth-failed" return auth_failed_url @@ -144,7 +189,7 @@ def generate_session_token_for_user(osm_id: int): :param osm_id: OSM ID of the user authenticating :return: Token """ - entropy = current_app.secret_key if current_app.secret_key else "un1testingmode" + entropy = settings.SECRET_KEY if settings.SECRET_KEY else "un1testingmode" serializer = URLSafeTimedSerializer(entropy) return serializer.dumps(osm_id) @@ -169,16 +214,94 @@ def is_valid_token(token, token_expiry): :param token_expiry: When the token expires in seconds :return: True if token is valid, and user_id contained in token """ - entropy = current_app.secret_key if current_app.secret_key else "un1testingmode" + entropy = settings.SECRET_KEY if settings.SECRET_KEY else "un1testingmode" serializer = URLSafeTimedSerializer(entropy) try: tokenised_user_id = serializer.loads(token, max_age=token_expiry) except SignatureExpired: - current_app.logger.debug("Token has expired") + # current_app.logger.debug("Token has expired") return False, "ExpiredToken- Token has expired" except BadSignature: - current_app.logger.debug("Bad Token Signature") + # current_app.logger.debug("Bad Token Signature") return False, "BadSignature- Bad Token Signature" return True, tokenised_user_id + + +async def login_required( + Authorization: str = Security(APIKeyHeader(name="Authorization")), +): + if not Authorization: + raise HTTPException(status_code=401, detail="Authorization header missing") + try: + scheme, credentials = Authorization.split() + if scheme.lower() != "token": + raise HTTPException(status_code=401, detail="Invalid authentication scheme") + try: + decoded_token = base64.b64decode(credentials).decode("ascii") + except UnicodeDecodeError: + logger.debug("Unable to decode token") + raise HTTPException(status_code=401, detail="Invalid token") + except (ValueError, UnicodeDecodeError, binascii.Error): + raise AuthenticationError("Invalid auth credentials") + valid_token, user_id = AuthenticationService.is_valid_token(decoded_token, 604800) + if not valid_token: + logger.debug("Token not valid") + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail={"Error": "Token is expired or invalid", "SubCode": "InvalidToken"}, + headers={"WWW-Authenticate": "Bearer"}, + ) + return AuthUserDTO(id=user_id) + + +async def login_required_optional( + Authorization: Optional[str] = Security( + APIKeyHeader(name="Authorization", auto_error=False) + ), +): + if not Authorization: + return None + try: + scheme, credentials = Authorization.split() + if scheme.lower() != "token": + raise HTTPException(status_code=401, detail="Invalid authentication scheme") + try: + decoded_token = base64.b64decode(credentials).decode("ascii") + except UnicodeDecodeError: + logger.debug("Unable to decode token") + raise HTTPException(status_code=401, detail="Invalid token") + except (ValueError, 
UnicodeDecodeError, binascii.Error): + raise AuthenticationError("Invalid auth credentials") + valid_token, user_id = AuthenticationService.is_valid_token(decoded_token, 604800) + if not valid_token: + logger.debug("Token not valid") + return None + return AuthUserDTO(id=user_id) + + +async def pm_only( + Authorization: str = Security(APIKeyHeader(name="Authorization")), +): + if not Authorization: + raise HTTPException(status_code=401, detail="Authorization header missing") + try: + scheme, credentials = Authorization.split() + if scheme.lower() != "token": + raise HTTPException(status_code=401, detail="Invalid authentication scheme") + try: + decoded_token = base64.b64decode(credentials).decode("ascii") + except UnicodeDecodeError: + logger.debug("Unable to decode token") + raise HTTPException(status_code=401, detail="Invalid token") + except (ValueError, UnicodeDecodeError, binascii.Error): + raise AuthenticationError("Invalid auth credentials") + valid_token, user_id = AuthenticationService.is_valid_token(decoded_token, 604800) + if not valid_token: + logger.debug("Token not valid") + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail={"Error": "Token is expired or invalid", "SubCode": "InvalidToken"}, + headers={"WWW-Authenticate": "Bearer"}, + ) diff --git a/backend/services/users/osm_service.py b/backend/services/users/osm_service.py index 33feb7ae17..0b15f0a864 100644 --- a/backend/services/users/osm_service.py +++ b/backend/services/users/osm_service.py @@ -1,15 +1,17 @@ import requests -from flask import current_app +from loguru import logger + +# # from flask import current_app from backend.models.dtos.user_dto import UserOSMDTO +from backend.config import settings class OSMServiceError(Exception): """Custom Exception to notify callers an error occurred when in the User Service""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class OSMService: @@ -20,9 +22,7 @@ def get_osm_details_for_user(user_id: int) -> UserOSMDTO: :param user_id: user_id in scope :raises OSMServiceError """ - osm_user_details_url = ( - f"{current_app.config['OSM_SERVER_URL']}/api/0.6/user/{user_id}.json" - ) + osm_user_details_url = f"{settings.OSM_SERVER_URL}/api/0.6/user/{user_id}.json" response = requests.get(osm_user_details_url) if response.status_code != 200: diff --git a/backend/services/users/user_service.py b/backend/services/users/user_service.py index 8324cd1477..e286b96656 100644 --- a/backend/services/users/user_service.py +++ b/backend/services/users/user_service.py @@ -1,40 +1,42 @@ -from cachetools import TTLCache, cached -from flask import current_app import datetime -from sqlalchemy.sql.expression import literal -from sqlalchemy import func, or_, desc, and_, distinct, cast, Time, column +from cachetools import TTLCache, cached +from databases import Database +from loguru import logger +from sqlalchemy import and_, desc, distinct, func, insert, select +from backend.config import Settings from backend.exceptions import NotFound -from backend import db +from backend.models.dtos.interests_dto import InterestDTO, InterestsListDTO from backend.models.dtos.project_dto import ProjectFavoritesDTO, ProjectSearchResultsDTO +from backend.models.dtos.stats_dto import Pagination from backend.models.dtos.user_dto import ( + UserContributionDTO, + UserCountriesContributed, + UserCountryContributed, UserDTO, - UserOSMDTO, UserFilterDTO, - UserSearchQuery, + UserOSMDTO, + UserRegisterEmailDTO, UserSearchDTO, + 
UserSearchQuery, UserStatsDTO, - UserContributionDTO, - UserRegisterEmailDTO, - UserCountryContributed, - UserCountriesContributed, + UserTaskDTOs, ) -from backend.models.dtos.interests_dto import InterestsListDTO, InterestDTO from backend.models.postgis.interests import Interest, project_interests -from backend.models.postgis.message import Message, MessageType +from backend.models.postgis.message import MessageType from backend.models.postgis.project import Project -from backend.models.postgis.user import User, UserRole, MappingLevel, UserEmail -from backend.models.postgis.task import TaskHistory, TaskAction, Task -from backend.models.dtos.user_dto import UserTaskDTOs -from backend.models.dtos.stats_dto import Pagination -from backend.models.postgis.statuses import TaskStatus, ProjectStatus -from backend.services.users.osm_service import OSMService, OSMServiceError +from backend.models.postgis.statuses import ProjectStatus, TaskStatus +from backend.models.postgis.task import Task, TaskHistory +from backend.models.postgis.user import MappingLevel, User, UserEmail, UserRole +from backend.models.postgis.utils import timestamp from backend.services.messaging.smtp_service import SMTPService from backend.services.messaging.template_service import ( get_txt_template, template_var_replacing, ) +from backend.services.users.osm_service import OSMService, OSMServiceError +settings = Settings() user_filter_cache = TTLCache(maxsize=1024, ttl=600) @@ -43,23 +45,20 @@ class UserServiceError(Exception): """Custom Exception to notify callers an error occurred when in the User Service""" def __init__(self, message): - if current_app: - current_app.logger.debug(message) + logger.debug(message) class UserService: @staticmethod - def get_user_by_id(user_id: int) -> User: - user = User.get_by_id(user_id) - + async def get_user_by_id(user_id: int, db: Database) -> User: + user = await User.get_by_id(user_id, db) if user is None: raise NotFound(sub_code="USER_NOT_FOUND", user_id=user_id) - return user @staticmethod - def get_user_by_username(username: str) -> User: - user = User.get_by_username(username) + async def get_user_by_username(username: str, db: Database) -> User: + user = await User.get_by_username(username, db) if user is None: raise NotFound(sub_code="USER_NOT_FOUND", username=username) @@ -67,32 +66,31 @@ def get_user_by_username(username: str) -> User: return user @staticmethod - def get_contributions_by_day(user_id: int): - # Validate that user exists. 
- stats = ( - TaskHistory.query.with_entities( - func.DATE(TaskHistory.action_date).label("day"), - func.count(TaskHistory.action).label("cnt"), - ) - .filter(TaskHistory.user_id == user_id) - .filter(TaskHistory.action == TaskAction.STATE_CHANGE.name) - .filter( - func.DATE(TaskHistory.action_date) - > datetime.date.today() - datetime.timedelta(days=365) - ) - .group_by("day") - .order_by(desc("day")) - ) + async def get_contributions_by_day(user_id: int, db: Database): + # Define the query using raw SQL + query = """ + SELECT + DATE(action_date) AS day, + COUNT(action) AS cnt + FROM task_history + WHERE user_id = :user_id + AND action = 'STATE_CHANGE' + AND DATE(action_date) > CURRENT_DATE - INTERVAL '1 year' + GROUP BY day + ORDER BY day DESC; + """ + results = await db.fetch_all(query=query, values={"user_id": user_id}) contributions = [ - UserContributionDTO(dict(date=str(s[0]), count=s[1])) for s in stats + UserContributionDTO(date=record["day"], count=record["cnt"]) + for record in results ] return contributions @staticmethod def get_project_managers() -> User: - users = User.query.filter(User.role == 2).all() + users = session.query(User).filter(User.role == 2).all() if users is None: raise NotFound(sub_code="USER_NOT_FOUND") @@ -101,7 +99,7 @@ def get_project_managers() -> User: @staticmethod def get_general_admins() -> User: - users = User.query.filter(User.role == 1).all() + users = session.query(User).filter(User.role == 1).all() if users is None: raise NotFound(sub_code="USER_NOT_FOUND") @@ -109,29 +107,42 @@ def get_general_admins() -> User: return users @staticmethod - def update_user(user_id: int, osm_username: str, picture_url: str) -> User: - user = UserService.get_user_by_id(user_id) + async def update_user( + user_id: int, osm_username: str, picture_url: str, db: Database + ) -> User: + user = await UserService.get_user_by_id(user_id, db) if user.username != osm_username: - user.update_username(osm_username) + await user.update_username(osm_username, db) if user.picture_url != picture_url: - user.update_picture_url(picture_url) + await user.update_picture_url(picture_url, db) return user @staticmethod - def get_projects_favorited(user_id: int) -> ProjectFavoritesDTO: - user = UserService.get_user_by_id(user_id) - projects_dto = [f.as_dto_for_admin(f.id) for f in user.favorites] + async def get_projects_favorited(user_id: int, db: Database) -> ProjectFavoritesDTO: + # Query to get the project IDs favorited by the user + project_ids_query = """ + SELECT project_id + FROM project_favorites + WHERE user_id = :user_id + """ + project_ids_rows = await db.fetch_all(project_ids_query, {"user_id": user_id}) + if not project_ids_rows: + return ProjectFavoritesDTO(favorited_projects=[]) + + projects_dto = [ + await Project.as_dto_for_admin(row["project_id"], db) + for row in project_ids_rows + ] fav_dto = ProjectFavoritesDTO() fav_dto.favorited_projects = projects_dto - return fav_dto @staticmethod - def get_projects_mapped(user_id: int): - user = UserService.get_user_by_id(user_id) + async def get_projects_mapped(user_id: int, db: Database): + user = await UserService.get_user_by_id(user_id, db) projects_mapped = user.projects_mapped # Return empty list if the user has no projects_mapped. 
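# ----------------------------------------------------------------------------
# Illustrative sketch (not part of the patch): the hunks in this file follow a
# single migration pattern -- a synchronous Flask-SQLAlchemy query is replaced
# by a parameterised raw-SQL string executed through the async `databases`
# driver that the FastAPI stack passes in as `db: Database`. A minimal,
# self-contained version of that pattern is shown below; the connection string
# is a placeholder and the helper name is illustrative, while the
# `team_members` table and its columns are taken from the hunks above.
import asyncio

from databases import Database


async def get_active_member_ids(db: Database, team_id: int) -> list[int]:
    # Named :parameters are bound via `values=`; returned rows act as mappings.
    query = """
        SELECT user_id
        FROM team_members
        WHERE team_id = :team_id AND active = TRUE
    """
    rows = await db.fetch_all(query=query, values={"team_id": team_id})
    return [row["user_id"] for row in rows]


async def main() -> None:
    db = Database("postgresql://user:password@localhost/tm_example")  # placeholder DSN
    await db.connect()
    try:
        print(await get_active_member_ids(db, team_id=1))
    finally:
        await db.disconnect()


if __name__ == "__main__":
    asyncio.run(main())
# ----------------------------------------------------------------------------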
@@ -141,66 +152,101 @@ def get_projects_mapped(user_id: int): return projects_mapped @staticmethod - def register_user(osm_id, username, changeset_count, picture_url, email): + async def register_user(osm_id, username, changeset_count, picture_url, email, db): """ Creates user in DB :param osm_id: Unique OSM user id :param username: OSM Username :param changeset_count: OSM changeset count """ - new_user = User() - new_user.id = osm_id - new_user.username = username - if picture_url is not None: - new_user.picture_url = picture_url - - intermediate_level = current_app.config["MAPPER_LEVEL_INTERMEDIATE"] - advanced_level = current_app.config["MAPPER_LEVEL_ADVANCED"] + """ + Creates user in DB + :param osm_id: Unique OSM user id + :param username: OSM Username + :param changeset_count: OSM changeset count + """ + # Determine mapping level based on changeset count + intermediate_level = settings.MAPPER_LEVEL_INTERMEDIATE + advanced_level = settings.MAPPER_LEVEL_ADVANCED if changeset_count > advanced_level: - new_user.mapping_level = MappingLevel.ADVANCED.value - elif intermediate_level < changeset_count < advanced_level: - new_user.mapping_level = MappingLevel.INTERMEDIATE.value + mapping_level = MappingLevel.ADVANCED.value + elif intermediate_level < changeset_count <= advanced_level: + mapping_level = MappingLevel.INTERMEDIATE.value else: - new_user.mapping_level = MappingLevel.BEGINNER.value - - if email is not None: - new_user.email_address = email - - new_user.create() + mapping_level = MappingLevel.BEGINNER.value + + values = { + "id": osm_id, + "username": username, + "role": 0, + "mapping_level": mapping_level, + "tasks_mapped": 0, + "tasks_validated": 0, + "tasks_invalidated": 0, + "projects_mapped": [], + "email_address": email, + "is_email_verified": False, + "is_expert": False, + "picture_url": picture_url, + "default_editor": "ID", + "mentions_notifications": True, + "projects_comments_notifications": False, + "projects_notifications": True, + "tasks_notifications": True, + "tasks_comments_notifications": False, + "teams_announcement_notifications": True, + "date_registered": datetime.datetime.utcnow(), + } + + query = insert(User).values(values) + await db.execute(query) + + user_query = select(User).where(User.id == osm_id) + new_user = await db.fetch_one(user_query) return new_user @staticmethod - def get_user_dto_by_username( - requested_username: str, logged_in_user_id: int + async def get_user_dto_by_username( + requested_username: str, logged_in_user_id: int, db: Database ) -> UserDTO: """Gets user DTO for supplied username""" - requested_user = UserService.get_user_by_username(requested_username) - logged_in_user = UserService.get_user_by_id(logged_in_user_id) - UserService.check_and_update_mapper_level(requested_user.id) + query = """ + SELECT * FROM users + WHERE username = :username + """ + result = await db.fetch_one(query, values={"username": requested_username}) + if result is None: + raise NotFound(sub_code="USER_NOT_FOUND", username=requested_username) + requested_user = User(**result) + logged_in_user = await UserService.get_user_by_id(logged_in_user_id, db) + await UserService.check_and_update_mapper_level(requested_user.id, db) return requested_user.as_dto(logged_in_user.username) @staticmethod - def get_user_dto_by_id(user: int, request_user: int) -> UserDTO: + async def get_user_dto_by_id( + user_id: int, request_user: int, db: Database + ) -> UserDTO: """Gets user DTO for supplied user id""" - user = UserService.get_user_by_id(user) + user = await 
UserService.get_user_by_id(user_id, db) if request_user: - request_username = UserService.get_user_by_id(request_user).username - return user.as_dto(request_username) + request_user = await UserService.get_user_by_id(request_user, db) + return user.as_dto(request_user.username) return user.as_dto() @staticmethod - def get_interests_stats(user_id): + async def get_interests_stats(user_id: int, db: Database): # Get all projects that the user has contributed. stmt = ( - TaskHistory.query.with_entities(TaskHistory.project_id) + select(TaskHistory.project_id) .distinct() - .filter(TaskHistory.user_id == user_id) + .where(TaskHistory.user_id == user_id) ) - interests = ( - Interest.query.with_entities( + # Prepare the query for interests + interests_query = ( + select( Interest.id, Interest.name, func.count(distinct(project_interests.c.project_id)).label( @@ -216,297 +262,302 @@ def get_interests_stats(user_id): ) .group_by(Interest.id) .order_by(desc("count_projects")) - .all() ) - interests_dto = [ - InterestDTO(dict(id=i.id, name=i.name, count_projects=i.count_projects)) - for i in interests - ] + # Execute the query + interests = await db.fetch_all(interests_query) + + # Map results to DTOs + interests_dto = [InterestDTO(**i) for i in interests] return interests_dto @staticmethod - def get_tasks_dto( + async def get_tasks_dto( user_id: int, start_date: datetime.datetime = None, end_date: datetime.datetime = None, task_status: str = None, project_status: str = None, project_id: int = None, - page=1, - page_size=10, + page: int = 1, + page_size: int = 10, sort_by: str = None, + db: Database = None, ) -> UserTaskDTOs: + # Base query to get the latest task history actions for a user base_query = ( - TaskHistory.query.with_entities( + select( TaskHistory.project_id.label("project_id"), TaskHistory.task_id.label("task_id"), func.max(TaskHistory.action_date).label("max"), ) - .filter(TaskHistory.user_id == user_id) + .where(TaskHistory.user_id == user_id) .group_by(TaskHistory.task_id, TaskHistory.project_id) ) if task_status: - base_query = base_query.filter( + base_query = base_query.where( TaskHistory.action_text == TaskStatus[task_status.upper()].name ) if start_date: - base_query = base_query.filter(TaskHistory.action_date >= start_date) + base_query = base_query.where(TaskHistory.action_date >= start_date) if end_date: - base_query = base_query.filter(TaskHistory.action_date <= end_date) + base_query = base_query.where(TaskHistory.action_date <= end_date) - user_task_dtos = UserTaskDTOs() - task_id_list = base_query.subquery() + task_id_list = base_query.alias("task_id_list") + # Query to get the number of comments per task comments_query = ( - TaskHistory.query.with_entities( + select( TaskHistory.project_id, TaskHistory.task_id, func.count(TaskHistory.action).label("count"), ) - .filter(TaskHistory.action == "COMMENT") + .where(TaskHistory.action == "COMMENT") .group_by(TaskHistory.task_id, TaskHistory.project_id) - ).subquery() + ).alias("comments_query") + # Subquery for joining comments and task IDs sq = ( - db.session.query( - func.coalesce(comments_query.c.count, 0).label("comments"), task_id_list + select( + func.coalesce(comments_query.c.count, 0).label("comments"), + task_id_list.c.project_id, + task_id_list.c.task_id, + task_id_list.c.max, ) .select_from(task_id_list) .outerjoin( comments_query, - (comments_query.c.task_id == task_id_list.c.task_id) - & (comments_query.c.project_id == task_id_list.c.project_id), + and_( + comments_query.c.task_id == task_id_list.c.task_id, + 
comments_query.c.project_id == task_id_list.c.project_id, + ), ) - .subquery() - ) + ).alias("sq") - tasks = Task.query.join( - sq, - and_( - Task.id == sq.c.task_id, - Task.project_id == sq.c.project_id, - ), + # Main task query joining with subquery + tasks_query = ( + select(Task, sq.c.max, sq.c.comments) + .select_from(Task) + .join( + sq, + and_( + Task.id == sq.c.task_id, + Task.project_id == sq.c.project_id, + ), + ) ) - tasks = tasks.add_columns(column("max"), column("comments")) if sort_by == "action_date": - tasks = tasks.order_by(sq.c.max) + tasks_query = tasks_query.order_by(sq.c.max) elif sort_by == "-action_date": - tasks = tasks.order_by(desc(sq.c.max)) + tasks_query = tasks_query.order_by(desc(sq.c.max)) elif sort_by == "project_id": - tasks = tasks.order_by(sq.c.project_id) + tasks_query = tasks_query.order_by(sq.c.project_id) elif sort_by == "-project_id": - tasks = tasks.order_by(desc(sq.c.project_id)) + tasks_query = tasks_query.order_by(desc(sq.c.project_id)) if project_status: - tasks = tasks.filter( - Task.project_id == Project.id, - Project.status == ProjectStatus[project_status.upper()].value, + tasks_query = tasks_query.where( + and_( + Task.project_id == Project.id, + Project.status == ProjectStatus[project_status.upper()].value, + ) ) if project_id: - tasks = tasks.filter_by(project_id=project_id) - - results = tasks.paginate(page=page, per_page=page_size, error_out=True) - - task_list = [] - - for task, action_date, comments in results.items: - task_list.append(task.as_dto(last_updated=action_date, comments=comments)) + tasks_query = tasks_query.where(Task.project_id == project_id) + # Pagination + offset = (page - 1) * page_size + paginated_tasks_query = tasks_query.limit(page_size).offset(offset) + + # Execute the query and fetch results + all_tasks = await db.fetch_all(tasks_query) + paginated_tasks = await db.fetch_all(paginated_tasks_query) + + # Create list of task DTOs from the results + task_list = [ + await Task.task_as_dto( + row, last_updated=row["max"], comments=row["comments"], db=db + ) + for row in paginated_tasks + ] + user_task_dtos = UserTaskDTOs() user_task_dtos.user_tasks = task_list - user_task_dtos.pagination = Pagination(results) + user_task_dtos.pagination = Pagination.from_total_count( + page=int(page), per_page=int(page_size), total=len(all_tasks) + ) return user_task_dtos @staticmethod - def get_detailed_stats(username: str): - user = UserService.get_user_by_username(username) + async def get_detailed_stats(username: str, db: Database) -> UserStatsDTO: stats_dto = UserStatsDTO() - - actions = [ - TaskStatus.VALIDATED.name, - TaskStatus.INVALIDATED.name, - TaskStatus.MAPPED.name, - ] - - actions_table = ( - db.session.query(literal(TaskStatus.VALIDATED.name).label("action_text")) - .union( - db.session.query( - literal(TaskStatus.INVALIDATED.name).label("action_text") - ), - db.session.query(literal(TaskStatus.MAPPED.name).label("action_text")), - ) - .subquery() - .alias("actions_table") - ) - - # Get only rows with the given actions. 
- filtered_actions = ( - TaskHistory.query.with_entities( - TaskHistory.user_id, - TaskHistory.project_id, - TaskHistory.task_id, - TaskHistory.action_text, - ) - .filter(TaskHistory.action_text.in_(actions)) - .subquery() - .alias("filtered_actions") - ) - - user_tasks = ( - db.session.query(filtered_actions) - .filter(filtered_actions.c.user_id == user.id) - .distinct( - filtered_actions.c.project_id, - filtered_actions.c.task_id, - filtered_actions.c.action_text, + user_query = """ + SELECT id FROM users WHERE username = :username + """ + user = await db.fetch_one(query=user_query, values={"username": username}) + if not user: + raise ValueError("User not found") + user_id = user["id"] + stats_query = """ + WITH user_actions AS ( + SELECT + action_text, + COUNT(DISTINCT (project_id, task_id)) AS action_count + FROM task_history + WHERE user_id = :user_id + AND action_text IN ('VALIDATED', 'INVALIDATED', 'MAPPED') + GROUP BY action_text + ), + others_actions AS ( + SELECT + action_text, + COUNT(DISTINCT (project_id, task_id)) AS action_count + FROM task_history th + WHERE (project_id, task_id) IN ( + SELECT project_id, task_id + FROM task_history + WHERE user_id = :user_id + ) + AND user_id != :user_id + AND action_text IN ('VALIDATED', 'INVALIDATED') + GROUP BY action_text ) - .subquery() - .alias("user_tasks") + SELECT + CAST(COALESCE(SUM(CASE WHEN u.action_text = 'VALIDATED' THEN u.action_count ELSE 0 END), 0) AS INTEGER) AS tasks_validated, + CAST(COALESCE(SUM(CASE WHEN u.action_text = 'INVALIDATED' THEN u.action_count ELSE 0 END), 0) AS INTEGER) AS tasks_invalidated, + CAST(COALESCE(SUM(CASE WHEN u.action_text = 'MAPPED' THEN u.action_count ELSE 0 END), 0) AS INTEGER) AS tasks_mapped, + CAST(COALESCE(SUM(CASE WHEN o.action_text = 'VALIDATED' THEN o.action_count ELSE 0 END), 0) AS INTEGER) AS tasks_validated_by_others, + CAST(COALESCE(SUM(CASE WHEN o.action_text = 'INVALIDATED' THEN o.action_count ELSE 0 END), 0) AS INTEGER) AS tasks_invalidated_by_others + FROM user_actions u + LEFT JOIN others_actions o + ON u.action_text = o.action_text; + """ + stats_result = await db.fetch_one( + query=stats_query, values={"user_id": user_id} ) + stats_dto.tasks_mapped = stats_result["tasks_mapped"] + stats_dto.tasks_validated = stats_result["tasks_validated"] + stats_dto.tasks_invalidated = stats_result["tasks_invalidated"] + stats_dto.tasks_validated_by_others = stats_result["tasks_validated_by_others"] + stats_dto.tasks_invalidated_by_others = stats_result[ + "tasks_invalidated_by_others" + ] - others_tasks = ( - db.session.query(filtered_actions) - .filter(filtered_actions.c.user_id != user.id) - .filter(filtered_actions.c.task_id == user_tasks.c.task_id) - .filter(filtered_actions.c.project_id == user_tasks.c.project_id) - .filter(filtered_actions.c.action_text != TaskStatus.MAPPED.name) - .distinct( - filtered_actions.c.project_id, - filtered_actions.c.task_id, - filtered_actions.c.action_text, - ) - .subquery() - .alias("others_tasks") + projects_mapped_query = """ + SELECT COUNT(DISTINCT project_id) AS projects_count + FROM task_history + WHERE user_id = :user_id AND action_text = 'MAPPED'; + """ + projects_mapped = await db.fetch_one( + query=projects_mapped_query, values={"user_id": user_id} ) + stats_dto.projects_mapped = projects_mapped["projects_count"] - user_stats = ( - db.session.query( - actions_table.c.action_text, func.count(user_tasks.c.action_text) - ) - .outerjoin( - user_tasks, actions_table.c.action_text == user_tasks.c.action_text - ) - 
.group_by(actions_table.c.action_text) + stats_dto.countries_contributed = await UserService.get_countries_contributed( + user_id, db ) - others_stats = ( - db.session.query( - func.concat(actions_table.c.action_text, "_BY_OTHERS"), - func.count(others_tasks.c.action_text), - ) - .outerjoin( - others_tasks, actions_table.c.action_text == others_tasks.c.action_text - ) - .group_by(actions_table.c.action_text) + stats_dto.contributions_by_day = await UserService.get_contributions_by_day( + user_id, db ) - res = user_stats.union(others_stats).all() - results = {key: value for key, value in res} - - projects_mapped = UserService.get_projects_mapped(user.id) - stats_dto.tasks_mapped = results["MAPPED"] - stats_dto.tasks_validated = results["VALIDATED"] - stats_dto.tasks_invalidated = results["INVALIDATED"] - stats_dto.tasks_validated_by_others = results["VALIDATED_BY_OTHERS"] - stats_dto.tasks_invalidated_by_others = results["INVALIDATED_BY_OTHERS"] - stats_dto.projects_mapped = len(projects_mapped) - stats_dto.countries_contributed = UserService.get_countries_contributed(user.id) - stats_dto.contributions_by_day = UserService.get_contributions_by_day(user.id) stats_dto.total_time_spent = 0 stats_dto.time_spent_mapping = 0 stats_dto.time_spent_validating = 0 - query = ( - TaskHistory.query.with_entities( - func.date_trunc("minute", TaskHistory.action_date).label("trn"), - func.max(TaskHistory.action_text).label("tm"), + # Total validation time + total_validation_time_query = """ + WITH max_action_text_per_minute AS ( + SELECT + date_trunc('minute', action_date) AS trn, + MAX(action_text) AS tm + FROM task_history + WHERE user_id = :user_id + AND action = 'LOCKED_FOR_VALIDATION' + GROUP BY date_trunc('minute', action_date) ) - .filter(TaskHistory.user_id == user.id) - .filter(TaskHistory.action == "LOCKED_FOR_VALIDATION") - .group_by("trn") - .subquery() + SELECT + SUM(EXTRACT(EPOCH FROM (tm || ' seconds')::interval)) AS total_time + FROM max_action_text_per_minute + """ + result = await db.fetch_one( + total_validation_time_query, values={"user_id": user.id} ) - total_validation_time = db.session.query( - func.sum(cast(func.to_timestamp(query.c.tm, "HH24:MI:SS"), Time)) - ).scalar() - - if total_validation_time: - stats_dto.time_spent_validating = total_validation_time.total_seconds() + if result and result["total_time"]: + total_validation_time = result["total_time"] + stats_dto.time_spent_validating = int(total_validation_time) stats_dto.total_time_spent += stats_dto.time_spent_validating - total_mapping_time = ( - db.session.query( - func.sum( - cast(func.to_timestamp(TaskHistory.action_text, "HH24:MI:SS"), Time) - ) - ) - .filter( - or_( - TaskHistory.action == TaskAction.LOCKED_FOR_MAPPING.name, - TaskHistory.action == TaskAction.AUTO_UNLOCKED_FOR_MAPPING.name, - ) - ) - .filter(TaskHistory.user_id == user.id) - .scalar() + # Total mapping time + total_mapping_time_query = """ + SELECT + SUM(EXTRACT(EPOCH FROM (CAST(action_text AS INTERVAL) || ' seconds')::interval)) AS total_mapping_time_seconds + FROM task_history + WHERE user_id = :user_id + AND action IN ('LOCKED_FOR_MAPPING', 'AUTO_UNLOCKED_FOR_MAPPING') + """ + result = await db.fetch_one( + total_mapping_time_query, values={"user_id": user.id} ) - - if total_mapping_time: - stats_dto.time_spent_mapping = total_mapping_time.total_seconds() + if result and result["total_mapping_time_seconds"]: + total_mapping_time = result["total_mapping_time_seconds"] + stats_dto.time_spent_mapping = int(total_mapping_time) stats_dto.total_time_spent 
+= stats_dto.time_spent_mapping - stats_dto.contributions_interest = UserService.get_interests_stats(user.id) - + stats_dto.contributions_interest = await UserService.get_interests_stats( + user["id"], db + ) return stats_dto @staticmethod - def update_user_details(user_id: int, user_dto: UserDTO) -> dict: + async def update_user_details( + user_id: int, user_dto: UserDTO, db: Database + ) -> dict: """Update user with info supplied by user, if they add or change their email address a verification mail will be sent""" - user = UserService.get_user_by_id(user_id) - + user = await UserService.get_user_by_id(user_id, db) verification_email_sent = False if ( user_dto.email_address and user.email_address != user_dto.email_address.lower() ): # Send user verification email if they are adding or changing their email address - SMTPService.send_verification_email( + await SMTPService.send_verification_email( user_dto.email_address.lower(), user.username ) - user.set_email_verified_status(is_verified=False) + await User.set_email_verified_status(user, is_verified=False, db=db) verification_email_sent = True - user.update(user_dto) - user_email = UserEmail.query.filter( - UserEmail.email == user_dto.email_address - ).one_or_none() + await User.update(user, user_dto, db) + query = select(UserEmail).filter(UserEmail.email == user_dto.email_address) + user_email = await db.fetch_one(query=query) if user_email is not None: - user_email.delete() + await UserEmail.delete(user, db) return dict(verificationEmailSent=verification_email_sent) @staticmethod - def get_all_users(query: UserSearchQuery) -> UserSearchDTO: + async def get_all_users(query: UserSearchQuery, db: Database) -> UserSearchDTO: """Gets paginated list of users""" - return User.get_all_users(query) + return await User.get_all_users(query, db) @staticmethod @cached(user_filter_cache) - def filter_users(username: str, project_id: int, page: int) -> UserFilterDTO: + async def filter_users( + username: str, project_id: int, page: int, db: Database + ) -> UserFilterDTO: """Gets paginated list of users, filtered by username, for autocomplete""" - return User.filter_users(username, project_id, page) + return await User.filter_users(username, project_id, page, db) @staticmethod - def is_user_an_admin(user_id: int) -> bool: + async def is_user_an_admin(user_id: int, db: Database) -> bool: """Is the user an admin""" - user = UserService.get_user_by_id(user_id) + user = await UserService.get_user_by_id(user_id, db) if UserRole(user.role) == UserRole.ADMIN: return True @@ -518,10 +569,9 @@ def is_user_the_project_author(user_id: int, author_id: int) -> bool: return user_id == author_id @staticmethod - def get_mapping_level(user_id: int): + async def get_mapping_level(user_id: int, db: Database): """Gets mapping level user is at""" - user = UserService.get_user_by_id(user_id) - + user = await UserService.get_user_by_id(user_id, db) return MappingLevel(user.mapping_level) @staticmethod @@ -537,141 +587,181 @@ def is_user_validator(user_id: int) -> bool: return False @staticmethod - def is_user_blocked(user_id: int) -> bool: + async def is_user_blocked(user_id: int, db: Database) -> bool: """Determines if a user is blocked""" - user = UserService.get_user_by_id(user_id) - + user = await UserService.get_user_by_id(user_id, db) if UserRole(user.role) == UserRole.READ_ONLY: return True return False @staticmethod - def get_countries_contributed(user_id: int): - query = ( - TaskHistory.query.with_entities( - func.unnest(Project.country).label("country"), - 
TaskHistory.action_text, - func.count(TaskHistory.action_text).label("count"), - ) - .filter(TaskHistory.user_id == user_id) - .filter( - TaskHistory.action_text.in_( - [ - TaskStatus.MAPPED.name, - TaskStatus.BADIMAGERY.name, - TaskStatus.VALIDATED.name, - ] - ) - ) - .group_by("country", TaskHistory.action_text) - .outerjoin(Project, Project.id == TaskHistory.project_id) - .all() - ) - countries = list(set([q.country for q in query])) - result = [] - for country in countries: - values = [q for q in query if q.country == country] - - # Filter element to sum mapped values. - mapped = sum( - [ - v.count - for v in values - if v.action_text - in [TaskStatus.MAPPED.name, TaskStatus.BADIMAGERY.name] - ] - ) - validated = sum( - [v.count for v in values if v.action_text == TaskStatus.VALIDATED.name] - ) - dto = UserCountryContributed( - dict( - name=country, - mapped=mapped, - validated=validated, - total=mapped + validated, - ) + async def get_countries_contributed(user_id: int, db: Database): + query = """ + WITH country_stats AS ( + SELECT + unnest(projects.country) AS country, + task_history.action_text, + COUNT(task_history.action_text) AS count + FROM task_history + LEFT JOIN projects ON task_history.project_id = projects.id + WHERE task_history.user_id = :user_id + AND task_history.action_text IN ('MAPPED', 'BADIMAGERY', 'VALIDATED') + GROUP BY country, task_history.action_text + ), + aggregated_stats AS ( + SELECT + country, + SUM(CASE + WHEN action_text IN ('MAPPED', 'BADIMAGERY') THEN count + ELSE 0 + END) AS mapped, + SUM(CASE + WHEN action_text = 'VALIDATED' THEN count + ELSE 0 + END) AS validated + FROM country_stats + GROUP BY country ) - result.append(dto) + SELECT + country AS name, + COALESCE(mapped, 0) AS mapped, + COALESCE(validated, 0) AS validated, + COALESCE(mapped, 0) + COALESCE(validated, 0) AS total + FROM aggregated_stats + WHERE country IS NOT NULL + ORDER BY total DESC; + """ - # Order by total - result = sorted(result, reverse=True, key=lambda i: i.total) - countries_dto = UserCountriesContributed() - countries_dto.countries_contributed = result - countries_dto.total = len(result) + results = await db.fetch_all(query=query, values={"user_id": user_id}) + countries_contributed = [UserCountryContributed(**record) for record in results] - return countries_dto + return UserCountriesContributed( + countries_contributed=countries_contributed, + total=len(countries_contributed), + ) @staticmethod - def upsert_mapped_projects(user_id: int, project_id: int, local_session=None): + async def upsert_mapped_projects(user_id: int, project_id: int, db: Database): """Add project to mapped projects if it doesn't exist, otherwise return""" - User.upsert_mapped_projects(user_id, project_id, local_session=local_session) + await User.upsert_mapped_projects(user_id, project_id, db) @staticmethod - def get_mapped_projects(user_name: str, preferred_locale: str): + async def get_mapped_projects(user_name: str, preferred_locale: str, db: Database): """Gets all projects a user has mapped or validated on""" - user = UserService.get_user_by_username(user_name) - return User.get_mapped_projects(user.id, preferred_locale) + user = await UserService.get_user_by_username(user_name, db) + return await User.get_mapped_projects(user.id, preferred_locale, db) @staticmethod - def get_recommended_projects(user_name: str, preferred_locale: str): - """Gets all projects a user has mapped or validated on""" + async def get_recommended_projects( + user_name: str, preferred_locale: str, db: Database + ) -> 
ProjectSearchResultsDTO: from backend.services.project_search_service import ProjectSearchService + """Gets all projects a user has mapped or validated on""" limit = 20 - user = ( - User.query.with_entities(User.id, User.mapping_level) - .filter(User.username == user_name) - .one_or_none() - ) - if user is None: + + # Get user details + user_query = """ + SELECT id, mapping_level + FROM users + WHERE username = :user_name + """ + user = await db.fetch_one(user_query, {"user_name": user_name}) + if not user: raise NotFound(sub_code="USER_NOT_FOUND", username=user_name) - # Get all projects that the user has contributed - sq = ( - TaskHistory.query.with_entities(TaskHistory.project_id.label("project_id")) - .distinct(TaskHistory.project_id) - .filter(TaskHistory.user_id == user.id) - .subquery() + # Get all projects the user has contributed to + contributed_projects_query = """ + SELECT DISTINCT project_id + FROM task_history + WHERE user_id = :user_id + """ + contributed_projects = await db.fetch_all( + contributed_projects_query, {"user_id": user["id"]} ) - # Get all campaigns for all contributed projects. - campaign_tags = ( - Project.query.with_entities(Project.campaign.label("tag")) - .filter(or_(Project.author_id == user.id, Project.id == sq.c.project_id)) - .subquery() + contributed_project_ids = [row["project_id"] for row in contributed_projects] + + # Fetch campaign tags for contributed or authored projects + campaign_tags_query = """ + SELECT DISTINCT c.name AS tag + FROM campaigns c + JOIN campaign_projects cp ON c.id = cp.campaign_id + WHERE cp.project_id = ANY(:project_ids) OR :user_id IN ( + SELECT p.author_id + FROM projects p + WHERE p.id = cp.project_id + ) + """ + campaign_tags = await db.fetch_all( + query=campaign_tags_query, + values={"user_id": user["id"], "project_ids": contributed_project_ids}, ) - # Get projects with given campaign tags but without user contributions. - query = ProjectSearchService.create_search_query() - projs = ( - query.filter(Project.campaign.any(campaign_tags.c.tag)).limit(limit).all() + + campaign_tags_set = {row["tag"] for row in campaign_tags} + # Get projects with matching campaign tags but exclude user contributions + recommended_projects_query = """ + SELECT DISTINCT + p.*, + o.name AS organisation_name, + o.logo AS organisation_logo + FROM projects p + LEFT JOIN organisations o ON p.organisation_id = o.id + JOIN campaign_projects cp ON p.id = cp.project_id + JOIN campaigns c ON cp.campaign_id = c.id + WHERE c.name = ANY(:campaign_tags) + AND p.author_id != :user_id + LIMIT :limit + """ + recommended_projects = await db.fetch_all( + query=recommended_projects_query, + values={ + "campaign_tags": list(campaign_tags_set), + "user_id": user["id"], + "limit": limit, + }, ) - # Get only user mapping level projects. 
- len_projs = len(projs) + # Get only projects matching the user's mapping level if needed + len_projs = len(recommended_projects) if len_projs < limit: - remaining_projs = ( - query.filter(Project.difficulty == user.mapping_level) - .limit(limit - len_projs) - .all() + remaining_projects_query = """ + SELECT DISTINCT p.*, o.name AS organisation_name, o.logo AS organisation_logo + FROM projects p + LEFT JOIN organisations o ON p.organisation_id = o.id + WHERE difficulty = :mapping_level + LIMIT :remaining_limit + """ + remaining_projects = await db.fetch_all( + remaining_projects_query, + { + "mapping_level": user["mapping_level"], + "remaining_limit": limit - len_projs, + }, ) - projs.extend(remaining_projs) + recommended_projects.extend(remaining_projects) dto = ProjectSearchResultsDTO() - # Get all total contributions for each paginated project. - contrib_counts = ProjectSearchService.get_total_contributions(projs) - - zip_items = zip(projs, contrib_counts) + project_ids = [project["id"] for project in recommended_projects] + contrib_counts = await ProjectSearchService.get_total_contributions( + project_ids, db + ) dto.results = [ - ProjectSearchService.create_result_dto(p, "en", t) for p, t in zip_items + await ProjectSearchService.create_result_dto( + project, preferred_locale, contrib_count, db + ) + for project, contrib_count in zip(recommended_projects, contrib_counts) ] + dto.pagination = None return dto @staticmethod - def add_role_to_user(admin_user_id: int, username: str, role: str): + async def add_role_to_user( + admin_user_id: int, username: str, role: str, db: Database + ): """ Add role to user :param admin_user_id: ID of admin attempting to add the role @@ -687,7 +777,7 @@ def add_role_to_user(admin_user_id: int, username: str, role: str): + f"Unknown role {role} accepted values are ADMIN, PROJECT_MANAGER, VALIDATOR" ) - admin = UserService.get_user_by_id(admin_user_id) + admin = await UserService.get_user_by_id(admin_user_id, db) admin_role = UserRole(admin.role) if admin_role != UserRole.ADMIN and requested_role == UserRole.ADMIN: @@ -695,11 +785,11 @@ def add_role_to_user(admin_user_id: int, username: str, role: str): "NeedAdminRole- You must be an Admin to assign Admin role" ) - user = UserService.get_user_by_username(username) - user.set_user_role(requested_role) + user = await UserService.get_user_by_username(username, db) + await User.set_user_role(user, requested_role, db) @staticmethod - def set_user_mapping_level(username: str, level: str) -> User: + async def set_user_mapping_level(username: str, level: str, db: Database) -> User: """ Sets the users mapping level :raises: UserServiceError @@ -712,96 +802,137 @@ def set_user_mapping_level(username: str, level: str) -> User: + f"Unknown role {level} accepted values are BEGINNER, INTERMEDIATE, ADVANCED" ) - user = UserService.get_user_by_username(username) - user.set_mapping_level(requested_level) + user = await UserService.get_user_by_username(username, db) + await User.set_mapping_level(user, requested_level, db) return user @staticmethod - def set_user_is_expert(user_id: int, is_expert: bool) -> User: + async def set_user_is_expert(user_id: int, is_expert: bool, db: Database) -> User: """ Enabled or disables expert mode for the user :raises: UserServiceError """ - user = UserService.get_user_by_id(user_id) - user.set_is_expert(is_expert) + user = await UserService.get_user_by_id(user_id, db) + await User.set_is_expert(user, is_expert, db) return user @staticmethod - def accept_license_terms(user_id: int, 
+    async def accept_license_terms(user_id: int, license_id: int, db: Database):
         """Saves the fact user has accepted license terms"""
-        user = UserService.get_user_by_id(user_id)
-        user.accept_license_terms(license_id)
-
-    @staticmethod
-    def has_user_accepted_license(user_id: int, license_id: int):
-        """Checks if user has accepted specified license"""
-        user = UserService.get_user_by_id(user_id)
-        return user.has_user_accepted_licence(license_id)
+        user = await UserService.get_user_by_id(user_id, db)
+        await user.accept_license_terms(user_id, license_id, db)
+
+    @staticmethod
+    async def has_user_accepted_license(
+        user_id: int, license_id: int, db: Database
+    ) -> bool:
+        """Checks if a user has accepted the specified license."""
+        query = """
+            SELECT EXISTS (
+                SELECT 1
+                FROM user_licenses
+                WHERE "user" = :user_id AND license = :license_id
+            )
+        """
+        result = await db.fetch_one(
+            query, values={"user_id": user_id, "license_id": license_id}
+        )
+        return result[0] if result else False

     @staticmethod
-    def get_osm_details_for_user(username: str) -> UserOSMDTO:
+    async def get_osm_details_for_user(username: str, db: Database) -> UserOSMDTO:
         """
         Gets OSM details for the user from OSM API
         :param username: username in scope
         :raises UserServiceError, NotFound
         """
-        user = UserService.get_user_by_username(username)
+        user = await UserService.get_user_by_username(username, db)
         osm_dto = OSMService.get_osm_details_for_user(user.id)
         return osm_dto

     @staticmethod
-    def check_and_update_mapper_level(user_id: int):
-        """Check users mapping level and update if they have crossed threshold"""
-        user = UserService.get_user_by_id(user_id)
+    async def check_and_update_mapper_level(user_id: int, db: Database):
+        """Check user's mapping level and update if they have crossed threshold"""
+        user = await UserService.get_user_by_id(user_id, db)
         user_level = MappingLevel(user.mapping_level)
         if user_level == MappingLevel.ADVANCED:
-            return  # User has achieved highest level, so no need to do further checking
+            return  # User has achieved the highest level, no need to proceed

-        intermediate_level = current_app.config["MAPPER_LEVEL_INTERMEDIATE"]
-        advanced_level = current_app.config["MAPPER_LEVEL_ADVANCED"]
+        # Changeset-count thresholds; assumes settings mirrors the former
+        # MAPPER_LEVEL_* Flask config keys
+        intermediate_level = settings.MAPPER_LEVEL_INTERMEDIATE
+        advanced_level = settings.MAPPER_LEVEL_ADVANCED

         try:
             osm_details = OSMService.get_osm_details_for_user(user_id)
+            update_query = """
+                UPDATE users
+                SET mapping_level = :new_level
+                WHERE id = :user_id
+            """
+
             if (
                 osm_details.changeset_count > advanced_level
-                and user.mapping_level != MappingLevel.ADVANCED.value
+                and user_level != MappingLevel.ADVANCED
             ):
-                user.mapping_level = MappingLevel.ADVANCED.value
-                UserService.notify_level_upgrade(user_id, user.username, "ADVANCED")
+                await db.execute(
+                    update_query,
+                    {"new_level": MappingLevel.ADVANCED.value, "user_id": user_id},
+                )
+                await UserService.notify_level_upgrade(
+                    user_id, user.username, "ADVANCED", db
+                )
+
             elif (
                 intermediate_level < osm_details.changeset_count < advanced_level
-                and user.mapping_level != MappingLevel.INTERMEDIATE.value
+                and user_level != MappingLevel.INTERMEDIATE
             ):
-                user.mapping_level = MappingLevel.INTERMEDIATE.value
-                UserService.notify_level_upgrade(user_id, user.username, "INTERMEDIATE")
-        except OSMServiceError:
-            # Swallow exception as we don't want to blow up the server for this
-            current_app.logger.error("Error attempting to update mapper level")
-            return
+                await db.execute(
+                    update_query,
+                    {"new_level": MappingLevel.INTERMEDIATE.value, "user_id": user_id},
+                )
+                await UserService.notify_level_upgrade(
+                    user_id, user.username, "INTERMEDIATE", db
+                )

-        user.save()
+        except OSMServiceError:
+            # Log the error and move on; don't block the process
+            logger.error(f"Error attempting to update mapper level for user {user_id}")

     @staticmethod
-    def notify_level_upgrade(user_id: int, username: str, level: str):
+    async def notify_level_upgrade(
+        user_id: int, username: str, level: str, db: Database
+    ):
         text_template = get_txt_template("level_upgrade_message_en.txt")
+
         replace_list = [
             ["[USERNAME]", username],
             ["[LEVEL]", level.capitalize()],
-            ["[ORG_CODE]", current_app.config["ORG_CODE"]],
+            ["[ORG_CODE]", settings.ORG_CODE],
         ]
         text_template = template_var_replacing(text_template, replace_list)

-        level_upgrade_message = Message()
-        level_upgrade_message.to_user_id = user_id
-        level_upgrade_message.subject = (
-            f"Congratulations🎉, You're now an {level} mapper."
+        subject = f"Congratulations🎉, You're now an {level} mapper."
+        message_type = MessageType.SYSTEM.value
+
+        insert_query = """
+            INSERT INTO messages (to_user_id, subject, message, message_type, date, read)
+            VALUES (:to_user_id, :subject, :message, :message_type, :date, :read)
+        """
+        await db.execute(
+            insert_query,
+            {
+                "to_user_id": user_id,
+                "subject": subject,
+                "message": text_template,
+                "message_type": message_type,
+                "date": timestamp(),
+                "read": False,
+            },
         )
-        level_upgrade_message.message = text_template
-        level_upgrade_message.message_type = MessageType.SYSTEM.value
-        level_upgrade_message.save()

     @staticmethod
     def refresh_mapper_level() -> int:
@@ -821,30 +952,33 @@ def refresh_mapper_level() -> int:
         return users_updated

     @staticmethod
-    def register_user_with_email(user_dto: UserRegisterEmailDTO):
+    async def register_user_with_email(user_dto: UserRegisterEmailDTO, db: Database):
         # Validate that user is not within the general users table.
         user_email = user_dto.email.lower()
-        user = User.query.filter(func.lower(User.email_address) == user_email).first()
+        query = select(User).filter(func.lower(User.email_address) == user_email)
+        user = await db.fetch_one(query)
         if user is not None:
             details_msg = f"Email address {user_email} already exists"
             raise ValueError(details_msg)

-        user = UserEmail.query.filter(
-            func.lower(UserEmail.email) == user_email
-        ).one_or_none()
+        query = select(UserEmail).filter(func.lower(UserEmail.email) == user_email)
+        user = await db.fetch_one(query)
         if user is None:
             user = UserEmail(email=user_email)
-            user.create()
+            user = await user.create(db)

         return user

     @staticmethod
-    def get_interests(user: User) -> InterestsListDTO:
-        dto = InterestsListDTO()
-        for interest in Interest.query.all():
-            int_dto = interest.as_dto()
-            if interest in user.interests:
+    async def get_interests(user: User, db: Database) -> InterestsListDTO:
+        query = """
+            SELECT * FROM interests
+        """
+        interests = await db.fetch_all(query)
+        interest_list_dto = InterestsListDTO()
+        for interest in interests:
+            int_dto = InterestDTO(**interest)
+            if interest.name in user.interests:
                 int_dto.user_selected = True
-            dto.interests.append(int_dto)
-
-        return dto
+            interest_list_dto.interests.append(int_dto)
+        return interest_list_dto
diff --git a/backend/services/validator_service.py b/backend/services/validator_service.py
index 653110b840..1165db3c85 100644
--- a/backend/services/validator_service.py
+++ b/backend/services/validator_service.py
@@ -1,50 +1,53 @@
-from flask import current_app
+import asyncio
+import datetime
+
+from databases import Database
+from fastapi import BackgroundTasks
+from loguru import logger
 from sqlalchemy import text
-from multiprocessing.dummy import Pool as ThreadPool
-from sqlalchemy.orm import scoped_session, sessionmaker
-import os
-from backend import db
+from backend.db import db_connection
 from backend.exceptions import NotFound
 from backend.models.dtos.mapping_dto import TaskDTOs
 from backend.models.dtos.stats_dto import Pagination
 from backend.models.dtos.validator_dto import (
-    LockForValidationDTO,
-    UnlockAfterValidationDTO,
-    MappedTasks,
-    StopValidationDTO,
     InvalidatedTask,
     InvalidatedTasks,
+    LockForValidationDTO,
+    MappedTasks,
     RevertUserTasksDTO,
+    StopValidationDTO,
+    UnlockAfterValidationDTO,
 )
+from backend.models.postgis.project_info import ProjectInfo
 from backend.models.postgis.statuses import ValidatingNotAllowed
 from backend.models.postgis.task import (
     Task,
-    TaskStatus,
     TaskHistory,
     TaskInvalidationHistory,
     TaskMappingIssue,
+    TaskStatus,
 )
-from backend.models.postgis.utils import UserLicenseError, timestamp
-from backend.models.postgis.project_info import ProjectInfo
+from backend.models.postgis.utils import UserLicenseError
+from backend.services.mapping_service import MappingService
 from backend.services.messaging.message_service import MessageService
-from backend.services.project_service import ProjectService, ProjectAdminService
+from backend.services.project_service import ProjectAdminService, ProjectService
 from backend.services.stats_service import StatsService
 from backend.services.users.user_service import UserService
-from backend.services.mapping_service import MappingService


 class ValidatorServiceError(Exception):
     """Custom exception to notify callers that error has occurred"""

     def __init__(self, message):
-        if current_app:
-            current_app.logger.debug(message)
+        logger.debug(message)


 class ValidatorService:
     @staticmethod
-    def lock_tasks_for_validation(validation_dto: LockForValidationDTO) -> TaskDTOs:
+    async def lock_tasks_for_validation(
+        validation_dto: LockForValidationDTO, db: Database
+    ) -> TaskDTOs:
         """
         Lock supplied tasks for validation
         :raises ValidatorServiceError
@@ -52,7 +55,7 @@ def lock_tasks_for_validation(validation_dto: LockForValidationDTO) -> TaskDTOs:
         # Loop supplied tasks to check they can all be locked for validation
         tasks_to_lock = []
         for task_id in validation_dto.task_ids:
-            task = Task.get(task_id, validation_dto.project_id)
+            task = await Task.get(task_id, validation_dto.project_id, db)

             if task is None:
                 raise NotFound(
@@ -72,8 +75,8 @@ def lock_tasks_for_validation(validation_dto: LockForValidationDTO) -> TaskDTOs:
                 raise ValidatorServiceError(
                     f"NotReadyForValidation- Task {task_id} is not MAPPED, BADIMAGERY or INVALIDATED"
                 )
-            user_can_validate = ValidatorService._user_can_validate_task(
-                validation_dto.user_id, task.mapped_by
+            user_can_validate = await ValidatorService._user_can_validate_task(
+                validation_dto.user_id, task.mapped_by, db
             )
             if not user_can_validate:
                 raise ValidatorServiceError(
@@ -83,8 +86,11 @@ def lock_tasks_for_validation(validation_dto: LockForValidationDTO) -> TaskDTOs:

             tasks_to_lock.append(task)

-        user_can_validate, error_reason = ProjectService.is_user_permitted_to_validate(
-            validation_dto.project_id, validation_dto.user_id
+        (
+            user_can_validate,
+            error_reason,
+        ) = await ProjectService.is_user_permitted_to_validate(
+            validation_dto.project_id, validation_dto.user_id, db
         )

         if not user_can_validate:
@@ -99,7 +105,9 @@ def lock_tasks_for_validation(validation_dto: LockForValidationDTO) -> TaskDTOs:
                     "ProjectNotPublished- Validation not allowed because: Project not published"
                 )
             elif error_reason == ValidatingNotAllowed.USER_ALREADY_HAS_TASK_LOCKED:
-                user_tasks = Task.get_locked_tasks_for_user(validation_dto.user_id)
+                user_tasks = await Task.get_locked_tasks_for_user(
+                    validation_dto.user_id, db
+                )
                 if set(user_tasks.locked_tasks) != set(validation_dto.task_ids):
                     raise ValidatorServiceError(
                         "UserAlreadyHasTaskLocked- User already has a task locked"
@@ -112,16 +120,26 @@ def lock_tasks_for_validation(validation_dto: LockForValidationDTO) -> TaskDTOs:
         # Lock all tasks for validation
         dtos = []
         for task in tasks_to_lock:
-            task.lock_task_for_validating(validation_dto.user_id)
-            dtos.append(task.as_dto_with_instructions(validation_dto.preferred_locale))
-
+            await Task.lock_task_for_validating(
+                task.id, validation_dto.project_id, validation_dto.user_id, db
+            )
+            dtos.append(
+                await Task.as_dto_with_instructions(
+                    task.id,
+                    validation_dto.project_id,
+                    db,
+                    validation_dto.preferred_locale,
+                )
+            )
         task_dtos = TaskDTOs()
         task_dtos.tasks = dtos

         return task_dtos

     @staticmethod
-    def _user_can_validate_task(user_id: int, mapped_by: int) -> bool:
+    async def _user_can_validate_task(
+        user_id: int, mapped_by: int, db: Database
+    ) -> bool:
         """
         check whether a user is able to validate a task.
         Users cannot validate their own tasks unless they are a PM
         (admin counts as project manager too)
@@ -129,7 +147,7 @@ def _user_can_validate_task(user_id: int, mapped_by: int) -> bool:
         :param mapped_by: id of user who mapped the task
         :return: Boolean
         """
-        is_admin = UserService.is_user_an_admin(user_id)
+        is_admin = await UserService.is_user_an_admin(user_id, db)
         if is_admin:
             return True
         else:
@@ -138,81 +156,91 @@ def _user_can_validate_task(user_id: int, mapped_by: int) -> bool:
                 return True
             return False

-    @staticmethod
-    def _process_tasks(args):
-        (
-            app_context,
-            task_to_unlock,
-            project_id,
-            validated_dto,
-            message_sent_to,
-            dtos,
-        ) = args
-        with app_context:
-            Session = scoped_session(sessionmaker(bind=db.engine))
-            local_session = Session()
+    async def process_task(project_id, task_to_unlock, validated_dto):
+        async with db_connection.database.connection() as db:
             task = task_to_unlock["task"]
-            task = (
-                local_session.query(Task)
-                .filter_by(id=task.id, project_id=project_id)
-                .one()
-            )
-
             if task_to_unlock["comment"]:
                 # Parses comment to see if any users have been @'d
-                MessageService.send_message_after_comment(
+                await MessageService.send_message_after_comment(
                     validated_dto.user_id,
                     task_to_unlock["comment"],
                     task.id,
                     validated_dto.project_id,
+                    db,
                 )
             if (
                 task_to_unlock["new_state"] == TaskStatus.VALIDATED
                 or task_to_unlock["new_state"] == TaskStatus.INVALIDATED
             ):
-                # All mappers get a notification if their task has been validated or invalidated.
-                # Only once if multiple tasks mapped
-                if task.mapped_by not in message_sent_to:
-                    MessageService.send_message_after_validation(
-                        task_to_unlock["new_state"],
-                        validated_dto.user_id,
-                        task.mapped_by,
-                        task.id,
-                        validated_dto.project_id,
-                    )
-                    message_sent_to.append(task.mapped_by)
+                await MessageService.send_message_after_validation(
+                    task_to_unlock["new_state"],
+                    validated_dto.user_id,
+                    task.mapped_by,
+                    task.id,
+                    validated_dto.project_id,
+                    db,
+                )

+            # Set last_validation_date for the mapper to current date
             if task_to_unlock["new_state"] == TaskStatus.VALIDATED:
-                # Set last_validation_date for the mapper to current date
-                task.mapper.last_validation_date = timestamp()
+                query = """
+                    UPDATE users
+                    SET last_validation_date = :timestamp
+                    WHERE id = (
+                        SELECT mapped_by
+                        FROM tasks
+                        WHERE id = :task_id
+                        AND project_id = :project_id
+                    );
+                """
+                values = {
+                    "timestamp": datetime.datetime.utcnow(),
+                    "task_id": task.id,
+                    "project_id": validated_dto.project_id,
+                }
+                await db.execute(query=query, values=values)

             # Update stats if user setting task to a different state from previous state
-            prev_status = TaskHistory.get_last_status(project_id, task.id)
+            prev_status = await TaskHistory.get_last_status(project_id, task.id, db)
             if prev_status != task_to_unlock["new_state"]:
-                StatsService.update_stats_after_task_state_change(
+                await StatsService.update_stats_after_task_state_change(
                     validated_dto.project_id,
                     validated_dto.user_id,
                    prev_status,
                     task_to_unlock["new_state"],
-                    local_session=local_session,
+                    db,
                 )
-            task_mapping_issues = ValidatorService.get_task_mapping_issues(
+            task_mapping_issues = await ValidatorService.get_task_mapping_issues(
                 task_to_unlock
             )
-            task.unlock_task(
-                validated_dto.user_id,
-                task_to_unlock["new_state"],
-                task_to_unlock["comment"],
+            await Task.unlock_task(
+                task_id=task.id,
+                project_id=project_id,
+                user_id=validated_dto.user_id,
+                new_state=task_to_unlock["new_state"],
+                db=db,
+                comment=task_to_unlock["comment"],
                 issues=task_mapping_issues,
-                local_session=local_session,
             )
-            dtos.append(task.as_dto_with_instructions(validated_dto.preferred_locale))
-            local_session.commit()
-        Session.remove()
+
+            return await Task.as_dto_with_instructions(
+                task.id, project_id, db, validated_dto.preferred_locale
+            )
+
+    async def process_tasks_concurrently(project_id, tasks_to_unlock, validated_dto):
+        """
+        Process tasks concurrently and ensure each task gets its own DB connection.
+        """
+        tasks = [
+            ValidatorService.process_task(project_id, task_to_unlock, validated_dto)
+            for task_to_unlock in tasks_to_unlock
+        ]
+        return await asyncio.gather(*tasks)

     @staticmethod
-    def unlock_tasks_after_validation(
+    async def unlock_tasks_after_validation(
         validated_dto: UnlockAfterValidationDTO,
+        db: Database,
+        background_tasks: BackgroundTasks,
     ) -> TaskDTOs:
         """
         Unlocks supplied tasks after validation
@@ -221,42 +249,24 @@ def unlock_tasks_after_validation(
         validated_tasks = validated_dto.validated_tasks
         project_id = validated_dto.project_id
         user_id = validated_dto.user_id
-        tasks_to_unlock = ValidatorService.get_tasks_locked_by_user(
-            project_id, validated_tasks, user_id
+        tasks_to_unlock = await ValidatorService.get_tasks_locked_by_user(
+            project_id, validated_tasks, user_id, db
+        )
+        results = await ValidatorService.process_tasks_concurrently(
+            project_id, tasks_to_unlock, validated_dto
+        )
+        background_tasks.add_task(
+            ProjectService.send_email_on_project_progress,
+            validated_dto.project_id,
         )
-
-        # Unlock all tasks
-        dtos = []
-        message_sent_to = []
-        args_list = []
-        for task_to_unlock in tasks_to_unlock:
-            args = (
-                current_app.app_context(),
-                task_to_unlock,
-                project_id,
-                validated_dto,
-                message_sent_to,
-                dtos,
-            )
-            args_list.append(args)
-
-        # Create a pool and Process the tasks in parallel
-        pool = ThreadPool(os.cpu_count())
-        pool.map(ValidatorService._process_tasks, args_list)
-
-        # Close the pool and wait for the threads to finish
-        pool.close()
-        pool.join()
-
-        # Send email on project progress
-        ProjectService.send_email_on_project_progress(validated_dto.project_id)
         task_dtos = TaskDTOs()
-        task_dtos.tasks = dtos
-
+        task_dtos.tasks = results
         return task_dtos

     @staticmethod
-    def stop_validating_tasks(stop_validating_dto: StopValidationDTO) -> TaskDTOs:
+    async def stop_validating_tasks(
+        stop_validating_dto: StopValidationDTO, db: Database
+    ) -> TaskDTOs:
         """
         Unlocks supplied tasks after validation
         :raises ValidatorServiceError
@@ -264,46 +274,53 @@ def stop_validating_tasks(stop_validating_dto: StopValidationDTO) -> TaskDTOs:
         reset_tasks = stop_validating_dto.reset_tasks
         project_id = stop_validating_dto.project_id
         user_id = stop_validating_dto.user_id
-        tasks_to_unlock = ValidatorService.get_tasks_locked_by_user(
-            project_id, reset_tasks, user_id
+        tasks_to_unlock = await ValidatorService.get_tasks_locked_by_user(
+            project_id, reset_tasks, user_id, db
         )
-
         dtos = []
         for task_to_unlock in tasks_to_unlock:
             task = task_to_unlock["task"]
-
             if task_to_unlock["comment"]:
                 # Parses comment to see if any users have been @'d
-                MessageService.send_message_after_comment(
-                    user_id, task_to_unlock["comment"], task.id, project_id
+                await MessageService.send_message_after_comment(
+                    user_id, task_to_unlock["comment"], task.id, project_id, db
                 )
-
-            task.reset_lock(user_id, task_to_unlock["comment"])
+            await Task.reset_lock(
+                task.id,
+                project_id,
+                task.task_status,
+                user_id,
+                task_to_unlock["comment"],
+                db,
+            )
             dtos.append(
-                task.as_dto_with_instructions(stop_validating_dto.preferred_locale)
+                await Task.as_dto_with_instructions(
+                    task.id, project_id, db, stop_validating_dto.preferred_locale
+                )
             )
-
         task_dtos = TaskDTOs()
         task_dtos.tasks = dtos
-
         return task_dtos

     @staticmethod
-    def get_tasks_locked_by_user(project_id: int, unlock_tasks, user_id: int):
+    async def get_tasks_locked_by_user(
+        project_id: int, unlock_tasks: list, user_id: int, db: Database
+    ):
         """
         Returns tasks specified by project id and unlock_tasks list if found and locked for validation by user,
-        otherwise raises ValidatorServiceError, NotFound
-        :param project_id:
-        :param unlock_tasks: List of tasks to be unlocked
-        :param user_id:
-        :return: List of Tasks
-        :raises ValidatorServiceError
-        :raises NotFound
+        otherwise raises ValidatorServiceError, NotFound.
+
+        :param project_id: ID of the project.
+        :param unlock_tasks: List of tasks to be unlocked.
+        :param user_id: ID of the user attempting to unlock tasks.
+        :param db: Async database connection.
+        :return: List of tasks to unlock with new states and comments.
+        :raises ValidatorServiceError: When task is not locked for validation or owned by another user.
+        :raises NotFound: When task is not found.
         """
         tasks_to_unlock = []
-        # Loop supplied tasks to check they can all be unlocked
         for unlock_task in unlock_tasks:
-            task = Task.get(unlock_task.task_id, project_id)
+            task = await Task.get(unlock_task.task_id, project_id, db)

             if task is None:
                 raise NotFound(
@@ -315,19 +332,18 @@ def get_tasks_locked_by_user(project_id: int, unlock_tasks, user_id: int):
             current_state = TaskStatus(task.task_status)
             if current_state != TaskStatus.LOCKED_FOR_VALIDATION:
                 raise ValidatorServiceError(
-                    f"NotLockedForValidation- Task {unlock_task.task_id} is not LOCKED_FOR_VALIDATION"
+                    f"NotLockedForValidation - Task {unlock_task.task_id} is not LOCKED_FOR_VALIDATION"
                 )
-
             if task.locked_by != user_id:
                 raise ValidatorServiceError(
-                    "TaskNotOwned- Attempting to unlock a task owned by another user"
+                    "TaskNotOwned - Attempting to unlock a task owned by another user"
                 )
-
-            if hasattr(unlock_task, "status"):
-                # we know what status we ate going to be setting to on unlock
-                new_status = TaskStatus[unlock_task.status]
-            else:
-                new_status = None
+            new_status = (
+                TaskStatus[unlock_task.status]
+                if hasattr(unlock_task, "status")
+                else None
+            )

             tasks_to_unlock.append(
                 dict(
@@ -397,51 +413,105 @@ def get_user_invalidated_tasks(
         return invalidated_tasks_dto

     @staticmethod
-    def invalidate_all_tasks(project_id: int, user_id: int):
-        """Invalidates all validated tasks on a project"""
-        validated_tasks = Task.query.filter(
-            Task.project_id == project_id,
-            Task.task_status == TaskStatus.VALIDATED.value,
-        ).all()
+    async def invalidate_all_tasks(project_id: int, user_id: int, db: Database):
+        """Invalidates all validated tasks on a project."""
+        query = """
+            SELECT id, task_status FROM tasks
+            WHERE project_id = :project_id
+            AND task_status = :validated_status
+        """
+        validated_tasks = await db.fetch_all(
+            query=query,
+            values={
+                "project_id": project_id,
+                "validated_status": TaskStatus.VALIDATED.value,
+            },
+        )

         for task in validated_tasks:
-            task.lock_task_for_validating(user_id)
-            task.unlock_task(user_id, new_state=TaskStatus.INVALIDATED)
+            await Task.lock_task_for_validating(task["id"], project_id, user_id, db)
+            await Task.unlock_task(
+                task["id"], project_id, user_id, TaskStatus.INVALIDATED, db
+            )

-        # Reset counters
-        project = ProjectService.get_project_by_id(project_id)
-        project.tasks_validated = 0
-        project.save()
+        # Reset counters for the project
+        project_query = """
+            UPDATE projects
+            SET tasks_validated = 0
+            WHERE id = :project_id
+        """
+        await db.execute(query=project_query, values={"project_id": project_id})

     @staticmethod
-    def validate_all_tasks(project_id: int, user_id: int):
-        """Validates all mapped tasks on a project"""
-        tasks_to_validate = Task.query.filter(
-            Task.project_id == project_id,
-            Task.task_status == TaskStatus.MAPPED.value,
-        ).all()
+    async def validate_all_tasks(project_id: int, user_id: int, db: Database):
+        """Validates all mapped tasks on a project using raw SQL queries"""
+
+        # Fetch tasks that are in the MAPPED state
+        query = """
+            SELECT id, task_status, mapped_by
+            FROM tasks
+            WHERE project_id = :project_id
+            AND task_status = :mapped_status
+        """
+        tasks_to_validate = await db.fetch_all(
+            query=query,
+            values={
+                "project_id": project_id,
+                "mapped_status": TaskStatus.MAPPED.value,
+            },
+        )

         for task in tasks_to_validate:
-            task.mapped_by = task.mapped_by or user_id  # Ensure we set mapped by value
-            if TaskStatus(task.task_status) not in [
+            task_id = task["id"]
+            mapped_by = (
+                task["mapped_by"] or user_id
+            )  # Ensure we set the 'mapped_by' value
+
+            # Lock the task for validation if it's not already locked
+            current_status = TaskStatus(task["task_status"])
+            if current_status not in [
                 TaskStatus.LOCKED_FOR_MAPPING,
                 TaskStatus.LOCKED_FOR_VALIDATION,
             ]:
-                # Only lock tasks that are not already locked to avoid double lock issue
-                task.lock_task_for_validating(user_id)
+                await Task.lock_task_for_validating(task_id, project_id, user_id, db)
+
+            # Unlock the task and set its status to VALIDATED
+            await Task.unlock_task(
+                task_id=task_id,
+                project_id=project_id,
+                user_id=user_id,
+                new_state=TaskStatus.VALIDATED,
+                db=db,
+            )

-            task.unlock_task(user_id, new_state=TaskStatus.VALIDATED)
+            # Update the mapped_by field if necessary
+            update_mapped_by_query = """
+                UPDATE tasks
+                SET mapped_by = :mapped_by
+                WHERE id = :task_id
+                AND project_id = :project_id
+            """
+            await db.execute(
+                query=update_mapped_by_query,
+                values={
+                    "mapped_by": mapped_by,
+                    "task_id": task_id,
+                    "project_id": project_id,
+                },
+            )

-        # Set counters to fully mapped and validated
-        project = ProjectService.get_project_by_id(project_id)
-        project.tasks_validated += project.tasks_mapped
-        project.tasks_mapped = 0
-        project.save()
+        # Update the project's task counters using raw SQL
+        project_update_query = """
+            UPDATE projects
+            SET tasks_validated = tasks_validated + tasks_mapped,
+                tasks_mapped = 0
+            WHERE id = :project_id
+        """
+        await db.execute(query=project_update_query, values={"project_id": project_id})

     @staticmethod
-    def get_task_mapping_issues(task_to_unlock: dict):
+    async def get_task_mapping_issues(task_to_unlock: dict):
         if task_to_unlock["issues"] is None:
             return None
-
         # map ValidationMappingIssue DTOs to TaskMappingIssue instances for any issues
         # that have count above zero.
         return list(
@@ -456,29 +526,37 @@
         )

     @staticmethod
-    def revert_user_tasks(revert_dto: RevertUserTasksDTO):
+    async def revert_user_tasks(revert_dto: RevertUserTasksDTO, db: Database):
         """
-        Reverts tasks with supplied action to previous state by specific user
+        Reverts tasks with the supplied action to the previous state by a specific user.
         :raises ValidatorServiceError
         """
-        if ProjectAdminService.is_user_action_permitted_on_project(
-            revert_dto.action_by, revert_dto.project_id
+        if await ProjectAdminService.is_user_action_permitted_on_project(
+            revert_dto.action_by, revert_dto.project_id, db
         ):
-            query = Task.query.filter(
-                Task.project_id == revert_dto.project_id,
-                Task.task_status == TaskStatus[revert_dto.action].value,
-            )
+            query = """
+                SELECT id
+                FROM tasks
+                WHERE project_id = :project_id
+                AND task_status = :task_status
+            """
+            values = {
+                "project_id": revert_dto.project_id,
+                "task_status": TaskStatus[revert_dto.action].value,
+            }
             if TaskStatus[revert_dto.action].value == TaskStatus.BADIMAGERY.value:
-                query = query.filter(Task.mapped_by == revert_dto.user_id)
+                query += " AND mapped_by = :user_id"
+                values["user_id"] = revert_dto.user_id
             else:
-                query = query.filter(Task.validated_by == revert_dto.user_id)
-
-            tasks_to_revert = query.all()
+                query += " AND validated_by = :user_id"
+                values["user_id"] = revert_dto.user_id
+            tasks_to_revert = await db.fetch_all(query=query, values=values)
             for task in tasks_to_revert:
-                task = MappingService.undo_mapping(
+                await MappingService.undo_mapping(
                     revert_dto.project_id,
-                    task.id,
+                    task["id"],
                     revert_dto.user_id,
+                    db,
                     revert_dto.preferred_locale,
                 )
         else:
diff --git a/docker-compose.yml b/docker-compose.yml
index e8f0c294f3..3818c9b7ef 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -17,8 +17,8 @@ services:
       - tasking-manager.env
     restart: unless-stopped
     healthcheck:
-      test: psql -h 0.0.0.0 -U ${POSTGRES_USER:-tm} -d ${POSTGRES_DB:-tasking-manager} -c 'SELECT 1;'
-      start_period: 5s
+      test: pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}
+      start_period: 35s
       interval: 10s
       timeout: 5s
      retries: 3
@@ -29,6 +29,7 @@ services:
     image: ghcr.io/hotosm/tasking-manager/backend:main
     build:
       context: .
+      target: ${TARGET_TAG:-prod}
     depends_on:
       tm-db:
         condition: service_healthy
@@ -36,16 +37,12 @@ services:
         condition: service_completed_successfully
     env_file:
       - tasking-manager.env
-    volumes:
-      - ./pyproject.toml:/usr/src/app/pyproject.toml:ro
-      - ./backend:/usr/src/app/backend:ro
-      - ./tests:/usr/src/app/tests:ro
-      - ./migrations:/src/migrations
     restart: unless-stopped
     healthcheck:
       test: curl --fail http://localhost:5000 || exit 1
       interval: 10s
       retries: 5
+      start_period: 10s
       timeout: 3s
     deploy:
       replicas: ${API_REPLICAS:-1}
@@ -66,18 +63,14 @@ services:
     image: ghcr.io/hotosm/tasking-manager/backend:main
     build:
       context: .
-    entrypoint: ["python", "manage.py", "db"]
-    command: "upgrade"
+    entrypoint: ["alembic", "-c", "migrations/alembic.ini", "upgrade", "head"]
     depends_on:
       tm-db:
         condition: service_healthy
     env_file:
       - tasking-manager.env
-    volumes:
-      - ./pyproject.toml:/usr/src/app/pyproject.toml:ro
-      - ./backend:/usr/src/app/backend:ro
-      - ./migrations:/usr/src/app/migrations:ro
     deploy:
+      replicas: ${API_REPLICAS:-1}
       resources:
         limits:
           cpus: "1"
@@ -88,18 +81,6 @@ services:
     networks:
       - tm-net

-  swagger:
-    image: swaggerapi/swagger-ui:v5.11.10
-    restart: always
-    environment:
-      - BASE_URL=/docs
-      - SWAGGER_JSON_URL=http://127.0.0.1:${TM_DEV_PORT:-3000}/api/v2/system/docs/json/
-    labels:
-      - traefik.http.routers.swagger.rule=(Host(`127.0.0.1`) || Host(`localhost`)) && PathPrefix(`/docs/`)
-      - traefik.http.services.swagger.loadbalancer.server.port=8080
-    networks:
-      - tm-net
-
   traefik:
     image: traefik:v2.10
     restart: always
@@ -112,16 +93,3 @@ services:
       - --providers.docker=true
     networks:
       - tm-net
-
-  tm-frontend:
-    image: ghcr.io/hotosm/tasking-manager/frontend:main
-    build:
-      context: .
-      dockerfile: "./scripts/docker/Dockerfile.frontend_development"
-    env_file:
-      - tasking-manager.env
-    labels:
-      - traefik.http.routers.frontend.rule=Host(`127.0.0.1`) || Host(`localhost`)
-      - traefik.http.services.frontend.loadbalancer.server.port=3000
-    networks:
-      - tm-net
diff --git a/docs/LICENSE.md b/docs/LICENSE.md
index 7eabdb1c27..a198ac4132 120000
--- a/docs/LICENSE.md
+++ b/docs/LICENSE.md
@@ -1 +1 @@
-../LICENSE.md
\ No newline at end of file
+../LICENSE.md
diff --git a/manage.py b/manage.py
index acd5cae448..8a2cb4b27d 100644
--- a/manage.py
+++ b/manage.py
@@ -1,23 +1,22 @@
-import os
-import warnings
+import atexit
 import base64
 import csv
 import datetime
+import os
+import warnings
+
 import click
-from flask_migrate import Migrate
+from apscheduler.schedulers.background import BackgroundScheduler
 from dotenv import load_dotenv
+from flask_migrate import Migrate
+from sqlalchemy import func

-from backend import create_app, initialise_counters, db
+from backend import create_app, db, initialise_counters
+from backend.models.postgis.task import Task, TaskHistory
+from backend.services.interests_service import InterestService
+from backend.services.stats_service import StatsService
 from backend.services.users.authentication_service import AuthenticationService
 from backend.services.users.user_service import UserService
-from backend.services.stats_service import StatsService
-from backend.services.interests_service import InterestService
-from backend.models.postgis.task import Task, TaskHistory
-
-from sqlalchemy import func
-import atexit
-from apscheduler.schedulers.background import BackgroundScheduler
-
 # Load configuration from file into environment
 load_dotenv(os.path.join(os.path.dirname(__file__), "tasking-manager.env"))
@@ -137,8 +136,9 @@ def update_project_categories(filename):
     # This is compatibility code with previous releases
     # People should generally prefer `flask `.
     from sys import argv
-    from flask.cli import FlaskGroup
+    from click import Command
+    from flask.cli import FlaskGroup

     cli = FlaskGroup(create_app=lambda: application)
     cli.add_command(
diff --git a/migrations/alembic.ini b/migrations/alembic.ini
index 59e0ae6bf6..daac5be826 100644
--- a/migrations/alembic.ini
+++ b/migrations/alembic.ini
@@ -7,7 +7,7 @@
 # set to 'true' to run the environment during
 # the 'revision' command, regardless of autogenerate
 # revision_environment = false
-script_location = /src/migrations
+script_location = /usr/src/app/migrations

 # Custom param that enables us to specify tables to ignore when determining migrations
 [alembic:exclude]
diff --git a/migrations/env.py b/migrations/env.py
index 9a62c50755..45eee50975 100644
--- a/migrations/env.py
+++ b/migrations/env.py
@@ -1,11 +1,22 @@
-from __future__ import with_statement
-from alembic import context
-from sqlalchemy import engine_from_config, pool
+import os
+import sys
+
+project_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))
+sys.path.append(project_dir)
+
 from geoalchemy2 import alembic_helpers
-from logging.config import fileConfig
-from flask import current_app
 import logging
+import asyncio
+from logging.config import fileConfig
+
+from alembic import context
+from backend.db import Base
+from backend.config import settings
+from asyncpg import Connection
+from sqlalchemy import pool
+from sqlalchemy.ext.asyncio import async_engine_from_config
+
 # this is the Alembic Config object, which provides
 # access to the values within the .ini file in use.
 config = context.config
@@ -18,12 +29,17 @@
 # add your model's MetaData object here
 # for 'autogenerate' support
 # from myapp import mymodel
-# target_metadata = mymodel.Base.metadata
+target_metadata = Base.metadata

 config.set_main_option(
-    "sqlalchemy.url", current_app.config.get("SQLALCHEMY_DATABASE_URI")
+    "sqlalchemy.url", settings.SQLALCHEMY_DATABASE_URI.unicode_string()
 )
-target_metadata = current_app.extensions["migrate"].db.metadata
+
+
+# target_metadata = current_app.extensions["migrate"].db.metadata
+def get_url():
+    return settings.SQLALCHEMY_DATABASE_URI.unicode_string()
+

 # other values from the config, defined by the needs of env.py,
 # can be acquired:
@@ -59,9 +75,10 @@ def run_migrations_offline():
     script output.

     """
-    url = config.get_main_option("sqlalchemy.url")
+    url = get_url()
     context.configure(
         url=url,
+        target_metadata=target_metadata,
         include_object=include_object,
         process_revision_directives=alembic_helpers.writer,
         render_item=alembic_helpers.render_item,
@@ -71,7 +88,14 @@ def run_migrations_offline():
         context.run_migrations()


-def run_migrations_online():
+def do_run_migrations(connection: Connection) -> None:
+    context.configure(connection=connection, target_metadata=target_metadata)
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+async def run_migrations_online():
     """Run migrations in 'online' mode.
     In this scenario we need to create an Engine
@@ -90,30 +114,21 @@ def process_revision_directives(context, revision, directives):
             directives[:] = []
             logger.info("No changes in schema detected.")

-    engine = engine_from_config(
-        config.get_section(config.config_ini_section),
+    configuration = config.get_section(config.config_ini_section)
+    configuration["sqlalchemy.url"] = get_url()
+    connectable = async_engine_from_config(
+        configuration,
         prefix="sqlalchemy.",
         poolclass=pool.NullPool,
     )

-    connection = engine.connect()
-    context.configure(
-        connection=connection,
-        target_metadata=target_metadata,
-        process_revision_directives=process_revision_directives,
-        include_object=include_object,
-        render_item=alembic_helpers.render_item,
-        **current_app.extensions["migrate"].configure_args,
-    )
+    async with connectable.connect() as connection:
+        await connection.run_sync(do_run_migrations)

-    try:
-        with context.begin_transaction():
-            context.run_migrations()
-    finally:
-        connection.close()
+    await connectable.dispose()


 if context.is_offline_mode():
     run_migrations_offline()
 else:
-    run_migrations_online()
+    asyncio.run(run_migrations_online())
diff --git a/migrations/versions/451f6bd05a19_.py b/migrations/versions/451f6bd05a19_.py
index 55cc343a6e..c4a0d2a0d0 100644
--- a/migrations/versions/451f6bd05a19_.py
+++ b/migrations/versions/451f6bd05a19_.py
@@ -16,9 +16,9 @@ def upgrade():
+    op.execute("DROP TRIGGER IF EXISTS tsvectorupdate ON project_info;")
     op.execute(
         """
-        DROP TRIGGER IF EXISTS tsvectorupdate ON project_info;
         CREATE TRIGGER tsvectorupdate BEFORE INSERT OR UPDATE
         ON project_info
         FOR EACH ROW EXECUTE PROCEDURE
        tsvector_update_trigger(text_searchable, "pg_catalog.english", project_id_str, short_description, description)
         """
diff --git a/pdm.lock b/pdm.lock
index 2a1f53d365..569144b424 100644
--- a/pdm.lock
+++ b/pdm.lock
@@ -3,12 +3,19 @@

 [metadata]
 groups = ["default", "dev", "lint", "test"]
-strategy = ["cross_platform", "inherit_metadata"]
-lock_version = "4.5.0"
-content_hash = "sha256:cc71db9fff34bb60941d621046a2b8b25eba61a67353e9ae05f0416376cc56d8"
+strategy = []
+lock_version = "4.4.1"
+content_hash = "sha256:0c3f5bf9e08b44fa7b24a65dc72716d4f297478e41d98d4de907c0c2ec9a823d"

-[[metadata.targets]]
-requires_python = ">=3.9,<=3.11"
+[[package]]
+name = "aiosmtplib"
+version = "2.0.2"
+requires_python = ">=3.7,<4.0"
+summary = "asyncio SMTP client"
+files = [
+    {file = "aiosmtplib-2.0.2-py3-none-any.whl", hash = "sha256:1e631a7a3936d3e11c6a144fb8ffd94bb4a99b714f2cb433e825d88b698e37bc"},
+    {file = "aiosmtplib-2.0.2.tar.gz", hash = "sha256:138599a3227605d29a9081b646415e9e793796ca05322a78f69179f0135016a3"},
+]

 [[package]]
 name = "alembic"
@@ -19,8 +26,6 @@ groups = ["default"]
 dependencies = [
     "Mako",
     "SQLAlchemy>=1.3.0",
-    "importlib-metadata; python_version < \"3.9\"",
-    "importlib-resources; python_version < \"3.9\"",
     "typing-extensions>=4",
 ]
 files = [
@@ -29,13 +34,29 @@ files = [
 ]

 [[package]]
-name = "aniso8601"
-version = "9.0.1"
-summary = "A library for parsing ISO 8601 strings."
-groups = ["default"] +name = "annotated-types" +version = "0.6.0" +requires_python = ">=3.8" +summary = "Reusable constraint types to use with typing.Annotated" +files = [ + {file = "annotated_types-0.6.0-py3-none-any.whl", hash = "sha256:0641064de18ba7a25dee8f96403ebc39113d0cb953a01429249d5c7564666a43"}, + {file = "annotated_types-0.6.0.tar.gz", hash = "sha256:563339e807e53ffd9c267e99fc6d9ea23eb8443c08f112651963e24e22f84a5d"}, +] + +[[package]] +name = "anyio" +version = "4.3.0" +requires_python = ">=3.8" +summary = "High level compatibility layer for multiple asynchronous event loop implementations" +dependencies = [ + "exceptiongroup>=1.0.2; python_version < \"3.11\"", + "idna>=2.8", + "sniffio>=1.1", + "typing-extensions>=4.1; python_version < \"3.11\"", +] files = [ - {file = "aniso8601-9.0.1-py2.py3-none-any.whl", hash = "sha256:1d2b7ef82963909e93c4f24ce48d4de9e66009a21bf1c1e1c85bdd0812fe412f"}, - {file = "aniso8601-9.0.1.tar.gz", hash = "sha256:72e3117667eedf66951bb2d93f4296a56b94b078a8a95905a052611fb3f1b973"}, + {file = "anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8"}, + {file = "anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6"}, ] [[package]] @@ -55,6 +76,52 @@ files = [ {file = "APScheduler-3.10.1.tar.gz", hash = "sha256:0293937d8f6051a0f493359440c1a1b93e882c57daf0197afeff0e727777b96e"}, ] +[[package]] +name = "async-timeout" +version = "4.0.3" +requires_python = ">=3.7" +summary = "Timeout context manager for asyncio programs" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "asyncpg" +version = "0.29.0" +requires_python = ">=3.8.0" +summary = "An asyncio PostgreSQL driver" +dependencies = [ + "async-timeout>=4.0.3; python_version < \"3.12.0\"", +] +files = [ + {file = "asyncpg-0.29.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72fd0ef9f00aeed37179c62282a3d14262dbbafb74ec0ba16e1b1864d8a12169"}, + {file = "asyncpg-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52e8f8f9ff6e21f9b39ca9f8e3e33a5fcdceaf5667a8c5c32bee158e313be385"}, + {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9e6823a7012be8b68301342ba33b4740e5a166f6bbda0aee32bc01638491a22"}, + {file = "asyncpg-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:746e80d83ad5d5464cfbf94315eb6744222ab00aa4e522b704322fb182b83610"}, + {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ff8e8109cd6a46ff852a5e6bab8b0a047d7ea42fcb7ca5ae6eaae97d8eacf397"}, + {file = "asyncpg-0.29.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97eb024685b1d7e72b1972863de527c11ff87960837919dac6e34754768098eb"}, + {file = "asyncpg-0.29.0-cp310-cp310-win32.whl", hash = "sha256:5bbb7f2cafd8d1fa3e65431833de2642f4b2124be61a449fa064e1a08d27e449"}, + {file = "asyncpg-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:76c3ac6530904838a4b650b2880f8e7af938ee049e769ec2fba7cd66469d7772"}, + {file = "asyncpg-0.29.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4900ee08e85af01adb207519bb4e14b1cae8fd21e0ccf80fac6aa60b6da37b4"}, + {file = "asyncpg-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a65c1dcd820d5aea7c7d82a3fdcb70e096f8f70d1a8bf93eb458e49bfad036ac"}, + 
{file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b52e46f165585fd6af4863f268566668407c76b2c72d366bb8b522fa66f1870"}, + {file = "asyncpg-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc600ee8ef3dd38b8d67421359779f8ccec30b463e7aec7ed481c8346decf99f"}, + {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:039a261af4f38f949095e1e780bae84a25ffe3e370175193174eb08d3cecab23"}, + {file = "asyncpg-0.29.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6feaf2d8f9138d190e5ec4390c1715c3e87b37715cd69b2c3dfca616134efd2b"}, + {file = "asyncpg-0.29.0-cp311-cp311-win32.whl", hash = "sha256:1e186427c88225ef730555f5fdda6c1812daa884064bfe6bc462fd3a71c4b675"}, + {file = "asyncpg-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:cfe73ffae35f518cfd6e4e5f5abb2618ceb5ef02a2365ce64f132601000587d3"}, + {file = "asyncpg-0.29.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5340dd515d7e52f4c11ada32171d87c05570479dc01dc66d03ee3e150fb695da"}, + {file = "asyncpg-0.29.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e17b52c6cf83e170d3d865571ba574577ab8e533e7361a2b8ce6157d02c665d3"}, + {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f100d23f273555f4b19b74a96840aa27b85e99ba4b1f18d4ebff0734e78dc090"}, + {file = "asyncpg-0.29.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48e7c58b516057126b363cec8ca02b804644fd012ef8e6c7e23386b7d5e6ce83"}, + {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f9ea3f24eb4c49a615573724d88a48bd1b7821c890c2effe04f05382ed9e8810"}, + {file = "asyncpg-0.29.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8d36c7f14a22ec9e928f15f92a48207546ffe68bc412f3be718eedccdf10dc5c"}, + {file = "asyncpg-0.29.0-cp39-cp39-win32.whl", hash = "sha256:797ab8123ebaed304a1fad4d7576d5376c3a006a4100380fb9d517f0b59c1ab2"}, + {file = "asyncpg-0.29.0-cp39-cp39-win_amd64.whl", hash = "sha256:cce08a178858b426ae1aa8409b5cc171def45d4293626e7aa6510696d46decd8"}, + {file = "asyncpg-0.29.0.tar.gz", hash = "sha256:d1c49e1f44fffafd9a55e1a9b101590859d881d639ea2922516f5d9c512d354e"}, +] + [[package]] name = "black" version = "23.7.0" @@ -68,7 +135,6 @@ dependencies = [ "pathspec>=0.9.0", "platformdirs>=2", "tomli>=1.1.0; python_version < \"3.11\"", - "typing-extensions>=3.10.0.0; python_version < \"3.10\"", ] files = [ {file = "black-23.7.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:5c4bc552ab52f6c1c506ccae05681fab58c3f72d59ae6e6639e8885e94fe2587"}, @@ -81,6 +147,11 @@ files = [ {file = "black-23.7.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:c333286dc3ddca6fdff74670b911cccedacb4ef0a60b34e491b8a67c833b343a"}, {file = "black-23.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831d8f54c3a8c8cf55f64d0422ee875eecac26f5f649fb6c1df65316b67c8926"}, {file = "black-23.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:7f3bf2dec7d541b4619b8ce526bda74a6b0bffc480a163fed32eb8b3c9aed8ad"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:8417dbd2f57b5701492cd46edcecc4f9208dc75529bcf76c514864e48da867d9"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:47e56d83aad53ca140da0af87678fb38e44fd6bc0af71eebab2d1f59b1acf1d3"}, + {file = "black-23.7.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:25cc308838fe71f7065df53aedd20327969d05671bac95b38fdf37ebe70ac087"}, + {file = 
"black-23.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:642496b675095d423f9b8448243336f8ec71c9d4d57ec17bf795b67f08132a91"}, + {file = "black-23.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:ad0014efc7acf0bd745792bd0d8857413652979200ab924fbf239062adc12491"}, {file = "black-23.7.0-py3-none-any.whl", hash = "sha256:9fd59d418c60c0348505f2ddf9609c1e1de8e7493eab96198fc89d9f865e7a96"}, {file = "black-23.7.0.tar.gz", hash = "sha256:022a582720b0d9480ed82576c920a8c1dde97cc38ff11d8d8859b3bd6ca9eedb"}, ] @@ -102,13 +173,12 @@ files = [ [[package]] name = "blinker" -version = "1.6.2" -requires_python = ">=3.7" +version = "1.8.2" +requires_python = ">=3.8" summary = "Fast, simple object-to-object and broadcast signaling" -groups = ["default"] files = [ - {file = "blinker-1.6.2-py3-none-any.whl", hash = "sha256:c3d739772abb7bc2860abf5f2ec284223d9ad5c76da018234f6f50d6f31ab1f0"}, - {file = "blinker-1.6.2.tar.gz", hash = "sha256:4afd3de66ef3a9f8067559fb7a1cbe555c17dcbe15971b05d1b625c3e7abe213"}, + {file = "blinker-1.8.2-py3-none-any.whl", hash = "sha256:1779309f71bf239144b9399d06ae925637cf6634cf6bd131104184531bf67c01"}, + {file = "blinker-1.8.2.tar.gz", hash = "sha256:8f77b09d3bf7c795e969e9486f39c2c5e9c39d4ee07424be2bc594ece9642d83"}, ] [[package]] @@ -124,141 +194,79 @@ files = [ [[package]] name = "certifi" -version = "2023.5.7" +version = "2024.2.2" requires_python = ">=3.6" summary = "Python package for providing Mozilla's CA Bundle." groups = ["default"] files = [ - {file = "certifi-2023.5.7-py3-none-any.whl", hash = "sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716"}, - {file = "certifi-2023.5.7.tar.gz", hash = "sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7"}, -] - -[[package]] -name = "cffi" -version = "1.15.1" -summary = "Foreign Function Interface for Python calling C code." 
-groups = ["default"] -marker = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\"" -dependencies = [ - "pycparser", -] -files = [ - {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, - {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, - {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, - {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, - {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, - {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, - {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, - {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, - {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, - {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, - {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, - {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, - 
{file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, - {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, - {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, - {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, - {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, - {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, + {file = "certifi-2024.2.2-py3-none-any.whl", hash = "sha256:dc383c07b76109f368f6106eee2b593b04a011ea4d55f652c6ca24a754d1cdd1"}, + {file = "certifi-2024.2.2.tar.gz", hash = "sha256:0569859f95fc761b18b45ef421b1290a0f65f147e92a1e5eb3e635f9a5e4e66f"}, ] [[package]] name = "charset-normalizer" -version = "3.1.0" +version = "3.3.2" requires_python = ">=3.7.0" summary = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
groups = ["default"] files = [ - {file = "charset-normalizer-3.1.0.tar.gz", hash = "sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win32.whl", hash = "sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448"}, - {file = "charset_normalizer-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41"}, - {file = 
"charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win32.whl", hash = "sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909"}, - {file = "charset_normalizer-3.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7"}, - {file = 
"charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win32.whl", hash = "sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1"}, - {file = "charset_normalizer-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b"}, - {file = "charset_normalizer-3.1.0-py3-none-any.whl", hash = "sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d"}, + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] name = "click" -version = "8.1.3" +version = "8.1.7" requires_python = ">=3.7" summary = "Composable command line interface toolkit" -groups = ["default", "lint"] -dependencies = [ - "colorama; platform_system == \"Windows\"", - "importlib-metadata; python_version < \"3.8\"", -] files = [ - {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, - {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -summary = "Cross-platform colored terminal text." 
-groups = ["default", "lint", "test"] -marker = "sys_platform == \"win32\" or platform_system == \"Windows\"" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, ] [[package]] @@ -303,154 +311,107 @@ files = [ ] [[package]] -name = "exceptiongroup" -version = "1.1.1" -requires_python = ">=3.7" -summary = "Backport of PEP 654 (exception groups)" -groups = ["test"] -marker = "python_version < \"3.11\"" -files = [ - {file = "exceptiongroup-1.1.1-py3-none-any.whl", hash = "sha256:232c37c63e4f682982c8b6459f33a8981039e5fb8756b2074364e5055c498c9e"}, - {file = "exceptiongroup-1.1.1.tar.gz", hash = "sha256:d484c3090ba2889ae2928419117447a14daf3c1231d5e30d0aae34f354f01785"}, -] - -[[package]] -name = "flake8" -version = "6.1.0" -requires_python = ">=3.8.1" -summary = "the modular source code checker: pep8 pyflakes and co" -groups = ["lint"] -dependencies = [ - "mccabe<0.8.0,>=0.7.0", - "pycodestyle<2.12.0,>=2.11.0", - "pyflakes<3.2.0,>=3.1.0", -] -files = [ - {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, - {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, -] - -[[package]] -name = "flask" -version = "2.3.2" +name = "databases" +version = "0.9.0" requires_python = ">=3.8" -summary = "A simple framework for building complex web applications." -groups = ["default"] +summary = "Async database support for Python." 
dependencies = [ - "Jinja2>=3.1.2", - "Werkzeug>=2.3.3", - "blinker>=1.6.2", - "click>=8.1.3", - "importlib-metadata>=3.6.0; python_version < \"3.10\"", - "itsdangerous>=2.1.2", + "sqlalchemy>=2.0.7", ] files = [ - {file = "Flask-2.3.2-py3-none-any.whl", hash = "sha256:77fd4e1249d8c9923de34907236b747ced06e5467ecac1a7bb7115ae0e9670b0"}, - {file = "Flask-2.3.2.tar.gz", hash = "sha256:8c2f9abd47a9e8df7f0c3f091ce9497d011dc3b31effcf4c85a6e2b50f4114ef"}, + {file = "databases-0.9.0-py3-none-any.whl", hash = "sha256:9ee657c9863b34f8d3a06c06eafbe1bda68af2a434b56996312edf1f1c0b6297"}, + {file = "databases-0.9.0.tar.gz", hash = "sha256:d2f259677609bf187737644c95fa41701072e995dfeb8d2882f335795c5b61b0"}, ] [[package]] -name = "flask-cors" -version = "4.0.0" -summary = "A Flask extension adding a decorator for CORS support" -groups = ["default"] -dependencies = [ - "Flask>=0.9", -] -files = [ - {file = "Flask-Cors-4.0.0.tar.gz", hash = "sha256:f268522fcb2f73e2ecdde1ef45e2fd5c71cc48fe03cffb4b441c6d1b40684eb0"}, - {file = "Flask_Cors-4.0.0-py2.py3-none-any.whl", hash = "sha256:bc3492bfd6368d27cfe79c7821df5a8a319e1a6d5eab277a3794be19bdc51783"}, -] - -[[package]] -name = "flask-httpauth" -version = "4.8.0" -summary = "HTTP authentication for Flask routes" -groups = ["default"] -dependencies = [ - "flask", -] +name = "dnspython" +version = "2.6.1" +requires_python = ">=3.8" +summary = "DNS toolkit" files = [ - {file = "Flask-HTTPAuth-4.8.0.tar.gz", hash = "sha256:66568a05bc73942c65f1e2201ae746295816dc009edd84b482c44c758d75097a"}, - {file = "Flask_HTTPAuth-4.8.0-py3-none-any.whl", hash = "sha256:a58fedd09989b9975448eef04806b096a3964a7feeebc0a78831ff55685b62b0"}, + {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, + {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, ] [[package]] -name = "flask-mail" -version = "0.9.1" -summary = "Flask extension for sending email" -groups = ["default"] +name = "email-validator" +version = "2.2.0" +requires_python = ">=3.8" +summary = "A robust email address syntax and deliverability validation library." dependencies = [ - "Flask", - "blinker", + "dnspython>=2.0.0", + "idna>=2.0.0", ] files = [ - {file = "Flask-Mail-0.9.1.tar.gz", hash = "sha256:22e5eb9a940bf407bcf30410ecc3708f3c56cc44b29c34e1726fe85006935f41"}, + {file = "email_validator-2.2.0-py3-none-any.whl", hash = "sha256:561977c2d73ce3611850a06fa56b414621e0c8faa9d66f2611407d87465da631"}, + {file = "email_validator-2.2.0.tar.gz", hash = "sha256:cb690f344c617a714f22e66ae771445a1ceb46821152df8e165c5f9a364582b7"}, ] [[package]] -name = "flask-migrate" -version = "4.0.4" -requires_python = ">=3.6" -summary = "SQLAlchemy database migrations for Flask applications using Alembic." 
-groups = ["default"] -dependencies = [ - "Flask-SQLAlchemy>=1.0", - "Flask>=0.9", - "alembic>=1.9.0", -] +name = "exceptiongroup" +version = "1.2.1" +requires_python = ">=3.7" +summary = "Backport of PEP 654 (exception groups)" +groups = ["test"] +marker = "python_version < \"3.11\"" files = [ - {file = "Flask-Migrate-4.0.4.tar.gz", hash = "sha256:73293d40b10ac17736e715b377e7b7bde474cb8105165d77474df4c3619b10b3"}, - {file = "Flask_Migrate-4.0.4-py3-none-any.whl", hash = "sha256:77580f27ab39bc68be4906a43c56d7674b45075bc4f883b1d0b985db5164d58f"}, + {file = "exceptiongroup-1.2.1-py3-none-any.whl", hash = "sha256:5258b9ed329c5bbdd31a309f53cbfb0b155341807f6ff7606a1e801a891b29ad"}, + {file = "exceptiongroup-1.2.1.tar.gz", hash = "sha256:a4785e48b045528f5bfe627b6ad554ff32def154f42372786903b7abcfe1aa16"}, ] [[package]] -name = "flask-restful" -version = "0.3.10" -summary = "Simple framework for creating REST APIs" -groups = ["default"] +name = "fastapi" +version = "0.108.0" +requires_python = ">=3.8" +summary = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" dependencies = [ - "Flask>=0.8", - "aniso8601>=0.82", - "pytz", - "six>=1.3.0", + "pydantic!=1.8,!=1.8.1,!=2.0.0,!=2.0.1,!=2.1.0,<3.0.0,>=1.7.4", + "starlette<0.33.0,>=0.29.0", + "typing-extensions>=4.8.0", ] files = [ - {file = "Flask-RESTful-0.3.10.tar.gz", hash = "sha256:fe4af2ef0027df8f9b4f797aba20c5566801b6ade995ac63b588abf1a59cec37"}, - {file = "Flask_RESTful-0.3.10-py2.py3-none-any.whl", hash = "sha256:1cf93c535172f112e080b0d4503a8d15f93a48c88bdd36dd87269bdaf405051b"}, + {file = "fastapi-0.108.0-py3-none-any.whl", hash = "sha256:8c7bc6d315da963ee4cdb605557827071a9a7f95aeb8fcdd3bde48cdc8764dd7"}, + {file = "fastapi-0.108.0.tar.gz", hash = "sha256:5056e504ac6395bf68493d71fcfc5352fdbd5fda6f88c21f6420d80d81163296"}, ] [[package]] -name = "flask-sqlalchemy" -version = "3.0.5" -requires_python = ">=3.7" -summary = "Add SQLAlchemy support to your Flask application." 
-groups = ["default"] +name = "fastapi-mail" +version = "1.4.1" +requires_python = ">=3.8.1,<4.0" +summary = "Simple lightweight mail library for FastApi" dependencies = [ - "flask>=2.2.5", - "sqlalchemy>=1.4.18", + "Jinja2<4.0,>=3.0", + "aiosmtplib<3.0,>=2.0", + "blinker<2.0,>=1.5", + "email-validator<3.0,>=2.0", + "pydantic-settings<3.0,>=2.0", + "pydantic<3.0,>=2.0", + "starlette<1.0,>=0.24", ] files = [ - {file = "flask_sqlalchemy-3.0.5-py3-none-any.whl", hash = "sha256:cabb6600ddd819a9f859f36515bb1bd8e7dbf30206cc679d2b081dff9e383283"}, - {file = "flask_sqlalchemy-3.0.5.tar.gz", hash = "sha256:c5765e58ca145401b52106c0f46178569243c5da25556be2c231ecc60867c5b1"}, + {file = "fastapi_mail-1.4.1-py3-none-any.whl", hash = "sha256:fa5ef23b2dea4d3ba4587f4bbb53f8f15274124998fb4e40629b3b636c76c398"}, + {file = "fastapi_mail-1.4.1.tar.gz", hash = "sha256:9095b713bd9d3abb02fe6d7abb637502aaf680b52e177d60f96273ef6bc8bb70"}, ] [[package]] -name = "flask-swagger" -version = "0.2.14" -summary = "Extract swagger specs from your flask project" -groups = ["default"] +name = "flake8" +version = "6.1.0" +requires_python = ">=3.8.1" +summary = "the modular source code checker: pep8 pyflakes and co" +groups = ["lint"] dependencies = [ - "Flask>=0.10", - "PyYAML>=5.1", + "mccabe<0.8.0,>=0.7.0", + "pycodestyle<2.12.0,>=2.11.0", + "pyflakes<3.2.0,>=3.1.0", ] files = [ - {file = "flask-swagger-0.2.14.tar.gz", hash = "sha256:b4085f5bc36df4c20b6548cd1413adc9cf35719b0f0695367cd542065145294d"}, + {file = "flake8-6.1.0-py2.py3-none-any.whl", hash = "sha256:ffdfce58ea94c6580c77888a86506937f9a1a227dfcd15f245d694ae20a6b6e5"}, + {file = "flake8-6.1.0.tar.gz", hash = "sha256:d5b3857f07c030bdb5bf41c7f53799571d75c4491748a3adcd47de929e34cd23"}, ] [[package]] name = "geoalchemy2" -version = "0.14.1" +version = "0.14.3" requires_python = ">=3.7" summary = "Using SQLAlchemy with Spatial Databases" groups = ["default"] @@ -459,19 +420,19 @@ dependencies = [ "packaging", ] files = [ - {file = "GeoAlchemy2-0.14.1-py3-none-any.whl", hash = "sha256:0830c98f83d6b1706e62b5544793d304e2853493d6e70ac18444c13748c3d1c7"}, - {file = "GeoAlchemy2-0.14.1.tar.gz", hash = "sha256:620b31cbf97a368b2486dbcfcd36da2081827e933d4163bcb942043b79b545e8"}, + {file = "GeoAlchemy2-0.14.3-py3-none-any.whl", hash = "sha256:a727198394fcc4760a27c4c5bff8b9f4f79324ec2dd98c4c1b8a7026b8918d81"}, + {file = "GeoAlchemy2-0.14.3.tar.gz", hash = "sha256:79c432b10dd8c48422f794eaf9a1200929de14f41d2396923bfe92f4c6abaf89"}, ] [[package]] name = "geojson" -version = "3.0.1" -requires_python = ">=3.7, <3.12" +version = "3.1.0" +requires_python = ">=3.7" summary = "Python bindings and utilities for GeoJSON" groups = ["default"] files = [ - {file = "geojson-3.0.1-py3-none-any.whl", hash = "sha256:e49df982b204ed481e4c1236c57f587adf71537301cf8faf7120ab27d73c7568"}, - {file = "geojson-3.0.1.tar.gz", hash = "sha256:ff3d75acab60b1e66504a11f7ea12c104bad32ff3c410a807788663b966dee4a"}, + {file = "geojson-3.1.0-py3-none-any.whl", hash = "sha256:68a9771827237adb8c0c71f8527509c8f5bef61733aa434cefc9c9d4f0ebe8f3"}, + {file = "geojson-3.1.0.tar.gz", hash = "sha256:58a7fa40727ea058efc28b0e9ff0099eadf6d0965e04690830208d3ef571adac"}, ] [[package]] @@ -481,7 +442,6 @@ requires_python = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5" summary = "Coroutine-based network library" groups = ["default"] dependencies = [ - "cffi>=1.12.2; platform_python_implementation == \"CPython\" and sys_platform == \"win32\"", "greenlet>=2.0.0; platform_python_implementation == \"CPython\"", "setuptools", 
"zope-event", @@ -520,6 +480,7 @@ requires_python = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*" summary = "Lightweight in-process concurrent programming" groups = ["default"] files = [ + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, @@ -528,12 +489,14 @@ files = [ {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:eff4eb9b7eb3e4d0cae3d28c283dc16d9bed6b193c2e1ace3ed86ce48ea8df19"}, {file = "greenlet-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5454276c07d27a740c5892f4907c86327b632127dd9abec42ee62e12427ff7e3"}, {file = "greenlet-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:7cafd1208fdbe93b67c7086876f061f660cfddc44f404279c1585bbf3cdc64c5"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, @@ -576,15 +539,25 @@ files = [ {file = "gunicorn-20.1.0.tar.gz", hash = "sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8"}, ] +[[package]] +name = "h11" +version = "0.14.0" +requires_python = ">=3.7" +summary = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + [[package]] name = "idna" -version = "3.4" 
+version = "3.7" requires_python = ">=3.5" summary = "Internationalized Domain Names in Applications (IDNA)" groups = ["default"] files = [ - {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, - {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, + {file = "idna-3.7-py3-none-any.whl", hash = "sha256:82fee1fc78add43492d3a1898bfa6d8a904cc97d8427f683ed8e798d07761aa0"}, + {file = "idna-3.7.tar.gz", hash = "sha256:028ff3aadf0609c1fd278d8ea3089299412a7a8b9bd005dd08b9f8285bcb5cfc"}, ] [[package]] @@ -594,7 +567,6 @@ requires_python = ">=3.8" summary = "Read metadata from Python packages" groups = ["default"] dependencies = [ - "typing-extensions>=3.6.4; python_version < \"3.8\"", "zipp>=0.5", ] files = [ @@ -626,33 +598,41 @@ files = [ [[package]] name = "jinja2" -version = "3.1.2" +version = "3.1.4" requires_python = ">=3.7" summary = "A very fast and expressive template engine." -groups = ["default"] dependencies = [ "MarkupSafe>=2.0", ] files = [ - {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, - {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, + {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, + {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [[package]] name = "joblib" -version = "1.2.0" -requires_python = ">=3.7" +version = "1.4.2" +requires_python = ">=3.8" summary = "Lightweight pipelining with Python functions" -groups = ["default"] files = [ - {file = "joblib-1.2.0-py3-none-any.whl", hash = "sha256:091138ed78f800342968c523bdde947e7a305b8594b910a0fea2ab83c3c6d385"}, - {file = "joblib-1.2.0.tar.gz", hash = "sha256:e1cee4a79e4af22881164f218d4311f60074197fb707e082e803b61f6d137018"}, + {file = "joblib-1.4.2-py3-none-any.whl", hash = "sha256:06d478d5674cbc267e7496a410ee875abd68e4340feff4490bcb7afb88060ae6"}, + {file = "joblib-1.4.2.tar.gz", hash = "sha256:2382c5816b2636fbd20a09e0f4e9dad4736765fdfb7dca582943b9c1366b3f0e"}, +] + +[[package]] +name = "loguru" +version = "0.7.2" +requires_python = ">=3.5" +summary = "Python logging made (stupidly) simple" +files = [ + {file = "loguru-0.7.2-py3-none-any.whl", hash = "sha256:003d71e3d3ed35f0f8984898359d65b79e5b21943f78af86aa5491210429b8eb"}, + {file = "loguru-0.7.2.tar.gz", hash = "sha256:e671a53522515f34fd406340ee968cb9ecafbc4b36c679da03c18fd8d0bd51ac"}, ] [[package]] name = "mako" -version = "1.2.4" -requires_python = ">=3.7" +version = "1.3.3" +requires_python = ">=3.8" summary = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
groups = ["default"] dependencies = [ @@ -660,8 +640,8 @@ dependencies = [ "importlib-metadata; python_version < \"3.8\"", ] files = [ - {file = "Mako-1.2.4-py3-none-any.whl", hash = "sha256:c97c79c018b9165ac9922ae4f32da095ffd3c4e6872b45eded42926deea46818"}, - {file = "Mako-1.2.4.tar.gz", hash = "sha256:d60a3903dc3bb01a18ad6a89cdbe2e4eadc69c0bc8ef1e3773ba53d44c3f7a34"}, + {file = "Mako-1.3.3-py3-none-any.whl", hash = "sha256:5324b88089a8978bf76d1629774fcc2f1c07b82acdf00f4c5dd8ceadfffc4b40"}, + {file = "Mako-1.3.3.tar.gz", hash = "sha256:e16c01d9ab9c11f7290eef1cfefc093fb5a45ee4a3da09e2fec2e4d1bae54e73"}, ] [[package]] @@ -669,10 +649,6 @@ name = "markdown" version = "3.4.4" requires_python = ">=3.7" summary = "Python implementation of John Gruber's Markdown." -groups = ["default"] -dependencies = [ - "importlib-metadata>=4.4; python_version < \"3.10\"", -] files = [ {file = "Markdown-3.4.4-py3-none-any.whl", hash = "sha256:a4c1b65c0957b4bd9e7d86ddc7b3c9868fb9670660f6f99f6d1bca8954d5a941"}, {file = "Markdown-3.4.4.tar.gz", hash = "sha256:225c6123522495d4119a90b3a3ba31a1e87a70369e03f14799ea9c0d7183a3d6"}, @@ -680,42 +656,42 @@ files = [ [[package]] name = "markupsafe" -version = "2.1.2" +version = "2.1.5" requires_python = ">=3.7" summary = "Safely add untrusted strings to HTML/XML markup." groups = ["default"] files = [ - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, - {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, - {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, - {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, - {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, + {file = 
"MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, + {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, + {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, + {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, + {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] @@ -758,33 +734,39 @@ files = [ [[package]] name = "numpy" -version = "1.24.3" -requires_python = ">=3.8" +version = "1.26.4" +requires_python = ">=3.9" summary = "Fundamental package for array computing in Python" groups = ["default"] files = [ - {file = "numpy-1.24.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c1104d3c036fb81ab923f507536daedc718d0ad5a8707c6061cdfd6d184e570"}, - {file = "numpy-1.24.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:202de8f38fc4a45a3eea4b63e2f376e5f2dc64ef0fa692838e31a808520efaf7"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8535303847b89aa6b0f00aa1dc62867b5a32923e4d1681a35b5eef2d9591a463"}, - {file = "numpy-1.24.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d926b52ba1367f9acb76b0df6ed21f0b16a1ad87c6720a1121674e5cf63e2b6"}, - {file = "numpy-1.24.3-cp310-cp310-win32.whl", hash = "sha256:f21c442fdd2805e91799fbe044a7b999b8571bb0ab0f7850d0cb9641a687092b"}, - {file = "numpy-1.24.3-cp310-cp310-win_amd64.whl", hash = "sha256:ab5f23af8c16022663a652d3b25dcdc272ac3f83c3af4c02eb8b824e6b3ab9d7"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9a7721ec204d3a237225db3e194c25268faf92e19338a35f3a224469cb6039a3"}, - {file = "numpy-1.24.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d6cc757de514c00b24ae8cf5c876af2a7c3df189028d68c0cb4eaa9cd5afc2bf"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76e3f4e85fc5d4fd311f6e9b794d0c00e7002ec122be271f2019d63376f1d385"}, - {file = "numpy-1.24.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1d3c026f57ceaad42f8231305d4653d5f05dc6332a730ae5c0bea3513de0950"}, - {file = "numpy-1.24.3-cp311-cp311-win32.whl", hash = "sha256:c91c4afd8abc3908e00a44b2672718905b8611503f7ff87390cc0ac3423fb096"}, - {file = "numpy-1.24.3-cp311-cp311-win_amd64.whl", hash = "sha256:5342cf6aad47943286afa6f1609cad9b4266a05e7f2ec408e2cf7aea7ff69d80"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4719d5aefb5189f50887773699eaf94e7d1e02bf36c1a9d353d9f46703758ca4"}, - {file = "numpy-1.24.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0ec87a7084caa559c36e0a2309e4ecb1baa03b687201d0a847c8b0ed476a7187"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea8282b9bcfe2b5e7d491d0bf7f3e2da29700cec05b49e64d6246923329f2b02"}, - {file = "numpy-1.24.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:210461d87fb02a84ef243cac5e814aad2b7f4be953b32cb53327bb49fd77fbb4"}, - {file = "numpy-1.24.3-cp39-cp39-win32.whl", hash = "sha256:784c6da1a07818491b0ffd63c6bbe5a33deaa0e25a20e1b3ea20cf0e43f8046c"}, - {file = "numpy-1.24.3-cp39-cp39-win_amd64.whl", hash = "sha256:d5036197ecae68d7f491fcdb4df90082b0d4960ca6599ba2659957aafced7c17"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:352ee00c7f8387b44d19f4cada524586f07379c0d49270f87233983bc5087ca0"}, - {file = 
"numpy-1.24.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a7d6acc2e7524c9955e5c903160aa4ea083736fde7e91276b0e5d98e6332812"}, - {file = "numpy-1.24.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:35400e6a8d102fd07c71ed7dcadd9eb62ee9a6e84ec159bd48c28235bbb0f8e4"}, - {file = "numpy-1.24.3.tar.gz", hash = "sha256:ab344f1bf21f140adab8e47fdbc7c35a477dc01408791f8ba00d018dd0bc5155"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, + {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, + {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, + {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, + {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, + {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, + {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, + {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, + {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, + {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, + {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, + {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, + {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, + {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, + 
{file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, + {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, + {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, + {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, + {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] [[package]] @@ -800,173 +782,204 @@ files = [ [[package]] name = "packaging" -version = "23.1" +version = "24.0" requires_python = ">=3.7" summary = "Core utilities for Python packages" groups = ["default", "lint", "test"] files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-24.0-py3-none-any.whl", hash = "sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5"}, + {file = "packaging-24.0.tar.gz", hash = "sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9"}, ] [[package]] name = "pandas" -version = "2.0.2" -requires_python = ">=3.8" +version = "2.2.2" +requires_python = ">=3.9" summary = "Powerful data structures for data analysis, time series, and statistics" groups = ["default"] dependencies = [ - "numpy>=1.20.3; python_version < \"3.10\"", - "numpy>=1.21.0; python_version >= \"3.10\"", - "numpy>=1.23.2; python_version >= \"3.11\"", + "numpy>=1.22.4; python_version < \"3.11\"", "python-dateutil>=2.8.2", "pytz>=2020.1", - "tzdata>=2022.1", -] -files = [ - {file = "pandas-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ebb9f1c22ddb828e7fd017ea265a59d80461d5a79154b49a4207bd17514d122"}, - {file = "pandas-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1eb09a242184092f424b2edd06eb2b99d06dc07eeddff9929e8667d4ed44e181"}, - {file = "pandas-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7319b6e68de14e6209460f72a8d1ef13c09fb3d3ef6c37c1e65b35d50b5c145"}, - {file = "pandas-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd46bde7309088481b1cf9c58e3f0e204b9ff9e3244f441accd220dd3365ce7c"}, - {file = "pandas-2.0.2-cp310-cp310-win32.whl", hash = "sha256:51a93d422fbb1bd04b67639ba4b5368dffc26923f3ea32a275d2cc450f1d1c86"}, - {file = "pandas-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:66d00300f188fa5de73f92d5725ced162488f6dc6ad4cecfe4144ca29debe3b8"}, - {file = "pandas-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:02755de164da6827764ceb3bbc5f64b35cb12394b1024fdf88704d0fa06e0e2f"}, - {file = "pandas-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0a1e0576611641acde15c2322228d138258f236d14b749ad9af498ab69089e2d"}, - {file = "pandas-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a6b5f14cd24a2ed06e14255ff40fe2ea0cfaef79a8dd68069b7ace74bd6acbba"}, - {file = "pandas-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50e451932b3011b61d2961b4185382c92cc8c6ee4658dcd4f320687bb2d000ee"}, - {file = "pandas-2.0.2-cp311-cp311-win32.whl", hash = "sha256:7b21cb72958fc49ad757685db1919021d99650d7aaba676576c9e88d3889d456"}, - {file = "pandas-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:c4af689352c4fe3d75b2834933ee9d0ccdbf5d7a8a7264f0ce9524e877820c08"}, - {file = "pandas-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b42b120458636a981077cfcfa8568c031b3e8709701315e2bfa866324a83efa8"}, - {file = "pandas-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f908a77cbeef9bbd646bd4b81214cbef9ac3dda4181d5092a4aa9797d1bc7774"}, - {file = "pandas-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:713f2f70abcdade1ddd68fc91577cb090b3544b07ceba78a12f799355a13ee44"}, - {file = "pandas-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf3f0c361a4270185baa89ec7ab92ecaa355fe783791457077473f974f654df5"}, - {file = "pandas-2.0.2-cp39-cp39-win32.whl", hash = "sha256:598e9020d85a8cdbaa1815eb325a91cfff2bb2b23c1442549b8a3668e36f0f77"}, - {file = "pandas-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:77550c8909ebc23e56a89f91b40ad01b50c42cfbfab49b3393694a50549295ea"}, - {file = "pandas-2.0.2.tar.gz", hash = "sha256:dd5476b6c3fe410ee95926873f377b856dbc4e81a9c605a0dc05aaccc6a7c6c6"}, + "tzdata>=2022.7", +] +files = [ + {file = "pandas-2.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:90c6fca2acf139569e74e8781709dccb6fe25940488755716d1d354d6bc58bce"}, + {file = "pandas-2.2.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7adfc142dac335d8c1e0dcbd37eb8617eac386596eb9e1a1b77791cf2498238"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4abfe0be0d7221be4f12552995e58723c7422c80a659da13ca382697de830c08"}, + {file = "pandas-2.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8635c16bf3d99040fdf3ca3db669a7250ddf49c55dc4aa8fe0ae0fa8d6dcc1f0"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:40ae1dffb3967a52203105a077415a86044a2bea011b5f321c6aa64b379a3f51"}, + {file = "pandas-2.2.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8e5a0b00e1e56a842f922e7fae8ae4077aee4af0acb5ae3622bd4b4c30aedf99"}, + {file = "pandas-2.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ddf818e4e6c7c6f4f7c8a12709696d193976b591cc7dc50588d3d1a6b5dc8772"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:696039430f7a562b74fa45f540aca068ea85fa34c244d0deee539cb6d70aa288"}, + {file = "pandas-2.2.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e90497254aacacbc4ea6ae5e7a8cd75629d6ad2b30025a4a8b09aa4faf55151"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58b84b91b0b9f4bafac2a0ac55002280c094dfc6402402332c0913a59654ab2b"}, + {file = "pandas-2.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2123dc9ad6a814bcdea0f099885276b31b24f7edf40f6cdbc0912672e22eee"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:2925720037f06e89af896c70bca73459d7e6a4be96f9de79e2d440bd499fe0db"}, + {file = "pandas-2.2.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0cace394b6ea70c01ca1595f839cf193df35d1575986e484ad35c4aeae7266c1"}, + {file = 
"pandas-2.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:873d13d177501a28b2756375d59816c365e42ed8417b41665f346289adc68d24"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0ca6377b8fca51815f382bd0b697a0814c8bda55115678cbc94c30aacbb6eff2"}, + {file = "pandas-2.2.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9057e6aa78a584bc93a13f0a9bf7e753a5e9770a30b4d758b8d5f2a62a9433cd"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:001910ad31abc7bf06f49dcc903755d2f7f3a9186c0c040b827e522e9cef0863"}, + {file = "pandas-2.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66b479b0bd07204e37583c191535505410daa8df638fd8e75ae1b383851fe921"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a77e9d1c386196879aa5eb712e77461aaee433e54c68cf253053a73b7e49c33a"}, + {file = "pandas-2.2.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:92fd6b027924a7e178ac202cfbe25e53368db90d56872d20ffae94b96c7acc57"}, + {file = "pandas-2.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:640cef9aa381b60e296db324337a554aeeb883ead99dc8f6c18e81a93942f5f4"}, + {file = "pandas-2.2.2.tar.gz", hash = "sha256:9e79019aba43cb4fda9e4d983f8e88ca0373adbb697ae9c6c43093218de28b54"}, ] [[package]] name = "pathspec" -version = "0.11.1" -requires_python = ">=3.7" +version = "0.12.1" +requires_python = ">=3.8" summary = "Utility library for gitignore style pattern matching of file paths." groups = ["lint"] files = [ - {file = "pathspec-0.11.1-py3-none-any.whl", hash = "sha256:d8af70af76652554bd134c22b3e8a1cc46ed7d91edcdd721ef1a0c51a84a5293"}, - {file = "pathspec-0.11.1.tar.gz", hash = "sha256:2798de800fa92780e33acca925945e9a19a133b715067cf165b8866c15a31687"}, + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, ] [[package]] name = "platformdirs" -version = "3.5.1" -requires_python = ">=3.7" -summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -groups = ["lint"] -dependencies = [ - "typing-extensions>=4.5; python_version < \"3.8\"", -] +version = "4.2.1" +requires_python = ">=3.8" +summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." 
files = [ - {file = "platformdirs-3.5.1-py3-none-any.whl", hash = "sha256:e2378146f1964972c03c085bb5662ae80b2b8c06226c54b2ff4aa9483e8a13a5"}, - {file = "platformdirs-3.5.1.tar.gz", hash = "sha256:412dae91f52a6f84830f39a8078cecd0e866cb72294a5c66808e74d5e88d251f"}, + {file = "platformdirs-4.2.1-py3-none-any.whl", hash = "sha256:17d5a1161b3fd67b390023cb2d3b026bbd40abde6fdb052dfbd3a29c3ba22ee1"}, + {file = "platformdirs-4.2.1.tar.gz", hash = "sha256:031cd18d4ec63ec53e82dceaac0417d218a6863f7745dfcc9efe7793b7039bdf"}, ] [[package]] name = "pluggy" -version = "1.0.0" -requires_python = ">=3.6" +version = "1.5.0" +requires_python = ">=3.8" summary = "plugin and hook calling mechanisms for python" groups = ["test"] dependencies = [ "importlib-metadata>=0.12; python_version < \"3.8\"", ] files = [ - {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, - {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, + {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, + {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [[package]] -name = "psycopg2" -version = "2.9.6" -requires_python = ">=3.6" -summary = "psycopg2 - Python-PostgreSQL Database Adapter" -groups = ["default"] +name = "pycodestyle" +version = "2.11.1" +requires_python = ">=3.8" +summary = "Python style guide checker" +groups = ["lint"] files = [ - {file = "psycopg2-2.9.6-cp310-cp310-win32.whl", hash = "sha256:f7a7a5ee78ba7dc74265ba69e010ae89dae635eea0e97b055fb641a01a31d2b1"}, - {file = "psycopg2-2.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:f75001a1cbbe523e00b0ef896a5a1ada2da93ccd752b7636db5a99bc57c44494"}, - {file = "psycopg2-2.9.6-cp311-cp311-win32.whl", hash = "sha256:53f4ad0a3988f983e9b49a5d9765d663bbe84f508ed655affdb810af9d0972ad"}, - {file = "psycopg2-2.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:b81fcb9ecfc584f661b71c889edeae70bae30d3ef74fa0ca388ecda50b1222b7"}, - {file = "psycopg2-2.9.6-cp39-cp39-win32.whl", hash = "sha256:1861a53a6a0fd248e42ea37c957d36950da00266378746588eab4f4b5649e95f"}, - {file = "psycopg2-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:ded2faa2e6dfb430af7713d87ab4abbfc764d8d7fb73eafe96a24155f906ebf5"}, - {file = "psycopg2-2.9.6.tar.gz", hash = "sha256:f15158418fd826831b28585e2ab48ed8df2d0d98f502a2b4fe619e7d5ca29011"}, + {file = "pycodestyle-2.11.1-py2.py3-none-any.whl", hash = "sha256:44fe31000b2d866f2e41841b18528a505fbd7fef9017b04eff4e2648a0fadc67"}, + {file = "pycodestyle-2.11.1.tar.gz", hash = "sha256:41ba0e7afc9752dfb53ced5489e89f8186be00e599e712660695b7a75ff2663f"}, ] [[package]] -name = "psycopg2-binary" -version = "2.9.6" -requires_python = ">=3.6" -summary = "psycopg2 - Python-PostgreSQL Database Adapter" -groups = ["dev"] -files = [ - {file = "psycopg2-binary-2.9.6.tar.gz", hash = "sha256:1f64dcfb8f6e0c014c7f55e51c9759f024f70ea572fbdef123f85318c297947c"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d26e0342183c762de3276cca7a530d574d4e25121ca7d6e4a98e4f05cb8e4df7"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c48d8f2db17f27d41fb0e2ecd703ea41984ee19362cbce52c097963b3a1b4365"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffe9dc0a884a8848075e576c1de0290d85a533a9f6e9c4e564f19adf8f6e54a7"}, - 
{file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a76e027f87753f9bd1ab5f7c9cb8c7628d1077ef927f5e2446477153a602f2c"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6460c7a99fc939b849431f1e73e013d54aa54293f30f1109019c56a0b2b2ec2f"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae102a98c547ee2288637af07393dd33f440c25e5cd79556b04e3fca13325e5f"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9972aad21f965599ed0106f65334230ce826e5ae69fda7cbd688d24fa922415e"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7a40c00dbe17c0af5bdd55aafd6ff6679f94a9be9513a4c7e071baf3d7d22a70"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:cacbdc5839bdff804dfebc058fe25684cae322987f7a38b0168bc1b2df703fb1"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7f0438fa20fb6c7e202863e0d5ab02c246d35efb1d164e052f2f3bfe2b152bd0"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-win32.whl", hash = "sha256:b6c8288bb8a84b47e07013bb4850f50538aa913d487579e1921724631d02ea1b"}, - {file = "psycopg2_binary-2.9.6-cp310-cp310-win_amd64.whl", hash = "sha256:61b047a0537bbc3afae10f134dc6393823882eb263088c271331602b672e52e9"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:964b4dfb7c1c1965ac4c1978b0f755cc4bd698e8aa2b7667c575fb5f04ebe06b"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afe64e9b8ea66866a771996f6ff14447e8082ea26e675a295ad3bdbffdd72afb"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15e2ee79e7cf29582ef770de7dab3d286431b01c3bb598f8e05e09601b890081"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dfa74c903a3c1f0d9b1c7e7b53ed2d929a4910e272add6700c38f365a6002820"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b83456c2d4979e08ff56180a76429263ea254c3f6552cd14ada95cff1dec9bb8"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0645376d399bfd64da57148694d78e1f431b1e1ee1054872a5713125681cf1be"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e99e34c82309dd78959ba3c1590975b5d3c862d6f279f843d47d26ff89d7d7e1"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4ea29fc3ad9d91162c52b578f211ff1c931d8a38e1f58e684c45aa470adf19e2"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4ac30da8b4f57187dbf449294d23b808f8f53cad6b1fc3623fa8a6c11d176dd0"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e78e6e2a00c223e164c417628572a90093c031ed724492c763721c2e0bc2a8df"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-win32.whl", hash = "sha256:1876843d8e31c89c399e31b97d4b9725a3575bb9c2af92038464231ec40f9edb"}, - {file = "psycopg2_binary-2.9.6-cp311-cp311-win_amd64.whl", hash = "sha256:b4b24f75d16a89cc6b4cdff0eb6a910a966ecd476d1e73f7ce5985ff1328e9a6"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e9182eb20f41417ea1dd8e8f7888c4d7c6e805f8a7c98c1081778a3da2bee3e4"}, - {file = 
"psycopg2_binary-2.9.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8a6979cf527e2603d349a91060f428bcb135aea2be3201dff794813256c274f1"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8338a271cb71d8da40b023a35d9c1e919eba6cbd8fa20a54b748a332c355d896"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3ed340d2b858d6e6fb5083f87c09996506af483227735de6964a6100b4e6a54"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f81e65376e52f03422e1fb475c9514185669943798ed019ac50410fb4c4df232"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfb13af3c5dd3a9588000910178de17010ebcccd37b4f9794b00595e3a8ddad3"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4c727b597c6444a16e9119386b59388f8a424223302d0c06c676ec8b4bc1f963"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4d67fbdaf177da06374473ef6f7ed8cc0a9dc640b01abfe9e8a2ccb1b1402c1f"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:0892ef645c2fabb0c75ec32d79f4252542d0caec1d5d949630e7d242ca4681a3"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:02c0f3757a4300cf379eb49f543fb7ac527fb00144d39246ee40e1df684ab514"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-win32.whl", hash = "sha256:c3dba7dab16709a33a847e5cd756767271697041fbe3fe97c215b1fc1f5c9848"}, - {file = "psycopg2_binary-2.9.6-cp39-cp39-win_amd64.whl", hash = "sha256:f6a88f384335bb27812293fdb11ac6aee2ca3f51d3c7820fe03de0a304ab6249"}, +name = "pydantic" +version = "2.5.3" +requires_python = ">=3.7" +summary = "Data validation using Python type hints" +dependencies = [ + "annotated-types>=0.4.0", + "pydantic-core==2.14.6", + "typing-extensions>=4.6.1", ] - -[[package]] -name = "pycodestyle" -version = "2.11.0" -requires_python = ">=3.8" -summary = "Python style guide checker" -groups = ["lint"] files = [ - {file = "pycodestyle-2.11.0-py2.py3-none-any.whl", hash = "sha256:5d1013ba8dc7895b548be5afb05740ca82454fd899971563d2ef625d090326f8"}, - {file = "pycodestyle-2.11.0.tar.gz", hash = "sha256:259bcc17857d8a8b3b4a2327324b79e5f020a13c16074670f9c8c8f872ea76d0"}, + {file = "pydantic-2.5.3-py3-none-any.whl", hash = "sha256:d0caf5954bee831b6bfe7e338c32b9e30c85dfe080c843680783ac2b631673b4"}, + {file = "pydantic-2.5.3.tar.gz", hash = "sha256:b3ef57c62535b0941697cce638c08900d87fcb67e29cfa99e8a68f747f393f7a"}, ] [[package]] -name = "pycparser" -version = "2.21" -requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -summary = "C parser in Python" -groups = ["default"] -marker = "platform_python_implementation == \"CPython\" and sys_platform == \"win32\"" +name = "pydantic-core" +version = "2.14.6" +requires_python = ">=3.7" +summary = "" +dependencies = [ + "typing-extensions!=4.7.0,>=4.6.0", +] +files = [ + {file = "pydantic_core-2.14.6-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:72f9a942d739f09cd42fffe5dc759928217649f070056f03c70df14f5770acf9"}, + {file = "pydantic_core-2.14.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6a31d98c0d69776c2576dda4b77b8e0c69ad08e8b539c25c7d0ca0dc19a50d6c"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5aa90562bc079c6c290f0512b21768967f9968e4cfea84ea4ff5af5d917016e4"}, + {file = 
"pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:370ffecb5316ed23b667d99ce4debe53ea664b99cc37bfa2af47bc769056d534"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f85f3843bdb1fe80e8c206fe6eed7a1caeae897e496542cee499c374a85c6e08"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9862bf828112e19685b76ca499b379338fd4c5c269d897e218b2ae8fcb80139d"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:036137b5ad0cb0004c75b579445a1efccd072387a36c7f217bb8efd1afbe5245"}, + {file = "pydantic_core-2.14.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:92879bce89f91f4b2416eba4429c7b5ca22c45ef4a499c39f0c5c69257522c7c"}, + {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0c08de15d50fa190d577e8591f0329a643eeaed696d7771760295998aca6bc66"}, + {file = "pydantic_core-2.14.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:36099c69f6b14fc2c49d7996cbf4f87ec4f0e66d1c74aa05228583225a07b590"}, + {file = "pydantic_core-2.14.6-cp310-none-win32.whl", hash = "sha256:7be719e4d2ae6c314f72844ba9d69e38dff342bc360379f7c8537c48e23034b7"}, + {file = "pydantic_core-2.14.6-cp310-none-win_amd64.whl", hash = "sha256:36fa402dcdc8ea7f1b0ddcf0df4254cc6b2e08f8cd80e7010d4c4ae6e86b2a87"}, + {file = "pydantic_core-2.14.6-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:dea7fcd62915fb150cdc373212141a30037e11b761fbced340e9db3379b892d4"}, + {file = "pydantic_core-2.14.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ffff855100bc066ff2cd3aa4a60bc9534661816b110f0243e59503ec2df38421"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b027c86c66b8627eb90e57aee1f526df77dc6d8b354ec498be9a757d513b92b"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:00b1087dabcee0b0ffd104f9f53d7d3eaddfaa314cdd6726143af6bc713aa27e"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:75ec284328b60a4e91010c1acade0c30584f28a1f345bc8f72fe8b9e46ec6a96"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e1f4744eea1501404b20b0ac059ff7e3f96a97d3e3f48ce27a139e053bb370b"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2602177668f89b38b9f84b7b3435d0a72511ddef45dc14446811759b82235a1"}, + {file = "pydantic_core-2.14.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c8edaea3089bf908dd27da8f5d9e395c5b4dc092dbcce9b65e7156099b4b937"}, + {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:478e9e7b360dfec451daafe286998d4a1eeaecf6d69c427b834ae771cad4b622"}, + {file = "pydantic_core-2.14.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b6ca36c12a5120bad343eef193cc0122928c5c7466121da7c20f41160ba00ba2"}, + {file = "pydantic_core-2.14.6-cp311-none-win32.whl", hash = "sha256:2b8719037e570639e6b665a4050add43134d80b687288ba3ade18b22bbb29dd2"}, + {file = "pydantic_core-2.14.6-cp311-none-win_amd64.whl", hash = "sha256:78ee52ecc088c61cce32b2d30a826f929e1708f7b9247dc3b921aec367dc1b23"}, + {file = "pydantic_core-2.14.6-cp311-none-win_arm64.whl", hash = "sha256:a19b794f8fe6569472ff77602437ec4430f9b2b9ec7a1105cfd2232f9ba355e6"}, + {file = 
"pydantic_core-2.14.6-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:4ce8299b481bcb68e5c82002b96e411796b844d72b3e92a3fbedfe8e19813eab"}, + {file = "pydantic_core-2.14.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b9a9d92f10772d2a181b5ca339dee066ab7d1c9a34ae2421b2a52556e719756f"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd9e98b408384989ea4ab60206b8e100d8687da18b5c813c11e92fd8212a98e0"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4f86f1f318e56f5cbb282fe61eb84767aee743ebe32c7c0834690ebea50c0a6b"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86ce5fcfc3accf3a07a729779d0b86c5d0309a4764c897d86c11089be61da160"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3dcf1978be02153c6a31692d4fbcc2a3f1db9da36039ead23173bc256ee3b91b"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eedf97be7bc3dbc8addcef4142f4b4164066df0c6f36397ae4aaed3eb187d8ab"}, + {file = "pydantic_core-2.14.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d5f916acf8afbcab6bacbb376ba7dc61f845367901ecd5e328fc4d4aef2fcab0"}, + {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8a14c192c1d724c3acbfb3f10a958c55a2638391319ce8078cb36c02283959b9"}, + {file = "pydantic_core-2.14.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0348b1dc6b76041516e8a854ff95b21c55f5a411c3297d2ca52f5528e49d8411"}, + {file = "pydantic_core-2.14.6-cp39-none-win32.whl", hash = "sha256:de2a0645a923ba57c5527497daf8ec5df69c6eadf869e9cd46e86349146e5975"}, + {file = "pydantic_core-2.14.6-cp39-none-win_amd64.whl", hash = "sha256:aca48506a9c20f68ee61c87f2008f81f8ee99f8d7f0104bff3c47e2d148f89d9"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:d5c28525c19f5bb1e09511669bb57353d22b94cf8b65f3a8d141c389a55dec95"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:78d0768ee59baa3de0f4adac9e3748b4b1fffc52143caebddfd5ea2961595277"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b93785eadaef932e4fe9c6e12ba67beb1b3f1e5495631419c784ab87e975670"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a874f21f87c485310944b2b2734cd6d318765bcbb7515eead33af9641816506e"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b89f4477d915ea43b4ceea6756f63f0288941b6443a2b28c69004fe07fde0d0d"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:172de779e2a153d36ee690dbc49c6db568d7b33b18dc56b69a7514aecbcf380d"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:dfcebb950aa7e667ec226a442722134539e77c575f6cfaa423f24371bb8d2e94"}, + {file = "pydantic_core-2.14.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:55a23dcd98c858c0db44fc5c04fc7ed81c4b4d33c653a7c45ddaebf6563a2f66"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-macosx_10_7_x86_64.whl", hash = "sha256:4241204e4b36ab5ae466ecec5c4c16527a054c69f99bba20f6f75232a6a534e2"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:e574de99d735b3fc8364cba9912c2bec2da78775eba95cbb225ef7dda6acea24"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1302a54f87b5cd8528e4d6d1bf2133b6aa7c6122ff8e9dc5220fbc1e07bffebd"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f8e81e4b55930e5ffab4a68db1af431629cf2e4066dbdbfef65348b8ab804ea8"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:c99462ffc538717b3e60151dfaf91125f637e801f5ab008f81c402f1dff0cd0f"}, + {file = "pydantic_core-2.14.6-pp37-pypy37_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:e4cf2d5829f6963a5483ec01578ee76d329eb5caf330ecd05b3edd697e7d768a"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:cf10b7d58ae4a1f07fccbf4a0a956d705356fea05fb4c70608bb6fa81d103cda"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:399ac0891c284fa8eb998bcfa323f2234858f5d2efca3950ae58c8f88830f145"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c6a5c79b28003543db3ba67d1df336f253a87d3112dac3a51b94f7d48e4c0e1"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:599c87d79cab2a6a2a9df4aefe0455e61e7d2aeede2f8577c1b7c0aec643ee8e"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:43e166ad47ba900f2542a80d83f9fc65fe99eb63ceec4debec160ae729824052"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a0b5db001b98e1c649dd55afa928e75aa4087e587b9524a4992316fa23c9fba"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:747265448cb57a9f37572a488a57d873fd96bf51e5bb7edb52cfb37124516da4"}, + {file = "pydantic_core-2.14.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7ebe3416785f65c28f4f9441e916bfc8a54179c8dea73c23023f7086fa601c5d"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:86c963186ca5e50d5c8287b1d1c9d3f8f024cbe343d048c5bd282aec2d8641f2"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:e0641b506486f0b4cd1500a2a65740243e8670a2549bb02bc4556a83af84ae03"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71d72ca5eaaa8d38c8df16b7deb1a2da4f650c41b58bb142f3fb75d5ad4a611f"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27e524624eace5c59af499cd97dc18bb201dc6a7a2da24bfc66ef151c69a5f2a"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3dde6cac75e0b0902778978d3b1646ca9f438654395a362cb21d9ad34b24acf"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:00646784f6cd993b1e1c0e7b0fdcbccc375d539db95555477771c27555e3c556"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:23598acb8ccaa3d1d875ef3b35cb6376535095e9405d91a3d57a8c7db5d29341"}, + {file = "pydantic_core-2.14.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7f41533d7e3cf9520065f610b41ac1c76bc2161415955fbcead4981b22c7611e"}, + {file = "pydantic_core-2.14.6.tar.gz", hash = "sha256:1fd0c1d395372843fba13a51c28e3bb9d59bd7aebfeb17358ffaaa1e4dbbe948"}, +] + +[[package]] +name = "pydantic-settings" 
+version = "2.1.0" +requires_python = ">=3.8" +summary = "Settings management using Pydantic" +dependencies = [ + "pydantic>=2.3.0", + "python-dotenv>=0.21.0", +] files = [ - {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, - {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, + {file = "pydantic_settings-2.1.0-py3-none-any.whl", hash = "sha256:7621c0cb5d90d1140d2f0ef557bdf03573aac7035948109adf2574770b77605a"}, + {file = "pydantic_settings-2.1.0.tar.gz", hash = "sha256:26b1492e0a24755626ac5e6d715e9077ab7ad4fb5f19a8b7ed7011d52f36141c"}, ] [[package]] @@ -980,6 +993,45 @@ files = [ {file = "pyflakes-3.1.0.tar.gz", hash = "sha256:a0aae034c444db0071aa077972ba4768d40c830d9539fd45bf4cd3f8f6992efc"}, ] +[[package]] +name = "pyinstrument" +version = "4.6.2" +requires_python = ">=3.7" +summary = "Call stack profiler for Python. Shows you why your code is slow!" +files = [ + {file = "pyinstrument-4.6.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7a1b1cd768ea7ea9ab6f5490f7e74431321bcc463e9441dbc2f769617252d9e2"}, + {file = "pyinstrument-4.6.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8a386b9d09d167451fb2111eaf86aabf6e094fed42c15f62ec51d6980bce7d96"}, + {file = "pyinstrument-4.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23c3e3ca8553b9aac09bd978c73d21b9032c707ac6d803bae6a20ecc048df4a8"}, + {file = "pyinstrument-4.6.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5f329f5534ca069420246f5ce57270d975229bcb92a3a3fd6b2ca086527d9764"}, + {file = "pyinstrument-4.6.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4dcdcc7ba224a0c5edfbd00b0f530f5aed2b26da5aaa2f9af5519d4aa8c7e41"}, + {file = "pyinstrument-4.6.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:73db0c2c99119c65b075feee76e903b4ed82e59440fe8b5724acf5c7cb24721f"}, + {file = "pyinstrument-4.6.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:da58f265326f3cf3975366ccb8b39014f1e69ff8327958a089858d71c633d654"}, + {file = "pyinstrument-4.6.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:feebcf860f955401df30d029ec8de7a0c5515d24ea809736430fd1219686fe14"}, + {file = "pyinstrument-4.6.2-cp310-cp310-win32.whl", hash = "sha256:b2b66ff0b16c8ecf1ec22de001cfff46872b2c163c62429055105564eef50b2e"}, + {file = "pyinstrument-4.6.2-cp310-cp310-win_amd64.whl", hash = "sha256:8d104b7a7899d5fa4c5bf1ceb0c1a070615a72c5dc17bc321b612467ad5c5d88"}, + {file = "pyinstrument-4.6.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:62f6014d2b928b181a52483e7c7b82f2c27e22c577417d1681153e5518f03317"}, + {file = "pyinstrument-4.6.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dcb5c8d763c5df55131670ba2a01a8aebd0d490a789904a55eb6a8b8d497f110"}, + {file = "pyinstrument-4.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ed4e8c6c84e0e6429ba7008a66e435ede2d8cb027794c20923c55669d9c5633"}, + {file = "pyinstrument-4.6.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6c0f0e1d8f8c70faa90ff57f78ac0dda774b52ea0bfb2d9f0f41ce6f3e7c869e"}, + {file = "pyinstrument-4.6.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b3c44cb037ad0d6e9d9a48c14d856254ada641fbd0ae9de40da045fc2226a2a"}, + 
{file = "pyinstrument-4.6.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:be9901f17ac2f527c352f2fdca3d717c1d7f2ce8a70bad5a490fc8cc5d2a6007"}, + {file = "pyinstrument-4.6.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8a9791bf8916c1cf439c202fded32de93354b0f57328f303d71950b0027c7811"}, + {file = "pyinstrument-4.6.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d6162615e783c59e36f2d7caf903a7e3ecb6b32d4a4ae8907f2760b2ef395bf6"}, + {file = "pyinstrument-4.6.2-cp311-cp311-win32.whl", hash = "sha256:28af084aa84bbfd3620ebe71d5f9a0deca4451267f363738ca824f733de55056"}, + {file = "pyinstrument-4.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:dd6007d3c2e318e09e582435dd8d111cccf30d342af66886b783208813caf3d7"}, + {file = "pyinstrument-4.6.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3a165e0d2deb212d4cf439383982a831682009e1b08733c568cac88c89784e62"}, + {file = "pyinstrument-4.6.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7ba858b3d6f6e5597c641edcc0e7e464f85aba86d71bc3b3592cb89897bf43f6"}, + {file = "pyinstrument-4.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2fd8e547cf3df5f0ec6e4dffbe2e857f6b28eda51b71c3c0b5a2fc0646527835"}, + {file = "pyinstrument-4.6.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0de2c1714a37a820033b19cf134ead43299a02662f1379140974a9ab733c5f3a"}, + {file = "pyinstrument-4.6.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01fc45dedceec3df81668d702bca6d400d956c8b8494abc206638c167c78dfd9"}, + {file = "pyinstrument-4.6.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5b6e161ef268d43ee6bbfae7fd2cdd0a52c099ddd21001c126ca1805dc906539"}, + {file = "pyinstrument-4.6.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6ba8e368d0421f15ba6366dfd60ec131c1b46505d021477e0f865d26cf35a605"}, + {file = "pyinstrument-4.6.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:edca46f04a573ac2fb11a84b937844e6a109f38f80f4b422222fb5be8ecad8cb"}, + {file = "pyinstrument-4.6.2-cp39-cp39-win32.whl", hash = "sha256:baf375953b02fe94d00e716f060e60211ede73f49512b96687335f7071adb153"}, + {file = "pyinstrument-4.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:af1a953bce9fd530040895d01ff3de485e25e1576dccb014f76ba9131376fcad"}, + {file = "pyinstrument-4.6.2.tar.gz", hash = "sha256:0002ee517ed8502bbda6eb2bb1ba8f95a55492fcdf03811ba13d4806e50dd7f6"}, +] + [[package]] name = "pytest" version = "7.4.0" @@ -987,9 +1039,7 @@ requires_python = ">=3.7" summary = "pytest: simple powerful testing with Python" groups = ["test"] dependencies = [ - "colorama; sys_platform == \"win32\"", "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", - "importlib-metadata>=0.12; python_version < \"3.8\"", "iniconfig", "packaging", "pluggy<2.0,>=0.12", @@ -1041,36 +1091,12 @@ files = [ [[package]] name = "pytz" -version = "2023.3" +version = "2024.1" summary = "World timezone definitions, modern and historical" groups = ["default"] files = [ - {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, - {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.1" -requires_python = ">=3.6" -summary = "YAML parser and emitter for Python" -groups = ["default"] -files = [ - {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, - {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, - {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, - {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, - {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, - {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, - {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, + {file = "pytz-2024.1-py2.py3-none-any.whl", hash = "sha256:328171f4e3623139da4983451950b28e95ac706e13f3f2630a879749e7a8b319"}, + {file = "pytz-2024.1.tar.gz", hash = "sha256:2a29735ea9c18baf14b448846bde5a48030ed267578472d8955cd0e7443a9812"}, ] [[package]] @@ -1105,73 +1131,66 @@ files = [ {file = "requests_oauthlib-1.3.1-py2.py3-none-any.whl", hash = "sha256:2577c501a2fb8d05a304c09d090d6e47c306fef15809d102b327cf8364bddab5"}, ] -[[package]] -name = "schematics" -version = "2.1.1" -summary = "Python Data Structures for Humans" -groups = ["default"] -files = [ - {file = "schematics-2.1.1-py2.py3-none-any.whl", hash = "sha256:be2d451bfb86789975e5ec0864aec569b63cea9010f0d24cbbd992a4e564c647"}, - {file = "schematics-2.1.1.tar.gz", hash = "sha256:34c87f51a25063bb498ae1cc201891b134cfcb329baf9e9f4f3ae869b767560f"}, -] - [[package]] name = "scikit-learn" -version = "1.2.2" -requires_python = ">=3.8" +version = "1.4.2" +requires_python = ">=3.9" summary = "A set of python modules for machine learning and data mining" groups = ["default"] dependencies = [ - "joblib>=1.1.1", - "numpy>=1.17.3", - "scipy>=1.3.2", + "joblib>=1.2.0", + "numpy>=1.19.5", + "scipy>=1.6.0", "threadpoolctl>=2.0.0", ] files = [ - {file = "scikit-learn-1.2.2.tar.gz", hash = "sha256:8429aea30ec24e7a8c7ed8a3fa6213adf3814a6efbea09e16e0a0c71e1a1a3d7"}, - {file = 
"scikit_learn-1.2.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:99cc01184e347de485bf253d19fcb3b1a3fb0ee4cea5ee3c43ec0cc429b6d29f"}, - {file = "scikit_learn-1.2.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:e6e574db9914afcb4e11ade84fab084536a895ca60aadea3041e85b8ac963edb"}, - {file = "scikit_learn-1.2.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fe83b676f407f00afa388dd1fdd49e5c6612e551ed84f3b1b182858f09e987d"}, - {file = "scikit_learn-1.2.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e2642baa0ad1e8f8188917423dd73994bf25429f8893ddbe115be3ca3183584"}, - {file = "scikit_learn-1.2.2-cp310-cp310-win_amd64.whl", hash = "sha256:ad66c3848c0a1ec13464b2a95d0a484fd5b02ce74268eaa7e0c697b904f31d6c"}, - {file = "scikit_learn-1.2.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfeaf8be72117eb61a164ea6fc8afb6dfe08c6f90365bde2dc16456e4bc8e45f"}, - {file = "scikit_learn-1.2.2-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:fe0aa1a7029ed3e1dcbf4a5bc675aa3b1bc468d9012ecf6c6f081251ca47f590"}, - {file = "scikit_learn-1.2.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:065e9673e24e0dc5113e2dd2b4ca30c9d8aa2fa90f4c0597241c93b63130d233"}, - {file = "scikit_learn-1.2.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf036ea7ef66115e0d49655f16febfa547886deba20149555a41d28f56fd6d3c"}, - {file = "scikit_learn-1.2.2-cp311-cp311-win_amd64.whl", hash = "sha256:8b0670d4224a3c2d596fd572fb4fa673b2a0ccfb07152688ebd2ea0b8c61025c"}, - {file = "scikit_learn-1.2.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8156db41e1c39c69aa2d8599ab7577af53e9e5e7a57b0504e116cc73c39138dd"}, - {file = "scikit_learn-1.2.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fe175ee1dab589d2e1033657c5b6bec92a8a3b69103e3dd361b58014729975c3"}, - {file = "scikit_learn-1.2.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d5312d9674bed14f73773d2acf15a3272639b981e60b72c9b190a0cffed5bad"}, - {file = "scikit_learn-1.2.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea061bf0283bf9a9f36ea3c5d3231ba2176221bbd430abd2603b1c3b2ed85c89"}, - {file = "scikit_learn-1.2.2-cp39-cp39-win_amd64.whl", hash = "sha256:6477eed40dbce190f9f9e9d0d37e020815825b300121307942ec2110302b66a3"}, + {file = "scikit-learn-1.4.2.tar.gz", hash = "sha256:daa1c471d95bad080c6e44b4946c9390a4842adc3082572c20e4f8884e39e959"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8539a41b3d6d1af82eb629f9c57f37428ff1481c1e34dddb3b9d7af8ede67ac5"}, + {file = "scikit_learn-1.4.2-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:68b8404841f944a4a1459b07198fa2edd41a82f189b44f3e1d55c104dbc2e40c"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81bf5d8bbe87643103334032dd82f7419bc8c8d02a763643a6b9a5c7288c5054"}, + {file = "scikit_learn-1.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36f0ea5d0f693cb247a073d21a4123bdf4172e470e6d163c12b74cbb1536cf38"}, + {file = "scikit_learn-1.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:87440e2e188c87db80ea4023440923dccbd56fbc2d557b18ced00fef79da0727"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:45dee87ac5309bb82e3ea633955030df9bbcb8d2cdb30383c6cd483691c546cc"}, + {file = "scikit_learn-1.4.2-cp311-cp311-macosx_12_0_arm64.whl", hash = 
"sha256:1d0b25d9c651fd050555aadd57431b53d4cf664e749069da77f3d52c5ad14b3b"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0203c368058ab92efc6168a1507d388d41469c873e96ec220ca8e74079bf62e"}, + {file = "scikit_learn-1.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44c62f2b124848a28fd695db5bc4da019287abf390bfce602ddc8aa1ec186aae"}, + {file = "scikit_learn-1.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:5cd7b524115499b18b63f0c96f4224eb885564937a0b3477531b2b63ce331904"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9993d5e78a8148b1d0fdf5b15ed92452af5581734129998c26f481c46586d68"}, + {file = "scikit_learn-1.4.2-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:426d258fddac674fdf33f3cb2d54d26f49406e2599dbf9a32b4d1696091d4256"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5460a1a5b043ae5ae4596b3126a4ec33ccba1b51e7ca2c5d36dac2169f62ab1d"}, + {file = "scikit_learn-1.4.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49d64ef6cb8c093d883e5a36c4766548d974898d378e395ba41a806d0e824db8"}, + {file = "scikit_learn-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:c97a50b05c194be9146d61fe87dbf8eac62b203d9e87a3ccc6ae9aed2dfaf361"}, ] [[package]] name = "scipy" -version = "1.10.1" -requires_python = "<3.12,>=3.8" +version = "1.13.0" +requires_python = ">=3.9" summary = "Fundamental algorithms for scientific computing in Python" groups = ["default"] dependencies = [ - "numpy<1.27.0,>=1.19.5", -] -files = [ - {file = "scipy-1.10.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e7354fd7527a4b0377ce55f286805b34e8c54b91be865bac273f527e1b839019"}, - {file = "scipy-1.10.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:4b3f429188c66603a1a5c549fb414e4d3bdc2a24792e061ffbd607d3d75fd84e"}, - {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1553b5dcddd64ba9a0d95355e63fe6c3fc303a8fd77c7bc91e77d61363f7433f"}, - {file = "scipy-1.10.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c0ff64b06b10e35215abce517252b375e580a6125fd5fdf6421b98efbefb2d2"}, - {file = "scipy-1.10.1-cp310-cp310-win_amd64.whl", hash = "sha256:fae8a7b898c42dffe3f7361c40d5952b6bf32d10c4569098d276b4c547905ee1"}, - {file = "scipy-1.10.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0f1564ea217e82c1bbe75ddf7285ba0709ecd503f048cb1236ae9995f64217bd"}, - {file = "scipy-1.10.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:d925fa1c81b772882aa55bcc10bf88324dadb66ff85d548c71515f6689c6dac5"}, - {file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aaea0a6be54462ec027de54fca511540980d1e9eea68b2d5c1dbfe084797be35"}, - {file = "scipy-1.10.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15a35c4242ec5f292c3dd364a7c71a61be87a3d4ddcc693372813c0b73c9af1d"}, - {file = "scipy-1.10.1-cp311-cp311-win_amd64.whl", hash = "sha256:43b8e0bcb877faf0abfb613d51026cd5cc78918e9530e375727bf0625c82788f"}, - {file = "scipy-1.10.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cd9f1027ff30d90618914a64ca9b1a77a431159df0e2a195d8a9e8a04c78abf9"}, - {file = "scipy-1.10.1-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:79c8e5a6c6ffaf3a2262ef1be1e108a035cf4f05c14df56057b64acc5bebffb6"}, - {file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:51af417a000d2dbe1ec6c372dfe688e041a7084da4fdd350aeb139bd3fb55353"}, - {file = "scipy-1.10.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b4735d6c28aad3cdcf52117e0e91d6b39acd4272f3f5cd9907c24ee931ad601"}, - {file = "scipy-1.10.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ff7f37b1bf4417baca958d254e8e2875d0cc23aaadbe65b3d5b3077b0eb23ea"}, - {file = "scipy-1.10.1.tar.gz", hash = "sha256:2cf9dfb80a7b4589ba4c40ce7588986d6d5cebc5457cad2c2880f6bc2d42f3a5"}, + "numpy<2.3,>=1.22.4", +] +files = [ + {file = "scipy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ba419578ab343a4e0a77c0ef82f088238a93eef141b2b8017e46149776dfad4d"}, + {file = "scipy-1.13.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:22789b56a999265431c417d462e5b7f2b487e831ca7bef5edeb56efe4c93f86e"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05f1432ba070e90d42d7fd836462c50bf98bd08bed0aa616c359eed8a04e3922"}, + {file = "scipy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8434f6f3fa49f631fae84afee424e2483289dfc30a47755b4b4e6b07b2633a4"}, + {file = "scipy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:dcbb9ea49b0167de4167c40eeee6e167caeef11effb0670b554d10b1e693a8b9"}, + {file = "scipy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:1d2f7bb14c178f8b13ebae93f67e42b0a6b0fc50eba1cd8021c9b6e08e8fb1cd"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fbcf8abaf5aa2dc8d6400566c1a727aed338b5fe880cde64907596a89d576fa"}, + {file = "scipy-1.13.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:5e4a756355522eb60fcd61f8372ac2549073c8788f6114449b37e9e8104f15a5"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5acd8e1dbd8dbe38d0004b1497019b2dbbc3d70691e65d69615f8a7292865d7"}, + {file = "scipy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ff7dad5d24a8045d836671e082a490848e8639cabb3dbdacb29f943a678683d"}, + {file = "scipy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4dca18c3ffee287ddd3bc8f1dabaf45f5305c5afc9f8ab9cbfab855e70b2df5c"}, + {file = "scipy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:a2f471de4d01200718b2b8927f7d76b5d9bde18047ea0fa8bd15c5ba3f26a1d6"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5407708195cb38d70fd2d6bb04b1b9dd5c92297d86e9f9daae1576bd9e06f602"}, + {file = "scipy-1.13.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:ac38c4c92951ac0f729c4c48c9e13eb3675d9986cc0c83943784d7390d540c78"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09c74543c4fbeb67af6ce457f6a6a28e5d3739a87f62412e4a16e46f164f0ae5"}, + {file = "scipy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28e286bf9ac422d6beb559bc61312c348ca9b0f0dae0d7c5afde7f722d6ea13d"}, + {file = "scipy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33fde20efc380bd23a78a4d26d59fc8704e9b5fd9b08841693eb46716ba13d86"}, + {file = "scipy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:45c08bec71d3546d606989ba6e7daa6f0992918171e2a6f7fbedfa7361c2de1e"}, + {file = "scipy-1.13.0.tar.gz", hash = "sha256:58569af537ea29d3f78e5abd18398459f195546bb3be23d16677fb26616cc11e"}, ] [[package]] @@ -1181,9 +1200,7 @@ summary = "Python client for Sentry (https://sentry.io)" groups = ["default"] dependencies = [ "certifi", - "urllib3>=1.25.7; python_version <= \"3.4\"", 
"urllib3>=1.26.11; python_version >= \"3.6\"", - "urllib3>=1.26.9; python_version == \"3.5\"", ] files = [ {file = "sentry-sdk-1.26.0.tar.gz", hash = "sha256:760e4fb6d01c994110507133e08ecd4bdf4d75ee4be77f296a3579796cf73134"}, @@ -1193,13 +1210,11 @@ files = [ [[package]] name = "sentry-sdk" version = "1.26.0" -extras = ["flask"] +extras = ["fastapi"] summary = "Python client for Sentry (https://sentry.io)" groups = ["default"] dependencies = [ - "blinker>=1.1", - "flask>=0.11", - "markupsafe", + "fastapi>=0.79.0", "sentry-sdk==1.26.0", ] files = [ @@ -1209,13 +1224,13 @@ files = [ [[package]] name = "setuptools" -version = "67.7.2" -requires_python = ">=3.7" +version = "69.5.1" +requires_python = ">=3.8" summary = "Easily download, build, install, upgrade, and uninstall Python packages" groups = ["default"] files = [ - {file = "setuptools-67.7.2-py3-none-any.whl", hash = "sha256:23aaf86b85ca52ceb801d32703f12d77517b2556af839621c641fca11287952b"}, - {file = "setuptools-67.7.2.tar.gz", hash = "sha256:f104fa03692a2602fa0fec6c6a9e63b6c8a968de13e17c026957dd1f53d80990"}, + {file = "setuptools-69.5.1-py3-none-any.whl", hash = "sha256:c636ac361bc47580504644275c9ad802c50415c7522212252c033bd15f301f32"}, + {file = "setuptools-69.5.1.tar.gz", hash = "sha256:6c1fccdac05a97e598fb0ae3bbed5904ccb317337a51139dcd51453611bbb987"}, ] [[package]] @@ -1265,6 +1280,16 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "sniffio" +version = "1.3.1" +requires_python = ">=3.7" +summary = "Sniff out which async library your code is running under" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + [[package]] name = "sqlalchemy" version = "2.0.19" @@ -1273,7 +1298,6 @@ summary = "Database Abstraction Library" groups = ["default"] dependencies = [ "greenlet!=0.4.17; platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\"", - "importlib-metadata; python_version < \"3.8\"", "typing-extensions>=4.2.0", ] files = [ @@ -1293,10 +1317,31 @@ files = [ {file = "SQLAlchemy-2.0.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3afa8a21a9046917b3a12ffe016ba7ebe7a55a6fc0c7d950beb303c735c3c3ad"}, {file = "SQLAlchemy-2.0.19-cp311-cp311-win32.whl", hash = "sha256:c896d4e6ab2eba2afa1d56be3d0b936c56d4666e789bfc59d6ae76e9fcf46145"}, {file = "SQLAlchemy-2.0.19-cp311-cp311-win_amd64.whl", hash = "sha256:024d2f67fb3ec697555e48caeb7147cfe2c08065a4f1a52d93c3d44fc8e6ad1c"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a752b7a9aceb0ba173955d4f780c64ee15a1a991f1c52d307d6215c6c73b3a4c"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7351c05db355da112e056a7b731253cbeffab9dfdb3be1e895368513c7d70106"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa51ce4aea583b0c6b426f4b0563d3535c1c75986c4373a0987d84d22376585b"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae7473a67cd82a41decfea58c0eac581209a0aa30f8bc9190926fbf628bb17f7"}, + {file = 
"SQLAlchemy-2.0.19-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:851a37898a8a39783aab603c7348eb5b20d83c76a14766a43f56e6ad422d1ec8"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539010665c90e60c4a1650afe4ab49ca100c74e6aef882466f1de6471d414be7"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-win32.whl", hash = "sha256:f82c310ddf97b04e1392c33cf9a70909e0ae10a7e2ddc1d64495e3abdc5d19fb"}, + {file = "SQLAlchemy-2.0.19-cp39-cp39-win_amd64.whl", hash = "sha256:8e712cfd2e07b801bc6b60fdf64853bc2bd0af33ca8fa46166a23fe11ce0dbb0"}, {file = "SQLAlchemy-2.0.19-py3-none-any.whl", hash = "sha256:314145c1389b021a9ad5aa3a18bac6f5d939f9087d7fc5443be28cba19d2c972"}, {file = "SQLAlchemy-2.0.19.tar.gz", hash = "sha256:77a14fa20264af73ddcdb1e2b9c5a829b8cc6b8304d0f093271980e36c200a3f"}, ] +[[package]] +name = "starlette" +version = "0.32.0.post1" +requires_python = ">=3.8" +summary = "The little ASGI library that shines." +dependencies = [ + "anyio<5,>=3.4.0", +] +files = [ + {file = "starlette-0.32.0.post1-py3-none-any.whl", hash = "sha256:cd0cb10ddb49313f609cedfac62c8c12e56c7314b66d89bb077ba228bada1b09"}, + {file = "starlette-0.32.0.post1.tar.gz", hash = "sha256:e54e2b7e2fb06dff9eac40133583f10dfa05913f5a85bf26f427c7a40a9a3d02"}, +] + [[package]] name = "text-unidecode" version = "1.3" @@ -1309,13 +1354,13 @@ files = [ [[package]] name = "threadpoolctl" -version = "3.1.0" -requires_python = ">=3.6" +version = "3.5.0" +requires_python = ">=3.8" summary = "threadpoolctl" groups = ["default"] files = [ - {file = "threadpoolctl-3.1.0-py3-none-any.whl", hash = "sha256:8b99adda265feb6773280df41eece7b2e6561b772d21ffd52e372f999024907b"}, - {file = "threadpoolctl-3.1.0.tar.gz", hash = "sha256:a335baacfaa4400ae1f0d8e3a58d6674d2f8828e3716bb2802c44955ad391380"}, + {file = "threadpoolctl-3.5.0-py3-none-any.whl", hash = "sha256:56c1e26c150397e58c4926da8eeee87533b1e32bef131bd4bf6a2f45f3185467"}, + {file = "threadpoolctl-3.5.0.tar.gz", hash = "sha256:082433502dd922bf738de0d8bcc4fdcbf0979ff44c42bd40f5af8a282f6fa107"}, ] [[package]] @@ -1332,50 +1377,58 @@ files = [ [[package]] name = "typing-extensions" -version = "4.5.0" -requires_python = ">=3.7" -summary = "Backported and Experimental Type Hints for Python 3.7+" -groups = ["default", "lint"] +version = "4.8.0" +requires_python = ">=3.8" +summary = "Backported and Experimental Type Hints for Python 3.8+" files = [ - {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, - {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] name = "tzdata" -version = "2023.3" +version = "2024.1" requires_python = ">=2" summary = "Provider of IANA time zone data" groups = ["default"] files = [ - {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, - {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, + {file = "tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252"}, + {file = "tzdata-2024.1.tar.gz", hash = 
"sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd"}, ] [[package]] name = "tzlocal" -version = "5.0.1" -requires_python = ">=3.7" +version = "5.2" +requires_python = ">=3.8" summary = "tzinfo object for the local timezone" -groups = ["default"] -dependencies = [ - "backports-zoneinfo; python_version < \"3.9\"", - "tzdata; platform_system == \"Windows\"", -] files = [ - {file = "tzlocal-5.0.1-py3-none-any.whl", hash = "sha256:f3596e180296aaf2dbd97d124fe76ae3a0e3d32b258447de7b939b3fd4be992f"}, - {file = "tzlocal-5.0.1.tar.gz", hash = "sha256:46eb99ad4bdb71f3f72b7d24f4267753e240944ecfc16f25d2719ba89827a803"}, + {file = "tzlocal-5.2-py3-none-any.whl", hash = "sha256:49816ef2fe65ea8ac19d19aa7a1ae0551c834303d5014c6d5a62e4cbda8047b8"}, + {file = "tzlocal-5.2.tar.gz", hash = "sha256:8d399205578f1a9342816409cc1e46a93ebd5755e39ea2d85334bea911bf0e6e"}, ] [[package]] name = "urllib3" -version = "1.26.15" -requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +version = "2.2.1" +requires_python = ">=3.8" summary = "HTTP library with thread-safe connection pooling, file post, and more." groups = ["default"] files = [ - {file = "urllib3-1.26.15-py2.py3-none-any.whl", hash = "sha256:aa751d169e23c7479ce47a0cb0da579e3ede798f994f5816a74e4f4500dcea42"}, - {file = "urllib3-1.26.15.tar.gz", hash = "sha256:8a388717b9476f934a21484e8c8e61875ab60644d29b9b39e11e4b9dc1c6b305"}, + {file = "urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d"}, + {file = "urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19"}, +] + +[[package]] +name = "uvicorn" +version = "0.19.0" +requires_python = ">=3.7" +summary = "The lightning-fast ASGI server." 
+dependencies = [ + "click>=7.0", + "h11>=0.8", +] +files = [ + {file = "uvicorn-0.19.0-py3-none-any.whl", hash = "sha256:cc277f7e73435748e69e075a721841f7c4a95dba06d12a72fe9874acced16f6f"}, + {file = "uvicorn-0.19.0.tar.gz", hash = "sha256:cf538f3018536edb1f4a826311137ab4944ed741d52aeb98846f52215de57f25"}, ] [[package]] @@ -1404,31 +1457,32 @@ files = [ [[package]] name = "zipp" -version = "3.16.2" +version = "3.18.1" requires_python = ">=3.8" summary = "Backport of pathlib-compatible object wrapper for zip files" groups = ["default"] files = [ - {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, - {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, + {file = "zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b"}, + {file = "zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715"}, ] [[package]] name = "zope-event" -version = "4.6" +version = "5.0" +requires_python = ">=3.7" summary = "Very basic event publishing system" groups = ["default"] dependencies = [ "setuptools", ] files = [ - {file = "zope.event-4.6-py2.py3-none-any.whl", hash = "sha256:73d9e3ef750cca14816a9c322c7250b0d7c9dbc337df5d1b807ff8d3d0b9e97c"}, - {file = "zope.event-4.6.tar.gz", hash = "sha256:81d98813046fc86cc4136e3698fee628a3282f9c320db18658c21749235fce80"}, + {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, + {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, ] [[package]] name = "zope-interface" -version = "6.0" +version = "6.3" requires_python = ">=3.7" summary = "Interfaces for Python" groups = ["default"] @@ -1436,23 +1490,23 @@ dependencies = [ "setuptools", ] files = [ - {file = "zope.interface-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f299c020c6679cb389814a3b81200fe55d428012c5e76da7e722491f5d205990"}, - {file = "zope.interface-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ee4b43f35f5dc15e1fec55ccb53c130adb1d11e8ad8263d68b1284b66a04190d"}, - {file = "zope.interface-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5a158846d0fca0a908c1afb281ddba88744d403f2550dc34405c3691769cdd85"}, - {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f72f23bab1848edb7472309e9898603141644faec9fd57a823ea6b4d1c4c8995"}, - {file = "zope.interface-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48f4d38cf4b462e75fac78b6f11ad47b06b1c568eb59896db5b6ec1094eb467f"}, - {file = "zope.interface-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:87b690bbee9876163210fd3f500ee59f5803e4a6607d1b1238833b8885ebd410"}, - {file = "zope.interface-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2363e5fd81afb650085c6686f2ee3706975c54f331b426800b53531191fdf28"}, - {file = "zope.interface-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:af169ba897692e9cd984a81cb0f02e46dacdc07d6cf9fd5c91e81f8efaf93d52"}, - {file = "zope.interface-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fa90bac61c9dc3e1a563e5babb3fd2c0c1c80567e815442ddbe561eadc803b30"}, - {file = 
"zope.interface-6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:89086c9d3490a0f265a3c4b794037a84541ff5ffa28bb9c24cc9f66566968464"}, - {file = "zope.interface-6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:809fe3bf1a91393abc7e92d607976bbb8586512913a79f2bf7d7ec15bd8ea518"}, - {file = "zope.interface-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:0ec9653825f837fbddc4e4b603d90269b501486c11800d7c761eee7ce46d1bbb"}, - {file = "zope.interface-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d0583b75f2e70ec93f100931660328965bb9ff65ae54695fb3fa0a1255daa6f2"}, - {file = "zope.interface-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:23ac41d52fd15dd8be77e3257bc51bbb82469cf7f5e9a30b75e903e21439d16c"}, - {file = "zope.interface-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99856d6c98a326abbcc2363827e16bd6044f70f2ef42f453c0bd5440c4ce24e5"}, - {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1592f68ae11e557b9ff2bc96ac8fc30b187e77c45a3c9cd876e3368c53dc5ba8"}, - {file = "zope.interface-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4407b1435572e3e1610797c9203ad2753666c62883b921318c5403fb7139dec2"}, - {file = "zope.interface-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:5171eb073474a5038321409a630904fd61f12dd1856dd7e9d19cd6fe092cbbc5"}, - {file = "zope.interface-6.0.tar.gz", hash = "sha256:aab584725afd10c710b8f1e6e208dbee2d0ad009f57d674cb9d1b3964037275d"}, + {file = "zope.interface-6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f32010ffb87759c6a3ad1c65ed4d2e38e51f6b430a1ca11cee901ec2b42e021"}, + {file = "zope.interface-6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e78a183a3c2f555c2ad6aaa1ab572d1c435ba42f1dc3a7e8c82982306a19b785"}, + {file = "zope.interface-6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa0491a9f154cf8519a02026dc85a416192f4cb1efbbf32db4a173ba28b289a"}, + {file = "zope.interface-6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62e32f02b3f26204d9c02c3539c802afc3eefb19d601a0987836ed126efb1f21"}, + {file = "zope.interface-6.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40df4aea777be321b7e68facb901bc67317e94b65d9ab20fb96e0eb3c0b60a1"}, + {file = "zope.interface-6.3-cp310-cp310-win_amd64.whl", hash = "sha256:46034be614d1f75f06e7dcfefba21d609b16b38c21fc912b01a99cb29e58febb"}, + {file = "zope.interface-6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:600101f43a7582d5b9504a7c629a1185a849ce65e60fca0f6968dfc4b76b6d39"}, + {file = "zope.interface-6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d6b229f5e1a6375f206455cc0a63a8e502ed190fe7eb15e94a312dc69d40299"}, + {file = "zope.interface-6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:10cde8dc6b2fd6a1d0b5ca4be820063e46ddba417ab82bcf55afe2227337b130"}, + {file = "zope.interface-6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40aa8c8e964d47d713b226c5baf5f13cdf3a3169c7a2653163b17ff2e2334d10"}, + {file = "zope.interface-6.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d165d7774d558ea971cb867739fb334faf68fc4756a784e689e11efa3becd59e"}, + {file = "zope.interface-6.3-cp311-cp311-win_amd64.whl", hash = "sha256:69dedb790530c7ca5345899a1b4cb837cc53ba669051ea51e8c18f82f9389061"}, + {file = "zope.interface-6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2c3cfb272bcb83650e6695d49ae0d14dd06dc694789a3d929f23758557a23d92"}, + {file = "zope.interface-6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:01a0b3dd012f584afcf03ed814bce0fc40ed10e47396578621509ac031be98bf"}, + {file = "zope.interface-6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4137025731e824eee8d263b20682b28a0bdc0508de9c11d6c6be54163e5b7c83"}, + {file = "zope.interface-6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c8731596198198746f7ce2a4487a0edcbc9ea5e5918f0ab23c4859bce56055c"}, + {file = "zope.interface-6.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf34840e102d1d0b2d39b1465918d90b312b1119552cebb61a242c42079817b9"}, + {file = "zope.interface-6.3-cp39-cp39-win_amd64.whl", hash = "sha256:a1adc14a2a9d5e95f76df625a9b39f4709267a483962a572e3f3001ef90ea6e6"}, + {file = "zope.interface-6.3.tar.gz", hash = "sha256:f83d6b4b22262d9a826c3bd4b2fbfafe1d0000f085ef8e44cd1328eea274ae6a"}, ] diff --git a/pyproject.toml b/pyproject.toml index 2ca43061e7..8927b13246 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,33 +7,31 @@ dependencies = [ # Direct dependencies (at least one import requires it) "APScheduler==3.10.1", "alembic==1.11.1", + "asyncpg==0.29.0", "bleach==6.0.0", "cachetools==5.3.1", - "Flask==2.3.2", - "Flask-Cors==4.0.0", - "Flask-HTTPAuth==4.8.0", - "Flask-Migrate==4.0.4", - "Flask-Mail==0.9.1", - "Flask-RESTful==0.3.10", - "Flask-SQLAlchemy==3.0.5", - "flask-swagger==0.2.14", - "GeoAlchemy2==0.14.1", - "geojson==3.0.1", + "fastapi==0.108.0", + "GeoAlchemy2==0.14.3", + "geojson==3.1.0", "itsdangerous==2.1.2", + "loguru==0.7.2", "Markdown==3.4.4", "oauthlib==3.2.2", "pandas>=2.0.2", - "psycopg2==2.9.6", + "pydantic==2.5.3", + "pydantic-settings==2.1.0", + "pyinstrument==4.6.2", "python-dateutil==2.8.2", "python-dotenv==1.0.0", "python-slugify==8.0.1", "requests==2.31.0", "requests-oauthlib==1.3.1", - "schematics==2.1.1", "scikit-learn>=1.2.2", - "sentry-sdk[flask]==1.26.0", + "sentry-sdk[fastapi]==1.26.0", "shapely==2.0.1", "SQLAlchemy==2.0.19", + "typing-extensions==4.8.0", + "uvicorn==0.19.0", "Werkzeug==2.3.6", # Indirect, but required dependencies (often required for efficient deployments) "gevent==22.10.2", @@ -43,6 +41,8 @@ dependencies = [ "importlib-metadata==6.8.0", # Dependencies used by hotosm.org for production deployments "newrelic==8.8.0", + "databases>=0.9.0", + "fastapi-mail==1.4.1", ] requires-python = ">=3.9,<=3.11" readme = "README.md" @@ -52,18 +52,7 @@ license = { text = "BSD-2-Clause" } [tool.pdm.dev-dependencies] test = ["coverage==7.2.7", "pytest==7.4.0"] lint = ["black==23.7.0", "flake8==6.1.0"] -dev = ["psycopg2-binary>=2.9.6"] - -[tool.pdm.scripts] -start = "flask run --debug --reload" -migrate = "flask db migrate" -upgrade = "flask db upgrade" -downgrade = "flask db downgrade" -test = "python -m unittest discover" -lint = "black manage.py backend tests migrations" -flake8 = "flake8 manage.py backend tests migrations" -coverage-discover = "coverage run -m unittest discover" - +dev = ["pyinstrument==4.6.2"] [tool.commitizen] name = "cz_conventional_commits" @@ -72,6 +61,7 @@ version_scheme = 
"pep440" version_provider = "pep621" update_changelog_on_bump = true major_version_zero = true + [build-system] requires = ["pdm-pep517>=1.0.0"] build-backend = "pdm.pep517.api" diff --git a/requirements.txt b/requirements.txt index 5c1e335f86..20d1ea53e8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,53 +1,39 @@ -# IMPORTANT: It is recommended to generate requirements.txt using "pdm export --dev --without-hashes > requirements.txt" -# Since we are using pdm for dependency management. - -# Update instructions: -# 1. Delete the virtual environment -# 2. Create a new clean virtual environment -# 3. Remove all unnecessary indirect dependencies in this file ("Indirect dependencies (these can be blown away at any time)") -# 4. Update the relevant packages -# 5. Run `pip install -r requirements.txt` -# 6. Run `pip freeze -r requirements.txt > requirements.new.txt` -# 7. Run `mv requirements.new.txt requirements.txt` -# 8. Run tests -# -# Direct dependencies (at least one import requires it) -APScheduler==3.10.1 alembic==1.11.1 +APScheduler==3.10.1 bleach==6.0.0 cachetools==5.3.1 -Flask==2.3.2 -Flask-Cors==4.0.0 -Flask-HTTPAuth==4.8.0 -Flask-Migrate==4.0.4 -Flask-Mail==0.9.1 -Flask-RESTful==0.3.10 -Flask-SQLAlchemy==3.0.5 -flask-swagger==0.2.14 -GeoAlchemy2==0.14.1 -geojson==3.0.1 +fastapi==0.108.0 +GeoAlchemy2==0.14.3 +geojson==3.1.0 itsdangerous==2.1.2 +loguru==0.7.2 Markdown==3.4.4 oauthlib==3.2.2 pandas>=2.0.2 -scikit-learn>=1.2.2 -psycopg2==2.9.6 +pydantic==2.5.3 +pydantic-settings==2.1.0 python-dateutil==2.8.2 python-dotenv==1.0.0 python-slugify==8.0.1 requests==2.31.0 requests-oauthlib==1.3.1 -schematics==2.1.1 -sentry-sdk[flask]==1.26.0 +scikit-learn>=1.2.2 shapely==2.0.1 SQLAlchemy==2.0.19 +typing-extensions==4.8.0 +uvicorn==0.19.0 Werkzeug==2.3.6 +asyncpg==0.29.0 +sqlmodel==0.0.16 + # Dev dependencies (stuff useful for development) black==23.7.0 coverage==7.2.7 flake8==6.1.0 psycopg2-binary>=2.9.6 pytest==7.4.0 +pyinstrument==4.6.2 + # Indirect, but required dependencies (often required for efficient deployments) gevent==22.10.2 greenlet==2.0.2 diff --git a/scripts/docker/Dockerfile.frontend b/scripts/docker/Dockerfile.frontend index 3ed4955365..54e05fc89c 100644 --- a/scripts/docker/Dockerfile.frontend +++ b/scripts/docker/Dockerfile.frontend @@ -9,7 +9,7 @@ RUN git config --global url.https://github.com/.insteadOf git@github.com: \ # SERVE COPY tasking-manager.env .. -RUN npm run build +RUN yarn run build FROM nginx:stable-alpine COPY --from=build /usr/src/app/frontend/build /usr/share/nginx/html diff --git a/scripts/locust/Dockerfile b/scripts/locust/Dockerfile new file mode 100644 index 0000000000..1fe6880bc9 --- /dev/null +++ b/scripts/locust/Dockerfile @@ -0,0 +1,3 @@ +FROM python:3.11-slim +WORKDIR /app/locust +RUN pip install locust diff --git a/scripts/locust/docker-compose.yml b/scripts/locust/docker-compose.yml new file mode 100644 index 0000000000..b161c5f7b5 --- /dev/null +++ b/scripts/locust/docker-compose.yml @@ -0,0 +1,15 @@ +version: '3.8' + +services: + locust: + build: + context: . 
+ dockerfile: Dockerfile + ports: + - "8089:8089" # Expose Locust web UI + volumes: + - ./:/app/locust + working_dir: /app/locust + entrypoint: locust -f /app/locust/locustfile.py + environment: + LOCUST_HOST: "https://tm-fastapi.naxa.com.np" diff --git a/scripts/locust/locust_requirements.txt b/scripts/locust/locust_requirements.txt new file mode 100644 index 0000000000..e69de29bb2 diff --git a/scripts/locust/locustfile.py b/scripts/locust/locustfile.py new file mode 100644 index 0000000000..151a887c59 --- /dev/null +++ b/scripts/locust/locustfile.py @@ -0,0 +1,95 @@ +import os +from locust import HttpUser, TaskSet, task, between + +# Define tokens for Flask and FastAPI +FASTAPI_TOKEN = "TVRBeU5UQTBOVFkuWjJVOWNnLmtBNUZUcDZaMkpYVGJ2QnhFN29mb3lqZXZlSQ==" +FLASK_TOKEN = "TVRBeU5UQTBOVFkuWjJVeDV3LmtDTHhCbFdQR2ROZTEzYzJORWRVblp4akFCMA==" + +LOCUST_HOST = os.getenv("LOCUST_HOST", "https://tm.naxa.com.np") + + +AUTH_TOKEN = FASTAPI_TOKEN if "tm-fastapi.naxa.com.np" in LOCUST_HOST else FLASK_TOKEN + +class ProjectAndComments(TaskSet): + @task + def get_project(self): + self.client.get("/api/v2/projects/114/") + + @task + def get_comments(self): + self.client.get("/api/v2/projects/114/comments/") + +class ProjectList(TaskSet): + @task + def project_list(self): + self.client.get("/api/v2/projects/?action=any&omitMapResults=true", headers={"Authorization": f"Token {AUTH_TOKEN}"}) + +class TaskStatistics(TaskSet): + @task + def get_contributions(self): + self.client.get("/api/v2/tasks/statistics/?startDate=2024-01-01", headers={"Authorization": f"Token {AUTH_TOKEN}"}) + +class TaskPage(TaskSet): + @task + def get_tasks(self): + self.client.get("/api/v2/projects/114/tasks/", headers={"Authorization": f"Token {AUTH_TOKEN}"}) + +class GetSimilarProjects(TaskSet): + @task + def get_similar_projects(self): + self.client.get("/api/v2/projects/queries/114/similar-projects/") + +class GetContributions(TaskSet): + @task + def get_contributions(self): + self.client.get("/api/v2/projects/114/contributions/") + +class GetContributionsByDay(TaskSet): + @task + def get_contributions_by_day(self): + self.client.get("/api/v2/projects/114/contributions/queries/day/") + +class GetStatistics(TaskSet): + @task + def get_statistics(self): + self.client.get("/api/v2/system/statistics/") + +class GetActionAny(TaskSet): + @task + def get_action_any(self): + self.client.get("/api/v2/projects/?action=any") + +# Mapping task names to classes +task_mapping = { + "project_and_comments": ProjectAndComments, + "project_list": ProjectList, + "task_statistics": TaskStatistics, + "task_page": TaskPage, + "similar_projects": GetSimilarProjects, + "contributions": GetContributions, + "contributions_by_day": GetContributionsByDay, + "statistics": GetStatistics, + "action_any": GetActionAny, +} + +# User class +class ApiBenchmarkUser(HttpUser): + wait_time = between(1, 2) + + # Dynamically select tasks based on environment variable or CLI parameter + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + task_name = os.getenv("TASK_SET", "project_list").lower() + self.tasks = [task_mapping.get(task_name, TaskPage)] + + +''' +/api/v2/projects/?action=any&omitMapResults=true +/api/v2/projects/114/ +/api/v2/projects/114/comments/ +/api/v2/projects/queries/114/similar-projects/ +/api/v2/projects/114/contributions/ +/api/v2/projects/114/contributions/queries/day/ +/api/v2/system/statistics/ +/api/v2/projects/?action=any +'''
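Note on running the Locust benchmark: locustfile.py picks its TaskSet from the TASK_SET environment variable (defaulting to "project_list", with TaskPage as the fallback for unknown names), while the docker-compose.yml above only sets LOCUST_HOST. A minimal sketch of one way to choose a workload at run time, assuming the compose setup in this diff; the override file name and the selected task set are illustrative and not part of the PR:

# scripts/locust/docker-compose.override.yml (hypothetical example, not in this diff)
# docker compose merges an override file with docker-compose.yml by default,
# so "docker compose up" would start the Locust web UI with the chosen task set.
services:
  locust:
    environment:
      LOCUST_HOST: "https://tm-fastapi.naxa.com.np"  # same target as docker-compose.yml above
      TASK_SET: "task_statistics"                    # any key of task_mapping in locustfile.py

Equivalently, a one-off run could pass the variable directly with docker compose run -e TASK_SET=task_statistics locust, since the selection is read once in ApiBenchmarkUser.__init__.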