From c284271628ef21db28383cad3f62f11bcf3bd4ec Mon Sep 17 00:00:00 2001
From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com>
Date: Thu, 8 Aug 2024 10:19:49 +0000
Subject: [PATCH] [pre-commit.ci] auto fixes from pre-commit.com hooks

for more information, see https://pre-commit.ci
---
 src/backend/app/db/db_models.py            |  2 +-
 src/backend/app/s3.py                      |  8 +--
 .../app/submissions/submission_crud.py     | 61 ++++++++++---------
 .../app/submissions/submission_routes.py   | 12 ++--
 .../createnewproject/DataExtract.tsx       |  8 +--
 5 files changed, 49 insertions(+), 42 deletions(-)

diff --git a/src/backend/app/db/db_models.py b/src/backend/app/db/db_models.py
index 8d3fb82eae..ad85679943 100644
--- a/src/backend/app/db/db_models.py
+++ b/src/backend/app/db/db_models.py
@@ -562,4 +562,4 @@ class DbSubmissionPhotos(Base):
     project_id = cast(int, Column(Integer))
     task_id = cast(int, Column(Integer))
     submission_id = cast(str, Column(String))
-    s3_path = cast(str, Column(String))
\ No newline at end of file
+    s3_path = cast(str, Column(String))
diff --git a/src/backend/app/s3.py b/src/backend/app/s3.py
index 081e5e536f..42a4bc2418 100644
--- a/src/backend/app/s3.py
+++ b/src/backend/app/s3.py
@@ -1,11 +1,11 @@
 """Initialise the S3 buckets for FMTM to function."""

-from fastapi import HTTPException
 import json
 import sys
 from io import BytesIO
 from typing import Any

+from fastapi import HTTPException
 from loguru import logger as log
 from minio import Minio
 from minio.commonconfig import CopySource
@@ -40,6 +40,7 @@ def s3_client():
     # print(creds.access_key)
     # print(creds.secret_key)

+
 def object_exists(bucket_name: str, s3_path: str) -> bool:
     """Check if an object exists in an S3 bucket using stat_object.
@@ -57,13 +58,12 @@ def object_exists(bucket_name: str, s3_path: str) -> bool:
         client.stat_object(bucket_name, s3_path)
         return True
     except S3Error as e:
-        if e.code == 'NoSuchKey':
+        if e.code == "NoSuchKey":
             return False
         else:
             # Handle other exceptions
             raise HTTPException(
-                status_code = HTTPStatus.BAD_REQUEST,
-                detail = str(e)
+                status_code=HTTPStatus.BAD_REQUEST, detail=str(e)
             ) from e
diff --git a/src/backend/app/submissions/submission_crud.py b/src/backend/app/submissions/submission_crud.py
index 5a0e3feb20..d71284e955 100644
--- a/src/backend/app/submissions/submission_crud.py
+++ b/src/backend/app/submissions/submission_crud.py
@@ -569,13 +569,9 @@ async def get_submission_detail(
 async def upload_attachment_to_s3(
-    project_id: int,
-    instance_ids: list,
-    background_task_id: uuid.UUID,
-    db: Session
-    ):
-    """
-    Uploads attachments to S3 for a given project and instance IDs.
+    project_id: int, instance_ids: list, background_task_id: uuid.UUID, db: Session
+):
+    """Uploads attachments to S3 for a given project and instance IDs.

     Args:
         project_id (int): The ID of the project.
         instance_ids (list): The list of instance IDs of the submissions.
         background_task_id (uuid.UUID): The ID of the background task.
         db (Session): The database session.

     Returns:
         bool: True if the upload is successful.

     Raises:
         Exception: If an error occurs during the upload process.
""" try: - project = await project_deps.get_project_by_id(db,project_id) + project = await project_deps.get_project_by_id(db, project_id) db_xform = await project_deps.get_project_xform(db, project_id) odk_central = await project_deps.get_odk_credentials(db, project_id) xform = get_odk_form(odk_central) @@ -599,29 +595,33 @@ async def upload_attachment_to_s3( for instance_id in instance_ids: submission_detail = await get_submission_detail(instance_id, project, db) attachments = submission_detail["verification"]["image"] - + if not isinstance(attachments, list): attachments = [attachments] - + for idx, filename in enumerate(attachments): s3_key = f"fmtm-data/{project.organisation_id}/{project_id}/{instance_id}/{idx+1}.jpeg" - + if object_exists(s3_bucket, s3_key): - log.warning(f"Object {s3_key} already exists in S3. Skipping upload.") + log.warning( + f"Object {s3_key} already exists in S3. Skipping upload." + ) continue try: - if attachment:= xform.getMedia( - project.odkid, - str(instance_id), - db_xform.odk_form_id, - str(filename) + if attachment := xform.getMedia( + project.odkid, + str(instance_id), + db_xform.odk_form_id, + str(filename), ): # Convert the attachment to a BytesIO stream image_stream = io.BytesIO(attachment) # Upload the attachment to S3 - add_obj_to_bucket(s3_bucket, image_stream, s3_key, content_type="image/jpeg") + add_obj_to_bucket( + s3_bucket, image_stream, s3_key, content_type="image/jpeg" + ) # Generate the image URL img_url = f"{settings.S3_DOWNLOAD_ROOT}/{settings.S3_BUCKET_NAME}/{s3_key}" @@ -631,17 +631,22 @@ async def upload_attachment_to_s3( INSERT INTO submission_photos (project_id, task_id, submission_id, s3_path) VALUES (:project_id, :task_id, :submission_id, :s3_path) """) - db.execute(sql, { - "project_id": project_id, - "task_id": submission_detail["task_id"], - "submission_id": instance_id, - "s3_path": img_url - }) - + db.execute( + sql, + { + "project_id": project_id, + "task_id": submission_detail["task_id"], + "submission_id": instance_id, + "s3_path": img_url, + }, + ) + except Exception as e: - log.warning(f"Failed to process {filename} for instance {instance_id}: {e}") + log.warning( + f"Failed to process {filename} for instance {instance_id}: {e}" + ) continue - + db.commit() return True @@ -651,4 +656,4 @@ async def upload_attachment_to_s3( update_bg_task_sync = async_to_sync( project_crud.update_background_task_status_in_database ) - update_bg_task_sync(db, background_task_id, 2, str(e)) + update_bg_task_sync(db, background_task_id, 2, str(e)) diff --git a/src/backend/app/submissions/submission_routes.py b/src/backend/app/submissions/submission_routes.py index 6fe28f45dd..50b326199d 100644 --- a/src/backend/app/submissions/submission_routes.py +++ b/src/backend/app/submissions/submission_routes.py @@ -20,13 +20,13 @@ import json import uuid from io import BytesIO -from loguru import logger as log from typing import Annotated, Optional import geojson -from fastapi import APIRouter, Depends, HTTPException, Query, BackgroundTasks +from fastapi import APIRouter, BackgroundTasks, Depends, HTTPException, Query from fastapi.concurrency import run_in_threadpool from fastapi.responses import FileResponse, JSONResponse, Response +from loguru import logger as log from sqlalchemy.orm import Session from app.auth.auth_schemas import AuthUser, ProjectUserDict @@ -387,14 +387,18 @@ async def submission_table( for submission in submissions: if submission["__system"]["attachmentsPresent"] != 0: instance_ids.append(submission["__id"]) - + if instance_ids: 
         background_task_id = await project_crud.insert_background_task_into_database(
             db, "upload_submission_photos", project.id
         )
         log.info("uploading submission photos to s3")
         background_tasks.add_task(
-            submission_crud.upload_attachment_to_s3, project.id, instance_ids, background_task_id, db
+            submission_crud.upload_attachment_to_s3,
+            project.id,
+            instance_ids,
+            background_task_id,
+            db,
         )

     if task_id:
diff --git a/src/frontend/src/components/createnewproject/DataExtract.tsx b/src/frontend/src/components/createnewproject/DataExtract.tsx
index 8bfda464dc..4efe9ad4ea 100644
--- a/src/frontend/src/components/createnewproject/DataExtract.tsx
+++ b/src/frontend/src/components/createnewproject/DataExtract.tsx
@@ -226,13 +226,11 @@ const DataExtract = ({ flag, customDataExtractUpload, setCustomDataExtractUpload
-
-                You may either choose to use OSM data, or upload your own data for the mapping project.
-
+              You may either choose to use OSM data, or upload your own data for the mapping project.
               The relevant map features that exist on OSM are imported based on the select map area.{' '}
-              You can use these map features to use the 'select from map' functionality from ODK that allows you
-              to select the feature to collect data for.
+              You can use these map features to use the 'select from map' functionality from ODK that allows you to select
+              the feature to collect data for.
               {' '}
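
Note on the submission_crud.py hunks above: the reformatted block boils down to a guarded upload. It skips the S3 write when the object key already exists, otherwise streams the attachment bytes into the bucket and records the resulting URL in submission_photos. The standalone sketch below restates that flow for reference. It is illustrative only and not part of the patch; it assumes that object_exists() and add_obj_to_bucket() keep the signatures used in the diff, that the import paths app.s3 and app.config match the backend layout, that settings exposes S3_BUCKET_NAME and S3_DOWNLOAD_ROOT, and the wrapper name upload_photo_once is hypothetical.

    # Illustrative sketch, not part of this patch.
    # Assumes object_exists()/add_obj_to_bucket() behave as in src/backend/app/s3.py
    # and that settings exposes S3_BUCKET_NAME and S3_DOWNLOAD_ROOT.
    import io
    from typing import Optional

    from app.config import settings
    from app.s3 import add_obj_to_bucket, object_exists


    def upload_photo_once(photo_bytes: bytes, s3_key: str) -> Optional[str]:
        """Upload one submission photo unless the key is already in the bucket."""
        bucket = settings.S3_BUCKET_NAME
        if object_exists(bucket, s3_key):
            # Same duplicate-upload guard used in upload_attachment_to_s3(): skip
            # the write instead of overwriting the existing object.
            return None
        add_obj_to_bucket(bucket, io.BytesIO(photo_bytes), s3_key, content_type="image/jpeg")
        # Mirrors the URL the CRUD code stores in submission_photos.s3_path.
        return f"{settings.S3_DOWNLOAD_ROOT}/{bucket}/{s3_key}"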