diff --git a/app/assets/js/components/Work/Tabs/Preservation/ActionsCol.jsx b/app/assets/js/components/Work/Tabs/Preservation/ActionsCol.jsx index bbaf1999f..d137f3099 100644 --- a/app/assets/js/components/Work/Tabs/Preservation/ActionsCol.jsx +++ b/app/assets/js/components/Work/Tabs/Preservation/ActionsCol.jsx @@ -176,7 +176,10 @@ const PreservationActionsCol = ({ handleReplaceFilesetClick(fileset)} + onClick={() => { + handleReplaceFilesetClick(fileset) + setIsActionsOpen(false); + }} > Replace fileset @@ -202,11 +205,10 @@ const PreservationActionsCol = ({ Delete - {`Fileset: ${ - deleteFilesetModal.fileset.coreMetadata + {`Fileset: ${deleteFilesetModal.fileset.coreMetadata ? deleteFilesetModal.fileset.coreMetadata.label : "" - }`} + }`} {work && (
{ + setCurrentFile({ + location: s3Object.key, + name: getFileNameFromS3Uri(s3Object.key), + }); + setS3UploadLocation(s3Object.key); + setUploadMethod('s3'); + }; + React.useEffect(() => { if (!watchRole) return; const mimeTypes = useAcceptedMimeTypes({ @@ -88,8 +110,6 @@ function WorkTabsPreservationFileSetModal({ onError(error) { console.error(`error:`, error); resetForm(); - // bug with this error not clearing/resetting - // https://github.com/apollographql/apollo-feature-requests/issues/170 }, refetchQueries: [ { @@ -102,7 +122,7 @@ function WorkTabsPreservationFileSetModal({ ); const handleSubmit = (data) => { - ingestFileSet({ + const mutationInput = { variables: { accession_number: data.accessionNumber, workId, @@ -113,8 +133,9 @@ function WorkTabsPreservationFileSetModal({ original_filename: currentFile.name, location: s3UploadLocation, }, - }, - }); + } + } + ingestFileSet(mutationInput); }; const handleCancel = () => { @@ -129,10 +150,12 @@ function WorkTabsPreservationFileSetModal({ setS3UploadLocation(null); setUploadProgress(0); setUploadError(null); + setUploadMethod(null); }; const handleSetFile = (file) => { setCurrentFile(file); + setUploadMethod('dragdrop'); if (file) { getPresignedUrl({ variables: { @@ -228,17 +251,31 @@ function WorkTabsPreservationFileSetModal({ {watchRole && ( -
- -
+ <> +
+

Option 1: Drag and Drop File

+ +
+ +
+

Option 2: Choose from S3 Ingest Bucket

+ +
+ )} {s3UploadLocation && ( @@ -282,4 +319,4 @@ WorkTabsPreservationFileSetModal.propTypes = { workTypeId: PropTypes.oneOf(["IMAGE", "AUDIO", "VIDEO"]), }; -export default WorkTabsPreservationFileSetModal; +export default WorkTabsPreservationFileSetModal; \ No newline at end of file diff --git a/app/assets/js/components/Work/Tabs/Preservation/ReplaceFileSet.jsx b/app/assets/js/components/Work/Tabs/Preservation/ReplaceFileSet.jsx index cdee91665..b2f69eccf 100644 --- a/app/assets/js/components/Work/Tabs/Preservation/ReplaceFileSet.jsx +++ b/app/assets/js/components/Work/Tabs/Preservation/ReplaceFileSet.jsx @@ -1,10 +1,20 @@ -import { Button, Notification } from "@nulib/design-system"; +import * as Dialog from "@radix-ui/react-dialog"; +import { + DialogClose, + DialogContent, + DialogOverlay, + DialogTitle +} from "@js/components/UI/Dialog/Dialog.styled"; +import { Button, Icon, Notification } from "@nulib/design-system"; import { FormProvider, useForm } from "react-hook-form"; -import { GET_WORK, REPLACE_FILE_SET } from "@js/components/Work/work.gql.js"; +import { + GET_WORK, + REPLACE_FILE_SET, +} from "@js/components/Work/work.gql.js"; import React, { useEffect, useState } from "react"; /** @jsx jsx */ import { css, jsx } from "@emotion/react"; -import { s3Location, toastWrapper } from "@js/services/helpers"; +import { getFileNameFromS3Uri, s3Location, toastWrapper } from "@js/services/helpers"; import { useLazyQuery, useMutation } from "@apollo/client"; import Error from "@js/components/UI/Error"; @@ -15,10 +25,19 @@ import UIFormField from "@js/components/UI/Form/Field.jsx"; import UIFormInput from "@js/components/UI/Form/Input.jsx"; import UIIconText from "../../../UI/IconText"; import WorkTabsPreservationFileSetDropzone from "@js/components/Work/Tabs/Preservation/FileSetDropzone"; -import classNames from "classnames"; +import S3ObjectPicker from "@js/components/Work/Tabs/Preservation/S3ObjectPicker" -const modalCss = css` - z-index: 100; +const 
sectionHeaderCss = css` + font-size: 1.2rem; + font-weight: bold; + margin-bottom: 1rem; +`; + +const sectionCss = css` + margin-bottom: 2rem; + padding: 1rem; + border: 1px solid #ddd; + border-radius: 4px; `; function WorkTabsPreservationReplaceFileSet({ @@ -33,20 +52,10 @@ function WorkTabsPreservationReplaceFileSet({ const [s3UploadLocation, setS3UploadLocation] = useState(); const [uploadError, setUploadError] = useState(); const [stateXhr, setStateXhr] = useState(null); - const [acceptedFileTypes, setAcceptedFileTypes] = React.useState(""); - - useEffect(() => { - if (!fileset.id) return; - - // Dynamically set some default form values since - // the active fileset to replace may change by what user selects in the UI - methods.setValue("label", fileset?.coreMetadata?.label || ""); - methods.setValue("description", fileset?.coreMetadata?.description || ""); - }, [fileset?.id]); + const [uploadMethod, setUploadMethod] = useState(null); const methods = useForm(); - // Get the presigned URL for the file upload const [ getPresignedUrl, { error: urlError, loading: urlLoading, data: urlData }, @@ -56,11 +65,10 @@ function WorkTabsPreservationReplaceFileSet({ uploadFile(data.presignedUrl.url); }, onError(error) { - console.error(`error`, error); + console.error("error", error); }, }); - // Set up and handle the GraphQL mutation to replace the fileset const [replaceFileSet, { loading, error, data }] = useMutation( REPLACE_FILE_SET, { @@ -73,10 +81,8 @@ function WorkTabsPreservationReplaceFileSet({ closeModal(); }, onError(error) { - console.error(`error:`, error); + console.error("error:", error); resetForm(); - // bug with this error not clearing/resetting - // https://github.com/apollographql/apollo-feature-requests/issues/170 }, refetchQueries: [ { @@ -88,7 +94,13 @@ function WorkTabsPreservationReplaceFileSet({ } ); - // React Hook Form form submit handler function which calls the GraphQL mutation + useEffect(() => { + if (fileset.id) { + methods.setValue("label", 
fileset?.coreMetadata?.label || ""); + methods.setValue("description", fileset?.coreMetadata?.description || ""); + } + }, [fileset.id]); + const handleSubmit = (data) => { replaceFileSet({ variables: { @@ -96,7 +108,7 @@ function WorkTabsPreservationReplaceFileSet({ coreMetadata: { description: data.description, label: data.label, - original_filename: currentFile.name, + original_filename: currentFile?.name, location: s3UploadLocation, }, }, @@ -114,10 +126,21 @@ function WorkTabsPreservationReplaceFileSet({ setS3UploadLocation(null); setUploadProgress(0); setUploadError(null); + setUploadMethod(null); + }; + + const handleSelectS3Object = (s3Object) => { + setCurrentFile({ + location: s3Object.key, + name: getFileNameFromS3Uri(s3Object.key), + }); + setS3UploadLocation(s3Object.key); + setUploadMethod('s3'); }; const handleSetFile = (file) => { setCurrentFile(file); + setUploadMethod('dragdrop'); if (file) { getPresignedUrl({ variables: { @@ -164,64 +187,69 @@ function WorkTabsPreservationReplaceFileSet({ }; return ( -
-
+ + + + + + + + + + + Replace Fileset + - {urlError && ( -
-
- Error retrieving presigned url -
-
- )} + {urlError && ( +
+
+ Error retrieving presigned url +
+
+ )} - {!urlError && ( - -
-
-
-

Replace Fileset

- -
-
- - }> - Replacing a fileset cannot be undone - - - {uploadError && ( - {uploadError} - )} - {error && } + {!urlError && ( + + +
+
+ + }> + Replacing a fileset cannot be undone + + + {uploadError && ( + {uploadError} + )} + {error && } -
- -
+
+

Option 1: Drag and Drop File

+ +
+ +
+

Option 2: Choose from S3 Ingest Bucket

+ +
- {s3UploadLocation && ( - <> - - )} -
+
-
- {s3UploadLocation && ( - <> - - - - )} -
-
-
-
- )} -
+
+ {s3UploadLocation && ( + <> + + + + )} +
+
+ + + )} + + + ); } @@ -282,4 +310,4 @@ WorkTabsPreservationReplaceFileSet.propTypes = { workTypeId: PropTypes.oneOf(["IMAGE", "AUDIO", "VIDEO"]), }; -export default WorkTabsPreservationReplaceFileSet; +export default WorkTabsPreservationReplaceFileSet; \ No newline at end of file diff --git a/app/assets/js/components/Work/Tabs/Preservation/ReplaceFileSet.test.jsx b/app/assets/js/components/Work/Tabs/Preservation/ReplaceFileSet.test.jsx index 14570c7cd..6aa81a0d9 100644 --- a/app/assets/js/components/Work/Tabs/Preservation/ReplaceFileSet.test.jsx +++ b/app/assets/js/components/Work/Tabs/Preservation/ReplaceFileSet.test.jsx @@ -1,17 +1,27 @@ +import React from "react"; +import ReplaceFileSet from "./ReplaceFileSet"; import { renderWithRouterApollo, withReactHookForm, } from "@js/services/testing-helpers"; - import { AuthProvider } from "@js/components/Auth/Auth"; -import { CodeListProvider } from "@js/context/code-list-context"; -import React from "react"; -import ReplaceFileSet from "@js/components/Work/Tabs/Preservation/ReplaceFileSet"; -import { allCodeListMocks } from "@js/components/Work/controlledVocabulary.gql.mock"; -import { getCurrentUserMock } from "@js/components/Auth/auth.gql.mock"; import { getPresignedUrlForFileSetMock } from "@js/components/IngestSheet/ingestSheet.gql.mock"; import { mockWork } from "@js/components/Work/work.gql.mock.js"; -import { screen } from "@testing-library/react"; +import { screen, fireEvent, waitFor } from "@testing-library/react"; +import { getCurrentUserMock } from "@js/components/Auth/auth.gql.mock"; +import { CodeListProvider } from "@js/context/code-list-context"; +import { allCodeListMocks } from "@js/components/Work/controlledVocabulary.gql.mock"; + +// Mock the S3ObjectPicker component +jest.mock("@js/components/Work/Tabs/Preservation/S3ObjectPicker", () => { + return function MockS3ObjectPicker({ onFileSelect }) { + return ( + + ); + }; +}); let isModalOpen = true; @@ -19,12 +29,12 @@ const handleClose = () => { 
isModalOpen = false; }; -describe("ReplaceFileSet component", () => { +describe("Replace fileset modal", () => { beforeEach(() => { const Wrapped = withReactHookForm(ReplaceFileSet, { closeModal: handleClose, - fileset: mockWork.fileSets[0], isVisible: isModalOpen, + fileset: mockWork.fileSets[0], workId: mockWork.id, workTypeId: mockWork.workType.id, }); @@ -44,23 +54,34 @@ describe("ReplaceFileSet component", () => { ); }); - it("renders the replace fileset modal and form", async () => { - expect( - await screen.findByTestId("replace-fileset-modal") - ).toBeInTheDocument(); - expect( - await screen.findByTestId("replace-fileset-form") - ).toBeInTheDocument(); + + it("renders replace fileset form", async () => { + expect(await screen.findByTestId("replace-fileset-form")); }); - it("renders a warning message and a button to replace the fileset", async () => { - expect( - await screen.findByText(/Replacing a fileset cannot be undone/i) - ).toBeInTheDocument(); - expect( - await screen.findByText( - /Drag 'n' drop a file here, or click to select file/i - ) - ).toBeInTheDocument(); + it("displays warning message", async () => { + expect(await screen.findByText(/Replacing a fileset cannot be undone/i)); }); -}); + + it("renders file upload dropzone", async () => { + expect(await screen.findByText(/Drag 'n' drop a file here, or click to select file/i)); + }); + + it("renders label input field", async () => { + expect(await screen.findByTestId("fileset-label-input")); + }); + + it("renders description input field", async () => { + expect(await screen.findByTestId("fileset-description-input")); + }); + + it("renders cancel and submit buttons when file is selected from S3ObjectPicker", async () => { + const selectFileButton = await screen.findByText("Select Mocked File"); + fireEvent.click(selectFileButton); + + await waitFor(() => { + expect(screen.getByTestId("cancel-button")).toBeInTheDocument(); + expect(screen.getByTestId("submit-button")).toBeInTheDocument(); + }); + 
}); +}); \ No newline at end of file diff --git a/app/assets/js/components/Work/Tabs/Preservation/S3ObjectPicker.jsx b/app/assets/js/components/Work/Tabs/Preservation/S3ObjectPicker.jsx new file mode 100644 index 000000000..4f2abf5fa --- /dev/null +++ b/app/assets/js/components/Work/Tabs/Preservation/S3ObjectPicker.jsx @@ -0,0 +1,115 @@ +import useAcceptedMimeTypes from "@js/hooks/useAcceptedMimeTypes"; +import { Button } from "@nulib/design-system"; +import { + LIST_INGEST_BUCKET_OBJECTS, +} from "@js/components/Work/work.gql.js"; +import React, { useState } from "react"; +/** @jsx jsx */ +import { css, jsx } from "@emotion/react"; +import { useQuery } from "@apollo/client"; +import { FaSpinner } from "react-icons/fa"; +import { formatBytes } from "@js/services/helpers"; + +import Error from "@js/components/UI/Error"; +import UIFormInput from "@js/components/UI/Form/Input.jsx"; + +const tableContainerCss = css` + max-height: 30vh; + overflow-y: auto; +`; + +const fileRowCss = css` + cursor: pointer; +`; + +// a nice gentle blue +const selectedRowCss = css` + background-color: #f0f8ff !important; +`; + +const colHeaders = ["File Key", "Size", "Mime Type"]; + +const S3ObjectPicker = ({ onFileSelect, fileSetRole, workTypeId, defaultPrefix = "" }) => { + const { isFileValid } = useAcceptedMimeTypes(); + + const [prefix, setPrefix] = useState(defaultPrefix); + const [selectedFile, setSelectedFile] = useState(null); + const [error, _setError] = useState(null); + + const { loading: queryLoading, error: queryError, data, refetch } = useQuery(LIST_INGEST_BUCKET_OBJECTS, { + variables: { prefix } + }); + + const handleClear = () => { + setPrefix(defaultPrefix); + refetch({ prefix: defaultPrefix }); + }; + + const handlePrefixChange = (e) => { + const inputValue = e.target.value; + const newPrefix = inputValue.startsWith(defaultPrefix) ?
inputValue : defaultPrefix + inputValue; + setPrefix(newPrefix); + refetch({ prefix: newPrefix }); + }; + + const handleRefresh = async () => { + await refetch({ prefix: prefix }); + }; + + const handleFileClick = (fileSet) => { + setSelectedFile(fileSet.key); + onFileSelect(fileSet); + }; + + if (queryLoading) return ; + if (queryError) return ; + + return ( +
+ +
+ + +
+ {error &&
{error}
} + {data && data.ListIngestBucketObjects && ( +
+ + + + {colHeaders.map((col) => ( + + ))} + + + + {data.ListIngestBucketObjects.filter(file => { + const { isValid } = isFileValid(fileSetRole, workTypeId, file.mimeType); + return isValid; + }).map((fileSet, index) => ( + handleFileClick(fileSet)} + className={selectedFile === fileSet.key ? "selected" : ""} + css={[fileRowCss, selectedFile === fileSet.key && selectedRowCss]} + > + + + + + ))} + +
{col}
{fileSet.key}{formatBytes(fileSet.size)}{fileSet.mimeType}
+
+ )} +
+ ); +}; + +export default S3ObjectPicker; \ No newline at end of file diff --git a/app/assets/js/components/Work/Tabs/Preservation/S3ObjectPicker.test.jsx b/app/assets/js/components/Work/Tabs/Preservation/S3ObjectPicker.test.jsx new file mode 100644 index 000000000..4ca355e16 --- /dev/null +++ b/app/assets/js/components/Work/Tabs/Preservation/S3ObjectPicker.test.jsx @@ -0,0 +1,112 @@ +import React from "react"; +import { render, fireEvent, waitFor } from "@testing-library/react"; +import { MockedProvider } from "@apollo/client/testing"; +import S3ObjectPicker from "@js/components/Work/Tabs/Preservation/S3ObjectPicker"; +import { LIST_INGEST_BUCKET_OBJECTS } from "@js/components/Work/work.gql.js"; + +const mocks = [ + { + request: { + query: LIST_INGEST_BUCKET_OBJECTS, + variables: { prefix: "file_sets/" }, + }, + result: { + data: { + ListIngestBucketObjects: [ + { key: "file_sets/file3", size: 1000, mimeType: "image/jpeg" }, + { key: "file_sets/file4", size: 2000, mimeType: "image/png" }, + ], + }, + }, + }, + { + request: { + query: LIST_INGEST_BUCKET_OBJECTS, + variables: { prefix: "" }, + }, + result: { + data: { + ListIngestBucketObjects: [ + { key: "file1", size: 1000, mimeType: "image/jpeg" }, + { key: "file2", size: 2000, mimeType: "image/png" }, + { key: "file_sets/file3", size: 1000, mimeType: "image/jpeg" }, + { key: "file_sets/file4", size: 2000, mimeType: "image/png" }, + ], + }, + }, + }, +]; + +describe("S3ObjectPicker component", () => { + it("renders without crashing", () => { + render( + + { }} fileSetRole="A" workTypeId="IMAGE" /> + + ); + }); + + it("renders an error message when there is a query error", async () => { + const errorMock = [ + { + request: { + query: LIST_INGEST_BUCKET_OBJECTS, + variables: { prefix: "" }, + }, + error: new Error("An error occurred"), + }, + ]; + const { findByText } = render( + + { }} fileSetRole="A" workTypeId="IMAGE" /> + + ); + expect(await findByText("An error occurred")).toBeInTheDocument(); + }); + + 
it("renders the Clear and Refresh buttons", async () => { + const { findByText } = render( + + { }} fileSetRole="A" workTypeId="IMAGE" /> + + ); + expect(await findByText("Clear")).toBeInTheDocument(); + expect(await findByText("Refresh")).toBeInTheDocument(); + }); + + it("renders the table when data is available", async () => { + const { findByText } = render( + + { }} fileSetRole="A" workTypeId="IMAGE" /> + + ); + expect(await findByText("file1")).toBeInTheDocument(); + expect(await findByText("file2")).toBeInTheDocument(); + }); + + it("handles prefixed search", async () => { + const { findByText, getByPlaceholderText, queryByText } = render( + + { }} fileSetRole="A" workTypeId="IMAGE" /> + + ); + + await findByText("file1"); + + const input = getByPlaceholderText("Enter prefix"); + fireEvent.change(input, { target: { value: "file_sets/" } }); + + await waitFor(() => { + expect(input.value).toBe("file_sets/"); + }); + + // Check that the prefixed files are present + expect(await findByText("file_sets/file3")).toBeInTheDocument(); + expect(await findByText("file_sets/file4")).toBeInTheDocument(); + + // Check that the non-prefixed files are not present + expect(queryByText("file1")).not.toBeInTheDocument(); + expect(queryByText("file2")).not.toBeInTheDocument(); + }); + +}); \ No newline at end of file diff --git a/app/assets/js/components/Work/work.gql.js b/app/assets/js/components/Work/work.gql.js index 60b6055a7..198958b2f 100644 --- a/app/assets/js/components/Work/work.gql.js +++ b/app/assets/js/components/Work/work.gql.js @@ -360,6 +360,18 @@ export const REPLACE_FILE_SET = gql` } `; +export const LIST_INGEST_BUCKET_OBJECTS = gql` + query ListIngestBucketObjects($prefix: String) { + ListIngestBucketObjects(prefix: $prefix) { + key + storageClass + size + lastModified + mimeType + } + } +` + export const SET_WORK_IMAGE = gql` mutation SetWorkImage($fileSetId: ID!, $workId: ID!) 
{ setWorkImage(fileSetId: $fileSetId, workId: $workId) { diff --git a/app/assets/js/services/helpers.ts b/app/assets/js/services/helpers.ts index 009e7d1fc..45ace4f7a 100644 --- a/app/assets/js/services/helpers.ts +++ b/app/assets/js/services/helpers.ts @@ -174,3 +174,8 @@ export function formatBytes(bytes: number, decimals: number) { i = Math.floor(Math.log(bytes) / Math.log(k)); return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + " " + sizes[i]; } + +export function getFileNameFromS3Uri(s3Uri: string) { + const segments = s3Uri.split("/"); + return segments[segments.length - 1]; +} diff --git a/app/assets/package-lock.json b/app/assets/package-lock.json index 4c42aa41d..df4e38cbb 100644 --- a/app/assets/package-lock.json +++ b/app/assets/package-lock.json @@ -7,7 +7,7 @@ "license": "MIT", "dependencies": { "@absinthe/socket-apollo-link": "^0.2.1", - "@apollo/client": "*", + "@apollo/client": "latest", "@apollo/react-hooks": "^4.0.0", "@apollo/react-testing": "^4.0.0", "@appbaseio/reactivesearch": "3.23.1", diff --git a/app/lib/meadow/utils/aws/s3.ex b/app/lib/meadow/utils/aws/s3.ex new file mode 100644 index 000000000..0515ca8fb --- /dev/null +++ b/app/lib/meadow/utils/aws/s3.ex @@ -0,0 +1,64 @@ +defmodule Meadow.Utils.AWS.S3 do + @moduledoc """ + S3 utility functions + """ + + alias Meadow.Config + + require Logger + + @doc """ + Lists the file sets in the ingest bucket with the given user prefix. + + ## Parameters + + - user_prefix: The prefix to filter the file sets. + + ## Returns + + A list of file sets in the ingest bucket. 
+ """ + def list_ingest_bucket_objects(opts \\ []) do + user_prefix = Keyword.get(opts, :prefix, "") + bucket = Config.ingest_bucket() + + bucket + |> ExAws.S3.list_objects(prefix: user_prefix) + |> ExAws.stream!() + |> Enum.into([]) + |> Enum.filter(&(!String.ends_with?(&1.key, "/"))) + |> Enum.map(&get_object_metadata(bucket, &1)) + end + + defp get_object_metadata(bucket, file_set) do + s3_key = "s3://" <> bucket <> "/" <> file_set.key + mime_type = fetch_mime_type(bucket, file_set.key) + + Map.put(file_set, :mime_type, mime_type) + |> Map.put(:key, s3_key) + end + + defp fetch_mime_type(bucket, key) do + bucket + |> do_fetch_mime_type(key) + |> case do + nil -> "application/octet-stream" + mime_type -> mime_type + end + end + + defp do_fetch_mime_type(bucket, key) do + case ExAws.S3.head_object(bucket, key) |> ExAws.request() do + {:ok, %{headers: headers}} -> extract_content_type(headers) + _ -> nil + end + end + + defp extract_content_type(headers) do + Enum.find_value(headers, fn + {"content-type", value} -> value + {"Content-Type", value} -> value + _ -> false + end) + end +end diff --git a/app/lib/meadow_web/resolvers/data.ex b/app/lib/meadow_web/resolvers/data.ex index 882d106d6..2978417b1 100644 --- a/app/lib/meadow_web/resolvers/data.ex +++ b/app/lib/meadow_web/resolvers/data.ex @@ -6,6 +6,7 @@ defmodule MeadowWeb.Resolvers.Data do alias Meadow.Pipeline alias Meadow.Data.{FileSets, Works} alias Meadow.Data.Works.TransferFileSets + alias Meadow.Utils.AWS.S3, as: S3Utils alias Meadow.Utils.ChangesetErrors def works(_, args, _) do @@ -141,6 +142,28 @@ defmodule MeadowWeb.Resolvers.Data do end end + def list_ingest_bucket_objects(_, %{prefix: prefix}, _) do + {:ok, S3Utils.list_ingest_bucket_objects(prefix: prefix)} + end + + def list_ingest_bucket_objects(_, _, _) do + {:ok, S3Utils.list_ingest_bucket_objects()} + end + + def replace_file_set(_, %{id: id} = params, _) do + file_set = FileSets.get_file_set!(id) + + case 
Pipeline.replace_the_file_set(file_set, Map.delete(params, :id)) do + {:error, changeset} -> + {:error, + message: "Could not replace file set", + details: ChangesetErrors.humanize_errors(changeset)} + + {:ok, file_set} -> + {:ok, file_set} + end + end + def replace_file_set(_, %{id: id} = params, _) do file_set = FileSets.get_file_set!(id) diff --git a/app/lib/meadow_web/schema/schema.ex b/app/lib/meadow_web/schema/schema.ex index 44e034dd5..36b4123d5 100644 --- a/app/lib/meadow_web/schema/schema.ex +++ b/app/lib/meadow_web/schema/schema.ex @@ -21,6 +21,7 @@ defmodule MeadowWeb.Schema do import_types(__MODULE__.Data.FileSetTypes) import_types(__MODULE__.Data.FieldTypes) import_types(__MODULE__.Data.PreservationCheckTypes) + import_types(__MODULE__.Data.S3Types) import_types(__MODULE__.Data.SharedLinkTypes) import_types(__MODULE__.HelperTypes) import_types(__MODULE__.Data.CSVMetadataUpdateTypes) @@ -38,6 +39,7 @@ defmodule MeadowWeb.Schema do import_fields(:csv_metadata_update_queries) import_fields(:nul_authority_queries) import_fields(:preservation_check_queries) + import_fields(:s3_queries) import_fields(:work_queries) end diff --git a/app/lib/meadow_web/schema/types/data/file_set_types.ex b/app/lib/meadow_web/schema/types/data/file_set_types.ex index f3d00832d..11aba21b4 100644 --- a/app/lib/meadow_web/schema/types/data/file_set_types.ex +++ b/app/lib/meadow_web/schema/types/data/file_set_types.ex @@ -165,7 +165,9 @@ defmodule MeadowWeb.Schema.Data.FileSetTypes do field(:md5, :string, do: resolve(fn digests, _, _ -> {:ok, Map.get(digests, "md5")} end)) field(:sha1, :string, do: resolve(fn digests, _, _ -> {:ok, Map.get(digests, "sha1")} end)) - field(:sha256, :string, do: resolve(fn digests, _, _ -> {:ok, Map.get(digests, "sha256")} end)) + field(:sha256, :string, + do: resolve(fn digests, _, _ -> {:ok, Map.get(digests, "sha256")} end) + ) end @desc "`file_set_structural_metadata` represents the structural metadata within a file set object." 
diff --git a/app/lib/meadow_web/schema/types/data/s3_types.ex b/app/lib/meadow_web/schema/types/data/s3_types.ex new file mode 100644 index 000000000..67f8ace40 --- /dev/null +++ b/app/lib/meadow_web/schema/types/data/s3_types.ex @@ -0,0 +1,35 @@ +defmodule MeadowWeb.Schema.Data.S3Types do + @moduledoc """ + Absinthe Schema for S3Types + + """ + use Absinthe.Schema.Notation + + alias MeadowWeb.Resolvers + alias MeadowWeb.Schema.Middleware + + object :s3_queries do + @desc "List ingest bucket objects" + field :list_ingest_bucket_objects, list_of(:s3_object) do + arg(:prefix, :string) + middleware(Middleware.Authenticate) + middleware(Middleware.Authorize, "Editor") + resolve(&Resolvers.Data.list_ingest_bucket_objects/3) + end + end + + object :s3_object do + field(:owner, :s3_owner) + field(:size, :string) + field(:key, :string) + field(:last_modified, :string) + field(:storage_class, :string) + field(:e_tag, :string) + field(:mime_type, :string) + end + + object :s3_owner do + field(:id, :string) + field(:display_name, :string) + end +end diff --git a/app/test/meadow_web/schema/query/s3_objects_test.exs b/app/test/meadow_web/schema/query/s3_objects_test.exs new file mode 100644 index 000000000..afb9b0f69 --- /dev/null +++ b/app/test/meadow_web/schema/query/s3_objects_test.exs @@ -0,0 +1,108 @@ +defmodule MeadowWeb.Schema.Query.S3ObjectsTest do + use Meadow.DataCase + use Meadow.S3Case + use MeadowWeb.ConnCase, async: true + + alias Meadow.Config + alias Meadow.Utils.AWS + + import WaitForIt + + @query """ + query ($prefix: String) { + ListIngestBucketObjects(prefix: $prefix) { + key + storageClass + size + lastModified + mimeType + } + } + """ + + @image_fixture "test/fixtures/coffee.tif" + @json_fixture "test/fixtures/details.json" + + setup do + file_fixtures = [ + {@ingest_bucket, "coffee/coffee.tif", File.read!(@image_fixture)}, + {@ingest_bucket, "details.json", File.read!(@json_fixture)} + ] + + setup_fixtures(file_fixtures) + + on_exit(fn -> 
cleanup_fixtures(file_fixtures) end) + + {:ok, %{file_fixtures: file_fixtures}} + end + + test "ListIngestBucketObjects query returns objects with a prefix", %{ + file_fixtures: _file_fixtures + } do + conn = build_conn() |> auth_user(user_fixture()) + variables = %{"prefix" => "coffee"} + + response = + conn + |> get("/api/graphql", query: @query, variables: variables) + |> json_response(200) + + assert %{ + "data" => %{ + "ListIngestBucketObjects" => [s3_object] + } + } = response + + assert s3_object["key"] == "s3://#{@ingest_bucket}/coffee/coffee.tif" + assert s3_object["mimeType"] == "application/octet-stream" + assert s3_object["size"] == "3179982" + assert s3_object["storageClass"] == "STANDARD" + assert_valid_iso8601_datetime(s3_object["lastModified"]) + + refute Enum.any?( + response["data"]["ListIngestBucketObjects"], + &(&1["key"] == "s3://#{@ingest_bucket}/details.json") + ) + end + + test "ListIngestBucketObjects query returns all objects in the ingest bucket", %{ + file_fixtures: file_fixtures + } do + conn = build_conn() |> auth_user(user_fixture()) + + response = + conn + |> get("/api/graphql", query: @query) + |> json_response(200) + + s3_objects = response["data"]["ListIngestBucketObjects"] + + assert Enum.all?(file_fixtures, fn {bucket, key, _} -> + expected_key = "s3://#{bucket}/#{key}" + Enum.any?(s3_objects, &(&1["key"] == expected_key)) + end) + end + + defp setup_fixtures(fixtures) do + fixtures + |> Task.async_stream(&upload_and_tag_fixture/1, timeout: Config.checksum_wait_timeout()) + |> Stream.run() + end + + defp upload_and_tag_fixture({bucket, key, content}) do + upload_object(bucket, key, content) + + AWS.check_object_tags!(bucket, key, Config.required_checksum_tags()) + |> wait(timeout: Config.checksum_wait_timeout(), frequency: 250) + end + + defp cleanup_fixtures(fixtures) do + fixtures + |> Task.async_stream(fn {bucket, key, _} -> delete_object(bucket, key) end) + |> Stream.run() + end + + defp 
assert_valid_iso8601_datetime(datetime_string) do + assert {:ok, _, 0} = DateTime.from_iso8601(datetime_string) + end +end