Clean up Archive Storage Tests #351

Draft
wants to merge 2 commits into main
2 changes: 2 additions & 0 deletions .gitignore
@@ -152,3 +152,5 @@ cython_debug/
 # Make working with Sapling a little easier
 .git
 .sl
+
+tests/gcs-service-account.json
6 changes: 3 additions & 3 deletions docker-compose.yml
@@ -51,12 +51,12 @@ services:
         size: 1024M

   minio:
-    image: minio/minio:RELEASE.2019-04-09T01-22-30Z
-    command: server /export
+    image: minio/minio
+    command: server /data
     ports:
       - "9000:9000"
     environment:
       - MINIO_ACCESS_KEY=codecov-default-key
       - MINIO_SECRET_KEY=codecov-default-secret
     volumes:
-      - archive-volume:/export
+      - archive-volume:/data
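
Note for local testing: the unpinned image uses the newer MinIO layout, which serves data from `/data` rather than `/export`. A minimal sketch of a test talking to this service with the minio Python SDK, assuming the compose stack is up (the bucket name is illustrative):

```python
from minio import Minio

# Credentials match the environment block in the compose file above.
client = Minio(
    "localhost:9000",
    access_key="codecov-default-key",
    secret_key="codecov-default-secret",
    secure=False,  # the compose service speaks plain HTTP
)

if not client.bucket_exists("archive"):  # "archive" is an illustrative bucket name
    client.make_bucket("archive")
```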
1 change: 1 addition & 0 deletions shared/storage/aws.py
@@ -15,6 +15,7 @@ def __init__(self, aws_config):
         self.config = aws_config
         self.storage_client = boto3.client(
             aws_config.get("resource"),
+            endpoint_url=aws_config.get("endpoint_url"),
             aws_access_key_id=aws_config.get("aws_access_key_id"),
             aws_secret_access_key=aws_config.get("aws_secret_access_key"),
             region_name=aws_config.get("region_name"),
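
Passing `endpoint_url` through lets tests point boto3 at the MinIO container instead of real S3. A hedged sketch of a config that exercises this, mirroring the `aws_config.get(...)` keys above (the values are assumptions for the local compose setup):

```python
import boto3

# Hypothetical test config; keys mirror the aws_config.get(...) calls above.
aws_config = {
    "resource": "s3",
    "endpoint_url": "http://localhost:9000",  # MinIO from docker-compose
    "aws_access_key_id": "codecov-default-key",
    "aws_secret_access_key": "codecov-default-secret",
    "region_name": "us-east-1",
}

client = boto3.client(
    aws_config.get("resource"),
    endpoint_url=aws_config.get("endpoint_url"),
    aws_access_key_id=aws_config.get("aws_access_key_id"),
    aws_secret_access_key=aws_config.get("aws_secret_access_key"),
    region_name=aws_config.get("region_name"),
)
client.list_buckets()  # sanity check against the local endpoint
```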
83 changes: 43 additions & 40 deletions shared/storage/gcp.py
@@ -1,5 +1,6 @@
 import gzip
 import logging
+from typing import IO

 import google.cloud.exceptions
 from google.cloud import storage
@@ -26,8 +27,8 @@ def load_credentials(self, gcp_config):
         return Credentials.from_service_account_info(gcp_config)

     def get_blob(self, bucket_name, path):
-        bucket = self.storage_client.get_bucket(bucket_name)
-        return storage.Blob(path, bucket)
+        bucket = self.storage_client.bucket(bucket_name)
+        return bucket.blob(path)

     def create_root_storage(self, bucket_name="archive", region="us-east-1"):
         """
@@ -48,29 +49,27 @@ def create_root_storage(self, bucket_name="archive", region="us-east-1"):

     def write_file(
         self,
-        bucket_name,
-        path,
-        data,
+        bucket_name: str,
+        path: str,
+        data: str | bytes | IO,
         reduced_redundancy=False,
         *,
         is_already_gzipped: bool = False,
     ):
         """
-            Writes a new file with the contents of `data`
-            (What happens if the file already exists?)
+        Writes a new file with the contents of `data`
+        (What happens if the file already exists?)

         Args:
             bucket_name (str): The name of the bucket for the file to be created on
             path (str): The desired path of the file
-            data (str): The data to be written to the file
+            data: The data to be written to the file
             reduced_redundancy (bool): Whether a reduced redundancy mode should be used (default: {False})
             is_already_gzipped (bool): Whether the file is already gzipped (default: {False})

         Raises:
             NotImplementedError: If the current instance did not implement this method
         """
         blob = self.get_blob(bucket_name, path)

         if isinstance(data, str):
             data = data.encode()
         if isinstance(data, bytes):
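
With `data` widened to `str | bytes | IO`, the str branch encodes to bytes and falls through to the bytes branch; file-like objects skip both and upload directly. A hypothetical caller, assuming a configured `GCPStorageService` instance named `svc`:

```python
import io

# All three accepted forms (sketch; `svc` and the bucket name are hypothetical):
svc.write_file("archive", "a.txt", "plain text")             # str, encoded to bytes
svc.write_file("archive", "b.bin", b"\x00\x01")              # bytes
svc.write_file("archive", "c.txt", io.BytesIO(b"streamed"))  # file-like object
```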
@@ -84,34 +83,46 @@ def write_file(
         blob.upload_from_file(data)
         return True

-    def read_file(self, bucket_name, path, file_obj=None, *, retry=0):
+    def read_file(
+        self, bucket_name: str, path: str, file_obj: IO | None = None, retry=0
+    ) -> bytes:
         """Reads the content of a file

         Args:
             bucket_name (str): The name of the bucket for the file lives
             path (str): The path of the file

         Raises:
-            NotImplementedError: If the current instance did not implement this method
             FileNotInStorageError: If the file does not exist

         Returns:
-            bytes : The contents of that file, still encoded as bytes
+            bytes: The contents of that file, still encoded as bytes
         """
         blob = self.get_blob(bucket_name, path)

         try:
-            blob.reload()
-            if (
-                blob.content_type == "application/x-gzip"
-                and blob.content_encoding == "gzip"
-            ):
-                blob.content_type = "text/plain"
-                blob.content_encoding = "gzip"
-                blob.patch()
+            # The two `download_XYZ` below will transparently decompress gzip encoded files.
+            # However, that has a very weird interaction with a content-type of
+            # `application/x-gzip`, which can lead to bogus checksum mismatch errors.
+            # To avoid that for now without creating a wrapper that can
+            # decompress `gzip`, and potentially `zstd` compressed transparently,
+            # we will rewrite the metadata of the blob so that the `download_XYZ`
+            # will properly apply transparent decompression.
+            if retry:
+                blob.reload()
+                if (
+                    blob.content_type == "application/x-gzip"
+                    and blob.content_encoding == "gzip"
+                ):
+                    blob.content_type = "text/plain"
+                    blob.content_encoding = "gzip"
+                    blob.patch()
+
             if file_obj is None:
                 return blob.download_as_bytes(checksum="crc32c")
             else:
                 blob.download_to_file(file_obj, checksum="crc32c")
                 return b""
         except google.cloud.exceptions.NotFound:
             raise FileNotInStorageError(f"File {path} does not exist in {bucket_name}")
         except google.resumable_media.common.DataCorruption:
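
The rewrite only runs on the retry pass: the first `DataCorruption` is caught below and `read_file` re-enters itself with `retry=1`, at which point the metadata is patched so transparent gzip decompression and crc32c validation stop disagreeing. Spelled out against the raw client, the retry-path fix looks roughly like this (a sketch; bucket and path are placeholders):

```python
from google.cloud import storage

client = storage.Client()  # assumes ambient GCP credentials
blob = client.bucket("archive").blob("reports/report.txt")

blob.reload()  # fetch current metadata
if blob.content_type == "application/x-gzip" and blob.content_encoding == "gzip":
    # Re-label the payload so transparent decompression and crc32c
    # checksum validation agree with each other.
    blob.content_type = "text/plain"
    blob.patch()

data = blob.download_as_bytes(checksum="crc32c")
```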
@@ -120,19 +131,18 @@ def read_file(self, bucket_name, path, file_obj=None, *, retry=0):
                 return self.read_file(bucket_name, path, file_obj, retry=1)
             raise

-    def delete_file(self, bucket_name, path):
+    def delete_file(self, bucket_name: str, path: str) -> bool:
         """Deletes a single file from the storage (what happens if the file doesnt exist?)

         Args:
             bucket_name (str): The name of the bucket for the file lives
             path (str): The path of the file to be deleted

         Raises:
-            NotImplementedError: If the current instance did not implement this method
             FileNotInStorageError: If the file does not exist

         Returns:
-            bool: True if the deletion was succesful
+            bool: True if the deletion was successful
         """
         blob = self.get_blob(bucket_name, path)
         try:
@@ -141,28 +151,25 @@ def delete_file(self, bucket_name, path):
             raise FileNotInStorageError(f"File {path} does not exist in {bucket_name}")
         return True

-    def delete_files(self, bucket_name, paths=[]):
+    def delete_files(self, bucket_name: str, paths: list[str]) -> list[bool]:
         """Batch deletes a list of files from a given bucket
         (what happens to the files that don't exist?)

         Args:
             bucket_name (str): The name of the bucket for the file lives
             paths (list): A list of the paths to be deletes (default: {[]})
-
-        Raises:
-            NotImplementedError: If the current instance did not implement this method

         Returns:
             list: A list of booleans, where each result indicates whether that file was deleted
             successfully
         """
-        bucket = self.storage_client.get_bucket(bucket_name)
-        blobs = [self.get_blob(bucket_name, path) for path in paths]
+        bucket = self.storage_client.bucket(bucket_name)
+        blobs = [bucket.blob(path) for path in paths]
         blobs_errored = set()
         bucket.delete_blobs(blobs, on_error=blobs_errored.add)
         return [b not in blobs_errored for b in blobs]

-    def list_folder_contents(self, bucket_name, prefix=None, recursive=True):
+    def list_folder_contents(self, bucket_name: str, prefix=None, recursive=True):
         """List the contents of a specific folder

         Attention: google ignores the `recursive` param
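
For `delete_files` above, `delete_blobs()` reports failures through the `on_error` callback rather than raising on the first missing blob, which is what lets the method return one boolean per path. The underlying pattern, as a sketch against the raw client (paths are placeholders):

```python
from google.cloud import storage

client = storage.Client()  # assumes ambient GCP credentials
bucket = client.bucket("archive")

blobs = [bucket.blob(p) for p in ["reports/a.txt", "reports/missing.txt"]]
blobs_errored = set()
# on_error receives each blob that failed to delete instead of raising.
bucket.delete_blobs(blobs, on_error=blobs_errored.add)
results = [b not in blobs_errored for b in blobs]  # e.g. [True, False]
```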
@@ -171,13 +178,9 @@ def list_folder_contents(self, bucket_name, prefix=None, recursive=True):
             bucket_name (str): The name of the bucket for the file lives
             prefix: The prefix of the files to be listed (default: {None})
             recursive: Whether the listing should be recursive (default: {True})
-
-        Raises:
-            NotImplementedError: If the current instance did not implement this method
         """
         assert recursive
-        bucket = self.storage_client.get_bucket(bucket_name)
-        return (self._blob_to_dict(b) for b in bucket.list_blobs(prefix=prefix))
-
-    def _blob_to_dict(self, blob):
-        return {"name": blob.name, "size": blob.size}
+        bucket = self.storage_client.bucket(bucket_name)
+        return (
+            {"name": b.name, "size": b.size} for b in bucket.list_blobs(prefix=prefix)
+        )
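
With `_blob_to_dict` inlined, `list_folder_contents` now lazily yields plain dicts. A hypothetical consumer (`svc` is a configured `GCPStorageService`):

```python
# Entries are produced lazily by the generator; nothing is listed until iterated.
for entry in svc.list_folder_contents("archive", prefix="reports/"):
    print(entry["name"], entry["size"])
```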
10 changes: 5 additions & 5 deletions shared/storage/minio.py
@@ -56,17 +56,17 @@ def init_minio_client(
         region: str = None,
     ):
         """
-            Initialize the minio client
+        Initialize the minio client

         `iam_auth` adds support for IAM base authentication in a fallback pattern.
-            The following will be checked in order:
+        The following will be checked in order:

         * EC2 metadata -- a custom endpoint can be provided, default is None.
-        * AWS env vars, specifically AWS_ACCESS_KEY and AWS_SECRECT_KEY
         * Minio env vars, specifically MINIO_ACCESS_KEY and MINIO_SECRET_KEY
+        * AWS env vars, specifically AWS_ACCESS_KEY and AWS_SECRECT_KEY

-        to support backward compatibility, the iam_auth setting should be used in the installation
-        configuration
+        to support backward compatibility, the iam_auth setting should be used
+        in the installation configuration

         Args:
             host (str): The address of the host where minio lives
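
The reordered fallback now prefers EC2 metadata, then the MinIO env vars, then the AWS ones. For the metadata leg, minio-py ships an IAM provider; a sketch of what IAM-based construction can look like (not this module's actual wiring, and the endpoint value is an assumption):

```python
from minio import Minio
from minio.credentials import IamAwsProvider

# Credentials are fetched from the EC2/ECS metadata service at request
# time instead of being passed as static keys.
client = Minio(
    "s3.amazonaws.com",
    credentials=IamAwsProvider(),
)
```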
Empty file added tests/unit/storage/__init__.py