Data versioning support #99
```diff
@@ -6,7 +6,6 @@
 import sys

 import boto3
-
 from requests.auth import _basic_auth_str
```
```diff
@@ -4,7 +4,6 @@
 import argparse
 import os
 import sys
-
 from pathlib import Path

 import boto3
```
dogversioning: The intent with this file:

After this is applied, I will either remove it in a follow-on commit, or change it so that it is not runnable unless you :really: try, just in case we want an easy template for a future version of a similar operation. I could also be convinced to move this to a different folder - perhaps ./migrations?

I've already run this script on the dev bucket. If you want, I can reset it by copying over prod and you can give it a run yourself. The initial copy is slow - if this wasn't a one-and-done I'd put a progress bar on it.

Reviewer: Eh, I don't need to run it. Sounds good - I can see some value in keeping it around, but you can just as easily add a line to some file here that's like ...
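A minimal sketch of the "not runnable unless you :really: try" guard mentioned above - the flag name is hypothetical, not something in this PR:

```python
import argparse
import sys

parser = argparse.ArgumentParser(description="Util for migrating aggregator data")
parser.add_argument("-b", "--bucket", help="bucket name")
# Hypothetical safety flag: the migration has already been applied, so make
# accidental re-runs impossible without an explicit opt-in.
parser.add_argument("--yes-i-really-mean-it", action="store_true")
args = parser.parse_args()
if not args.yes_i_really_mean_it:
    sys.exit("This migration already ran; pass --yes-i-really-mean-it to force it.")
```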
New file - the one-off migration script:

```python
"""Utility for adding versioning to an existing aggregator data store

This is a one time thing for us, so the CLI/Boto creds are not robust.
"""
import argparse
import io
import json

import boto3

UPLOAD_ROOT_BUCKETS = ["archive", "error", "last_valid", "latest", "site_upload"]


def _get_s3_data(key: str, bucket_name: str, client) -> dict:
    """Convenience function for retrieving a dict from S3"""
    try:
        bytes_buffer = io.BytesIO()
        client.download_fileobj(Bucket=bucket_name, Key=key, Fileobj=bytes_buffer)
        return json.loads(bytes_buffer.getvalue().decode())
    except Exception:  # pylint: disable=broad-except
        return {}


def _put_s3_data(key: str, bucket_name: str, client, data: dict) -> None:
    """Convenience function for writing a dict to S3"""
    b_data = io.BytesIO(json.dumps(data).encode())
    client.upload_fileobj(Bucket=bucket_name, Key=key, Fileobj=b_data)


def _get_depth(d):
    """Returns the nesting depth of a dict: 0 for non-dicts, 1 for a flat dict."""
    if isinstance(d, dict):
        return 1 + (max(map(_get_depth, d.values())) if d else 0)
    return 0


def migrate_bucket_versioning(bucket: str):
    client = boto3.client("s3")
    # Note: list_objects_v2 returns at most 1000 keys per call; fine for a
    # one-off like this, but a robust version would paginate.
    res = client.list_objects_v2(Bucket=bucket)
    contents = res["Contents"]
    moved_files = 0
    for s3_file in contents:
        if s3_file["Key"].split("/")[0] in UPLOAD_ROOT_BUCKETS:
            key = s3_file["Key"]
            key_array = key.split("/")
            # Five-segment keys predate versioning; splice a "000" version
            # segment in just ahead of the filename.
            if len(key_array) == 5:
                key_array.insert(4, "000")
                new_key = "/".join(key_array)
                client.copy({"Bucket": bucket, "Key": key}, bucket, new_key)
                client.delete_object(Bucket=bucket, Key=key)
                moved_files += 1
    print(f"Moved {moved_files} uploads")

    # An unversioned study_periods.json nests site -> study -> template
    # (depth 3); wrap each study's data in a "000" version level.
    study_periods = _get_s3_data("metadata/study_periods.json", bucket, client)

    if _get_depth(study_periods) == 3:
        new_sp = {}
        for site in study_periods:
            new_sp[site] = {}
            for study in study_periods[site]:
                new_sp[site][study] = {}
                new_sp[site][study]["000"] = study_periods[site][study]
        _put_s3_data("metadata/study_periods.json", bucket, client, new_sp)
        print("study_periods.json updated")
    else:
        print("study_periods.json does not need update")

    # Same treatment for transactions.json, which has one more level
    # (site -> study -> data package -> template, depth 4).
    transactions = _get_s3_data("metadata/transactions.json", bucket, client)
    if _get_depth(transactions) == 4:
        new_t = {}
        for site in transactions:
            new_t[site] = {}
            for study in transactions[site]:
                new_t[site][study] = {}
                for dp in transactions[site][study]:
                    new_t[site][study][dp] = {}
                    new_t[site][study][dp]["000"] = transactions[site][study][dp]
        _put_s3_data("metadata/transactions.json", bucket, client, new_t)
        print("transactions.json updated")
    else:
        print("transactions.json does not need update")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description="""Util for migrating aggregator data"""
    )
    parser.add_argument("-b", "--bucket", help="bucket name")
    args = parser.parse_args()
    migrate_bucket_versioning(args.bucket)
```
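For concreteness, what the key rewrite does to a single upload - the study/site/file names are invented; only the five-segment shape and the inserted "000" come from the script:

```python
old_key = "latest/study_x/encounter/site_a/encounter.parquet"  # hypothetical pre-versioning key
key_array = old_key.split("/")
key_array.insert(4, "000")  # version segment lands just before the filename
assert "/".join(key_array) == "latest/study_x/encounter/site_a/000/encounter.parquet"
```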
Changes to the shared lambda helpers ("Functions used across different lambdas"):

```diff
@@ -1,10 +1,9 @@
 """ Functions used across different lambdas"""
 import io
-import logging
 import json
-
-from typing import Dict, Optional
+import logging
 from datetime import datetime, timezone
+from typing import Optional


 import boto3
```
```diff
@@ -26,7 +25,7 @@
 }


-def http_response(status: int, body: str, allow_cors: bool = False) -> Dict:
+def http_response(status: int, body: str, allow_cors: bool = False) -> dict:
     """Generates the payload AWS lambda expects as a return value"""
     headers = {"Content-Type": "application/json"}
     if allow_cors:
```
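As background, a sketch of the Lambda proxy-style payload a helper like this typically returns - the body shape here is an assumption, not this repo's exact implementation:

```python
import json


def http_response(status: int, body: str, allow_cors: bool = False) -> dict:
    """Sketch of a Lambda proxy-integration response payload."""
    headers = {"Content-Type": "application/json"}
    if allow_cors:
        # Assumed CORS header; the real implementation may set others.
        headers["Access-Control-Allow-Origin"] = "*"
    return {
        "statusCode": status,
        "headers": headers,
        "body": json.dumps({"message": body}),
    }
```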
```diff
@@ -57,7 +56,7 @@ def check_meta_type(meta_type: str) -> None:

 def read_metadata(
     s3_client, s3_bucket_name: str, meta_type: str = JsonFilename.TRANSACTIONS.value
-) -> Dict:
+) -> dict:
     """Reads transaction information from an s3 bucket as a dictionary"""
     check_meta_type(meta_type)
     s3_path = f"{BucketPath.META.value}/{meta_type}.json"
```
```diff
@@ -71,10 +70,11 @@ def read_metadata(


 def update_metadata(
-    metadata: Dict,
+    metadata: dict,
     site: str,
     study: str,
     data_package: str,
+    version: str,
     target: str,
     dt: Optional[datetime] = None,
     meta_type: str = JsonFilename.TRANSACTIONS.value,
```
```diff
@@ -84,25 +84,27 @@ def update_metadata(
     if meta_type == JsonFilename.TRANSACTIONS.value:
         site_metadata = metadata.setdefault(site, {})
         study_metadata = site_metadata.setdefault(study, {})
-        data_package_metadata = study_metadata.setdefault(
-            data_package, TRANSACTION_METADATA_TEMPLATE
-        )
+        data_package_metadata = study_metadata.setdefault(data_package, {})
+        version_metadata = data_package_metadata.setdefault(
+            version, TRANSACTION_METADATA_TEMPLATE
+        )
         dt = dt or datetime.now(timezone.utc)
-        data_package_metadata[target] = dt.isoformat()
+        version_metadata[target] = dt.isoformat()
```
Reviewer: Why is the version for a target getting set to a date? That bumped me.

dogversioning: So that comes back to this data structure (as an example from the unit tests):

Other than the transaction format version, these are always either datetimes or None. But I can do two things:

Reviewer: I wouldn't push for you doing anything, I think this was me just not being familiar with the data structures / flow here. And maybe the re-use of the increasingly common word ... But regardless, it's fine as is.
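For reference, a hedged sketch of the versioned transactions.json shape implied by the diff - the site/study/data-package/target names and dates below are invented; only the nesting levels and the "000" version key come from the code:

```python
# Hypothetical post-versioning transactions.json contents. Each version level
# ("000") holds a template dict whose targets map to ISO datetimes (or None).
transactions = {
    "site_a": {
        "study_x": {
            "encounter": {
                "000": {
                    "transaction_format_version": "2",  # assumed template field
                    "last_upload": "2023-02-24T15:03:34+00:00",
                    "last_error": None,
                },
            },
        },
    },
}
```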
```diff
     elif meta_type == JsonFilename.STUDY_PERIODS.value:
         site_metadata = metadata.setdefault(site, {})
-        study_period_metadata = site_metadata.setdefault(
-            study, STUDY_PERIOD_METADATA_TEMPLATE
-        )
+        study_period_metadata = site_metadata.setdefault(study, {})
+        version_metadata = study_period_metadata.setdefault(
+            version, STUDY_PERIOD_METADATA_TEMPLATE
+        )
         dt = dt or datetime.now(timezone.utc)
-        study_period_metadata[target] = dt.isoformat()
+        version_metadata[target] = dt.isoformat()
     return metadata


 def write_metadata(
     s3_client,
     s3_bucket_name: str,
-    metadata: Dict,
+    metadata: dict,
     meta_type: str = JsonFilename.TRANSACTIONS.value,
 ) -> None:
     """Writes transaction info from a dictionary to an s3 bucket metadata location"""
```
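To show the new version parameter in context, a hedged call sketch - the bucket name and metadata values are invented, and it assumes the helpers are imported from the shared module:

```python
from datetime import datetime, timezone

import boto3

# Assuming read_metadata / update_metadata / write_metadata are in scope,
# e.g. imported from the shared lambda helpers module.
s3_client = boto3.client("s3")

# Record that site_a uploaded version "000" of the encounter data package.
# "last_upload" as a target name is an assumption for illustration.
metadata = read_metadata(s3_client, "my-aggregator-bucket")
metadata = update_metadata(
    metadata,
    site="site_a",
    study="study_x",
    data_package="encounter",
    version="000",
    target="last_upload",
    dt=datetime.now(timezone.utc),
)
write_metadata(s3_client, "my-aggregator-bucket", metadata)
```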
dogversioning: This is a little unrelated, but since isort pulled up a couple instances of typing reorgs, I pinned this version and removed all Dict/List typing in favor of 3.9+ dict/list.
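For readers unfamiliar with that change: on Python 3.9+ the builtin container types are subscriptable (PEP 585), so typing.Dict and typing.List are unnecessary. A standard-library illustration, not repo code:

```python
from typing import Dict, List, Optional


def old_style(rows: List[Dict[str, str]]) -> Optional[Dict[str, int]]:
    """Pre-3.9 style: container generics imported from typing."""
    ...


def new_style(rows: list[dict[str, str]]) -> Optional[dict[str, int]]:
    """3.9+ style: builtin list/dict are subscriptable directly.

    Optional still comes from typing until 3.10's "X | None" syntax.
    """
    ...
```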