Merge pull request #507 from hubmapconsortium/shirey/disable-metadata-file-gen-at-pub

disable metadata.json file generation during publication
yuanzhou authored Mar 1, 2024
2 parents 02bb6b5 + 38feac2 commit d0a2d19
Showing 1 changed file with 12 additions and 12 deletions.
24 changes: 12 additions & 12 deletions src/app.py
@@ -1028,18 +1028,18 @@ def publish_datastage(identifier):
     out = entity_instance.clear_cache(e_id)
 
     # Write out the metadata.json file after all processing has been done...
-    ds_path = ingest_helper.dataset_directory_absolute_path(dataset_data_access_level,
-                                                            dataset_group_uuid, dataset_uuid, False)
-    md_file = os.path.join(ds_path, "metadata.json")
-    json_object = entity_json_dumps(entity_instance, dataset_uuid)
-    logger.info(f"publish_datastage; writing metadata.json file: '{md_file}'; "
-                f"containing: '{json_object}'")
-    try:
-        with open(md_file, "w") as outfile:
-            outfile.write(json_object)
-    except Exception as e:
-        logger.exception(f"Fatal error while writing md_file {md_file}; {str(e)}")
-        return jsonify({"error": f"{dataset_uuid} problem writing metadata.json file."}), 500
+    # ds_path = ingest_helper.dataset_directory_absolute_path(dataset_data_access_level,
+    #                                                         dataset_group_uuid, dataset_uuid, False)
+    # md_file = os.path.join(ds_path, "metadata.json")
+    # json_object = entity_json_dumps(entity_instance, dataset_uuid)
+    # logger.info(f"publish_datastage; writing metadata.json file: '{md_file}'; "
+    #             f"containing: '{json_object}'")
+    # try:
+    #     with open(md_file, "w") as outfile:
+    #         outfile.write(json_object)
+    # except Exception as e:
+    #     logger.exception(f"Fatal error while writing md_file {md_file}; {str(e)}")
+    #     return jsonify({"error": f"{dataset_uuid} problem writing metadata.json file."}), 500
 
     if no_indexing_and_acls:
         r_val = {'acl_cmd': acls_cmd, 'donors_for_indexing': donors_to_reindex}
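For context, the behavior commented out above can still be reproduced outside of publish_datastage if metadata.json generation is ever needed again. The sketch below is illustrative only: write_dataset_metadata_json is a hypothetical name, the collaborators (ingest_helper, entity_instance, and the entity_json_dumps serializer) are injected as arguments rather than taken from app.py's module scope, and the original try/except that returned a 500 response is left to the caller.

# Hypothetical helper reconstructed from the commented-out block above; not part of this commit.
import os
import logging

logger = logging.getLogger(__name__)


def write_dataset_metadata_json(ingest_helper, entity_instance, entity_json_dumps,
                                dataset_uuid, dataset_group_uuid, dataset_data_access_level):
    """Serialize the dataset entity and write metadata.json into its data directory.

    Assumes ingest_helper.dataset_directory_absolute_path(...) resolves the dataset
    directory and entity_json_dumps(entity_instance, dataset_uuid) returns a JSON string,
    as in the removed block.
    """
    ds_path = ingest_helper.dataset_directory_absolute_path(dataset_data_access_level,
                                                            dataset_group_uuid, dataset_uuid, False)
    md_file = os.path.join(ds_path, "metadata.json")
    json_object = entity_json_dumps(entity_instance, dataset_uuid)
    logger.info(f"writing metadata.json file: '{md_file}'")
    with open(md_file, "w") as outfile:
        outfile.write(json_object)
    return md_file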
