From bd70faff3072f8ee04ee071b9804a1dc111cfda6 Mon Sep 17 00:00:00 2001 From: Arash Date: Thu, 12 Sep 2024 14:42:57 +0200 Subject: [PATCH 01/22] Add tests for library contents API --- lib/galaxy_test/api/test_library_contents.py | 258 +++++++++++++++++++ 1 file changed, 258 insertions(+) create mode 100644 lib/galaxy_test/api/test_library_contents.py diff --git a/lib/galaxy_test/api/test_library_contents.py b/lib/galaxy_test/api/test_library_contents.py new file mode 100644 index 000000000000..5afd51f78386 --- /dev/null +++ b/lib/galaxy_test/api/test_library_contents.py @@ -0,0 +1,258 @@ +from typing import Any + +from galaxy_test.base.populators import ( + DatasetCollectionPopulator, + DatasetPopulator, + LibraryPopulator, +) +from ._framework import ApiTestCase + + +class TestLibraryContentsApi(ApiTestCase): + dataset_populator: DatasetPopulator + + def setUp(self): + super().setUp() + self.dataset_populator = DatasetPopulator(self.galaxy_interactor) + self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor) + self.library_populator = LibraryPopulator(self.galaxy_interactor) + + self.library = self.library_populator.new_private_library("TestLibrary") + self.history = self.dataset_populator.new_history() + + def test_create_folder(self): + folder_list = self._create_library_content(type="folder") + assert isinstance(folder_list, list), "Expected response to be a list" + for folder in folder_list: + self._assert_has_keys(folder, "id", "name") + + def test_create_file_from_hda(self): + file_item = self._create_library_content(type="from_hda") + self._assert_has_keys(file_item, "id", "name") + + def test_create_file_from_hdca(self): + files = self._create_library_content(type="from_hdca") + assert isinstance(files, list), "Response should be a list of files" + for file_item in files: + self._assert_has_keys(file_item, "id", "name") + + def test_create_invalid(self): + library_id = self.library["id"] + folder_id = 
self.library["root_folder_id"] + + payload = {"folder_id": folder_id, "create_type": "invalid_type"} + response = self._post(f"/api/libraries/{library_id}/contents", data=payload) + self._assert_status_code_is(response, 400) + + def test_index(self): + library_id = self.library["id"] + response = self._get(f"/api/libraries/{library_id}/contents") + self._assert_status_code_is(response, 200) + + contents = response.json() + assert isinstance(contents, list), "Expected response to be a list" + + for item in contents: + self._assert_has_keys(item, "id", "name", "type", "url") + + def test_get_library_contents_invalid_id(self): + invalid_item_id = "invalid_id" + response = self._get(f"/api/libraries/{invalid_item_id}/contents") + self._assert_status_code_is(response, 400) + + def test_get_library_folder(self): + library_id = self.library["id"] + folder_id = self._create_library_content(type="folder")[0]["id"] + response = self._get(f"/api/libraries/{library_id}/contents/{folder_id}") + self._assert_status_code_is(response, 200) + + folder_info = response.json() + self._assert_has_keys( + folder_info, + "model_class", + "id", + "parent_id", + "name", + "description", + "item_count", + "genome_build", + "update_time", + "deleted", + "library_path", + "parent_library_id", + ) + + def test_get_library_file_from_hda(self): + library_id = self.library["id"] + file_id = self._create_library_content(type="from_hda")["id"] + response = self._get(f"/api/libraries/{library_id}/contents/{file_id}") + self._assert_status_code_is(response, 200) + + file_info = response.json() + self._assert_has_keys( + file_info, + "id", + "ldda_id", + "parent_library_id", + "folder_id", + "model_class", + "state", + "name", + "file_name", + "created_from_basename", + "uploaded_by", + "message", + "date_uploaded", + "update_time", + "file_size", + "file_ext", + "data_type", + "genome_build", + "misc_info", + "misc_blurb", + "peek", + "uuid", + "metadata_dbkey", + "metadata_data_lines", + "tags", + ) 
+ + def test_get_library_file_from_hdca(self): + library_id = self.library["id"] + file_id = self._create_library_content(type="from_hdca")[0]["id"] + response = self._get(f"/api/libraries/{library_id}/contents/{file_id}") + self._assert_status_code_is(response, 200) + + file_info = response.json() + self._assert_has_keys( + file_info, + "id", + "ldda_id", + "parent_library_id", + "folder_id", + "model_class", + "state", + "name", + "file_name", + "created_from_basename", + "uploaded_by", + "message", + "date_uploaded", + "update_time", + "file_size", + "file_ext", + "data_type", + "genome_build", + "misc_info", + "misc_blurb", + "peek", + "uuid", + "metadata_dbkey", + "metadata_data_lines", + "tags", + ) + + def test_get_invalid_library_item(self): + library_id = self.library["id"] + invalid_item_id = "invalid_id" + response = self._get(f"/api/libraries/{library_id}/contents/{invalid_item_id}") + self._assert_status_code_is(response, 400) + + def test_update_library_folder(self): + library_id = self.library["id"] + file_id = self._create_library_content(type="folder")[0]["id"] + converted_dataset_id = self.dataset_populator.new_dataset(self.history)["id"] + payload = {"converted_dataset_id": converted_dataset_id} + response = self._put(f"/api/libraries/{library_id}/contents/{file_id}", data=payload) + self._assert_status_code_is(response, 200) + + def test_update_library_item_from_hda(self): + library_id = self.library["id"] + file_id = self._create_library_content(type="from_hda")["id"] + converted_dataset_id = self.dataset_populator.new_dataset(self.history)["id"] + + payload = {"converted_dataset_id": converted_dataset_id} + response = self._put(f"/api/libraries/{library_id}/contents/{file_id}", data=payload) + self._assert_status_code_is(response, 200) + + def test_update_library_item_from_hdca(self): + library_id = self.library["id"] + file_id = self._create_library_content(type="from_hdca")[0]["id"] + converted_dataset_id = 
self.dataset_populator.new_dataset(self.history)["id"] + + payload = {"converted_dataset_id": converted_dataset_id} + response = self._put(f"/api/libraries/{library_id}/contents/{file_id}", data=payload) + self._assert_status_code_is(response, 200) + + def test_update_invalid_library_item(self): + library_id = self.library["id"] + invalid_item_id = "invalid_id" + converted_dataset_id = self.dataset_populator.new_dataset(self.history)["id"] + + payload = {"converted_dataset_id": converted_dataset_id} + response = self._put(f"/api/libraries/{library_id}/contents/{invalid_item_id}", data=payload) + self._assert_status_code_is(response, 400) + + def test_delete_library_folder(self): + library_id = self.library["id"] + file_id = self._create_library_content(type="folder")[0]["id"] + + response = self._delete(f"/api/libraries/{library_id}/contents/{file_id}") + self._assert_status_code_is(response, 200) + + def test_delete_library_item_from_hda(self): + library_id = self.library["id"] + file_id = self._create_library_content(type="from_hda")["id"] + + response = self._delete(f"/api/libraries/{library_id}/contents/{file_id}") + self._assert_status_code_is(response, 200) + + def test_delete_library_item_from_hdca(self): + library_id = self.library["id"] + file_id = self._create_library_content(type="from_hdca")[0]["id"] + + response = self._delete(f"/api/libraries/{library_id}/contents/{file_id}") + self._assert_status_code_is(response, 200) + + def test_delete_invalid_library_item(self): + library_id = self.library["id"] + invalid_item_id = "invalid_id" + response = self._delete(f"/api/libraries/{library_id}/contents/{invalid_item_id}") + self._assert_status_code_is(response, 500) + + def _create_library_content(self, type) -> Any: + folder_id = self.library["root_folder_id"] + library_id = self.library["id"] + + if type == "folder": + folder_name = "NewFolder" + payload = { + "folder_id": folder_id, + "create_type": "folder", + "name": folder_name, + "description": 
"Test", + } + + elif type == "from_hda": + dataset_id = self.dataset_populator.new_dataset(self.history)["id"] + payload = { + "folder_id": folder_id, + "create_type": "file", + "from_hda_id": dataset_id, + "ldda_message": "Test", + } + + elif type == "from_hdca": + hdca_id = self.dataset_collection_populator.create_list_in_history( + self.history, contents=["dataset01", "dataset02"], direct_upload=True, wait=True + ).json()["outputs"][0]["id"] + payload = { + "folder_id": folder_id, + "create_type": "file", + "from_hdca_id": hdca_id, + "ldda_message": "Test", + } + + response = self._post(f"/api/libraries/{library_id}/contents", data=payload) + self._assert_status_code_is(response, 200) + return response.json() From d1ac6f049554b1119f40e7f5d4b33b518f8d8c3e Mon Sep 17 00:00:00 2001 From: Arash Date: Fri, 13 Sep 2024 12:19:17 +0200 Subject: [PATCH 02/22] Deleting and updating not possible for folders! Updating test_update. --- lib/galaxy_test/api/test_library_contents.py | 21 +++----------------- 1 file changed, 3 insertions(+), 18 deletions(-) diff --git a/lib/galaxy_test/api/test_library_contents.py b/lib/galaxy_test/api/test_library_contents.py index 5afd51f78386..afdd31516801 100644 --- a/lib/galaxy_test/api/test_library_contents.py +++ b/lib/galaxy_test/api/test_library_contents.py @@ -158,18 +158,10 @@ def test_get_invalid_library_item(self): response = self._get(f"/api/libraries/{library_id}/contents/{invalid_item_id}") self._assert_status_code_is(response, 400) - def test_update_library_folder(self): - library_id = self.library["id"] - file_id = self._create_library_content(type="folder")[0]["id"] - converted_dataset_id = self.dataset_populator.new_dataset(self.history)["id"] - payload = {"converted_dataset_id": converted_dataset_id} - response = self._put(f"/api/libraries/{library_id}/contents/{file_id}", data=payload) - self._assert_status_code_is(response, 200) - def test_update_library_item_from_hda(self): library_id = self.library["id"] file_id = 
self._create_library_content(type="from_hda")["id"] - converted_dataset_id = self.dataset_populator.new_dataset(self.history)["id"] + converted_dataset_id = self._create_library_content(type="from_hda")["id"] payload = {"converted_dataset_id": converted_dataset_id} response = self._put(f"/api/libraries/{library_id}/contents/{file_id}", data=payload) @@ -178,7 +170,7 @@ def test_update_library_item_from_hda(self): def test_update_library_item_from_hdca(self): library_id = self.library["id"] file_id = self._create_library_content(type="from_hdca")[0]["id"] - converted_dataset_id = self.dataset_populator.new_dataset(self.history)["id"] + converted_dataset_id = self._create_library_content(type="from_hdca")[0]["id"] payload = {"converted_dataset_id": converted_dataset_id} response = self._put(f"/api/libraries/{library_id}/contents/{file_id}", data=payload) @@ -187,19 +179,12 @@ def test_update_library_item_from_hdca(self): def test_update_invalid_library_item(self): library_id = self.library["id"] invalid_item_id = "invalid_id" - converted_dataset_id = self.dataset_populator.new_dataset(self.history)["id"] + converted_dataset_id = invalid_item_id payload = {"converted_dataset_id": converted_dataset_id} response = self._put(f"/api/libraries/{library_id}/contents/{invalid_item_id}", data=payload) self._assert_status_code_is(response, 400) - def test_delete_library_folder(self): - library_id = self.library["id"] - file_id = self._create_library_content(type="folder")[0]["id"] - - response = self._delete(f"/api/libraries/{library_id}/contents/{file_id}") - self._assert_status_code_is(response, 200) - def test_delete_library_item_from_hda(self): library_id = self.library["id"] file_id = self._create_library_content(type="from_hda")["id"] From 507e7c318e2a8fc9656becc3fc0be5c68a8a40ed Mon Sep 17 00:00:00 2001 From: Arash Date: Mon, 16 Sep 2024 18:24:47 +0200 Subject: [PATCH 03/22] Refactor library content routes and controllers --- lib/galaxy/actions/library.py | 187 +----- 
.../webapps/galaxy/api/library_contents.py | 573 +++-------------- lib/galaxy/webapps/galaxy/buildapp.py | 22 +- .../galaxy/services/library_contents.py | 602 ++++++++++++++++++ 4 files changed, 690 insertions(+), 694 deletions(-) create mode 100644 lib/galaxy/webapps/galaxy/services/library_contents.py diff --git a/lib/galaxy/actions/library.py b/lib/galaxy/actions/library.py index 72b651e1869a..e56c4dc180c5 100644 --- a/lib/galaxy/actions/library.py +++ b/lib/galaxy/actions/library.py @@ -2,28 +2,18 @@ Contains library functions """ -import json import logging import os.path -from typing import Optional - -from markupsafe import escape from galaxy import util from galaxy.exceptions import ( AdminRequiredException, ConfigDoesNotAllowException, - ItemAccessibilityException, - ObjectNotFound, RequestParameterInvalidException, -) -from galaxy.model import ( - LibraryDataset, - LibraryFolder, + RequestParameterMissingException, ) from galaxy.model.base import transaction from galaxy.tools.actions import upload_common -from galaxy.tools.parameters import populate_state from galaxy.util.path import ( safe_contains, safe_relpath, @@ -93,125 +83,6 @@ class LibraryActions: Mixin for controllers that provide library functionality. 
""" - def _upload_dataset(self, trans, folder_id: int, replace_dataset: Optional[LibraryDataset] = None, **kwd): - # Set up the traditional tool state/params - cntrller = "api" - tool_id = "upload1" - message = None - file_type = kwd.get("file_type") - try: - upload_common.validate_datatype_extension(datatypes_registry=trans.app.datatypes_registry, ext=file_type) - except RequestParameterInvalidException as e: - return (400, util.unicodify(e)) - tool = trans.app.toolbox.get_tool(tool_id) - state = tool.new_state(trans) - populate_state(trans, tool.inputs, kwd, state.inputs) - tool_params = state.inputs - dataset_upload_inputs = [] - for input in tool.inputs.values(): - if input.type == "upload_dataset": - dataset_upload_inputs.append(input) - # Library-specific params - server_dir = kwd.get("server_dir", "") - upload_option = kwd.get("upload_option", "upload_file") - response_code = 200 - if upload_option == "upload_directory": - full_dir, import_dir_desc = validate_server_directory_upload(trans, server_dir) - message = "Select a directory" - elif upload_option == "upload_paths": - # Library API already checked this - following check isn't actually needed. - validate_path_upload(trans) - # Some error handling should be added to this method. - try: - # FIXME: instead of passing params here ( which have been processed by util.Params(), the original kwd - # should be passed so that complex objects that may have been included in the initial request remain. - library_bunch = upload_common.handle_library_params(trans, kwd, folder_id, replace_dataset) - except Exception: - response_code = 500 - message = "Unable to parse upload parameters, please report this error." 
- # Proceed with (mostly) regular upload processing if we're still errorless - if response_code == 200: - if upload_option == "upload_file": - tool_params = upload_common.persist_uploads(tool_params, trans) - uploaded_datasets = upload_common.get_uploaded_datasets( - trans, cntrller, tool_params, dataset_upload_inputs, library_bunch=library_bunch - ) - elif upload_option == "upload_directory": - uploaded_datasets, response_code, message = self._get_server_dir_uploaded_datasets( - trans, kwd, full_dir, import_dir_desc, library_bunch, response_code, message - ) - elif upload_option == "upload_paths": - uploaded_datasets, response_code, message = self._get_path_paste_uploaded_datasets( - trans, kwd, library_bunch, response_code, message - ) - if upload_option == "upload_file" and not uploaded_datasets: - response_code = 400 - message = "Select a file, enter a URL or enter text" - if response_code != 200: - return (response_code, message) - json_file_path = upload_common.create_paramfile(trans, uploaded_datasets) - data_list = [ud.data for ud in uploaded_datasets] - job_params = {} - job_params["link_data_only"] = json.dumps(kwd.get("link_data_only", "copy_files")) - job_params["uuid"] = json.dumps(kwd.get("uuid", None)) - job, output = upload_common.create_job( - trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params - ) - trans.app.job_manager.enqueue(job, tool=tool) - return output - - def _get_server_dir_uploaded_datasets( - self, trans, params, full_dir, import_dir_desc, library_bunch, response_code, message - ): - dir_response = self._get_server_dir_files(params, full_dir, import_dir_desc) - files = dir_response[0] - if not files: - return dir_response - uploaded_datasets = [] - for file in files: - name = os.path.basename(file) - uploaded_datasets.append( - self._make_library_uploaded_dataset(trans, params, name, file, "server_dir", library_bunch) - ) - return uploaded_datasets, 200, None - - def 
_get_server_dir_files(self, params, full_dir, import_dir_desc): - files = [] - try: - for entry in os.listdir(full_dir): - # Only import regular files - path = os.path.join(full_dir, entry) - link_data_only = params.get("link_data_only", "copy_files") - if os.path.islink(full_dir) and link_data_only == "link_to_files": - # If we're linking instead of copying and the - # sub-"directory" in the import dir is actually a symlink, - # dereference the symlink, but not any of its contents. - link_path = os.readlink(full_dir) - if os.path.isabs(link_path): - path = os.path.join(link_path, entry) - else: - path = os.path.abspath(os.path.join(link_path, entry)) - elif os.path.islink(path) and os.path.isfile(path) and link_data_only == "link_to_files": - # If we're linking instead of copying and the "file" in the - # sub-directory of the import dir is actually a symlink, - # dereference the symlink (one dereference only, Vasili). - link_path = os.readlink(path) - if os.path.isabs(link_path): - path = link_path - else: - path = os.path.abspath(os.path.join(os.path.dirname(path), link_path)) - if os.path.isfile(path): - files.append(path) - except Exception as e: - message = f"Unable to get file list for configured {import_dir_desc}, error: {util.unicodify(e)}" - response_code = 500 - return None, response_code, message - if not files: - message = f"The directory '{full_dir}' contains no valid files" - response_code = 400 - return None, response_code, message - return files, None, None - def _get_path_paste_uploaded_datasets(self, trans, params, library_bunch, response_code, message): preserve_dirs = util.string_as_bool(params.get("preserve_dirs", False)) uploaded_datasets = [] @@ -306,59 +177,3 @@ def _make_library_uploaded_dataset(self, trans, params, name, path, type, librar with transaction(trans.sa_session): trans.sa_session.commit() return uploaded_dataset - - def _create_folder(self, trans, parent_id: int, **kwd): - is_admin = trans.user_is_admin - current_user_roles = 
trans.get_current_user_roles() - parent_folder = trans.sa_session.get(LibraryFolder, parent_id) - # Check the library which actually contains the user-supplied parent folder, not the user-supplied - # library, which could be anything. - self._check_access(trans, is_admin, parent_folder, current_user_roles) - self._check_add(trans, is_admin, parent_folder, current_user_roles) - new_folder = LibraryFolder(name=kwd.get("name", ""), description=kwd.get("description", "")) - # We are associating the last used genome build with folders, so we will always - # initialize a new folder with the first dbkey in genome builds list which is currently - # ? unspecified (?) - new_folder.genome_build = trans.app.genome_builds.default_value - parent_folder.add_folder(new_folder) - trans.sa_session.add(new_folder) - with transaction(trans.sa_session): - trans.sa_session.commit() - # New folders default to having the same permissions as their parent folder - trans.app.security_agent.copy_library_permissions(trans, parent_folder, new_folder) - return 200, dict(created=new_folder) - - def _check_access(self, trans, is_admin, item, current_user_roles): - if isinstance(item, trans.model.HistoryDatasetAssociation): - # Make sure the user has the DATASET_ACCESS permission on the history_dataset_association. - if not item: - message = f"Invalid history dataset ({escape(str(item))}) specified." - raise ObjectNotFound(message) - elif ( - not trans.app.security_agent.can_access_dataset(current_user_roles, item.dataset) - and item.user == trans.user - ): - message = f"You do not have permission to access the history dataset with id ({str(item.id)})." - raise ItemAccessibilityException(message) - else: - # Make sure the user has the LIBRARY_ACCESS permission on the library item. - if not item: - message = f"Invalid library item ({escape(str(item))}) specified." 
- raise ObjectNotFound(message) - elif not ( - is_admin or trans.app.security_agent.can_access_library_item(current_user_roles, item, trans.user) - ): - if isinstance(item, trans.model.Library): - item_type = "data library" - elif isinstance(item, LibraryFolder): - item_type = "folder" - else: - item_type = "(unknown item type)" - message = f"You do not have permission to access the {escape(item_type)} with id ({str(item.id)})." - raise ItemAccessibilityException(message) - - def _check_add(self, trans, is_admin, item, current_user_roles): - # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission. - if not (is_admin or trans.app.security_agent.can_add_library_item(current_user_roles, item)): - message = f"You are not authorized to add an item to ({escape(item.name)})." - raise ItemAccessibilityException(message) diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py index dc3c5e120f34..33f2324e155d 100644 --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -3,506 +3,85 @@ """ import logging -from typing import Optional -from galaxy import ( - exceptions, - managers, - util, +from galaxy.managers.context import ( + ProvidesHistoryContext, + ProvidesUserContext, ) -from galaxy.actions.library import ( - LibraryActions, - validate_path_upload, +from galaxy.webapps.galaxy.api import ( + depends, + DependsOnTrans, + Router, ) -from galaxy.managers.collections_util import ( - api_payload_to_create_params, - dictify_dataset_collection_instance, -) -from galaxy.model import ( - ExtendedMetadata, - ExtendedMetadataIndex, - Library, - LibraryDataset, - LibraryFolder, - tags, -) -from galaxy.model.base import transaction -from galaxy.structured_app import StructuredApp -from galaxy.web import expose_api -from galaxy.webapps.base.controller import ( - HTTPBadRequest, - url_for, - UsesFormDefinitionsMixin, - 
UsesLibraryMixinItems, -) -from galaxy.webapps.galaxy.api import BaseGalaxyAPIController +from galaxy.webapps.galaxy.services.library_contents import LibraryContentsService log = logging.getLogger(__name__) - -class LibraryContentsController( - BaseGalaxyAPIController, UsesLibraryMixinItems, UsesFormDefinitionsMixin, LibraryActions -): - def __init__(self, app: StructuredApp, hda_manager: managers.hdas.HDAManager): - super().__init__(app) - self.hda_manager = hda_manager - - @expose_api - def index(self, trans, library_id, **kwd): - """ - GET /api/libraries/{library_id}/contents: - - Return a list of library files and folders. - - .. note:: This endpoint is slow for large libraries. Returns all content traversing recursively through all folders. - .. seealso:: :class:`galaxy.webapps.galaxy.api.FolderContentsController.index` for a faster non-recursive solution - - :param library_id: the encoded id of the library - :type library_id: str - - :returns: list of dictionaries of the form: - - * id: the encoded id of the library item - * name: the 'library path' - or relationship of the library item to the root - * type: 'file' or 'folder' - * url: the url to get detailed information on the library item - - :rtype: list - - :raises: MalformedId, InconsistentDatabase, RequestParameterInvalidException, InternalServerError - """ - rval = [] - current_user_roles = trans.get_current_user_roles() - - def traverse(folder): - admin = trans.user_is_admin - rval = [] - for subfolder in folder.active_folders: - if not admin: - can_access, folder_ids = trans.app.security_agent.check_folder_contents( - trans.user, current_user_roles, subfolder - ) - if (admin or can_access) and not subfolder.deleted: - subfolder.api_path = f"{folder.api_path}/{subfolder.name}" - subfolder.api_type = "folder" - rval.append(subfolder) - rval.extend(traverse(subfolder)) - for ld in folder.datasets: - if not admin: - can_access = trans.app.security_agent.can_access_dataset( - current_user_roles, 
ld.library_dataset_dataset_association.dataset - ) - if (admin or can_access) and not ld.deleted: - ld.api_path = f"{folder.api_path}/{ld.name}" - ld.api_type = "file" - rval.append(ld) - return rval - - library = trans.sa_session.get(Library, self.decode_id(library_id)) - if not library: - raise exceptions.RequestParameterInvalidException("No library found with the id provided.") - if not (trans.user_is_admin or trans.app.security_agent.can_access_library(current_user_roles, library)): - raise exceptions.RequestParameterInvalidException("No library found with the id provided.") - encoded_id = f"F{trans.security.encode_id(library.root_folder.id)}" - # appending root folder - rval.append( - dict( - id=encoded_id, - type="folder", - name="/", - url=url_for("library_content", library_id=library_id, id=encoded_id), - ) - ) - library.root_folder.api_path = "" - # appending all other items in the library recursively - for content in traverse(library.root_folder): - encoded_id = trans.security.encode_id(content.id) - if content.api_type == "folder": - encoded_id = f"F{encoded_id}" - rval.append( - dict( - id=encoded_id, - type=content.api_type, - name=content.api_path, - url=url_for( - "library_content", - library_id=library_id, - id=encoded_id, - ), - ) - ) - return rval - - @expose_api - def show(self, trans, id, library_id, **kwd): - """ - GET /api/libraries/{library_id}/contents/{id} - - Returns information about library file or folder. - - :param id: the encoded id of the library item to return - :type id: str - - :param library_id: the encoded id of the library that contains this item - :type library_id: str - - :returns: detailed library item information - :rtype: dict - - .. 
seealso:: - :func:`galaxy.model.LibraryDataset.to_dict` and - :attr:`galaxy.model.LibraryFolder.dict_element_visible_keys` - """ - class_name, content_id = self._decode_library_content_id(id) - if class_name == "LibraryFolder": - content = self.get_library_folder(trans, content_id, check_ownership=False, check_accessible=True) - rval = content.to_dict(view="element", value_mapper={"id": trans.security.encode_id}) - rval["id"] = f"F{str(rval['id'])}" - if rval["parent_id"] is not None: # This can happen for root folders. - rval["parent_id"] = f"F{str(trans.security.encode_id(rval['parent_id']))}" - rval["parent_library_id"] = trans.security.encode_id(rval["parent_library_id"]) - else: - content = self.get_library_dataset(trans, content_id, check_ownership=False, check_accessible=True) - rval = content.to_dict(view="element") - rval["id"] = trans.security.encode_id(rval["id"]) - rval["ldda_id"] = trans.security.encode_id(rval["ldda_id"]) - rval["folder_id"] = f"F{str(trans.security.encode_id(rval['folder_id']))}" - rval["parent_library_id"] = trans.security.encode_id(rval["parent_library_id"]) - - tag_manager = tags.GalaxyTagHandler(trans.sa_session) - rval["tags"] = tag_manager.get_tags_list(content.library_dataset_dataset_association.tags) - return rval - - @expose_api - def create(self, trans, library_id, payload, **kwd): - """ - POST /api/libraries/{library_id}/contents: - - Create a new library file or folder. - - To copy an HDA into a library send ``create_type`` of 'file' and - the HDA's encoded id in ``from_hda_id`` (and optionally ``ldda_message``). - - To copy an HDCA into a library send ``create_type`` of 'file' and - the HDCA's encoded id in ``from_hdca_id`` (and optionally ``ldda_message``). 
- - :type library_id: str - :param library_id: the encoded id of the library where to create the new item - :type payload: dict - :param payload: dictionary structure containing: - - * folder_id: the encoded id of the parent folder of the new item - * create_type: the type of item to create ('file', 'folder' or 'collection') - * from_hda_id: (optional, only if create_type is 'file') the - encoded id of an accessible HDA to copy into the library - * ldda_message: (optional) the new message attribute of the LDDA created - * extended_metadata: (optional) sub-dictionary containing any extended - metadata to associate with the item - * upload_option: (optional) one of 'upload_file' (default), 'upload_directory' or 'upload_paths' - * server_dir: (optional, only if upload_option is - 'upload_directory') relative path of the subdirectory of Galaxy - ``library_import_dir`` (if admin) or ``user_library_import_dir`` - (if non-admin) to upload. All and only the files (i.e. - no subdirectories) contained in the specified directory will be - uploaded. - * filesystem_paths: (optional, only if upload_option is - 'upload_paths' and the user is an admin) file paths on the - Galaxy server to upload to the library, one file per line - * link_data_only: (optional, only when upload_option is - 'upload_directory' or 'upload_paths') either 'copy_files' - (default) or 'link_to_files'. Setting to 'link_to_files' - symlinks instead of copying the files - * name: (optional, only if create_type is 'folder') name of the - folder to create - * description: (optional, only if create_type is 'folder') - description of the folder to create - * tag_using_filenames: (optional) - create tags on datasets using the file's original name - * tags: (optional) - create the given list of tags on datasets - - :returns: a dictionary describing the new item unless ``from_hdca_id`` is supplied, - in that case a list of such dictionaries is returned. 
- :rtype: object - """ - if trans.user_is_bootstrap_admin: - raise exceptions.RealUserRequiredException("Only real users can create a new library file or folder.") - if "create_type" not in payload: - raise exceptions.RequestParameterMissingException("Missing required 'create_type' parameter.") - create_type = payload.pop("create_type") - if create_type not in ("file", "folder", "collection"): - raise exceptions.RequestParameterInvalidException( - f"Invalid value for 'create_type' parameter ( {create_type} ) specified." - ) - if "upload_option" in payload and payload["upload_option"] not in ( - "upload_file", - "upload_directory", - "upload_paths", - ): - raise exceptions.RequestParameterInvalidException( - f"Invalid value for 'upload_option' parameter ( {payload['upload_option']} ) specified." - ) - if "folder_id" not in payload: - raise exceptions.RequestParameterMissingException("Missing required 'folder_id' parameter.") - folder_id = payload.pop("folder_id") - _, folder_id = self._decode_library_content_id(folder_id) - folder_id = trans.security.decode_id(folder_id) - # security is checked in the downstream controller - parent = self.get_library_folder(trans, folder_id, check_ownership=False, check_accessible=False) - # The rest of the security happens in the library_common controller. - - payload["tag_using_filenames"] = util.string_as_bool(payload.get("tag_using_filenames", None)) - payload["tags"] = util.listify(payload.get("tags", None)) - - # are we copying an HDA to the library folder? 
- # we'll need the id and any message to attach, then branch to that private function - from_hda_id, from_hdca_id, ldda_message = ( - payload.pop("from_hda_id", None), - payload.pop("from_hdca_id", None), - payload.pop("ldda_message", ""), - ) - if create_type == "file": - if from_hda_id: - return self._copy_hda_to_library_folder( - trans, self.hda_manager, self.decode_id(from_hda_id), folder_id, ldda_message - ) - if from_hdca_id: - return self._copy_hdca_to_library_folder( - trans, self.hda_manager, self.decode_id(from_hdca_id), folder_id, ldda_message - ) - - # check for extended metadata, store it and pop it out of the param - # otherwise sanitize_param will have a fit - ex_meta_payload = payload.pop("extended_metadata", None) - - # Now create the desired content object, either file or folder. - if create_type == "file": - status, output = self._upload_library_dataset(trans, folder_id, **payload) - elif create_type == "folder": - status, output = self._create_folder(trans, folder_id, **payload) - elif create_type == "collection": - # Not delegating to library_common, so need to check access to parent - # folder here. 
- self.check_user_can_add_to_library_item(trans, parent, check_accessible=True) - create_params = api_payload_to_create_params(payload) - create_params["parent"] = parent - dataset_collection_manager = trans.app.dataset_collection_manager - dataset_collection_instance = dataset_collection_manager.create(**create_params) - return [ - dictify_dataset_collection_instance( - dataset_collection_instance, security=trans.security, url_builder=trans.url_builder, parent=parent - ) - ] - if status != 200: - trans.response.status = status - return output - else: - rval = [] - for v in output.values(): - if ex_meta_payload is not None: - # If there is extended metadata, store it, attach it to the dataset, and index it - ex_meta = ExtendedMetadata(ex_meta_payload) - trans.sa_session.add(ex_meta) - v.extended_metadata = ex_meta - trans.sa_session.add(v) - with transaction(trans.sa_session): - trans.sa_session.commit() - for path, value in self._scan_json_block(ex_meta_payload): - meta_i = ExtendedMetadataIndex(ex_meta, path, value) - trans.sa_session.add(meta_i) - with transaction(trans.sa_session): - trans.sa_session.commit() - if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation): - v = v.library_dataset - encoded_id = trans.security.encode_id(v.id) - if create_type == "folder": - encoded_id = f"F{encoded_id}" - rval.append( - dict( - id=encoded_id, name=v.name, url=url_for("library_content", library_id=library_id, id=encoded_id) - ) - ) - return rval - - def _upload_library_dataset(self, trans, folder_id: int, **kwd): - replace_dataset: Optional[LibraryDataset] = None - upload_option = kwd.get("upload_option", "upload_file") - dbkey = kwd.get("dbkey", "?") - if isinstance(dbkey, list): - last_used_build = dbkey[0] - else: - last_used_build = dbkey - is_admin = trans.user_is_admin - current_user_roles = trans.get_current_user_roles() - folder = trans.sa_session.get(LibraryFolder, folder_id) - self._check_access(trans, is_admin, folder, current_user_roles) - 
self._check_add(trans, is_admin, folder, current_user_roles) - library = folder.parent_library - if folder and last_used_build in ["None", None, "?"]: - last_used_build = folder.genome_build - error = False - if upload_option == "upload_paths": - validate_path_upload(trans) # Duplicate check made in _upload_dataset. - elif roles := kwd.get("roles", ""): - # Check to see if the user selected roles to associate with the DATASET_ACCESS permission - # on the dataset that would cause accessibility issues. - vars = dict(DATASET_ACCESS_in=roles) - permissions, in_roles, error, message = trans.app.security_agent.derive_roles_from_access( - trans, library.id, "api", library=True, **vars - ) - if error: - return 400, message - else: - created_outputs_dict = self._upload_dataset( - trans, folder_id=folder.id, replace_dataset=replace_dataset, **kwd - ) - if created_outputs_dict: - if isinstance(created_outputs_dict, str): - return 400, created_outputs_dict - elif isinstance(created_outputs_dict, tuple): - return created_outputs_dict[0], created_outputs_dict[1] - return 200, created_outputs_dict - else: - return 400, "Upload failed" - - def _scan_json_block(self, meta, prefix=""): - """ - Scan a json style data structure, and emit all fields and their values. - Example paths - - Data - { "data" : [ 1, 2, 3 ] } - - Path: - /data == [1,2,3] - - /data/[0] == 1 - - """ - if isinstance(meta, dict): - for a in meta: - yield from self._scan_json_block(meta[a], f"{prefix}/{a}") - elif isinstance(meta, list): - for i, a in enumerate(meta): - yield from self._scan_json_block(a, prefix + "[%d]" % (i)) - else: - # BUG: Everything is cast to string, which can lead to false positives - # for cross type comparisions, ie "True" == True - yield prefix, (f"{meta}").encode() - - @expose_api - def update(self, trans, id, library_id, payload, **kwd): - """ - PUT /api/libraries/{library_id}/contents/{id} - - Create an ImplicitlyConvertedDatasetAssociation. - - .. 
seealso:: :class:`galaxy.model.ImplicitlyConvertedDatasetAssociation` - - :type id: str - :param id: the encoded id of the library item to return - :type library_id: str - :param library_id: the encoded id of the library that contains this item - :type payload: dict - :param payload: dictionary structure containing:: - 'converted_dataset_id': - - :rtype: None - :returns: None - """ - if "converted_dataset_id" in payload: - converted_id = payload.pop("converted_dataset_id") - content = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=False) - content_conv = self.get_library_dataset(trans, converted_id, check_ownership=False, check_accessible=False) - assoc = trans.app.model.ImplicitlyConvertedDatasetAssociation( - parent=content.library_dataset_dataset_association, - dataset=content_conv.library_dataset_dataset_association, - file_type=content_conv.library_dataset_dataset_association.extension, - metadata_safe=True, - ) - trans.sa_session.add(assoc) - with transaction(trans.sa_session): - trans.sa_session.commit() - - def _decode_library_content_id(self, content_id): - if len(content_id) % 16 == 0: - return "LibraryDataset", content_id - elif content_id.startswith("F"): - return "LibraryFolder", content_id[1:] - else: - raise HTTPBadRequest(f"Malformed library content id ( {str(content_id)} ) specified, unable to decode.") - - @expose_api - def delete(self, trans, library_id, id, **kwd): - """ - DELETE /api/libraries/{library_id}/contents/{id} - - Delete the LibraryDataset with the given ``id``. 
- - :type id: str - :param id: the encoded id of the library dataset to delete - :type kwd: dict - :param kwd: (optional) dictionary structure containing: - - * payload: a dictionary itself containing: - * purge: if True, purge the LD - - :rtype: dict - :returns: an error object if an error occurred or a dictionary containing: - * id: the encoded id of the library dataset, - * deleted: if the library dataset was marked as deleted, - * purged: if the library dataset was purged - """ - purge = False - if kwd.get("payload", None): - purge = util.string_as_bool(kwd["payload"].get("purge", False)) - - rval = {"id": id} - try: - ld = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=True) - user_is_admin = trans.user_is_admin - can_modify = trans.app.security_agent.can_modify_library_item(trans.user.all_roles(), ld) - log.debug("is_admin: %s, can_modify: %s", user_is_admin, can_modify) - if not (user_is_admin or can_modify): - trans.response.status = 403 - rval.update({"error": "Unauthorized to delete or purge this library dataset"}) - return rval - - ld.deleted = True - if purge: - ld.purged = True - trans.sa_session.add(ld) - with transaction(trans.sa_session): - trans.sa_session.commit() - - # TODO: had to change this up a bit from Dataset.user_can_purge - dataset = ld.library_dataset_dataset_association.dataset - no_history_assoc = len(dataset.history_associations) == len(dataset.purged_history_associations) - no_library_assoc = dataset.library_associations == [ld.library_dataset_dataset_association] - can_purge_dataset = not dataset.purged and no_history_assoc and no_library_assoc - - if can_purge_dataset: - try: - ld.library_dataset_dataset_association.dataset.full_delete() - trans.sa_session.add(ld.dataset) - except Exception: - pass - # flush now to preserve deleted state in case of later interruption - with transaction(trans.sa_session): - trans.sa_session.commit() - rval["purged"] = True - with transaction(trans.sa_session): - 
trans.sa_session.commit() - rval["deleted"] = True - - except exceptions.httpexceptions.HTTPInternalServerError: - log.exception("Library_contents API, delete: uncaught HTTPInternalServerError: %s, %s", id, str(kwd)) - raise - except exceptions.httpexceptions.HTTPException: - raise - except Exception as exc: - log.exception("library_contents API, delete: uncaught exception: %s, %s", id, str(kwd)) - trans.response.status = 500 - rval.update({"error": util.unicodify(exc)}) - return rval +router = Router(tags=["libraries"]) + + +@router.cbv +class FastAPILibraryContents: + service: LibraryContentsService = depends(LibraryContentsService) + + @router.get( + "/api/libraries/{library_id}/contents", + summary="Return a list of library files and folders.", + ) + def index( + self, + library_id, + trans: ProvidesUserContext = DependsOnTrans, + ) -> list: + return self.service.index(trans, library_id) + + @router.get( + "/api/libraries/{library_id}/contents/{id}", + summary="Return a library file or folder.", + ) + def show( + self, + id, + library_id, + trans: ProvidesUserContext = DependsOnTrans, + ): + return self.service.show(trans, id) + + @router.post( + "/api/libraries/{library_id}/contents", + summary="Create a new library file or folder.", + ) + def create( + self, + library_id, + payload, + trans: ProvidesHistoryContext = DependsOnTrans, + ): + return self.service.create(trans, library_id, payload) + + @router.put( + "/api/libraries/{library_id}/contents/{id}", + summary="Update a library file or folder.", + deprecated=True, + ) + def update( + self, + id, + library_id, + payload, + trans: ProvidesUserContext = DependsOnTrans, + ): + return self.service.update(trans, id, payload) + + @router.delete( + "/api/libraries/{library_id}/contents/{id}", + summary="Delete a library file or folder.", + ) + def delete( + self, + library_id, + id, + payload, + trans: ProvidesHistoryContext = DependsOnTrans, + ): + return self.service.delete(trans, id, payload) diff --git 
a/lib/galaxy/webapps/galaxy/buildapp.py b/lib/galaxy/webapps/galaxy/buildapp.py index 951926dcc35b..d1aa8766a170 100644 --- a/lib/galaxy/webapps/galaxy/buildapp.py +++ b/lib/galaxy/webapps/galaxy/buildapp.py @@ -851,18 +851,18 @@ def populate_api_routes(webapp, app): conditions=dict(method=["POST", "GET"]), ) - webapp.mapper.resource( - "content", - "contents", - controller="library_contents", - name_prefix="library_", - path_prefix="/api/libraries/{library_id}", - parent_resources=dict(member_name="library", collection_name="libraries"), - ) + # webapp.mapper.resource( + # "content", + # "contents", + # controller="library_contents", + # name_prefix="library_", + # path_prefix="/api/libraries/{library_id}", + # parent_resources=dict(member_name="library", collection_name="libraries"), + # ) - _add_item_extended_metadata_controller( - webapp, name_prefix="library_dataset_", path_prefix="/api/libraries/{library_id}/contents/{library_content_id}" - ) + # _add_item_extended_metadata_controller( + # webapp, name_prefix="library_dataset_", path_prefix="/api/libraries/{library_id}/contents/{library_content_id}" + # ) webapp.mapper.connect( "build_for_rerun", diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py new file mode 100644 index 000000000000..28b1a2123f4c --- /dev/null +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -0,0 +1,602 @@ +import json +import logging +import os +from typing import Optional + +from markupsafe import escape + +import galaxy.schema.schema +from galaxy import ( + exceptions, + util, +) +from galaxy.actions.library import ( + LibraryActions, + validate_path_upload, + validate_server_directory_upload, +) +from galaxy.managers import base as managers_base +from galaxy.managers.collections_util import ( + api_payload_to_create_params, + dictify_dataset_collection_instance, +) +from galaxy.managers.context import ( + ProvidesHistoryContext, + 
ProvidesUserContext,
+)
+from galaxy.managers.hdas import HDAManager
+from galaxy.model import (
+    ExtendedMetadata,
+    ExtendedMetadataIndex,
+    Library,
+    LibraryDataset,
+    LibraryFolder,
+    tags,
+)
+from galaxy.model.base import transaction
+from galaxy.security.idencoding import IdEncodingHelper
+from galaxy.tools.actions import upload_common
+from galaxy.tools.parameters import populate_state
+from galaxy.util import bunch
+from galaxy.webapps.base.controller import UsesLibraryMixinItems
+from galaxy.webapps.galaxy.services.base import ServiceBase
+
+log = logging.getLogger(__name__)
+
+
+class LibraryContentsService(ServiceBase, LibraryActions, UsesLibraryMixinItems):
+    """
+    Interface/service shared by controllers for interacting with the contents of a library.
+    """
+
+    def __init__(self, security: IdEncodingHelper, hda_manager: HDAManager):
+        super().__init__(security)
+        self.hda_manager = hda_manager
+
+    def get_object(self, trans, id, class_name, check_ownership=False, check_accessible=False, deleted=None):
+        """
+        Convenience method to get a model object with the specified checks.
+ """ + return managers_base.get_object( + trans, id, class_name, check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted + ) + + def index( + self, + trans: ProvidesUserContext, + library_id, + ): + """Return a list of library files and folders.""" + rval = [] + current_user_roles = trans.get_current_user_roles() + library = trans.sa_session.get(Library, self.decode_id(library_id)) + if not library: + raise exceptions.RequestParameterInvalidException("No library found with the id provided.") + if not (trans.user_is_admin or trans.app.security_agent.can_access_library(current_user_roles, library)): + raise exceptions.RequestParameterInvalidException("No library found with the id provided.") + encoded_id = f"F{trans.security.encode_id(library.root_folder.id)}" + # appending root folder + rval.append( + dict( + id=encoded_id, + type="folder", + name="/", + url=( + trans.url_builder("library_content", library_id=library_id, id=encoded_id) + if trans.url_builder + else None + ), + ) + ) + library.root_folder.api_path = "" + # appending all other items in the library recursively + for content in self._traverse(trans, library.root_folder, current_user_roles): + encoded_id = trans.security.encode_id(content.id) + if content.api_type == "folder": + encoded_id = f"F{encoded_id}" + rval.append( + dict( + id=encoded_id, + type=content.api_type, + name=content.api_path, + url=( + trans.url_builder("library_content", library_id=library_id, id=encoded_id) + if trans.url_builder + else None + ), + ) + ) + return rval + + def show( + self, + trans: ProvidesUserContext, + id, + ): + """Returns information about library file or folder.""" + class_name, content_id = self._decode_library_content_id(id) + if class_name == "LibraryFolder": + content = self.get_library_folder(trans, content_id, check_ownership=False, check_accessible=True) + rval = content.to_dict(view="element", value_mapper={"id": trans.security.encode_id}) + rval["id"] = 
f"F{str(rval['id'])}" + if rval["parent_id"] is not None: # This can happen for root folders. + rval["parent_id"] = f"F{str(trans.security.encode_id(rval['parent_id']))}" + rval["parent_library_id"] = trans.security.encode_id(rval["parent_library_id"]) + else: + content = self.get_library_dataset(trans, content_id, check_ownership=False, check_accessible=True) + rval = content.to_dict(view="element") + rval["id"] = trans.security.encode_id(rval["id"]) + rval["ldda_id"] = trans.security.encode_id(rval["ldda_id"]) + rval["folder_id"] = f"F{str(trans.security.encode_id(rval['folder_id']))}" + rval["parent_library_id"] = trans.security.encode_id(rval["parent_library_id"]) + + tag_manager = tags.GalaxyTagHandler(trans.sa_session) + rval["tags"] = tag_manager.get_tags_list(content.library_dataset_dataset_association.tags) + return rval + + def create( + self, + trans: ProvidesHistoryContext, + library_id, + payload, + ): + """Create a new library file or folder.""" + if trans.user_is_bootstrap_admin: + raise exceptions.RealUserRequiredException("Only real users can create a new library file or folder.") + if not payload.create_type: + raise exceptions.RequestParameterMissingException("Missing required 'create_type' parameter.") + create_type = payload.create_type + if create_type not in ("file", "folder", "collection"): + raise exceptions.RequestParameterInvalidException( + f"Invalid value for 'create_type' parameter ( {create_type} ) specified." + ) + if payload.upload_option and payload.upload_option not in ( + "upload_file", + "upload_directory", + "upload_paths", + ): + raise exceptions.RequestParameterInvalidException( + f"Invalid value for 'upload_option' parameter ( {payload.upload_option} ) specified." 
+ ) + if not payload.folder_id: + raise exceptions.RequestParameterMissingException("Missing required 'folder_id' parameter.") + folder_id = payload.folder_id + _, folder_id = self._decode_library_content_id(folder_id) + folder_id = trans.security.decode_id(folder_id) + # security is checked in the downstream controller + parent = self.get_library_folder(trans, folder_id, check_ownership=False, check_accessible=False) + # The rest of the security happens in the library_common controller. + + payload.tag_using_filenames = payload.tag_using_filenames or False + payload.tags = payload.tags or [] + + # are we copying an HDA to the library folder? + # we'll need the id and any message to attach, then branch to that private function + from_hda_id, from_hdca_id, ldda_message = ( + payload.from_hda_id or None, + payload.from_hdca_id or None, + payload.ldda_message or "", + ) + if create_type == "file": + if from_hda_id: + return self._copy_hda_to_library_folder( + trans, self.hda_manager, self.decode_id(from_hda_id), folder_id, ldda_message + ) + if from_hdca_id: + return self._copy_hdca_to_library_folder( + trans, self.hda_manager, self.decode_id(from_hdca_id), folder_id, ldda_message + ) + + # check for extended metadata, store it and pop it out of the param + # otherwise sanitize_param will have a fit + ex_meta_payload = payload.extended_metadata or None + + # Now create the desired content object, either file or folder. + if create_type == "file": + output = self._upload_library_dataset(trans, folder_id, payload) + elif create_type == "folder": + output = self._create_folder(trans, folder_id, payload) + elif create_type == "collection": + # Not delegating to library_common, so need to check access to parent + # folder here. 
+ self.check_user_can_add_to_library_item(trans, parent, check_accessible=True) + create_params = api_payload_to_create_params(payload) + create_params["parent"] = parent + dataset_collection_manager = trans.app.dataset_collection_manager + dataset_collection_instance = dataset_collection_manager.create(**create_params) + return [ + dictify_dataset_collection_instance( + dataset_collection_instance, security=trans.security, url_builder=trans.url_builder, parent=parent + ) + ] + rval = [] + for v in output.values(): + if ex_meta_payload is not None: + # If there is extended metadata, store it, attach it to the dataset, and index it + ex_meta = ExtendedMetadata(ex_meta_payload) + trans.sa_session.add(ex_meta) + v.extended_metadata = ex_meta + trans.sa_session.add(v) + with transaction(trans.sa_session): + trans.sa_session.commit() + for path, value in self._scan_json_block(ex_meta_payload): + meta_i = ExtendedMetadataIndex(ex_meta, path, value) + trans.sa_session.add(meta_i) + with transaction(trans.sa_session): + trans.sa_session.commit() + if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation): + v = v.library_dataset + encoded_id = trans.security.encode_id(v.id) + if create_type == "folder": + encoded_id = f"F{encoded_id}" + rval.append( + dict( + id=encoded_id, + name=v.name, + url=( + trans.url_builder("library_content", library_id=library_id, id=encoded_id) + if trans.url_builder + else None + ), + ) + ) + return rval + + def update( + self, + trans: ProvidesUserContext, + id, + payload, + ): + """Create an ImplicitlyConvertedDatasetAssociation.""" + if payload.converted_dataset_id: + converted_id = payload.converted_dataset_id + content = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=False) + content_conv = self.get_library_dataset(trans, converted_id, check_ownership=False, check_accessible=False) + assoc = trans.app.model.ImplicitlyConvertedDatasetAssociation( + parent=content.library_dataset_dataset_association, + 
dataset=content_conv.library_dataset_dataset_association, + file_type=content_conv.library_dataset_dataset_association.extension, + metadata_safe=True, + ) + trans.sa_session.add(assoc) + with transaction(trans.sa_session): + trans.sa_session.commit() + + def delete( + self, + trans: ProvidesHistoryContext, + id, + payload, + ): + """Delete the LibraryDataset with the given ``id``.""" + purge = payload.purge or False + + rval = {"id": id} + try: + ld = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=True) + user_is_admin = trans.user_is_admin + can_modify = trans.app.security_agent.can_modify_library_item(trans.user.all_roles(), ld) + log.debug("is_admin: %s, can_modify: %s", user_is_admin, can_modify) + if not (user_is_admin or can_modify): + raise exceptions.InsufficientPermissionsException( + "Unauthorized to delete or purge this library dataset" + ) + + ld.deleted = True + if purge: + ld.purged = True + trans.sa_session.add(ld) + with transaction(trans.sa_session): + trans.sa_session.commit() + + # TODO: had to change this up a bit from Dataset.user_can_purge + dataset = ld.library_dataset_dataset_association.dataset + no_history_assoc = len(dataset.history_associations) == len(dataset.purged_history_associations) + no_library_assoc = dataset.library_associations == [ld.library_dataset_dataset_association] + can_purge_dataset = not dataset.purged and no_history_assoc and no_library_assoc + + if can_purge_dataset: + try: + ld.library_dataset_dataset_association.dataset.full_delete() + trans.sa_session.add(ld.dataset) + except Exception: + pass + # flush now to preserve deleted state in case of later interruption + with transaction(trans.sa_session): + trans.sa_session.commit() + rval["purged"] = True + with transaction(trans.sa_session): + trans.sa_session.commit() + rval["deleted"] = True + except Exception as exc: + log.exception(f"library_contents API, delete: uncaught exception: {id}, {payload}") + raise 
exceptions.InternalServerError(util.unicodify(exc)) + return rval + + def _upload_library_dataset( + self, + trans: ProvidesHistoryContext, + folder_id: int, + payload, + ): + replace_dataset: Optional[LibraryDataset] = None + upload_option = payload.upload_option or "upload_file" + dbkey = payload.dbkey or "?" + if isinstance(dbkey, list): + last_used_build = dbkey[0] + else: + last_used_build = dbkey + is_admin = trans.user_is_admin + current_user_roles = trans.get_current_user_roles() + folder = trans.sa_session.get(LibraryFolder, folder_id) + if not folder: + raise exceptions.RequestParameterInvalidException("Invalid folder id specified.") + self._check_access(trans, is_admin, folder, current_user_roles) + self._check_add(trans, is_admin, folder, current_user_roles) + library = folder.parent_library + if folder and last_used_build in ["None", None, "?"]: + last_used_build = folder.genome_build + error = False + if upload_option == "upload_paths": + validate_path_upload(trans) # Duplicate check made in _upload_dataset. + elif roles := payload.roles or "": + # Check to see if the user selected roles to associate with the DATASET_ACCESS permission + # on the dataset that would cause accessibility issues. 
+            vars = dict(DATASET_ACCESS_in=roles)
+            permissions, in_roles, error, message = trans.app.security_agent.derive_roles_from_access(
+                trans, library.id, "api", library=True, **vars
+            )
+            if error:
+                raise exceptions.RequestParameterInvalidException(message)
+        else:
+            created_outputs_dict = self._upload_dataset(
+                trans, payload=payload, folder_id=folder.id, replace_dataset=replace_dataset
+            )
+            if created_outputs_dict:
+                if isinstance(created_outputs_dict, str):
+                    raise exceptions.RequestParameterInvalidException(created_outputs_dict)
+                elif isinstance(created_outputs_dict, tuple):
+                    return created_outputs_dict[0], created_outputs_dict[1]
+                return created_outputs_dict
+            else:
+                raise exceptions.RequestParameterInvalidException("Upload failed")
+
+    def _scan_json_block(
+        self,
+        meta,
+        prefix="",
+    ):
+        """
+        Scan a json style data structure, and emit all fields and their values.
+        Example paths
+
+        Data
+        { "data" : [ 1, 2, 3 ] }
+
+        Path:
+        /data == [1,2,3]
+
+        /data/[0] == 1
+
+        """
+        if isinstance(meta, dict):
+            for a in meta:
+                yield from self._scan_json_block(meta[a], f"{prefix}/{a}")
+        elif isinstance(meta, list):
+            for i, a in enumerate(meta):
+                yield from self._scan_json_block(a, prefix + "[%d]" % (i))
+        else:
+            # BUG: Everything is cast to string, which can lead to false positives
+            # for cross type comparisons, ie "True" == True
+            yield prefix, (f"{meta}").encode()
+
+    def _decode_library_content_id(
+        self,
+        content_id,
+    ):
+        if len(content_id) % 16 == 0:
+            return "LibraryDataset", content_id
+        elif content_id.startswith("F"):
+            return "LibraryFolder", content_id[1:]
+        else:
+            raise exceptions.RequestParameterInvalidException(
+                f"Malformed library content id ( {str(content_id)} ) specified, unable to decode."
+ ) + + def _traverse( + self, + trans: ProvidesUserContext, + folder, + current_user_roles, + ): + admin = trans.user_is_admin + rval = [] + for subfolder in folder.active_folders: + if not admin: + can_access, folder_ids = trans.app.security_agent.check_folder_contents( + trans.user, current_user_roles, subfolder + ) + if (admin or can_access) and not subfolder.deleted: + subfolder.api_path = f"{folder.api_path}/{subfolder.name}" + subfolder.api_type = "folder" + rval.append(subfolder) + rval.extend(self._traverse(trans, subfolder, current_user_roles)) + for ld in folder.datasets: + if not admin: + can_access = trans.app.security_agent.can_access_dataset( + current_user_roles, ld.library_dataset_dataset_association.dataset + ) + if (admin or can_access) and not ld.deleted: + ld.api_path = f"{folder.api_path}/{ld.name}" + ld.api_type = "file" + rval.append(ld) + return rval + + def _upload_dataset( + self, + trans, + payload, + folder_id: int, + replace_dataset: Optional[LibraryDataset] = None, + ): + # Set up the traditional tool state/params + cntrller = "api" + tool_id = "upload1" + file_type = payload.file_type + upload_common.validate_datatype_extension(datatypes_registry=trans.app.datatypes_registry, ext=file_type) + tool = trans.app.toolbox.get_tool(tool_id) + state = tool.new_state(trans) + populate_state(trans, tool.inputs, payload.dict(), state.inputs) + tool_params = state.inputs + dataset_upload_inputs = [] + for input in tool.inputs.values(): + if input.type == "upload_dataset": + dataset_upload_inputs.append(input) + # Library-specific params + server_dir = payload.server_dir or "" + upload_option = payload.upload_option or "upload_file" + if upload_option == "upload_directory": + full_dir, import_dir_desc = validate_server_directory_upload(trans, server_dir) + elif upload_option == "upload_paths": + # Library API already checked this - following check isn't actually needed. 
+ validate_path_upload(trans) + # Some error handling should be added to this method. + try: + # FIXME: instead of passing params here ( which have been processed by util.Params(), the original payload + # should be passed so that complex objects that may have been included in the initial request remain. + library_bunch = upload_common.handle_library_params(trans, payload.dict(), folder_id, replace_dataset) + except Exception: + raise exceptions.InvalidFileFormatError("Invalid folder specified") + # Proceed with (mostly) regular upload processing if we're still errorless + if upload_option == "upload_file": + tool_params = upload_common.persist_uploads(tool_params, trans) + uploaded_datasets = upload_common.get_uploaded_datasets( + trans, cntrller, tool_params, dataset_upload_inputs, library_bunch=library_bunch + ) + elif upload_option == "upload_directory": + uploaded_datasets = self._get_server_dir_uploaded_datasets( + trans, payload, full_dir, import_dir_desc, library_bunch + ) + elif upload_option == "upload_paths": + uploaded_datasets, _, _ = self._get_path_paste_uploaded_datasets( + trans, payload.dict(), library_bunch, 200, None + ) + if upload_option == "upload_file" and not uploaded_datasets: + raise exceptions.RequestParameterInvalidException("Select a file, enter a URL or enter text") + json_file_path = upload_common.create_paramfile(trans, uploaded_datasets) + data_list = [ud.data for ud in uploaded_datasets] + job_params = {} + job_params["link_data_only"] = json.dumps(payload.link_data_only or "copy_files") + job_params["uuid"] = json.dumps(payload.uuid or None) + job, output = upload_common.create_job( + trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params + ) + trans.app.job_manager.enqueue(job, tool=tool) + return output + + def _get_server_dir_uploaded_datasets(self, trans, payload, full_dir, import_dir_desc, library_bunch): + files = self._get_server_dir_files(payload, full_dir, import_dir_desc) 
+ uploaded_datasets = [] + for file in files: + name = os.path.basename(file) + uploaded_datasets.append( + self._make_library_uploaded_dataset(trans, payload.dict(), name, file, "server_dir", library_bunch) + ) + return uploaded_datasets + + def _get_server_dir_files(self, payload, full_dir, import_dir_desc): + files = [] + try: + for entry in os.listdir(full_dir): + # Only import regular files + path = os.path.join(full_dir, entry) + link_data_only = payload.link_data_only or "copy_files" + if os.path.islink(full_dir) and link_data_only == "link_to_files": + # If we're linking instead of copying and the + # sub-"directory" in the import dir is actually a symlink, + # dereference the symlink, but not any of its contents. + link_path = os.readlink(full_dir) + if os.path.isabs(link_path): + path = os.path.join(link_path, entry) + else: + path = os.path.abspath(os.path.join(link_path, entry)) + elif os.path.islink(path) and os.path.isfile(path) and link_data_only == "link_to_files": + # If we're linking instead of copying and the "file" in the + # sub-directory of the import dir is actually a symlink, + # dereference the symlink (one dereference only, Vasili). 
+ link_path = os.readlink(path) + if os.path.isabs(link_path): + path = link_path + else: + path = os.path.abspath(os.path.join(os.path.dirname(path), link_path)) + if os.path.isfile(path): + files.append(path) + except Exception as e: + raise exceptions.InternalServerError( + f"Unable to get file list for configured {import_dir_desc}, error: {util.unicodify(e)}" + ) + if not files: + raise exceptions.ObjectAttributeMissingException(f"The directory '{full_dir}' contains no valid files") + return files + + def _create_folder( + self, + trans, + parent_id: int, + payload, + ): + is_admin = trans.user_is_admin + current_user_roles = trans.get_current_user_roles() + parent_folder = trans.sa_session.get(LibraryFolder, parent_id) + # Check the library which actually contains the user-supplied parent folder, not the user-supplied + # library, which could be anything. + self._check_access(trans, is_admin, parent_folder, current_user_roles) + self._check_add(trans, is_admin, parent_folder, current_user_roles) + new_folder = LibraryFolder(name=payload.name or "", description=payload.description or "") + # We are associating the last used genome build with folders, so we will always + # initialize a new folder with the first dbkey in genome builds list which is currently + # ? unspecified (?) + new_folder.genome_build = trans.app.genome_builds.default_value + parent_folder.add_folder(new_folder) + trans.sa_session.add(new_folder) + with transaction(trans.sa_session): + trans.sa_session.commit() + # New folders default to having the same permissions as their parent folder + trans.app.security_agent.copy_library_permissions(trans, parent_folder, new_folder) + return dict(created=new_folder) + + def _check_access(self, trans, is_admin, item, current_user_roles): + if isinstance(item, trans.model.HistoryDatasetAssociation): + # Make sure the user has the DATASET_ACCESS permission on the history_dataset_association. 
+ if not item: + message = f"Invalid history dataset ({escape(str(item))}) specified." + raise exceptions.ObjectNotFound(message) + elif ( + not trans.app.security_agent.can_access_dataset(current_user_roles, item.dataset) + and item.user == trans.user + ): + message = f"You do not have permission to access the history dataset with id ({str(item.id)})." + raise exceptions.ItemAccessibilityException(message) + else: + # Make sure the user has the LIBRARY_ACCESS permission on the library item. + if not item: + message = f"Invalid library item ({escape(str(item))}) specified." + raise exceptions.ObjectNotFound(message) + elif not ( + is_admin or trans.app.security_agent.can_access_library_item(current_user_roles, item, trans.user) + ): + if isinstance(item, trans.model.Library): + item_type = "data library" + elif isinstance(item, LibraryFolder): + item_type = "folder" + else: + item_type = "(unknown item type)" + message = f"You do not have permission to access the {escape(item_type)} with id ({str(item.id)})." + raise exceptions.ItemAccessibilityException(message) + + def _check_add(self, trans, is_admin, item, current_user_roles): + # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission. + if not (is_admin or trans.app.security_agent.can_add_library_item(current_user_roles, item)): + message = f"You are not authorized to add an item to ({escape(item.name)})." 
+ raise exceptions.ItemAccessibilityException(message) From 4621db9a32dc679537c20229981b150b71ff1f3f Mon Sep 17 00:00:00 2001 From: Arash Date: Tue, 17 Sep 2024 16:38:25 +0200 Subject: [PATCH 04/22] Refactor library content routes and controllers --- lib/galaxy/actions/library.py | 1 - lib/galaxy/schema/library_contents.py | 120 ++++++++++++++++++ .../webapps/galaxy/api/library_contents.py | 27 ++-- .../galaxy/services/library_contents.py | 58 ++++----- lib/galaxy_test/api/test_library_contents.py | 31 +---- 5 files changed, 162 insertions(+), 75 deletions(-) create mode 100644 lib/galaxy/schema/library_contents.py diff --git a/lib/galaxy/actions/library.py b/lib/galaxy/actions/library.py index e56c4dc180c5..35ada9f0c295 100644 --- a/lib/galaxy/actions/library.py +++ b/lib/galaxy/actions/library.py @@ -10,7 +10,6 @@ AdminRequiredException, ConfigDoesNotAllowException, RequestParameterInvalidException, - RequestParameterMissingException, ) from galaxy.model.base import transaction from galaxy.tools.actions import upload_common diff --git a/lib/galaxy/schema/library_contents.py b/lib/galaxy/schema/library_contents.py new file mode 100644 index 000000000000..3120e1b360e5 --- /dev/null +++ b/lib/galaxy/schema/library_contents.py @@ -0,0 +1,120 @@ +from enum import Enum +from typing import ( + List, + Optional, +) + +from pydantic import ( + Field, + RootModel, +) + +from galaxy.schema.fields import EncodedDatabaseIdField, DecodedDatabaseIdField, LibraryFolderDatabaseIdField +from galaxy.schema.schema import Model + + +class UploadOption(str, Enum): + upload_file = "upload_file" + upload_paths = "upload_paths" + upload_directory = "upload_directory" + + +class CreateType(str, Enum): + file = "file" + folder = "folder" + collection = "collection" + + +class LinkDataOnly(str, Enum): + copy_files = "copy_files" + link_to_files = "link_to_files" + + +class LibraryContentsCreatePayload(Model): + create_type: CreateType = Field( + ..., + title="the type of item to create", + 
) + upload_option: Optional[UploadOption] = Field( + UploadOption.upload_file, + title="the method to use for uploading files", + ) + folder_id: LibraryFolderDatabaseIdField = Field( + ..., + title="the encoded id of the parent folder of the new item", + ) + tag_using_filenames: Optional[bool] = Field( + False, + title="create tags on datasets using the file's original name", + ) + tags: Optional[List[str]] = Field( + [], + title="create the given list of tags on datasets", + ) + from_hda_id: Optional[DecodedDatabaseIdField] = Field( + None, + title="(only if create_type is 'file') the encoded id of an accessible HDA to copy into the library", + ) + from_hdca_id: Optional[DecodedDatabaseIdField] = Field( + None, + title="(only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library", + ) + ldda_message: Optional[str] = Field( + "", + title="the new message attribute of the LDDA created", + ) + extended_metadata: Optional[str] = Field( + None, + title="sub-dictionary containing any extended metadata to associate with the item", + ) + + +class LibraryContentsFileCreatePayload(LibraryContentsCreatePayload): + dbkey: Optional[str] = Field( + "?", + title="database key", + ) + roles: Optional[str] = Field( + "", + title="user selected roles", + ) + file_type: str = Field( + ..., + title="file type", + ) + server_dir: Optional[str] = Field( + "", + title="(only if upload_option is 'upload_directory') relative path of the " + "subdirectory of Galaxy ``library_import_dir`` (if admin) or " + "``user_library_import_dir`` (if non-admin) to upload. " + "All and only the files (i.e. 
no subdirectories) contained " + "in the specified directory will be uploaded.", + ) + filesystem_paths: Optional[str] = Field( + "", + title="(only if upload_option is 'upload_paths' and the user is an admin) " + "file paths on the Galaxy server to upload to the library, one file per line", + ) + link_data_only: Optional[LinkDataOnly] = Field( + LinkDataOnly.copy_files, + title="(only when upload_option is 'upload_directory' or 'upload_paths')." + "Setting to 'link_to_files' symlinks instead of copying the files", + ) + + +class LibraryContentsFolderCreatePayload(LibraryContentsCreatePayload): + name: Optional[str] = Field( + "", + title="(only if create_type is 'folder') name of the folder to create", + ) + description: Optional[str] = Field( + "", + title="(only if create_type is 'folder') description of the folder to create", + ) + + +class LibraryContentsDeletePayload(Model): + purge: Optional[bool] = Field( + False, + title="if True, purge the library dataset", + ) diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py index 33f2324e155d..76868f146c77 100644 --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -3,11 +3,18 @@ """ import logging +from typing import Union from galaxy.managers.context import ( ProvidesHistoryContext, ProvidesUserContext, ) +from galaxy.schema.fields import DecodedDatabaseIdField, LibraryFolderDatabaseIdField +from galaxy.schema.library_contents import ( + LibraryContentsDeletePayload, + LibraryContentsFileCreatePayload, + LibraryContentsFolderCreatePayload, +) from galaxy.webapps.galaxy.api import ( depends, DependsOnTrans, @@ -30,7 +37,7 @@ class FastAPILibraryContents: ) def index( self, - library_id, + library_id: DecodedDatabaseIdField, trans: ProvidesUserContext = DependsOnTrans, ) -> list: return self.service.index(trans, library_id) @@ -41,8 +48,8 @@ def index( ) def show( self, - id, - library_id, + 
library_id: DecodedDatabaseIdField, + id: Union[LibraryFolderDatabaseIdField, DecodedDatabaseIdField], trans: ProvidesUserContext = DependsOnTrans, ): return self.service.show(trans, id) @@ -53,8 +60,8 @@ def show( ) def create( self, - library_id, - payload, + library_id: DecodedDatabaseIdField, + payload: Union[LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload], trans: ProvidesHistoryContext = DependsOnTrans, ): return self.service.create(trans, library_id, payload) @@ -66,8 +73,8 @@ def create( ) def update( self, - id, - library_id, + library_id: DecodedDatabaseIdField, + id: DecodedDatabaseIdField, payload, trans: ProvidesUserContext = DependsOnTrans, ): @@ -79,9 +86,9 @@ def update( ) def delete( self, - library_id, - id, - payload, + library_id: DecodedDatabaseIdField, + id: DecodedDatabaseIdField, + payload: LibraryContentsDeletePayload, trans: ProvidesHistoryContext = DependsOnTrans, ): return self.service.delete(trans, id, payload) diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py index 28b1a2123f4c..41f27261ef34 100644 --- a/lib/galaxy/webapps/galaxy/services/library_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -1,11 +1,13 @@ import json import logging import os -from typing import Optional +from typing import ( + Optional, + Union, +) from markupsafe import escape -import galaxy.schema.schema from galaxy import ( exceptions, util, @@ -34,6 +36,12 @@ tags, ) from galaxy.model.base import transaction +from galaxy.schema.fields import DecodedDatabaseIdField, LibraryFolderDatabaseIdField +from galaxy.schema.library_contents import ( + LibraryContentsDeletePayload, + LibraryContentsFileCreatePayload, + LibraryContentsFolderCreatePayload, +) from galaxy.security.idencoding import IdEncodingHelper from galaxy.tools.actions import upload_common from galaxy.tools.parameters import populate_state @@ -64,12 +72,12 @@ def get_object(self, 
trans, id, class_name, check_ownership=False, check_accessi def index( self, trans: ProvidesUserContext, - library_id, + library_id: DecodedDatabaseIdField, ): """Return a list of library files and folders.""" rval = [] current_user_roles = trans.get_current_user_roles() - library = trans.sa_session.get(Library, self.decode_id(library_id)) + library = trans.sa_session.get(Library, library_id) if not library: raise exceptions.RequestParameterInvalidException("No library found with the id provided.") if not (trans.user_is_admin or trans.app.security_agent.can_access_library(current_user_roles, library)): @@ -111,19 +119,18 @@ def index( def show( self, trans: ProvidesUserContext, - id, + id: Union[LibraryFolderDatabaseIdField, DecodedDatabaseIdField], ): """Returns information about library file or folder.""" - class_name, content_id = self._decode_library_content_id(id) - if class_name == "LibraryFolder": - content = self.get_library_folder(trans, content_id, check_ownership=False, check_accessible=True) + if isinstance(id, LibraryFolderDatabaseIdField): + content = self.get_library_folder(trans, id, check_ownership=False, check_accessible=True) rval = content.to_dict(view="element", value_mapper={"id": trans.security.encode_id}) rval["id"] = f"F{str(rval['id'])}" if rval["parent_id"] is not None: # This can happen for root folders. 
rval["parent_id"] = f"F{str(trans.security.encode_id(rval['parent_id']))}" rval["parent_library_id"] = trans.security.encode_id(rval["parent_library_id"]) else: - content = self.get_library_dataset(trans, content_id, check_ownership=False, check_accessible=True) + content = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=True) rval = content.to_dict(view="element") rval["id"] = trans.security.encode_id(rval["id"]) rval["ldda_id"] = trans.security.encode_id(rval["ldda_id"]) @@ -137,8 +144,8 @@ def show( def create( self, trans: ProvidesHistoryContext, - library_id, - payload, + library_id: LibraryFolderDatabaseIdField, + payload: Union[LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload], ): """Create a new library file or folder.""" if trans.user_is_bootstrap_admin: @@ -161,8 +168,6 @@ def create( if not payload.folder_id: raise exceptions.RequestParameterMissingException("Missing required 'folder_id' parameter.") folder_id = payload.folder_id - _, folder_id = self._decode_library_content_id(folder_id) - folder_id = trans.security.decode_id(folder_id) # security is checked in the downstream controller parent = self.get_library_folder(trans, folder_id, check_ownership=False, check_accessible=False) # The rest of the security happens in the library_common controller. 
@@ -179,13 +184,9 @@ def create( ) if create_type == "file": if from_hda_id: - return self._copy_hda_to_library_folder( - trans, self.hda_manager, self.decode_id(from_hda_id), folder_id, ldda_message - ) + return self._copy_hda_to_library_folder(trans, self.hda_manager, from_hda_id, folder_id, ldda_message) if from_hdca_id: - return self._copy_hdca_to_library_folder( - trans, self.hda_manager, self.decode_id(from_hdca_id), folder_id, ldda_message - ) + return self._copy_hdca_to_library_folder(trans, self.hda_manager, from_hdca_id, folder_id, ldda_message) # check for extended metadata, store it and pop it out of the param # otherwise sanitize_param will have a fit @@ -245,7 +246,7 @@ def create( def update( self, trans: ProvidesUserContext, - id, + id: DecodedDatabaseIdField, payload, ): """Create an ImplicitlyConvertedDatasetAssociation.""" @@ -266,8 +267,8 @@ def update( def delete( self, trans: ProvidesHistoryContext, - id, - payload, + id: DecodedDatabaseIdField, + payload: LibraryContentsDeletePayload, ): """Delete the LibraryDataset with the given ``id``.""" purge = payload.purge or False @@ -391,19 +392,6 @@ def _scan_json_block( # for cross type comparisions, ie "True" == True yield prefix, (f"{meta}").encode() - def _decode_library_content_id( - self, - content_id, - ): - if len(content_id) % 16 == 0: - return "LibraryDataset", content_id - elif content_id.startswith("F"): - return "LibraryFolder", content_id[1:] - else: - raise exceptions.RequestParameterInvalidException( - f"Malformed library content id ( {str(content_id)} ) specified, unable to decode." 
- ) - def _traverse( self, trans: ProvidesUserContext, diff --git a/lib/galaxy_test/api/test_library_contents.py b/lib/galaxy_test/api/test_library_contents.py index afdd31516801..b3dca586eb28 100644 --- a/lib/galaxy_test/api/test_library_contents.py +++ b/lib/galaxy_test/api/test_library_contents.py @@ -41,7 +41,7 @@ def test_create_invalid(self): folder_id = self.library["root_folder_id"] payload = {"folder_id": folder_id, "create_type": "invalid_type"} - response = self._post(f"/api/libraries/{library_id}/contents", data=payload) + response = self._post(f"/api/libraries/{library_id}/contents", data=payload, json=True) self._assert_status_code_is(response, 400) def test_index(self): @@ -158,33 +158,6 @@ def test_get_invalid_library_item(self): response = self._get(f"/api/libraries/{library_id}/contents/{invalid_item_id}") self._assert_status_code_is(response, 400) - def test_update_library_item_from_hda(self): - library_id = self.library["id"] - file_id = self._create_library_content(type="from_hda")["id"] - converted_dataset_id = self._create_library_content(type="from_hda")["id"] - - payload = {"converted_dataset_id": converted_dataset_id} - response = self._put(f"/api/libraries/{library_id}/contents/{file_id}", data=payload) - self._assert_status_code_is(response, 200) - - def test_update_library_item_from_hdca(self): - library_id = self.library["id"] - file_id = self._create_library_content(type="from_hdca")[0]["id"] - converted_dataset_id = self._create_library_content(type="from_hdca")[0]["id"] - - payload = {"converted_dataset_id": converted_dataset_id} - response = self._put(f"/api/libraries/{library_id}/contents/{file_id}", data=payload) - self._assert_status_code_is(response, 200) - - def test_update_invalid_library_item(self): - library_id = self.library["id"] - invalid_item_id = "invalid_id" - converted_dataset_id = invalid_item_id - - payload = {"converted_dataset_id": converted_dataset_id} - response = 
self._put(f"/api/libraries/{library_id}/contents/{invalid_item_id}", data=payload) - self._assert_status_code_is(response, 400) - def test_delete_library_item_from_hda(self): library_id = self.library["id"] file_id = self._create_library_content(type="from_hda")["id"] @@ -238,6 +211,6 @@ def _create_library_content(self, type) -> Any: "ldda_message": "Test", } - response = self._post(f"/api/libraries/{library_id}/contents", data=payload) + response = self._post(f"/api/libraries/{library_id}/contents", data=payload, json=True) self._assert_status_code_is(response, 200) return response.json() From 1450ab285c315cda7b947433f60d6eb3683ca334 Mon Sep 17 00:00:00 2001 From: Arash Date: Wed, 18 Sep 2024 17:50:44 +0200 Subject: [PATCH 05/22] Refactor library content routes and controllers --- lib/galaxy/schema/library_contents.py | 40 ++- .../webapps/galaxy/api/library_contents.py | 11 +- .../galaxy/services/library_contents.py | 235 +++++++++--------- lib/galaxy_test/api/test_library_contents.py | 18 +- 4 files changed, 181 insertions(+), 123 deletions(-) diff --git a/lib/galaxy/schema/library_contents.py b/lib/galaxy/schema/library_contents.py index 3120e1b360e5..0eabc8ccbf43 100644 --- a/lib/galaxy/schema/library_contents.py +++ b/lib/galaxy/schema/library_contents.py @@ -1,7 +1,10 @@ from enum import Enum from typing import ( + Any, + Dict, List, Optional, + Union, ) from pydantic import ( @@ -9,7 +12,11 @@ RootModel, ) -from galaxy.schema.fields import EncodedDatabaseIdField, DecodedDatabaseIdField, LibraryFolderDatabaseIdField +from galaxy.schema.fields import ( + DecodedDatabaseIdField, + EncodedDatabaseIdField, + LibraryFolderDatabaseIdField, +) from galaxy.schema.schema import Model @@ -63,14 +70,14 @@ class LibraryContentsCreatePayload(Model): "", title="the new message attribute of the LDDA created", ) - extended_metadata: Optional[str] = Field( + extended_metadata: Optional[Union[Dict[str, Any], List[Any], int, float, str, bool]] = Field( None, 
title="sub-dictionary containing any extended metadata to associate with the item", ) class LibraryContentsFileCreatePayload(LibraryContentsCreatePayload): - dbkey: Optional[str] = Field( + dbkey: Optional[Union[str, list]] = Field( "?", title="database key", ) @@ -100,6 +107,10 @@ class LibraryContentsFileCreatePayload(LibraryContentsCreatePayload): title="(only when upload_option is 'upload_directory' or 'upload_paths')." "Setting to 'link_to_files' symlinks instead of copying the files", ) + uuid: Optional[str] = Field( + None, + title="UUID of the dataset to upload", + ) class LibraryContentsFolderCreatePayload(LibraryContentsCreatePayload): @@ -113,8 +124,31 @@ class LibraryContentsFolderCreatePayload(LibraryContentsCreatePayload): ) +class LibraryContentsUpdatePayload(Model): + converted_dataset_id: Optional[DecodedDatabaseIdField] = Field( + None, + title="the decoded id of the dataset that was created from the file", + ) + + class LibraryContentsDeletePayload(Model): purge: Optional[bool] = Field( False, title="if True, purge the library dataset", ) + + +class LibraryContentsIndexResponse(Model): + pass + + +class LibraryContentsShowResponse(Model): + pass + + +class LibraryContentsCreateResponse(Model): + pass + + +class LibraryContentsDeleteResponse(Model): + pass diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py index 76868f146c77..e6b74ac00245 100644 --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -9,7 +9,7 @@ ProvidesHistoryContext, ProvidesUserContext, ) -from galaxy.schema.fields import DecodedDatabaseIdField, LibraryFolderDatabaseIdField +from galaxy.schema.fields import DecodedDatabaseIdField from galaxy.schema.library_contents import ( LibraryContentsDeletePayload, LibraryContentsFileCreatePayload, @@ -20,7 +20,10 @@ DependsOnTrans, Router, ) -from galaxy.webapps.galaxy.services.library_contents import 
LibraryContentsService +from galaxy.webapps.galaxy.services.library_contents import ( + LibraryContentsService, + MaybeLibraryFolderOrDatasetID, +) log = logging.getLogger(__name__) @@ -49,7 +52,7 @@ def index( def show( self, library_id: DecodedDatabaseIdField, - id: Union[LibraryFolderDatabaseIdField, DecodedDatabaseIdField], + id: MaybeLibraryFolderOrDatasetID, trans: ProvidesUserContext = DependsOnTrans, ): return self.service.show(trans, id) @@ -88,7 +91,7 @@ def delete( self, library_id: DecodedDatabaseIdField, id: DecodedDatabaseIdField, - payload: LibraryContentsDeletePayload, + payload: LibraryContentsDeletePayload = LibraryContentsDeletePayload(), trans: ProvidesHistoryContext = DependsOnTrans, ): return self.service.delete(trans, id, payload) diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py index 41f27261ef34..ff025e5435a7 100644 --- a/lib/galaxy/webapps/galaxy/services/library_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -2,10 +2,15 @@ import logging import os from typing import ( + Annotated, + Dict, + List, Optional, + Tuple, Union, ) +from fastapi import Path from markupsafe import escape from galaxy import ( @@ -17,7 +22,6 @@ validate_path_upload, validate_server_directory_upload, ) -from galaxy.managers import base as managers_base from galaxy.managers.collections_util import ( api_payload_to_create_params, dictify_dataset_collection_instance, @@ -28,31 +32,47 @@ ) from galaxy.managers.hdas import HDAManager from galaxy.model import ( - ExtendedMetadata, - ExtendedMetadataIndex, + Role, Library, LibraryDataset, LibraryFolder, tags, ) from galaxy.model.base import transaction -from galaxy.schema.fields import DecodedDatabaseIdField, LibraryFolderDatabaseIdField +from galaxy.schema.fields import ( + DecodedDatabaseIdField, + LibraryFolderDatabaseIdField, +) from galaxy.schema.library_contents import ( LibraryContentsDeletePayload, 
LibraryContentsFileCreatePayload, + LibraryContentsUpdatePayload, LibraryContentsFolderCreatePayload, + LibraryContentsIndexResponse, + LibraryContentsShowResponse, + LibraryContentsCreateResponse, + LibraryContentsDeleteResponse, ) from galaxy.security.idencoding import IdEncodingHelper from galaxy.tools.actions import upload_common from galaxy.tools.parameters import populate_state -from galaxy.util import bunch -from galaxy.webapps.base.controller import UsesLibraryMixinItems +from galaxy.webapps.base.controller import UsesLibraryMixinItems, UsesExtendedMetadataMixin from galaxy.webapps.galaxy.services.base import ServiceBase log = logging.getLogger(__name__) +MaybeLibraryFolderOrDatasetID = Annotated[ + str, + Path( + title="The encoded ID of a library folder or dataset.", + example="F0123456789ABCDEF", + min_length=16, + pattern="F?[0-9a-fA-F]+", + ), +] + -class LibraryContentsService(ServiceBase, LibraryActions, UsesLibraryMixinItems): +class LibraryContentsService(ServiceBase, LibraryActions, UsesLibraryMixinItems, UsesExtendedMetadataMixin): """ Interface/service shared by controllers for interacting with the contents of a library contents. """ @@ -61,19 +81,11 @@ def __init__(self, security: IdEncodingHelper, hda_manager: HDAManager): super().__init__(security) self.hda_manager = hda_manager - def get_object(self, trans, id, class_name, check_ownership=False, check_accessible=False, deleted=None): - """ - Convenience method to get a model object with the specified checks. 
- """ - return managers_base.get_object( - trans, id, class_name, check_ownership=check_ownership, check_accessible=check_accessible, deleted=deleted - ) - def index( self, trans: ProvidesUserContext, library_id: DecodedDatabaseIdField, - ): + ) -> LibraryContentsIndexResponse: """Return a list of library files and folders.""" rval = [] current_user_roles = trans.get_current_user_roles() @@ -119,18 +131,19 @@ def index( def show( self, trans: ProvidesUserContext, - id: Union[LibraryFolderDatabaseIdField, DecodedDatabaseIdField], - ): + id: MaybeLibraryFolderOrDatasetID, + ) -> LibraryContentsShowResponse: """Returns information about library file or folder.""" - if isinstance(id, LibraryFolderDatabaseIdField): - content = self.get_library_folder(trans, id, check_ownership=False, check_accessible=True) + class_name, content_id = self._decode_library_content_id(id) + if class_name == "LibraryFolder": + content = self.get_library_folder(trans, content_id, check_ownership=False, check_accessible=True) rval = content.to_dict(view="element", value_mapper={"id": trans.security.encode_id}) rval["id"] = f"F{str(rval['id'])}" if rval["parent_id"] is not None: # This can happen for root folders. 
rval["parent_id"] = f"F{str(trans.security.encode_id(rval['parent_id']))}" rval["parent_library_id"] = trans.security.encode_id(rval["parent_library_id"]) else: - content = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=True) + content = self.get_library_dataset(trans, content_id, check_ownership=False, check_accessible=True) rval = content.to_dict(view="element") rval["id"] = trans.security.encode_id(rval["id"]) rval["ldda_id"] = trans.security.encode_id(rval["ldda_id"]) @@ -146,7 +159,7 @@ def create( trans: ProvidesHistoryContext, library_id: LibraryFolderDatabaseIdField, payload: Union[LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload], - ): + ) -> LibraryContentsCreateResponse: """Create a new library file or folder.""" if trans.user_is_bootstrap_admin: raise exceptions.RealUserRequiredException("Only real users can create a new library file or folder.") @@ -172,15 +185,12 @@ def create( parent = self.get_library_folder(trans, folder_id, check_ownership=False, check_accessible=False) # The rest of the security happens in the library_common controller. - payload.tag_using_filenames = payload.tag_using_filenames or False - payload.tags = payload.tags or [] - # are we copying an HDA to the library folder? # we'll need the id and any message to attach, then branch to that private function from_hda_id, from_hdca_id, ldda_message = ( - payload.from_hda_id or None, - payload.from_hdca_id or None, - payload.ldda_message or "", + payload.from_hda_id, + payload.from_hdca_id, + payload.ldda_message, ) if create_type == "file": if from_hda_id: @@ -190,12 +200,12 @@ def create( # check for extended metadata, store it and pop it out of the param # otherwise sanitize_param will have a fit - ex_meta_payload = payload.extended_metadata or None + ex_meta_payload = payload.extended_metadata # Now create the desired content object, either file or folder. 
- if create_type == "file": + if create_type == "file" and isinstance(payload, LibraryContentsFileCreatePayload): output = self._upload_library_dataset(trans, folder_id, payload) - elif create_type == "folder": + elif create_type == "folder" and isinstance(payload, LibraryContentsFolderCreatePayload): output = self._create_folder(trans, folder_id, payload) elif create_type == "collection": # Not delegating to library_common, so need to check access to parent @@ -214,17 +224,7 @@ def create( for v in output.values(): if ex_meta_payload is not None: # If there is extended metadata, store it, attach it to the dataset, and index it - ex_meta = ExtendedMetadata(ex_meta_payload) - trans.sa_session.add(ex_meta) - v.extended_metadata = ex_meta - trans.sa_session.add(v) - with transaction(trans.sa_session): - trans.sa_session.commit() - for path, value in self._scan_json_block(ex_meta_payload): - meta_i = ExtendedMetadataIndex(ex_meta, path, value) - trans.sa_session.add(meta_i) - with transaction(trans.sa_session): - trans.sa_session.commit() + self.create_extended_metadata(trans, ex_meta_payload) if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation): v = v.library_dataset encoded_id = trans.security.encode_id(v.id) @@ -247,8 +247,8 @@ def update( self, trans: ProvidesUserContext, id: DecodedDatabaseIdField, - payload, - ): + payload: LibraryContentsUpdatePayload, + ) -> None: """Create an ImplicitlyConvertedDatasetAssociation.""" if payload.converted_dataset_id: converted_id = payload.converted_dataset_id @@ -269,9 +269,9 @@ def delete( trans: ProvidesHistoryContext, id: DecodedDatabaseIdField, payload: LibraryContentsDeletePayload, - ): + ) -> LibraryContentsDeleteResponse: """Delete the LibraryDataset with the given ``id``.""" - purge = payload.purge or False + purge = payload.purge rval = {"id": id} try: @@ -315,15 +315,28 @@ def delete( raise exceptions.InternalServerError(util.unicodify(exc)) return rval + def _decode_library_content_id( + self, + 
content_id: MaybeLibraryFolderOrDatasetID, + ) -> Tuple: + if len(content_id) % 16 == 0: + return "LibraryDataset", content_id + elif content_id.startswith("F"): + return "LibraryFolder", content_id[1:] + else: + raise exceptions.MalformedId( + f"Malformed library content id ( {str(content_id)} ) specified, unable to decode." + ) + def _upload_library_dataset( self, trans: ProvidesHistoryContext, - folder_id: int, - payload, - ): + folder_id: LibraryFolderDatabaseIdField, + payload: LibraryContentsFileCreatePayload, + ) -> Dict: replace_dataset: Optional[LibraryDataset] = None - upload_option = payload.upload_option or "upload_file" - dbkey = payload.dbkey or "?" + upload_option = payload.upload_option + dbkey = payload.dbkey if isinstance(dbkey, list): last_used_build = dbkey[0] else: @@ -341,7 +354,7 @@ def _upload_library_dataset( error = False if upload_option == "upload_paths": validate_path_upload(trans) # Duplicate check made in _upload_dataset. - elif roles := payload.roles or "": + elif roles := payload.roles: # Check to see if the user selected roles to associate with the DATASET_ACCESS permission # on the dataset that would cause accessibility issues. vars = dict(DATASET_ACCESS_in=roles) @@ -354,50 +367,16 @@ def _upload_library_dataset( created_outputs_dict = self._upload_dataset( trans, payload=payload, folder_id=folder.id, replace_dataset=replace_dataset ) - if created_outputs_dict: - if isinstance(created_outputs_dict, str): - raise exceptions.RequestParameterInvalidException(created_outputs_dict) - elif isinstance(created_outputs_dict, tuple): - return created_outputs_dict[0], created_outputs_dict[1] - return created_outputs_dict - else: + if not created_outputs_dict: raise exceptions.RequestParameterInvalidException("Upload failed") - - def _scan_json_block( - self, - meta, - prefix="", - ): - """ - Scan a json style data structure, and emit all fields and their values. 
- Example paths - - Data - { "data" : [ 1, 2, 3 ] } - - Path: - /data == [1,2,3] - - /data/[0] == 1 - - """ - if isinstance(meta, dict): - for a in meta: - yield from self._scan_json_block(meta[a], f"{prefix}/{a}") - elif isinstance(meta, list): - for i, a in enumerate(meta): - yield from self._scan_json_block(a, prefix + "[%d]" % (i)) - else: - # BUG: Everything is cast to string, which can lead to false positives - # for cross type comparisions, ie "True" == True - yield prefix, (f"{meta}").encode() + return created_outputs_dict def _traverse( self, trans: ProvidesUserContext, - folder, - current_user_roles, - ): + folder: LibraryFolder, + current_user_roles: List[Role], + ) -> List: admin = trans.user_is_admin rval = [] for subfolder in folder.active_folders: @@ -423,11 +402,11 @@ def _traverse( def _upload_dataset( self, - trans, - payload, - folder_id: int, - replace_dataset: Optional[LibraryDataset] = None, - ): + trans: ProvidesHistoryContext, + payload: LibraryContentsFileCreatePayload, + folder_id: LibraryFolderDatabaseIdField, + replace_dataset: Optional[LibraryDataset], + ) -> Dict[str, List]: # Set up the traditional tool state/params cntrller = "api" tool_id = "upload1" @@ -435,15 +414,15 @@ def _upload_dataset( upload_common.validate_datatype_extension(datatypes_registry=trans.app.datatypes_registry, ext=file_type) tool = trans.app.toolbox.get_tool(tool_id) state = tool.new_state(trans) - populate_state(trans, tool.inputs, payload.dict(), state.inputs) + populate_state(trans, tool.inputs, payload.model_dump(), state.inputs) tool_params = state.inputs dataset_upload_inputs = [] for input in tool.inputs.values(): if input.type == "upload_dataset": dataset_upload_inputs.append(input) # Library-specific params - server_dir = payload.server_dir or "" - upload_option = payload.upload_option or "upload_file" + server_dir = payload.server_dir + upload_option = payload.upload_option if upload_option == "upload_directory": full_dir, import_dir_desc = 
validate_server_directory_upload(trans, server_dir) elif upload_option == "upload_paths": @@ -453,7 +432,7 @@ def _upload_dataset( try: # FIXME: instead of passing params here ( which have been processed by util.Params(), the original payload # should be passed so that complex objects that may have been included in the initial request remain. - library_bunch = upload_common.handle_library_params(trans, payload.dict(), folder_id, replace_dataset) + library_bunch = upload_common.handle_library_params(trans, payload.model_dump(), folder_id, replace_dataset) except Exception: raise exceptions.InvalidFileFormatError("Invalid folder specified") # Proceed with (mostly) regular upload processing if we're still errorless @@ -468,38 +447,52 @@ def _upload_dataset( ) elif upload_option == "upload_paths": uploaded_datasets, _, _ = self._get_path_paste_uploaded_datasets( - trans, payload.dict(), library_bunch, 200, None + trans, payload.model_dump(), library_bunch, 200, None ) if upload_option == "upload_file" and not uploaded_datasets: raise exceptions.RequestParameterInvalidException("Select a file, enter a URL or enter text") json_file_path = upload_common.create_paramfile(trans, uploaded_datasets) data_list = [ud.data for ud in uploaded_datasets] job_params = {} - job_params["link_data_only"] = json.dumps(payload.link_data_only or "copy_files") - job_params["uuid"] = json.dumps(payload.uuid or None) + job_params["link_data_only"] = json.dumps(payload.link_data_only) + job_params["uuid"] = json.dumps(payload.uuid) job, output = upload_common.create_job( trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params ) trans.app.job_manager.enqueue(job, tool=tool) return output - def _get_server_dir_uploaded_datasets(self, trans, payload, full_dir, import_dir_desc, library_bunch): + def _get_server_dir_uploaded_datasets( + self, + trans: ProvidesHistoryContext, + payload: LibraryContentsFileCreatePayload, + full_dir: str, + 
import_dir_desc: str, + library_bunch: upload_common.LibraryParams, + ) -> List: files = self._get_server_dir_files(payload, full_dir, import_dir_desc) uploaded_datasets = [] for file in files: name = os.path.basename(file) uploaded_datasets.append( - self._make_library_uploaded_dataset(trans, payload.dict(), name, file, "server_dir", library_bunch) + self._make_library_uploaded_dataset( + trans, payload.model_dump(), name, file, "server_dir", library_bunch + ) ) return uploaded_datasets - def _get_server_dir_files(self, payload, full_dir, import_dir_desc): + def _get_server_dir_files( + self, + payload: LibraryContentsFileCreatePayload, + full_dir: str, + import_dir_desc: str, + ) -> List: files = [] try: for entry in os.listdir(full_dir): # Only import regular files path = os.path.join(full_dir, entry) - link_data_only = payload.link_data_only or "copy_files" + link_data_only = payload.link_data_only if os.path.islink(full_dir) and link_data_only == "link_to_files": # If we're linking instead of copying and the # sub-"directory" in the import dir is actually a symlink, @@ -530,18 +523,20 @@ def _get_server_dir_files(self, payload, full_dir, import_dir_desc): def _create_folder( self, - trans, - parent_id: int, - payload, - ): + trans: ProvidesUserContext, + parent_id: LibraryFolderDatabaseIdField, + payload: LibraryContentsFolderCreatePayload, + ) -> Dict[str, LibraryFolder]: is_admin = trans.user_is_admin current_user_roles = trans.get_current_user_roles() parent_folder = trans.sa_session.get(LibraryFolder, parent_id) + if not parent_folder: + raise exceptions.RequestParameterInvalidException("Invalid folder id specified.") # Check the library which actually contains the user-supplied parent folder, not the user-supplied # library, which could be anything. 
self._check_access(trans, is_admin, parent_folder, current_user_roles) self._check_add(trans, is_admin, parent_folder, current_user_roles) - new_folder = LibraryFolder(name=payload.name or "", description=payload.description or "") + new_folder = LibraryFolder(name=payload.name, description=payload.description) # We are associating the last used genome build with folders, so we will always # initialize a new folder with the first dbkey in genome builds list which is currently # ? unspecified (?) @@ -554,7 +549,13 @@ def _create_folder( trans.app.security_agent.copy_library_permissions(trans, parent_folder, new_folder) return dict(created=new_folder) - def _check_access(self, trans, is_admin, item, current_user_roles): + def _check_access( + self, + trans: ProvidesUserContext, + is_admin: bool, + item: LibraryFolder, + current_user_roles: List[Role], + ) -> None: if isinstance(item, trans.model.HistoryDatasetAssociation): # Make sure the user has the DATASET_ACCESS permission on the history_dataset_association. if not item: @@ -583,7 +584,13 @@ def _check_access(self, trans, is_admin, item, current_user_roles): message = f"You do not have permission to access the {escape(item_type)} with id ({str(item.id)})." raise exceptions.ItemAccessibilityException(message) - def _check_add(self, trans, is_admin, item, current_user_roles): + def _check_add( + self, + trans: ProvidesUserContext, + is_admin: bool, + item: LibraryFolder, + current_user_roles : List[Role], + ) -> None: # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission. if not (is_admin or trans.app.security_agent.can_add_library_item(current_user_roles, item)): message = f"You are not authorized to add an item to ({escape(item.name)})." 
diff --git a/lib/galaxy_test/api/test_library_contents.py b/lib/galaxy_test/api/test_library_contents.py index b3dca586eb28..862f5cc4c89e 100644 --- a/lib/galaxy_test/api/test_library_contents.py +++ b/lib/galaxy_test/api/test_library_contents.py @@ -172,11 +172,25 @@ def test_delete_library_item_from_hdca(self): response = self._delete(f"/api/libraries/{library_id}/contents/{file_id}") self._assert_status_code_is(response, 200) + def test_delete_library_item_from_hda_purged(self): + library_id = self.library["id"] + file_id = self._create_library_content(type="from_hda")["id"] + payload = {"purged": True} + response = self._delete(f"/api/libraries/{library_id}/contents/{file_id}", data=payload, json=True) + self._assert_status_code_is(response, 200) + + def test_delete_library_item_from_hdca_purged(self): + library_id = self.library["id"] + file_id = self._create_library_content(type="from_hdca")[0]["id"] + payload = {"purged": True} + response = self._delete(f"/api/libraries/{library_id}/contents/{file_id}", data=payload, json=True) + self._assert_status_code_is(response, 200) + def test_delete_invalid_library_item(self): library_id = self.library["id"] invalid_item_id = "invalid_id" - response = self._delete(f"/api/libraries/{library_id}/contents/{invalid_item_id}") - self._assert_status_code_is(response, 500) + response_invalid = self._delete(f"/api/libraries/{library_id}/contents/{invalid_item_id}") + self._assert_status_code_is(response_invalid, 400) def _create_library_content(self, type) -> Any: folder_id = self.library["root_folder_id"] From c5023cd11e54bda4198607dea79eabfa5d240ec0 Mon Sep 17 00:00:00 2001 From: Arash Date: Thu, 19 Sep 2024 18:37:23 +0200 Subject: [PATCH 06/22] Refactor library content routes and controllers --- lib/galaxy/schema/library_contents.py | 134 +++++++- .../webapps/galaxy/api/library_contents.py | 22 +- .../galaxy/services/library_contents.py | 296 ++++++++---------- lib/galaxy_test/api/test_library_contents.py | 8 +- 4 files 
changed, 275 insertions(+), 185 deletions(-) diff --git a/lib/galaxy/schema/library_contents.py b/lib/galaxy/schema/library_contents.py index 0eabc8ccbf43..d02c0350989b 100644 --- a/lib/galaxy/schema/library_contents.py +++ b/lib/galaxy/schema/library_contents.py @@ -15,9 +15,13 @@ from galaxy.schema.fields import ( DecodedDatabaseIdField, EncodedDatabaseIdField, + EncodedLibraryFolderDatabaseIdField, LibraryFolderDatabaseIdField, ) -from galaxy.schema.schema import Model +from galaxy.schema.schema import ( + Model, + TagCollection, +) class UploadOption(str, Enum): @@ -37,6 +41,11 @@ class LinkDataOnly(str, Enum): link_to_files = "link_to_files" +class ModelClass(str, Enum): + LibraryDataset = "LibraryDataset" + LibraryFolder = "LibraryFolder" + + class LibraryContentsCreatePayload(Model): create_type: CreateType = Field( ..., @@ -85,8 +94,8 @@ class LibraryContentsFileCreatePayload(LibraryContentsCreatePayload): "", title="user selected roles", ) - file_type: str = Field( - ..., + file_type: Optional[str] = Field( + None, title="file type", ) server_dir: Optional[str] = Field( @@ -124,6 +133,29 @@ class LibraryContentsFolderCreatePayload(LibraryContentsCreatePayload): ) +class LibraryContentsCollectionCreatePayload(LibraryContentsCreatePayload): + collection_type: str = Field( + ..., + title="the type of collection to create", + ) + element_identifiers: List[Dict[str, Any]] = Field( + ..., + title="list of dictionaries containing the element identifiers for the collection", + ) + name: Optional[str] = Field( + None, + title="the name of the collection", + ) + hide_source_items: Optional[bool] = Field( + False, + title="if True, hide the source items in the collection", + ) + copy_elements: Optional[bool] = Field( + False, + title="if True, copy the elements into the collection", + ) + + class LibraryContentsUpdatePayload(Model): converted_dataset_id: Optional[DecodedDatabaseIdField] = Field( None, @@ -139,15 +171,103 @@ class LibraryContentsDeletePayload(Model): 
class LibraryContentsIndexResponse(Model): - pass + type: str + name: str + url: str + + +class LibraryContentsIndexFolderResponse(LibraryContentsIndexResponse): + id: EncodedLibraryFolderDatabaseIdField + + +class LibraryContentsIndexDatasetResponse(LibraryContentsIndexResponse): + id: EncodedDatabaseIdField + + +class LibraryContentsIndexListResponse(RootModel): + root: List[Union[LibraryContentsIndexFolderResponse, LibraryContentsIndexDatasetResponse]] class LibraryContentsShowResponse(Model): - pass + model_class: ModelClass + name: str + genome_build: Optional[str] + update_time: str + parent_library_id: EncodedDatabaseIdField -class LibraryContentsCreateResponse(Model): - pass +class LibraryContentsShowFolderResponse(LibraryContentsShowResponse): + id: EncodedLibraryFolderDatabaseIdField + parent_id: Optional[EncodedLibraryFolderDatabaseIdField] + description: str + item_count: int + deleted: bool + library_path: List[str] + + +class LibraryContentsShowDatasetResponse(LibraryContentsShowResponse): + id: EncodedDatabaseIdField + ldda_id: EncodedDatabaseIdField + folder_id: EncodedLibraryFolderDatabaseIdField + state: str + file_name: str + created_from_basename: str + uploaded_by: str + message: Optional[str] + date_uploaded: str + file_size: int + file_ext: str + data_type: str + misc_info: str + misc_blurb: str + peek: Optional[str] + uuid: str + metadata_dbkey: str + metadata_data_lines: int + tags: TagCollection + + +class LibraryContentsCreateFolderResponse(Model): + id: EncodedLibraryFolderDatabaseIdField + name: str + url: str + + +class LibraryContentsCreateFolderListResponse(RootModel): + root: List[LibraryContentsCreateFolderResponse] + + +class LibraryContentsCreateDatasetResponse(Model): + id: EncodedDatabaseIdField + hda_ldda: str + model_class: str + name: str + deleted: bool + visible: bool + state: str + library_dataset_id: EncodedDatabaseIdField + file_size: int + file_name: str + update_time: str + file_ext: str + data_type: str + 
genome_build: str + misc_info: str + misc_blurb: str + created_from_basename: str + uuid: str + parent_library_id: EncodedDatabaseIdField + metadata_dbkey: str + metadata_data_lines: int + metadata_comment_lines: Union[str, int] + metadata_columns: int + metadata_column_types: List[str] + metadata_column_names: List[str] + metadata_delimiter: str + + +class LibraryContentsCreateDatasetListResponse(RootModel): + root: List[LibraryContentsCreateDatasetResponse] class LibraryContentsDeleteResponse(Model): diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py index e6b74ac00245..24d122e70775 100644 --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -11,9 +11,17 @@ ) from galaxy.schema.fields import DecodedDatabaseIdField from galaxy.schema.library_contents import ( + LibraryContentsCollectionCreatePayload, + LibraryContentsCreateDatasetListResponse, + LibraryContentsCreateDatasetResponse, + LibraryContentsCreateFolderListResponse, LibraryContentsDeletePayload, + LibraryContentsDeleteResponse, LibraryContentsFileCreatePayload, LibraryContentsFolderCreatePayload, + LibraryContentsIndexListResponse, + LibraryContentsShowDatasetResponse, + LibraryContentsShowFolderResponse, ) from galaxy.webapps.galaxy.api import ( depends, @@ -42,7 +50,7 @@ def index( self, library_id: DecodedDatabaseIdField, trans: ProvidesUserContext = DependsOnTrans, - ) -> list: + ) -> LibraryContentsIndexListResponse: return self.service.index(trans, library_id) @router.get( @@ -54,7 +62,7 @@ def show( library_id: DecodedDatabaseIdField, id: MaybeLibraryFolderOrDatasetID, trans: ProvidesUserContext = DependsOnTrans, - ): + ) -> Union[LibraryContentsShowFolderResponse, LibraryContentsShowDatasetResponse]: return self.service.show(trans, id) @router.post( @@ -64,9 +72,13 @@ def show( def create( self, library_id: DecodedDatabaseIdField, - payload: 
Union[LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload], + payload: Union[LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload, LibraryContentsCollectionCreatePayload], trans: ProvidesHistoryContext = DependsOnTrans, - ): + ) -> Union[ + LibraryContentsCreateFolderListResponse, + LibraryContentsCreateDatasetResponse, + LibraryContentsCreateDatasetListResponse, + ]: return self.service.create(trans, library_id, payload) @router.put( @@ -80,7 +92,7 @@ def update( id: DecodedDatabaseIdField, payload, trans: ProvidesUserContext = DependsOnTrans, - ): + ) -> None: return self.service.update(trans, id, payload) @router.delete( diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py index ff025e5435a7..0f9b1f893c4d 100644 --- a/lib/galaxy/webapps/galaxy/services/library_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -3,10 +3,12 @@ import os from typing import ( Annotated, + cast, Dict, List, Optional, Tuple, + Type, Union, ) @@ -22,20 +24,17 @@ validate_path_upload, validate_server_directory_upload, ) -from galaxy.managers.collections_util import ( - api_payload_to_create_params, - dictify_dataset_collection_instance, -) +from galaxy.managers.collections_util import dictify_dataset_collection_instance from galaxy.managers.context import ( ProvidesHistoryContext, ProvidesUserContext, ) from galaxy.managers.hdas import HDAManager from galaxy.model import ( - Role, Library, LibraryDataset, LibraryFolder, + Role, tags, ) from galaxy.model.base import transaction @@ -44,19 +43,28 @@ LibraryFolderDatabaseIdField, ) from galaxy.schema.library_contents import ( + LibraryContentsCollectionCreatePayload, + LibraryContentsCreateDatasetListResponse, + LibraryContentsCreateDatasetResponse, + LibraryContentsCreateFolderListResponse, LibraryContentsDeletePayload, + LibraryContentsDeleteResponse, LibraryContentsFileCreatePayload, - 
LibraryContentsUpdatePayload, LibraryContentsFolderCreatePayload, - LibraryContentsIndexResponse, - LibraryContentsShowResponse, - LibraryContentsCreateResponse, - LibraryContentsDeleteResponse, + LibraryContentsIndexDatasetResponse, + LibraryContentsIndexFolderResponse, + LibraryContentsIndexListResponse, + LibraryContentsShowDatasetResponse, + LibraryContentsShowFolderResponse, + LibraryContentsUpdatePayload, ) from galaxy.security.idencoding import IdEncodingHelper from galaxy.tools.actions import upload_common from galaxy.tools.parameters import populate_state -from galaxy.webapps.base.controller import UsesLibraryMixinItems, UsesExtendedMetadataMixin +from galaxy.webapps.base.controller import ( + UsesExtendedMetadataMixin, + UsesLibraryMixinItems, +) from galaxy.webapps.galaxy.services.base import ServiceBase log = logging.getLogger(__name__) @@ -85,163 +93,121 @@ def index( self, trans: ProvidesUserContext, library_id: DecodedDatabaseIdField, - ) -> LibraryContentsIndexResponse: + ) -> LibraryContentsIndexListResponse: """Return a list of library files and folders.""" - rval = [] + rval: List[Union[LibraryContentsIndexFolderResponse, LibraryContentsIndexDatasetResponse]] = [] current_user_roles = trans.get_current_user_roles() library = trans.sa_session.get(Library, library_id) if not library: raise exceptions.RequestParameterInvalidException("No library found with the id provided.") if not (trans.user_is_admin or trans.app.security_agent.can_access_library(current_user_roles, library)): raise exceptions.RequestParameterInvalidException("No library found with the id provided.") - encoded_id = f"F{trans.security.encode_id(library.root_folder.id)}" # appending root folder - rval.append( - dict( - id=encoded_id, - type="folder", - name="/", - url=( - trans.url_builder("library_content", library_id=library_id, id=encoded_id) - if trans.url_builder - else None - ), - ) - ) + url = self._url_for(trans, library_id, library.root_folder.id, "folder") + 
rval.append(LibraryContentsIndexFolderResponse(id=library.root_folder.id, type="folder", name="/", url=url)) library.root_folder.api_path = "" # appending all other items in the library recursively for content in self._traverse(trans, library.root_folder, current_user_roles): - encoded_id = trans.security.encode_id(content.id) - if content.api_type == "folder": - encoded_id = f"F{encoded_id}" - rval.append( - dict( - id=encoded_id, - type=content.api_type, - name=content.api_path, - url=( - trans.url_builder("library_content", library_id=library_id, id=encoded_id) - if trans.url_builder - else None - ), - ) + url = self._url_for(trans, library_id, content.id, content.api_type) + response_class: Union[ + Type[LibraryContentsIndexFolderResponse], Type[LibraryContentsIndexDatasetResponse] + ] = ( + LibraryContentsIndexFolderResponse + if content.api_type == "folder" + else LibraryContentsIndexDatasetResponse ) - return rval + rval.append(response_class(id=content.id, type=content.api_type, name=content.api_path, url=url)) + return LibraryContentsIndexListResponse(root=rval) def show( self, trans: ProvidesUserContext, id: MaybeLibraryFolderOrDatasetID, - ) -> LibraryContentsShowResponse: + ) -> Union[LibraryContentsShowFolderResponse, LibraryContentsShowDatasetResponse]: """Returns information about library file or folder.""" class_name, content_id = self._decode_library_content_id(id) + rval: Union[LibraryContentsShowFolderResponse, LibraryContentsShowDatasetResponse] if class_name == "LibraryFolder": content = self.get_library_folder(trans, content_id, check_ownership=False, check_accessible=True) - rval = content.to_dict(view="element", value_mapper={"id": trans.security.encode_id}) - rval["id"] = f"F{str(rval['id'])}" - if rval["parent_id"] is not None: # This can happen for root folders. 
- rval["parent_id"] = f"F{str(trans.security.encode_id(rval['parent_id']))}" - rval["parent_library_id"] = trans.security.encode_id(rval["parent_library_id"]) + rval = LibraryContentsShowFolderResponse(**content.to_dict(view="element")) else: content = self.get_library_dataset(trans, content_id, check_ownership=False, check_accessible=True) - rval = content.to_dict(view="element") - rval["id"] = trans.security.encode_id(rval["id"]) - rval["ldda_id"] = trans.security.encode_id(rval["ldda_id"]) - rval["folder_id"] = f"F{str(trans.security.encode_id(rval['folder_id']))}" - rval["parent_library_id"] = trans.security.encode_id(rval["parent_library_id"]) - + rval_dict = content.to_dict(view="element") tag_manager = tags.GalaxyTagHandler(trans.sa_session) - rval["tags"] = tag_manager.get_tags_list(content.library_dataset_dataset_association.tags) + rval_dict["tags"] = tag_manager.get_tags_list(content.library_dataset_dataset_association.tags) + rval = LibraryContentsShowDatasetResponse(**rval_dict) return rval def create( self, trans: ProvidesHistoryContext, library_id: LibraryFolderDatabaseIdField, - payload: Union[LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload], - ) -> LibraryContentsCreateResponse: + payload: Union[ + LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload, LibraryContentsCollectionCreatePayload + ], + ) -> Union[ + LibraryContentsCreateFolderListResponse, + LibraryContentsCreateDatasetResponse, + LibraryContentsCreateDatasetListResponse, + ]: """Create a new library file or folder.""" if trans.user_is_bootstrap_admin: raise exceptions.RealUserRequiredException("Only real users can create a new library file or folder.") - if not payload.create_type: - raise exceptions.RequestParameterMissingException("Missing required 'create_type' parameter.") - create_type = payload.create_type - if create_type not in ("file", "folder", "collection"): - raise exceptions.RequestParameterInvalidException( - f"Invalid value for 
'create_type' parameter ( {create_type} ) specified." - ) - if payload.upload_option and payload.upload_option not in ( - "upload_file", - "upload_directory", - "upload_paths", - ): - raise exceptions.RequestParameterInvalidException( - f"Invalid value for 'upload_option' parameter ( {payload.upload_option} ) specified." - ) - if not payload.folder_id: - raise exceptions.RequestParameterMissingException("Missing required 'folder_id' parameter.") - folder_id = payload.folder_id # security is checked in the downstream controller - parent = self.get_library_folder(trans, folder_id, check_ownership=False, check_accessible=False) + parent = self.get_library_folder(trans, payload.folder_id, check_ownership=False, check_accessible=False) # The rest of the security happens in the library_common controller. # are we copying an HDA to the library folder? # we'll need the id and any message to attach, then branch to that private function - from_hda_id, from_hdca_id, ldda_message = ( - payload.from_hda_id, - payload.from_hdca_id, - payload.ldda_message, - ) - if create_type == "file": - if from_hda_id: - return self._copy_hda_to_library_folder(trans, self.hda_manager, from_hda_id, folder_id, ldda_message) - if from_hdca_id: - return self._copy_hdca_to_library_folder(trans, self.hda_manager, from_hdca_id, folder_id, ldda_message) - - # check for extended metadata, store it and pop it out of the param - # otherwise sanitize_param will have a fit - ex_meta_payload = payload.extended_metadata + if payload.create_type == "file": + if payload.from_hda_id: + return self._copy_hda_to_library_folder( + trans, self.hda_manager, payload.from_hda_id, payload.folder_id, payload.ldda_message + ) + elif payload.from_hdca_id: + return self._copy_hdca_to_library_folder( + trans, self.hda_manager, payload.from_hdca_id, payload.folder_id, payload.ldda_message + ) + else: + raise exceptions.RequestParameterInvalidException("Invalid create request") # Now create the desired content object, either 
file or folder. - if create_type == "file" and isinstance(payload, LibraryContentsFileCreatePayload): - output = self._upload_library_dataset(trans, folder_id, payload) - elif create_type == "folder" and isinstance(payload, LibraryContentsFolderCreatePayload): - output = self._create_folder(trans, folder_id, payload) - elif create_type == "collection": + if payload.create_type == "file": + output = self._upload_library_dataset( + trans, payload.folder_id, cast(LibraryContentsFileCreatePayload, payload) + ) + elif payload.create_type == "folder": + output = self._create_folder(trans, payload.folder_id, cast(LibraryContentsFolderCreatePayload, payload)) + elif payload.create_type == "collection": # Not delegating to library_common, so need to check access to parent # folder here. + payload = cast(LibraryContentsCollectionCreatePayload, payload) self.check_user_can_add_to_library_item(trans, parent, check_accessible=True) - create_params = api_payload_to_create_params(payload) - create_params["parent"] = parent - dataset_collection_manager = trans.app.dataset_collection_manager - dataset_collection_instance = dataset_collection_manager.create(**create_params) - return [ - dictify_dataset_collection_instance( - dataset_collection_instance, security=trans.security, url_builder=trans.url_builder, parent=parent - ) - ] + create_params = dict( + collection_type=payload.collection_type, + element_identifiers=payload.element_identifiers, + name=payload.name or None, + hide_source_items=payload.hide_source_items or False, + copy_elements=payload.copy_elements or False, + parent=parent, + ) + dataset_collection_instance = trans.app.dataset_collection_manager.create(**create_params) + dataset_collection = dictify_dataset_collection_instance( + dataset_collection_instance, security=trans.security, url_builder=trans.url_builder, parent=parent + ) + return [dataset_collection] + # return LibraryContentsCreateListResponse(root=dataset_collection) rval = [] for v in output.values(): 
- if ex_meta_payload is not None: + if payload.extended_metadata is not None: # If there is extended metadata, store it, attach it to the dataset, and index it - self.create_extended_metadata(trans, ex_meta_payload) + self.create_extended_metadata(trans, payload.extended_metadata) if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation): v = v.library_dataset - encoded_id = trans.security.encode_id(v.id) - if create_type == "folder": - encoded_id = f"F{encoded_id}" - rval.append( - dict( - id=encoded_id, - name=v.name, - url=( - trans.url_builder("library_content", library_id=library_id, id=encoded_id) - if trans.url_builder - else None - ), - ) - ) + url = self._url_for(trans, library_id, v.id, payload.create_type) + rval.append(dict(id=v.id, name=v.name, url=url)) return rval + # return LibraryContentsCreateListResponse(root=rval) def update( self, @@ -251,9 +217,10 @@ def update( ) -> None: """Create an ImplicitlyConvertedDatasetAssociation.""" if payload.converted_dataset_id: - converted_id = payload.converted_dataset_id content = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=False) - content_conv = self.get_library_dataset(trans, converted_id, check_ownership=False, check_accessible=False) + content_conv = self.get_library_dataset( + trans, payload.converted_dataset_id, check_ownership=False, check_accessible=False + ) assoc = trans.app.model.ImplicitlyConvertedDatasetAssociation( parent=content.library_dataset_dataset_association, dataset=content_conv.library_dataset_dataset_association, @@ -269,10 +236,8 @@ def delete( trans: ProvidesHistoryContext, id: DecodedDatabaseIdField, payload: LibraryContentsDeletePayload, - ) -> LibraryContentsDeleteResponse: + ): """Delete the LibraryDataset with the given ``id``.""" - purge = payload.purge - rval = {"id": id} try: ld = self.get_library_dataset(trans, id, check_ownership=False, check_accessible=True) @@ -285,7 +250,7 @@ def delete( ) ld.deleted = True - if purge: + if 
payload.purge: ld.purged = True trans.sa_session.add(ld) with transaction(trans.sa_session): @@ -328,6 +293,18 @@ def _decode_library_content_id( f"Malformed library content id ( {str(content_id)} ) specified, unable to decode." ) + def _url_for( + self, + trans: ProvidesUserContext, + library_id: DecodedDatabaseIdField, + id: int, + type: str, + ) -> Optional[str]: + encoded_id = trans.security.encode_id(id) + if type == "folder": + encoded_id = f"F{encoded_id}" + return trans.url_builder("library_content", library_id=library_id, id=encoded_id) if trans.url_builder else None + def _upload_library_dataset( self, trans: ProvidesHistoryContext, @@ -335,12 +312,6 @@ def _upload_library_dataset( payload: LibraryContentsFileCreatePayload, ) -> Dict: replace_dataset: Optional[LibraryDataset] = None - upload_option = payload.upload_option - dbkey = payload.dbkey - if isinstance(dbkey, list): - last_used_build = dbkey[0] - else: - last_used_build = dbkey is_admin = trans.user_is_admin current_user_roles = trans.get_current_user_roles() folder = trans.sa_session.get(LibraryFolder, folder_id) @@ -349,34 +320,25 @@ def _upload_library_dataset( self._check_access(trans, is_admin, folder, current_user_roles) self._check_add(trans, is_admin, folder, current_user_roles) library = folder.parent_library - if folder and last_used_build in ["None", None, "?"]: - last_used_build = folder.genome_build - error = False - if upload_option == "upload_paths": + if payload.upload_option == "upload_paths": validate_path_upload(trans) # Duplicate check made in _upload_dataset. - elif roles := payload.roles: + elif payload.roles: # Check to see if the user selected roles to associate with the DATASET_ACCESS permission # on the dataset that would cause accessibility issues. 
- vars = dict(DATASET_ACCESS_in=roles) + vars = dict(DATASET_ACCESS_in=payload.roles) permissions, in_roles, error, message = trans.app.security_agent.derive_roles_from_access( trans, library.id, "api", library=True, **vars ) - if error: - raise exceptions.RequestParameterInvalidException(message) - else: - created_outputs_dict = self._upload_dataset( - trans, payload=payload, folder_id=folder.id, replace_dataset=replace_dataset - ) - if not created_outputs_dict: - raise exceptions.RequestParameterInvalidException("Upload failed") - return created_outputs_dict + if error: + raise exceptions.RequestParameterInvalidException(message) + created_outputs_dict = self._upload_dataset( + trans, payload=payload, folder_id=folder.id, replace_dataset=replace_dataset + ) + if not created_outputs_dict: + raise exceptions.RequestParameterInvalidException("Upload failed") + return created_outputs_dict - def _traverse( - self, - trans: ProvidesUserContext, - folder: LibraryFolder, - current_user_roles: List[Role], - ) -> List: + def _traverse(self, trans: ProvidesUserContext, folder, current_user_roles): admin = trans.user_is_admin rval = [] for subfolder in folder.active_folders: @@ -410,8 +372,9 @@ def _upload_dataset( # Set up the traditional tool state/params cntrller = "api" tool_id = "upload1" - file_type = payload.file_type - upload_common.validate_datatype_extension(datatypes_registry=trans.app.datatypes_registry, ext=file_type) + upload_common.validate_datatype_extension( + datatypes_registry=trans.app.datatypes_registry, ext=payload.file_type + ) tool = trans.app.toolbox.get_tool(tool_id) state = tool.new_state(trans) populate_state(trans, tool.inputs, payload.model_dump(), state.inputs) @@ -421,11 +384,9 @@ def _upload_dataset( if input.type == "upload_dataset": dataset_upload_inputs.append(input) # Library-specific params - server_dir = payload.server_dir - upload_option = payload.upload_option - if upload_option == "upload_directory": - full_dir, import_dir_desc = 
validate_server_directory_upload(trans, server_dir) - elif upload_option == "upload_paths": + if payload.upload_option == "upload_directory": + full_dir, import_dir_desc = validate_server_directory_upload(trans, payload.server_dir) + elif payload.upload_option == "upload_paths": # Library API already checked this - following check isn't actually needed. validate_path_upload(trans) # Some error handling should be added to this method. @@ -436,20 +397,20 @@ def _upload_dataset( except Exception: raise exceptions.InvalidFileFormatError("Invalid folder specified") # Proceed with (mostly) regular upload processing if we're still errorless - if upload_option == "upload_file": + if payload.upload_option == "upload_file": tool_params = upload_common.persist_uploads(tool_params, trans) uploaded_datasets = upload_common.get_uploaded_datasets( trans, cntrller, tool_params, dataset_upload_inputs, library_bunch=library_bunch ) - elif upload_option == "upload_directory": + elif payload.upload_option == "upload_directory": uploaded_datasets = self._get_server_dir_uploaded_datasets( trans, payload, full_dir, import_dir_desc, library_bunch ) - elif upload_option == "upload_paths": + elif payload.upload_option == "upload_paths": uploaded_datasets, _, _ = self._get_path_paste_uploaded_datasets( trans, payload.model_dump(), library_bunch, 200, None ) - if upload_option == "upload_file" and not uploaded_datasets: + if payload.upload_option == "upload_file" and not uploaded_datasets: raise exceptions.RequestParameterInvalidException("Select a file, enter a URL or enter text") json_file_path = upload_common.create_paramfile(trans, uploaded_datasets) data_list = [ud.data for ud in uploaded_datasets] @@ -492,8 +453,7 @@ def _get_server_dir_files( for entry in os.listdir(full_dir): # Only import regular files path = os.path.join(full_dir, entry) - link_data_only = payload.link_data_only - if os.path.islink(full_dir) and link_data_only == "link_to_files": + if os.path.islink(full_dir) and 
payload.link_data_only == "link_to_files": # If we're linking instead of copying and the # sub-"directory" in the import dir is actually a symlink, # dereference the symlink, but not any of its contents. @@ -502,7 +462,7 @@ def _get_server_dir_files( path = os.path.join(link_path, entry) else: path = os.path.abspath(os.path.join(link_path, entry)) - elif os.path.islink(path) and os.path.isfile(path) and link_data_only == "link_to_files": + elif os.path.islink(path) and os.path.isfile(path) and payload.link_data_only == "link_to_files": # If we're linking instead of copying and the "file" in the # sub-directory of the import dir is actually a symlink, # dereference the symlink (one dereference only, Vasili). @@ -553,7 +513,7 @@ def _check_access( self, trans: ProvidesUserContext, is_admin: bool, - item: LibraryFolder, + item, current_user_roles: List[Role], ) -> None: if isinstance(item, trans.model.HistoryDatasetAssociation): @@ -589,7 +549,7 @@ def _check_add( trans: ProvidesUserContext, is_admin: bool, item: LibraryFolder, - current_user_roles : List[Role], + current_user_roles: List[Role], ) -> None: # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission. 
if not (is_admin or trans.app.security_agent.can_add_library_item(current_user_roles, item)): diff --git a/lib/galaxy_test/api/test_library_contents.py b/lib/galaxy_test/api/test_library_contents.py index 862f5cc4c89e..2bc097d82a43 100644 --- a/lib/galaxy_test/api/test_library_contents.py +++ b/lib/galaxy_test/api/test_library_contents.py @@ -1,5 +1,3 @@ -from typing import Any - from galaxy_test.base.populators import ( DatasetCollectionPopulator, DatasetPopulator, @@ -56,8 +54,8 @@ def test_index(self): self._assert_has_keys(item, "id", "name", "type", "url") def test_get_library_contents_invalid_id(self): - invalid_item_id = "invalid_id" - response = self._get(f"/api/libraries/{invalid_item_id}/contents") + invalid_library_id = "invalid_id" + response = self._get(f"/api/libraries/{invalid_library_id}/contents") self._assert_status_code_is(response, 400) def test_get_library_folder(self): @@ -192,7 +190,7 @@ def test_delete_invalid_library_item(self): response_invalid = self._delete(f"/api/libraries/{library_id}/contents/{invalid_item_id}") self._assert_status_code_is(response_invalid, 400) - def _create_library_content(self, type) -> Any: + def _create_library_content(self, type): folder_id = self.library["root_folder_id"] library_id = self.library["id"] From 0f26838119ecb76294f972b37f54796b6c790c0f Mon Sep 17 00:00:00 2001 From: Arash Date: Fri, 20 Sep 2024 16:12:24 +0200 Subject: [PATCH 07/22] Refactor library content routes and controllers --- lib/galaxy/schema/library_contents.py | 40 ++-- .../webapps/galaxy/api/library_contents.py | 17 +- .../galaxy/services/library_contents.py | 174 ++++++++++-------- 3 files changed, 135 insertions(+), 96 deletions(-) diff --git a/lib/galaxy/schema/library_contents.py b/lib/galaxy/schema/library_contents.py index d02c0350989b..d0f423c9dd9e 100644 --- a/lib/galaxy/schema/library_contents.py +++ b/lib/galaxy/schema/library_contents.py @@ -125,11 +125,11 @@ class 
LibraryContentsFileCreatePayload(LibraryContentsCreatePayload): class LibraryContentsFolderCreatePayload(LibraryContentsCreatePayload): name: Optional[str] = Field( "", - title="(only if create_type is 'folder') name of the folder to create", + title="name of the folder to create", ) description: Optional[str] = Field( "", - title="(only if create_type is 'folder') description of the folder to create", + title="description of the folder to create", ) @@ -211,15 +211,15 @@ class LibraryContentsShowDatasetResponse(LibraryContentsShowResponse): folder_id: EncodedLibraryFolderDatabaseIdField state: str file_name: str - created_from_basename: str + created_from_basename: Optional[str] uploaded_by: str message: Optional[str] date_uploaded: str file_size: int file_ext: str data_type: str - misc_info: str - misc_blurb: str + misc_info: Optional[str] + misc_blurb: Optional[str] peek: Optional[str] uuid: str metadata_dbkey: str @@ -237,28 +237,34 @@ class LibraryContentsCreateFolderListResponse(RootModel): root: List[LibraryContentsCreateFolderResponse] -class LibraryContentsCreateDatasetResponse(Model): - id: EncodedDatabaseIdField +class LibraryContentsCreateDatasetResponseBase(Model): + id: str # should change to EncodedDatabaseIdField latter hda_ldda: str model_class: str name: str deleted: bool visible: bool state: str - library_dataset_id: EncodedDatabaseIdField + library_dataset_id: str # should change to EncodedDatabaseIdField latter file_size: int file_name: str update_time: str file_ext: str data_type: str genome_build: str - misc_info: str - misc_blurb: str - created_from_basename: str + misc_info: Optional[str] + misc_blurb: Optional[str] + created_from_basename: Optional[str] uuid: str - parent_library_id: EncodedDatabaseIdField + parent_library_id: str # should change to EncodedDatabaseIdField latter metadata_dbkey: str - metadata_data_lines: int + + +class LibraryContentsCreateDatasetResponse(LibraryContentsCreateDatasetResponseBase): + metadata_data_lines: 
Optional[int] + + +class LibraryContentsCreateDatasetExtendedResponse(LibraryContentsCreateDatasetResponse): metadata_comment_lines: Union[str, int] metadata_columns: int metadata_column_types: List[str] @@ -267,8 +273,12 @@ class LibraryContentsCreateDatasetResponse(Model): class LibraryContentsCreateDatasetListResponse(RootModel): - root: List[LibraryContentsCreateDatasetResponse] + root: List[LibraryContentsCreateDatasetResponseBase] class LibraryContentsDeleteResponse(Model): - pass + id: EncodedDatabaseIdField + deleted: bool + +class LibraryContentsPurgedResponse(LibraryContentsDeleteResponse): + purged: bool \ No newline at end of file diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py index 24d122e70775..3c5406faee43 100644 --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -3,7 +3,12 @@ """ import logging -from typing import Union +from typing import ( + Optional, + Union, +) + +from fastapi import Body from galaxy.managers.context import ( ProvidesHistoryContext, @@ -12,6 +17,7 @@ from galaxy.schema.fields import DecodedDatabaseIdField from galaxy.schema.library_contents import ( LibraryContentsCollectionCreatePayload, + LibraryContentsCreateDatasetExtendedResponse, LibraryContentsCreateDatasetListResponse, LibraryContentsCreateDatasetResponse, LibraryContentsCreateFolderListResponse, @@ -72,12 +78,15 @@ def show( def create( self, library_id: DecodedDatabaseIdField, - payload: Union[LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload, LibraryContentsCollectionCreatePayload], + payload: Union[ + LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload, LibraryContentsCollectionCreatePayload + ], trans: ProvidesHistoryContext = DependsOnTrans, ) -> Union[ LibraryContentsCreateFolderListResponse, LibraryContentsCreateDatasetResponse, LibraryContentsCreateDatasetListResponse, + 
LibraryContentsCreateDatasetExtendedResponse, ]: return self.service.create(trans, library_id, payload) @@ -103,7 +112,9 @@ def delete( self, library_id: DecodedDatabaseIdField, id: DecodedDatabaseIdField, - payload: LibraryContentsDeletePayload = LibraryContentsDeletePayload(), + payload: Optional[LibraryContentsDeletePayload] = Body(None), trans: ProvidesHistoryContext = DependsOnTrans, ): + if payload is None: + payload = LibraryContentsDeletePayload() return self.service.delete(trans, id, payload) diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py index 0f9b1f893c4d..ccfc3559e1fc 100644 --- a/lib/galaxy/webapps/galaxy/services/library_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -24,7 +24,11 @@ validate_path_upload, validate_server_directory_upload, ) -from galaxy.managers.collections_util import dictify_dataset_collection_instance +from galaxy.managers.collections import DatasetCollectionManager +from galaxy.managers.collections_util import ( + api_payload_to_create_params, + dictify_dataset_collection_instance, +) from galaxy.managers.context import ( ProvidesHistoryContext, ProvidesUserContext, @@ -44,9 +48,12 @@ ) from galaxy.schema.library_contents import ( LibraryContentsCollectionCreatePayload, + LibraryContentsCreateDatasetExtendedResponse, LibraryContentsCreateDatasetListResponse, LibraryContentsCreateDatasetResponse, LibraryContentsCreateFolderListResponse, + LibraryContentsCreateFolderResponse, + LibraryContentsCreatePayload, LibraryContentsDeletePayload, LibraryContentsDeleteResponse, LibraryContentsFileCreatePayload, @@ -85,9 +92,15 @@ class LibraryContentsService(ServiceBase, LibraryActions, UsesLibraryMixinItems, Interface/service shared by controllers for interacting with the contents of a library contents. 
""" - def __init__(self, security: IdEncodingHelper, hda_manager: HDAManager): + def __init__( + self, + security: IdEncodingHelper, + hda_manager: HDAManager, + collection_manager: DatasetCollectionManager, + ): super().__init__(security) self.hda_manager = hda_manager + self.collection_manager = collection_manager def index( self, @@ -149,6 +162,7 @@ def create( LibraryContentsCreateFolderListResponse, LibraryContentsCreateDatasetResponse, LibraryContentsCreateDatasetListResponse, + LibraryContentsCreateDatasetExtendedResponse, ]: """Create a new library file or folder.""" if trans.user_is_bootstrap_admin: @@ -161,53 +175,28 @@ def create( # we'll need the id and any message to attach, then branch to that private function if payload.create_type == "file": if payload.from_hda_id: - return self._copy_hda_to_library_folder( + rval = self._copy_hda_to_library_folder( trans, self.hda_manager, payload.from_hda_id, payload.folder_id, payload.ldda_message ) + if "metadata_comment_lines" in rval: + return LibraryContentsCreateDatasetExtendedResponse(**rval) + else: + return LibraryContentsCreateDatasetResponse(**rval) elif payload.from_hdca_id: - return self._copy_hdca_to_library_folder( + rval = self._copy_hdca_to_library_folder( trans, self.hda_manager, payload.from_hdca_id, payload.folder_id, payload.ldda_message ) - else: - raise exceptions.RequestParameterInvalidException("Invalid create request") + return LibraryContentsCreateDatasetListResponse(root=rval) # Now create the desired content object, either file or folder. 
if payload.create_type == "file": - output = self._upload_library_dataset( - trans, payload.folder_id, cast(LibraryContentsFileCreatePayload, payload) - ) + return self._upload_library_dataset(trans, cast(LibraryContentsFileCreatePayload, payload), library_id) elif payload.create_type == "folder": - output = self._create_folder(trans, payload.folder_id, cast(LibraryContentsFolderCreatePayload, payload)) + return self._create_folder(trans, cast(LibraryContentsFolderCreatePayload, payload), library_id) elif payload.create_type == "collection": - # Not delegating to library_common, so need to check access to parent - # folder here. - payload = cast(LibraryContentsCollectionCreatePayload, payload) - self.check_user_can_add_to_library_item(trans, parent, check_accessible=True) - create_params = dict( - collection_type=payload.collection_type, - element_identifiers=payload.element_identifiers, - name=payload.name or None, - hide_source_items=payload.hide_source_items or False, - copy_elements=payload.copy_elements or False, - parent=parent, - ) - dataset_collection_instance = trans.app.dataset_collection_manager.create(**create_params) - dataset_collection = dictify_dataset_collection_instance( - dataset_collection_instance, security=trans.security, url_builder=trans.url_builder, parent=parent - ) - return [dataset_collection] - # return LibraryContentsCreateListResponse(root=dataset_collection) - rval = [] - for v in output.values(): - if payload.extended_metadata is not None: - # If there is extended metadata, store it, attach it to the dataset, and index it - self.create_extended_metadata(trans, payload.extended_metadata) - if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation): - v = v.library_dataset - url = self._url_for(trans, library_id, v.id, payload.create_type) - rval.append(dict(id=v.id, name=v.name, url=url)) - return rval - # return LibraryContentsCreateListResponse(root=rval) + return self._create_collection(trans, 
cast(LibraryContentsCollectionCreatePayload, payload), parent) + else: + raise exceptions.RequestParameterInvalidException("Invalid create_type specified.") def update( self, @@ -305,39 +294,6 @@ def _url_for( encoded_id = f"F{encoded_id}" return trans.url_builder("library_content", library_id=library_id, id=encoded_id) if trans.url_builder else None - def _upload_library_dataset( - self, - trans: ProvidesHistoryContext, - folder_id: LibraryFolderDatabaseIdField, - payload: LibraryContentsFileCreatePayload, - ) -> Dict: - replace_dataset: Optional[LibraryDataset] = None - is_admin = trans.user_is_admin - current_user_roles = trans.get_current_user_roles() - folder = trans.sa_session.get(LibraryFolder, folder_id) - if not folder: - raise exceptions.RequestParameterInvalidException("Invalid folder id specified.") - self._check_access(trans, is_admin, folder, current_user_roles) - self._check_add(trans, is_admin, folder, current_user_roles) - library = folder.parent_library - if payload.upload_option == "upload_paths": - validate_path_upload(trans) # Duplicate check made in _upload_dataset. - elif payload.roles: - # Check to see if the user selected roles to associate with the DATASET_ACCESS permission - # on the dataset that would cause accessibility issues. 
- vars = dict(DATASET_ACCESS_in=payload.roles) - permissions, in_roles, error, message = trans.app.security_agent.derive_roles_from_access( - trans, library.id, "api", library=True, **vars - ) - if error: - raise exceptions.RequestParameterInvalidException(message) - created_outputs_dict = self._upload_dataset( - trans, payload=payload, folder_id=folder.id, replace_dataset=replace_dataset - ) - if not created_outputs_dict: - raise exceptions.RequestParameterInvalidException("Upload failed") - return created_outputs_dict - def _traverse(self, trans: ProvidesUserContext, folder, current_user_roles): admin = trans.user_is_admin rval = [] @@ -362,12 +318,36 @@ def _traverse(self, trans: ProvidesUserContext, folder, current_user_roles): rval.append(ld) return rval + def _upload_library_dataset( + self, + trans: ProvidesHistoryContext, + payload: LibraryContentsFileCreatePayload, + library_id: DecodedDatabaseIdField, + ) -> LibraryContentsCreateFolderListResponse: + is_admin = trans.user_is_admin + current_user_roles = trans.get_current_user_roles() + folder = trans.sa_session.get(LibraryFolder, payload.folder_id) + if not folder: + raise exceptions.RequestParameterInvalidException("Invalid folder id specified.") + self._check_access(trans, is_admin, folder, current_user_roles) + self._check_add(trans, is_admin, folder, current_user_roles) + if payload.roles: + # Check to see if the user selected roles to associate with the DATASET_ACCESS permission + # on the dataset that would cause accessibility issues. 
+ vars = dict(DATASET_ACCESS_in=payload.roles) + permissions, in_roles, error, message = trans.app.security_agent.derive_roles_from_access( + trans, folder.parent_library.id, "api", library=True, **vars + ) + if error: + raise exceptions.RequestParameterInvalidException(message) + created_outputs_dict = self._upload_dataset(trans, payload=payload, folder_id=folder.id) + return self._convert_output_to_rval(trans, payload, created_outputs_dict, library_id) + def _upload_dataset( self, trans: ProvidesHistoryContext, payload: LibraryContentsFileCreatePayload, folder_id: LibraryFolderDatabaseIdField, - replace_dataset: Optional[LibraryDataset], ) -> Dict[str, List]: # Set up the traditional tool state/params cntrller = "api" @@ -393,7 +373,7 @@ def _upload_dataset( try: # FIXME: instead of passing params here ( which have been processed by util.Params(), the original payload # should be passed so that complex objects that may have been included in the initial request remain. - library_bunch = upload_common.handle_library_params(trans, payload.model_dump(), folder_id, replace_dataset) + library_bunch = upload_common.handle_library_params(trans, payload.model_dump(), folder_id, None) except Exception: raise exceptions.InvalidFileFormatError("Invalid folder specified") # Proceed with (mostly) regular upload processing if we're still errorless @@ -421,6 +401,8 @@ def _upload_dataset( trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params ) trans.app.job_manager.enqueue(job, tool=tool) + if not output: + raise exceptions.RequestParameterInvalidException("Upload failed") return output def _get_server_dir_uploaded_datasets( @@ -484,12 +466,12 @@ def _get_server_dir_files( def _create_folder( self, trans: ProvidesUserContext, - parent_id: LibraryFolderDatabaseIdField, payload: LibraryContentsFolderCreatePayload, - ) -> Dict[str, LibraryFolder]: + library_id: DecodedDatabaseIdField, + ) -> LibraryContentsCreateFolderListResponse: 
is_admin = trans.user_is_admin current_user_roles = trans.get_current_user_roles() - parent_folder = trans.sa_session.get(LibraryFolder, parent_id) + parent_folder = trans.sa_session.get(LibraryFolder, payload.folder_id) if not parent_folder: raise exceptions.RequestParameterInvalidException("Invalid folder id specified.") # Check the library which actually contains the user-supplied parent folder, not the user-supplied @@ -507,7 +489,43 @@ def _create_folder( trans.sa_session.commit() # New folders default to having the same permissions as their parent folder trans.app.security_agent.copy_library_permissions(trans, parent_folder, new_folder) - return dict(created=new_folder) + new_folder_dict = dict(created=new_folder) + return self._convert_output_to_rval(trans, payload, new_folder_dict, library_id) + + def _convert_output_to_rval( + self, + trans: ProvidesUserContext, + payload: LibraryContentsCreatePayload, + output: Dict, + library_id: DecodedDatabaseIdField, + ) -> LibraryContentsCreateFolderListResponse: + rval = [] + for v in output.values(): + if payload.extended_metadata is not None: + # If there is extended metadata, store it, attach it to the dataset, and index it + self.create_extended_metadata(trans, payload.extended_metadata) + if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation): + v = v.library_dataset + url = self._url_for(trans, library_id, v.id, payload.create_type) + rval.append(LibraryContentsCreateFolderResponse(id=v.id, name=v.name, url=url)) + return LibraryContentsCreateFolderListResponse(root=rval) + + def _create_collection( + self, + trans: ProvidesUserContext, + payload: LibraryContentsCollectionCreatePayload, + parent: LibraryFolder, + ) -> LibraryContentsCreateDatasetListResponse: + # Not delegating to library_common, so need to check access to parent folder here. 
+ self.check_user_can_add_to_library_item(trans, parent, check_accessible=True) + create_params = api_payload_to_create_params(payload.model_dump()) + create_params["trans"] = trans + create_params["parent"] = parent + dataset_collection_instance = self.collection_manager.create(**create_params) + dataset_collection = dictify_dataset_collection_instance( + dataset_collection_instance, security=trans.security, url_builder=trans.url_builder, parent=parent + ) + return LibraryContentsCreateDatasetListResponse(root=[dataset_collection]) def _check_access( self, From dfe82e3581150ed7ac3cab1f9b6001b242f5825c Mon Sep 17 00:00:00 2001 From: Arash Date: Tue, 24 Sep 2024 17:52:56 +0200 Subject: [PATCH 08/22] Refactor library content routes and controllers --- client/src/api/schema/schema.ts | 676 ++++++++++++++++++ lib/galaxy/schema/library_contents.py | 29 +- .../webapps/galaxy/api/library_contents.py | 34 +- .../galaxy/services/library_contents.py | 24 +- 4 files changed, 725 insertions(+), 38 deletions(-) diff --git a/client/src/api/schema/schema.ts b/client/src/api/schema/schema.ts index a3a790978166..b23ac2c2f1e6 100644 --- a/client/src/api/schema/schema.ts +++ b/client/src/api/schema/schema.ts @@ -3028,6 +3028,63 @@ export interface paths { patch?: never; trace?: never; }; + "/api/libraries/{library_id}/contents": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Return a list of library files and folders. + * @deprecated + * @description This endpoint is deprecated. Please use GET /api/folders/{folder_id}/contents instead. + */ + get: operations["index_api_libraries__library_id__contents_get"]; + put?: never; + /** + * Create a new library file or folder. + * @deprecated + * @description This endpoint is deprecated. Please use POST /api/folders/{folder_id} or POST /api/folders/{folder_id}/contents instead. 
+ */ + post: operations["create_api_libraries__library_id__contents_post"]; + delete?: never; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; + "/api/libraries/{library_id}/contents/{id}": { + parameters: { + query?: never; + header?: never; + path?: never; + cookie?: never; + }; + /** + * Return a library file or folder. + * @deprecated + * @description This endpoint is deprecated. Please use GET /api/libraries/datasets/{library_id} instead. + */ + get: operations["show_api_libraries__library_id__contents__id__get"]; + /** + * Update a library file or folder. + * @deprecated + * @description This endpoint is deprecated. Please use PATCH /api/libraries/datasets/{library_id} instead. + */ + put: operations["update_api_libraries__library_id__contents__id__put"]; + post?: never; + /** + * Delete a library file or folder. + * @deprecated + * @description This endpoint is deprecated. Please use DELETE /api/libraries/datasets/{library_id} instead. + */ + delete: operations["delete_api_libraries__library_id__contents__id__delete"]; + options?: never; + head?: never; + patch?: never; + trace?: never; + }; "/api/licenses": { parameters: { query?: never; @@ -6907,6 +6964,11 @@ export interface components { */ url: string; }; + /** + * CreateType + * @enum {string} + */ + CreateType: "file" | "folder" | "collection"; /** CreatedEntryResponse */ CreatedEntryResponse: { /** @@ -12806,6 +12868,368 @@ export interface components { */ total: number; }; + /** LibraryContentsCollectionCreatePayload */ + LibraryContentsCollectionCreatePayload: { + /** the type of collection to create */ + collection_type: string; + /** + * if True, copy the elements into the collection + * @default false + */ + copy_elements: boolean | null; + /** the type of item to create */ + create_type: components["schemas"]["CreateType"]; + /** list of dictionaries containing the element identifiers for the collection */ + element_identifiers: Record[]; + /** sub-dictionary containing 
any extended metadata to associate with the item */ + extended_metadata?: Record | null; + /** + * the encoded id of the parent folder of the new item + * @example 0123456789ABCDEF + */ + folder_id: string; + /** (only if create_type is 'file') the encoded id of an accessible HDA to copy into the library */ + from_hda_id?: string | null; + /** (only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library */ + from_hdca_id?: string | null; + /** + * if True, hide the source items in the collection + * @default false + */ + hide_source_items: boolean | null; + /** + * the new message attribute of the LDDA created + * @default + */ + ldda_message: string | null; + /** the name of the collection */ + name?: string | null; + /** + * create tags on datasets using the file's original name + * @default false + */ + tag_using_filenames: boolean | null; + /** + * create the given list of tags on datasets + * @default [] + */ + tags: string[] | null; + /** + * the method to use for uploading files + * @default upload_file + */ + upload_option: components["schemas"]["UploadOption"] | null; + }; + /** LibraryContentsCreateDatasetListResponse */ + LibraryContentsCreateDatasetListResponse: components["schemas"]["LibraryContentsCreateDatasetResponse"][]; + /** LibraryContentsCreateDatasetResponse */ + LibraryContentsCreateDatasetResponse: { + /** Created From Basename */ + created_from_basename: string | null; + /** Data Type */ + data_type: string; + /** Deleted */ + deleted: boolean; + /** File Ext */ + file_ext: string; + /** File Name */ + file_name: string; + /** File Size */ + file_size: number; + /** Genome Build */ + genome_build: string; + /** Hda Ldda */ + hda_ldda: string; + /** Id */ + id: string; + /** Library Dataset Id */ + library_dataset_id: string; + /** Misc Blurb */ + misc_blurb: string | null; + /** Misc Info */ + misc_info: string | null; + /** Model Class */ + model_class: string; + /** Name */ + name: string; + /** Parent 
Library Id */ + parent_library_id: string; + /** State */ + state: string; + /** Update Time */ + update_time: string; + /** Uuid */ + uuid: string; + /** Visible */ + visible: boolean; + } & { + [key: string]: unknown; + }; + /** LibraryContentsCreateFolderListResponse */ + LibraryContentsCreateFolderListResponse: components["schemas"]["LibraryContentsCreateFolderResponse"][]; + /** LibraryContentsCreateFolderResponse */ + LibraryContentsCreateFolderResponse: { + /** + * Id + * @example 0123456789ABCDEF + */ + id: string; + /** Name */ + name: string; + /** Url */ + url: string; + }; + /** LibraryContentsDeletePayload */ + LibraryContentsDeletePayload: { + /** + * if True, purge the library dataset + * @default false + */ + purge: boolean | null; + }; + /** LibraryContentsDeleteResponse */ + LibraryContentsDeleteResponse: { + /** Deleted */ + deleted: boolean; + /** + * Id + * @example 0123456789ABCDEF + */ + id: string; + }; + /** LibraryContentsFileCreatePayload */ + LibraryContentsFileCreatePayload: { + /** the type of item to create */ + create_type: components["schemas"]["CreateType"]; + /** + * database key + * @default ? 
+ */ + dbkey: string | unknown[] | null; + /** sub-dictionary containing any extended metadata to associate with the item */ + extended_metadata?: Record | null; + /** file type */ + file_type?: string | null; + /** + * (only if upload_option is 'upload_paths' and the user is an admin) file paths on the Galaxy server to upload to the library, one file per line + * @default + */ + filesystem_paths: string | null; + /** + * the encoded id of the parent folder of the new item + * @example 0123456789ABCDEF + */ + folder_id: string; + /** (only if create_type is 'file') the encoded id of an accessible HDA to copy into the library */ + from_hda_id?: string | null; + /** (only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library */ + from_hdca_id?: string | null; + /** + * the new message attribute of the LDDA created + * @default + */ + ldda_message: string | null; + /** + * (only when upload_option is 'upload_directory' or 'upload_paths').Setting to 'link_to_files' symlinks instead of copying the files + * @default copy_files + */ + link_data_only: components["schemas"]["LinkDataOnly"] | null; + /** + * user selected roles + * @default + */ + roles: string | null; + /** + * (only if upload_option is 'upload_directory') relative path of the subdirectory of Galaxy ``library_import_dir`` (if admin) or ``user_library_import_dir`` (if non-admin) to upload. All and only the files (i.e. no subdirectories) contained in the specified directory will be uploaded. 
+ * @default + */ + server_dir: string | null; + /** + * create tags on datasets using the file's original name + * @default false + */ + tag_using_filenames: boolean | null; + /** + * create the given list of tags on datasets + * @default [] + */ + tags: string[] | null; + /** + * the method to use for uploading files + * @default upload_file + */ + upload_option: components["schemas"]["UploadOption"] | null; + /** UUID of the dataset to upload */ + uuid?: string | null; + }; + /** LibraryContentsFolderCreatePayload */ + LibraryContentsFolderCreatePayload: { + /** the type of item to create */ + create_type: components["schemas"]["CreateType"]; + /** + * description of the folder to create + * @default + */ + description: string | null; + /** sub-dictionary containing any extended metadata to associate with the item */ + extended_metadata?: Record | null; + /** + * the encoded id of the parent folder of the new item + * @example 0123456789ABCDEF + */ + folder_id: string; + /** (only if create_type is 'file') the encoded id of an accessible HDA to copy into the library */ + from_hda_id?: string | null; + /** (only if create_type is 'file') the encoded id of an accessible HDCA to copy into the library */ + from_hdca_id?: string | null; + /** + * the new message attribute of the LDDA created + * @default + */ + ldda_message: string | null; + /** + * name of the folder to create + * @default + */ + name: string | null; + /** + * create tags on datasets using the file's original name + * @default false + */ + tag_using_filenames: boolean | null; + /** + * create the given list of tags on datasets + * @default [] + */ + tags: string[] | null; + /** + * the method to use for uploading files + * @default upload_file + */ + upload_option: components["schemas"]["UploadOption"] | null; + }; + /** LibraryContentsIndexDatasetResponse */ + LibraryContentsIndexDatasetResponse: { + /** + * Id + * @example 0123456789ABCDEF + */ + id: string; + /** Name */ + name: string; + /** 
Type */ + type: string; + /** Url */ + url: string; + }; + /** LibraryContentsIndexFolderResponse */ + LibraryContentsIndexFolderResponse: { + /** + * Id + * @example 0123456789ABCDEF + */ + id: string; + /** Name */ + name: string; + /** Type */ + type: string; + /** Url */ + url: string; + }; + /** LibraryContentsIndexListResponse */ + LibraryContentsIndexListResponse: ( + | components["schemas"]["LibraryContentsIndexFolderResponse"] + | components["schemas"]["LibraryContentsIndexDatasetResponse"] + )[]; + /** LibraryContentsShowDatasetResponse */ + LibraryContentsShowDatasetResponse: { + /** Created From Basename */ + created_from_basename: string | null; + /** Data Type */ + data_type: string; + /** Date Uploaded */ + date_uploaded: string; + /** File Ext */ + file_ext: string; + /** File Name */ + file_name: string; + /** File Size */ + file_size: number; + /** + * Folder Id + * @example 0123456789ABCDEF + */ + folder_id: string; + /** Genome Build */ + genome_build: string | null; + /** + * Id + * @example 0123456789ABCDEF + */ + id: string; + /** + * Ldda Id + * @example 0123456789ABCDEF + */ + ldda_id: string; + /** Message */ + message: string | null; + /** Misc Blurb */ + misc_blurb: string | null; + /** Misc Info */ + misc_info: string | null; + model_class: components["schemas"]["ModelClass"]; + /** Name */ + name: string; + /** + * Parent Library Id + * @example 0123456789ABCDEF + */ + parent_library_id: string; + /** Peek */ + peek: string | null; + /** State */ + state: string; + tags: components["schemas"]["TagCollection"]; + /** Update Time */ + update_time: string; + /** Uploaded By */ + uploaded_by: string; + /** Uuid */ + uuid: string; + } & { + [key: string]: unknown; + }; + /** LibraryContentsShowFolderResponse */ + LibraryContentsShowFolderResponse: { + /** Deleted */ + deleted: boolean; + /** Description */ + description: string; + /** Genome Build */ + genome_build: string | null; + /** + * Id + * @example 0123456789ABCDEF + */ + id: 
string; + /** Item Count */ + item_count: number; + /** Library Path */ + library_path: string[]; + model_class: components["schemas"]["ModelClass"]; + /** Name */ + name: string; + /** Parent Id */ + parent_id: string | null; + /** + * Parent Library Id + * @example 0123456789ABCDEF + */ + parent_library_id: string; + /** Update Time */ + update_time: string; + }; /** LibraryCurrentPermissions */ LibraryCurrentPermissions: { /** @@ -13261,6 +13685,11 @@ export interface components { /** Name */ name: string; }; + /** + * LinkDataOnly + * @enum {string} + */ + LinkDataOnly: "copy_files" | "link_to_files"; /** * ListJstreeResponse * @deprecated @@ -13364,6 +13793,11 @@ export interface components { */ time: string; }; + /** + * ModelClass + * @enum {string} + */ + ModelClass: "LibraryDataset" | "LibraryFolder"; /** * ModelStoreFormat * @description Available types of model stores for export. @@ -16439,6 +16873,11 @@ export interface components { /** Tool Version */ tool_version?: string | null; }; + /** + * UploadOption + * @enum {string} + */ + UploadOption: "upload_file" | "upload_paths" | "upload_directory"; /** UrlDataElement */ UrlDataElement: { /** Md5 */ @@ -27417,6 +27856,243 @@ export interface operations { }; }; }; + index_api_libraries__library_id__contents_get: { + parameters: { + query?: never; + header?: { + /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. 
*/ + "run-as"?: string | null; + }; + path: { + library_id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["LibraryContentsIndexListResponse"]; + }; + }; + /** @description Request Error */ + "4XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + /** @description Server Error */ + "5XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + }; + }; + create_api_libraries__library_id__contents_post: { + parameters: { + query?: never; + header?: { + /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. */ + "run-as"?: string | null; + }; + path: { + library_id: string; + }; + cookie?: never; + }; + requestBody: { + content: { + "application/json": + | components["schemas"]["LibraryContentsFolderCreatePayload"] + | components["schemas"]["LibraryContentsFileCreatePayload"] + | components["schemas"]["LibraryContentsCollectionCreatePayload"]; + }; + }; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": + | components["schemas"]["LibraryContentsCreateFolderListResponse"] + | components["schemas"]["LibraryContentsCreateDatasetListResponse"] + | components["schemas"]["LibraryContentsCreateDatasetResponse"]; + }; + }; + /** @description Request Error */ + "4XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + /** @description Server Error */ + "5XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": 
components["schemas"]["MessageExceptionModel"]; + }; + }; + }; + }; + show_api_libraries__library_id__contents__id__get: { + parameters: { + query?: never; + header?: { + /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. */ + "run-as"?: string | null; + }; + path: { + library_id: string; + /** @example F0123456789ABCDEF */ + id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": + | components["schemas"]["LibraryContentsShowFolderResponse"] + | components["schemas"]["LibraryContentsShowDatasetResponse"]; + }; + }; + /** @description Request Error */ + "4XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + /** @description Server Error */ + "5XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + }; + }; + update_api_libraries__library_id__contents__id__put: { + parameters: { + query: { + payload: unknown; + }; + header?: { + /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. 
*/ + "run-as"?: string | null; + }; + path: { + library_id: string; + id: string; + }; + cookie?: never; + }; + requestBody?: never; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": unknown; + }; + }; + /** @description Request Error */ + "4XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + /** @description Server Error */ + "5XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + }; + }; + delete_api_libraries__library_id__contents__id__delete: { + parameters: { + query?: never; + header?: { + /** @description The user ID that will be used to effectively make this API call. Only admins and designated users can make API calls on behalf of other users. */ + "run-as"?: string | null; + }; + path: { + library_id: string; + id: string; + }; + cookie?: never; + }; + requestBody?: { + content: { + "application/json": components["schemas"]["LibraryContentsDeletePayload"] | null; + }; + }; + responses: { + /** @description Successful Response */ + 200: { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["LibraryContentsDeleteResponse"]; + }; + }; + /** @description Request Error */ + "4XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + /** @description Server Error */ + "5XX": { + headers: { + [name: string]: unknown; + }; + content: { + "application/json": components["schemas"]["MessageExceptionModel"]; + }; + }; + }; + }; index_api_licenses_get: { parameters: { query?: never; diff --git a/lib/galaxy/schema/library_contents.py b/lib/galaxy/schema/library_contents.py index d0f423c9dd9e..5e33e8f43c62 100644 --- 
a/lib/galaxy/schema/library_contents.py +++ b/lib/galaxy/schema/library_contents.py @@ -8,6 +8,7 @@ ) from pydantic import ( + ConfigDict, Field, RootModel, ) @@ -79,7 +80,7 @@ class LibraryContentsCreatePayload(Model): "", title="the new message attribute of the LDDA created", ) - extended_metadata: Optional[Union[Dict[str, Any], List[Any], int, float, str, bool]] = Field( + extended_metadata: Optional[Dict[str, Any]] = Field( None, title="sub-dictionary containing any extended metadata to associate with the item", ) @@ -222,10 +223,11 @@ class LibraryContentsShowDatasetResponse(LibraryContentsShowResponse): misc_blurb: Optional[str] peek: Optional[str] uuid: str - metadata_dbkey: str - metadata_data_lines: int tags: TagCollection + # metadata fields + model_config = ConfigDict(extra="allow") + class LibraryContentsCreateFolderResponse(Model): id: EncodedLibraryFolderDatabaseIdField @@ -237,7 +239,7 @@ class LibraryContentsCreateFolderListResponse(RootModel): root: List[LibraryContentsCreateFolderResponse] -class LibraryContentsCreateDatasetResponseBase(Model): +class LibraryContentsCreateDatasetResponse(Model): id: str # should change to EncodedDatabaseIdField latter hda_ldda: str model_class: str @@ -257,28 +259,19 @@ class LibraryContentsCreateDatasetResponseBase(Model): created_from_basename: Optional[str] uuid: str parent_library_id: str # should change to EncodedDatabaseIdField latter - metadata_dbkey: str - - -class LibraryContentsCreateDatasetResponse(LibraryContentsCreateDatasetResponseBase): - metadata_data_lines: Optional[int] - -class LibraryContentsCreateDatasetExtendedResponse(LibraryContentsCreateDatasetResponse): - metadata_comment_lines: Union[str, int] - metadata_columns: int - metadata_column_types: List[str] - metadata_column_names: List[str] - metadata_delimiter: str + # metadata fields + model_config = ConfigDict(extra="allow") class LibraryContentsCreateDatasetListResponse(RootModel): - root: List[LibraryContentsCreateDatasetResponseBase] + 
root: List[LibraryContentsCreateDatasetResponse] class LibraryContentsDeleteResponse(Model): id: EncodedDatabaseIdField deleted: bool + class LibraryContentsPurgedResponse(LibraryContentsDeleteResponse): - purged: bool \ No newline at end of file + purged: bool diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py index 3c5406faee43..9dfa8436775a 100644 --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -17,7 +17,6 @@ from galaxy.schema.fields import DecodedDatabaseIdField from galaxy.schema.library_contents import ( LibraryContentsCollectionCreatePayload, - LibraryContentsCreateDatasetExtendedResponse, LibraryContentsCreateDatasetListResponse, LibraryContentsCreateDatasetResponse, LibraryContentsCreateFolderListResponse, @@ -51,29 +50,41 @@ class FastAPILibraryContents: @router.get( "/api/libraries/{library_id}/contents", summary="Return a list of library files and folders.", + deprecated=True, ) def index( self, library_id: DecodedDatabaseIdField, trans: ProvidesUserContext = DependsOnTrans, ) -> LibraryContentsIndexListResponse: + """ + This endpoint is deprecated. Please use GET /api/folders/{folder_id}/contents instead. + """ return self.service.index(trans, library_id) @router.get( "/api/libraries/{library_id}/contents/{id}", summary="Return a library file or folder.", + deprecated=True, ) def show( self, library_id: DecodedDatabaseIdField, id: MaybeLibraryFolderOrDatasetID, trans: ProvidesUserContext = DependsOnTrans, - ) -> Union[LibraryContentsShowFolderResponse, LibraryContentsShowDatasetResponse]: + ) -> Union[ + LibraryContentsShowFolderResponse, + LibraryContentsShowDatasetResponse, + ]: + """ + This endpoint is deprecated. Please use GET /api/libraries/datasets/{library_id} instead. 
+ """ return self.service.show(trans, id) @router.post( "/api/libraries/{library_id}/contents", summary="Create a new library file or folder.", + deprecated=True, ) def create( self, @@ -84,10 +95,12 @@ def create( trans: ProvidesHistoryContext = DependsOnTrans, ) -> Union[ LibraryContentsCreateFolderListResponse, - LibraryContentsCreateDatasetResponse, LibraryContentsCreateDatasetListResponse, - LibraryContentsCreateDatasetExtendedResponse, + LibraryContentsCreateDatasetResponse, ]: + """ + This endpoint is deprecated. Please use POST /api/folders/{folder_id} or POST /api/folders/{folder_id}/contents instead. + """ return self.service.create(trans, library_id, payload) @router.put( @@ -102,11 +115,15 @@ def update( payload, trans: ProvidesUserContext = DependsOnTrans, ) -> None: + """ + This endpoint is deprecated. Please use PATCH /api/libraries/datasets/{library_id} instead. + """ return self.service.update(trans, id, payload) @router.delete( "/api/libraries/{library_id}/contents/{id}", summary="Delete a library file or folder.", + deprecated=True, ) def delete( self, @@ -114,7 +131,8 @@ def delete( id: DecodedDatabaseIdField, payload: Optional[LibraryContentsDeletePayload] = Body(None), trans: ProvidesHistoryContext = DependsOnTrans, - ): - if payload is None: - payload = LibraryContentsDeletePayload() - return self.service.delete(trans, id, payload) + ) -> LibraryContentsDeleteResponse: + """ + This endpoint is deprecated. Please use DELETE /api/libraries/datasets/{library_id} instead. 
+ """ + return self.service.delete(trans, id, payload or LibraryContentsDeletePayload()) diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py index ccfc3559e1fc..7af6427258d5 100644 --- a/lib/galaxy/webapps/galaxy/services/library_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -36,7 +36,6 @@ from galaxy.managers.hdas import HDAManager from galaxy.model import ( Library, - LibraryDataset, LibraryFolder, Role, tags, @@ -48,7 +47,6 @@ ) from galaxy.schema.library_contents import ( LibraryContentsCollectionCreatePayload, - LibraryContentsCreateDatasetExtendedResponse, LibraryContentsCreateDatasetListResponse, LibraryContentsCreateDatasetResponse, LibraryContentsCreateFolderListResponse, @@ -136,10 +134,16 @@ def show( self, trans: ProvidesUserContext, id: MaybeLibraryFolderOrDatasetID, - ) -> Union[LibraryContentsShowFolderResponse, LibraryContentsShowDatasetResponse]: + ) -> Union[ + LibraryContentsShowFolderResponse, + LibraryContentsShowDatasetResponse, + ]: """Returns information about library file or folder.""" class_name, content_id = self._decode_library_content_id(id) - rval: Union[LibraryContentsShowFolderResponse, LibraryContentsShowDatasetResponse] + rval: Union[ + LibraryContentsShowFolderResponse, + LibraryContentsShowDatasetResponse, + ] if class_name == "LibraryFolder": content = self.get_library_folder(trans, content_id, check_ownership=False, check_accessible=True) rval = LibraryContentsShowFolderResponse(**content.to_dict(view="element")) @@ -160,9 +164,8 @@ def create( ], ) -> Union[ LibraryContentsCreateFolderListResponse, - LibraryContentsCreateDatasetResponse, LibraryContentsCreateDatasetListResponse, - LibraryContentsCreateDatasetExtendedResponse, + LibraryContentsCreateDatasetResponse, ]: """Create a new library file or folder.""" if trans.user_is_bootstrap_admin: @@ -178,10 +181,7 @@ def create( rval = self._copy_hda_to_library_folder( trans, 
self.hda_manager, payload.from_hda_id, payload.folder_id, payload.ldda_message ) - if "metadata_comment_lines" in rval: - return LibraryContentsCreateDatasetExtendedResponse(**rval) - else: - return LibraryContentsCreateDatasetResponse(**rval) + return LibraryContentsCreateDatasetResponse(**rval) elif payload.from_hdca_id: rval = self._copy_hdca_to_library_folder( trans, self.hda_manager, payload.from_hdca_id, payload.folder_id, payload.ldda_message @@ -225,7 +225,7 @@ def delete( trans: ProvidesHistoryContext, id: DecodedDatabaseIdField, payload: LibraryContentsDeletePayload, - ): + ) -> LibraryContentsDeleteResponse: """Delete the LibraryDataset with the given ``id``.""" rval = {"id": id} try: @@ -267,7 +267,7 @@ def delete( except Exception as exc: log.exception(f"library_contents API, delete: uncaught exception: {id}, {payload}") raise exceptions.InternalServerError(util.unicodify(exc)) - return rval + return LibraryContentsDeleteResponse(**rval) def _decode_library_content_id( self, From 52ff5048945856685d0f41e5b76457eae8506532 Mon Sep 17 00:00:00 2001 From: Arash Date: Tue, 24 Sep 2024 18:20:08 +0200 Subject: [PATCH 09/22] Refactor library content routes and controllers --- lib/galaxy/schema/library_contents.py | 9 ++++++--- lib/galaxy/webapps/galaxy/services/library_contents.py | 2 +- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/schema/library_contents.py b/lib/galaxy/schema/library_contents.py index 5e33e8f43c62..e3d0f0e5545e 100644 --- a/lib/galaxy/schema/library_contents.py +++ b/lib/galaxy/schema/library_contents.py @@ -240,14 +240,17 @@ class LibraryContentsCreateFolderListResponse(RootModel): class LibraryContentsCreateDatasetResponse(Model): - id: str # should change to EncodedDatabaseIdField latter + # id, library_dataset_id, parent_library_id should change to EncodedDatabaseIdField latter + # because they are encoded ids in _copy_hda_to_library_folder and _copy_hdca_to_library_folder + # functions that are shared by 
LibraryFolderContentsService too + id: str hda_ldda: str model_class: str name: str deleted: bool visible: bool state: str - library_dataset_id: str # should change to EncodedDatabaseIdField latter + library_dataset_id: str file_size: int file_name: str update_time: str @@ -258,7 +261,7 @@ class LibraryContentsCreateDatasetResponse(Model): misc_blurb: Optional[str] created_from_basename: Optional[str] uuid: str - parent_library_id: str # should change to EncodedDatabaseIdField latter + parent_library_id: str # metadata fields model_config = ConfigDict(extra="allow") diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py index 7af6427258d5..680126aa0488 100644 --- a/lib/galaxy/webapps/galaxy/services/library_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -2,7 +2,6 @@ import logging import os from typing import ( - Annotated, cast, Dict, List, @@ -14,6 +13,7 @@ from fastapi import Path from markupsafe import escape +from typing_extensions import Annotated from galaxy import ( exceptions, From 331bee741346cc52fbc8ce7d36b1511e57f68b3f Mon Sep 17 00:00:00 2001 From: Arash Date: Tue, 24 Sep 2024 18:30:00 +0200 Subject: [PATCH 10/22] Remove old library_contents API route configurations --- lib/galaxy/webapps/galaxy/buildapp.py | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/lib/galaxy/webapps/galaxy/buildapp.py b/lib/galaxy/webapps/galaxy/buildapp.py index d1aa8766a170..ef56cd174fdb 100644 --- a/lib/galaxy/webapps/galaxy/buildapp.py +++ b/lib/galaxy/webapps/galaxy/buildapp.py @@ -851,19 +851,6 @@ def populate_api_routes(webapp, app): conditions=dict(method=["POST", "GET"]), ) - # webapp.mapper.resource( - # "content", - # "contents", - # controller="library_contents", - # name_prefix="library_", - # path_prefix="/api/libraries/{library_id}", - # parent_resources=dict(member_name="library", collection_name="libraries"), - # ) - - # 
_add_item_extended_metadata_controller( - # webapp, name_prefix="library_dataset_", path_prefix="/api/libraries/{library_id}/contents/{library_content_id}" - # ) - webapp.mapper.connect( "build_for_rerun", "/api/jobs/{id}/build_for_rerun", From e10fd627998cb6d2a61c417844db639da1ea5426 Mon Sep 17 00:00:00 2001 From: Arash Date: Tue, 24 Sep 2024 19:52:00 +0200 Subject: [PATCH 11/22] make the review process easy --- client/src/api/schema/schema.ts | 15 + lib/galaxy/actions/library.py | 244 +++++++++++++- lib/galaxy/schema/library_contents.py | 15 +- .../webapps/galaxy/api/library_contents.py | 2 + .../galaxy/services/library_contents.py | 302 +----------------- 5 files changed, 276 insertions(+), 302 deletions(-) diff --git a/client/src/api/schema/schema.ts b/client/src/api/schema/schema.ts index ea794bb8a18c..2914e057011c 100644 --- a/client/src/api/schema/schema.ts +++ b/client/src/api/schema/schema.ts @@ -12965,6 +12965,20 @@ export interface components { } & { [key: string]: unknown; }; + /** LibraryContentsCreateFileListResponse */ + LibraryContentsCreateFileListResponse: components["schemas"]["LibraryContentsCreateFileResponse"][]; + /** LibraryContentsCreateFileResponse */ + LibraryContentsCreateFileResponse: { + /** + * Id + * @example 0123456789ABCDEF + */ + id: string; + /** Name */ + name: string; + /** Url */ + url: string; + }; /** LibraryContentsCreateFolderListResponse */ LibraryContentsCreateFolderListResponse: components["schemas"]["LibraryContentsCreateFolderResponse"][]; /** LibraryContentsCreateFolderResponse */ @@ -27928,6 +27942,7 @@ export interface operations { content: { "application/json": | components["schemas"]["LibraryContentsCreateFolderListResponse"] + | components["schemas"]["LibraryContentsCreateFileListResponse"] | components["schemas"]["LibraryContentsCreateDatasetListResponse"] | components["schemas"]["LibraryContentsCreateDatasetResponse"]; }; diff --git a/lib/galaxy/actions/library.py b/lib/galaxy/actions/library.py index 
35ada9f0c295..570f69bb4754 100644 --- a/lib/galaxy/actions/library.py +++ b/lib/galaxy/actions/library.py @@ -2,39 +2,49 @@ Contains library functions """ +import json import logging import os.path -from galaxy import util -from galaxy.exceptions import ( - AdminRequiredException, - ConfigDoesNotAllowException, - RequestParameterInvalidException, +from markupsafe import escape + +from galaxy import ( + exceptions, + util, +) +from galaxy.managers.collections_util import ( + api_payload_to_create_params, + dictify_dataset_collection_instance, ) +from galaxy.model import LibraryFolder from galaxy.model.base import transaction from galaxy.tools.actions import upload_common +from galaxy.tools.parameters import populate_state from galaxy.util.path import ( safe_contains, safe_relpath, unsafe_walk, ) +from galaxy.webapps.base.controller import UsesExtendedMetadataMixin log = logging.getLogger(__name__) def validate_server_directory_upload(trans, server_dir): if server_dir in [None, "None", ""]: - raise RequestParameterInvalidException("Invalid or unspecified server_dir parameter") + raise exceptions.RequestParameterInvalidException("Invalid or unspecified server_dir parameter") if trans.user_is_admin: import_dir = trans.app.config.library_import_dir import_dir_desc = "library_import_dir" if not import_dir: - raise ConfigDoesNotAllowException('"library_import_dir" is not set in the Galaxy configuration') + raise exceptions.ConfigDoesNotAllowException('"library_import_dir" is not set in the Galaxy configuration') else: import_dir = trans.app.config.user_library_import_dir if not import_dir: - raise ConfigDoesNotAllowException('"user_library_import_dir" is not set in the Galaxy configuration') + raise exceptions.ConfigDoesNotAllowException( + '"user_library_import_dir" is not set in the Galaxy configuration' + ) if server_dir != trans.user.email: import_dir = os.path.join(import_dir, trans.user.email) import_dir_desc = "user_library_import_dir" @@ -64,17 +74,21 @@ def 
validate_server_directory_upload(trans, server_dir): ) unsafe = True if unsafe: - raise RequestParameterInvalidException("Invalid server_dir specified") + raise exceptions.RequestParameterInvalidException("Invalid server_dir specified") return full_dir, import_dir_desc def validate_path_upload(trans): if not trans.app.config.allow_library_path_paste: - raise ConfigDoesNotAllowException('"allow_path_paste" is not set to True in the Galaxy configuration file') + raise exceptions.ConfigDoesNotAllowException( + '"allow_path_paste" is not set to True in the Galaxy configuration file' + ) if not trans.user_is_admin: - raise AdminRequiredException("Uploading files via filesystem paths can only be performed by administrators") + raise exceptions.AdminRequiredException( + "Uploading files via filesystem paths can only be performed by administrators" + ) class LibraryActions: @@ -82,6 +96,109 @@ class LibraryActions: Mixin for controllers that provide library functionality. """ + def _upload_dataset(self, trans, folder_id: int, payload): + # Set up the traditional tool state/params + cntrller = "api" + tool_id = "upload1" + upload_common.validate_datatype_extension( + datatypes_registry=trans.app.datatypes_registry, ext=payload.file_type + ) + tool = trans.app.toolbox.get_tool(tool_id) + state = tool.new_state(trans) + populate_state(trans, tool.inputs, payload.model_dump(), state.inputs) + tool_params = state.inputs + dataset_upload_inputs = [] + for input in tool.inputs.values(): + if input.type == "upload_dataset": + dataset_upload_inputs.append(input) + # Library-specific params + if payload.upload_option == "upload_directory": + full_dir, import_dir_desc = validate_server_directory_upload(trans, payload.server_dir) + elif payload.upload_option == "upload_paths": + # Library API already checked this - following check isn't actually needed. + validate_path_upload(trans) + # Some error handling should be added to this method. 
+ try: + # FIXME: instead of passing params here ( which have been processed by util.Params(), the original payload + # should be passed so that complex objects that may have been included in the initial request remain. + library_bunch = upload_common.handle_library_params(trans, payload.model_dump(), folder_id, None) + except Exception: + raise exceptions.InvalidFileFormatError("Invalid folder specified") + # Proceed with (mostly) regular upload processing if we're still errorless + if payload.upload_option == "upload_file": + tool_params = upload_common.persist_uploads(tool_params, trans) + uploaded_datasets = upload_common.get_uploaded_datasets( + trans, cntrller, tool_params, dataset_upload_inputs, library_bunch=library_bunch + ) + elif payload.upload_option == "upload_directory": + uploaded_datasets = self._get_server_dir_uploaded_datasets( + trans, payload, full_dir, import_dir_desc, library_bunch + ) + elif payload.upload_option == "upload_paths": + uploaded_datasets, _, _ = self._get_path_paste_uploaded_datasets( + trans, payload.model_dump(), library_bunch, 200, None + ) + if payload.upload_option == "upload_file" and not uploaded_datasets: + raise exceptions.RequestParameterInvalidException("Select a file, enter a URL or enter text") + json_file_path = upload_common.create_paramfile(trans, uploaded_datasets) + data_list = [ud.data for ud in uploaded_datasets] + job_params = {} + job_params["link_data_only"] = json.dumps(payload.link_data_only) + job_params["uuid"] = json.dumps(payload.uuid) + job, output = upload_common.create_job( + trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params + ) + trans.app.job_manager.enqueue(job, tool=tool) + if not output: + raise exceptions.RequestParameterInvalidException("Upload failed") + return output + + def _get_server_dir_uploaded_datasets(self, trans, payload, full_dir, import_dir_desc, library_bunch): + files = self._get_server_dir_files(payload, full_dir, 
import_dir_desc) + uploaded_datasets = [] + for file in files: + name = os.path.basename(file) + uploaded_datasets.append( + self._make_library_uploaded_dataset( + trans, payload.model_dump(), name, file, "server_dir", library_bunch + ) + ) + return uploaded_datasets + + def _get_server_dir_files(self, payload, full_dir, import_dir_desc): + files = [] + try: + for entry in os.listdir(full_dir): + # Only import regular files + path = os.path.join(full_dir, entry) + if os.path.islink(full_dir) and payload.link_data_only == "link_to_files": + # If we're linking instead of copying and the + # sub-"directory" in the import dir is actually a symlink, + # dereference the symlink, but not any of its contents. + link_path = os.readlink(full_dir) + if os.path.isabs(link_path): + path = os.path.join(link_path, entry) + else: + path = os.path.abspath(os.path.join(link_path, entry)) + elif os.path.islink(path) and os.path.isfile(path) and payload.link_data_only == "link_to_files": + # If we're linking instead of copying and the "file" in the + # sub-directory of the import dir is actually a symlink, + # dereference the symlink (one dereference only, Vasili). 
+ link_path = os.readlink(path) + if os.path.isabs(link_path): + path = link_path + else: + path = os.path.abspath(os.path.join(os.path.dirname(path), link_path)) + if os.path.isfile(path): + files.append(path) + except Exception as e: + raise exceptions.InternalServerError( + f"Unable to get file list for configured {import_dir_desc}, error: {util.unicodify(e)}" + ) + if not files: + raise exceptions.ObjectAttributeMissingException(f"The directory '{full_dir}' contains no valid files") + return files + def _get_path_paste_uploaded_datasets(self, trans, params, library_bunch, response_code, message): preserve_dirs = util.string_as_bool(params.get("preserve_dirs", False)) uploaded_datasets = [] @@ -176,3 +293,108 @@ def _make_library_uploaded_dataset(self, trans, params, name, path, type, librar with transaction(trans.sa_session): trans.sa_session.commit() return uploaded_dataset + + def _upload_library_dataset(self, trans, payload, library_id): + is_admin = trans.user_is_admin + current_user_roles = trans.get_current_user_roles() + folder = trans.sa_session.get(LibraryFolder, payload.folder_id) + if not folder: + raise exceptions.RequestParameterInvalidException("Invalid folder id specified.") + self._check_access(trans, is_admin, folder, current_user_roles) + self._check_add(trans, is_admin, folder, current_user_roles) + if payload.roles: + # Check to see if the user selected roles to associate with the DATASET_ACCESS permission + # on the dataset that would cause accessibility issues. 
+ vars = dict(DATASET_ACCESS_in=payload.roles) + permissions, in_roles, error, message = trans.app.security_agent.derive_roles_from_access( + trans, folder.parent_library.id, "api", library=True, **vars + ) + if error: + raise exceptions.RequestParameterInvalidException(message) + created_outputs_dict = self._upload_dataset(trans, folder_id=folder.id, payload=payload) + return self._create_response(trans, payload, created_outputs_dict, library_id) + + def _create_folder(self, trans, payload, library_id): + is_admin = trans.user_is_admin + current_user_roles = trans.get_current_user_roles() + parent_folder = trans.sa_session.get(LibraryFolder, payload.folder_id) + if not parent_folder: + raise exceptions.RequestParameterInvalidException("Invalid folder id specified.") + # Check the library which actually contains the user-supplied parent folder, not the user-supplied + # library, which could be anything. + self._check_access(trans, is_admin, parent_folder, current_user_roles) + self._check_add(trans, is_admin, parent_folder, current_user_roles) + new_folder = LibraryFolder(name=payload.name, description=payload.description) + # We are associating the last used genome build with folders, so we will always + # initialize a new folder with the first dbkey in genome builds list which is currently + # ? unspecified (?) 
+ new_folder.genome_build = trans.app.genome_builds.default_value + parent_folder.add_folder(new_folder) + trans.sa_session.add(new_folder) + with transaction(trans.sa_session): + trans.sa_session.commit() + # New folders default to having the same permissions as their parent folder + trans.app.security_agent.copy_library_permissions(trans, parent_folder, new_folder) + new_folder_dict = dict(created=new_folder) + return self._create_response(trans, payload, new_folder_dict, library_id) + + def _create_response(self, trans, payload, output, library_id): + rval = [] + for v in output.values(): + if payload.extended_metadata is not None: + # If there is extended metadata, store it, attach it to the dataset, and index it + extended_metadata = UsesExtendedMetadataMixin + extended_metadata.create_extended_metadata(trans, payload.extended_metadata) + if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation): + v = v.library_dataset + url = self._url_for(trans, library_id, v.id, payload.create_type) + rval.append(dict(id=v.id, name=v.name, url=url)) + return rval + + def _create_collection(self, trans, payload, parent): + # Not delegating to library_common, so need to check access to parent folder here. 
+ self.check_user_can_add_to_library_item(trans, parent, check_accessible=True) + create_params = api_payload_to_create_params(payload.model_dump()) + # collection_manager.create needs trans as one of the params + create_params["trans"] = trans + create_params["parent"] = parent + dataset_collection_instance = self.collection_manager.create(**create_params) + dataset_collection = dictify_dataset_collection_instance( + dataset_collection_instance, security=trans.security, url_builder=trans.url_builder, parent=parent + ) + return [dataset_collection] + + def _check_access(self, trans, is_admin, item, current_user_roles): + if isinstance(item, trans.model.HistoryDatasetAssociation): + # Make sure the user has the DATASET_ACCESS permission on the history_dataset_association. + if not item: + message = f"Invalid history dataset ({escape(str(item))}) specified." + raise exceptions.ObjectNotFound(message) + elif ( + not trans.app.security_agent.can_access_dataset(current_user_roles, item.dataset) + and item.user == trans.user + ): + message = f"You do not have permission to access the history dataset with id ({str(item.id)})." + raise exceptions.ItemAccessibilityException(message) + else: + # Make sure the user has the LIBRARY_ACCESS permission on the library item. + if not item: + message = f"Invalid library item ({escape(str(item))}) specified." + raise exceptions.ObjectNotFound(message) + elif not ( + is_admin or trans.app.security_agent.can_access_library_item(current_user_roles, item, trans.user) + ): + if isinstance(item, trans.model.Library): + item_type = "data library" + elif isinstance(item, LibraryFolder): + item_type = "folder" + else: + item_type = "(unknown item type)" + message = f"You do not have permission to access the {escape(item_type)} with id ({str(item.id)})." 
+ raise exceptions.ItemAccessibilityException(message) + + def _check_add(self, trans, is_admin, item, current_user_roles): + # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission. + if not (is_admin or trans.app.security_agent.can_add_library_item(current_user_roles, item)): + message = f"You are not authorized to add an item to ({escape(item.name)})." + raise exceptions.ItemAccessibilityException(message) diff --git a/lib/galaxy/schema/library_contents.py b/lib/galaxy/schema/library_contents.py index e3d0f0e5545e..da66a61e6cb2 100644 --- a/lib/galaxy/schema/library_contents.py +++ b/lib/galaxy/schema/library_contents.py @@ -229,16 +229,27 @@ class LibraryContentsShowDatasetResponse(LibraryContentsShowResponse): model_config = ConfigDict(extra="allow") -class LibraryContentsCreateFolderResponse(Model): - id: EncodedLibraryFolderDatabaseIdField +class LibraryContentsCreateResponse(Model): name: str url: str +class LibraryContentsCreateFolderResponse(LibraryContentsCreateResponse): + id: EncodedLibraryFolderDatabaseIdField + + +class LibraryContentsCreateFileResponse(LibraryContentsCreateResponse): + id: EncodedDatabaseIdField + + class LibraryContentsCreateFolderListResponse(RootModel): root: List[LibraryContentsCreateFolderResponse] +class LibraryContentsCreateFileListResponse(RootModel): + root: List[LibraryContentsCreateFileResponse] + + class LibraryContentsCreateDatasetResponse(Model): # id, library_dataset_id, parent_library_id should change to EncodedDatabaseIdField latter # because they are encoded ids in _copy_hda_to_library_folder and _copy_hdca_to_library_folder diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py index 9dfa8436775a..e719520a8508 100644 --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -19,6 +19,7 @@ LibraryContentsCollectionCreatePayload, LibraryContentsCreateDatasetListResponse, 
LibraryContentsCreateDatasetResponse, + LibraryContentsCreateFileListResponse, LibraryContentsCreateFolderListResponse, LibraryContentsDeletePayload, LibraryContentsDeleteResponse, @@ -95,6 +96,7 @@ def create( trans: ProvidesHistoryContext = DependsOnTrans, ) -> Union[ LibraryContentsCreateFolderListResponse, + LibraryContentsCreateFileListResponse, LibraryContentsCreateDatasetListResponse, LibraryContentsCreateDatasetResponse, ]: diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py index 680126aa0488..48246e1b7be4 100644 --- a/lib/galaxy/webapps/galaxy/services/library_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -1,9 +1,5 @@ -import json import logging -import os from typing import ( - cast, - Dict, List, Optional, Tuple, @@ -12,23 +8,14 @@ ) from fastapi import Path -from markupsafe import escape from typing_extensions import Annotated from galaxy import ( exceptions, util, ) -from galaxy.actions.library import ( - LibraryActions, - validate_path_upload, - validate_server_directory_upload, -) +from galaxy.actions.library import LibraryActions from galaxy.managers.collections import DatasetCollectionManager -from galaxy.managers.collections_util import ( - api_payload_to_create_params, - dictify_dataset_collection_instance, -) from galaxy.managers.context import ( ProvidesHistoryContext, ProvidesUserContext, @@ -36,22 +23,16 @@ from galaxy.managers.hdas import HDAManager from galaxy.model import ( Library, - LibraryFolder, - Role, tags, ) from galaxy.model.base import transaction -from galaxy.schema.fields import ( - DecodedDatabaseIdField, - LibraryFolderDatabaseIdField, -) +from galaxy.schema.fields import DecodedDatabaseIdField from galaxy.schema.library_contents import ( LibraryContentsCollectionCreatePayload, LibraryContentsCreateDatasetListResponse, LibraryContentsCreateDatasetResponse, + LibraryContentsCreateFileListResponse, 
LibraryContentsCreateFolderListResponse, - LibraryContentsCreateFolderResponse, - LibraryContentsCreatePayload, LibraryContentsDeletePayload, LibraryContentsDeleteResponse, LibraryContentsFileCreatePayload, @@ -64,12 +45,7 @@ LibraryContentsUpdatePayload, ) from galaxy.security.idencoding import IdEncodingHelper -from galaxy.tools.actions import upload_common -from galaxy.tools.parameters import populate_state -from galaxy.webapps.base.controller import ( - UsesExtendedMetadataMixin, - UsesLibraryMixinItems, -) +from galaxy.webapps.base.controller import UsesLibraryMixinItems from galaxy.webapps.galaxy.services.base import ServiceBase log = logging.getLogger(__name__) @@ -85,7 +61,7 @@ ] -class LibraryContentsService(ServiceBase, LibraryActions, UsesLibraryMixinItems, UsesExtendedMetadataMixin): +class LibraryContentsService(ServiceBase, LibraryActions, UsesLibraryMixinItems): """ Interface/service shared by controllers for interacting with the contents of a library contents. """ @@ -158,12 +134,13 @@ def show( def create( self, trans: ProvidesHistoryContext, - library_id: LibraryFolderDatabaseIdField, + library_id: DecodedDatabaseIdField, payload: Union[ LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload, LibraryContentsCollectionCreatePayload ], ) -> Union[ LibraryContentsCreateFolderListResponse, + LibraryContentsCreateFileListResponse, LibraryContentsCreateDatasetListResponse, LibraryContentsCreateDatasetResponse, ]: @@ -190,11 +167,14 @@ def create( # Now create the desired content object, either file or folder. 
if payload.create_type == "file": - return self._upload_library_dataset(trans, cast(LibraryContentsFileCreatePayload, payload), library_id) + rval = self._upload_library_dataset(trans, payload, library_id) + return LibraryContentsCreateFileListResponse(root=rval) elif payload.create_type == "folder": - return self._create_folder(trans, cast(LibraryContentsFolderCreatePayload, payload), library_id) + rval = self._create_folder(trans, payload, library_id) + return LibraryContentsCreateFolderListResponse(root=rval) elif payload.create_type == "collection": - return self._create_collection(trans, cast(LibraryContentsCollectionCreatePayload, payload), parent) + rval = self._create_collection(trans, payload, parent) + return LibraryContentsCreateDatasetListResponse(root=rval) else: raise exceptions.RequestParameterInvalidException("Invalid create_type specified.") @@ -317,259 +297,3 @@ def _traverse(self, trans: ProvidesUserContext, folder, current_user_roles): ld.api_type = "file" rval.append(ld) return rval - - def _upload_library_dataset( - self, - trans: ProvidesHistoryContext, - payload: LibraryContentsFileCreatePayload, - library_id: DecodedDatabaseIdField, - ) -> LibraryContentsCreateFolderListResponse: - is_admin = trans.user_is_admin - current_user_roles = trans.get_current_user_roles() - folder = trans.sa_session.get(LibraryFolder, payload.folder_id) - if not folder: - raise exceptions.RequestParameterInvalidException("Invalid folder id specified.") - self._check_access(trans, is_admin, folder, current_user_roles) - self._check_add(trans, is_admin, folder, current_user_roles) - if payload.roles: - # Check to see if the user selected roles to associate with the DATASET_ACCESS permission - # on the dataset that would cause accessibility issues. 
- vars = dict(DATASET_ACCESS_in=payload.roles) - permissions, in_roles, error, message = trans.app.security_agent.derive_roles_from_access( - trans, folder.parent_library.id, "api", library=True, **vars - ) - if error: - raise exceptions.RequestParameterInvalidException(message) - created_outputs_dict = self._upload_dataset(trans, payload=payload, folder_id=folder.id) - return self._convert_output_to_rval(trans, payload, created_outputs_dict, library_id) - - def _upload_dataset( - self, - trans: ProvidesHistoryContext, - payload: LibraryContentsFileCreatePayload, - folder_id: LibraryFolderDatabaseIdField, - ) -> Dict[str, List]: - # Set up the traditional tool state/params - cntrller = "api" - tool_id = "upload1" - upload_common.validate_datatype_extension( - datatypes_registry=trans.app.datatypes_registry, ext=payload.file_type - ) - tool = trans.app.toolbox.get_tool(tool_id) - state = tool.new_state(trans) - populate_state(trans, tool.inputs, payload.model_dump(), state.inputs) - tool_params = state.inputs - dataset_upload_inputs = [] - for input in tool.inputs.values(): - if input.type == "upload_dataset": - dataset_upload_inputs.append(input) - # Library-specific params - if payload.upload_option == "upload_directory": - full_dir, import_dir_desc = validate_server_directory_upload(trans, payload.server_dir) - elif payload.upload_option == "upload_paths": - # Library API already checked this - following check isn't actually needed. - validate_path_upload(trans) - # Some error handling should be added to this method. - try: - # FIXME: instead of passing params here ( which have been processed by util.Params(), the original payload - # should be passed so that complex objects that may have been included in the initial request remain. 
- library_bunch = upload_common.handle_library_params(trans, payload.model_dump(), folder_id, None) - except Exception: - raise exceptions.InvalidFileFormatError("Invalid folder specified") - # Proceed with (mostly) regular upload processing if we're still errorless - if payload.upload_option == "upload_file": - tool_params = upload_common.persist_uploads(tool_params, trans) - uploaded_datasets = upload_common.get_uploaded_datasets( - trans, cntrller, tool_params, dataset_upload_inputs, library_bunch=library_bunch - ) - elif payload.upload_option == "upload_directory": - uploaded_datasets = self._get_server_dir_uploaded_datasets( - trans, payload, full_dir, import_dir_desc, library_bunch - ) - elif payload.upload_option == "upload_paths": - uploaded_datasets, _, _ = self._get_path_paste_uploaded_datasets( - trans, payload.model_dump(), library_bunch, 200, None - ) - if payload.upload_option == "upload_file" and not uploaded_datasets: - raise exceptions.RequestParameterInvalidException("Select a file, enter a URL or enter text") - json_file_path = upload_common.create_paramfile(trans, uploaded_datasets) - data_list = [ud.data for ud in uploaded_datasets] - job_params = {} - job_params["link_data_only"] = json.dumps(payload.link_data_only) - job_params["uuid"] = json.dumps(payload.uuid) - job, output = upload_common.create_job( - trans, tool_params, tool, json_file_path, data_list, folder=library_bunch.folder, job_params=job_params - ) - trans.app.job_manager.enqueue(job, tool=tool) - if not output: - raise exceptions.RequestParameterInvalidException("Upload failed") - return output - - def _get_server_dir_uploaded_datasets( - self, - trans: ProvidesHistoryContext, - payload: LibraryContentsFileCreatePayload, - full_dir: str, - import_dir_desc: str, - library_bunch: upload_common.LibraryParams, - ) -> List: - files = self._get_server_dir_files(payload, full_dir, import_dir_desc) - uploaded_datasets = [] - for file in files: - name = os.path.basename(file) - 
uploaded_datasets.append( - self._make_library_uploaded_dataset( - trans, payload.model_dump(), name, file, "server_dir", library_bunch - ) - ) - return uploaded_datasets - - def _get_server_dir_files( - self, - payload: LibraryContentsFileCreatePayload, - full_dir: str, - import_dir_desc: str, - ) -> List: - files = [] - try: - for entry in os.listdir(full_dir): - # Only import regular files - path = os.path.join(full_dir, entry) - if os.path.islink(full_dir) and payload.link_data_only == "link_to_files": - # If we're linking instead of copying and the - # sub-"directory" in the import dir is actually a symlink, - # dereference the symlink, but not any of its contents. - link_path = os.readlink(full_dir) - if os.path.isabs(link_path): - path = os.path.join(link_path, entry) - else: - path = os.path.abspath(os.path.join(link_path, entry)) - elif os.path.islink(path) and os.path.isfile(path) and payload.link_data_only == "link_to_files": - # If we're linking instead of copying and the "file" in the - # sub-directory of the import dir is actually a symlink, - # dereference the symlink (one dereference only, Vasili). 
- link_path = os.readlink(path) - if os.path.isabs(link_path): - path = link_path - else: - path = os.path.abspath(os.path.join(os.path.dirname(path), link_path)) - if os.path.isfile(path): - files.append(path) - except Exception as e: - raise exceptions.InternalServerError( - f"Unable to get file list for configured {import_dir_desc}, error: {util.unicodify(e)}" - ) - if not files: - raise exceptions.ObjectAttributeMissingException(f"The directory '{full_dir}' contains no valid files") - return files - - def _create_folder( - self, - trans: ProvidesUserContext, - payload: LibraryContentsFolderCreatePayload, - library_id: DecodedDatabaseIdField, - ) -> LibraryContentsCreateFolderListResponse: - is_admin = trans.user_is_admin - current_user_roles = trans.get_current_user_roles() - parent_folder = trans.sa_session.get(LibraryFolder, payload.folder_id) - if not parent_folder: - raise exceptions.RequestParameterInvalidException("Invalid folder id specified.") - # Check the library which actually contains the user-supplied parent folder, not the user-supplied - # library, which could be anything. - self._check_access(trans, is_admin, parent_folder, current_user_roles) - self._check_add(trans, is_admin, parent_folder, current_user_roles) - new_folder = LibraryFolder(name=payload.name, description=payload.description) - # We are associating the last used genome build with folders, so we will always - # initialize a new folder with the first dbkey in genome builds list which is currently - # ? unspecified (?) 
- new_folder.genome_build = trans.app.genome_builds.default_value - parent_folder.add_folder(new_folder) - trans.sa_session.add(new_folder) - with transaction(trans.sa_session): - trans.sa_session.commit() - # New folders default to having the same permissions as their parent folder - trans.app.security_agent.copy_library_permissions(trans, parent_folder, new_folder) - new_folder_dict = dict(created=new_folder) - return self._convert_output_to_rval(trans, payload, new_folder_dict, library_id) - - def _convert_output_to_rval( - self, - trans: ProvidesUserContext, - payload: LibraryContentsCreatePayload, - output: Dict, - library_id: DecodedDatabaseIdField, - ) -> LibraryContentsCreateFolderListResponse: - rval = [] - for v in output.values(): - if payload.extended_metadata is not None: - # If there is extended metadata, store it, attach it to the dataset, and index it - self.create_extended_metadata(trans, payload.extended_metadata) - if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation): - v = v.library_dataset - url = self._url_for(trans, library_id, v.id, payload.create_type) - rval.append(LibraryContentsCreateFolderResponse(id=v.id, name=v.name, url=url)) - return LibraryContentsCreateFolderListResponse(root=rval) - - def _create_collection( - self, - trans: ProvidesUserContext, - payload: LibraryContentsCollectionCreatePayload, - parent: LibraryFolder, - ) -> LibraryContentsCreateDatasetListResponse: - # Not delegating to library_common, so need to check access to parent folder here. 
- self.check_user_can_add_to_library_item(trans, parent, check_accessible=True) - create_params = api_payload_to_create_params(payload.model_dump()) - create_params["trans"] = trans - create_params["parent"] = parent - dataset_collection_instance = self.collection_manager.create(**create_params) - dataset_collection = dictify_dataset_collection_instance( - dataset_collection_instance, security=trans.security, url_builder=trans.url_builder, parent=parent - ) - return LibraryContentsCreateDatasetListResponse(root=[dataset_collection]) - - def _check_access( - self, - trans: ProvidesUserContext, - is_admin: bool, - item, - current_user_roles: List[Role], - ) -> None: - if isinstance(item, trans.model.HistoryDatasetAssociation): - # Make sure the user has the DATASET_ACCESS permission on the history_dataset_association. - if not item: - message = f"Invalid history dataset ({escape(str(item))}) specified." - raise exceptions.ObjectNotFound(message) - elif ( - not trans.app.security_agent.can_access_dataset(current_user_roles, item.dataset) - and item.user == trans.user - ): - message = f"You do not have permission to access the history dataset with id ({str(item.id)})." - raise exceptions.ItemAccessibilityException(message) - else: - # Make sure the user has the LIBRARY_ACCESS permission on the library item. - if not item: - message = f"Invalid library item ({escape(str(item))}) specified." - raise exceptions.ObjectNotFound(message) - elif not ( - is_admin or trans.app.security_agent.can_access_library_item(current_user_roles, item, trans.user) - ): - if isinstance(item, trans.model.Library): - item_type = "data library" - elif isinstance(item, LibraryFolder): - item_type = "folder" - else: - item_type = "(unknown item type)" - message = f"You do not have permission to access the {escape(item_type)} with id ({str(item.id)})." 
- raise exceptions.ItemAccessibilityException(message) - - def _check_add( - self, - trans: ProvidesUserContext, - is_admin: bool, - item: LibraryFolder, - current_user_roles: List[Role], - ) -> None: - # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission. - if not (is_admin or trans.app.security_agent.can_add_library_item(current_user_roles, item)): - message = f"You are not authorized to add an item to ({escape(item.name)})." - raise exceptions.ItemAccessibilityException(message) From 7a574436137a298d64597e791147756e63a9abe3 Mon Sep 17 00:00:00 2001 From: Arash Date: Thu, 26 Sep 2024 11:29:06 +0200 Subject: [PATCH 12/22] Refactor library content routes and controllers --- client/src/api/schema/schema.ts | 36 +++++++++++------ lib/galaxy/actions/library.py | 6 ++- lib/galaxy/schema/library_contents.py | 19 +++++---- .../webapps/galaxy/api/library_contents.py | 4 +- .../galaxy/services/library_contents.py | 40 +++++++------------ 5 files changed, 55 insertions(+), 50 deletions(-) diff --git a/client/src/api/schema/schema.ts b/client/src/api/schema/schema.ts index 2914e057011c..1ba29e901647 100644 --- a/client/src/api/schema/schema.ts +++ b/client/src/api/schema/schema.ts @@ -12920,8 +12920,8 @@ export interface components { */ upload_option: components["schemas"]["UploadOption"] | null; }; - /** LibraryContentsCreateDatasetListResponse */ - LibraryContentsCreateDatasetListResponse: components["schemas"]["LibraryContentsCreateDatasetResponse"][]; + /** LibraryContentsCreateDatasetCollectionResponse */ + LibraryContentsCreateDatasetCollectionResponse: components["schemas"]["LibraryContentsCreateDatasetResponse"][]; /** LibraryContentsCreateDatasetResponse */ LibraryContentsCreateDatasetResponse: { /** Created From Basename */ @@ -12948,8 +12948,13 @@ export interface components { misc_blurb: string | null; /** Misc Info */ misc_info: string | null; - /** Model Class */ - model_class: string; + /** + * Model class + * @description The 
name of the database model class. + * @constant + * @enum {string} + */ + model_class: "LibraryDatasetDatasetAssociation"; /** Name */ name: string; /** Parent Library Id */ @@ -13192,7 +13197,13 @@ export interface components { misc_blurb: string | null; /** Misc Info */ misc_info: string | null; - model_class: components["schemas"]["ModelClass"]; + /** + * Model class + * @description The name of the database model class. + * @constant + * @enum {string} + */ + model_class: "LibraryDataset"; /** Name */ name: string; /** @@ -13231,7 +13242,13 @@ export interface components { item_count: number; /** Library Path */ library_path: string[]; - model_class: components["schemas"]["ModelClass"]; + /** + * Model class + * @description The name of the database model class. + * @constant + * @enum {string} + */ + model_class: "LibraryFolder"; /** Name */ name: string; /** Parent Id */ @@ -13807,11 +13824,6 @@ export interface components { */ time: string; }; - /** - * ModelClass - * @enum {string} - */ - ModelClass: "LibraryDataset" | "LibraryFolder"; /** * ModelStoreFormat * @description Available types of model stores for export. 
@@ -27943,7 +27955,7 @@ export interface operations { "application/json": | components["schemas"]["LibraryContentsCreateFolderListResponse"] | components["schemas"]["LibraryContentsCreateFileListResponse"] - | components["schemas"]["LibraryContentsCreateDatasetListResponse"] + | components["schemas"]["LibraryContentsCreateDatasetCollectionResponse"] | components["schemas"]["LibraryContentsCreateDatasetResponse"]; }; }; diff --git a/lib/galaxy/actions/library.py b/lib/galaxy/actions/library.py index 570f69bb4754..bad496686aba 100644 --- a/lib/galaxy/actions/library.py +++ b/lib/galaxy/actions/library.py @@ -135,9 +135,11 @@ def _upload_dataset(self, trans, folder_id: int, payload): trans, payload, full_dir, import_dir_desc, library_bunch ) elif payload.upload_option == "upload_paths": - uploaded_datasets, _, _ = self._get_path_paste_uploaded_datasets( + uploaded_datasets, response_code, message = self._get_path_paste_uploaded_datasets( trans, payload.model_dump(), library_bunch, 200, None ) + if response_code != 200: + raise exceptions.RequestParameterInvalidException(message) if payload.upload_option == "upload_file" and not uploaded_datasets: raise exceptions.RequestParameterInvalidException("Select a file, enter a URL or enter text") json_file_path = upload_common.create_paramfile(trans, uploaded_datasets) @@ -311,7 +313,7 @@ def _upload_library_dataset(self, trans, payload, library_id): ) if error: raise exceptions.RequestParameterInvalidException(message) - created_outputs_dict = self._upload_dataset(trans, folder_id=folder.id, payload=payload) + created_outputs_dict = self._upload_dataset(trans, folder.id, payload) return self._create_response(trans, payload, created_outputs_dict, library_id) def _create_folder(self, trans, payload, library_id): diff --git a/lib/galaxy/schema/library_contents.py b/lib/galaxy/schema/library_contents.py index da66a61e6cb2..1af206f857a3 100644 --- a/lib/galaxy/schema/library_contents.py +++ b/lib/galaxy/schema/library_contents.py 
@@ -12,12 +12,17 @@ Field, RootModel, ) +from typing_extensions import ( + Annotated, + Literal, +) from galaxy.schema.fields import ( DecodedDatabaseIdField, EncodedDatabaseIdField, EncodedLibraryFolderDatabaseIdField, LibraryFolderDatabaseIdField, + ModelClassField, ) from galaxy.schema.schema import ( Model, @@ -42,11 +47,6 @@ class LinkDataOnly(str, Enum): link_to_files = "link_to_files" -class ModelClass(str, Enum): - LibraryDataset = "LibraryDataset" - LibraryFolder = "LibraryFolder" - - class LibraryContentsCreatePayload(Model): create_type: CreateType = Field( ..., @@ -190,7 +190,6 @@ class LibraryContentsIndexListResponse(RootModel): class LibraryContentsShowResponse(Model): - model_class: ModelClass name: str genome_build: Optional[str] update_time: str @@ -198,6 +197,7 @@ class LibraryContentsShowResponse(Model): class LibraryContentsShowFolderResponse(LibraryContentsShowResponse): + model_class: Annotated[Literal["LibraryFolder"], ModelClassField(Literal["LibraryFolder"])] id: EncodedLibraryFolderDatabaseIdField parent_id: Optional[EncodedLibraryFolderDatabaseIdField] description: str @@ -207,6 +207,7 @@ class LibraryContentsShowFolderResponse(LibraryContentsShowResponse): class LibraryContentsShowDatasetResponse(LibraryContentsShowResponse): + model_class: Annotated[Literal["LibraryDataset"], ModelClassField(Literal["LibraryDataset"])] id: EncodedDatabaseIdField ldda_id: EncodedDatabaseIdField folder_id: EncodedLibraryFolderDatabaseIdField @@ -256,7 +257,9 @@ class LibraryContentsCreateDatasetResponse(Model): # functions that are shared by LibraryFolderContentsService too id: str hda_ldda: str - model_class: str + model_class: Annotated[ + Literal["LibraryDatasetDatasetAssociation"], ModelClassField(Literal["LibraryDatasetDatasetAssociation"]) + ] name: str deleted: bool visible: bool @@ -278,7 +281,7 @@ class LibraryContentsCreateDatasetResponse(Model): model_config = ConfigDict(extra="allow") -class 
LibraryContentsCreateDatasetListResponse(RootModel): +class LibraryContentsCreateDatasetCollectionResponse(RootModel): root: List[LibraryContentsCreateDatasetResponse] diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py index e719520a8508..451cb42bc6f5 100644 --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -17,7 +17,7 @@ from galaxy.schema.fields import DecodedDatabaseIdField from galaxy.schema.library_contents import ( LibraryContentsCollectionCreatePayload, - LibraryContentsCreateDatasetListResponse, + LibraryContentsCreateDatasetCollectionResponse, LibraryContentsCreateDatasetResponse, LibraryContentsCreateFileListResponse, LibraryContentsCreateFolderListResponse, @@ -97,7 +97,7 @@ def create( ) -> Union[ LibraryContentsCreateFolderListResponse, LibraryContentsCreateFileListResponse, - LibraryContentsCreateDatasetListResponse, + LibraryContentsCreateDatasetCollectionResponse, LibraryContentsCreateDatasetResponse, ]: """ diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py index 48246e1b7be4..bf8f79ca4181 100644 --- a/lib/galaxy/webapps/galaxy/services/library_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -29,7 +29,7 @@ from galaxy.schema.fields import DecodedDatabaseIdField from galaxy.schema.library_contents import ( LibraryContentsCollectionCreatePayload, - LibraryContentsCreateDatasetListResponse, + LibraryContentsCreateDatasetCollectionResponse, LibraryContentsCreateDatasetResponse, LibraryContentsCreateFileListResponse, LibraryContentsCreateFolderListResponse, @@ -96,14 +96,13 @@ def index( # appending all other items in the library recursively for content in self._traverse(trans, library.root_folder, current_user_roles): url = self._url_for(trans, library_id, content.id, content.api_type) - response_class: Union[ - 
Type[LibraryContentsIndexFolderResponse], Type[LibraryContentsIndexDatasetResponse] - ] = ( - LibraryContentsIndexFolderResponse - if content.api_type == "folder" - else LibraryContentsIndexDatasetResponse - ) - rval.append(response_class(id=content.id, type=content.api_type, name=content.api_path, url=url)) + response_model: Union[LibraryContentsIndexFolderResponse, LibraryContentsIndexDatasetResponse] + common_args = dict(id=content.id, type=content.api_type, name=content.api_path, url=url) + if content.api_type == "folder": + response_model = LibraryContentsIndexFolderResponse(**common_args) + else: + response_model = LibraryContentsIndexDatasetResponse(**common_args) + rval.append(response_model) return LibraryContentsIndexListResponse(root=rval) def show( @@ -116,20 +115,15 @@ def show( ]: """Returns information about library file or folder.""" class_name, content_id = self._decode_library_content_id(id) - rval: Union[ - LibraryContentsShowFolderResponse, - LibraryContentsShowDatasetResponse, - ] if class_name == "LibraryFolder": content = self.get_library_folder(trans, content_id, check_ownership=False, check_accessible=True) - rval = LibraryContentsShowFolderResponse(**content.to_dict(view="element")) + return LibraryContentsShowFolderResponse(**content.to_dict(view="element")) else: content = self.get_library_dataset(trans, content_id, check_ownership=False, check_accessible=True) rval_dict = content.to_dict(view="element") tag_manager = tags.GalaxyTagHandler(trans.sa_session) rval_dict["tags"] = tag_manager.get_tags_list(content.library_dataset_dataset_association.tags) - rval = LibraryContentsShowDatasetResponse(**rval_dict) - return rval + return LibraryContentsShowDatasetResponse(**rval_dict) def create( self, @@ -141,7 +135,7 @@ def create( ) -> Union[ LibraryContentsCreateFolderListResponse, LibraryContentsCreateFileListResponse, - LibraryContentsCreateDatasetListResponse, + LibraryContentsCreateDatasetCollectionResponse, 
LibraryContentsCreateDatasetResponse, ]: """Create a new library file or folder.""" @@ -163,7 +157,7 @@ def create( rval = self._copy_hdca_to_library_folder( trans, self.hda_manager, payload.from_hdca_id, payload.folder_id, payload.ldda_message ) - return LibraryContentsCreateDatasetListResponse(root=rval) + return LibraryContentsCreateDatasetCollectionResponse(root=rval) # Now create the desired content object, either file or folder. if payload.create_type == "file": @@ -174,7 +168,7 @@ def create( return LibraryContentsCreateFolderListResponse(root=rval) elif payload.create_type == "collection": rval = self._create_collection(trans, payload, parent) - return LibraryContentsCreateDatasetListResponse(root=rval) + return LibraryContentsCreateDatasetCollectionResponse(root=rval) else: raise exceptions.RequestParameterInvalidException("Invalid create_type specified.") @@ -262,13 +256,7 @@ def _decode_library_content_id( f"Malformed library content id ( {str(content_id)} ) specified, unable to decode." 
) - def _url_for( - self, - trans: ProvidesUserContext, - library_id: DecodedDatabaseIdField, - id: int, - type: str, - ) -> Optional[str]: + def _url_for(self, trans: ProvidesUserContext, library_id: DecodedDatabaseIdField, id, type): encoded_id = trans.security.encode_id(id) if type == "folder": encoded_id = f"F{encoded_id}" From 5591a8a95d5fd6387713683c549d81c17aada275 Mon Sep 17 00:00:00 2001 From: Arash Date: Thu, 26 Sep 2024 14:33:44 +0200 Subject: [PATCH 13/22] remove unused imports --- lib/galaxy/webapps/galaxy/services/library_contents.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py index bf8f79ca4181..4bc2ddc521da 100644 --- a/lib/galaxy/webapps/galaxy/services/library_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -1,9 +1,7 @@ import logging from typing import ( List, - Optional, Tuple, - Type, Union, ) From 73c6ddca5a1f929a6481c2d43c179cfbf3c94cf4 Mon Sep 17 00:00:00 2001 From: Arash Date: Thu, 26 Sep 2024 15:25:57 +0200 Subject: [PATCH 14/22] move _create_response function to services --- lib/galaxy/actions/library.py | 22 +++------------ .../galaxy/services/library_contents.py | 27 ++++++++++++++----- 2 files changed, 25 insertions(+), 24 deletions(-) diff --git a/lib/galaxy/actions/library.py b/lib/galaxy/actions/library.py index bad496686aba..a10098da9400 100644 --- a/lib/galaxy/actions/library.py +++ b/lib/galaxy/actions/library.py @@ -25,7 +25,6 @@ safe_relpath, unsafe_walk, ) -from galaxy.webapps.base.controller import UsesExtendedMetadataMixin log = logging.getLogger(__name__) @@ -296,7 +295,7 @@ def _make_library_uploaded_dataset(self, trans, params, name, path, type, librar trans.sa_session.commit() return uploaded_dataset - def _upload_library_dataset(self, trans, payload, library_id): + def _upload_library_dataset(self, trans, payload): is_admin = trans.user_is_admin current_user_roles = 
trans.get_current_user_roles() folder = trans.sa_session.get(LibraryFolder, payload.folder_id) @@ -314,9 +313,9 @@ def _upload_library_dataset(self, trans, payload, library_id): if error: raise exceptions.RequestParameterInvalidException(message) created_outputs_dict = self._upload_dataset(trans, folder.id, payload) - return self._create_response(trans, payload, created_outputs_dict, library_id) + return created_outputs_dict - def _create_folder(self, trans, payload, library_id): + def _create_folder(self, trans, payload): is_admin = trans.user_is_admin current_user_roles = trans.get_current_user_roles() parent_folder = trans.sa_session.get(LibraryFolder, payload.folder_id) @@ -338,20 +337,7 @@ def _create_folder(self, trans, payload, library_id): # New folders default to having the same permissions as their parent folder trans.app.security_agent.copy_library_permissions(trans, parent_folder, new_folder) new_folder_dict = dict(created=new_folder) - return self._create_response(trans, payload, new_folder_dict, library_id) - - def _create_response(self, trans, payload, output, library_id): - rval = [] - for v in output.values(): - if payload.extended_metadata is not None: - # If there is extended metadata, store it, attach it to the dataset, and index it - extended_metadata = UsesExtendedMetadataMixin - extended_metadata.create_extended_metadata(trans, payload.extended_metadata) - if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation): - v = v.library_dataset - url = self._url_for(trans, library_id, v.id, payload.create_type) - rval.append(dict(id=v.id, name=v.name, url=url)) - return rval + return new_folder_dict def _create_collection(self, trans, payload, parent): # Not delegating to library_common, so need to check access to parent folder here. 
diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py index 4bc2ddc521da..01f421d15b0a 100644 --- a/lib/galaxy/webapps/galaxy/services/library_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -43,7 +43,10 @@ LibraryContentsUpdatePayload, ) from galaxy.security.idencoding import IdEncodingHelper -from galaxy.webapps.base.controller import UsesLibraryMixinItems +from galaxy.webapps.base.controller import ( + UsesExtendedMetadataMixin, + UsesLibraryMixinItems, +) from galaxy.webapps.galaxy.services.base import ServiceBase log = logging.getLogger(__name__) @@ -59,7 +62,7 @@ ] -class LibraryContentsService(ServiceBase, LibraryActions, UsesLibraryMixinItems): +class LibraryContentsService(ServiceBase, LibraryActions, UsesLibraryMixinItems, UsesExtendedMetadataMixin): """ Interface/service shared by controllers for interacting with the contents of a library contents. """ @@ -159,11 +162,11 @@ def create( # Now create the desired content object, either file or folder. 
if payload.create_type == "file": - rval = self._upload_library_dataset(trans, payload, library_id) - return LibraryContentsCreateFileListResponse(root=rval) + rval = self._upload_library_dataset(trans, payload) + return LibraryContentsCreateFileListResponse(root=self._create_response(trans, payload, rval, library_id)) elif payload.create_type == "folder": - rval = self._create_folder(trans, payload, library_id) - return LibraryContentsCreateFolderListResponse(root=rval) + rval = self._create_folder(trans, payload) + return LibraryContentsCreateFolderListResponse(root=self._create_response(trans, payload, rval, library_id)) elif payload.create_type == "collection": rval = self._create_collection(trans, payload, parent) return LibraryContentsCreateDatasetCollectionResponse(root=rval) @@ -283,3 +286,15 @@ def _traverse(self, trans: ProvidesUserContext, folder, current_user_roles): ld.api_type = "file" rval.append(ld) return rval + + def _create_response(self, trans, payload, output, library_id): + rval = [] + for v in output.values(): + if payload.extended_metadata is not None: + # If there is extended metadata, store it, attach it to the dataset, and index it + self.create_extended_metadata(trans, payload.extended_metadata) + if isinstance(v, trans.app.model.LibraryDatasetDatasetAssociation): + v = v.library_dataset + url = self._url_for(trans, library_id, v.id, payload.create_type) + rval.append(dict(id=v.id, name=v.name, url=url)) + return rval From f3e3f6d7f36c26733851aeb6b8103f6c3bd93e03 Mon Sep 17 00:00:00 2001 From: Arash Date: Thu, 26 Sep 2024 16:25:48 +0200 Subject: [PATCH 15/22] Fix url_builder --- lib/galaxy/webapps/galaxy/api/library_contents.py | 1 + lib/galaxy/webapps/galaxy/services/library_contents.py | 9 +++++++-- 2 files changed, 8 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py index 451cb42bc6f5..cace7bbb9ba8 100644 --- 
a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -65,6 +65,7 @@ def index( @router.get( "/api/libraries/{library_id}/contents/{id}", + name="library_content", summary="Return a library file or folder.", deprecated=True, ) diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py index 01f421d15b0a..467e4aa260d3 100644 --- a/lib/galaxy/webapps/galaxy/services/library_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -257,11 +257,16 @@ def _decode_library_content_id( f"Malformed library content id ( {str(content_id)} ) specified, unable to decode." ) - def _url_for(self, trans: ProvidesUserContext, library_id: DecodedDatabaseIdField, id, type): + def _url_for(self, trans: ProvidesUserContext, library_id, id, type): + encoded_library_id = trans.security.encode_id(library_id) encoded_id = trans.security.encode_id(id) if type == "folder": encoded_id = f"F{encoded_id}" - return trans.url_builder("library_content", library_id=library_id, id=encoded_id) if trans.url_builder else None + return ( + trans.url_builder("library_content", library_id=encoded_library_id, id=encoded_id) + if trans.url_builder + else None + ) def _traverse(self, trans: ProvidesUserContext, folder, current_user_roles): admin = trans.user_is_admin From eb36c65bb3e0c83bf1b991e10763aa35b87d6b2e Mon Sep 17 00:00:00 2001 From: Arash Date: Thu, 26 Sep 2024 16:35:17 +0200 Subject: [PATCH 16/22] Add `json=true` to library_content api tests --- lib/galaxy_test/api/test_libraries.py | 6 +++--- lib/galaxy_test/base/populators.py | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/galaxy_test/api/test_libraries.py b/lib/galaxy_test/api/test_libraries.py index 6cbfaa21e0fe..1fb51357301b 100644 --- a/lib/galaxy_test/api/test_libraries.py +++ b/lib/galaxy_test/api/test_libraries.py @@ -313,7 +313,7 @@ def 
test_legacy_upload_unknown_datatype(self): "upload_option": "upload_file", "files_0|url_paste": FILE_URL, } - create_response = self._post(f"libraries/{library['id']}/contents", payload) + create_response = self._post(f"libraries/{library['id']}/contents", payload, json=True) self._assert_status_code_is(create_response, 400) assert create_response.json() == "Requested extension 'xxx' unknown, cannot upload dataset." @@ -552,7 +552,7 @@ def test_create_datasets_in_library_from_collection(self): history_id, contents=["xxx", "yyy"], direct_upload=True, wait=True ).json()["outputs"][0]["id"] payload = {"from_hdca_id": hdca_id, "create_type": "file", "folder_id": folder_id} - create_response = self._post(f"libraries/{library['id']}/contents", payload) + create_response = self._post(f"libraries/{library['id']}/contents", payload, json=True) self._assert_status_code_is(create_response, 200) @requires_new_library @@ -588,7 +588,7 @@ def _create_folder(self, library): create_type="folder", name="New Folder", ) - return self._post(f"libraries/{library['id']}/contents", data=create_data) + return self._post(f"libraries/{library['id']}/contents", data=create_data, json=True) def _create_subfolder(self, containing_folder_id): create_data = dict( diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py index 82b458d0068e..1df5b0358c56 100644 --- a/lib/galaxy_test/base/populators.py +++ b/lib/galaxy_test/base/populators.py @@ -2740,7 +2740,7 @@ def raw_library_contents_create(self, library_id, payload, files=None): files = {} url_rel = f"libraries/{library_id}/contents" - return self.galaxy_interactor.post(url_rel, payload, files=files) + return self.galaxy_interactor.post(url_rel, payload, files=files, json=True) def show_ld_raw(self, library_id: str, library_dataset_id: str) -> Response: response = self.galaxy_interactor.get(f"libraries/{library_id}/contents/{library_dataset_id}") @@ -2759,7 +2759,7 @@ def show_ldda(self, ldda_id): def 
new_library_dataset_in_private_library(self, library_name="private_dataset", wait=True): library = self.new_private_library(library_name) payload, files = self.create_dataset_request(library, file_type="txt", contents="create_test") - create_response = self.galaxy_interactor.post(f"libraries/{library['id']}/contents", payload, files=files) + create_response = self.galaxy_interactor.post(f"libraries/{library['id']}/contents", payload, files=files, json=True) api_asserts.assert_status_code_is(create_response, 200) library_datasets = create_response.json() assert len(library_datasets) == 1 From 663d99ceb9794e648b8757b8f4a33cf423b77e05 Mon Sep 17 00:00:00 2001 From: Arash Date: Thu, 26 Sep 2024 17:18:22 +0200 Subject: [PATCH 17/22] update OpenAPI schema --- client/src/api/schema/schema.ts | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/client/src/api/schema/schema.ts b/client/src/api/schema/schema.ts index c1a287e67872..13457ce632ac 100644 --- a/client/src/api/schema/schema.ts +++ b/client/src/api/schema/schema.ts @@ -3066,7 +3066,7 @@ export interface paths { * @deprecated * @description This endpoint is deprecated. Please use GET /api/libraries/datasets/{library_id} instead. */ - get: operations["show_api_libraries__library_id__contents__id__get"]; + get: operations["library_content_api_libraries__library_id__contents__id__get"]; /** * Update a library file or folder. 
* @deprecated @@ -6079,6 +6079,16 @@ export interface components { /** Name */ name?: unknown; }; + /** Body_create_api_libraries__library_id__contents_post */ + Body_create_api_libraries__library_id__contents_post: { + /** File */ + file?: string | null; + /** Payload */ + payload: + | components["schemas"]["LibraryContentsFolderCreatePayload"] + | components["schemas"]["LibraryContentsFileCreatePayload"] + | components["schemas"]["LibraryContentsCollectionCreatePayload"]; + }; /** Body_fetch_form_api_tools_fetch_post */ Body_fetch_form_api_tools_fetch_post: { /** Files */ @@ -13027,6 +13037,8 @@ export interface components { dbkey: string | unknown[] | null; /** sub-dictionary containing any extended metadata to associate with the item */ extended_metadata?: Record | null; + /** the file to upload */ + file?: string | null; /** file type */ file_type?: string | null; /** @@ -27943,10 +27955,7 @@ export interface operations { }; requestBody: { content: { - "application/json": - | components["schemas"]["LibraryContentsFolderCreatePayload"] - | components["schemas"]["LibraryContentsFileCreatePayload"] - | components["schemas"]["LibraryContentsCollectionCreatePayload"]; + "multipart/form-data": components["schemas"]["Body_create_api_libraries__library_id__contents_post"]; }; }; responses: { @@ -27983,7 +27992,7 @@ export interface operations { }; }; }; - show_api_libraries__library_id__contents__id__get: { + library_content_api_libraries__library_id__contents__id__get: { parameters: { query?: never; header?: { From d944728bd53f3934390f6ad8d74628658171ac1f Mon Sep 17 00:00:00 2001 From: Arash Date: Thu, 26 Sep 2024 17:37:51 +0200 Subject: [PATCH 18/22] Update OpenAPI schema --- client/src/api/schema/schema.ts | 17 ++++------------- 1 file changed, 4 insertions(+), 13 deletions(-) diff --git a/client/src/api/schema/schema.ts b/client/src/api/schema/schema.ts index 13457ce632ac..6e5344c53c21 100644 --- a/client/src/api/schema/schema.ts +++ 
b/client/src/api/schema/schema.ts @@ -6079,16 +6079,6 @@ export interface components { /** Name */ name?: unknown; }; - /** Body_create_api_libraries__library_id__contents_post */ - Body_create_api_libraries__library_id__contents_post: { - /** File */ - file?: string | null; - /** Payload */ - payload: - | components["schemas"]["LibraryContentsFolderCreatePayload"] - | components["schemas"]["LibraryContentsFileCreatePayload"] - | components["schemas"]["LibraryContentsCollectionCreatePayload"]; - }; /** Body_fetch_form_api_tools_fetch_post */ Body_fetch_form_api_tools_fetch_post: { /** Files */ @@ -13037,8 +13027,6 @@ export interface components { dbkey: string | unknown[] | null; /** sub-dictionary containing any extended metadata to associate with the item */ extended_metadata?: Record | null; - /** the file to upload */ - file?: string | null; /** file type */ file_type?: string | null; /** @@ -27955,7 +27943,10 @@ export interface operations { }; requestBody: { content: { - "multipart/form-data": components["schemas"]["Body_create_api_libraries__library_id__contents_post"]; + "application/json": + | components["schemas"]["LibraryContentsFolderCreatePayload"] + | components["schemas"]["LibraryContentsFileCreatePayload"] + | components["schemas"]["LibraryContentsCollectionCreatePayload"]; }; }; responses: { From e60e2bca4f54775f24a8ff66c5569031d723fb58 Mon Sep 17 00:00:00 2001 From: Arash Date: Thu, 26 Sep 2024 17:44:23 +0200 Subject: [PATCH 19/22] Reformating populators.py --- lib/galaxy_test/base/populators.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py index 1df5b0358c56..abd8fe1b5a24 100644 --- a/lib/galaxy_test/base/populators.py +++ b/lib/galaxy_test/base/populators.py @@ -2759,7 +2759,9 @@ def show_ldda(self, ldda_id): def new_library_dataset_in_private_library(self, library_name="private_dataset", wait=True): library = self.new_private_library(library_name) 
payload, files = self.create_dataset_request(library, file_type="txt", contents="create_test") - create_response = self.galaxy_interactor.post(f"libraries/{library['id']}/contents", payload, files=files, json=True) + create_response = self.galaxy_interactor.post( + f"libraries/{library['id']}/contents", payload, files=files, json=True + ) api_asserts.assert_status_code_is(create_response, 200) library_datasets = create_response.json() assert len(library_datasets) == 1 From 5f323a567b5a67229e1014afe13fb668ad3c080b Mon Sep 17 00:00:00 2001 From: Arash Date: Thu, 26 Sep 2024 18:46:12 +0200 Subject: [PATCH 20/22] Fix test_libraries erros --- lib/galaxy_test/api/test_libraries.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy_test/api/test_libraries.py b/lib/galaxy_test/api/test_libraries.py index 1fb51357301b..a7f5dfd71d0c 100644 --- a/lib/galaxy_test/api/test_libraries.py +++ b/lib/galaxy_test/api/test_libraries.py @@ -315,7 +315,7 @@ def test_legacy_upload_unknown_datatype(self): } create_response = self._post(f"libraries/{library['id']}/contents", payload, json=True) self._assert_status_code_is(create_response, 400) - assert create_response.json() == "Requested extension 'xxx' unknown, cannot upload dataset." + assert create_response.json()["err_msg"] == "Requested extension 'xxx' unknown, cannot upload dataset." 
@skip_if_github_down @requires_new_library @@ -605,6 +605,6 @@ def _create_dataset_in_folder_in_library(self, library_name, content="1 2 3", wa history_id = self.dataset_populator.new_history() hda_id = self.dataset_populator.new_dataset(history_id, content=content, wait=wait)["id"] payload = {"from_hda_id": hda_id, "create_type": "file", "folder_id": folder_id} - ld = self._post(f"libraries/{folder_id}/contents", payload) + ld = self._post(f"libraries/{folder_id}/contents", payload, json=True) ld.raise_for_status() return ld From 912ba9b0f719df9b2ed7c24ff8f2cc1804c99b38 Mon Sep 17 00:00:00 2001 From: Arash Date: Fri, 27 Sep 2024 11:56:23 +0200 Subject: [PATCH 21/22] add LibraryFolderDatabaseIdField Model for library_id --- lib/galaxy/webapps/galaxy/api/library_contents.py | 15 +++++++++------ .../webapps/galaxy/services/library_contents.py | 9 ++++++--- 2 files changed, 15 insertions(+), 9 deletions(-) diff --git a/lib/galaxy/webapps/galaxy/api/library_contents.py b/lib/galaxy/webapps/galaxy/api/library_contents.py index cace7bbb9ba8..566744599fae 100644 --- a/lib/galaxy/webapps/galaxy/api/library_contents.py +++ b/lib/galaxy/webapps/galaxy/api/library_contents.py @@ -14,7 +14,10 @@ ProvidesHistoryContext, ProvidesUserContext, ) -from galaxy.schema.fields import DecodedDatabaseIdField +from galaxy.schema.fields import ( + DecodedDatabaseIdField, + LibraryFolderDatabaseIdField, +) from galaxy.schema.library_contents import ( LibraryContentsCollectionCreatePayload, LibraryContentsCreateDatasetCollectionResponse, @@ -55,7 +58,7 @@ class FastAPILibraryContents: ) def index( self, - library_id: DecodedDatabaseIdField, + library_id: Union[DecodedDatabaseIdField, LibraryFolderDatabaseIdField], trans: ProvidesUserContext = DependsOnTrans, ) -> LibraryContentsIndexListResponse: """ @@ -71,7 +74,7 @@ def index( ) def show( self, - library_id: DecodedDatabaseIdField, + library_id: Union[DecodedDatabaseIdField, LibraryFolderDatabaseIdField], id: MaybeLibraryFolderOrDatasetID, 
trans: ProvidesUserContext = DependsOnTrans, ) -> Union[ @@ -90,7 +93,7 @@ def show( ) def create( self, - library_id: DecodedDatabaseIdField, + library_id: Union[DecodedDatabaseIdField, LibraryFolderDatabaseIdField], payload: Union[ LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload, LibraryContentsCollectionCreatePayload ], @@ -113,7 +116,7 @@ def create( ) def update( self, - library_id: DecodedDatabaseIdField, + library_id: Union[DecodedDatabaseIdField, LibraryFolderDatabaseIdField], id: DecodedDatabaseIdField, payload, trans: ProvidesUserContext = DependsOnTrans, @@ -130,7 +133,7 @@ def update( ) def delete( self, - library_id: DecodedDatabaseIdField, + library_id: Union[DecodedDatabaseIdField, LibraryFolderDatabaseIdField], id: DecodedDatabaseIdField, payload: Optional[LibraryContentsDeletePayload] = Body(None), trans: ProvidesHistoryContext = DependsOnTrans, diff --git a/lib/galaxy/webapps/galaxy/services/library_contents.py b/lib/galaxy/webapps/galaxy/services/library_contents.py index 467e4aa260d3..cf2ce633d928 100644 --- a/lib/galaxy/webapps/galaxy/services/library_contents.py +++ b/lib/galaxy/webapps/galaxy/services/library_contents.py @@ -24,7 +24,10 @@ tags, ) from galaxy.model.base import transaction -from galaxy.schema.fields import DecodedDatabaseIdField +from galaxy.schema.fields import ( + DecodedDatabaseIdField, + LibraryFolderDatabaseIdField, +) from galaxy.schema.library_contents import ( LibraryContentsCollectionCreatePayload, LibraryContentsCreateDatasetCollectionResponse, @@ -80,7 +83,7 @@ def __init__( def index( self, trans: ProvidesUserContext, - library_id: DecodedDatabaseIdField, + library_id: Union[DecodedDatabaseIdField, LibraryFolderDatabaseIdField], ) -> LibraryContentsIndexListResponse: """Return a list of library files and folders.""" rval: List[Union[LibraryContentsIndexFolderResponse, LibraryContentsIndexDatasetResponse]] = [] @@ -129,7 +132,7 @@ def show( def create( self, trans: ProvidesHistoryContext, - 
library_id: DecodedDatabaseIdField, + library_id: Union[DecodedDatabaseIdField, LibraryFolderDatabaseIdField], payload: Union[ LibraryContentsFolderCreatePayload, LibraryContentsFileCreatePayload, LibraryContentsCollectionCreatePayload ], From 5bf8e41e68df896d2364b86c4eb850dda251fbf9 Mon Sep 17 00:00:00 2001 From: Arash Date: Fri, 27 Sep 2024 12:21:52 +0200 Subject: [PATCH 22/22] Fix schema for uploaded_by being None in show library contents endpoint caused in test_materialize_library_dataset --- client/src/api/schema/schema.ts | 2 +- lib/galaxy/schema/library_contents.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/client/src/api/schema/schema.ts b/client/src/api/schema/schema.ts index 6e5344c53c21..23baa4dc094e 100644 --- a/client/src/api/schema/schema.ts +++ b/client/src/api/schema/schema.ts @@ -13219,7 +13219,7 @@ export interface components { /** Update Time */ update_time: string; /** Uploaded By */ - uploaded_by: string; + uploaded_by: string | null; /** Uuid */ uuid: string; } & { diff --git a/lib/galaxy/schema/library_contents.py b/lib/galaxy/schema/library_contents.py index 1af206f857a3..c692c062da26 100644 --- a/lib/galaxy/schema/library_contents.py +++ b/lib/galaxy/schema/library_contents.py @@ -214,7 +214,7 @@ class LibraryContentsShowDatasetResponse(LibraryContentsShowResponse): state: str file_name: str created_from_basename: Optional[str] - uploaded_by: str + uploaded_by: Optional[str] message: Optional[str] date_uploaded: str file_size: int