From 0cd5daa27cb616258b8edfac97763413349258ad Mon Sep 17 00:00:00 2001
From: mvdbeek
Date: Fri, 20 Sep 2024 07:10:36 +0200
Subject: [PATCH 01/64] Raise MessageException instead of assertions on rerun
 problems

---
 lib/galaxy/tools/actions/__init__.py | 24 ++++++++++++++----------
 1 file changed, 14 insertions(+), 10 deletions(-)

diff --git a/lib/galaxy/tools/actions/__init__.py b/lib/galaxy/tools/actions/__init__.py
index 826b5d54059a..f922e5b858b7 100644
--- a/lib/galaxy/tools/actions/__init__.py
+++ b/lib/galaxy/tools/actions/__init__.py
@@ -18,6 +18,7 @@
 from galaxy import model
 from galaxy.exceptions import (
+    AuthenticationRequired,
     ItemAccessibilityException,
     RequestParameterInvalidException,
 )
@@ -758,19 +759,22 @@ def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current
         try:
             old_job = trans.sa_session.get(Job, rerun_remap_job_id)
             assert old_job is not None, f"({rerun_remap_job_id}/{current_job.id}): Old job id is invalid"
-            assert (
-                old_job.tool_id == current_job.tool_id
-            ), f"({old_job.id}/{current_job.id}): Old tool id ({old_job.tool_id}) does not match rerun tool id ({current_job.tool_id})"
+            if old_job.tool_id != current_job.tool_id:
+                raise RequestParameterInvalidException(
+                    f"Old tool id ({old_job.tool_id}) does not match rerun tool id ({current_job.tool_id})"
+                )
             if trans.user is not None:
-                assert (
-                    old_job.user_id == trans.user.id
-                ), f"({old_job.id}/{current_job.id}): Old user id ({old_job.user_id}) does not match rerun user id ({trans.user.id})"
+                if old_job.user_id != trans.user.id:
+                    raise RequestParameterInvalidException(
+                        "Cannot remap job dependencies for job not created by current user." 
+ ) elif trans.user is None and isinstance(galaxy_session, trans.model.GalaxySession): - assert ( - old_job.session_id == galaxy_session.id - ), f"({old_job.id}/{current_job.id}): Old session id ({old_job.session_id}) does not match rerun session id ({galaxy_session.id})" + if old_job.session_id != galaxy_session.id: + raise RequestParameterInvalidException( + "Cannot remap job dependencies for job not created by current user." + ) else: - raise Exception(f"({old_job.id}/{current_job.id}): Remapping via the API is not (yet) supported") + raise AuthenticationRequired("Authentication required to remap job dependencies") # Start by hiding current job outputs before taking over the old job's (implicit) outputs. current_job.hide_outputs(flush=False) # Duplicate PJAs before remap. From e25f5e092935f185b257cc1fbae5cc6a8c131818 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 18 Sep 2024 15:52:34 -0400 Subject: [PATCH 02/64] Add directory for migrations data fixing scripts --- lib/galaxy/model/migrations/data_fixes/__init__.py | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 lib/galaxy/model/migrations/data_fixes/__init__.py diff --git a/lib/galaxy/model/migrations/data_fixes/__init__.py b/lib/galaxy/model/migrations/data_fixes/__init__.py new file mode 100644 index 000000000000..8b48aef46800 --- /dev/null +++ b/lib/galaxy/model/migrations/data_fixes/__init__.py @@ -0,0 +1,4 @@ +""" +Package contains code for fixing inconsistent data in the database that must be +run together with a migration script. 
+""" From 505cd87eb59adc5b28b3fc8655d3e1b97c870baa Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 18 Sep 2024 17:22:04 -0400 Subject: [PATCH 03/64] Add username deduplication data fixer --- .../migrations/data_fixes/user_table_fixer.py | 47 +++++++++++++++++++ 1 file changed, 47 insertions(+) create mode 100644 lib/galaxy/model/migrations/data_fixes/user_table_fixer.py diff --git a/lib/galaxy/model/migrations/data_fixes/user_table_fixer.py b/lib/galaxy/model/migrations/data_fixes/user_table_fixer.py new file mode 100644 index 000000000000..c293dd2fec4f --- /dev/null +++ b/lib/galaxy/model/migrations/data_fixes/user_table_fixer.py @@ -0,0 +1,47 @@ +from sqlalchemy import ( + func, + Result, + select, + update, +) + +from galaxy.model import User + + +class UsernameDeduplicator: + + def __init__(self, connection): + self.connection = connection + + def run(self): + """ + Deduplicate usernames by generating a unique value for all duplicates, keeping + the username of the most recently created user unchanged. 
+ """ + duplicates = self._get_duplicate_username_data() + prev_username = None + for id, username, _ in duplicates: + if username == prev_username: + new_username = self._generate_next_available_username(username) + stmt = update(User).where(User.id == id).values(username=new_username) + self.connection.execute(stmt) + else: + prev_username = username + + def _get_duplicate_username_data(self) -> Result: + # Duplicate usernames + counts = select(User.username, func.count()).group_by(User.username).having(func.count() > 1) + sq = select(User.username).select_from(counts.cte()) + # User data for records with duplicate usernames (ordering: newest to oldest) + stmt = ( + select(User.id, User.username, User.create_time) + .where(User.username.in_(sq)) + .order_by(User.username, User.create_time.desc()) + ) + return self.connection.execute(stmt) + + def _generate_next_available_username(self, username): + i = 1 + while self.connection.execute(select(User).where(User.username == f"{username}-{i}")).first(): + i += 1 + return f"{username}-{i}" From 90f298bae8e9a11cb27cd65f35d95203da09d1fb Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 3 Jul 2024 12:56:06 -0400 Subject: [PATCH 04/64] Add migration for username column unique constraint --- ...a6168_username_column_unique_constraint.py | 51 +++++++++++++++++++ 1 file changed, 51 insertions(+) create mode 100644 lib/galaxy/model/migrations/alembic/versions_gxy/d619fdfa6168_username_column_unique_constraint.py diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/d619fdfa6168_username_column_unique_constraint.py b/lib/galaxy/model/migrations/alembic/versions_gxy/d619fdfa6168_username_column_unique_constraint.py new file mode 100644 index 000000000000..de09d29097bb --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/d619fdfa6168_username_column_unique_constraint.py @@ -0,0 +1,51 @@ +"""Username column unique constraint + +Revision ID: d619fdfa6168 +Revises: d2d8f51ebb7e +Create Date: 2024-07-02 
13:13:10.325586 +""" + +from alembic import op + +from galaxy.model.database_object_names import build_index_name +from galaxy.model.migrations.data_fixes.user_table_fixer import UsernameDeduplicator +from galaxy.model.migrations.util import ( + create_index, + drop_index, + index_exists, + transaction, +) + +# revision identifiers, used by Alembic. +revision = "d619fdfa6168" +down_revision = "d2d8f51ebb7e" +branch_labels = None +depends_on = None + +table_name = "galaxy_user" +column_name = "username" +index_name = build_index_name(table_name, [column_name]) + + +def upgrade(): + with transaction(): + _fix_duplicate_usernames() + # Existing databases may have an existing index we no longer need + # New databases will not have that index, so we must check. + if index_exists(index_name, table_name, False): + drop_index(index_name, table_name) + # Create a UNIQUE index + create_index(index_name, table_name, [column_name], unique=True) + + +def downgrade(): + with transaction(): + drop_index(index_name, table_name) + # Restore a non-unique index + create_index(index_name, table_name, [column_name]) + + +def _fix_duplicate_usernames(): + """Fix records with duplicate usernames""" + connection = op.get_bind() + UsernameDeduplicator(connection).run() From 0e4559f7da7d931b526cf7211568bb6d40ecacbf Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 18 Sep 2024 18:31:30 -0400 Subject: [PATCH 05/64] Add email deduplication data fixer --- .../migrations/data_fixes/user_table_fixer.py | 73 ++++++++++++++++++- 1 file changed, 69 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/model/migrations/data_fixes/user_table_fixer.py b/lib/galaxy/model/migrations/data_fixes/user_table_fixer.py index c293dd2fec4f..4b9054872cd0 100644 --- a/lib/galaxy/model/migrations/data_fixes/user_table_fixer.py +++ b/lib/galaxy/model/migrations/data_fixes/user_table_fixer.py @@ -1,7 +1,10 @@ +import uuid + from sqlalchemy import ( func, Result, select, + text, update, ) @@ -17,25 +20,25 @@ def 
run(self): """ Deduplicate usernames by generating a unique value for all duplicates, keeping the username of the most recently created user unchanged. + Records updated with the generated value are marked as deleted. """ duplicates = self._get_duplicate_username_data() prev_username = None for id, username, _ in duplicates: if username == prev_username: new_username = self._generate_next_available_username(username) - stmt = update(User).where(User.id == id).values(username=new_username) + stmt = update(User).where(User.id == id).values(username=new_username, deleted=True) self.connection.execute(stmt) else: prev_username = username def _get_duplicate_username_data(self) -> Result: # Duplicate usernames - counts = select(User.username, func.count()).group_by(User.username).having(func.count() > 1) - sq = select(User.username).select_from(counts.cte()) + duplicates_stmt = select(User.username).group_by(User.username).having(func.count() > 1) # User data for records with duplicate usernames (ordering: newest to oldest) stmt = ( select(User.id, User.username, User.create_time) - .where(User.username.in_(sq)) + .where(User.username.in_(duplicates_stmt)) .order_by(User.username, User.create_time.desc()) ) return self.connection.execute(stmt) @@ -45,3 +48,65 @@ def _generate_next_available_username(self, username): while self.connection.execute(select(User).where(User.username == f"{username}-{i}")).first(): i += 1 return f"{username}-{i}" + + +class EmailDeduplicator: + + def __init__(self, connection): + self.connection = connection + + def run(self): + """ + Deduplicate user emails by generating a unique value for all duplicates, keeping + the email of the most recently created user that has one or more history unchanged. + If such a user does not exist, keep the oldest user. + Records updated with the generated value are marked as deleted (we presume them + to be invalid, and the user should not be able to login). 
+ """ + stmt = select(User.email).group_by(User.email).having(func.count() > 1) + duplicate_emails = self.connection.scalars(stmt) + for email in duplicate_emails: + users = self._get_users_with_same_email(email) + user_with_history = self._find_oldest_user_with_history(users) + duplicates = self._get_users_to_deduplicate(users, user_with_history) + self._deduplicate_users(email, duplicates) + + def _get_users_with_same_email(self, email: str): + sql = text( + """ + SELECT u.id, EXISTS(SELECT h.id FROM history h WHERE h.user_id = u.id) + FROM galaxy_user u + WHERE u.email = :email + ORDER BY u.create_time + """ + ) + params = {"email": email} + return self.connection.execute(sql, params).all() + + def _find_oldest_user_with_history(self, users): + for user_id, exists in users: + if exists: + return user_id + return None + + def _get_users_to_deduplicate(self, users, user_with_history): + if user_with_history: + # Preserve the oldest user with a history + return [user_id for user_id, _ in users if user_id != user_with_history] + else: + # Preserve the oldest user + return [user_id for user_id, _ in users[1:]] + + def _deduplicate_users(self, email, to_deduplicate): + for id in to_deduplicate: + new_email = self._generate_replacement_for_duplicate_email(email) + stmt = update(User).where(User.id == id).values(email=new_email, deleted=True) + self.connection.execute(stmt) + + def _generate_replacement_for_duplicate_email(self, email: str) -> str: + """ + Generate a replacement for a duplicate email value. The new value consists of the original + email and a unique suffix. Since the original email is part of the new value, it will be + possible to retrieve the user record based on this value, if needed. 
+ """ + return f"{email}-{uuid.uuid4()}" From bacc3046f3759ba60043fb8c80a9c00143d5b677 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 3 Jul 2024 19:58:32 -0400 Subject: [PATCH 06/64] Add migration for email column unique constraint --- ...95475b58_email_column_unique_constraint.py | 52 +++++++++++++++++++ 1 file changed, 52 insertions(+) create mode 100644 lib/galaxy/model/migrations/alembic/versions_gxy/1cf595475b58_email_column_unique_constraint.py diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/1cf595475b58_email_column_unique_constraint.py b/lib/galaxy/model/migrations/alembic/versions_gxy/1cf595475b58_email_column_unique_constraint.py new file mode 100644 index 000000000000..ba356b1c770d --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/1cf595475b58_email_column_unique_constraint.py @@ -0,0 +1,52 @@ +"""Email column unique constraint + +Revision ID: 1cf595475b58 +Revises: d619fdfa6168 +Create Date: 2024-07-03 19:53:22.443016 +""" + +from alembic import op + +from galaxy.model.database_object_names import build_index_name +from galaxy.model.migrations.data_fixes.user_table_fixer import EmailDeduplicator +from galaxy.model.migrations.util import ( + create_index, + drop_index, + index_exists, + transaction, +) + +# revision identifiers, used by Alembic. +revision = "1cf595475b58" +down_revision = "d619fdfa6168" +branch_labels = None +depends_on = None + + +table_name = "galaxy_user" +column_name = "email" +index_name = build_index_name(table_name, [column_name]) + + +def upgrade(): + with transaction(): + _fix_duplicate_emails() + # Existing databases may have an existing index we no longer need + # New databases will not have that index, so we must check. 
if index_exists(index_name, table_name, False):
+            drop_index(index_name, table_name)
+        # Create a UNIQUE index
+        create_index(index_name, table_name, [column_name], unique=True)
+
+
+def downgrade():
+    with transaction():
+        drop_index(index_name, table_name)
+        # Restore a non-unique index
+        create_index(index_name, table_name, [column_name])
+
+
+def _fix_duplicate_emails():
+    """Fix records with duplicate emails"""
+    connection = op.get_bind()
+    EmailDeduplicator(connection).run()

From 07c0b2de222235925df79b235c2144896ecd915d Mon Sep 17 00:00:00 2001
From: John Davis
Date: Wed, 3 Jul 2024 20:05:20 -0400
Subject: [PATCH 07/64] Update the model w/unique constraints

---
 lib/galaxy/model/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py
index f45921606266..c7ab8a57c06e 100644
--- a/lib/galaxy/model/__init__.py
+++ b/lib/galaxy/model/__init__.py
@@ -780,7 +780,7 @@ class User(Base, Dictifiable, RepresentById):
     id: Mapped[int] = mapped_column(primary_key=True)
     create_time: Mapped[datetime] = mapped_column(default=now, nullable=True)
     update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True)
-    email: Mapped[str] = mapped_column(TrimmedString(255), index=True)
+    email: Mapped[str] = mapped_column(TrimmedString(255), index=True, unique=True)
     username: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True, unique=True)
     password: Mapped[str] = mapped_column(TrimmedString(255))
     last_password_change: Mapped[Optional[datetime]] = mapped_column(default=now)

From 527b3711040205613fa66609e13b1b9a81ab72b0 Mon Sep 17 00:00:00 2001
From: John Davis
Date: Wed, 3 Jul 2024 21:55:42 -0400
Subject: [PATCH 08/64] Fix integration test that violated db integrity
 constraint

---
 test/integration/test_celery_user_rate_limit.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/test/integration/test_celery_user_rate_limit.py 
b/test/integration/test_celery_user_rate_limit.py index b9b832c5cddc..c36dacc880fd 100644 --- a/test/integration/test_celery_user_rate_limit.py +++ b/test/integration/test_celery_user_rate_limit.py @@ -50,7 +50,7 @@ def setup_users(dburl: str, num_users: int = 2): for user_id in user_ids_to_add: conn.execute( text("insert into galaxy_user(id, active, email, password) values (:id, :active, :email, :pw)"), - [{"id": user_id, "active": True, "email": "e", "pw": "p"}], + [{"id": user_id, "active": True, "email": f"e{user_id}", "pw": "p"}], ) From b261dea43a265d1498fa833534ca6a3c985d8bd6 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 19 Sep 2024 15:24:04 -0400 Subject: [PATCH 09/64] Randomize test user email to respect unique constraint --- lib/galaxy/model/unittest_utils/utils.py | 13 +++++++++++++ test/unit/data/model/conftest.py | 17 ++++------------- test/unit/workflows/test_run_parameters.py | 3 ++- 3 files changed, 19 insertions(+), 14 deletions(-) create mode 100644 lib/galaxy/model/unittest_utils/utils.py diff --git a/lib/galaxy/model/unittest_utils/utils.py b/lib/galaxy/model/unittest_utils/utils.py new file mode 100644 index 000000000000..c558b52b51de --- /dev/null +++ b/lib/galaxy/model/unittest_utils/utils.py @@ -0,0 +1,13 @@ +import random +import string + + +def random_str() -> str: + alphabet = string.ascii_lowercase + string.digits + size = random.randint(5, 10) + return "".join(random.choices(alphabet, k=size)) + + +def random_email() -> str: + text = random_str() + return f"{text}@galaxy.testing" diff --git a/test/unit/data/model/conftest.py b/test/unit/data/model/conftest.py index 26ea7d8b7cc2..aff81d80af23 100644 --- a/test/unit/data/model/conftest.py +++ b/test/unit/data/model/conftest.py @@ -1,7 +1,5 @@ import contextlib import os -import random -import string import tempfile import uuid @@ -10,6 +8,10 @@ from sqlalchemy.orm import Session from galaxy import model as m +from galaxy.model.unittest_utils.utils import ( + random_email, + 
random_str, +) @pytest.fixture @@ -449,17 +451,6 @@ def transaction(session): yield -def random_str() -> str: - alphabet = string.ascii_lowercase + string.digits - size = random.randint(5, 10) - return "".join(random.choices(alphabet, k=size)) - - -def random_email() -> str: - text = random_str() - return f"{text}@galaxy.testing" - - def write_to_db(session, model) -> None: with transaction(session): session.add(model) diff --git a/test/unit/workflows/test_run_parameters.py b/test/unit/workflows/test_run_parameters.py index 76eae8955744..5718ac923a40 100644 --- a/test/unit/workflows/test_run_parameters.py +++ b/test/unit/workflows/test_run_parameters.py @@ -1,5 +1,6 @@ from galaxy import model from galaxy.model.base import transaction +from galaxy.model.unittest_utils.utils import random_email from galaxy.workflow.run_request import ( _normalize_inputs, _normalize_step_parameters, @@ -89,7 +90,7 @@ def __new_input(): def __workflow_fixure(trans): - user = model.User(email="testworkflow_params@bx.psu.edu", password="pass") + user = model.User(email=random_email(), password="pass") stored_workflow = model.StoredWorkflow() stored_workflow.user = user workflow = model.Workflow() From d8abcadd262554547d7be130d4b1a27c3cf76af8 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 19 Sep 2024 15:27:15 -0400 Subject: [PATCH 10/64] Fix bug in test_rule_helper --- test/unit/app/jobs/test_rule_helper.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/unit/app/jobs/test_rule_helper.py b/test/unit/app/jobs/test_rule_helper.py index 1d1197a0dc78..f3be1cdd20c0 100644 --- a/test/unit/app/jobs/test_rule_helper.py +++ b/test/unit/app/jobs/test_rule_helper.py @@ -66,7 +66,7 @@ def __setup_fixtures(app): # user3 has no jobs. 
user1 = model.User(email=USER_EMAIL_1, password="pass1") user2 = model.User(email=USER_EMAIL_2, password="pass2") - user3 = model.User(email=USER_EMAIL_2, password="pass2") + user3 = model.User(email=USER_EMAIL_3, password="pass3") app.add(user1, user2, user3) From 11a25ae19928a0b33cb671eb00fe452e93b44ff8 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 19 Sep 2024 15:34:38 -0400 Subject: [PATCH 11/64] Fix test_quota to respect constraint --- test/unit/data/test_quota.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/unit/data/test_quota.py b/test/unit/data/test_quota.py index 5c34ea7cb056..7a3695894445 100644 --- a/test/unit/data/test_quota.py +++ b/test/unit/data/test_quota.py @@ -1,6 +1,7 @@ import uuid from galaxy import model +from galaxy.model.unittest_utils.utils import random_email from galaxy.objectstore import ( QuotaSourceInfo, QuotaSourceMap, @@ -16,7 +17,7 @@ class TestPurgeUsage(BaseModelTestCase): def setUp(self): super().setUp() model = self.model - u = model.User(email="purge_usage@example.com", password="password") + u = model.User(email=random_email(), password="password") u.disk_usage = 25 self.persist(u) From 600523b7733d403a16def0ece22e09c802baba3a Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 19 Sep 2024 16:31:29 -0400 Subject: [PATCH 12/64] Fix test_galaxy_mapping to respect constraint --- test/unit/data/test_galaxy_mapping.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/test/unit/data/test_galaxy_mapping.py b/test/unit/data/test_galaxy_mapping.py index 3e99841db44b..eed3eb01ad67 100644 --- a/test/unit/data/test_galaxy_mapping.py +++ b/test/unit/data/test_galaxy_mapping.py @@ -22,6 +22,7 @@ get_object_session, ) from galaxy.model.security import GalaxyRBACAgent +from galaxy.model.unittest_utils.utils import random_email from galaxy.objectstore import QuotaSourceMap from galaxy.util.unittest import TestCase @@ -78,7 +79,7 @@ def expunge(cls): class 
TestMappings(BaseModelTestCase): def test_dataset_instance_order(self) -> None: - u = model.User(email="mary@example.com", password="password") + u = model.User(email=random_email(), password="password") h1 = model.History(name="History 1", user=u) elements = [] list_pair = model.DatasetCollection(collection_type="list:paired") @@ -213,7 +214,7 @@ def test_nested_collection_attributes(self): assert c4.dataset_elements == [dce1, dce2] def test_history_audit(self): - u = model.User(email="contents@foo.bar.baz", password="password") + u = model.User(email=random_email(), password="password") h1 = model.History(name="HistoryAuditHistory", user=u) h2 = model.History(name="HistoryAuditHistory", user=u) @@ -272,7 +273,7 @@ def test_flush_refreshes(self): # states and flushing in SQL Alchemy is very subtle and it is good to have a executable # reference for how it behaves in the context of Galaxy objects. model = self.model - user = model.User(email="testworkflows@bx.psu.edu", password="password") + user = model.User(email=random_email(), password="password") galaxy_session = model.GalaxySession() galaxy_session_other = model.GalaxySession() galaxy_session.user = user @@ -345,7 +346,7 @@ def test_flush_refreshes(self): assert "id" not in inspect(galaxy_model_object_new).unloaded def test_workflows(self): - user = model.User(email="testworkflows@bx.psu.edu", password="password") + user = model.User(email=random_email(), password="password") child_workflow = _workflow_from_steps(user, []) self.persist(child_workflow) From 4471c6a851dfc95c5a55e18cca440c6ad2cee333 Mon Sep 17 00:00:00 2001 From: John Davis Date: Sat, 21 Sep 2024 00:17:20 -0400 Subject: [PATCH 13/64] Refactor tests to reduce duplication --- test/unit/data/model/__init__.py | 10 ++++++++++ test/unit/data/model/db/__init__.py | 11 ----------- test/unit/data/model/db/conftest.py | 2 +- test/unit/data/model/db/test_misc.py | 6 ++---- 4 files changed, 13 insertions(+), 16 deletions(-) diff --git 
a/test/unit/data/model/__init__.py b/test/unit/data/model/__init__.py index e69de29bb2d1..7d0a1eeb1f8d 100644 --- a/test/unit/data/model/__init__.py +++ b/test/unit/data/model/__init__.py @@ -0,0 +1,10 @@ +PRIVATE_OBJECT_STORE_ID = "my_private_data" + + +class MockObjectStore: + + def is_private(self, object): + if object.object_store_id == PRIVATE_OBJECT_STORE_ID: + return True + else: + return False diff --git a/test/unit/data/model/db/__init__.py b/test/unit/data/model/db/__init__.py index 13a615086ebe..817efe285c17 100644 --- a/test/unit/data/model/db/__init__.py +++ b/test/unit/data/model/db/__init__.py @@ -3,20 +3,9 @@ namedtuple, ) -PRIVATE_OBJECT_STORE_ID = "my_private_data" - MockTransaction = namedtuple("MockTransaction", "user") -class MockObjectStore: - - def is_private(self, object): - if object.object_store_id == PRIVATE_OBJECT_STORE_ID: - return True - else: - return False - - def verify_items(items, expected_items): """ Assert that items and expected_items contain the same elements. diff --git a/test/unit/data/model/db/conftest.py b/test/unit/data/model/db/conftest.py index d36a38e71ace..1693cf27eaac 100644 --- a/test/unit/data/model/db/conftest.py +++ b/test/unit/data/model/db/conftest.py @@ -13,7 +13,7 @@ from galaxy import model as m from galaxy.datatypes.registry import Registry as DatatypesRegistry from galaxy.model.triggers.update_audit_table import install as install_timestamp_triggers -from . import MockObjectStore +from .. import MockObjectStore if TYPE_CHECKING: from sqlalchemy.engine import Engine diff --git a/test/unit/data/model/db/test_misc.py b/test/unit/data/model/db/test_misc.py index b8ef3fe5cf0c..9dadda4c326a 100644 --- a/test/unit/data/model/db/test_misc.py +++ b/test/unit/data/model/db/test_misc.py @@ -5,10 +5,8 @@ from galaxy import model as m from galaxy.model.unittest_utils.db_helpers import get_hdca_by_name -from . import ( - MockTransaction, - PRIVATE_OBJECT_STORE_ID, -) +from . import MockTransaction +from .. 
import PRIVATE_OBJECT_STORE_ID def test_history_update(make_history, make_hda, session): From 87573ee617f34c4f0472f3c827f041505a6eeb60 Mon Sep 17 00:00:00 2001 From: John Davis Date: Sat, 21 Sep 2024 00:18:02 -0400 Subject: [PATCH 14/64] Add tests for migration data fixes --- .../data/model/migration_fixes/__init__.py | 0 .../data/model/migration_fixes/conftest.py | 47 ++++++ .../model/migration_fixes/test_migrations.py | 154 ++++++++++++++++++ 3 files changed, 201 insertions(+) create mode 100644 test/unit/data/model/migration_fixes/__init__.py create mode 100644 test/unit/data/model/migration_fixes/conftest.py create mode 100644 test/unit/data/model/migration_fixes/test_migrations.py diff --git a/test/unit/data/model/migration_fixes/__init__.py b/test/unit/data/model/migration_fixes/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/test/unit/data/model/migration_fixes/conftest.py b/test/unit/data/model/migration_fixes/conftest.py new file mode 100644 index 000000000000..39ba30f5462a --- /dev/null +++ b/test/unit/data/model/migration_fixes/conftest.py @@ -0,0 +1,47 @@ +from typing import ( + Generator, + TYPE_CHECKING, +) + +import pytest +from sqlalchemy import ( + create_engine, + text, +) +from sqlalchemy.orm import Session + +from galaxy import model as m + +if TYPE_CHECKING: + from sqlalchemy.engine import Engine + +from galaxy.model.unittest_utils.model_testing_utils import ( # noqa: F401 - url_factory is a fixture we have to import explicitly + sqlite_url_factory, +) + + +@pytest.fixture() +def db_url(sqlite_url_factory): # noqa: F811 + return sqlite_url_factory() + + +@pytest.fixture() +def engine(db_url: str) -> "Engine": + return create_engine(db_url) + + +@pytest.fixture +def session(engine: "Engine") -> Session: + return Session(engine) + + +@pytest.fixture(autouse=True) +def clear_database(engine: "Engine") -> "Generator": + """Delete all rows from all tables. 
Called after each test.""" + yield + with engine.begin() as conn: + for table in m.mapper_registry.metadata.tables: + # Unless db is sqlite, disable foreign key constraints to delete out of order + if engine.name != "sqlite": + conn.execute(text(f"ALTER TABLE {table} DISABLE TRIGGER ALL")) + conn.execute(text(f"DELETE FROM {table}")) diff --git a/test/unit/data/model/migration_fixes/test_migrations.py b/test/unit/data/model/migration_fixes/test_migrations.py new file mode 100644 index 000000000000..12b0791689aa --- /dev/null +++ b/test/unit/data/model/migration_fixes/test_migrations.py @@ -0,0 +1,154 @@ +import pytest + +from galaxy.model import User +from galaxy.model.unittest_utils.migration_scripts_testing_utils import ( # noqa: F401 - contains fixtures we have to import explicitly + run_command, + tmp_directory, +) + +COMMAND = "manage_db.sh" + + +@pytest.fixture(autouse=True) +def upgrade_database_after_test(): + """Run after each test for proper cleanup""" + yield + run_command(f"{COMMAND} upgrade") + + +def test_1cf595475b58(monkeypatch, session, make_user, make_history): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # STEP 0: Load pre-migration state + run_command(f"{COMMAND} downgrade d619fdfa6168") + + # STEP 1: Load users with duplicate emails + + # Duplicate group 1: users have no histories + # Expect: oldest user preserved + u1_1 = make_user(email="a") + u1_2 = make_user(email="a") + u1_3 = make_user(email="a") + original_email1 = u1_1.email + assert u1_1.email == u1_2.email == u1_3.email + assert u1_1.create_time < u1_2.create_time < u1_3.create_time # u1_1 is oldest user + + # Duplicate group 2: oldest user does NOT have a history, another user has a history + # Expect: user with history preserved + u2_1 = 
make_user(email="b")
+    u2_2 = make_user(email="b")
+    u2_3 = make_user(email="b")
+    original_email2 = u2_1.email
+    assert u2_1.email == u2_2.email == u2_3.email
+    assert u2_1.create_time < u2_2.create_time < u2_3.create_time  # u2_1 is oldest user
+
+    make_history(user=u2_2)  # u2_2 has a history
+
+    # Duplicate group 3: oldest user does NOT have a history, 2 users have a history
+    # Expect: oldest user with history preserved
+    u3_1 = make_user(email="c")
+    u3_2 = make_user(email="c")
+    u3_3 = make_user(email="c")
+    original_email3 = u3_1.email
+    assert u3_1.email == u3_2.email == u3_3.email
+    assert u3_1.create_time < u3_2.create_time < u3_3.create_time  # u3_1 is oldest user
+
+    make_history(user=u3_2)  # u3_2 has a history
+    make_history(user=u3_3)  # u3_3 has a history
+
+    # User w/o duplicate email
+    u4 = make_user()
+    original_email4 = u4.email
+
+    # STEP 2: Run migration
+
+    run_command(f"{COMMAND} upgrade 1cf595475b58")
+    session.expire_all()
+
+    # STEP 3: Verify deduplicated results
+
+    # Duplicate group 1:
+    u1_1_fixed = session.get(User, u1_1.id)
+    u1_2_fixed = session.get(User, u1_2.id)
+    u1_3_fixed = session.get(User, u1_3.id)
+
+    # oldest user's email is preserved; the rest are deduplicated
+    assert u1_1.email == original_email1
+    assert u1_1.email != u1_2.email != u1_3.email
+    # deduplicated users are marked as deleted
+    assert u1_1_fixed.deleted is False
+    assert u1_2_fixed.deleted is True
+    assert u1_3_fixed.deleted is True
+
+    # Duplicate group 2:
+    u2_1_fixed = session.get(User, u2_1.id)
+    u2_2_fixed = session.get(User, u2_2.id)
+    u2_3_fixed = session.get(User, u2_3.id)
+
+    # the email of the user with a history is preserved; the rest are deduplicated
+    assert u2_2.email == original_email2
+    assert u2_1.email != u2_2.email != u2_3.email
+    # deduplicated users are marked as deleted
+    assert u2_1_fixed.deleted is True
+    assert u2_2_fixed.deleted is False
+    assert u2_3_fixed.deleted is True
+
+    # Duplicate group 3:
+    u3_1_fixed = session.get(User, 
u3_1.id) + u3_2_fixed = session.get(User, u3_2.id) + u3_3_fixed = session.get(User, u3_3.id) + + # the email of the oldest user with a history is preserved; the rest are deduplicated + assert u3_2.email == original_email3 + assert u3_1.email != u3_2.email != u3_3.email + # deduplicated users are marked as deleted + assert u3_1_fixed.deleted is True + assert u3_2_fixed.deleted is False + assert u3_3_fixed.deleted is True + + # User w/o duplicate email + u4_no_change = session.get(User, u4.id) + assert u4_no_change.email == original_email4 + assert u4_no_change.deleted is False + + +def test_d619fdfa6168(monkeypatch, session, make_user): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # STEP 0: Load pre-migration state + run_command(f"{COMMAND} downgrade d2d8f51ebb7e") + + # STEP 1: Load users with duplicate usernames + + # Expect: oldest user preserved + u1 = make_user(username="a") + u2 = make_user(username="a") + u3 = make_user(username="a") + original_username = u3.username + assert u1.username == u2.username == u3.username + assert u1.create_time < u2.create_time < u3.create_time # u3 is newest user + + # STEP 2: Run migration + run_command(f"{COMMAND} upgrade d619fdfa6168") + session.expire_all() + + # STEP 3: Verify deduplicated results + u1_fixed = session.get(User, u1.id) + u2_fixed = session.get(User, u2.id) + u3_fixed = session.get(User, u3.id) + + # oldest user's username is preserved; the rest are deduplicated + assert u3_fixed.username == original_username + assert u1.username != u2.username != u3.username + # deduplicated users are marked as deleted + assert u1_fixed.deleted is True + assert u2_fixed.deleted is True + assert u3_fixed.deleted is False From 69a3cda018d4a7f4e7fbe8604949185753808b0f Mon Sep 17 00:00:00 2001 From: 
mvdbeek Date: Sun, 22 Sep 2024 10:35:13 +0200 Subject: [PATCH 15/64] Fix history import when parent_hda not serialized and make sure we include the parent_hda when exporting histories. Fixes https://sentry.galaxyproject.org/share/issue/20ee27f675ef476588bbe8ff273eaee7/: ``` KeyError: '31e7840b5aedca43523bfcc8e2e9e56f' File "galaxy/jobs/__init__.py", line 2039, in finish task_wrapper = self.tool.exec_after_process( File "galaxy/tools/__init__.py", line 3100, in exec_after_process JobImportHistoryArchiveWrapper(self.app, job.id).cleanup_after_job() File "galaxy/tools/imp_exp/__init__.py", line 81, in cleanup_after_job model_store.perform_import(new_history, job=job, new_history=True) File "galaxy/model/store/__init__.py", line 416, in perform_import self._import_implicit_dataset_conversions(object_import_tracker) File "galaxy/model/store/__init__.py", line 1280, in _import_implicit_dataset_conversions idc.parent_hda = object_import_tracker.hdas_by_key[idc_attrs["parent_hda"]] ``` --- lib/galaxy/model/__init__.py | 1 + lib/galaxy/model/store/__init__.py | 30 +++++++++--- test/unit/data/model/test_model_store.py | 62 +++++++++++++++++++++--- 3 files changed, 78 insertions(+), 15 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 8c04e2ecefb5..6354b3654f0e 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -4546,6 +4546,7 @@ class DatasetInstance(RepresentById, UsesCreateAndUpdateTime, _HasTable): copied_from_history_dataset_association: Optional["HistoryDatasetAssociation"] copied_from_library_dataset_dataset_association: Optional["LibraryDatasetDatasetAssociation"] implicitly_converted_datasets: List["ImplicitlyConvertedDatasetAssociation"] + implicitly_converted_parent_datasets: List["ImplicitlyConvertedDatasetAssociation"] validated_states = DatasetValidatedState diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py index a27e8c57b10c..a3bf6a6ec896 100644 --- 
a/lib/galaxy/model/store/__init__.py +++ b/lib/galaxy/model/store/__init__.py @@ -1023,7 +1023,7 @@ def _reassign_hids(self, object_import_tracker: "ObjectImportTracker", history: if object_import_tracker.copy_hid_for: # in an if to avoid flush if unneeded - for from_dataset, to_dataset in object_import_tracker.copy_hid_for.items(): + for from_dataset, to_dataset in object_import_tracker.copy_hid_for: to_dataset.hid = from_dataset.hid self._session_add(to_dataset) self._flush() @@ -1276,18 +1276,24 @@ def _import_implicit_dataset_conversions(self, object_import_tracker: "ObjectImp metadata_safe = False idc = model.ImplicitlyConvertedDatasetAssociation(metadata_safe=metadata_safe, for_import=True) idc.type = idc_attrs["file_type"] - if idc_attrs.get("parent_hda"): - idc.parent_hda = object_import_tracker.hdas_by_key[idc_attrs["parent_hda"]] + # We may not have exported the parent, so only set the parent_hda attribute if we did. + if (parent_hda_id := idc_attrs.get("parent_hda")) and ( + parent_hda := object_import_tracker.hdas_by_key.get(parent_hda_id) + ): + # exports created prior to 24.2 may not have a parent if the parent had been purged + idc.parent_hda = parent_hda if idc_attrs.get("hda"): idc.dataset = object_import_tracker.hdas_by_key[idc_attrs["hda"]] - # we have a the dataset and the parent, lets ensure they land up with the same HID - if idc.dataset and idc.parent_hda and idc.parent_hda in object_import_tracker.requires_hid: + # we have the dataset and the parent, lets ensure they land up with the same HID + if idc.dataset and idc.parent_hda: try: object_import_tracker.requires_hid.remove(idc.dataset) except ValueError: pass # we wanted to remove it anyway. 
- object_import_tracker.copy_hid_for[idc.parent_hda] = idc.dataset + # A HDA can be the parent of multiple implicitly converted dataset, + # that's thy we use [(source, target)] here + object_import_tracker.copy_hid_for.append((idc.parent_hda, idc.dataset)) self._session_add(idc) @@ -1370,7 +1376,7 @@ class ObjectImportTracker: hdca_copied_from_sinks: Dict[ObjectKeyType, ObjectKeyType] jobs_by_key: Dict[ObjectKeyType, model.Job] requires_hid: List["HistoryItem"] - copy_hid_for: Dict["HistoryItem", "HistoryItem"] + copy_hid_for: List[Tuple["HistoryItem", "HistoryItem"]] def __init__(self) -> None: self.libraries_by_key = {} @@ -1388,7 +1394,7 @@ def __init__(self) -> None: self.implicit_collection_jobs_by_key: Dict[str, ImplicitCollectionJobs] = {} self.workflows_by_key: Dict[str, model.Workflow] = {} self.requires_hid = [] - self.copy_hid_for = {} + self.copy_hid_for = [] self.new_history: Optional[model.History] = None @@ -2301,6 +2307,14 @@ def add_implicit_conversion_dataset( include_files: bool, conversion: model.ImplicitlyConvertedDatasetAssociation, ) -> None: + parent_hda = conversion.parent_hda + if parent_hda and parent_hda not in self.included_datasets: + # We should always include the parent of an implicit conversion + # to avoid holes in the provenance. 
+ self.included_datasets[parent_hda] = (parent_hda, include_files) + grand_parent_association = parent_hda.implicitly_converted_parent_datasets + if grand_parent_association and (grand_parent_hda := grand_parent_association[0].parent_hda): + self.add_implicit_conversion_dataset(grand_parent_hda, include_files, grand_parent_association[0]) self.included_datasets[dataset] = (dataset, include_files) self.dataset_implicit_conversions[dataset] = conversion diff --git a/test/unit/data/model/test_model_store.py b/test/unit/data/model/test_model_store.py index a30410dd0207..5ea51452e9f9 100644 --- a/test/unit/data/model/test_model_store.py +++ b/test/unit/data/model/test_model_store.py @@ -122,32 +122,80 @@ def test_import_export_history_allow_discarded_data(): assert imported_job.output_datasets[0].dataset == datasets[1] -def test_import_export_history_with_implicit_conversion(): +def setup_history_with_implicit_conversion(): app = _mock_app() u, h, d1, d2, j = _setup_simple_cat_job(app) + intermediate_ext = "bam" + intermediate_implicit_hda = model.HistoryDatasetAssociation( + extension=intermediate_ext, create_dataset=True, flush=False, history=h + ) + intermediate_implicit_hda.hid = d2.hid convert_ext = "fasta" implicit_hda = model.HistoryDatasetAssociation(extension=convert_ext, create_dataset=True, flush=False, history=h) implicit_hda.hid = d2.hid # this adds and flushes the result... 
- d2.attach_implicitly_converted_dataset(app.model.context, implicit_hda, convert_ext) + intermediate_implicit_hda.attach_implicitly_converted_dataset(app.model.context, implicit_hda, convert_ext) + d2.attach_implicitly_converted_dataset(app.model.context, intermediate_implicit_hda, intermediate_ext) + + app.object_store.update_from_file(intermediate_implicit_hda.dataset, file_name=TEST_PATH_2_CONVERTED, create=True) app.object_store.update_from_file(implicit_hda.dataset, file_name=TEST_PATH_2_CONVERTED, create=True) - assert len(h.active_datasets) == 3 + assert len(h.active_datasets) == 4 + return app, h, implicit_hda + + +def test_import_export_history_with_implicit_conversion(): + app, h, _ = setup_history_with_implicit_conversion() imported_history = _import_export_history(app, h, export_files="copy", include_hidden=True) - assert len(imported_history.active_datasets) == 3 + assert len(imported_history.active_datasets) == 4 recovered_hda_2 = imported_history.active_datasets[1] assert recovered_hda_2.implicitly_converted_datasets - imported_conversion = recovered_hda_2.implicitly_converted_datasets[0] - assert imported_conversion.type == "fasta" - assert imported_conversion.dataset == imported_history.active_datasets[2] + intermediate_conversion = recovered_hda_2.implicitly_converted_datasets[0] + assert intermediate_conversion.type == "bam" + intermediate_hda = intermediate_conversion.dataset + assert intermediate_hda.implicitly_converted_datasets + final_conversion = intermediate_hda.implicitly_converted_datasets[0] + + assert final_conversion.type == "fasta" + assert final_conversion.dataset == imported_history.active_datasets[-1] # implicit conversions have the same HID... ensure this property is recovered... 
assert imported_history.active_datasets[2].hid == imported_history.active_datasets[1].hid +def test_import_export_history_with_implicit_conversion_parents_purged(): + app, h, implicit_hda = setup_history_with_implicit_conversion() + # Purge parents + parent = implicit_hda.implicitly_converted_parent_datasets[0].parent_hda + parent.dataset.purged = True + grandparent = parent.implicitly_converted_parent_datasets[0].parent_hda + grandparent.dataset.purged = True + app.model.context.commit() + imported_history = _import_export_history(app, h, export_files="copy", include_hidden=True) + + assert len(imported_history.active_datasets) == 2 + assert len(imported_history.datasets) == 4 + imported_implicit_hda = imported_history.active_datasets[1] + assert imported_implicit_hda.extension == "fasta" + + # implicit conversions have the same HID... ensure this property is recovered... + assert imported_implicit_hda.hid == implicit_hda.hid + assert imported_implicit_hda.implicitly_converted_parent_datasets + intermediate_implicit_conversion = imported_implicit_hda.implicitly_converted_parent_datasets[0] + intermediate_hda = intermediate_implicit_conversion.parent_hda + assert intermediate_hda.hid == implicit_hda.hid + assert intermediate_hda.extension == "bam" + assert intermediate_hda.implicitly_converted_datasets + assert intermediate_hda.implicitly_converted_parent_datasets + first_implicit_conversion = intermediate_hda.implicitly_converted_parent_datasets[0] + source_hda = first_implicit_conversion.parent_hda + assert source_hda.hid == implicit_hda.hid + assert source_hda.extension == "txt" + + def test_import_export_history_with_implicit_conversion_and_extra_files(): app = _mock_app() From 37a672607733fd5dd748c1d833320d38a6a9582c Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Mon, 23 Sep 2024 15:41:35 +0200 Subject: [PATCH 16/64] Rely on get_column_list as only source of truth --- lib/galaxy/tools/parameters/basic.py | 31 +++++++++++++--------------- 1 file changed, 
14 insertions(+), 17 deletions(-) diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py index 9669b62771e9..31eb54953175 100644 --- a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -1507,6 +1507,9 @@ def get_options(self, trans, other_values): Show column labels rather than c1..cn if use_header_names=True """ options: List[Tuple[str, Union[str, Tuple[str, str]], bool]] = [] + column_list = self.get_column_list(trans, other_values) + if not column_list: + return options # if available use column_names metadata for option names # otherwise read first row - assume is a header with tab separated names if self.usecolnames: @@ -1516,29 +1519,23 @@ def get_options(self, trans, other_values): and hasattr(dataset.metadata, "column_names") and dataset.metadata.element_is_set("column_names") ): - column_list = [ - ("%d" % (i + 1), "c%d: %s" % (i + 1, x)) for i, x in enumerate(dataset.metadata.column_names) - ] + try: + options = [("c%s: %s" % (c, dataset.metadata.column_names[int(c) - 1]), c, False) for c in column_list] + except IndexError: + # ignore and rely on fallback + pass else: try: with open(dataset.get_file_name()) as f: head = f.readline() cnames = head.rstrip("\n\r ").split("\t") - column_list = [("%d" % (i + 1), "c%d: %s" % (i + 1, x)) for i, x in enumerate(cnames)] + options = [("c%s: %s" % (c, cnames[int(c) - 1]), c, False) for i in column_list] except Exception: - column_list = self.get_column_list(trans, other_values) - if self.numerical: # If numerical was requested, filter columns based on metadata - if hasattr(dataset, "metadata") and getattr(dataset.metadata, "column_types", None) is not None: - if len(dataset.metadata.column_types) >= len(column_list): - numerics = [i for i, x in enumerate(dataset.metadata.column_types) if x in ["int", "float"]] - column_list = [column_list[i] for i in numerics] - else: - column_list = self.get_column_list(trans, other_values) - for col in 
column_list: - if isinstance(col, tuple) and len(col) == 2: - options.append((col[1], col[0], False)) - else: - options.append((f"Column: {col}", col, False)) + # ignore and rely on fallback + pass + if not options: + # fallback if no options list could be built so far + options = [(f"Column: {col}", col, False) for col in column_list] return options def get_initial_value(self, trans, other_values): From b70a7637b93140b15ec43e9e1ede998e59c9ffec Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Mon, 23 Sep 2024 16:17:53 +0200 Subject: [PATCH 17/64] Fix up exception handling, add test --- lib/galaxy/model/__init__.py | 1 + lib/galaxy/tools/actions/__init__.py | 68 +++++++++++++++++----------- lib/galaxy_test/api/test_jobs.py | 26 +++++++++++ 3 files changed, 68 insertions(+), 27 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 34fd70c56029..85c0c9f4dfc9 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -4544,6 +4544,7 @@ class DatasetInstance(RepresentById, UsesCreateAndUpdateTime, _HasTable): creating_job_associations: List[Union[JobToOutputDatasetCollectionAssociation, JobToOutputDatasetAssociation]] copied_from_history_dataset_association: Optional["HistoryDatasetAssociation"] copied_from_library_dataset_dataset_association: Optional["LibraryDatasetDatasetAssociation"] + dependent_jobs: List[JobToInputLibraryDatasetAssociation] implicitly_converted_datasets: List["ImplicitlyConvertedDatasetAssociation"] validated_states = DatasetValidatedState diff --git a/lib/galaxy/tools/actions/__init__.py b/lib/galaxy/tools/actions/__init__.py index f922e5b858b7..31c7eb76526c 100644 --- a/lib/galaxy/tools/actions/__init__.py +++ b/lib/galaxy/tools/actions/__init__.py @@ -9,6 +9,7 @@ cast, Dict, List, + Optional, Set, TYPE_CHECKING, Union, @@ -700,14 +701,6 @@ def handle_output(name, output, hidden=None): # Remap any outputs if this is a rerun and the user chose to continue dependent jobs # This functionality 
requires tracking jobs in the database. if app.config.track_jobs_in_database and rerun_remap_job_id is not None: - # Need to flush here so that referencing outputs by id works - session = trans.sa_session() - try: - session.expire_on_commit = False - with transaction(session): - session.commit() - finally: - session.expire_on_commit = True self._remap_job_on_rerun( trans=trans, galaxy_session=galaxy_session, @@ -748,7 +741,14 @@ def handle_output(name, output, hidden=None): return job, out_data, history - def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current_job, out_data): + def _remap_job_on_rerun( + self, + trans: ProvidesHistoryContext, + galaxy_session: Optional[model.GalaxySession], + rerun_remap_job_id: int, + current_job: Job, + out_data, + ): """ Re-connect dependent datasets for a job that is being rerun (because it failed initially). @@ -756,25 +756,39 @@ def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current To be able to resume jobs that depend on this jobs output datasets we change the dependent's job input datasets to be those of the job that is being rerun. """ - try: - old_job = trans.sa_session.get(Job, rerun_remap_job_id) - assert old_job is not None, f"({rerun_remap_job_id}/{current_job.id}): Old job id is invalid" - if old_job.old_id != current_job.old_id: + old_job = trans.sa_session.get(Job, rerun_remap_job_id) + if not old_job: + # I don't think that can really happen + raise RequestParameterInvalidException("rerun_remap_job_id parameter is invalid") + old_tool = trans.app.toolbox.get_tool(old_job.tool_id, exact=False) + new_tool = trans.app.toolbox.get_tool(current_job.tool_id, exact=False) + if old_tool and new_tool and old_tool.old_id != new_tool.old_id: + # If we currently only have the old or new tool installed we'll find the other tool anyway with `exact=False`. + # If we don't have the tool at all we'll fail anyway, no need to worry here. 
+ raise RequestParameterInvalidException( + f"Old tool id ({old_job.tool_id}) does not match rerun tool id ({current_job.tool_id})" + ) + if trans.user is not None: + if old_job.user_id != trans.user.id: raise RequestParameterInvalidException( - f"Old tool id ({old_job.tool_id}) does not match rerun tool id ({current_job.tool_id})" + "Cannot remap job dependencies for job not created by current user." ) - if trans.user is not None: - if old_job.user_id != trans.user.id: - raise RequestParameterInvalidException( - "Cannot remap job dependencies for job not created by current user." - ) - elif trans.user is None and isinstance(galaxy_session, trans.model.GalaxySession): - if old_job.session_id != galaxy_session.id: - raise RequestParameterInvalidException( - "Cannot remap job dependencies for job not created by current user." - ) - else: - raise AuthenticationRequired("Authentication required to remap job dependencies") + elif trans.user is None and galaxy_session: + if old_job.session_id != galaxy_session.id: + raise RequestParameterInvalidException( + "Cannot remap job dependencies for job not created by current user." + ) + else: + raise AuthenticationRequired("Authentication required to remap job dependencies") + # Need to flush here so that referencing outputs by id works + session = trans.sa_session() + try: + session.expire_on_commit = False + with transaction(session): + session.commit() + finally: + session.expire_on_commit = True + try: # Start by hiding current job outputs before taking over the old job's (implicit) outputs. current_job.hide_outputs(flush=False) # Duplicate PJAs before remap. 
@@ -796,7 +810,7 @@ def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current for jtod in old_job.output_datasets: for job_to_remap, jtid in [(jtid.job, jtid) for jtid in jtod.dataset.dependent_jobs]: if (trans.user is not None and job_to_remap.user_id == trans.user.id) or ( - trans.user is None and job_to_remap.session_id == galaxy_session.id + trans.user is None and galaxy_session and job_to_remap.session_id == galaxy_session.id ): self.__remap_parameters(job_to_remap, jtid, jtod, out_data) trans.sa_session.add(job_to_remap) diff --git a/lib/galaxy_test/api/test_jobs.py b/lib/galaxy_test/api/test_jobs.py index 82f9ecbab416..c904e808f70f 100644 --- a/lib/galaxy_test/api/test_jobs.py +++ b/lib/galaxy_test/api/test_jobs.py @@ -464,6 +464,32 @@ def test_no_hide_on_rerun(self): assert hdca["visible"] assert isoparse(hdca["update_time"]) > (isoparse(first_update_time)) + def test_rerun_exception_handling(self): + with self.dataset_populator.test_history() as history_id: + other_run_response = self.dataset_populator.run_tool( + tool_id="job_properties", + inputs={}, + history_id=history_id, + ) + unrelated_job_id = other_run_response["jobs"][0]["id"] + run_response = self._run_map_over_error(history_id) + job_id = run_response["jobs"][0]["id"] + self.dataset_populator.wait_for_job(job_id) + failed_hdca = self.dataset_populator.get_history_collection_details( + history_id=history_id, + content_id=run_response["implicit_collections"][0]["id"], + assert_ok=False, + ) + assert failed_hdca["visible"] + rerun_params = self._get(f"jobs/{job_id}/build_for_rerun").json() + inputs = rerun_params["state_inputs"] + inputs["rerun_remap_job_id"] = unrelated_job_id + before_rerun_items = self.dataset_populator.get_history_contents(history_id) + rerun_response = self._run_detect_errors(history_id=history_id, inputs=inputs) + assert "does not match rerun tool id" in rerun_response["err_msg"] + after_rerun_items = 
self.dataset_populator.get_history_contents(history_id) + assert len(before_rerun_items) == len(after_rerun_items) + @skip_without_tool("empty_output") def test_common_problems(self): with self.dataset_populator.test_history() as history_id: From d4705870283176e7d2fd90f69ae49128217ecb6e Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Mon, 23 Sep 2024 17:19:45 +0200 Subject: [PATCH 18/64] Fix data_column ref to nested collection --- lib/galaxy/tools/parameters/basic.py | 12 +++++++---- lib/galaxy_test/api/test_tools.py | 30 ++++++++++++++++++++++++++++ lib/galaxy_test/base/populators.py | 4 ++-- 3 files changed, 40 insertions(+), 6 deletions(-) diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py index 279adde7a92e..9eef495cb153 100644 --- a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -1457,8 +1457,8 @@ def get_column_list(self, trans, other_values): # Use representative dataset if a dataset collection is parsed if isinstance(dataset, HistoryDatasetCollectionAssociation): dataset = dataset.to_hda_representative() - if isinstance(dataset, DatasetCollectionElement) and dataset.hda: - dataset = dataset.hda + if isinstance(dataset, DatasetCollectionElement): + dataset = dataset.first_dataset_instance() if isinstance(dataset, HistoryDatasetAssociation) and self.ref_input and self.ref_input.formats: direct_match, target_ext, converted_dataset = dataset.find_conversion_destination( self.ref_input.formats @@ -1553,9 +1553,13 @@ def is_file_empty(self, trans, other_values): for dataset in util.listify(other_values.get(self.data_ref)): # Use representative dataset if a dataset collection is parsed if isinstance(dataset, HistoryDatasetCollectionAssociation): - dataset = dataset.to_hda_representative() + if dataset.populated: + dataset = dataset.to_hda_representative() + else: + # That's fine, we'll check again on execution + return True if isinstance(dataset, DatasetCollectionElement): - dataset = 
dataset.hda + dataset = dataset.first_dataset_instance() if isinstance(dataset, DatasetInstance): return not dataset.has_data() if is_runtime_value(dataset): diff --git a/lib/galaxy_test/api/test_tools.py b/lib/galaxy_test/api/test_tools.py index 78a4b3e05c92..56fea13f8285 100644 --- a/lib/galaxy_test/api/test_tools.py +++ b/lib/galaxy_test/api/test_tools.py @@ -2481,6 +2481,36 @@ def test_implicit_reduce_with_mapping(self): ) assert output_hdca["collection_type"] == "list" + @skip_without_tool("column_multi_param") + def test_multi_param_column_nested_list(self): + with self.dataset_populator.test_history() as history_id: + hdca = self.dataset_collection_populator.create_list_of_list_in_history( + history_id, ext="tabular", wait=True + ).json() + inputs = { + "input1": {"src": "hdca", "id": hdca["id"]}, + # FIXME: integers don't work here + "col": "1", + } + response = self._run("column_multi_param", history_id, inputs, assert_ok=True) + self.dataset_populator.wait_for_job(job_id=response["jobs"][0]["id"], assert_ok=True) + + @skip_without_tool("column_multi_param") + def test_multi_param_column_nested_list_fails_on_invalid_column(self): + with self.dataset_populator.test_history() as history_id: + hdca = self.dataset_collection_populator.create_list_of_list_in_history( + history_id, ext="tabular", wait=True + ).json() + inputs = { + "input1": {"src": "hdca", "id": hdca["id"]}, + "col": "10", + } + try: + self._run("column_multi_param", history_id, inputs, assert_ok=True) + except AssertionError as e: + exception_raised = e + assert exception_raised, "Expected invalid column selection to fail job" + @skip_without_tool("column_multi_param") def test_implicit_conversion_and_reduce(self): with self.dataset_populator.test_history() as history_id: diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py index 4d1c46ca5309..014880ebd1e6 100644 --- a/lib/galaxy_test/base/populators.py +++ b/lib/galaxy_test/base/populators.py @@ -2898,7 +2898,7 
@@ def __create_payload(self, history_id: str, *args, **kwds): else: return self.__create_payload_collection(history_id, *args, **kwds) - def __create_payload_fetch(self, history_id: str, collection_type, **kwds): + def __create_payload_fetch(self, history_id: str, collection_type, ext="txt", **kwds): contents = None if "contents" in kwds: contents = kwds["contents"] @@ -2920,7 +2920,7 @@ def __create_payload_fetch(self, history_id: str, collection_type, **kwds): elements.append(contents_level) continue - element = {"src": "pasted", "ext": "txt"} + element = {"src": "pasted", "ext": ext} # Else older style list of contents or element ID and contents, # convert to fetch API. if isinstance(contents_level, tuple): From af3af5a158a3a95229234eefdb8a9a2296f4778e Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Mon, 23 Sep 2024 19:58:04 +0200 Subject: [PATCH 19/64] Fix loading very old worflows with data inputs Fixes https://sentry.galaxyproject.org/share/issue/d1cad9f916e8466f844ea3b49f52afcb/: ``` AttributeError: 'NoneType' object has no attribute 'get' File "galaxy/web/framework/middleware/error.py", line 167, in __call__ app_iter = self.application(environ, sr_checker) File "galaxy/web/framework/middleware/statsd.py", line 29, in __call__ req = self.application(environ, start_response) File "/cvmfs/main.galaxyproject.org/venv/lib/python3.11/site-packages/paste/httpexceptions.py", line 635, in __call__ return self.application(environ, start_response) File "galaxy/web/framework/base.py", line 176, in __call__ return self.handle_request(request_id, path_info, environ, start_response) File "galaxy/web/framework/base.py", line 271, in handle_request body = method(trans, **kwargs) File "galaxy/web/framework/decorators.py", line 74, in call_and_format rval = func(self, trans, *args, **kwargs) File "galaxy/webapps/galaxy/controllers/workflow.py", line 263, in load_workflow return workflow_contents_manager.workflow_to_dict(trans, stored, style="editor", version=version) File 
"galaxy/managers/workflows.py", line 926, in workflow_to_dict wf_dict = self._workflow_to_dict_editor(trans, stored, workflow) File "galaxy/managers/workflows.py", line 1239, in _workflow_to_dict_editor self.__set_default_label(step, module, step.tool_inputs) File "galaxy/managers/workflows.py", line 1958, in __set_default_label default_label = new_state.get("name") ``` and https://sentry.galaxyproject.org/share/issue/92e8f30a288341e7b219e2c7c91390da/: ``` AttributeError: 'NoneType' object has no attribute 'get' File "starlette/applications.py", line 123, in __call__ await self.middleware_stack(scope, receive, send) File "starlette/middleware/errors.py", line 186, in __call__ raise exc File "starlette/middleware/errors.py", line 164, in __call__ await self.app(scope, receive, _send) File "starlette_context/middleware/raw_middleware.py", line 92, in __call__ await self.app(scope, receive, send_wrapper) File "starlette/middleware/base.py", line 189, in __call__ with collapse_excgroups(): File "contextlib.py", line 155, in __exit__ self.gen.throw(typ, value, traceback) File "starlette/_utils.py", line 93, in collapse_excgroups raise exc File "starlette/middleware/base.py", line 191, in __call__ response = await self.dispatch_func(request, call_next) File "galaxy/webapps/galaxy/fast_app.py", line 109, in add_x_frame_options response = await call_next(request) File "starlette/middleware/base.py", line 165, in call_next raise app_exc File "starlette/middleware/base.py", line 151, in coro await self.app(scope, receive_or_disconnect, send_no_error) File "starlette/middleware/exceptions.py", line 65, in __call__ await wrap_app_handling_exceptions(self.app, conn)(scope, receive, send) File "starlette/_exception_handler.py", line 64, in wrapped_app raise exc File "starlette/_exception_handler.py", line 53, in wrapped_app await app(scope, receive, sender) File "starlette/routing.py", line 756, in __call__ await self.middleware_stack(scope, receive, send) File 
"starlette/routing.py", line 776, in app await route.handle(scope, receive, send) File "starlette/routing.py", line 297, in handle await self.app(scope, receive, send) File "starlette/routing.py", line 77, in app await wrap_app_handling_exceptions(app, request)(scope, receive, send) File "starlette/_exception_handler.py", line 64, in wrapped_app raise exc File "starlette/_exception_handler.py", line 53, in wrapped_app await app(scope, receive, sender) File "starlette/routing.py", line 72, in app response = await func(request) File "fastapi/routing.py", line 278, in app raw_response = await run_endpoint_function( File "fastapi/routing.py", line 193, in run_endpoint_function return await run_in_threadpool(dependant.call, **values) File "starlette/concurrency.py", line 42, in run_in_threadpool return await anyio.to_thread.run_sync(func, *args) File "anyio/to_thread.py", line 56, in run_sync return await get_async_backend().run_sync_in_worker_thread( File "anyio/_backends/_asyncio.py", line 2144, in run_sync_in_worker_thread return await future File "anyio/_backends/_asyncio.py", line 851, in run result = context.run(func, *args) File "galaxy/webapps/galaxy/api/workflows.py", line 1159, in show_workflow return self.service.show_workflow(trans, workflow_id, instance, legacy, version) File "galaxy/webapps/galaxy/services/workflows.py", line 243, in show_workflow **self._workflow_contents_manager.workflow_to_dict(trans, stored_workflow, style=style, version=version) File "galaxy/managers/workflows.py", line 930, in workflow_to_dict wf_dict = self._workflow_to_dict_instance(trans, stored, workflow=workflow, legacy=False) File "galaxy/managers/workflows.py", line 1663, in _workflow_to_dict_instance step_label = step.label or step.tool_inputs.get("name") ``` This is for https://usegalaxy.org/workflows/export?id=ea97d3a23e6d38e6 --- lib/galaxy/managers/workflows.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/managers/workflows.py 
b/lib/galaxy/managers/workflows.py index eabaf60af260..9a9e3fb1ba3c 100644 --- a/lib/galaxy/managers/workflows.py +++ b/lib/galaxy/managers/workflows.py @@ -1645,7 +1645,7 @@ def _workflow_to_dict_instance(self, trans, stored, workflow, legacy=True): inputs = {} for step in workflow.input_steps: step_type = step.type - step_label = step.label or step.tool_inputs.get("name") + step_label = step.label or step.tool_inputs and step.tool_inputs.get("name") if step_label: label = step_label elif step_type == "data_input": @@ -1939,7 +1939,7 @@ def __set_default_label(self, step, module, state): to the actual `label` attribute which is available for all module types, unique, and mapped to its own database column. """ if not module.label and module.type in ["data_input", "data_collection_input"]: - new_state = safe_loads(state) + new_state = safe_loads(state) or {} default_label = new_state.get("name") if default_label and util.unicodify(default_label).lower() not in [ "input dataset", From bef388868338e12bcec67d8d02ddb9bfb538422d Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 13 Aug 2024 09:53:14 -0400 Subject: [PATCH 20/64] Refactor for testability: pass only session We don't need a ModelMapping instance here; without it we can easily write unit tests. --- lib/galaxy/app.py | 2 +- lib/galaxy/model/mapping.py | 2 +- lib/galaxy/model/security.py | 135 ++++++++++++-------------- test/unit/data/test_galaxy_mapping.py | 20 ++-- 4 files changed, 76 insertions(+), 83 deletions(-) diff --git a/lib/galaxy/app.py b/lib/galaxy/app.py index 38553ee968ca..2f67d6ec3b1b 100644 --- a/lib/galaxy/app.py +++ b/lib/galaxy/app.py @@ -655,7 +655,7 @@ def __init__(self, configure_logging=True, use_converters=True, use_display_appl # Load security policy. 
self.security_agent = self.model.security_agent self.host_security_agent = galaxy.model.security.HostAgent( - model=self.security_agent.model, permitted_actions=self.security_agent.permitted_actions + self.security_agent.sa_session, permitted_actions=self.security_agent.permitted_actions ) # We need the datatype registry for running certain tasks that modify HDAs, and to build the registry we need diff --git a/lib/galaxy/model/mapping.py b/lib/galaxy/model/mapping.py index e1d975e5be5a..707b20b7ca2f 100644 --- a/lib/galaxy/model/mapping.py +++ b/lib/galaxy/model/mapping.py @@ -97,7 +97,7 @@ def _build_model_mapping(engine, map_install_models, thread_local_log) -> Galaxy model_modules.append(tool_shed_install) model_mapping = GalaxyModelMapping(model_modules, engine) - model_mapping.security_agent = GalaxyRBACAgent(model_mapping) + model_mapping.security_agent = GalaxyRBACAgent(model_mapping.session) model_mapping.thread_local_log = thread_local_log return model_mapping diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py index 09b425dcd8eb..d2cf628bdffc 100644 --- a/lib/galaxy/model/security.py +++ b/lib/galaxy/model/security.py @@ -20,14 +20,21 @@ import galaxy.model from galaxy.model import ( Dataset, + DatasetCollection, DatasetPermissions, + DefaultHistoryPermissions, + DefaultUserPermissions, Group, GroupRoleAssociation, HistoryDatasetAssociationDisplayAtAuthorization, Library, LibraryDataset, + LibraryDatasetCollectionAssociation, LibraryDatasetDatasetAssociation, + LibraryDatasetDatasetAssociationPermissions, LibraryDatasetPermissions, + LibraryFolder, + LibraryFolderPermissions, LibraryPermissions, Role, User, @@ -51,23 +58,18 @@ class GalaxyRBACAgent(RBACAgent): - def __init__(self, model, permitted_actions=None): - self.model = model + def __init__(self, sa_session, permitted_actions=None): + self.sa_session = sa_session if permitted_actions: self.permitted_actions = permitted_actions # List of "library_item" objects and their 
associated permissions and info template objects self.library_item_assocs = ( - (self.model.Library, self.model.LibraryPermissions), - (self.model.LibraryFolder, self.model.LibraryFolderPermissions), - (self.model.LibraryDataset, self.model.LibraryDatasetPermissions), - (self.model.LibraryDatasetDatasetAssociation, self.model.LibraryDatasetDatasetAssociationPermissions), + (Library, LibraryPermissions), + (LibraryFolder, LibraryFolderPermissions), + (LibraryDataset, LibraryDatasetPermissions), + (LibraryDatasetDatasetAssociation, LibraryDatasetDatasetAssociationPermissions), ) - @property - def sa_session(self): - """Returns a SQLAlchemy session""" - return self.model.context - def sort_by_attr(self, seq, attr): """ Sort the sequence of objects by object's attribute @@ -139,11 +141,11 @@ def get_valid_roles(self, trans, item, query=None, page=None, page_limit=None, i else: limit = None total_count = None - if isinstance(item, self.model.Library) and self.library_is_public(item): + if isinstance(item, Library) and self.library_is_public(item): is_public_item = True - elif isinstance(item, self.model.Dataset) and self.dataset_is_public(item): + elif isinstance(item, Dataset) and self.dataset_is_public(item): is_public_item = True - elif isinstance(item, self.model.LibraryFolder): + elif isinstance(item, LibraryFolder): is_public_item = True else: is_public_item = False @@ -238,8 +240,8 @@ def get_legitimate_roles(self, trans, item, cntrller): """ admin_controller = cntrller in ["library_admin"] roles = set() - if (isinstance(item, self.model.Library) and self.library_is_public(item)) or ( - isinstance(item, self.model.Dataset) and self.dataset_is_public(item) + if (isinstance(item, Library) and self.library_is_public(item)) or ( + isinstance(item, Dataset) and self.dataset_is_public(item) ): return self.get_all_roles(trans, cntrller) # If item has roles associated with the access permission, we need to start with them. 
@@ -272,13 +274,13 @@ def ok_to_display(self, user, role): """ role_type = role.type if user: - if role_type == self.model.Role.types.PRIVATE: + if role_type == Role.types.PRIVATE: return role == self.get_private_user_role(user) - if role_type == self.model.Role.types.SHARING: + if role_type == Role.types.SHARING: return role in self.get_sharing_roles(user) # If role_type is neither private nor sharing, it's ok to display return True - return role_type != self.model.Role.types.PRIVATE and role_type != self.model.Role.types.SHARING + return role_type != Role.types.PRIVATE and role_type != Role.types.SHARING def allow_action(self, roles, action, item): """ @@ -329,7 +331,7 @@ def get_actions_for_items(self, trans, action, permission_items): ret_permissions = {} if len(permission_items) > 0: # SM: NB: LibraryDatasets became Datasets for some odd reason. - if isinstance(permission_items[0], trans.model.LibraryDataset): + if isinstance(permission_items[0], LibraryDataset): ids = [item.library_dataset_id for item in permission_items] stmt = select(LibraryDatasetPermissions).where( and_( @@ -348,7 +350,7 @@ def get_actions_for_items(self, trans, action, permission_items): ret_permissions[item.library_dataset_id] = [] for permission in permissions: ret_permissions[permission.library_dataset_id].append(permission) - elif isinstance(permission_items[0], trans.model.Dataset): + elif isinstance(permission_items[0], Dataset): ids = [item.id for item in permission_items] stmt = select(DatasetPermissions).where( @@ -499,7 +501,7 @@ def item_permission_map_for_manage(self, trans, user_roles, libitems): def item_permission_map_for_add(self, trans, user_roles, libitems): return self.allow_action_on_libitems(trans, user_roles, self.permitted_actions.LIBRARY_ADD, libitems) - def can_access_dataset(self, user_roles, dataset: galaxy.model.Dataset): + def can_access_dataset(self, user_roles, dataset: Dataset): # SM: dataset_is_public will access dataset.actions, which is a # backref that 
causes a query to be made to DatasetPermissions retval = self.dataset_is_public(dataset) or self.allow_action( @@ -518,7 +520,7 @@ def can_access_datasets(self, user_roles, action_tuples): return True - def can_access_collection(self, user_roles: List[galaxy.model.Role], collection: galaxy.model.DatasetCollection): + def can_access_collection(self, user_roles: List[Role], collection: DatasetCollection): action_tuples = collection.dataset_action_tuples if not self.can_access_datasets(user_roles, action_tuples): return False @@ -599,21 +601,21 @@ def __active_folders_have_accessible_library_datasets(self, trans, folder, user, return False def can_access_library_item(self, roles, item, user): - if isinstance(item, self.model.Library): + if isinstance(item, Library): return self.can_access_library(roles, item) - elif isinstance(item, self.model.LibraryFolder): + elif isinstance(item, LibraryFolder): return ( self.can_access_library(roles, item.parent_library) and self.check_folder_contents(user, roles, item)[0] ) - elif isinstance(item, self.model.LibraryDataset): + elif isinstance(item, LibraryDataset): return self.can_access_library(roles, item.folder.parent_library) and self.can_access_dataset( roles, item.library_dataset_dataset_association.dataset ) - elif isinstance(item, self.model.LibraryDatasetDatasetAssociation): + elif isinstance(item, LibraryDatasetDatasetAssociation): return self.can_access_library( roles, item.library_dataset.folder.parent_library ) and self.can_access_dataset(roles, item.dataset) - elif isinstance(item, self.model.LibraryDatasetCollectionAssociation): + elif isinstance(item, LibraryDatasetCollectionAssociation): return self.can_access_library(roles, item.folder.parent_library) else: log.warning(f"Unknown library item type: {type(item)}") @@ -658,7 +660,7 @@ def guess_derived_permissions_for_datasets(self, datasets=None): datasets = datasets or [] perms = {} for dataset in datasets: - if not isinstance(dataset, self.model.Dataset): + if 
not isinstance(dataset, Dataset): dataset = dataset.dataset these_perms = {} # initialize blank perms @@ -715,28 +717,28 @@ def associate_components(self, **kwd): raise Exception(f"No valid method of associating provided components: {kwd}") def associate_user_group(self, user, group): - assoc = self.model.UserGroupAssociation(user, group) + assoc = UserGroupAssociation(user, group) self.sa_session.add(assoc) with transaction(self.sa_session): self.sa_session.commit() return assoc def associate_user_role(self, user, role): - assoc = self.model.UserRoleAssociation(user, role) + assoc = UserRoleAssociation(user, role) self.sa_session.add(assoc) with transaction(self.sa_session): self.sa_session.commit() return assoc def associate_group_role(self, group, role): - assoc = self.model.GroupRoleAssociation(group, role) + assoc = GroupRoleAssociation(group, role) self.sa_session.add(assoc) with transaction(self.sa_session): self.sa_session.commit() return assoc def associate_action_dataset_role(self, action, dataset, role): - assoc = self.model.DatasetPermissions(action, dataset, role) + assoc = DatasetPermissions(action, dataset, role) self.sa_session.add(assoc) with transaction(self.sa_session): self.sa_session.commit() @@ -767,14 +769,14 @@ def get_private_user_role(self, user, auto_create=False): return role def get_role(self, name, type=None): - type = type or self.model.Role.types.SYSTEM + type = type or Role.types.SYSTEM # will raise exception if not found stmt = select(Role).where(and_(Role.name == name, Role.type == type)) return self.sa_session.execute(stmt).scalar_one() def create_role(self, name, description, in_users, in_groups, create_group_for_role=False, type=None): - type = type or self.model.Role.types.SYSTEM - role = self.model.Role(name=name, description=description, type=type) + type = type or Role.types.SYSTEM + role = Role(name=name, description=description, type=type) self.sa_session.add(role) # Create the UserRoleAssociations for user in 
[self.sa_session.get(User, x) for x in in_users]: @@ -784,7 +786,7 @@ def create_role(self, name, description, in_users, in_groups, create_group_for_r self.associate_group_role(group, role) if create_group_for_role: # Create the group - group = self.model.Group(name=name) + group = Group(name=name) self.sa_session.add(group) # Associate the group with the role self.associate_group_role(group, role) @@ -831,7 +833,7 @@ def user_set_default_permissions( for action, roles in permissions.items(): if isinstance(action, Action): action = action.action - for dup in [self.model.DefaultUserPermissions(user, action, role) for role in roles]: + for dup in [DefaultUserPermissions(user, action, role) for role in roles]: self.sa_session.add(dup) flush_needed = True if flush_needed: @@ -871,7 +873,7 @@ def history_set_default_permissions(self, history, permissions=None, dataset=Fal for action, roles in permissions.items(): if isinstance(action, Action): action = action.action - for dhp in [self.model.DefaultHistoryPermissions(history, action, role) for role in roles]: + for dhp in [DefaultHistoryPermissions(history, action, role) for role in roles]: self.sa_session.add(dhp) flush_needed = True if flush_needed: @@ -922,7 +924,7 @@ def set_all_dataset_permissions(self, dataset, permissions=None, new=False, flus for _, roles in _walk_action_roles(permissions, self.permitted_actions.DATASET_ACCESS): dataset_access_roles.extend(roles) - if len(dataset_access_roles) != 1 or dataset_access_roles[0].type != self.model.Role.types.PRIVATE: + if len(dataset_access_roles) != 1 or dataset_access_roles[0].type != Role.types.PRIVATE: return galaxy.model.CANNOT_SHARE_PRIVATE_DATASET_MESSAGE flush_needed = False @@ -940,7 +942,7 @@ def set_all_dataset_permissions(self, dataset, permissions=None, new=False, flus role_id = role.id else: role_id = role - dp = self.model.DatasetPermissions(action, dataset, role_id=role_id) + dp = DatasetPermissions(action, dataset, role_id=role_id) 
self.sa_session.add(dp) flush_needed = True if flush_needed and flush: @@ -970,7 +972,7 @@ def set_dataset_permission(self, dataset, permission=None): self.sa_session.delete(dp) flush_needed = True # Add the new specific permission on the dataset - for dp in [self.model.DatasetPermissions(action, dataset, role) for role in roles]: + for dp in [DatasetPermissions(action, dataset, role) for role in roles]: self.sa_session.add(dp) flush_needed = True if flush_needed: @@ -993,9 +995,9 @@ def get_permissions(self, item): return permissions def copy_dataset_permissions(self, src, dst, flush=True): - if not isinstance(src, self.model.Dataset): + if not isinstance(src, Dataset): src = src.dataset - if not isinstance(dst, self.model.Dataset): + if not isinstance(dst, Dataset): dst = dst.dataset self.set_all_dataset_permissions(dst, self.get_permissions(src), flush=flush) @@ -1004,7 +1006,7 @@ def privately_share_dataset(self, dataset, users=None): intersect = None users = users or [] for user in users: - roles = [ura.role for ura in user.roles if ura.role.type == self.model.Role.types.SHARING] + roles = [ura.role for ura in user.roles if ura.role.type == Role.types.SHARING] if intersect is None: intersect = roles else: @@ -1021,9 +1023,7 @@ def privately_share_dataset(self, dataset, users=None): sharing_role = role break if sharing_role is None: - sharing_role = self.model.Role( - name=f"Sharing role for: {', '.join(u.email for u in users)}", type=self.model.Role.types.SHARING - ) + sharing_role = Role(name=f"Sharing role for: {', '.join(u.email for u in users)}", type=Role.types.SHARING) self.sa_session.add(sharing_role) with transaction(self.sa_session): self.sa_session.commit() @@ -1047,7 +1047,7 @@ def set_all_library_permissions(self, trans, library_item, permissions=None): for role_assoc in [permission_class(action, library_item, role) for role in roles]: self.sa_session.add(role_assoc) flush_needed = True - if isinstance(library_item, 
self.model.LibraryDatasetDatasetAssociation): + if isinstance(library_item, LibraryDatasetDatasetAssociation): # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time, # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS # permission. In this case, we'll reset this permission to the library_item user's private role. @@ -1086,14 +1086,12 @@ def set_library_item_permission(self, library_item, permission=None): self.sa_session.delete(item_permission) flush_needed = True # Add the new specific permission on the library item - if isinstance(library_item, self.model.LibraryDataset): - for item_permission in [ - self.model.LibraryDatasetPermissions(action, library_item, role) for role in roles - ]: + if isinstance(library_item, LibraryDataset): + for item_permission in [LibraryDatasetPermissions(action, library_item, role) for role in roles]: self.sa_session.add(item_permission) flush_needed = True - elif isinstance(library_item, self.model.LibraryPermissions): - for item_permission in [self.model.LibraryPermissions(action, library_item, role) for role in roles]: + elif isinstance(library_item, LibraryPermissions): + for item_permission in [LibraryPermissions(action, library_item, role) for role in roles]: self.sa_session.add(item_permission) flush_needed = True if flush_needed: @@ -1151,7 +1149,7 @@ def make_folder_public(self, folder): if not dataset.purged and not self.dataset_is_public(dataset): self.make_dataset_public(dataset) - def dataset_is_public(self, dataset: galaxy.model.Dataset): + def dataset_is_public(self, dataset: Dataset): """ A dataset is considered public if there are no "access" actions associated with it. 
Any other actions ( 'manage permissions', @@ -1194,7 +1192,7 @@ def dataset_is_private_to_a_user(self, dataset): return False else: access_role = access_roles[0] - return access_role.type == self.model.Role.types.PRIVATE + return access_role.type == Role.types.PRIVATE def datasets_are_public(self, trans, datasets): """ @@ -1294,7 +1292,7 @@ def derive_roles_from_access(self, trans, item_id, cntrller, library=False, **kw # permission on this dataset, or the dataset is not accessible. # Since we have more than 1 role, none of them can be private. for role in in_roles: - if role.type == self.model.Role.types.PRIVATE: + if role.type == Role.types.PRIVATE: private_role_found = True break if len(in_roles) == 1: @@ -1358,7 +1356,7 @@ def copy_library_permissions(self, trans, source_library_item, target_library_it f"Invalid class ({target_library_item.__class__}) specified for target_library_item ({target_library_item.__class__.__name__})" ) # Make sure user's private role is included - private_role = self.model.security_agent.get_private_user_role(user) + private_role = self.get_private_user_role(user) for action in self.permitted_actions.values(): if not found_permission_class.filter_by(role_id=private_role.id, action=action.action).first(): lp = found_permission_class(action.action, target_library_item, private_role) @@ -1407,9 +1405,9 @@ def show_library_item(self, user, roles, library_item, actions_to_check, hidden_ for action in actions_to_check: if self.allow_action(roles, action, library_item): return True, hidden_folder_ids - if isinstance(library_item, self.model.Library): + if isinstance(library_item, Library): return self.show_library_item(user, roles, library_item.root_folder, actions_to_check, hidden_folder_ids="") - if isinstance(library_item, self.model.LibraryFolder): + if isinstance(library_item, LibraryFolder): for folder in library_item.active_folders: can_show, hidden_folder_ids = self.show_library_item( user, roles, folder, actions_to_check, 
hidden_folder_ids=hidden_folder_ids @@ -1433,11 +1431,11 @@ def get_showable_folders( """ hidden_folder_ids = hidden_folder_ids or [] showable_folders = showable_folders or [] - if isinstance(library_item, self.model.Library): + if isinstance(library_item, Library): return self.get_showable_folders( user, roles, library_item.root_folder, actions_to_check, showable_folders=[] ) - if isinstance(library_item, self.model.LibraryFolder): + if isinstance(library_item, LibraryFolder): if library_item.id not in hidden_folder_ids: for action in actions_to_check: if self.allow_action(roles, action, library_item): @@ -1594,16 +1592,11 @@ class HostAgent(RBACAgent): ucsc_archaea=("lowepub.cse.ucsc.edu",), ) - def __init__(self, model, permitted_actions=None): - self.model = model + def __init__(self, sa_session, permitted_actions=None): + self.sa_session = sa_session if permitted_actions: self.permitted_actions = permitted_actions - @property - def sa_session(self): - """Returns a SQLAlchemy session""" - return self.model.context - def allow_action(self, addr, action, **kwd): if "dataset" in kwd and action == self.permitted_actions.DATASET_ACCESS: hda = kwd["dataset"] @@ -1664,7 +1657,7 @@ def set_dataset_permissions(self, hda, user, site): if hdadaa: hdadaa.update_time = datetime.utcnow() else: - hdadaa = self.model.HistoryDatasetAssociationDisplayAtAuthorization(hda=hda, user=user, site=site) + hdadaa = HistoryDatasetAssociationDisplayAtAuthorization(hda=hda, user=user, site=site) self.sa_session.add(hdadaa) with transaction(self.sa_session): self.sa_session.commit() diff --git a/test/unit/data/test_galaxy_mapping.py b/test/unit/data/test_galaxy_mapping.py index eed3eb01ad67..60c9c3116942 100644 --- a/test/unit/data/test_galaxy_mapping.py +++ b/test/unit/data/test_galaxy_mapping.py @@ -456,7 +456,7 @@ def test_workflows(self): assert counts.root["scheduled"] == 1 def test_role_creation(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = 
GalaxyRBACAgent(self.model.session) def check_private_role(private_role, email): assert private_role.type == model.Role.types.PRIVATE @@ -489,7 +489,7 @@ def check_private_role(private_role, email): check_private_role(role, email) def test_private_share_role(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, u_other = self._three_users("private_share_role") @@ -504,7 +504,7 @@ def test_private_share_role(self): assert not security_agent.can_access_dataset(u_other.all_roles(), d1.dataset) def test_make_dataset_public(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, u_other = self._three_users("make_dataset_public") h = model.History(name="History for Annotation", user=u_from) @@ -520,7 +520,7 @@ def test_make_dataset_public(self): assert security_agent.can_access_dataset(u_other.all_roles(), d1.dataset) def test_set_all_dataset_permissions(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, _, u_other = self._three_users("set_all_perms") h = model.History(name="History for Annotation", user=u_from) @@ -541,7 +541,7 @@ def test_set_all_dataset_permissions(self): assert not security_agent.can_access_dataset(u_other.all_roles(), d1.dataset) def test_can_manage_privately_shared_dataset(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, u_other = self._three_users("can_manage_dataset") h = model.History(name="History for Prevent Sharing", user=u_from) @@ -556,7 +556,7 @@ def test_can_manage_privately_shared_dataset(self): assert not security_agent.can_manage_dataset(u_to.all_roles(), d1.dataset) def test_can_manage_private_dataset(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, _, u_other = 
self._three_users("can_manage_dataset_ps") h = model.History(name="History for Prevent Sharing", user=u_from) @@ -570,7 +570,7 @@ def test_can_manage_private_dataset(self): assert not security_agent.can_manage_dataset(u_other.all_roles(), d1.dataset) def test_cannot_make_private_objectstore_dataset_public(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, _ = self._three_users("cannot_make_private_public") h = self.model.History(name="History for Prevent Sharing", user=u_from) @@ -587,7 +587,7 @@ def test_cannot_make_private_objectstore_dataset_public(self): assert galaxy.model.CANNOT_SHARE_PRIVATE_DATASET_MESSAGE in str(exec_info.value) def test_cannot_make_private_objectstore_dataset_shared(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, _ = self._three_users("cannot_make_private_shared") h = self.model.History(name="History for Prevent Sharing", user=u_from) @@ -604,7 +604,7 @@ def test_cannot_make_private_objectstore_dataset_shared(self): assert galaxy.model.CANNOT_SHARE_PRIVATE_DATASET_MESSAGE in str(exec_info.value) def test_cannot_set_dataset_permisson_on_private(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, _ = self._three_users("cannot_set_permissions_on_private") h = self.model.History(name="History for Prevent Sharing", user=u_from) @@ -624,7 +624,7 @@ def test_cannot_set_dataset_permisson_on_private(self): assert galaxy.model.CANNOT_SHARE_PRIVATE_DATASET_MESSAGE in str(exec_info.value) def test_cannot_make_private_dataset_public(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, u_other = self._three_users("cannot_make_private_dataset_public") h = self.model.History(name="History for Annotation", user=u_from) From 77391344856d7d5c10cf26fbd31c24bb36d0f0e1 Mon Sep 
17 00:00:00 2001 From: John Davis Date: Tue, 27 Aug 2024 18:43:41 -0400 Subject: [PATCH 21/64] Add unit tests for user/group/role associations --- test/unit/data/model/conftest.py | 30 + test/unit/data/model/db/test_security.py | 868 +++++++++++++++++++++++ 2 files changed, 898 insertions(+) create mode 100644 test/unit/data/model/db/test_security.py diff --git a/test/unit/data/model/conftest.py b/test/unit/data/model/conftest.py index aff81d80af23..a70d4c74209e 100644 --- a/test/unit/data/model/conftest.py +++ b/test/unit/data/model/conftest.py @@ -151,6 +151,26 @@ def f(**kwd): return f +@pytest.fixture +def make_group(session): + def f(**kwd): + model = m.Group(**kwd) + write_to_db(session, model) + return model + + return f + + +@pytest.fixture +def make_group_role_association(session): + def f(group, role): + model = m.GroupRoleAssociation(group, role) + write_to_db(session, model) + return model + + return f + + @pytest.fixture def make_hda(session, make_history): def f(**kwd): @@ -397,6 +417,16 @@ def f(assoc_class, user, item, rating): return f +@pytest.fixture +def make_user_group_association(session): + def f(user, group): + model = m.UserGroupAssociation(user, group) + write_to_db(session, model) + return model + + return f + + @pytest.fixture def make_user_role_association(session): def f(user, role): diff --git a/test/unit/data/model/db/test_security.py b/test/unit/data/model/db/test_security.py new file mode 100644 index 000000000000..f86f77a3507b --- /dev/null +++ b/test/unit/data/model/db/test_security.py @@ -0,0 +1,868 @@ +import pytest + +from galaxy.exceptions import RequestParameterInvalidException +from galaxy.model import ( + Group, + Role, + User, +) +from galaxy.model.security import GalaxyRBACAgent +from . import have_same_elements + + +@pytest.fixture +def make_user_and_role(session, make_user, make_role, make_user_role_association): + """ + Each user created in Galaxy is assumed to have a private role, such that role.name == user.email. 
+ Since we are testing user/group/role associations here, to ensure the correct state of the test database,
+ we need to ensure that a user is never created without a corresponding private role.
+ Therefore, we use this fixture instead of make_user (which only creates a user).
+ """
+
+ def f(**kwd):
+ user = make_user()
+ private_role = make_role(name=user.email, type=Role.types.PRIVATE)
+ make_user_role_association(user, private_role)
+ return user, private_role
+
+ return f
+
+
+def test_private_user_role_assoc_not_affected_by_setting_user_roles(session, make_user_and_role):
+ # Create user with a private role
+ user, private_role = make_user_and_role()
+ assert user.email == private_role.name
+ verify_user_associations(user, [], [private_role]) # the only existing association is with the private role
+
+ # Update user's email so it's no longer the same as the private role's name.
+ user.email = user.email + "updated"
+ session.add(user)
+ session.commit()
+ assert user.email != private_role.name
+
+ # Delete user roles
+ GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=[], role_ids=[])
+ # association with private role is preserved
+ verify_user_associations(user, [], [private_role])
+
+
+def test_private_user_role_assoc_not_affected_by_setting_role_users(session, make_user_and_role):
+ # Create user with a private role
+ user, private_role = make_user_and_role()
+ assert user.email == private_role.name
+ verify_user_associations(user, [], [private_role]) # the only existing association is with the private role
+
+ # Update user's email
+ user.email = user.email + "updated"
+ session.add(user)
+ session.commit()
+ assert user.email != private_role.name
+
+ # Update role users
+ GalaxyRBACAgent(session).set_role_user_and_group_associations(private_role, user_ids=[], group_ids=[])
+ # association of private role with user is preserved
+ verify_role_associations(private_role, [user], [])
+
+
+class 
TestSetGroupUserAndRoleAssociations: + + def test_add_associations_to_existing_group(self, session, make_user_and_role, make_role, make_group): + """ + State: group exists in database, has no user and role associations. + Action: add new associations. + """ + group = make_group() + users = [make_user_and_role()[0] for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # users and roles for creating associations + users_to_add = [users[0], users[2], users[4]] + user_ids = [u.id for u in users_to_add] + roles_to_add = [roles[1], roles[3]] + role_ids = [r.id for r in roles_to_add] + + # verify no preexisting associations + verify_group_associations(group, [], []) + + # set associations + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=role_ids) + + # verify new associations + verify_group_associations(group, users_to_add, roles_to_add) + + def test_add_associations_to_new_group(self, session, make_user_and_role, make_role): + """ + State: group does NOT exist in database, has no user and role associations. + Action: add new associations. + """ + group = Group() + session.add(group) + assert group.id is None # group does not exist in database + users = [make_user_and_role()[0] for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # users and roles for creating associations + users_to_add = [users[0], users[2], users[4]] + user_ids = [u.id for u in users_to_add] + roles_to_add = [roles[1], roles[3]] + role_ids = [r.id for r in roles_to_add] + + # set associations + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=role_ids) + + # verify new associations + verify_group_associations(group, users_to_add, roles_to_add) + + def test_update_associations( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_group_role_association, + ): + """ + State: group exists in database AND has user and role associations. 
+ Action: update associations (add some/drop some). + Expect: old associations are REPLACED by new associations. + """ + group = make_group() + users = [make_user_and_role()[0] for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[0], users[2]] + roles_to_load = [roles[1], roles[3]] + for user in users_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_group_role_association(group, role) + verify_group_associations(group, users_to_load, roles_to_load) + + # users and roles for creating new associations + new_users_to_add = [users[0], users[1], users[3]] + user_ids = [u.id for u in new_users_to_add] + new_roles_to_add = [roles[2]] + role_ids = [r.id for r in new_roles_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(users_to_load, new_users_to_add) + assert not have_same_elements(roles_to_load, new_roles_to_add) + + # set associations + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=role_ids) + + # verify new associations + verify_group_associations(group, new_users_to_add, new_roles_to_add) + + def test_drop_associations( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_group_role_association, + ): + """ + State: group exists in database AND has user and role associations. + Action: drop all associations. 
+ """ + group = make_group() + users = [make_user_and_role()[0] for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[0], users[2]] + roles_to_load = [roles[1], roles[3]] + for user in users_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_group_role_association(group, role) + verify_group_associations(group, users_to_load, roles_to_load) + + # drop associations + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=[], role_ids=[]) + + # verify associations dropped + verify_group_associations(group, [], []) + + def test_invalid_user(self, session, make_user_and_role, make_role, make_group): + """ + State: group exists in database, has no user and role associations. + Action: try to add several associations, last one having an invalid user id. + Expect: no associations are added, appropriate error is raised. + """ + group = make_group() + users = [make_user_and_role()[0] for _ in range(5)] + + # users for creating associations + user_ids = [users[0].id, -1] # first is valid, second is invalid + + # verify no preexisting associations + assert len(group.users) == 0 + + # try to set associations + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=[]) + + # verify no change + assert len(group.users) == 0 + + def test_invalid_role(self, session, make_role, make_group): + """ + state: group exists in database, has no user and role associations. + action: try to add several associations, last one having an invalid role id. + expect: no associations are added, appropriate error is raised. 
 + """
+ group = make_group()
+ roles = [make_role() for _ in range(5)]
+
+ # roles for creating associations
+ role_ids = [roles[0].id, -1] # first is valid, second is invalid
+
+ # verify no preexisting associations
+ assert len(group.roles) == 0
+
+ # try to set associations
+ with pytest.raises(RequestParameterInvalidException):
+ GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=[], role_ids=role_ids)
+
+ # verify no change
+ assert len(group.roles) == 0
+
+ def test_duplicate_user(
+ self,
+ session,
+ make_user_and_role,
+ make_role,
+ make_group,
+ make_user_group_association,
+ make_group_role_association,
+ ):
+ """
+ State: group exists in database and has user and role associations.
+ Action: try update user and role associations including a duplicate user
+ Expect: error raised, no change is made to group users and group roles.
+ """
+ group = make_group()
+ users = [make_user_and_role()[0] for _ in range(5)]
+ roles = [make_role() for _ in range(5)]
+
+ # load and verify existing associations
+ users_to_load = [users[0], users[2]]
+ roles_to_load = [roles[1], roles[3]]
+ for user in users_to_load:
+ make_user_group_association(user, group)
+ for role in roles_to_load:
+ make_group_role_association(group, role)
+ verify_group_associations(group, users_to_load, roles_to_load)
+
+ # users and roles for creating new associations
+ new_users_to_add = users + [users[0]] # include a duplicate user
+ user_ids = [u.id for u in new_users_to_add]
+
+ new_roles_to_add = roles # NO duplicate roles
+ role_ids = [r.id for r in new_roles_to_add]
+
+ # sanity check: ensure we are trying to change existing associations
+ assert not have_same_elements(users_to_load, new_users_to_add)
+ assert not have_same_elements(roles_to_load, new_roles_to_add)
+
+ with pytest.raises(RequestParameterInvalidException):
+ GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=role_ids)
+
+ # verify associations not 
updated + verify_group_associations(group, users_to_load, roles_to_load) + + def test_duplicate_role( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_group_role_association, + ): + """ + State: group exists in database and has user and role associations. + Action: try update user and role associations including a duplicate role + Expect: error raised, no change is made to group users and group roles. + """ + group = make_group() + users = [make_user_and_role()[0] for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[0], users[2]] + roles_to_load = [roles[1], roles[3]] + for user in users_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_group_role_association(group, role) + verify_group_associations(group, users_to_load, roles_to_load) + + # users and roles for creating new associations + new_users_to_add = users # NO duplicate users + user_ids = [u.id for u in new_users_to_add] + + new_roles_to_add = roles + [roles[0]] # include a duplicate role + role_ids = [r.id for r in new_roles_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(users_to_load, new_users_to_add) + assert not have_same_elements(roles_to_load, new_roles_to_add) + + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=role_ids) + + # verify associations not updated + verify_group_associations(group, users_to_load, roles_to_load) + + +class TestSetUserGroupAndRoleAssociations: + """ + Note: a user should always have a private role which is not affected + by modifying a user's group associations or role associations. 
+ """ + + def test_add_associations_to_existing_user(self, session, make_user_and_role, make_role, make_group): + """ + State: user exists in database, has no group and only one private role association. + Action: add new associations. + """ + user, private_role = make_user_and_role() + groups = [make_group() for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # groups and roles for creating associations + groups_to_add = [groups[0], groups[2], groups[4]] + group_ids = [g.id for g in groups_to_add] + roles_to_add = [roles[1], roles[3]] + role_ids = [r.id for r in roles_to_add] + + # verify preexisting associations + verify_user_associations(user, [], [private_role]) + + # set associations + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=group_ids, role_ids=role_ids) + + # verify new associations + verify_user_associations(user, groups_to_add, roles_to_add + [private_role]) + + def test_add_associations_to_new_user(self, session, make_role, make_group): + """ + State: user does NOT exist in database, has no group and role associations. + Action: add new associations. + """ + user = User(email="foo@foo.com", password="password") + # We are not creating a private role and a user-role association with that role because that would result in + # adding the user to the database before calling the method under test, whereas the test is intended to verify + # correct processing of a user that has NOT been saved to the database. 
+ + session.add(user) + assert user.id is None # user does not exist in database + groups = [make_group() for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # groups and roles for creating associations + groups_to_add = [groups[0], groups[2], groups[4]] + group_ids = [g.id for g in groups_to_add] + roles_to_add = [roles[1], roles[3]] + role_ids = [r.id for r in roles_to_add] + + # set associations + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=group_ids, role_ids=role_ids) + + # verify new associations + verify_user_associations(user, groups_to_add, roles_to_add) + + def test_update_associations( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_user_role_association, + ): + """ + State: user exists in database AND has group and role associations. + Action: update associations (add some/drop some). + Expect: old associations are REPLACED by new associations. + """ + user, private_role = make_user_and_role() + groups = [make_group() for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + groups_to_load = [groups[0], groups[2]] + roles_to_load = [roles[1], roles[3]] + for group in groups_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_user_role_association(user, role) + verify_user_associations(user, groups_to_load, roles_to_load + [private_role]) + + # groups and roles for creating new associations + new_groups_to_add = [groups[0], groups[1], groups[3]] + group_ids = [g.id for g in new_groups_to_add] + new_roles_to_add = [roles[2]] + role_ids = [r.id for r in new_roles_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(groups_to_load, new_groups_to_add) + assert not have_same_elements(roles_to_load, new_roles_to_add) + + # set associations + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, 
group_ids=group_ids, role_ids=role_ids) + # verify new associations + verify_user_associations(user, new_groups_to_add, new_roles_to_add + [private_role]) + + def test_drop_associations( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_user_role_association, + ): + """ + State: user exists in database AND has group and role associations. + Action: drop all associations. + """ + user, private_role = make_user_and_role() + groups = [make_group() for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + groups_to_load = [groups[0], groups[2]] + roles_to_load = [roles[1], roles[3]] + for group in groups_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_user_role_association(user, role) + verify_user_associations(user, groups_to_load, roles_to_load + [private_role]) + + # drop associations + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=[], role_ids=[]) + + # verify associations dropped + verify_user_associations(user, [], [private_role]) + + def test_invalid_group(self, session, make_user_and_role, make_group): + """ + State: user exists in database, has no group and only one private role association. + Action: try to add several associations, last one having an invalid group id. + Expect: no associations are added, appropriate error is raised. 
+ """ + user, private_role = make_user_and_role() + groups = [make_group() for _ in range(5)] + + # groups for creating associations + group_ids = [groups[0].id, -1] # first is valid, second is invalid + + # verify no preexisting associations + assert len(user.groups) == 0 + + # try to set associations + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=group_ids, role_ids=[]) + + # verify no change + assert len(user.groups) == 0 + + def test_invalid_role(self, session, make_user_and_role, make_role): + """ + State: user exists in database, has no group and only one private role association. + action: try to add several associations, last one having an invalid role id. + expect: no associations are added, appropriate error is raised. + """ + user, private_role = make_user_and_role() + roles = [make_role() for _ in range(5)] + + # roles for creating associations + role_ids = [roles[0].id, -1] # first is valid, second is invalid + + # verify no preexisting associations + assert len(user.roles) == 1 # one is the private role association + + # try to set associations + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=[], role_ids=role_ids) + + # verify no change + assert len(user.roles) == 1 # one is the private role association + + def test_duplicate_group( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_user_role_association, + ): + """ + State: user exists in database and has group and role associations. + Action: try update group and role associations including a duplicate group + Expect: error raised, no change is made to user groups and user roles. 
+ """ + user, private_role = make_user_and_role() + groups = [make_group() for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + groups_to_load = [groups[0], groups[2]] + roles_to_load = [roles[1], roles[3]] + for group in groups_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_user_role_association(user, role) + verify_user_associations(user, groups_to_load, roles_to_load + [private_role]) + + # groups and roles for creating new associations + new_groups_to_add = groups + [groups[0]] # include a duplicate group + group_ids = [g.id for g in new_groups_to_add] + + new_roles_to_add = roles # NO duplicate roles + role_ids = [r.id for r in new_roles_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(groups_to_load, new_groups_to_add) + assert not have_same_elements(roles_to_load, new_roles_to_add) + + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=group_ids, role_ids=role_ids) + + # verify associations not updated + verify_user_associations(user, groups_to_load, roles_to_load + [private_role]) + + def test_duplicate_role( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_user_role_association, + ): + """ + State: user exists in database and has group and role associations. + Action: try update group and role associations including a duplicate role + Expect: error raised, no change is made to user groups and user roles. 
+ """ + user, private_role = make_user_and_role() + groups = [make_group() for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + groups_to_load = [groups[0], groups[2]] + roles_to_load = [roles[1], roles[3]] + for group in groups_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_user_role_association(user, role) + verify_user_associations(user, groups_to_load, roles_to_load + [private_role]) + + # groups and roles for creating new associations + new_groups_to_add = groups # NO duplicate groups + group_ids = [g.id for g in new_groups_to_add] + + new_roles_to_add = roles + [roles[0]] # include a duplicate role + role_ids = [r.id for r in new_roles_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(groups_to_load, new_groups_to_add) + assert not have_same_elements(roles_to_load, new_roles_to_add) + + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=group_ids, role_ids=role_ids) + + # verify associations not updated + verify_user_associations(user, groups_to_load, roles_to_load + [private_role]) + + +class TestSetRoleUserAndGroupAssociations: + """ + Note: a user should always have a private role which is not affected + by modifying a user's group associations or role associations. + """ + + def test_add_associations_to_existing_role(self, session, make_user_and_role, make_role, make_group): + """ + State: role exists in database, has no group and no user associations. + Action: add new associations. 
+ """
+ role = make_role()
+ users = [make_user_and_role()[0] for _ in range(5)]
+ groups = [make_group() for _ in range(5)]
+
+ # users and groups for creating associations
+ users_to_add = [users[0], users[2], users[4]]
+ user_ids = [u.id for u in users_to_add]
+ groups_to_add = [groups[0], groups[2], groups[4]]
+ group_ids = [g.id for g in groups_to_add]
+
+ # verify preexisting associations
+ verify_role_associations(role, [], [])
+
+ # set associations
+ GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=group_ids)
+
+ # verify new associations
+ verify_role_associations(role, users_to_add, groups_to_add)
+
+ def test_add_associations_to_new_role(self, session, make_user_and_role, make_group):
+ """
+ State: role does NOT exist in database, has no user and group associations.
+ Action: add new associations.
+ """
+ role = Role()
+ session.add(role)
+ assert role.id is None # role does not exist in database
+ users = [make_user_and_role()[0] for _ in range(5)]
+ groups = [make_group() for _ in range(5)]
+
+ # users and groups for creating associations
+ users_to_add = [users[0], users[2], users[4]]
+ user_ids = [u.id for u in users_to_add]
+ groups_to_add = [groups[0], groups[2], groups[4]]
+ group_ids = [g.id for g in groups_to_add]
+
+ # set associations
+ GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=group_ids)
+
+ # verify new associations
+ verify_role_associations(role, users_to_add, groups_to_add)
+
+ def test_update_associations(
+ self,
+ session,
+ make_user_and_role,
+ make_role,
+ make_group,
+ make_user_role_association,
+ make_group_role_association,
+ ):
+ """
+ State: role exists in database AND has user and group associations.
+ Action: update associations (add some/drop some).
+ Expect: old associations are REPLACED by new associations. 
+ """ + role = make_role() + users = [make_user_and_role()[0] for _ in range(5)] + groups = [make_group() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[1], users[3]] + groups_to_load = [groups[0], groups[2]] + for user in users_to_load: + make_user_role_association(user, role) + for group in groups_to_load: + make_group_role_association(group, role) + verify_role_associations(role, users_to_load, groups_to_load) + + # users and groups for creating new associations + new_users_to_add = [users[0], users[2], users[4]] + user_ids = [u.id for u in new_users_to_add] + new_groups_to_add = [groups[0], groups[2], groups[4]] + group_ids = [g.id for g in new_groups_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(users_to_load, new_users_to_add) + assert not have_same_elements(groups_to_load, new_groups_to_add) + + # set associations + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=group_ids) + # verify new associations + verify_role_associations(role, new_users_to_add, new_groups_to_add) + + def test_drop_associations( + self, + session, + make_user_and_role, + make_role, + make_group, + make_group_role_association, + make_user_role_association, + ): + """ + State: role exists in database AND has user and group associations. + Action: drop all associations. 
+ """
+ role = make_role()
+ users = [make_user_and_role()[0] for _ in range(5)]
+ groups = [make_group() for _ in range(5)]
+
+ # load and verify existing associations
+ users_to_load = [users[1], users[3]]
+ groups_to_load = [groups[0], groups[2]]
+ for user in users_to_load:
+ make_user_role_association(user, role)
+ for group in groups_to_load:
+ make_group_role_association(group, role)
+ verify_role_associations(role, users_to_load, groups_to_load)
+
+ # drop associations
+ GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=[], group_ids=[])
+
+ # verify associations dropped
+ verify_role_associations(role, [], [])
+
+ def test_invalid_user(self, session, make_role, make_user_and_role):
+ """
+ State: role exists in database, has no user and group associations.
+ Action: try to add several associations, last one having an invalid user id.
+ Expect: no associations are added, appropriate error is raised.
+ """
+ role = make_role()
+ users = [make_user_and_role()[0] for _ in range(5)]
+
+ # users for creating associations
+ user_ids = [users[0].id, -1] # first is valid, second is invalid
+
+ # verify no preexisting associations
+ assert len(role.users) == 0
+
+ # try to set associations
+ with pytest.raises(RequestParameterInvalidException):
+ GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=[])
+
+ # verify no change
+ assert len(role.users) == 0
+
+ def test_invalid_group(self, session, make_role, make_group):
+ """
+ State: role exists in database, has no user and group associations.
+ Action: try to add several associations, last one having an invalid group id.
+ Expect: no associations are added, appropriate error is raised. 
+ """ + role = make_role() + groups = [make_group() for _ in range(5)] + + # groups for creating associations + group_ids = [groups[0].id, -1] # first is valid, second is invalid + + # verify no preexisting associations + assert len(role.groups) == 0 + + # try to set associations + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=[], group_ids=group_ids) + + # verify no change + assert len(role.groups) == 0 + + def test_duplicate_user( + self, + session, + make_user_and_role, + make_role, + make_group, + make_group_role_association, + make_user_role_association, + ): + """ + State: role exists in database and has group and user associations. + Action: try update group and user associations including a duplicate user + Expect: error raised, no change is made to role groups and role users. + """ + role = make_role() + users = [make_user_and_role()[0] for _ in range(5)] + groups = [make_group() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[1], users[3]] + groups_to_load = [groups[0], groups[2]] + for user in users_to_load: + make_user_role_association(user, role) + for group in groups_to_load: + make_group_role_association(group, role) + + verify_role_associations(role, users_to_load, groups_to_load) + + # users and groups for creating new associations + new_users_to_add = users + [users[0]] # include a duplicate user + user_ids = [u.id for u in new_users_to_add] + + new_groups_to_add = groups # NO duplicate groups + group_ids = [g.id for g in new_groups_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(users_to_load, new_users_to_add) + assert not have_same_elements(groups_to_load, new_groups_to_add) + + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=group_ids) + + # verify 
associations not updated + verify_role_associations(role, users_to_load, groups_to_load) + + def test_duplicate_group( + self, + session, + make_user_and_role, + make_role, + make_group, + make_group_role_association, + make_user_role_association, + ): + """ + State: role exists in database and has group and user associations. + Action: try update group and user associations including a duplicate group + Expect: error raised, no change is made to role groups and role users. + """ + role = make_role() + users = [make_user_and_role()[0] for _ in range(5)] + groups = [make_group() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[1], users[3]] + groups_to_load = [groups[0], groups[2]] + for user in users_to_load: + make_user_role_association(user, role) + for group in groups_to_load: + make_group_role_association(group, role) + + verify_role_associations(role, users_to_load, groups_to_load) + + # users and groups for creating new associations + new_users_to_add = users # NO duplicate users + user_ids = [u.id for u in new_users_to_add] + + new_groups_to_add = groups + [groups[0]] # include a duplicate group + group_ids = [g.id for g in new_groups_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(users_to_load, new_users_to_add) + assert not have_same_elements(groups_to_load, new_groups_to_add) + + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=group_ids) + + # verify associations not updated + verify_role_associations(role, users_to_load, groups_to_load) + + +def verify_group_associations(group, expected_users, expected_roles): + new_group_users = [assoc.user for assoc in group.users] + new_group_roles = [assoc.role for assoc in group.roles] + assert have_same_elements(new_group_users, expected_users) + assert have_same_elements(new_group_roles, expected_roles) + + 
+def verify_user_associations(user, expected_groups, expected_roles): + new_user_groups = [assoc.group for assoc in user.groups] + new_user_roles = [assoc.role for assoc in user.roles] + assert have_same_elements(new_user_groups, expected_groups) + assert have_same_elements(new_user_roles, expected_roles) + + +def verify_role_associations(role, expected_users, expected_groups): + new_role_users = [assoc.user for assoc in role.users] + new_role_groups = [assoc.group for assoc in role.groups] + assert have_same_elements(new_role_users, expected_users) + assert have_same_elements(new_role_groups, expected_groups) From 004d46c7a45a551586bba9c10e257938a7dc4c88 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 16 Aug 2024 12:33:48 -0400 Subject: [PATCH 22/64] Fix test database handling for db tests --- test/unit/data/model/db/conftest.py | 44 +++++++++++++++++++++++------ 1 file changed, 36 insertions(+), 8 deletions(-) diff --git a/test/unit/data/model/db/conftest.py b/test/unit/data/model/db/conftest.py index 1693cf27eaac..8cd81ed50904 100644 --- a/test/unit/data/model/db/conftest.py +++ b/test/unit/data/model/db/conftest.py @@ -8,6 +8,7 @@ create_engine, text, ) +from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session from galaxy import model as m @@ -35,7 +36,11 @@ def engine(db_url: str) -> "Engine": @pytest.fixture def session(engine: "Engine") -> Session: - return Session(engine) + session = Session(engine) + # For sqlite, we need to explicitly enale foreign key constraints. + if engine.name == "sqlite": + session.execute(text("PRAGMA foreign_keys = ON;")) + return session @pytest.fixture(autouse=True, scope="module") @@ -58,12 +63,35 @@ def init_datatypes() -> None: @pytest.fixture(autouse=True) -def clear_database(engine: "Engine") -> "Generator": +def clear_database(engine: "Engine", session) -> "Generator": """Delete all rows from all tables. 
Called after each test.""" yield - with engine.begin() as conn: - for table in m.mapper_registry.metadata.tables: - # Unless db is sqlite, disable foreign key constraints to delete out of order - if engine.name != "sqlite": - conn.execute(text(f"ALTER TABLE {table} DISABLE TRIGGER ALL")) - conn.execute(text(f"DELETE FROM {table}")) + + # If a test left an open transaction, rollback to prevent database locking. + if session.in_transaction(): + session.rollback() + + with engine.connect() as conn: + if engine.name == "sqlite": + conn.execute(text("PRAGMA foreign_keys = OFF;")) + for table in m.mapper_registry.metadata.tables: + conn.execute(text(f"DELETE FROM {table}")) + else: + # For postgres, we can disable foreign key constraints with this statement: + # conn.execute(text(f"ALTER TABLE {table} DISABLE TRIGGER ALL")) + # However, unless running as superuser, this will raise an error when trying + # to disable a system trigger. Disabling USER triggers instead of ALL + # won't work because the USER option excludes foreign key constraints. 
+ # The following is an alternative: we do multiple passes until all tables have been cleared: + to_delete = list(m.mapper_registry.metadata.tables) + failed = [] + while to_delete: + for table in to_delete: + try: + conn.execute(text(f"DELETE FROM {table}")) + except IntegrityError: + failed.append(table) + conn.rollback() + to_delete, failed = failed, [] + + conn.commit() From 4fb1c704d69c53305871a4c37426fb0583b01569 Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 26 Aug 2024 15:07:07 -0400 Subject: [PATCH 23/64] Rename verify_items helper to have_same_elements Improves readability of test code --- test/unit/data/model/db/__init__.py | 4 ++-- test/unit/data/model/db/test_libraries.py | 14 +++++++------- test/unit/data/model/db/test_role.py | 6 +++--- test/unit/data/model/db/test_user.py | 22 +++++++++++----------- 4 files changed, 23 insertions(+), 23 deletions(-) diff --git a/test/unit/data/model/db/__init__.py b/test/unit/data/model/db/__init__.py index 817efe285c17..7b083aa84acd 100644 --- a/test/unit/data/model/db/__init__.py +++ b/test/unit/data/model/db/__init__.py @@ -6,8 +6,8 @@ MockTransaction = namedtuple("MockTransaction", "user") -def verify_items(items, expected_items): +def have_same_elements(items, expected_items): """ Assert that items and expected_items contain the same elements. """ - assert Counter(items) == Counter(expected_items) + return Counter(items) == Counter(expected_items) diff --git a/test/unit/data/model/db/test_libraries.py b/test/unit/data/model/db/test_libraries.py index 80dae0b15b50..3bba9c03b610 100644 --- a/test/unit/data/model/db/test_libraries.py +++ b/test/unit/data/model/db/test_libraries.py @@ -5,7 +5,7 @@ get_library_ids, get_library_permissions_by_role, ) -from . import verify_items +from . 
import have_same_elements


 def test_get_library_ids(session, make_library, make_library_permissions):
@@ -18,7 +18,7 @@ def test_get_library_ids(session, make_library, make_library_permissions):

 ids = get_library_ids(session, "b").all()
 expected = [l2.id, l3.id]
- verify_items(ids, expected)
+ assert have_same_elements(ids, expected)


 def test_get_library_permissions_by_role(session, make_role, make_library_permissions):
@@ -31,7 +31,7 @@ def test_get_library_permissions_by_role(session, make_role, make_library_permis
 lp_roles = [lp.role for lp in lps]

 expected = [r1, r2]
- verify_items(lp_roles, expected)
+ assert have_same_elements(lp_roles, expected)


 def test_get_libraries_for_admins(session, make_library):
@@ -44,14 +44,14 @@ def test_get_libraries_for_admins(session, make_library):

 libs_deleted = get_libraries_for_admins(session, True).all()
 expected = [libs[0], libs[1]]
- verify_items(libs_deleted, expected)
+ assert have_same_elements(libs_deleted, expected)

 libs_not_deleted = get_libraries_for_admins(session, False).all()
 expected = [libs[2], libs[3], libs[4]]
- verify_items(libs_not_deleted, expected)
+ assert have_same_elements(libs_not_deleted, expected)

 libs_all = get_libraries_for_admins(session, None).all()
- verify_items(libs_all, libs)
+ assert have_same_elements(libs_all, libs)


 def test_get_libraries_for_admins__ordering(session, make_library):
@@ -75,7 +75,7 @@ def test_get_libraries_for_non_admins(session, make_library):
 # Expected: l1 (not deleted, not restricted), l2 (not deleted, restricted but accessible)
 # Not returned: l3 (not deleted but restricted), l4 (deleted)
 expected = [l1, l2]
- verify_items(allowed, expected)
+ assert have_same_elements(allowed, expected)


 def test_get_libraries_for_admins_non_admins__ordering(session, make_library):
diff --git a/test/unit/data/model/db/test_role.py b/test/unit/data/model/db/test_role.py
index 59daf8a5a8ea..213314c5c609 100644
--- a/test/unit/data/model/db/test_role.py
+++ b/test/unit/data/model/db/test_role.py
@@ -4,7 +4,7 @@ 
get_private_user_role,
 get_roles_by_ids,
 )
-from . import verify_items
+from . import have_same_elements


 def test_get_npns_roles(session, make_role):
@@ -18,7 +18,7 @@ def test_get_npns_roles(session, make_role):
 # Expected: r4, r5
 # Not returned: r1: deleted, r2: private, r3: sharing
 expected = [r4, r5]
- verify_items(roles, expected)
+ assert have_same_elements(roles, expected)


 def test_get_private_user_role(session, make_user, make_role, make_user_role_association):
@@ -41,4 +41,4 @@ def test_get_roles_by_ids(session, make_role):

 roles2 = get_roles_by_ids(session, ids)
 expected = [r1, r2, r3]
- verify_items(roles2, expected)
+ assert have_same_elements(roles2, expected)
diff --git a/test/unit/data/model/db/test_user.py b/test/unit/data/model/db/test_user.py
index 5085a71b8b42..87d136a125a4 100644
--- a/test/unit/data/model/db/test_user.py
+++ b/test/unit/data/model/db/test_user.py
@@ -7,7 +7,7 @@
 get_users_by_ids,
 get_users_for_index,
 )
-from . import verify_items
+from . import have_same_elements


 @pytest.fixture
@@ -42,7 +42,7 @@ def test_get_users_by_ids(session, make_random_users):

 users2 = get_users_by_ids(session, ids)
 expected = [u1, u2, u3]
- verify_items(users2, expected)
+ assert have_same_elements(users2, expected)


 def test_get_users_for_index(session, make_user):
@@ -54,25 +54,25 @@ def test_get_users_for_index(session, make_user):
 u6 = make_user(email="z", username="i")

 users = get_users_for_index(session, False, f_email="a", expose_user_email=True)
- verify_items(users, [u1])
+ assert have_same_elements(users, [u1])
 users = get_users_for_index(session, False, f_email="c", is_admin=True)
- verify_items(users, [u2])
+ assert have_same_elements(users, [u2])
 users = get_users_for_index(session, False, f_name="f", expose_user_name=True)
- verify_items(users, [u3])
+ assert have_same_elements(users, [u3])
 users = get_users_for_index(session, False, f_name="h", is_admin=True)
- verify_items(users, [u4])
+ assert have_same_elements(users, [u4])
 users = get_users_for_index(session, False, f_any="i", is_admin=True)
- verify_items(users, [u5, u6])
+ assert have_same_elements(users, [u5, u6])
 users = get_users_for_index(session, False, f_any="i", expose_user_email=True, expose_user_name=True)
- verify_items(users, [u5, u6])
+ assert have_same_elements(users, [u5, u6])
 users = get_users_for_index(session, False, f_any="i", expose_user_email=True)
- verify_items(users, [u5])
+ assert have_same_elements(users, [u5])
 users = get_users_for_index(session, False, f_any="i", expose_user_name=True)
- verify_items(users, [u6])
+ assert have_same_elements(users, [u6])
 u1.deleted = True
 users = get_users_for_index(session, True)
- verify_items(users, [u1])
+ assert have_same_elements(users, [u1])


 def test_username_is_unique(make_user):

From 99afd7b5a5047efa6857f1f42b0f125667c860e9 Mon Sep 17 00:00:00 2001
From: John Davis
Date: Wed, 28 Aug 2024 18:33:11 -0400
Subject: [PATCH 24/64] Refactor setting user/group/role associations

Fix typo

Co-authored-by: Marius van den Beek
---
 lib/galaxy/managers/groups.py | 31 +--
 lib/galaxy/model/security.py | 257 ++++++++++++++----
 .../webapps/galaxy/controllers/admin.py | 97 +++----
 .../app/managers/test_NotificationManager.py | 5 +-
 4 files changed, 246 insertions(+), 144 deletions(-)

diff --git a/lib/galaxy/managers/groups.py b/lib/galaxy/managers/groups.py
index 8edb50218203..e0d6cd177731 100644
--- a/lib/galaxy/managers/groups.py
+++ b/lib/galaxy/managers/groups.py
@@ -13,8 +13,6 @@
 from galaxy.managers.context import ProvidesAppContext
 from galaxy.model import Group
 from galaxy.model.base import transaction
-from galaxy.model.db.role import get_roles_by_ids
-from galaxy.model.db.user import get_users_by_ids
 from galaxy.model.scoped_session import galaxy_scoped_session
 from galaxy.schema.fields import Security
 from galaxy.schema.groups import (
@@ -54,13 +52,11 @@ def create(self, trans: ProvidesAppContext, payload: GroupCreatePayload):
 group = model.Group(name=name)
 sa_session.add(group)
- user_ids = payload.user_ids
- users = get_users_by_ids(sa_session, user_ids)
- 
role_ids = payload.role_ids - roles = get_roles_by_ids(sa_session, role_ids) - trans.app.security_agent.set_entity_group_associations(groups=[group], roles=roles, users=users) - with transaction(sa_session): - sa_session.commit() + + trans.app.security_agent.set_group_user_and_role_associations( + group, user_ids=payload.user_ids, role_ids=payload.role_ids + ) + sa_session.commit() encoded_id = Security.security.encode_id(group.id) item = group.to_dict(view="element") @@ -88,23 +84,12 @@ def update(self, trans: ProvidesAppContext, group_id: int, payload: GroupUpdateP if name := payload.name: self._check_duplicated_group_name(sa_session, name) group.name = name - sa_session.add(group) - - users = None - if payload.user_ids is not None: - users = get_users_by_ids(sa_session, payload.user_ids) - - roles = None - if payload.role_ids is not None: - roles = get_roles_by_ids(sa_session, payload.role_ids) + sa_session.commit() - self._app.security_agent.set_entity_group_associations( - groups=[group], roles=roles, users=users, delete_existing_assocs=False + self._app.security_agent.set_group_user_and_role_associations( + group, user_ids=payload.user_ids, role_ids=payload.role_ids ) - with transaction(sa_session): - sa_session.commit() - encoded_id = Security.security.encode_id(group.id) item = group.to_dict(view="element") item["url"] = self._url_for(trans, "show_group", group_id=encoded_id) diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py index d2cf628bdffc..29b8d5be45f8 100644 --- a/lib/galaxy/model/security.py +++ b/lib/galaxy/model/security.py @@ -1,23 +1,35 @@ import logging import socket +import sqlite3 from datetime import ( datetime, timedelta, ) -from typing import List +from typing import ( + List, + Optional, +) +from psycopg2.errors import ( + ForeignKeyViolation, + UniqueViolation, +) from sqlalchemy import ( and_, + delete, false, func, + insert, not_, or_, select, + text, ) +from sqlalchemy.exc import IntegrityError from 
sqlalchemy.orm import joinedload -from sqlalchemy.sql import text import galaxy.model +from galaxy.exceptions import RequestParameterInvalidException from galaxy.model import ( Dataset, DatasetCollection, @@ -1445,62 +1457,171 @@ def get_showable_folders( self.get_showable_folders(user, roles, folder, actions_to_check, showable_folders=showable_folders) return showable_folders - def set_entity_user_associations(self, users=None, roles=None, groups=None, delete_existing_assocs=True): - users = users or [] - roles = roles or [] - groups = groups or [] - for user in users: - if delete_existing_assocs: - flush_needed = False - for a in user.non_private_roles + user.groups: - self.sa_session.delete(a) - flush_needed = True - if flush_needed: - with transaction(self.sa_session): - self.sa_session.commit() - self.sa_session.refresh(user) - for role in roles: - # Make sure we are not creating an additional association with a PRIVATE role - if role not in [x.role for x in user.roles]: - self.associate_components(user=user, role=role) - for group in groups: - self.associate_components(user=user, group=group) + def set_user_group_and_role_associations( + self, + user: User, + *, + group_ids: Optional[List[int]] = None, + role_ids: Optional[List[int]] = None, + ) -> None: + """ + Set user groups and user roles, replacing current associations. - def set_entity_group_associations(self, groups=None, users=None, roles=None, delete_existing_assocs=True): - users = users or [] - roles = roles or [] - groups = groups or [] - for group in groups: - if delete_existing_assocs: - flush_needed = False - for a in group.roles + group.users: - self.sa_session.delete(a) - flush_needed = True - if flush_needed: - with transaction(self.sa_session): - self.sa_session.commit() - for role in roles: - self.associate_components(group=group, role=role) - for user in users: - self.associate_components(group=group, user=user) + Associations are set only if a list of new associations is provided. 
+ If the provided list is empty, existing associations will be removed. + If the provided value is None, existing associations will not be updated. + """ + self._ensure_model_instance_has_id(user) + if group_ids is not None: + self._set_user_groups(user, group_ids or []) + if role_ids is not None: + self._set_user_roles(user, role_ids or []) + # Commit only if both user groups and user roles have been set. + self.sa_session.commit() + + def set_group_user_and_role_associations( + self, + group: Group, + *, + user_ids: Optional[List[int]] = None, + role_ids: Optional[List[int]] = None, + ) -> None: + """ + Set group users and group roles, replacing current associations. - def set_entity_role_associations(self, roles=None, users=None, groups=None, delete_existing_assocs=True): - users = users or [] - roles = roles or [] - groups = groups or [] - for role in roles: - if delete_existing_assocs: - flush_needed = False - for a in role.users + role.groups: - self.sa_session.delete(a) - flush_needed = True - if flush_needed: - with transaction(self.sa_session): - self.sa_session.commit() - for user in users: - self.associate_components(user=user, role=role) - for group in groups: - self.associate_components(group=group, role=role) + Associations are set only if a list of new associations is provided. + If the provided list is empty, existing associations will be removed. + If the provided value is None, existing associations will not be updated. + """ + self._ensure_model_instance_has_id(group) + if user_ids is not None: + self._set_group_users(group, user_ids) + if role_ids is not None: + self._set_group_roles(group, role_ids) + # Commit only if both group users and group roles have been set. + self.sa_session.commit() + + def set_role_user_and_group_associations( + self, + role: Role, + *, + user_ids: Optional[List[int]] = None, + group_ids: Optional[List[int]] = None, + ) -> None: + """ + Set role users and role groups, replacing current associations. 
+ + Associations are set only if a list of new associations is provided. + If the provided list is empty, existing associations will be removed. + If the provided value is None, existing associations will not be updated. + """ + self._ensure_model_instance_has_id(role) + if user_ids is not None: + self._set_role_users(role, user_ids or []) + if group_ids is not None: + self._set_role_groups(role, group_ids or []) + # Commit only if both role users and role groups have been set. + self.sa_session.commit() + + def _set_user_groups(self, user, group_ids): + delete_stmt = delete(UserGroupAssociation).where(UserGroupAssociation.user_id == user.id) + insert_values = [{"user_id": user.id, "group_id": group_id} for group_id in group_ids] + self._set_associations(user, UserGroupAssociation, delete_stmt, insert_values) + + def _set_user_roles(self, user, role_ids): + # Do not include user's private role association in delete statement. + delete_stmt = delete(UserRoleAssociation).where(UserRoleAssociation.user_id == user.id) + private_role = get_private_user_role(user, self.sa_session) + if not private_role: + log.warning("User %s does not have a private role assigned", user) + else: + delete_stmt = delete_stmt.where(UserRoleAssociation.role_id != private_role.id) + role_ids = self._filter_private_roles(role_ids) + insert_values = [{"user_id": user.id, "role_id": role_id} for role_id in role_ids] + self._set_associations(user, UserRoleAssociation, delete_stmt, insert_values) + + def _filter_private_roles(self, role_ids): + """Filter out IDs of private roles: those should not be assignable via UI""" + filtered = [] + for role_id in role_ids: + stmt = select(Role.id).where(Role.id == role_id).where(Role.type == Role.types.PRIVATE) + is_private = bool(self.sa_session.scalars(stmt).all()) + if not is_private: + filtered.append(role_id) + return filtered + + def _set_group_users(self, group, user_ids): + delete_stmt = 
delete(UserGroupAssociation).where(UserGroupAssociation.group_id == group.id) + insert_values = [{"group_id": group.id, "user_id": user_id} for user_id in user_ids] + self._set_associations(group, UserGroupAssociation, delete_stmt, insert_values) + + def _set_group_roles(self, group, role_ids): + delete_stmt = delete(GroupRoleAssociation).where(GroupRoleAssociation.group_id == group.id) + insert_values = [{"group_id": group.id, "role_id": role_id} for role_id in role_ids] + self._set_associations(group, GroupRoleAssociation, delete_stmt, insert_values) + + def _set_role_users(self, role, user_ids): + # Do not set users if the role is private + # Even though we do not expect to be handling a private role here, the following code is + # a safeguard against deleting a user-role-association record for a private role. + if role.type == Role.types.PRIVATE: + return + + # First, check previously associated users to: + # - delete DefaultUserPermissions for users that are being removed from this role; + # - delete DefaultHistoryPermissions for histories associated with users that are being removed from this role. 
+ for ura in role.users: + if ura.user_id not in user_ids: # If a user will be removed from this role, then: + user = self.sa_session.get(User, ura.user_id) + # Delete DefaultUserPermissions for this user + for dup in user.default_permissions: + if role == dup.role: + self.sa_session.delete(dup) + # Delete DefaultHistoryPermissions for histories associated with this user + for history in user.histories: + for dhp in history.default_permissions: + if role == dhp.role: + self.sa_session.delete(dhp) + + delete_stmt = delete(UserRoleAssociation).where(UserRoleAssociation.role_id == role.id) + insert_values = [{"role_id": role.id, "user_id": user_id} for user_id in user_ids] + self._set_associations(role, UserRoleAssociation, delete_stmt, insert_values) + + def _set_role_groups(self, role, group_ids): + delete_stmt = delete(GroupRoleAssociation).where(GroupRoleAssociation.role_id == role.id) + insert_values = [{"role_id": role.id, "group_id": group_id} for group_id in group_ids] + self._set_associations(role, GroupRoleAssociation, delete_stmt, insert_values) + + def _ensure_model_instance_has_id(self, model_instance): + # If model_instance is new, it may have not been assigned a database id yet, which is required + # for creating association records. Flush if that's the case. + if model_instance.id is None: + self.sa_session.flush([model_instance]) + + def _set_associations(self, parent_model, assoc_model, delete_stmt, insert_values): + """ + Delete current associations for assoc_model, then insert new associations if values are provided. + """ + # Ensure sqlite respects foreign key constraints. 
+        if self.sa_session.bind.dialect.name == "sqlite":
+            self.sa_session.execute(text("PRAGMA foreign_keys = ON;"))
+        self.sa_session.execute(delete_stmt)
+        if not insert_values:
+            return
+        try:
+            self.sa_session.execute(insert(assoc_model), insert_values)
+        except IntegrityError as ie:
+            self.sa_session.rollback()
+            if is_unique_constraint_violation(ie):
+                msg = f"Attempting to create a duplicate {assoc_model} record ({insert_values})"
+                log.exception(msg)
+                raise RequestParameterInvalidException()
+            elif is_foreign_key_violation(ie):
+                msg = f"Attempting to create an invalid {assoc_model} record ({insert_values})"
+                log.exception(msg)
+                raise RequestParameterInvalidException()
+            else:
+                raise
 
     def get_component_associations(self, **kwd):
         assert len(kwd) == 2, "You must specify exactly 2 Galaxy security components to check for associations."
@@ -1670,3 +1791,27 @@ def _walk_action_roles(permissions, query_action):
             yield action, roles
         elif action == query_action.action and roles:
             yield action, roles
+
+
+def is_unique_constraint_violation(error):
+    # A more elegant way to handle sqlite is this:
+    # if hasattr(error.orig, "sqlite_errorname"):
+    #     return error.orig.sqlite_errorname == "SQLITE_CONSTRAINT_UNIQUE"
+    # However, that's only possible with Python 3.11+
+    # https://docs.python.org/3/library/sqlite3.html#sqlite3.Error.sqlite_errorcode
+    if isinstance(error.orig, sqlite3.IntegrityError):
+        return error.orig.args[0].startswith("UNIQUE constraint failed")
+    else:
+        return isinstance(error.orig, UniqueViolation)
+
+
+def is_foreign_key_violation(error):
+    # A more elegant way to handle sqlite is this:
+    # if hasattr(error.orig, "sqlite_errorname"):
+    #     return error.orig.sqlite_errorname == "SQLITE_CONSTRAINT_FOREIGNKEY"
+    # However, that's only possible with Python 3.11+
+    # https://docs.python.org/3/library/sqlite3.html#sqlite3.Error.sqlite_errorcode
+    if isinstance(error.orig, sqlite3.IntegrityError):
+        return error.orig.args[0] == "FOREIGN KEY constraint failed"
+    
else: + return isinstance(error.orig, ForeignKeyViolation) diff --git a/lib/galaxy/webapps/galaxy/controllers/admin.py b/lib/galaxy/webapps/galaxy/controllers/admin.py index bd0ea3a06158..b92e37c12786 100644 --- a/lib/galaxy/webapps/galaxy/controllers/admin.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin.py @@ -13,7 +13,10 @@ util, web, ) -from galaxy.exceptions import ActionInputError +from galaxy.exceptions import ( + ActionInputError, + RequestParameterInvalidException, +) from galaxy.managers.quotas import QuotaManager from galaxy.model.base import transaction from galaxy.model.index_filter_util import ( @@ -807,35 +810,17 @@ def manage_users_and_groups_for_role(self, trans, payload=None, **kwd): ], } else: - in_users = [ - trans.sa_session.query(trans.app.model.User).get(trans.security.decode_id(x)) - for x in util.listify(payload.get("in_users")) - ] - in_groups = [ - trans.sa_session.query(trans.app.model.Group).get(trans.security.decode_id(x)) - for x in util.listify(payload.get("in_groups")) - ] - if None in in_users or None in in_groups: + user_ids = [trans.security.decode_id(id) for id in util.listify(payload.get("in_users"))] + group_ids = [trans.security.decode_id(id) for id in util.listify(payload.get("in_groups"))] + try: + trans.app.security_agent.set_role_user_and_group_associations( + role, user_ids=user_ids, group_ids=group_ids + ) + return { + "message": f"Role '{role.name}' has been updated with {len(user_ids)} associated users and {len(group_ids)} associated groups." 
+ } + except RequestParameterInvalidException: return self.message_exception(trans, "One or more invalid user/group id has been provided.") - for ura in role.users: - user = trans.sa_session.query(trans.app.model.User).get(ura.user_id) - if user not in in_users: - # Delete DefaultUserPermissions for previously associated users that have been removed from the role - for dup in user.default_permissions: - if role == dup.role: - trans.sa_session.delete(dup) - # Delete DefaultHistoryPermissions for previously associated users that have been removed from the role - for history in user.histories: - for dhp in history.default_permissions: - if role == dhp.role: - trans.sa_session.delete(dhp) - with transaction(trans.sa_session): - trans.sa_session.commit() - trans.app.security_agent.set_entity_role_associations(roles=[role], users=in_users, groups=in_groups) - trans.sa_session.refresh(role) - return { - "message": f"Role '{role.name}' has been updated with {len(in_users)} associated users and {len(in_groups)} associated groups." - } @web.legacy_expose_api @web.require_admin @@ -912,21 +897,17 @@ def manage_users_and_roles_for_group(self, trans, payload=None, **kwd): ], } else: - in_users = [ - trans.sa_session.query(trans.app.model.User).get(trans.security.decode_id(x)) - for x in util.listify(payload.get("in_users")) - ] - in_roles = [ - trans.sa_session.query(trans.app.model.Role).get(trans.security.decode_id(x)) - for x in util.listify(payload.get("in_roles")) - ] - if None in in_users or None in in_roles: + user_ids = [trans.security.decode_id(id) for id in util.listify(payload.get("in_users"))] + role_ids = [trans.security.decode_id(id) for id in util.listify(payload.get("in_roles"))] + try: + trans.app.security_agent.set_group_user_and_role_associations( + group, user_ids=user_ids, role_ids=role_ids + ) + return { + "message": f"Group '{group.name}' has been updated with {len(user_ids)} associated users and {len(role_ids)} associated roles." 
+ } + except RequestParameterInvalidException: return self.message_exception(trans, "One or more invalid user/role id has been provided.") - trans.app.security_agent.set_entity_group_associations(groups=[group], users=in_users, roles=in_roles) - trans.sa_session.refresh(group) - return { - "message": f"Group '{group.name}' has been updated with {len(in_users)} associated users and {len(in_roles)} associated roles." - } @web.legacy_expose_api @web.require_admin @@ -1099,28 +1080,18 @@ def manage_roles_and_groups_for_user(self, trans, payload=None, **kwd): ], } else: - in_roles = [ - trans.sa_session.query(trans.app.model.Role).get(trans.security.decode_id(x)) - for x in util.listify(payload.get("in_roles")) - ] - in_groups = [ - trans.sa_session.query(trans.app.model.Group).get(trans.security.decode_id(x)) - for x in util.listify(payload.get("in_groups")) - ] - if None in in_groups or None in in_roles: + role_ids = [trans.security.decode_id(id) for id in util.listify(payload.get("in_roles"))] + group_ids = [trans.security.decode_id(id) for id in util.listify(payload.get("in_groups"))] + try: + trans.app.security_agent.set_user_group_and_role_associations( + user, group_ids=group_ids, role_ids=role_ids + ) + return { + "message": f"User '{user.email}' has been updated with {len(role_ids)} associated roles and {len(group_ids)} associated groups (private roles are not displayed)." 
+ } + except RequestParameterInvalidException: return self.message_exception(trans, "One or more invalid role/group id has been provided.") - # make sure the user is not dis-associating himself from his private role - private_role = trans.app.security_agent.get_private_user_role(user) - if private_role not in in_roles: - in_roles.append(private_role) - - trans.app.security_agent.set_entity_user_associations(users=[user], roles=in_roles, groups=in_groups) - trans.sa_session.refresh(user) - return { - "message": f"User '{user.email}' has been updated with {len(in_roles) - 1} associated roles and {len(in_groups)} associated groups (private roles are not displayed)." - } - # ---- Utility methods ------------------------------------------------------- diff --git a/test/unit/app/managers/test_NotificationManager.py b/test/unit/app/managers/test_NotificationManager.py index 6e0c36397c95..76e934cc9e6f 100644 --- a/test/unit/app/managers/test_NotificationManager.py +++ b/test/unit/app/managers/test_NotificationManager.py @@ -524,8 +524,9 @@ def _create_test_group(self, name: str, users: List[User], roles: List[Role]): sa_session = self.trans.sa_session group = Group(name=name) sa_session.add(group) - self.trans.app.security_agent.set_entity_group_associations(groups=[group], roles=roles, users=users) - sa_session.flush() + user_ids = [user.id for user in users] + role_ids = [role.id for role in roles] + self.trans.app.security_agent.set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=role_ids) return group def _create_test_role(self, name: str, users: List[User], groups: List[Group]): From b9ac9f7a6672fe22a775cf29fe1522b0a40c5bc7 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 29 Aug 2024 09:36:59 -0400 Subject: [PATCH 25/64] Do not import psycopg2; test for error code instead --- lib/galaxy/model/security.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py 
index 29b8d5be45f8..d1b993957943 100644 --- a/lib/galaxy/model/security.py +++ b/lib/galaxy/model/security.py @@ -10,10 +10,6 @@ Optional, ) -from psycopg2.errors import ( - ForeignKeyViolation, - UniqueViolation, -) from sqlalchemy import ( and_, delete, @@ -1802,7 +1798,9 @@ def is_unique_constraint_violation(error): if isinstance(error.orig, sqlite3.IntegrityError): return error.orig.args[0].startswith("UNIQUE constraint failed") else: - return isinstance(error.orig, UniqueViolation) + # If this is a PostgreSQL unique constraint, then error.orig is an instance of psycopg2.errors.UniqueViolation + # and should have an attribute `pgcode` = 23505. + return int(getattr(error.orig, "pgcode", -1)) == 23505 def is_foreign_key_violation(error): @@ -1814,4 +1812,6 @@ def is_foreign_key_violation(error): if isinstance(error.orig, sqlite3.IntegrityError): return error.orig.args[0] == "FOREIGN KEY constraint failed" else: - return isinstance(error.orig, ForeignKeyViolation) + # If this is a PostgreSQL foreign key error, then error.orig is an instance of psycopg2.errors.ForeignKeyViolation + # and should have an attribute `pgcode` = 23503. 
+ return int(getattr(error.orig, "pgcode", -1)) == 23503 From 6c2125a7e7e450a264fc45738a93db77d21a6894 Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 29 Aug 2024 09:51:21 -0400 Subject: [PATCH 26/64] Drop associate_components method (no longer used) --- lib/galaxy/model/security.py | 16 +--------------- lib/galaxy/security/__init__.py | 3 --- 2 files changed, 1 insertion(+), 18 deletions(-) diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py index d1b993957943..79684b9c1c71 100644 --- a/lib/galaxy/model/security.py +++ b/lib/galaxy/model/security.py @@ -710,20 +710,6 @@ def guess_derived_permissions(self, all_input_permissions): perms[action].extend([_ for _ in role_ids if _ not in perms[action]]) return perms - def associate_components(self, **kwd): - if "user" in kwd: - if "group" in kwd: - return self.associate_user_group(kwd["user"], kwd["group"]) - elif "role" in kwd: - return self.associate_user_role(kwd["user"], kwd["role"]) - elif "role" in kwd: - if "group" in kwd: - return self.associate_group_role(kwd["group"], kwd["role"]) - if "action" in kwd: - if "dataset" in kwd and "role" in kwd: - return self.associate_action_dataset_role(kwd["action"], kwd["dataset"], kwd["role"]) - raise Exception(f"No valid method of associating provided components: {kwd}") - def associate_user_group(self, user, group): assoc = UserGroupAssociation(user, group) self.sa_session.add(assoc) @@ -1036,7 +1022,7 @@ def privately_share_dataset(self, dataset, users=None): with transaction(self.sa_session): self.sa_session.commit() for user in users: - self.associate_components(user=user, role=sharing_role) + self.associate_user_role(user, sharing_role) self.set_dataset_permission(dataset, {self.permitted_actions.DATASET_ACCESS: [sharing_role]}) def set_all_library_permissions(self, trans, library_item, permissions=None): diff --git a/lib/galaxy/security/__init__.py b/lib/galaxy/security/__init__.py index 0c1082830259..94e8948042b4 100644 --- 
a/lib/galaxy/security/__init__.py +++ b/lib/galaxy/security/__init__.py @@ -95,9 +95,6 @@ def can_change_object_store_id(self, user, dataset): def can_manage_library_item(self, roles, item): raise Exception("Unimplemented Method") - def associate_components(self, **kwd): - raise Exception(f"No valid method of associating provided components: {kwd}") - def create_private_user_role(self, user): raise Exception("Unimplemented Method") From 356b8c34b9834a14f4667b3d39c01b8749ae6a6e Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 18 Sep 2024 15:53:19 -0400 Subject: [PATCH 27/64] Add scripts to fix association data --- .../data_fixes/association_table_fixer.py | 200 ++++++++++++++++++ 1 file changed, 200 insertions(+) create mode 100644 lib/galaxy/model/migrations/data_fixes/association_table_fixer.py diff --git a/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py b/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py new file mode 100644 index 000000000000..0903260a91b0 --- /dev/null +++ b/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py @@ -0,0 +1,200 @@ +from abc import ( + ABC, + abstractmethod, +) + +from sqlalchemy import ( + delete, + func, + null, + or_, + select, +) + +from galaxy.model import ( + GroupRoleAssociation, + UserGroupAssociation, + UserRoleAssociation, +) + + +class AssociationNullFix(ABC): + + def __init__(self, connection): + self.connection = connection + self.assoc_model = self.association_model() + self.assoc_name = self.assoc_model.__tablename__ + self.where_clause = self.build_where_clause() + + def run(self): + invalid_assocs = self.count_associations_with_nulls() + if invalid_assocs: + self.delete_associations_with_nulls() + + def count_associations_with_nulls( + self, + ): + """ + Retrieve association records where one or both associated item ids are null. 
+ """ + select_stmt = select(func.count()).where(self.where_clause) + return self.connection.scalar(select_stmt) + + def delete_associations_with_nulls(self): + """ + Delete association records where one or both associated item ids are null. + """ + delete_stmt = delete(self.assoc_model).where(self.where_clause) + self.connection.execute(delete_stmt) + + @abstractmethod + def association_model(self): + """Return model class""" + + @abstractmethod + def build_where_clause(self): + """Build where clause for filtering records containing nulls instead of associated item ids""" + + +class UserGroupAssociationNullFix(AssociationNullFix): + + def association_model(self): + return UserGroupAssociation + + def build_where_clause(self): + return or_(UserGroupAssociation.user_id == null(), UserGroupAssociation.group_id == null()) + + +class UserRoleAssociationNullFix(AssociationNullFix): + + def association_model(self): + return UserRoleAssociation + + def build_where_clause(self): + return or_(UserRoleAssociation.user_id == null(), UserRoleAssociation.role_id == null()) + + +class GroupRoleAssociationNullFix(AssociationNullFix): + + def association_model(self): + return GroupRoleAssociation + + def build_where_clause(self): + return or_(GroupRoleAssociation.group_id == null(), GroupRoleAssociation.role_id == null()) + + +class AssociationDuplicateFix(ABC): + + def __init__(self, connection): + self.connection = connection + self.assoc_model = self.association_model() + self.assoc_name = self.assoc_model.__tablename__ + + def run(self): + duplicate_assocs = self.select_duplicate_associations() + if duplicate_assocs: + self.delete_duplicate_associations(duplicate_assocs) + + def select_duplicate_associations(self): + """Retrieve duplicate association records.""" + select_stmt = self.build_duplicate_tuples_statement() + return self.connection.execute(select_stmt).all() + + @abstractmethod + def association_model(self): + """Return model class""" + + @abstractmethod + def 
build_duplicate_tuples_statement(self): + """ + Build select statement returning a list of tuples (item1_id, item2_id) that have counts > 1 + """ + + @abstractmethod + def build_duplicate_ids_statement(self, user_id, group_id): + """ + Build select statement returning a list of ids for duplicate records retrieved via build_duplicate_tuples_statement(). + """ + + def delete_duplicate_associations(self, records): + """ + Delete duplicate association records retaining oldest record in each group of duplicates. + """ + to_delete = [] + for item1_id, item2_id in records: + to_delete += self._get_duplicates_to_delete(item1_id, item2_id) + for id in to_delete: + delete_stmt = delete(self.assoc_model).where(self.assoc_model.id == id) + self.connection.execute(delete_stmt) + + def _get_duplicates_to_delete(self, item1_id, item2_id): + stmt = self.build_duplicate_ids_statement(item1_id, item2_id) + duplicates = self.connection.scalars(stmt).all() + # IMPORTANT: we slice to skip the first item ([1:]), which is the oldest record and SHOULD NOT BE DELETED. 
+ return duplicates[1:] + + +class UserGroupAssociationDuplicateFix(AssociationDuplicateFix): + + def association_model(self): + return UserGroupAssociation + + def build_duplicate_tuples_statement(self): + stmt = ( + select(UserGroupAssociation.user_id, UserGroupAssociation.group_id) + .group_by(UserGroupAssociation.user_id, UserGroupAssociation.group_id) + .having(func.count() > 1) + ) + return stmt + + def build_duplicate_ids_statement(self, user_id, group_id): + stmt = ( + select(UserGroupAssociation.id) + .where(UserGroupAssociation.user_id == user_id, UserGroupAssociation.group_id == group_id) + .order_by(UserGroupAssociation.update_time) + ) + return stmt + + +class UserRoleAssociationDuplicateFix(AssociationDuplicateFix): + + def association_model(self): + return UserRoleAssociation + + def build_duplicate_tuples_statement(self): + stmt = ( + select(UserRoleAssociation.user_id, UserRoleAssociation.role_id) + .group_by(UserRoleAssociation.user_id, UserRoleAssociation.role_id) + .having(func.count() > 1) + ) + return stmt + + def build_duplicate_ids_statement(self, user_id, role_id): + stmt = ( + select(UserRoleAssociation.id) + .where(UserRoleAssociation.user_id == user_id, UserRoleAssociation.role_id == role_id) + .order_by(UserRoleAssociation.update_time) + ) + return stmt + + +class GroupRoleAssociationDuplicateFix(AssociationDuplicateFix): + + def association_model(self): + return GroupRoleAssociation + + def build_duplicate_tuples_statement(self): + stmt = ( + select(GroupRoleAssociation.group_id, GroupRoleAssociation.role_id) + .group_by(GroupRoleAssociation.group_id, GroupRoleAssociation.role_id) + .having(func.count() > 1) + ) + return stmt + + def build_duplicate_ids_statement(self, group_id, role_id): + stmt = ( + select(GroupRoleAssociation.id) + .where(GroupRoleAssociation.group_id == group_id, GroupRoleAssociation.role_id == role_id) + .order_by(GroupRoleAssociation.update_time) + ) + return stmt From b89c2084973297112ee139a54f0e9f5e7f2a6716 Mon 
Sep 17 00:00:00 2001 From: John Davis Date: Mon, 26 Aug 2024 15:07:54 -0400 Subject: [PATCH 28/64] Alter model for user-role-association Add unique constraint, set not nullable for user-id, role-id --- lib/galaxy/model/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index c7ab8a57c06e..288b10cc890e 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -3685,10 +3685,11 @@ class HistoryUserShareAssociation(Base, UserShareAssociation): class UserRoleAssociation(Base, RepresentById): __tablename__ = "user_role_association" + __table_args__ = (UniqueConstraint("user_id", "role_id"),) id: Mapped[int] = mapped_column(primary_key=True) - user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True) - role_id: Mapped[int] = mapped_column(ForeignKey("role.id"), index=True, nullable=True) + user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True) + role_id: Mapped[int] = mapped_column(ForeignKey("role.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) From 0607d2e6a29d1e2ee52484f45bdeacd0a31c6a30 Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 23 Sep 2024 16:19:12 -0400 Subject: [PATCH 29/64] Add migration: unique constraint for user-role-assoc --- ...dd_unique_constraint_to_user_role_assoc.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 lib/galaxy/model/migrations/alembic/versions_gxy/349dd9d9aac9_add_unique_constraint_to_user_role_assoc.py diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/349dd9d9aac9_add_unique_constraint_to_user_role_assoc.py b/lib/galaxy/model/migrations/alembic/versions_gxy/349dd9d9aac9_add_unique_constraint_to_user_role_assoc.py new file mode 100644 index 000000000000..26245f4a9c87 --- /dev/null +++ 
b/lib/galaxy/model/migrations/alembic/versions_gxy/349dd9d9aac9_add_unique_constraint_to_user_role_assoc.py @@ -0,0 +1,45 @@ +"""Add unique constraint to user_role_association + +Revision ID: 349dd9d9aac9 +Revises: 1cf595475b58 +Create Date: 2024-09-09 16:14:58.278850 + +""" + +from alembic import op + +from galaxy.model.migrations.data_fixes.association_table_fixer import UserRoleAssociationDuplicateFix +from galaxy.model.migrations.util import ( + create_unique_constraint, + drop_constraint, + transaction, +) + +# revision identifiers, used by Alembic. +revision = "349dd9d9aac9" +down_revision = "1cf595475b58" +branch_labels = None +depends_on = None + +table_name = "user_role_association" +constraint_column_names = ["user_id", "role_id"] +unique_constraint_name = ( + "user_role_association_user_id_key" # This is what the model's naming convention will generate. +) + + +def upgrade(): + with transaction(): + _remove_duplicate_records() + create_unique_constraint(unique_constraint_name, table_name, constraint_column_names) + + +def downgrade(): + with transaction(): + drop_constraint(unique_constraint_name, table_name) + + +def _remove_duplicate_records(): + """Remove duplicate associations""" + connection = op.get_bind() + UserRoleAssociationDuplicateFix(connection).run() From da6a7a7fabb0d4e78bf8854267d240e68b95fa56 Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 9 Sep 2024 22:32:46 -0400 Subject: [PATCH 30/64] Add migration: not null constraint for user-role-assoc --- ..._add_not_null_constraints_to_user_role_.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 lib/galaxy/model/migrations/alembic/versions_gxy/1fdd615f2cdb_add_not_null_constraints_to_user_role_.py diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/1fdd615f2cdb_add_not_null_constraints_to_user_role_.py b/lib/galaxy/model/migrations/alembic/versions_gxy/1fdd615f2cdb_add_not_null_constraints_to_user_role_.py new file mode 100644 index 
000000000000..4fb6f5262f8e --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/1fdd615f2cdb_add_not_null_constraints_to_user_role_.py @@ -0,0 +1,42 @@ +"""Add not-null constraints to user_role_association + +Revision ID: 1fdd615f2cdb +Revises: 349dd9d9aac9 +Create Date: 2024-09-09 21:28:11.987054 + +""" + +from alembic import op + +from galaxy.model.migrations.data_fixes.association_table_fixer import UserRoleAssociationNullFix +from galaxy.model.migrations.util import ( + alter_column, + transaction, +) + +# revision identifiers, used by Alembic. +revision = "1fdd615f2cdb" +down_revision = "349dd9d9aac9" +branch_labels = None +depends_on = None + +table_name = "user_role_association" + + +def upgrade(): + with transaction(): + _remove_records_with_nulls() + alter_column(table_name, "user_id", nullable=False) + alter_column(table_name, "role_id", nullable=False) + + +def downgrade(): + with transaction(): + alter_column(table_name, "user_id", nullable=True) + alter_column(table_name, "role_id", nullable=True) + + +def _remove_records_with_nulls(): + """Remove associations having null as user_id or role_id""" + connection = op.get_bind() + UserRoleAssociationNullFix(connection).run() From 4ce44cfa6d2adc037a5f9ff6c19fcef352d63408 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 28 Aug 2024 18:40:44 -0400 Subject: [PATCH 31/64] Alter model for user-group-assoc Add unique constraint, set not nullable for user-id, group-id --- lib/galaxy/model/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 288b10cc890e..b62e3ee18877 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -2967,10 +2967,11 @@ def __init__(self, name=None): class UserGroupAssociation(Base, RepresentById): __tablename__ = "user_group_association" + __table_args__ = (UniqueConstraint("user_id", "group_id"),) id: Mapped[int] = mapped_column(primary_key=True) - 
user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True) - group_id: Mapped[int] = mapped_column(ForeignKey("galaxy_group.id"), index=True, nullable=True) + user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True) + group_id: Mapped[int] = mapped_column(ForeignKey("galaxy_group.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) user: Mapped["User"] = relationship(back_populates="groups") From 332550ad209be9926e35cf3579f388ee038d53db Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 9 Sep 2024 22:31:31 -0400 Subject: [PATCH 32/64] Add migration: unique constraint for user-group-assoc --- ...d0_add_unique_constraint_to_user_group_.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 lib/galaxy/model/migrations/alembic/versions_gxy/56ddf316dbd0_add_unique_constraint_to_user_group_.py diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/56ddf316dbd0_add_unique_constraint_to_user_group_.py b/lib/galaxy/model/migrations/alembic/versions_gxy/56ddf316dbd0_add_unique_constraint_to_user_group_.py new file mode 100644 index 000000000000..4a50ddcfcbe0 --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/56ddf316dbd0_add_unique_constraint_to_user_group_.py @@ -0,0 +1,45 @@ +"""Add unique constraint to user_group_association + +Revision ID: 56ddf316dbd0 +Revises: 1fdd615f2cdb +Create Date: 2024-09-09 16:10:37.081834 + +""" + +from alembic import op + +from galaxy.model.migrations.data_fixes.association_table_fixer import UserGroupAssociationDuplicateFix +from galaxy.model.migrations.util import ( + create_unique_constraint, + drop_constraint, + transaction, +) + +# revision identifiers, used by Alembic. 
+revision = "56ddf316dbd0" +down_revision = "1fdd615f2cdb" +branch_labels = None +depends_on = None + +table_name = "user_group_association" +constraint_column_names = ["user_id", "group_id"] +unique_constraint_name = ( + "user_group_association_user_id_key" # This is what the model's naming convention will generate. +) + + +def upgrade(): + with transaction(): + _remove_duplicate_records() + create_unique_constraint(unique_constraint_name, table_name, constraint_column_names) + + +def downgrade(): + with transaction(): + drop_constraint(unique_constraint_name, table_name) + + +def _remove_duplicate_records(): + """Remove duplicate associations""" + connection = op.get_bind() + UserGroupAssociationDuplicateFix(connection).run() From cfaef6f1e04ab9eb9eae3124f68b105d5d482459 Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 9 Sep 2024 22:32:35 -0400 Subject: [PATCH 33/64] Add migration: not null constraint for user-group-assoc --- ...add_not_null_constraints_to_user_group_.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 lib/galaxy/model/migrations/alembic/versions_gxy/13fe10b8e35b_add_not_null_constraints_to_user_group_.py diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/13fe10b8e35b_add_not_null_constraints_to_user_group_.py b/lib/galaxy/model/migrations/alembic/versions_gxy/13fe10b8e35b_add_not_null_constraints_to_user_group_.py new file mode 100644 index 000000000000..822a0229a4bc --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/13fe10b8e35b_add_not_null_constraints_to_user_group_.py @@ -0,0 +1,42 @@ +"""Add not-null constraints to user_group_association + +Revision ID: 13fe10b8e35b +Revises: 56ddf316dbd0 +Create Date: 2024-09-09 21:26:26.032842 + +""" + +from alembic import op + +from galaxy.model.migrations.data_fixes.association_table_fixer import UserGroupAssociationNullFix +from galaxy.model.migrations.util import ( + alter_column, + transaction, +) + +# revision identifiers, used by Alembic. 
+revision = "13fe10b8e35b" +down_revision = "56ddf316dbd0" +branch_labels = None +depends_on = None + +table_name = "user_group_association" + + +def upgrade(): + with transaction(): + _remove_records_with_nulls() + alter_column(table_name, "user_id", nullable=False) + alter_column(table_name, "group_id", nullable=False) + + +def downgrade(): + with transaction(): + alter_column(table_name, "user_id", nullable=True) + alter_column(table_name, "group_id", nullable=True) + + +def _remove_records_with_nulls(): + """Remove associations having null as user_id or group_id""" + connection = op.get_bind() + UserGroupAssociationNullFix(connection).run() From b4ebfcafaf04a0112aa782a8d3469635c27e3ef5 Mon Sep 17 00:00:00 2001 From: John Davis Date: Wed, 28 Aug 2024 18:39:37 -0400 Subject: [PATCH 34/64] Alter model for group-role-assoc Add unique constraint, set not nullable for group-id, role-id --- lib/galaxy/model/__init__.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index b62e3ee18877..6fbd0f8932a2 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -3705,10 +3705,11 @@ def __init__(self, user, role): class GroupRoleAssociation(Base, RepresentById): __tablename__ = "group_role_association" + __table_args__ = (UniqueConstraint("group_id", "role_id"),) id: Mapped[int] = mapped_column(primary_key=True) - group_id: Mapped[int] = mapped_column(ForeignKey("galaxy_group.id"), index=True, nullable=True) - role_id: Mapped[int] = mapped_column(ForeignKey("role.id"), index=True, nullable=True) + group_id: Mapped[int] = mapped_column(ForeignKey("galaxy_group.id"), index=True) + role_id: Mapped[int] = mapped_column(ForeignKey("role.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) group: Mapped["Group"] = relationship(back_populates="roles") 
From 89007790ce7d59bc6a647ed8ba13516b0fafcc80 Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 9 Sep 2024 22:30:36 -0400 Subject: [PATCH 35/64] Add migration: unique constraint for group-role-association --- ...4e_add_unique_constraint_to_group_role_.py | 45 +++++++++++++++++++ 1 file changed, 45 insertions(+) create mode 100644 lib/galaxy/model/migrations/alembic/versions_gxy/9ef6431f3a4e_add_unique_constraint_to_group_role_.py diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/9ef6431f3a4e_add_unique_constraint_to_group_role_.py b/lib/galaxy/model/migrations/alembic/versions_gxy/9ef6431f3a4e_add_unique_constraint_to_group_role_.py new file mode 100644 index 000000000000..f84d09d5b043 --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/9ef6431f3a4e_add_unique_constraint_to_group_role_.py @@ -0,0 +1,45 @@ +"""Add unique constraint to group_role_association + +Revision ID: 9ef6431f3a4e +Revises: 13fe10b8e35b +Create Date: 2024-09-09 15:01:20.426534 + +""" + +from alembic import op + +from galaxy.model.migrations.data_fixes.association_table_fixer import GroupRoleAssociationDuplicateFix +from galaxy.model.migrations.util import ( + create_unique_constraint, + drop_constraint, + transaction, +) + +# revision identifiers, used by Alembic. +revision = "9ef6431f3a4e" +down_revision = "13fe10b8e35b" +branch_labels = None +depends_on = None + +table_name = "group_role_association" +constraint_column_names = ["group_id", "role_id"] +unique_constraint_name = ( + "group_role_association_group_id_key" # This is what the model's naming convention will generate. 
+) + + +def upgrade(): + with transaction(): + _remove_duplicate_records() + create_unique_constraint(unique_constraint_name, table_name, constraint_column_names) + + +def downgrade(): + with transaction(): + drop_constraint(unique_constraint_name, table_name) + + +def _remove_duplicate_records(): + """Remove duplicate associations""" + connection = op.get_bind() + GroupRoleAssociationDuplicateFix(connection).run() From 49485c865015f791106c36157523675b2f65c685 Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 9 Sep 2024 22:32:10 -0400 Subject: [PATCH 36/64] Add migration: not null constraint for group-role-assoc --- ...add_not_null_constraints_to_group_role_.py | 42 +++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 lib/galaxy/model/migrations/alembic/versions_gxy/25b092f7938b_add_not_null_constraints_to_group_role_.py diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/25b092f7938b_add_not_null_constraints_to_group_role_.py b/lib/galaxy/model/migrations/alembic/versions_gxy/25b092f7938b_add_not_null_constraints_to_group_role_.py new file mode 100644 index 000000000000..f57dd446d0cb --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/25b092f7938b_add_not_null_constraints_to_group_role_.py @@ -0,0 +1,42 @@ +"""Add not-null constraints to group_role_association + +Revision ID: 25b092f7938b +Revises: 9ef6431f3a4e +Create Date: 2024-09-09 16:17:26.652865 + +""" + +from alembic import op + +from galaxy.model.migrations.data_fixes.association_table_fixer import GroupRoleAssociationNullFix +from galaxy.model.migrations.util import ( + alter_column, + transaction, +) + +# revision identifiers, used by Alembic. 
+revision = "25b092f7938b"
+down_revision = "9ef6431f3a4e"
+branch_labels = None
+depends_on = None
+
+table_name = "group_role_association"
+
+
+def upgrade():
+    with transaction():
+        _remove_records_with_nulls()
+        alter_column(table_name, "group_id", nullable=False)
+        alter_column(table_name, "role_id", nullable=False)
+
+
+def downgrade():
+    with transaction():
+        alter_column(table_name, "group_id", nullable=True)
+        alter_column(table_name, "role_id", nullable=True)
+
+
+def _remove_records_with_nulls():
+    """Remove associations having null as group_id or role_id"""
+    connection = op.get_bind()
+    GroupRoleAssociationNullFix(connection).run()

From 19d76e95805d606ba0fb91cada95921bd17c23bc Mon Sep 17 00:00:00 2001
From: John Davis
Date: Thu, 29 Aug 2024 17:00:37 -0400
Subject: [PATCH 37/64] Fix mypy: statements are reachable

---
 test/unit/data/model/db/test_security.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/test/unit/data/model/db/test_security.py b/test/unit/data/model/db/test_security.py
index f86f77a3507b..1c9e2e8903b3 100644
--- a/test/unit/data/model/db/test_security.py
+++ b/test/unit/data/model/db/test_security.py
@@ -98,7 +98,7 @@ def test_add_associations_to_new_group(self, session, make_user_and_role, make_r
         group = Group()
         session.add(group)
         assert group.id is None  # group does not exist in database
-        users = [make_user_and_role()[0] for _ in range(5)]
+        users = [make_user_and_role()[0] for _ in range(5)]  # type: ignore[unreachable]
         roles = [make_role() for _ in range(5)]
 
         # users and roles for creating associations
@@ -363,7 +363,7 @@ def test_add_associations_to_new_user(self, session, make_role, make_group):
         session.add(user)
         assert user.id is None  # user does not exist in database
 
-        groups = [make_group() for _ in range(5)]
+        groups = [make_group() for _ in range(5)]  # type: ignore[unreachable]
         roles = [make_role() for _ in range(5)]
 
         # groups and roles for creating associations
@@ -623,7 +623,7 @@ def 
test_add_associations_to_new_role(self, session, make_user_and_role, make_gr role = Role() session.add(role) assert role.id is None # role does not exist in database - users = [make_user_and_role()[0] for _ in range(5)] + users = [make_user_and_role()[0] for _ in range(5)] # type: ignore[unreachable] groups = [make_group() for _ in range(5)] # users and groups for creating associations From f6763a56f882746e453dab8908ea25045251f9f6 Mon Sep 17 00:00:00 2001 From: John Davis Date: Fri, 30 Aug 2024 18:26:00 -0400 Subject: [PATCH 38/64] Fix api test: pass full set of new associations In the previous version, we only passed NEW associations, which were ADDED to the current associations. The new version implements a true UPDATE operation, which replaces the existing set of associations with the provided set of associations. --- lib/galaxy_test/api/test_groups.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/lib/galaxy_test/api/test_groups.py b/lib/galaxy_test/api/test_groups.py index 8e4c5510fe98..0176bde0d21c 100644 --- a/lib/galaxy_test/api/test_groups.py +++ b/lib/galaxy_test/api/test_groups.py @@ -107,7 +107,9 @@ def test_update(self): another_user_id = self.dataset_populator.user_id() another_role_id = self.dataset_populator.user_private_role_id() assert another_user_id is not None - update_response = self._put(f"groups/{group_id}", data={"user_ids": [another_user_id]}, admin=True, json=True) + update_response = self._put( + f"groups/{group_id}", data={"user_ids": [user_id, another_user_id]}, admin=True, json=True + ) self._assert_status_code_is_ok(update_response) # Check if the user was added @@ -119,7 +121,9 @@ def test_update(self): ) # Add another role to the group - update_response = self._put(f"groups/{group_id}", data={"role_ids": [another_role_id]}, admin=True, json=True) + update_response = self._put( + f"groups/{group_id}", data={"role_ids": [user_private_role_id, another_role_id]}, admin=True, json=True + ) 
self._assert_status_code_is_ok(update_response) # Check if the role was added From 4b959555a66d533e39c5c84da7fc6b47353d4c4c Mon Sep 17 00:00:00 2001 From: John Davis Date: Sun, 1 Sep 2024 13:08:12 -0400 Subject: [PATCH 39/64] Drop unused non_private_roles attr from model --- lib/galaxy/model/__init__.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index 6fbd0f8932a2..5c25e8960f96 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -849,14 +849,6 @@ class User(Base, Dictifiable, RepresentById): all_notifications: Mapped[List["UserNotificationAssociation"]] = relationship( back_populates="user", cascade_backrefs=False ) - non_private_roles: Mapped[List["UserRoleAssociation"]] = relationship( - viewonly=True, - primaryjoin=( - lambda: (User.id == UserRoleAssociation.user_id) - & (UserRoleAssociation.role_id == Role.id) - & not_(Role.name == User.email) - ), - ) preferences: AssociationProxy[Any] From 8c4a90010aae774489b668a088685c95afe5ed69 Mon Sep 17 00:00:00 2001 From: John Davis Date: Sun, 1 Sep 2024 13:54:05 -0400 Subject: [PATCH 40/64] Add tests for user and history default permissions --- test/unit/data/model/conftest.py | 13 +++++ test/unit/data/model/db/test_security.py | 61 ++++++++++++++++++++++++ 2 files changed, 74 insertions(+) diff --git a/test/unit/data/model/conftest.py b/test/unit/data/model/conftest.py index a70d4c74209e..f49454266001 100644 --- a/test/unit/data/model/conftest.py +++ b/test/unit/data/model/conftest.py @@ -119,6 +119,19 @@ def f(**kwd): return f +@pytest.fixture +def make_default_user_permissions(session, make_user, make_role): + def f(**kwd): + kwd["user"] = kwd.get("user") or make_user() + kwd["action"] = kwd.get("action") or random_str() + kwd["role"] = kwd.get("role") or make_role() + model = m.DefaultUserPermissions(**kwd) + write_to_db(session, model) + return model + + return f + + @pytest.fixture def make_event(session): def 
f(**kwd):
diff --git a/test/unit/data/model/db/test_security.py b/test/unit/data/model/db/test_security.py
index 1c9e2e8903b3..ccd55f420ddc 100644
--- a/test/unit/data/model/db/test_security.py
+++ b/test/unit/data/model/db/test_security.py
@@ -846,6 +846,67 @@ def test_duplicate_group(
         # verify associations not updated
         verify_role_associations(role, users_to_load, groups_to_load)
 
+    def test_delete_default_user_permissions_and_default_history_permissions(
+        self,
+        session,
+        make_role,
+        make_user_and_role,
+        make_user_role_association,
+        make_default_user_permissions,
+        make_default_history_permissions,
+        make_history,
+    ):
+        """
+        When setting role users, we check previously associated users to:
+        - delete DefaultUserPermissions for users that are being removed from this role;
+        - delete DefaultHistoryPermissions for histories associated with users that are being removed from this role.
+        """
+        role = make_role()
+        users = [make_user_and_role()[0] for _ in range(5)]
+        # load and verify existing associations
+        user1, user2 = users[0], users[1]
+        users_to_load = [user1, user2]
+        for user in users_to_load:
+            make_user_role_association(user, role)
+        verify_role_associations(role, users_to_load, [])
+
+        # users and groups for creating new associations
+        new_users_to_add = [users[1], users[2]]  # REMOVE users[0], LEAVE users[1], ADD users[2]
+        user_ids = [u.id for u in new_users_to_add]
+        # sanity check: ensure we are trying to change existing associations
+        assert not have_same_elements(users_to_load, new_users_to_add)
+
+        # load default user permissions
+        dup1 = make_default_user_permissions(user=user1, role=role)
+        dup2 = make_default_user_permissions(user=user2, role=role)
+        assert have_same_elements(user1.default_permissions, [dup1])
+        assert have_same_elements(user2.default_permissions, [dup2])
+
+        # load and verify default history permissions for users associated with this role
+        history1, history2 = make_history(user=user1), make_history(user=user1)  # 2 
histories for user 1 + history3 = make_history(user=user2) # 1 history for user 2 + dhp1 = make_default_history_permissions(history=history1, role=role) + dhp2 = make_default_history_permissions(history=history2, role=role) + dhp3 = make_default_history_permissions(history=history3, role=role) + assert have_same_elements(history1.default_permissions, [dhp1]) + assert have_same_elements(history2.default_permissions, [dhp2]) + assert have_same_elements(history3.default_permissions, [dhp3]) + + # now update role users + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=[]) + + # verify user role associations + verify_role_associations(role, new_users_to_add, []) + + # verify default user permissions + assert have_same_elements(user1.default_permissions, []) # user1 was removed from role + assert have_same_elements(user2.default_permissions, [dup2]) # user2 was NOT removed from role + + # verify default history permissions + assert have_same_elements(history1.default_permissions, []) + assert have_same_elements(history2.default_permissions, []) + assert have_same_elements(history3.default_permissions, [dhp3]) + def verify_group_associations(group, expected_users, expected_roles): new_group_users = [assoc.user for assoc in group.users] From 6c4ddeb91d78c9a2a8982fe6adb0645b6d0ea30b Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 3 Sep 2024 14:58:09 -0400 Subject: [PATCH 41/64] Test that private roles are not assignable --- test/unit/data/model/db/test_security.py | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/test/unit/data/model/db/test_security.py b/test/unit/data/model/db/test_security.py index ccd55f420ddc..63ad0e52c794 100644 --- a/test/unit/data/model/db/test_security.py +++ b/test/unit/data/model/db/test_security.py @@ -64,6 +64,20 @@ def test_private_user_role_assoc_not_affected_by_setting_role_users(session, mak verify_role_associations(private_role, [user], []) +def 
test_cannot_assign_private_roles(session, make_user_and_role, make_role): + user, private_role1 = make_user_and_role() + _, private_role2 = make_user_and_role() + new_role = make_role() + verify_user_associations(user, [], [private_role1]) # the only existing association is with the private role + + # Try to assign 2 more roles: regular role + another private role + GalaxyRBACAgent(session).set_user_group_and_role_associations( + user, group_ids=[], role_ids=[new_role.id, private_role2.id] + ) + # Only regular role has been added: other private role ignored; original private role still assigned + verify_user_associations(user, [], [private_role1, new_role]) + + class TestSetGroupUserAndRoleAssociations: def test_add_associations_to_existing_group(self, session, make_user_and_role, make_role, make_group): From ef9a5dbd66086d1159e87fab62d6511497529cce Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 10 Sep 2024 09:01:34 -0400 Subject: [PATCH 42/64] Remove "or []" Co-authored-by: Marius van den Beek --- lib/galaxy/model/security.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py index 79684b9c1c71..1373e2176d22 100644 --- a/lib/galaxy/model/security.py +++ b/lib/galaxy/model/security.py @@ -1455,7 +1455,7 @@ def set_user_group_and_role_associations( """ self._ensure_model_instance_has_id(user) if group_ids is not None: - self._set_user_groups(user, group_ids or []) + self._set_user_groups(user, group_ids) if role_ids is not None: self._set_user_roles(user, role_ids or []) # Commit only if both user groups and user roles have been set. 
From 552354c8440e95cc8b069b529f677aebb1ff5e00 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 10 Sep 2024 09:01:57 -0400 Subject: [PATCH 43/64] Remove "or []" Co-authored-by: Marius van den Beek --- lib/galaxy/model/security.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py index 1373e2176d22..0b0f05aed972 100644 --- a/lib/galaxy/model/security.py +++ b/lib/galaxy/model/security.py @@ -1457,7 +1457,7 @@ def set_user_group_and_role_associations( if group_ids is not None: self._set_user_groups(user, group_ids) if role_ids is not None: - self._set_user_roles(user, role_ids or []) + self._set_user_roles(user, role_ids) # Commit only if both user groups and user roles have been set. self.sa_session.commit() From e839b3966284dc572b2895d17038ee6f232eb3d2 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 10 Sep 2024 09:03:24 -0400 Subject: [PATCH 44/64] Remove last "or []" --- lib/galaxy/model/security.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py index 0b0f05aed972..41c3ea55b480 100644 --- a/lib/galaxy/model/security.py +++ b/lib/galaxy/model/security.py @@ -1499,9 +1499,9 @@ def set_role_user_and_group_associations( """ self._ensure_model_instance_has_id(role) if user_ids is not None: - self._set_role_users(role, user_ids or []) + self._set_role_users(role, user_ids) if group_ids is not None: - self._set_role_groups(role, group_ids or []) + self._set_role_groups(role, group_ids) # Commit only if both role users and role groups have been set. 
self.sa_session.commit() From 175423cbef41c447bc4b3ca51c52c9b04fdde994 Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 10 Sep 2024 09:45:39 -0400 Subject: [PATCH 45/64] Give method more appropriate name --- lib/galaxy/model/security.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py index 41c3ea55b480..68351a19ed92 100644 --- a/lib/galaxy/model/security.py +++ b/lib/galaxy/model/security.py @@ -1453,7 +1453,7 @@ def set_user_group_and_role_associations( If the provided list is empty, existing associations will be removed. If the provided value is None, existing associations will not be updated. """ - self._ensure_model_instance_has_id(user) + self._persist_new_model(user) if group_ids is not None: self._set_user_groups(user, group_ids) if role_ids is not None: @@ -1475,7 +1475,7 @@ def set_group_user_and_role_associations( If the provided list is empty, existing associations will be removed. If the provided value is None, existing associations will not be updated. """ - self._ensure_model_instance_has_id(group) + self._persist_new_model(group) if user_ids is not None: self._set_group_users(group, user_ids) if role_ids is not None: @@ -1497,7 +1497,7 @@ def set_role_user_and_group_associations( If the provided list is empty, existing associations will be removed. If the provided value is None, existing associations will not be updated. 
"""
-        self._ensure_model_instance_has_id(role)
+        self._persist_new_model(role)
         if user_ids is not None:
             self._set_role_users(role, user_ids)
         if group_ids is not None:
@@ -1574,7 +1574,7 @@ def _set_role_groups(self, role, group_ids):
         insert_values = [{"role_id": role.id, "group_id": group_id} for group_id in group_ids]
         self._set_associations(role, GroupRoleAssociation, delete_stmt, insert_values)
 
-    def _ensure_model_instance_has_id(self, model_instance):
+    def _persist_new_model(self, model_instance):
         # If model_instance is new, it may have not been assigned a database id yet, which is required
         # for creating association records. Flush if that's the case.
         if model_instance.id is None:

From e3e88561774274ea4dd3be2dce7dbda9e50fb4f7 Mon Sep 17 00:00:00 2001
From: John Davis
Date: Tue, 10 Sep 2024 15:20:51 -0400
Subject: [PATCH 46/64] Replace loop with single select statement

---
 lib/galaxy/model/security.py | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py
index 68351a19ed92..74e12e71c62d 100644
--- a/lib/galaxy/model/security.py
+++ b/lib/galaxy/model/security.py
@@ -1519,18 +1519,18 @@ def _set_user_roles(self, user, role_ids):
         else:
             delete_stmt = delete_stmt.where(UserRoleAssociation.role_id != private_role.id)
             role_ids = self._filter_private_roles(role_ids)
+        # breakpoint()
+
         insert_values = [{"user_id": user.id, "role_id": role_id} for role_id in role_ids]
         self._set_associations(user, UserRoleAssociation, delete_stmt, insert_values)
 
     def _filter_private_roles(self, role_ids):
         """Filter out IDs of private roles: those should not be assignable via UI"""
-        filtered = []
-        for role_id in role_ids:
-            stmt = select(Role.id).where(Role.id == role_id).where(Role.type == Role.types.PRIVATE)
-            is_private = bool(self.sa_session.scalars(stmt).all())
-            if not is_private:
-                filtered.append(role_id)
-        return filtered
+        stmt = select(Role.id).where(Role.id.in_(role_ids)).where(Role.type == 
Role.types.PRIVATE) + private_role_ids = self.sa_session.scalars(stmt).all() + # We could simply select only private roles; however, that would get rid of potential duplicates + # and invalid role_ids; which would hide any bugs that should be caught in the _set_associations() method. + return [role_id for role_id in role_ids if role_id not in private_role_ids] def _set_group_users(self, group, user_ids): delete_stmt = delete(UserGroupAssociation).where(UserGroupAssociation.group_id == group.id) From 2fc21f9962ab06b5c1521c611e144c5af0172b3e Mon Sep 17 00:00:00 2001 From: John Davis Date: Thu, 12 Sep 2024 16:56:43 -0400 Subject: [PATCH 47/64] Do not pass unnecessary arguments --- test/unit/data/model/db/test_security.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/test/unit/data/model/db/test_security.py b/test/unit/data/model/db/test_security.py index 63ad0e52c794..e85bbe694d08 100644 --- a/test/unit/data/model/db/test_security.py +++ b/test/unit/data/model/db/test_security.py @@ -41,7 +41,7 @@ def test_private_user_role_assoc_not_affected_by_setting_user_roles(session, mak assert user.email != private_role.name # Delete user roles - GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=[], role_ids=[]) + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, role_ids=[]) # association with private role is preserved verify_user_associations(user, [], [private_role]) @@ -59,7 +59,7 @@ def test_private_user_role_assoc_not_affected_by_setting_role_users(session, mak assert user.email != private_role.name # Update role users - GalaxyRBACAgent(session).set_role_user_and_group_associations(private_role, user_ids=[], group_ids=[]) + GalaxyRBACAgent(session).set_role_user_and_group_associations(private_role, user_ids=[]) # association of private role with user is preserved verify_role_associations(private_role, [user], []) @@ -71,9 +71,7 @@ def test_cannot_assign_private_roles(session, 
make_user_and_role, make_role): verify_user_associations(user, [], [private_role1]) # the only existing association is with the private role # Try to assign 2 more roles: regular role + another private role - GalaxyRBACAgent(session).set_user_group_and_role_associations( - user, group_ids=[], role_ids=[new_role.id, private_role2.id] - ) + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, role_ids=[new_role.id, private_role2.id]) # Only regular role has been added: other private role ignored; original private role still assigned verify_user_associations(user, [], [private_role1, new_role]) @@ -219,7 +217,7 @@ def test_invalid_user(self, session, make_user_and_role, make_role, make_group): # try to set associations with pytest.raises(RequestParameterInvalidException): - GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=[]) + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids) # verify no change assert len(group.users) == 0 @@ -241,7 +239,7 @@ def test_invalid_role(self, session, make_role, make_group): # try to set associations with pytest.raises(RequestParameterInvalidException): - GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=[], role_ids=role_ids) + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, role_ids=role_ids) # verify no change assert len(group.roles) == 0 @@ -483,7 +481,7 @@ def test_invalid_group(self, session, make_user_and_role, make_group): # try to set associations with pytest.raises(RequestParameterInvalidException): - GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=group_ids, role_ids=[]) + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=group_ids) # verify no change assert len(user.groups) == 0 @@ -505,7 +503,7 @@ def test_invalid_role(self, session, make_user_and_role, make_role): # try to set associations with 
pytest.raises(RequestParameterInvalidException): - GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=[], role_ids=role_ids) + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, role_ids=role_ids) # verify no change assert len(user.roles) == 1 # one is the private role association @@ -743,7 +741,7 @@ def test_invalid_user(self, session, make_role, make_user_and_role): # try to set associations with pytest.raises(RequestParameterInvalidException): - GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=[]) + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids) # verify no change assert len(role.users) == 0 @@ -765,7 +763,7 @@ def test_invalid_group(self, session, make_role, make_group): # try to set associations with pytest.raises(RequestParameterInvalidException): - GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=[], group_ids=group_ids) + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, group_ids=group_ids) # verify no change assert len(role.groups) == 0 @@ -907,7 +905,7 @@ def test_delete_default_user_permissions_and_default_history_permissions( assert have_same_elements(history3.default_permissions, [dhp3]) # now update role users - GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=[]) + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids) # verify user role associations verify_role_associations(role, new_users_to_add, []) From a509bf83cfe8fc0b5488c796603dc59c83ac5c40 Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 23 Sep 2024 17:56:44 -0400 Subject: [PATCH 48/64] Fix mypy --- .../model/migrations/data_fixes/association_table_fixer.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py 
b/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py index 0903260a91b0..711f266c5be1 100644 --- a/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py +++ b/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py @@ -111,7 +111,7 @@ def build_duplicate_tuples_statement(self): """ @abstractmethod - def build_duplicate_ids_statement(self, user_id, group_id): + def build_duplicate_ids_statement(self, item1_id, item2_id): """ Build select statement returning a list of ids for duplicate records retrieved via build_duplicate_tuples_statement(). """ From 5524ea77a7424d0d14e6ce568140adc9a066e904 Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 23 Sep 2024 23:52:05 -0400 Subject: [PATCH 49/64] Add tests for migration fixes --- .../model/migration_fixes/test_migrations.py | 227 +++++++++++++++++- 1 file changed, 226 insertions(+), 1 deletion(-) diff --git a/test/unit/data/model/migration_fixes/test_migrations.py b/test/unit/data/model/migration_fixes/test_migrations.py index 12b0791689aa..2f8777860a0c 100644 --- a/test/unit/data/model/migration_fixes/test_migrations.py +++ b/test/unit/data/model/migration_fixes/test_migrations.py @@ -1,6 +1,12 @@ import pytest +from sqlalchemy import select -from galaxy.model import User +from galaxy.model import ( + GroupRoleAssociation, + User, + UserGroupAssociation, + UserRoleAssociation, +) from galaxy.model.unittest_utils.migration_scripts_testing_utils import ( # noqa: F401 - contains fixtures we have to import explicitly run_command, tmp_directory, @@ -152,3 +158,222 @@ def test_d619fdfa6168(monkeypatch, session, make_user): assert u1_fixed.deleted is True assert u2_fixed.deleted is True assert u3_fixed.deleted is False + + +def test_349dd9d9aac9(monkeypatch, session, make_user, make_role, make_user_role_association): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + 
monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # Load pre-migration state + run_command(f"{COMMAND} downgrade 1cf595475b58") + + # Load duplicate records + u1, u2 = make_user(), make_user() + r1, r2 = make_role(), make_role() + make_user_role_association(user=u1, role=r1) + make_user_role_association(user=u1, role=r2) + make_user_role_association(user=u1, role=r2) # duplicate + make_user_role_association(user=u2, role=r1) + make_user_role_association(user=u2, role=r1) # duplicate + + # Verify duplicates + assert len(u1.roles) == 3 + assert len(u2.roles) == 2 + all_associations = session.execute(select(UserRoleAssociation)).all() + assert len(all_associations) == 5 + + # Run migration + run_command(f"{COMMAND} upgrade 349dd9d9aac9") + session.expire_all() + + # Verify clean data + assert len(u1.roles) == 2 + assert len(u2.roles) == 1 + all_associations = session.execute(select(UserRoleAssociation)).all() + assert len(all_associations) == 3 + + +def test_56ddf316dbd0(monkeypatch, session, make_user, make_group, make_user_group_association): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # Load pre-migration state + run_command(f"{COMMAND} downgrade 1fdd615f2cdb") + + # Load duplicate records + u1, u2 = make_user(), make_user() + g1, g2 = make_group(), make_group() + make_user_group_association(user=u1, group=g1) + make_user_group_association(user=u1, group=g2) + make_user_group_association(user=u1, group=g2) # duplicate + make_user_group_association(user=u2, group=g1) + make_user_group_association(user=u2, group=g1) # duplicate + + # Verify duplicates + assert len(u1.groups) == 3 + assert len(u2.groups) == 2 + all_associations = 
session.execute(select(UserGroupAssociation)).all() + assert len(all_associations) == 5 + + # Run migration + run_command(f"{COMMAND} upgrade 56ddf316dbd0") + session.expire_all() + + # Verify clean data + assert len(u1.groups) == 2 + assert len(u2.groups) == 1 + all_associations = session.execute(select(UserGroupAssociation)).all() + assert len(all_associations) == 3 + + +def test_9ef6431f3a4e(monkeypatch, session, make_group, make_role, make_group_role_association): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # Load pre-migration state + run_command(f"{COMMAND} downgrade 13fe10b8e35b") + + # Load duplicate records + g1, g2 = make_group(), make_group() + r1, r2 = make_role(), make_role() + make_group_role_association(group=g1, role=r1) + make_group_role_association(group=g1, role=r2) + make_group_role_association(group=g1, role=r2) # duplicate + make_group_role_association(group=g2, role=r1) + make_group_role_association(group=g2, role=r1) # duplicate + + # Verify duplicates + assert len(g1.roles) == 3 + assert len(g2.roles) == 2 + all_associations = session.execute(select(GroupRoleAssociation)).all() + assert len(all_associations) == 5 + + # Run migration + run_command(f"{COMMAND} upgrade 9ef6431f3a4e") + session.expire_all() + + # Verify clean data + assert len(g1.roles) == 2 + assert len(g2.roles) == 1 + all_associations = session.execute(select(GroupRoleAssociation)).all() + assert len(all_associations) == 3 + + +def test_1fdd615f2cdb(monkeypatch, session, make_user, make_role, make_user_role_association): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + 
monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # Load pre-migration state + run_command(f"{COMMAND} downgrade 349dd9d9aac9") + + # Load records w/nulls + ura1 = make_user_role_association(user=make_user(), role=make_role()) + ura2 = make_user_role_association(user=make_user(), role=make_role()) + ura3 = make_user_role_association(user=make_user(), role=make_role()) + ura1.user_id = None + ura2.role_id = None + ura3.user_id = None + ura3.role_id = None + session.add_all([ura1, ura2, ura3]) + session.commit() + + # Load record w/o nulls + make_user_role_association(user=make_user(), role=make_role()) + + # Verify data + all_associations = session.execute(select(UserRoleAssociation)).all() + assert len(all_associations) == 4 + + # Run migration + run_command(f"{COMMAND} upgrade 1fdd615f2cdb") + session.expire_all() + + # Verify clean data + all_associations = session.execute(select(UserRoleAssociation)).all() + assert len(all_associations) == 1 + + +def test_13fe10b8e35b(monkeypatch, session, make_user, make_group, make_user_group_association): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # Load pre-migration state + run_command(f"{COMMAND} downgrade 56ddf316dbd0") + + # Load records w/nulls + uga1 = make_user_group_association(user=make_user(), group=make_group()) + uga2 = make_user_group_association(user=make_user(), group=make_group()) + uga3 = make_user_group_association(user=make_user(), group=make_group()) + uga1.user_id = None + uga2.group_id = None + uga3.user_id = None + uga3.group_id = None + session.add_all([uga1, uga2, uga3]) + session.commit() + + # Load record w/o nulls + make_user_group_association(user=make_user(), group=make_group()) + + # Verify 
data + all_associations = session.execute(select(UserGroupAssociation)).all() + assert len(all_associations) == 4 + + # Run migration + run_command(f"{COMMAND} upgrade 13fe10b8e35b") + session.expire_all() + + # Verify clean data + all_associations = session.execute(select(UserGroupAssociation)).all() + assert len(all_associations) == 1 + + +def test_25b092f7938b(monkeypatch, session, make_group, make_role, make_group_role_association): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # Load pre-migration state + run_command(f"{COMMAND} downgrade 9ef6431f3a4e") + + # Load records w/nulls + gra1 = make_group_role_association(group=make_group(), role=make_role()) + gra2 = make_group_role_association(group=make_group(), role=make_role()) + gra3 = make_group_role_association(group=make_group(), role=make_role()) + gra1.group_id = None + gra2.role_id = None + gra3.group_id = None + gra3.role_id = None + session.add_all([gra1, gra2, gra3]) + session.commit() + + # Load record w/o nulls + make_group_role_association(group=make_group(), role=make_role()) + + # Verify data + all_associations = session.execute(select(GroupRoleAssociation)).all() + assert len(all_associations) == 4 + + # Run migration + run_command(f"{COMMAND} upgrade 25b092f7938b") + session.expire_all() + + # Verify clean data + all_associations = session.execute(select(GroupRoleAssociation)).all() + assert len(all_associations) == 1 From 5d5065adab4eb30bedbcbc92347f05505c50c1e7 Mon Sep 17 00:00:00 2001 From: John Davis Date: Mon, 23 Sep 2024 23:59:57 -0400 Subject: [PATCH 50/64] Update API schema for Group model --- lib/galaxy/schema/groups.py | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/schema/groups.py 
b/lib/galaxy/schema/groups.py index 1a4bde58f764..b513ba26fa41 100644 --- a/lib/galaxy/schema/groups.py +++ b/lib/galaxy/schema/groups.py @@ -73,5 +73,18 @@ class GroupCreatePayload(Model): @partial_model() -class GroupUpdatePayload(GroupCreatePayload): - pass +class GroupUpdatePayload(Model): + """Payload schema for updating a group.""" + + name: str = Field( + ..., + title="name of the group", + ) + user_ids: Optional[List[DecodedDatabaseIdField]] = Field( + None, + title="user IDs", + ) + role_ids: Optional[List[DecodedDatabaseIdField]] = Field( + None, + title="role IDs", + ) From 724c83f16a533dd85d0cfdc86c239f94e6b88377 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Tue, 24 Sep 2024 11:27:09 +0200 Subject: [PATCH 51/64] Fix typo --- lib/galaxy/tools/parameters/basic.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py index 31eb54953175..f017f0e69bcf 100644 --- a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -1529,7 +1529,7 @@ def get_options(self, trans, other_values): with open(dataset.get_file_name()) as f: head = f.readline() cnames = head.rstrip("\n\r ").split("\t") - options = [("c%s: %s" % (c, cnames[int(c) - 1]), c, False) for i in column_list] + options = [("c%s: %s" % (c, cnames[int(c) - 1]), c, False) for c in column_list] except Exception: # ignore and rely on fallback pass From fb3b7177dd0415312ef35996a4fe0fd8ebcde8d0 Mon Sep 17 00:00:00 2001 From: Wolfgang Maier Date: Tue, 24 Sep 2024 11:46:26 +0200 Subject: [PATCH 52/64] Use format strings to form option values --- lib/galaxy/tools/parameters/basic.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py index f017f0e69bcf..c33183430771 100644 --- a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -1520,7 +1520,7 @@ def get_options(self, 
trans, other_values): and dataset.metadata.element_is_set("column_names") ): try: - options = [("c%s: %s" % (c, dataset.metadata.column_names[int(c) - 1]), c, False) for c in column_list] + options = [(f"c{c}: {dataset.metadata.column_names[int(c) - 1]}", c, False) for c in column_list] except IndexError: # ignore and rely on fallback pass @@ -1529,7 +1529,7 @@ def get_options(self, trans, other_values): with open(dataset.get_file_name()) as f: head = f.readline() cnames = head.rstrip("\n\r ").split("\t") - options = [("c%s: %s" % (c, cnames[int(c) - 1]), c, False) for c in column_list] + options = [(f"c{c}: {cnames[int(c) - 1]}", c, False) for c in column_list] except Exception: # ignore and rely on fallback pass From 675c9670e2d1f771009f004d46b849ccfec0cddc Mon Sep 17 00:00:00 2001 From: John Chilton Date: Fri, 6 Sep 2024 12:33:09 -0400 Subject: [PATCH 53/64] Models for YAML test format. --- lib/galaxy/tool_util/models.py | 98 ++++++++++++++++++- lib/galaxy/tool_util/parser/interface.py | 12 ++- lib/galaxy/tool_util/parser/xml.py | 3 +- lib/galaxy/tool_util/validate_test_format.py | 40 ++++++++ lib/galaxy/tool_util/verify/__init__.py | 58 ++++++----- .../tool_util/verify/assertion_models.py | 1 + lib/galaxy/tool_util/verify/asserts/size.py | 4 +- lib/galaxy/tool_util/verify/codegen.py | 47 +++++++-- lib/galaxy/workflow/scheduling_manager.py | 1 - lib/galaxy_test/base/populators.py | 7 +- .../flatten_collection.gxwf-tests.yml | 1 + lib/galaxy_test/workflow/tests.py | 35 ++++--- packages/tool_util/setup.cfg | 1 + test/functional/tools/sample_tool_conf.xml | 1 + test/unit/tool_util/test_test_format_model.py | 39 ++++++++ 15 files changed, 295 insertions(+), 53 deletions(-) create mode 100644 lib/galaxy/tool_util/validate_test_format.py create mode 100644 test/unit/tool_util/test_test_format_model.py diff --git a/lib/galaxy/tool_util/models.py b/lib/galaxy/tool_util/models.py index deafef1a5243..4f1ea35670c6 100644 --- a/lib/galaxy/tool_util/models.py +++ 
b/lib/galaxy/tool_util/models.py @@ -5,11 +5,23 @@ """ from typing import ( + Any, + Dict, List, Optional, + Union, ) -from pydantic import BaseModel +from pydantic import ( + AnyUrl, + BaseModel, + ConfigDict, + RootModel, +) +from typing_extensions import ( + NotRequired, + TypedDict, +) from .parameters import ( input_models_for_tool_source, @@ -18,6 +30,7 @@ from .parser.interface import ( Citation, HelpContent, + OutputCompareType, ToolSource, XrefDict, ) @@ -25,6 +38,7 @@ from_tool_source, ToolOutput, ) +from .verify.assertion_models import assertions class ParsedTool(BaseModel): @@ -73,3 +87,85 @@ def parse_tool(tool_source: ToolSource) -> ParsedTool: xrefs=xrefs, help=help, ) + + +class StrictModel(BaseModel): + + model_config = ConfigDict( + extra="forbid", + ) + + +class BaseTestOutputModel(StrictModel): + file: Optional[str] = None + path: Optional[str] = None + location: Optional[AnyUrl] = None + ftype: Optional[str] = None + sort: Optional[bool] = None + compare: Optional[OutputCompareType] = None + checksum: Optional[str] = None + metadata: Optional[Dict[str, Any]] = None + asserts: Optional[assertions] = None + delta: Optional[int] = None + delta_frac: Optional[float] = None + lines_diff: Optional[int] = None + decompress: Optional[bool] = None + + +class TestDataOutputAssertions(BaseTestOutputModel): + pass + + +class TestCollectionCollectionElementAssertions(StrictModel): + elements: Optional[Dict[str, "TestCollectionElementAssertion"]] = None + element_tests: Optional[Dict[str, "TestCollectionElementAssertion"]] = None + + +class TestCollectionDatasetElementAssertions(BaseTestOutputModel): + pass + + +TestCollectionElementAssertion = Union[ + TestCollectionDatasetElementAssertions, TestCollectionCollectionElementAssertions +] +TestCollectionCollectionElementAssertions.model_rebuild() + + +class CollectionAttributes(StrictModel): + collection_type: Optional[str] = None + + +class TestCollectionOutputAssertions(StrictModel): + elements: 
Optional[Dict[str, TestCollectionElementAssertion]] = None + element_tests: Optional[Dict[str, "TestCollectionElementAssertion"]] = None + attributes: Optional[CollectionAttributes] = None + + +TestOutputLiteral = Union[bool, int, float, str] + +TestOutputAssertions = Union[TestCollectionOutputAssertions, TestDataOutputAssertions, TestOutputLiteral] + +JobDict = Dict[str, Any] + + +class TestJob(StrictModel): + doc: Optional[str] + job: JobDict + outputs: Dict[str, TestOutputAssertions] + + +Tests = RootModel[List[TestJob]] + +# TODO: typed dict versions of all thee above for verify code - make this Dict[str, Any] here more +# specific. +OutputChecks = Union[TestOutputLiteral, Dict[str, Any]] +OutputsDict = Dict[str, OutputChecks] + + +class TestJobDict(TypedDict): + doc: NotRequired[str] + job: NotRequired[JobDict] + outputs: OutputsDict + + +TestDicts = List[TestJobDict] diff --git a/lib/galaxy/tool_util/parser/interface.py b/lib/galaxy/tool_util/parser/interface.py index 50ec9ed30d17..c137955dbeb1 100644 --- a/lib/galaxy/tool_util/parser/interface.py +++ b/lib/galaxy/tool_util/parser/interface.py @@ -5,6 +5,7 @@ ABCMeta, abstractmethod, ) +from enum import Enum from os.path import join from typing import ( Any, @@ -49,9 +50,18 @@ class AssertionDict(TypedDict): XmlInt = Union[str, int] +class OutputCompareType(str, Enum): + diff = "diff" + re_match = "re_match" + sim_size = "sim_size" + re_match_multiline = "re_match_multiline" + contains = "contains" + image_diff = "image_diff" + + class ToolSourceTestOutputAttributes(TypedDict): object: NotRequired[Optional[Any]] - compare: str + compare: OutputCompareType lines_diff: int delta: int delta_frac: Optional[float] diff --git a/lib/galaxy/tool_util/parser/xml.py b/lib/galaxy/tool_util/parser/xml.py index c61b178e64c7..a89754553f84 100644 --- a/lib/galaxy/tool_util/parser/xml.py +++ b/lib/galaxy/tool_util/parser/xml.py @@ -43,6 +43,7 @@ DynamicOptions, HelpContent, InputSource, + OutputCompareType, PageSource, 
PagesSource, RequiredFiles, @@ -834,7 +835,7 @@ def __parse_test_attributes( value_object = json.loads(attrib.pop("value_json")) # Method of comparison - compare: str = attrib.pop("compare", "diff").lower() + compare: OutputCompareType = cast(OutputCompareType, attrib.pop("compare", "diff").lower()) # Number of lines to allow to vary in logs (for dates, etc) lines_diff: int = int(attrib.pop("lines_diff", "0")) # Allow a file size to vary if sim_size compare diff --git a/lib/galaxy/tool_util/validate_test_format.py b/lib/galaxy/tool_util/validate_test_format.py new file mode 100644 index 000000000000..fd9e055b3789 --- /dev/null +++ b/lib/galaxy/tool_util/validate_test_format.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +import argparse +import sys + +import yaml + +from galaxy.tool_util.models import Tests + +DESCRIPTION = """ +A small utility to verify the Planemo test format. + +This script doesn't use semantic information about tools or workflows so only +the structure of the file is checked and things like inputs matching up is not +included. 
+""" + + +def validate_test_file(test_file: str) -> None: + with open(test_file) as f: + json = yaml.safe_load(f) + Tests.model_validate(json) + + +def arg_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser(description=DESCRIPTION) + parser.add_argument("test_file") + return parser + + +def main(argv=None) -> None: + if argv is None: + argv = sys.argv[1:] + + args = arg_parser().parse_args(argv) + validate_test_file(args.test_file) + + +if __name__ == "__main__": + main() diff --git a/lib/galaxy/tool_util/verify/__init__.py b/lib/galaxy/tool_util/verify/__init__.py index fdc4c2044428..6ddfefeb69f8 100644 --- a/lib/galaxy/tool_util/verify/__init__.py +++ b/lib/galaxy/tool_util/verify/__init__.py @@ -597,6 +597,8 @@ def files_image_diff(file1: str, file2: str, attributes: Optional[Dict[str, Any] # TODO: After tool-util with this included is published, fefactor planemo.test._check_output # to use this function. There is already a comment there about breaking fewer abstractions. # https://github.com/galaxyproject/planemo/blob/master/planemo/test/_check_output.py +# TODO: Also migrate the logic for checking non-dictionaries out of Planemo - this function now +# does that check also. def verify_file_path_against_dict( get_filename: GetFilenameT, get_location: GetLocationT, @@ -621,30 +623,38 @@ def verify_file_contents_against_dict( test_properties, test_data_target_dir: Optional[str] = None, ) -> None: - # Support Galaxy-like file location (using "file") or CWL-like ("path" or "location"). 
- expected_file = test_properties.get("file", None) - if expected_file is None: - expected_file = test_properties.get("path", None) - if expected_file is None: - location = test_properties.get("location") - if location: - if location.startswith(("http://", "https://")): - assert get_location - expected_file = get_location(location) - else: - expected_file = location.split("file://", 1)[-1] - - if "asserts" in test_properties: - test_properties["assert_list"] = to_test_assert_list(test_properties["asserts"]) - verify( - item_label, - output_content, - attributes=test_properties, - filename=expected_file, - get_filename=get_filename, - keep_outputs_dir=test_data_target_dir, - verify_extra_files=None, - ) + expected_file: Optional[str] = None + if isinstance(test_properties, dict): + # Support Galaxy-like file location (using "file") or CWL-like ("path" or "location"). + expected_file = test_properties.get("file", None) + if expected_file is None: + expected_file = test_properties.get("path", None) + if expected_file is None: + location = test_properties.get("location") + if location: + if location.startswith(("http://", "https://")): + assert get_location + expected_file = get_location(location) + else: + expected_file = location.split("file://", 1)[-1] + + if "asserts" in test_properties: + test_properties["assert_list"] = to_test_assert_list(test_properties["asserts"]) + verify( + item_label, + output_content, + attributes=test_properties, + filename=expected_file, + get_filename=get_filename, + keep_outputs_dir=test_data_target_dir, + verify_extra_files=None, + ) + else: + output_value = json.loads(output_content.decode("utf-8")) + if test_properties != output_value: + template = "Output [%s] value [%s] does not match expected value [%s]." 
+ message = template % (item_label, output_value, test_properties) + raise AssertionError(message) __all__ = [ diff --git a/lib/galaxy/tool_util/verify/assertion_models.py b/lib/galaxy/tool_util/verify/assertion_models.py index 5f21e488e52b..eb4429170655 100644 --- a/lib/galaxy/tool_util/verify/assertion_models.py +++ b/lib/galaxy/tool_util/verify/assertion_models.py @@ -8,6 +8,7 @@ BeforeValidator, ConfigDict, Field, + model_validator, RootModel, StrictFloat, StrictInt, diff --git a/lib/galaxy/tool_util/verify/asserts/size.py b/lib/galaxy/tool_util/verify/asserts/size.py index e4b3e8a6ef1f..0e3eebe06f86 100644 --- a/lib/galaxy/tool_util/verify/asserts/size.py +++ b/lib/galaxy/tool_util/verify/asserts/size.py @@ -14,9 +14,7 @@ def assert_has_size( output_bytes: OutputBytes, - value: Annotated[ - OptionalXmlInt, AssertionParameter("Deprecated alias for `size`", xml_type="Bytes", deprecated=True) - ] = None, + value: Annotated[OptionalXmlInt, AssertionParameter("Deprecated alias for `size`", xml_type="Bytes")] = None, size: Annotated[ OptionalXmlInt, AssertionParameter( diff --git a/lib/galaxy/tool_util/verify/codegen.py b/lib/galaxy/tool_util/verify/codegen.py index 7219d40b6dcb..2e93d29ccbf0 100644 --- a/lib/galaxy/tool_util/verify/codegen.py +++ b/lib/galaxy/tool_util/verify/codegen.py @@ -55,6 +55,7 @@ BeforeValidator, ConfigDict, Field, + model_validator, RootModel, StrictFloat, StrictInt, @@ -113,9 +114,8 @@ def check_non_negative_if_int(v: typing.Any): {{assertion.name}}_{{ parameter.name }}_description = '''{{ parameter.description }}''' {% endfor %} -class {{assertion.name}}_model(AssertionModel): - r\"\"\"{{ assertion.docstring }}\"\"\" - that: Literal["{{assertion.name}}"] = "{{assertion.name}}" +class base_{{assertion.name}}_model(AssertionModel): + '''base model for {{assertion.name}} describing attributes.''' {% for parameter in assertion.parameters %} {% if not parameter.is_deprecated %} {{ parameter.name }}: {{ parameter.type_str }} = Field( @@ 
-124,21 +124,52 @@ class {{assertion.name}}_model(AssertionModel): ) {% endif %} {% endfor %} +{% if assertion.children in ["required", "allowed"] %} + children: typing.Optional["assertion_list"] = None + asserts: typing.Optional["assertion_list"] = None + {% if assertion.children == "required" %} - children: "assertion_list" + @model_validator(mode='before') + @classmethod + def validate_children(self, data: typing.Any): + if isinstance(data, dict) and 'children' not in data and 'asserts' not in data: + raise ValueError("At least one of 'children' or 'asserts' must be specified for this assertion type.") + return data {% endif %} -{% if assertion.children == "allowed" %} - children: typing.Optional["assertion_list"] = None {% endif %} + + +class {{assertion.name}}_model(base_{{assertion.name}}_model): + r\"\"\"{{ assertion.docstring }}\"\"\" + that: Literal["{{assertion.name}}"] = "{{assertion.name}}" + +class {{assertion.name}}_model_nested(AssertionModel): + r\"\"\"Nested version of this assertion model.\"\"\" + {{assertion.name}}: base_{{assertion.name}}_model {% endfor %} -any_assertion_model = Annotated[typing.Union[ +any_assertion_model_flat = Annotated[typing.Union[ {% for assertion in assertions %} {{assertion.name}}_model, {% endfor %} ], Field(discriminator="that")] -assertion_list = RootModel[typing.List[any_assertion_model]] +any_assertion_model_nested = typing.Union[ +{% for assertion in assertions %} + {{assertion.name}}_model_nested, +{% endfor %} +] + +assertion_list = RootModel[typing.List[typing.Union[any_assertion_model_flat, any_assertion_model_nested]]] + + +class assertion_dict(AssertionModel): +{% for assertion in assertions %} + {{assertion.name}}: typing.Optional[base_{{assertion.name}}_model] = None +{% endfor %} + + +assertions = typing.Union[assertion_list, assertion_dict] """ diff --git a/lib/galaxy/workflow/scheduling_manager.py b/lib/galaxy/workflow/scheduling_manager.py index 3868e24c13a9..8d31130bad21 100644 --- 
a/lib/galaxy/workflow/scheduling_manager.py +++ b/lib/galaxy/workflow/scheduling_manager.py @@ -329,7 +329,6 @@ def __schedule(self, workflow_scheduler_id, workflow_scheduler): def __attempt_schedule(self, invocation_id, workflow_scheduler): with self.app.model.context() as session: workflow_invocation = session.get(model.WorkflowInvocation, invocation_id) - try: if workflow_invocation.state == workflow_invocation.states.CANCELLING: workflow_invocation.cancel_invocation_steps() diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py index 5c25baf2bd89..98dc9cb5215f 100644 --- a/lib/galaxy_test/base/populators.py +++ b/lib/galaxy_test/base/populators.py @@ -3151,7 +3151,12 @@ def read_test_data(test_dict): elif is_dict and "type" in value: input_type = value.pop("type") if input_type == "File": - content = open_test_data(value) + if "value" in value: + content = open_test_data(value) + elif "content" in value: + content = value["content"] + else: + raise ValueError(f"Invalid test_data def {test_data}") new_dataset_kwds = {"content": content} if "name" in value: new_dataset_kwds["name"] = value["name"] diff --git a/lib/galaxy_test/workflow/flatten_collection.gxwf-tests.yml b/lib/galaxy_test/workflow/flatten_collection.gxwf-tests.yml index 1a2b5c65b596..bfd0a6a02435 100644 --- a/lib/galaxy_test/workflow/flatten_collection.gxwf-tests.yml +++ b/lib/galaxy_test/workflow/flatten_collection.gxwf-tests.yml @@ -3,6 +3,7 @@ job: {} outputs: out: + attributes: {collection_type: 'list'} elements: 'oe1-ie1': asserts: diff --git a/lib/galaxy_test/workflow/tests.py b/lib/galaxy_test/workflow/tests.py index f0702fb8240b..a850490740b6 100644 --- a/lib/galaxy_test/workflow/tests.py +++ b/lib/galaxy_test/workflow/tests.py @@ -8,6 +8,12 @@ import yaml from gxformat2.yaml import ordered_load +from galaxy.tool_util.models import ( + OutputChecks, + OutputsDict, + TestDicts, + TestJobDict, +) from galaxy.tool_util.parser.interface import 
TestCollectionOutputDef from galaxy.tool_util.verify import verify_file_contents_against_dict from galaxy.tool_util.verify.interactor import ( @@ -52,7 +58,7 @@ def setUp(self): self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor) @pytest.mark.workflow - def test_workflow(self, workflow_path: Path, test_job): + def test_workflow(self, workflow_path: Path, test_job: TestJobDict): with workflow_path.open() as f: yaml_content = ordered_load(f) with self.dataset_populator.test_history() as history_id: @@ -63,30 +69,32 @@ def test_workflow(self, workflow_path: Path, test_job): ) self._verify(run_summary, test_job["outputs"]) - def _verify(self, run_summary: RunJobsSummary, output_definitions): + def _verify(self, run_summary: RunJobsSummary, output_definitions: OutputsDict): for output_name, output_definition in output_definitions.items(): self._verify_output(run_summary, output_name, output_definition) - def _verify_output(self, run_summary: RunJobsSummary, output_name, test_properties): - is_collection_test = "elements" in test_properties + def _verify_output(self, run_summary: RunJobsSummary, output_name, test_properties: OutputChecks): + is_collection_test = isinstance(test_properties, dict) and "elements" in test_properties item_label = f"Output named {output_name}" def get_filename(name): return tempfile.NamedTemporaryFile(prefix=f"gx_workflow_framework_test_file_{output_name}", delete=False) - def verify_dataset(dataset: dict, test_properties: dict): + def verify_dataset(dataset: dict, test_properties: OutputChecks): output_content = self.dataset_populator.get_history_dataset_content( run_summary.history_id, dataset=dataset, type="bytes" ) verify_file_contents_against_dict(get_filename, _get_location, item_label, output_content, test_properties) - metadata = get_metadata_to_test(test_properties) - if metadata: - dataset_details = self.dataset_populator.get_history_dataset_details( - run_summary.history_id, 
content_id=dataset["id"] - ) - compare_expected_metadata_to_api_response(metadata, dataset_details) + if isinstance(test_properties, dict): + metadata = get_metadata_to_test(test_properties) + if metadata: + dataset_details = self.dataset_populator.get_history_dataset_details( + run_summary.history_id, content_id=dataset["id"] + ) + compare_expected_metadata_to_api_response(metadata, dataset_details) if is_collection_test: + assert isinstance(test_properties, dict) test_properties["name"] = output_name # setup preferred name "elements" in accordance with work in https://github.com/galaxyproject/planemo/pull/1417 test_properties["element_tests"] = test_properties["elements"] @@ -105,14 +113,15 @@ def verify_dataset_element(element, test_properties, element_outfile): verify_collection(output_def, output_collection, verify_dataset_element) else: - test_properties["name"] = output_name + if isinstance(test_properties, dict): + test_properties["name"] = output_name invocation_details = self.workflow_populator.get_invocation(run_summary.invocation_id, step_details=True) assert output_name in invocation_details["outputs"] test_output = invocation_details["outputs"][output_name] verify_dataset(test_output, test_properties) -def _test_jobs(workflow_path: Path) -> list: +def _test_jobs(workflow_path: Path) -> TestDicts: test_path = _workflow_test_path(workflow_path) with test_path.open() as f: jobs = yaml.safe_load(f) diff --git a/packages/tool_util/setup.cfg b/packages/tool_util/setup.cfg index 6fd52ba67a7f..eabca1a6e1b9 100644 --- a/packages/tool_util/setup.cfg +++ b/packages/tool_util/setup.cfg @@ -51,6 +51,7 @@ console_scripts = galaxy-tool-test = galaxy.tool_util.verify.script:main galaxy-tool-test-case-validation = galaxy.tool_util.parameters.scripts.validate_test_cases:main galaxy-tool-upgrade-advisor = galaxy.tool_util.upgrade.script:main + validate-test-format = galaxy.tool_util.validate_test_format:main mulled-build = galaxy.tool_util.deps.mulled.mulled_build:main 
mulled-build-channel = galaxy.tool_util.deps.mulled.mulled_build_channel:main mulled-build-files = galaxy.tool_util.deps.mulled.mulled_build_files:main diff --git a/test/functional/tools/sample_tool_conf.xml b/test/functional/tools/sample_tool_conf.xml index 8e22ee39eace..ebc4fb3cbd59 100644 --- a/test/functional/tools/sample_tool_conf.xml +++ b/test/functional/tools/sample_tool_conf.xml @@ -139,6 +139,7 @@ + diff --git a/test/unit/tool_util/test_test_format_model.py b/test/unit/tool_util/test_test_format_model.py new file mode 100644 index 000000000000..986f5c12ba87 --- /dev/null +++ b/test/unit/tool_util/test_test_format_model.py @@ -0,0 +1,39 @@ +import os +from pathlib import Path +from typing import List + +import yaml + +from galaxy.tool_util.models import Tests +from galaxy.util import galaxy_directory +from galaxy.util.unittest_utils import skip_unless_environ + +TEST_WORKFLOW_DIRECTORY = os.path.join(galaxy_directory(), "lib", "galaxy_test", "workflow") +IWC_WORKFLOWS_USING_UNVERIFIED_SYNTAX: List[str] = [] + + +def test_validate_workflow_tests(): + path = Path(TEST_WORKFLOW_DIRECTORY) + test_files = path.glob("*.gxwf-tests.yml") + for test_file in test_files: + with open(test_file) as f: + json = yaml.safe_load(f) + Tests.model_validate(json) + + +@skip_unless_environ("GALAXY_TEST_IWC_DIRECTORY") +def test_iwc_directory(): + path = Path(os.environ["GALAXY_TEST_IWC_DIRECTORY"]) + test_files = path.glob("workflows/**/*-test*.yml") + + for test_file in test_files: + print(test_file) + skip_file = False + for unverified in IWC_WORKFLOWS_USING_UNVERIFIED_SYNTAX: + if str(test_file).endswith(unverified): + skip_file = True + if skip_file: + continue + with open(test_file) as f: + json = yaml.safe_load(f) + Tests.model_validate(json) From b07750554df312baa31715bc13fd0bfc0d200445 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 24 Sep 2024 09:14:49 -0400 Subject: [PATCH 54/64] Migrate mapover expression test to wf test framework. 
--- lib/galaxy_test/api/test_workflows.py | 50 ------------------- .../map_over_expression.gxwf-tests.yml | 22 ++++++++ .../workflow/map_over_expression.gxwf.yml | 18 +++++++ 3 files changed, 40 insertions(+), 50 deletions(-) create mode 100644 lib/galaxy_test/workflow/map_over_expression.gxwf-tests.yml create mode 100644 lib/galaxy_test/workflow/map_over_expression.gxwf.yml diff --git a/lib/galaxy_test/api/test_workflows.py b/lib/galaxy_test/api/test_workflows.py index d7a77b04bb22..b6a5ec037bea 100644 --- a/lib/galaxy_test/api/test_workflows.py +++ b/lib/galaxy_test/api/test_workflows.py @@ -5150,56 +5150,6 @@ def test_run_with_numeric_input_connection(self, history_id): assert int(str_43) == 43 assert abs(float(str_4point14) - 4.14) < 0.0001 - @skip_without_tool("param_value_from_file") - def test_expression_tool_map_over(self, history_id): - self._run_jobs( - """ -class: GalaxyWorkflow -inputs: - text_input1: collection -steps: -- label: param_out - tool_id: param_value_from_file - in: - input1: text_input1 -- label: consume_expression_parameter - tool_id: validation_default - in: - input1: param_out/text_param - outputs: - out_file1: - rename: "replaced_param_collection" -test_data: - text_input1: - collection_type: list - elements: - - identifier: A - content: A - - identifier: B - content: B -""", - history_id=history_id, - ) - history_contents = self._get(f"histories/{history_id}/contents").json() - collection = [ - c - for c in history_contents - if c["history_content_type"] == "dataset_collection" and c["name"] == "replaced_param_collection" - ][0] - collection_details = self._get(collection["url"]).json() - assert collection_details["element_count"] == 2 - elements = collection_details["elements"] - assert elements[0]["element_identifier"] == "A" - assert elements[1]["element_identifier"] == "B" - element_a_content = self.dataset_populator.get_history_dataset_content( - history_id, dataset=elements[0]["object"] - ) - element_b_content = 
self.dataset_populator.get_history_dataset_content( - history_id, dataset=elements[1]["object"] - ) - assert element_a_content.strip() == "A" - assert element_b_content.strip() == "B" - @skip_without_tool("create_input_collection") def test_workflow_optional_input_text_parameter_reevaluation(self): with self.dataset_populator.test_history() as history_id: diff --git a/lib/galaxy_test/workflow/map_over_expression.gxwf-tests.yml b/lib/galaxy_test/workflow/map_over_expression.gxwf-tests.yml new file mode 100644 index 000000000000..0357ca6c53ca --- /dev/null +++ b/lib/galaxy_test/workflow/map_over_expression.gxwf-tests.yml @@ -0,0 +1,22 @@ +- doc: | + Test to verify text parameter can be connected to data column param + job: + text_input1: + collection_type: list + elements: + - identifier: A + content: A + - identifier: B + content: B + outputs: + out1: + attributes: { collection_type: list } + elements: + A: + asserts: + - that: has_line + line: A + B: + asserts: + - that: has_line + line: B diff --git a/lib/galaxy_test/workflow/map_over_expression.gxwf.yml b/lib/galaxy_test/workflow/map_over_expression.gxwf.yml new file mode 100644 index 000000000000..c361b2232022 --- /dev/null +++ b/lib/galaxy_test/workflow/map_over_expression.gxwf.yml @@ -0,0 +1,18 @@ +class: GalaxyWorkflow +inputs: + text_input1: collection +outputs: + out1: + outputSource: consume_expression_parameter/out_file1 +steps: + param_out: + tool_id: param_value_from_file + in: + input1: text_input1 + consume_expression_parameter: + tool_id: validation_default + in: + input1: param_out/text_param + outputs: + out_file1: + rename: "replaced_param_collection" From 2d8bb3d913ea209e83edc4d5dc370c26d1d4ac6c Mon Sep 17 00:00:00 2001 From: John Chilton Date: Tue, 24 Sep 2024 11:36:09 -0400 Subject: [PATCH 55/64] Workflow test with literal test outputs. 
--- .../workflow/output_parameter.gxwf-tests.yml | 8 ++++++++ lib/galaxy_test/workflow/output_parameter.gxwf.yml | 13 +++++++++++++ 2 files changed, 21 insertions(+) create mode 100644 lib/galaxy_test/workflow/output_parameter.gxwf-tests.yml create mode 100644 lib/galaxy_test/workflow/output_parameter.gxwf.yml diff --git a/lib/galaxy_test/workflow/output_parameter.gxwf-tests.yml b/lib/galaxy_test/workflow/output_parameter.gxwf-tests.yml new file mode 100644 index 000000000000..d422d9e3d836 --- /dev/null +++ b/lib/galaxy_test/workflow/output_parameter.gxwf-tests.yml @@ -0,0 +1,8 @@ +- doc: | + Test to verify exact output parameter verification works propery. + job: + text_int: + type: File + content: "43" + outputs: + out_int: 43 diff --git a/lib/galaxy_test/workflow/output_parameter.gxwf.yml b/lib/galaxy_test/workflow/output_parameter.gxwf.yml new file mode 100644 index 000000000000..4157f894f492 --- /dev/null +++ b/lib/galaxy_test/workflow/output_parameter.gxwf.yml @@ -0,0 +1,13 @@ +class: GalaxyWorkflow +inputs: + text_int: data +outputs: + out_int: + outputSource: param_out/integer_param +steps: + param_out: + tool_id: param_value_from_file + state: + param_type: integer + in: + input1: text_int From a38f610067ea32c1d1fb07c62568e5033c3f8368 Mon Sep 17 00:00:00 2001 From: John Chilton Date: Mon, 23 Sep 2024 14:22:47 -0400 Subject: [PATCH 56/64] Rebuild assertion models. 
--- .../tool_util/verify/assertion_models.py | 1035 ++++++++++++----- 1 file changed, 720 insertions(+), 315 deletions(-) diff --git a/lib/galaxy/tool_util/verify/assertion_models.py b/lib/galaxy/tool_util/verify/assertion_models.py index eb4429170655..142c476ffdb6 100644 --- a/lib/galaxy/tool_util/verify/assertion_models.py +++ b/lib/galaxy/tool_util/verify/assertion_models.py @@ -81,12 +81,8 @@ def check_non_negative_if_int(v: typing.Any): has_line_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_line_model(AssertionModel): - r"""Asserts the specified output contains the line specified by the - argument line. The exact number of occurrences can be optionally - specified by the argument n""" - - that: Literal["has_line"] = "has_line" +class base_has_line_model(AssertionModel): + """base model for has_line describing attributes.""" line: str = Field( ..., @@ -133,6 +129,20 @@ class has_line_model(AssertionModel): ) +class has_line_model(base_has_line_model): + r"""Asserts the specified output contains the line specified by the + argument line. The exact number of occurrences can be optionally + specified by the argument n""" + + that: Literal["has_line"] = "has_line" + + +class has_line_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_line: base_has_line_model + + has_line_matching_expression_description = """The regular expressions to attempt match in the output.""" has_line_matching_n_description = """Desired number, can be suffixed by ``(k|M|G|T|P|E)i?``""" @@ -148,12 +158,8 @@ class has_line_model(AssertionModel): has_line_matching_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_line_matching_model(AssertionModel): - r"""Asserts the specified output contains a line matching the - regular expression specified by the argument expression. 
If n is given - the assertion checks for exactly n occurences.""" - - that: Literal["has_line_matching"] = "has_line_matching" +class base_has_line_matching_model(AssertionModel): + """base model for has_line_matching describing attributes.""" expression: str = Field( ..., @@ -200,6 +206,20 @@ class has_line_matching_model(AssertionModel): ) +class has_line_matching_model(base_has_line_matching_model): + r"""Asserts the specified output contains a line matching the + regular expression specified by the argument expression. If n is given + the assertion checks for exactly n occurences.""" + + that: Literal["has_line_matching"] = "has_line_matching" + + +class has_line_matching_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_line_matching: base_has_line_matching_model + + has_n_lines_n_description = """Desired number, can be suffixed by ``(k|M|G|T|P|E)i?``""" has_n_lines_delta_description = ( @@ -213,12 +233,8 @@ class has_line_matching_model(AssertionModel): has_n_lines_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_n_lines_model(AssertionModel): - r"""Asserts the specified output contains ``n`` lines allowing - for a difference in the number of lines (delta) - or relative differebce in the number of lines""" - - that: Literal["has_n_lines"] = "has_n_lines" +class base_has_n_lines_model(AssertionModel): + """base model for has_n_lines describing attributes.""" n: Annotated[ typing.Optional[typing.Union[str, int]], @@ -260,6 +276,20 @@ class has_n_lines_model(AssertionModel): ) +class has_n_lines_model(base_has_n_lines_model): + r"""Asserts the specified output contains ``n`` lines allowing + for a difference in the number of lines (delta) + or relative differebce in the number of lines""" + + that: Literal["has_n_lines"] = "has_n_lines" + + +class has_n_lines_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_n_lines: 
base_has_n_lines_model + + has_text_text_description = """The text to search for in the output.""" has_text_n_description = """Desired number, can be suffixed by ``(k|M|G|T|P|E)i?``""" @@ -275,12 +305,8 @@ class has_n_lines_model(AssertionModel): has_text_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_text_model(AssertionModel): - r"""Asserts specified output contains the substring specified by - the argument text. The exact number of occurrences can be - optionally specified by the argument n""" - - that: Literal["has_text"] = "has_text" +class base_has_text_model(AssertionModel): + """base model for has_text describing attributes.""" text: str = Field( ..., @@ -327,6 +353,20 @@ class has_text_model(AssertionModel): ) +class has_text_model(base_has_text_model): + r"""Asserts specified output contains the substring specified by + the argument text. The exact number of occurrences can be + optionally specified by the argument n""" + + that: Literal["has_text"] = "has_text" + + +class has_text_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_text: base_has_text_model + + has_text_matching_expression_description = """The regular expressions to attempt match in the output.""" has_text_matching_n_description = """Desired number, can be suffixed by ``(k|M|G|T|P|E)i?``""" @@ -342,13 +382,8 @@ class has_text_model(AssertionModel): has_text_matching_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_text_matching_model(AssertionModel): - r"""Asserts the specified output contains text matching the - regular expression specified by the argument expression. 
- If n is given the assertion checks for exacly n (nonoverlapping) - occurences.""" - - that: Literal["has_text_matching"] = "has_text_matching" +class base_has_text_matching_model(AssertionModel): + """base model for has_text_matching describing attributes.""" expression: str = Field( ..., @@ -395,14 +430,26 @@ class has_text_matching_model(AssertionModel): ) -not_has_text_text_description = """The text to search for in the output.""" +class has_text_matching_model(base_has_text_matching_model): + r"""Asserts the specified output contains text matching the + regular expression specified by the argument expression. + If n is given the assertion checks for exacly n (nonoverlapping) + occurences.""" + + that: Literal["has_text_matching"] = "has_text_matching" -class not_has_text_model(AssertionModel): - r"""Asserts specified output does not contain the substring - specified by the argument text""" +class has_text_matching_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_text_matching: base_has_text_matching_model + + +not_has_text_text_description = """The text to search for in the output.""" - that: Literal["not_has_text"] = "not_has_text" + +class base_not_has_text_model(AssertionModel): + """base model for not_has_text describing attributes.""" text: str = Field( ..., @@ -410,6 +457,19 @@ class not_has_text_model(AssertionModel): ) +class not_has_text_model(base_not_has_text_model): + r"""Asserts specified output does not contain the substring + specified by the argument text""" + + that: Literal["not_has_text"] = "not_has_text" + + +class not_has_text_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + not_has_text: base_not_has_text_model + + has_n_columns_n_description = """Desired number, can be suffixed by ``(k|M|G|T|P|E)i?``""" has_n_columns_delta_description = ( @@ -429,19 +489,8 @@ class not_has_text_model(AssertionModel): has_n_columns_negate_description = """A boolean that can be set to 
true to negate the outcome of the assertion.""" -class has_n_columns_model(AssertionModel): - r"""Asserts tabular output contains the specified - number (``n``) of columns. - - For instance, ````. The assertion tests only the first line. - Number of columns can optionally also be specified with ``delta``. Alternatively the - range of expected occurences can be specified by ``min`` and/or ``max``. - - Optionally a column separator (``sep``, default is `` ``) `and comment character(s) - can be specified (``comment``, default is empty string). The first non-comment - line is used for determining the number of columns.""" - - that: Literal["has_n_columns"] = "has_n_columns" +class base_has_n_columns_model(AssertionModel): + """base model for has_n_columns describing attributes.""" n: Annotated[ typing.Optional[typing.Union[str, int]], @@ -493,30 +542,38 @@ class has_n_columns_model(AssertionModel): ) -attribute_is_path_description = """The Python xpath-like expression to find the target element.""" +class has_n_columns_model(base_has_n_columns_model): + r"""Asserts tabular output contains the specified + number (``n``) of columns. -attribute_is_attribute_description = """The XML attribute name to test against from the target XML element.""" + For instance, ````. The assertion tests only the first line. + Number of columns can optionally also be specified with ``delta``. Alternatively the + range of expected occurences can be specified by ``min`` and/or ``max``. -attribute_is_text_description = """The expected attribute value to test against on the target XML element""" + Optionally a column separator (``sep``, default is `` ``) `and comment character(s) + can be specified (``comment``, default is empty string). 
The first non-comment + line is used for determining the number of columns.""" -attribute_is_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" + that: Literal["has_n_columns"] = "has_n_columns" -class attribute_is_model(AssertionModel): - r"""Asserts the XML ``attribute`` for the element (or tag) with the specified - XPath-like ``path`` is the specified ``text``. +class has_n_columns_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" - For example: + has_n_columns: base_has_n_columns_model - ```xml - - ``` - The assertion implicitly also asserts that an element matching ``path`` exists. - With ``negate`` the result of the assertion (on the equality) can be inverted (the - implicit assertion on the existence of the path is not affected).""" +attribute_is_path_description = """The Python xpath-like expression to find the target element.""" + +attribute_is_attribute_description = """The XML attribute name to test against from the target XML element.""" + +attribute_is_text_description = """The expected attribute value to test against on the target XML element""" + +attribute_is_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" - that: Literal["attribute_is"] = "attribute_is" + +class base_attribute_is_model(AssertionModel): + """base model for attribute_is describing attributes.""" path: str = Field( ..., @@ -539,6 +596,29 @@ class attribute_is_model(AssertionModel): ) +class attribute_is_model(base_attribute_is_model): + r"""Asserts the XML ``attribute`` for the element (or tag) with the specified + XPath-like ``path`` is the specified ``text``. + + For example: + + ```xml + + ``` + + The assertion implicitly also asserts that an element matching ``path`` exists. 
+ With ``negate`` the result of the assertion (on the equality) can be inverted (the + implicit assertion on the existence of the path is not affected).""" + + that: Literal["attribute_is"] = "attribute_is" + + +class attribute_is_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + attribute_is: base_attribute_is_model + + attribute_matches_path_description = """The Python xpath-like expression to find the target element.""" attribute_matches_attribute_description = """The XML attribute name to test against from the target XML element.""" @@ -550,21 +630,8 @@ class attribute_is_model(AssertionModel): attribute_matches_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class attribute_matches_model(AssertionModel): - r"""Asserts the XML ``attribute`` for the element (or tag) with the specified - XPath-like ``path`` matches the regular expression specified by ``expression``. - - For example: - - ```xml - - ``` - - The assertion implicitly also asserts that an element matching ``path`` exists. - With ``negate`` the result of the assertion (on the matching) can be inverted (the - implicit assertion on the existence of the path is not affected).""" - - that: Literal["attribute_matches"] = "attribute_matches" +class base_attribute_matches_model(AssertionModel): + """base model for attribute_matches describing attributes.""" path: str = Field( ..., @@ -587,12 +654,59 @@ class attribute_matches_model(AssertionModel): ) +class attribute_matches_model(base_attribute_matches_model): + r"""Asserts the XML ``attribute`` for the element (or tag) with the specified + XPath-like ``path`` matches the regular expression specified by ``expression``. + + For example: + + ```xml + + ``` + + The assertion implicitly also asserts that an element matching ``path`` exists. 
+ With ``negate`` the result of the assertion (on the matching) can be inverted (the + implicit assertion on the existence of the path is not affected).""" + + that: Literal["attribute_matches"] = "attribute_matches" + + +class attribute_matches_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + attribute_matches: base_attribute_matches_model + + element_text_path_description = """The Python xpath-like expression to find the target element.""" element_text_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class element_text_model(AssertionModel): +class base_element_text_model(AssertionModel): + """base model for element_text describing attributes.""" + + path: str = Field( + ..., + description=element_text_path_description, + ) + + negate: typing.Union[bool, str] = Field( + False, + description=element_text_negate_description, + ) + + children: typing.Optional["assertion_list"] = None + asserts: typing.Optional["assertion_list"] = None + + @model_validator(mode="before") + @classmethod + def validate_children(self, data: typing.Any): + if isinstance(data, dict) and "children" not in data and "asserts" not in data: + raise ValueError("At least one of 'children' or 'asserts' must be specified for this assertion type.") + return data + + +class element_text_model(base_element_text_model): r"""This tag allows the developer to recurisively specify additional assertions as child elements about just the text contained in the element specified by the XPath-like ``path``, e.g. 
@@ -610,17 +724,11 @@ class element_text_model(AssertionModel): that: Literal["element_text"] = "element_text" - path: str = Field( - ..., - description=element_text_path_description, - ) - negate: typing.Union[bool, str] = Field( - False, - description=element_text_negate_description, - ) +class element_text_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" - children: "assertion_list" + element_text: base_element_text_model element_text_is_path_description = """The Python xpath-like expression to find the target element.""" @@ -632,7 +740,26 @@ class element_text_model(AssertionModel): element_text_is_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class element_text_is_model(AssertionModel): +class base_element_text_is_model(AssertionModel): + """base model for element_text_is describing attributes.""" + + path: str = Field( + ..., + description=element_text_is_path_description, + ) + + text: str = Field( + ..., + description=element_text_is_text_description, + ) + + negate: typing.Union[bool, str] = Field( + False, + description=element_text_is_negate_description, + ) + + +class element_text_is_model(base_element_text_is_model): r"""Asserts the text of the XML element with the specified XPath-like ``path`` is the specified ``text``. 
@@ -648,20 +775,11 @@ class element_text_is_model(AssertionModel): that: Literal["element_text_is"] = "element_text_is" - path: str = Field( - ..., - description=element_text_is_path_description, - ) - text: str = Field( - ..., - description=element_text_is_text_description, - ) +class element_text_is_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" - negate: typing.Union[bool, str] = Field( - False, - description=element_text_is_negate_description, - ) + element_text_is: base_element_text_is_model element_text_matches_path_description = """The Python xpath-like expression to find the target element.""" @@ -673,21 +791,8 @@ class element_text_is_model(AssertionModel): ) -class element_text_matches_model(AssertionModel): - r"""Asserts the text of the XML element with the specified XPath-like ``path`` - matches the regular expression defined by ``expression``. - - For example: - - ```xml - - ``` - - The assertion implicitly also asserts that an element matching ``path`` exists. - With ``negate`` the result of the assertion (on the matching) can be inverted (the - implicit assertion on the existence of the path is not affected).""" - - that: Literal["element_text_matches"] = "element_text_matches" +class base_element_text_matches_model(AssertionModel): + """base model for element_text_matches describing attributes.""" path: str = Field( ..., @@ -705,24 +810,38 @@ class element_text_matches_model(AssertionModel): ) -has_element_with_path_path_description = """The Python xpath-like expression to find the target element.""" +class element_text_matches_model(base_element_text_matches_model): + r"""Asserts the text of the XML element with the specified XPath-like ``path`` + matches the regular expression defined by ``expression``. 
-has_element_with_path_negate_description = ( - """A boolean that can be set to true to negate the outcome of the assertion.""" -) - - -class has_element_with_path_model(AssertionModel): - r"""Asserts the XML output contains at least one element (or tag) with the specified - XPath-like ``path``, e.g. + For example: ```xml - + ``` - With ``negate`` the result of the assertion can be inverted.""" + The assertion implicitly also asserts that an element matching ``path`` exists. + With ``negate`` the result of the assertion (on the matching) can be inverted (the + implicit assertion on the existence of the path is not affected).""" - that: Literal["has_element_with_path"] = "has_element_with_path" + that: Literal["element_text_matches"] = "element_text_matches" + + +class element_text_matches_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + element_text_matches: base_element_text_matches_model + + +has_element_with_path_path_description = """The Python xpath-like expression to find the target element.""" + +has_element_with_path_negate_description = ( + """A boolean that can be set to true to negate the outcome of the assertion.""" +) + + +class base_has_element_with_path_model(AssertionModel): + """base model for has_element_with_path describing attributes.""" path: str = Field( ..., @@ -735,6 +854,25 @@ class has_element_with_path_model(AssertionModel): ) +class has_element_with_path_model(base_has_element_with_path_model): + r"""Asserts the XML output contains at least one element (or tag) with the specified + XPath-like ``path``, e.g. 
+ + ```xml + + ``` + + With ``negate`` the result of the assertion can be inverted.""" + + that: Literal["has_element_with_path"] = "has_element_with_path" + + +class has_element_with_path_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_element_with_path: base_has_element_with_path_model + + has_n_elements_with_path_path_description = """The Python xpath-like expression to find the target element.""" has_n_elements_with_path_n_description = """Desired number, can be suffixed by ``(k|M|G|T|P|E)i?``""" @@ -756,21 +894,8 @@ class has_element_with_path_model(AssertionModel): ) -class has_n_elements_with_path_model(AssertionModel): - r"""Asserts the XML output contains the specified number (``n``, optionally with ``delta``) of elements (or - tags) with the specified XPath-like ``path``. - - For example: - - ```xml - - ``` - - Alternatively to ``n`` and ``delta`` also the ``min`` and ``max`` attributes - can be used to specify the range of the expected number of occurences. - With ``negate`` the result of the assertion can be inverted.""" - - that: Literal["has_n_elements_with_path"] = "has_n_elements_with_path" +class base_has_n_elements_with_path_model(AssertionModel): + """base model for has_n_elements_with_path describing attributes.""" path: str = Field( ..., @@ -817,12 +942,45 @@ class has_n_elements_with_path_model(AssertionModel): ) -class is_valid_xml_model(AssertionModel): +class has_n_elements_with_path_model(base_has_n_elements_with_path_model): + r"""Asserts the XML output contains the specified number (``n``, optionally with ``delta``) of elements (or + tags) with the specified XPath-like ``path``. + + For example: + + ```xml + + ``` + + Alternatively to ``n`` and ``delta`` also the ``min`` and ``max`` attributes + can be used to specify the range of the expected number of occurences. 
+ With ``negate`` the result of the assertion can be inverted.""" + + that: Literal["has_n_elements_with_path"] = "has_n_elements_with_path" + + +class has_n_elements_with_path_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_n_elements_with_path: base_has_n_elements_with_path_model + + +class base_is_valid_xml_model(AssertionModel): + """base model for is_valid_xml describing attributes.""" + + +class is_valid_xml_model(base_is_valid_xml_model): r"""Asserts the output is a valid XML file (e.g. ````).""" that: Literal["is_valid_xml"] = "is_valid_xml" +class is_valid_xml_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + is_valid_xml: base_is_valid_xml_model + + xml_element_path_description = """The Python xpath-like expression to find the target element.""" xml_element_attribute_description = """The XML attribute name to test against from the target XML element.""" @@ -844,40 +1002,8 @@ class is_valid_xml_model(AssertionModel): xml_element_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class xml_element_model(AssertionModel): - r"""Assert if the XML file contains element(s) or tag(s) with the specified - [XPath-like ``path``](https://lxml.de/xpathxslt.html). If ``n`` and ``delta`` - or ``min`` and ``max`` are given also the number of occurences is checked. - - ```xml - - - - - - ``` - - With ``negate="true"`` the outcome of the assertions wrt the precence and number - of ``path`` can be negated. If there are any sub assertions then check them against - - - the content of the attribute ``attribute`` - - the element's text if no attribute is given - - ```xml - - - - - - ``` - - Sub-assertions are not subject to the ``negate`` attribute of ``xml_element``. - If ``all`` is ``true`` then the sub assertions are checked for all occurences. 
- - Note that all other XML assertions can be expressed by this assertion (Galaxy - also implements the other assertions by calling this one).""" - - that: Literal["xml_element"] = "xml_element" +class base_xml_element_model(AssertionModel): + """base model for xml_element describing attributes.""" path: str = Field( ..., @@ -934,21 +1060,58 @@ class xml_element_model(AssertionModel): ) children: typing.Optional["assertion_list"] = None + asserts: typing.Optional["assertion_list"] = None -has_json_property_with_text_property_description = """The property name to search the JSON document for.""" +class xml_element_model(base_xml_element_model): + r"""Assert if the XML file contains element(s) or tag(s) with the specified + [XPath-like ``path``](https://lxml.de/xpathxslt.html). If ``n`` and ``delta`` + or ``min`` and ``max`` are given also the number of occurences is checked. -has_json_property_with_text_text_description = """The expected text value of the target JSON attribute.""" + ```xml + + + + + + ``` + With ``negate="true"`` the outcome of the assertions wrt the precence and number + of ``path`` can be negated. If there are any sub assertions then check them against -class has_json_property_with_text_model(AssertionModel): - r"""Asserts the JSON document contains a property or key with the specified text (i.e. string) value. + - the content of the attribute ``attribute`` + - the element's text if no attribute is given ```xml - - ```""" + + + + + + ``` + + Sub-assertions are not subject to the ``negate`` attribute of ``xml_element``. + If ``all`` is ``true`` then the sub assertions are checked for all occurences. 
+ + Note that all other XML assertions can be expressed by this assertion (Galaxy + also implements the other assertions by calling this one).""" + + that: Literal["xml_element"] = "xml_element" + + +class xml_element_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + xml_element: base_xml_element_model - that: Literal["has_json_property_with_text"] = "has_json_property_with_text" + +has_json_property_with_text_property_description = """The property name to search the JSON document for.""" + +has_json_property_with_text_text_description = """The expected text value of the target JSON attribute.""" + + +class base_has_json_property_with_text_model(AssertionModel): + """base model for has_json_property_with_text describing attributes.""" property: str = Field( ..., @@ -961,6 +1124,22 @@ class has_json_property_with_text_model(AssertionModel): ) +class has_json_property_with_text_model(base_has_json_property_with_text_model): + r"""Asserts the JSON document contains a property or key with the specified text (i.e. string) value. + + ```xml + + ```""" + + that: Literal["has_json_property_with_text"] = "has_json_property_with_text" + + +class has_json_property_with_text_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_json_property_with_text: base_has_json_property_with_text_model + + has_json_property_with_value_property_description = """The property name to search the JSON document for.""" has_json_property_with_value_value_description = ( @@ -968,14 +1147,8 @@ class has_json_property_with_text_model(AssertionModel): ) -class has_json_property_with_value_model(AssertionModel): - r"""Asserts the JSON document contains a property or key with the specified JSON value. 
- - ```xml - - ```""" - - that: Literal["has_json_property_with_value"] = "has_json_property_with_value" +class base_has_json_property_with_value_model(AssertionModel): + """base model for has_json_property_with_value describing attributes.""" property: str = Field( ..., @@ -988,19 +1161,29 @@ class has_json_property_with_value_model(AssertionModel): ) -has_h5_attribute_key_description = """HDF5 attribute to check value of.""" +class has_json_property_with_value_model(base_has_json_property_with_value_model): + r"""Asserts the JSON document contains a property or key with the specified JSON value. -has_h5_attribute_value_description = """Expected value of HDF5 attribute to check.""" + ```xml + + ```""" + that: Literal["has_json_property_with_value"] = "has_json_property_with_value" -class has_h5_attribute_model(AssertionModel): - r"""Asserts HDF5 output contains the specified ``value`` for an attribute (``key``), e.g. - ```xml - - ```""" +class has_json_property_with_value_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_json_property_with_value: base_has_json_property_with_value_model - that: Literal["has_h5_attribute"] = "has_h5_attribute" + +has_h5_attribute_key_description = """HDF5 attribute to check value of.""" + +has_h5_attribute_value_description = """Expected value of HDF5 attribute to check.""" + + +class base_has_h5_attribute_model(AssertionModel): + """base model for has_h5_attribute describing attributes.""" key: str = Field( ..., @@ -1013,13 +1196,27 @@ class has_h5_attribute_model(AssertionModel): ) -has_h5_keys_keys_description = """HDF5 attributes to check value of as a comma-separated string.""" +class has_h5_attribute_model(base_has_h5_attribute_model): + r"""Asserts HDF5 output contains the specified ``value`` for an attribute (``key``), e.g. 
+ + ```xml + + ```""" + + that: Literal["has_h5_attribute"] = "has_h5_attribute" -class has_h5_keys_model(AssertionModel): - r"""Asserts the specified HDF5 output has the given keys.""" +class has_h5_attribute_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_h5_attribute: base_has_h5_attribute_model - that: Literal["has_h5_keys"] = "has_h5_keys" + +has_h5_keys_keys_description = """HDF5 attributes to check value of as a comma-separated string.""" + + +class base_has_h5_keys_model(AssertionModel): + """base model for has_h5_keys describing attributes.""" keys: str = Field( ..., @@ -1027,6 +1224,18 @@ class has_h5_keys_model(AssertionModel): ) +class has_h5_keys_model(base_has_h5_keys_model): + r"""Asserts the specified HDF5 output has the given keys.""" + + that: Literal["has_h5_keys"] = "has_h5_keys" + + +class has_h5_keys_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_h5_keys: base_has_h5_keys_model + + has_archive_member_path_description = """The regular expression specifying the archive member.""" has_archive_member_all_description = ( @@ -1046,53 +1255,8 @@ class has_h5_keys_model(AssertionModel): has_archive_member_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_archive_member_model(AssertionModel): - r"""This tag allows to check if ``path`` is contained in a compressed file. - - The path is a regular expression that is matched against the full paths of the objects in - the compressed file (remember that "matching" means it is checked if a prefix of - the full path of an archive member is described by the regular expression). - Valid archive formats include ``.zip``, ``.tar``, and ``.tar.gz``. 
Note that - depending on the archive creation method: - - - full paths of the members may be prefixed with ``./`` - - directories may be treated as empty files - - ```xml - - ``` - - With ``n`` and ``delta`` (or ``min`` and ``max``) assertions on the number of - archive members matching ``path`` can be expressed. The following could be used, - e.g., to assert an archive containing n±1 elements out of which at least - 4 need to have a ``txt`` extension. - - ```xml - - - ``` - - In addition the tag can contain additional assertions as child elements about - the first member in the archive matching the regular expression ``path``. For - instance - - ```xml - - - - ``` - - If the ``all`` attribute is set to ``true`` then all archive members are subject - to the assertions. Note that, archive members matching the ``path`` are sorted - alphabetically. - - The ``negate`` attribute of the ``has_archive_member`` assertion only affects - the asserts on the presence and number of matching archive members, but not any - sub-assertions (which can offer the ``negate`` attribute on their own). The - check if the file is an archive at all, which is also done by the function, is - not affected.""" - - that: Literal["has_archive_member"] = "has_archive_member" +class base_has_archive_member_model(AssertionModel): + """base model for has_archive_member describing attributes.""" path: str = Field( ..., @@ -1144,6 +1308,62 @@ class has_archive_member_model(AssertionModel): ) children: typing.Optional["assertion_list"] = None + asserts: typing.Optional["assertion_list"] = None + + +class has_archive_member_model(base_has_archive_member_model): + r"""This tag allows to check if ``path`` is contained in a compressed file. + + The path is a regular expression that is matched against the full paths of the objects in + the compressed file (remember that "matching" means it is checked if a prefix of + the full path of an archive member is described by the regular expression). 
+ Valid archive formats include ``.zip``, ``.tar``, and ``.tar.gz``. Note that + depending on the archive creation method: + + - full paths of the members may be prefixed with ``./`` + - directories may be treated as empty files + + ```xml + + ``` + + With ``n`` and ``delta`` (or ``min`` and ``max``) assertions on the number of + archive members matching ``path`` can be expressed. The following could be used, + e.g., to assert an archive containing n±1 elements out of which at least + 4 need to have a ``txt`` extension. + + ```xml + + + ``` + + In addition the tag can contain additional assertions as child elements about + the first member in the archive matching the regular expression ``path``. For + instance + + ```xml + + + + ``` + + If the ``all`` attribute is set to ``true`` then all archive members are subject + to the assertions. Note that, archive members matching the ``path`` are sorted + alphabetically. + + The ``negate`` attribute of the ``has_archive_member`` assertion only affects + the asserts on the presence and number of matching archive members, but not any + sub-assertions (which can offer the ``negate`` attribute on their own). The + check if the file is an archive at all, which is also done by the function, is + not affected.""" + + that: Literal["has_archive_member"] = "has_archive_member" + + +class has_archive_member_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_archive_member: base_has_archive_member_model has_size_value_description = """Deprecated alias for `size`""" @@ -1161,13 +1381,17 @@ class has_archive_member_model(AssertionModel): has_size_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_size_model(AssertionModel): - r"""Asserts the specified output has a size of the specified value - - Attributes size and value or synonyms though value is considered deprecated. 
- The size optionally allows for absolute (``delta``) difference.""" +class base_has_size_model(AssertionModel): + """base model for has_size describing attributes.""" - that: Literal["has_size"] = "has_size" + value: Annotated[ + typing.Optional[typing.Union[str, int]], + BeforeValidator(check_bytes), + BeforeValidator(check_non_negative_if_int), + ] = Field( + None, + description=has_size_value_description, + ) size: Annotated[ typing.Optional[typing.Union[str, int]], @@ -1209,6 +1433,21 @@ class has_size_model(AssertionModel): ) +class has_size_model(base_has_size_model): + r"""Asserts the specified output has a size of the specified value + + Attributes size and value or synonyms though value is considered deprecated. + The size optionally allows for absolute (``delta``) difference.""" + + that: Literal["has_size"] = "has_size" + + +class has_size_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_size: base_has_size_model + + has_image_center_of_mass_center_of_mass_description = """The required center of mass of the image intensities (horizontal and vertical coordinate, separated by a comma).""" has_image_center_of_mass_channel_description = """Restricts the assertion to a specific channel of the image (where ``0`` corresponds to the first image channel).""" @@ -1218,14 +1457,8 @@ class has_size_model(AssertionModel): ) -class has_image_center_of_mass_model(AssertionModel): - r"""Asserts the specified output is an image and has the specified center of mass. 
- - Asserts the output is an image and has a specific center of mass, - or has an Euclidean distance of ``eps`` or less to that point (e.g., - ````).""" - - that: Literal["has_image_center_of_mass"] = "has_image_center_of_mass" +class base_has_image_center_of_mass_model(AssertionModel): + """base model for has_image_center_of_mass describing attributes.""" center_of_mass: Annotated[str, BeforeValidator(check_center_of_mass)] = Field( ..., @@ -1243,6 +1476,22 @@ class has_image_center_of_mass_model(AssertionModel): ) +class has_image_center_of_mass_model(base_has_image_center_of_mass_model): + r"""Asserts the specified output is an image and has the specified center of mass. + + Asserts the output is an image and has a specific center of mass, + or has an Euclidean distance of ``eps`` or less to that point (e.g., + ````).""" + + that: Literal["has_image_center_of_mass"] = "has_image_center_of_mass" + + +class has_image_center_of_mass_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_center_of_mass: base_has_image_center_of_mass_model + + has_image_channels_channels_description = """Expected number of channels of the image.""" has_image_channels_delta_description = """Maximum allowed difference of the number of channels (default is 0). The observed number of channels has to be in the range ``value +- delta``.""" @@ -1254,14 +1503,8 @@ class has_image_center_of_mass_model(AssertionModel): has_image_channels_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_image_channels_model(AssertionModel): - r"""Asserts the output is an image and has a specific number of channels. - - The number of channels is plus/minus ``delta`` (e.g., ````). 
- - Alternatively the range of the expected number of channels can be specified by ``min`` and/or ``max``.""" - - that: Literal["has_image_channels"] = "has_image_channels" +class base_has_image_channels_model(AssertionModel): + """base model for has_image_channels describing attributes.""" channels: Annotated[typing.Optional[StrictInt], BeforeValidator(check_non_negative_if_set)] = Field( None, @@ -1289,6 +1532,22 @@ class has_image_channels_model(AssertionModel): ) +class has_image_channels_model(base_has_image_channels_model): + r"""Asserts the output is an image and has a specific number of channels. + + The number of channels is plus/minus ``delta`` (e.g., ````). + + Alternatively the range of the expected number of channels can be specified by ``min`` and/or ``max``.""" + + that: Literal["has_image_channels"] = "has_image_channels" + + +class has_image_channels_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_channels: base_has_image_channels_model + + has_image_height_height_description = """Expected height of the image (in pixels).""" has_image_height_delta_description = """Maximum allowed difference of the image height (in pixels, default is 0). The observed height has to be in the range ``value +- delta``.""" @@ -1300,13 +1559,8 @@ class has_image_channels_model(AssertionModel): has_image_height_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_image_height_model(AssertionModel): - r"""Asserts the output is an image and has a specific height (in pixels). - - The height is plus/minus ``delta`` (e.g., ````). 
- Alternatively the range of the expected height can be specified by ``min`` and/or ``max``.""" - - that: Literal["has_image_height"] = "has_image_height" +class base_has_image_height_model(AssertionModel): + """base model for has_image_height describing attributes.""" height: Annotated[typing.Optional[StrictInt], BeforeValidator(check_non_negative_if_set)] = Field( None, @@ -1334,6 +1588,21 @@ class has_image_height_model(AssertionModel): ) +class has_image_height_model(base_has_image_height_model): + r"""Asserts the output is an image and has a specific height (in pixels). + + The height is plus/minus ``delta`` (e.g., ````). + Alternatively the range of the expected height can be specified by ``min`` and/or ``max``.""" + + that: Literal["has_image_height"] = "has_image_height" + + +class has_image_height_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_height: base_has_image_height_model + + has_image_mean_intensity_channel_description = """Restricts the assertion to a specific channel of the image (where ``0`` corresponds to the first image channel).""" has_image_mean_intensity_mean_intensity_description = """The required mean value of the image intensities.""" @@ -1345,13 +1614,8 @@ class has_image_height_model(AssertionModel): has_image_mean_intensity_max_description = """An upper bound of the required mean value of the image intensities.""" -class has_image_mean_intensity_model(AssertionModel): - r"""Asserts the output is an image and has a specific mean intensity value. - - The mean intensity value is plus/minus ``eps`` (e.g., ````). 
- Alternatively the range of the expected mean intensity value can be specified by ``min`` and/or ``max``.""" - - that: Literal["has_image_mean_intensity"] = "has_image_mean_intensity" +class base_has_image_mean_intensity_model(AssertionModel): + """base model for has_image_mean_intensity describing attributes.""" channel: typing.Optional[StrictInt] = Field( None, @@ -1379,6 +1643,21 @@ class has_image_mean_intensity_model(AssertionModel): ) +class has_image_mean_intensity_model(base_has_image_mean_intensity_model): + r"""Asserts the output is an image and has a specific mean intensity value. + + The mean intensity value is plus/minus ``eps`` (e.g., ````). + Alternatively the range of the expected mean intensity value can be specified by ``min`` and/or ``max``.""" + + that: Literal["has_image_mean_intensity"] = "has_image_mean_intensity" + + +class has_image_mean_intensity_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_mean_intensity: base_has_image_mean_intensity_model + + has_image_mean_object_size_channel_description = """Restricts the assertion to a specific channel of the image (where ``0`` corresponds to the first image channel).""" has_image_mean_object_size_labels_description = """List of labels, separated by a comma. Labels *not* on this list will be excluded from consideration. Cannot be used in combination with ``exclude_labels``.""" @@ -1398,14 +1677,8 @@ class has_image_mean_intensity_model(AssertionModel): ) -class has_image_mean_object_size_model(AssertionModel): - r"""Asserts the output is an image with labeled objects which have the specified mean size (number of pixels), - - The mean size is plus/minus ``eps`` (e.g., ````). 
- - The labels must be unique.""" - - that: Literal["has_image_mean_object_size"] = "has_image_mean_object_size" +class base_has_image_mean_object_size_model(AssertionModel): + """base model for has_image_mean_object_size describing attributes.""" channel: typing.Optional[StrictInt] = Field( None, @@ -1449,6 +1722,22 @@ class has_image_mean_object_size_model(AssertionModel): ) +class has_image_mean_object_size_model(base_has_image_mean_object_size_model): + r"""Asserts the output is an image with labeled objects which have the specified mean size (number of pixels), + + The mean size is plus/minus ``eps`` (e.g., ````). + + The labels must be unique.""" + + that: Literal["has_image_mean_object_size"] = "has_image_mean_object_size" + + +class has_image_mean_object_size_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_mean_object_size: base_has_image_mean_object_size_model + + has_image_n_labels_channel_description = """Restricts the assertion to a specific channel of the image (where ``0`` corresponds to the first image channel).""" has_image_n_labels_labels_description = """List of labels, separated by a comma. Labels *not* on this list will be excluded from consideration. Cannot be used in combination with ``exclude_labels``.""" @@ -1466,15 +1755,8 @@ class has_image_mean_object_size_model(AssertionModel): has_image_n_labels_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_image_n_labels_model(AssertionModel): - r"""Asserts the output is an image and has the specified labels. - - Labels can be a number of labels or unique values (e.g., - ````). 
- - The primary usage of this assertion is to verify the number of objects in images with uniquely labeled objects.""" - - that: Literal["has_image_n_labels"] = "has_image_n_labels" +class base_has_image_n_labels_model(AssertionModel): + """base model for has_image_n_labels describing attributes.""" channel: typing.Optional[StrictInt] = Field( None, @@ -1517,6 +1799,23 @@ class has_image_n_labels_model(AssertionModel): ) +class has_image_n_labels_model(base_has_image_n_labels_model): + r"""Asserts the output is an image and has the specified labels. + + Labels can be a number of labels or unique values (e.g., + ````). + + The primary usage of this assertion is to verify the number of objects in images with uniquely labeled objects.""" + + that: Literal["has_image_n_labels"] = "has_image_n_labels" + + +class has_image_n_labels_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_n_labels: base_has_image_n_labels_model + + has_image_width_width_description = """Expected width of the image (in pixels).""" has_image_width_delta_description = """Maximum allowed difference of the image width (in pixels, default is 0). The observed width has to be in the range ``value +- delta``.""" @@ -1528,13 +1827,8 @@ class has_image_n_labels_model(AssertionModel): has_image_width_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_image_width_model(AssertionModel): - r"""Asserts the output is an image and has a specific width (in pixels). - - The width is plus/minus ``delta`` (e.g., ````). 
- Alternatively the range of the expected width can be specified by ``min`` and/or ``max``.""" - - that: Literal["has_image_width"] = "has_image_width" +class base_has_image_width_model(AssertionModel): + """base model for has_image_width describing attributes.""" width: Annotated[typing.Optional[StrictInt], BeforeValidator(check_non_negative_if_set)] = Field( None, @@ -1562,7 +1856,22 @@ class has_image_width_model(AssertionModel): ) -any_assertion_model = Annotated[ +class has_image_width_model(base_has_image_width_model): + r"""Asserts the output is an image and has a specific width (in pixels). + + The width is plus/minus ``delta`` (e.g., ````). + Alternatively the range of the expected width can be specified by ``min`` and/or ``max``.""" + + that: Literal["has_image_width"] = "has_image_width" + + +class has_image_width_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_width: base_has_image_width_model + + +any_assertion_model_flat = Annotated[ typing.Union[ has_line_model, has_line_matching_model, @@ -1597,4 +1906,100 @@ class has_image_width_model(AssertionModel): Field(discriminator="that"), ] -assertion_list = RootModel[typing.List[any_assertion_model]] +any_assertion_model_nested = typing.Union[ + has_line_model_nested, + has_line_matching_model_nested, + has_n_lines_model_nested, + has_text_model_nested, + has_text_matching_model_nested, + not_has_text_model_nested, + has_n_columns_model_nested, + attribute_is_model_nested, + attribute_matches_model_nested, + element_text_model_nested, + element_text_is_model_nested, + element_text_matches_model_nested, + has_element_with_path_model_nested, + has_n_elements_with_path_model_nested, + is_valid_xml_model_nested, + xml_element_model_nested, + has_json_property_with_text_model_nested, + has_json_property_with_value_model_nested, + has_h5_attribute_model_nested, + has_h5_keys_model_nested, + has_archive_member_model_nested, + has_size_model_nested, + 
has_image_center_of_mass_model_nested, + has_image_channels_model_nested, + has_image_height_model_nested, + has_image_mean_intensity_model_nested, + has_image_mean_object_size_model_nested, + has_image_n_labels_model_nested, + has_image_width_model_nested, +] + +assertion_list = RootModel[typing.List[typing.Union[any_assertion_model_flat, any_assertion_model_nested]]] + + +class assertion_dict(AssertionModel): + + has_line: typing.Optional[base_has_line_model] = None + + has_line_matching: typing.Optional[base_has_line_matching_model] = None + + has_n_lines: typing.Optional[base_has_n_lines_model] = None + + has_text: typing.Optional[base_has_text_model] = None + + has_text_matching: typing.Optional[base_has_text_matching_model] = None + + not_has_text: typing.Optional[base_not_has_text_model] = None + + has_n_columns: typing.Optional[base_has_n_columns_model] = None + + attribute_is: typing.Optional[base_attribute_is_model] = None + + attribute_matches: typing.Optional[base_attribute_matches_model] = None + + element_text: typing.Optional[base_element_text_model] = None + + element_text_is: typing.Optional[base_element_text_is_model] = None + + element_text_matches: typing.Optional[base_element_text_matches_model] = None + + has_element_with_path: typing.Optional[base_has_element_with_path_model] = None + + has_n_elements_with_path: typing.Optional[base_has_n_elements_with_path_model] = None + + is_valid_xml: typing.Optional[base_is_valid_xml_model] = None + + xml_element: typing.Optional[base_xml_element_model] = None + + has_json_property_with_text: typing.Optional[base_has_json_property_with_text_model] = None + + has_json_property_with_value: typing.Optional[base_has_json_property_with_value_model] = None + + has_h5_attribute: typing.Optional[base_has_h5_attribute_model] = None + + has_h5_keys: typing.Optional[base_has_h5_keys_model] = None + + has_archive_member: typing.Optional[base_has_archive_member_model] = None + + has_size: 
typing.Optional[base_has_size_model] = None + + has_image_center_of_mass: typing.Optional[base_has_image_center_of_mass_model] = None + + has_image_channels: typing.Optional[base_has_image_channels_model] = None + + has_image_height: typing.Optional[base_has_image_height_model] = None + + has_image_mean_intensity: typing.Optional[base_has_image_mean_intensity_model] = None + + has_image_mean_object_size: typing.Optional[base_has_image_mean_object_size_model] = None + + has_image_n_labels: typing.Optional[base_has_image_n_labels_model] = None + + has_image_width: typing.Optional[base_has_image_width_model] = None + + +assertions = typing.Union[assertion_list, assertion_dict] From 03641a98fa6985e44f88757a8b8bee8a81fa15ae Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 24 Sep 2024 16:16:05 -0400 Subject: [PATCH 57/64] Fix migration data fixes tests When upgrading back to a valid revision, upgrade to head, not the revision under test: we need the latest version of the model definition in the database for the test to cleanup correctly. 
--- .../model/migration_fixes/test_migrations.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/test/unit/data/model/migration_fixes/test_migrations.py b/test/unit/data/model/migration_fixes/test_migrations.py index 2f8777860a0c..bfbc4bc0a9dd 100644 --- a/test/unit/data/model/migration_fixes/test_migrations.py +++ b/test/unit/data/model/migration_fixes/test_migrations.py @@ -72,7 +72,7 @@ def test_1cf595475b58(monkeypatch, session, make_user, make_history): # STEP 2: Run migration - run_command(f"{COMMAND} upgrade 1cf595475b58") + run_command(f"{COMMAND} upgrade") session.expire_all() # STEP 3: Verify deduplicated results @@ -143,7 +143,7 @@ def test_d619fdfa6168(monkeypatch, session, make_user): assert u1.create_time < u2.create_time < u3.create_time # u3 is newest user # STEP 2: Run migration - run_command(f"{COMMAND} upgrade d619fdfa6168") + run_command(f"{COMMAND} upgrade") session.expire_all() # STEP 3: Verify deduplicated results @@ -186,7 +186,7 @@ def test_349dd9d9aac9(monkeypatch, session, make_user, make_role, make_user_role assert len(all_associations) == 5 # Run migration - run_command(f"{COMMAND} upgrade 349dd9d9aac9") + run_command(f"{COMMAND} upgrade") session.expire_all() # Verify clean data @@ -222,7 +222,7 @@ def test_56ddf316dbd0(monkeypatch, session, make_user, make_group, make_user_gro assert len(all_associations) == 5 # Run migration - run_command(f"{COMMAND} upgrade 56ddf316dbd0") + run_command(f"{COMMAND} upgrade") session.expire_all() # Verify clean data @@ -258,7 +258,7 @@ def test_9ef6431f3a4e(monkeypatch, session, make_group, make_role, make_group_ro assert len(all_associations) == 5 # Run migration - run_command(f"{COMMAND} upgrade 9ef6431f3a4e") + run_command(f"{COMMAND} upgrade") session.expire_all() # Verify clean data @@ -297,7 +297,7 @@ def test_1fdd615f2cdb(monkeypatch, session, make_user, make_role, make_user_role assert len(all_associations) == 4 # Run migration - run_command(f"{COMMAND} 
upgrade 1fdd615f2cdb") + run_command(f"{COMMAND} upgrade") session.expire_all() # Verify clean data @@ -334,7 +334,7 @@ def test_13fe10b8e35b(monkeypatch, session, make_user, make_group, make_user_gro assert len(all_associations) == 4 # Run migration - run_command(f"{COMMAND} upgrade 13fe10b8e35b") + run_command(f"{COMMAND} upgrade") session.expire_all() # Verify clean data @@ -371,7 +371,7 @@ def test_25b092f7938b(monkeypatch, session, make_group, make_role, make_group_ro assert len(all_associations) == 4 # Run migration - run_command(f"{COMMAND} upgrade 25b092f7938b") + run_command(f"{COMMAND} upgrade") session.expire_all() # Verify clean data From b57ec18991fe921ae9fdfc1286b747db1ff4037f Mon Sep 17 00:00:00 2001 From: John Davis Date: Tue, 24 Sep 2024 20:43:47 -0400 Subject: [PATCH 58/64] Refactor migration testing setup code - Do not explicitly import fixtures (a little duplication is safer/cleaner) - Move dangerous fixture into module to prevent accidental usage from other modules - Ensure only one database is used for all tests in module --- .../data/model/migration_fixes/conftest.py | 44 ++++++++----------- .../model/migration_fixes/test_migrations.py | 31 ++++++++++--- 2 files changed, 45 insertions(+), 30 deletions(-) diff --git a/test/unit/data/model/migration_fixes/conftest.py b/test/unit/data/model/migration_fixes/conftest.py index 39ba30f5462a..21edbb3a49fc 100644 --- a/test/unit/data/model/migration_fixes/conftest.py +++ b/test/unit/data/model/migration_fixes/conftest.py @@ -1,26 +1,32 @@ -from typing import ( - Generator, - TYPE_CHECKING, -) +import tempfile +from typing import TYPE_CHECKING import pytest -from sqlalchemy import ( - create_engine, - text, -) +from sqlalchemy import create_engine from sqlalchemy.orm import Session -from galaxy import model as m - if TYPE_CHECKING: from sqlalchemy.engine import Engine -from galaxy.model.unittest_utils.model_testing_utils import ( # noqa: F401 - url_factory is a fixture we have to import explicitly - 
sqlite_url_factory, +from galaxy.model.unittest_utils.model_testing_utils import ( + _generate_unique_database_name, + _make_sqlite_db_url, ) -@pytest.fixture() +@pytest.fixture(scope="module") +def sqlite_url_factory(): + """Return a function that generates a sqlite url""" + + def url(): + database = _generate_unique_database_name() + return _make_sqlite_db_url(tmp_dir, database) + + with tempfile.TemporaryDirectory() as tmp_dir: + yield url + + +@pytest.fixture(scope="module") def db_url(sqlite_url_factory): # noqa: F811 return sqlite_url_factory() @@ -33,15 +39,3 @@ def engine(db_url: str) -> "Engine": @pytest.fixture def session(engine: "Engine") -> Session: return Session(engine) - - -@pytest.fixture(autouse=True) -def clear_database(engine: "Engine") -> "Generator": - """Delete all rows from all tables. Called after each test.""" - yield - with engine.begin() as conn: - for table in m.mapper_registry.metadata.tables: - # Unless db is sqlite, disable foreign key constraints to delete out of order - if engine.name != "sqlite": - conn.execute(text(f"ALTER TABLE {table} DISABLE TRIGGER ALL")) - conn.execute(text(f"DELETE FROM {table}")) diff --git a/test/unit/data/model/migration_fixes/test_migrations.py b/test/unit/data/model/migration_fixes/test_migrations.py index bfbc4bc0a9dd..0c6c8979d8cc 100644 --- a/test/unit/data/model/migration_fixes/test_migrations.py +++ b/test/unit/data/model/migration_fixes/test_migrations.py @@ -1,20 +1,41 @@ +from typing import ( + Generator, + TYPE_CHECKING, +) + import pytest -from sqlalchemy import select +from sqlalchemy import ( + select, + text, +) +from galaxy import model as m from galaxy.model import ( GroupRoleAssociation, User, UserGroupAssociation, UserRoleAssociation, ) -from galaxy.model.unittest_utils.migration_scripts_testing_utils import ( # noqa: F401 - contains fixtures we have to import explicitly - run_command, - tmp_directory, -) +from galaxy.model.unittest_utils.migration_scripts_testing_utils import 
run_command + +if TYPE_CHECKING: + from sqlalchemy.engine import Engine COMMAND = "manage_db.sh" +@pytest.fixture(autouse=True) +def clear_database(engine: "Engine") -> "Generator": + """Delete all rows from all tables. Called after each test.""" + yield + with engine.begin() as conn: + for table in m.mapper_registry.metadata.tables: + # Unless db is sqlite, disable foreign key constraints to delete out of order + if engine.name != "sqlite": + conn.execute(text(f"ALTER TABLE {table} DISABLE TRIGGER ALL")) + conn.execute(text(f"DELETE FROM {table}")) + + @pytest.fixture(autouse=True) def upgrade_database_after_test(): """Run after each test for proper cleanup""" From 2f1b6e2a36c5bd910e559fad70e4cef2df6c6cea Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Tue, 24 Sep 2024 15:18:35 +0200 Subject: [PATCH 59/64] Limit max number of items in dataproviders that support pagination to 10000. Clients can always request more data via offset requests, but since this is not a streaming API we need to build the response in memory before we can start sending. --- client/src/api/schema/schema.ts | 8 ++++++-- lib/galaxy/datatypes/dataproviders/base.py | 14 ++++++------- lib/galaxy/webapps/galaxy/api/datasets.py | 24 +++++++++++++++++++--- 3 files changed, 34 insertions(+), 12 deletions(-) diff --git a/client/src/api/schema/schema.ts b/client/src/api/schema/schema.ts index 0659ad0d08d9..0e1e42008a65 100644 --- a/client/src/api/schema/schema.ts +++ b/client/src/api/schema/schema.ts @@ -127,7 +127,7 @@ export interface paths { "/api/datasets/{dataset_id}": { /** * Displays information about and/or content of a dataset. - * @description **Note**: Due to the multipurpose nature of this endpoint, which can receive a wild variety of parameters + * @description **Note**: Due to the multipurpose nature of this endpoint, which can receive a wide variety of parameters * and return different kinds of responses, the documentation here will be limited. 
* To get more information please check the source code. */ @@ -14366,18 +14366,22 @@ export interface operations { show_api_datasets__dataset_id__get: { /** * Displays information about and/or content of a dataset. - * @description **Note**: Due to the multipurpose nature of this endpoint, which can receive a wild variety of parameters + * @description **Note**: Due to the multipurpose nature of this endpoint, which can receive a wide variety of parameters * and return different kinds of responses, the documentation here will be limited. * To get more information please check the source code. */ parameters: { /** @description The type of information about the dataset to be requested. */ /** @description The type of information about the dataset to be requested. Each of these values may require additional parameters in the request and may return different responses. */ + /** @description Maximum number of items to return. Currently only applies to `data_type=raw_data` requests */ + /** @description Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item. 
Currently only applies to `data_type=raw_data` requests */ /** @description View to be passed to the serializer */ /** @description Comma-separated list of keys to be passed to the serializer */ query?: { hda_ldda?: components["schemas"]["DatasetSourceType"]; data_type?: components["schemas"]["RequestDataType"] | null; + limit?: number | null; + offset?: number | null; view?: string | null; keys?: string | null; }; diff --git a/lib/galaxy/datatypes/dataproviders/base.py b/lib/galaxy/datatypes/dataproviders/base.py index c1239f86b9ff..a271437f184e 100644 --- a/lib/galaxy/datatypes/dataproviders/base.py +++ b/lib/galaxy/datatypes/dataproviders/base.py @@ -36,6 +36,7 @@ def stop( self ): self.endpoint = source.tell(); raise StopIteration() Building a giant list by sweeping all possible dprov classes doesn't make sense For now - I'm burying them in the class __init__s - but I don't like that """ +MAX_LIMIT = 10000 # ----------------------------------------------------------------------------- base classes @@ -233,21 +234,20 @@ class LimitedOffsetDataProvider(FilteredDataProvider): settings = {"limit": "int", "offset": "int"} # TODO: may want to squash this into DataProvider - def __init__(self, source, offset=0, limit=None, **kwargs): + def __init__(self, source, offset=0, limit=MAX_LIMIT, **kwargs): """ :param offset: the number of data to skip before providing. :param limit: the final number of data to provide. """ super().__init__(source, **kwargs) - # how many valid data to skip before we start outputing data - must be positive - # (diff to support neg. indeces - must be pos.) 
- self.offset = max(offset, 0) + # how many valid data to skip before we start outputting data - must be positive + self.offset = offset - # how many valid data to return - must be positive (None indicates no limit) + # how many valid data to return - must be positive + if limit is None: + limit = MAX_LIMIT self.limit = limit - if self.limit is not None: - self.limit = max(self.limit, 0) def __iter__(self): """ diff --git a/lib/galaxy/webapps/galaxy/api/datasets.py b/lib/galaxy/webapps/galaxy/api/datasets.py index 53006b4ee8c1..9859f7a7b686 100644 --- a/lib/galaxy/webapps/galaxy/api/datasets.py +++ b/lib/galaxy/webapps/galaxy/api/datasets.py @@ -29,6 +29,7 @@ ) from typing_extensions import Annotated +from galaxy.datatypes.dataproviders.base import MAX_LIMIT from galaxy.schema import ( FilterQueryParams, SerializationParams, @@ -432,18 +433,35 @@ def show( "may return different responses." ), ), + limit: Annotated[ + Optional[int], + Query( + ge=1, + le=MAX_LIMIT, + description="Maximum number of items to return. Currently only applies to `data_type=raw_data` requests", + ), + ] = MAX_LIMIT, + offset: Annotated[ + Optional[int], + Query( + ge=0, + description="Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item. Currently only applies to `data_type=raw_data` requests", + ), + ] = 0, serialization_params: SerializationParams = Depends(query_serialization_params), ): """ - **Note**: Due to the multipurpose nature of this endpoint, which can receive a wild variety of parameters + **Note**: Due to the multipurpose nature of this endpoint, which can receive a wide variety of parameters and return different kinds of responses, the documentation here will be limited. To get more information please check the source code. 
""" - exclude_params = {"hda_ldda", "data_type"} + exclude_params = {"hda_ldda", "data_type", "limit", "offset"} exclude_params.update(SerializationParams.model_fields.keys()) extra_params = get_query_parameters_from_request_excluding(request, exclude_params) - return self.service.show(trans, dataset_id, hda_ldda, serialization_params, data_type, **extra_params) + return self.service.show( + trans, dataset_id, hda_ldda, serialization_params, data_type, limit=limit, offset=offset, **extra_params + ) @router.get( "/api/datasets/{dataset_id}/content/{content_type}", From 04abcd5b342a5ed1f5ac2a3c1836a18deb929fac Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Wed, 25 Sep 2024 14:29:26 +0200 Subject: [PATCH 60/64] Create version 24.1.2 --- lib/galaxy/version.py | 2 +- packages/app/HISTORY.rst | 51 ++++++++++++++++++++++++++++-- packages/app/setup.cfg | 2 +- packages/auth/HISTORY.rst | 8 ++--- packages/auth/setup.cfg | 2 +- packages/config/HISTORY.rst | 26 +++++++++++++-- packages/config/setup.cfg | 2 +- packages/data/HISTORY.rst | 32 +++++++++++++++++-- packages/data/setup.cfg | 2 +- packages/files/HISTORY.rst | 8 ++--- packages/files/setup.cfg | 2 +- packages/job_execution/HISTORY.rst | 11 +++++-- packages/job_execution/setup.cfg | 2 +- packages/job_metrics/HISTORY.rst | 8 ++--- packages/job_metrics/setup.cfg | 2 +- packages/navigation/HISTORY.rst | 8 ++--- packages/navigation/setup.cfg | 2 +- packages/objectstore/HISTORY.rst | 11 +++++-- packages/objectstore/setup.cfg | 2 +- packages/schema/HISTORY.rst | 22 +++++++++++-- packages/schema/setup.cfg | 2 +- packages/selenium/HISTORY.rst | 8 ++--- packages/selenium/setup.cfg | 2 +- packages/test_api/HISTORY.rst | 23 ++++++++++++-- packages/test_api/setup.cfg | 2 +- packages/test_base/HISTORY.rst | 13 ++++++-- packages/test_base/setup.cfg | 2 +- packages/test_driver/HISTORY.rst | 8 ++--- packages/test_driver/setup.cfg | 2 +- packages/tool_shed/HISTORY.rst | 8 ++--- packages/tool_shed/setup.cfg | 2 +- 
packages/tool_util/HISTORY.rst | 18 +++++++++-- packages/tool_util/setup.cfg | 2 +- packages/tours/HISTORY.rst | 8 ++--- packages/tours/setup.cfg | 2 +- packages/util/HISTORY.rst | 18 +++++++++-- packages/util/setup.cfg | 2 +- packages/web_apps/HISTORY.rst | 38 ++++++++++++++++++++-- packages/web_apps/setup.cfg | 2 +- packages/web_framework/HISTORY.rst | 13 ++++++-- packages/web_framework/setup.cfg | 2 +- packages/web_stack/HISTORY.rst | 8 ++--- packages/web_stack/setup.cfg | 2 +- 43 files changed, 298 insertions(+), 94 deletions(-) diff --git a/lib/galaxy/version.py b/lib/galaxy/version.py index 127856da436f..8447b0537e06 100644 --- a/lib/galaxy/version.py +++ b/lib/galaxy/version.py @@ -1,3 +1,3 @@ VERSION_MAJOR = "24.1" -VERSION_MINOR = "2.dev0" +VERSION_MINOR = "2" VERSION = VERSION_MAJOR + (f".{VERSION_MINOR}" if VERSION_MINOR else "") diff --git a/packages/app/HISTORY.rst b/packages/app/HISTORY.rst index a01c8a3a25c1..6b4fe789a1f3 100644 --- a/packages/app/HISTORY.rst +++ b/packages/app/HISTORY.rst @@ -3,11 +3,56 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= + +* Return generic message for password reset email by `@ahmedhamidawan `_ in `#18479 `_ +* Increase API robustness to invalid requests, improve compressed data serving by `@mvdbeek `_ in `#18494 `_ +* Prevent job submission if input collection element is deleted by `@mvdbeek `_ in `#18517 `_ +* Strip unicode null from tool stdio by `@mvdbeek `_ in `#18527 `_ +* Fix map over calculation for runtime inputs by `@mvdbeek `_ in `#18535 `_ +* Fix for not-null in 'column_list' object by `@hujambo-dunia `_ in `#18553 `_ +* Also fail ``ensure_dataset_on_disk`` if dataset is in new state by `@mvdbeek `_ in `#18559 `_ +* Fix sqlalchemy statement in tooltagmanager reset output by `@dannon `_ in `#18591 `_ +* Set minimum weasyprint version by `@mvdbeek `_ in `#18606 `_ +* Improve relabel identifiers message when number of columns is not 2 by `@mvdbeek `_ in `#18634 `_ +* Fix extract workflow from history when implicit collection has no jobs by `@mvdbeek `_ in `#18661 `_ +* Make sure we set file size also for purged outputs by `@mvdbeek `_ in `#18681 `_ +* File source and object store instance api fixes by `@mvdbeek `_ in `#18685 `_ +* Fix change datatype PJA on expression tool data outputs by `@mvdbeek `_ in `#18691 `_ +* Fill in missing help for cross product tools. 
by `@jmchilton `_ in `#18698 `_ +* Fix subworkflow scheduling for delayed subworkflow steps connected to data inputs by `@mvdbeek `_ in `#18731 `_ +* Catch and display exceptions when importing malformatted yaml workflows by `@mvdbeek `_ in `#18734 `_ +* Fix infinitely delayed workflow scheduling if skipped step creates HDCA by `@mvdbeek `_ in `#18751 `_ +* Fix directory get or create logic by `@mvdbeek `_ in `#18752 `_ +* Fix job summary for optional unset job data inputs by `@mvdbeek `_ in `#18754 `_ +* Allow to change only the description of a quota by `@bernt-matthias `_ in `#18775 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Fix unspecified ``oidc_endpoint`` variable overwriting specified ``redirect_url`` by `@bgruening `_ in `#18818 `_ +* Fix wrong celery_app config on job and workflow handlers by `@mvdbeek `_ in `#18819 `_ +* Fix ``named cursor is not valid anymore`` by `@mvdbeek `_ in `#18825 `_ +* Tighten TRS url check by `@mvdbeek `_ in `#18841 `_ +* Fix Workflow index bookmark filter by `@itisAliRH `_ in `#18842 `_ +* Skip metric collection if job working directory doesn't exist by `@mvdbeek `_ in `#18845 `_ +* Extend on disk checks to running, queued and error states by `@mvdbeek `_ in `#18846 `_ +* Raise MessageException instead of assertions on rerun problems by `@mvdbeek `_ in `#18858 `_ +* Fix data_column ref to nested collection by `@mvdbeek `_ in `#18875 `_ +* Fix loading very old workflows with data inputs by `@mvdbeek `_ in `#18876 `_ + +============ +Enhancements +============ +* Include workflow invocation id in exception logs by `@mvdbeek `_ in `#18594 `_ +* Implemented the generic OIDC backend from python-social-auth into Gal… by `@Edmontosaurus `_ in `#18670 `_ +* Collect job metrics also when job failed by `@mvdbeek `_ in `#18809 `_ +* prevent "missing refresh_token" errors by supporting also with Keycloak backend by `@ljocha `_ in `#18826 `_ ------------------- 24.1.1 (2024-07-02) diff --git 
a/packages/app/setup.cfg b/packages/app/setup.cfg index cb942f671857..8d067b1afe6d 100644 --- a/packages/app/setup.cfg +++ b/packages/app/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-app url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/auth/HISTORY.rst b/packages/auth/HISTORY.rst index c0cbc0c0d8c3..72450a70903e 100644 --- a/packages/auth/HISTORY.rst +++ b/packages/auth/HISTORY.rst @@ -3,11 +3,11 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/auth/setup.cfg b/packages/auth/setup.cfg index 44bac91c764a..25b71dc16efa 100644 --- a/packages/auth/setup.cfg +++ b/packages/auth/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-auth url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/config/HISTORY.rst b/packages/config/HISTORY.rst index f9cf20c3f0f6..ecf0f191d0e4 100644 --- a/packages/config/HISTORY.rst +++ b/packages/config/HISTORY.rst @@ -3,11 +3,31 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Strip whitespace when listifying admin users by `@jdavcs `_ in `#18656 `_ +============ +Enhancements +============ + +* Make `default_panel_view` a `_by_host` option by `@natefoo `_ in `#18471 `_ +* More datatype deprecation warnings by `@mvdbeek `_ in `#18612 `_ +* Implemented the generic OIDC backend from python-social-auth into Gal… by `@Edmontosaurus `_ in `#18670 `_ + +============= +Other changes +============= +* Backport pod5 datatype by `@TomHarrop `_ in `#18507 `_ +* Backport PR 18630 "Add BlobToolkit to the list of interactive tools" to release_24.1 by `@cat-bro `_ in `#18784 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/config/setup.cfg b/packages/config/setup.cfg index 8b66d4c8be59..e4bfd764fdef 100644 --- a/packages/config/setup.cfg +++ b/packages/config/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-config url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/data/HISTORY.rst b/packages/data/HISTORY.rst index 5092affb1e05..5a6e54d4e89d 100644 --- a/packages/data/HISTORY.rst +++ b/packages/data/HISTORY.rst @@ -3,11 +3,37 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= + +* Increase API robustness to invalid requests, improve compressed data serving by `@mvdbeek `_ in `#18494 `_ +* Prevent job submission if input collection element is deleted by `@mvdbeek `_ in `#18517 `_ +* Fix shared caches with extended metadata collection. 
by `@jmchilton `_ in `#18520 `_ +* Also check dataset.deleted when determining if data can be displayed by `@mvdbeek `_ in `#18547 `_ +* Fix for not-null in 'column_list' object by `@hujambo-dunia `_ in `#18553 `_ +* Fix h5ad metadata by `@nilchia `_ in `#18635 `_ +* Don't set file size to zero by `@mvdbeek `_ in `#18653 `_ +* Make sure we set file size also for purged outputs by `@mvdbeek `_ in `#18681 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Fix copying workflow with subworkflow step for step that you own by `@mvdbeek `_ in `#18802 `_ +* Make pylibmagic import optional by `@mvdbeek `_ in `#18813 `_ +* Ignore converted datasets in invalid input states by `@mvdbeek `_ in `#18850 `_ +* Fix discovered outputs with directory metadata and distributed object by `@mvdbeek `_ in `#18855 `_ +* Raise MessageException instead of assertions on rerun problems by `@mvdbeek `_ in `#18858 `_ +* Fix wrong final state when init_from is used by `@mvdbeek `_ in `#18871 `_ +* Fix history import when parent_hda not serialized by `@mvdbeek `_ in `#18873 `_ + +============= +Other changes +============= +* Backport pod5 datatype by `@TomHarrop `_ in `#18507 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/data/setup.cfg b/packages/data/setup.cfg index 4d8cee887fd9..450a66916897 100644 --- a/packages/data/setup.cfg +++ b/packages/data/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-data url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/files/HISTORY.rst b/packages/files/HISTORY.rst index 0827590918d1..a1c537b77345 100644 --- a/packages/files/HISTORY.rst +++ b/packages/files/HISTORY.rst @@ -3,11 +3,11 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/files/setup.cfg b/packages/files/setup.cfg index 198052e2cc25..eabac412a0a8 100644 --- a/packages/files/setup.cfg +++ b/packages/files/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-files url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/job_execution/HISTORY.rst b/packages/job_execution/HISTORY.rst index c43f9b0c30dc..409df54e7732 100644 --- a/packages/job_execution/HISTORY.rst +++ b/packages/job_execution/HISTORY.rst @@ -3,11 +3,16 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= +* Retry container monitor POST if it fails (don't assume it succeeded) by `@natefoo `_ in `#18863 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/job_execution/setup.cfg b/packages/job_execution/setup.cfg index d5f7f2bf4379..295f6528b505 100644 --- a/packages/job_execution/setup.cfg +++ b/packages/job_execution/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-job-execution url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/job_metrics/HISTORY.rst b/packages/job_metrics/HISTORY.rst index 76b0afce976b..65672b77b3a0 100644 --- a/packages/job_metrics/HISTORY.rst +++ b/packages/job_metrics/HISTORY.rst @@ -3,11 +3,11 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/job_metrics/setup.cfg b/packages/job_metrics/setup.cfg index bb20a15f6637..a46a3685f144 100644 --- a/packages/job_metrics/setup.cfg +++ b/packages/job_metrics/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-job-metrics url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/navigation/HISTORY.rst b/packages/navigation/HISTORY.rst index e1fcd22d984f..bf47150055bb 100644 --- a/packages/navigation/HISTORY.rst +++ b/packages/navigation/HISTORY.rst @@ -3,11 +3,11 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/navigation/setup.cfg b/packages/navigation/setup.cfg index 6dbcdaafcb19..5875c9eaafe8 100644 --- a/packages/navigation/setup.cfg +++ b/packages/navigation/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-navigation url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/objectstore/HISTORY.rst b/packages/objectstore/HISTORY.rst index ffcff6f795dc..3eef2e167999 100644 --- a/packages/objectstore/HISTORY.rst +++ b/packages/objectstore/HISTORY.rst @@ -3,11 +3,16 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= +* Fix shared caches with extended metadata collection. 
by `@jmchilton `_ in `#18520 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/objectstore/setup.cfg b/packages/objectstore/setup.cfg index 38a0e8cf40b8..d700c8e59f2b 100644 --- a/packages/objectstore/setup.cfg +++ b/packages/objectstore/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-objectstore url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/schema/HISTORY.rst b/packages/schema/HISTORY.rst index 0fafdcb684cd..5f30f1ec9783 100644 --- a/packages/schema/HISTORY.rst +++ b/packages/schema/HISTORY.rst @@ -3,11 +3,27 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= + +* Handle error when workflow is unowned in Invocation view by `@ahmedhamidawan `_ in `#18730 `_ +* Fix datatype validation of newly built collection by `@mvdbeek `_ in `#18738 `_ +* Fix job summary for optional unset job data inputs by `@mvdbeek `_ in `#18754 `_ +* Fix ``TypeError`` from Pydantic 2.9.0 by `@nsoranzo `_ in `#18788 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Make all fields optional for HelpForumPost by `@davelopez `_ in `#18839 `_ + +============ +Enhancements +============ +* Include workflow invocation id in exception logs by `@mvdbeek `_ in `#18594 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/schema/setup.cfg b/packages/schema/setup.cfg index 21987bc30fb4..c6bbf4ec1778 100644 --- a/packages/schema/setup.cfg +++ b/packages/schema/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-schema url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git 
a/packages/selenium/HISTORY.rst b/packages/selenium/HISTORY.rst index 1b5017e44597..16a90b5b1249 100644 --- a/packages/selenium/HISTORY.rst +++ b/packages/selenium/HISTORY.rst @@ -3,11 +3,11 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/selenium/setup.cfg b/packages/selenium/setup.cfg index 799669cc8903..d90a94698378 100644 --- a/packages/selenium/setup.cfg +++ b/packages/selenium/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-selenium url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/test_api/HISTORY.rst b/packages/test_api/HISTORY.rst index 5e9ebad174f1..b45d74ca3ebb 100644 --- a/packages/test_api/HISTORY.rst +++ b/packages/test_api/HISTORY.rst @@ -3,11 +3,28 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= +* Prevent job submission if input collection element is deleted by `@mvdbeek `_ in `#18517 `_ +* Fix view parameter type in Job index API by `@davelopez `_ in `#18521 `_ +* Fix map over calculation for runtime inputs by `@mvdbeek `_ in `#18535 `_ +* Fix Archive header encoding by `@arash77 `_ in `#18583 `_ +* Don't set file size to zero by `@mvdbeek `_ in `#18653 `_ +* Fix extract workflow from history when implicit collection has no jobs by `@mvdbeek `_ in `#18661 `_ +* Fix change datatype PJA on expression tool data outputs by `@mvdbeek `_ in `#18691 `_ +* Fix subworkflow scheduling for delayed subworkflow steps connected to data inputs by `@mvdbeek `_ in `#18731 `_ +* Catch and display exceptions when importing malformatted yaml workflows by `@mvdbeek `_ in `#18734 `_ +* Fix infinitely delayed workflow scheduling if skipped step creates HDCA by `@mvdbeek `_ in `#18751 `_ +* Fix copying workflow with subworkflow step for step that you own by `@mvdbeek `_ in `#18802 `_ +* Raise MessageException instead of assertions on rerun problems by `@mvdbeek `_ in `#18858 `_ +* Fix data_column ref to nested collection by `@mvdbeek `_ in `#18875 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/test_api/setup.cfg b/packages/test_api/setup.cfg index 8fc3a5dbc334..680f9b59e788 100644 --- a/packages/test_api/setup.cfg +++ b/packages/test_api/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-test-api url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/test_base/HISTORY.rst b/packages/test_base/HISTORY.rst index 7867cc9a8686..65168be1c061 100644 --- a/packages/test_base/HISTORY.rst +++ b/packages/test_base/HISTORY.rst @@ -3,11 +3,18 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= +* Fix infinitely delayed workflow scheduling if skipped step creates HDCA by `@mvdbeek `_ in `#18751 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Fix data_column ref to nested collection by `@mvdbeek `_ in `#18875 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/test_base/setup.cfg b/packages/test_base/setup.cfg index 497247d478d5..26939a4cb6ba 100644 --- a/packages/test_base/setup.cfg +++ b/packages/test_base/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-test-base url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/test_driver/HISTORY.rst b/packages/test_driver/HISTORY.rst index 77740d14fb96..1a7fb845bbc4 100644 --- a/packages/test_driver/HISTORY.rst +++ b/packages/test_driver/HISTORY.rst @@ -3,11 +3,11 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/test_driver/setup.cfg b/packages/test_driver/setup.cfg index c1de1fed8578..02c0ae6c17f6 100644 --- a/packages/test_driver/setup.cfg +++ b/packages/test_driver/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-test-driver url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/tool_shed/HISTORY.rst b/packages/tool_shed/HISTORY.rst index efd9ed446874..404fb4d6db11 100644 --- a/packages/tool_shed/HISTORY.rst +++ b/packages/tool_shed/HISTORY.rst @@ -3,11 +3,11 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/tool_shed/setup.cfg b/packages/tool_shed/setup.cfg index bb38254a06b8..a85c3bce1b7c 100644 --- a/packages/tool_shed/setup.cfg +++ b/packages/tool_shed/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-tool-shed url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/tool_util/HISTORY.rst b/packages/tool_util/HISTORY.rst index 16fa70ea4b66..0e630494bed2 100644 --- a/packages/tool_util/HISTORY.rst +++ b/packages/tool_util/HISTORY.rst @@ -3,11 +3,23 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= + +* Handle all requests error in ``ApiBiotoolsMetadataSource._raw_get_metadata`` by `@nsoranzo `_ in `#18510 `_ +* xsd: allow `change_format` and `actions` also in statically defined collection elements, and break recursion by `@bernt-matthias `_ in `#18605 `_ + +============ +Enhancements +============ +* Make `default_panel_view` a `_by_host` option by `@natefoo `_ in `#18471 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/tool_util/setup.cfg b/packages/tool_util/setup.cfg index 194481687d64..3ab01b2ce843 100644 --- a/packages/tool_util/setup.cfg +++ b/packages/tool_util/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-tool-util url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/tours/HISTORY.rst b/packages/tours/HISTORY.rst index ebc29d06751a..ff76ec68d04b 100644 --- 
a/packages/tours/HISTORY.rst +++ b/packages/tours/HISTORY.rst @@ -3,11 +3,11 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/tours/setup.cfg b/packages/tours/setup.cfg index c236c2202b8b..981707752a62 100644 --- a/packages/tours/setup.cfg +++ b/packages/tours/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-tours url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/util/HISTORY.rst b/packages/util/HISTORY.rst index fed3f29e564a..472ce37790b7 100644 --- a/packages/util/HISTORY.rst +++ b/packages/util/HISTORY.rst @@ -3,11 +3,23 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= + +* Fix Archive header encoding by `@arash77 `_ in `#18583 `_ +* File source and object store instance api fixes by `@mvdbeek `_ in `#18685 `_ + +============ +Enhancements +============ +* Use smtplib send_message to support utf-8 chars in to and from by `@mvdbeek `_ in `#18805 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/util/setup.cfg b/packages/util/setup.cfg index 08067a739317..f8ed152bcaeb 100644 --- a/packages/util/setup.cfg +++ b/packages/util/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-util url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/web_apps/HISTORY.rst b/packages/web_apps/HISTORY.rst index a8daf79a97af..de7face2bee5 100644 --- a/packages/web_apps/HISTORY.rst +++ b/packages/web_apps/HISTORY.rst @@ -3,11 +3,43 @@ 
History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Return generic message for password reset email by `@ahmedhamidawan `_ in `#18479 `_ +* Fix view parameter type in Job index API by `@davelopez `_ in `#18521 `_ +* Check if dataset has any data before running provider checks by `@mvdbeek `_ in `#18526 `_ +* Raise appropriate exception if ldda not found by `@mvdbeek `_ in `#18569 `_ +* Close install model session when request ends by `@mvdbeek `_ in `#18629 `_ +* Fix resume_paused_jobs if no session provided by `@mvdbeek `_ in `#18640 `_ +* Fix extract workflow from history when implicit collection has no jobs by `@mvdbeek `_ in `#18661 `_ +* Return error when following a link to a non-ready display application by `@mvdbeek `_ in `#18672 `_ +* Only load authnz routes when oidc enabled by `@mvdbeek `_ in `#18683 `_ +* File source and object store instance api fixes by `@mvdbeek `_ in `#18685 `_ +* Fix sorting users in admin by last login by `@jdavcs `_ in `#18694 `_ +* Fix resume paused jobs response handling by `@dannon `_ in `#18733 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Tighten TRS url check by `@mvdbeek `_ in `#18841 `_ +* Fix Workflow index bookmark filter by `@itisAliRH `_ in `#18842 `_ +* Extend on disk checks to running, queued and error states by `@mvdbeek `_ in `#18846 `_ +============ +Enhancements +============ + +* Make `default_panel_view` a `_by_host` option by `@natefoo `_ in `#18471 `_ + +============= +Other changes +============= +* Fix check dataset check by `@mvdbeek `_ in `#18856 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/web_apps/setup.cfg b/packages/web_apps/setup.cfg index dfbb85e86dc0..ceb404777818 100644 --- a/packages/web_apps/setup.cfg +++ b/packages/web_apps/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = 
text/x-rst name = galaxy-web-apps url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/web_framework/HISTORY.rst b/packages/web_framework/HISTORY.rst index f09158a5f388..cc4a2f4b5d34 100644 --- a/packages/web_framework/HISTORY.rst +++ b/packages/web_framework/HISTORY.rst @@ -3,11 +3,18 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= +* Increase API robustness to invalid requests, improve compressed data serving by `@mvdbeek `_ in `#18494 `_ +* Apply statsd arg sanitization to all pages by `@mvdbeek `_ in `#18509 `_ +* Close install model session when request ends by `@mvdbeek `_ in `#18629 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/web_framework/setup.cfg b/packages/web_framework/setup.cfg index 083f8622406d..0291ca017c09 100644 --- a/packages/web_framework/setup.cfg +++ b/packages/web_framework/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-web-framework url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/web_stack/HISTORY.rst b/packages/web_stack/HISTORY.rst index 91460bc9610d..be5f02f07a0a 100644 --- a/packages/web_stack/HISTORY.rst +++ b/packages/web_stack/HISTORY.rst @@ -3,11 +3,11 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/web_stack/setup.cfg b/packages/web_stack/setup.cfg index 2974323c3b3f..b709dc1f6e80 100644 --- a/packages/web_stack/setup.cfg +++ b/packages/web_stack/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-web-stack url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True From a788fbdc6de156ef69d0d41cd52b31b726ee25b4 Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Wed, 25 Sep 2024 14:29:29 +0200 Subject: [PATCH 61/64] Start work on 24.1.3.dev0 --- lib/galaxy/version.py | 2 +- packages/app/HISTORY.rst | 6 ++++++ packages/app/setup.cfg | 2 +- packages/auth/HISTORY.rst | 6 ++++++ packages/auth/setup.cfg | 2 +- packages/config/HISTORY.rst | 6 ++++++ packages/config/setup.cfg | 2 +- packages/data/HISTORY.rst | 6 ++++++ packages/data/setup.cfg | 2 +- packages/files/HISTORY.rst | 6 ++++++ packages/files/setup.cfg | 2 +- packages/job_execution/HISTORY.rst | 6 ++++++ packages/job_execution/setup.cfg | 2 +- packages/job_metrics/HISTORY.rst | 6 ++++++ packages/job_metrics/setup.cfg | 2 +- packages/navigation/HISTORY.rst | 6 ++++++ packages/navigation/setup.cfg | 2 +- packages/objectstore/HISTORY.rst | 6 ++++++ packages/objectstore/setup.cfg | 2 +- packages/schema/HISTORY.rst | 6 ++++++ packages/schema/setup.cfg | 2 +- packages/selenium/HISTORY.rst | 6 ++++++ packages/selenium/setup.cfg | 2 +- packages/test_api/HISTORY.rst | 6 ++++++ packages/test_api/setup.cfg | 2 +- packages/test_base/HISTORY.rst | 6 ++++++ packages/test_base/setup.cfg | 2 +- packages/test_driver/HISTORY.rst | 6 ++++++ packages/test_driver/setup.cfg | 2 +- packages/tool_shed/HISTORY.rst | 6 ++++++ packages/tool_shed/setup.cfg | 2 +- 
packages/tool_util/HISTORY.rst | 6 ++++++ packages/tool_util/setup.cfg | 2 +- packages/tours/HISTORY.rst | 6 ++++++ packages/tours/setup.cfg | 2 +- packages/util/HISTORY.rst | 6 ++++++ packages/util/setup.cfg | 2 +- packages/web_apps/HISTORY.rst | 6 ++++++ packages/web_apps/setup.cfg | 2 +- packages/web_framework/HISTORY.rst | 6 ++++++ packages/web_framework/setup.cfg | 2 +- packages/web_stack/HISTORY.rst | 6 ++++++ packages/web_stack/setup.cfg | 2 +- 43 files changed, 148 insertions(+), 22 deletions(-) diff --git a/lib/galaxy/version.py b/lib/galaxy/version.py index 8447b0537e06..d7d7f3f7f491 100644 --- a/lib/galaxy/version.py +++ b/lib/galaxy/version.py @@ -1,3 +1,3 @@ VERSION_MAJOR = "24.1" -VERSION_MINOR = "2" +VERSION_MINOR = "3.dev0" VERSION = VERSION_MAJOR + (f".{VERSION_MINOR}" if VERSION_MINOR else "") diff --git a/packages/app/HISTORY.rst b/packages/app/HISTORY.rst index 6b4fe789a1f3..27a82fd91ccd 100644 --- a/packages/app/HISTORY.rst +++ b/packages/app/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/app/setup.cfg b/packages/app/setup.cfg index 8d067b1afe6d..80e4d38c3d85 100644 --- a/packages/app/setup.cfg +++ b/packages/app/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-app url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/auth/HISTORY.rst b/packages/auth/HISTORY.rst index 72450a70903e..b46693de1bb3 100644 --- a/packages/auth/HISTORY.rst +++ b/packages/auth/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/auth/setup.cfg b/packages/auth/setup.cfg index 25b71dc16efa..ab82a95b3be2 100644 --- a/packages/auth/setup.cfg +++ b/packages/auth/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-auth url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/config/HISTORY.rst b/packages/config/HISTORY.rst index ecf0f191d0e4..e36fc636cb1f 100644 --- a/packages/config/HISTORY.rst +++ b/packages/config/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/config/setup.cfg b/packages/config/setup.cfg index e4bfd764fdef..68a09c9de456 100644 --- a/packages/config/setup.cfg +++ b/packages/config/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-config url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/data/HISTORY.rst b/packages/data/HISTORY.rst index 5a6e54d4e89d..3b7ca75f5722 100644 --- a/packages/data/HISTORY.rst +++ b/packages/data/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/data/setup.cfg b/packages/data/setup.cfg index 450a66916897..809981ea6394 100644 --- a/packages/data/setup.cfg +++ b/packages/data/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-data url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/files/HISTORY.rst b/packages/files/HISTORY.rst index a1c537b77345..8777207afa0f 100644 --- a/packages/files/HISTORY.rst +++ b/packages/files/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/files/setup.cfg b/packages/files/setup.cfg index eabac412a0a8..85b2493e0f5b 100644 --- a/packages/files/setup.cfg +++ b/packages/files/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-files url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/job_execution/HISTORY.rst b/packages/job_execution/HISTORY.rst index 409df54e7732..3f2f246b5331 100644 --- a/packages/job_execution/HISTORY.rst +++ b/packages/job_execution/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/job_execution/setup.cfg b/packages/job_execution/setup.cfg index 295f6528b505..1dfa56e605e2 100644 --- a/packages/job_execution/setup.cfg +++ b/packages/job_execution/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-job-execution url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/job_metrics/HISTORY.rst b/packages/job_metrics/HISTORY.rst index 65672b77b3a0..025d1cb59bcf 100644 --- a/packages/job_metrics/HISTORY.rst +++ b/packages/job_metrics/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/job_metrics/setup.cfg b/packages/job_metrics/setup.cfg index a46a3685f144..83ce8bebcf0e 100644 --- a/packages/job_metrics/setup.cfg +++ b/packages/job_metrics/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-job-metrics url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/navigation/HISTORY.rst b/packages/navigation/HISTORY.rst index bf47150055bb..7c2eb4dde9ff 100644 --- a/packages/navigation/HISTORY.rst +++ b/packages/navigation/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/navigation/setup.cfg b/packages/navigation/setup.cfg index 5875c9eaafe8..63851150c254 100644 --- a/packages/navigation/setup.cfg +++ b/packages/navigation/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-navigation url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/objectstore/HISTORY.rst b/packages/objectstore/HISTORY.rst index 3eef2e167999..bbf9af4264fa 100644 --- a/packages/objectstore/HISTORY.rst +++ b/packages/objectstore/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/objectstore/setup.cfg b/packages/objectstore/setup.cfg index d700c8e59f2b..986fd23622e0 100644 --- a/packages/objectstore/setup.cfg +++ b/packages/objectstore/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-objectstore url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/schema/HISTORY.rst b/packages/schema/HISTORY.rst index 5f30f1ec9783..eab78124ea70 100644 --- a/packages/schema/HISTORY.rst +++ b/packages/schema/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/schema/setup.cfg b/packages/schema/setup.cfg index c6bbf4ec1778..d8d9801ffcd5 100644 --- a/packages/schema/setup.cfg +++ b/packages/schema/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-schema url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/selenium/HISTORY.rst b/packages/selenium/HISTORY.rst index 16a90b5b1249..514f1cd8b30d 100644 --- a/packages/selenium/HISTORY.rst +++ b/packages/selenium/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/selenium/setup.cfg b/packages/selenium/setup.cfg index d90a94698378..6e6e3028e5a0 100644 --- a/packages/selenium/setup.cfg +++ b/packages/selenium/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-selenium url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/test_api/HISTORY.rst b/packages/test_api/HISTORY.rst index b45d74ca3ebb..9c111a54405c 100644 --- a/packages/test_api/HISTORY.rst +++ b/packages/test_api/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/test_api/setup.cfg b/packages/test_api/setup.cfg index 680f9b59e788..e12a23480d30 100644 --- a/packages/test_api/setup.cfg +++ b/packages/test_api/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-test-api url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/test_base/HISTORY.rst b/packages/test_base/HISTORY.rst index 65168be1c061..8ff6be4ce45b 100644 --- a/packages/test_base/HISTORY.rst +++ b/packages/test_base/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/test_base/setup.cfg b/packages/test_base/setup.cfg index 26939a4cb6ba..10bf9226dfce 100644 --- a/packages/test_base/setup.cfg +++ b/packages/test_base/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-test-base url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/test_driver/HISTORY.rst b/packages/test_driver/HISTORY.rst index 1a7fb845bbc4..3c7b1543b69a 100644 --- a/packages/test_driver/HISTORY.rst +++ b/packages/test_driver/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/test_driver/setup.cfg b/packages/test_driver/setup.cfg index 02c0ae6c17f6..7b17e7629764 100644 --- a/packages/test_driver/setup.cfg +++ b/packages/test_driver/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-test-driver url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/tool_shed/HISTORY.rst b/packages/tool_shed/HISTORY.rst index 404fb4d6db11..4bbe31d51aa5 100644 --- a/packages/tool_shed/HISTORY.rst +++ b/packages/tool_shed/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/tool_shed/setup.cfg b/packages/tool_shed/setup.cfg index a85c3bce1b7c..7a0edb5c9b86 100644 --- a/packages/tool_shed/setup.cfg +++ b/packages/tool_shed/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-tool-shed url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/tool_util/HISTORY.rst b/packages/tool_util/HISTORY.rst index 0e630494bed2..d82cb227c96a 100644 --- a/packages/tool_util/HISTORY.rst +++ b/packages/tool_util/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/tool_util/setup.cfg b/packages/tool_util/setup.cfg index 3ab01b2ce843..321d9bbfb8e5 100644 --- a/packages/tool_util/setup.cfg +++ b/packages/tool_util/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-tool-util url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/tours/HISTORY.rst b/packages/tours/HISTORY.rst index ff76ec68d04b..7a53137118c6 100644 --- a/packages/tours/HISTORY.rst +++ b/packages/tours/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/tours/setup.cfg b/packages/tours/setup.cfg index 981707752a62..a3dc6338c312 100644 --- a/packages/tours/setup.cfg +++ b/packages/tours/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-tours url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/util/HISTORY.rst b/packages/util/HISTORY.rst index 472ce37790b7..befaeb6789f5 100644 --- a/packages/util/HISTORY.rst +++ b/packages/util/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/util/setup.cfg b/packages/util/setup.cfg index f8ed152bcaeb..fdef15f66594 100644 --- a/packages/util/setup.cfg +++ b/packages/util/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-util url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/web_apps/HISTORY.rst b/packages/web_apps/HISTORY.rst index de7face2bee5..4444fe0f88e6 100644 --- a/packages/web_apps/HISTORY.rst +++ b/packages/web_apps/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/web_apps/setup.cfg b/packages/web_apps/setup.cfg index ceb404777818..e1b5a1b3df03 100644 --- a/packages/web_apps/setup.cfg +++ b/packages/web_apps/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-web-apps url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/web_framework/HISTORY.rst b/packages/web_framework/HISTORY.rst index cc4a2f4b5d34..a0bd6b14bb14 100644 --- a/packages/web_framework/HISTORY.rst +++ b/packages/web_framework/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/web_framework/setup.cfg b/packages/web_framework/setup.cfg index 0291ca017c09..818b3bef6076 100644 --- a/packages/web_framework/setup.cfg +++ b/packages/web_framework/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-web-framework url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/web_stack/HISTORY.rst b/packages/web_stack/HISTORY.rst index be5f02f07a0a..e7ebb3235a0a 100644 --- a/packages/web_stack/HISTORY.rst +++ b/packages/web_stack/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/web_stack/setup.cfg b/packages/web_stack/setup.cfg index b709dc1f6e80..dd04dc39fe43 100644 --- a/packages/web_stack/setup.cfg +++ b/packages/web_stack/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-web-stack url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True From b34611f42285e9ebbf3fefb51950e7db6d56e8ba Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Wed, 25 Sep 2024 14:29:26 +0200 Subject: [PATCH 62/64] Create version 24.1.2 --- lib/galaxy/version.py | 2 +- packages/app/HISTORY.rst | 51 ++++++++++++++++++++++++++++-- packages/app/setup.cfg | 2 +- packages/auth/HISTORY.rst | 8 ++--- packages/auth/setup.cfg | 2 +- packages/config/HISTORY.rst | 26 +++++++++++++-- packages/config/setup.cfg | 2 +- packages/data/HISTORY.rst | 32 +++++++++++++++++-- packages/data/setup.cfg | 2 +- packages/files/HISTORY.rst | 8 ++--- packages/files/setup.cfg | 2 +- packages/job_execution/HISTORY.rst | 11 +++++-- 
packages/job_execution/setup.cfg | 2 +- packages/job_metrics/HISTORY.rst | 8 ++--- packages/job_metrics/setup.cfg | 2 +- packages/navigation/HISTORY.rst | 8 ++--- packages/navigation/setup.cfg | 2 +- packages/objectstore/HISTORY.rst | 11 +++++-- packages/objectstore/setup.cfg | 2 +- packages/schema/HISTORY.rst | 22 +++++++++++-- packages/schema/setup.cfg | 2 +- packages/selenium/HISTORY.rst | 8 ++--- packages/selenium/setup.cfg | 2 +- packages/test_api/HISTORY.rst | 23 ++++++++++++-- packages/test_api/setup.cfg | 2 +- packages/test_base/HISTORY.rst | 13 ++++++-- packages/test_base/setup.cfg | 2 +- packages/test_driver/HISTORY.rst | 8 ++--- packages/test_driver/setup.cfg | 2 +- packages/tool_shed/HISTORY.rst | 8 ++--- packages/tool_shed/setup.cfg | 2 +- packages/tool_util/HISTORY.rst | 18 +++++++++-- packages/tool_util/setup.cfg | 2 +- packages/tours/HISTORY.rst | 8 ++--- packages/tours/setup.cfg | 2 +- packages/util/HISTORY.rst | 18 +++++++++-- packages/util/setup.cfg | 2 +- packages/web_apps/HISTORY.rst | 38 ++++++++++++++++++++-- packages/web_apps/setup.cfg | 2 +- packages/web_framework/HISTORY.rst | 13 ++++++-- packages/web_framework/setup.cfg | 2 +- packages/web_stack/HISTORY.rst | 8 ++--- packages/web_stack/setup.cfg | 2 +- 43 files changed, 298 insertions(+), 94 deletions(-) diff --git a/lib/galaxy/version.py b/lib/galaxy/version.py index 127856da436f..8447b0537e06 100644 --- a/lib/galaxy/version.py +++ b/lib/galaxy/version.py @@ -1,3 +1,3 @@ VERSION_MAJOR = "24.1" -VERSION_MINOR = "2.dev0" +VERSION_MINOR = "2" VERSION = VERSION_MAJOR + (f".{VERSION_MINOR}" if VERSION_MINOR else "") diff --git a/packages/app/HISTORY.rst b/packages/app/HISTORY.rst index a01c8a3a25c1..6b4fe789a1f3 100644 --- a/packages/app/HISTORY.rst +++ b/packages/app/HISTORY.rst @@ -3,11 +3,56 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= + +* Return generic message for password reset email by `@ahmedhamidawan `_ in `#18479 `_ +* Increase API robustness to invalid requests, improve compressed data serving by `@mvdbeek `_ in `#18494 `_ +* Prevent job submission if input collection element is deleted by `@mvdbeek `_ in `#18517 `_ +* Strip unicode null from tool stdio by `@mvdbeek `_ in `#18527 `_ +* Fix map over calculation for runtime inputs by `@mvdbeek `_ in `#18535 `_ +* Fix for not-null in 'column_list' object by `@hujambo-dunia `_ in `#18553 `_ +* Also fail ``ensure_dataset_on_disk`` if dataset is in new state by `@mvdbeek `_ in `#18559 `_ +* Fix sqlalchemy statement in tooltagmanager reset output by `@dannon `_ in `#18591 `_ +* Set minimum weasyprint version by `@mvdbeek `_ in `#18606 `_ +* Improve relabel identifiers message when number of columns is not 2 by `@mvdbeek `_ in `#18634 `_ +* Fix extract workflow from history when implicit collection has no jobs by `@mvdbeek `_ in `#18661 `_ +* Make sure we set file size also for purged outputs by `@mvdbeek `_ in `#18681 `_ +* File source and object store instance api fixes by `@mvdbeek `_ in `#18685 `_ +* Fix change datatype PJA on expression tool data outputs by `@mvdbeek `_ in `#18691 `_ +* Fill in missing help for cross product tools. 
by `@jmchilton `_ in `#18698 `_ +* Fix subworkflow scheduling for delayed subworkflow steps connected to data inputs by `@mvdbeek `_ in `#18731 `_ +* Catch and display exceptions when importing malformatted yaml workflows by `@mvdbeek `_ in `#18734 `_ +* Fix infinitely delayed workflow scheduling if skipped step creates HDCA by `@mvdbeek `_ in `#18751 `_ +* Fix directory get or create logic by `@mvdbeek `_ in `#18752 `_ +* Fix job summary for optional unset job data inputs by `@mvdbeek `_ in `#18754 `_ +* Allow to change only the description of a quota by `@bernt-matthias `_ in `#18775 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Fix unspecified ``oidc_endpoint`` variable overwriting specified ``redirect_url`` by `@bgruening `_ in `#18818 `_ +* Fix wrong celery_app config on job and workflow handlers by `@mvdbeek `_ in `#18819 `_ +* Fix ``named cursor is not valid anymore`` by `@mvdbeek `_ in `#18825 `_ +* Tighten TRS url check by `@mvdbeek `_ in `#18841 `_ +* Fix Workflow index bookmark filter by `@itisAliRH `_ in `#18842 `_ +* Skip metric collection if job working directory doesn't exist by `@mvdbeek `_ in `#18845 `_ +* Extend on disk checks to running, queued and error states by `@mvdbeek `_ in `#18846 `_ +* Raise MessageException instead of assertions on rerun problems by `@mvdbeek `_ in `#18858 `_ +* Fix data_column ref to nested collection by `@mvdbeek `_ in `#18875 `_ +* Fix loading very old workflows with data inputs by `@mvdbeek `_ in `#18876 `_ + +============ +Enhancements +============ +* Include workflow invocation id in exception logs by `@mvdbeek `_ in `#18594 `_ +* Implemented the generic OIDC backend from python-social-auth into Gal… by `@Edmontosaurus `_ in `#18670 `_ +* Collect job metrics also when job failed by `@mvdbeek `_ in `#18809 `_ +* prevent "missing refresh_token" errors by supporting also with Keycloak backend by `@ljocha `_ in `#18826 `_ ------------------- 24.1.1 (2024-07-02) diff --git 
a/packages/app/setup.cfg b/packages/app/setup.cfg index cb942f671857..8d067b1afe6d 100644 --- a/packages/app/setup.cfg +++ b/packages/app/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-app url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/auth/HISTORY.rst b/packages/auth/HISTORY.rst index c0cbc0c0d8c3..72450a70903e 100644 --- a/packages/auth/HISTORY.rst +++ b/packages/auth/HISTORY.rst @@ -3,11 +3,11 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/auth/setup.cfg b/packages/auth/setup.cfg index 44bac91c764a..25b71dc16efa 100644 --- a/packages/auth/setup.cfg +++ b/packages/auth/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-auth url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/config/HISTORY.rst b/packages/config/HISTORY.rst index f9cf20c3f0f6..ecf0f191d0e4 100644 --- a/packages/config/HISTORY.rst +++ b/packages/config/HISTORY.rst @@ -3,11 +3,31 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Strip whitespace when listifying admin users by `@jdavcs `_ in `#18656 `_ +============ +Enhancements +============ + +* Make `default_panel_view` a `_by_host` option by `@natefoo `_ in `#18471 `_ +* More datatype deprecation warnings by `@mvdbeek `_ in `#18612 `_ +* Implemented the generic OIDC backend from python-social-auth into Gal… by `@Edmontosaurus `_ in `#18670 `_ + +============= +Other changes +============= +* Backport pod5 datatype by `@TomHarrop `_ in `#18507 `_ +* Backport PR 18630 "Add BlobToolkit to the list of interactive tools" to release_24.1 by `@cat-bro `_ in `#18784 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/config/setup.cfg b/packages/config/setup.cfg index 8b66d4c8be59..e4bfd764fdef 100644 --- a/packages/config/setup.cfg +++ b/packages/config/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-config url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/data/HISTORY.rst b/packages/data/HISTORY.rst index 5092affb1e05..5a6e54d4e89d 100644 --- a/packages/data/HISTORY.rst +++ b/packages/data/HISTORY.rst @@ -3,11 +3,37 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= + +* Increase API robustness to invalid requests, improve compressed data serving by `@mvdbeek `_ in `#18494 `_ +* Prevent job submission if input collection element is deleted by `@mvdbeek `_ in `#18517 `_ +* Fix shared caches with extended metadata collection. 
by `@jmchilton `_ in `#18520 `_ +* Also check dataset.deleted when determining if data can be displayed by `@mvdbeek `_ in `#18547 `_ +* Fix for not-null in 'column_list' object by `@hujambo-dunia `_ in `#18553 `_ +* Fix h5ad metadata by `@nilchia `_ in `#18635 `_ +* Don't set file size to zero by `@mvdbeek `_ in `#18653 `_ +* Make sure we set file size also for purged outputs by `@mvdbeek `_ in `#18681 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Fix copying workflow with subworkflow step for step that you own by `@mvdbeek `_ in `#18802 `_ +* Make pylibmagic import optional by `@mvdbeek `_ in `#18813 `_ +* Ignore converted datasets in invalid input states by `@mvdbeek `_ in `#18850 `_ +* Fix discovered outputs with directory metadata and distributed object by `@mvdbeek `_ in `#18855 `_ +* Raise MessageException instead of assertions on rerun problems by `@mvdbeek `_ in `#18858 `_ +* Fix wrong final state when init_from is used by `@mvdbeek `_ in `#18871 `_ +* Fix history import when parent_hda not serialized by `@mvdbeek `_ in `#18873 `_ + +============= +Other changes +============= +* Backport pod5 datatype by `@TomHarrop `_ in `#18507 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/data/setup.cfg b/packages/data/setup.cfg index 4d8cee887fd9..450a66916897 100644 --- a/packages/data/setup.cfg +++ b/packages/data/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-data url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/files/HISTORY.rst b/packages/files/HISTORY.rst index 0827590918d1..a1c537b77345 100644 --- a/packages/files/HISTORY.rst +++ b/packages/files/HISTORY.rst @@ -3,11 +3,11 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/files/setup.cfg b/packages/files/setup.cfg index 198052e2cc25..eabac412a0a8 100644 --- a/packages/files/setup.cfg +++ b/packages/files/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-files url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/job_execution/HISTORY.rst b/packages/job_execution/HISTORY.rst index c43f9b0c30dc..409df54e7732 100644 --- a/packages/job_execution/HISTORY.rst +++ b/packages/job_execution/HISTORY.rst @@ -3,11 +3,16 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= +* Retry container monitor POST if it fails (don't assume it succeeded) by `@natefoo `_ in `#18863 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/job_execution/setup.cfg b/packages/job_execution/setup.cfg index d5f7f2bf4379..295f6528b505 100644 --- a/packages/job_execution/setup.cfg +++ b/packages/job_execution/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-job-execution url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/job_metrics/HISTORY.rst b/packages/job_metrics/HISTORY.rst index 76b0afce976b..65672b77b3a0 100644 --- a/packages/job_metrics/HISTORY.rst +++ b/packages/job_metrics/HISTORY.rst @@ -3,11 +3,11 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/job_metrics/setup.cfg b/packages/job_metrics/setup.cfg index bb20a15f6637..a46a3685f144 100644 --- a/packages/job_metrics/setup.cfg +++ b/packages/job_metrics/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-job-metrics url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/navigation/HISTORY.rst b/packages/navigation/HISTORY.rst index e1fcd22d984f..bf47150055bb 100644 --- a/packages/navigation/HISTORY.rst +++ b/packages/navigation/HISTORY.rst @@ -3,11 +3,11 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/navigation/setup.cfg b/packages/navigation/setup.cfg index 6dbcdaafcb19..5875c9eaafe8 100644 --- a/packages/navigation/setup.cfg +++ b/packages/navigation/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-navigation url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/objectstore/HISTORY.rst b/packages/objectstore/HISTORY.rst index ffcff6f795dc..3eef2e167999 100644 --- a/packages/objectstore/HISTORY.rst +++ b/packages/objectstore/HISTORY.rst @@ -3,11 +3,16 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= +* Fix shared caches with extended metadata collection. 
by `@jmchilton `_ in `#18520 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/objectstore/setup.cfg b/packages/objectstore/setup.cfg index 38a0e8cf40b8..d700c8e59f2b 100644 --- a/packages/objectstore/setup.cfg +++ b/packages/objectstore/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-objectstore url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/schema/HISTORY.rst b/packages/schema/HISTORY.rst index 0fafdcb684cd..5f30f1ec9783 100644 --- a/packages/schema/HISTORY.rst +++ b/packages/schema/HISTORY.rst @@ -3,11 +3,27 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= + +* Handle error when workflow is unowned in Invocation view by `@ahmedhamidawan `_ in `#18730 `_ +* Fix datatype validation of newly built collection by `@mvdbeek `_ in `#18738 `_ +* Fix job summary for optional unset job data inputs by `@mvdbeek `_ in `#18754 `_ +* Fix ``TypeError`` from Pydantic 2.9.0 by `@nsoranzo `_ in `#18788 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Make all fields optional for HelpForumPost by `@davelopez `_ in `#18839 `_ + +============ +Enhancements +============ +* Include workflow invocation id in exception logs by `@mvdbeek `_ in `#18594 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/schema/setup.cfg b/packages/schema/setup.cfg index 21987bc30fb4..c6bbf4ec1778 100644 --- a/packages/schema/setup.cfg +++ b/packages/schema/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-schema url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git 
a/packages/selenium/HISTORY.rst b/packages/selenium/HISTORY.rst index 1b5017e44597..16a90b5b1249 100644 --- a/packages/selenium/HISTORY.rst +++ b/packages/selenium/HISTORY.rst @@ -3,11 +3,11 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/selenium/setup.cfg b/packages/selenium/setup.cfg index 799669cc8903..d90a94698378 100644 --- a/packages/selenium/setup.cfg +++ b/packages/selenium/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-selenium url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/test_api/HISTORY.rst b/packages/test_api/HISTORY.rst index 5e9ebad174f1..b45d74ca3ebb 100644 --- a/packages/test_api/HISTORY.rst +++ b/packages/test_api/HISTORY.rst @@ -3,11 +3,28 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= +* Prevent job submission if input collection element is deleted by `@mvdbeek `_ in `#18517 `_ +* Fix view parameter type in Job index API by `@davelopez `_ in `#18521 `_ +* Fix map over calculation for runtime inputs by `@mvdbeek `_ in `#18535 `_ +* Fix Archive header encoding by `@arash77 `_ in `#18583 `_ +* Don't set file size to zero by `@mvdbeek `_ in `#18653 `_ +* Fix extract workflow from history when implicit collection has no jobs by `@mvdbeek `_ in `#18661 `_ +* Fix change datatype PJA on expression tool data outputs by `@mvdbeek `_ in `#18691 `_ +* Fix subworkflow scheduling for delayed subworkflow steps connected to data inputs by `@mvdbeek `_ in `#18731 `_ +* Catch and display exceptions when importing malformatted yaml workflows by `@mvdbeek `_ in `#18734 `_ +* Fix infinitely delayed workflow scheduling if skipped step creates HDCA by `@mvdbeek `_ in `#18751 `_ +* Fix copying workflow with subworkflow step for step that you own by `@mvdbeek `_ in `#18802 `_ +* Raise MessageException instead of assertions on rerun problems by `@mvdbeek `_ in `#18858 `_ +* Fix data_column ref to nested collection by `@mvdbeek `_ in `#18875 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/test_api/setup.cfg b/packages/test_api/setup.cfg index 8fc3a5dbc334..680f9b59e788 100644 --- a/packages/test_api/setup.cfg +++ b/packages/test_api/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-test-api url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/test_base/HISTORY.rst b/packages/test_base/HISTORY.rst index 7867cc9a8686..65168be1c061 100644 --- a/packages/test_base/HISTORY.rst +++ b/packages/test_base/HISTORY.rst @@ -3,11 +3,18 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= +* Fix infinitely delayed workflow scheduling if skipped step creates HDCA by `@mvdbeek `_ in `#18751 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Fix data_column ref to nested collection by `@mvdbeek `_ in `#18875 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/test_base/setup.cfg b/packages/test_base/setup.cfg index 497247d478d5..26939a4cb6ba 100644 --- a/packages/test_base/setup.cfg +++ b/packages/test_base/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-test-base url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/test_driver/HISTORY.rst b/packages/test_driver/HISTORY.rst index 77740d14fb96..1a7fb845bbc4 100644 --- a/packages/test_driver/HISTORY.rst +++ b/packages/test_driver/HISTORY.rst @@ -3,11 +3,11 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/test_driver/setup.cfg b/packages/test_driver/setup.cfg index c1de1fed8578..02c0ae6c17f6 100644 --- a/packages/test_driver/setup.cfg +++ b/packages/test_driver/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-test-driver url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/tool_shed/HISTORY.rst b/packages/tool_shed/HISTORY.rst index efd9ed446874..404fb4d6db11 100644 --- a/packages/tool_shed/HISTORY.rst +++ b/packages/tool_shed/HISTORY.rst @@ -3,11 +3,11 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/tool_shed/setup.cfg b/packages/tool_shed/setup.cfg index bb38254a06b8..a85c3bce1b7c 100644 --- a/packages/tool_shed/setup.cfg +++ b/packages/tool_shed/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-tool-shed url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/tool_util/HISTORY.rst b/packages/tool_util/HISTORY.rst index 16fa70ea4b66..0e630494bed2 100644 --- a/packages/tool_util/HISTORY.rst +++ b/packages/tool_util/HISTORY.rst @@ -3,11 +3,23 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= + +* Handle all requests error in ``ApiBiotoolsMetadataSource._raw_get_metadata`` by `@nsoranzo `_ in `#18510 `_ +* xsd: allow `change_format` and `actions` also in statically defined collection elements, and break recursion by `@bernt-matthias `_ in `#18605 `_ + +============ +Enhancements +============ +* Make `default_panel_view` a `_by_host` option by `@natefoo `_ in `#18471 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/tool_util/setup.cfg b/packages/tool_util/setup.cfg index 194481687d64..3ab01b2ce843 100644 --- a/packages/tool_util/setup.cfg +++ b/packages/tool_util/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-tool-util url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/tours/HISTORY.rst b/packages/tours/HISTORY.rst index ebc29d06751a..ff76ec68d04b 100644 --- 
a/packages/tours/HISTORY.rst +++ b/packages/tours/HISTORY.rst @@ -3,11 +3,11 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/tours/setup.cfg b/packages/tours/setup.cfg index c236c2202b8b..981707752a62 100644 --- a/packages/tours/setup.cfg +++ b/packages/tours/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-tours url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/util/HISTORY.rst b/packages/util/HISTORY.rst index fed3f29e564a..472ce37790b7 100644 --- a/packages/util/HISTORY.rst +++ b/packages/util/HISTORY.rst @@ -3,11 +3,23 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= + +* Fix Archive header encoding by `@arash77 `_ in `#18583 `_ +* File source and object store instance api fixes by `@mvdbeek `_ in `#18685 `_ + +============ +Enhancements +============ +* Use smtplib send_message to support utf-8 chars in to and from by `@mvdbeek `_ in `#18805 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/util/setup.cfg b/packages/util/setup.cfg index 08067a739317..f8ed152bcaeb 100644 --- a/packages/util/setup.cfg +++ b/packages/util/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-util url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/web_apps/HISTORY.rst b/packages/web_apps/HISTORY.rst index a8daf79a97af..de7face2bee5 100644 --- a/packages/web_apps/HISTORY.rst +++ b/packages/web_apps/HISTORY.rst @@ -3,11 +3,43 @@ 
History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Return generic message for password reset email by `@ahmedhamidawan `_ in `#18479 `_ +* Fix view parameter type in Job index API by `@davelopez `_ in `#18521 `_ +* Check if dataset has any data before running provider checks by `@mvdbeek `_ in `#18526 `_ +* Raise appropriate exception if ldda not found by `@mvdbeek `_ in `#18569 `_ +* Close install model session when request ends by `@mvdbeek `_ in `#18629 `_ +* Fix resume_paused_jobs if no session provided by `@mvdbeek `_ in `#18640 `_ +* Fix extract workflow from history when implicit collection has no jobs by `@mvdbeek `_ in `#18661 `_ +* Return error when following a link to a non-ready display application by `@mvdbeek `_ in `#18672 `_ +* Only load authnz routes when oidc enabled by `@mvdbeek `_ in `#18683 `_ +* File source and object store instance api fixes by `@mvdbeek `_ in `#18685 `_ +* Fix sorting users in admin by last login by `@jdavcs `_ in `#18694 `_ +* Fix resume paused jobs response handling by `@dannon `_ in `#18733 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Tighten TRS url check by `@mvdbeek `_ in `#18841 `_ +* Fix Workflow index bookmark filter by `@itisAliRH `_ in `#18842 `_ +* Extend on disk checks to running, queued and error states by `@mvdbeek `_ in `#18846 `_ +============ +Enhancements +============ + +* Make `default_panel_view` a `_by_host` option by `@natefoo `_ in `#18471 `_ + +============= +Other changes +============= +* Fix check dataset check by `@mvdbeek `_ in `#18856 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/web_apps/setup.cfg b/packages/web_apps/setup.cfg index dfbb85e86dc0..ceb404777818 100644 --- a/packages/web_apps/setup.cfg +++ b/packages/web_apps/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = 
text/x-rst name = galaxy-web-apps url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/web_framework/HISTORY.rst b/packages/web_framework/HISTORY.rst index f09158a5f388..cc4a2f4b5d34 100644 --- a/packages/web_framework/HISTORY.rst +++ b/packages/web_framework/HISTORY.rst @@ -3,11 +3,18 @@ History .. to_doc ------------ -24.1.2.dev0 ------------ +------------------- +24.1.2 (2024-09-25) +------------------- + +========= +Bug fixes +========= +* Increase API robustness to invalid requests, improve compressed data serving by `@mvdbeek `_ in `#18494 `_ +* Apply statsd arg sanitization to all pages by `@mvdbeek `_ in `#18509 `_ +* Close install model session when request ends by `@mvdbeek `_ in `#18629 `_ ------------------- 24.1.1 (2024-07-02) diff --git a/packages/web_framework/setup.cfg b/packages/web_framework/setup.cfg index 083f8622406d..0291ca017c09 100644 --- a/packages/web_framework/setup.cfg +++ b/packages/web_framework/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-web-framework url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True diff --git a/packages/web_stack/HISTORY.rst b/packages/web_stack/HISTORY.rst index 91460bc9610d..be5f02f07a0a 100644 --- a/packages/web_stack/HISTORY.rst +++ b/packages/web_stack/HISTORY.rst @@ -3,11 +3,11 @@ History .. 
to_doc ------------ -24.1.2.dev0 ------------ - +------------------- +24.1.2 (2024-09-25) +------------------- +No recorded changes since last release ------------------- 24.1.1 (2024-07-02) diff --git a/packages/web_stack/setup.cfg b/packages/web_stack/setup.cfg index 2974323c3b3f..b709dc1f6e80 100644 --- a/packages/web_stack/setup.cfg +++ b/packages/web_stack/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-web-stack url = https://github.com/galaxyproject/galaxy -version = 24.1.2.dev0 +version = 24.1.2 [options] include_package_data = True From c80ce1d431ebd296439a06452f4057ca72305d88 Mon Sep 17 00:00:00 2001 From: Matthias Bernt Date: Wed, 25 Sep 2024 14:29:29 +0200 Subject: [PATCH 63/64] Start work on 24.1.3.dev0 --- lib/galaxy/version.py | 2 +- packages/app/HISTORY.rst | 6 ++++++ packages/app/setup.cfg | 2 +- packages/auth/HISTORY.rst | 6 ++++++ packages/auth/setup.cfg | 2 +- packages/config/HISTORY.rst | 6 ++++++ packages/config/setup.cfg | 2 +- packages/data/HISTORY.rst | 6 ++++++ packages/data/setup.cfg | 2 +- packages/files/HISTORY.rst | 6 ++++++ packages/files/setup.cfg | 2 +- packages/job_execution/HISTORY.rst | 6 ++++++ packages/job_execution/setup.cfg | 2 +- packages/job_metrics/HISTORY.rst | 6 ++++++ packages/job_metrics/setup.cfg | 2 +- packages/navigation/HISTORY.rst | 6 ++++++ packages/navigation/setup.cfg | 2 +- packages/objectstore/HISTORY.rst | 6 ++++++ packages/objectstore/setup.cfg | 2 +- packages/schema/HISTORY.rst | 6 ++++++ packages/schema/setup.cfg | 2 +- packages/selenium/HISTORY.rst | 6 ++++++ packages/selenium/setup.cfg | 2 +- packages/test_api/HISTORY.rst | 6 ++++++ packages/test_api/setup.cfg | 2 +- packages/test_base/HISTORY.rst | 6 ++++++ packages/test_base/setup.cfg | 2 +- packages/test_driver/HISTORY.rst | 6 ++++++ packages/test_driver/setup.cfg | 2 +- packages/tool_shed/HISTORY.rst | 6 ++++++ packages/tool_shed/setup.cfg | 2 +- 
packages/tool_util/HISTORY.rst | 6 ++++++ packages/tool_util/setup.cfg | 2 +- packages/tours/HISTORY.rst | 6 ++++++ packages/tours/setup.cfg | 2 +- packages/util/HISTORY.rst | 6 ++++++ packages/util/setup.cfg | 2 +- packages/web_apps/HISTORY.rst | 6 ++++++ packages/web_apps/setup.cfg | 2 +- packages/web_framework/HISTORY.rst | 6 ++++++ packages/web_framework/setup.cfg | 2 +- packages/web_stack/HISTORY.rst | 6 ++++++ packages/web_stack/setup.cfg | 2 +- 43 files changed, 148 insertions(+), 22 deletions(-) diff --git a/lib/galaxy/version.py b/lib/galaxy/version.py index 8447b0537e06..d7d7f3f7f491 100644 --- a/lib/galaxy/version.py +++ b/lib/galaxy/version.py @@ -1,3 +1,3 @@ VERSION_MAJOR = "24.1" -VERSION_MINOR = "2" +VERSION_MINOR = "3.dev0" VERSION = VERSION_MAJOR + (f".{VERSION_MINOR}" if VERSION_MINOR else "") diff --git a/packages/app/HISTORY.rst b/packages/app/HISTORY.rst index 6b4fe789a1f3..27a82fd91ccd 100644 --- a/packages/app/HISTORY.rst +++ b/packages/app/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/app/setup.cfg b/packages/app/setup.cfg index 8d067b1afe6d..80e4d38c3d85 100644 --- a/packages/app/setup.cfg +++ b/packages/app/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-app url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/auth/HISTORY.rst b/packages/auth/HISTORY.rst index 72450a70903e..b46693de1bb3 100644 --- a/packages/auth/HISTORY.rst +++ b/packages/auth/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/auth/setup.cfg b/packages/auth/setup.cfg index 25b71dc16efa..ab82a95b3be2 100644 --- a/packages/auth/setup.cfg +++ b/packages/auth/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-auth url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/config/HISTORY.rst b/packages/config/HISTORY.rst index ecf0f191d0e4..e36fc636cb1f 100644 --- a/packages/config/HISTORY.rst +++ b/packages/config/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/config/setup.cfg b/packages/config/setup.cfg index e4bfd764fdef..68a09c9de456 100644 --- a/packages/config/setup.cfg +++ b/packages/config/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-config url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/data/HISTORY.rst b/packages/data/HISTORY.rst index 5a6e54d4e89d..3b7ca75f5722 100644 --- a/packages/data/HISTORY.rst +++ b/packages/data/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/data/setup.cfg b/packages/data/setup.cfg index 450a66916897..809981ea6394 100644 --- a/packages/data/setup.cfg +++ b/packages/data/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-data url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/files/HISTORY.rst b/packages/files/HISTORY.rst index a1c537b77345..8777207afa0f 100644 --- a/packages/files/HISTORY.rst +++ b/packages/files/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/files/setup.cfg b/packages/files/setup.cfg index eabac412a0a8..85b2493e0f5b 100644 --- a/packages/files/setup.cfg +++ b/packages/files/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-files url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/job_execution/HISTORY.rst b/packages/job_execution/HISTORY.rst index 409df54e7732..3f2f246b5331 100644 --- a/packages/job_execution/HISTORY.rst +++ b/packages/job_execution/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/job_execution/setup.cfg b/packages/job_execution/setup.cfg index 295f6528b505..1dfa56e605e2 100644 --- a/packages/job_execution/setup.cfg +++ b/packages/job_execution/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-job-execution url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/job_metrics/HISTORY.rst b/packages/job_metrics/HISTORY.rst index 65672b77b3a0..025d1cb59bcf 100644 --- a/packages/job_metrics/HISTORY.rst +++ b/packages/job_metrics/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/job_metrics/setup.cfg b/packages/job_metrics/setup.cfg index a46a3685f144..83ce8bebcf0e 100644 --- a/packages/job_metrics/setup.cfg +++ b/packages/job_metrics/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-job-metrics url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/navigation/HISTORY.rst b/packages/navigation/HISTORY.rst index bf47150055bb..7c2eb4dde9ff 100644 --- a/packages/navigation/HISTORY.rst +++ b/packages/navigation/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/navigation/setup.cfg b/packages/navigation/setup.cfg index 5875c9eaafe8..63851150c254 100644 --- a/packages/navigation/setup.cfg +++ b/packages/navigation/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-navigation url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/objectstore/HISTORY.rst b/packages/objectstore/HISTORY.rst index 3eef2e167999..bbf9af4264fa 100644 --- a/packages/objectstore/HISTORY.rst +++ b/packages/objectstore/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/objectstore/setup.cfg b/packages/objectstore/setup.cfg index d700c8e59f2b..986fd23622e0 100644 --- a/packages/objectstore/setup.cfg +++ b/packages/objectstore/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-objectstore url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/schema/HISTORY.rst b/packages/schema/HISTORY.rst index 5f30f1ec9783..eab78124ea70 100644 --- a/packages/schema/HISTORY.rst +++ b/packages/schema/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/schema/setup.cfg b/packages/schema/setup.cfg index c6bbf4ec1778..d8d9801ffcd5 100644 --- a/packages/schema/setup.cfg +++ b/packages/schema/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-schema url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/selenium/HISTORY.rst b/packages/selenium/HISTORY.rst index 16a90b5b1249..514f1cd8b30d 100644 --- a/packages/selenium/HISTORY.rst +++ b/packages/selenium/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/selenium/setup.cfg b/packages/selenium/setup.cfg index d90a94698378..6e6e3028e5a0 100644 --- a/packages/selenium/setup.cfg +++ b/packages/selenium/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-selenium url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/test_api/HISTORY.rst b/packages/test_api/HISTORY.rst index b45d74ca3ebb..9c111a54405c 100644 --- a/packages/test_api/HISTORY.rst +++ b/packages/test_api/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/test_api/setup.cfg b/packages/test_api/setup.cfg index 680f9b59e788..e12a23480d30 100644 --- a/packages/test_api/setup.cfg +++ b/packages/test_api/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-test-api url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/test_base/HISTORY.rst b/packages/test_base/HISTORY.rst index 65168be1c061..8ff6be4ce45b 100644 --- a/packages/test_base/HISTORY.rst +++ b/packages/test_base/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/test_base/setup.cfg b/packages/test_base/setup.cfg index 26939a4cb6ba..10bf9226dfce 100644 --- a/packages/test_base/setup.cfg +++ b/packages/test_base/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-test-base url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/test_driver/HISTORY.rst b/packages/test_driver/HISTORY.rst index 1a7fb845bbc4..3c7b1543b69a 100644 --- a/packages/test_driver/HISTORY.rst +++ b/packages/test_driver/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/test_driver/setup.cfg b/packages/test_driver/setup.cfg index 02c0ae6c17f6..7b17e7629764 100644 --- a/packages/test_driver/setup.cfg +++ b/packages/test_driver/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-test-driver url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/tool_shed/HISTORY.rst b/packages/tool_shed/HISTORY.rst index 404fb4d6db11..4bbe31d51aa5 100644 --- a/packages/tool_shed/HISTORY.rst +++ b/packages/tool_shed/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/tool_shed/setup.cfg b/packages/tool_shed/setup.cfg index a85c3bce1b7c..7a0edb5c9b86 100644 --- a/packages/tool_shed/setup.cfg +++ b/packages/tool_shed/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-tool-shed url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/tool_util/HISTORY.rst b/packages/tool_util/HISTORY.rst index 0e630494bed2..d82cb227c96a 100644 --- a/packages/tool_util/HISTORY.rst +++ b/packages/tool_util/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/tool_util/setup.cfg b/packages/tool_util/setup.cfg index 3ab01b2ce843..321d9bbfb8e5 100644 --- a/packages/tool_util/setup.cfg +++ b/packages/tool_util/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-tool-util url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/tours/HISTORY.rst b/packages/tours/HISTORY.rst index ff76ec68d04b..7a53137118c6 100644 --- a/packages/tours/HISTORY.rst +++ b/packages/tours/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/tours/setup.cfg b/packages/tours/setup.cfg index 981707752a62..a3dc6338c312 100644 --- a/packages/tours/setup.cfg +++ b/packages/tours/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-tours url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/util/HISTORY.rst b/packages/util/HISTORY.rst index 472ce37790b7..befaeb6789f5 100644 --- a/packages/util/HISTORY.rst +++ b/packages/util/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/util/setup.cfg b/packages/util/setup.cfg index f8ed152bcaeb..fdef15f66594 100644 --- a/packages/util/setup.cfg +++ b/packages/util/setup.cfg @@ -28,7 +28,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-util url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/web_apps/HISTORY.rst b/packages/web_apps/HISTORY.rst index de7face2bee5..4444fe0f88e6 100644 --- a/packages/web_apps/HISTORY.rst +++ b/packages/web_apps/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/web_apps/setup.cfg b/packages/web_apps/setup.cfg index ceb404777818..e1b5a1b3df03 100644 --- a/packages/web_apps/setup.cfg +++ b/packages/web_apps/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-web-apps url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/web_framework/HISTORY.rst b/packages/web_framework/HISTORY.rst index cc4a2f4b5d34..a0bd6b14bb14 100644 --- a/packages/web_framework/HISTORY.rst +++ b/packages/web_framework/HISTORY.rst @@ -3,6 +3,12 @@ History .. 
to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/web_framework/setup.cfg b/packages/web_framework/setup.cfg index 0291ca017c09..818b3bef6076 100644 --- a/packages/web_framework/setup.cfg +++ b/packages/web_framework/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-web-framework url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True diff --git a/packages/web_stack/HISTORY.rst b/packages/web_stack/HISTORY.rst index be5f02f07a0a..e7ebb3235a0a 100644 --- a/packages/web_stack/HISTORY.rst +++ b/packages/web_stack/HISTORY.rst @@ -3,6 +3,12 @@ History .. to_doc +----------- +24.1.3.dev0 +----------- + + + ------------------- 24.1.2 (2024-09-25) ------------------- diff --git a/packages/web_stack/setup.cfg b/packages/web_stack/setup.cfg index b709dc1f6e80..dd04dc39fe43 100644 --- a/packages/web_stack/setup.cfg +++ b/packages/web_stack/setup.cfg @@ -27,7 +27,7 @@ long_description = file: README.rst, HISTORY.rst long_description_content_type = text/x-rst name = galaxy-web-stack url = https://github.com/galaxyproject/galaxy -version = 24.1.2 +version = 24.1.3.dev0 [options] include_package_data = True From 45c5a384f1c4803dbb8239c7ef2eef40df4fc144 Mon Sep 17 00:00:00 2001 From: mvdbeek Date: Wed, 25 Sep 2024 16:50:00 +0200 Subject: [PATCH 64/64] Raise exception if CompressedFile used on incompatible file Surprised (or not actually ...) that mypy didn't catch this. 
--- lib/galaxy/util/compression_utils.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/galaxy/util/compression_utils.py b/lib/galaxy/util/compression_utils.py index d54c0ffcbc24..576e62623f99 100644 --- a/lib/galaxy/util/compression_utils.py +++ b/lib/galaxy/util/compression_utils.py @@ -188,6 +188,8 @@ def __init__(self, file_path: StrPath, mode: str = "r") -> None: self.file_type = "tar" elif zipfile.is_zipfile(file_path) and not file_path_str.endswith(".jar"): self.file_type = "zip" + else: + raise Exception("File must be valid zip or tar file.") self.file_name = os.path.splitext(os.path.basename(file_path))[0] if self.file_name.endswith(".tar"): self.file_name = os.path.splitext(self.file_name)[0]