diff --git a/client/src/api/schema/schema.ts b/client/src/api/schema/schema.ts index 1ba29e901647..c1a287e67872 100644 --- a/client/src/api/schema/schema.ts +++ b/client/src/api/schema/schema.ts @@ -339,7 +339,7 @@ export interface paths { }; /** * Displays information about and/or content of a dataset. - * @description **Note**: Due to the multipurpose nature of this endpoint, which can receive a wild variety of parameters + * @description **Note**: Due to the multipurpose nature of this endpoint, which can receive a wide variety of parameters * and return different kinds of responses, the documentation here will be limited. * To get more information please check the source code. */ @@ -18853,6 +18853,10 @@ export interface operations { hda_ldda?: components["schemas"]["DatasetSourceType"]; /** @description The type of information about the dataset to be requested. Each of these values may require additional parameters in the request and may return different responses. */ data_type?: components["schemas"]["RequestDataType"] | null; + /** @description Maximum number of items to return. Currently only applies to `data_type=raw_data` requests */ + limit?: number | null; + /** @description Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item. Currently only applies to `data_type=raw_data` requests */ + offset?: number | null; /** @description View to be passed to the serializer */ view?: string | null; /** @description Comma-separated list of keys to be passed to the serializer */ diff --git a/lib/galaxy/app.py b/lib/galaxy/app.py index 38553ee968ca..2f67d6ec3b1b 100644 --- a/lib/galaxy/app.py +++ b/lib/galaxy/app.py @@ -655,7 +655,7 @@ def __init__(self, configure_logging=True, use_converters=True, use_display_appl # Load security policy. self.security_agent = self.model.security_agent self.host_security_agent = galaxy.model.security.HostAgent( - model=self.security_agent.model, permitted_actions=self.security_agent.permitted_actions + self.security_agent.sa_session, permitted_actions=self.security_agent.permitted_actions ) # We need the datatype registry for running certain tasks that modify HDAs, and to build the registry we need diff --git a/lib/galaxy/datatypes/dataproviders/base.py b/lib/galaxy/datatypes/dataproviders/base.py index c1239f86b9ff..a271437f184e 100644 --- a/lib/galaxy/datatypes/dataproviders/base.py +++ b/lib/galaxy/datatypes/dataproviders/base.py @@ -36,6 +36,7 @@ def stop( self ): self.endpoint = source.tell(); raise StopIteration() Building a giant list by sweeping all possible dprov classes doesn't make sense For now - I'm burying them in the class __init__s - but I don't like that """ +MAX_LIMIT = 10000 # ----------------------------------------------------------------------------- base classes @@ -233,21 +234,20 @@ class LimitedOffsetDataProvider(FilteredDataProvider): settings = {"limit": "int", "offset": "int"} # TODO: may want to squash this into DataProvider - def __init__(self, source, offset=0, limit=None, **kwargs): + def __init__(self, source, offset=0, limit=MAX_LIMIT, **kwargs): """ :param offset: the number of data to skip before providing. :param limit: the final number of data to provide. """ super().__init__(source, **kwargs) - # how many valid data to skip before we start outputing data - must be positive - # (diff to support neg. indeces - must be pos.) 
- self.offset = max(offset, 0) + # how many valid data to skip before we start outputting data - must be positive + self.offset = offset - # how many valid data to return - must be positive (None indicates no limit) + # how many valid data to return - must be positive + if limit is None: + limit = MAX_LIMIT self.limit = limit - if self.limit is not None: - self.limit = max(self.limit, 0) def __iter__(self): """ diff --git a/lib/galaxy/managers/groups.py b/lib/galaxy/managers/groups.py index 8edb50218203..e0d6cd177731 100644 --- a/lib/galaxy/managers/groups.py +++ b/lib/galaxy/managers/groups.py @@ -13,8 +13,6 @@ from galaxy.managers.context import ProvidesAppContext from galaxy.model import Group from galaxy.model.base import transaction -from galaxy.model.db.role import get_roles_by_ids -from galaxy.model.db.user import get_users_by_ids from galaxy.model.scoped_session import galaxy_scoped_session from galaxy.schema.fields import Security from galaxy.schema.groups import ( @@ -54,13 +52,11 @@ def create(self, trans: ProvidesAppContext, payload: GroupCreatePayload): group = model.Group(name=name) sa_session.add(group) - user_ids = payload.user_ids - users = get_users_by_ids(sa_session, user_ids) - role_ids = payload.role_ids - roles = get_roles_by_ids(sa_session, role_ids) - trans.app.security_agent.set_entity_group_associations(groups=[group], roles=roles, users=users) - with transaction(sa_session): - sa_session.commit() + + trans.app.security_agent.set_group_user_and_role_associations( + group, user_ids=payload.user_ids, role_ids=payload.role_ids + ) + sa_session.commit() encoded_id = Security.security.encode_id(group.id) item = group.to_dict(view="element") @@ -88,23 +84,12 @@ def update(self, trans: ProvidesAppContext, group_id: int, payload: GroupUpdateP if name := payload.name: self._check_duplicated_group_name(sa_session, name) group.name = name - sa_session.add(group) - - users = None - if payload.user_ids is not None: - users = get_users_by_ids(sa_session, payload.user_ids) - - roles = None - if payload.role_ids is not None: - roles = get_roles_by_ids(sa_session, payload.role_ids) + sa_session.commit() - self._app.security_agent.set_entity_group_associations( - groups=[group], roles=roles, users=users, delete_existing_assocs=False + self._app.security_agent.set_group_user_and_role_associations( + group, user_ids=payload.user_ids, role_ids=payload.role_ids ) - with transaction(sa_session): - sa_session.commit() - encoded_id = Security.security.encode_id(group.id) item = group.to_dict(view="element") item["url"] = self._url_for(trans, "show_group", group_id=encoded_id) diff --git a/lib/galaxy/managers/workflows.py b/lib/galaxy/managers/workflows.py index da341fa446c4..de6a3a472f9d 100644 --- a/lib/galaxy/managers/workflows.py +++ b/lib/galaxy/managers/workflows.py @@ -1660,7 +1660,7 @@ def _workflow_to_dict_instance(self, trans, stored, workflow, legacy=True): inputs = {} for step in workflow.input_steps: step_type = step.type - step_label = step.label or step.tool_inputs.get("name") + step_label = step.label or step.tool_inputs and step.tool_inputs.get("name") if step_label: label = step_label elif step_type == "data_input": @@ -1954,7 +1954,7 @@ def __set_default_label(self, step, module, state): to the actual `label` attribute which is available for all module types, unique, and mapped to its own database column. 
""" if not module.label and module.type in ["data_input", "data_collection_input"]: - new_state = safe_loads(state) + new_state = safe_loads(state) or {} default_label = new_state.get("name") if default_label and util.unicodify(default_label).lower() not in [ "input dataset", diff --git a/lib/galaxy/model/__init__.py b/lib/galaxy/model/__init__.py index f45921606266..36aee6b45441 100644 --- a/lib/galaxy/model/__init__.py +++ b/lib/galaxy/model/__init__.py @@ -780,7 +780,7 @@ class User(Base, Dictifiable, RepresentById): id: Mapped[int] = mapped_column(primary_key=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) - email: Mapped[str] = mapped_column(TrimmedString(255), index=True) + email: Mapped[str] = mapped_column(TrimmedString(255), index=True, unique=True) username: Mapped[Optional[str]] = mapped_column(TrimmedString(255), index=True, unique=True) password: Mapped[str] = mapped_column(TrimmedString(255)) last_password_change: Mapped[Optional[datetime]] = mapped_column(default=now) @@ -849,14 +849,6 @@ class User(Base, Dictifiable, RepresentById): all_notifications: Mapped[List["UserNotificationAssociation"]] = relationship( back_populates="user", cascade_backrefs=False ) - non_private_roles: Mapped[List["UserRoleAssociation"]] = relationship( - viewonly=True, - primaryjoin=( - lambda: (User.id == UserRoleAssociation.user_id) - & (UserRoleAssociation.role_id == Role.id) - & not_(Role.name == User.email) - ), - ) preferences: AssociationProxy[Any] @@ -2967,10 +2959,11 @@ def __init__(self, name=None): class UserGroupAssociation(Base, RepresentById): __tablename__ = "user_group_association" + __table_args__ = (UniqueConstraint("user_id", "group_id"),) id: Mapped[int] = mapped_column(primary_key=True) - user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True) - group_id: Mapped[int] = mapped_column(ForeignKey("galaxy_group.id"), index=True, nullable=True) + user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True) + group_id: Mapped[int] = mapped_column(ForeignKey("galaxy_group.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) user: Mapped["User"] = relationship(back_populates="groups") @@ -3685,10 +3678,11 @@ class HistoryUserShareAssociation(Base, UserShareAssociation): class UserRoleAssociation(Base, RepresentById): __tablename__ = "user_role_association" + __table_args__ = (UniqueConstraint("user_id", "role_id"),) id: Mapped[int] = mapped_column(primary_key=True) - user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True, nullable=True) - role_id: Mapped[int] = mapped_column(ForeignKey("role.id"), index=True, nullable=True) + user_id: Mapped[int] = mapped_column(ForeignKey("galaxy_user.id"), index=True) + role_id: Mapped[int] = mapped_column(ForeignKey("role.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) @@ -3703,10 +3697,11 @@ def __init__(self, user, role): class GroupRoleAssociation(Base, RepresentById): __tablename__ = "group_role_association" + __table_args__ = (UniqueConstraint("group_id", "role_id"),) id: Mapped[int] = mapped_column(primary_key=True) - group_id: Mapped[int] = mapped_column(ForeignKey("galaxy_group.id"), 
index=True, nullable=True) - role_id: Mapped[int] = mapped_column(ForeignKey("role.id"), index=True, nullable=True) + group_id: Mapped[int] = mapped_column(ForeignKey("galaxy_group.id"), index=True) + role_id: Mapped[int] = mapped_column(ForeignKey("role.id"), index=True) create_time: Mapped[datetime] = mapped_column(default=now, nullable=True) update_time: Mapped[datetime] = mapped_column(default=now, onupdate=now, nullable=True) group: Mapped["Group"] = relationship(back_populates="roles") @@ -4549,7 +4544,9 @@ class DatasetInstance(RepresentById, UsesCreateAndUpdateTime, _HasTable): creating_job_associations: List[Union[JobToOutputDatasetCollectionAssociation, JobToOutputDatasetAssociation]] copied_from_history_dataset_association: Optional["HistoryDatasetAssociation"] copied_from_library_dataset_dataset_association: Optional["LibraryDatasetDatasetAssociation"] + dependent_jobs: List[JobToInputLibraryDatasetAssociation] implicitly_converted_datasets: List["ImplicitlyConvertedDatasetAssociation"] + implicitly_converted_parent_datasets: List["ImplicitlyConvertedDatasetAssociation"] validated_states = DatasetValidatedState diff --git a/lib/galaxy/model/mapping.py b/lib/galaxy/model/mapping.py index e1d975e5be5a..707b20b7ca2f 100644 --- a/lib/galaxy/model/mapping.py +++ b/lib/galaxy/model/mapping.py @@ -97,7 +97,7 @@ def _build_model_mapping(engine, map_install_models, thread_local_log) -> Galaxy model_modules.append(tool_shed_install) model_mapping = GalaxyModelMapping(model_modules, engine) - model_mapping.security_agent = GalaxyRBACAgent(model_mapping) + model_mapping.security_agent = GalaxyRBACAgent(model_mapping.session) model_mapping.thread_local_log = thread_local_log return model_mapping diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/13fe10b8e35b_add_not_null_constraints_to_user_group_.py b/lib/galaxy/model/migrations/alembic/versions_gxy/13fe10b8e35b_add_not_null_constraints_to_user_group_.py new file mode 100644 index 000000000000..822a0229a4bc --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/13fe10b8e35b_add_not_null_constraints_to_user_group_.py @@ -0,0 +1,42 @@ +"""Add not-null constraints to user_group_association + +Revision ID: 13fe10b8e35b +Revises: 56ddf316dbd0 +Create Date: 2024-09-09 21:26:26.032842 + +""" + +from alembic import op + +from galaxy.model.migrations.data_fixes.association_table_fixer import UserGroupAssociationNullFix +from galaxy.model.migrations.util import ( + alter_column, + transaction, +) + +# revision identifiers, used by Alembic. 
+revision = "13fe10b8e35b" +down_revision = "56ddf316dbd0" +branch_labels = None +depends_on = None + +table_name = "user_group_association" + + +def upgrade(): + with transaction(): + _remove_records_with_nulls() + alter_column(table_name, "user_id", nullable=False) + alter_column(table_name, "group_id", nullable=False) + + +def downgrade(): + with transaction(): + alter_column(table_name, "user_id", nullable=True) + alter_column(table_name, "group_id", nullable=True) + + +def _remove_records_with_nulls(): + """Remove associations having null as user_id or group_id""" + connection = op.get_bind() + UserGroupAssociationNullFix(connection).run() diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/1cf595475b58_email_column_unique_constraint.py b/lib/galaxy/model/migrations/alembic/versions_gxy/1cf595475b58_email_column_unique_constraint.py new file mode 100644 index 000000000000..ba356b1c770d --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/1cf595475b58_email_column_unique_constraint.py @@ -0,0 +1,52 @@ +"""Email column unique constraint + +Revision ID: 1cf595475b58 +Revises: d619fdfa6168 +Create Date: 2024-07-03 19:53:22.443016 +""" + +from alembic import op + +from galaxy.model.database_object_names import build_index_name +from galaxy.model.migrations.data_fixes.user_table_fixer import EmailDeduplicator +from galaxy.model.migrations.util import ( + create_index, + drop_index, + index_exists, + transaction, +) + +# revision identifiers, used by Alembic. +revision = "1cf595475b58" +down_revision = "d619fdfa6168" +branch_labels = None +depends_on = None + + +table_name = "galaxy_user" +column_name = "email" +index_name = build_index_name(table_name, [column_name]) + + +def upgrade(): + with transaction(): + _fix_duplicate_emails() + # Existing databases may have an existing index we no longer need + # New databases will not have that index, so we must check. + if index_exists(index_name, table_name, False): + drop_index(index_name, table_name) + # Create a UNIQUE index + create_index(index_name, table_name, [column_name], unique=True) + + +def downgrade(): + with transaction(): + drop_index(index_name, table_name) + # Restore a non-unique index + create_index(index_name, table_name, [column_name]) + + +def _fix_duplicate_emails(): + """Fix records with duplicate usernames""" + connection = op.get_bind() + EmailDeduplicator(connection).run() diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/1fdd615f2cdb_add_not_null_constraints_to_user_role_.py b/lib/galaxy/model/migrations/alembic/versions_gxy/1fdd615f2cdb_add_not_null_constraints_to_user_role_.py new file mode 100644 index 000000000000..4fb6f5262f8e --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/1fdd615f2cdb_add_not_null_constraints_to_user_role_.py @@ -0,0 +1,42 @@ +"""Add not-null constraints to user_role_association + +Revision ID: 1fdd615f2cdb +Revises: 349dd9d9aac9 +Create Date: 2024-09-09 21:28:11.987054 + +""" + +from alembic import op + +from galaxy.model.migrations.data_fixes.association_table_fixer import UserRoleAssociationNullFix +from galaxy.model.migrations.util import ( + alter_column, + transaction, +) + +# revision identifiers, used by Alembic. 
+revision = "1fdd615f2cdb" +down_revision = "349dd9d9aac9" +branch_labels = None +depends_on = None + +table_name = "user_role_association" + + +def upgrade(): + with transaction(): + _remove_records_with_nulls() + alter_column(table_name, "user_id", nullable=False) + alter_column(table_name, "role_id", nullable=False) + + +def downgrade(): + with transaction(): + alter_column(table_name, "user_id", nullable=True) + alter_column(table_name, "role_id", nullable=True) + + +def _remove_records_with_nulls(): + """Remove associations having null as user_id or role_id""" + connection = op.get_bind() + UserRoleAssociationNullFix(connection).run() diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/25b092f7938b_add_not_null_constraints_to_group_role_.py b/lib/galaxy/model/migrations/alembic/versions_gxy/25b092f7938b_add_not_null_constraints_to_group_role_.py new file mode 100644 index 000000000000..f57dd446d0cb --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/25b092f7938b_add_not_null_constraints_to_group_role_.py @@ -0,0 +1,42 @@ +"""Add not-null constraints to group_role_association + +Revision ID: 25b092f7938b +Revises: 9ef6431f3a4e +Create Date: 2024-09-09 16:17:26.652865 + +""" + +from alembic import op + +from galaxy.model.migrations.data_fixes.association_table_fixer import GroupRoleAssociationNullFix +from galaxy.model.migrations.util import ( + alter_column, + transaction, +) + +# revision identifiers, used by Alembic. +revision = "25b092f7938b" +down_revision = "9ef6431f3a4e" +branch_labels = None +depends_on = None + +table_name = "group_role_association" + + +def upgrade(): + with transaction(): + _remove_records_with_nulls() + alter_column(table_name, "group_id", nullable=True) + alter_column(table_name, "role_id", nullable=False) + + +def downgrade(): + with transaction(): + alter_column(table_name, "group_id", nullable=True) + alter_column(table_name, "role_id", nullable=True) + + +def _remove_records_with_nulls(): + """Remove associations having null as group_id or role_id""" + connection = op.get_bind() + GroupRoleAssociationNullFix(connection).run() diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/349dd9d9aac9_add_unique_constraint_to_user_role_assoc.py b/lib/galaxy/model/migrations/alembic/versions_gxy/349dd9d9aac9_add_unique_constraint_to_user_role_assoc.py new file mode 100644 index 000000000000..26245f4a9c87 --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/349dd9d9aac9_add_unique_constraint_to_user_role_assoc.py @@ -0,0 +1,45 @@ +"""Add unique constraint to user_role_association + +Revision ID: 349dd9d9aac9 +Revises: 1cf595475b58 +Create Date: 2024-09-09 16:14:58.278850 + +""" + +from alembic import op + +from galaxy.model.migrations.data_fixes.association_table_fixer import UserRoleAssociationDuplicateFix +from galaxy.model.migrations.util import ( + create_unique_constraint, + drop_constraint, + transaction, +) + +# revision identifiers, used by Alembic. +revision = "349dd9d9aac9" +down_revision = "1cf595475b58" +branch_labels = None +depends_on = None + +table_name = "user_role_association" +constraint_column_names = ["user_id", "role_id"] +unique_constraint_name = ( + "user_role_association_user_id_key" # This is what the model's naming convention will generate. 
+) + + +def upgrade(): + with transaction(): + _remove_duplicate_records() + create_unique_constraint(unique_constraint_name, table_name, constraint_column_names) + + +def downgrade(): + with transaction(): + drop_constraint(unique_constraint_name, table_name) + + +def _remove_duplicate_records(): + """Remove duplicate associations""" + connection = op.get_bind() + UserRoleAssociationDuplicateFix(connection).run() diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/56ddf316dbd0_add_unique_constraint_to_user_group_.py b/lib/galaxy/model/migrations/alembic/versions_gxy/56ddf316dbd0_add_unique_constraint_to_user_group_.py new file mode 100644 index 000000000000..4a50ddcfcbe0 --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/56ddf316dbd0_add_unique_constraint_to_user_group_.py @@ -0,0 +1,45 @@ +"""Add unique constraint to user_group_association + +Revision ID: 56ddf316dbd0 +Revises: 1fdd615f2cdb +Create Date: 2024-09-09 16:10:37.081834 + +""" + +from alembic import op + +from galaxy.model.migrations.data_fixes.association_table_fixer import UserGroupAssociationDuplicateFix +from galaxy.model.migrations.util import ( + create_unique_constraint, + drop_constraint, + transaction, +) + +# revision identifiers, used by Alembic. +revision = "56ddf316dbd0" +down_revision = "1fdd615f2cdb" +branch_labels = None +depends_on = None + +table_name = "user_group_association" +constraint_column_names = ["user_id", "group_id"] +unique_constraint_name = ( + "user_group_association_user_id_key" # This is what the model's naming convention will generate. +) + + +def upgrade(): + with transaction(): + _remove_duplicate_records() + create_unique_constraint(unique_constraint_name, table_name, constraint_column_names) + + +def downgrade(): + with transaction(): + drop_constraint(unique_constraint_name, table_name) + + +def _remove_duplicate_records(): + """Remove duplicate associations""" + connection = op.get_bind() + UserGroupAssociationDuplicateFix(connection).run() diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/9ef6431f3a4e_add_unique_constraint_to_group_role_.py b/lib/galaxy/model/migrations/alembic/versions_gxy/9ef6431f3a4e_add_unique_constraint_to_group_role_.py new file mode 100644 index 000000000000..f84d09d5b043 --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/9ef6431f3a4e_add_unique_constraint_to_group_role_.py @@ -0,0 +1,45 @@ +"""Add unique constraint to group_role_association + +Revision ID: 9ef6431f3a4e +Revises: 13fe10b8e35b +Create Date: 2024-09-09 15:01:20.426534 + +""" + +from alembic import op + +from galaxy.model.migrations.data_fixes.association_table_fixer import GroupRoleAssociationDuplicateFix +from galaxy.model.migrations.util import ( + create_unique_constraint, + drop_constraint, + transaction, +) + +# revision identifiers, used by Alembic. +revision = "9ef6431f3a4e" +down_revision = "13fe10b8e35b" +branch_labels = None +depends_on = None + +table_name = "group_role_association" +constraint_column_names = ["group_id", "role_id"] +unique_constraint_name = ( + "group_role_association_group_id_key" # This is what the model's naming convention will generate. 
+) + + +def upgrade(): + with transaction(): + _remove_duplicate_records() + create_unique_constraint(unique_constraint_name, table_name, constraint_column_names) + + +def downgrade(): + with transaction(): + drop_constraint(unique_constraint_name, table_name) + + +def _remove_duplicate_records(): + """Remove duplicate associations""" + connection = op.get_bind() + GroupRoleAssociationDuplicateFix(connection).run() diff --git a/lib/galaxy/model/migrations/alembic/versions_gxy/d619fdfa6168_username_column_unique_constraint.py b/lib/galaxy/model/migrations/alembic/versions_gxy/d619fdfa6168_username_column_unique_constraint.py new file mode 100644 index 000000000000..de09d29097bb --- /dev/null +++ b/lib/galaxy/model/migrations/alembic/versions_gxy/d619fdfa6168_username_column_unique_constraint.py @@ -0,0 +1,51 @@ +"""Username column unique constraint + +Revision ID: d619fdfa6168 +Revises: d2d8f51ebb7e +Create Date: 2024-07-02 13:13:10.325586 +""" + +from alembic import op + +from galaxy.model.database_object_names import build_index_name +from galaxy.model.migrations.data_fixes.user_table_fixer import UsernameDeduplicator +from galaxy.model.migrations.util import ( + create_index, + drop_index, + index_exists, + transaction, +) + +# revision identifiers, used by Alembic. +revision = "d619fdfa6168" +down_revision = "d2d8f51ebb7e" +branch_labels = None +depends_on = None + +table_name = "galaxy_user" +column_name = "username" +index_name = build_index_name(table_name, [column_name]) + + +def upgrade(): + with transaction(): + _fix_duplicate_usernames() + # Existing databases may have an existing index we no longer need + # New databases will not have that index, so we must check. + if index_exists(index_name, table_name, False): + drop_index(index_name, table_name) + # Create a UNIQUE index + create_index(index_name, table_name, [column_name], unique=True) + + +def downgrade(): + with transaction(): + drop_index(index_name, table_name) + # Restore a non-unique index + create_index(index_name, table_name, [column_name]) + + +def _fix_duplicate_usernames(): + """Fix records with duplicate usernames""" + connection = op.get_bind() + UsernameDeduplicator(connection).run() diff --git a/lib/galaxy/model/migrations/data_fixes/__init__.py b/lib/galaxy/model/migrations/data_fixes/__init__.py new file mode 100644 index 000000000000..8b48aef46800 --- /dev/null +++ b/lib/galaxy/model/migrations/data_fixes/__init__.py @@ -0,0 +1,4 @@ +""" +Package contains code for fixing inconsistent data in the database that must be +run together with a migration script. 
+""" diff --git a/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py b/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py new file mode 100644 index 000000000000..711f266c5be1 --- /dev/null +++ b/lib/galaxy/model/migrations/data_fixes/association_table_fixer.py @@ -0,0 +1,200 @@ +from abc import ( + ABC, + abstractmethod, +) + +from sqlalchemy import ( + delete, + func, + null, + or_, + select, +) + +from galaxy.model import ( + GroupRoleAssociation, + UserGroupAssociation, + UserRoleAssociation, +) + + +class AssociationNullFix(ABC): + + def __init__(self, connection): + self.connection = connection + self.assoc_model = self.association_model() + self.assoc_name = self.assoc_model.__tablename__ + self.where_clause = self.build_where_clause() + + def run(self): + invalid_assocs = self.count_associations_with_nulls() + if invalid_assocs: + self.delete_associations_with_nulls() + + def count_associations_with_nulls( + self, + ): + """ + Retrieve association records where one or both associated item ids are null. + """ + select_stmt = select(func.count()).where(self.where_clause) + return self.connection.scalar(select_stmt) + + def delete_associations_with_nulls(self): + """ + Delete association records where one or both associated item ids are null. + """ + delete_stmt = delete(self.assoc_model).where(self.where_clause) + self.connection.execute(delete_stmt) + + @abstractmethod + def association_model(self): + """Return model class""" + + @abstractmethod + def build_where_clause(self): + """Build where clause for filtering records containing nulls instead of associated item ids""" + + +class UserGroupAssociationNullFix(AssociationNullFix): + + def association_model(self): + return UserGroupAssociation + + def build_where_clause(self): + return or_(UserGroupAssociation.user_id == null(), UserGroupAssociation.group_id == null()) + + +class UserRoleAssociationNullFix(AssociationNullFix): + + def association_model(self): + return UserRoleAssociation + + def build_where_clause(self): + return or_(UserRoleAssociation.user_id == null(), UserRoleAssociation.role_id == null()) + + +class GroupRoleAssociationNullFix(AssociationNullFix): + + def association_model(self): + return GroupRoleAssociation + + def build_where_clause(self): + return or_(GroupRoleAssociation.group_id == null(), GroupRoleAssociation.role_id == null()) + + +class AssociationDuplicateFix(ABC): + + def __init__(self, connection): + self.connection = connection + self.assoc_model = self.association_model() + self.assoc_name = self.assoc_model.__tablename__ + + def run(self): + duplicate_assocs = self.select_duplicate_associations() + if duplicate_assocs: + self.delete_duplicate_associations(duplicate_assocs) + + def select_duplicate_associations(self): + """Retrieve duplicate association records.""" + select_stmt = self.build_duplicate_tuples_statement() + return self.connection.execute(select_stmt).all() + + @abstractmethod + def association_model(self): + """Return model class""" + + @abstractmethod + def build_duplicate_tuples_statement(self): + """ + Build select statement returning a list of tuples (item1_id, item2_id) that have counts > 1 + """ + + @abstractmethod + def build_duplicate_ids_statement(self, item1_id, item2_id): + """ + Build select statement returning a list of ids for duplicate records retrieved via build_duplicate_tuples_statement(). 
+ """ + + def delete_duplicate_associations(self, records): + """ + Delete duplicate association records retaining oldest record in each group of duplicates. + """ + to_delete = [] + for item1_id, item2_id in records: + to_delete += self._get_duplicates_to_delete(item1_id, item2_id) + for id in to_delete: + delete_stmt = delete(self.assoc_model).where(self.assoc_model.id == id) + self.connection.execute(delete_stmt) + + def _get_duplicates_to_delete(self, item1_id, item2_id): + stmt = self.build_duplicate_ids_statement(item1_id, item2_id) + duplicates = self.connection.scalars(stmt).all() + # IMPORTANT: we slice to skip the first item ([1:]), which is the oldest record and SHOULD NOT BE DELETED. + return duplicates[1:] + + +class UserGroupAssociationDuplicateFix(AssociationDuplicateFix): + + def association_model(self): + return UserGroupAssociation + + def build_duplicate_tuples_statement(self): + stmt = ( + select(UserGroupAssociation.user_id, UserGroupAssociation.group_id) + .group_by(UserGroupAssociation.user_id, UserGroupAssociation.group_id) + .having(func.count() > 1) + ) + return stmt + + def build_duplicate_ids_statement(self, user_id, group_id): + stmt = ( + select(UserGroupAssociation.id) + .where(UserGroupAssociation.user_id == user_id, UserGroupAssociation.group_id == group_id) + .order_by(UserGroupAssociation.update_time) + ) + return stmt + + +class UserRoleAssociationDuplicateFix(AssociationDuplicateFix): + + def association_model(self): + return UserRoleAssociation + + def build_duplicate_tuples_statement(self): + stmt = ( + select(UserRoleAssociation.user_id, UserRoleAssociation.role_id) + .group_by(UserRoleAssociation.user_id, UserRoleAssociation.role_id) + .having(func.count() > 1) + ) + return stmt + + def build_duplicate_ids_statement(self, user_id, role_id): + stmt = ( + select(UserRoleAssociation.id) + .where(UserRoleAssociation.user_id == user_id, UserRoleAssociation.role_id == role_id) + .order_by(UserRoleAssociation.update_time) + ) + return stmt + + +class GroupRoleAssociationDuplicateFix(AssociationDuplicateFix): + + def association_model(self): + return GroupRoleAssociation + + def build_duplicate_tuples_statement(self): + stmt = ( + select(GroupRoleAssociation.group_id, GroupRoleAssociation.role_id) + .group_by(GroupRoleAssociation.group_id, GroupRoleAssociation.role_id) + .having(func.count() > 1) + ) + return stmt + + def build_duplicate_ids_statement(self, group_id, role_id): + stmt = ( + select(GroupRoleAssociation.id) + .where(GroupRoleAssociation.group_id == group_id, GroupRoleAssociation.role_id == role_id) + .order_by(GroupRoleAssociation.update_time) + ) + return stmt diff --git a/lib/galaxy/model/migrations/data_fixes/user_table_fixer.py b/lib/galaxy/model/migrations/data_fixes/user_table_fixer.py new file mode 100644 index 000000000000..4b9054872cd0 --- /dev/null +++ b/lib/galaxy/model/migrations/data_fixes/user_table_fixer.py @@ -0,0 +1,112 @@ +import uuid + +from sqlalchemy import ( + func, + Result, + select, + text, + update, +) + +from galaxy.model import User + + +class UsernameDeduplicator: + + def __init__(self, connection): + self.connection = connection + + def run(self): + """ + Deduplicate usernames by generating a unique value for all duplicates, keeping + the username of the most recently created user unchanged. + Records updated with the generated value are marked as deleted. 
+ """ + duplicates = self._get_duplicate_username_data() + prev_username = None + for id, username, _ in duplicates: + if username == prev_username: + new_username = self._generate_next_available_username(username) + stmt = update(User).where(User.id == id).values(username=new_username, deleted=True) + self.connection.execute(stmt) + else: + prev_username = username + + def _get_duplicate_username_data(self) -> Result: + # Duplicate usernames + duplicates_stmt = select(User.username).group_by(User.username).having(func.count() > 1) + # User data for records with duplicate usernames (ordering: newest to oldest) + stmt = ( + select(User.id, User.username, User.create_time) + .where(User.username.in_(duplicates_stmt)) + .order_by(User.username, User.create_time.desc()) + ) + return self.connection.execute(stmt) + + def _generate_next_available_username(self, username): + i = 1 + while self.connection.execute(select(User).where(User.username == f"{username}-{i}")).first(): + i += 1 + return f"{username}-{i}" + + +class EmailDeduplicator: + + def __init__(self, connection): + self.connection = connection + + def run(self): + """ + Deduplicate user emails by generating a unique value for all duplicates, keeping + the email of the most recently created user that has one or more history unchanged. + If such a user does not exist, keep the oldest user. + Records updated with the generated value are marked as deleted (we presume them + to be invalid, and the user should not be able to login). + """ + stmt = select(User.email).group_by(User.email).having(func.count() > 1) + duplicate_emails = self.connection.scalars(stmt) + for email in duplicate_emails: + users = self._get_users_with_same_email(email) + user_with_history = self._find_oldest_user_with_history(users) + duplicates = self._get_users_to_deduplicate(users, user_with_history) + self._deduplicate_users(email, duplicates) + + def _get_users_with_same_email(self, email: str): + sql = text( + """ + SELECT u.id, EXISTS(SELECT h.id FROM history h WHERE h.user_id = u.id) + FROM galaxy_user u + WHERE u.email = :email + ORDER BY u.create_time + """ + ) + params = {"email": email} + return self.connection.execute(sql, params).all() + + def _find_oldest_user_with_history(self, users): + for user_id, exists in users: + if exists: + return user_id + return None + + def _get_users_to_deduplicate(self, users, user_with_history): + if user_with_history: + # Preserve the oldest user with a history + return [user_id for user_id, _ in users if user_id != user_with_history] + else: + # Preserve the oldest user + return [user_id for user_id, _ in users[1:]] + + def _deduplicate_users(self, email, to_deduplicate): + for id in to_deduplicate: + new_email = self._generate_replacement_for_duplicate_email(email) + stmt = update(User).where(User.id == id).values(email=new_email, deleted=True) + self.connection.execute(stmt) + + def _generate_replacement_for_duplicate_email(self, email: str) -> str: + """ + Generate a replacement for a duplicate email value. The new value consists of the original + email and a unique suffix. Since the original email is part of the new value, it will be + possible to retrieve the user record based on this value, if needed. 
+ """ + return f"{email}-{uuid.uuid4()}" diff --git a/lib/galaxy/model/security.py b/lib/galaxy/model/security.py index 09b425dcd8eb..74e12e71c62d 100644 --- a/lib/galaxy/model/security.py +++ b/lib/galaxy/model/security.py @@ -1,33 +1,48 @@ import logging import socket +import sqlite3 from datetime import ( datetime, timedelta, ) -from typing import List +from typing import ( + List, + Optional, +) from sqlalchemy import ( and_, + delete, false, func, + insert, not_, or_, select, + text, ) +from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import joinedload -from sqlalchemy.sql import text import galaxy.model +from galaxy.exceptions import RequestParameterInvalidException from galaxy.model import ( Dataset, + DatasetCollection, DatasetPermissions, + DefaultHistoryPermissions, + DefaultUserPermissions, Group, GroupRoleAssociation, HistoryDatasetAssociationDisplayAtAuthorization, Library, LibraryDataset, + LibraryDatasetCollectionAssociation, LibraryDatasetDatasetAssociation, + LibraryDatasetDatasetAssociationPermissions, LibraryDatasetPermissions, + LibraryFolder, + LibraryFolderPermissions, LibraryPermissions, Role, User, @@ -51,23 +66,18 @@ class GalaxyRBACAgent(RBACAgent): - def __init__(self, model, permitted_actions=None): - self.model = model + def __init__(self, sa_session, permitted_actions=None): + self.sa_session = sa_session if permitted_actions: self.permitted_actions = permitted_actions # List of "library_item" objects and their associated permissions and info template objects self.library_item_assocs = ( - (self.model.Library, self.model.LibraryPermissions), - (self.model.LibraryFolder, self.model.LibraryFolderPermissions), - (self.model.LibraryDataset, self.model.LibraryDatasetPermissions), - (self.model.LibraryDatasetDatasetAssociation, self.model.LibraryDatasetDatasetAssociationPermissions), + (Library, LibraryPermissions), + (LibraryFolder, LibraryFolderPermissions), + (LibraryDataset, LibraryDatasetPermissions), + (LibraryDatasetDatasetAssociation, LibraryDatasetDatasetAssociationPermissions), ) - @property - def sa_session(self): - """Returns a SQLAlchemy session""" - return self.model.context - def sort_by_attr(self, seq, attr): """ Sort the sequence of objects by object's attribute @@ -139,11 +149,11 @@ def get_valid_roles(self, trans, item, query=None, page=None, page_limit=None, i else: limit = None total_count = None - if isinstance(item, self.model.Library) and self.library_is_public(item): + if isinstance(item, Library) and self.library_is_public(item): is_public_item = True - elif isinstance(item, self.model.Dataset) and self.dataset_is_public(item): + elif isinstance(item, Dataset) and self.dataset_is_public(item): is_public_item = True - elif isinstance(item, self.model.LibraryFolder): + elif isinstance(item, LibraryFolder): is_public_item = True else: is_public_item = False @@ -238,8 +248,8 @@ def get_legitimate_roles(self, trans, item, cntrller): """ admin_controller = cntrller in ["library_admin"] roles = set() - if (isinstance(item, self.model.Library) and self.library_is_public(item)) or ( - isinstance(item, self.model.Dataset) and self.dataset_is_public(item) + if (isinstance(item, Library) and self.library_is_public(item)) or ( + isinstance(item, Dataset) and self.dataset_is_public(item) ): return self.get_all_roles(trans, cntrller) # If item has roles associated with the access permission, we need to start with them. 
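Note on the lib/galaxy/model/security.py hunks: they carry two related changes — GalaxyRBACAgent is now constructed from a SQLAlchemy session rather than the model mapping, and model classes are imported at module level instead of being resolved through self.model. A minimal construction sketch consistent with the lib/galaxy/app.py and lib/galaxy/model/mapping.py hunks above; model_mapping here is only an illustrative stand-in for the GalaxyModelMapping instance, not part of the patch:

    # Illustrative sketch: building the security agents after this refactor.
    import galaxy.model.security
    from galaxy.model.security import GalaxyRBACAgent

    # Before: GalaxyRBACAgent(model_mapping), with the session reached via model_mapping.context.
    # After: the scoped session is passed in directly and exposed as .sa_session.
    security_agent = GalaxyRBACAgent(model_mapping.session)

    # HostAgent follows the same pattern, reusing the agent's session (see lib/galaxy/app.py above).
    host_security_agent = galaxy.model.security.HostAgent(
        security_agent.sa_session, permitted_actions=security_agent.permitted_actions
    )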
@@ -272,13 +282,13 @@ def ok_to_display(self, user, role): """ role_type = role.type if user: - if role_type == self.model.Role.types.PRIVATE: + if role_type == Role.types.PRIVATE: return role == self.get_private_user_role(user) - if role_type == self.model.Role.types.SHARING: + if role_type == Role.types.SHARING: return role in self.get_sharing_roles(user) # If role_type is neither private nor sharing, it's ok to display return True - return role_type != self.model.Role.types.PRIVATE and role_type != self.model.Role.types.SHARING + return role_type != Role.types.PRIVATE and role_type != Role.types.SHARING def allow_action(self, roles, action, item): """ @@ -329,7 +339,7 @@ def get_actions_for_items(self, trans, action, permission_items): ret_permissions = {} if len(permission_items) > 0: # SM: NB: LibraryDatasets became Datasets for some odd reason. - if isinstance(permission_items[0], trans.model.LibraryDataset): + if isinstance(permission_items[0], LibraryDataset): ids = [item.library_dataset_id for item in permission_items] stmt = select(LibraryDatasetPermissions).where( and_( @@ -348,7 +358,7 @@ def get_actions_for_items(self, trans, action, permission_items): ret_permissions[item.library_dataset_id] = [] for permission in permissions: ret_permissions[permission.library_dataset_id].append(permission) - elif isinstance(permission_items[0], trans.model.Dataset): + elif isinstance(permission_items[0], Dataset): ids = [item.id for item in permission_items] stmt = select(DatasetPermissions).where( @@ -499,7 +509,7 @@ def item_permission_map_for_manage(self, trans, user_roles, libitems): def item_permission_map_for_add(self, trans, user_roles, libitems): return self.allow_action_on_libitems(trans, user_roles, self.permitted_actions.LIBRARY_ADD, libitems) - def can_access_dataset(self, user_roles, dataset: galaxy.model.Dataset): + def can_access_dataset(self, user_roles, dataset: Dataset): # SM: dataset_is_public will access dataset.actions, which is a # backref that causes a query to be made to DatasetPermissions retval = self.dataset_is_public(dataset) or self.allow_action( @@ -518,7 +528,7 @@ def can_access_datasets(self, user_roles, action_tuples): return True - def can_access_collection(self, user_roles: List[galaxy.model.Role], collection: galaxy.model.DatasetCollection): + def can_access_collection(self, user_roles: List[Role], collection: DatasetCollection): action_tuples = collection.dataset_action_tuples if not self.can_access_datasets(user_roles, action_tuples): return False @@ -599,21 +609,21 @@ def __active_folders_have_accessible_library_datasets(self, trans, folder, user, return False def can_access_library_item(self, roles, item, user): - if isinstance(item, self.model.Library): + if isinstance(item, Library): return self.can_access_library(roles, item) - elif isinstance(item, self.model.LibraryFolder): + elif isinstance(item, LibraryFolder): return ( self.can_access_library(roles, item.parent_library) and self.check_folder_contents(user, roles, item)[0] ) - elif isinstance(item, self.model.LibraryDataset): + elif isinstance(item, LibraryDataset): return self.can_access_library(roles, item.folder.parent_library) and self.can_access_dataset( roles, item.library_dataset_dataset_association.dataset ) - elif isinstance(item, self.model.LibraryDatasetDatasetAssociation): + elif isinstance(item, LibraryDatasetDatasetAssociation): return self.can_access_library( roles, item.library_dataset.folder.parent_library ) and self.can_access_dataset(roles, item.dataset) - elif 
isinstance(item, self.model.LibraryDatasetCollectionAssociation): + elif isinstance(item, LibraryDatasetCollectionAssociation): return self.can_access_library(roles, item.folder.parent_library) else: log.warning(f"Unknown library item type: {type(item)}") @@ -658,7 +668,7 @@ def guess_derived_permissions_for_datasets(self, datasets=None): datasets = datasets or [] perms = {} for dataset in datasets: - if not isinstance(dataset, self.model.Dataset): + if not isinstance(dataset, Dataset): dataset = dataset.dataset these_perms = {} # initialize blank perms @@ -700,43 +710,29 @@ def guess_derived_permissions(self, all_input_permissions): perms[action].extend([_ for _ in role_ids if _ not in perms[action]]) return perms - def associate_components(self, **kwd): - if "user" in kwd: - if "group" in kwd: - return self.associate_user_group(kwd["user"], kwd["group"]) - elif "role" in kwd: - return self.associate_user_role(kwd["user"], kwd["role"]) - elif "role" in kwd: - if "group" in kwd: - return self.associate_group_role(kwd["group"], kwd["role"]) - if "action" in kwd: - if "dataset" in kwd and "role" in kwd: - return self.associate_action_dataset_role(kwd["action"], kwd["dataset"], kwd["role"]) - raise Exception(f"No valid method of associating provided components: {kwd}") - def associate_user_group(self, user, group): - assoc = self.model.UserGroupAssociation(user, group) + assoc = UserGroupAssociation(user, group) self.sa_session.add(assoc) with transaction(self.sa_session): self.sa_session.commit() return assoc def associate_user_role(self, user, role): - assoc = self.model.UserRoleAssociation(user, role) + assoc = UserRoleAssociation(user, role) self.sa_session.add(assoc) with transaction(self.sa_session): self.sa_session.commit() return assoc def associate_group_role(self, group, role): - assoc = self.model.GroupRoleAssociation(group, role) + assoc = GroupRoleAssociation(group, role) self.sa_session.add(assoc) with transaction(self.sa_session): self.sa_session.commit() return assoc def associate_action_dataset_role(self, action, dataset, role): - assoc = self.model.DatasetPermissions(action, dataset, role) + assoc = DatasetPermissions(action, dataset, role) self.sa_session.add(assoc) with transaction(self.sa_session): self.sa_session.commit() @@ -767,14 +763,14 @@ def get_private_user_role(self, user, auto_create=False): return role def get_role(self, name, type=None): - type = type or self.model.Role.types.SYSTEM + type = type or Role.types.SYSTEM # will raise exception if not found stmt = select(Role).where(and_(Role.name == name, Role.type == type)) return self.sa_session.execute(stmt).scalar_one() def create_role(self, name, description, in_users, in_groups, create_group_for_role=False, type=None): - type = type or self.model.Role.types.SYSTEM - role = self.model.Role(name=name, description=description, type=type) + type = type or Role.types.SYSTEM + role = Role(name=name, description=description, type=type) self.sa_session.add(role) # Create the UserRoleAssociations for user in [self.sa_session.get(User, x) for x in in_users]: @@ -784,7 +780,7 @@ def create_role(self, name, description, in_users, in_groups, create_group_for_r self.associate_group_role(group, role) if create_group_for_role: # Create the group - group = self.model.Group(name=name) + group = Group(name=name) self.sa_session.add(group) # Associate the group with the role self.associate_group_role(group, role) @@ -831,7 +827,7 @@ def user_set_default_permissions( for action, roles in permissions.items(): if 
isinstance(action, Action): action = action.action - for dup in [self.model.DefaultUserPermissions(user, action, role) for role in roles]: + for dup in [DefaultUserPermissions(user, action, role) for role in roles]: self.sa_session.add(dup) flush_needed = True if flush_needed: @@ -871,7 +867,7 @@ def history_set_default_permissions(self, history, permissions=None, dataset=Fal for action, roles in permissions.items(): if isinstance(action, Action): action = action.action - for dhp in [self.model.DefaultHistoryPermissions(history, action, role) for role in roles]: + for dhp in [DefaultHistoryPermissions(history, action, role) for role in roles]: self.sa_session.add(dhp) flush_needed = True if flush_needed: @@ -922,7 +918,7 @@ def set_all_dataset_permissions(self, dataset, permissions=None, new=False, flus for _, roles in _walk_action_roles(permissions, self.permitted_actions.DATASET_ACCESS): dataset_access_roles.extend(roles) - if len(dataset_access_roles) != 1 or dataset_access_roles[0].type != self.model.Role.types.PRIVATE: + if len(dataset_access_roles) != 1 or dataset_access_roles[0].type != Role.types.PRIVATE: return galaxy.model.CANNOT_SHARE_PRIVATE_DATASET_MESSAGE flush_needed = False @@ -940,7 +936,7 @@ def set_all_dataset_permissions(self, dataset, permissions=None, new=False, flus role_id = role.id else: role_id = role - dp = self.model.DatasetPermissions(action, dataset, role_id=role_id) + dp = DatasetPermissions(action, dataset, role_id=role_id) self.sa_session.add(dp) flush_needed = True if flush_needed and flush: @@ -970,7 +966,7 @@ def set_dataset_permission(self, dataset, permission=None): self.sa_session.delete(dp) flush_needed = True # Add the new specific permission on the dataset - for dp in [self.model.DatasetPermissions(action, dataset, role) for role in roles]: + for dp in [DatasetPermissions(action, dataset, role) for role in roles]: self.sa_session.add(dp) flush_needed = True if flush_needed: @@ -993,9 +989,9 @@ def get_permissions(self, item): return permissions def copy_dataset_permissions(self, src, dst, flush=True): - if not isinstance(src, self.model.Dataset): + if not isinstance(src, Dataset): src = src.dataset - if not isinstance(dst, self.model.Dataset): + if not isinstance(dst, Dataset): dst = dst.dataset self.set_all_dataset_permissions(dst, self.get_permissions(src), flush=flush) @@ -1004,7 +1000,7 @@ def privately_share_dataset(self, dataset, users=None): intersect = None users = users or [] for user in users: - roles = [ura.role for ura in user.roles if ura.role.type == self.model.Role.types.SHARING] + roles = [ura.role for ura in user.roles if ura.role.type == Role.types.SHARING] if intersect is None: intersect = roles else: @@ -1021,14 +1017,12 @@ def privately_share_dataset(self, dataset, users=None): sharing_role = role break if sharing_role is None: - sharing_role = self.model.Role( - name=f"Sharing role for: {', '.join(u.email for u in users)}", type=self.model.Role.types.SHARING - ) + sharing_role = Role(name=f"Sharing role for: {', '.join(u.email for u in users)}", type=Role.types.SHARING) self.sa_session.add(sharing_role) with transaction(self.sa_session): self.sa_session.commit() for user in users: - self.associate_components(user=user, role=sharing_role) + self.associate_user_role(user, sharing_role) self.set_dataset_permission(dataset, {self.permitted_actions.DATASET_ACCESS: [sharing_role]}) def set_all_library_permissions(self, trans, library_item, permissions=None): @@ -1047,7 +1041,7 @@ def set_all_library_permissions(self, trans, 
library_item, permissions=None): for role_assoc in [permission_class(action, library_item, role) for role in roles]: self.sa_session.add(role_assoc) flush_needed = True - if isinstance(library_item, self.model.LibraryDatasetDatasetAssociation): + if isinstance(library_item, LibraryDatasetDatasetAssociation): # Permission setting related to DATASET_MANAGE_PERMISSIONS was broken for a period of time, # so it is possible that some Datasets have no roles associated with the DATASET_MANAGE_PERMISSIONS # permission. In this case, we'll reset this permission to the library_item user's private role. @@ -1086,14 +1080,12 @@ def set_library_item_permission(self, library_item, permission=None): self.sa_session.delete(item_permission) flush_needed = True # Add the new specific permission on the library item - if isinstance(library_item, self.model.LibraryDataset): - for item_permission in [ - self.model.LibraryDatasetPermissions(action, library_item, role) for role in roles - ]: + if isinstance(library_item, LibraryDataset): + for item_permission in [LibraryDatasetPermissions(action, library_item, role) for role in roles]: self.sa_session.add(item_permission) flush_needed = True - elif isinstance(library_item, self.model.LibraryPermissions): - for item_permission in [self.model.LibraryPermissions(action, library_item, role) for role in roles]: + elif isinstance(library_item, LibraryPermissions): + for item_permission in [LibraryPermissions(action, library_item, role) for role in roles]: self.sa_session.add(item_permission) flush_needed = True if flush_needed: @@ -1151,7 +1143,7 @@ def make_folder_public(self, folder): if not dataset.purged and not self.dataset_is_public(dataset): self.make_dataset_public(dataset) - def dataset_is_public(self, dataset: galaxy.model.Dataset): + def dataset_is_public(self, dataset: Dataset): """ A dataset is considered public if there are no "access" actions associated with it. Any other actions ( 'manage permissions', @@ -1194,7 +1186,7 @@ def dataset_is_private_to_a_user(self, dataset): return False else: access_role = access_roles[0] - return access_role.type == self.model.Role.types.PRIVATE + return access_role.type == Role.types.PRIVATE def datasets_are_public(self, trans, datasets): """ @@ -1294,7 +1286,7 @@ def derive_roles_from_access(self, trans, item_id, cntrller, library=False, **kw # permission on this dataset, or the dataset is not accessible. # Since we have more than 1 role, none of them can be private. 
for role in in_roles: - if role.type == self.model.Role.types.PRIVATE: + if role.type == Role.types.PRIVATE: private_role_found = True break if len(in_roles) == 1: @@ -1358,7 +1350,7 @@ def copy_library_permissions(self, trans, source_library_item, target_library_it f"Invalid class ({target_library_item.__class__}) specified for target_library_item ({target_library_item.__class__.__name__})" ) # Make sure user's private role is included - private_role = self.model.security_agent.get_private_user_role(user) + private_role = self.get_private_user_role(user) for action in self.permitted_actions.values(): if not found_permission_class.filter_by(role_id=private_role.id, action=action.action).first(): lp = found_permission_class(action.action, target_library_item, private_role) @@ -1407,9 +1399,9 @@ def show_library_item(self, user, roles, library_item, actions_to_check, hidden_ for action in actions_to_check: if self.allow_action(roles, action, library_item): return True, hidden_folder_ids - if isinstance(library_item, self.model.Library): + if isinstance(library_item, Library): return self.show_library_item(user, roles, library_item.root_folder, actions_to_check, hidden_folder_ids="") - if isinstance(library_item, self.model.LibraryFolder): + if isinstance(library_item, LibraryFolder): for folder in library_item.active_folders: can_show, hidden_folder_ids = self.show_library_item( user, roles, folder, actions_to_check, hidden_folder_ids=hidden_folder_ids @@ -1433,11 +1425,11 @@ def get_showable_folders( """ hidden_folder_ids = hidden_folder_ids or [] showable_folders = showable_folders or [] - if isinstance(library_item, self.model.Library): + if isinstance(library_item, Library): return self.get_showable_folders( user, roles, library_item.root_folder, actions_to_check, showable_folders=[] ) - if isinstance(library_item, self.model.LibraryFolder): + if isinstance(library_item, LibraryFolder): if library_item.id not in hidden_folder_ids: for action in actions_to_check: if self.allow_action(roles, action, library_item): @@ -1447,62 +1439,171 @@ def get_showable_folders( self.get_showable_folders(user, roles, folder, actions_to_check, showable_folders=showable_folders) return showable_folders - def set_entity_user_associations(self, users=None, roles=None, groups=None, delete_existing_assocs=True): - users = users or [] - roles = roles or [] - groups = groups or [] - for user in users: - if delete_existing_assocs: - flush_needed = False - for a in user.non_private_roles + user.groups: - self.sa_session.delete(a) - flush_needed = True - if flush_needed: - with transaction(self.sa_session): - self.sa_session.commit() - self.sa_session.refresh(user) - for role in roles: - # Make sure we are not creating an additional association with a PRIVATE role - if role not in [x.role for x in user.roles]: - self.associate_components(user=user, role=role) - for group in groups: - self.associate_components(user=user, group=group) + def set_user_group_and_role_associations( + self, + user: User, + *, + group_ids: Optional[List[int]] = None, + role_ids: Optional[List[int]] = None, + ) -> None: + """ + Set user groups and user roles, replacing current associations. 
- def set_entity_group_associations(self, groups=None, users=None, roles=None, delete_existing_assocs=True): - users = users or [] - roles = roles or [] - groups = groups or [] - for group in groups: - if delete_existing_assocs: - flush_needed = False - for a in group.roles + group.users: - self.sa_session.delete(a) - flush_needed = True - if flush_needed: - with transaction(self.sa_session): - self.sa_session.commit() - for role in roles: - self.associate_components(group=group, role=role) - for user in users: - self.associate_components(group=group, user=user) + Associations are set only if a list of new associations is provided. + If the provided list is empty, existing associations will be removed. + If the provided value is None, existing associations will not be updated. + """ + self._persist_new_model(user) + if group_ids is not None: + self._set_user_groups(user, group_ids) + if role_ids is not None: + self._set_user_roles(user, role_ids) + # Commit only if both user groups and user roles have been set. + self.sa_session.commit() + + def set_group_user_and_role_associations( + self, + group: Group, + *, + user_ids: Optional[List[int]] = None, + role_ids: Optional[List[int]] = None, + ) -> None: + """ + Set group users and group roles, replacing current associations. - def set_entity_role_associations(self, roles=None, users=None, groups=None, delete_existing_assocs=True): - users = users or [] - roles = roles or [] - groups = groups or [] - for role in roles: - if delete_existing_assocs: - flush_needed = False - for a in role.users + role.groups: - self.sa_session.delete(a) - flush_needed = True - if flush_needed: - with transaction(self.sa_session): - self.sa_session.commit() - for user in users: - self.associate_components(user=user, role=role) - for group in groups: - self.associate_components(group=group, role=role) + Associations are set only if a list of new associations is provided. + If the provided list is empty, existing associations will be removed. + If the provided value is None, existing associations will not be updated. + """ + self._persist_new_model(group) + if user_ids is not None: + self._set_group_users(group, user_ids) + if role_ids is not None: + self._set_group_roles(group, role_ids) + # Commit only if both group users and group roles have been set. + self.sa_session.commit() + + def set_role_user_and_group_associations( + self, + role: Role, + *, + user_ids: Optional[List[int]] = None, + group_ids: Optional[List[int]] = None, + ) -> None: + """ + Set role users and role groups, replacing current associations. + + Associations are set only if a list of new associations is provided. + If the provided list is empty, existing associations will be removed. + If the provided value is None, existing associations will not be updated. + """ + self._persist_new_model(role) + if user_ids is not None: + self._set_role_users(role, user_ids) + if group_ids is not None: + self._set_role_groups(role, group_ids) + # Commit only if both role users and role groups have been set. + self.sa_session.commit() + + def _set_user_groups(self, user, group_ids): + delete_stmt = delete(UserGroupAssociation).where(UserGroupAssociation.user_id == user.id) + insert_values = [{"user_id": user.id, "group_id": group_id} for group_id in group_ids] + self._set_associations(user, UserGroupAssociation, delete_stmt, insert_values) + + def _set_user_roles(self, user, role_ids): + # Do not include user's private role association in delete statement. 
+ delete_stmt = delete(UserRoleAssociation).where(UserRoleAssociation.user_id == user.id) + private_role = get_private_user_role(user, self.sa_session) + if not private_role: + log.warning("User %s does not have a private role assigned", user) + else: + delete_stmt = delete_stmt.where(UserRoleAssociation.role_id != private_role.id) + role_ids = self._filter_private_roles(role_ids) + + insert_values = [{"user_id": user.id, "role_id": role_id} for role_id in role_ids] + self._set_associations(user, UserRoleAssociation, delete_stmt, insert_values) + + def _filter_private_roles(self, role_ids): + """Filter out IDs of private roles: those should not be assignable via UI""" + stmt = select(Role.id).where(Role.id.in_(role_ids)).where(Role.type == Role.types.PRIVATE) + private_role_ids = self.sa_session.scalars(stmt).all() + # We could simply select only private roles; however, that would get rid of potential duplicates + # and invalid role_ids, which would hide any bugs that should be caught in the _set_associations() method. + return [role_id for role_id in role_ids if role_id not in private_role_ids] + + def _set_group_users(self, group, user_ids): + delete_stmt = delete(UserGroupAssociation).where(UserGroupAssociation.group_id == group.id) + insert_values = [{"group_id": group.id, "user_id": user_id} for user_id in user_ids] + self._set_associations(group, UserGroupAssociation, delete_stmt, insert_values) + + def _set_group_roles(self, group, role_ids): + delete_stmt = delete(GroupRoleAssociation).where(GroupRoleAssociation.group_id == group.id) + insert_values = [{"group_id": group.id, "role_id": role_id} for role_id in role_ids] + self._set_associations(group, GroupRoleAssociation, delete_stmt, insert_values) + + def _set_role_users(self, role, user_ids): + # Do not set users if the role is private + # Even though we do not expect to be handling a private role here, the following code is + # a safeguard against deleting a user-role-association record for a private role. + if role.type == Role.types.PRIVATE: + return + + # First, check previously associated users to: + # - delete DefaultUserPermissions for users that are being removed from this role; + # - delete DefaultHistoryPermissions for histories associated with users that are being removed from this role.
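+ # For example: if a user is being dropped from this role, the loop below deletes that user's + # DefaultUserPermissions rows for this role and the DefaultHistoryPermissions rows for this role + # on each of the user's histories, so newly created datasets no longer pick up the role by default.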
+ for ura in role.users: + if ura.user_id not in user_ids: # If a user will be removed from this role, then: + user = self.sa_session.get(User, ura.user_id) + # Delete DefaultUserPermissions for this user + for dup in user.default_permissions: + if role == dup.role: + self.sa_session.delete(dup) + # Delete DefaultHistoryPermissions for histories associated with this user + for history in user.histories: + for dhp in history.default_permissions: + if role == dhp.role: + self.sa_session.delete(dhp) + + delete_stmt = delete(UserRoleAssociation).where(UserRoleAssociation.role_id == role.id) + insert_values = [{"role_id": role.id, "user_id": user_id} for user_id in user_ids] + self._set_associations(role, UserRoleAssociation, delete_stmt, insert_values) + + def _set_role_groups(self, role, group_ids): + delete_stmt = delete(GroupRoleAssociation).where(GroupRoleAssociation.role_id == role.id) + insert_values = [{"role_id": role.id, "group_id": group_id} for group_id in group_ids] + self._set_associations(role, GroupRoleAssociation, delete_stmt, insert_values) + + def _persist_new_model(self, model_instance): + # If model_instance is new, it may have not been assigned a database id yet, which is required + # for creating association records. Flush if that's the case. + if model_instance.id is None: + self.sa_session.flush([model_instance]) + + def _set_associations(self, parent_model, assoc_model, delete_stmt, insert_values): + """ + Delete current associations for assoc_model, then insert new associations if values are provided. + """ + # Ensure sqlite respects foreign key constraints. + if self.sa_session.bind.dialect.name == "sqlite": + self.sa_session.execute(text("PRAGMA foreign_keys = ON;")) + self.sa_session.execute(delete_stmt) + if not insert_values: + return + try: + self.sa_session.execute(insert(assoc_model), insert_values) + except IntegrityError as ie: + self.sa_session.rollback() + if is_unique_constraint_violation(ie): + msg = f"Attempting to create a duplicate {assoc_model} record ({insert_values})" + log.exception(msg) + raise RequestParameterInvalidException() + elif is_foreign_key_violation(ie): + msg = f"Attempting to create an invalid {assoc_model} record ({insert_values})" + log.exception(msg) + raise RequestParameterInvalidException() + else: + raise def get_component_associations(self, **kwd): assert len(kwd) == 2, "You must specify exactly 2 Galaxy security components to check for associations." 
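The methods above give the security agent a single replace-semantics API for association management. As a minimal usage sketch (illustrative only; `security_agent`, `role`, `user_a`, `user_b`, and `group` are hypothetical objects), callers are expected to pass complete id lists:

    # Replace the role's users with exactly these two and drop all of its group associations.
    security_agent.set_role_user_and_group_associations(role, user_ids=[user_a.id, user_b.id], group_ids=[])
    # Update only the role's groups; omitting user_ids (None) leaves the existing user associations untouched.
    security_agent.set_role_user_and_group_associations(role, group_ids=[group.id])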
@@ -1594,16 +1695,11 @@ class HostAgent(RBACAgent): ucsc_archaea=("lowepub.cse.ucsc.edu",), ) - def __init__(self, model, permitted_actions=None): - self.model = model + def __init__(self, sa_session, permitted_actions=None): + self.sa_session = sa_session if permitted_actions: self.permitted_actions = permitted_actions - @property - def sa_session(self): - """Returns a SQLAlchemy session""" - return self.model.context - def allow_action(self, addr, action, **kwd): if "dataset" in kwd and action == self.permitted_actions.DATASET_ACCESS: hda = kwd["dataset"] @@ -1664,7 +1760,7 @@ def set_dataset_permissions(self, hda, user, site): if hdadaa: hdadaa.update_time = datetime.utcnow() else: - hdadaa = self.model.HistoryDatasetAssociationDisplayAtAuthorization(hda=hda, user=user, site=site) + hdadaa = HistoryDatasetAssociationDisplayAtAuthorization(hda=hda, user=user, site=site) self.sa_session.add(hdadaa) with transaction(self.sa_session): self.sa_session.commit() @@ -1677,3 +1773,31 @@ def _walk_action_roles(permissions, query_action): yield action, roles elif action == query_action.action and roles: yield action, roles + + +def is_unique_constraint_violation(error): + # A more elegant way to handle sqlite is this: + # if hasattr(error.orig, "sqlite_errorname"): + # return error.orig.sqlite_errorname == "SQLITE_CONSTRAINT_UNIQUE" + # However, that's only possible with Python 3.11+ + # https://docs.python.org/3/library/sqlite3.html#sqlite3.Error.sqlite_errorcode + if isinstance(error.orig, sqlite3.IntegrityError): + return error.orig.args[0].startswith("UNIQUE constraint failed") + else: + # If this is a PostgreSQL unique constraint, then error.orig is an instance of psycopg2.errors.UniqueViolation + # and should have an attribute `pgcode` = 23505. + return int(getattr(error.orig, "pgcode", -1)) == 23505 + + +def is_foreign_key_violation(error): + # A more elegant way to handle sqlite is this: + # if hasattr(error.orig, "sqlite_errorname"): + # return error.orig.sqlite_errorname == "SQLITE_CONSTRAINT_FOREIGNKEY" + # However, that's only possible with Python 3.11+ + # https://docs.python.org/3/library/sqlite3.html#sqlite3.Error.sqlite_errorcode + if isinstance(error.orig, sqlite3.IntegrityError): + return error.orig.args[0] == "FOREIGN KEY constraint failed" + else: + # If this is a PostgreSQL foreign key error, then error.orig is an instance of psycopg2.errors.ForeignKeyViolation + # and should have an attribute `pgcode` = 23503.
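+ # psycopg2 exposes `pgcode` as a SQLSTATE string (e.g. "23503"), hence the int() coercion; -1 is the fallback for errors without a pgcode attribute.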
+ return int(getattr(error.orig, "pgcode", -1)) == 23503 diff --git a/lib/galaxy/model/store/__init__.py b/lib/galaxy/model/store/__init__.py index a27e8c57b10c..a3bf6a6ec896 100644 --- a/lib/galaxy/model/store/__init__.py +++ b/lib/galaxy/model/store/__init__.py @@ -1023,7 +1023,7 @@ def _reassign_hids(self, object_import_tracker: "ObjectImportTracker", history: if object_import_tracker.copy_hid_for: # in an if to avoid flush if unneeded - for from_dataset, to_dataset in object_import_tracker.copy_hid_for.items(): + for from_dataset, to_dataset in object_import_tracker.copy_hid_for: to_dataset.hid = from_dataset.hid self._session_add(to_dataset) self._flush() @@ -1276,18 +1276,24 @@ def _import_implicit_dataset_conversions(self, object_import_tracker: "ObjectImp metadata_safe = False idc = model.ImplicitlyConvertedDatasetAssociation(metadata_safe=metadata_safe, for_import=True) idc.type = idc_attrs["file_type"] - if idc_attrs.get("parent_hda"): - idc.parent_hda = object_import_tracker.hdas_by_key[idc_attrs["parent_hda"]] + # We may not have exported the parent, so only set the parent_hda attribute if we did. + if (parent_hda_id := idc_attrs.get("parent_hda")) and ( + parent_hda := object_import_tracker.hdas_by_key.get(parent_hda_id) + ): + # exports created prior to 24.2 may not have a parent if the parent had been purged + idc.parent_hda = parent_hda if idc_attrs.get("hda"): idc.dataset = object_import_tracker.hdas_by_key[idc_attrs["hda"]] - # we have a the dataset and the parent, lets ensure they land up with the same HID - if idc.dataset and idc.parent_hda and idc.parent_hda in object_import_tracker.requires_hid: + # we have the dataset and the parent, lets ensure they land up with the same HID + if idc.dataset and idc.parent_hda: try: object_import_tracker.requires_hid.remove(idc.dataset) except ValueError: pass # we wanted to remove it anyway. - object_import_tracker.copy_hid_for[idc.parent_hda] = idc.dataset + # An HDA can be the parent of multiple implicitly converted datasets, + # that's why we use [(source, target)] here + object_import_tracker.copy_hid_for.append((idc.parent_hda, idc.dataset)) self._session_add(idc) @@ -1370,7 +1376,7 @@ class ObjectImportTracker: hdca_copied_from_sinks: Dict[ObjectKeyType, ObjectKeyType] jobs_by_key: Dict[ObjectKeyType, model.Job] requires_hid: List["HistoryItem"] - copy_hid_for: Dict["HistoryItem", "HistoryItem"] + copy_hid_for: List[Tuple["HistoryItem", "HistoryItem"]] def __init__(self) -> None: self.libraries_by_key = {} @@ -1388,7 +1394,7 @@ def __init__(self) -> None: self.implicit_collection_jobs_by_key: Dict[str, ImplicitCollectionJobs] = {} self.workflows_by_key: Dict[str, model.Workflow] = {} self.requires_hid = [] - self.copy_hid_for = {} + self.copy_hid_for = [] self.new_history: Optional[model.History] = None @@ -2301,6 +2307,14 @@ def add_implicit_conversion_dataset( include_files: bool, conversion: model.ImplicitlyConvertedDatasetAssociation, ) -> None: + parent_hda = conversion.parent_hda + if parent_hda and parent_hda not in self.included_datasets: + # We should always include the parent of an implicit conversion + # to avoid holes in the provenance.
+ self.included_datasets[parent_hda] = (parent_hda, include_files) + grand_parent_association = parent_hda.implicitly_converted_parent_datasets + if grand_parent_association and (grand_parent_hda := grand_parent_association[0].parent_hda): + self.add_implicit_conversion_dataset(grand_parent_hda, include_files, grand_parent_association[0]) self.included_datasets[dataset] = (dataset, include_files) self.dataset_implicit_conversions[dataset] = conversion diff --git a/lib/galaxy/model/unittest_utils/utils.py b/lib/galaxy/model/unittest_utils/utils.py new file mode 100644 index 000000000000..c558b52b51de --- /dev/null +++ b/lib/galaxy/model/unittest_utils/utils.py @@ -0,0 +1,13 @@ +import random +import string + + +def random_str() -> str: + alphabet = string.ascii_lowercase + string.digits + size = random.randint(5, 10) + return "".join(random.choices(alphabet, k=size)) + + +def random_email() -> str: + text = random_str() + return f"{text}@galaxy.testing" diff --git a/lib/galaxy/schema/groups.py b/lib/galaxy/schema/groups.py index 1a4bde58f764..b513ba26fa41 100644 --- a/lib/galaxy/schema/groups.py +++ b/lib/galaxy/schema/groups.py @@ -73,5 +73,18 @@ class GroupCreatePayload(Model): @partial_model() -class GroupUpdatePayload(GroupCreatePayload): - pass +class GroupUpdatePayload(Model): + """Payload schema for updating a group.""" + + name: str = Field( + ..., + title="name of the group", + ) + user_ids: Optional[List[DecodedDatabaseIdField]] = Field( + None, + title="user IDs", + ) + role_ids: Optional[List[DecodedDatabaseIdField]] = Field( + None, + title="role IDs", + ) diff --git a/lib/galaxy/security/__init__.py b/lib/galaxy/security/__init__.py index 0c1082830259..94e8948042b4 100644 --- a/lib/galaxy/security/__init__.py +++ b/lib/galaxy/security/__init__.py @@ -95,9 +95,6 @@ def can_change_object_store_id(self, user, dataset): def can_manage_library_item(self, roles, item): raise Exception("Unimplemented Method") - def associate_components(self, **kwd): - raise Exception(f"No valid method of associating provided components: {kwd}") - def create_private_user_role(self, user): raise Exception("Unimplemented Method") diff --git a/lib/galaxy/tool_util/models.py b/lib/galaxy/tool_util/models.py index deafef1a5243..4f1ea35670c6 100644 --- a/lib/galaxy/tool_util/models.py +++ b/lib/galaxy/tool_util/models.py @@ -5,11 +5,23 @@ """ from typing import ( + Any, + Dict, List, Optional, + Union, ) -from pydantic import BaseModel +from pydantic import ( + AnyUrl, + BaseModel, + ConfigDict, + RootModel, +) +from typing_extensions import ( + NotRequired, + TypedDict, +) from .parameters import ( input_models_for_tool_source, @@ -18,6 +30,7 @@ from .parser.interface import ( Citation, HelpContent, + OutputCompareType, ToolSource, XrefDict, ) @@ -25,6 +38,7 @@ from_tool_source, ToolOutput, ) +from .verify.assertion_models import assertions class ParsedTool(BaseModel): @@ -73,3 +87,85 @@ def parse_tool(tool_source: ToolSource) -> ParsedTool: xrefs=xrefs, help=help, ) + + +class StrictModel(BaseModel): + + model_config = ConfigDict( + extra="forbid", + ) + + +class BaseTestOutputModel(StrictModel): + file: Optional[str] = None + path: Optional[str] = None + location: Optional[AnyUrl] = None + ftype: Optional[str] = None + sort: Optional[bool] = None + compare: Optional[OutputCompareType] = None + checksum: Optional[str] = None + metadata: Optional[Dict[str, Any]] = None + asserts: Optional[assertions] = None + delta: Optional[int] = None + delta_frac: Optional[float] = None + lines_diff: Optional[int] = 
None + decompress: Optional[bool] = None + + +class TestDataOutputAssertions(BaseTestOutputModel): + pass + + +class TestCollectionCollectionElementAssertions(StrictModel): + elements: Optional[Dict[str, "TestCollectionElementAssertion"]] = None + element_tests: Optional[Dict[str, "TestCollectionElementAssertion"]] = None + + +class TestCollectionDatasetElementAssertions(BaseTestOutputModel): + pass + + +TestCollectionElementAssertion = Union[ + TestCollectionDatasetElementAssertions, TestCollectionCollectionElementAssertions +] +TestCollectionCollectionElementAssertions.model_rebuild() + + +class CollectionAttributes(StrictModel): + collection_type: Optional[str] = None + + +class TestCollectionOutputAssertions(StrictModel): + elements: Optional[Dict[str, TestCollectionElementAssertion]] = None + element_tests: Optional[Dict[str, "TestCollectionElementAssertion"]] = None + attributes: Optional[CollectionAttributes] = None + + +TestOutputLiteral = Union[bool, int, float, str] + +TestOutputAssertions = Union[TestCollectionOutputAssertions, TestDataOutputAssertions, TestOutputLiteral] + +JobDict = Dict[str, Any] + + +class TestJob(StrictModel): + doc: Optional[str] + job: JobDict + outputs: Dict[str, TestOutputAssertions] + + +Tests = RootModel[List[TestJob]] + +# TODO: typed dict versions of all thee above for verify code - make this Dict[str, Any] here more +# specific. +OutputChecks = Union[TestOutputLiteral, Dict[str, Any]] +OutputsDict = Dict[str, OutputChecks] + + +class TestJobDict(TypedDict): + doc: NotRequired[str] + job: NotRequired[JobDict] + outputs: OutputsDict + + +TestDicts = List[TestJobDict] diff --git a/lib/galaxy/tool_util/parser/interface.py b/lib/galaxy/tool_util/parser/interface.py index 50ec9ed30d17..c137955dbeb1 100644 --- a/lib/galaxy/tool_util/parser/interface.py +++ b/lib/galaxy/tool_util/parser/interface.py @@ -5,6 +5,7 @@ ABCMeta, abstractmethod, ) +from enum import Enum from os.path import join from typing import ( Any, @@ -49,9 +50,18 @@ class AssertionDict(TypedDict): XmlInt = Union[str, int] +class OutputCompareType(str, Enum): + diff = "diff" + re_match = "re_match" + sim_size = "sim_size" + re_match_multiline = "re_match_multiline" + contains = "contains" + image_diff = "image_diff" + + class ToolSourceTestOutputAttributes(TypedDict): object: NotRequired[Optional[Any]] - compare: str + compare: OutputCompareType lines_diff: int delta: int delta_frac: Optional[float] diff --git a/lib/galaxy/tool_util/parser/xml.py b/lib/galaxy/tool_util/parser/xml.py index c61b178e64c7..a89754553f84 100644 --- a/lib/galaxy/tool_util/parser/xml.py +++ b/lib/galaxy/tool_util/parser/xml.py @@ -43,6 +43,7 @@ DynamicOptions, HelpContent, InputSource, + OutputCompareType, PageSource, PagesSource, RequiredFiles, @@ -834,7 +835,7 @@ def __parse_test_attributes( value_object = json.loads(attrib.pop("value_json")) # Method of comparison - compare: str = attrib.pop("compare", "diff").lower() + compare: OutputCompareType = cast(OutputCompareType, attrib.pop("compare", "diff").lower()) # Number of lines to allow to vary in logs (for dates, etc) lines_diff: int = int(attrib.pop("lines_diff", "0")) # Allow a file size to vary if sim_size compare diff --git a/lib/galaxy/tool_util/validate_test_format.py b/lib/galaxy/tool_util/validate_test_format.py new file mode 100644 index 000000000000..fd9e055b3789 --- /dev/null +++ b/lib/galaxy/tool_util/validate_test_format.py @@ -0,0 +1,40 @@ +#!/usr/bin/env python + +import argparse +import sys + +import yaml + +from galaxy.tool_util.models import 
Tests + +DESCRIPTION = """ +A small utility to verify the Planemo test format. + +This script doesn't use semantic information about tools or workflows so only +the structure of the file is checked and things like inputs matching up is not +included. +""" + + +def validate_test_file(test_file: str) -> None: + with open(test_file) as f: + json = yaml.safe_load(f) + Tests.model_validate(json) + + +def arg_parser() -> argparse.ArgumentParser: + parser = argparse.ArgumentParser(description=DESCRIPTION) + parser.add_argument("test_file") + return parser + + +def main(argv=None) -> None: + if argv is None: + argv = sys.argv[1:] + + args = arg_parser().parse_args(argv) + validate_test_file(args.test_file) + + +if __name__ == "__main__": + main() diff --git a/lib/galaxy/tool_util/verify/__init__.py b/lib/galaxy/tool_util/verify/__init__.py index fdc4c2044428..6ddfefeb69f8 100644 --- a/lib/galaxy/tool_util/verify/__init__.py +++ b/lib/galaxy/tool_util/verify/__init__.py @@ -597,6 +597,8 @@ def files_image_diff(file1: str, file2: str, attributes: Optional[Dict[str, Any] # TODO: After tool-util with this included is published, fefactor planemo.test._check_output # to use this function. There is already a comment there about breaking fewer abstractions. # https://github.com/galaxyproject/planemo/blob/master/planemo/test/_check_output.py +# TODO: Also migrate the logic for checking non-dictionaries out of Planemo - this function now +# does that check also. def verify_file_path_against_dict( get_filename: GetFilenameT, get_location: GetLocationT, @@ -621,30 +623,38 @@ def verify_file_contents_against_dict( test_properties, test_data_target_dir: Optional[str] = None, ) -> None: - # Support Galaxy-like file location (using "file") or CWL-like ("path" or "location"). - expected_file = test_properties.get("file", None) - if expected_file is None: - expected_file = test_properties.get("path", None) - if expected_file is None: - location = test_properties.get("location") - if location: - if location.startswith(("http://", "https://")): - assert get_location - expected_file = get_location(location) - else: - expected_file = location.split("file://", 1)[-1] - - if "asserts" in test_properties: - test_properties["assert_list"] = to_test_assert_list(test_properties["asserts"]) - verify( - item_label, - output_content, - attributes=test_properties, - filename=expected_file, - get_filename=get_filename, - keep_outputs_dir=test_data_target_dir, - verify_extra_files=None, - ) + expected_file: Optional[str] = None + if isinstance(test_properties, dict): + # Support Galaxy-like file location (using "file") or CWL-like ("path" or "location"). + expected_file = test_properties.get("file", None) + if expected_file is None: + expected_file = test_properties.get("path", None) + if expected_file is None: + location = test_properties.get("location") + if location: + if location.startswith(("http://", "https://")): + assert get_location + expected_file = get_location(location) + else: + expected_file = location.split("file://", 1)[-1] + + if "asserts" in test_properties: + test_properties["assert_list"] = to_test_assert_list(test_properties["asserts"]) + verify( + item_label, + output_content, + attributes=test_properties, + filename=expected_file, + get_filename=get_filename, + keep_outputs_dir=test_data_target_dir, + verify_extra_files=None, + ) + else: + output_value = json.loads(output_content.decode("utf-8")) + if test_properties != output_value: + template = "Output [%s] value [%s] does not match expected value [%s]." 
+ message = template % (item_label, output_value, test_properties) + raise AssertionError(message) __all__ = [ diff --git a/lib/galaxy/tool_util/verify/assertion_models.py b/lib/galaxy/tool_util/verify/assertion_models.py index 5f21e488e52b..142c476ffdb6 100644 --- a/lib/galaxy/tool_util/verify/assertion_models.py +++ b/lib/galaxy/tool_util/verify/assertion_models.py @@ -8,6 +8,7 @@ BeforeValidator, ConfigDict, Field, + model_validator, RootModel, StrictFloat, StrictInt, @@ -80,12 +81,8 @@ def check_non_negative_if_int(v: typing.Any): has_line_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_line_model(AssertionModel): - r"""Asserts the specified output contains the line specified by the - argument line. The exact number of occurrences can be optionally - specified by the argument n""" - - that: Literal["has_line"] = "has_line" +class base_has_line_model(AssertionModel): + """base model for has_line describing attributes.""" line: str = Field( ..., @@ -132,6 +129,20 @@ class has_line_model(AssertionModel): ) +class has_line_model(base_has_line_model): + r"""Asserts the specified output contains the line specified by the + argument line. The exact number of occurrences can be optionally + specified by the argument n""" + + that: Literal["has_line"] = "has_line" + + +class has_line_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_line: base_has_line_model + + has_line_matching_expression_description = """The regular expressions to attempt match in the output.""" has_line_matching_n_description = """Desired number, can be suffixed by ``(k|M|G|T|P|E)i?``""" @@ -147,12 +158,8 @@ class has_line_model(AssertionModel): has_line_matching_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_line_matching_model(AssertionModel): - r"""Asserts the specified output contains a line matching the - regular expression specified by the argument expression. If n is given - the assertion checks for exactly n occurences.""" - - that: Literal["has_line_matching"] = "has_line_matching" +class base_has_line_matching_model(AssertionModel): + """base model for has_line_matching describing attributes.""" expression: str = Field( ..., @@ -199,6 +206,20 @@ class has_line_matching_model(AssertionModel): ) +class has_line_matching_model(base_has_line_matching_model): + r"""Asserts the specified output contains a line matching the + regular expression specified by the argument expression. 
If n is given + the assertion checks for exactly n occurences.""" + + that: Literal["has_line_matching"] = "has_line_matching" + + +class has_line_matching_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_line_matching: base_has_line_matching_model + + has_n_lines_n_description = """Desired number, can be suffixed by ``(k|M|G|T|P|E)i?``""" has_n_lines_delta_description = ( @@ -212,12 +233,8 @@ class has_line_matching_model(AssertionModel): has_n_lines_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_n_lines_model(AssertionModel): - r"""Asserts the specified output contains ``n`` lines allowing - for a difference in the number of lines (delta) - or relative differebce in the number of lines""" - - that: Literal["has_n_lines"] = "has_n_lines" +class base_has_n_lines_model(AssertionModel): + """base model for has_n_lines describing attributes.""" n: Annotated[ typing.Optional[typing.Union[str, int]], @@ -259,6 +276,20 @@ class has_n_lines_model(AssertionModel): ) +class has_n_lines_model(base_has_n_lines_model): + r"""Asserts the specified output contains ``n`` lines allowing + for a difference in the number of lines (delta) + or relative differebce in the number of lines""" + + that: Literal["has_n_lines"] = "has_n_lines" + + +class has_n_lines_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_n_lines: base_has_n_lines_model + + has_text_text_description = """The text to search for in the output.""" has_text_n_description = """Desired number, can be suffixed by ``(k|M|G|T|P|E)i?``""" @@ -274,12 +305,8 @@ class has_n_lines_model(AssertionModel): has_text_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_text_model(AssertionModel): - r"""Asserts specified output contains the substring specified by - the argument text. The exact number of occurrences can be - optionally specified by the argument n""" - - that: Literal["has_text"] = "has_text" +class base_has_text_model(AssertionModel): + """base model for has_text describing attributes.""" text: str = Field( ..., @@ -326,6 +353,20 @@ class has_text_model(AssertionModel): ) +class has_text_model(base_has_text_model): + r"""Asserts specified output contains the substring specified by + the argument text. The exact number of occurrences can be + optionally specified by the argument n""" + + that: Literal["has_text"] = "has_text" + + +class has_text_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_text: base_has_text_model + + has_text_matching_expression_description = """The regular expressions to attempt match in the output.""" has_text_matching_n_description = """Desired number, can be suffixed by ``(k|M|G|T|P|E)i?``""" @@ -341,13 +382,8 @@ class has_text_model(AssertionModel): has_text_matching_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_text_matching_model(AssertionModel): - r"""Asserts the specified output contains text matching the - regular expression specified by the argument expression. 
- If n is given the assertion checks for exacly n (nonoverlapping) - occurences.""" - - that: Literal["has_text_matching"] = "has_text_matching" +class base_has_text_matching_model(AssertionModel): + """base model for has_text_matching describing attributes.""" expression: str = Field( ..., @@ -394,14 +430,26 @@ class has_text_matching_model(AssertionModel): ) -not_has_text_text_description = """The text to search for in the output.""" +class has_text_matching_model(base_has_text_matching_model): + r"""Asserts the specified output contains text matching the + regular expression specified by the argument expression. + If n is given the assertion checks for exacly n (nonoverlapping) + occurences.""" + + that: Literal["has_text_matching"] = "has_text_matching" -class not_has_text_model(AssertionModel): - r"""Asserts specified output does not contain the substring - specified by the argument text""" +class has_text_matching_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_text_matching: base_has_text_matching_model + + +not_has_text_text_description = """The text to search for in the output.""" - that: Literal["not_has_text"] = "not_has_text" + +class base_not_has_text_model(AssertionModel): + """base model for not_has_text describing attributes.""" text: str = Field( ..., @@ -409,6 +457,19 @@ class not_has_text_model(AssertionModel): ) +class not_has_text_model(base_not_has_text_model): + r"""Asserts specified output does not contain the substring + specified by the argument text""" + + that: Literal["not_has_text"] = "not_has_text" + + +class not_has_text_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + not_has_text: base_not_has_text_model + + has_n_columns_n_description = """Desired number, can be suffixed by ``(k|M|G|T|P|E)i?``""" has_n_columns_delta_description = ( @@ -428,19 +489,8 @@ class not_has_text_model(AssertionModel): has_n_columns_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_n_columns_model(AssertionModel): - r"""Asserts tabular output contains the specified - number (``n``) of columns. - - For instance, ````. The assertion tests only the first line. - Number of columns can optionally also be specified with ``delta``. Alternatively the - range of expected occurences can be specified by ``min`` and/or ``max``. - - Optionally a column separator (``sep``, default is `` ``) `and comment character(s) - can be specified (``comment``, default is empty string). The first non-comment - line is used for determining the number of columns.""" - - that: Literal["has_n_columns"] = "has_n_columns" +class base_has_n_columns_model(AssertionModel): + """base model for has_n_columns describing attributes.""" n: Annotated[ typing.Optional[typing.Union[str, int]], @@ -492,30 +542,38 @@ class has_n_columns_model(AssertionModel): ) -attribute_is_path_description = """The Python xpath-like expression to find the target element.""" +class has_n_columns_model(base_has_n_columns_model): + r"""Asserts tabular output contains the specified + number (``n``) of columns. -attribute_is_attribute_description = """The XML attribute name to test against from the target XML element.""" + For instance, ````. The assertion tests only the first line. + Number of columns can optionally also be specified with ``delta``. Alternatively the + range of expected occurences can be specified by ``min`` and/or ``max``. 
-attribute_is_text_description = """The expected attribute value to test against on the target XML element""" + Optionally a column separator (``sep``, default is `` ``) `and comment character(s) + can be specified (``comment``, default is empty string). The first non-comment + line is used for determining the number of columns.""" -attribute_is_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" + that: Literal["has_n_columns"] = "has_n_columns" -class attribute_is_model(AssertionModel): - r"""Asserts the XML ``attribute`` for the element (or tag) with the specified - XPath-like ``path`` is the specified ``text``. +class has_n_columns_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" - For example: + has_n_columns: base_has_n_columns_model - ```xml - - ``` - The assertion implicitly also asserts that an element matching ``path`` exists. - With ``negate`` the result of the assertion (on the equality) can be inverted (the - implicit assertion on the existence of the path is not affected).""" +attribute_is_path_description = """The Python xpath-like expression to find the target element.""" + +attribute_is_attribute_description = """The XML attribute name to test against from the target XML element.""" + +attribute_is_text_description = """The expected attribute value to test against on the target XML element""" + +attribute_is_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" - that: Literal["attribute_is"] = "attribute_is" + +class base_attribute_is_model(AssertionModel): + """base model for attribute_is describing attributes.""" path: str = Field( ..., @@ -538,6 +596,29 @@ class attribute_is_model(AssertionModel): ) +class attribute_is_model(base_attribute_is_model): + r"""Asserts the XML ``attribute`` for the element (or tag) with the specified + XPath-like ``path`` is the specified ``text``. + + For example: + + ```xml + + ``` + + The assertion implicitly also asserts that an element matching ``path`` exists. + With ``negate`` the result of the assertion (on the equality) can be inverted (the + implicit assertion on the existence of the path is not affected).""" + + that: Literal["attribute_is"] = "attribute_is" + + +class attribute_is_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + attribute_is: base_attribute_is_model + + attribute_matches_path_description = """The Python xpath-like expression to find the target element.""" attribute_matches_attribute_description = """The XML attribute name to test against from the target XML element.""" @@ -549,21 +630,8 @@ class attribute_is_model(AssertionModel): attribute_matches_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class attribute_matches_model(AssertionModel): - r"""Asserts the XML ``attribute`` for the element (or tag) with the specified - XPath-like ``path`` matches the regular expression specified by ``expression``. - - For example: - - ```xml - - ``` - - The assertion implicitly also asserts that an element matching ``path`` exists. 
- With ``negate`` the result of the assertion (on the matching) can be inverted (the - implicit assertion on the existence of the path is not affected).""" - - that: Literal["attribute_matches"] = "attribute_matches" +class base_attribute_matches_model(AssertionModel): + """base model for attribute_matches describing attributes.""" path: str = Field( ..., @@ -586,12 +654,59 @@ class attribute_matches_model(AssertionModel): ) +class attribute_matches_model(base_attribute_matches_model): + r"""Asserts the XML ``attribute`` for the element (or tag) with the specified + XPath-like ``path`` matches the regular expression specified by ``expression``. + + For example: + + ```xml + + ``` + + The assertion implicitly also asserts that an element matching ``path`` exists. + With ``negate`` the result of the assertion (on the matching) can be inverted (the + implicit assertion on the existence of the path is not affected).""" + + that: Literal["attribute_matches"] = "attribute_matches" + + +class attribute_matches_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + attribute_matches: base_attribute_matches_model + + element_text_path_description = """The Python xpath-like expression to find the target element.""" element_text_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class element_text_model(AssertionModel): +class base_element_text_model(AssertionModel): + """base model for element_text describing attributes.""" + + path: str = Field( + ..., + description=element_text_path_description, + ) + + negate: typing.Union[bool, str] = Field( + False, + description=element_text_negate_description, + ) + + children: typing.Optional["assertion_list"] = None + asserts: typing.Optional["assertion_list"] = None + + @model_validator(mode="before") + @classmethod + def validate_children(self, data: typing.Any): + if isinstance(data, dict) and "children" not in data and "asserts" not in data: + raise ValueError("At least one of 'children' or 'asserts' must be specified for this assertion type.") + return data + + +class element_text_model(base_element_text_model): r"""This tag allows the developer to recurisively specify additional assertions as child elements about just the text contained in the element specified by the XPath-like ``path``, e.g. 
@@ -609,17 +724,11 @@ class element_text_model(AssertionModel): that: Literal["element_text"] = "element_text" - path: str = Field( - ..., - description=element_text_path_description, - ) - negate: typing.Union[bool, str] = Field( - False, - description=element_text_negate_description, - ) +class element_text_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" - children: "assertion_list" + element_text: base_element_text_model element_text_is_path_description = """The Python xpath-like expression to find the target element.""" @@ -631,7 +740,26 @@ class element_text_model(AssertionModel): element_text_is_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class element_text_is_model(AssertionModel): +class base_element_text_is_model(AssertionModel): + """base model for element_text_is describing attributes.""" + + path: str = Field( + ..., + description=element_text_is_path_description, + ) + + text: str = Field( + ..., + description=element_text_is_text_description, + ) + + negate: typing.Union[bool, str] = Field( + False, + description=element_text_is_negate_description, + ) + + +class element_text_is_model(base_element_text_is_model): r"""Asserts the text of the XML element with the specified XPath-like ``path`` is the specified ``text``. @@ -647,20 +775,11 @@ class element_text_is_model(AssertionModel): that: Literal["element_text_is"] = "element_text_is" - path: str = Field( - ..., - description=element_text_is_path_description, - ) - text: str = Field( - ..., - description=element_text_is_text_description, - ) +class element_text_is_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" - negate: typing.Union[bool, str] = Field( - False, - description=element_text_is_negate_description, - ) + element_text_is: base_element_text_is_model element_text_matches_path_description = """The Python xpath-like expression to find the target element.""" @@ -672,21 +791,8 @@ class element_text_is_model(AssertionModel): ) -class element_text_matches_model(AssertionModel): - r"""Asserts the text of the XML element with the specified XPath-like ``path`` - matches the regular expression defined by ``expression``. - - For example: - - ```xml - - ``` - - The assertion implicitly also asserts that an element matching ``path`` exists. - With ``negate`` the result of the assertion (on the matching) can be inverted (the - implicit assertion on the existence of the path is not affected).""" - - that: Literal["element_text_matches"] = "element_text_matches" +class base_element_text_matches_model(AssertionModel): + """base model for element_text_matches describing attributes.""" path: str = Field( ..., @@ -704,24 +810,38 @@ class element_text_matches_model(AssertionModel): ) -has_element_with_path_path_description = """The Python xpath-like expression to find the target element.""" +class element_text_matches_model(base_element_text_matches_model): + r"""Asserts the text of the XML element with the specified XPath-like ``path`` + matches the regular expression defined by ``expression``. -has_element_with_path_negate_description = ( - """A boolean that can be set to true to negate the outcome of the assertion.""" -) - - -class has_element_with_path_model(AssertionModel): - r"""Asserts the XML output contains at least one element (or tag) with the specified - XPath-like ``path``, e.g. 
+ For example: ```xml - + ``` - With ``negate`` the result of the assertion can be inverted.""" + The assertion implicitly also asserts that an element matching ``path`` exists. + With ``negate`` the result of the assertion (on the matching) can be inverted (the + implicit assertion on the existence of the path is not affected).""" - that: Literal["has_element_with_path"] = "has_element_with_path" + that: Literal["element_text_matches"] = "element_text_matches" + + +class element_text_matches_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + element_text_matches: base_element_text_matches_model + + +has_element_with_path_path_description = """The Python xpath-like expression to find the target element.""" + +has_element_with_path_negate_description = ( + """A boolean that can be set to true to negate the outcome of the assertion.""" +) + + +class base_has_element_with_path_model(AssertionModel): + """base model for has_element_with_path describing attributes.""" path: str = Field( ..., @@ -734,6 +854,25 @@ class has_element_with_path_model(AssertionModel): ) +class has_element_with_path_model(base_has_element_with_path_model): + r"""Asserts the XML output contains at least one element (or tag) with the specified + XPath-like ``path``, e.g. + + ```xml + + ``` + + With ``negate`` the result of the assertion can be inverted.""" + + that: Literal["has_element_with_path"] = "has_element_with_path" + + +class has_element_with_path_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_element_with_path: base_has_element_with_path_model + + has_n_elements_with_path_path_description = """The Python xpath-like expression to find the target element.""" has_n_elements_with_path_n_description = """Desired number, can be suffixed by ``(k|M|G|T|P|E)i?``""" @@ -755,21 +894,8 @@ class has_element_with_path_model(AssertionModel): ) -class has_n_elements_with_path_model(AssertionModel): - r"""Asserts the XML output contains the specified number (``n``, optionally with ``delta``) of elements (or - tags) with the specified XPath-like ``path``. - - For example: - - ```xml - - ``` - - Alternatively to ``n`` and ``delta`` also the ``min`` and ``max`` attributes - can be used to specify the range of the expected number of occurences. - With ``negate`` the result of the assertion can be inverted.""" - - that: Literal["has_n_elements_with_path"] = "has_n_elements_with_path" +class base_has_n_elements_with_path_model(AssertionModel): + """base model for has_n_elements_with_path describing attributes.""" path: str = Field( ..., @@ -816,12 +942,45 @@ class has_n_elements_with_path_model(AssertionModel): ) -class is_valid_xml_model(AssertionModel): +class has_n_elements_with_path_model(base_has_n_elements_with_path_model): + r"""Asserts the XML output contains the specified number (``n``, optionally with ``delta``) of elements (or + tags) with the specified XPath-like ``path``. + + For example: + + ```xml + + ``` + + Alternatively to ``n`` and ``delta`` also the ``min`` and ``max`` attributes + can be used to specify the range of the expected number of occurences. 
+ With ``negate`` the result of the assertion can be inverted.""" + + that: Literal["has_n_elements_with_path"] = "has_n_elements_with_path" + + +class has_n_elements_with_path_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_n_elements_with_path: base_has_n_elements_with_path_model + + +class base_is_valid_xml_model(AssertionModel): + """base model for is_valid_xml describing attributes.""" + + +class is_valid_xml_model(base_is_valid_xml_model): r"""Asserts the output is a valid XML file (e.g. ````).""" that: Literal["is_valid_xml"] = "is_valid_xml" +class is_valid_xml_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + is_valid_xml: base_is_valid_xml_model + + xml_element_path_description = """The Python xpath-like expression to find the target element.""" xml_element_attribute_description = """The XML attribute name to test against from the target XML element.""" @@ -843,40 +1002,8 @@ class is_valid_xml_model(AssertionModel): xml_element_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class xml_element_model(AssertionModel): - r"""Assert if the XML file contains element(s) or tag(s) with the specified - [XPath-like ``path``](https://lxml.de/xpathxslt.html). If ``n`` and ``delta`` - or ``min`` and ``max`` are given also the number of occurences is checked. - - ```xml - - - - - - ``` - - With ``negate="true"`` the outcome of the assertions wrt the precence and number - of ``path`` can be negated. If there are any sub assertions then check them against - - - the content of the attribute ``attribute`` - - the element's text if no attribute is given - - ```xml - - - - - - ``` - - Sub-assertions are not subject to the ``negate`` attribute of ``xml_element``. - If ``all`` is ``true`` then the sub assertions are checked for all occurences. - - Note that all other XML assertions can be expressed by this assertion (Galaxy - also implements the other assertions by calling this one).""" - - that: Literal["xml_element"] = "xml_element" +class base_xml_element_model(AssertionModel): + """base model for xml_element describing attributes.""" path: str = Field( ..., @@ -933,21 +1060,58 @@ class xml_element_model(AssertionModel): ) children: typing.Optional["assertion_list"] = None + asserts: typing.Optional["assertion_list"] = None -has_json_property_with_text_property_description = """The property name to search the JSON document for.""" +class xml_element_model(base_xml_element_model): + r"""Assert if the XML file contains element(s) or tag(s) with the specified + [XPath-like ``path``](https://lxml.de/xpathxslt.html). If ``n`` and ``delta`` + or ``min`` and ``max`` are given also the number of occurences is checked. -has_json_property_with_text_text_description = """The expected text value of the target JSON attribute.""" + ```xml + + + + + + ``` + With ``negate="true"`` the outcome of the assertions wrt the precence and number + of ``path`` can be negated. If there are any sub assertions then check them against -class has_json_property_with_text_model(AssertionModel): - r"""Asserts the JSON document contains a property or key with the specified text (i.e. string) value. + - the content of the attribute ``attribute`` + - the element's text if no attribute is given ```xml - - ```""" + + + + + + ``` + + Sub-assertions are not subject to the ``negate`` attribute of ``xml_element``. + If ``all`` is ``true`` then the sub assertions are checked for all occurences. 
+ + Note that all other XML assertions can be expressed by this assertion (Galaxy + also implements the other assertions by calling this one).""" + + that: Literal["xml_element"] = "xml_element" + + +class xml_element_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + xml_element: base_xml_element_model - that: Literal["has_json_property_with_text"] = "has_json_property_with_text" + +has_json_property_with_text_property_description = """The property name to search the JSON document for.""" + +has_json_property_with_text_text_description = """The expected text value of the target JSON attribute.""" + + +class base_has_json_property_with_text_model(AssertionModel): + """base model for has_json_property_with_text describing attributes.""" property: str = Field( ..., @@ -960,6 +1124,22 @@ class has_json_property_with_text_model(AssertionModel): ) +class has_json_property_with_text_model(base_has_json_property_with_text_model): + r"""Asserts the JSON document contains a property or key with the specified text (i.e. string) value. + + ```xml + + ```""" + + that: Literal["has_json_property_with_text"] = "has_json_property_with_text" + + +class has_json_property_with_text_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_json_property_with_text: base_has_json_property_with_text_model + + has_json_property_with_value_property_description = """The property name to search the JSON document for.""" has_json_property_with_value_value_description = ( @@ -967,14 +1147,8 @@ class has_json_property_with_text_model(AssertionModel): ) -class has_json_property_with_value_model(AssertionModel): - r"""Asserts the JSON document contains a property or key with the specified JSON value. - - ```xml - - ```""" - - that: Literal["has_json_property_with_value"] = "has_json_property_with_value" +class base_has_json_property_with_value_model(AssertionModel): + """base model for has_json_property_with_value describing attributes.""" property: str = Field( ..., @@ -987,19 +1161,29 @@ class has_json_property_with_value_model(AssertionModel): ) -has_h5_attribute_key_description = """HDF5 attribute to check value of.""" +class has_json_property_with_value_model(base_has_json_property_with_value_model): + r"""Asserts the JSON document contains a property or key with the specified JSON value. -has_h5_attribute_value_description = """Expected value of HDF5 attribute to check.""" + ```xml + + ```""" + that: Literal["has_json_property_with_value"] = "has_json_property_with_value" -class has_h5_attribute_model(AssertionModel): - r"""Asserts HDF5 output contains the specified ``value`` for an attribute (``key``), e.g. 
- ```xml - - ```""" +class has_json_property_with_value_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_json_property_with_value: base_has_json_property_with_value_model - that: Literal["has_h5_attribute"] = "has_h5_attribute" + +has_h5_attribute_key_description = """HDF5 attribute to check value of.""" + +has_h5_attribute_value_description = """Expected value of HDF5 attribute to check.""" + + +class base_has_h5_attribute_model(AssertionModel): + """base model for has_h5_attribute describing attributes.""" key: str = Field( ..., @@ -1012,13 +1196,27 @@ class has_h5_attribute_model(AssertionModel): ) -has_h5_keys_keys_description = """HDF5 attributes to check value of as a comma-separated string.""" +class has_h5_attribute_model(base_has_h5_attribute_model): + r"""Asserts HDF5 output contains the specified ``value`` for an attribute (``key``), e.g. + + ```xml + + ```""" + + that: Literal["has_h5_attribute"] = "has_h5_attribute" -class has_h5_keys_model(AssertionModel): - r"""Asserts the specified HDF5 output has the given keys.""" +class has_h5_attribute_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_h5_attribute: base_has_h5_attribute_model - that: Literal["has_h5_keys"] = "has_h5_keys" + +has_h5_keys_keys_description = """HDF5 attributes to check value of as a comma-separated string.""" + + +class base_has_h5_keys_model(AssertionModel): + """base model for has_h5_keys describing attributes.""" keys: str = Field( ..., @@ -1026,6 +1224,18 @@ class has_h5_keys_model(AssertionModel): ) +class has_h5_keys_model(base_has_h5_keys_model): + r"""Asserts the specified HDF5 output has the given keys.""" + + that: Literal["has_h5_keys"] = "has_h5_keys" + + +class has_h5_keys_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_h5_keys: base_has_h5_keys_model + + has_archive_member_path_description = """The regular expression specifying the archive member.""" has_archive_member_all_description = ( @@ -1045,53 +1255,8 @@ class has_h5_keys_model(AssertionModel): has_archive_member_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_archive_member_model(AssertionModel): - r"""This tag allows to check if ``path`` is contained in a compressed file. - - The path is a regular expression that is matched against the full paths of the objects in - the compressed file (remember that "matching" means it is checked if a prefix of - the full path of an archive member is described by the regular expression). - Valid archive formats include ``.zip``, ``.tar``, and ``.tar.gz``. Note that - depending on the archive creation method: - - - full paths of the members may be prefixed with ``./`` - - directories may be treated as empty files - - ```xml - - ``` - - With ``n`` and ``delta`` (or ``min`` and ``max``) assertions on the number of - archive members matching ``path`` can be expressed. The following could be used, - e.g., to assert an archive containing n±1 elements out of which at least - 4 need to have a ``txt`` extension. - - ```xml - - - ``` - - In addition the tag can contain additional assertions as child elements about - the first member in the archive matching the regular expression ``path``. For - instance - - ```xml - - - - ``` - - If the ``all`` attribute is set to ``true`` then all archive members are subject - to the assertions. Note that, archive members matching the ``path`` are sorted - alphabetically. 
- - The ``negate`` attribute of the ``has_archive_member`` assertion only affects - the asserts on the presence and number of matching archive members, but not any - sub-assertions (which can offer the ``negate`` attribute on their own). The - check if the file is an archive at all, which is also done by the function, is - not affected.""" - - that: Literal["has_archive_member"] = "has_archive_member" +class base_has_archive_member_model(AssertionModel): + """base model for has_archive_member describing attributes.""" path: str = Field( ..., @@ -1143,6 +1308,62 @@ class has_archive_member_model(AssertionModel): ) children: typing.Optional["assertion_list"] = None + asserts: typing.Optional["assertion_list"] = None + + +class has_archive_member_model(base_has_archive_member_model): + r"""This tag allows to check if ``path`` is contained in a compressed file. + + The path is a regular expression that is matched against the full paths of the objects in + the compressed file (remember that "matching" means it is checked if a prefix of + the full path of an archive member is described by the regular expression). + Valid archive formats include ``.zip``, ``.tar``, and ``.tar.gz``. Note that + depending on the archive creation method: + + - full paths of the members may be prefixed with ``./`` + - directories may be treated as empty files + + ```xml + + ``` + + With ``n`` and ``delta`` (or ``min`` and ``max``) assertions on the number of + archive members matching ``path`` can be expressed. The following could be used, + e.g., to assert an archive containing n±1 elements out of which at least + 4 need to have a ``txt`` extension. + + ```xml + + + ``` + + In addition the tag can contain additional assertions as child elements about + the first member in the archive matching the regular expression ``path``. For + instance + + ```xml + + + + ``` + + If the ``all`` attribute is set to ``true`` then all archive members are subject + to the assertions. Note that, archive members matching the ``path`` are sorted + alphabetically. + + The ``negate`` attribute of the ``has_archive_member`` assertion only affects + the asserts on the presence and number of matching archive members, but not any + sub-assertions (which can offer the ``negate`` attribute on their own). The + check if the file is an archive at all, which is also done by the function, is + not affected.""" + + that: Literal["has_archive_member"] = "has_archive_member" + + +class has_archive_member_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_archive_member: base_has_archive_member_model has_size_value_description = """Deprecated alias for `size`""" @@ -1160,13 +1381,17 @@ class has_archive_member_model(AssertionModel): has_size_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_size_model(AssertionModel): - r"""Asserts the specified output has a size of the specified value - - Attributes size and value or synonyms though value is considered deprecated. 
- The size optionally allows for absolute (``delta``) difference.""" +class base_has_size_model(AssertionModel): + """base model for has_size describing attributes.""" - that: Literal["has_size"] = "has_size" + value: Annotated[ + typing.Optional[typing.Union[str, int]], + BeforeValidator(check_bytes), + BeforeValidator(check_non_negative_if_int), + ] = Field( + None, + description=has_size_value_description, + ) size: Annotated[ typing.Optional[typing.Union[str, int]], @@ -1208,6 +1433,21 @@ class has_size_model(AssertionModel): ) +class has_size_model(base_has_size_model): + r"""Asserts the specified output has a size of the specified value + + Attributes size and value or synonyms though value is considered deprecated. + The size optionally allows for absolute (``delta``) difference.""" + + that: Literal["has_size"] = "has_size" + + +class has_size_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_size: base_has_size_model + + has_image_center_of_mass_center_of_mass_description = """The required center of mass of the image intensities (horizontal and vertical coordinate, separated by a comma).""" has_image_center_of_mass_channel_description = """Restricts the assertion to a specific channel of the image (where ``0`` corresponds to the first image channel).""" @@ -1217,14 +1457,8 @@ class has_size_model(AssertionModel): ) -class has_image_center_of_mass_model(AssertionModel): - r"""Asserts the specified output is an image and has the specified center of mass. - - Asserts the output is an image and has a specific center of mass, - or has an Euclidean distance of ``eps`` or less to that point (e.g., - ````).""" - - that: Literal["has_image_center_of_mass"] = "has_image_center_of_mass" +class base_has_image_center_of_mass_model(AssertionModel): + """base model for has_image_center_of_mass describing attributes.""" center_of_mass: Annotated[str, BeforeValidator(check_center_of_mass)] = Field( ..., @@ -1242,6 +1476,22 @@ class has_image_center_of_mass_model(AssertionModel): ) +class has_image_center_of_mass_model(base_has_image_center_of_mass_model): + r"""Asserts the specified output is an image and has the specified center of mass. + + Asserts the output is an image and has a specific center of mass, + or has an Euclidean distance of ``eps`` or less to that point (e.g., + ````).""" + + that: Literal["has_image_center_of_mass"] = "has_image_center_of_mass" + + +class has_image_center_of_mass_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_center_of_mass: base_has_image_center_of_mass_model + + has_image_channels_channels_description = """Expected number of channels of the image.""" has_image_channels_delta_description = """Maximum allowed difference of the number of channels (default is 0). The observed number of channels has to be in the range ``value +- delta``.""" @@ -1253,14 +1503,8 @@ class has_image_center_of_mass_model(AssertionModel): has_image_channels_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_image_channels_model(AssertionModel): - r"""Asserts the output is an image and has a specific number of channels. - - The number of channels is plus/minus ``delta`` (e.g., ````). 
- - Alternatively the range of the expected number of channels can be specified by ``min`` and/or ``max``.""" - - that: Literal["has_image_channels"] = "has_image_channels" +class base_has_image_channels_model(AssertionModel): + """base model for has_image_channels describing attributes.""" channels: Annotated[typing.Optional[StrictInt], BeforeValidator(check_non_negative_if_set)] = Field( None, @@ -1288,6 +1532,22 @@ class has_image_channels_model(AssertionModel): ) +class has_image_channels_model(base_has_image_channels_model): + r"""Asserts the output is an image and has a specific number of channels. + + The number of channels is plus/minus ``delta`` (e.g., ````). + + Alternatively the range of the expected number of channels can be specified by ``min`` and/or ``max``.""" + + that: Literal["has_image_channels"] = "has_image_channels" + + +class has_image_channels_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_channels: base_has_image_channels_model + + has_image_height_height_description = """Expected height of the image (in pixels).""" has_image_height_delta_description = """Maximum allowed difference of the image height (in pixels, default is 0). The observed height has to be in the range ``value +- delta``.""" @@ -1299,13 +1559,8 @@ class has_image_channels_model(AssertionModel): has_image_height_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_image_height_model(AssertionModel): - r"""Asserts the output is an image and has a specific height (in pixels). - - The height is plus/minus ``delta`` (e.g., ````). - Alternatively the range of the expected height can be specified by ``min`` and/or ``max``.""" - - that: Literal["has_image_height"] = "has_image_height" +class base_has_image_height_model(AssertionModel): + """base model for has_image_height describing attributes.""" height: Annotated[typing.Optional[StrictInt], BeforeValidator(check_non_negative_if_set)] = Field( None, @@ -1333,6 +1588,21 @@ class has_image_height_model(AssertionModel): ) +class has_image_height_model(base_has_image_height_model): + r"""Asserts the output is an image and has a specific height (in pixels). + + The height is plus/minus ``delta`` (e.g., ````). + Alternatively the range of the expected height can be specified by ``min`` and/or ``max``.""" + + that: Literal["has_image_height"] = "has_image_height" + + +class has_image_height_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_height: base_has_image_height_model + + has_image_mean_intensity_channel_description = """Restricts the assertion to a specific channel of the image (where ``0`` corresponds to the first image channel).""" has_image_mean_intensity_mean_intensity_description = """The required mean value of the image intensities.""" @@ -1344,13 +1614,8 @@ class has_image_height_model(AssertionModel): has_image_mean_intensity_max_description = """An upper bound of the required mean value of the image intensities.""" -class has_image_mean_intensity_model(AssertionModel): - r"""Asserts the output is an image and has a specific mean intensity value. - - The mean intensity value is plus/minus ``eps`` (e.g., ````). 
- Alternatively the range of the expected mean intensity value can be specified by ``min`` and/or ``max``.""" - - that: Literal["has_image_mean_intensity"] = "has_image_mean_intensity" +class base_has_image_mean_intensity_model(AssertionModel): + """base model for has_image_mean_intensity describing attributes.""" channel: typing.Optional[StrictInt] = Field( None, @@ -1378,6 +1643,21 @@ class has_image_mean_intensity_model(AssertionModel): ) +class has_image_mean_intensity_model(base_has_image_mean_intensity_model): + r"""Asserts the output is an image and has a specific mean intensity value. + + The mean intensity value is plus/minus ``eps`` (e.g., ````). + Alternatively the range of the expected mean intensity value can be specified by ``min`` and/or ``max``.""" + + that: Literal["has_image_mean_intensity"] = "has_image_mean_intensity" + + +class has_image_mean_intensity_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_mean_intensity: base_has_image_mean_intensity_model + + has_image_mean_object_size_channel_description = """Restricts the assertion to a specific channel of the image (where ``0`` corresponds to the first image channel).""" has_image_mean_object_size_labels_description = """List of labels, separated by a comma. Labels *not* on this list will be excluded from consideration. Cannot be used in combination with ``exclude_labels``.""" @@ -1397,14 +1677,8 @@ class has_image_mean_intensity_model(AssertionModel): ) -class has_image_mean_object_size_model(AssertionModel): - r"""Asserts the output is an image with labeled objects which have the specified mean size (number of pixels), - - The mean size is plus/minus ``eps`` (e.g., ````). - - The labels must be unique.""" - - that: Literal["has_image_mean_object_size"] = "has_image_mean_object_size" +class base_has_image_mean_object_size_model(AssertionModel): + """base model for has_image_mean_object_size describing attributes.""" channel: typing.Optional[StrictInt] = Field( None, @@ -1448,6 +1722,22 @@ class has_image_mean_object_size_model(AssertionModel): ) +class has_image_mean_object_size_model(base_has_image_mean_object_size_model): + r"""Asserts the output is an image with labeled objects which have the specified mean size (number of pixels), + + The mean size is plus/minus ``eps`` (e.g., ````). + + The labels must be unique.""" + + that: Literal["has_image_mean_object_size"] = "has_image_mean_object_size" + + +class has_image_mean_object_size_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_mean_object_size: base_has_image_mean_object_size_model + + has_image_n_labels_channel_description = """Restricts the assertion to a specific channel of the image (where ``0`` corresponds to the first image channel).""" has_image_n_labels_labels_description = """List of labels, separated by a comma. Labels *not* on this list will be excluded from consideration. Cannot be used in combination with ``exclude_labels``.""" @@ -1465,15 +1755,8 @@ class has_image_mean_object_size_model(AssertionModel): has_image_n_labels_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_image_n_labels_model(AssertionModel): - r"""Asserts the output is an image and has the specified labels. - - Labels can be a number of labels or unique values (e.g., - ````). 
- - The primary usage of this assertion is to verify the number of objects in images with uniquely labeled objects.""" - - that: Literal["has_image_n_labels"] = "has_image_n_labels" +class base_has_image_n_labels_model(AssertionModel): + """base model for has_image_n_labels describing attributes.""" channel: typing.Optional[StrictInt] = Field( None, @@ -1516,6 +1799,23 @@ class has_image_n_labels_model(AssertionModel): ) +class has_image_n_labels_model(base_has_image_n_labels_model): + r"""Asserts the output is an image and has the specified labels. + + Labels can be a number of labels or unique values (e.g., + ````). + + The primary usage of this assertion is to verify the number of objects in images with uniquely labeled objects.""" + + that: Literal["has_image_n_labels"] = "has_image_n_labels" + + +class has_image_n_labels_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_n_labels: base_has_image_n_labels_model + + has_image_width_width_description = """Expected width of the image (in pixels).""" has_image_width_delta_description = """Maximum allowed difference of the image width (in pixels, default is 0). The observed width has to be in the range ``value +- delta``.""" @@ -1527,13 +1827,8 @@ class has_image_n_labels_model(AssertionModel): has_image_width_negate_description = """A boolean that can be set to true to negate the outcome of the assertion.""" -class has_image_width_model(AssertionModel): - r"""Asserts the output is an image and has a specific width (in pixels). - - The width is plus/minus ``delta`` (e.g., ````). - Alternatively the range of the expected width can be specified by ``min`` and/or ``max``.""" - - that: Literal["has_image_width"] = "has_image_width" +class base_has_image_width_model(AssertionModel): + """base model for has_image_width describing attributes.""" width: Annotated[typing.Optional[StrictInt], BeforeValidator(check_non_negative_if_set)] = Field( None, @@ -1561,7 +1856,22 @@ class has_image_width_model(AssertionModel): ) -any_assertion_model = Annotated[ +class has_image_width_model(base_has_image_width_model): + r"""Asserts the output is an image and has a specific width (in pixels). + + The width is plus/minus ``delta`` (e.g., ````). 
+ Alternatively the range of the expected width can be specified by ``min`` and/or ``max``.""" + + that: Literal["has_image_width"] = "has_image_width" + + +class has_image_width_model_nested(AssertionModel): + r"""Nested version of this assertion model.""" + + has_image_width: base_has_image_width_model + + +any_assertion_model_flat = Annotated[ typing.Union[ has_line_model, has_line_matching_model, @@ -1596,4 +1906,100 @@ class has_image_width_model(AssertionModel): Field(discriminator="that"), ] -assertion_list = RootModel[typing.List[any_assertion_model]] +any_assertion_model_nested = typing.Union[ + has_line_model_nested, + has_line_matching_model_nested, + has_n_lines_model_nested, + has_text_model_nested, + has_text_matching_model_nested, + not_has_text_model_nested, + has_n_columns_model_nested, + attribute_is_model_nested, + attribute_matches_model_nested, + element_text_model_nested, + element_text_is_model_nested, + element_text_matches_model_nested, + has_element_with_path_model_nested, + has_n_elements_with_path_model_nested, + is_valid_xml_model_nested, + xml_element_model_nested, + has_json_property_with_text_model_nested, + has_json_property_with_value_model_nested, + has_h5_attribute_model_nested, + has_h5_keys_model_nested, + has_archive_member_model_nested, + has_size_model_nested, + has_image_center_of_mass_model_nested, + has_image_channels_model_nested, + has_image_height_model_nested, + has_image_mean_intensity_model_nested, + has_image_mean_object_size_model_nested, + has_image_n_labels_model_nested, + has_image_width_model_nested, +] + +assertion_list = RootModel[typing.List[typing.Union[any_assertion_model_flat, any_assertion_model_nested]]] + + +class assertion_dict(AssertionModel): + + has_line: typing.Optional[base_has_line_model] = None + + has_line_matching: typing.Optional[base_has_line_matching_model] = None + + has_n_lines: typing.Optional[base_has_n_lines_model] = None + + has_text: typing.Optional[base_has_text_model] = None + + has_text_matching: typing.Optional[base_has_text_matching_model] = None + + not_has_text: typing.Optional[base_not_has_text_model] = None + + has_n_columns: typing.Optional[base_has_n_columns_model] = None + + attribute_is: typing.Optional[base_attribute_is_model] = None + + attribute_matches: typing.Optional[base_attribute_matches_model] = None + + element_text: typing.Optional[base_element_text_model] = None + + element_text_is: typing.Optional[base_element_text_is_model] = None + + element_text_matches: typing.Optional[base_element_text_matches_model] = None + + has_element_with_path: typing.Optional[base_has_element_with_path_model] = None + + has_n_elements_with_path: typing.Optional[base_has_n_elements_with_path_model] = None + + is_valid_xml: typing.Optional[base_is_valid_xml_model] = None + + xml_element: typing.Optional[base_xml_element_model] = None + + has_json_property_with_text: typing.Optional[base_has_json_property_with_text_model] = None + + has_json_property_with_value: typing.Optional[base_has_json_property_with_value_model] = None + + has_h5_attribute: typing.Optional[base_has_h5_attribute_model] = None + + has_h5_keys: typing.Optional[base_has_h5_keys_model] = None + + has_archive_member: typing.Optional[base_has_archive_member_model] = None + + has_size: typing.Optional[base_has_size_model] = None + + has_image_center_of_mass: typing.Optional[base_has_image_center_of_mass_model] = None + + has_image_channels: typing.Optional[base_has_image_channels_model] = None + + has_image_height: 
typing.Optional[base_has_image_height_model] = None + + has_image_mean_intensity: typing.Optional[base_has_image_mean_intensity_model] = None + + has_image_mean_object_size: typing.Optional[base_has_image_mean_object_size_model] = None + + has_image_n_labels: typing.Optional[base_has_image_n_labels_model] = None + + has_image_width: typing.Optional[base_has_image_width_model] = None + + +assertions = typing.Union[assertion_list, assertion_dict] diff --git a/lib/galaxy/tool_util/verify/asserts/size.py b/lib/galaxy/tool_util/verify/asserts/size.py index e4b3e8a6ef1f..0e3eebe06f86 100644 --- a/lib/galaxy/tool_util/verify/asserts/size.py +++ b/lib/galaxy/tool_util/verify/asserts/size.py @@ -14,9 +14,7 @@ def assert_has_size( output_bytes: OutputBytes, - value: Annotated[ - OptionalXmlInt, AssertionParameter("Deprecated alias for `size`", xml_type="Bytes", deprecated=True) - ] = None, + value: Annotated[OptionalXmlInt, AssertionParameter("Deprecated alias for `size`", xml_type="Bytes")] = None, size: Annotated[ OptionalXmlInt, AssertionParameter( diff --git a/lib/galaxy/tool_util/verify/codegen.py b/lib/galaxy/tool_util/verify/codegen.py index 7219d40b6dcb..2e93d29ccbf0 100644 --- a/lib/galaxy/tool_util/verify/codegen.py +++ b/lib/galaxy/tool_util/verify/codegen.py @@ -55,6 +55,7 @@ BeforeValidator, ConfigDict, Field, + model_validator, RootModel, StrictFloat, StrictInt, @@ -113,9 +114,8 @@ def check_non_negative_if_int(v: typing.Any): {{assertion.name}}_{{ parameter.name }}_description = '''{{ parameter.description }}''' {% endfor %} -class {{assertion.name}}_model(AssertionModel): - r\"\"\"{{ assertion.docstring }}\"\"\" - that: Literal["{{assertion.name}}"] = "{{assertion.name}}" +class base_{{assertion.name}}_model(AssertionModel): + '''base model for {{assertion.name}} describing attributes.''' {% for parameter in assertion.parameters %} {% if not parameter.is_deprecated %} {{ parameter.name }}: {{ parameter.type_str }} = Field( @@ -124,21 +124,52 @@ class {{assertion.name}}_model(AssertionModel): ) {% endif %} {% endfor %} +{% if assertion.children in ["required", "allowed"] %} + children: typing.Optional["assertion_list"] = None + asserts: typing.Optional["assertion_list"] = None + {% if assertion.children == "required" %} - children: "assertion_list" + @model_validator(mode='before') + @classmethod + def validate_children(self, data: typing.Any): + if isinstance(data, dict) and 'children' not in data and 'asserts' not in data: + raise ValueError("At least one of 'children' or 'asserts' must be specified for this assertion type.") + return data {% endif %} -{% if assertion.children == "allowed" %} - children: typing.Optional["assertion_list"] = None {% endif %} + + +class {{assertion.name}}_model(base_{{assertion.name}}_model): + r\"\"\"{{ assertion.docstring }}\"\"\" + that: Literal["{{assertion.name}}"] = "{{assertion.name}}" + +class {{assertion.name}}_model_nested(AssertionModel): + r\"\"\"Nested version of this assertion model.\"\"\" + {{assertion.name}}: base_{{assertion.name}}_model {% endfor %} -any_assertion_model = Annotated[typing.Union[ +any_assertion_model_flat = Annotated[typing.Union[ {% for assertion in assertions %} {{assertion.name}}_model, {% endfor %} ], Field(discriminator="that")] -assertion_list = RootModel[typing.List[any_assertion_model]] +any_assertion_model_nested = typing.Union[ +{% for assertion in assertions %} + {{assertion.name}}_model_nested, +{% endfor %} +] + +assertion_list = RootModel[typing.List[typing.Union[any_assertion_model_flat, 
any_assertion_model_nested]]] + + +class assertion_dict(AssertionModel): +{% for assertion in assertions %} + {{assertion.name}}: typing.Optional[base_{{assertion.name}}_model] = None +{% endfor %} + + +assertions = typing.Union[assertion_list, assertion_dict] """ diff --git a/lib/galaxy/tools/actions/__init__.py b/lib/galaxy/tools/actions/__init__.py index 777db9a53bbf..841eea988d49 100644 --- a/lib/galaxy/tools/actions/__init__.py +++ b/lib/galaxy/tools/actions/__init__.py @@ -20,6 +20,7 @@ from galaxy import model from galaxy.exceptions import ( + AuthenticationRequired, ItemAccessibilityException, RequestParameterInvalidException, ) @@ -726,14 +727,6 @@ def handle_output(name, output, hidden=None): # Remap any outputs if this is a rerun and the user chose to continue dependent jobs # This functionality requires tracking jobs in the database. if app.config.track_jobs_in_database and rerun_remap_job_id is not None: - # Need to flush here so that referencing outputs by id works - session = trans.sa_session() - try: - session.expire_on_commit = False - with transaction(session): - session.commit() - finally: - session.expire_on_commit = True self._remap_job_on_rerun( trans=trans, galaxy_session=galaxy_session, @@ -774,7 +767,14 @@ def handle_output(name, output, hidden=None): return job, out_data, history - def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current_job, out_data): + def _remap_job_on_rerun( + self, + trans: ProvidesHistoryContext, + galaxy_session: Optional[model.GalaxySession], + rerun_remap_job_id: int, + current_job: Job, + out_data, + ): """ Re-connect dependent datasets for a job that is being rerun (because it failed initially). @@ -782,22 +782,39 @@ def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current To be able to resume jobs that depend on this jobs output datasets we change the dependent's job input datasets to be those of the job that is being rerun. """ + old_job = trans.sa_session.get(Job, rerun_remap_job_id) + if not old_job: + # I don't think that can really happen + raise RequestParameterInvalidException("rerun_remap_job_id parameter is invalid") + old_tool = trans.app.toolbox.get_tool(old_job.tool_id, exact=False) + new_tool = trans.app.toolbox.get_tool(current_job.tool_id, exact=False) + if old_tool and new_tool and old_tool.old_id != new_tool.old_id: + # If we currently only have the old or new tool installed we'll find the other tool anyway with `exact=False`. + # If we don't have the tool at all we'll fail anyway, no need to worry here. + raise RequestParameterInvalidException( + f"Old tool id ({old_job.tool_id}) does not match rerun tool id ({current_job.tool_id})" + ) + if trans.user is not None: + if old_job.user_id != trans.user.id: + raise RequestParameterInvalidException( + "Cannot remap job dependencies for job not created by current user." + ) + elif trans.user is None and galaxy_session: + if old_job.session_id != galaxy_session.id: + raise RequestParameterInvalidException( + "Cannot remap job dependencies for job not created by current user." 
+ ) + else: + raise AuthenticationRequired("Authentication required to remap job dependencies") + # Need to flush here so that referencing outputs by id works + session = trans.sa_session() + try: + session.expire_on_commit = False + with transaction(session): + session.commit() + finally: + session.expire_on_commit = True try: - old_job = trans.sa_session.get(Job, rerun_remap_job_id) - assert old_job is not None, f"({rerun_remap_job_id}/{current_job.id}): Old job id is invalid" - assert ( - old_job.tool_id == current_job.tool_id - ), f"({old_job.id}/{current_job.id}): Old tool id ({old_job.tool_id}) does not match rerun tool id ({current_job.tool_id})" - if trans.user is not None: - assert ( - old_job.user_id == trans.user.id - ), f"({old_job.id}/{current_job.id}): Old user id ({old_job.user_id}) does not match rerun user id ({trans.user.id})" - elif trans.user is None and isinstance(galaxy_session, trans.model.GalaxySession): - assert ( - old_job.session_id == galaxy_session.id - ), f"({old_job.id}/{current_job.id}): Old session id ({old_job.session_id}) does not match rerun session id ({galaxy_session.id})" - else: - raise Exception(f"({old_job.id}/{current_job.id}): Remapping via the API is not (yet) supported") # Start by hiding current job outputs before taking over the old job's (implicit) outputs. current_job.hide_outputs(flush=False) # Duplicate PJAs before remap. @@ -819,7 +836,7 @@ def _remap_job_on_rerun(self, trans, galaxy_session, rerun_remap_job_id, current for jtod in old_job.output_datasets: for job_to_remap, jtid in [(jtid.job, jtid) for jtid in jtod.dataset.dependent_jobs]: if (trans.user is not None and job_to_remap.user_id == trans.user.id) or ( - trans.user is None and job_to_remap.session_id == galaxy_session.id + trans.user is None and galaxy_session and job_to_remap.session_id == galaxy_session.id ): self.__remap_parameters(job_to_remap, jtid, jtod, out_data) trans.sa_session.add(job_to_remap) diff --git a/lib/galaxy/tools/parameters/basic.py b/lib/galaxy/tools/parameters/basic.py index 9669b62771e9..1a099bfe7061 100644 --- a/lib/galaxy/tools/parameters/basic.py +++ b/lib/galaxy/tools/parameters/basic.py @@ -1468,8 +1468,8 @@ def get_column_list(self, trans, other_values): # Use representative dataset if a dataset collection is parsed if isinstance(dataset, HistoryDatasetCollectionAssociation): dataset = dataset.to_hda_representative() - if isinstance(dataset, DatasetCollectionElement) and dataset.hda: - dataset = dataset.hda + if isinstance(dataset, DatasetCollectionElement): + dataset = dataset.first_dataset_instance() if isinstance(dataset, HistoryDatasetAssociation) and self.ref_input and self.ref_input.formats: direct_match, target_ext, converted_dataset = dataset.find_conversion_destination( self.ref_input.formats @@ -1507,6 +1507,9 @@ def get_options(self, trans, other_values): Show column labels rather than c1..cn if use_header_names=True """ options: List[Tuple[str, Union[str, Tuple[str, str]], bool]] = [] + column_list = self.get_column_list(trans, other_values) + if not column_list: + return options # if available use column_names metadata for option names # otherwise read first row - assume is a header with tab separated names if self.usecolnames: @@ -1516,29 +1519,23 @@ def get_options(self, trans, other_values): and hasattr(dataset.metadata, "column_names") and dataset.metadata.element_is_set("column_names") ): - column_list = [ - ("%d" % (i + 1), "c%d: %s" % (i + 1, x)) for i, x in enumerate(dataset.metadata.column_names) - ] + try: + options = 
[(f"c{c}: {dataset.metadata.column_names[int(c) - 1]}", c, False) for c in column_list] + except IndexError: + # ignore and rely on fallback + pass else: try: with open(dataset.get_file_name()) as f: head = f.readline() cnames = head.rstrip("\n\r ").split("\t") - column_list = [("%d" % (i + 1), "c%d: %s" % (i + 1, x)) for i, x in enumerate(cnames)] + options = [(f"c{c}: {cnames[int(c) - 1]}", c, False) for c in column_list] except Exception: - column_list = self.get_column_list(trans, other_values) - if self.numerical: # If numerical was requested, filter columns based on metadata - if hasattr(dataset, "metadata") and getattr(dataset.metadata, "column_types", None) is not None: - if len(dataset.metadata.column_types) >= len(column_list): - numerics = [i for i, x in enumerate(dataset.metadata.column_types) if x in ["int", "float"]] - column_list = [column_list[i] for i in numerics] - else: - column_list = self.get_column_list(trans, other_values) - for col in column_list: - if isinstance(col, tuple) and len(col) == 2: - options.append((col[1], col[0], False)) - else: - options.append((f"Column: {col}", col, False)) + # ignore and rely on fallback + pass + if not options: + # fallback if no options list could be built so far + options = [(f"Column: {col}", col, False) for col in column_list] return options def get_initial_value(self, trans, other_values): @@ -1564,9 +1561,13 @@ def is_file_empty(self, trans, other_values): for dataset in util.listify(other_values.get(self.data_ref)): # Use representative dataset if a dataset collection is parsed if isinstance(dataset, HistoryDatasetCollectionAssociation): - dataset = dataset.to_hda_representative() + if dataset.populated: + dataset = dataset.to_hda_representative() + else: + # That's fine, we'll check again on execution + return True if isinstance(dataset, DatasetCollectionElement): - dataset = dataset.hda + dataset = dataset.first_dataset_instance() if isinstance(dataset, DatasetInstance): return not dataset.has_data() if is_runtime_value(dataset): diff --git a/lib/galaxy/util/compression_utils.py b/lib/galaxy/util/compression_utils.py index d54c0ffcbc24..576e62623f99 100644 --- a/lib/galaxy/util/compression_utils.py +++ b/lib/galaxy/util/compression_utils.py @@ -188,6 +188,8 @@ def __init__(self, file_path: StrPath, mode: str = "r") -> None: self.file_type = "tar" elif zipfile.is_zipfile(file_path) and not file_path_str.endswith(".jar"): self.file_type = "zip" + else: + raise Exception("File must be valid zip or tar file.") self.file_name = os.path.splitext(os.path.basename(file_path))[0] if self.file_name.endswith(".tar"): self.file_name = os.path.splitext(self.file_name)[0] diff --git a/lib/galaxy/webapps/galaxy/api/datasets.py b/lib/galaxy/webapps/galaxy/api/datasets.py index 73a06eaf0d66..eba6795100fd 100644 --- a/lib/galaxy/webapps/galaxy/api/datasets.py +++ b/lib/galaxy/webapps/galaxy/api/datasets.py @@ -27,6 +27,7 @@ ) from typing_extensions import Annotated +from galaxy.datatypes.dataproviders.base import MAX_LIMIT from galaxy.schema import ( FilterQueryParams, SerializationParams, @@ -429,18 +430,35 @@ def show( "may return different responses." ), ), + limit: Annotated[ + Optional[int], + Query( + ge=1, + le=MAX_LIMIT, + description="Maximum number of items to return. Currently only applies to `data_type=raw_data` requests", + ), + ] = MAX_LIMIT, + offset: Annotated[ + Optional[int], + Query( + ge=0, + description="Starts at the beginning skip the first ( offset - 1 ) items and begin returning at the Nth item. 
Currently only applies to `data_type=raw_data` requests", + ), + ] = 0, serialization_params: SerializationParams = Depends(query_serialization_params), ): """ - **Note**: Due to the multipurpose nature of this endpoint, which can receive a wild variety of parameters + **Note**: Due to the multipurpose nature of this endpoint, which can receive a wide variety of parameters and return different kinds of responses, the documentation here will be limited. To get more information please check the source code. """ - exclude_params = {"hda_ldda", "data_type"} + exclude_params = {"hda_ldda", "data_type", "limit", "offset"} exclude_params.update(SerializationParams.model_fields.keys()) extra_params = get_query_parameters_from_request_excluding(request, exclude_params) - return self.service.show(trans, dataset_id, hda_ldda, serialization_params, data_type, **extra_params) + return self.service.show( + trans, dataset_id, hda_ldda, serialization_params, data_type, limit=limit, offset=offset, **extra_params + ) @router.get( "/api/datasets/{dataset_id}/content/{content_type}", diff --git a/lib/galaxy/webapps/galaxy/controllers/admin.py b/lib/galaxy/webapps/galaxy/controllers/admin.py index bd0ea3a06158..b92e37c12786 100644 --- a/lib/galaxy/webapps/galaxy/controllers/admin.py +++ b/lib/galaxy/webapps/galaxy/controllers/admin.py @@ -13,7 +13,10 @@ util, web, ) -from galaxy.exceptions import ActionInputError +from galaxy.exceptions import ( + ActionInputError, + RequestParameterInvalidException, +) from galaxy.managers.quotas import QuotaManager from galaxy.model.base import transaction from galaxy.model.index_filter_util import ( @@ -807,35 +810,17 @@ def manage_users_and_groups_for_role(self, trans, payload=None, **kwd): ], } else: - in_users = [ - trans.sa_session.query(trans.app.model.User).get(trans.security.decode_id(x)) - for x in util.listify(payload.get("in_users")) - ] - in_groups = [ - trans.sa_session.query(trans.app.model.Group).get(trans.security.decode_id(x)) - for x in util.listify(payload.get("in_groups")) - ] - if None in in_users or None in in_groups: + user_ids = [trans.security.decode_id(id) for id in util.listify(payload.get("in_users"))] + group_ids = [trans.security.decode_id(id) for id in util.listify(payload.get("in_groups"))] + try: + trans.app.security_agent.set_role_user_and_group_associations( + role, user_ids=user_ids, group_ids=group_ids + ) + return { + "message": f"Role '{role.name}' has been updated with {len(user_ids)} associated users and {len(group_ids)} associated groups." 
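Because the dataset `show` route above now forwards `limit` and `offset` for `data_type=raw_data` requests, a client can page through a large dataset instead of pulling everything at once. A rough client-side sketch, assuming a standard Galaxy API key; the server URL, key, and encoded dataset id are placeholders, and the exact response shape still depends on the datatype's data provider:

```python
# Hypothetical paging sketch for GET /api/datasets/{dataset_id}?data_type=raw_data
import requests

GALAXY_URL = "https://galaxy.example.org"  # placeholder
API_KEY = "your-api-key"                   # placeholder
DATASET_ID = "f2db41e1fa331b3e"            # placeholder encoded dataset id


def fetch_rows(offset: int, limit: int = 100):
    response = requests.get(
        f"{GALAXY_URL}/api/datasets/{DATASET_ID}",
        params={"data_type": "raw_data", "offset": offset, "limit": limit},
        headers={"x-api-key": API_KEY},
        timeout=60,
    )
    response.raise_for_status()
    # raw_data responses are expected to carry the provided rows under "data";
    # adjust if the provider for this datatype returns a different shape.
    return response.json().get("data", [])


first_page = fetch_rows(offset=0, limit=100)
second_page = fetch_rows(offset=100, limit=100)
```

Requests asking for more than `MAX_LIMIT` rows are rejected by the `le=MAX_LIMIT` constraint on the query parameter, so very large datasets have to be consumed page by page.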
+ } + except RequestParameterInvalidException: return self.message_exception(trans, "One or more invalid user/group id has been provided.") - for ura in role.users: - user = trans.sa_session.query(trans.app.model.User).get(ura.user_id) - if user not in in_users: - # Delete DefaultUserPermissions for previously associated users that have been removed from the role - for dup in user.default_permissions: - if role == dup.role: - trans.sa_session.delete(dup) - # Delete DefaultHistoryPermissions for previously associated users that have been removed from the role - for history in user.histories: - for dhp in history.default_permissions: - if role == dhp.role: - trans.sa_session.delete(dhp) - with transaction(trans.sa_session): - trans.sa_session.commit() - trans.app.security_agent.set_entity_role_associations(roles=[role], users=in_users, groups=in_groups) - trans.sa_session.refresh(role) - return { - "message": f"Role '{role.name}' has been updated with {len(in_users)} associated users and {len(in_groups)} associated groups." - } @web.legacy_expose_api @web.require_admin @@ -912,21 +897,17 @@ def manage_users_and_roles_for_group(self, trans, payload=None, **kwd): ], } else: - in_users = [ - trans.sa_session.query(trans.app.model.User).get(trans.security.decode_id(x)) - for x in util.listify(payload.get("in_users")) - ] - in_roles = [ - trans.sa_session.query(trans.app.model.Role).get(trans.security.decode_id(x)) - for x in util.listify(payload.get("in_roles")) - ] - if None in in_users or None in in_roles: + user_ids = [trans.security.decode_id(id) for id in util.listify(payload.get("in_users"))] + role_ids = [trans.security.decode_id(id) for id in util.listify(payload.get("in_roles"))] + try: + trans.app.security_agent.set_group_user_and_role_associations( + group, user_ids=user_ids, role_ids=role_ids + ) + return { + "message": f"Group '{group.name}' has been updated with {len(user_ids)} associated users and {len(role_ids)} associated roles." + } + except RequestParameterInvalidException: return self.message_exception(trans, "One or more invalid user/role id has been provided.") - trans.app.security_agent.set_entity_group_associations(groups=[group], users=in_users, roles=in_roles) - trans.sa_session.refresh(group) - return { - "message": f"Group '{group.name}' has been updated with {len(in_users)} associated users and {len(in_roles)} associated roles." - } @web.legacy_expose_api @web.require_admin @@ -1099,28 +1080,18 @@ def manage_roles_and_groups_for_user(self, trans, payload=None, **kwd): ], } else: - in_roles = [ - trans.sa_session.query(trans.app.model.Role).get(trans.security.decode_id(x)) - for x in util.listify(payload.get("in_roles")) - ] - in_groups = [ - trans.sa_session.query(trans.app.model.Group).get(trans.security.decode_id(x)) - for x in util.listify(payload.get("in_groups")) - ] - if None in in_groups or None in in_roles: + role_ids = [trans.security.decode_id(id) for id in util.listify(payload.get("in_roles"))] + group_ids = [trans.security.decode_id(id) for id in util.listify(payload.get("in_groups"))] + try: + trans.app.security_agent.set_user_group_and_role_associations( + user, group_ids=group_ids, role_ids=role_ids + ) + return { + "message": f"User '{user.email}' has been updated with {len(role_ids)} associated roles and {len(group_ids)} associated groups (private roles are not displayed)." 
+ } + except RequestParameterInvalidException: return self.message_exception(trans, "One or more invalid role/group id has been provided.") - # make sure the user is not dis-associating himself from his private role - private_role = trans.app.security_agent.get_private_user_role(user) - if private_role not in in_roles: - in_roles.append(private_role) - - trans.app.security_agent.set_entity_user_associations(users=[user], roles=in_roles, groups=in_groups) - trans.sa_session.refresh(user) - return { - "message": f"User '{user.email}' has been updated with {len(in_roles) - 1} associated roles and {len(in_groups)} associated groups (private roles are not displayed)." - } - # ---- Utility methods ------------------------------------------------------- diff --git a/lib/galaxy/workflow/scheduling_manager.py b/lib/galaxy/workflow/scheduling_manager.py index 3868e24c13a9..8d31130bad21 100644 --- a/lib/galaxy/workflow/scheduling_manager.py +++ b/lib/galaxy/workflow/scheduling_manager.py @@ -329,7 +329,6 @@ def __schedule(self, workflow_scheduler_id, workflow_scheduler): def __attempt_schedule(self, invocation_id, workflow_scheduler): with self.app.model.context() as session: workflow_invocation = session.get(model.WorkflowInvocation, invocation_id) - try: if workflow_invocation.state == workflow_invocation.states.CANCELLING: workflow_invocation.cancel_invocation_steps() diff --git a/lib/galaxy_test/api/test_groups.py b/lib/galaxy_test/api/test_groups.py index 8e4c5510fe98..0176bde0d21c 100644 --- a/lib/galaxy_test/api/test_groups.py +++ b/lib/galaxy_test/api/test_groups.py @@ -107,7 +107,9 @@ def test_update(self): another_user_id = self.dataset_populator.user_id() another_role_id = self.dataset_populator.user_private_role_id() assert another_user_id is not None - update_response = self._put(f"groups/{group_id}", data={"user_ids": [another_user_id]}, admin=True, json=True) + update_response = self._put( + f"groups/{group_id}", data={"user_ids": [user_id, another_user_id]}, admin=True, json=True + ) self._assert_status_code_is_ok(update_response) # Check if the user was added @@ -119,7 +121,9 @@ def test_update(self): ) # Add another role to the group - update_response = self._put(f"groups/{group_id}", data={"role_ids": [another_role_id]}, admin=True, json=True) + update_response = self._put( + f"groups/{group_id}", data={"role_ids": [user_private_role_id, another_role_id]}, admin=True, json=True + ) self._assert_status_code_is_ok(update_response) # Check if the role was added diff --git a/lib/galaxy_test/api/test_jobs.py b/lib/galaxy_test/api/test_jobs.py index 82f9ecbab416..c904e808f70f 100644 --- a/lib/galaxy_test/api/test_jobs.py +++ b/lib/galaxy_test/api/test_jobs.py @@ -464,6 +464,32 @@ def test_no_hide_on_rerun(self): assert hdca["visible"] assert isoparse(hdca["update_time"]) > (isoparse(first_update_time)) + def test_rerun_exception_handling(self): + with self.dataset_populator.test_history() as history_id: + other_run_response = self.dataset_populator.run_tool( + tool_id="job_properties", + inputs={}, + history_id=history_id, + ) + unrelated_job_id = other_run_response["jobs"][0]["id"] + run_response = self._run_map_over_error(history_id) + job_id = run_response["jobs"][0]["id"] + self.dataset_populator.wait_for_job(job_id) + failed_hdca = self.dataset_populator.get_history_collection_details( + history_id=history_id, + content_id=run_response["implicit_collections"][0]["id"], + assert_ok=False, + ) + assert failed_hdca["visible"] + rerun_params = 
self._get(f"jobs/{job_id}/build_for_rerun").json() + inputs = rerun_params["state_inputs"] + inputs["rerun_remap_job_id"] = unrelated_job_id + before_rerun_items = self.dataset_populator.get_history_contents(history_id) + rerun_response = self._run_detect_errors(history_id=history_id, inputs=inputs) + assert "does not match rerun tool id" in rerun_response["err_msg"] + after_rerun_items = self.dataset_populator.get_history_contents(history_id) + assert len(before_rerun_items) == len(after_rerun_items) + @skip_without_tool("empty_output") def test_common_problems(self): with self.dataset_populator.test_history() as history_id: diff --git a/lib/galaxy_test/api/test_tools.py b/lib/galaxy_test/api/test_tools.py index a8f0cb1ebc34..8a29c39aaa2b 100644 --- a/lib/galaxy_test/api/test_tools.py +++ b/lib/galaxy_test/api/test_tools.py @@ -2578,6 +2578,36 @@ def test_implicit_reduce_with_mapping(self): ) assert output_hdca["collection_type"] == "list" + @skip_without_tool("column_multi_param") + def test_multi_param_column_nested_list(self): + with self.dataset_populator.test_history() as history_id: + hdca = self.dataset_collection_populator.create_list_of_list_in_history( + history_id, ext="tabular", wait=True + ).json() + inputs = { + "input1": {"src": "hdca", "id": hdca["id"]}, + # FIXME: integers don't work here + "col": "1", + } + response = self._run("column_multi_param", history_id, inputs, assert_ok=True) + self.dataset_populator.wait_for_job(job_id=response["jobs"][0]["id"], assert_ok=True) + + @skip_without_tool("column_multi_param") + def test_multi_param_column_nested_list_fails_on_invalid_column(self): + with self.dataset_populator.test_history() as history_id: + hdca = self.dataset_collection_populator.create_list_of_list_in_history( + history_id, ext="tabular", wait=True + ).json() + inputs = { + "input1": {"src": "hdca", "id": hdca["id"]}, + "col": "10", + } + try: + self._run("column_multi_param", history_id, inputs, assert_ok=True) + except AssertionError as e: + exception_raised = e + assert exception_raised, "Expected invalid column selection to fail job" + @skip_without_tool("column_multi_param") def test_implicit_conversion_and_reduce(self): with self.dataset_populator.test_history() as history_id: diff --git a/lib/galaxy_test/api/test_workflows.py b/lib/galaxy_test/api/test_workflows.py index d7a77b04bb22..b6a5ec037bea 100644 --- a/lib/galaxy_test/api/test_workflows.py +++ b/lib/galaxy_test/api/test_workflows.py @@ -5150,56 +5150,6 @@ def test_run_with_numeric_input_connection(self, history_id): assert int(str_43) == 43 assert abs(float(str_4point14) - 4.14) < 0.0001 - @skip_without_tool("param_value_from_file") - def test_expression_tool_map_over(self, history_id): - self._run_jobs( - """ -class: GalaxyWorkflow -inputs: - text_input1: collection -steps: -- label: param_out - tool_id: param_value_from_file - in: - input1: text_input1 -- label: consume_expression_parameter - tool_id: validation_default - in: - input1: param_out/text_param - outputs: - out_file1: - rename: "replaced_param_collection" -test_data: - text_input1: - collection_type: list - elements: - - identifier: A - content: A - - identifier: B - content: B -""", - history_id=history_id, - ) - history_contents = self._get(f"histories/{history_id}/contents").json() - collection = [ - c - for c in history_contents - if c["history_content_type"] == "dataset_collection" and c["name"] == "replaced_param_collection" - ][0] - collection_details = self._get(collection["url"]).json() - assert 
collection_details["element_count"] == 2 - elements = collection_details["elements"] - assert elements[0]["element_identifier"] == "A" - assert elements[1]["element_identifier"] == "B" - element_a_content = self.dataset_populator.get_history_dataset_content( - history_id, dataset=elements[0]["object"] - ) - element_b_content = self.dataset_populator.get_history_dataset_content( - history_id, dataset=elements[1]["object"] - ) - assert element_a_content.strip() == "A" - assert element_b_content.strip() == "B" - @skip_without_tool("create_input_collection") def test_workflow_optional_input_text_parameter_reevaluation(self): with self.dataset_populator.test_history() as history_id: diff --git a/lib/galaxy_test/base/populators.py b/lib/galaxy_test/base/populators.py index 5c25baf2bd89..82b458d0068e 100644 --- a/lib/galaxy_test/base/populators.py +++ b/lib/galaxy_test/base/populators.py @@ -2906,7 +2906,7 @@ def __create_payload(self, history_id: str, *args, **kwds): else: return self.__create_payload_collection(history_id, *args, **kwds) - def __create_payload_fetch(self, history_id: str, collection_type, **kwds): + def __create_payload_fetch(self, history_id: str, collection_type, ext="txt", **kwds): contents = None if "contents" in kwds: contents = kwds["contents"] @@ -2928,7 +2928,7 @@ def __create_payload_fetch(self, history_id: str, collection_type, **kwds): elements.append(contents_level) continue - element = {"src": "pasted", "ext": "txt"} + element = {"src": "pasted", "ext": ext} # Else older style list of contents or element ID and contents, # convert to fetch API. if isinstance(contents_level, tuple): @@ -3151,7 +3151,12 @@ def read_test_data(test_dict): elif is_dict and "type" in value: input_type = value.pop("type") if input_type == "File": - content = open_test_data(value) + if "value" in value: + content = open_test_data(value) + elif "content" in value: + content = value["content"] + else: + raise ValueError(f"Invalid test_data def {test_data}") new_dataset_kwds = {"content": content} if "name" in value: new_dataset_kwds["name"] = value["name"] diff --git a/lib/galaxy_test/workflow/flatten_collection.gxwf-tests.yml b/lib/galaxy_test/workflow/flatten_collection.gxwf-tests.yml index 1a2b5c65b596..bfd0a6a02435 100644 --- a/lib/galaxy_test/workflow/flatten_collection.gxwf-tests.yml +++ b/lib/galaxy_test/workflow/flatten_collection.gxwf-tests.yml @@ -3,6 +3,7 @@ job: {} outputs: out: + attributes: {collection_type: 'list'} elements: 'oe1-ie1': asserts: diff --git a/lib/galaxy_test/workflow/map_over_expression.gxwf-tests.yml b/lib/galaxy_test/workflow/map_over_expression.gxwf-tests.yml new file mode 100644 index 000000000000..0357ca6c53ca --- /dev/null +++ b/lib/galaxy_test/workflow/map_over_expression.gxwf-tests.yml @@ -0,0 +1,22 @@ +- doc: | + Test to verify text parameter can be connected to data column param + job: + text_input1: + collection_type: list + elements: + - identifier: A + content: A + - identifier: B + content: B + outputs: + out1: + attributes: { collection_type: list } + elements: + A: + asserts: + - that: has_line + line: A + B: + asserts: + - that: has_line + line: B diff --git a/lib/galaxy_test/workflow/map_over_expression.gxwf.yml b/lib/galaxy_test/workflow/map_over_expression.gxwf.yml new file mode 100644 index 000000000000..c361b2232022 --- /dev/null +++ b/lib/galaxy_test/workflow/map_over_expression.gxwf.yml @@ -0,0 +1,18 @@ +class: GalaxyWorkflow +inputs: + text_input1: collection +outputs: + out1: + outputSource: consume_expression_parameter/out_file1 
+steps: + param_out: + tool_id: param_value_from_file + in: + input1: text_input1 + consume_expression_parameter: + tool_id: validation_default + in: + input1: param_out/text_param + outputs: + out_file1: + rename: "replaced_param_collection" diff --git a/lib/galaxy_test/workflow/output_parameter.gxwf-tests.yml b/lib/galaxy_test/workflow/output_parameter.gxwf-tests.yml new file mode 100644 index 000000000000..d422d9e3d836 --- /dev/null +++ b/lib/galaxy_test/workflow/output_parameter.gxwf-tests.yml @@ -0,0 +1,8 @@ +- doc: | + Test to verify exact output parameter verification works propery. + job: + text_int: + type: File + content: "43" + outputs: + out_int: 43 diff --git a/lib/galaxy_test/workflow/output_parameter.gxwf.yml b/lib/galaxy_test/workflow/output_parameter.gxwf.yml new file mode 100644 index 000000000000..4157f894f492 --- /dev/null +++ b/lib/galaxy_test/workflow/output_parameter.gxwf.yml @@ -0,0 +1,13 @@ +class: GalaxyWorkflow +inputs: + text_int: data +outputs: + out_int: + outputSource: param_out/integer_param +steps: + param_out: + tool_id: param_value_from_file + state: + param_type: integer + in: + input1: text_int diff --git a/lib/galaxy_test/workflow/tests.py b/lib/galaxy_test/workflow/tests.py index f0702fb8240b..a850490740b6 100644 --- a/lib/galaxy_test/workflow/tests.py +++ b/lib/galaxy_test/workflow/tests.py @@ -8,6 +8,12 @@ import yaml from gxformat2.yaml import ordered_load +from galaxy.tool_util.models import ( + OutputChecks, + OutputsDict, + TestDicts, + TestJobDict, +) from galaxy.tool_util.parser.interface import TestCollectionOutputDef from galaxy.tool_util.verify import verify_file_contents_against_dict from galaxy.tool_util.verify.interactor import ( @@ -52,7 +58,7 @@ def setUp(self): self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor) @pytest.mark.workflow - def test_workflow(self, workflow_path: Path, test_job): + def test_workflow(self, workflow_path: Path, test_job: TestJobDict): with workflow_path.open() as f: yaml_content = ordered_load(f) with self.dataset_populator.test_history() as history_id: @@ -63,30 +69,32 @@ def test_workflow(self, workflow_path: Path, test_job): ) self._verify(run_summary, test_job["outputs"]) - def _verify(self, run_summary: RunJobsSummary, output_definitions): + def _verify(self, run_summary: RunJobsSummary, output_definitions: OutputsDict): for output_name, output_definition in output_definitions.items(): self._verify_output(run_summary, output_name, output_definition) - def _verify_output(self, run_summary: RunJobsSummary, output_name, test_properties): - is_collection_test = "elements" in test_properties + def _verify_output(self, run_summary: RunJobsSummary, output_name, test_properties: OutputChecks): + is_collection_test = isinstance(test_properties, dict) and "elements" in test_properties item_label = f"Output named {output_name}" def get_filename(name): return tempfile.NamedTemporaryFile(prefix=f"gx_workflow_framework_test_file_{output_name}", delete=False) - def verify_dataset(dataset: dict, test_properties: dict): + def verify_dataset(dataset: dict, test_properties: OutputChecks): output_content = self.dataset_populator.get_history_dataset_content( run_summary.history_id, dataset=dataset, type="bytes" ) verify_file_contents_against_dict(get_filename, _get_location, item_label, output_content, test_properties) - metadata = get_metadata_to_test(test_properties) - if metadata: - dataset_details = self.dataset_populator.get_history_dataset_details( - run_summary.history_id, 
content_id=dataset["id"] - ) - compare_expected_metadata_to_api_response(metadata, dataset_details) + if isinstance(test_properties, dict): + metadata = get_metadata_to_test(test_properties) + if metadata: + dataset_details = self.dataset_populator.get_history_dataset_details( + run_summary.history_id, content_id=dataset["id"] + ) + compare_expected_metadata_to_api_response(metadata, dataset_details) if is_collection_test: + assert isinstance(test_properties, dict) test_properties["name"] = output_name # setup preferred name "elements" in accordance with work in https://github.com/galaxyproject/planemo/pull/1417 test_properties["element_tests"] = test_properties["elements"] @@ -105,14 +113,15 @@ def verify_dataset_element(element, test_properties, element_outfile): verify_collection(output_def, output_collection, verify_dataset_element) else: - test_properties["name"] = output_name + if isinstance(test_properties, dict): + test_properties["name"] = output_name invocation_details = self.workflow_populator.get_invocation(run_summary.invocation_id, step_details=True) assert output_name in invocation_details["outputs"] test_output = invocation_details["outputs"][output_name] verify_dataset(test_output, test_properties) -def _test_jobs(workflow_path: Path) -> list: +def _test_jobs(workflow_path: Path) -> TestDicts: test_path = _workflow_test_path(workflow_path) with test_path.open() as f: jobs = yaml.safe_load(f) diff --git a/packages/app/HISTORY.rst b/packages/app/HISTORY.rst index 09d4be63a792..ea152aca9bc8 100644 --- a/packages/app/HISTORY.rst +++ b/packages/app/HISTORY.rst @@ -9,6 +9,57 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Return generic message for password reset email by `@ahmedhamidawan `_ in `#18479 `_ +* Increase API robustness to invalid requests, improve compressed data serving by `@mvdbeek `_ in `#18494 `_ +* Prevent job submission if input collection element is deleted by `@mvdbeek `_ in `#18517 `_ +* Strip unicode null from tool stdio by `@mvdbeek `_ in `#18527 `_ +* Fix map over calculation for runtime inputs by `@mvdbeek `_ in `#18535 `_ +* Fix for not-null in 'column_list' object by `@hujambo-dunia `_ in `#18553 `_ +* Also fail ``ensure_dataset_on_disk`` if dataset is in new state by `@mvdbeek `_ in `#18559 `_ +* Fix sqlalchemy statement in tooltagmanager reset output by `@dannon `_ in `#18591 `_ +* Set minimum weasyprint version by `@mvdbeek `_ in `#18606 `_ +* Improve relabel identifiers message when number of columns is not 2 by `@mvdbeek `_ in `#18634 `_ +* Fix extract workflow from history when implicit collection has no jobs by `@mvdbeek `_ in `#18661 `_ +* Make sure we set file size also for purged outputs by `@mvdbeek `_ in `#18681 `_ +* File source and object store instance api fixes by `@mvdbeek `_ in `#18685 `_ +* Fix change datatype PJA on expression tool data outputs by `@mvdbeek `_ in `#18691 `_ +* Fill in missing help for cross product tools. 
by `@jmchilton `_ in `#18698 `_ +* Fix subworkflow scheduling for delayed subworkflow steps connected to data inputs by `@mvdbeek `_ in `#18731 `_ +* Catch and display exceptions when importing malformatted yaml workflows by `@mvdbeek `_ in `#18734 `_ +* Fix infinitely delayed workflow scheduling if skipped step creates HDCA by `@mvdbeek `_ in `#18751 `_ +* Fix directory get or create logic by `@mvdbeek `_ in `#18752 `_ +* Fix job summary for optional unset job data inputs by `@mvdbeek `_ in `#18754 `_ +* Allow to change only the description of a quota by `@bernt-matthias `_ in `#18775 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Fix unspecified ``oidc_endpoint`` variable overwriting specified ``redirect_url`` by `@bgruening `_ in `#18818 `_ +* Fix wrong celery_app config on job and workflow handlers by `@mvdbeek `_ in `#18819 `_ +* Fix ``named cursor is not valid anymore`` by `@mvdbeek `_ in `#18825 `_ +* Tighten TRS url check by `@mvdbeek `_ in `#18841 `_ +* Fix Workflow index bookmark filter by `@itisAliRH `_ in `#18842 `_ +* Skip metric collection if job working directory doesn't exist by `@mvdbeek `_ in `#18845 `_ +* Extend on disk checks to running, queued and error states by `@mvdbeek `_ in `#18846 `_ +* Raise MessageException instead of assertions on rerun problems by `@mvdbeek `_ in `#18858 `_ +* Fix data_column ref to nested collection by `@mvdbeek `_ in `#18875 `_ +* Fix loading very old workflows with data inputs by `@mvdbeek `_ in `#18876 `_ + +============ +Enhancements +============ + +* Include workflow invocation id in exception logs by `@mvdbeek `_ in `#18594 `_ +* Implemented the generic OIDC backend from python-social-auth into Gal… by `@Edmontosaurus `_ in `#18670 `_ +* Collect job metrics also when job failed by `@mvdbeek `_ in `#18809 `_ +* prevent "missing refresh_token" errors by supporting also with Keycloak backend by `@ljocha `_ in `#18826 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/auth/HISTORY.rst b/packages/auth/HISTORY.rst index ae02cb251cda..5d9a1f321ef1 100644 --- a/packages/auth/HISTORY.rst +++ b/packages/auth/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/config/HISTORY.rst b/packages/config/HISTORY.rst index 327bc9cea77b..0d24cbb05150 100644 --- a/packages/config/HISTORY.rst +++ b/packages/config/HISTORY.rst @@ -9,6 +9,32 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Strip whitespace when listifying admin users by `@jdavcs `_ in `#18656 `_ + +============ +Enhancements +============ + +* Make `default_panel_view` a `_by_host` option by `@natefoo `_ in `#18471 `_ +* More datatype deprecation warnings by `@mvdbeek `_ in `#18612 `_ +* Implemented the generic OIDC backend from python-social-auth into Gal… by `@Edmontosaurus `_ in `#18670 `_ + +============= +Other changes +============= + +* Backport pod5 datatype by `@TomHarrop `_ in `#18507 `_ +* Backport PR 18630 "Add BlobToolkit to the list of interactive tools" to release_24.1 by `@cat-bro `_ in `#18784 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/data/HISTORY.rst b/packages/data/HISTORY.rst index a50cce52863c..f3b5913f9da8 100644 --- a/packages/data/HISTORY.rst +++ b/packages/data/HISTORY.rst @@ -9,6 +9,38 @@ History +------------------- 
+24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Increase API robustness to invalid requests, improve compressed data serving by `@mvdbeek `_ in `#18494 `_ +* Prevent job submission if input collection element is deleted by `@mvdbeek `_ in `#18517 `_ +* Fix shared caches with extended metadata collection. by `@jmchilton `_ in `#18520 `_ +* Also check dataset.deleted when determining if data can be displayed by `@mvdbeek `_ in `#18547 `_ +* Fix for not-null in 'column_list' object by `@hujambo-dunia `_ in `#18553 `_ +* Fix h5ad metadata by `@nilchia `_ in `#18635 `_ +* Don't set file size to zero by `@mvdbeek `_ in `#18653 `_ +* Make sure we set file size also for purged outputs by `@mvdbeek `_ in `#18681 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Fix copying workflow with subworkflow step for step that you own by `@mvdbeek `_ in `#18802 `_ +* Make pylibmagic import optional by `@mvdbeek `_ in `#18813 `_ +* Ignore converted datasets in invalid input states by `@mvdbeek `_ in `#18850 `_ +* Fix discovered outputs with directory metadata and distributed object by `@mvdbeek `_ in `#18855 `_ +* Raise MessageException instead of assertions on rerun problems by `@mvdbeek `_ in `#18858 `_ +* Fix wrong final state when init_from is used by `@mvdbeek `_ in `#18871 `_ +* Fix history import when parent_hda not serialized by `@mvdbeek `_ in `#18873 `_ + +============= +Other changes +============= + +* Backport pod5 datatype by `@TomHarrop `_ in `#18507 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/files/HISTORY.rst b/packages/files/HISTORY.rst index 356f458d7c5a..8bc4ff5427ab 100644 --- a/packages/files/HISTORY.rst +++ b/packages/files/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/job_execution/HISTORY.rst b/packages/job_execution/HISTORY.rst index a94919c3e982..2153ce10970b 100644 --- a/packages/job_execution/HISTORY.rst +++ b/packages/job_execution/HISTORY.rst @@ -9,6 +9,17 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Retry container monitor POST if it fails (don't assume it succeeded) by `@natefoo `_ in `#18863 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/job_metrics/HISTORY.rst b/packages/job_metrics/HISTORY.rst index 1501175574b6..d6a048415a20 100644 --- a/packages/job_metrics/HISTORY.rst +++ b/packages/job_metrics/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/navigation/HISTORY.rst b/packages/navigation/HISTORY.rst index 945b382e4c50..65a78836ad82 100644 --- a/packages/navigation/HISTORY.rst +++ b/packages/navigation/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/objectstore/HISTORY.rst b/packages/objectstore/HISTORY.rst index 2174755b4f24..7a28dbaa4557 100644 --- a/packages/objectstore/HISTORY.rst +++ b/packages/objectstore/HISTORY.rst @@ -9,6 +9,17 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug 
fixes +========= + +* Fix shared caches with extended metadata collection. by `@jmchilton `_ in `#18520 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/schema/HISTORY.rst b/packages/schema/HISTORY.rst index d746fe40ad45..3bf84da813cc 100644 --- a/packages/schema/HISTORY.rst +++ b/packages/schema/HISTORY.rst @@ -9,6 +9,28 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Handle error when workflow is unowned in Invocation view by `@ahmedhamidawan `_ in `#18730 `_ +* Fix datatype validation of newly built collection by `@mvdbeek `_ in `#18738 `_ +* Fix job summary for optional unset job data inputs by `@mvdbeek `_ in `#18754 `_ +* Fix ``TypeError`` from Pydantic 2.9.0 by `@nsoranzo `_ in `#18788 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Make all fields optional for HelpForumPost by `@davelopez `_ in `#18839 `_ + +============ +Enhancements +============ + +* Include workflow invocation id in exception logs by `@mvdbeek `_ in `#18594 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/selenium/HISTORY.rst b/packages/selenium/HISTORY.rst index 7e4d745b7eda..d1b195a7f953 100644 --- a/packages/selenium/HISTORY.rst +++ b/packages/selenium/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/test_api/HISTORY.rst b/packages/test_api/HISTORY.rst index fd2142439fec..75536b62bb90 100644 --- a/packages/test_api/HISTORY.rst +++ b/packages/test_api/HISTORY.rst @@ -9,6 +9,29 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Prevent job submission if input collection element is deleted by `@mvdbeek `_ in `#18517 `_ +* Fix view parameter type in Job index API by `@davelopez `_ in `#18521 `_ +* Fix map over calculation for runtime inputs by `@mvdbeek `_ in `#18535 `_ +* Fix Archive header encoding by `@arash77 `_ in `#18583 `_ +* Don't set file size to zero by `@mvdbeek `_ in `#18653 `_ +* Fix extract workflow from history when implicit collection has no jobs by `@mvdbeek `_ in `#18661 `_ +* Fix change datatype PJA on expression tool data outputs by `@mvdbeek `_ in `#18691 `_ +* Fix subworkflow scheduling for delayed subworkflow steps connected to data inputs by `@mvdbeek `_ in `#18731 `_ +* Catch and display exceptions when importing malformatted yaml workflows by `@mvdbeek `_ in `#18734 `_ +* Fix infinitely delayed workflow scheduling if skipped step creates HDCA by `@mvdbeek `_ in `#18751 `_ +* Fix copying workflow with subworkflow step for step that you own by `@mvdbeek `_ in `#18802 `_ +* Raise MessageException instead of assertions on rerun problems by `@mvdbeek `_ in `#18858 `_ +* Fix data_column ref to nested collection by `@mvdbeek `_ in `#18875 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/test_base/HISTORY.rst b/packages/test_base/HISTORY.rst index 02265af284d7..e2fbc283b025 100644 --- a/packages/test_base/HISTORY.rst +++ b/packages/test_base/HISTORY.rst @@ -9,6 +9,19 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Fix infinitely delayed workflow scheduling if skipped step creates HDCA by `@mvdbeek `_ in `#18751 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in 
`#18798 `_ +* Fix data_column ref to nested collection by `@mvdbeek `_ in `#18875 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/test_driver/HISTORY.rst b/packages/test_driver/HISTORY.rst index e314c57caf9e..b568c88d14e3 100644 --- a/packages/test_driver/HISTORY.rst +++ b/packages/test_driver/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/tool_shed/HISTORY.rst b/packages/tool_shed/HISTORY.rst index a238e7eb849c..19ef42776f91 100644 --- a/packages/tool_shed/HISTORY.rst +++ b/packages/tool_shed/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/tool_util/HISTORY.rst b/packages/tool_util/HISTORY.rst index 83c56edb8548..8210bdf311bc 100644 --- a/packages/tool_util/HISTORY.rst +++ b/packages/tool_util/HISTORY.rst @@ -9,6 +9,24 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Handle all requests error in ``ApiBiotoolsMetadataSource._raw_get_metadata`` by `@nsoranzo `_ in `#18510 `_ +* xsd: allow `change_format` and `actions` also in statically defined collection elements, and break recursion by `@bernt-matthias `_ in `#18605 `_ + +============ +Enhancements +============ + +* Make `default_panel_view` a `_by_host` option by `@natefoo `_ in `#18471 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/tool_util/setup.cfg b/packages/tool_util/setup.cfg index 6fd52ba67a7f..eabca1a6e1b9 100644 --- a/packages/tool_util/setup.cfg +++ b/packages/tool_util/setup.cfg @@ -51,6 +51,7 @@ console_scripts = galaxy-tool-test = galaxy.tool_util.verify.script:main galaxy-tool-test-case-validation = galaxy.tool_util.parameters.scripts.validate_test_cases:main galaxy-tool-upgrade-advisor = galaxy.tool_util.upgrade.script:main + validate-test-format = galaxy.tool_util.validate_test_format:main mulled-build = galaxy.tool_util.deps.mulled.mulled_build:main mulled-build-channel = galaxy.tool_util.deps.mulled.mulled_build_channel:main mulled-build-files = galaxy.tool_util.deps.mulled.mulled_build_files:main diff --git a/packages/tours/HISTORY.rst b/packages/tours/HISTORY.rst index 17a158839c29..5841b78a1b2a 100644 --- a/packages/tours/HISTORY.rst +++ b/packages/tours/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/util/HISTORY.rst b/packages/util/HISTORY.rst index ee1512d490a3..99e03538b67a 100644 --- a/packages/util/HISTORY.rst +++ b/packages/util/HISTORY.rst @@ -9,6 +9,24 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Fix Archive header encoding by `@arash77 `_ in `#18583 `_ +* File source and object store instance api fixes by `@mvdbeek `_ in `#18685 `_ + +============ +Enhancements +============ + +* Use smtplib send_message to support utf-8 chars in to and from by `@mvdbeek `_ in `#18805 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/web_apps/HISTORY.rst b/packages/web_apps/HISTORY.rst index 470b054011de..d60e3b0910f7 100644 --- 
a/packages/web_apps/HISTORY.rst +++ b/packages/web_apps/HISTORY.rst @@ -9,6 +9,44 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Return generic message for password reset email by `@ahmedhamidawan `_ in `#18479 `_ +* Fix view parameter type in Job index API by `@davelopez `_ in `#18521 `_ +* Check if dataset has any data before running provider checks by `@mvdbeek `_ in `#18526 `_ +* Raise appropriate exception if ldda not found by `@mvdbeek `_ in `#18569 `_ +* Close install model session when request ends by `@mvdbeek `_ in `#18629 `_ +* Fix resume_paused_jobs if no session provided by `@mvdbeek `_ in `#18640 `_ +* Fix extract workflow from history when implicit collection has no jobs by `@mvdbeek `_ in `#18661 `_ +* Return error when following a link to a non-ready display application by `@mvdbeek `_ in `#18672 `_ +* Only load authnz routes when oidc enabled by `@mvdbeek `_ in `#18683 `_ +* File source and object store instance api fixes by `@mvdbeek `_ in `#18685 `_ +* Fix sorting users in admin by last login by `@jdavcs `_ in `#18694 `_ +* Fix resume paused jobs response handling by `@dannon `_ in `#18733 `_ +* Fix wrong extension on pick data output by `@mvdbeek `_ in `#18798 `_ +* Tighten TRS url check by `@mvdbeek `_ in `#18841 `_ +* Fix Workflow index bookmark filter by `@itisAliRH `_ in `#18842 `_ +* Extend on disk checks to running, queued and error states by `@mvdbeek `_ in `#18846 `_ + +============ +Enhancements +============ + +* Make `default_panel_view` a `_by_host` option by `@natefoo `_ in `#18471 `_ + +============= +Other changes +============= + +* Fix check dataset check by `@mvdbeek `_ in `#18856 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/web_framework/HISTORY.rst b/packages/web_framework/HISTORY.rst index 6ef26301eff3..4aeb57285e9d 100644 --- a/packages/web_framework/HISTORY.rst +++ b/packages/web_framework/HISTORY.rst @@ -9,6 +9,19 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + + +========= +Bug fixes +========= + +* Increase API robustness to invalid requests, improve compressed data serving by `@mvdbeek `_ in `#18494 `_ +* Apply statsd arg sanitization to all pages by `@mvdbeek `_ in `#18509 `_ +* Close install model session when request ends by `@mvdbeek `_ in `#18629 `_ + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/packages/web_stack/HISTORY.rst b/packages/web_stack/HISTORY.rst index 8271f6fa0196..ddde231f1d27 100644 --- a/packages/web_stack/HISTORY.rst +++ b/packages/web_stack/HISTORY.rst @@ -9,6 +9,12 @@ History +------------------- +24.1.2 (2024-09-25) +------------------- + +No recorded changes since last release + ------------------- 24.1.1 (2024-07-02) ------------------- diff --git a/test/functional/tools/sample_tool_conf.xml b/test/functional/tools/sample_tool_conf.xml index 8e22ee39eace..ebc4fb3cbd59 100644 --- a/test/functional/tools/sample_tool_conf.xml +++ b/test/functional/tools/sample_tool_conf.xml @@ -139,6 +139,7 @@ + diff --git a/test/integration/test_celery_user_rate_limit.py b/test/integration/test_celery_user_rate_limit.py index b9b832c5cddc..c36dacc880fd 100644 --- a/test/integration/test_celery_user_rate_limit.py +++ b/test/integration/test_celery_user_rate_limit.py @@ -50,7 +50,7 @@ def setup_users(dburl: str, num_users: int = 2): for user_id in user_ids_to_add: conn.execute( text("insert into galaxy_user(id, active, email, password) values (:id, :active, :email, :pw)"), 
- [{"id": user_id, "active": True, "email": "e", "pw": "p"}], + [{"id": user_id, "active": True, "email": f"e{user_id}", "pw": "p"}], ) diff --git a/test/unit/app/jobs/test_rule_helper.py b/test/unit/app/jobs/test_rule_helper.py index 1d1197a0dc78..f3be1cdd20c0 100644 --- a/test/unit/app/jobs/test_rule_helper.py +++ b/test/unit/app/jobs/test_rule_helper.py @@ -66,7 +66,7 @@ def __setup_fixtures(app): # user3 has no jobs. user1 = model.User(email=USER_EMAIL_1, password="pass1") user2 = model.User(email=USER_EMAIL_2, password="pass2") - user3 = model.User(email=USER_EMAIL_2, password="pass2") + user3 = model.User(email=USER_EMAIL_3, password="pass3") app.add(user1, user2, user3) diff --git a/test/unit/app/managers/test_NotificationManager.py b/test/unit/app/managers/test_NotificationManager.py index 6e0c36397c95..76e934cc9e6f 100644 --- a/test/unit/app/managers/test_NotificationManager.py +++ b/test/unit/app/managers/test_NotificationManager.py @@ -524,8 +524,9 @@ def _create_test_group(self, name: str, users: List[User], roles: List[Role]): sa_session = self.trans.sa_session group = Group(name=name) sa_session.add(group) - self.trans.app.security_agent.set_entity_group_associations(groups=[group], roles=roles, users=users) - sa_session.flush() + user_ids = [user.id for user in users] + role_ids = [role.id for role in roles] + self.trans.app.security_agent.set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=role_ids) return group def _create_test_role(self, name: str, users: List[User], groups: List[Group]): diff --git a/test/unit/data/model/__init__.py b/test/unit/data/model/__init__.py index e69de29bb2d1..7d0a1eeb1f8d 100644 --- a/test/unit/data/model/__init__.py +++ b/test/unit/data/model/__init__.py @@ -0,0 +1,10 @@ +PRIVATE_OBJECT_STORE_ID = "my_private_data" + + +class MockObjectStore: + + def is_private(self, object): + if object.object_store_id == PRIVATE_OBJECT_STORE_ID: + return True + else: + return False diff --git a/test/unit/data/model/conftest.py b/test/unit/data/model/conftest.py index 26ea7d8b7cc2..f49454266001 100644 --- a/test/unit/data/model/conftest.py +++ b/test/unit/data/model/conftest.py @@ -1,7 +1,5 @@ import contextlib import os -import random -import string import tempfile import uuid @@ -10,6 +8,10 @@ from sqlalchemy.orm import Session from galaxy import model as m +from galaxy.model.unittest_utils.utils import ( + random_email, + random_str, +) @pytest.fixture @@ -117,6 +119,19 @@ def f(**kwd): return f +@pytest.fixture +def make_default_user_permissions(session, make_user, make_role): + def f(**kwd): + kwd["user"] = kwd.get("user") or make_user() + kwd["action"] = kwd.get("action") or random_str() + kwd["role"] = kwd.get("role") or make_role() + model = m.DefaultUserPermissions(**kwd) + write_to_db(session, model) + return model + + return f + + @pytest.fixture def make_event(session): def f(**kwd): @@ -149,6 +164,26 @@ def f(**kwd): return f +@pytest.fixture +def make_group(session): + def f(**kwd): + model = m.Group(**kwd) + write_to_db(session, model) + return model + + return f + + +@pytest.fixture +def make_group_role_association(session): + def f(group, role): + model = m.GroupRoleAssociation(group, role) + write_to_db(session, model) + return model + + return f + + @pytest.fixture def make_hda(session, make_history): def f(**kwd): @@ -395,6 +430,16 @@ def f(assoc_class, user, item, rating): return f +@pytest.fixture +def make_user_group_association(session): + def f(user, group): + model = m.UserGroupAssociation(user, group) + 
write_to_db(session, model) + return model + + return f + + @pytest.fixture def make_user_role_association(session): def f(user, role): @@ -449,17 +494,6 @@ def transaction(session): yield -def random_str() -> str: - alphabet = string.ascii_lowercase + string.digits - size = random.randint(5, 10) - return "".join(random.choices(alphabet, k=size)) - - -def random_email() -> str: - text = random_str() - return f"{text}@galaxy.testing" - - def write_to_db(session, model) -> None: with transaction(session): session.add(model) diff --git a/test/unit/data/model/db/__init__.py b/test/unit/data/model/db/__init__.py index 13a615086ebe..7b083aa84acd 100644 --- a/test/unit/data/model/db/__init__.py +++ b/test/unit/data/model/db/__init__.py @@ -3,22 +3,11 @@ namedtuple, ) -PRIVATE_OBJECT_STORE_ID = "my_private_data" - MockTransaction = namedtuple("MockTransaction", "user") -class MockObjectStore: - - def is_private(self, object): - if object.object_store_id == PRIVATE_OBJECT_STORE_ID: - return True - else: - return False - - -def verify_items(items, expected_items): +def have_same_elements(items, expected_items): """ Assert that items and expected_items contain the same elements. """ - assert Counter(items) == Counter(expected_items) + return Counter(items) == Counter(expected_items) diff --git a/test/unit/data/model/db/conftest.py b/test/unit/data/model/db/conftest.py index d36a38e71ace..8cd81ed50904 100644 --- a/test/unit/data/model/db/conftest.py +++ b/test/unit/data/model/db/conftest.py @@ -8,12 +8,13 @@ create_engine, text, ) +from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session from galaxy import model as m from galaxy.datatypes.registry import Registry as DatatypesRegistry from galaxy.model.triggers.update_audit_table import install as install_timestamp_triggers -from . import MockObjectStore +from .. import MockObjectStore if TYPE_CHECKING: from sqlalchemy.engine import Engine @@ -35,7 +36,11 @@ def engine(db_url: str) -> "Engine": @pytest.fixture def session(engine: "Engine") -> Session: - return Session(engine) + session = Session(engine) + # For sqlite, we need to explicitly enable foreign key constraints. + if engine.name == "sqlite": + session.execute(text("PRAGMA foreign_keys = ON;")) + return session @pytest.fixture(autouse=True, scope="module") @@ -58,12 +63,35 @@ def init_datatypes() -> None: @pytest.fixture(autouse=True) -def clear_database(engine: "Engine") -> "Generator": +def clear_database(engine: "Engine", session) -> "Generator": """Delete all rows from all tables. Called after each test.""" yield - with engine.begin() as conn: - for table in m.mapper_registry.metadata.tables: - # Unless db is sqlite, disable foreign key constraints to delete out of order - if engine.name != "sqlite": - conn.execute(text(f"ALTER TABLE {table} DISABLE TRIGGER ALL")) - conn.execute(text(f"DELETE FROM {table}")) + + # If a test left an open transaction, rollback to prevent database locking. + if session.in_transaction(): + session.rollback() + + with engine.connect() as conn: + if engine.name == "sqlite": + conn.execute(text("PRAGMA foreign_keys = OFF;")) + for table in m.mapper_registry.metadata.tables: + conn.execute(text(f"DELETE FROM {table}")) + else: + # For postgres, we can disable foreign key constraints with this statement: + # conn.execute(text(f"ALTER TABLE {table} DISABLE TRIGGER ALL")) + # However, unless running as superuser, this will raise an error when trying + # to disable a system trigger.
Disabling USER triggers instead of ALL + # won't work because the USER option excludes foreign key constraints. + # The following is an alternative: we do multiple passes until all tables have been cleared: + to_delete = list(m.mapper_registry.metadata.tables) + failed = [] + while to_delete: + for table in to_delete: + try: + conn.execute(text(f"DELETE FROM {table}")) + except IntegrityError: + failed.append(table) + conn.rollback() + to_delete, failed = failed, [] + + conn.commit() diff --git a/test/unit/data/model/db/test_libraries.py b/test/unit/data/model/db/test_libraries.py index 80dae0b15b50..3bba9c03b610 100644 --- a/test/unit/data/model/db/test_libraries.py +++ b/test/unit/data/model/db/test_libraries.py @@ -5,7 +5,7 @@ get_library_ids, get_library_permissions_by_role, ) -from . import verify_items +from . import have_same_elements def test_get_library_ids(session, make_library, make_library_permissions): @@ -18,7 +18,7 @@ def test_get_library_ids(session, make_library, make_library_permissions): ids = get_library_ids(session, "b").all() expected = [l2.id, l3.id] - verify_items(ids, expected) + have_same_elements(ids, expected) def test_get_library_permissions_by_role(session, make_role, make_library_permissions): @@ -31,7 +31,7 @@ def test_get_library_permissions_by_role(session, make_role, make_library_permis lp_roles = [lp.role for lp in lps] expected = [r1, r2] - verify_items(lp_roles, expected) + have_same_elements(lp_roles, expected) def test_get_libraries_for_admins(session, make_library): @@ -44,14 +44,14 @@ def test_get_libraries_for_admins(session, make_library): libs_deleted = get_libraries_for_admins(session, True).all() expected = [libs[0], libs[1]] - verify_items(libs_deleted, expected) + have_same_elements(libs_deleted, expected) libs_not_deleted = get_libraries_for_admins(session, False).all() expected = [libs[2], libs[3], libs[4]] - verify_items(libs_not_deleted, expected) + have_same_elements(libs_not_deleted, expected) libs_all = get_libraries_for_admins(session, None).all() - verify_items(libs_all, libs) + have_same_elements(libs_all, libs) def test_get_libraries_for_admins__ordering(session, make_library): @@ -75,7 +75,7 @@ def test_get_libraries_for_non_admins(session, make_library): # Expected: l1 (not deleted, not restricted), l2 (not deleted, restricted but accessible) # Not returned: l3 (not deleted but restricted), l4 (deleted) expected = [l1, l2] - verify_items(allowed, expected) + have_same_elements(allowed, expected) def test_get_libraries_for_admins_non_admins__ordering(session, make_library): diff --git a/test/unit/data/model/db/test_misc.py b/test/unit/data/model/db/test_misc.py index b8ef3fe5cf0c..9dadda4c326a 100644 --- a/test/unit/data/model/db/test_misc.py +++ b/test/unit/data/model/db/test_misc.py @@ -5,10 +5,8 @@ from galaxy import model as m from galaxy.model.unittest_utils.db_helpers import get_hdca_by_name -from . import ( - MockTransaction, - PRIVATE_OBJECT_STORE_ID, -) +from . import MockTransaction +from .. import PRIVATE_OBJECT_STORE_ID def test_history_update(make_history, make_hda, session): diff --git a/test/unit/data/model/db/test_role.py b/test/unit/data/model/db/test_role.py index 59daf8a5a8ea..213314c5c609 100644 --- a/test/unit/data/model/db/test_role.py +++ b/test/unit/data/model/db/test_role.py @@ -4,7 +4,7 @@ get_private_user_role, get_roles_by_ids, ) -from . import verify_items +from . 
import have_same_elements def test_get_npns_roles(session, make_role): @@ -18,7 +18,7 @@ def test_get_npns_roles(session, make_role): # Expected: r4, r5 # Not returned: r1: deleted, r2: private, r3: sharing expected = [r4, r5] - verify_items(roles, expected) + have_same_elements(roles, expected) def test_get_private_user_role(session, make_user, make_role, make_user_role_association): @@ -41,4 +41,4 @@ def test_get_roles_by_ids(session, make_role): roles2 = get_roles_by_ids(session, ids) expected = [r1, r2, r3] - verify_items(roles2, expected) + have_same_elements(roles2, expected) diff --git a/test/unit/data/model/db/test_security.py b/test/unit/data/model/db/test_security.py new file mode 100644 index 000000000000..e85bbe694d08 --- /dev/null +++ b/test/unit/data/model/db/test_security.py @@ -0,0 +1,941 @@ +import pytest + +from galaxy.exceptions import RequestParameterInvalidException +from galaxy.model import ( + Group, + Role, + User, +) +from galaxy.model.security import GalaxyRBACAgent +from . import have_same_elements + + +@pytest.fixture +def make_user_and_role(session, make_user, make_role, make_user_role_association): + """ + Each user created in Galaxy is assumed to have a private role, such that role.name == user.email. + Since we are testing user/group/role associations here, to ensure the correct state of the test database, + we need to ensure that a user is never created without a corresponding private role. + Therefore, we use this fixture instead of make_user (which only creates a user). + """ + + def f(**kwd): + user = make_user() + private_role = make_role(name=user.email, type=Role.types.PRIVATE) + make_user_role_association(user, private_role) + return user, private_role + + return f + + +def test_private_user_role_assoc_not_affected_by_setting_user_roles(session, make_user_and_role): + # Create user with a private role + user, private_role = make_user_and_role() + assert user.email == private_role.name + verify_user_associations(user, [], [private_role]) # the only existing association is with the private role + + # Update users's email so it's no longer the same as the private role's name. 
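# A minimal sketch of the private-role invariant that the make_user_and_role fixture above
# maintains: every Galaxy user carries a private role whose name equals the user's email.
# The `session` object (an open SQLAlchemy session bound to the Galaxy model, as provided by
# the db fixtures in this diff) and the example email are assumptions, not part of the change.
from galaxy import model as m

user = m.User(email="alice@example.org", password="secret")
private_role = m.Role(name=user.email, type=m.Role.types.PRIVATE)
session.add_all([user, private_role, m.UserRoleAssociation(user, private_role)])
session.commit()
assert [assoc.role for assoc in user.roles] == [private_role]  # only the private role is associated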
+ user.email = user.email + "updated" + session.add(user) + session.commit() + assert user.email != private_role.name + + # Delete user roles + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, role_ids=[]) + # association with private role is preserved + verify_user_associations(user, [], [private_role]) + + +def test_private_user_role_assoc_not_affected_by_setting_role_users(session, make_user_and_role): + # Create user with a private role + user, private_role = make_user_and_role() + assert user.email == private_role.name + verify_user_associations(user, [], [private_role]) # the only existing association is with the private role + + # Update users's email + user.email = user.email + "updated" + session.add(user) + session.commit() + assert user.email != private_role.name + + # Update role users + GalaxyRBACAgent(session).set_role_user_and_group_associations(private_role, user_ids=[]) + # association of private role with user is preserved + verify_role_associations(private_role, [user], []) + + +def test_cannot_assign_private_roles(session, make_user_and_role, make_role): + user, private_role1 = make_user_and_role() + _, private_role2 = make_user_and_role() + new_role = make_role() + verify_user_associations(user, [], [private_role1]) # the only existing association is with the private role + + # Try to assign 2 more roles: regular role + another private role + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, role_ids=[new_role.id, private_role2.id]) + # Only regular role has been added: other private role ignored; original private role still assigned + verify_user_associations(user, [], [private_role1, new_role]) + + +class TestSetGroupUserAndRoleAssociations: + + def test_add_associations_to_existing_group(self, session, make_user_and_role, make_role, make_group): + """ + State: group exists in database, has no user and role associations. + Action: add new associations. + """ + group = make_group() + users = [make_user_and_role()[0] for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # users and roles for creating associations + users_to_add = [users[0], users[2], users[4]] + user_ids = [u.id for u in users_to_add] + roles_to_add = [roles[1], roles[3]] + role_ids = [r.id for r in roles_to_add] + + # verify no preexisting associations + verify_group_associations(group, [], []) + + # set associations + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=role_ids) + + # verify new associations + verify_group_associations(group, users_to_add, roles_to_add) + + def test_add_associations_to_new_group(self, session, make_user_and_role, make_role): + """ + State: group does NOT exist in database, has no user and role associations. + Action: add new associations. 
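# Note (illustrative): the `users` and `roles` collections on Group hold association objects
# rather than User/Role rows, which is why the verify_* helpers at the bottom of this module
# unwrap them before comparing. `group` is assumed to be a persisted Group with associations.
member_users = [assoc.user for assoc in group.users]
member_roles = [assoc.role for assoc in group.roles]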
+ """ + group = Group() + session.add(group) + assert group.id is None # group does not exist in database + users = [make_user_and_role()[0] for _ in range(5)] # type: ignore[unreachable] + roles = [make_role() for _ in range(5)] + + # users and roles for creating associations + users_to_add = [users[0], users[2], users[4]] + user_ids = [u.id for u in users_to_add] + roles_to_add = [roles[1], roles[3]] + role_ids = [r.id for r in roles_to_add] + + # set associations + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=role_ids) + + # verify new associations + verify_group_associations(group, users_to_add, roles_to_add) + + def test_update_associations( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_group_role_association, + ): + """ + State: group exists in database AND has user and role associations. + Action: update associations (add some/drop some). + Expect: old associations are REPLACED by new associations. + """ + group = make_group() + users = [make_user_and_role()[0] for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[0], users[2]] + roles_to_load = [roles[1], roles[3]] + for user in users_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_group_role_association(group, role) + verify_group_associations(group, users_to_load, roles_to_load) + + # users and roles for creating new associations + new_users_to_add = [users[0], users[1], users[3]] + user_ids = [u.id for u in new_users_to_add] + new_roles_to_add = [roles[2]] + role_ids = [r.id for r in new_roles_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(users_to_load, new_users_to_add) + assert not have_same_elements(roles_to_load, new_roles_to_add) + + # set associations + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=role_ids) + + # verify new associations + verify_group_associations(group, new_users_to_add, new_roles_to_add) + + def test_drop_associations( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_group_role_association, + ): + """ + State: group exists in database AND has user and role associations. + Action: drop all associations. + """ + group = make_group() + users = [make_user_and_role()[0] for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[0], users[2]] + roles_to_load = [roles[1], roles[3]] + for user in users_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_group_role_association(group, role) + verify_group_associations(group, users_to_load, roles_to_load) + + # drop associations + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=[], role_ids=[]) + + # verify associations dropped + verify_group_associations(group, [], []) + + def test_invalid_user(self, session, make_user_and_role, make_role, make_group): + """ + State: group exists in database, has no user and role associations. + Action: try to add several associations, last one having an invalid user id. + Expect: no associations are added, appropriate error is raised. 
+ """ + group = make_group() + users = [make_user_and_role()[0] for _ in range(5)] + + # users for creating associations + user_ids = [users[0].id, -1] # first is valid, second is invalid + + # verify no preexisting associations + assert len(group.users) == 0 + + # try to set associations + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids) + + # verify no change + assert len(group.users) == 0 + + def test_invalid_role(self, session, make_role, make_group): + """ + state: group exists in database, has no user and role associations. + action: try to add several associations, last one having an invalid role id. + expect: no associations are added, appropriate error is raised. + """ + group = make_group() + roles = [make_role() for _ in range(5)] + + # roles for creating associations + role_ids = [roles[0].id, -1] # first is valid, second is invalid + + # verify no preexisting associations + assert len(group.roles) == 0 + + # try to set associations + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, role_ids=role_ids) + + # verify no change + assert len(group.roles) == 0 + + def test_duplicate_user( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_group_role_association, + ): + """ + State: group exists in database and has user and role associations. + Action: try update user and role associations including a duplicate user + Expect: error raised, no change is made to group users and group roles. + """ + group = make_group() + users = [make_user_and_role()[0] for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[0], users[2]] + roles_to_load = [roles[1], roles[3]] + for user in users_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_group_role_association(group, role) + verify_group_associations(group, users_to_load, roles_to_load) + + # users and roles for creating new associations + new_users_to_add = users + [users[0]] # include a duplicate user + user_ids = [u.id for u in new_users_to_add] + + new_roles_to_add = roles # NO duplice roles + role_ids = [r.id for r in new_roles_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(users_to_load, new_users_to_add) + assert not have_same_elements(roles_to_load, new_roles_to_add) + + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=role_ids) + + # verify associations not updated + verify_group_associations(group, users_to_load, roles_to_load) + + def test_duplicate_role( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_group_role_association, + ): + """ + State: group exists in database and has user and role associations. + Action: try update user and role associations including a duplicate role + Expect: error raised, no change is made to group users and group roles. 
+ """ + group = make_group() + users = [make_user_and_role()[0] for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[0], users[2]] + roles_to_load = [roles[1], roles[3]] + for user in users_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_group_role_association(group, role) + verify_group_associations(group, users_to_load, roles_to_load) + + # users and roles for creating new associations + new_users_to_add = users # NO duplicate users + user_ids = [u.id for u in new_users_to_add] + + new_roles_to_add = roles + [roles[0]] # include a duplicate role + role_ids = [r.id for r in new_roles_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(users_to_load, new_users_to_add) + assert not have_same_elements(roles_to_load, new_roles_to_add) + + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_group_user_and_role_associations(group, user_ids=user_ids, role_ids=role_ids) + + # verify associations not updated + verify_group_associations(group, users_to_load, roles_to_load) + + +class TestSetUserGroupAndRoleAssociations: + """ + Note: a user should always have a private role which is not affected + by modifying a user's group associations or role associations. + """ + + def test_add_associations_to_existing_user(self, session, make_user_and_role, make_role, make_group): + """ + State: user exists in database, has no group and only one private role association. + Action: add new associations. + """ + user, private_role = make_user_and_role() + groups = [make_group() for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # groups and roles for creating associations + groups_to_add = [groups[0], groups[2], groups[4]] + group_ids = [g.id for g in groups_to_add] + roles_to_add = [roles[1], roles[3]] + role_ids = [r.id for r in roles_to_add] + + # verify preexisting associations + verify_user_associations(user, [], [private_role]) + + # set associations + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=group_ids, role_ids=role_ids) + + # verify new associations + verify_user_associations(user, groups_to_add, roles_to_add + [private_role]) + + def test_add_associations_to_new_user(self, session, make_role, make_group): + """ + State: user does NOT exist in database, has no group and role associations. + Action: add new associations. + """ + user = User(email="foo@foo.com", password="password") + # We are not creating a private role and a user-role association with that role because that would result in + # adding the user to the database before calling the method under test, whereas the test is intended to verify + # correct processing of a user that has NOT been saved to the database. 
+ + session.add(user) + assert user.id is None # user does not exist in database + groups = [make_group() for _ in range(5)] # type: ignore[unreachable] + roles = [make_role() for _ in range(5)] + + # groups and roles for creating associations + groups_to_add = [groups[0], groups[2], groups[4]] + group_ids = [g.id for g in groups_to_add] + roles_to_add = [roles[1], roles[3]] + role_ids = [r.id for r in roles_to_add] + + # set associations + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=group_ids, role_ids=role_ids) + + # verify new associations + verify_user_associations(user, groups_to_add, roles_to_add) + + def test_update_associations( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_user_role_association, + ): + """ + State: user exists in database AND has group and role associations. + Action: update associations (add some/drop some). + Expect: old associations are REPLACED by new associations. + """ + user, private_role = make_user_and_role() + groups = [make_group() for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + groups_to_load = [groups[0], groups[2]] + roles_to_load = [roles[1], roles[3]] + for group in groups_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_user_role_association(user, role) + verify_user_associations(user, groups_to_load, roles_to_load + [private_role]) + + # groups and roles for creating new associations + new_groups_to_add = [groups[0], groups[1], groups[3]] + group_ids = [g.id for g in new_groups_to_add] + new_roles_to_add = [roles[2]] + role_ids = [r.id for r in new_roles_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(groups_to_load, new_groups_to_add) + assert not have_same_elements(roles_to_load, new_roles_to_add) + + # set associations + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=group_ids, role_ids=role_ids) + # verify new associations + verify_user_associations(user, new_groups_to_add, new_roles_to_add + [private_role]) + + def test_drop_associations( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_user_role_association, + ): + """ + State: user exists in database AND has group and role associations. + Action: drop all associations. + """ + user, private_role = make_user_and_role() + groups = [make_group() for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + groups_to_load = [groups[0], groups[2]] + roles_to_load = [roles[1], roles[3]] + for group in groups_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_user_role_association(user, role) + verify_user_associations(user, groups_to_load, roles_to_load + [private_role]) + + # drop associations + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=[], role_ids=[]) + + # verify associations dropped + verify_user_associations(user, [], [private_role]) + + def test_invalid_group(self, session, make_user_and_role, make_group): + """ + State: user exists in database, has no group and only one private role association. + Action: try to add several associations, last one having an invalid group id. + Expect: no associations are added, appropriate error is raised. 
+ """ + user, private_role = make_user_and_role() + groups = [make_group() for _ in range(5)] + + # groups for creating associations + group_ids = [groups[0].id, -1] # first is valid, second is invalid + + # verify no preexisting associations + assert len(user.groups) == 0 + + # try to set associations + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=group_ids) + + # verify no change + assert len(user.groups) == 0 + + def test_invalid_role(self, session, make_user_and_role, make_role): + """ + State: user exists in database, has no group and only one private role association. + action: try to add several associations, last one having an invalid role id. + expect: no associations are added, appropriate error is raised. + """ + user, private_role = make_user_and_role() + roles = [make_role() for _ in range(5)] + + # roles for creating associations + role_ids = [roles[0].id, -1] # first is valid, second is invalid + + # verify no preexisting associations + assert len(user.roles) == 1 # one is the private role association + + # try to set associations + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, role_ids=role_ids) + + # verify no change + assert len(user.roles) == 1 # one is the private role association + + def test_duplicate_group( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_user_role_association, + ): + """ + State: user exists in database and has group and role associations. + Action: try update group and role associations including a duplicate group + Expect: error raised, no change is made to user groups and user roles. + """ + user, private_role = make_user_and_role() + groups = [make_group() for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + groups_to_load = [groups[0], groups[2]] + roles_to_load = [roles[1], roles[3]] + for group in groups_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_user_role_association(user, role) + verify_user_associations(user, groups_to_load, roles_to_load + [private_role]) + + # groups and roles for creating new associations + new_groups_to_add = groups + [groups[0]] # include a duplicate group + group_ids = [g.id for g in new_groups_to_add] + + new_roles_to_add = roles # NO duplicate roles + role_ids = [r.id for r in new_roles_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(groups_to_load, new_groups_to_add) + assert not have_same_elements(roles_to_load, new_roles_to_add) + + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=group_ids, role_ids=role_ids) + + # verify associations not updated + verify_user_associations(user, groups_to_load, roles_to_load + [private_role]) + + def test_duplicate_role( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_group_association, + make_user_role_association, + ): + """ + State: user exists in database and has group and role associations. + Action: try update group and role associations including a duplicate role + Expect: error raised, no change is made to user groups and user roles. 
+ """ + user, private_role = make_user_and_role() + groups = [make_group() for _ in range(5)] + roles = [make_role() for _ in range(5)] + + # load and verify existing associations + groups_to_load = [groups[0], groups[2]] + roles_to_load = [roles[1], roles[3]] + for group in groups_to_load: + make_user_group_association(user, group) + for role in roles_to_load: + make_user_role_association(user, role) + verify_user_associations(user, groups_to_load, roles_to_load + [private_role]) + + # groups and roles for creating new associations + new_groups_to_add = groups # NO duplicate groups + group_ids = [g.id for g in new_groups_to_add] + + new_roles_to_add = roles + [roles[0]] # include a duplicate role + role_ids = [r.id for r in new_roles_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(groups_to_load, new_groups_to_add) + assert not have_same_elements(roles_to_load, new_roles_to_add) + + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_user_group_and_role_associations(user, group_ids=group_ids, role_ids=role_ids) + + # verify associations not updated + verify_user_associations(user, groups_to_load, roles_to_load + [private_role]) + + +class TestSetRoleUserAndGroupAssociations: + """ + Note: a user should always have a private role which is not affected + by modifying a user's group associations or role associations. + """ + + def test_add_associations_to_existing_role(self, session, make_user_and_role, make_role, make_group): + """ + State: role exists in database, has no group and no user associations. + Action: add new associations. + """ + role = make_role() + users = [make_user_and_role()[0] for _ in range(5)] + groups = [make_group() for _ in range(5)] + + # users and groups for creating associations + users_to_add = [users[0], users[2], users[4]] + user_ids = [u.id for u in users_to_add] + groups_to_add = [groups[0], groups[2], groups[4]] + group_ids = [g.id for g in groups_to_add] + + # verify preexisting associations + verify_role_associations(role, [], []) + + # set associations + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=group_ids) + + # verify new associations + verify_role_associations(role, users_to_add, groups_to_add) + + def test_add_associations_to_new_role(self, session, make_user_and_role, make_group): + """ + State: user does NOT exist in database, has no group and role associations. + Action: add new associations. + """ + role = Role() + session.add(role) + assert role.id is None # role does not exist in database + users = [make_user_and_role()[0] for _ in range(5)] # type: ignore[unreachable] + groups = [make_group() for _ in range(5)] + + # users and groups for creating associations + users_to_add = [users[0], users[2], users[4]] + user_ids = [u.id for u in users_to_add] + groups_to_add = [groups[0], groups[2], groups[4]] + group_ids = [g.id for g in groups_to_add] + + # set associations + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=group_ids) + + # verify new associations + verify_role_associations(role, users_to_add, groups_to_add) + + def test_update_associations( + self, + session, + make_user_and_role, + make_role, + make_group, + make_user_role_association, + make_group_role_association, + ): + """ + State: role exists in database AND has user and group associations. + Action: update associations (add some/drop some). 
+ Expect: old associations are REPLACED by new associations. + """ + role = make_role() + users = [make_user_and_role()[0] for _ in range(5)] + groups = [make_group() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[1], users[3]] + groups_to_load = [groups[0], groups[2]] + for user in users_to_load: + make_user_role_association(user, role) + for group in groups_to_load: + make_group_role_association(group, role) + verify_role_associations(role, users_to_load, groups_to_load) + + # users and groups for creating new associations + new_users_to_add = [users[0], users[2], users[4]] + user_ids = [u.id for u in new_users_to_add] + new_groups_to_add = [groups[0], groups[2], groups[4]] + group_ids = [g.id for g in new_groups_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(users_to_load, new_users_to_add) + assert not have_same_elements(groups_to_load, new_groups_to_add) + + # set associations + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=group_ids) + # verify new associations + verify_role_associations(role, new_users_to_add, new_groups_to_add) + + def test_drop_associations( + self, + session, + make_user_and_role, + make_role, + make_group, + make_group_role_association, + make_user_role_association, + ): + """ + State: role exists in database AND has user and group associations. + Action: drop all associations. + """ + role = make_role() + users = [make_user_and_role()[0] for _ in range(5)] + groups = [make_group() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[1], users[3]] + groups_to_load = [groups[0], groups[2]] + for user in users_to_load: + make_user_role_association(user, role) + for group in groups_to_load: + make_group_role_association(group, role) + verify_role_associations(role, users_to_load, groups_to_load) + + # drop associations + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=[], group_ids=[]) + + # verify associations dropped + verify_role_associations(role, [], []) + + def test_invalid_user(self, session, make_role, make_user_and_role): + """ + State: role exists in database, has no user and group eassociations. + action: try to add several associations, last one having an invalid user id. + expect: no associations are added, appropriate error is raised. + """ + role = make_role() + users = [make_user_and_role()[0] for _ in range(5)] + + # users for creating associations + user_ids = [users[0].id, -1] # first is valid, second is invalid + + # verify no preexisting associations + assert len(role.users) == 0 + + # try to set associations + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids) + + # verify no change + assert len(role.users) == 0 + + def test_invalid_group(self, session, make_role, make_group): + """ + State: role exists in database, has no user and group eassociations. + Action: try to add several associations, last one having an invalid group id. + Expect: no associations are added, appropriate error is raised. 
+ """ + role = make_role() + groups = [make_group() for _ in range(5)] + + # groups for creating associations + group_ids = [groups[0].id, -1] # first is valid, second is invalid + + # verify no preexisting associations + assert len(role.groups) == 0 + + # try to set associations + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, group_ids=group_ids) + + # verify no change + assert len(role.groups) == 0 + + def test_duplicate_user( + self, + session, + make_user_and_role, + make_role, + make_group, + make_group_role_association, + make_user_role_association, + ): + """ + State: role exists in database and has group and user associations. + Action: try update group and user associations including a duplicate user + Expect: error raised, no change is made to role groups and role users. + """ + role = make_role() + users = [make_user_and_role()[0] for _ in range(5)] + groups = [make_group() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[1], users[3]] + groups_to_load = [groups[0], groups[2]] + for user in users_to_load: + make_user_role_association(user, role) + for group in groups_to_load: + make_group_role_association(group, role) + + verify_role_associations(role, users_to_load, groups_to_load) + + # users and groups for creating new associations + new_users_to_add = users + [users[0]] # include a duplicate user + user_ids = [u.id for u in new_users_to_add] + + new_groups_to_add = groups # NO duplicate groups + group_ids = [g.id for g in new_groups_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(users_to_load, new_users_to_add) + assert not have_same_elements(groups_to_load, new_groups_to_add) + + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=group_ids) + + # verify associations not updated + verify_role_associations(role, users_to_load, groups_to_load) + + def test_duplicate_group( + self, + session, + make_user_and_role, + make_role, + make_group, + make_group_role_association, + make_user_role_association, + ): + """ + State: role exists in database and has group and user associations. + Action: try update group and user associations including a duplicate group + Expect: error raised, no change is made to role groups and role users. 
+ """ + role = make_role() + users = [make_user_and_role()[0] for _ in range(5)] + groups = [make_group() for _ in range(5)] + + # load and verify existing associations + users_to_load = [users[1], users[3]] + groups_to_load = [groups[0], groups[2]] + for user in users_to_load: + make_user_role_association(user, role) + for group in groups_to_load: + make_group_role_association(group, role) + + verify_role_associations(role, users_to_load, groups_to_load) + + # users and groups for creating new associations + new_users_to_add = users # NO duplicate users + user_ids = [u.id for u in new_users_to_add] + + new_groups_to_add = groups + [groups[0]] # include a duplicate group + group_ids = [g.id for g in new_groups_to_add] + + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(users_to_load, new_users_to_add) + assert not have_same_elements(groups_to_load, new_groups_to_add) + + with pytest.raises(RequestParameterInvalidException): + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids, group_ids=group_ids) + + # verify associations not updated + verify_role_associations(role, users_to_load, groups_to_load) + + def test_delete_default_user_permissions_and_default_history_permissions( + self, + session, + make_role, + make_user_and_role, + make_user_role_association, + make_default_user_permissions, + make_default_history_permissions, + make_history, + ): + """ + When setting role users, we check check previously associated users to: + - delete DefaultUserPermissions for users that are being removed from this role; + - delete DefaultHistoryPermissions for histories associated with users that are being removed from this role. + """ + role = make_role() + users = [make_user_and_role()[0] for _ in range(5)] + # load and verify existing associations + user1, user2 = users[0], users[1] + users_to_load = [user1, user2] + for user in users_to_load: + make_user_role_association(user, role) + verify_role_associations(role, users_to_load, []) + + # users and groups for creating new associations + new_users_to_add = [users[1], users[2]] # REMOVE users[0], LEAVE users[1], ADD users[2] + user_ids = [u.id for u in new_users_to_add] + # sanity check: ensure we are trying to change existing associations + assert not have_same_elements(users_to_load, new_users_to_add) + + # load default user permissions + dup1 = make_default_user_permissions(user=user1, role=role) + dup2 = make_default_user_permissions(user=user2, role=role) + assert have_same_elements(user1.default_permissions, [dup1]) + assert have_same_elements(user2.default_permissions, [dup2]) + + # load and verify default history permissions for users associated with this role + history1, history2 = make_history(user=user1), make_history(user=user1) # 2 histories for user 1 + history3 = make_history(user=user2) # 1 history for user 2 + dhp1 = make_default_history_permissions(history=history1, role=role) + dhp2 = make_default_history_permissions(history=history2, role=role) + dhp3 = make_default_history_permissions(history=history3, role=role) + assert have_same_elements(history1.default_permissions, [dhp1]) + assert have_same_elements(history2.default_permissions, [dhp2]) + assert have_same_elements(history3.default_permissions, [dhp3]) + + # now update role users + GalaxyRBACAgent(session).set_role_user_and_group_associations(role, user_ids=user_ids) + + # verify user role associations + verify_role_associations(role, new_users_to_add, []) + + # verify default user 
permissions + assert have_same_elements(user1.default_permissions, []) # user1 was removed from role + assert have_same_elements(user2.default_permissions, [dup2]) # user2 was NOT removed from role + + # verify default history permissions + assert have_same_elements(history1.default_permissions, []) + assert have_same_elements(history2.default_permissions, []) + assert have_same_elements(history3.default_permissions, [dhp3]) + + +def verify_group_associations(group, expected_users, expected_roles): + new_group_users = [assoc.user for assoc in group.users] + new_group_roles = [assoc.role for assoc in group.roles] + assert have_same_elements(new_group_users, expected_users) + assert have_same_elements(new_group_roles, expected_roles) + + +def verify_user_associations(user, expected_groups, expected_roles): + new_user_groups = [assoc.group for assoc in user.groups] + new_user_roles = [assoc.role for assoc in user.roles] + assert have_same_elements(new_user_groups, expected_groups) + assert have_same_elements(new_user_roles, expected_roles) + + +def verify_role_associations(role, expected_users, expected_groups): + new_role_users = [assoc.user for assoc in role.users] + new_role_groups = [assoc.group for assoc in role.groups] + assert have_same_elements(new_role_users, expected_users) + assert have_same_elements(new_role_groups, expected_groups) diff --git a/test/unit/data/model/db/test_user.py b/test/unit/data/model/db/test_user.py index 5085a71b8b42..87d136a125a4 100644 --- a/test/unit/data/model/db/test_user.py +++ b/test/unit/data/model/db/test_user.py @@ -7,7 +7,7 @@ get_users_by_ids, get_users_for_index, ) -from . import verify_items +from . import have_same_elements @pytest.fixture @@ -42,7 +42,7 @@ def test_get_users_by_ids(session, make_random_users): users2 = get_users_by_ids(session, ids) expected = [u1, u2, u3] - verify_items(users2, expected) + have_same_elements(users2, expected) def test_get_users_for_index(session, make_user): @@ -54,25 +54,25 @@ def test_get_users_for_index(session, make_user): u6 = make_user(email="z", username="i") users = get_users_for_index(session, False, f_email="a", expose_user_email=True) - verify_items(users, [u1]) + have_same_elements(users, [u1]) users = get_users_for_index(session, False, f_email="c", is_admin=True) - verify_items(users, [u2]) + have_same_elements(users, [u2]) users = get_users_for_index(session, False, f_name="f", expose_user_name=True) - verify_items(users, [u3]) + have_same_elements(users, [u3]) users = get_users_for_index(session, False, f_name="h", is_admin=True) - verify_items(users, [u4]) + have_same_elements(users, [u4]) users = get_users_for_index(session, False, f_any="i", is_admin=True) - verify_items(users, [u5, u6]) + have_same_elements(users, [u5, u6]) users = get_users_for_index(session, False, f_any="i", expose_user_email=True, expose_user_name=True) - verify_items(users, [u5, u6]) + have_same_elements(users, [u5, u6]) users = get_users_for_index(session, False, f_any="i", expose_user_email=True) - verify_items(users, [u5]) + have_same_elements(users, [u5]) users = get_users_for_index(session, False, f_any="i", expose_user_name=True) - verify_items(users, [u6]) + have_same_elements(users, [u6]) u1.deleted = True users = get_users_for_index(session, True) - verify_items(users, [u1]) + have_same_elements(users, [u1]) def test_username_is_unique(make_user): diff --git a/test/unit/data/model/migration_fixes/__init__.py b/test/unit/data/model/migration_fixes/__init__.py new file mode 100644 index 
000000000000..e69de29bb2d1 diff --git a/test/unit/data/model/migration_fixes/conftest.py b/test/unit/data/model/migration_fixes/conftest.py new file mode 100644 index 000000000000..21edbb3a49fc --- /dev/null +++ b/test/unit/data/model/migration_fixes/conftest.py @@ -0,0 +1,41 @@ +import tempfile +from typing import TYPE_CHECKING + +import pytest +from sqlalchemy import create_engine +from sqlalchemy.orm import Session + +if TYPE_CHECKING: + from sqlalchemy.engine import Engine + +from galaxy.model.unittest_utils.model_testing_utils import ( + _generate_unique_database_name, + _make_sqlite_db_url, +) + + +@pytest.fixture(scope="module") +def sqlite_url_factory(): + """Return a function that generates a sqlite url""" + + def url(): + database = _generate_unique_database_name() + return _make_sqlite_db_url(tmp_dir, database) + + with tempfile.TemporaryDirectory() as tmp_dir: + yield url + + +@pytest.fixture(scope="module") +def db_url(sqlite_url_factory): # noqa: F811 + return sqlite_url_factory() + + +@pytest.fixture() +def engine(db_url: str) -> "Engine": + return create_engine(db_url) + + +@pytest.fixture +def session(engine: "Engine") -> Session: + return Session(engine) diff --git a/test/unit/data/model/migration_fixes/test_migrations.py b/test/unit/data/model/migration_fixes/test_migrations.py new file mode 100644 index 000000000000..0c6c8979d8cc --- /dev/null +++ b/test/unit/data/model/migration_fixes/test_migrations.py @@ -0,0 +1,400 @@ +from typing import ( + Generator, + TYPE_CHECKING, +) + +import pytest +from sqlalchemy import ( + select, + text, +) + +from galaxy import model as m +from galaxy.model import ( + GroupRoleAssociation, + User, + UserGroupAssociation, + UserRoleAssociation, +) +from galaxy.model.unittest_utils.migration_scripts_testing_utils import run_command + +if TYPE_CHECKING: + from sqlalchemy.engine import Engine + +COMMAND = "manage_db.sh" + + +@pytest.fixture(autouse=True) +def clear_database(engine: "Engine") -> "Generator": + """Delete all rows from all tables. 
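# A small sketch of the de-duplication rule this migration test encodes (per the comments in
# the test below): among users sharing an email, keep the oldest user that owns a history, or
# simply the oldest user if none has a history; the remaining duplicates are renamed and marked
# deleted. This illustrates the rule only and is not the migration's actual implementation.
def pick_user_to_keep(duplicate_users):
    """duplicate_users: User-like objects exposing .create_time and .histories."""
    with_history = [u for u in duplicate_users if u.histories]
    candidates = with_history or list(duplicate_users)
    return min(candidates, key=lambda u: u.create_time)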
Called after each test.""" + yield + with engine.begin() as conn: + for table in m.mapper_registry.metadata.tables: + # Unless db is sqlite, disable foreign key constraints to delete out of order + if engine.name != "sqlite": + conn.execute(text(f"ALTER TABLE {table} DISABLE TRIGGER ALL")) + conn.execute(text(f"DELETE FROM {table}")) + + +@pytest.fixture(autouse=True) +def upgrade_database_after_test(): + """Run after each test for proper cleanup""" + yield + run_command(f"{COMMAND} upgrade") + + +def test_1cf595475b58(monkeypatch, session, make_user, make_history): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # STEP 0: Load pre-migration state + run_command(f"{COMMAND} downgrade d619fdfa6168") + + # STEP 1: Load users with duplicate emails + + # Duplicate group 1: users have no histories + # Expect: oldest user preserved + u1_1 = make_user(email="a") + u1_2 = make_user(email="a") + u1_3 = make_user(email="a") + original_email1 = u1_1.email + assert u1_1.email == u1_2.email == u1_3.email + assert u1_1.create_time < u1_2.create_time < u1_3.create_time # u1_1 is oldest user + + # Duplicate group 2: oldest user does NOT have a history, another user has a history + # Expect: user with history preserved + u2_1 = make_user(email="b") + u2_2 = make_user(email="b") + u2_3 = make_user(email="b") + original_email2 = u2_1.email + assert u2_1.email == u2_2.email == u2_3.email + assert u2_1.create_time < u2_2.create_time < u2_3.create_time # u2_1 is oldest user + + make_history(user=u2_2) # u2_2 has a history + + # Duplicate group 3: oldest user does NOT have a history, 2 users have a history + # Expect: oldest user with history preserved + u3_1 = make_user(email="c") + u3_2 = make_user(email="c") + u3_3 = make_user(email="c") + original_email3 = u3_1.email + assert u3_1.email == u3_2.email == u3_3.email + assert u3_1.create_time < u3_2.create_time < u3_3.create_time # u2_1 is oldest user + + make_history(user=u3_2) # u3_2 has a history + make_history(user=u3_3) # u3_3 has a history + + # User w/o duplicate email + u4 = make_user() + original_email4 = u4.email + + # STEP 2: Run migration + + run_command(f"{COMMAND} upgrade") + session.expire_all() + + # STEP 3: Verify deduplicated results + + # Duplicate group 1: + u1_1_fixed = session.get(User, u1_1.id) + u1_2_fixed = session.get(User, u1_2.id) + u1_3_fixed = session.get(User, u1_3.id) + + # oldest user's email is preserved; the rest are deduplicated + assert u1_1.email == original_email1 + assert u1_1.email != u1_2.email != u1_3.email + # deduplicated users are marked as deleted + assert u1_1_fixed.deleted is False + assert u1_2_fixed.deleted is True + assert u1_3_fixed.deleted is True + + # Duplicate group 2: + u2_1_fixed = session.get(User, u2_1.id) + u2_2_fixed = session.get(User, u2_2.id) + u2_3_fixed = session.get(User, u2_3.id) + + # the email of the user with a history is preserved; the rest are deduplicated + assert u2_2.email == original_email2 + assert u2_1.email != u1_2.email != u1_3.email + # deduplicated users are marked as deleted + assert u2_1_fixed.deleted is True + assert u2_2_fixed.deleted is False + assert u2_3_fixed.deleted is True + + # Duplicate group 3: + u3_1_fixed = session.get(User, u3_1.id) + u3_2_fixed = session.get(User, u3_2.id) + u3_3_fixed = session.get(User, u3_3.id) + + # the email of the 
oldest user with a history is preserved; the rest are deduplicated + assert u3_2.email == original_email3 + assert u3_1.email != u3_2.email != u3_3.email + # deduplicated users are marked as deleted + assert u3_1_fixed.deleted is True + assert u3_2_fixed.deleted is False + assert u3_3_fixed.deleted is True + + # User w/o duplicate email + u4_no_change = session.get(User, u4.id) + assert u4_no_change.email == original_email4 + assert u4_no_change.deleted is False + + +def test_d619fdfa6168(monkeypatch, session, make_user): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # STEP 0: Load pre-migration state + run_command(f"{COMMAND} downgrade d2d8f51ebb7e") + + # STEP 1: Load users with duplicate usernames + + # Expect: oldest user preserved + u1 = make_user(username="a") + u2 = make_user(username="a") + u3 = make_user(username="a") + original_username = u3.username + assert u1.username == u2.username == u3.username + assert u1.create_time < u2.create_time < u3.create_time # u3 is newest user + + # STEP 2: Run migration + run_command(f"{COMMAND} upgrade") + session.expire_all() + + # STEP 3: Verify deduplicated results + u1_fixed = session.get(User, u1.id) + u2_fixed = session.get(User, u2.id) + u3_fixed = session.get(User, u3.id) + + # oldest user's username is preserved; the rest are deduplicated + assert u3_fixed.username == original_username + assert u1.username != u2.username != u3.username + # deduplicated users are marked as deleted + assert u1_fixed.deleted is True + assert u2_fixed.deleted is True + assert u3_fixed.deleted is False + + +def test_349dd9d9aac9(monkeypatch, session, make_user, make_role, make_user_role_association): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # Load pre-migration state + run_command(f"{COMMAND} downgrade 1cf595475b58") + + # Load duplicate records + u1, u2 = make_user(), make_user() + r1, r2 = make_role(), make_role() + make_user_role_association(user=u1, role=r1) + make_user_role_association(user=u1, role=r2) + make_user_role_association(user=u1, role=r2) # duplicate + make_user_role_association(user=u2, role=r1) + make_user_role_association(user=u2, role=r1) # duplicate + + # Verify duplicates + assert len(u1.roles) == 3 + assert len(u2.roles) == 2 + all_associations = session.execute(select(UserRoleAssociation)).all() + assert len(all_associations) == 5 + + # Run migration + run_command(f"{COMMAND} upgrade") + session.expire_all() + + # Verify clean data + assert len(u1.roles) == 2 + assert len(u2.roles) == 1 + all_associations = session.execute(select(UserRoleAssociation)).all() + assert len(all_associations) == 3 + + +def test_56ddf316dbd0(monkeypatch, session, make_user, make_group, make_user_group_association): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # Load pre-migration state + run_command(f"{COMMAND} downgrade 1fdd615f2cdb") + + # Load duplicate records + u1, 
u2 = make_user(), make_user() + g1, g2 = make_group(), make_group() + make_user_group_association(user=u1, group=g1) + make_user_group_association(user=u1, group=g2) + make_user_group_association(user=u1, group=g2) # duplicate + make_user_group_association(user=u2, group=g1) + make_user_group_association(user=u2, group=g1) # duplicate + + # Verify duplicates + assert len(u1.groups) == 3 + assert len(u2.groups) == 2 + all_associations = session.execute(select(UserGroupAssociation)).all() + assert len(all_associations) == 5 + + # Run migration + run_command(f"{COMMAND} upgrade") + session.expire_all() + + # Verify clean data + assert len(u1.groups) == 2 + assert len(u2.groups) == 1 + all_associations = session.execute(select(UserGroupAssociation)).all() + assert len(all_associations) == 3 + + +def test_9ef6431f3a4e(monkeypatch, session, make_group, make_role, make_group_role_association): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # Load pre-migration state + run_command(f"{COMMAND} downgrade 13fe10b8e35b") + + # Load duplicate records + g1, g2 = make_group(), make_group() + r1, r2 = make_role(), make_role() + make_group_role_association(group=g1, role=r1) + make_group_role_association(group=g1, role=r2) + make_group_role_association(group=g1, role=r2) # duplicate + make_group_role_association(group=g2, role=r1) + make_group_role_association(group=g2, role=r1) # duplicate + + # Verify duplicates + assert len(g1.roles) == 3 + assert len(g2.roles) == 2 + all_associations = session.execute(select(GroupRoleAssociation)).all() + assert len(all_associations) == 5 + + # Run migration + run_command(f"{COMMAND} upgrade") + session.expire_all() + + # Verify clean data + assert len(g1.roles) == 2 + assert len(g2.roles) == 1 + all_associations = session.execute(select(GroupRoleAssociation)).all() + assert len(all_associations) == 3 + + +def test_1fdd615f2cdb(monkeypatch, session, make_user, make_role, make_user_role_association): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # Load pre-migration state + run_command(f"{COMMAND} downgrade 349dd9d9aac9") + + # Load records w/nulls + ura1 = make_user_role_association(user=make_user(), role=make_role()) + ura2 = make_user_role_association(user=make_user(), role=make_role()) + ura3 = make_user_role_association(user=make_user(), role=make_role()) + ura1.user_id = None + ura2.role_id = None + ura3.user_id = None + ura3.role_id = None + session.add_all([ura1, ura2, ura3]) + session.commit() + + # Load record w/o nulls + make_user_role_association(user=make_user(), role=make_role()) + + # Verify data + all_associations = session.execute(select(UserRoleAssociation)).all() + assert len(all_associations) == 4 + + # Run migration + run_command(f"{COMMAND} upgrade") + session.expire_all() + + # Verify clean data + all_associations = session.execute(select(UserRoleAssociation)).all() + assert len(all_associations) == 1 + + +def test_13fe10b8e35b(monkeypatch, session, make_user, make_group, make_user_group_association): + # Initialize db and migration environment + dburl = str(session.bind.url) + 
monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # Load pre-migration state + run_command(f"{COMMAND} downgrade 56ddf316dbd0") + + # Load records w/nulls + uga1 = make_user_group_association(user=make_user(), group=make_group()) + uga2 = make_user_group_association(user=make_user(), group=make_group()) + uga3 = make_user_group_association(user=make_user(), group=make_group()) + uga1.user_id = None + uga2.group_id = None + uga3.user_id = None + uga3.group_id = None + session.add_all([uga1, uga2, uga3]) + session.commit() + + # Load record w/o nulls + make_user_group_association(user=make_user(), group=make_group()) + + # Verify data + all_associations = session.execute(select(UserGroupAssociation)).all() + assert len(all_associations) == 4 + + # Run migration + run_command(f"{COMMAND} upgrade") + session.expire_all() + + # Verify clean data + all_associations = session.execute(select(UserGroupAssociation)).all() + assert len(all_associations) == 1 + + +def test_25b092f7938b(monkeypatch, session, make_group, make_role, make_group_role_association): + # Initialize db and migration environment + dburl = str(session.bind.url) + monkeypatch.setenv("GALAXY_CONFIG_OVERRIDE_DATABASE_CONNECTION", dburl) + monkeypatch.setenv("GALAXY_INSTALL_CONFIG_OVERRIDE_INSTALL_DATABASE_CONNECTION", dburl) + run_command(f"{COMMAND} init") + + # Load pre-migration state + run_command(f"{COMMAND} downgrade 9ef6431f3a4e") + + # Load records w/nulls + gra1 = make_group_role_association(group=make_group(), role=make_role()) + gra2 = make_group_role_association(group=make_group(), role=make_role()) + gra3 = make_group_role_association(group=make_group(), role=make_role()) + gra1.group_id = None + gra2.role_id = None + gra3.group_id = None + gra3.role_id = None + session.add_all([gra1, gra2, gra3]) + session.commit() + + # Load record w/o nulls + make_group_role_association(group=make_group(), role=make_role()) + + # Verify data + all_associations = session.execute(select(GroupRoleAssociation)).all() + assert len(all_associations) == 4 + + # Run migration + run_command(f"{COMMAND} upgrade") + session.expire_all() + + # Verify clean data + all_associations = session.execute(select(GroupRoleAssociation)).all() + assert len(all_associations) == 1 diff --git a/test/unit/data/model/test_model_store.py b/test/unit/data/model/test_model_store.py index 80dd3789ef1b..99dab6ddcb9e 100644 --- a/test/unit/data/model/test_model_store.py +++ b/test/unit/data/model/test_model_store.py @@ -122,32 +122,80 @@ def test_import_export_history_allow_discarded_data(): assert imported_job.output_datasets[0].dataset == datasets[1] -def test_import_export_history_with_implicit_conversion(): +def setup_history_with_implicit_conversion(): app = _mock_app() u, h, d1, d2, j = _setup_simple_cat_job(app) + intermediate_ext = "bam" + intermediate_implicit_hda = model.HistoryDatasetAssociation( + extension=intermediate_ext, create_dataset=True, flush=False, history=h + ) + intermediate_implicit_hda.hid = d2.hid convert_ext = "fasta" implicit_hda = model.HistoryDatasetAssociation(extension=convert_ext, create_dataset=True, flush=False, history=h) implicit_hda.hid = d2.hid # this adds and flushes the result... 
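The reworked fixture now chains two implicit conversions onto `d2` (source → `bam` intermediate → `fasta`) instead of a single `fasta` conversion. Purely as an illustration, and not part of the patch, a small helper that walks that chain downward using only the attribute names these tests exercise would look roughly like this:

```python
# Illustration only: walk an HDA's implicit-conversion chain downward and
# collect the extensions, using the attributes asserted on in the tests below.
def conversion_chain_extensions(source_hda):
    chain = [source_hda.extension]
    current = source_hda
    while current.implicitly_converted_datasets:
        # each association's `.dataset` is the converted child HDA
        current = current.implicitly_converted_datasets[0].dataset
        chain.append(current.extension)
    return chain


# For the fixture built here this should yield ["txt", "bam", "fasta"]
# (the "txt" source extension is asserted in the purged-parents test further down).
```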
- d2.attach_implicitly_converted_dataset(app.model.context, implicit_hda, convert_ext) + intermediate_implicit_hda.attach_implicitly_converted_dataset(app.model.context, implicit_hda, convert_ext) + d2.attach_implicitly_converted_dataset(app.model.context, intermediate_implicit_hda, intermediate_ext) + + app.object_store.update_from_file(intermediate_implicit_hda.dataset, file_name=TEST_PATH_2_CONVERTED, create=True) app.object_store.update_from_file(implicit_hda.dataset, file_name=TEST_PATH_2_CONVERTED, create=True) - assert len(h.active_datasets) == 3 + assert len(h.active_datasets) == 4 + return app, h, implicit_hda + + +def test_import_export_history_with_implicit_conversion(): + app, h, _ = setup_history_with_implicit_conversion() imported_history = _import_export_history(app, h, export_files="copy", include_hidden=True) - assert len(imported_history.active_datasets) == 3 + assert len(imported_history.active_datasets) == 4 recovered_hda_2 = imported_history.active_datasets[1] assert recovered_hda_2.implicitly_converted_datasets - imported_conversion = recovered_hda_2.implicitly_converted_datasets[0] - assert imported_conversion.type == "fasta" - assert imported_conversion.dataset == imported_history.active_datasets[2] + intermediate_conversion = recovered_hda_2.implicitly_converted_datasets[0] + assert intermediate_conversion.type == "bam" + intermediate_hda = intermediate_conversion.dataset + assert intermediate_hda.implicitly_converted_datasets + final_conversion = intermediate_hda.implicitly_converted_datasets[0] + + assert final_conversion.type == "fasta" + assert final_conversion.dataset == imported_history.active_datasets[-1] # implicit conversions have the same HID... ensure this property is recovered... assert imported_history.active_datasets[2].hid == imported_history.active_datasets[1].hid +def test_import_export_history_with_implicit_conversion_parents_purged(): + app, h, implicit_hda = setup_history_with_implicit_conversion() + # Purge parents + parent = implicit_hda.implicitly_converted_parent_datasets[0].parent_hda + parent.dataset.purged = True + grandparent = parent.implicitly_converted_parent_datasets[0].parent_hda + grandparent.dataset.purged = True + app.model.context.commit() + imported_history = _import_export_history(app, h, export_files="copy", include_hidden=True) + + assert len(imported_history.active_datasets) == 2 + assert len(imported_history.datasets) == 4 + imported_implicit_hda = imported_history.active_datasets[1] + assert imported_implicit_hda.extension == "fasta" + + # implicit conversions have the same HID... ensure this property is recovered... 
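The purged-parents variant walks the same chain in the opposite direction, climbing from the final `fasta` HDA back to the original source through `implicitly_converted_parent_datasets`. As a hedged sketch (again not part of the patch), the upward traversal mirrors the downward one:

```python
# Illustration only: climb from a converted HDA back to its original source,
# mirroring the parent-chain assertions in the purged-parents test below.
def conversion_chain_parent_extensions(final_hda):
    chain = [final_hda.extension]
    current = final_hda
    while current.implicitly_converted_parent_datasets:
        # each association's `.parent_hda` is the HDA the conversion was derived from
        current = current.implicitly_converted_parent_datasets[0].parent_hda
        chain.append(current.extension)
    return chain  # expected: ["fasta", "bam", "txt"] for the fixture above
```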
+ assert imported_implicit_hda.hid == implicit_hda.hid + assert imported_implicit_hda.implicitly_converted_parent_datasets + intermediate_implicit_conversion = imported_implicit_hda.implicitly_converted_parent_datasets[0] + intermediate_hda = intermediate_implicit_conversion.parent_hda + assert intermediate_hda.hid == implicit_hda.hid + assert intermediate_hda.extension == "bam" + assert intermediate_hda.implicitly_converted_datasets + assert intermediate_hda.implicitly_converted_parent_datasets + first_implicit_conversion = intermediate_hda.implicitly_converted_parent_datasets[0] + source_hda = first_implicit_conversion.parent_hda + assert source_hda.hid == implicit_hda.hid + assert source_hda.extension == "txt" + + def test_import_export_history_with_implicit_conversion_and_extra_files(): app = _mock_app() diff --git a/test/unit/data/test_galaxy_mapping.py b/test/unit/data/test_galaxy_mapping.py index 3e99841db44b..60c9c3116942 100644 --- a/test/unit/data/test_galaxy_mapping.py +++ b/test/unit/data/test_galaxy_mapping.py @@ -22,6 +22,7 @@ get_object_session, ) from galaxy.model.security import GalaxyRBACAgent +from galaxy.model.unittest_utils.utils import random_email from galaxy.objectstore import QuotaSourceMap from galaxy.util.unittest import TestCase @@ -78,7 +79,7 @@ def expunge(cls): class TestMappings(BaseModelTestCase): def test_dataset_instance_order(self) -> None: - u = model.User(email="mary@example.com", password="password") + u = model.User(email=random_email(), password="password") h1 = model.History(name="History 1", user=u) elements = [] list_pair = model.DatasetCollection(collection_type="list:paired") @@ -213,7 +214,7 @@ def test_nested_collection_attributes(self): assert c4.dataset_elements == [dce1, dce2] def test_history_audit(self): - u = model.User(email="contents@foo.bar.baz", password="password") + u = model.User(email=random_email(), password="password") h1 = model.History(name="HistoryAuditHistory", user=u) h2 = model.History(name="HistoryAuditHistory", user=u) @@ -272,7 +273,7 @@ def test_flush_refreshes(self): # states and flushing in SQL Alchemy is very subtle and it is good to have a executable # reference for how it behaves in the context of Galaxy objects. 
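These mapping tests previously reused fixed addresses such as `mary@example.com`; presumably because the deduplication migrations above no longer tolerate duplicate user e-mails, the fixtures switch to `random_email()` from `galaxy.model.unittest_utils.utils`. Its implementation is not shown in this patch; one plausible shape, offered purely as an assumption, is:

```python
# Assumption: the real helper lives in galaxy.model.unittest_utils.utils and may
# differ; this is just one plausible way to generate a unique test address.
import uuid


def random_email() -> str:
    return f"test-{uuid.uuid4().hex[:12]}@example.org"
```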
model = self.model - user = model.User(email="testworkflows@bx.psu.edu", password="password") + user = model.User(email=random_email(), password="password") galaxy_session = model.GalaxySession() galaxy_session_other = model.GalaxySession() galaxy_session.user = user @@ -345,7 +346,7 @@ def test_flush_refreshes(self): assert "id" not in inspect(galaxy_model_object_new).unloaded def test_workflows(self): - user = model.User(email="testworkflows@bx.psu.edu", password="password") + user = model.User(email=random_email(), password="password") child_workflow = _workflow_from_steps(user, []) self.persist(child_workflow) @@ -455,7 +456,7 @@ def test_workflows(self): assert counts.root["scheduled"] == 1 def test_role_creation(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) def check_private_role(private_role, email): assert private_role.type == model.Role.types.PRIVATE @@ -488,7 +489,7 @@ def check_private_role(private_role, email): check_private_role(role, email) def test_private_share_role(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, u_other = self._three_users("private_share_role") @@ -503,7 +504,7 @@ def test_private_share_role(self): assert not security_agent.can_access_dataset(u_other.all_roles(), d1.dataset) def test_make_dataset_public(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, u_other = self._three_users("make_dataset_public") h = model.History(name="History for Annotation", user=u_from) @@ -519,7 +520,7 @@ def test_make_dataset_public(self): assert security_agent.can_access_dataset(u_other.all_roles(), d1.dataset) def test_set_all_dataset_permissions(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, _, u_other = self._three_users("set_all_perms") h = model.History(name="History for Annotation", user=u_from) @@ -540,7 +541,7 @@ def test_set_all_dataset_permissions(self): assert not security_agent.can_access_dataset(u_other.all_roles(), d1.dataset) def test_can_manage_privately_shared_dataset(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, u_other = self._three_users("can_manage_dataset") h = model.History(name="History for Prevent Sharing", user=u_from) @@ -555,7 +556,7 @@ def test_can_manage_privately_shared_dataset(self): assert not security_agent.can_manage_dataset(u_to.all_roles(), d1.dataset) def test_can_manage_private_dataset(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, _, u_other = self._three_users("can_manage_dataset_ps") h = model.History(name="History for Prevent Sharing", user=u_from) @@ -569,7 +570,7 @@ def test_can_manage_private_dataset(self): assert not security_agent.can_manage_dataset(u_other.all_roles(), d1.dataset) def test_cannot_make_private_objectstore_dataset_public(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, _ = self._three_users("cannot_make_private_public") h = self.model.History(name="History for Prevent Sharing", user=u_from) @@ -586,7 +587,7 @@ def test_cannot_make_private_objectstore_dataset_public(self): assert galaxy.model.CANNOT_SHARE_PRIVATE_DATASET_MESSAGE in str(exec_info.value) def test_cannot_make_private_objectstore_dataset_shared(self): - 
security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, _ = self._three_users("cannot_make_private_shared") h = self.model.History(name="History for Prevent Sharing", user=u_from) @@ -603,7 +604,7 @@ def test_cannot_make_private_objectstore_dataset_shared(self): assert galaxy.model.CANNOT_SHARE_PRIVATE_DATASET_MESSAGE in str(exec_info.value) def test_cannot_set_dataset_permisson_on_private(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, _ = self._three_users("cannot_set_permissions_on_private") h = self.model.History(name="History for Prevent Sharing", user=u_from) @@ -623,7 +624,7 @@ def test_cannot_set_dataset_permisson_on_private(self): assert galaxy.model.CANNOT_SHARE_PRIVATE_DATASET_MESSAGE in str(exec_info.value) def test_cannot_make_private_dataset_public(self): - security_agent = GalaxyRBACAgent(self.model) + security_agent = GalaxyRBACAgent(self.model.session) u_from, u_to, u_other = self._three_users("cannot_make_private_dataset_public") h = self.model.History(name="History for Annotation", user=u_from) diff --git a/test/unit/data/test_quota.py b/test/unit/data/test_quota.py index 5c34ea7cb056..7a3695894445 100644 --- a/test/unit/data/test_quota.py +++ b/test/unit/data/test_quota.py @@ -1,6 +1,7 @@ import uuid from galaxy import model +from galaxy.model.unittest_utils.utils import random_email from galaxy.objectstore import ( QuotaSourceInfo, QuotaSourceMap, @@ -16,7 +17,7 @@ class TestPurgeUsage(BaseModelTestCase): def setUp(self): super().setUp() model = self.model - u = model.User(email="purge_usage@example.com", password="password") + u = model.User(email=random_email(), password="password") u.disk_usage = 25 self.persist(u) diff --git a/test/unit/tool_util/test_test_format_model.py b/test/unit/tool_util/test_test_format_model.py new file mode 100644 index 000000000000..986f5c12ba87 --- /dev/null +++ b/test/unit/tool_util/test_test_format_model.py @@ -0,0 +1,39 @@ +import os +from pathlib import Path +from typing import List + +import yaml + +from galaxy.tool_util.models import Tests +from galaxy.util import galaxy_directory +from galaxy.util.unittest_utils import skip_unless_environ + +TEST_WORKFLOW_DIRECTORY = os.path.join(galaxy_directory(), "lib", "galaxy_test", "workflow") +IWC_WORKFLOWS_USING_UNVERIFIED_SYNTAX: List[str] = [] + + +def test_validate_workflow_tests(): + path = Path(TEST_WORKFLOW_DIRECTORY) + test_files = path.glob("*.gxwf-tests.yml") + for test_file in test_files: + with open(test_file) as f: + json = yaml.safe_load(f) + Tests.model_validate(json) + + +@skip_unless_environ("GALAXY_TEST_IWC_DIRECTORY") +def test_iwc_directory(): + path = Path(os.environ["GALAXY_TEST_IWC_DIRECTORY"]) + test_files = path.glob("workflows/**/*-test*.yml") + + for test_file in test_files: + print(test_file) + skip_file = False + for unverified in IWC_WORKFLOWS_USING_UNVERIFIED_SYNTAX: + if str(test_file).endswith(unverified): + skip_file = True + if skip_file: + continue + with open(test_file) as f: + json = yaml.safe_load(f) + Tests.model_validate(json) diff --git a/test/unit/workflows/test_run_parameters.py b/test/unit/workflows/test_run_parameters.py index 76eae8955744..5718ac923a40 100644 --- a/test/unit/workflows/test_run_parameters.py +++ b/test/unit/workflows/test_run_parameters.py @@ -1,5 +1,6 @@ from galaxy import model from galaxy.model.base import transaction +from galaxy.model.unittest_utils.utils import random_email from 
galaxy.workflow.run_request import ( _normalize_inputs, _normalize_step_parameters, @@ -89,7 +90,7 @@ def __new_input(): def __workflow_fixure(trans): - user = model.User(email="testworkflow_params@bx.psu.edu", password="pass") + user = model.User(email=random_email(), password="pass") stored_workflow = model.StoredWorkflow() stored_workflow.user = user workflow = model.Workflow()
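One recurring change in the tests above is that `GalaxyRBACAgent` is now constructed from a SQLAlchemy session (`self.model.session`) rather than the model/mapping object. For test code outside these files, the updated pattern looks roughly like the sketch below; only calls that appear in the tests above are used, and `sa_session`, `user`, and `hda` are hypothetical stand-ins:

```python
from galaxy.model.security import GalaxyRBACAgent


def make_security_agent(sa_session) -> GalaxyRBACAgent:
    # previously the agent was built from the model object: GalaxyRBACAgent(self.model)
    return GalaxyRBACAgent(sa_session)


# The access checks themselves are unchanged, e.g. (user/hda being test fixtures):
#   make_security_agent(sa_session).can_access_dataset(user.all_roles(), hda.dataset)
```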