Adjust API tests
Most of the time we just want an OK status code. 200, 202, and 204 should
all count as OK, and which code is returned can depend on the Galaxy instance
config, so this makes the tests more robust to future changes.
mvdbeek committed Aug 25, 2024
1 parent 0beb1bf commit 73176cf
Showing 8 changed files with 19 additions and 19 deletions.
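For reference, _assert_status_code_is_ok (used throughout the diff below) accepts any successful response rather than a single exact code. A minimal sketch of what such a helper might look like, assuming a requests-style response object (the actual implementation in Galaxy's test framework may differ):

def _assert_status_code_is_ok(response):
    # Treat any 2xx status (200, 202, 204, ...) as success so the test does not
    # depend on which success code a particular Galaxy instance returns.
    assert 200 <= response.status_code < 300, (
        f"Request failed with status code {response.status_code}: {response.text}"
    )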
2 changes: 1 addition & 1 deletion lib/galaxy_test/api/test_datasets.py
@@ -651,7 +651,7 @@ def _run_cancel_job(self, history_id: str, use_query_params: bool = False):
history_id, output_hda_id, stop_job=True, use_query_params=use_query_params
)
self._assert_status_code_is_ok(delete_response)
- deleted_hda = delete_response.json()
+ deleted_hda = self.dataset_populator.get_history_dataset_details(history_id, content_id=output_hda_id)
assert deleted_hda["deleted"], deleted_hda

# The job should be cancelled
2 changes: 1 addition & 1 deletion lib/galaxy_test/api/test_history_contents.py
@@ -613,7 +613,7 @@ def _check_pair_creation(self, history_id: str, endpoint, payload):
assert not dataset_collection["deleted"]

delete_response = self._delete(collection_url)
- self._assert_status_code_is(delete_response, 200)
+ self._assert_status_code_is_ok(delete_response)

show_response = self._get(collection_url)
dataset_collection = show_response.json()
18 changes: 9 additions & 9 deletions lib/galaxy_test/api/test_jobs.py
@@ -710,7 +710,7 @@ def test_resume_job(self, history_id):
output = run_response["outputs"][0]
# Delete second jobs input while second job is waiting for first job
delete_response = self._delete(f"histories/{history_id}/contents/{hda1['id']}")
- self._assert_status_code_is(delete_response, 200)
+ self._assert_status_code_is_ok(delete_response)
self.dataset_populator.wait_for_history_jobs(history_id, assert_ok=False)
dataset_details = self._get(f"histories/{history_id}/contents/{output['id']}").json()
assert dataset_details["state"] == "paused"
@@ -747,11 +747,11 @@ def test_search(self, history_id):
self._search(search_payload, expected_search_count=1)
# Now we delete the original input HDA that was used -- we should still be able to find the job
delete_respone = self._delete(f"histories/{history_id}/contents/{dataset_id}")
- self._assert_status_code_is(delete_respone, 200)
+ self._assert_status_code_is_ok(delete_respone)
self._search(search_payload, expected_search_count=1)
# Now we also delete the copy -- we shouldn't find a job
delete_respone = self._delete(f"histories/{new_history_id}/contents/{new_dataset_id}")
- self._assert_status_code_is(delete_respone, 200)
+ self._assert_status_code_is_ok(delete_respone)
self._search(search_payload, expected_search_count=0)

@pytest.mark.require_new_history
@@ -777,7 +777,7 @@ def test_search_delete_outputs(self, history_id):
tool_response = self._job_search(tool_id="cat1", history_id=history_id, inputs=inputs)
output_id = tool_response.json()["outputs"][0]["id"]
delete_respone = self._delete(f"histories/{history_id}/contents/{output_id}")
- self._assert_status_code_is(delete_respone, 200)
+ self._assert_status_code_is_ok(delete_respone)
search_payload = self._search_payload(history_id=history_id, tool_id="cat1", inputs=inputs)
self._search(search_payload, expected_search_count=0)

@@ -821,7 +821,7 @@ def test_search_with_hdca_list_input(self, history_id):
# and use the correct input job definition, the job should not be found
output_id = tool_response.json()["outputs"][0]["id"]
delete_respone = self._delete(f"histories/{history_id}/contents/{output_id}")
- self._assert_status_code_is(delete_respone, 200)
+ self._assert_status_code_is_ok(delete_respone)
search_payload = self._search_payload(history_id=history_id, tool_id="multi_data_param", inputs=inputs)
self._search(search_payload, expected_search_count=0)

@@ -837,14 +837,14 @@ def test_search_delete_hdca_output(self, history_id):
output_id = tool_response.json()["outputs"][0]["id"]
# We delete a single tool output, no job should be returned
delete_respone = self._delete(f"histories/{history_id}/contents/{output_id}")
- self._assert_status_code_is(delete_respone, 200)
+ self._assert_status_code_is_ok(delete_respone)
search_payload = self._search_payload(history_id=history_id, tool_id="collection_creates_list", inputs=inputs)
self._search(search_payload, expected_search_count=0)
tool_response = self._job_search(tool_id="collection_creates_list", history_id=history_id, inputs=inputs)
output_collection_id = tool_response.json()["output_collections"][0]["id"]
# We delete a collection output, no job should be returned
delete_respone = self._delete(f"histories/{history_id}/contents/dataset_collections/{output_collection_id}")
- self._assert_status_code_is(delete_respone, 200)
+ self._assert_status_code_is_ok(delete_respone)
search_payload = self._search_payload(history_id=history_id, tool_id="collection_creates_list", inputs=inputs)
self._search(search_payload, expected_search_count=0)

@@ -876,11 +876,11 @@ def test_search_with_hdca_pair_input(self, history_id):
self._search(search_payload, expected_search_count=1)
# Now we delete the original input HDCA that was used -- we should still be able to find the job
delete_respone = self._delete(f"histories/{history_id}/contents/dataset_collections/{list_id_a}")
- self._assert_status_code_is(delete_respone, 200)
+ self._assert_status_code_is_ok(delete_respone)
self._search(search_payload, expected_search_count=1)
# Now we also delete the copy -- we shouldn't find a job
delete_respone = self._delete(f"histories/{history_id}/contents/dataset_collections/{new_list_a}")
- self._assert_status_code_is(delete_respone, 200)
+ self._assert_status_code_is_ok(delete_respone)
self._search(search_payload, expected_search_count=0)

@pytest.mark.require_new_history
2 changes: 1 addition & 1 deletion lib/galaxy_test/api/test_pages.py
@@ -334,7 +334,7 @@ def test_page_requires_slug(self):
def test_delete(self):
response_json = self._create_valid_page_with_slug("testdelete")
delete_response = delete(self._api_url(f"pages/{response_json['id']}", use_key=True))
- self._assert_status_code_is(delete_response, 204)
+ self._assert_status_code_is_ok(delete_response)

def test_400_on_delete_invalid_page_id(self):
delete_response = delete(self._api_url(f"pages/{self._random_key()}", use_key=True))
2 changes: 1 addition & 1 deletion lib/galaxy_test/api/test_tags.py
@@ -31,7 +31,7 @@ def test_tags_on_item(self):

new_tags = ["APITag"]
update_history_tags_response = self._update_tags_using_tags_api(item_id, new_tags)
- self._assert_status_code_is(update_history_tags_response, 204)
+ self._assert_status_code_is_ok(update_history_tags_response)
self._assert_tags_in_item(item_id, new_tags)

# other users can't create or update tags
4 changes: 2 additions & 2 deletions lib/galaxy_test/api/test_users.py
@@ -209,10 +209,10 @@ def test_manage_api_key(self):
assert api_key["key"] == user_api_key
# Delete user API key
response = self._delete(f"users/{user_id}/api_key")
- self._assert_status_code_is(response, 204)
+ self._assert_status_code_is_ok(response)
# No API key anymore, so the detailed request returns no content 204 with admin key
response = self._get(f"users/{user_id}/api_key/detailed", admin=True)
- self._assert_status_code_is(response, 204)
+ self._assert_status_code_is_ok(response)
# No API key anymore, so the detailed request returns unauthorized 401 with user key
response = self._get(f"users/{user_id}/api_key/detailed")
self._assert_status_code_is(response, 401)
6 changes: 3 additions & 3 deletions lib/galaxy_test/api/test_workflows.py
@@ -388,7 +388,7 @@ def test_delete(self):
self._assert_user_has_workflow_with_name(workflow_name)
workflow_url = self._api_url(f"workflows/{workflow_id}", use_key=True)
delete_response = delete(workflow_url)
- self._assert_status_code_is(delete_response, 204)
+ self._assert_status_code_is_ok(delete_response)
# Make sure workflow is no longer in index by default.
assert workflow_name not in self._workflow_names()

@@ -407,7 +407,7 @@ def test_undelete(self):
delete(workflow_delete_url)
workflow_undelete_url = self._api_url(f"workflows/{workflow_id}/undelete", use_key=True)
undelete_response = post(workflow_undelete_url)
- self._assert_status_code_is(undelete_response, 204)
+ self._assert_status_code_is_ok(undelete_response)
assert workflow_name in self._workflow_names()

def test_other_cannot_undelete(self):
@@ -430,7 +430,7 @@ def test_index_deleted(self):
assert [w for w in workflow_index if w["id"] == workflow_id]
workflow_url = self._api_url(f"workflows/{workflow_id}", use_key=True)
delete_response = delete(workflow_url)
- self._assert_status_code_is(delete_response, 204)
+ self._assert_status_code_is_ok(delete_response)
workflow_index = self._get("workflows").json()
assert not [w for w in workflow_index if w["id"] == workflow_id]
workflow_index = self._get("workflows?show_deleted=true").json()
2 changes: 1 addition & 1 deletion test/integration/test_notifications.py
@@ -420,7 +420,7 @@ def _send_broadcast_notification(

def _update_notification(self, notification_id: str, update_state: Dict[str, Any]):
update_response = self._put(f"notifications/{notification_id}", data=update_state, json=True)
- self._assert_status_code_is(update_response, 204)
+ self._assert_status_code_is_ok(update_response)

def _assert_notifications_sent(self, response, expected_count: int = 0):
if self.task_based: