39 changes: 22 additions & 17 deletions lib/galaxy_test/api/test_jobs.py
@@ -772,12 +772,12 @@ def test_search(self, history_id):
         search_payload = self._search_payload(history_id=history_id, tool_id="cat1", inputs=copied_inputs)
         self._search(search_payload, expected_search_count=1)
         # Now we delete the original input HDA that was used -- we should still be able to find the job
-        delete_respone = self._delete(f"histories/{history_id}/contents/{dataset_id}")
-        self._assert_status_code_is_ok(delete_respone)
+        delete_response = self._delete(f"histories/{history_id}/contents/{dataset_id}")
+        self._assert_status_code_is_ok(delete_response)
         self._search(search_payload, expected_search_count=1)
         # Now we also delete the copy -- we shouldn't find a job
-        delete_respone = self._delete(f"histories/{new_history_id}/contents/{new_dataset_id}")
-        self._assert_status_code_is_ok(delete_respone)
+        delete_response = self._delete(f"histories/{new_history_id}/contents/{new_dataset_id}")
+        self._assert_status_code_is_ok(delete_response)
         self._search(search_payload, expected_search_count=0)

     @pytest.mark.require_new_history
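
For reference, the delete-then-search pattern these tests exercise corresponds to two Galaxy API calls. Below is a minimal standalone sketch, not part of this diff, assuming a local Galaxy instance and a placeholder API key (both hypothetical):

import requests

BASE = "http://localhost:8080/api"  # assumption: local Galaxy instance
AUTH = {"key": "<api-key>"}  # assumption: placeholder credentials

def delete_history_content(history_id, content_id):
    # DELETE /api/histories/{history_id}/contents/{content_id} marks the item deleted.
    response = requests.delete(f"{BASE}/histories/{history_id}/contents/{content_id}", params=AUTH)
    response.raise_for_status()
    return response.json()

def search_jobs(tool_id, inputs, state="ok"):
    # POST /api/jobs/search lists previously executed jobs matching the tool id,
    # serialized inputs, and job state -- the endpoint behind _search_payload/_search.
    payload = {"tool_id": tool_id, "inputs": inputs, "state": state}
    response = requests.post(f"{BASE}/jobs/search", json=payload, params=AUTH)
    response.raise_for_status()
    return response.json()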
@@ -802,8 +802,8 @@ def test_search_delete_outputs(self, history_id):
         inputs = json.dumps({"input1": {"src": "hda", "id": dataset_id}})
         tool_response = self._job_search(tool_id="cat1", history_id=history_id, inputs=inputs)
         output_id = tool_response.json()["outputs"][0]["id"]
-        delete_respone = self._delete(f"histories/{history_id}/contents/{output_id}")
-        self._assert_status_code_is_ok(delete_respone)
+        delete_response = self._delete(f"histories/{history_id}/contents/{output_id}")
+        self._assert_status_code_is_ok(delete_response)
         search_payload = self._search_payload(history_id=history_id, tool_id="cat1", inputs=inputs)
         self._search(search_payload, expected_search_count=0)
@@ -846,8 +846,8 @@ def test_search_with_hdca_list_input(self, history_id):
         # We delete the output (this is an HDA, as multi_data_param reduces collections)
         # and use the correct input job definition, the job should not be found
         output_id = tool_response.json()["outputs"][0]["id"]
-        delete_respone = self._delete(f"histories/{history_id}/contents/{output_id}")
-        self._assert_status_code_is_ok(delete_respone)
+        delete_response = self._delete(f"histories/{history_id}/contents/{output_id}")
+        self._assert_status_code_is_ok(delete_response)
         search_payload = self._search_payload(history_id=history_id, tool_id="multi_data_param", inputs=inputs)
         self._search(search_payload, expected_search_count=0)
@@ -860,17 +860,22 @@ def test_search_delete_hdca_output(self, history_id):
             }
         )
         tool_response = self._job_search(tool_id="collection_creates_list", history_id=history_id, inputs=inputs)
-        output_id = tool_response.json()["outputs"][0]["id"]
+        output_dict = tool_response.json()["outputs"][0]
+        assert output_dict["history_content_type"] == "dataset"
+        output_id = output_dict["id"]
+        # Wait for job search to register the job, make sure initial conditions set.
+        search_payload = self._search_payload(history_id=history_id, tool_id="collection_creates_list", inputs=inputs)
+        self._search(search_payload, expected_search_count=1)
         # We delete a single tool output, no job should be returned
-        delete_respone = self._delete(f"histories/{history_id}/contents/{output_id}")
-        self._assert_status_code_is_ok(delete_respone)
+        delete_response = self._delete(f"histories/{history_id}/contents/datasets/{output_id}")
+        self._assert_status_code_is_ok(delete_response)
         search_payload = self._search_payload(history_id=history_id, tool_id="collection_creates_list", inputs=inputs)
         self._search(search_payload, expected_search_count=0)
         tool_response = self._job_search(tool_id="collection_creates_list", history_id=history_id, inputs=inputs)
         output_collection_id = tool_response.json()["output_collections"][0]["id"]
         # We delete a collection output, no job should be returned
-        delete_respone = self._delete(f"histories/{history_id}/contents/dataset_collections/{output_collection_id}")
-        self._assert_status_code_is_ok(delete_respone)
+        delete_response = self._delete(f"histories/{history_id}/contents/dataset_collections/{output_collection_id}")
+        self._assert_status_code_is_ok(delete_response)
         search_payload = self._search_payload(history_id=history_id, tool_id="collection_creates_list", inputs=inputs)
         self._search(search_payload, expected_search_count=0)
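
The hunk above also switches the HDA delete to the type-qualified contents route, since a bare contents id is ambiguous in a history holding both datasets and collections. A hedged sketch of the typed call, reusing the hypothetical BASE and AUTH from the earlier sketch:

def delete_typed_content(history_id, content_type, content_id):
    # content_type is "datasets" for an HDA or "dataset_collections" for an HDCA;
    # the typed route makes explicit which content class the id refers to.
    url = f"{BASE}/histories/{history_id}/contents/{content_type}/{content_id}"
    response = requests.delete(url, params=AUTH)
    response.raise_for_status()
    return response.json()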
@@ -901,12 +906,12 @@ def test_search_with_hdca_pair_input(self, history_id):
         )
         self._search(search_payload, expected_search_count=1)
         # Now we delete the original input HDCA that was used -- we should still be able to find the job
-        delete_respone = self._delete(f"histories/{history_id}/contents/dataset_collections/{list_id_a}")
-        self._assert_status_code_is_ok(delete_respone)
+        delete_response = self._delete(f"histories/{history_id}/contents/dataset_collections/{list_id_a}")
+        self._assert_status_code_is_ok(delete_response)
         self._search(search_payload, expected_search_count=1)
         # Now we also delete the copy -- we shouldn't find a job
-        delete_respone = self._delete(f"histories/{history_id}/contents/dataset_collections/{new_list_a}")
-        self._assert_status_code_is_ok(delete_respone)
+        delete_response = self._delete(f"histories/{history_id}/contents/dataset_collections/{new_list_a}")
+        self._assert_status_code_is_ok(delete_response)
         self._search(search_payload, expected_search_count=0)

     @pytest.mark.require_new_history
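
Tying the sketches together, the copied-input behavior asserted in the first and last hunks can be reproduced end to end. A hypothetical driver (tool id, inputs, and ids invented for illustration), using the helpers sketched above:

# Hypothetical ids -- in the tests these come from uploads and a copy operation.
history_id, original_hdca, copied_hdca = "h1", "c1", "c2"
inputs = '{"f1": {"src": "hdca", "id": "c1"}}'  # assumption: example tool input

assert len(search_jobs("example_tool", inputs)) == 1
delete_typed_content(history_id, "dataset_collections", original_hdca)
# Deleting only the original leaves the copy intact, so the job is still found.
assert len(search_jobs("example_tool", inputs)) == 1
delete_typed_content(history_id, "dataset_collections", copied_hdca)
# Once every copy of the input is deleted, the job no longer matches.
assert len(search_jobs("example_tool", inputs)) == 0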