import json
import os
import time
from json import dumps

import yaml
from requests import delete

# Galaxy test-framework imports; the exact module paths vary between Galaxy
# releases, so the ones below are an assumption. Fixture constants referenced
# further down (WORKFLOW_SIMPLE_CAT_TWICE, PAUSE_WORKFLOW,
# WORKFLOW_HANDLER_CONFIGURATION_JOB_CONF) are assumed to be defined or
# imported elsewhere in the module.
from galaxy_test.base.populators import (
    DatasetCollectionPopulator,
    DatasetPopulator,
    WorkflowPopulator,
)
from galaxy_test.driver import integration_util


class MaximumWorkflowInvocationDurationTestCase(integration_util.IntegrationTestCase):
    """Test the maximum_workflow_invocation_duration configuration option.

    A paused workflow invocation that exceeds the configured duration should
    be failed by the workflow scheduling manager.
    """

    framework_tool_and_types = True

    def setUp(self):
        super(MaximumWorkflowInvocationDurationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["maximum_workflow_invocation_duration"] = 20

    def do_test(self):
        workflow = self.workflow_populator.load_workflow_from_resource("test_workflow_pause")
        workflow_id = self.workflow_populator.create_workflow(workflow)
        history_id = self.dataset_populator.new_history()
        hda1 = self.dataset_populator.new_dataset(history_id, content="1 2 3")
        index_map = {'0': dict(src="hda", id=hda1["id"])}
        request = {}
        request["history"] = "hist_id=%s" % history_id
        request["inputs"] = dumps(index_map)
        request["inputs_by"] = 'step_index'
        url = "workflows/%s/invocations" % workflow_id
        invocation_response = self._post(url, data=request)
        invocation_url = url + "/" + invocation_response.json()["id"]
        # The invocation should still be alive well before the 20 second limit...
        time.sleep(5)
        state = self._get(invocation_url).json()["state"]
        assert state != "failed", state
        # ...and should have been failed by the scheduler once the limit has passed.
        time.sleep(35)
        state = self._get(invocation_url).json()["state"]
        assert state == "failed", state
class FailJobWhenToolUnavailableTestCase(integration_util.IntegrationTestCase):

    require_admin_user = True

    def setUp(self):
        super(FailJobWhenToolUnavailableTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        # config["jobs_directory"] = cls.jobs_directory
        # Disable tool dependency resolution.
        config["tool_dependency_dir"] = "none"

    def test_fail_job_when_tool_unavailable(self):
        self.workflow_populator.run_workflow("""
class: GalaxyWorkflow
steps:
  - label: sleep
    run:
      class: GalaxyTool
      command: sleep 20s && echo 'hello world 2' > '$output1'
      outputs:
        output1:
          format: txt
  - tool_id: cat1
    state:
      input1:
        $link: sleep#output1
      queries:
        input2:
          $link: sleep#output1
""", history_id=self.history_id, assert_ok=False, wait=False)
        # Wait until the workflow is fully scheduled; otherwise the effect of
        # removing the tool from the queued job cannot be tested.
        time.sleep(10)
        self._app.toolbox.remove_tool_by_id('cat1')
        self.dataset_populator.wait_for_history(self.history_id, assert_ok=False)
        state_details = self.galaxy_interactor.get('histories/%s' % self.history_id).json()['state_details']
        assert state_details['running'] == 0
        assert state_details['ok'] == 1
        assert state_details['error'] == 1
        failed_hda = self.dataset_populator.get_history_dataset_details(history_id=self.history_id, assert_ok=False, details=True)
        assert failed_hda['state'] == 'error'
        job = self.galaxy_interactor.get("jobs/%s" % failed_hda['creating_job']).json()
        assert job['state'] == 'error'
def test_search_workflows(self):
    workflow_populator = WorkflowPopulator(self.galaxy_interactor)
    workflow_id = workflow_populator.simple_workflow("test_for_search")
    search_response = self.__search("select * from workflow")
    assert self.__has_result_with_name(search_response, "test_for_search"), search_response.json()
    # Deleted workflows should no longer show up in the search results.
    delete_url = self._api_url("workflows/%s" % workflow_id, use_key=True)
    delete(delete_url)
    search_response = self.__search("select * from workflow where deleted = False")
    assert not self.__has_result_with_name(search_response, "test_for_search"), search_response.json()
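# The test above relies on two private helpers of its (not shown) API test
# class. The defs below are a minimal plausible sketch of those helpers, not
# the actual Galaxy implementation; they assume the class derives from
# Galaxy's base API test case and therefore provides self._post and
# self._assert_status_code_is.
def __search(self, query):
    # POST the search query to the /api/search endpoint and return the raw response.
    response = self._post("search", data=dict(query=query))
    self._assert_status_code_is(response, 200)
    return response


def __has_result_with_name(self, response, name):
    # True if any result returned by the search carries the given name.
    results = response.json()["results"]
    return any(result.get("name") == name for result in results)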
class WorkflowSyncTestCase(integration_util.IntegrationTestCase):

    framework_tool_and_types = True
    require_admin_user = True

    def setUp(self):
        super(WorkflowSyncTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        cls.workflow_directory = cls._test_driver.mkdtemp()

    def test_sync_format2(self):
        workflow_path = self._write_workflow_content("workflow.yml", WORKFLOW_SIMPLE_CAT_TWICE)
        workflow_id = self.workflow_populator.import_workflow_from_path(workflow_path)
        with self.workflow_populator.export_for_update(workflow_id) as workflow_object:
            workflow_object["annotation"] = "new annotation"
        with open(workflow_path, "r") as f:
            data = yaml.safe_load(f)
        assert data["doc"] == "new annotation"

    def test_sync_ga(self):
        workflow_json = self.workflow_populator.load_workflow("synctest")
        workflow_path = self._write_workflow_content("workflow.ga", json.dumps(workflow_json))
        workflow_id = self.workflow_populator.import_workflow_from_path(workflow_path)
        with self.workflow_populator.export_for_update(workflow_id) as workflow_object:
            workflow_object["annotation"] = "new annotation"
        with open(workflow_path, "r") as f:
            data = json.load(f)
        assert data["annotation"] == "new annotation"

    def _write_workflow_content(self, filename, content):
        workflow_path = os.path.join(self.workflow_directory, filename)
        with open(workflow_path, "w") as f:
            f.write(content)
        return workflow_path
class MaximumWorkflowJobsPerSchedulingIterationTestCase(integration_util.IntegrationTestCase):

    framework_tool_and_types = True

    def setUp(self):
        super(MaximumWorkflowJobsPerSchedulingIterationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["maximum_workflow_jobs_per_scheduling_iteration"] = 1

    def do_test(self):
        workflow_id = self.workflow_populator.upload_yaml_workflow("""
class: GalaxyWorkflow
steps:
  - type: input_collection
  - tool_id: collection_creates_pair
    state:
      input1:
        $link: 0
  - tool_id: collection_paired_test
    state:
      f1:
        $link: 1#paired_output
  - tool_id: cat_list
    state:
      input1:
        $link: 2#out1
""")
        with self.dataset_populator.test_history() as history_id:
            hdca1 = self.dataset_collection_populator.create_list_in_history(history_id, contents=["a\nb\nc\nd\n", "e\nf\ng\nh\n"]).json()
            self.dataset_populator.wait_for_history(history_id, assert_ok=True)
            inputs = {
                '0': {"src": "hdca", "id": hdca1["id"]},
            }
            invocation_id = self.workflow_populator.invoke_workflow(history_id, workflow_id, inputs)
            self.workflow_populator.wait_for_workflow(history_id, workflow_id, invocation_id)
            self.dataset_populator.wait_for_history(history_id, assert_ok=True)
            self.assertEqual("a\nc\nb\nd\ne\ng\nf\nh\n", self.dataset_populator.get_history_dataset_content(history_id, hid=0))
class BaseWorkflowHandlerConfigurationTestCase(integration_util.IntegrationTestCase):

    framework_tool_and_types = True

    def setUp(self):
        super(BaseWorkflowHandlerConfigurationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["job_config_file"] = WORKFLOW_HANDLER_CONFIGURATION_JOB_CONF

    def _invoke_n_workflows(self, n):
        workflow_id = self.workflow_populator.upload_yaml_workflow(PAUSE_WORKFLOW)
        history_id = self.history_id
        hda1 = self.dataset_populator.new_dataset(history_id, content="1 2 3")
        index_map = {'0': dict(src="hda", id=hda1["id"])}
        request = {}
        request["history"] = "hist_id=%s" % history_id
        request["inputs"] = dumps(index_map)
        request["inputs_by"] = 'step_index'
        url = "workflows/%s/invocations" % workflow_id
        for i in range(n):
            self._post(url, data=request)

    def _get_workflow_invocations(self):
        # Consider exposing handler via the API to reduce breaking into
        # Galaxy's internal state.
        app = self._app
        history_id = app.security.decode_id(self.history_id)
        sa_session = app.model.context.current
        history = sa_session.query(app.model.History).get(history_id)
        workflow_invocations = history.workflow_invocations
        return workflow_invocations

    @property
    def is_app_workflow_scheduler(self):
        return self._app.workflow_scheduling_manager.request_monitor is not None
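# WORKFLOW_HANDLER_CONFIGURATION_JOB_CONF above is the path to a job
# configuration file defined elsewhere in the real module. Purely for
# illustration, and as an assumption rather than the actual fixture, a minimal
# XML job conf declaring explicit handlers might look like the sketch below
# (the constant name _EXAMPLE_JOB_CONF_XML is hypothetical).
_EXAMPLE_JOB_CONF_XML = """<job_conf>
    <plugins>
        <plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner" workers="1"/>
    </plugins>
    <handlers>
        <handler id="handler0"/>
        <handler id="handler1"/>
    </handlers>
    <destinations>
        <destination id="local" runner="local"/>
    </destinations>
</job_conf>
"""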