Code example #1
class MaximumWorkflowInvocationDurationTestCase(
        integration_util.IntegrationTestCase):

    framework_tool_and_types = True

    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["maximum_workflow_invocation_duration"] = 20

    def test(self):
        workflow = self.workflow_populator.load_workflow_from_resource(
            "test_workflow_pause")
        workflow_id = self.workflow_populator.create_workflow(workflow)
        history_id = self.dataset_populator.new_history()
        hda1 = self.dataset_populator.new_dataset(history_id, content="1 2 3")
        index_map = {'0': dict(src="hda", id=hda1["id"])}
        request = {}
        request["history"] = "hist_id=%s" % history_id
        request["inputs"] = dumps(index_map)
        request["inputs_by"] = 'step_index'
        url = "workflows/%s/invocations" % (workflow_id)
        invocation_response = self._post(url, data=request)
        invocation_url = url + "/" + invocation_response.json()["id"]
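        # After 5 seconds we are still inside the configured 20 second limit,
        # so the invocation must not have failed yet.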
        time.sleep(5)
        state = self._get(invocation_url).json()["state"]
        assert state != "failed", state
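        # Once the 20 second maximum_workflow_invocation_duration has been
        # exceeded, the invocation should be marked failed.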
        time.sleep(35)
        state = self._get(invocation_url).json()["state"]
        assert state == "failed", state
Code example #2
class MaxDiscoveredFilesTestCase(integration_util.IntegrationTestCase):
    """Describe a Galaxy test instance with embedded pulsar configured."""

    framework_tool_and_types = True
    max_discovered_files = 9

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["max_discovered_files"] = cls.max_discovered_files

    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    def test_discover(self):
        with self.dataset_populator.test_history() as history_id:
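            # The discover_sort_by tool generates more discovered outputs than
            # max_discovered_files (9) allows, so the job is expected to error.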
            response = self.dataset_populator.run_tool("discover_sort_by",
                                                       inputs={},
                                                       history_id=history_id)
            job_id = response["jobs"][0]['id']
            self.dataset_populator.wait_for_job(job_id, assert_ok=False)
            job_details_response = self.dataset_populator.get_job_details(
                job_id, full=True)
            job_details_response.raise_for_status()
            job_details = job_details_response.json()
            assert job_details['state'] == 'error'
            assert f"Job generated more than maximum number ({self.max_discovered_files}) of output datasets" in job_details[
                'job_messages']
Code example #3
    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(
            self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()
Code example #4
class BaseUploadContentConfigurationInstance(integration_util.IntegrationInstance):

    framework_tool_and_types = True

    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    def fetch_target(self, target, assert_ok=False, attach_test_file=False, wait=False):
        payload = {
            "history_id": self.history_id,
            "targets": json.dumps([target]),
        }
        if attach_test_file:
            payload["__files"] = {"files_0|file_data": open(self.test_data_resolver.get_filename("4.bed"))}

        response = self.dataset_populator.fetch(payload, assert_ok=assert_ok, wait=wait)
        return response

    def _write_file(self, dir_path, content, filename="test"):
        """Helper for writing ftp/server dir files."""
        self._ensure_directory(dir_path)
        path = os.path.join(dir_path, filename)
        with open(path, "w") as f:
            f.write(content)
        return path

    def _ensure_directory(self, path):
        if not os.path.exists(path):
            os.makedirs(path)
Code example #5
File: test_quota.py Project: maikenp/galaxy
class QuotaIntegrationTestCase(integration_util.IntegrationTestCase):
    require_admin_user = True

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["enable_quotas"] = True

    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    def test_quota_crud(self):
        quotas = self.dataset_populator.get_quotas()
        assert len(quotas) == 0

        payload = {
            'name': 'defaultquota1',
            'description': 'first default quota',
            'amount': '100MB',
            'operation': '=',
            'default': 'registered',
        }
        self.dataset_populator.create_quota(payload)

        quotas = self.dataset_populator.get_quotas()
        assert len(quotas) == 1
Code example #6
    def setUp(self):
        super(HistoryContentsApiTestCase, self).setUp()
        self.history_id = self._new_history()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(
            self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)
Code example #7
class WebDavIntegrationTestCase(integration_util.IntegrationTestCase):
    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["file_sources_config_file"] = FILE_SOURCES_JOB_CONF

    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    def test_simple_usage(self):
        plugin_config_response = self.galaxy_interactor.get(
            "remote_files/plugins")
        api_asserts.assert_status_code_is_ok(plugin_config_response)
        plugins = plugin_config_response.json()
        assert len(plugins) == 1
        assert plugins[0]["type"] == "webdav"
        assert plugins[0]["uri_prefix"] == "gxfiles://test1"

        data = {"target": "gxfiles://test1"}
        list_response = self.galaxy_interactor.get("remote_files", data)
        api_asserts.assert_status_code_is_ok(list_response)
        remote_files = list_response.json()
        print(remote_files)

        with self.dataset_populator.test_history() as history_id:
            new_dataset = self.dataset_populator.new_dataset(
                history_id, content="gxfiles://test1/a", assert_ok=True)
            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=new_dataset)
            assert content == "a\n", content
Code example #8
File: test_histories.py Project: mvdbeek/galaxy
    def create(self, name: str) -> str:
        response_json = self._create_history(name)
        history_id = response_json["id"]
        # History to share cannot be empty
        populator = DatasetPopulator(self.galaxy_interactor)
        populator.new_dataset(history_id)
        return history_id
Code example #9
class JobRecoveryAfterHandledIntegerationTestCase(
        integration_util.IntegrationTestCase):
    framework_tool_and_types = True

    def setUp(self):
        super(JobRecoveryAfterHandledIntegerationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["job_config_file"] = DELAY_JOB_CONFIG_FILE

    def handle_reconfigure_galaxy_config_kwds(self, config):
        config["job_config_file"] = SIMPLE_JOB_CONFIG_FILE

    def test_recovery(self):
        history_id = self.dataset_populator.new_history()
        self.dataset_populator.run_tool(
            "exit_code_oom",
            {},
            history_id,
            assert_ok=False,
        ).json()
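        # Restart with the simple job config while the delayed job is still
        # running; recovery of the in-flight job should let the history finish ok.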
        self.restart(
            handle_reconfig=self.handle_reconfigure_galaxy_config_kwds)
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_populator.wait_for_history(history_id, assert_ok=True)
Code example #10
class PageJsonEncodingIntegrationTestCase(integration_util.IntegrationTestCase
                                          ):
    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    def test_page_encoding(self):
        request = dict(
            slug="mypage",
            title="MY PAGE",
            content=
            '''<p>Page!<div class="embedded-item" id="History-%s"></div></p>'''
            % self.history_id,
        )
        page_response = self._post("pages", request, json=True)
        api_asserts.assert_status_code_is_ok(page_response)
        sa_session = self._app.model.context
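        # The stored page revision should contain the decoded database id
        # ("History-1"); the encoded API id is substituted back when the page is shown.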
        page_revision = sa_session.query(
            model.PageRevision).filter_by(content_format="html").all()[0]
        assert '''id="History-1"''' in page_revision.content, page_revision.content
        assert '''id="History-%s"''' % self.history_id not in page_revision.content, page_revision.content

        show_page_response = self._get("pages/%s" % page_response.json()["id"])
        api_asserts.assert_status_code_is_ok(show_page_response)
        content = show_page_response.json()["content"]
        assert '''id="History-1"''' not in content, content
        assert '''id="History-%s"''' % self.history_id in content, content

    def test_page_encoding_markdown(self):
        dataset = self.dataset_populator.new_dataset(self.history_id)
        dataset_id = dataset["id"]
        request = dict(
            slug="mypage-markdown",
            title="MY PAGE",
            content='''```galaxy
history_dataset_display(history_dataset_id=%s)
```''' % dataset["id"],
            content_format="markdown",
        )
        page_response = self._post("pages", request, json=True)
        api_asserts.assert_status_code_is_ok(page_response)
        sa_session = self._app.model.context
        page_revision = sa_session.query(
            model.PageRevision).filter_by(content_format="markdown").all()[0]
        assert '''```galaxy
history_dataset_display(history_dataset_id=1)
```''' in page_revision.content, page_revision.content
        assert '''::: history_dataset_display history_dataset_id=%s''' % dataset_id not in page_revision.content, page_revision.content

        show_page_response = self._get("pages/%s" % page_response.json()["id"])
        api_asserts.assert_status_code_is_ok(show_page_response)
        content = show_page_response.json()["content"]
        assert '''```galaxy
history_dataset_display(history_dataset_id=1)
```''' not in content, content
        assert '''```galaxy
history_dataset_display(history_dataset_id=%s)
```''' % dataset_id in content, content
Code example #11
    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)

        self.history_id = self.dataset_populator.new_history()
        self.library = self.library_populator.new_private_library("FolderContentsTestsLibrary")
        self.root_folder_id = self._create_folder_in_library("Test Folder Contents")
Code example #12
File: test_pages.py Project: xingyongma/galaxy
    def test_400_on_invalid_embedded_content(self):
        dataset_populator = DatasetPopulator(self.galaxy_interactor)
        valid_id = dataset_populator.new_history()
        page_request = self._test_page_payload(slug="invalid-embed-content")
        page_request["content"] = '''<p>Page!<div class="embedded-item" id="CoolObject-%s"></div></p>''' % valid_id
        page_response = self._post("pages", page_request)
        self._assert_status_code_is(page_response, 400)
        self._assert_error_code_is(page_response, error_codes.USER_REQUEST_INVALID_PARAMETER)
        assert "embedded HTML content" in page_response.text
Code example #13
class FailJobWhenToolUnavailableTestCase(integration_util.IntegrationTestCase):

    require_admin_user = True

    def setUp(self):
        super(FailJobWhenToolUnavailableTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    @classmethod
    def handle_galaxy_config_kwds(
        cls,
        config,
    ):
        # config["jobs_directory"] = cls.jobs_directory
        # Disable tool dependency resolution.
        config["tool_dependency_dir"] = "none"

    def test_fail_job_when_tool_unavailable(self):
        self.workflow_populator.run_workflow("""
class: GalaxyWorkflow
steps:
  - label: sleep
    run:
      class: GalaxyTool
      command: sleep 20s && echo 'hello world 2' > '$output1'
      outputs:
        output1:
          format: txt
  - tool_id: cat1
    state:
      input1:
        $link: sleep#output1
      queries:
        input2:
          $link: sleep#output1
""",
                                             history_id=self.history_id,
                                             assert_ok=False,
                                             wait=False)
        # Wait until workflow is fully scheduled, otherwise can't test effect of removing tool from queued job
        time.sleep(10)
        self._app.toolbox.remove_tool_by_id('cat1')
        self.dataset_populator.wait_for_history(self.history_id,
                                                assert_ok=False)
        state_details = self.galaxy_interactor.get(
            'histories/%s' % self.history_id).json()['state_details']
        assert state_details['running'] == 0
        assert state_details['ok'] == 1
        assert state_details['error'] == 1
        failed_hda = self.dataset_populator.get_history_dataset_details(
            history_id=self.history_id, assert_ok=False, details=True)
        assert failed_hda['state'] == 'error'
        job = self.galaxy_interactor.get("jobs/%s" %
                                         failed_hda['creating_job']).json()
        assert job['state'] == 'error'
Code example #14
class DataManagerIntegrationTestCase(integration_util.IntegrationTestCase, UsesShed):

    """Test data manager installation and table reload through the API"""

    framework_tool_and_types = True
    use_shared_connection_for_amqp = True

    def setUp(self):
        super(DataManagerIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        try:
            import watchdog  # noqa: F401
        except ImportError:
            raise SkipTest("watchdog library is not available")
        cls.configure_shed_and_conda(config)
        config["tool_data_path"] = cls.shed_tool_data_dir
        config["watch_tool_data_dir"] = True
        cls.username = cls.get_secure_ascii_digits()
        config["admin_users"] = "*****@*****.**" % cls.username

    @skip_if_toolshed_down
    def test_data_manager_installation_table_reload(self):
        """
        Test that we can install data managers, create a new dbkey, and use that dbkey in a downstream data manager.
        """
        self.install_repository("devteam", "data_manager_fetch_genome_dbkeys_all_fasta", "14eb0fc65c62")
        self.install_repository("devteam", "data_manager_sam_fasta_index_builder", "cc4ef4d38cf9")
        with self._different_user(email="*****@*****.**" % self.username):
            with self.dataset_populator.test_history() as history_id:
                run_response = self.dataset_populator.run_tool(tool_id=FETCH_TOOL_ID,
                                                               inputs=FETCH_GENOME_DBKEYS_ALL_FASTA_INPUT,
                                                               history_id=history_id,
                                                               assert_ok=False)
                self.dataset_populator.wait_for_tool_run(history_id=history_id, run_response=run_response, timeout=CONDA_AUTO_INSTALL_JOB_TIMEOUT)
                run_response = self.dataset_populator.run_tool(tool_id=SAM_FASTA_ID,
                                                               inputs=SAM_FASTA_INPUT,
                                                               history_id=history_id,
                                                               assert_ok=False)
                self.dataset_populator.wait_for_tool_run(history_id=history_id, run_response=run_response, timeout=CONDA_AUTO_INSTALL_JOB_TIMEOUT)

    def test_data_manager_manual(self):
        """
        Test that data_manager_manual works, which uses a significant amount of Galaxy-internal code
        """
        self.install_repository('iuc', 'data_manager_manual', '1ed87dee9e68')
        with self._different_user(email="*****@*****.**" % self.username):
            with self.dataset_populator.test_history() as history_id:
                self.dataset_populator.run_tool(tool_id=DATA_MANAGER_MANUAL_ID,
                                                inputs=DATA_MANAGER_MANUAL_INPUT,
                                                history_id=history_id)

    @classmethod
    def get_secure_ascii_digits(cls, n=12):
        return ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(n))
Code example #15
File: test_job_recovery.py Project: mvdbeek/galaxy
    def test_recovery(self):
        history_id = self.dataset_populator.new_history()
        self.dataset_populator.run_tool_raw(
            "exit_code_oom",
            {},
            history_id,
        )
        self.restart(
            handle_reconfig=self.handle_reconfigure_galaxy_config_kwds)
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_populator.wait_for_history(history_id, assert_ok=True)
Code example #16
class TestProvenance(ApiTestCase):

    def setUp(self):
        super(TestProvenance, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    def test_show_prov(self):
        history_id = self.dataset_populator.new_history()
        new_dataset1 = self.dataset_populator.new_dataset(history_id, content='for prov')
        prov_response = self._get("histories/%s/contents/%s/provenance" % (history_id, new_dataset1["id"]))
        self._assert_status_code_is(prov_response, 200)
        self._assert_has_keys(prov_response.json(), "job_id", "id", "stdout", "stderr", "parameters", "tool_id")
Code example #17
File: test_pages.py Project: msauria/galaxy
    def test_create_from_report(self):
        dataset_populator = DatasetPopulator(self.galaxy_interactor)
        workflow_populator = WorkflowPopulator(self.galaxy_interactor)
        test_data = """
input_1:
  value: 1.bed
  type: File
"""
        with dataset_populator.test_history() as history_id:
            summary = workflow_populator.run_workflow("""
class: GalaxyWorkflow
inputs:
  input_1: data
outputs:
  output_1:
    outputSource: first_cat/out_file1
steps:
  first_cat:
    tool_id: cat
    in:
      input1: input_1
""",
                                                      test_data=test_data,
                                                      history_id=history_id)

            workflow_id = summary.workflow_id
            invocation_id = summary.invocation_id
            report_json = workflow_populator.workflow_report_json(
                workflow_id, invocation_id)
            assert "markdown" in report_json
            self._assert_has_keys(report_json, "markdown", "render_format")
            assert report_json["render_format"] == "markdown"
            markdown_content = report_json["markdown"]
            page_request = dict(
                slug="invocation-report",
                title="Invocation Report",
                invocation_id=invocation_id,
            )
            page_response = self._post("pages", page_request, json=True)
            self._assert_status_code_is(page_response, 200)
            page_response = page_response.json()
            show_response = self._get(f"pages/{page_response['id']}")
            self._assert_status_code_is(show_response, 200)
            show_json = show_response.json()
            self._assert_has_keys(show_json, "slug", "title", "id")
            self.assertEqual(show_json["slug"], "invocation-report")
            self.assertEqual(show_json["title"], "Invocation Report")
            self.assertEqual(show_json["content_format"], "markdown")
            markdown_content = show_json["content"]
            assert "## Workflow Outputs" in markdown_content
            assert "## Workflow Inputs" in markdown_content
            assert "## About This Report" not in markdown_content
Code example #18
class AdminToolDataIntegrationTestCase(integration_util.IntegrationTestCase):
    require_admin_user = True

    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    @classmethod
    def configure_temp_tool_data_dir(cls):
        cls.temp_tool_data_dir = cls.temp_config_dir("tool-data")
        cls.temp_tool_data_tables_file = os.path.join(
            cls.temp_tool_data_dir, "sample_tool_data_tables.xml")
        shutil.copytree(SOURCE_TOOL_DATA_DIRECTORY, cls.temp_tool_data_dir)
        cls._test_driver.temp_directories.append(cls.temp_tool_data_dir)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        cls.configure_temp_tool_data_dir()
        config["tool_data_path"] = cls.temp_tool_data_dir
        config["tool_data_table_config_path"] = cls.temp_tool_data_tables_file

    def test_admin_delete_data_table_entry(self):
        show_response = self._get("tool_data/testbeta")
        original_count = len(show_response.json()["fields"])

        history_id = self.dataset_populator.new_history()
        payload = self.dataset_populator.run_tool_payload(
            tool_id="data_manager",
            inputs={"ignored_value": "moo"},
            history_id=history_id,
        )
        create_response = self._post("tools", data=payload)
        create_response.raise_for_status()
        self.dataset_populator.wait_for_history(history_id, assert_ok=True)
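        # Give the tool data table a moment to reload before fetching it again.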
        time.sleep(2)
        show_response = self._get("tool_data/testbeta")
        updated_fields = show_response.json()["fields"]
        self.assertEqual(len(updated_fields), original_count + 1)
        new_field = updated_fields[-1]
        url = self._api_url(
            f"tool_data/testbeta?key={self.galaxy_interactor.api_key}")

        delete_payload = {"values": "\t".join(new_field)}
        delete_response = self._delete(url, data=delete_payload, json=True)
        delete_response.raise_for_status()
        time.sleep(2)
        show_response = self._get("tool_data/testbeta")
        show_response.raise_for_status()
        updated_fields = show_response.json()["fields"]
        assert len(updated_fields) == original_count
Code example #19
class BaseInteractiveToolsIntegrationTestCase(ContainerizedIntegrationTestCase
                                              ):
    framework_tool_and_types = True
    container_type = "docker"
    require_uwsgi = True
    enable_realtime_mapping = True

    def setUp(self):
        super(BaseInteractiveToolsIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    # Move helpers to populators.py
    def wait_on_proxied_content(self, target):
        def get_hosted_content():
            try:
                scheme, rest = target.split("://", 1)
                prefix, host_and_port = rest.split(".interactivetool.")
                faked_host = rest
                if "/" in rest:
                    faked_host = rest.split("/", 1)[0]
                url = "%s://%s" % (scheme, host_and_port)
                response = requests.get(url,
                                        timeout=1,
                                        headers={"Host": faked_host})
                return response.text
            except Exception as e:
                print(e)
                return None

        content = wait_on(get_hosted_content,
                          "realtime hosted content at %s" % target)
        return content

    def entry_point_target(self, entry_point_id):
        entry_point_access_response = self._get("entry_points/%s/access" %
                                                entry_point_id)
        api_asserts.assert_status_code_is(entry_point_access_response, 200)
        access_json = entry_point_access_response.json()
        api_asserts.assert_has_key(access_json, "target")
        return access_json["target"]

    def wait_on_entry_points_active(self, job_id, expected_num=1):
        def active_entry_points():
            entry_points = self.entry_points_for_job(job_id)
            if len(entry_points) != expected_num:
                return None
            elif any([not e["active"] for e in entry_points]):
                return None
            else:
                return entry_points

        return wait_on(active_entry_points, "entry points to become active")

    def entry_points_for_job(self, job_id):
        entry_points_response = self._get("entry_points?job_id=%s" % job_id)
        api_asserts.assert_status_code_is(entry_points_response, 200)
        return entry_points_response.json()
Code example #20
    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)

        for d in [self.library_dir, self.user_library_dir, self.ftp_upload_dir]:
            if os.path.exists(d):
                shutil.rmtree(d)
            os.mkdir(d)
Code example #21
class BaseWorkflowHandlerConfigurationTestCase(
        integration_util.IntegrationTestCase):

    framework_tool_and_types = True
    assign_with = ""

    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["job_config_file"] = config_file(
            WORKFLOW_HANDLER_JOB_CONFIG_TEMPLATE, assign_with=cls.assign_with)

    def _invoke_n_workflows(self, n):
        workflow_id = self.workflow_populator.upload_yaml_workflow(
            PAUSE_WORKFLOW)
        history_id = self.history_id
        hda1 = self.dataset_populator.new_dataset(history_id, content="1 2 3")
        index_map = {'0': dict(src="hda", id=hda1["id"])}
        request = {}
        request["history"] = "hist_id=%s" % history_id
        request["inputs"] = dumps(index_map)
        request["inputs_by"] = 'step_index'
        url = "workflows/%s/invocations" % (workflow_id)
        for i in range(n):
            self._post(url, data=request)

    def _get_workflow_invocations(self):
        # Consider exposing handler via the API to reduce breaking
        # into Galaxy's internal state.
        app = self._app
        history_id = app.security.decode_id(self.history_id)
        sa_session = app.model.context.current
        history = sa_session.query(app.model.History).get(history_id)
        workflow_invocations = history.workflow_invocations
        return workflow_invocations

    @property
    def is_app_workflow_scheduler(self):
        return self._app.workflow_scheduling_manager.request_monitor is not None
Code example #22
File: test_celery_tasks.py Project: mvdbeek/galaxy
class CeleryTasksIntegrationTestCase(IntegrationTestCase, UsesCeleryTasks):
    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    def test_random_simple_task_to_verify_framework_for_testing(self):
        assert mul.delay(4, 4).get(timeout=10) == 16

    def test_task_with_pydantic_argument(self):
        request = CreatePagePayload(
            content_format="markdown",
            title="my cool title",
            slug="my-cool-title",
            annotation="my cool annotation",
        )
        assert process_page.delay(request).get(
            timeout=10
        ) == "content_format is markdown with annotation my cool annotation"

    def test_galaxy_task(self):
        history_id = self.dataset_populator.new_history()
        dataset = self.dataset_populator.new_dataset(history_id, wait=True)
        hda = self._latest_hda
        assert hda

        def hda_purged():
            latest_details = self.dataset_populator.get_history_dataset_details(
                history_id, dataset=dataset, assert_ok=False, wait=False)
            return True if latest_details["purged"] else None

        assert not hda_purged()

        purge_hda.delay(hda_id=hda.id).get(timeout=10)

        wait_on(hda_purged, "dataset to become purged")
        assert hda_purged()

    @property
    def _latest_hda(self):
        latest_hda = self._app.model.session.query(
            HistoryDatasetAssociation).order_by(
                HistoryDatasetAssociation.table.c.id.desc()).first()
        return latest_hda
Code example #23
class DefaultPermissionsIntegrationTestCase(
        integration_util.IntegrationTestCase):
    expected_access_status_code = 200

    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        if hasattr(cls, "new_user_dataset_access_role_default_private"):
            config[
                "new_user_dataset_access_role_default_private"] = cls.new_user_dataset_access_role_default_private

    def test_setting(self):
        hda = self.dataset_populator.new_dataset(self.history_id, wait=True)
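        # Whether another user may read this dataset depends on the configured
        # default dataset permissions (expected_access_status_code).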
        with self._different_user():
            details_response = self.dataset_populator.get_history_dataset_details_raw(
                history_id=self.history_id, dataset_id=hda["id"])
            assert details_response.status_code == self.expected_access_status_code, details_response.content
Code example #24
class InlineJobEnvironmentContainerResolverTestCase(
        integration_util.IntegrationTestCase):

    framework_tool_and_types = True
    container_type = 'docker'
    job_config_file = DOCKERIZED_JOB_CONFIG_FILE

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        cls.jobs_directory = cls._test_driver.mkdtemp()
        config["jobs_directory"] = cls.jobs_directory
        config["job_config_file"] = cls.job_config_file
        disable_dependency_resolution(config)

    @classmethod
    def setUpClass(cls):
        skip_if_container_type_unavailable(cls)
        super().setUpClass()

    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    def test_inline_environment_container_resolver_configuration(self):
        self.dataset_populator.run_tool(
            "mulled_example_broken_no_requirements_fallback", {},
            self.history_id)
        self.dataset_populator.wait_for_history(self.history_id,
                                                assert_ok=True)
        output = self.dataset_populator.get_history_dataset_content(
            self.history_id, timeout=EXTENDED_TIMEOUT)
        assert "0.7.15-r1140" in output
Code example #25
class ExtendedMetadataIntegrationTestCase(integration_util.IntegrationTestCase
                                          ):
    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["metadata_strategy"] = "extended"
        config["tool_evaluation_strategy"] = "remote"
        config["object_store_store_by"] = "uuid"
        config["retry_metadata_internally"] = False

    def test_fetch_data(self):
        history_id = self.dataset_populator.new_history()
        element = dict(src="files")
        target = {
            "destination": {
                "type": "hdas"
            },
            "elements": [element],
        }
        targets = json.dumps([target])
        upload_content = 'abcdef'
        payload = {
            "history_id": history_id,
            "targets": targets,
            "__files": {
                "files_0|file_data": upload_content
            }
        }
        new_dataset = self.dataset_populator.fetch(
            payload, assert_ok=True).json()["outputs"][0]
        self.dataset_populator.wait_for_history(history_id, assert_ok=True)
        content = self.dataset_populator.get_history_dataset_content(
            history_id=history_id,
            dataset_id=new_dataset['id'],
        )
        assert content == upload_content
Code example #26
File: test_jobs.py Project: xingyongma/galaxy
    def setUp(self):
        super(ObjectStoreJobsIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        with self.dataset_populator.test_history() as history_id:
            hda1 = self.dataset_populator.new_dataset(
                history_id, content=TEST_INPUT_FILES_CONTENT)
            create_10_inputs = {
                "input1": {
                    "src": "hda",
                    "id": hda1["id"]
                },
                "input2": {
                    "src": "hda",
                    "id": hda1["id"]
                },
            }
            self.dataset_populator.run_tool(
                "create_10",
                create_10_inputs,
                history_id,
                assert_ok=True,
            )
            self.dataset_populator.wait_for_history(history_id)
Code example #27
class WorkQueuePutFailureTestCase(integration_util.IntegrationTestCase):

    def setUp(self):
        super(WorkQueuePutFailureTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    @classmethod
    def handle_galaxy_config_kwds(cls, config, ):
        # config["jobs_directory"] = cls.jobs_directory
        fd, path = tempfile.mkstemp(suffix='job_conf.yml')
        with open(path, 'w') as job_conf:
            job_conf.write(job_conf_yaml)
        config["job_config_file"] = path
        # Disable tool dependency resolution.
        config["tool_dependency_dir"] = "none"

    def test_job_fails(self):
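        # With this job configuration the upload job is expected to end in an error state.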
        self.dataset_populator.new_dataset(self.history_id, content="1 2 3")
        self.dataset_populator.wait_for_history(self.history_id, assert_ok=False)
        state_details = self.galaxy_interactor.get('histories/%s' % self.history_id).json()['state_details']
        assert state_details['running'] == 0
        assert state_details['error'] == 1
Code example #28
class MaximumWorkflowJobsPerSchedulingIterationTestCase(
        integration_util.IntegrationTestCase):

    framework_tool_and_types = True

    def setUp(self):
        super(MaximumWorkflowJobsPerSchedulingIterationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(
            self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
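        # Allow only one workflow job to be scheduled per iteration so the
        # workflow requires multiple scheduling passes to complete.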
        config["maximum_workflow_jobs_per_scheduling_iteration"] = 1

    def do_test(self):
        workflow_id = self.workflow_populator.upload_yaml_workflow("""
class: GalaxyWorkflow
steps:
  - type: input_collection
  - tool_id: collection_creates_pair
    state:
      input1:
        $link: 0
  - tool_id: collection_paired_test
    state:
      f1:
        $link: 1#paired_output
  - tool_id: cat_list
    state:
      input1:
        $link: 2#out1
""")
        with self.dataset_populator.test_history() as history_id:
            hdca1 = self.dataset_collection_populator.create_list_in_history(
                history_id, contents=["a\nb\nc\nd\n", "e\nf\ng\nh\n"]).json()
            self.dataset_populator.wait_for_history(history_id, assert_ok=True)
            inputs = {
                '0': {
                    "src": "hdca",
                    "id": hdca1["id"]
                },
            }
            invocation_id = self.workflow_populator.invoke_workflow(
                history_id, workflow_id, inputs)
            self.workflow_populator.wait_for_workflow(history_id, workflow_id,
                                                      invocation_id)
            self.dataset_populator.wait_for_history(history_id, assert_ok=True)
            self.assertEqual(
                "a\nc\nb\nd\ne\ng\nf\nh\n",
                self.dataset_populator.get_history_dataset_content(history_id,
                                                                   hid=0))
Code example #29
class BaseEmbeddedPulsarContainerIntegrationTestCase(integration_util.IntegrationTestCase):
    framework_tool_and_types = True

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        cls.jobs_directory = cls._test_driver.mkdtemp()
        config["jobs_directory"] = cls.jobs_directory
        config["job_config_file"] = cls.job_config_file
        disable_dependency_resolution(config)

    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    @classmethod
    def setUpClass(cls):
        skip_if_container_type_unavailable(cls)
        super().setUpClass()
Code example #30
File: test_resolvers.py Project: mvdbeek/galaxy
    def test_legacy_r_mapping(self):
        tool_id = "legacy_R"
        dataset_populator = DatasetPopulator(self.galaxy_interactor)
        history_id = dataset_populator.new_history()
        endpoint = "tools/%s/install_dependencies" % tool_id
        data = {'id': tool_id}
        create_response = self._post(endpoint, data=data, admin=True)
        self._assert_status_code_is(create_response, 200)
        payload = dataset_populator.run_tool_payload(
            tool_id=tool_id,
            inputs={},
            history_id=history_id,
        )
        create_response = self._post("tools", data=payload)
        self._assert_status_code_is(create_response, 200)
        dataset_populator.wait_for_history(history_id, assert_ok=True)