class MaximumWorkflowInvocationDurationTestCase(integration_util.IntegrationTestCase):

    framework_tool_and_types = True

    def setUp(self):
        super(MaximumWorkflowInvocationDurationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["maximum_workflow_invocation_duration"] = 20

    def do_test(self):
        workflow = self.workflow_populator.load_workflow_from_resource("test_workflow_pause")
        workflow_id = self.workflow_populator.create_workflow(workflow)
        history_id = self.dataset_populator.new_history()
        hda1 = self.dataset_populator.new_dataset(history_id, content="1 2 3")
        index_map = {
            '0': dict(src="hda", id=hda1["id"])
        }
        request = {}
        request["history"] = "hist_id=%s" % history_id
        request["inputs"] = dumps(index_map)
        request["inputs_by"] = 'step_index'
        url = "workflows/%s/invocations" % (workflow_id)
        invocation_response = self._post(url, data=request)
        invocation_url = url + "/" + invocation_response.json()["id"]
        time.sleep(5)
        state = self._get(invocation_url).json()["state"]
        assert state != "failed", state
        time.sleep(35)
        state = self._get(invocation_url).json()["state"]
        assert state == "failed", state
class JobRecoveryAfterHandledIntegrationTestCase(integration_util.IntegrationTestCase):
    framework_tool_and_types = True

    def setUp(self):
        super(JobRecoveryAfterHandledIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["job_config_file"] = DELAY_JOB_CONFIG_FILE

    def handle_reconfigure_galaxy_config_kwds(self, config):
        config["job_config_file"] = SIMPLE_JOB_CONFIG_FILE

    def test_recovery(self):
        history_id = self.dataset_populator.new_history()
        self.dataset_populator.run_tool(
            "exit_code_oom",
            {},
            history_id,
            assert_ok=False,
        ).json()
        self.restart(
            handle_reconfig=self.handle_reconfigure_galaxy_config_kwds)
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_populator.wait_for_history(history_id, assert_ok=True)
class MaximumWorkflowInvocationDurationTestCase(
        integration_util.IntegrationTestCase):
    """Test that invocation scheduling fails once maximum_workflow_invocation_duration is exceeded."""

    framework_tool_and_types = True

    def setUp(self):
        super(MaximumWorkflowInvocationDurationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["maximum_workflow_invocation_duration"] = 20

    def do_test(self):
        workflow = self.workflow_populator.load_workflow_from_resource(
            "test_workflow_pause")
        workflow_id = self.workflow_populator.create_workflow(workflow)
        history_id = self.dataset_populator.new_history()
        hda1 = self.dataset_populator.new_dataset(history_id, content="1 2 3")
        index_map = {'0': dict(src="hda", id=hda1["id"])}
        request = {}
        request["history"] = "hist_id=%s" % history_id
        request["inputs"] = dumps(index_map)
        request["inputs_by"] = 'step_index'
        url = "workflows/%s/invocations" % (workflow_id)
        invocation_response = self._post(url, data=request)
        invocation_url = url + "/" + invocation_response.json()["id"]
        time.sleep(5)
        state = self._get(invocation_url).json()["state"]
        assert state != "failed", state
        time.sleep(35)
        state = self._get(invocation_url).json()["state"]
        assert state == "failed", state
Example #4
 def setUp(self):
     super(HistoryContentsApiTestCase, self).setUp()
     self.history_id = self._new_history()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
     self.dataset_collection_populator = DatasetCollectionPopulator(
         self.galaxy_interactor)
     self.library_populator = LibraryPopulator(self.galaxy_interactor)
class JobRecoveryAfterHandledIntegrationTestCase(integration_util.IntegrationTestCase):
    framework_tool_and_types = True

    def setUp(self):
        super(JobRecoveryAfterHandledIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["job_config_file"] = DELAY_JOB_CONFIG_FILE

    def handle_reconfigure_galaxy_config_kwds(self, config):
        config["job_config_file"] = SIMPLE_JOB_CONFIG_FILE

    def test_recovery(self):
        history_id = self.dataset_populator.new_history()
        self.dataset_populator.run_tool(
            "exit_code_oom",
            {},
            history_id,
            assert_ok=False,
        ).json()
        self.restart(handle_reconfig=self.handle_reconfigure_galaxy_config_kwds)
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_populator.wait_for_history(history_id, assert_ok=True)
Example #6
 def test_400_on_invalid_embedded_content(self):
     dataset_populator = DatasetPopulator(self.galaxy_interactor)
     valid_id = dataset_populator.new_history()
     page_request = self._test_page_payload(slug="invalid-id-encoding")
     page_request["content"] = '''<p>Page!<div class="embedded-item" id="CoolObject-%s"></div></p>''' % valid_id
     page_response = self._post("pages", page_request)
     self._assert_status_code_is(page_response, 400)
     self._assert_error_code_is(page_response, error_codes.USER_REQUEST_INVALID_PARAMETER)
     assert "embedded HTML content" in page_response.text
Example #7
class DataManagerIntegrationTestCase(integration_util.IntegrationTestCase, UsesShed):

    """Test data manager installation and table reload through the API"""

    framework_tool_and_types = True
    use_shared_connection_for_amqp = True

    def setUp(self):
        super(DataManagerIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        try:
            import watchdog  # noqa: F401
        except ImportError:
            raise SkipTest("watchdog library is not available")
        cls.configure_shed_and_conda(config)
        config["tool_data_path"] = cls.shed_tool_data_dir
        config["watch_tool_data_dir"] = True
        cls.username = cls.get_secure_ascii_digits()
        config["admin_users"] = "*****@*****.**" % cls.username

    @skip_if_toolshed_down
    def test_data_manager_installation_table_reload(self):
        """
        Test that we can install data managers, create a new dbkey, and use that dbkey in a downstream data manager.
        """
        self.install_repository("devteam", "data_manager_fetch_genome_dbkeys_all_fasta", "b1bc53e9bbc5")
        self.install_repository("devteam", "data_manager_sam_fasta_index_builder", "406896e00d0e", 'https://testtoolshed.g2.bx.psu.edu')
        with self._different_user(email="*****@*****.**" % self.username):
            with self.dataset_populator.test_history() as history_id:
                run_response = self.dataset_populator.run_tool(tool_id=FETCH_TOOL_ID,
                                                               inputs=FETCH_GENOME_DBKEYS_ALL_FASTA_INPUT,
                                                               history_id=history_id,
                                                               assert_ok=False)
                self.dataset_populator.wait_for_tool_run(history_id=history_id, run_response=run_response, timeout=CONDA_AUTO_INSTALL_JOB_TIMEOUT)
                run_response = self.dataset_populator.run_tool(tool_id=SAM_FASTA_ID,
                                                               inputs=SAM_FASTA_INPUT,
                                                               history_id=history_id,
                                                               assert_ok=False)
                self.dataset_populator.wait_for_tool_run(history_id=history_id, run_response=run_response, timeout=CONDA_AUTO_INSTALL_JOB_TIMEOUT)

    def test_data_manager_manual(self):
        """
        Test that data_manager_manual works, which uses a significant amount of Galaxy-internal code
        """
        self.install_repository('iuc', 'data_manager_manual', '1ed87dee9e68')
        with self._different_user(email="*****@*****.**" % self.username):
            with self.dataset_populator.test_history() as history_id:
                self.dataset_populator.run_tool(tool_id=DATA_MANAGER_MANUAL_ID,
                                                inputs=DATA_MANAGER_MANUAL_INPUT,
                                                history_id=history_id)

    @classmethod
    def get_secure_ascii_digits(cls, n=12):
        return ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(n))
Example #8
class FailJobWhenToolUnavailableTestCase(integration_util.IntegrationTestCase):

    require_admin_user = True

    def setUp(self):
        super(FailJobWhenToolUnavailableTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        # config["jobs_directory"] = cls.jobs_directory
        # Disable tool dependency resolution.
        config["tool_dependency_dir"] = "none"

    def test_fail_job_when_tool_unavailable(self):
        self.workflow_populator.run_workflow("""
class: GalaxyWorkflow
steps:
  - label: sleep
    run:
      class: GalaxyTool
      command: sleep 20s && echo 'hello world 2' > '$output1'
      outputs:
        output1:
          format: txt
  - tool_id: cat1
    state:
      input1:
        $link: sleep#output1
      queries:
        input2:
          $link: sleep#output1
""",
                                             history_id=self.history_id,
                                             assert_ok=False,
                                             wait=False)
        # Wait until workflow is fully scheduled, otherwise can't test effect of removing tool from queued job
        time.sleep(10)
        self._app.toolbox.remove_tool_by_id('cat1')
        self.dataset_populator.wait_for_history(self.history_id,
                                                assert_ok=False)
        state_details = self.galaxy_interactor.get(
            'histories/%s' % self.history_id).json()['state_details']
        assert state_details['running'] == 0
        assert state_details['ok'] == 1
        assert state_details['error'] == 1
        failed_hda = self.dataset_populator.get_history_dataset_details(
            history_id=self.history_id, assert_ok=False, details=True)
        assert failed_hda['state'] == 'error'
        job = self.galaxy_interactor.get("jobs/%s" %
                                         failed_hda['creating_job']).json()
        assert job['state'] == 'error'
 def test_recovery(self):
     history_id = self.dataset_populator.new_history()
     self.dataset_populator.run_tool(
         "exit_code_oom",
         {},
         history_id,
         assert_ok=False,
     ).json()
     self.restart(
         handle_reconfig=self.handle_reconfigure_galaxy_config_kwds)
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
     self.dataset_populator.wait_for_history(history_id, assert_ok=True)
class TestProvenance(api.ApiTestCase):

    def setUp(self):
        super(TestProvenance, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    def test_show_prov(self):
        history_id = self.dataset_populator.new_history()
        new_dataset1 = self.dataset_populator.new_dataset(history_id, content='for prov')
        prov_response = self._get("histories/%s/contents/%s/provenance" % (history_id, new_dataset1["id"]))
        self._assert_status_code_is(prov_response, 200)
        self._assert_has_keys(prov_response.json(), "job_id", "id", "stdout", "stderr", "parameters", "tool_id")
class TestProvenance(api.ApiTestCase):
    def setUp(self):
        super(TestProvenance, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    def test_show_prov(self):
        history_id = self.dataset_populator.new_history()
        new_dataset1 = self.dataset_populator.new_dataset(history_id,
                                                          content='for prov')
        prov_response = self._get("histories/%s/contents/%s/provenance" %
                                  (history_id, new_dataset1["id"]))
        self._assert_status_code_is(prov_response, 200)
        self._assert_has_keys(prov_response.json(), "job_id", "id", "stdout",
                              "stderr", "parameters", "tool_id")
Example #12
class PageJsonEncodingIntegrationTestCase(integration_util.IntegrationTestCase):
    def setUp(self):
        super(PageJsonEncodingIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    def test_page_encoding(self):
        history_id = self.dataset_populator.new_history()
        request = dict(
            slug="mypage",
            title="MY PAGE",
            content='''<p>Page!<div class="embedded-item" id="History-%s"></div></p>''' % history_id,
        )
        page_response = self._post("pages", request)
        api_asserts.assert_status_code_is_ok(page_response)
        sa_session = self._app.model.context
        page_revision = sa_session.query(model.PageRevision).all()[0]
        assert '''id="History-1"''' in page_revision.content, page_revision.content
        assert '''id="History-%s"''' % history_id not in page_revision.content, page_revision.content

        show_page_response = self._get("pages/%s" % page_response.json()["id"])
        api_asserts.assert_status_code_is_ok(show_page_response)
        content = show_page_response.json()["content"]
        assert '''id="History-1"''' not in content, content
        assert '''id="History-%s"''' % history_id in content, content
Example #13
class BaseCheckUploadContentConfigurationTestCase(integration_util.IntegrationTestCase):

    framework_tool_and_types = True

    def setUp(self):
        super(BaseCheckUploadContentConfigurationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()
class BaseUploadContentConfigurationTestCase(integration_util.IntegrationTestCase):

    framework_tool_and_types = True

    def setUp(self):
        super(BaseUploadContentConfigurationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()
Example #15
 def test_legacy_r_mapping(self):
     """
     """
     tool_id = "legacy_R"
     dataset_populator = DatasetPopulator(self.galaxy_interactor)
     history_id = dataset_populator.new_history()
     endpoint = "tools/%s/install_dependencies" % tool_id
     data = {'id': tool_id}
     create_response = self._post(endpoint, data=data, admin=True)
     self._assert_status_code_is(create_response, 200)
     payload = dataset_populator.run_tool_payload(
         tool_id=tool_id,
         inputs={},
         history_id=history_id,
     )
     create_response = self._post("tools", data=payload)
     self._assert_status_code_is(create_response, 200)
     dataset_populator.wait_for_history(history_id, assert_ok=True)
Example #16
class BaseWorkflowHandlerConfigurationTestCase(integration_util.IntegrationTestCase):

    framework_tool_and_types = True

    def setUp(self):
        super(BaseWorkflowHandlerConfigurationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["job_config_file"] = WORKFLOW_HANDLER_CONFIGURATION_JOB_CONF

    def _invoke_n_workflows(self, n):
        workflow_id = self.workflow_populator.upload_yaml_workflow(PAUSE_WORKFLOW)
        history_id = self.history_id
        hda1 = self.dataset_populator.new_dataset(history_id, content="1 2 3")
        index_map = {
            '0': dict(src="hda", id=hda1["id"])
        }
        request = {}
        request["history"] = "hist_id=%s" % history_id
        request["inputs"] = dumps(index_map)
        request["inputs_by"] = 'step_index'
        url = "workflows/%s/invocations" % (workflow_id)
        for i in range(n):
            self._post(url, data=request)

    def _get_workflow_invocations(self):
        # Consider exposing handler via the API to reduce breaking
        # into Galaxy's internal state.
        app = self._app
        history_id = app.security.decode_id(self.history_id)
        sa_session = app.model.context.current
        history = sa_session.query(app.model.History).get(history_id)
        workflow_invocations = history.workflow_invocations
        return workflow_invocations

    @property
    def is_app_workflow_scheduler(self):
        return self._app.workflow_scheduling_manager.request_monitor is not None
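A minimal sketch of how a subclass might combine these helpers; the class name and assertion below are illustrative assumptions, not part of the excerpt:
class HypotheticalWorkflowHandlerTestCase(BaseWorkflowHandlerConfigurationTestCase):

    def test_invocations_recorded(self):
        # Invoke three workflows and confirm the history records three invocations.
        self._invoke_n_workflows(3)
        assert len(self._get_workflow_invocations()) == 3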
class BaseUploadContentConfigurationTestCase(
        integration_util.IntegrationTestCase):

    framework_tool_and_types = True

    def setUp(self):
        super(BaseUploadContentConfigurationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    def fetch_target(self, target, assert_ok=False, attach_test_file=False):
        payload = {
            "history_id": self.history_id,
            "targets": json.dumps([target]),
        }
        if attach_test_file:
            payload["__files"] = {
                "files_0|file_data":
                open(self.test_data_resolver.get_filename("4.bed"))
            }

        response = self.dataset_populator.fetch(payload, assert_ok=assert_ok)
        return response

    @classmethod
    def temp_config_dir(cls, name):
        # realpath here to get around problems with symlinks being blocked.
        return os.path.realpath(
            os.path.join(cls._test_driver.galaxy_test_tmp_dir, name))

    def _write_file(self, dir_path, content, filename="test"):
        """Helper for writing ftp/server dir files."""
        self._ensure_directory(dir_path)
        path = os.path.join(dir_path, filename)
        with open(path, "w") as f:
            f.write(content)
        return path

    def _ensure_directory(self, path):
        if not os.path.exists(path):
            os.makedirs(path)
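A sketch of calling fetch_target; the target payload below (an "hdas" destination with a pasted element) follows Galaxy's data-fetch request shape, but this particular test is an assumption, not part of the excerpt:
    def test_fetch_pasted_content_sketch(self):
        # Hypothetical example: upload pasted content through the fetch API.
        target = {
            "destination": {"type": "hdas"},
            "elements": [{"src": "pasted", "paste_content": "1 2 3\n", "name": "pasted"}],
        }
        self.fetch_target(target, assert_ok=True)
        self.dataset_populator.wait_for_history(self.history_id, assert_ok=True)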
 def test_recovery(self):
     history_id = self.dataset_populator.new_history()
     self.dataset_populator.run_tool(
         "exit_code_oom",
         {},
         history_id,
         assert_ok=False,
     ).json()
     self.restart(handle_reconfig=self.handle_reconfigure_galaxy_config_kwds)
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
     self.dataset_populator.wait_for_history(history_id, assert_ok=True)
Example #19
 def setUp(self):
     super(ObjectStoreJobsIntegrationTestCase, self).setUp()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
     with self.dataset_populator.test_history() as history_id:
         hda1 = self.dataset_populator.new_dataset(
             history_id, content=TEST_INPUT_FILES_CONTENT)
         create_10_inputs = {
             "input1": {
                 "src": "hda",
                 "id": hda1["id"]
             },
             "input2": {
                 "src": "hda",
                 "id": hda1["id"]
             },
         }
         self.dataset_populator.run_tool(
             "create_10",
             create_10_inputs,
             history_id,
             assert_ok=True,
         )
         self.dataset_populator.wait_for_history(history_id)
class BaseUploadContentConfigurationTestCase(integration_util.IntegrationTestCase):

    framework_tool_and_types = True

    def setUp(self):
        super(BaseUploadContentConfigurationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    def fetch_target(self, target, assert_ok=False, attach_test_file=False):
        payload = {
            "history_id": self.history_id,
            "targets": json.dumps([target]),
        }
        if attach_test_file:
            payload["__files"] = {"files_0|file_data": open(self.test_data_resolver.get_filename("4.bed"))}

        response = self.dataset_populator.fetch(payload, assert_ok=assert_ok)
        return response

    @classmethod
    def temp_config_dir(cls, name):
        # realpath here to get around problems with symlinks being blocked.
        return os.path.realpath(os.path.join(cls._test_driver.galaxy_test_tmp_dir, name))

    def _write_file(self, dir_path, content, filename="test"):
        """Helper for writing ftp/server dir files."""
        self._ensure_directory(dir_path)
        path = os.path.join(dir_path, filename)
        with open(path, "w") as f:
            f.write(content)
        return path

    def _ensure_directory(self, path):
        if not os.path.exists(path):
            os.makedirs(path)
class ObjectStoreJobsIntegrationTestCase(integration_util.IntegrationTestCase):

    framework_tool_and_types = True

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        temp_directory = cls._test_driver.mkdtemp()
        cls.object_stores_parent = temp_directory
        for disk_store_file_name in ["files1", "files2", "files3"]:
            disk_store_path = os.path.join(temp_directory, disk_store_file_name)
            os.makedirs(disk_store_path)
            setattr(cls, "%s_path" % disk_store_file_name, disk_store_path)
        config_path = os.path.join(temp_directory, "object_store_conf.xml")
        with open(config_path, "w") as f:
            f.write(DISTRIBUTED_OBJECT_STORE_CONFIG_TEMPLATE.safe_substitute({"temp_directory": temp_directory}))
        config["object_store_config_file"] = config_path

    def setUp(self):
        super(ObjectStoreJobsIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    def test_tool_simple_constructs(self):
        with self.dataset_populator.test_history() as history_id:
            hda1 = self.dataset_populator.new_dataset(history_id, content="1 2 3")
            create_10_inputs = {
                "input1": {"src": "hda", "id": hda1["id"]},
                "input2": {"src": "hda", "id": hda1["id"]},
            }
            self.dataset_populator.run_tool(
                "create_10",
                create_10_inputs,
                history_id,
                assert_ok=True,
            )
            self.dataset_populator.wait_for_history(history_id)

        files_1_count = _files_count(self.files1_path)
        files_2_count = _files_count(self.files2_path)
        files_3_count = _files_count(self.files3_path)

        # Ensure no files written to the secondary/inactive hierarchical disk store.
        assert files_3_count == 0

        # Ensure the 10 outputs were written to one of the distributed object store's disk
        # stores (it will have either 10 or 11 depending on whether the input was also
        # written there). The other disk store may or may not have the input file, so it
        # should have at most one file.
        assert (files_1_count >= 10) or (files_2_count >= 10)
        assert (files_1_count <= 1) or (files_2_count <= 1)

        # Other sanity checks on the test itself - these just confirm the test was set
        # up as intended and are not actually testing object store behavior.
        assert (files_1_count <= 11) and (files_2_count <= 11)
        assert (files_1_count >= 0) and (files_2_count >= 0)
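The _files_count helper asserted on above is not shown in this excerpt; a minimal sketch consistent with how it is called, counting files under each disk store path (assuming the module's existing os import):
def _files_count(directory):
    # Count regular files anywhere under the given directory tree.
    return sum(len(files) for _, _, files in os.walk(directory))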
class BaseEmbeddedPulsarContainerIntegrationTestCase(
        integration_util.IntegrationTestCase):
    framework_tool_and_types = True

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        cls.jobs_directory = cls._test_driver.mkdtemp()
        config["jobs_directory"] = cls.jobs_directory
        config["job_config_file"] = cls.job_config_file
        disable_dependency_resolution(config)

    def setUp(self):
        super(BaseEmbeddedPulsarContainerIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    @classmethod
    def setUpClass(cls):
        skip_if_container_type_unavailable(cls)
        super(BaseEmbeddedPulsarContainerIntegrationTestCase, cls).setUpClass()
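disable_dependency_resolution is likewise absent from the excerpt; a plausible sketch, mirroring the explicit tool_dependency_dir setting another example above applies under the comment "Disable tool dependency resolution.":
def disable_dependency_resolution(config):
    # Assumed implementation: disable dependency resolution the same way
    # FailJobWhenToolUnavailableTestCase does directly in its config hook.
    config["tool_dependency_dir"] = "none"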
class DataManagerIntegrationTestCase(integration_util.IntegrationTestCase, UsesShed):

    """Test data manager installation and table reload through the API"""

    framework_tool_and_types = True

    def setUp(self):
        super(DataManagerIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        try:
            import watchdog  # noqa: F401
        except ImportError:
            raise SkipTest("watchdog library is not available")
        cls.configure_shed_and_conda(config)
        config["tool_data_path"] = cls.shed_tool_data_dir
        config["watch_tool_data_dir"] = True
        cls.username = cls.get_secure_ascii_digits()
        config["admin_users"] = "*****@*****.**" % cls.username

    def test_data_manager_installation_table_reload(self):
        """
        Test that we can install data managers, create a new dbkey, and use that dbkey in a downstream data manager.
        """
        self.install_repository("devteam", "data_manager_fetch_genome_dbkeys_all_fasta", "b1bc53e9bbc5")
        self.install_repository("devteam", "data_manager_sam_fasta_index_builder", "1865e693d8b2")
        with self._different_user(email="*****@*****.**" % self.username):
            with self.dataset_populator.test_history() as history_id:
                run_response = self.dataset_populator.run_tool(tool_id=FETCH_TOOL_ID,
                                                               inputs=FETCH_GENOME_DBKEYS_ALL_FASTA_INPUT,
                                                               history_id=history_id,
                                                               assert_ok=False)
                self.dataset_populator.wait_for_tool_run(history_id=history_id, run_response=run_response, timeout=CONDA_AUTO_INSTALL_JOB_TIMEOUT)
                run_response = self.dataset_populator.run_tool(tool_id=SAM_FASTA_ID,
                                                               inputs=SAM_FASTA_INPUT,
                                                               history_id=history_id,
                                                               assert_ok=False)
                self.dataset_populator.wait_for_tool_run(history_id=history_id, run_response=run_response, timeout=CONDA_AUTO_INSTALL_JOB_TIMEOUT)

    @classmethod
    def get_secure_ascii_digits(cls, n=12):
        return ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(n))
 def test_runs_on_mule(self):
     tool_id = 'config_vars'
     expect_server_name = self.expected_server_name
     dataset_populator = DatasetPopulator(self.galaxy_interactor)
     history_id = dataset_populator.new_history()
     payload = dataset_populator.run_tool(
         tool_id=tool_id,
         inputs={'var': 'server_name'},
         history_id=history_id,
     )
     dataset_id = payload['outputs'][0]['id']
     dataset_populator.wait_for_dataset(history_id, dataset_id, assert_ok=True)
     output = dataset_populator.get_history_dataset_content(history_id, dataset_id=dataset_id).strip()
     assert output.startswith(expect_server_name), (
         "Job handler's server name '{output}' does not start with expected string '{expected}'".format(
             output=output,
             expected=expect_server_name,
         )
     )
Example #25
 def test_legacy_r_mapping(self):
     """
     """
     tool_id = "legacy_R"
     dataset_populator = DatasetPopulator(self.galaxy_interactor)
     history_id = dataset_populator.new_history()
     endpoint = "tools/%s/install_dependencies" % tool_id
     data = {'id': tool_id}
     create_response = self._post(endpoint, data=data, admin=True)
     self._assert_status_code_is(create_response, 200)
     payload = dataset_populator.run_tool_payload(
         tool_id=tool_id,
         inputs={},
         history_id=history_id,
     )
     create_response = self._post("tools", data=payload)
     self._assert_status_code_is(create_response, 200)
     dataset_populator.wait_for_history(history_id, assert_ok=True)
class MaximumWorkflowJobsPerSchedulingIterationTestCase(integration_util.IntegrationTestCase):

    framework_tool_and_types = True

    def setUp(self):
        super(MaximumWorkflowJobsPerSchedulingIterationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        config["maximum_workflow_jobs_per_scheduling_iteration"] = 1

    def do_test(self):
        workflow_id = self.workflow_populator.upload_yaml_workflow("""
class: GalaxyWorkflow
steps:
  - type: input_collection
  - tool_id: collection_creates_pair
    state:
      input1:
        $link: 0
  - tool_id: collection_paired_test
    state:
      f1:
        $link: 1#paired_output
  - tool_id: cat_list
    state:
      input1:
        $link: 2#out1
""")
        with self.dataset_populator.test_history() as history_id:
            hdca1 = self.dataset_collection_populator.create_list_in_history(history_id, contents=["a\nb\nc\nd\n", "e\nf\ng\nh\n"]).json()
            self.dataset_populator.wait_for_history(history_id, assert_ok=True)
            inputs = {
                '0': {"src": "hdca", "id": hdca1["id"]},
            }
            invocation_id = self.workflow_populator.invoke_workflow(history_id, workflow_id, inputs)
            self.workflow_populator.wait_for_workflow(history_id, workflow_id, invocation_id)
            self.dataset_populator.wait_for_history(history_id, assert_ok=True)
            self.assertEqual("a\nc\nb\nd\ne\ng\nf\nh\n", self.dataset_populator.get_history_dataset_content(history_id, hid=0))
 def setUp(self):
     super(HistoryContentsApiTestCase, self).setUp()
     self.history_id = self._new_history()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
     self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
     self.library_populator = LibraryPopulator(self.galaxy_interactor)
 def setUp(self):
     super(ScriptsIntegrationTestCase, self).setUp()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
     self.config_dir = tempfile.mkdtemp()
class ScriptsIntegrationTestCase(integration_util.IntegrationTestCase):

    def setUp(self):
        super(ScriptsIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.config_dir = tempfile.mkdtemp()

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        cls._raw_config = config

    def test_helper(self):
        script = "helper.py"
        self._scripts_check_argparse_help(script)

        history_id = self.dataset_populator.new_history()
        dataset = self.dataset_populator.new_dataset(history_id, wait=True)
        dataset_id = dataset["id"]
        config_file = self.write_config_file()
        output = self._scripts_check_output(script, ["-c", config_file, "--decode-id", dataset_id])
        assert "Decoded " in output

    def test_cleanup(self):
        script = "cleanup_datasets/cleanup_datasets.py"
        self._scripts_check_argparse_help(script)

        history_id = self.dataset_populator.new_history()
        delete_response = self.dataset_populator._delete("histories/%s" % history_id)
        assert delete_response.status_code == 200
        assert delete_response.json()["purged"] is False
        config_file = self.write_config_file()
        output = self._scripts_check_output(script, ["-c", config_file, "--days", "0", "--purge_histories"])
        print(output)
        history_response = self.dataset_populator._get("histories/%s" % history_id)
        assert history_response.status_code == 200
        assert history_response.json()["purged"] is True, history_response.json()

    def test_pgcleanup(self):
        self._skip_if_not_postgres()

        script = "cleanup_datasets/pgcleanup.py"
        self._scripts_check_argparse_help(script)

        history_id = self.dataset_populator.new_history()
        delete_response = self.dataset_populator._delete("histories/%s" % history_id)
        assert delete_response.status_code == 200
        assert delete_response.json()["purged"] is False
        config_file = self.write_config_file()
        output = self._scripts_check_output(script, ["-c", config_file, "--older-than", "0", "--sequence", "purge_deleted_histories"])
        print(output)
        history_response = self.dataset_populator._get("histories/%s" % history_id)
        assert history_response.status_code == 200
        assert history_response.json()["purged"] is True, history_response.json()

    def test_set_user_disk_usage(self):
        script = "set_user_disk_usage.py"
        self._scripts_check_argparse_help(script)

        history_id = self.dataset_populator.new_history()
        self.dataset_populator.new_dataset(history_id, wait=True)
        config_file = self.write_config_file()
        output = self._scripts_check_output(script, ["-c", config_file])
        # verify the script runs to completion without crashing
        assert "100% complete" in output, output

    def test_set_dataset_sizes(self):
        script = "set_dataset_sizes.py"
        self._scripts_check_argparse_help(script)

        # TODO: change the size of the dataset and verify this works.
        history_id = self.dataset_populator.new_history()
        self.dataset_populator.new_dataset(history_id, wait=True)
        config_file = self.write_config_file()
        output = self._scripts_check_output(script, ["-c", config_file])
        # verify the script runs to completion without crashing
        assert "Completed 100%" in output, output

    def test_populate_uuid(self):
        script = "cleanup_datasets/populate_uuid.py"
        self._scripts_check_argparse_help(script)

        history_id = self.dataset_populator.new_history()
        self.dataset_populator.new_dataset(history_id, wait=True)
        config_file = self.write_config_file()
        output = self._scripts_check_output(script, ["-c", config_file])
        assert "Complete" in output

    @integration_util.skip_if_jenkins
    def test_grt_export(self):
        script = "grt/export.py"
        self._scripts_check_argparse_help(script)

        history_id = self.dataset_populator.new_history()
        self.dataset_populator.new_dataset(history_id, wait=True)
        config_file = self.write_config_file()
        grt_config_file = os.path.join(self.config_dir, "grt.yml")
        with open(grt_config_file, "w") as f:
            yaml.dump({"grt": {"share_toolbox": True}, "sanitization": {"tools": []}, "tool_params": {}}, f)
        self._scripts_check_output(script, ["-c", config_file, "-g", grt_config_file, "-r", self.config_dir])
        report_files = os.listdir(self.config_dir)
        json_files = [j for j in report_files if j.endswith(".json")]
        assert len(json_files) == 1, "Expected one json report file in [%s]" % json_files
        json_file = os.path.join(self.config_dir, json_files[0])
        with open(json_file, "r") as f:
            export = json.load(f)
        assert export["version"] == 2

    def test_admin_cleanup_datasets(self):
        self._scripts_check_argparse_help("cleanup_datasets/admin_cleanup_datasets.py")

    @skip_unless_module("flask_socketio")
    def test_communication_server(self):
        self._scripts_check_argparse_help("communication/communication_server.py")

    def test_secret_decoder_ring(self):
        script = "secret_decoder_ring.py"
        self._scripts_check_argparse_help(script)

        config_file = self.write_config_file()
        output = self._scripts_check_output(script, ["-c", config_file, "encode", "1"])
        encoded_id = output.strip()

        output = self._scripts_check_output(script, ["-c", config_file, "decode", encoded_id])
        assert output.strip() == "1"

    def test_database_scripts(self):
        self._scripts_check_argparse_help("create_db.py")
        self._scripts_check_argparse_help("manage_db.py")
        # TODO: test creating a smaller database - e.g. tool install database based on fresh
        # config file.

    def test_galaxy_main(self):
        self._scripts_check_argparse_help("galaxy-main")

    def test_runtime_stats(self):
        self._skip_if_not_postgres()
        self._scripts_check_argparse_help("runtime_stats.py")

    def _skip_if_not_postgres(self):
        if not self._app.config.database_connection.startswith("post"):
            raise unittest.SkipTest("Test only valid for postgres")

    def _scripts_check_argparse_help(self, script):
        # Test imports and argparse response to --help with 0 exit code.
        output = self._scripts_check_output(script, ["--help"])
        # Test -h, --help in printed output message.
        assert "-h, --help" in output

    def _scripts_check_output(self, script, args):
        cwd = galaxy_directory()
        cmd = ["python", os.path.join(cwd, "scripts", script)] + args
        clean_env = {
            "PATH": os.environ.get("PATH", None),
        }  # Don't let testing environment variables interfere with config.
        return unicodify(subprocess.check_output(cmd, cwd=cwd, env=clean_env))

    def write_config_file(self):
        config_dir = self.config_dir
        path = os.path.join(config_dir, "galaxy.yml")
        self._test_driver.temp_directories.extend([config_dir])
        with open(path, "w") as f:
            yaml.dump({"galaxy": self._raw_config}, f)

        return path
 def setUp(self):
     super(ToolsUploadTestCase, self).setUp()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
 def setUp(self):
     super(DataManagerIntegrationTestCase, self).setUp()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
 def setUp(self):
     super(JobRecoveryAfterHandledIntegrationTestCase, self).setUp()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
Example #33
 def setUp(self):
     super(BaseWorkflowHandlerConfigurationTestCase, self).setUp()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
     self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
     self.history_id = self.dataset_populator.new_history()
Example #34
 def setUp(self):
     super(RolesApiTestCase, self).setUp()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
Example #35
 def setUp(self):
     super(HistoriesApiTestCase, self).setUp()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
     self.dataset_collection_populator = DatasetCollectionPopulator(
         self.galaxy_interactor)
 def setUp(self):
     super(BaseUploadContentConfigurationTestCase, self).setUp()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
     self.library_populator = LibraryPopulator(self.galaxy_interactor)
     self.history_id = self.dataset_populator.new_history()
 def setUp(self):
     super(MaximumWorkflowInvocationDurationTestCase, self).setUp()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
     self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
class HistoryContentsApiTestCase(api.ApiTestCase, TestsDatasets):

    def setUp(self):
        super(HistoryContentsApiTestCase, self).setUp()
        self.history_id = self._new_history()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)

    def test_index_hda_summary(self):
        hda1 = self._new_dataset(self.history_id)
        contents_response = self._get("histories/%s/contents" % self.history_id)
        hda_summary = self.__check_for_hda(contents_response, hda1)
        assert "display_types" not in hda_summary  # Quick summary, not full details

    def test_make_private_and_public(self):
        hda1 = self._wait_for_new_hda()
        update_url = "histories/%s/contents/%s/permissions" % (self.history_id, hda1["id"])

        role_id = self.dataset_populator.user_private_role_id()
        # Give manage permission to the user.
        payload = {
            "access": [],
            "manage": [role_id],
        }
        update_response = self._update_permissions(update_url, payload, admin=True)
        self._assert_status_code_is(update_response, 200)
        self._assert_other_user_can_access(hda1["id"])
        # Then we restrict access.
        payload = {
            "action": "make_private",
        }
        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)
        self._assert_other_user_cannot_access(hda1["id"])

        # Then we remove restrictions.
        payload = {
            "action": "remove_restrictions",
        }
        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)
        self._assert_other_user_can_access(hda1["id"])

    def test_set_permissions_add_admin_history_contents(self):
        self._verify_dataset_permissions("history_contents")

    def test_set_permissions_add_admin_datasets(self):
        self._verify_dataset_permissions("dataset")

    def _verify_dataset_permissions(self, api_endpoint):
        hda1 = self._wait_for_new_hda()
        hda_id = hda1["id"]
        if api_endpoint == "history_contents":
            update_url = "histories/%s/contents/%s/permissions" % (self.history_id, hda_id)
        else:
            update_url = "datasets/%s/permissions" % hda_id

        role_id = self.dataset_populator.user_private_role_id()

        payload = {
            "access": [role_id],
            "manage": [role_id],
        }

        # Other users cannot modify permissions.
        with self._different_user():
            update_response = self._update_permissions(update_url, payload)
            self._assert_status_code_is(update_response, 403)

        # First the details render for another user.
        self._assert_other_user_can_access(hda_id)

        # Then we restrict access.
        update_response = self._update_permissions(update_url, payload, admin=True)
        self._assert_status_code_is(update_response, 200)

        # Finally the details don't render.
        self._assert_other_user_cannot_access(hda_id)

        # But they do for the original user.
        contents_response = self._get("histories/%s/contents/%s" % (self.history_id, hda_id)).json()
        assert "name" in contents_response

        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)

        payload = {
            "access": [role_id],
            "manage": [role_id],
        }
        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)
        self._assert_other_user_cannot_access(hda_id)

        user_id = self.dataset_populator.user_id()
        with self._different_user():
            different_user_id = self.dataset_populator.user_id()
        combined_user_role = self.dataset_populator.create_role([user_id, different_user_id], description="role for testing permissions")

        payload = {
            "access": [combined_user_role["id"]],
            "manage": [role_id],
        }
        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)
        # Now other user can see dataset again with access permission.
        self._assert_other_user_can_access(hda_id)
        # access doesn't imply management though...
        with self._different_user():
            update_response = self._update_permissions(update_url, payload)
            self._assert_status_code_is(update_response, 403)

    def _assert_other_user_cannot_access(self, history_content_id):
        with self._different_user():
            contents_response = self._get("histories/%s/contents/%s" % (self.history_id, history_content_id)).json()
            assert "name" not in contents_response

    def _assert_other_user_can_access(self, history_content_id):
        with self._different_user():
            contents_response = self._get("histories/%s/contents/%s" % (self.history_id, history_content_id)).json()
            assert "name" in contents_response

    def test_index_hda_all_details(self):
        hda1 = self._new_dataset(self.history_id)
        contents_response = self._get("histories/%s/contents?details=all" % self.history_id)
        hda_details = self.__check_for_hda(contents_response, hda1)
        self.__assert_hda_has_full_details(hda_details)

    def test_index_hda_detail_by_id(self):
        hda1 = self._new_dataset(self.history_id)
        contents_response = self._get("histories/%s/contents?details=%s" % (self.history_id, hda1["id"]))
        hda_details = self.__check_for_hda(contents_response, hda1)
        self.__assert_hda_has_full_details(hda_details)

    def test_show_hda(self):
        hda1 = self._new_dataset(self.history_id)
        show_response = self.__show(hda1)
        self._assert_status_code_is(show_response, 200)
        self.__assert_matches_hda(hda1, show_response.json())

    def test_hda_copy(self):
        hda1 = self._new_dataset(self.history_id)
        create_data = dict(
            source='hda',
            content=hda1["id"],
        )
        second_history_id = self._new_history()
        assert self.__count_contents(second_history_id) == 0
        create_response = self._post("histories/%s/contents" % second_history_id, create_data)
        self._assert_status_code_is(create_response, 200)
        assert self.__count_contents(second_history_id) == 1

    def test_library_copy(self):
        ld = self.library_populator.new_library_dataset("lda_test_library")
        create_data = dict(
            source='library',
            content=ld["id"],
        )
        assert self.__count_contents(self.history_id) == 0
        create_response = self._post("histories/%s/contents" % self.history_id, create_data)
        self._assert_status_code_is(create_response, 200)
        assert self.__count_contents(self.history_id) == 1

    def test_update(self):
        hda1 = self._wait_for_new_hda()
        assert str(hda1["deleted"]).lower() == "false"
        update_response = self._raw_update(hda1["id"], dict(deleted=True))
        self._assert_status_code_is(update_response, 200)
        show_response = self.__show(hda1)
        assert str(show_response.json()["deleted"]).lower() == "true"

        update_response = self._raw_update(hda1["id"], dict(name="Updated Name"))
        assert self.__show(hda1).json()["name"] == "Updated Name"

        update_response = self._raw_update(hda1["id"], dict(name="Updated Name"))
        assert self.__show(hda1).json()["name"] == "Updated Name"

        unicode_name = u'ржевский сапоги'
        update_response = self._raw_update(hda1["id"], dict(name=unicode_name))
        updated_hda = self.__show(hda1).json()
        assert updated_hda["name"] == unicode_name, updated_hda

        quoted_name = '"Mooo"'
        update_response = self._raw_update(hda1["id"], dict(name=quoted_name))
        updated_hda = self.__show(hda1).json()
        assert updated_hda["name"] == quoted_name, quoted_name

    def test_update_type_failures(self):
        hda1 = self._wait_for_new_hda()
        update_response = self._raw_update(hda1["id"], dict(deleted='not valid'))
        self._assert_status_code_is(update_response, 400)

    def _wait_for_new_hda(self):
        hda1 = self._new_dataset(self.history_id)
        self._wait_for_history(self.history_id)
        return hda1

    def _raw_update(self, item_id, data, admin=False, history_id=None):
        history_id = history_id or self.history_id
        key_param = "use_admin_key" if admin else "use_key"
        update_url = self._api_url("histories/%s/contents/%s" % (history_id, item_id), **{key_param: True})
        update_response = put(update_url, json=data)
        return update_response

    def _update_permissions(self, url, data, admin=False):
        key_param = "use_admin_key" if admin else "use_key"
        update_url = self._api_url(url, **{key_param: True})
        update_response = put(update_url, json=data)
        return update_response

    def test_delete(self):
        hda1 = self._new_dataset(self.history_id)
        self._wait_for_history(self.history_id)
        assert str(self.__show(hda1).json()["deleted"]).lower() == "false"
        delete_response = self._delete("histories/%s/contents/%s" % (self.history_id, hda1["id"]))
        assert delete_response.status_code < 300  # Something in the 200s :).
        assert str(self.__show(hda1).json()["deleted"]).lower() == "true"

    def test_purge(self):
        hda1 = self._new_dataset(self.history_id)
        self._wait_for_history(self.history_id)
        assert str(self.__show(hda1).json()["deleted"]).lower() == "false"
        assert str(self.__show(hda1).json()["purged"]).lower() == "false"
        data = {'purge': True}
        delete_response = self._delete("histories/%s/contents/%s" % (self.history_id, hda1["id"]), data=data)
        assert delete_response.status_code < 300  # Something in the 200s :).
        assert str(self.__show(hda1).json()["deleted"]).lower() == "true"
        assert str(self.__show(hda1).json()["purged"]).lower() == "true"

    def test_dataset_collection_creation_on_contents(self):
        payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id,
            type="dataset_collection"
        )
        endpoint = "histories/%s/contents" % self.history_id
        self._check_pair_creation(endpoint, payload)

    def test_dataset_collection_creation_on_typed_contents(self):
        payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id,
        )
        endpoint = "histories/%s/contents/dataset_collections" % self.history_id
        self._check_pair_creation(endpoint, payload)

    def test_dataset_collection_create_from_existing_datasets_with_new_tags(self):
        with self.dataset_populator.test_history() as history_id:
            hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
            hda2_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
            update_response = self._raw_update(hda2_id, dict(tags=['existing:tag']), history_id=history_id).json()
            assert update_response['tags'] == ['existing:tag']
            creation_payload = {'collection_type': 'list',
                                'history_id': history_id,
                                'element_identifiers': json.dumps([{'id': hda_id,
                                                                    'src': 'hda',
                                                                    'name': 'element_id1',
                                                                    'tags': ['my_new_tag']},
                                                                   {'id': hda2_id,
                                                                    'src': 'hda',
                                                                    'name': 'element_id2',
                                                                    'tags': ['another_new_tag']}
                                                                   ]),
                                'type': 'dataset_collection',
                                'copy_elements': True}
            r = self._post("histories/%s/contents" % self.history_id, creation_payload).json()
            assert r['elements'][0]['object']['id'] != hda_id, "HDA has not been copied"
            assert len(r['elements'][0]['object']['tags']) == 1
            assert r['elements'][0]['object']['tags'][0] == 'my_new_tag'
            assert len(r['elements'][1]['object']['tags']) == 2, r['elements'][1]['object']['tags']
            original_hda = self.dataset_populator.get_history_dataset_details(history_id=history_id, dataset_id=hda_id)
            assert len(original_hda['tags']) == 0, original_hda['tags']

    def _check_pair_creation(self, endpoint, payload):
        pre_collection_count = self.__count_contents(type="dataset_collection")
        pre_dataset_count = self.__count_contents(type="dataset")
        pre_combined_count = self.__count_contents(type="dataset,dataset_collection")

        dataset_collection_response = self._post(endpoint, payload)

        dataset_collection = self.__check_create_collection_response(dataset_collection_response)

        post_collection_count = self.__count_contents(type="dataset_collection")
        post_dataset_count = self.__count_contents(type="dataset")
        post_combined_count = self.__count_contents(type="dataset,dataset_collection")

        # Test filtering types with index.
        assert pre_collection_count == 0
        assert post_collection_count == 1
        assert post_combined_count == pre_dataset_count + 1
        assert post_combined_count == pre_combined_count + 1
        assert pre_dataset_count == post_dataset_count

        # Test show dataset collection.
        collection_url = "histories/%s/contents/dataset_collections/%s" % (self.history_id, dataset_collection["id"])
        show_response = self._get(collection_url)
        self._assert_status_code_is(show_response, 200)
        dataset_collection = show_response.json()
        self._assert_has_keys(dataset_collection, "url", "name", "deleted")

        assert not dataset_collection["deleted"]

        delete_response = delete(self._api_url(collection_url, use_key=True))
        self._assert_status_code_is(delete_response, 200)

        show_response = self._get(collection_url)
        dataset_collection = show_response.json()
        assert dataset_collection["deleted"]

    @skip_without_tool("collection_creates_list")
    def test_jobs_summary_simple_hdca(self):
        create_response = self.dataset_collection_populator.create_list_in_history(self.history_id, contents=["a\nb\nc\nd", "e\nf\ng\nh"])
        hdca_id = create_response.json()["id"]
        run = self.dataset_populator.run_collection_creates_list(self.history_id, hdca_id)
        collections = run['output_collections']
        collection = collections[0]
        jobs_summary_url = "histories/%s/contents/dataset_collections/%s/jobs_summary" % (self.history_id, collection["id"])
        jobs_summary_response = self._get(jobs_summary_url)
        self._assert_status_code_is(jobs_summary_response, 200)
        jobs_summary = jobs_summary_response.json()
        self._assert_has_keys(jobs_summary, "populated_state", "states")

    @skip_without_tool("cat1")
    def test_jobs_summary_implicit_hdca(self):
        create_response = self.dataset_collection_populator.create_pair_in_history(self.history_id, contents=["123", "456"])
        hdca_id = create_response.json()["id"]
        inputs = {
            "input1": {'batch': True, 'values': [{'src': 'hdca', 'id': hdca_id}]},
        }
        run = self.dataset_populator.run_tool("cat1", inputs=inputs, history_id=self.history_id)
        self.dataset_populator.wait_for_history_jobs(self.history_id)
        collections = run['implicit_collections']
        collection = collections[0]
        jobs_summary_url = "histories/%s/contents/dataset_collections/%s/jobs_summary" % (self.history_id, collection["id"])
        jobs_summary_response = self._get(jobs_summary_url)
        self._assert_status_code_is(jobs_summary_response, 200)
        jobs_summary = jobs_summary_response.json()
        self._assert_has_keys(jobs_summary, "populated_state", "states")
        states = jobs_summary["states"]
        assert states.get("ok") == 2, states

    def test_dataset_collection_hide_originals(self):
        payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id,
            type="dataset_collection"
        )

        payload["hide_source_items"] = True
        dataset_collection_response = self._post("histories/%s/contents" % self.history_id, payload)
        self.__check_create_collection_response(dataset_collection_response)

        contents_response = self._get("histories/%s/contents" % self.history_id)
        datasets = [d for d in contents_response.json() if d["history_content_type"] == "dataset" and d["hid"] in [1, 2]]
        # Assert two datasets in source were hidden.
        assert len(datasets) == 2
        assert not datasets[0]["visible"]
        assert not datasets[1]["visible"]

    def test_update_dataset_collection(self):
        payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id,
            type="dataset_collection"
        )
        dataset_collection_response = self._post("histories/%s/contents" % self.history_id, payload)
        self._assert_status_code_is(dataset_collection_response, 200)
        hdca = dataset_collection_response.json()
        update_url = self._api_url("histories/%s/contents/dataset_collections/%s" % (self.history_id, hdca["id"]), use_key=True)
        # Awkward json.dumps required here because of https://trello.com/c/CQwmCeG6
        body = json.dumps(dict(name="newnameforpair"))
        update_response = put(update_url, data=body)
        self._assert_status_code_is(update_response, 200)
        show_response = self.__show(hdca)
        assert str(show_response.json()["name"]) == "newnameforpair"

    def test_hdca_copy(self):
        hdca = self.dataset_collection_populator.create_pair_in_history(self.history_id).json()
        hdca_id = hdca["id"]
        second_history_id = self._new_history()
        create_data = dict(
            source='hdca',
            content=hdca_id,
        )
        assert len(self._get("histories/%s/contents/dataset_collections" % second_history_id).json()) == 0
        create_response = self._post("histories/%s/contents/dataset_collections" % second_history_id, create_data)
        self.__check_create_collection_response(create_response)
        contents = self._get("histories/%s/contents/dataset_collections" % second_history_id).json()
        assert len(contents) == 1
        new_forward, _ = self.__get_paired_response_elements(contents[0])
        self._assert_has_keys(new_forward, "history_id")
        assert new_forward["history_id"] == self.history_id

    def test_hdca_copy_and_elements(self):
        hdca = self.dataset_collection_populator.create_pair_in_history(self.history_id).json()
        hdca_id = hdca["id"]
        second_history_id = self._new_history()
        create_data = dict(
            source='hdca',
            content=hdca_id,
            copy_elements=True,
        )
        assert len(self._get("histories/%s/contents/dataset_collections" % second_history_id).json()) == 0
        create_response = self._post("histories/%s/contents/dataset_collections" % second_history_id, create_data)
        self.__check_create_collection_response(create_response)

        contents = self._get("histories/%s/contents/dataset_collections" % second_history_id).json()
        assert len(contents) == 1
        new_forward, _ = self.__get_paired_response_elements(contents[0])
        self._assert_has_keys(new_forward, "history_id")
        assert new_forward["history_id"] == second_history_id

    def __get_paired_response_elements(self, contents):
        hdca = self.__show(contents).json()
        self._assert_has_keys(hdca, "name", "deleted", "visible", "elements")
        elements = hdca["elements"]
        assert len(elements) == 2
        element0 = elements[0]
        element1 = elements[1]
        self._assert_has_keys(element0, "object")
        self._assert_has_keys(element1, "object")

        return element0["object"], element1["object"]

    def test_hdca_from_library_datasets(self):
        ld = self.library_populator.new_library_dataset("el1")
        ldda_id = ld["ldda_id"]
        element_identifiers = [{"name": "el1", "src": "ldda", "id": ldda_id}]
        create_data = dict(
            history_id=self.history_id,
            type="dataset_collection",
            name="Test From Library",
            element_identifiers=json.dumps(element_identifiers),
            collection_type="list",
        )
        create_response = self._post("histories/%s/contents/dataset_collections" % self.history_id, create_data)
        hdca = self.__check_create_collection_response(create_response)
        elements = hdca["elements"]
        assert len(elements) == 1
        hda = elements[0]["object"]
        assert hda["hda_ldda"] == "hda"
        assert hda["history_content_type"] == "dataset"
        assert hda["copied_from_ldda_id"] == ldda_id

    def test_hdca_from_inaccessible_library_datasets(self):
        library, library_dataset = self.library_populator.new_library_dataset_in_private_library("HDCACreateInaccessibleLibrary")
        ldda_id = library_dataset["id"]
        element_identifiers = [{"name": "el1", "src": "ldda", "id": ldda_id}]
        create_data = dict(
            history_id=self.history_id,
            type="dataset_collection",
            name="Test From Library",
            element_identifiers=json.dumps(element_identifiers),
            collection_type="list",
        )
        with self._different_user():
            second_history_id = self._new_history()
            create_response = self._post("histories/%s/contents/dataset_collections" % second_history_id, create_data)
            self._assert_status_code_is(create_response, 403)

    def __check_create_collection_response(self, response):
        self._assert_status_code_is(response, 200)
        dataset_collection = response.json()
        self._assert_has_keys(dataset_collection, "url", "name", "deleted", "visible", "elements")
        return dataset_collection

    def __show(self, contents):
        show_response = self._get("histories/%s/contents/%ss/%s" % (self.history_id, contents["history_content_type"], contents["id"]))
        return show_response

    def __count_contents(self, history_id=None, **kwds):
        if history_id is None:
            history_id = self.history_id
        contents_response = self._get("histories/%s/contents" % history_id, kwds)
        return len(contents_response.json())

    def __assert_hda_has_full_details(self, hda_details):
        self._assert_has_keys(hda_details, "display_types", "display_apps")

    def __check_for_hda(self, contents_response, hda):
        self._assert_status_code_is(contents_response, 200)
        contents = contents_response.json()
        assert len(contents) == 1
        hda_summary = contents[0]
        self.__assert_matches_hda(hda, hda_summary)
        return hda_summary

    def __assert_matches_hda(self, input_hda, query_hda):
        self._assert_has_keys(query_hda, "id", "name")
        assert input_hda["name"] == query_hda["name"]
        assert input_hda["id"] == query_hda["id"]
Example #39
class LibrariesApiTestCase(api.ApiTestCase, TestsDatasets):

    def setUp(self):
        super(LibrariesApiTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)

    def test_create(self):
        data = dict(name="CreateTestLibrary")
        create_response = self._post("libraries", data=data, admin=True)
        self._assert_status_code_is(create_response, 200)
        library = create_response.json()
        self._assert_has_keys(library, "name")
        assert library["name"] == "CreateTestLibrary"

    def test_delete(self):
        library = self.library_populator.new_library("DeleteTestLibrary")
        create_response = self._delete("libraries/%s" % library["id"], admin=True)
        self._assert_status_code_is(create_response, 200)
        library = create_response.json()
        self._assert_has_keys(library, "deleted")
        assert library["deleted"] is True
        # Test undeleting
        data = dict(undelete='true')
        create_response = self._delete("libraries/%s" % library["id"], data=data, admin=True)
        library = create_response.json()
        self._assert_status_code_is(create_response, 200)
        assert library["deleted"] is False

    def test_nonadmin(self):
        # Anons can't create libs
        data = dict(name="CreateTestLibrary")
        create_response = self._post("libraries", data=data, admin=False, anon=True)
        self._assert_status_code_is(create_response, 403)
        # Anons can't delete libs
        library = self.library_populator.new_library("AnonDeleteTestLibrary")
        create_response = self._delete("libraries/%s" % library["id"], admin=False, anon=True)
        self._assert_status_code_is(create_response, 403)
        # Anons can't update libs
        data = dict(name="ChangedName", description="ChangedDescription", synopsis='ChangedSynopsis')
        create_response = self._patch("libraries/%s" % library["id"], data=data, admin=False, anon=True)
        self._assert_status_code_is(create_response, 403)

    def test_update(self):
        library = self.library_populator.new_library("UpdateTestLibrary")
        data = dict(name='ChangedName', description='ChangedDescription', synopsis='ChangedSynopsis')
        create_response = self._patch("libraries/%s" % library["id"], data=data, admin=True)
        self._assert_status_code_is(create_response, 200)
        library = create_response.json()
        self._assert_has_keys(library, 'name', 'description', 'synopsis')
        assert library['name'] == 'ChangedName'
        assert library['description'] == 'ChangedDescription'
        assert library['synopsis'] == 'ChangedSynopsis'

    def test_create_private_library_permissions(self):
        library = self.library_populator.new_library("PermissionTestLibrary")
        library_id = library["id"]
        role_id = self.library_populator.user_private_role_id()
        self.library_populator.set_permissions(library_id, role_id)
        create_response = self._create_folder(library)
        self._assert_status_code_is(create_response, 200)

    def test_create_dataset_denied(self):
        library = self.library_populator.new_private_library("ForCreateDatasets")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
        with self._different_user():
            payload = {'from_hda_id': hda_id}
            create_response = self._post("folders/%s/contents" % folder_id, payload)
            self._assert_status_code_is(create_response, 403)

    def test_show_private_dataset_permissions(self):
        library, library_dataset = self.library_populator.new_library_dataset_in_private_library("ForCreateDatasets", wait=True)
        with self._different_user():
            response = self.library_populator.show_ldda(library["id"], library_dataset["id"])
            # TODO: this should really be 403 and a proper JSON exception.
            self._assert_status_code_is(response, 400)

    def test_create_dataset(self):
        library, library_dataset = self.library_populator.new_library_dataset_in_private_library("ForCreateDatasets", wait=True)
        self._assert_has_keys(library_dataset, "peek", "data_type")
        assert library_dataset["peek"].find("create_test") >= 0
        assert library_dataset["file_ext"] == "txt", library_dataset["file_ext"]

    def test_create_dataset_in_folder(self):
        library = self.library_populator.new_private_library("ForCreateDatasets")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
        payload = {'from_hda_id': hda_id}
        create_response = self._post("folders/%s/contents" % folder_id, payload)
        self._assert_status_code_is(create_response, 200)
        self._assert_has_keys(create_response.json(), "name", "id")

    def test_update_dataset_in_folder(self):
        library = self.library_populator.new_private_library("ForUpdateDataset")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
        payload = {'from_hda_id': hda_id, 'create_type': 'file', 'folder_id': folder_id}
        ld = self._post("libraries/%s/contents" % folder_id, payload)
        data = {'name': 'updated_name', 'file_ext': 'fastq', 'misc_info': 'updated_info', 'genome_build': 'updated_genome_build'}
        create_response = self._patch("libraries/datasets/%s" % ld.json()["id"], data=data)
        self._assert_status_code_is(create_response, 200)
        self._assert_has_keys(create_response.json(), "name", "file_ext", "misc_info", "genome_build")

    def test_invalid_update_dataset_in_folder(self):
        library = self.library_populator.new_private_library("ForInvalidUpdateDataset")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
        payload = {'from_hda_id': hda_id, 'create_type': 'file', 'folder_id': folder_id}
        ld = self._post("libraries/%s/contents" % folder_id, payload)
        data = {'file_ext': 'nonexisting_type'}
        create_response = self._patch("libraries/datasets/%s" % ld.json()["id"], data=data)
        self._assert_status_code_is(create_response, 400)
        assert 'This Galaxy does not recognize the datatype of:' in create_response.json()['err_msg']

    def test_create_datasets_in_library_from_collection(self):
        library = self.library_populator.new_private_library("ForCreateDatasetsFromCollection")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hdca_id = self.dataset_collection_populator.create_list_in_history(history_id, contents=["xxx", "yyy"]).json()["id"]
        payload = {'from_hdca_id': hdca_id, 'create_type': 'file', 'folder_id': folder_id}
        create_response = self._post("libraries/%s/contents" % library['id'], payload)
        self._assert_status_code_is(create_response, 200)

    def test_create_datasets_in_folder_from_collection(self):
        library = self.library_populator.new_private_library("ForCreateDatasetsFromCollection")
        history_id = self.dataset_populator.new_history()
        hdca_id = self.dataset_collection_populator.create_list_in_history(history_id, contents=["xxx", "yyy"]).json()["id"]
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        payload = {'from_hdca_id': hdca_id}
        create_response = self._post("folders/%s/contents" % folder_id, payload)
        self._assert_status_code_is(create_response, 200)
        assert len(create_response.json()) == 2
        # Also test that anything different from a flat dataset collection list
        # is refused
        hdca_pair_id = self.dataset_collection_populator.create_list_of_pairs_in_history(history_id).json()['id']
        payload = {'from_hdca_id': hdca_pair_id}
        create_response = self._post("folders/%s/contents" % folder_id, payload)
        self._assert_status_code_is(create_response, 501)
        assert create_response.json()['err_msg'] == 'Cannot add nested collections to library. Please flatten your collection first.'

    def _create_folder(self, library):
        create_data = dict(
            folder_id=library["root_folder_id"],
            create_type="folder",
            name="New Folder",
        )
        return self._post("libraries/%s/contents" % library["id"], data=create_data)
    def setUp(self):
        super(MaximumWorkflowInvocationDurationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
class DataManagerIntegrationTestCase(integration_util.IntegrationTestCase):

    """Test data manager installation and table reload through the API"""

    framework_tool_and_types = True

    def setUp(self):
        super(DataManagerIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        try:
            import watchdog  # noqa: F401
        except ImportError:
            raise SkipTest("watchdog library is not available")
        cls.username = cls.get_secure_ascii_digits()
        cls.conda_tmp_prefix = tempfile.mkdtemp()
        cls.shed_tools_dir = tempfile.mkdtemp()
        cls.shed_tool_data_dir = tempfile.mkdtemp()
        cls._test_driver.temp_directories.extend([cls.conda_tmp_prefix, cls.shed_tool_data_dir, cls.shed_tools_dir])
        config["conda_auto_init"] = True
        config["conda_auto_install"] = True
        config["conda_prefix"] = os.path.join(cls.conda_tmp_prefix, 'conda')
        config["tool_sheds_config_file"] = TOOL_SHEDS_CONF
        config["tool_config_file"] = os.path.join(cls.shed_tools_dir, 'shed_tool_conf.xml')
        config["shed_data_manager_config_file"] = os.path.join(cls.shed_tool_data_dir, 'shed_data_manager_config_file')
        config["shed_tool_data_table_config"] = os.path.join(cls.shed_tool_data_dir, 'shed_data_table_conf.xml')
        config["shed_tool_data_path"] = cls.shed_tool_data_dir
        config["tool_data_path"] = cls.shed_tool_data_dir
        config["watch_tool_data_dir"] = True
        config["admin_users"] = "*****@*****.**" % cls.username
        with open(config["tool_config_file"], 'w') as tool_conf_file:
            tool_conf_file.write(SHED_TOOL_CONF.substitute(shed_tools_path=cls.shed_tools_dir))
        with open(config["shed_data_manager_config_file"], 'w') as shed_data_config:
            shed_data_config.write(SHED_DATA_MANAGER_CONF)
        with open(config["shed_tool_data_table_config"], 'w') as shed_data_table_config:
            shed_data_table_config.write(SHED_DATA_TABLES)

    def test_data_manager_installation_table_reload(self):
        """
        Test that we can install data managers, create a new dbkey, and use that dbkey in a downstream data manager.
        """
        create_response = self._post('/tool_shed_repositories/new/install_repository_revision', data=CREATE_DBKEY_PAYLOAD, admin=True)
        self._assert_status_code_is(create_response, 200)
        create_response = self._post('/tool_shed_repositories/new/install_repository_revision', data=SAM_FASTA_PAYLOAD, admin=True)
        self._assert_status_code_is(create_response, 200)

        with self._different_user(email="*****@*****.**" % self.username):
            with self.dataset_populator.test_history() as history_id:
                run_response = self.dataset_populator.run_tool(tool_id=FETCH_TOOL_ID,
                                                               inputs=FETCH_GENOME_DBKEYS_ALL_FASTA_INPUT,
                                                               history_id=history_id,
                                                               assert_ok=False)
                self.dataset_populator.wait_for_tool_run(history_id=history_id, run_response=run_response)
                run_response = self.dataset_populator.run_tool(tool_id=SAM_FASTA_ID,
                                                               inputs=SAM_FASTA_INPUT,
                                                               history_id=history_id,
                                                               assert_ok=False)
                self.dataset_populator.wait_for_tool_run(history_id=history_id, run_response=run_response)

    def create_local_user(self):
        """Creates a local user and returns the user id."""
        password = self.get_secure_ascii_digits()
        payload = {'username': self.username,
                   'password': password,
                   'email': "*****@*****.**" % self.username}
        create_response = self._post('/users', data=payload, admin=True)
        self._assert_status_code_is(create_response, 200)
        response = create_response.json()
        return response['id']

    def create_api_key_for_user(self, user_id):
        create_response = self._post("/users/%s/api_key" % user_id, data={}, admin=True)
        self._assert_status_code_is(create_response, 200)
        return create_response.json()

    @classmethod
    def get_secure_ascii_digits(cls, n=12):
        return ''.join(random.SystemRandom().choice(string.ascii_lowercase + string.digits) for _ in range(n))
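CREATE_DBKEY_PAYLOAD and SAM_FASTA_PAYLOAD are module-level constants defined outside this snippet. A hedged sketch of the general shape an install_repository_revision payload takes; the key names follow the tool shed install API, but every value below is a hypothetical placeholder.

# Illustrative only -- repository name, owner, and revision are assumptions.
EXAMPLE_INSTALL_PAYLOAD = {
    "tool_shed_url": "https://toolshed.g2.bx.psu.edu",
    "name": "data_manager_fetch_genome_dbkeys_all_fasta",
    "owner": "devteam",
    "changeset_revision": "<revision-hash>",
    "install_tool_dependencies": False,
    "install_repository_dependencies": False,
}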
Example #42
    def setUp(self):
        super(ObjectStoreJobsIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
Example #43
    def setUp(self):
        super(FailJobWhenToolUnavailableTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()
Example #44
class HistoriesApiTestCase(api.ApiTestCase):

    def setUp(self):
        super(HistoriesApiTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)

    def test_create_history(self):
        # Create a history.
        create_response = self._create_history("TestHistory1")
        created_id = create_response["id"]

        # Make sure new history appears in index of user's histories.
        index_response = self._get("histories").json()
        indexed_history = [h for h in index_response if h["id"] == created_id][0]
        self.assertEqual(indexed_history["name"], "TestHistory1")

    def test_show_history(self):
        history_id = self._create_history("TestHistoryForShow")["id"]
        show_response = self._show(history_id)
        self._assert_has_key(
            show_response,
            'id', 'name', 'annotation', 'size', 'contents_url',
            'state', 'state_details', 'state_ids'
        )

        state_details = show_response["state_details"]
        state_ids = show_response["state_ids"]
        states = [
            'discarded', 'empty', 'error', 'failed_metadata', 'new',
            'ok', 'paused', 'queued', 'running', 'setting_metadata', 'upload'
        ]
        assert isinstance(state_details, dict)
        assert isinstance(state_ids, dict)
        self._assert_has_keys(state_details, *states)
        self._assert_has_keys(state_ids, *states)

    def test_show_most_recently_used(self):
        history_id = self._create_history("TestHistoryRecent")["id"]
        show_response = self._get("histories/most_recently_used").json()
        assert show_response["id"] == history_id

    def test_index_order(self):
        slightly_older_history_id = self._create_history("TestHistorySlightlyOlder")["id"]
        newer_history_id = self._create_history("TestHistoryNewer")["id"]
        index_response = self._get("histories").json()
        assert index_response[0]["id"] == newer_history_id
        assert index_response[1]["id"] == slightly_older_history_id

    def test_delete(self):
        # Set up a history and ensure it is in the index
        history_id = self._create_history("TestHistoryForDelete")["id"]
        index_response = self._get("histories").json()
        assert index_response[0]["id"] == history_id

        show_response = self._show(history_id)
        assert not show_response["deleted"]

        # Delete the history
        self._delete("histories/%s" % history_id)

        # Check can view it - but it is deleted
        show_response = self._show(history_id)
        assert show_response["deleted"]

        # Verify it is dropped from history index
        index_response = self._get("histories").json()
        assert len(index_response) == 0 or index_response[0]["id"] != history_id

        # Add deleted filter to index to view it
        index_response = self._get("histories", {"deleted": "true"}).json()
        assert index_response[0]["id"] == history_id

    def test_purge(self):
        history_id = self._create_history("TestHistoryForPurge")["id"]
        data = {'purge': True}
        self._delete("histories/%s" % history_id, data=data)
        show_response = self._show(history_id)
        assert show_response["deleted"]
        assert show_response["purged"]

    def test_undelete(self):
        history_id = self._create_history("TestHistoryForDeleteAndUndelete")["id"]
        self._delete("histories/%s" % history_id)
        self._post("histories/deleted/%s/undelete" % history_id)
        show_response = self._show(history_id)
        assert not show_response["deleted"]

    def test_update(self):
        history_id = self._create_history("TestHistoryForUpdating")["id"]

        self._update(history_id, {"name": "New Name"})
        show_response = self._show(history_id)
        assert show_response["name"] == "New Name"

        unicode_name = u'桜ゲノム'
        self._update(history_id, {"name": unicode_name})
        show_response = self._show(history_id)
        assert show_response["name"] == unicode_name, show_response

        quoted_name = "'MooCow'"
        self._update(history_id, {"name": quoted_name})
        show_response = self._show(history_id)
        assert show_response["name"] == quoted_name

        self._update(history_id, {"deleted": True})
        show_response = self._show(history_id)
        assert show_response["deleted"], show_response

        self._update(history_id, {"deleted": False})
        show_response = self._show(history_id)
        assert not show_response["deleted"]

        self._update(history_id, {"published": True})
        show_response = self._show(history_id)
        assert show_response["published"]

        self._update(history_id, {"genome_build": "hg18"})
        show_response = self._show(history_id)
        assert show_response["genome_build"] == "hg18"

        self._update(history_id, {"annotation": "The annotation is cool"})
        show_response = self._show(history_id)
        assert show_response["annotation"] == "The annotation is cool"

        self._update(history_id, {"annotation": unicode_name})
        show_response = self._show(history_id)
        assert show_response["annotation"] == unicode_name, show_response

        self._update(history_id, {"annotation": quoted_name})
        show_response = self._show(history_id)
        assert show_response["annotation"] == quoted_name

    def test_update_invalid_attribute(self):
        history_id = self._create_history("TestHistoryForInvalidUpdating")["id"]
        put_response = self._update(history_id, {"invalidkey": "moo"})
        assert "invalidkey" not in put_response.json()

    def test_update_invalid_types(self):
        history_id = self._create_history("TestHistoryForUpdatingInvalidTypes")["id"]
        for str_key in ["name", "annotation"]:
            assert self._update(history_id, {str_key: False}).status_code == 400

        for bool_key in ['deleted', 'importable', 'published']:
            assert self._update(history_id, {bool_key: "a string"}).status_code == 400

        assert self._update(history_id, {"tags": "a simple string"}).status_code == 400
        assert self._update(history_id, {"tags": [True]}).status_code == 400

    def test_invalid_keys(self):
        invalid_history_id = "1234123412341234"

        assert self._get("histories/%s" % invalid_history_id).status_code == 400
        assert self._update(invalid_history_id, {"name": "new name"}).status_code == 400
        assert self._delete("histories/%s" % invalid_history_id).status_code == 400
        assert self._post("histories/deleted/%s/undelete" % invalid_history_id).status_code == 400

    def test_create_anonymous_fails(self):
        post_data = dict(name="CannotCreate")
        # Using lower-level _api_url will cause key to not be injected.
        histories_url = self._api_url("histories")
        create_response = post(url=histories_url, data=post_data)
        self._assert_status_code_is(create_response, 403)

    def test_import_export(self):
        history_name = "for_export"
        history_id = self.dataset_populator.new_history(name=history_name)
        self.dataset_populator.new_dataset(history_id, content="1 2 3")
        imported_history_id = self._reimport_history(history_id, history_name)

        contents_response = self._get("histories/%s/contents" % imported_history_id)
        self._assert_status_code_is(contents_response, 200)
        contents = contents_response.json()
        assert len(contents) == 1
        imported_content = self.dataset_populator.get_history_dataset_content(
            history_id=imported_history_id,
            dataset_id=contents[0]["id"]
        )
        assert imported_content == "1 2 3\n"

    def test_import_export_collection(self):
        from nose.plugins.skip import SkipTest
        raise SkipTest("Collection import/export not yet implemented")

        history_name = "for_export_with_collections"
        history_id = self.dataset_populator.new_history(name=history_name)
        self.dataset_collection_populator.create_list_in_history(history_id, contents=["Hello", "World"])

        imported_history_id = self._reimport_history(history_id, history_name)

        contents_response = self._get("histories/%s/contents" % imported_history_id)
        self._assert_status_code_is(contents_response, 200)
        contents = contents_response.json()
        assert len(contents) == 3

    def _reimport_history(self, history_id, history_name):
        # Ensure the history is ready to go...
        self.dataset_populator.wait_for_history(history_id, assert_ok=True)

        # Export the history.
        download_path = self._export(history_id)

        # Create download for history
        full_download_url = "%s%s?key=%s" % (self.url, download_path, self.galaxy_interactor.api_key)
        download_response = get(full_download_url)
        self._assert_status_code_is(download_response, 200)

        def history_names():
            history_index = self._get("histories")
            return dict((h["name"], h) for h in history_index.json())

        import_name = "imported from archive: %s" % history_name
        assert import_name not in history_names()

        import_data = dict(archive_source=full_download_url, archive_type="url")
        import_response = self._post("histories", data=import_data)

        self._assert_status_code_is(import_response, 200)

        def has_history_with_name():
            histories = history_names()
            return histories.get(import_name, None)

        imported_history = wait_on(has_history_with_name, desc="import history")
        imported_history_id = imported_history["id"]
        self.dataset_populator.wait_for_history(imported_history_id)

        return imported_history_id

    def test_create_tag(self):
        post_data = dict(name="TestHistoryForTag")
        history_id = self._post("histories", data=post_data).json()["id"]
        tag_data = dict(value="awesometagvalue")
        tag_url = "histories/%s/tags/awesometagname" % history_id
        tag_create_response = self._post(tag_url, data=tag_data)
        self._assert_status_code_is(tag_create_response, 200)

    def _export(self, history_id):
        export_url = self._api_url("histories/%s/exports" % history_id, use_key=True)
        put_response = put(export_url)
        self._assert_status_code_is(put_response, 202)

        def export_ready_response():
            put_response = put(export_url)
            if put_response.status_code == 202:
                return None
            return put_response

        put_response = wait_on(export_ready_response, desc="export ready")
        self._assert_status_code_is(put_response, 200)
        response = put_response.json()
        self._assert_has_keys(response, "download_url")
        download_path = response["download_url"]
        return download_path

    def _show(self, history_id):
        return self._get("histories/%s" % history_id).json()

    def _update(self, history_id, data):
        update_url = self._api_url("histories/%s" % history_id, use_key=True)
        put_response = put(update_url, json=data)
        return put_response

    def _create_history(self, name):
        post_data = dict(name=name)
        create_response = self._post("histories", data=post_data).json()
        self._assert_has_keys(create_response, "name", "id")
        self.assertEqual(create_response["name"], name)
        return create_response
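wait_on, used by _reimport_history and _export above, comes from the shared test helpers. A minimal polling sketch of its apparent contract (the timeout and delay values are assumptions): call the check repeatedly until it returns something other than None, or fail after the timeout.

import time

def wait_on(check, desc, timeout=60, delta=0.25):
    # Sketch: repeatedly call `check` and return its first non-None result;
    # raise if `timeout` seconds elapse first.
    start = time.time()
    while time.time() - start < timeout:
        value = check()
        if value is not None:
            return value
        time.sleep(delta)
    raise Exception("Timed out waiting on %s." % desc)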
    def setUp(self):
        super(DatasetCollectionApiTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()
    def setUp(self):
        super(MaximumWorkflowJobsPerSchedulingIterationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
Example #47
class HistoriesApiTestCase(api.ApiTestCase):
    def setUp(self):
        super(HistoriesApiTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(
            self.galaxy_interactor)

    def test_create_history(self):
        # Create a history.
        create_response = self._create_history("TestHistory1")
        created_id = create_response["id"]

        # Make sure new history appears in index of user's histories.
        index_response = self._get("histories").json()
        indexed_history = [h for h in index_response
                           if h["id"] == created_id][0]
        self.assertEqual(indexed_history["name"], "TestHistory1")

    def test_show_history(self):
        history_id = self._create_history("TestHistoryForShow")["id"]
        show_response = self._show(history_id)
        self._assert_has_key(show_response, 'id', 'name', 'annotation', 'size',
                             'contents_url', 'state', 'state_details',
                             'state_ids')

        state_details = show_response["state_details"]
        state_ids = show_response["state_ids"]
        states = [
            'discarded', 'empty', 'error', 'failed_metadata', 'new', 'ok',
            'paused', 'queued', 'running', 'setting_metadata', 'upload'
        ]
        assert isinstance(state_details, dict)
        assert isinstance(state_ids, dict)
        self._assert_has_keys(state_details, *states)
        self._assert_has_keys(state_ids, *states)

    def test_show_most_recently_used(self):
        history_id = self._create_history("TestHistoryRecent")["id"]
        show_response = self._get("histories/most_recently_used").json()
        assert show_response["id"] == history_id

    def test_index_order(self):
        slightly_older_history_id = self._create_history(
            "TestHistorySlightlyOlder")["id"]
        newer_history_id = self._create_history("TestHistoryNewer")["id"]
        index_response = self._get("histories").json()
        assert index_response[0]["id"] == newer_history_id
        assert index_response[1]["id"] == slightly_older_history_id

    def test_delete(self):
        # Set up a history and ensure it is in the index
        history_id = self._create_history("TestHistoryForDelete")["id"]
        index_response = self._get("histories").json()
        assert index_response[0]["id"] == history_id

        show_response = self._show(history_id)
        assert not show_response["deleted"]

        # Delete the history
        self._delete("histories/%s" % history_id)

        # Check can view it - but it is deleted
        show_response = self._show(history_id)
        assert show_response["deleted"]

        # Verify it is dropped from history index
        index_response = self._get("histories").json()
        assert len(
            index_response) == 0 or index_response[0]["id"] != history_id

        # Add deleted filter to index to view it
        index_response = self._get("histories", {"deleted": "true"}).json()
        assert index_response[0]["id"] == history_id

    def test_purge(self):
        history_id = self._create_history("TestHistoryForPurge")["id"]
        data = {'purge': True}
        self._delete("histories/%s" % history_id, data=data)
        show_response = self._show(history_id)
        assert show_response["deleted"]
        assert show_response["purged"]

    def test_undelete(self):
        history_id = self._create_history(
            "TestHistoryForDeleteAndUndelete")["id"]
        self._delete("histories/%s" % history_id)
        self._post("histories/deleted/%s/undelete" % history_id)
        show_response = self._show(history_id)
        assert not show_response["deleted"]

    def test_update(self):
        history_id = self._create_history("TestHistoryForUpdating")["id"]

        self._update(history_id, {"name": "New Name"})
        show_response = self._show(history_id)
        assert show_response["name"] == "New Name"

        unicode_name = u'桜ゲノム'
        self._update(history_id, {"name": unicode_name})
        show_response = self._show(history_id)
        assert show_response["name"] == unicode_name, show_response

        quoted_name = "'MooCow'"
        self._update(history_id, {"name": quoted_name})
        show_response = self._show(history_id)
        assert show_response["name"] == quoted_name

        self._update(history_id, {"deleted": True})
        show_response = self._show(history_id)
        assert show_response["deleted"], show_response

        self._update(history_id, {"deleted": False})
        show_response = self._show(history_id)
        assert not show_response["deleted"]

        self._update(history_id, {"published": True})
        show_response = self._show(history_id)
        assert show_response["published"]

        self._update(history_id, {"genome_build": "hg18"})
        show_response = self._show(history_id)
        assert show_response["genome_build"] == "hg18"

        self._update(history_id, {"annotation": "The annotation is cool"})
        show_response = self._show(history_id)
        assert show_response["annotation"] == "The annotation is cool"

        self._update(history_id, {"annotation": unicode_name})
        show_response = self._show(history_id)
        assert show_response["annotation"] == unicode_name, show_response

        self._update(history_id, {"annotation": quoted_name})
        show_response = self._show(history_id)
        assert show_response["annotation"] == quoted_name

    def test_update_invalid_attribute(self):
        history_id = self._create_history(
            "TestHistoryForInvalidUpdating")["id"]
        put_response = self._update(history_id, {"invalidkey": "moo"})
        assert "invalidkey" not in put_response.json()

    def test_update_invalid_types(self):
        history_id = self._create_history(
            "TestHistoryForUpdatingInvalidTypes")["id"]
        for str_key in ["name", "annotation"]:
            assert self._update(history_id, {
                str_key: False
            }).status_code == 400

        for bool_key in ['deleted', 'importable', 'published']:
            assert self._update(history_id, {
                bool_key: "a string"
            }).status_code == 400

        assert self._update(history_id, {
            "tags": "a simple string"
        }).status_code == 400
        assert self._update(history_id, {"tags": [True]}).status_code == 400

    def test_invalid_keys(self):
        invalid_history_id = "1234123412341234"

        assert self._get("histories/%s" %
                         invalid_history_id).status_code == 400
        assert self._update(invalid_history_id, {
            "name": "new name"
        }).status_code == 400
        assert self._delete("histories/%s" %
                            invalid_history_id).status_code == 400
        assert self._post("histories/deleted/%s/undelete" %
                          invalid_history_id).status_code == 400

    def test_create_anonymous_fails(self):
        post_data = dict(name="CannotCreate")
        # Using lower-level _api_url will cause key to not be injected.
        histories_url = self._api_url("histories")
        create_response = post(url=histories_url, data=post_data)
        self._assert_status_code_is(create_response, 403)

    def test_import_export(self):
        history_name = "for_export"
        history_id = self.dataset_populator.new_history(name=history_name)
        self.dataset_populator.new_dataset(history_id, content="1 2 3")
        imported_history_id = self._reimport_history(history_id, history_name)

        contents_response = self._get("histories/%s/contents" %
                                      imported_history_id)
        self._assert_status_code_is(contents_response, 200)
        contents = contents_response.json()
        assert len(contents) == 1
        imported_content = self.dataset_populator.get_history_dataset_content(
            history_id=imported_history_id, dataset_id=contents[0]["id"])
        assert imported_content == "1 2 3\n"

    def test_import_export_collection(self):
        from nose.plugins.skip import SkipTest
        raise SkipTest("Collection import/export not yet implemented")

        history_name = "for_export_with_collections"
        history_id = self.dataset_populator.new_history(name=history_name)
        self.dataset_collection_populator.create_list_in_history(
            history_id, contents=["Hello", "World"])

        imported_history_id = self._reimport_history(history_id, history_name)

        contents_response = self._get("histories/%s/contents" %
                                      imported_history_id)
        self._assert_status_code_is(contents_response, 200)
        contents = contents_response.json()
        assert len(contents) == 3

    def _reimport_history(self, history_id, history_name):
        # Ensure the history is ready to go...
        self.dataset_populator.wait_for_history(history_id, assert_ok=True)

        # Export the history.
        download_path = self._export(history_id)

        # Create download for history
        full_download_url = "%s%s?key=%s" % (self.url, download_path,
                                             self.galaxy_interactor.api_key)
        download_response = get(full_download_url)
        self._assert_status_code_is(download_response, 200)

        def history_names():
            history_index = self._get("histories")
            return dict((h["name"], h) for h in history_index.json())

        import_name = "imported from archive: %s" % history_name
        assert import_name not in history_names()

        import_data = dict(archive_source=full_download_url,
                           archive_type="url")
        import_response = self._post("histories", data=import_data)

        self._assert_status_code_is(import_response, 200)

        def has_history_with_name():
            histories = history_names()
            return histories.get(import_name, None)

        imported_history = wait_on(has_history_with_name,
                                   desc="import history")
        imported_history_id = imported_history["id"]
        self.dataset_populator.wait_for_history(imported_history_id)

        return imported_history_id

    def test_create_tag(self):
        post_data = dict(name="TestHistoryForTag")
        history_id = self._post("histories", data=post_data).json()["id"]
        tag_data = dict(value="awesometagvalue")
        tag_url = "histories/%s/tags/awesometagname" % history_id
        tag_create_response = self._post(tag_url, data=tag_data)
        self._assert_status_code_is(tag_create_response, 200)

    def _export(self, history_id):
        export_url = self._api_url("histories/%s/exports" % history_id,
                                   use_key=True)
        put_response = put(export_url)
        self._assert_status_code_is(put_response, 202)

        def export_ready_response():
            put_response = put(export_url)
            if put_response.status_code == 202:
                return None
            return put_response

        put_response = wait_on(export_ready_response, desc="export ready")
        self._assert_status_code_is(put_response, 200)
        response = put_response.json()
        self._assert_has_keys(response, "download_url")
        download_path = response["download_url"]
        return download_path

    def _show(self, history_id):
        return self._get("histories/%s" % history_id).json()

    def _update(self, history_id, data):
        update_url = self._api_url("histories/%s" % history_id, use_key=True)
        put_response = put(update_url, json=data)
        return put_response

    def _create_history(self, name):
        post_data = dict(name=name)
        create_response = self._post("histories", data=post_data).json()
        self._assert_has_keys(create_response, "name", "id")
        self.assertEqual(create_response["name"], name)
        return create_response
Example #48
    def setUp(self):
        super(BaseJobEnvironmentIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
Example #49
class RolesApiTestCase(api.ApiTestCase):

    def setUp(self):
        super(RolesApiTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    def test_list_and_show(self):

        def check_roles_response(response):
            assert response.status_code == 200
            as_list = response.json()
            assert isinstance(as_list, list)
            assert len(as_list) > 0
            for role in as_list:
                RolesApiTestCase.check_role_dict(role)

        user_role_id = self.dataset_populator.user_private_role_id()
        with self._different_user():
            different_user_role_id = self.dataset_populator.user_private_role_id()

        admin_roles_response = self._get("roles", admin=True)
        user_roles_response = self._get("roles")

        check_roles_response(admin_roles_response)
        check_roles_response(user_roles_response)

        admin_roles_response_ids = [r["id"] for r in admin_roles_response.json()]
        user_roles_response_ids = [r["id"] for r in user_roles_response.json()]

        # A user can see their own private role but not other users'; an admin can see both.
        assert user_role_id in user_roles_response_ids
        assert different_user_role_id not in user_roles_response_ids

        assert user_role_id in admin_roles_response_ids
        assert different_user_role_id in admin_roles_response_ids

        # Check showing a valid role.
        role_response = self._get("roles/%s" % user_role_id)
        assert role_response.status_code == 200
        role = role_response.json()
        RolesApiTestCase.check_role_dict(role, assert_id=user_role_id)

    def test_create_valid(self):
        name = self.dataset_populator.get_random_name()
        description = "A test role."
        payload = {
            "name": name,
            "description": description,
            "user_ids": json.dumps([self.dataset_populator.user_id()]),
        }
        response = self._post("roles", payload, admin=True)
        assert_status_code_is(response, 200)
        # TODO: Why does this return a singleton list - that is bad - should be deprecated
        # and return a single role.
        role = response.json()[0]
        RolesApiTestCase.check_role_dict(role)

        assert role["name"] == name
        assert role["description"] == description

        user_roles_response = self._get("roles")
        with self._different_user():
            different_user_roles_response = self._get("roles")

        user_roles_response_ids = [r["id"] for r in user_roles_response.json()]
        different_user_roles_response_ids = [r["id"] for r in different_user_roles_response.json()]

        # This new role is public, all users see it.
        assert role["id"] in user_roles_response_ids
        assert role["id"] in different_user_roles_response_ids

    def test_show_error_codes(self):
        # Bad role ids are 400.
        response = self._get("roles/badroleid")
        assert_status_code_is(response, 400)

        # Trying to access another user's role is an error - should probably be 403 not 400 though?
        with self._different_user():
            different_user_role_id = self.dataset_populator.user_private_role_id()
        response = self._get("roles/%s" % different_user_role_id)
        assert_status_code_is(response, 400)

    def test_create_only_admin(self):
        response = self._post("roles")
        assert_status_code_is(response, 403)

    @staticmethod
    def check_role_dict(role_dict, assert_id=None):
        assert_has_keys(role_dict, "id", "name", "model_class", "url")
        assert role_dict["model_class"] == "Role"
        if assert_id is not None:
            assert role_dict["id"] == assert_id
class LibrariesApiTestCase(api.ApiTestCase, TestsDatasets):

    def setUp(self):
        super(LibrariesApiTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)

    def test_create(self):
        data = dict(name="CreateTestLibrary")
        create_response = self._post("libraries", data=data, admin=True)
        self._assert_status_code_is(create_response, 200)
        library = create_response.json()
        self._assert_has_keys(library, "name")
        assert library["name"] == "CreateTestLibrary"

    def test_delete(self):
        library = self.library_populator.new_library("DeleteTestLibrary")
        create_response = self._delete("libraries/%s" % library["id"], admin=True)
        self._assert_status_code_is(create_response, 200)
        library = create_response.json()
        self._assert_has_keys(library, "deleted")
        assert library["deleted"] is True
        # Test undeleting
        data = dict(undelete='true')
        create_response = self._delete("libraries/%s" % library["id"], data=data, admin=True)
        library = create_response.json()
        self._assert_status_code_is(create_response, 200)
        assert library["deleted"] is False

    def test_nonadmin(self):
        # Anons can't create libs
        data = dict(name="CreateTestLibrary")
        create_response = self._post("libraries", data=data, admin=False, anon=True)
        self._assert_status_code_is(create_response, 403)
        # Anons can't delete libs
        library = self.library_populator.new_library("AnonDeleteTestLibrary")
        create_response = self._delete("libraries/%s" % library["id"], admin=False, anon=True)
        self._assert_status_code_is(create_response, 403)
        # Anons can't update libs
        data = dict(name="ChangedName", description="ChangedDescription", synopsis='ChangedSynopsis')
        create_response = self._patch("libraries/%s" % library["id"], data=data, admin=False, anon=True)
        self._assert_status_code_is(create_response, 403)

    def test_update(self):
        library = self.library_populator.new_library("UpdateTestLibrary")
        data = dict(name='ChangedName', description='ChangedDescription', synopsis='ChangedSynopsis')
        create_response = self._patch("libraries/%s" % library["id"], data=data, admin=True)
        self._assert_status_code_is(create_response, 200)
        library = create_response.json()
        self._assert_has_keys(library, 'name', 'description', 'synopsis')
        assert library['name'] == 'ChangedName'
        assert library['description'] == 'ChangedDescription'
        assert library['synopsis'] == 'ChangedSynopsis'

    def test_create_private_library_permissions(self):
        library = self.library_populator.new_library("PermissionTestLibrary")
        library_id = library["id"]
        role_id = self.library_populator.user_private_role_id()
        self.library_populator.set_permissions(library_id, role_id)
        create_response = self._create_folder(library)
        self._assert_status_code_is(create_response, 200)

    def test_create_dataset_denied(self):
        library = self.library_populator.new_private_library("ForCreateDatasets")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
        with self._different_user():
            payload = {'from_hda_id': hda_id}
            create_response = self._post("folders/%s/contents" % folder_id, payload)
            self._assert_status_code_is(create_response, 403)

    def test_show_private_dataset_permissions(self):
        library, library_dataset = self.library_populator.new_library_dataset_in_private_library("ForCreateDatasets", wait=True)
        with self._different_user():
            response = self.library_populator.show_ldda(library["id"], library_dataset["id"])
            # TODO: this should really be 403 and a proper JSON exception.
            self._assert_status_code_is(response, 400)

    def test_create_dataset(self):
        library, library_dataset = self.library_populator.new_library_dataset_in_private_library("ForCreateDatasets", wait=True)
        self._assert_has_keys(library_dataset, "peek", "data_type")
        assert library_dataset["peek"].find("create_test") >= 0
        assert library_dataset["file_ext"] == "txt", library_dataset["file_ext"]

    def test_fetch_upload_to_folder(self):
        history_id, library, destination = self._setup_fetch_to_folder("flat_zip")
        items = [{"src": "files", "dbkey": "hg19", "info": "my cool bed"}]
        targets = [{
            "destination": destination,
            "items": items
        }]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
            "__files": {"files_0|file_data": open(self.test_data_resolver.get_filename("4.bed"))},
        }
        self.dataset_populator.fetch(payload)
        dataset = self.library_populator.get_library_contents_with_path(library["id"], "/4.bed")
        assert dataset["file_size"] == 61, dataset
        assert dataset["genome_build"] == "hg19", dataset
        assert dataset["misc_info"] == "my cool bed", dataset
        assert dataset["file_ext"] == "bed", dataset

    def test_fetch_zip_to_folder(self):
        history_id, library, destination = self._setup_fetch_to_folder("flat_zip")
        bed_test_data_path = self.test_data_resolver.get_filename("4.bed.zip")
        targets = [{
            "destination": destination,
            "items_from": "archive", "src": "files",
        }]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
            "__files": {"files_0|file_data": open(bed_test_data_path)}
        }
        self.dataset_populator.fetch(payload)
        dataset = self.library_populator.get_library_contents_with_path(library["id"], "/4.bed")
        assert dataset["file_size"] == 61, dataset

    def test_fetch_single_url_to_folder(self):
        history_id, library, destination = self._setup_fetch_to_folder("single_url")
        items = [{"src": "url", "url": "https://raw.githubusercontent.com/galaxyproject/galaxy/dev/test-data/4.bed"}]
        targets = [{
            "destination": destination,
            "items": items
        }]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
        }
        self.dataset_populator.fetch(payload)
        dataset = self.library_populator.get_library_contents_with_path(library["id"], "/4.bed")
        assert dataset["file_size"] == 61, dataset

    def test_fetch_url_archive_to_folder(self):
        history_id, library, destination = self._setup_fetch_to_folder("single_url")
        targets = [{
            "destination": destination,
            "items_from": "archive",
            "src": "url",
            "url": "https://raw.githubusercontent.com/galaxyproject/galaxy/dev/test-data/4.bed.zip",
        }]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
        }
        self.dataset_populator.fetch(payload)
        dataset = self.library_populator.get_library_contents_with_path(library["id"], "/4.bed")
        assert dataset["file_size"] == 61, dataset

    @unittest.skip("reference URLs changed, checksums now invalid")
    def test_fetch_bagit_archive_to_folder(self):
        history_id, library, destination = self._setup_fetch_to_folder("bagit_archive")
        example_bag_path = self.test_data_resolver.get_filename("example-bag.zip")
        targets = [{
            "destination": destination,
            "items_from": "bagit_archive", "src": "files",
        }]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
            "__files": {"files_0|file_data": open(example_bag_path)},
        }
        self.dataset_populator.fetch(payload)
        dataset = self.library_populator.get_library_contents_with_path(library["id"], "/README.txt")
        assert dataset["file_size"] == 66, dataset

        dataset = self.library_populator.get_library_contents_with_path(library["id"], "/bdbag-profile.json")
        assert dataset["file_size"] == 723, dataset

    def _setup_fetch_to_folder(self, test_name):
        return self.library_populator.setup_fetch_to_folder(test_name)

    def test_create_dataset_in_folder(self):
        library = self.library_populator.new_private_library("ForCreateDatasets")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
        payload = {'from_hda_id': hda_id}
        create_response = self._post("folders/%s/contents" % folder_id, payload)
        self._assert_status_code_is(create_response, 200)
        self._assert_has_keys(create_response.json(), "name", "id")

    def test_update_dataset_in_folder(self):
        library = self.library_populator.new_private_library("ForUpdateDataset")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
        payload = {'from_hda_id': hda_id, 'create_type': 'file', 'folder_id': folder_id}
        ld = self._post("libraries/%s/contents" % folder_id, payload)
        data = {'name': 'updated_name', 'file_ext': 'fastq', 'misc_info': 'updated_info', 'genome_build': 'updated_genome_build'}
        create_response = self._patch("libraries/datasets/%s" % ld.json()["id"], data=data)
        self._assert_status_code_is(create_response, 200)
        self._assert_has_keys(create_response.json(), "name", "file_ext", "misc_info", "genome_build")

    def test_invalid_update_dataset_in_folder(self):
        library = self.library_populator.new_private_library("ForInvalidUpdateDataset")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
        payload = {'from_hda_id': hda_id, 'create_type': 'file', 'folder_id': folder_id}
        ld = self._post("libraries/%s/contents" % folder_id, payload)
        data = {'file_ext': 'nonexisting_type'}
        create_response = self._patch("libraries/datasets/%s" % ld.json()["id"], data=data)
        self._assert_status_code_is(create_response, 400)
        assert 'This Galaxy does not recognize the datatype of:' in create_response.json()['err_msg']

    def test_create_datasets_in_library_from_collection(self):
        library = self.library_populator.new_private_library("ForCreateDatasetsFromCollection")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hdca_id = self.dataset_collection_populator.create_list_in_history(history_id, contents=["xxx", "yyy"]).json()["id"]
        payload = {'from_hdca_id': hdca_id, 'create_type': 'file', 'folder_id': folder_id}
        create_response = self._post("libraries/%s/contents" % library['id'], payload)
        self._assert_status_code_is(create_response, 200)

    def test_create_datasets_in_folder_from_collection(self):
        library = self.library_populator.new_private_library("ForCreateDatasetsFromCollection")
        history_id = self.dataset_populator.new_history()
        hdca_id = self.dataset_collection_populator.create_list_in_history(history_id, contents=["xxx", "yyy"]).json()["id"]
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        payload = {'from_hdca_id': hdca_id}
        create_response = self._post("folders/%s/contents" % folder_id, payload)
        self._assert_status_code_is(create_response, 200)
        assert len(create_response.json()) == 2
        # Also test that anything different from a flat dataset collection list
        # is refused
        hdca_pair_id = self.dataset_collection_populator.create_list_of_pairs_in_history(history_id).json()['id']
        payload = {'from_hdca_id': hdca_pair_id}
        create_response = self._post("folders/%s/contents" % folder_id, payload)
        self._assert_status_code_is(create_response, 501)
        assert create_response.json()['err_msg'] == 'Cannot add nested collections to library. Please flatten your collection first.'

    def _create_folder(self, library):
        create_data = dict(
            folder_id=library["root_folder_id"],
            create_type="folder",
            name="New Folder",
        )
        return self._post("libraries/%s/contents" % library["id"], data=create_data)
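
# ``LibraryPopulator.setup_fetch_to_folder`` (called via ``_setup_fetch_to_folder``
# above) is not shown in this snippet. A minimal sketch of what it plausibly
# returns, assuming a fresh private library per test and a fetch destination
# keyed on the library's root folder; details are illustrative, not the
# populator's actual code:
def setup_fetch_to_folder_sketch(library_populator, dataset_populator, test_name):
    history_id = dataset_populator.new_history()
    library = library_populator.new_private_library(test_name)
    destination = {"type": "library_folder", "library_folder_id": library["root_folder_id"]}
    return history_id, library, destination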
Example #51
class ObjectStoreJobsIntegrationTestCase(integration_util.IntegrationTestCase):

    framework_tool_and_types = True

    @classmethod
    def handle_galaxy_config_kwds(cls, config):
        temp_directory = cls._test_driver.mkdtemp()
        cls.object_stores_parent = temp_directory
        for disk_store_file_name in ["files_default", "files_static", "files_dynamic_ebs", "files_dynamic_s3"]:
            disk_store_path = os.path.join(temp_directory, disk_store_file_name)
            os.makedirs(disk_store_path)
            setattr(cls, "%s_path" % disk_store_file_name, disk_store_path)
        config_path = os.path.join(temp_directory, "object_store_conf.xml")
        with open(config_path, "w") as f:
            f.write(DISTRIBUTED_OBJECT_STORE_CONFIG_TEMPLATE.safe_substitute({"temp_directory": temp_directory}))
        config["object_store_config_file"] = config_path
        config["job_config_file"] = JOB_CONFIG_FILE
        config["job_resource_params_file"] = JOB_RESOURCE_PARAMETERS_CONFIG_FILE

    def setUp(self):
        super(ObjectStoreJobsIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    def _object_store_counts(self):
        # Note: there is no single ``files_dynamic`` store; the dynamic
        # backends are split into "ebs" and "s3" directories, matching the
        # paths created in handle_galaxy_config_kwds.
        files_default_count = _files_count(self.files_default_path)
        files_static_count = _files_count(self.files_static_path)
        files_dynamic_ebs_count = _files_count(self.files_dynamic_ebs_path)
        files_dynamic_s3_count = _files_count(self.files_dynamic_s3_path)
        return files_default_count, files_static_count, files_dynamic_ebs_count, files_dynamic_s3_count

    def _assert_file_counts(self, default, static, dynamic_ebs, dynamic_s3):
        files_default_count = _files_count(self.files_default_path)
        files_static_count = _files_count(self.files_static_path)
        files_dynamic_ebs_count = _files_count(self.files_dynamic_ebs_path)
        files_dynamic_s3_count = _files_count(self.files_dynamic_s3_path)
        assert default == files_default_count
        assert static == files_static_count
        assert dynamic_ebs == files_dynamic_ebs_count
        assert dynamic_s3 == files_dynamic_s3_count

    def test_tool_simple_constructs(self):
        with self.dataset_populator.test_history() as history_id:

            def _run_tool(tool_id, inputs):
                self.dataset_populator.run_tool(
                    tool_id,
                    inputs,
                    history_id,
                    assert_ok=True,
                )
                self.dataset_populator.wait_for_history(history_id)

            self._assert_file_counts(0, 0, 0, 0)

            hda1 = self.dataset_populator.new_dataset(history_id, content="1 2 3")
            self.dataset_populator.wait_for_history(history_id)
            hda1_input = {"src": "hda", "id": hda1["id"]}

            # One file uploaded, added to default object store ID.
            self._assert_file_counts(1, 0, 0, 0)

            # should create two files in static object store.
            _run_tool("multi_data_param", {"f1": hda1_input, "f2": hda1_input})
            self._assert_file_counts(1, 2, 0, 0)

            # should create ten files in ebs object store.
            create_10_inputs = {
                "input1": hda1_input,
                "input2": hda1_input,
            }
            _run_tool("create_10", create_10_inputs)
            self._assert_file_counts(1, 2, 10, 0)

            # should create 10 files in S3 object store.
            create_10_inputs = {
                "__job_resource|__job_resource__select": "yes",
                "__job_resource|how_store": "slow",
                "input1": hda1_input,
                "input2": hda1_input,
            }
            _run_tool("create_10", create_10_inputs)
            self._assert_file_counts(1, 2, 10, 10)
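
# ``_files_count`` is a module-level helper not shown in this snippet; a
# minimal sketch consistent with how the tests above use it:
def _files_count(directory):
    # Walk the object store directory tree and count regular files; disk
    # object stores nest dataset files inside subdirectories.
    return sum(len(files) for _, _, files in os.walk(directory))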
Example #52
    def setUp(self):
        super(DataManagerIntegrationTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
Example #53
    def setUp(self):
        super(WorkflowSyncTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.workflow_populator = WorkflowPopulator(self.galaxy_interactor)
class ToolsUploadTestCase(api.ApiTestCase):
    def setUp(self):
        super(ToolsUploadTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)

    def test_upload1_paste(self):
        with self.dataset_populator.test_history() as history_id:
            payload = self.dataset_populator.upload_payload(
                history_id, 'Hello World')
            create_response = self._post("tools", data=payload)
            self._assert_has_keys(create_response.json(), 'outputs')

    def test_upload1_paste_bad_datatype(self):
        # Check that you get a nice message if you upload an incorrect datatype
        with self.dataset_populator.test_history() as history_id:
            file_type = "johnsawesomebutfakedatatype"
            payload = self.dataset_populator.upload_payload(
                history_id, 'Hello World', file_type=file_type)
            create = self._post("tools", data=payload).json()
            self._assert_has_keys(create, 'err_msg')
            assert file_type in create['err_msg']

    def test_upload_posix_newline_fixes(self):
        windows_content = ONE_TO_SIX_ON_WINDOWS
        result_content = self._upload_and_get_content(windows_content)
        self.assertEqual(result_content, ONE_TO_SIX_WITH_TABS)

    def test_upload_disable_posix_fix(self):
        windows_content = ONE_TO_SIX_ON_WINDOWS
        result_content = self._upload_and_get_content(windows_content,
                                                      to_posix_lines=None)
        self.assertEqual(result_content, windows_content)

    def test_upload_tab_to_space(self):
        table = ONE_TO_SIX_WITH_SPACES
        result_content = self._upload_and_get_content(table,
                                                      space_to_tab="Yes")
        self.assertEqual(result_content, ONE_TO_SIX_WITH_TABS)

    def test_upload_tab_to_space_off_by_default(self):
        table = ONE_TO_SIX_WITH_SPACES
        result_content = self._upload_and_get_content(table)
        self.assertEqual(result_content, table)
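
    # The ONE_TO_SIX_* fixtures used by these upload tests are module-level
    # constants not shown in this snippet. Assumed definitions, consistent
    # with the assertions above (to_posix_lines rewrites "\r" endings as "\n";
    # space_to_tab turns single spaces into tabs):
    #
    #   ONE_TO_SIX_WITH_SPACES = "1 2 3\n4 5 6\n"
    #   ONE_TO_SIX_WITH_TABS = "1\t2\t3\n4\t5\t6\n"
    #   ONE_TO_SIX_ON_WINDOWS = "1\t2\t3\r4\t5\t6\r"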

    @skip_without_datatype("rdata")
    def test_rdata_not_decompressed(self):
        # Prevent regression of https://github.com/galaxyproject/galaxy/issues/753
        rdata_path = TestDataResolver().get_filename("1.RData")
        rdata_metadata = self._upload_and_get_details(open(rdata_path, "rb"),
                                                      file_type="auto")
        self.assertEqual(rdata_metadata["file_ext"], "rdata")

    @skip_without_datatype("velvet")
    def test_composite_datatype(self):
        with self.dataset_populator.test_history() as history_id:
            dataset = self._velvet_upload(history_id, extra_inputs={
                "files_1|url_paste": "roadmaps content",
                "files_1|type": "upload_dataset",
                "files_2|url_paste": "log content",
                "files_2|type": "upload_dataset",
            })

            roadmaps_content = self._get_roadmaps_content(history_id, dataset)
            assert roadmaps_content.strip() == "roadmaps content", roadmaps_content

    @skip_without_datatype("velvet")
    def test_composite_datatype_space_to_tab(self):
        # Like previous test but set one upload with space_to_tab to True to
        # verify that works.
        with self.dataset_populator.test_history() as history_id:
            dataset = self._velvet_upload(history_id, extra_inputs={
                "files_1|url_paste": "roadmaps content",
                "files_1|type": "upload_dataset",
                "files_1|space_to_tab": "Yes",
                "files_2|url_paste": "log content",
                "files_2|type": "upload_dataset",
            })

            roadmaps_content = self._get_roadmaps_content(history_id, dataset)
            assert roadmaps_content.strip() == "roadmaps\tcontent", roadmaps_content

    @skip_without_datatype("velvet")
    def test_composite_datatype_posix_lines(self):
        # Like the previous test but pastes content with carriage-return line
        # endings to verify that to_posix_lines (enabled by default) normalizes
        # them to "\n". (space_to_tab is a no-op here since the pasted content
        # contains no spaces.)
        with self.dataset_populator.test_history() as history_id:
            dataset = self._velvet_upload(history_id, extra_inputs={
                "files_1|url_paste": "roadmaps\rcontent",
                "files_1|type": "upload_dataset",
                "files_1|space_to_tab": "Yes",
                "files_2|url_paste": "log\rcontent",
                "files_2|type": "upload_dataset",
            })

            roadmaps_content = self._get_roadmaps_content(history_id, dataset)
            assert roadmaps_content.strip() == "roadmaps\ncontent", roadmaps_content

    def test_upload_dbkey(self):
        with self.dataset_populator.test_history() as history_id:
            payload = self.dataset_populator.upload_payload(history_id,
                                                            "Test123",
                                                            dbkey="hg19")
            run_response = self.dataset_populator.tools_post(payload)
            self.dataset_populator.wait_for_tool_run(history_id, run_response)
            datasets = run_response.json()["outputs"]
            assert datasets[0].get("genome_build") == "hg19", datasets[0]

    def test_upload_multiple_files_1(self):
        with self.dataset_populator.test_history() as history_id:
            payload = self.dataset_populator.upload_payload(
                history_id,
                "Test123",
                dbkey="hg19",
                extra_inputs={
                    "files_1|url_paste": "SecondOutputContent",
                    "files_1|NAME": "SecondOutputName",
                    "files_1|file_type": "tabular",
                    "files_1|dbkey": "hg18",
                    "file_count": "2",
                })
            run_response = self.dataset_populator.tools_post(payload)
            self.dataset_populator.wait_for_tool_run(history_id, run_response)
            datasets = run_response.json()["outputs"]

            assert len(datasets) == 2, datasets
            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[0])
            assert content.strip() == "Test123"
            assert datasets[0]["file_ext"] == "txt"
            assert datasets[0]["genome_build"] == "hg19", datasets

            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[1])
            assert content.strip() == "SecondOutputContent"
            assert datasets[1]["file_ext"] == "tabular"
            assert datasets[1]["genome_build"] == "hg18", datasets

    def test_upload_multiple_files_2(self):
        with self.dataset_populator.test_history() as history_id:
            payload = self.dataset_populator.upload_payload(
                history_id,
                "Test123",
                file_type="tabular",
                dbkey="hg19",
                extra_inputs={
                    "files_1|url_paste": "SecondOutputContent",
                    "files_1|NAME": "SecondOutputName",
                    "files_1|file_type": "txt",
                    "files_1|dbkey": "hg18",
                    "file_count": "2",
                })
            run_response = self.dataset_populator.tools_post(payload)
            self.dataset_populator.wait_for_tool_run(history_id, run_response)
            datasets = run_response.json()["outputs"]

            assert len(datasets) == 2, datasets
            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[0])
            assert content.strip() == "Test123"
            assert datasets[0]["file_ext"] == "tabular", datasets
            assert datasets[0]["genome_build"] == "hg19", datasets

            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[1])
            assert content.strip() == "SecondOutputContent"
            assert datasets[1]["file_ext"] == "txt"
            assert datasets[1]["genome_build"] == "hg18", datasets

    def test_upload_multiple_files_3(self):
        with self.dataset_populator.test_history() as history_id:
            payload = self.dataset_populator.upload_payload(
                history_id,
                "Test123",
                file_type="tabular",
                dbkey="hg19",
                extra_inputs={
                    "files_0|file_type": "txt",
                    "files_0|dbkey": "hg18",
                    "files_1|url_paste": "SecondOutputContent",
                    "files_1|NAME": "SecondOutputName",
                    "files_1|file_type": "txt",
                    "files_1|dbkey": "hg18",
                    "file_count": "2",
                })
            run_response = self.dataset_populator.tools_post(payload)
            self.dataset_populator.wait_for_tool_run(history_id, run_response)
            datasets = run_response.json()["outputs"]

            assert len(datasets) == 2, datasets
            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[0])
            assert content.strip() == "Test123"
            assert datasets[0]["file_ext"] == "txt", datasets
            assert datasets[0]["genome_build"] == "hg18", datasets

            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[1])
            assert content.strip() == "SecondOutputContent"
            assert datasets[1]["file_ext"] == "txt"
            assert datasets[1]["genome_build"] == "hg18", datasets

    def test_upload_multiple_files_no_dbkey(self):
        with self.dataset_populator.test_history() as history_id:
            payload = self.dataset_populator.upload_payload(
                history_id,
                "Test123",
                file_type="tabular",
                dbkey=None,
                extra_inputs={
                    "files_0|file_type": "txt",
                    "files_1|url_paste": "SecondOutputContent",
                    "files_1|NAME": "SecondOutputName",
                    "files_1|file_type": "txt",
                    "file_count": "2",
                })
            run_response = self.dataset_populator.tools_post(payload)
            self.dataset_populator.wait_for_tool_run(history_id, run_response)
            datasets = run_response.json()["outputs"]

            assert len(datasets) == 2, datasets
            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[0])
            assert content.strip() == "Test123"
            assert datasets[0]["file_ext"] == "txt", datasets
            assert datasets[0]["genome_build"] == "?", datasets

            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[1])
            assert content.strip() == "SecondOutputContent"
            assert datasets[1]["file_ext"] == "txt"
            assert datasets[1]["genome_build"] == "?", datasets

    def test_upload_multiple_files_space_to_tab(self):
        with self.dataset_populator.test_history() as history_id:
            payload = self.dataset_populator.upload_payload(
                history_id,
                content=ONE_TO_SIX_WITH_SPACES,
                file_type="tabular",
                dbkey="hg19",
                extra_inputs={
                    "files_0|file_type": "txt",
                    "files_0|space_to_tab": "Yes",
                    "files_1|url_paste": ONE_TO_SIX_WITH_SPACES,
                    "files_1|NAME": "SecondOutputName",
                    "files_1|file_type": "txt",
                    "files_2|url_paste": ONE_TO_SIX_WITH_SPACES,
                    "files_2|NAME": "ThirdOutputName",
                    "files_2|file_type": "txt",
                    "files_2|space_to_tab": "Yes",
                    "file_count": "3",
                })
            run_response = self.dataset_populator.tools_post(payload)
            self.dataset_populator.wait_for_tool_run(history_id, run_response)
            datasets = run_response.json()["outputs"]

            assert len(datasets) == 3, datasets
            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[0])
            assert content == ONE_TO_SIX_WITH_TABS

            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[1])
            assert content == ONE_TO_SIX_WITH_SPACES

            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[2])
            assert content == ONE_TO_SIX_WITH_TABS

    def test_multiple_files_posix_lines(self):
        with self.dataset_populator.test_history() as history_id:
            payload = self.dataset_populator.upload_payload(
                history_id,
                content=ONE_TO_SIX_ON_WINDOWS,
                file_type="tabular",
                dbkey="hg19",
                extra_inputs={
                    "files_0|file_type": "txt",
                    "files_0|to_posix_lines": "Yes",
                    "files_1|url_paste": ONE_TO_SIX_ON_WINDOWS,
                    "files_1|NAME": "SecondOutputName",
                    "files_1|file_type": "txt",
                    "files_1|to_posix_lines": None,
                    "files_2|url_paste": ONE_TO_SIX_ON_WINDOWS,
                    "files_2|NAME": "ThirdOutputName",
                    "files_2|file_type": "txt",
                    "file_count": "3",
                })
            run_response = self.dataset_populator.tools_post(payload)
            self.dataset_populator.wait_for_tool_run(history_id, run_response)
            datasets = run_response.json()["outputs"]

            assert len(datasets) == 3, datasets
            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[0])
            assert content == ONE_TO_SIX_WITH_TABS

            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[1])
            assert content == ONE_TO_SIX_ON_WINDOWS

            content = self.dataset_populator.get_history_dataset_content(
                history_id, dataset=datasets[2])
            assert content == ONE_TO_SIX_WITH_TABS

    def _velvet_upload(self, history_id, extra_inputs):
        payload = self.dataset_populator.upload_payload(
            history_id,
            "sequences content",
            file_type="velvet",
            extra_inputs=extra_inputs,
        )
        run_response = self.dataset_populator.tools_post(payload)
        self.dataset_populator.wait_for_tool_run(history_id, run_response)
        datasets = run_response.json()["outputs"]

        assert len(datasets) == 1
        dataset = datasets[0]

        return dataset

    def _get_roadmaps_content(self, history_id, dataset):
        roadmaps_content = self.dataset_populator.get_history_dataset_content(
            history_id, dataset=dataset, filename="Roadmaps")
        return roadmaps_content

    def _upload_and_get_content(self, content, **upload_kwds):
        history_id, new_dataset = self._upload(content, **upload_kwds)
        return self.dataset_populator.get_history_dataset_content(
            history_id, dataset=new_dataset)

    def _upload_and_get_details(self, content, **upload_kwds):
        history_id, new_dataset = self._upload(content, **upload_kwds)
        return self.dataset_populator.get_history_dataset_details(
            history_id, dataset=new_dataset)

    def _upload(self, content, **upload_kwds):
        history_id = self.dataset_populator.new_history()
        new_dataset = self.dataset_populator.new_dataset(history_id,
                                                         content=content,
                                                         **upload_kwds)
        self.dataset_populator.wait_for_history(history_id, assert_ok=True)
        return history_id, new_dataset
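
# ``skip_without_datatype`` (used on the velvet and rdata tests above) comes
# from the shared test helpers; a minimal sketch of its behavior, assuming the
# ``datatypes`` API returns the list of registered extensions:
def skip_without_datatype_sketch(extension):
    from functools import wraps
    from unittest import SkipTest

    def decorator(method):
        @wraps(method)
        def wrapper(api_test_case, *args, **kwargs):
            response = api_test_case.galaxy_interactor.get("datatypes")
            if extension not in response.json():
                raise SkipTest("Missing datatype [%s]" % extension)
            return method(api_test_case, *args, **kwargs)
        return wrapper
    return decorator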
Example #55
    def setUp(self):
        super(HistoriesApiTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
class DatasetCollectionApiTestCase(api.ApiTestCase):

    def setUp(self):
        super(DatasetCollectionApiTestCase, self).setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    def test_create_pair_from_history(self):
        payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id,
            instance_type="history",
        )
        create_response = self._post("dataset_collections", payload)
        dataset_collection = self._check_create_response(create_response)
        returned_datasets = dataset_collection["elements"]
        assert len(returned_datasets) == 2, dataset_collection

    def test_create_list_from_history(self):
        element_identifiers = self.dataset_collection_populator.list_identifiers(self.history_id)

        payload = dict(
            instance_type="history",
            history_id=self.history_id,
            element_identifiers=json.dumps(element_identifiers),
            collection_type="list",
        )

        create_response = self._post("dataset_collections", payload)
        dataset_collection = self._check_create_response(create_response)
        returned_datasets = dataset_collection["elements"]
        assert len(returned_datasets) == 3, dataset_collection

    def test_create_list_of_existing_pairs(self):
        pair_payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id,
            instance_type="history",
        )
        pair_create_response = self._post("dataset_collections", pair_payload)
        dataset_collection = self._check_create_response(pair_create_response)
        hdca_id = dataset_collection["id"]

        element_identifiers = [
            dict(name="test1", src="hdca", id=hdca_id)
        ]

        payload = dict(
            instance_type="history",
            history_id=self.history_id,
            element_identifiers=json.dumps(element_identifiers),
            collection_type="list",
        )
        create_response = self._post("dataset_collections", payload)
        dataset_collection = self._check_create_response(create_response)
        returned_collections = dataset_collection["elements"]
        assert len(returned_collections) == 1, dataset_collection

    def test_create_list_of_new_pairs(self):
        identifiers = self.dataset_collection_populator.nested_collection_identifiers(self.history_id, "list:paired")
        payload = dict(
            collection_type="list:paired",
            instance_type="history",
            history_id=self.history_id,
            name="a nested collection",
            element_identifiers=json.dumps(identifiers),
        )
        create_response = self._post("dataset_collections", payload)
        dataset_collection = self._check_create_response(create_response)
        assert dataset_collection["collection_type"] == "list:paired"
        assert dataset_collection["name"] == "a nested collection"
        returned_collections = dataset_collection["elements"]
        assert len(returned_collections) == 1, dataset_collection
        pair_1_element = returned_collections[0]
        self._assert_has_keys(pair_1_element, "element_identifier", "element_index", "object")
        assert pair_1_element["element_identifier"] == "test_level_1", pair_1_element
        assert pair_1_element["element_index"] == 0, pair_1_element
        pair_1_object = pair_1_element["object"]
        self._assert_has_keys(pair_1_object, "collection_type", "elements", "element_count")
        self.assertEqual(pair_1_object["collection_type"], "paired")
        self.assertEqual(pair_1_object["populated"], True)
        pair_elements = pair_1_object["elements"]
        assert len(pair_elements) == 2
        pair_1_element_1 = pair_elements[0]
        assert pair_1_element_1["element_index"] == 0

    def test_list_download(self):
        fetch_response = self.dataset_collection_populator.create_list_in_history(self.history_id, direct_upload=True).json()
        dataset_collection = self.dataset_collection_populator.wait_for_fetched_collection(fetch_response)
        returned_dce = dataset_collection["elements"]
        assert len(returned_dce) == 3, dataset_collection
        download_response = self._download_dataset_collection(history_id=self.history_id, hdca_id=dataset_collection['id'])
        self._assert_status_code_is(download_response, 200)
        tar_contents = tarfile.open(fileobj=BytesIO(download_response.content))
        namelist = tar_contents.getnames()
        assert len(namelist) == 3, "Expected 3 elements in [%s]" % namelist
        collection_name = dataset_collection['name']
        for element, archive_path in zip(returned_dce, namelist):
            assert "%s/%s.%s" % (collection_name, element['element_identifier'], element['object']['file_ext']) == archive_path

    def test_pair_download(self):
        fetch_response = self.dataset_collection_populator.create_pair_in_history(self.history_id, direct_upload=True).json()
        dataset_collection = self.dataset_collection_populator.wait_for_fetched_collection(fetch_response)
        returned_dce = dataset_collection["elements"]
        assert len(returned_dce) == 2, dataset_collection
        hdca_id = dataset_collection['id']
        download_response = self._download_dataset_collection(history_id=self.history_id, hdca_id=hdca_id)
        self._assert_status_code_is(download_response, 200)
        tar_contents = tarfile.open(fileobj=BytesIO(download_response.content))
        namelist = tar_contents.getnames()
        assert len(namelist) == 2, "Expected 2 elements in [%s]" % namelist
        collection_name = dataset_collection['name']
        for element, archive_path in zip(returned_dce, namelist):
            assert "%s/%s.%s" % (collection_name, element['element_identifier'], element['object']['file_ext']) == archive_path

    def test_list_pair_download(self):
        fetch_response = self.dataset_collection_populator.create_list_of_pairs_in_history(self.history_id).json()
        dataset_collection = self.dataset_collection_populator.wait_for_fetched_collection(fetch_response)
        returned_dce = dataset_collection["elements"]
        assert len(returned_dce) == 1, dataset_collection
        list_collection_name = dataset_collection['name']
        pair = returned_dce[0]
        download_response = self._download_dataset_collection(history_id=self.history_id, hdca_id=dataset_collection['id'])
        self._assert_status_code_is(download_response, 200)
        tar_contents = tarfile.open(fileobj=BytesIO(download_response.content))
        namelist = tar_contents.getnames()
        assert len(namelist) == 2, "Expected 2 elements in [%s]" % namelist
        pair_collection_name = pair['element_identifier']
        for element, archive_path in zip(pair['object']['elements'], namelist):
            assert "%s/%s/%s.%s" % (list_collection_name, pair_collection_name, element['element_identifier'], element['object']['file_ext']) == archive_path

    def test_list_list_download(self):
        dataset_collection = self.dataset_collection_populator.create_list_of_list_in_history(self.history_id).json()
        self.dataset_collection_populator.wait_for_dataset_collection(dataset_collection, assert_ok=True)
        returned_dce = dataset_collection["elements"]
        assert len(returned_dce) == 1, dataset_collection
        download_response = self._download_dataset_collection(history_id=self.history_id, hdca_id=dataset_collection['id'])
        self._assert_status_code_is(download_response, 200)
        tar_contents = tarfile.open(fileobj=BytesIO(download_response.content))
        namelist = tar_contents.getnames()
        assert len(namelist) == 3, "Expected 3 elements in [%s]" % namelist

    def test_list_list_list_download(self):
        dataset_collection = self.dataset_collection_populator.create_list_of_list_in_history(self.history_id, collection_type='list:list:list').json()
        self.dataset_collection_populator.wait_for_dataset_collection(dataset_collection, assert_ok=True)
        returned_dce = dataset_collection["elements"]
        assert len(returned_dce) == 1, dataset_collection
        download_response = self._download_dataset_collection(history_id=self.history_id, hdca_id=dataset_collection['id'])
        self._assert_status_code_is(download_response, 200)
        tar_contents = tarfile.open(fileobj=BytesIO(download_response.content))
        namelist = tar_contents.getnames()
        assert len(namelist) == 3, "Expected 3 elements in [%s]" % namelist
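
    # The download tests above repeat the same status-check and tar-listing
    # boilerplate; a small consolidation helper (a sketch, not part of the
    # original test case):
    def _download_archive_names_sketch(self, hdca_id):
        download_response = self._download_dataset_collection(history_id=self.history_id, hdca_id=hdca_id)
        self._assert_status_code_is(download_response, 200)
        tar_contents = tarfile.open(fileobj=BytesIO(download_response.content))
        return tar_contents.getnames()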

    def test_hda_security(self):
        element_identifiers = self.dataset_collection_populator.pair_identifiers(self.history_id)
        self.dataset_populator.make_private(self.history_id, element_identifiers[0]["id"])
        with self._different_user():
            history_id = self.dataset_populator.new_history()
            payload = dict(
                instance_type="history",
                history_id=history_id,
                element_identifiers=json.dumps(element_identifiers),
                collection_type="paired",
            )
            create_response = self._post("dataset_collections", payload)
            self._assert_status_code_is(create_response, 403)

    def test_enforces_unique_names(self):
        element_identifiers = self.dataset_collection_populator.list_identifiers(self.history_id)
        element_identifiers[2]["name"] = element_identifiers[0]["name"]
        payload = dict(
            instance_type="history",
            history_id=self.history_id,
            element_identifiers=json.dumps(element_identifiers),
            collection_type="list",
        )

        create_response = self._post("dataset_collections", payload)
        self._assert_status_code_is(create_response, 400)

    def test_upload_collection(self):
        elements = [{"src": "files", "dbkey": "hg19", "info": "my cool bed", "tags": ["name:data1", "group:condition:treated", "machine:illumina"]}]
        targets = [{
            "destination": {"type": "hdca"},
            "elements": elements,
            "collection_type": "list",
            "name": "Test upload",
            "tags": ["name:collection1"]
        }]
        payload = {
            "history_id": self.history_id,
            "targets": json.dumps(targets),
            "__files": {"files_0|file_data": open(self.test_data_resolver.get_filename("4.bed"))},
        }
        self.dataset_populator.fetch(payload)
        hdca = self._assert_one_collection_created_in_history()
        self.assertEqual(hdca["name"], "Test upload")
        hdca_tags = hdca["tags"]
        assert len(hdca_tags) == 1
        assert "name:collection1" in hdca_tags
        assert len(hdca["elements"]) == 1, hdca
        element0 = hdca["elements"][0]
        assert element0["element_identifier"] == "4.bed"
        dataset0 = element0["object"]
        assert dataset0["file_size"] == 61
        dataset_tags = dataset0["tags"]
        assert len(dataset_tags) == 3, dataset0

    def test_upload_nested(self):
        elements = [{"name": "samp1", "elements": [{"src": "files", "dbkey": "hg19", "info": "my cool bed"}]}]
        targets = [{
            "destination": {"type": "hdca"},
            "elements": elements,
            "collection_type": "list:list",
            "name": "Test upload",
        }]
        payload = {
            "history_id": self.history_id,
            "targets": json.dumps(targets),
            "__files": {"files_0|file_data": open(self.test_data_resolver.get_filename("4.bed"))},
        }
        self.dataset_populator.fetch(payload)
        hdca = self._assert_one_collection_created_in_history()
        self.assertEqual(hdca["name"], "Test upload")
        assert len(hdca["elements"]) == 1, hdca
        element0 = hdca["elements"][0]
        assert element0["element_identifier"] == "samp1"

    def test_upload_collection_from_url(self):
        elements = [{"src": "url", "url": "https://raw.githubusercontent.com/galaxyproject/galaxy/dev/test-data/4.bed", "info": "my cool bed"}]
        targets = [{
            "destination": {"type": "hdca"},
            "elements": elements,
            "collection_type": "list",
        }]
        payload = {
            "history_id": self.history_id,
            "targets": json.dumps(targets),
            "__files": {"files_0|file_data": open(self.test_data_resolver.get_filename("4.bed"))},
        }
        self.dataset_populator.fetch(payload)
        hdca = self._assert_one_collection_created_in_history()
        assert len(hdca["elements"]) == 1, hdca
        element0 = hdca["elements"][0]
        assert element0["element_identifier"] == "4.bed"
        assert element0["object"]["file_size"] == 61

    def _assert_one_collection_created_in_history(self):
        contents_response = self._get("histories/%s/contents/dataset_collections" % self.history_id)
        self._assert_status_code_is(contents_response, 200)
        contents = contents_response.json()
        assert len(contents) == 1
        hdca = contents[0]
        assert hdca["history_content_type"] == "dataset_collection"
        hdca_id = hdca["id"]
        collection_response = self._get("histories/%s/contents/dataset_collections/%s" % (self.history_id, hdca_id))
        self._assert_status_code_is(collection_response, 200)
        return collection_response.json()

    def _check_create_response(self, create_response):
        self._assert_status_code_is(create_response, 200)
        dataset_collection = create_response.json()
        self._assert_has_keys(dataset_collection, "elements", "url", "name", "collection_type", "element_count")
        return dataset_collection

    def _download_dataset_collection(self, history_id, hdca_id):
        return self._get("histories/%s/contents/dataset_collections/%s/download" % (history_id, hdca_id))