Example No. 1
0
 def setUp(self):
     """Create populator helpers and a fresh history for each test."""
     super().setUp()
     interactor = self.galaxy_interactor
     self.dataset_populator = DatasetPopulator(interactor)
     self.dataset_collection_populator = DatasetCollectionPopulator(interactor)
     self.library_populator = LibraryPopulator(interactor)
     self.history_id = self.dataset_populator.new_history()
Example No. 2
0
 def setUp(self):
     """Create a test history and populator helpers for each test."""
     # Zero-argument super() (Python 3 idiom) — consistent with the other
     # setUp implementations in this file, which already use it.
     super().setUp()
     self.history_id = self._new_history()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
     self.dataset_collection_populator = DatasetCollectionPopulator(
         self.galaxy_interactor)
     self.library_populator = LibraryPopulator(self.galaxy_interactor)
Example No. 3
0
    def setUp(self):
        """Build populators, a fresh history, and a private library with a root folder."""
        super().setUp()
        interactor = self.galaxy_interactor
        self.dataset_populator = DatasetPopulator(interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(interactor)
        self.library_populator = LibraryPopulator(interactor)

        self.history_id = self.dataset_populator.new_history()
        self.library = self.library_populator.new_private_library("FolderContentsTestsLibrary")
        self.root_folder_id = self._create_folder_in_library("Test Folder Contents")
Example No. 4
0
class ConfigurationDecodeIntegrationTestCase(integration_util.IntegrationTestCase):
    """Integration tests for the admin-only configuration/decode/<id> endpoint."""

    def setUp(self):
        super().setUp()
        self.library_populator = LibraryPopulator(self.galaxy_interactor)

    def test_admin_decode_id(self):
        """An admin can decode both a library id and its 'F'-prefixed folder form."""
        new_lib = self.library_populator.new_library('DecodeTestLibrary')
        encoded_id = new_lib["id"]
        expected_id = self._app.security.decode_id(encoded_id)

        decode_response = self._get(f"configuration/decode/{encoded_id}", admin=True)
        assert expected_id == decode_response.json()["decoded_id"]

        # A "valid" folder id is the same encoded id with an 'F' prepended.
        folder_decode_response = self._get(f"configuration/decode/F{encoded_id}", admin=True)
        assert expected_id == folder_decode_response.json()["decoded_id"]
Example No. 5
0
class ConfigurationApiTestCase(ApiTestCase):
    """API tests for the configuration endpoint and encoded-id decoding."""

    def setUp(self):
        # Zero-argument super() (Python 3 idiom).
        super().setUp()
        self.library_populator = LibraryPopulator(self.galaxy_interactor)

    def test_normal_user_configuration(self):
        """Regular users see only the non-admin configuration keys."""
        config = self._get_configuration()
        assert_has_keys(config, *TEST_KEYS_FOR_ALL_USERS)
        assert_not_has_keys(config, *TEST_KEYS_FOR_ADMIN_ONLY)

    def test_admin_user_configuration(self):
        """Admins additionally see the admin-only configuration keys."""
        config = self._get_configuration(admin=True)
        assert_has_keys(config, *TEST_KEYS_FOR_ALL_USERS)
        assert_has_keys(config, *TEST_KEYS_FOR_ADMIN_ONLY)

    def test_admin_decode_id(self):
        """Admins can decode library ids and 'F'-prefixed folder ids."""
        new_lib = self.library_populator.new_library('DecodeTestLibrary')
        decode_response = self._get("configuration/decode/" + new_lib["id"],
                                    admin=True)
        response_id = decode_response.json()["decoded_id"]
        decoded_library_id = self.security.decode_id(new_lib["id"])
        assert decoded_library_id == response_id
        # fake valid folder id by prepending F
        valid_encoded_folder_id = 'F' + new_lib["id"]
        folder_decode_response = self._get("configuration/decode/" +
                                           valid_encoded_folder_id,
                                           admin=True)
        folder_response_id = folder_decode_response.json()["decoded_id"]
        assert decoded_library_id == folder_response_id

    def test_normal_user_decode_id(self):
        """Non-admin users are rejected (403) by the decode endpoint."""
        decode_response = self._get("configuration/decode/badhombre",
                                    admin=False)
        self._assert_status_code_is(decode_response, 403)

    def _get_configuration(self, data=None, admin=False):
        """GET /api/configuration and return the parsed JSON body.

        ``data`` defaults to ``None`` rather than a mutable ``{}`` — the
        original shared one dict instance across all calls (mutable default
        argument anti-pattern).
        """
        response = self._get("configuration", data=data or {}, admin=admin)
        self._assert_status_code_is(response, 200)
        configuration = response.json()
        return configuration
Example No. 6
0
class LibrariesApiTestCase(ApiTestCase, TestsDatasets):
    """API-level tests for data libraries, their folders and their datasets."""

    def setUp(self):
        # Fresh populator helpers per test; they wrap the Galaxy API client.
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(
            self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)

    def test_create(self):
        """An admin can create a library via POST /api/libraries."""
        payload = dict(name="CreateTestLibrary")
        response = self._post("libraries", data=payload, admin=True)
        self._assert_status_code_is(response, 200)
        created = response.json()
        self._assert_has_keys(created, "name")
        assert created["name"] == "CreateTestLibrary"

    def test_delete(self):
        """An admin can delete a library and undelete it again."""
        library = self.library_populator.new_library("DeleteTestLibrary")
        url = f"libraries/{library['id']}"

        delete_response = self._delete(url, admin=True)
        self._assert_status_code_is(delete_response, 200)
        deleted = delete_response.json()
        self._assert_has_keys(deleted, "deleted")
        assert deleted["deleted"] is True

        # Undeleting goes through the same DELETE route with undelete=True.
        undelete_response = self._delete(url, data=dict(undelete=True), admin=True)
        undeleted = undelete_response.json()
        self._assert_status_code_is(undelete_response, 200)
        assert undeleted["deleted"] is False

    def test_nonadmin(self):
        """Anonymous users may neither create, delete, nor update libraries."""
        # Anons can't create libs
        create_response = self._post("libraries",
                                     data=dict(name="CreateTestLibrary"),
                                     admin=False,
                                     anon=True)
        self._assert_status_code_is(create_response, 403)

        # Anons can't delete libs
        library = self.library_populator.new_library("AnonDeleteTestLibrary")
        delete_response = self._delete(f"libraries/{library['id']}",
                                       admin=False,
                                       anon=True)
        self._assert_status_code_is(delete_response, 403)

        # Anons can't update libs
        update_payload = dict(name="ChangedName",
                              description="ChangedDescription",
                              synopsis='ChangedSynopsis')
        patch_response = self._patch(f"libraries/{library['id']}",
                                     data=update_payload,
                                     admin=False,
                                     anon=True)
        self._assert_status_code_is(patch_response, 403)

    def test_update(self):
        """PATCH on a library updates name, description and synopsis."""
        library = self.library_populator.new_library("UpdateTestLibrary")
        changes = dict(name='ChangedName',
                       description='ChangedDescription',
                       synopsis='ChangedSynopsis')
        patch_response = self._patch(f"libraries/{library['id']}",
                                     data=changes,
                                     admin=True)
        self._assert_status_code_is(patch_response, 200)
        updated = patch_response.json()
        self._assert_has_keys(updated, 'name', 'description', 'synopsis')
        assert updated['name'] == 'ChangedName'
        assert updated['description'] == 'ChangedDescription'
        assert updated['synopsis'] == 'ChangedSynopsis'

    def test_create_private_library_permissions(self):
        """The owner can still create folders after restricting the library to their private role."""
        library = self.library_populator.new_library("PermissionTestLibrary")
        private_role_id = self.library_populator.user_private_role_id()
        self.library_populator.set_permissions(library["id"], private_role_id)
        create_response = self._create_folder(library)
        self._assert_status_code_is(create_response, 200)

    def test_create_dataset_denied(self):
        """A different user may not add datasets to someone else's private library."""
        library = self.library_populator.new_private_library("ForCreateDatasets")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']

        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']

        with self._different_user():
            create_response = self._post(f"folders/{folder_id}/contents",
                                         {'from_hda_id': hda_id})
            self._assert_status_code_is(create_response, 403)

    def test_show_private_dataset_permissions(self):
        """A different user cannot show an LDDA from a private library."""
        library, library_dataset = self.library_populator.new_library_dataset_in_private_library(
            "ForCreateDatasets", wait=True)
        with self._different_user():
            show_response = self.library_populator.show_ldda(library["id"],
                                                             library_dataset["id"])
            # TODO: this should really be 403 and a proper JSON exception.
            self._assert_status_code_is(show_response, 400)

    def test_create_dataset(self):
        """A library dataset created in a private library has peek/data_type set."""
        library, library_dataset = self.library_populator.new_library_dataset_in_private_library(
            "ForCreateDatasets", wait=True)
        self._assert_has_keys(library_dataset, "peek", "data_type")
        assert "create_test" in library_dataset["peek"]
        assert library_dataset["file_ext"] == "txt", library_dataset["file_ext"]

    def test_fetch_upload_to_folder(self):
        """The fetch API can upload a posted file into a library folder."""
        history_id, library, destination = self._setup_fetch_to_folder("flat_zip")
        items = [{
            "src": "files",
            "dbkey": "hg19",
            "info": "my cool bed",
            "created_from_basename": "4.bed"
        }]
        targets = [{"destination": destination, "items": items}]
        # Open the upload inside a context manager so the handle is always
        # closed (the original leaked it), and in binary mode, consistent with
        # the other fetch-upload tests in this class.
        with open(self.test_data_resolver.get_filename("4.bed"), 'rb') as fh:
            payload = {
                "history_id": history_id,  # TODO: Shouldn't be needed :(
                "targets": json.dumps(targets),
                "__files": {"files_0|file_data": fh},
            }
            self.dataset_populator.fetch(payload)
        dataset = self.library_populator.get_library_contents_with_path(
            library["id"], "/4.bed")
        assert dataset["file_size"] == 61, dataset
        assert dataset["genome_build"] == "hg19", dataset
        assert dataset["misc_info"] == "my cool bed", dataset
        assert dataset["file_ext"] == "bed", dataset
        assert dataset["created_from_basename"] == "4.bed"

    def test_fetch_zip_to_folder(self):
        """The fetch API can expand a posted zip archive into a library folder."""
        history_id, library, destination = self._setup_fetch_to_folder("flat_zip")
        bed_test_data_path = self.test_data_resolver.get_filename("4.bed.zip")
        targets = [{
            "destination": destination,
            "items_from": "archive",
            "src": "files",
        }]
        # Context manager closes the archive handle; the original leaked it.
        with open(bed_test_data_path, 'rb') as fh:
            payload = {
                "history_id": history_id,  # TODO: Shouldn't be needed :(
                "targets": json.dumps(targets),
                "__files": {"files_0|file_data": fh},
            }
            self.dataset_populator.fetch(payload)
        dataset = self.library_populator.get_library_contents_with_path(
            library["id"], "/4.bed")
        assert dataset["file_size"] == 61, dataset

    def test_fetch_single_url_to_folder(self):
        """A single URL fetch lands the file in the library folder."""
        library, _ = self._fetch_single_url_to_folder()
        dataset = self.library_populator.get_library_contents_with_path(library["id"],
                                                                        "/4.bed")
        assert dataset["file_size"] == 61, dataset

    def test_fetch_single_url_with_invalid_datatype(self):
        """Fetching with an unknown extension yields a 400 and an explanation."""
        _, response = self._fetch_single_url_to_folder('xxx', assert_ok=False)
        self._assert_status_code_is(response, 400)
        err_msg = response.json()['err_msg']
        assert err_msg == "Requested extension 'xxx' unknown, cannot upload dataset."

    def _fetch_single_url_to_folder(self, file_type='auto', assert_ok=True):
        """Fetch FILE_URL into a fresh library folder; return (library, fetch response)."""
        history_id, library, destination = self._setup_fetch_to_folder("single_url")
        targets = [{
            "destination": destination,
            "items": [{
                "src": "url",
                "url": FILE_URL,
                "MD5": FILE_MD5,
                "ext": file_type,
            }],
        }]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
            "validate_hashes": True
        }
        fetch_response = self.dataset_populator.fetch(payload, assert_ok=assert_ok)
        return library, fetch_response

    def test_legacy_upload_unknown_datatype(self):
        """The legacy contents upload rejects unknown file types with a 400."""
        library = self.library_populator.new_private_library("ForLegacyUpload")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        payload = {
            'folder_id': folder_id,
            'create_type': 'file',
            'file_type': 'xxx',
            'upload_option': 'upload_file',
            'files_0|url_paste': FILE_URL,
        }
        create_response = self._post(f"libraries/{library['id']}/contents", payload)
        self._assert_status_code_is(create_response, 400)
        error = create_response.json()
        assert error == "Requested extension 'xxx' unknown, cannot upload dataset."

    def test_fetch_failed_validation(self):
        """Fetch with a failing hash validation: the job ends 'ok' but the dataset errors."""
        # Exception handling is really rough here - we should be creating a dataset in error instead
        # of just failing the job like this.
        history_id, library, destination = self._setup_fetch_to_folder(
            "single_url")
        # NOTE(review): the MD5 here presumably does not match the file's real
        # checksum, triggering the validation failure — confirm against test data.
        items = [{
            "src": "url",
            "url":
            "https://raw.githubusercontent.com/galaxyproject/galaxy/dev/test-data/4.bed",
            "MD5": "37b59762b59fff860460522d271bc112",
            "name": "4.bed",
        }]
        targets = [{
            "destination": destination,
            "items": items,
        }]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
            "validate_hashes": True
        }
        tool_response = self.dataset_populator.fetch(payload, assert_ok=False)
        job = self.dataset_populator.check_run(tool_response)
        self.dataset_populator.wait_for_job(job["id"])

        # The job itself completes "ok"...
        job = tool_response.json()["jobs"][0]
        details = self.dataset_populator.get_job_details(job["id"]).json()
        assert details["state"] == "ok", details

        # ...but the resulting library dataset is in the error state.
        dataset = self.library_populator.get_library_contents_with_path(
            library["id"], "/4.bed")
        assert dataset["state"] == "error", dataset

    def test_fetch_url_archive_to_folder(self):
        """The fetch API can download and expand a remote zip into a folder."""
        history_id, library, destination = self._setup_fetch_to_folder("single_url")
        targets = [{
            "destination": destination,
            "items_from": "archive",
            "src": "url",
            "url": "https://raw.githubusercontent.com/galaxyproject/galaxy/dev/test-data/4.bed.zip",
        }]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
        }
        self.dataset_populator.fetch(payload)
        dataset = self.library_populator.get_library_contents_with_path(library["id"],
                                                                        "/4.bed")
        assert dataset["file_size"] == 61, dataset

    @unittest.skip  # reference URLs changed, checksums now invalid.
    def test_fetch_bagit_archive_to_folder(self):
        """The fetch API can expand a BagIt archive into a library folder."""
        history_id, library, destination = self._setup_fetch_to_folder(
            "bagit_archive")
        example_bag_path = self.test_data_resolver.get_filename(
            "example-bag.zip")
        targets = [{
            "destination": destination,
            "items_from": "bagit_archive",
            "src": "files",
        }]
        # Context manager closes the bag archive handle; the original leaked it.
        with open(example_bag_path, 'rb') as fh:
            payload = {
                "history_id": history_id,  # TODO: Shouldn't be needed :(
                "targets": json.dumps(targets),
                "__files": {"files_0|file_data": fh},
            }
            self.dataset_populator.fetch(payload)
        dataset = self.library_populator.get_library_contents_with_path(
            library["id"], "/README.txt")
        assert dataset["file_size"] == 66, dataset

        dataset = self.library_populator.get_library_contents_with_path(
            library["id"], "/bdbag-profile.json")
        assert dataset["file_size"] == 723, dataset

    def _setup_fetch_to_folder(self, test_name):
        """Delegate to the library populator; callers unpack (history_id, library, destination)."""
        result = self.library_populator.setup_fetch_to_folder(test_name)
        return result

    def test_create_dataset_in_folder(self):
        """An HDA can be copied into a library folder via folders/<id>/contents."""
        library = self.library_populator.new_private_library("ForCreateDatasets")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']

        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']

        create_response = self._post(f"folders/{folder_id}/contents",
                                     {'from_hda_id': hda_id})
        self._assert_status_code_is(create_response, 200)
        self._assert_has_keys(create_response.json(), "name", "id")

    def test_create_dataset_in_subfolder(self):
        """A dataset created in a subfolder also bumps the parent folder's update_time."""
        library = self.library_populator.new_private_library(
            "ForCreateDatasets")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        subfolder_response = self._create_subfolder(folder_id)
        # Bug fix: the original re-asserted folder_response here, so the
        # subfolder creation status was never actually verified (it also left
        # a debug print of the response body).
        self._assert_status_code_is(subfolder_response, 200)
        subfolder_id = subfolder_response.json()['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id,
                                                    content="1 2 3 sub")['id']
        payload = {'from_hda_id': hda_id}
        create_response = self._post("folders/%s/contents" % subfolder_id,
                                     payload)
        self._assert_status_code_is(create_response, 200)
        self._assert_has_keys(create_response.json(), "name", "id")
        dataset_update_time = create_response.json()['update_time']
        container_fetch_response = self.galaxy_interactor.get(
            "folders/%s/contents" % folder_id)
        container_update_time = container_fetch_response.json(
        )['folder_contents'][0]['update_time']
        assert dataset_update_time == container_update_time, container_fetch_response

    def test_update_dataset_in_folder(self):
        """PATCH on a library dataset updates its metadata fields."""
        ld = self._create_dataset_in_folder_in_library("ForUpdateDataset")
        changes = {
            'name': 'updated_name',
            'file_ext': 'fastq',
            'misc_info': 'updated_info',
            'genome_build': 'updated_genome_build'
        }
        patch_response = self._patch(f"libraries/datasets/{ld.json()['id']}",
                                     data=changes)
        self._assert_status_code_is(patch_response, 200)
        self._assert_has_keys(patch_response.json(), "name", "file_ext",
                              "misc_info", "genome_build")

    def test_update_dataset_tags(self):
        """Tags set via PATCH are normalized into name-prefixed form."""
        ld = self._create_dataset_in_folder_in_library("ForTagtestDataset")
        tags_payload = {"tags": ["#Lancelot", "name:Holy Grail", "blue"]}
        patch_response = self._patch(f"libraries/datasets/{ld.json()['id']}",
                                     data=tags_payload)
        self._assert_status_code_is(patch_response, 200)
        patched = patch_response.json()
        self._assert_has_keys(patched, "tags")
        assert patched["tags"] == "name:Lancelot, name:HolyGrail, blue"

    def test_invalid_update_dataset_in_folder(self):
        """PATCH with an unknown datatype is rejected with a 400."""
        ld = self._create_dataset_in_folder_in_library("ForInvalidUpdateDataset")
        patch_response = self._patch(f"libraries/datasets/{ld.json()['id']}",
                                     data={'file_ext': 'nonexisting_type'})
        self._assert_status_code_is(patch_response, 400)
        err_msg = patch_response.json()['err_msg']
        assert 'This Galaxy does not recognize the datatype of:' in err_msg

    def test_detect_datatype_of_dataset_in_folder(self):
        """file_ext can be forced to 'data' and then re-detected via 'auto'."""
        ld = self._create_dataset_in_folder_in_library("ForDetectDataset")
        dataset_url = f"libraries/datasets/{ld.json()['id']}"

        # Wait for metadata job to finish.
        time.sleep(2)
        forced_response = self._patch(dataset_url, data={'file_ext': 'data'})
        self._assert_status_code_is(forced_response, 200)
        self._assert_has_keys(forced_response.json(), "file_ext")
        assert forced_response.json()["file_ext"] == "data"

        # Wait for metadata job to finish.
        time.sleep(2)
        detected_response = self._patch(dataset_url, data={'file_ext': 'auto'})
        self._assert_status_code_is(detected_response, 200)
        self._assert_has_keys(detected_response.json(), "file_ext")
        assert detected_response.json()["file_ext"] == "txt"

    def test_ldda_collection_import_to_history(self):
        # Import with the source library datasets left visible in the history.
        self._import_to_history(visible=True)

    def test_ldda_collection_import_to_history_hide_source(self):
        # Import with the source library datasets hidden in the history.
        self._import_to_history(visible=False)

    def test_import_paired_collection(self):
        """A list:paired collection can be built in a history from library datasets."""
        ld = self._create_dataset_in_folder_in_library("ForHistoryImport").json()
        history_id = self.dataset_populator.new_history()
        url = f"histories/{history_id}/contents"
        collection_name = 'Paired-end data (from library)'
        # One pair whose forward and reverse both point at the same LDDA.
        element_identifiers = [{
            'src': 'new_collection',
            'name': 'pair1',
            'collection_type': 'paired',
            'element_identifiers': [
                {'name': 'forward', 'src': 'ldda', 'id': ld['id']},
                {'name': 'reverse', 'src': 'ldda', 'id': ld['id']},
            ],
        }]
        payload = {
            'name': collection_name,
            'collection_type': 'list:paired',
            "type": "dataset_collection",
            'element_identifiers': json.dumps(element_identifiers),
        }
        new_collection = self._post(url, payload).json()
        assert new_collection['name'] == collection_name
        pair = new_collection['elements'][0]
        assert pair['element_identifier'] == 'pair1'
        forward = pair['object']['elements'][0]['object']
        assert forward['history_id'] == history_id

    def _import_to_history(self, visible=True):
        """Import an LDDA into a new history as a one-element list collection.

        Asserts the element identifier round-trips and that the copied dataset's
        visibility matches ``visible`` (hide_source_items is its inverse).
        """
        ld = self._create_dataset_in_folder_in_library("ForHistoryImport").json()
        history_id = self.dataset_populator.new_history()
        url = f"histories/{history_id}/contents"
        collection_name = 'new_collection_name'
        element_identifier = 'new_element_identifier'
        payload = {
            "collection_type": "list",
            "history_content_type": "dataset_collection",
            "model_class": "HistoryDatasetCollectionAssociation",
            "history_id": history_id,
            "name": collection_name,
            "hide_source_items": not visible,
            "element_identifiers": json.dumps([{
                "id": ld['id'],
                "name": element_identifier,
                "src": "ldda"
            }]),
            "type": "dataset_collection",
            "elements": []
        }
        new_collection = self._post(url, payload).json()
        assert new_collection['name'] == collection_name
        assert new_collection['element_count'] == 1
        element = new_collection['elements'][0]
        assert element['element_identifier'] == element_identifier
        assert element['object']['visible'] == visible

    def test_create_datasets_in_library_from_collection(self):
        """A flat HDCA can be copied into a library via libraries/<id>/contents."""
        library = self.library_populator.new_private_library(
            "ForCreateDatasetsFromCollection")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']

        history_id = self.dataset_populator.new_history()
        hdca_id = self.dataset_collection_populator.create_list_in_history(
            history_id, contents=["xxx", "yyy"],
            direct_upload=True).json()["outputs"][0]["id"]

        payload = {
            'from_hdca_id': hdca_id,
            'create_type': 'file',
            'folder_id': folder_id
        }
        create_response = self._post(f"libraries/{library['id']}/contents", payload)
        self._assert_status_code_is(create_response, 200)

    def test_create_datasets_in_folder_from_collection(self):
        """A flat HDCA copies into a folder; nested collections are refused (501)."""
        library = self.library_populator.new_private_library(
            "ForCreateDatasetsFromCollection")
        history_id = self.dataset_populator.new_history()
        hdca_id = self.dataset_collection_populator.create_list_in_history(
            history_id, contents=["xxx", "yyy"],
            direct_upload=True).json()["outputs"][0]["id"]
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']

        create_response = self._post(f"folders/{folder_id}/contents",
                                     {'from_hdca_id': hdca_id})
        self._assert_status_code_is(create_response, 200)
        assert len(create_response.json()) == 2

        # Also test that anything different from a flat dataset collection list
        # is refused
        hdca_pair_id = self.dataset_collection_populator.create_list_of_pairs_in_history(
            history_id).json()["outputs"][0]['id']
        nested_response = self._post(f"folders/{folder_id}/contents",
                                     {'from_hdca_id': hdca_pair_id})
        self._assert_status_code_is(nested_response, 501)
        err_msg = nested_response.json()['err_msg']
        assert err_msg == 'Cannot add nested collections to library. Please flatten your collection first.'

    def _create_folder(self, library):
        """Create a folder named "New Folder" under the library's root folder."""
        payload = dict(
            folder_id=library["root_folder_id"],
            create_type="folder",
            name="New Folder",
        )
        return self._post(f"libraries/{library['id']}/contents", data=payload)

    def _create_subfolder(self, containing_folder_id):
        """Create "New Subfolder" inside the given folder; return the raw response."""
        payload = dict(
            description="new subfolder desc",
            name="New Subfolder",
        )
        return self._post(f"folders/{containing_folder_id}", data=payload)

    def _create_dataset_in_folder_in_library(self, library_name):
        """Create a private library, a folder in it, and a dataset in that folder.

        Returns the raw POST response of the library-dataset creation.
        """
        library = self.library_populator.new_private_library(library_name)
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id,
                                                    content="1 2 3")['id']
        payload = {
            'from_hda_id': hda_id,
            'create_type': 'file',
            'folder_id': folder_id
        }
        # NOTE(review): this posts to "libraries/<folder_id>/contents" whereas
        # other calls in this class put the *library* id in that position —
        # verify this is intentional (folder_id is also passed in the payload).
        ld = self._post("libraries/%s/contents" % folder_id, payload)
        return ld
Example No. 7
0
 def setUp(self):
     """Create a library populator and a shared test library."""
     super().setUp()
     self.library_populator = LibraryPopulator(self.galaxy_interactor)
     self.library = self.library_populator.new_library("FolderTestsLibrary")
Example No. 8
0
class FoldersApiTestCase(ApiTestCase):
    """API tests for library folders: creation, permissions, update, delete/undelete."""

    def setUp(self):
        super().setUp()
        self.library_populator = LibraryPopulator(self.galaxy_interactor)
        # All folders in these tests are created under this shared library.
        self.library = self.library_populator.new_library("FolderTestsLibrary")

    def test_create(self):
        """A folder can be created under the library's root folder."""
        folder = self._create_folder("Test Create Folder")
        self._assert_valid_folder(folder)

    def test_create_without_name_raises_400(self):
        """Folder creation without a name is rejected with a 400."""
        root_folder_id = self.library["root_folder_id"]
        data = {
            "description": "Description only",
        }
        create_response = self._post(f"folders/{root_folder_id}",
                                     data=data,
                                     admin=True)
        self._assert_status_code_is(create_response, 400)

    def test_permissions(self):
        """Folder permissions start empty and can be set for a role."""
        folder = self._create_folder("Test Permissions Folder")
        folder_id = folder["id"]

        empty_permissions = self._get_permissions(folder_id)
        self._assert_permissions_empty(empty_permissions)

        role_id = self.library_populator.user_private_role_id()
        action = "set_permissions"
        data = {
            "add_ids[]": [role_id],
            "manage_ids[]": role_id,  # string-lists also supported
            "modify_ids[]": [role_id]
        }
        response = self._post(
            f"folders/{folder_id}/permissions?action={action}",
            data=data,
            admin=True)
        self._assert_status_code_is(response, 200)
        new_permissions = response.json()

        # The GET endpoint must report exactly what the POST returned.
        permissions = self._get_permissions(folder_id)
        assert permissions == new_permissions
        self._assert_permissions_contains_role(permissions, role_id)

    def test_update(self):
        """PATCH can change a folder's name and description."""
        folder = self._create_folder("Test Update Folder")
        folder_id = folder["id"]
        updated_name = "UPDATED"
        updated_desc = "UPDATED DESCRIPTION"
        data = {
            "name": updated_name,
            "description": updated_desc,
        }
        patch_response = self._patch(f"folders/{folder_id}",
                                     data=data,
                                     admin=True)
        self._assert_status_code_is(patch_response, 200)
        updated_folder = patch_response.json()
        self._assert_valid_folder(updated_folder)
        assert updated_folder["name"] == updated_name
        assert updated_folder["description"] == updated_desc

    def test_delete(self):
        """DELETE marks a folder as deleted."""
        folder = self._create_folder("Test Delete Folder")
        folder_id = folder["id"]

        deleted_folder = self._delete_folder(folder_id)
        assert deleted_folder["deleted"] is True

    def test_undelete(self):
        """A deleted folder can be restored via DELETE with ?undelete=True."""
        folder = self._create_folder("Test Undelete Folder")
        folder_id = folder["id"]

        deleted_folder = self._delete_folder(folder_id)
        assert deleted_folder["deleted"] is True

        undelete = True
        undelete_response = self._delete(
            f"folders/{folder_id}?undelete={undelete}", admin=True)
        self._assert_status_code_is(undelete_response, 200)
        undeleted_folder = undelete_response.json()
        assert undeleted_folder["deleted"] is False

    def test_update_deleted_raise_403(self):
        """PATCH on a deleted folder is forbidden (403)."""
        folder = self._create_folder("Test Update Deleted Folder")
        folder_id = folder["id"]

        deleted_folder = self._delete_folder(folder_id)
        assert deleted_folder["deleted"] is True

        data = {
            "name": "test",
        }
        patch_response = self._patch(f"folders/{folder_id}",
                                     data=data,
                                     admin=True)
        self._assert_status_code_is(patch_response, 403)

    def _create_folder(self, name: str):
        """Create a folder with the given name under the library root; return its JSON."""
        root_folder_id = self.library["root_folder_id"]
        data = {
            "name": name,
            "description": f"The description of {name}",
        }
        create_response = self._post(f"folders/{root_folder_id}",
                                     data=data,
                                     admin=True)
        self._assert_status_code_is(create_response, 200)
        folder = create_response.json()
        return folder

    def _delete_folder(self, folder_id):
        """DELETE the folder and return the deleted folder's JSON."""
        delete_response = self._delete(f"folders/{folder_id}", admin=True)
        self._assert_status_code_is(delete_response, 200)
        deleted_folder = delete_response.json()
        return deleted_folder

    def _get_permissions(self, folder_id):
        """Fetch and validate the folder's current permissions payload."""
        response = self._get(f"folders/{folder_id}/permissions", admin=True)
        self._assert_status_code_is(response, 200)
        permissions = response.json()
        self._assert_valid_permissions(permissions)
        return permissions

    def _assert_valid_folder(self, folder):
        # Keys every serialized folder is expected to carry.
        self._assert_has_keys(folder, "id", "name", "model_class", "parent_id",
                              "item_count", "genome_build", "update_time",
                              "deleted", "library_path", "parent_library_id")

    def _assert_valid_permissions(self, permissions):
        self._assert_has_keys(
            permissions,
            "modify_folder_role_list",
            "manage_folder_role_list",
            "add_library_item_role_list",
        )

    def _assert_permissions_empty(self, permissions):
        assert permissions["modify_folder_role_list"] == []
        assert permissions["manage_folder_role_list"] == []
        assert permissions["add_library_item_role_list"] == []

    def _assert_permissions_contains_role(self, permissions, role_id):
        # NOTE(review): the [0] indexing suggests each role list holds a single
        # entry that itself contains the role id (e.g. an (name, id) pair) —
        # confirm against the permissions serializer.
        assert role_id in permissions["modify_folder_role_list"][0]
        assert role_id in permissions["manage_folder_role_list"][0]
        assert role_id in permissions["add_library_item_role_list"][0]
Example No. 9
0
class HistoryContentsApiTestCase(ApiTestCase):
    """API tests for history contents endpoints: dataset/collection listing,
    show, copy, permissions, updates, deletion/purge, jobs summaries and the
    ``near`` update-time queries.

    Fixes relative to the previous revision:
    - ``test_update`` no longer repeats the identical name-update block and now
      asserts the status code of each update it performs.
    - ``test_dataset_collection_create_from_exisiting_datasets_with_new_tags``
      posts the new collection to the context-managed ``history_id`` (it
      previously posted to ``self.history_id`` while the payload and all
      follow-up assertions referenced ``history_id``).
    - ``test_history_contents_near_with_update_time`` no longer accidentally
      rebinds ``first_time`` via a chained assignment.
    """

    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)
        self.history_id = self.dataset_populator.new_history()

    def test_index_hda_summary(self):
        hda1 = self.dataset_populator.new_dataset(self.history_id)
        contents_response = self._get(f"histories/{self.history_id}/contents")
        hda_summary = self.__check_for_hda(contents_response, hda1)
        assert "display_types" not in hda_summary  # Quick summary, not full details

    def test_make_private_and_public(self):
        hda1 = self._wait_for_new_hda()
        update_url = f"histories/{self.history_id}/contents/{hda1['id']}/permissions"

        role_id = self.dataset_populator.user_private_role_id()
        # Give manage permission to the user.
        payload = {
            "access": [],
            "manage": [role_id],
        }
        update_response = self._update_permissions(update_url, payload, admin=True)
        self._assert_status_code_is(update_response, 200)
        self._assert_other_user_can_access(hda1["id"])
        # Then we restrict access.
        payload = {
            "action": "make_private",
        }
        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)
        self._assert_other_user_cannot_access(hda1["id"])

        # Then we remove the restrictions again.
        payload = {
            "action": "remove_restrictions",
        }
        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)
        self._assert_other_user_can_access(hda1["id"])

    def test_set_permissions_add_admin_history_contents(self):
        self._verify_dataset_permissions("history_contents")

    def test_set_permissions_add_admin_datasets(self):
        self._verify_dataset_permissions("dataset")

    def _verify_dataset_permissions(self, api_endpoint):
        """Exercise the permissions update endpoint (history contents or datasets
        flavor) and verify access/manage semantics for other users."""
        hda1 = self._wait_for_new_hda()
        hda_id = hda1["id"]
        if api_endpoint == "history_contents":
            update_url = f"histories/{self.history_id}/contents/{hda_id}/permissions"
        else:
            update_url = f"datasets/{hda_id}/permissions"

        role_id = self.dataset_populator.user_private_role_id()

        payload = {
            "access": [role_id],
            "manage": [role_id],
        }

        # Other users cannot modify permissions.
        with self._different_user():
            update_response = self._update_permissions(update_url, payload)
            self._assert_status_code_is(update_response, 403)

        # First the details render for another user.
        self._assert_other_user_can_access(hda_id)

        # Then we restrict access.
        update_response = self._update_permissions(update_url, payload, admin=True)
        self._assert_status_code_is(update_response, 200)

        # Finally the details don't render.
        self._assert_other_user_cannot_access(hda_id)

        # But they do for the original user.
        contents_response = self._get(f"histories/{self.history_id}/contents/{hda_id}").json()
        assert "name" in contents_response

        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)

        payload = {
            "access": [role_id],
            "manage": [role_id],
        }
        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)
        self._assert_other_user_cannot_access(hda_id)

        user_id = self.dataset_populator.user_id()
        with self._different_user():
            different_user_id = self.dataset_populator.user_id()
        combined_user_role = self.dataset_populator.create_role(
            [user_id, different_user_id],
            description="role for testing permissions")

        payload = {
            "access": [combined_user_role["id"]],
            "manage": [role_id],
        }
        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)
        # Now other user can see dataset again with access permission.
        self._assert_other_user_can_access(hda_id)
        # access doesn't imply management though...
        with self._different_user():
            update_response = self._update_permissions(update_url, payload)
            self._assert_status_code_is(update_response, 403)

    def _assert_other_user_cannot_access(self, history_content_id):
        with self._different_user():
            contents_response = self.dataset_populator.get_history_dataset_details_raw(
                history_id=self.history_id, dataset_id=history_content_id)
            assert contents_response.status_code == 403

    def _assert_other_user_can_access(self, history_content_id):
        with self._different_user():
            contents_response = self.dataset_populator.get_history_dataset_details_raw(
                history_id=self.history_id, dataset_id=history_content_id)
            contents_response.raise_for_status()
            assert "name" in contents_response.json()

    def test_index_hda_all_details(self):
        hda1 = self.dataset_populator.new_dataset(self.history_id)
        contents_response = self._get(f"histories/{self.history_id}/contents?details=all")
        hda_details = self.__check_for_hda(contents_response, hda1)
        self.__assert_hda_has_full_details(hda_details)

    def test_index_hda_detail_by_id(self):
        hda1 = self.dataset_populator.new_dataset(self.history_id)
        contents_response = self._get(
            f"histories/{self.history_id}/contents?details={hda1['id']}")
        hda_details = self.__check_for_hda(contents_response, hda1)
        self.__assert_hda_has_full_details(hda_details)

    def test_show_hda(self):
        hda1 = self.dataset_populator.new_dataset(self.history_id)
        show_response = self.__show(hda1)
        self._assert_status_code_is(show_response, 200)
        self.__assert_matches_hda(hda1, show_response.json())

    def test_hda_copy(self):
        hda1 = self.dataset_populator.new_dataset(self.history_id)
        create_data = dict(
            source='hda',
            content=hda1["id"],
        )
        second_history_id = self.dataset_populator.new_history()
        assert self.__count_contents(second_history_id) == 0
        create_response = self._post(f"histories/{second_history_id}/contents", create_data)
        self._assert_status_code_is(create_response, 200)
        assert self.__count_contents(second_history_id) == 1

    def test_library_copy(self):
        ld = self.library_populator.new_library_dataset("lda_test_library")
        create_data = dict(
            source='library',
            content=ld["id"],
        )
        assert self.__count_contents(self.history_id) == 0
        create_response = self._post(f"histories/{self.history_id}/contents", create_data)
        self._assert_status_code_is(create_response, 200)
        assert self.__count_contents(self.history_id) == 1

    def test_update(self):
        hda1 = self._wait_for_new_hda()
        assert str(hda1["deleted"]).lower() == "false"
        update_response = self._raw_update(hda1["id"], dict(deleted=True))
        self._assert_status_code_is(update_response, 200)
        show_response = self.__show(hda1)
        assert str(show_response.json()["deleted"]).lower() == "true"

        update_response = self._raw_update(hda1["id"], dict(name="Updated Name"))
        self._assert_status_code_is(update_response, 200)
        assert self.__show(hda1).json()["name"] == "Updated Name"

        unicode_name = 'ржевский сапоги'
        update_response = self._raw_update(hda1["id"], dict(name=unicode_name))
        self._assert_status_code_is(update_response, 200)
        updated_hda = self.__show(hda1).json()
        assert updated_hda["name"] == unicode_name, updated_hda

        quoted_name = '"Mooo"'
        update_response = self._raw_update(hda1["id"], dict(name=quoted_name))
        self._assert_status_code_is(update_response, 200)
        updated_hda = self.__show(hda1).json()
        assert updated_hda["name"] == quoted_name, updated_hda

        data = {
            "dataset_id": hda1["id"],
            "name": "moocow",
            "dbkey": "?",
            "annotation": None,
            "info": "my info is",
            "operation": "attributes"
        }
        update_response = self._set_edit_update(data)
        # No key or anything supplied, expect a permission problem.
        # A bit questionable but I think this is a 400 instead of a 403 so that
        # we don't distinguish between this is a valid ID you don't have access to
        # and this is an invalid ID.
        assert update_response.status_code == 400, update_response.content

    def test_update_batch(self):
        hda1 = self._wait_for_new_hda()
        assert str(hda1["deleted"]).lower() == "false"
        payload = dict(
            items=[{
                "history_content_type": "dataset",
                "id": hda1["id"]
            }],
            deleted=True,
        )
        update_response = self._raw_update_batch(payload)
        objects = update_response.json()
        assert objects[0]["deleted"]

    def test_update_type_failures(self):
        hda1 = self._wait_for_new_hda()
        update_response = self._raw_update(hda1["id"], dict(deleted='not valid'))
        self._assert_status_code_is(update_response, 400)

    def _wait_for_new_hda(self):
        """Create a dataset in the test history and wait for it to finish."""
        hda1 = self.dataset_populator.new_dataset(self.history_id)
        self.dataset_populator.wait_for_history(self.history_id)
        return hda1

    def _set_edit_update(self, json):
        set_edit_url = f"{self.url}/dataset/set_edit"
        update_response = put(set_edit_url, json=json)
        return update_response

    def _raw_update(self, item_id, data, admin=False, history_id=None):
        """PUT an update to a history content item, optionally with the admin key
        and against an explicit history."""
        history_id = history_id or self.history_id
        key_param = "use_admin_key" if admin else "use_key"
        update_url = self._api_url(
            f"histories/{history_id}/contents/{item_id}", **{key_param: True})
        update_response = put(update_url, json=data)
        return update_response

    def _update_permissions(self, url, data, admin=False):
        key_param = "use_admin_key" if admin else "use_key"
        update_url = self._api_url(url, **{key_param: True})
        update_response = put(update_url, json=data)
        return update_response

    def _raw_update_batch(self, data):
        update_url = self._api_url(f"histories/{self.history_id}/contents", use_key=True)
        update_response = put(update_url, json=data)
        return update_response

    def test_delete(self):
        hda1 = self.dataset_populator.new_dataset(self.history_id)
        self.dataset_populator.wait_for_history(self.history_id)
        assert str(self.__show(hda1).json()["deleted"]).lower() == "false"
        delete_response = self._delete(
            f"histories/{self.history_id}/contents/{hda1['id']}")
        assert delete_response.status_code < 300  # Something in the 200s :).
        assert str(self.__show(hda1).json()["deleted"]).lower() == "true"

    def test_purge(self):
        hda1 = self.dataset_populator.new_dataset(self.history_id)
        self.dataset_populator.wait_for_history(self.history_id)
        assert str(self.__show(hda1).json()["deleted"]).lower() == "false"
        assert str(self.__show(hda1).json()["purged"]).lower() == "false"
        data = {'purge': True}
        delete_response = self._delete(
            f"histories/{self.history_id}/contents/{hda1['id']}", data=data)
        assert delete_response.status_code < 300  # Something in the 200s :).
        assert str(self.__show(hda1).json()["deleted"]).lower() == "true"
        assert str(self.__show(hda1).json()["purged"]).lower() == "true"

    def test_dataset_collection_creation_on_contents(self):
        payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id, type="dataset_collection")
        endpoint = f"histories/{self.history_id}/contents"
        self._check_pair_creation(endpoint, payload)

    def test_dataset_collection_creation_on_typed_contents(self):
        payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id)
        endpoint = f"histories/{self.history_id}/contents/dataset_collections"
        self._check_pair_creation(endpoint, payload)

    def test_dataset_collection_create_from_exisiting_datasets_with_new_tags(self):
        with self.dataset_populator.test_history() as history_id:
            hda_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
            hda2_id = self.dataset_populator.new_dataset(history_id, content="1 2 3")['id']
            update_response = self._raw_update(
                hda2_id, dict(tags=['existing:tag']), history_id=history_id).json()
            assert update_response['tags'] == ['existing:tag']
            creation_payload = {
                'collection_type': 'list',
                'history_id': history_id,
                'element_identifiers': json.dumps([{
                    'id': hda_id,
                    'src': 'hda',
                    'name': 'element_id1',
                    'tags': ['my_new_tag']
                }, {
                    'id': hda2_id,
                    'src': 'hda',
                    'name': 'element_id2',
                    'tags': ['another_new_tag']
                }]),
                'type': 'dataset_collection',
                'copy_elements': True,
            }
            # Post to the context-managed history (previously posted to
            # self.history_id, which disagreed with the payload's history_id).
            r = self._post(f"histories/{history_id}/contents", creation_payload).json()
            assert r['elements'][0]['object']['id'] != hda_id, "HDA has not been copied"
            assert len(r['elements'][0]['object']['tags']) == 1
            assert r['elements'][0]['object']['tags'][0] == 'my_new_tag'
            assert len(r['elements'][1]['object']['tags']) == 2, r['elements'][1]['object']['tags']
            original_hda = self.dataset_populator.get_history_dataset_details(
                history_id=history_id, dataset_id=hda_id)
            assert len(original_hda['tags']) == 0, original_hda['tags']

    def _check_pair_creation(self, endpoint, payload):
        """Create a pair collection via `endpoint` and verify listing counts,
        show, and deletion of the new collection."""
        pre_collection_count = self.__count_contents(type="dataset_collection")
        pre_dataset_count = self.__count_contents(type="dataset")
        pre_combined_count = self.__count_contents(type="dataset,dataset_collection")

        dataset_collection_response = self._post(endpoint, payload)

        dataset_collection = self.__check_create_collection_response(
            dataset_collection_response)

        post_collection_count = self.__count_contents(type="dataset_collection")
        post_dataset_count = self.__count_contents(type="dataset")
        post_combined_count = self.__count_contents(type="dataset,dataset_collection")

        # Test filtering types with index.
        assert pre_collection_count == 0
        assert post_collection_count == 1
        assert post_combined_count == pre_dataset_count + 1
        assert post_combined_count == pre_combined_count + 1
        assert pre_dataset_count == post_dataset_count

        # Test show dataset collection.
        collection_url = f"histories/{self.history_id}/contents/dataset_collections/{dataset_collection['id']}"
        show_response = self._get(collection_url)
        self._assert_status_code_is(show_response, 200)
        dataset_collection = show_response.json()
        self._assert_has_keys(dataset_collection, "url", "name", "deleted")

        assert not dataset_collection["deleted"]

        delete_response = delete(self._api_url(collection_url, use_key=True))
        self._assert_status_code_is(delete_response, 200)

        show_response = self._get(collection_url)
        dataset_collection = show_response.json()
        assert dataset_collection["deleted"]

    @skip_without_tool("collection_creates_list")
    def test_jobs_summary_simple_hdca(self):
        create_response = self.dataset_collection_populator.create_list_in_history(
            self.history_id, contents=["a\nb\nc\nd", "e\nf\ng\nh"])
        hdca_id = create_response.json()["id"]
        run = self.dataset_populator.run_collection_creates_list(
            self.history_id, hdca_id)
        collections = run['output_collections']
        collection = collections[0]
        jobs_summary_url = f"histories/{self.history_id}/contents/dataset_collections/{collection['id']}/jobs_summary"
        jobs_summary_response = self._get(jobs_summary_url)
        self._assert_status_code_is(jobs_summary_response, 200)
        jobs_summary = jobs_summary_response.json()
        self._assert_has_keys(jobs_summary, "populated_state", "states")

    @skip_without_tool("cat1")
    def test_jobs_summary_implicit_hdca(self):
        create_response = self.dataset_collection_populator.create_pair_in_history(
            self.history_id, contents=["123", "456"])
        hdca_id = create_response.json()["id"]
        inputs = {
            "input1": {
                'batch': True,
                'values': [{
                    'src': 'hdca',
                    'id': hdca_id
                }]
            },
        }
        run = self.dataset_populator.run_tool("cat1", inputs=inputs, history_id=self.history_id)
        self.dataset_populator.wait_for_history_jobs(self.history_id)
        collections = run['implicit_collections']
        collection = collections[0]
        jobs_summary_url = f"histories/{self.history_id}/contents/dataset_collections/{collection['id']}/jobs_summary"
        jobs_summary_response = self._get(jobs_summary_url)
        self._assert_status_code_is(jobs_summary_response, 200)
        jobs_summary = jobs_summary_response.json()
        self._assert_has_keys(jobs_summary, "populated_state", "states")
        states = jobs_summary["states"]
        assert states.get("ok") == 2, states

    def test_dataset_collection_hide_originals(self):
        payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id, type="dataset_collection")

        payload["hide_source_items"] = True
        dataset_collection_response = self._post(
            f"histories/{self.history_id}/contents", payload)
        self.__check_create_collection_response(dataset_collection_response)

        contents_response = self._get(f"histories/{self.history_id}/contents")
        datasets = [
            d for d in contents_response.json()
            if d["history_content_type"] == "dataset" and d["hid"] in [1, 2]
        ]
        # Assert two datasets in source were hidden.
        assert len(datasets) == 2
        assert not datasets[0]["visible"]
        assert not datasets[1]["visible"]

    def test_update_dataset_collection(self):
        payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id, type="dataset_collection")
        dataset_collection_response = self._post(
            f"histories/{self.history_id}/contents", payload)
        self._assert_status_code_is(dataset_collection_response, 200)
        hdca = dataset_collection_response.json()
        update_url = self._api_url(
            f"histories/{self.history_id}/contents/dataset_collections/{hdca['id']}",
            use_key=True)
        # Awkward json.dumps required here because of https://trello.com/c/CQwmCeG6
        body = json.dumps(dict(name="newnameforpair"))
        update_response = put(update_url, data=body)
        self._assert_status_code_is(update_response, 200)
        show_response = self.__show(hdca)
        assert str(show_response.json()["name"]) == "newnameforpair"

    def test_hdca_copy(self):
        hdca = self.dataset_collection_populator.create_pair_in_history(
            self.history_id).json()
        hdca_id = hdca["id"]
        second_history_id = self.dataset_populator.new_history()
        create_data = dict(
            source='hdca',
            content=hdca_id,
        )
        assert len(self._get(
            f"histories/{second_history_id}/contents/dataset_collections").json()) == 0
        create_response = self._post(
            f"histories/{second_history_id}/contents/dataset_collections",
            create_data)
        self.__check_create_collection_response(create_response)
        contents = self._get(
            f"histories/{second_history_id}/contents/dataset_collections").json()
        assert len(contents) == 1
        new_forward, _ = self.__get_paired_response_elements(contents[0])
        self._assert_has_keys(new_forward, "history_id")
        # Without copy_elements, the copied HDCA still references the original
        # datasets that live in the source history.
        assert new_forward["history_id"] == self.history_id

    def test_hdca_copy_with_new_dbkey(self):
        hdca = self.dataset_collection_populator.create_pair_in_history(
            self.history_id).json()
        hdca_id = hdca["id"]
        assert hdca["elements"][0]["object"]["metadata_dbkey"] == "?"
        assert hdca["elements"][0]["object"]["genome_build"] == "?"
        create_data = {'source': 'hdca', 'content': hdca_id, 'dbkey': 'hg19'}
        create_response = self._post(
            f"histories/{self.history_id}/contents/dataset_collections",
            create_data)
        collection = self.__check_create_collection_response(create_response)
        new_forward = collection['elements'][0]['object']
        assert new_forward["metadata_dbkey"] == "hg19"
        assert new_forward["genome_build"] == "hg19"

    def test_hdca_copy_and_elements(self):
        hdca = self.dataset_collection_populator.create_pair_in_history(
            self.history_id).json()
        hdca_id = hdca["id"]
        second_history_id = self.dataset_populator.new_history()
        create_data = dict(
            source='hdca',
            content=hdca_id,
            copy_elements=True,
        )
        assert len(self._get(
            f"histories/{second_history_id}/contents/dataset_collections").json()) == 0
        create_response = self._post(
            f"histories/{second_history_id}/contents/dataset_collections",
            create_data)
        self.__check_create_collection_response(create_response)

        contents = self._get(
            f"histories/{second_history_id}/contents/dataset_collections").json()
        assert len(contents) == 1
        new_forward, _ = self.__get_paired_response_elements(contents[0])
        self._assert_has_keys(new_forward, "history_id")
        # With copy_elements, the datasets are copied into the target history.
        assert new_forward["history_id"] == second_history_id

    def __get_paired_response_elements(self, contents):
        """Show a pair HDCA and return its (forward, reverse) element objects."""
        hdca = self.__show(contents).json()
        self._assert_has_keys(hdca, "name", "deleted", "visible", "elements")
        elements = hdca["elements"]
        assert len(elements) == 2
        element0 = elements[0]
        element1 = elements[1]
        self._assert_has_keys(element0, "object")
        self._assert_has_keys(element1, "object")

        return element0["object"], element1["object"]

    def test_hdca_from_library_datasets(self):
        ld = self.library_populator.new_library_dataset("el1")
        ldda_id = ld["ldda_id"]
        element_identifiers = [{"name": "el1", "src": "ldda", "id": ldda_id}]
        history_id = self.dataset_populator.new_history()
        create_data = dict(
            history_id=history_id,
            type="dataset_collection",
            name="Test From Library",
            element_identifiers=json.dumps(element_identifiers),
            collection_type="list",
        )
        create_response = self._post(
            f"histories/{history_id}/contents/dataset_collections",
            create_data)
        hdca = self.__check_create_collection_response(create_response)
        elements = hdca["elements"]
        assert len(elements) == 1
        hda = elements[0]["object"]
        assert hda["hda_ldda"] == "hda"
        assert hda["history_content_type"] == "dataset"
        assert hda["copied_from_ldda_id"] == ldda_id
        assert hda['history_id'] == history_id

    def test_hdca_from_inaccessible_library_datasets(self):
        library, library_dataset = self.library_populator.new_library_dataset_in_private_library(
            "HDCACreateInaccesibleLibrary")
        ldda_id = library_dataset["id"]
        element_identifiers = [{"name": "el1", "src": "ldda", "id": ldda_id}]
        create_data = dict(
            history_id=self.history_id,
            type="dataset_collection",
            name="Test From Library",
            element_identifiers=json.dumps(element_identifiers),
            collection_type="list",
        )
        with self._different_user():
            second_history_id = self.dataset_populator.new_history()
            create_response = self._post(
                f"histories/{second_history_id}/contents/dataset_collections",
                create_data)
            self._assert_status_code_is(create_response, 403)

    def __check_create_collection_response(self, response):
        """Assert a 200 collection-creation response and return its payload."""
        self._assert_status_code_is(response, 200)
        dataset_collection = response.json()
        self._assert_has_keys(dataset_collection, "url", "name", "deleted",
                              "visible", "elements")
        return dataset_collection

    def __show(self, contents):
        """GET the typed show endpoint for a dataset or dataset collection."""
        show_response = self._get(
            f"histories/{self.history_id}/contents/{contents['history_content_type']}s/{contents['id']}")
        return show_response

    def __count_contents(self, history_id=None, **kwds):
        if history_id is None:
            history_id = self.history_id
        contents_response = self._get(f"histories/{history_id}/contents", kwds)
        return len(contents_response.json())

    def __assert_hda_has_full_details(self, hda_details):
        self._assert_has_keys(hda_details, "display_types", "display_apps")

    def __check_for_hda(self, contents_response, hda):
        """Assert the contents listing holds exactly the given HDA; return its entry."""
        self._assert_status_code_is(contents_response, 200)
        contents = contents_response.json()
        assert len(contents) == 1
        hda_summary = contents[0]
        self.__assert_matches_hda(hda, hda_summary)
        return hda_summary

    def __assert_matches_hda(self, input_hda, query_hda):
        self._assert_has_keys(query_hda, "id", "name")
        assert input_hda["name"] == query_hda["name"]
        assert input_hda["id"] == query_hda["id"]

    def test_job_state_summary_field(self):
        create_response = self.dataset_collection_populator.create_pair_in_history(
            self.history_id, contents=["123", "456"])
        self._assert_status_code_is(create_response, 200)
        contents_response = self._get(
            f"histories/{self.history_id}/contents?v=dev&keys=job_state_summary&view=summary")
        self._assert_status_code_is(contents_response, 200)
        contents = contents_response.json()
        for c in filter(
                lambda c: c['history_content_type'] == 'dataset_collection',
                contents):
            assert isinstance(c, dict)
            assert 'job_state_summary' in c
            assert isinstance(c['job_state_summary'], dict)

    def _get_content(self, history_id, update_time):
        """Return contents updated at or after `update_time` via the `near` endpoint."""
        return self._get(
            f"/api/histories/{history_id}/contents/near/100/100?update_time-ge={update_time}"
        ).json()

    def test_history_contents_near_with_update_time(self):
        with self.dataset_populator.test_history() as history_id:
            first_time = datetime.utcnow().isoformat()
            assert len(self._get_content(history_id, update_time=first_time)) == 0
            self.dataset_collection_populator.create_list_in_history(
                history_id=history_id)
            # 3 datasets plus the collection itself.
            assert len(self._get_content(history_id, update_time=first_time)) == 4
            self.dataset_populator.wait_for_history(history_id)
            all_datasets_finished = datetime.utcnow().isoformat()
            assert len(self._get_content(
                history_id, update_time=all_datasets_finished)) == 0

    @skip_without_tool('cat_data_and_sleep')
    def test_history_contents_near_with_update_time_implicit_collection(self):
        with self.dataset_populator.test_history() as history_id:
            hdca_id = self.dataset_collection_populator.create_list_in_history(
                history_id=history_id).json()['id']
            self.dataset_populator.wait_for_history(history_id)
            inputs = {
                "input1": {
                    'batch': True,
                    'values': [{
                        "src": "hdca",
                        "id": hdca_id
                    }]
                },
                "sleep_time": 2,
            }
            response = self.dataset_populator.run_tool(
                "cat_data_and_sleep",
                inputs,
                history_id,
                assert_ok=False,
            ).json()
            update_time = datetime.utcnow().isoformat()
            collection_id = response['implicit_collections'][0]['id']
            # Poll until the update-time query reports the finished collection.
            for _ in range(20):
                time.sleep(1)
                update = self._get_content(history_id, update_time=update_time)
                if any(c for c in update
                       if c['history_content_type'] == 'dataset_collection'
                       and c['job_state_summary']['ok'] == 3):
                    return
            raise Exception(
                f"History content update time query did not include final update for implicit collection {collection_id}"
            )

    @skip_without_tool('collection_creates_dynamic_nested')
    def test_history_contents_near_with_update_time_explicit_collection(self):
        with self.dataset_populator.test_history() as history_id:
            inputs = {'foo': 'bar', 'sleep_time': 2}
            response = self.dataset_populator.run_tool(
                "collection_creates_dynamic_nested",
                inputs,
                history_id,
                assert_ok=False,
            ).json()
            update_time = datetime.utcnow().isoformat()
            collection_id = response['output_collections'][0]['id']
            # Poll until the update-time query reports the populated collection.
            for _ in range(20):
                time.sleep(1)
                update = self._get_content(history_id, update_time=update_time)
                if any(c for c in update
                       if c['history_content_type'] == 'dataset_collection'
                       and c['populated_state'] == 'ok'):
                    return
            raise Exception(
                f"History content update time query did not include populated_state update for dynamic nested collection {collection_id}"
            )
Exemplo n.º 10
0
 def setUp(self):
     # NOTE(review): fragment with one-space indentation; the enclosing
     # ConfigurationApiTestCase class header is not part of this chunk.
     # Uses the explicit two-argument super() form; presumably predates the
     # py3-only super() cleanup seen elsewhere in this file.
     super(ConfigurationApiTestCase, self).setUp()
     # Library populator used by configuration tests to create test libraries.
     self.library_populator = LibraryPopulator(self.galaxy_interactor)
Exemplo n.º 11
0
class HistoryContentsApiTestCase(ApiTestCase):
    def setUp(self):
        """Create the populators and a fresh history for each test."""
        super().setUp()
        interactor = self.galaxy_interactor
        self.dataset_populator = DatasetPopulator(interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(interactor)
        self.library_populator = LibraryPopulator(interactor)
        self.history_id = self.dataset_populator.new_history()

    def test_index_hda_summary(self):
        """Index without details returns summary objects only."""
        new_hda = self.dataset_populator.new_dataset(self.history_id)
        index_response = self._get(f"histories/{self.history_id}/contents")
        summary = self.__check_for_hda(index_response, new_hda)
        # Detail-only keys must be absent from the quick summary view.
        assert "display_types" not in summary

    def test_make_private_and_public(self):
        """Toggle dataset permissions: manage-only, then private, then open."""
        hda1 = self._wait_for_new_hda()
        update_url = f"histories/{self.history_id}/contents/{hda1['id']}/permissions"

        role_id = self.dataset_populator.user_private_role_id()
        # Give manage permission to the user.
        payload = {
            "access": [],
            "manage": [role_id],
        }
        update_response = self._update_permissions(update_url,
                                                   payload,
                                                   admin=True)
        self._assert_status_code_is(update_response, 200)
        # With no access restrictions the dataset stays visible to others.
        self._assert_other_user_can_access(hda1["id"])
        # Then we restrict access.
        payload = {
            "action": "make_private",
        }
        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)
        self._assert_other_user_cannot_access(hda1["id"])

        # Then we remove the restrictions again.
        payload = {
            "action": "remove_restrictions",
        }
        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)
        self._assert_other_user_can_access(hda1["id"])

    def test_set_permissions_add_admin_history_contents(self):
        # Exercise the shared permission checks via the history-contents endpoint.
        self._verify_dataset_permissions("history_contents")

    def test_set_permissions_add_admin_datasets(self):
        # Exercise the shared permission checks via the datasets endpoint.
        self._verify_dataset_permissions("dataset")

    def _verify_dataset_permissions(self, api_endpoint):
        """Shared permission checks for the history-contents and datasets
        permission endpoints, selected via ``api_endpoint``."""
        hda1 = self._wait_for_new_hda()
        hda_id = hda1["id"]
        if api_endpoint == "history_contents":
            update_url = f"histories/{self.history_id}/contents/{hda_id}/permissions"
        else:
            update_url = f"datasets/{hda_id}/permissions"

        role_id = self.dataset_populator.user_private_role_id()

        payload = {
            "access": [role_id],
            "manage": [role_id],
        }

        # Other users cannot modify permissions.
        with self._different_user():
            update_response = self._update_permissions(update_url, payload)
            self._assert_status_code_is(update_response, 403)

        # First the details render for another user.
        self._assert_other_user_can_access(hda_id)

        # Then we restrict access.
        update_response = self._update_permissions(update_url,
                                                   payload,
                                                   admin=True)
        self._assert_status_code_is(update_response, 200)

        # Finally the details don't render.
        self._assert_other_user_cannot_access(hda_id)

        # But they do for the original user.
        contents_response = self._get(
            f"histories/{self.history_id}/contents/{hda_id}").json()
        assert "name" in contents_response

        # The owner (who now has manage permission) can update permissions
        # without admin privileges.
        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)

        payload = {
            "access": [role_id],
            "manage": [role_id],
        }
        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)
        self._assert_other_user_cannot_access(hda_id)

        user_id = self.dataset_populator.user_id()
        with self._different_user():
            different_user_id = self.dataset_populator.user_id()
        # A role spanning both users should re-grant the other user access.
        combined_user_role = self.dataset_populator.create_role(
            [user_id, different_user_id],
            description="role for testing permissions")

        payload = {
            "access": [combined_user_role["id"]],
            "manage": [role_id],
        }
        update_response = self._update_permissions(update_url, payload)
        self._assert_status_code_is(update_response, 200)
        # Now other user can see dataset again with access permission.
        self._assert_other_user_can_access(hda_id)
        # access doesn't imply management though...
        with self._different_user():
            update_response = self._update_permissions(update_url, payload)
            self._assert_status_code_is(update_response, 403)

    def _assert_other_user_cannot_access(self, history_content_id):
        """Dataset details must come back 403 for a different user."""
        with self._different_user():
            details_response = self.dataset_populator.get_history_dataset_details_raw(
                history_id=self.history_id, dataset_id=history_content_id)
            assert details_response.status_code == 403

    def _assert_other_user_can_access(self, history_content_id):
        """Dataset details must render successfully for a different user."""
        with self._different_user():
            details_response = self.dataset_populator.get_history_dataset_details_raw(
                history_id=self.history_id, dataset_id=history_content_id)
            details_response.raise_for_status()
            assert "name" in details_response.json()

    def test_index_hda_all_details(self):
        """details=all returns fully detailed HDA objects from the index."""
        new_hda = self.dataset_populator.new_dataset(self.history_id)
        index_response = self._get(
            f"histories/{self.history_id}/contents?details=all")
        details = self.__check_for_hda(index_response, new_hda)
        self.__assert_hda_has_full_details(details)

    def test_index_hda_detail_by_id(self):
        """Requesting details for a specific id returns full details."""
        new_hda = self.dataset_populator.new_dataset(self.history_id)
        index_response = self._get(
            f"histories/{self.history_id}/contents?details={new_hda['id']}")
        details = self.__check_for_hda(index_response, new_hda)
        self.__assert_hda_has_full_details(details)

    def test_show_hda(self):
        """Show endpoint returns 200 and matches the created HDA."""
        new_hda = self.dataset_populator.new_dataset(self.history_id)
        show_response = self.__show(new_hda)
        self._assert_status_code_is(show_response, 200)
        self.__assert_matches_hda(new_hda, show_response.json())

    def _create_copy(self):
        """Copy an HDA into a brand-new history and return the new HDA JSON."""
        source_hda = self.dataset_populator.new_dataset(self.history_id)
        target_history_id = self.dataset_populator.new_history()
        assert self.__count_contents(target_history_id) == 0
        copy_payload = {"source": "hda", "content": source_hda["id"]}
        copy_response = self._post(f"histories/{target_history_id}/contents",
                                   copy_payload,
                                   json=True)
        self._assert_status_code_is(copy_response, 200)
        return copy_response.json()

    def test_hda_copy(self):
        # The copied HDA should be the sole item in the destination history.
        response = self._create_copy()
        assert self.__count_contents(response['history_id']) == 1

    def test_inheritance_chain(self):
        """A copied HDA exposes a single-entry inheritance chain."""
        copied_hda = self._create_copy()
        chain_response = self._get(
            f"datasets/{copied_hda['id']}/inheritance_chain")
        self._assert_status_code_is_ok(chain_response)
        assert len(chain_response.json()) == 1

    def test_library_copy(self):
        """Copying a library dataset adds exactly one item to the history."""
        library_dataset = self.library_populator.new_library_dataset("lda_test_library")
        assert self.__count_contents(self.history_id) == 0
        copy_payload = {"source": "library", "content": library_dataset["id"]}
        copy_response = self._post(f"histories/{self.history_id}/contents",
                                   copy_payload,
                                   json=True)
        self._assert_status_code_is(copy_response, 200)
        assert self.__count_contents(self.history_id) == 1

    def test_update(self):
        """Exercise PUT updates on an HDA: the deleted flag, renames (ASCII,
        unicode and quoted names) and the legacy set_edit endpoint."""
        hda1 = self._wait_for_new_hda()
        assert str(hda1["deleted"]).lower() == "false"
        update_response = self._update(hda1["id"], dict(deleted=True))
        self._assert_status_code_is(update_response, 200)
        show_response = self.__show(hda1)
        assert str(show_response.json()["deleted"]).lower() == "true"

        update_response = self._update(hda1["id"], dict(name="Updated Name"))
        self._assert_status_code_is(update_response, 200)
        assert self.__show(hda1).json()["name"] == "Updated Name"

        # Repeating the identical rename must still succeed (no-op update).
        update_response = self._update(hda1["id"], dict(name="Updated Name"))
        self._assert_status_code_is(update_response, 200)
        assert self.__show(hda1).json()["name"] == "Updated Name"

        unicode_name = 'ржевский сапоги'
        update_response = self._update(hda1["id"], dict(name=unicode_name))
        self._assert_status_code_is(update_response, 200)
        updated_hda = self.__show(hda1).json()
        assert updated_hda["name"] == unicode_name, updated_hda

        quoted_name = '"Mooo"'
        update_response = self._update(hda1["id"], dict(name=quoted_name))
        self._assert_status_code_is(update_response, 200)
        updated_hda = self.__show(hda1).json()
        # Report the actual HDA on failure (was the expected name, which
        # made assertion failures useless for debugging).
        assert updated_hda["name"] == quoted_name, updated_hda

        data = {
            "dataset_id": hda1["id"],
            "name": "moocow",
            "dbkey": "?",
            "annotation": None,
            "info": "my info is",
            "operation": "attributes"
        }
        update_response = self._set_edit_update(data)
        # No key or anything supplied, expect a permission problem.
        # A bit questionable but I think this is a 400 instead of a 403 so that
        # we don't distinguish between this is a valid ID you don't have access to
        # and this is an invalid ID.
        assert update_response.status_code == 400, update_response.content

    def test_update_batch(self):
        """Batch-update deleted/visible flags on a single dataset."""
        hda1 = self._wait_for_new_hda()
        assert str(hda1["deleted"]).lower() == "false"
        assert str(hda1["visible"]).lower() == "true"
        item = {"history_content_type": "dataset", "id": hda1["id"]}

        # deleted -> True
        result = self._update_batch({"items": [item], "deleted": True}).json()
        assert result[0]["deleted"] is True
        assert result[0]["visible"] is True

        # visible -> False
        result = self._update_batch({"items": [item], "visible": False}).json()
        assert result[0]["deleted"] is True
        assert result[0]["visible"] is False

        # restore both flags in a single call
        result = self._update_batch(
            {"items": [item], "deleted": False, "visible": True}).json()
        assert result[0]["deleted"] is False
        assert result[0]["visible"] is True

    def test_update_batch_collections(self):
        """Batch-update deleted/visible flags on a dataset collection."""
        hdca = self._create_pair_collection()
        assert hdca["deleted"] is False
        assert hdca["visible"] is True
        item = {"history_content_type": "dataset_collection", "id": hdca["id"]}

        # deleted -> True
        result = self._update_batch({"items": [item], "deleted": True}).json()
        assert result[0]["deleted"] is True
        assert result[0]["visible"] is True

        # visible -> False
        result = self._update_batch({"items": [item], "visible": False}).json()
        assert result[0]["deleted"] is True
        assert result[0]["visible"] is False

        # restore both flags in a single call
        result = self._update_batch(
            {"items": [item], "deleted": False, "visible": True}).json()
        assert result[0]["deleted"] is False
        assert result[0]["visible"] is True

    def test_update_type_failures(self):
        # A non-boolean 'deleted' value must be rejected with a 400.
        hda1 = self._wait_for_new_hda()
        update_response = self._update(hda1["id"], dict(deleted='not valid'))
        self._assert_status_code_is(update_response, 400)

    def _wait_for_new_hda(self):
        """Create a dataset and block until the history has processed it."""
        new_hda = self.dataset_populator.new_dataset(self.history_id)
        self.dataset_populator.wait_for_history(self.history_id)
        return new_hda

    def _set_edit_update(self, data):
        """PUT to the legacy dataset/set_edit controller endpoint."""
        edit_url = urllib.parse.urljoin(self.url, "dataset/set_edit")
        return self._put(edit_url, data=data, json=True)

    def _update(self, item_id, data, admin=False, history_id=None):
        """PUT an update for a single history content item."""
        target_history = history_id or self.history_id
        return self._put(f"histories/{target_history}/contents/{item_id}",
                         data=data,
                         json=True,
                         admin=admin)

    def _update_permissions(self, url, data, admin=False):
        """PUT a permissions payload to ``url``."""
        return self._put(url, data=data, json=True, admin=admin)

    def _update_batch(self, data):
        """PUT a batch update against the history contents endpoint."""
        return self._put(f"histories/{self.history_id}/contents",
                         data=data,
                         json=True)

    def test_delete(self):
        """DELETE marks an HDA deleted (without purging it)."""
        hda1 = self.dataset_populator.new_dataset(self.history_id)
        self.dataset_populator.wait_for_history(self.history_id)

        def shown_deleted():
            return str(self.__show(hda1).json()["deleted"]).lower()

        assert shown_deleted() == "false"
        delete_response = self._delete(
            f"histories/{self.history_id}/contents/{hda1['id']}")
        # Any 2xx status is acceptable for the delete.
        assert delete_response.status_code < 300
        assert shown_deleted() == "true"

    def test_delete_anon(self):
        """An anonymous user can delete datasets in their own history."""
        with self._different_user(anon=True):
            current = self._get(
                urllib.parse.urljoin(self.url, "history/current_history_json"))
            anon_history_id = current.json()['id']
            hda1 = self.dataset_populator.new_dataset(anon_history_id)
            self.dataset_populator.wait_for_history(anon_history_id)
            assert str(self.__show(hda1).json()["deleted"]).lower() == "false"
            delete_response = self._delete(
                f"histories/{anon_history_id}/contents/{hda1['id']}")
            # Any 2xx status is acceptable for the delete.
            assert delete_response.status_code < 300
            assert str(self.__show(hda1).json()["deleted"]).lower() == "true"

    def test_delete_permission_denied(self):
        """An anonymous user may not delete someone else's HDA."""
        hda1 = self.dataset_populator.new_dataset(self.history_id)
        with self._different_user(anon=True):
            delete_response = self._delete(
                f"histories/{self.history_id}/contents/{hda1['id']}")
            assert delete_response.status_code == 403
            error_message = delete_response.json()['err_msg']
            assert error_message == 'HistoryDatasetAssociation is not owned by user'

    def test_purge(self):
        """DELETE with purge=True sets both the deleted and purged flags."""
        hda1 = self.dataset_populator.new_dataset(self.history_id)
        self.dataset_populator.wait_for_history(self.history_id)

        def flag(name):
            return str(self.__show(hda1).json()[name]).lower()

        assert flag("deleted") == "false"
        assert flag("purged") == "false"
        delete_response = self._delete(
            f"histories/{self.history_id}/contents/{hda1['id']}",
            data={'purge': True},
            json=True)
        # Any 2xx status is acceptable for the delete.
        assert delete_response.status_code < 300
        assert flag("deleted") == "true"
        assert flag("purged") == "true"

    def test_dataset_collection_creation_on_contents(self):
        """Pair collections can be created via the generic contents endpoint."""
        payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id, type="dataset_collection")
        self._check_pair_creation(
            f"histories/{self.history_id}/contents", payload)

    def test_dataset_collection_creation_on_typed_contents(self):
        """Pair collections can be created via the typed contents endpoint."""
        payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id)
        self._check_pair_creation(
            f"histories/{self.history_id}/contents/dataset_collections",
            payload)

    def test_dataset_collection_create_from_exisiting_datasets_with_new_tags(
            self):
        """Create a list collection from existing HDAs with copy_elements,
        applying fresh per-element tags without modifying the originals."""
        with self.dataset_populator.test_history() as history_id:
            hda_id = self.dataset_populator.new_dataset(history_id,
                                                        content="1 2 3")['id']
            hda2_id = self.dataset_populator.new_dataset(history_id,
                                                         content="1 2 3")['id']
            update_response = self._update(hda2_id,
                                           dict(tags=['existing:tag']),
                                           history_id=history_id).json()
            assert update_response['tags'] == ['existing:tag']
            creation_payload = {
                'collection_type': 'list',
                'history_id': history_id,
                'element_identifiers': [{
                    'id': hda_id,
                    'src': 'hda',
                    'name': 'element_id1',
                    'tags': ['my_new_tag']
                }, {
                    'id': hda2_id,
                    'src': 'hda',
                    'name': 'element_id2',
                    'tags': ['another_new_tag']
                }],
                'type': 'dataset_collection',
                'copy_elements': True,
            }
            # Fixed: post to the test history the source HDAs live in, not
            # the suite-level self.history_id (the payload already targets
            # history_id, so the endpoint must match).
            r = self._post(f"histories/{history_id}/contents",
                           creation_payload,
                           json=True).json()
            # copy_elements=True means the collection holds new HDAs.
            assert r['elements'][0]['object'][
                'id'] != hda_id, "HDA has not been copied"
            first_tags = r['elements'][0]['object']['tags']
            assert len(first_tags) == 1
            assert first_tags[0] == 'my_new_tag'
            # The copied second element keeps its existing tag and gains one.
            second_tags = r['elements'][1]['object']['tags']
            assert len(second_tags) == 2, second_tags
            # The original HDA must remain untagged.
            original_hda = self.dataset_populator.get_history_dataset_details(
                history_id=history_id, dataset_id=hda_id)
            assert len(original_hda['tags']) == 0, original_hda['tags']

    def _check_pair_creation(self, endpoint, payload):
        """POST ``payload`` to ``endpoint`` and verify index counts plus the
        show/delete behavior of the created pair collection."""
        collections_before = self.__count_contents(type="dataset_collection")
        datasets_before = self.__count_contents(type="dataset")
        combined_before = self.__count_contents(
            type="dataset,dataset_collection")

        create_response = self._post(endpoint, payload, json=True)
        dataset_collection = self.__check_create_collection_response(
            create_response)

        collections_after = self.__count_contents(type="dataset_collection")
        datasets_after = self.__count_contents(type="dataset")
        combined_after = self.__count_contents(
            type="dataset,dataset_collection")

        # Exactly one new collection and no new datasets via index filters.
        assert collections_before == 0
        assert collections_after == 1
        assert combined_after == datasets_before + 1
        assert combined_after == combined_before + 1
        assert datasets_before == datasets_after

        # Test showing the dataset collection.
        collection_url = f"histories/{self.history_id}/contents/dataset_collections/{dataset_collection['id']}"
        show_response = self._get(collection_url)
        self._assert_status_code_is(show_response, 200)
        dataset_collection = show_response.json()
        self._assert_has_keys(dataset_collection, "url", "name", "deleted")
        assert not dataset_collection["deleted"]

        # Deleting flips the flag; the collection still shows afterwards.
        delete_response = self._delete(collection_url)
        self._assert_status_code_is(delete_response, 200)
        assert self._get(collection_url).json()["deleted"]

    @skip_without_tool("collection_creates_list")
    def test_jobs_summary_simple_hdca(self):
        create_response = self.dataset_collection_populator.create_list_in_history(
            self.history_id, contents=["a\nb\nc\nd", "e\nf\ng\nh"])
        hdca_id = create_response.json()["id"]
        run = self.dataset_populator.run_collection_creates_list(
            self.history_id, hdca_id)
        collections = run['output_collections']
        collection = collections[0]
        jobs_summary_url = f"histories/{self.history_id}/contents/dataset_collections/{collection['id']}/jobs_summary"
        jobs_summary_response = self._get(jobs_summary_url)
        self._assert_status_code_is(jobs_summary_response, 200)
        jobs_summary = jobs_summary_response.json()
        self._assert_has_keys(jobs_summary, "populated_state", "states")

    @skip_without_tool("cat1")
    def test_jobs_summary_implicit_hdca(self):
        create_response = self.dataset_collection_populator.create_pair_in_history(
            self.history_id, contents=["123", "456"])
        hdca_id = create_response.json()["id"]
        inputs = {
            "input1": {
                'batch': True,
                'values': [{
                    'src': 'hdca',
                    'id': hdca_id
                }]
            },
        }
        run = self.dataset_populator.run_tool("cat1",
                                              inputs=inputs,
                                              history_id=self.history_id)
        self.dataset_populator.wait_for_history_jobs(self.history_id)
        collections = run['implicit_collections']
        collection = collections[0]
        jobs_summary_url = f"histories/{self.history_id}/contents/dataset_collections/{collection['id']}/jobs_summary"
        jobs_summary_response = self._get(jobs_summary_url)
        self._assert_status_code_is(jobs_summary_response, 200)
        jobs_summary = jobs_summary_response.json()
        self._assert_has_keys(jobs_summary, "populated_state", "states")
        states = jobs_summary["states"]
        assert states.get("ok") == 2, states

    def test_dataset_collection_hide_originals(self):
        """hide_source_items hides the source HDAs of a new collection."""
        payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id, type="dataset_collection")
        payload["hide_source_items"] = True
        create_response = self._post(
            f"histories/{self.history_id}/contents", payload, json=True)
        self.__check_create_collection_response(create_response)

        contents = self._get(f"histories/{self.history_id}/contents").json()
        source_datasets = [
            item for item in contents
            if item["history_content_type"] == "dataset"
            and item["hid"] in [1, 2]
        ]
        # Both source datasets exist and are now hidden.
        assert len(source_datasets) == 2
        assert not source_datasets[0]["visible"]
        assert not source_datasets[1]["visible"]

    def test_update_dataset_collection(self):
        """Renaming an HDCA via PUT persists the new name."""
        hdca = self._create_pair_collection()
        update_url = f"histories/{self.history_id}/contents/dataset_collections/{hdca['id']}"
        update_response = self._put(update_url,
                                    data=dict(name="newnameforpair"),
                                    json=True)
        self._assert_status_code_is(update_response, 200)
        assert str(self.__show(hdca).json()["name"]) == "newnameforpair"

    def test_update_batch_dataset_collection(self):
        """Batch PUT can rename a dataset collection."""
        hdca = self._create_pair_collection()
        payload = {
            "items": [{
                "history_content_type": "dataset_collection",
                "id": hdca["id"],
            }],
            "name": "newnameforpair",
        }
        update_response = self._put(f"histories/{self.history_id}/contents",
                                    data=payload,
                                    json=True)
        self._assert_status_code_is(update_response, 200)
        assert str(self.__show(hdca).json()["name"]) == "newnameforpair"

    def _create_pair_collection(self):
        """Create a pair HDCA in the test history and return its JSON."""
        payload = self.dataset_collection_populator.create_pair_payload(
            self.history_id, type="dataset_collection")
        create_response = self._post(
            f"histories/{self.history_id}/contents", payload, json=True)
        self._assert_status_code_is(create_response, 200)
        return create_response.json()

    def test_hdca_copy(self):
        """Copying an HDCA without copy_elements shares the original HDAs."""
        source_hdca = self.dataset_collection_populator.create_pair_in_history(
            self.history_id).json()
        target_history_id = self.dataset_populator.new_history()
        collections_url = f"histories/{target_history_id}/contents/dataset_collections"
        assert len(self._get(collections_url).json()) == 0
        create_response = self._post(collections_url,
                                     dict(source='hdca',
                                          content=source_hdca["id"]),
                                     json=True)
        self.__check_create_collection_response(create_response)
        contents = self._get(collections_url).json()
        assert len(contents) == 1
        new_forward, _ = self.__get_paired_response_elements(contents[0])
        self._assert_has_keys(new_forward, "history_id")
        # Elements were not copied, so they still live in the source history.
        assert new_forward["history_id"] == self.history_id

    def test_hdca_copy_with_new_dbkey(self):
        """Copying an HDCA with a dbkey override rewrites element genomes."""
        hdca = self.dataset_collection_populator.create_pair_in_history(
            self.history_id).json()
        first_element = hdca["elements"][0]["object"]
        assert first_element["metadata_dbkey"] == "?"
        assert first_element["genome_build"] == "?"
        create_response = self._post(
            f"histories/{self.history_id}/contents/dataset_collections",
            {'source': 'hdca', 'content': hdca["id"], 'dbkey': 'hg19'},
            json=True)
        collection = self.__check_create_collection_response(create_response)
        new_forward = collection['elements'][0]['object']
        assert new_forward["metadata_dbkey"] == "hg19"
        assert new_forward["genome_build"] == "hg19"

    def test_hdca_copy_and_elements(self):
        """Copying with copy_elements places new HDAs in the target history."""
        source_hdca = self.dataset_collection_populator.create_pair_in_history(
            self.history_id).json()
        target_history_id = self.dataset_populator.new_history()
        collections_url = f"histories/{target_history_id}/contents/dataset_collections"
        assert len(self._get(collections_url).json()) == 0
        create_response = self._post(collections_url,
                                     dict(source='hdca',
                                          content=source_hdca["id"],
                                          copy_elements=True),
                                     json=True)
        self.__check_create_collection_response(create_response)
        contents = self._get(collections_url).json()
        assert len(contents) == 1
        new_forward, _ = self.__get_paired_response_elements(contents[0])
        self._assert_has_keys(new_forward, "history_id")
        # copy_elements=True means elements were duplicated into the target.
        assert new_forward["history_id"] == target_history_id

    def __get_paired_response_elements(self, contents):
        """Show ``contents`` as an HDCA and return its two element objects."""
        hdca = self.__show(contents).json()
        self._assert_has_keys(hdca, "name", "deleted", "visible", "elements")
        elements = hdca["elements"]
        assert len(elements) == 2
        forward, reverse = elements
        self._assert_has_keys(forward, "object")
        self._assert_has_keys(reverse, "object")
        return forward["object"], reverse["object"]

    def test_hdca_from_library_datasets(self):
        """Build a list HDCA from an LDDA; the element becomes a copied HDA."""
        library_dataset = self.library_populator.new_library_dataset("el1")
        ldda_id = library_dataset["ldda_id"]
        history_id = self.dataset_populator.new_history()
        create_data = dict(
            history_id=history_id,
            type="dataset_collection",
            name="Test From Library",
            element_identifiers=[{"name": "el1", "src": "ldda",
                                  "id": ldda_id}],
            collection_type="list",
        )
        create_response = self._post(
            f"histories/{history_id}/contents/dataset_collections",
            create_data,
            json=True)
        hdca = self.__check_create_collection_response(create_response)
        assert len(hdca["elements"]) == 1
        hda = hdca["elements"][0]["object"]
        assert hda["hda_ldda"] == "hda"
        assert hda["history_content_type"] == "dataset"
        assert hda["copied_from_ldda_id"] == ldda_id
        assert hda['history_id'] == history_id

    def test_hdca_from_inaccessible_library_datasets(self):
        """Another user cannot build an HDCA from a private library's LDDA."""
        _library, library_dataset = self.library_populator.new_library_dataset_in_private_library(
            "HDCACreateInaccesibleLibrary")
        create_data = dict(
            history_id=self.history_id,
            type="dataset_collection",
            name="Test From Library",
            element_identifiers=[{"name": "el1", "src": "ldda",
                                  "id": library_dataset["id"]}],
            collection_type="list",
        )
        with self._different_user():
            other_history_id = self.dataset_populator.new_history()
            create_response = self._post(
                f"histories/{other_history_id}/contents/dataset_collections",
                create_data,
                json=True)
            self._assert_status_code_is(create_response, 403)

    def __check_create_collection_response(self, response):
        """Assert a 200 collection-create response and return its JSON."""
        self._assert_status_code_is(response, 200)
        collection = response.json()
        self._assert_has_keys(collection, "url", "name", "deleted", "visible",
                              "elements")
        return collection

    def __show(self, contents):
        """GET the typed show endpoint for a dataset or collection item."""
        content_type = contents['history_content_type']
        return self._get(
            f"histories/{self.history_id}/contents/{content_type}s/{contents['id']}"
        )

    def __count_contents(self, history_id=None, **kwds):
        """Count contents of ``history_id`` (defaults to the test history)."""
        if history_id is None:
            history_id = self.history_id
        index_response = self._get(f"histories/{history_id}/contents", kwds)
        return len(index_response.json())

    def __assert_hda_has_full_details(self, hda_details):
        # Detail-only keys are present only when full details were requested.
        self._assert_has_keys(hda_details, "display_types", "display_apps")

    def __check_for_hda(self, contents_response, hda):
        """Assert the index response contains exactly ``hda``; return it."""
        self._assert_status_code_is(contents_response, 200)
        contents = contents_response.json()
        assert len(contents) == 1
        only_item = contents[0]
        self.__assert_matches_hda(hda, only_item)
        return only_item

    def __assert_matches_hda(self, input_hda, query_hda):
        """Assert *query_hda* refers to the same dataset as *input_hda* (by name and id)."""
        self._assert_has_keys(query_hda, "id", "name")
        for field in ("name", "id"):
            assert input_hda[field] == query_hda[field]

    def test_job_state_summary_field(self):
        """Collections listed via the dev API should expose a dict-valued job_state_summary."""
        create_response = self.dataset_collection_populator.create_pair_in_history(
            self.history_id, contents=["123", "456"])
        self._assert_status_code_is(create_response, 200)
        index_url = f"histories/{self.history_id}/contents?v=dev&keys=job_state_summary&view=summary"
        contents_response = self._get(index_url)
        self._assert_status_code_is(contents_response, 200)
        for item in contents_response.json():
            # Only collection entries carry a job state summary.
            if item['history_content_type'] != 'dataset_collection':
                continue
            assert isinstance(item, dict)
            assert 'job_state_summary' in item
            assert isinstance(item['job_state_summary'], dict)

    def _get_content(self, history_id, update_time):
        """Return contents-near results restricted to items updated after *update_time*."""
        url = f"/api/histories/{history_id}/contents/near/100/100?update_time-gt={update_time}"
        return self._get(url).json()

    def test_history_contents_near_with_update_time(self):
        """The update_time-gt filter should only return items updated after the stamp."""
        with self.dataset_populator.test_history() as history_id:
            first_time = datetime.utcnow().isoformat()
            # Nothing has been created after first_time yet.
            assert len(self._get_content(history_id,
                                         update_time=first_time)) == 0
            self.dataset_collection_populator.create_list_in_history(
                history_id=history_id)
            # 3 datasets + 1 collection were created after first_time.
            assert len(self._get_content(
                history_id, update_time=first_time)) == 4
            self.dataset_populator.wait_for_history(history_id)
            # Once everything has settled, a fresh stamp filters everything out.
            # (The original chained assignment also rebound first_time, a
            # confusing no-op - fixed to a single assignment.)
            all_datasets_finished = datetime.utcnow().isoformat()
            assert len(
                self._get_content(history_id,
                                  update_time=all_datasets_finished)) == 0

    def test_history_contents_near_with_since(self):
        """The since parameter yields 204 until the history changes, then real data."""
        with self.dataset_populator.test_history() as history_id:
            initial_history = self._get(f"/api/histories/{history_id}").json()
            initial_stamp = initial_history['update_time']
            base_url = f"/api/histories/{history_id}/contents/near/100/100"

            # Empty contents, no since flag: an empty 200 result.
            response = self._get(base_url)
            assert response.status_code == 200
            assert len(response.json()) == 0

            # With since, an unchanged history answers 204 (no content).
            response = self._get(f"{base_url}?since={initial_stamp}")
            assert response.status_code == 204

            # Populate the history and wait for it to settle.
            self.dataset_collection_populator.create_list_in_history(
                history_id=history_id)
            self.dataset_populator.wait_for_history(history_id)

            # The new contents appear in an unfiltered request.
            response = self._get(base_url)
            assert response.status_code == 200
            assert len(response.json()) == 4

            # Adding contents must have bumped the history's update_time.
            refreshed_history = self._get(f"/api/histories/{history_id}").json()
            refreshed_stamp = refreshed_history['update_time']
            assert initial_stamp != refreshed_stamp

            # Therefore the same since=initial_stamp request now returns data,
            # because datasets were added and update_time changed.
            response = self._get(f"{base_url}?since={initial_stamp}")
            assert response.status_code == 200
            assert len(response.json()) == 4

    def test_history_contents_near_since_with_standard_iso8601_date(self):
        """The since parameter should accept standard ISO 8601 timestamps."""
        with self.dataset_populator.test_history() as history_id:
            original_history = self._get(f"/api/histories/{history_id}").json()
            original_history_stamp = original_history['update_time']
            base_url = f"/api/histories/{history_id}/contents/near/100/100"

            # javascript's Date.toISOString() emits this format; it should be
            # the expected date format for any modern api
            # https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString

            # The exact history.update_time (with a Z suffix) must parse and
            # still compare as "not changed".
            valid_iso8601_date = original_history_stamp + 'Z'
            encoded_valid_date = urllib.parse.quote_plus(valid_iso8601_date)
            response = self._get(f"{base_url}?since={encoded_valid_date}")
            assert response.status_code == 204

            # Other standard ISO 8601 variants should parse as well.
            sample_formats = (
                '2021-08-26T15:53:02+00:00',
                '2021-08-26T15:53:02Z',
                '2002-10-10T12:00:00-05:00',
            )
            for date_str in sample_formats:
                # quote_plus handles the pluses and minuses in UTC offsets
                encoded_date = urllib.parse.quote_plus(date_str)
                response = self._get(f"{base_url}?since={encoded_date}")
                self._assert_status_code_is_ok(response)

    @skip_without_tool('cat_data_and_sleep')
    def test_history_contents_near_with_update_time_implicit_collection(self):
        """update_time-gt results should eventually include the implicit collection once its jobs finish."""
        with self.dataset_populator.test_history() as history_id:
            hdca_id = self.dataset_collection_populator.create_list_in_history(
                history_id=history_id).json()['id']
            self.dataset_populator.wait_for_history(history_id)
            # Map the slow tool over the collection so an implicit output
            # collection is created while jobs are still running.
            inputs = {
                "input1": {
                    'batch': True,
                    'values': [{
                        "src": "hdca",
                        "id": hdca_id
                    }]
                },
                "sleep_time": 2,
            }
            response = self.dataset_populator.run_tool(
                "cat_data_and_sleep",
                inputs,
                history_id,
            )
            # Stamp taken after submission: only post-submission updates count.
            update_time = datetime.utcnow().isoformat()
            collection_id = response['implicit_collections'][0]['id']
            # Poll for up to ~20s: success once the filtered listing shows a
            # collection whose job_state_summary reports 3 ok jobs.
            for _ in range(20):
                time.sleep(1)
                update = self._get_content(history_id, update_time=update_time)
                if any(c for c in update
                       if c['history_content_type'] == 'dataset_collection'
                       and c['job_state_summary']['ok'] == 3):
                    return
            raise Exception(
                f"History content update time query did not include final update for implicit collection {collection_id}"
            )

    @skip_without_tool('collection_creates_dynamic_nested')
    def test_history_contents_near_with_update_time_explicit_collection(self):
        """update_time-gt results should eventually reflect a dynamic collection becoming populated."""
        with self.dataset_populator.test_history() as history_id:
            # Run the slow tool that produces a dynamic nested collection.
            inputs = {'foo': 'bar', 'sleep_time': 2}
            response = self.dataset_populator.run_tool(
                "collection_creates_dynamic_nested",
                inputs,
                history_id,
            )
            # Stamp taken after submission: only post-submission updates count.
            update_time = datetime.utcnow().isoformat()
            collection_id = response['output_collections'][0]['id']
            # Poll for up to ~20s: success once the filtered listing shows a
            # collection whose populated_state reached 'ok'.
            for _ in range(20):
                time.sleep(1)
                update = self._get_content(history_id, update_time=update_time)
                if any(c for c in update
                       if c['history_content_type'] == 'dataset_collection'
                       and c['populated_state'] == 'ok'):
                    return
            raise Exception(
                f"History content update time query did not include populated_state update for dynamic nested collection {collection_id}"
            )

    def test_index_filter_by_type(self):
        """The types query parameter should partition contents by content type."""
        history_id = self.dataset_populator.new_history()
        self.dataset_populator.new_dataset(history_id)
        self.dataset_collection_populator.create_list_in_history(
            history_id=history_id)

        # One collection was created; everything else is a dataset.
        all_contents = self._get(f"histories/{history_id}/contents").json()
        expected_num_collections = 1
        expected_num_datasets = len(all_contents) - expected_num_collections

        datasets_only = self._get(
            f"histories/{history_id}/contents?types=dataset").json()
        assert len(datasets_only) == expected_num_datasets

        collections_only = self._get(
            f"histories/{history_id}/contents?types=dataset_collection").json()
        assert len(collections_only) == expected_num_collections

    def test_elements_datatypes_field(self):
        """elements_datatypes should list the distinct datatypes of a collection's elements."""
        history_id = self.dataset_populator.new_history()

        def pasted_element(name, content, ext):
            # Build one pasted-content upload element.
            return {
                "name": name,
                "src": "pasted",
                "paste_content": content,
                "ext": ext,
            }

        # List with all elements of txt datatype (homogeneous).
        homogeneous_elements = [
            pasted_element("test1", "abc", "txt"),
            pasted_element("test2", "abc", "txt"),
        ]
        self._upload_collection_list_with_elements(history_id, "homogeneous",
                                                   homogeneous_elements)
        self._assert_collection_has_expected_elements_datatypes(
            history_id, "homogeneous", ["txt"])

        # List mixing txt and tabular datatypes (heterogeneous).
        heterogeneous_elements = [
            pasted_element("test2", "abc", "txt"),
            pasted_element("test3", "a,b,c\n", "tabular"),
        ]
        self._upload_collection_list_with_elements(history_id, "heterogeneous",
                                                   heterogeneous_elements)
        self._assert_collection_has_expected_elements_datatypes(
            history_id, "heterogeneous", ["txt", "tabular"])

    def _upload_collection_list_with_elements(self, history_id: str,
                                              collection_name: str,
                                              elements: List[Any]):
        """Upload a list collection named *collection_name* built from *elements*."""
        upload_response = self.dataset_collection_populator.upload_collection(
            history_id, "list", elements=elements, name=collection_name)
        self._assert_status_code_is_ok(upload_response)

    def _assert_collection_has_expected_elements_datatypes(
            self, history_id, collection_name, expected_datatypes):
        """Fetch the named collection and compare elements_datatypes order-insensitively."""
        query_url = (
            f"histories/{history_id}/contents"
            f"?v=dev&view=betawebclient&q=name-eq&qv={collection_name}"
        )
        contents_response = self._get(query_url)
        self._assert_status_code_is(contents_response, 200)
        collection = contents_response.json()[0]
        self.assertCountEqual(collection["elements_datatypes"],
                              expected_datatypes)
Example no. 12
0
 def setUp(self):
     """Create the populators shared by the library API tests."""
     # Zero-argument super() (Python 3 idiom), consistent with the file's
     # other setUp implementations.
     super().setUp()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
     self.dataset_collection_populator = DatasetCollectionPopulator(
         self.galaxy_interactor)
     self.library_populator = LibraryPopulator(self.galaxy_interactor)
Example no. 13
0
class FolderContentsApiTestCase(ApiTestCase):
    """Tests for the folders/{id}/contents API: creation, listing, paging, search, permissions."""

    def setUp(self):
        """Create populators plus a private library with a root test folder."""
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)

        self.history_id = self.dataset_populator.new_history()
        self.library = self.library_populator.new_private_library("FolderContentsTestsLibrary")
        self.root_folder_id = self._create_folder_in_library("Test Folder Contents")

    def test_create_hda_with_ldda_message(self):
        """Copying an HDA into a folder with an ldda_message yields an LDDA with name and id."""
        hda_id = self._create_hda()
        ldda_message = "Test message"
        data = {
            "from_hda_id": hda_id,
            "ldda_message": ldda_message,
        }
        ldda = self._create_content_in_folder_with_payload(self.root_folder_id, data)
        self._assert_has_keys(ldda, "name", "id")

    def test_create_hdca_with_ldda_message(self):
        """Copying an HDCA into a folder yields one LDDA per collection element."""
        contents = ["dataset01", "dataset02"]
        hdca_id = self._create_hdca_with_contents(contents)
        ldda_message = "Test message"
        data = {
            "from_hdca_id": hdca_id,
            "ldda_message": ldda_message,
        }
        lddas = self._create_content_in_folder_with_payload(self.root_folder_id, data)
        assert len(contents) == len(lddas)

    def test_index(self):
        """Listing a folder returns the dataset that was placed in it."""
        folder_id = self._create_folder_in_library("Test Folder Contents Index")

        self._create_dataset_in_folder(folder_id)

        response = self._get(f"folders/{folder_id}/contents")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == 1

    def test_index_include_deleted(self):
        """Deleted datasets are hidden by default and shown with include_deleted=True."""
        folder_name = "Test Folder Contents Index include deleted"
        folder_id = self._create_folder_in_library(folder_name)

        hda_id = self._create_dataset_in_folder(folder_id)
        self._delete_library_dataset(hda_id)

        # Default listing omits the deleted dataset.
        response = self._get(f"folders/{folder_id}/contents")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == 0

        include_deleted = True
        response = self._get(f"folders/{folder_id}/contents?include_deleted={include_deleted}")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == 1
        assert contents[0]["deleted"] is True

    def test_index_limit_offset(self):
        """limit and offset page through a folder's combined subfolders and datasets."""
        folder_name = "Test Folder Contents Index limit"
        folder_id = self._create_folder_in_library(folder_name)

        num_subfolders = 5
        for index in range(num_subfolders):
            self._create_subfolder_in(folder_id, name=f"Folder_{index}")

        num_datasets = 5
        for _ in range(num_datasets):
            self._create_dataset_in_folder(folder_id)

        total_items = num_datasets + num_subfolders

        # Unpaged listing returns everything; kept as the ordering reference.
        response = self._get(f"folders/{folder_id}/contents")
        self._assert_status_code_is(response, 200)
        original_contents = response.json()["folder_contents"]
        assert len(original_contents) == total_items

        limit = 7
        response = self._get(f"folders/{folder_id}/contents?limit={limit}")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == limit

        offset = 3
        response = self._get(f"folders/{folder_id}/contents?offset={offset}")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == total_items - offset

        # Combined limit+offset must match the same slice of the unpaged listing.
        limit = 4
        offset = 4
        response = self._get(f"folders/{folder_id}/contents?limit={limit}&offset={offset}")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == limit
        expected_query_result = original_contents[offset:offset + limit]
        for index in range(limit):
            assert contents[index]["id"] == expected_query_result[index]["id"]

    def test_index_search_text(self):
        """search_text matches substrings in both dataset and subfolder names."""
        folder_name = "Test Folder Contents Index search text"
        folder_id = self._create_folder_in_library(folder_name)

        dataset_names = ["AB", "BC", "ABC"]
        for name in dataset_names:
            self._create_dataset_in_folder(folder_id, name)

        subfolder_names = ["Folder_A", "Folder_C"]
        for name in subfolder_names:
            self._create_subfolder_in(folder_id, name)

        all_names = dataset_names + subfolder_names

        search_terms = ["A", "B", "C"]
        for search_text in search_terms:
            response = self._get(f"folders/{folder_id}/contents?search_text={search_text}")
            self._assert_status_code_is(response, 200)
            contents = response.json()["folder_contents"]
            # Expected hit count: names containing the search term as a substring.
            matching_names = [name for name in all_names if search_text in name]
            assert len(contents) == len(matching_names)

    def test_index_permissions_include_deleted(self):
        """Only admins and the owner see deleted contents; access-only users do not."""
        # NOTE(review): "deteleted" typo below is only in the test folder name; harmless.
        folder_name = "Test Folder Contents Index permissions include deteleted"
        folder_id = self._create_folder_in_library(folder_name)

        num_subfolders = 5
        subfolder_ids: List[str] = []
        deleted_subfolder_ids: List[str] = []
        for index in range(num_subfolders):
            id = self._create_subfolder_in(folder_id, name=f"Folder_{index}")
            subfolder_ids.append(id)

        # Delete every other subfolder.
        for index, subfolder_id in enumerate(subfolder_ids):
            if index % 2 == 0:
                self._delete_subfolder(subfolder_id)
                deleted_subfolder_ids.append(subfolder_id)

        num_datasets = 5
        datasets_ids: List[str] = []
        deleted_datasets_ids: List[str] = []
        for _ in range(num_datasets):
            id = self._create_dataset_in_folder(folder_id)
            datasets_ids.append(id)

        # Delete every other dataset.
        for index, ldda_id in enumerate(datasets_ids):
            if index % 2 == 0:
                self._delete_library_dataset(ldda_id)
                deleted_datasets_ids.append(ldda_id)

        num_total_contents = num_subfolders + num_datasets
        num_non_deleted = num_total_contents - len(deleted_subfolder_ids) - len(deleted_datasets_ids)

        # Verify deleted contents are not listed
        include_deleted = False
        response = self._get(f"folders/{folder_id}/contents?include_deleted={include_deleted}")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == num_non_deleted

        include_deleted = True
        # Admins can see everything...
        response = self._get(f"folders/{folder_id}/contents?include_deleted={include_deleted}", admin=True)
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == num_total_contents

        # Owner can see everything too
        response = self._get(f"folders/{folder_id}/contents?include_deleted={include_deleted}")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == num_total_contents

        # Users with access but no modify permission can't see deleted
        with self._different_user():
            different_user_role_id = self.dataset_populator.user_private_role_id()

        self._allow_library_access_to_user_role(different_user_role_id)

        with self._different_user():
            response = self._get(f"folders/{folder_id}/contents?include_deleted={include_deleted}")
            self._assert_status_code_is(response, 200)
            contents = response.json()["folder_contents"]
            assert len(contents) == num_non_deleted

    def _create_folder_in_library(self, name: str) -> Any:
        """Create a subfolder named *name* under the library's root folder."""
        root_folder_id = self.library["root_folder_id"]
        return self._create_subfolder_in(root_folder_id, name)

    def _create_subfolder_in(self, folder_id: str, name: str) -> str:
        """Create a subfolder of *folder_id* and return its encoded id."""
        data = {
            "name": name,
            "description": f"The description of {name}",
        }
        create_response = self._post(f"folders/{folder_id}", data=data)
        self._assert_status_code_is(create_response, 200)
        folder = create_response.json()
        return folder["id"]

    def _create_dataset_in_folder(self, folder_id: str, name: Optional[str] = None) -> str:
        """Upload a history dataset and copy it into *folder_id*; return the LDDA id."""
        hda_id = self._create_hda(name)
        data = {
            "from_hda_id": hda_id,
        }
        ldda = self._create_content_in_folder_with_payload(folder_id, data)
        return ldda["id"]

    def _create_content_in_folder_with_payload(self, folder_id: str, payload) -> Any:
        """POST *payload* to the folder contents endpoint and return the JSON result."""
        create_response = self._post(f"folders/{folder_id}/contents", data=payload)
        self._assert_status_code_is(create_response, 200)
        return create_response.json()

    def _create_hda(self, name: Optional[str] = None) -> str:
        """Create a new dataset in the test history and return its id."""
        hda = self.dataset_populator.new_dataset(self.history_id, name=name)
        hda_id = hda["id"]
        return hda_id

    def _create_hdca_with_contents(self, contents: List[str]) -> str:
        """Create a list collection from *contents* in the test history; return its id."""
        hdca = self.dataset_collection_populator.create_list_in_history(self.history_id, contents=contents, direct_upload=True).json()["outputs"][0]
        hdca_id = hdca["id"]
        return hdca_id

    def _delete_library_dataset(self, ldda_id: str) -> None:
        """Mark the library dataset *ldda_id* as deleted."""
        delete_response = self._delete(f"libraries/datasets/{ldda_id}")
        self._assert_status_code_is(delete_response, 200)

    def _delete_subfolder(self, folder_id: str) -> None:
        """Mark the folder *folder_id* as deleted."""
        delete_response = self._delete(f"folders/{folder_id}")
        self._assert_status_code_is(delete_response, 200)

    def _allow_library_access_to_user_role(self, role_id: str):
        """Grant *role_id* access (not modify) permission on the test library (as admin)."""
        library_id = self.library["id"]
        action = "set_permissions"
        data = {
            "access_ids[]": role_id,
        }
        response = self._post(f"libraries/{library_id}/permissions?action={action}", data=data, admin=True, json=True)
        self._assert_status_code_is(response, 200)
Example no. 14
0
 def setUp(self):
     """Run the base setup and attach a library populator."""
     super().setUp()
     interactor = self.galaxy_interactor
     self.library_populator = LibraryPopulator(interactor)
Example no. 15
0
 def setUp(self):
     """Create populators and a fresh history for upload configuration tests."""
     # Zero-argument super() (Python 3 idiom), consistent with the file's
     # other setUp implementations.
     super().setUp()
     self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
     self.library_populator = LibraryPopulator(self.galaxy_interactor)
     self.history_id = self.dataset_populator.new_history()