Example #1
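This excerpt depends on module-level imports and constants defined above the class in Galaxy's test module. A minimal sketch, assuming current `galaxy_test` package paths (in-tree these are relative imports) and that `FILE_MD5` holds the expected checksum of `4.bed`:

import json
import time
import unittest

from galaxy_test.api._framework import ApiTestCase  # import path assumed
from galaxy_test.base.populators import (
    DatasetCollectionPopulator,
    DatasetPopulator,
    LibraryPopulator,
    TestsDatasets,
)

FILE_URL = "https://raw.githubusercontent.com/galaxyproject/galaxy/dev/test-data/4.bed"
FILE_MD5 = "f568c29421792b1b1df4474dafae01f1"  # assumed value: the expected MD5 of 4.bed
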
class LibrariesApiTestCase(ApiTestCase, TestsDatasets):
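    """Tests for the data libraries API (/api/libraries)."""
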
    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(
            self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)

    def test_create(self):
        data = dict(name="CreateTestLibrary")
        create_response = self._post("libraries", data=data, admin=True)
        self._assert_status_code_is(create_response, 200)
        library = create_response.json()
        self._assert_has_keys(library, "name")
        assert library["name"] == "CreateTestLibrary"

    def test_delete(self):
        library = self.library_populator.new_library("DeleteTestLibrary")
        create_response = self._delete("libraries/%s" % library["id"],
                                       admin=True)
        self._assert_status_code_is(create_response, 200)
        library = create_response.json()
        self._assert_has_keys(library, "deleted")
        assert library["deleted"] is True
        # Test undeleting
        data = dict(undelete=True)
        create_response = self._delete("libraries/%s" % library["id"],
                                       data=data,
                                       admin=True)
        library = create_response.json()
        self._assert_status_code_is(create_response, 200)
        assert library["deleted"] is False

    def test_nonadmin(self):
        # Anons can't create libs
        data = dict(name="CreateTestLibrary")
        create_response = self._post("libraries",
                                     data=data,
                                     admin=False,
                                     anon=True)
        self._assert_status_code_is(create_response, 403)
        # Anons can't delete libs
        library = self.library_populator.new_library("AnonDeleteTestLibrary")
        create_response = self._delete("libraries/%s" % library["id"],
                                       admin=False,
                                       anon=True)
        self._assert_status_code_is(create_response, 403)
        # Anons can't update libs
        data = dict(name="ChangedName",
                    description="ChangedDescription",
                    synopsis='ChangedSynopsis')
        create_response = self._patch("libraries/%s" % library["id"],
                                      data=data,
                                      admin=False,
                                      anon=True)
        self._assert_status_code_is(create_response, 403)

    def test_update(self):
        library = self.library_populator.new_library("UpdateTestLibrary")
        data = dict(name='ChangedName',
                    description='ChangedDescription',
                    synopsis='ChangedSynopsis')
        create_response = self._patch("libraries/%s" % library["id"],
                                      data=data,
                                      admin=True)
        self._assert_status_code_is(create_response, 200)
        library = create_response.json()
        self._assert_has_keys(library, 'name', 'description', 'synopsis')
        assert library['name'] == 'ChangedName'
        assert library['description'] == 'ChangedDescription'
        assert library['synopsis'] == 'ChangedSynopsis'

    def test_create_private_library_permissions(self):
        library = self.library_populator.new_library("PermissionTestLibrary")
        library_id = library["id"]
        role_id = self.library_populator.user_private_role_id()
        self.library_populator.set_permissions(library_id, role_id)
        create_response = self._create_folder(library)
        self._assert_status_code_is(create_response, 200)

    def test_create_dataset_denied(self):
        library = self.library_populator.new_private_library(
            "ForCreateDatasets")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id,
                                                    content="1 2 3")['id']
        with self._different_user():
            payload = {'from_hda_id': hda_id}
            create_response = self._post("folders/%s/contents" % folder_id,
                                         payload)
            self._assert_status_code_is(create_response, 403)

    def test_show_private_dataset_permissions(self):
        library, library_dataset = self.library_populator.new_library_dataset_in_private_library(
            "ForCreateDatasets", wait=True)
        with self._different_user():
            response = self.library_populator.show_ldda(
                library["id"], library_dataset["id"])
            # TODO: this should really be 403 and a proper JSON exception.
            self._assert_status_code_is(response, 400)

    def test_create_dataset(self):
        library, library_dataset = self.library_populator.new_library_dataset_in_private_library(
            "ForCreateDatasets", wait=True)
        self._assert_has_keys(library_dataset, "peek", "data_type")
        assert library_dataset["peek"].find("create_test") >= 0
        assert library_dataset["file_ext"] == "txt", library_dataset[
            "file_ext"]

    def test_fetch_upload_to_folder(self):
        history_id, library, destination = self._setup_fetch_to_folder(
            "flat_zip")
        items = [{
            "src": "files",
            "dbkey": "hg19",
            "info": "my cool bed",
            "created_from_basename": "4.bed"
        }]
        targets = [{"destination": destination, "items": items}]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
            "__files": {
                "files_0|file_data":
                open(self.test_data_resolver.get_filename("4.bed"))
            },
        }
        self.dataset_populator.fetch(payload)
        dataset = self.library_populator.get_library_contents_with_path(
            library["id"], "/4.bed")
        assert dataset["file_size"] == 61, dataset
        assert dataset["genome_build"] == "hg19", dataset
        assert dataset["misc_info"] == "my cool bed", dataset
        assert dataset["file_ext"] == "bed", dataset
        assert dataset["created_from_basename"] == "4.bed"

    def test_fetch_zip_to_folder(self):
        history_id, library, destination = self._setup_fetch_to_folder(
            "flat_zip")
        bed_test_data_path = self.test_data_resolver.get_filename("4.bed.zip")
        targets = [{
            "destination": destination,
            "items_from": "archive",
            "src": "files",
        }]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
            "__files": {
                "files_0|file_data": open(bed_test_data_path, 'rb')
            }
        }
        self.dataset_populator.fetch(payload)
        dataset = self.library_populator.get_library_contents_with_path(
            library["id"], "/4.bed")
        assert dataset["file_size"] == 61, dataset

    def test_fetch_single_url_to_folder(self):
        library, response = self._fetch_single_url_to_folder()
        dataset = self.library_populator.get_library_contents_with_path(
            library["id"], "/4.bed")
        assert dataset["file_size"] == 61, dataset

    def test_fetch_single_url_with_invalid_datatype(self):
        _, response = self._fetch_single_url_to_folder('xxx', assert_ok=False)
        self._assert_status_code_is(response, 400)
        assert response.json()['err_msg'] == "Requested extension 'xxx' unknown, cannot upload dataset."

    def _fetch_single_url_to_folder(self, file_type='auto', assert_ok=True):
        history_id, library, destination = self._setup_fetch_to_folder(
            "single_url")
        items = [{
            "src": "url",
            "url": FILE_URL,
            "MD5": FILE_MD5,
            "ext": file_type,
        }]
        targets = [{
            "destination": destination,
            "items": items,
        }]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
            "validate_hashes": True
        }
        return library, self.dataset_populator.fetch(payload,
                                                     assert_ok=assert_ok)

    def test_legacy_upload_unknown_datatype(self):
        library = self.library_populator.new_private_library("ForLegacyUpload")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        payload = {
            'folder_id': folder_id,
            'create_type': 'file',
            'file_type': 'xxx',
            'upload_option': 'upload_file',
            'files_0|url_paste': FILE_URL,
        }
        create_response = self._post("libraries/%s/contents" % library['id'],
                                     payload)
        self._assert_status_code_is(create_response, 400)
        assert create_response.json() == "Requested extension 'xxx' unknown, cannot upload dataset."

    def test_fetch_failed_validation(self):
        # Exception handling is really rough here - we should be creating a dataset in error instead
        # of just failing the job like this.
        history_id, library, destination = self._setup_fetch_to_folder(
            "single_url")
        items = [{
            "src": "url",
            "url": "https://raw.githubusercontent.com/galaxyproject/galaxy/dev/test-data/4.bed",
            # Deliberately wrong MD5 so hash validation fails.
            "MD5": "37b59762b59fff860460522d271bc112",
            "name": "4.bed",
        }]
        targets = [{
            "destination": destination,
            "items": items,
        }]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
            "validate_hashes": True
        }
        tool_response = self.dataset_populator.fetch(payload, assert_ok=False)
        job = self.dataset_populator.check_run(tool_response)
        self.dataset_populator.wait_for_job(job["id"])

        job = tool_response.json()["jobs"][0]
        details = self.dataset_populator.get_job_details(job["id"]).json()
        assert details["state"] == "ok", details

        dataset = self.library_populator.get_library_contents_with_path(
            library["id"], "/4.bed")
        assert dataset["state"] == "error", dataset

    def test_fetch_url_archive_to_folder(self):
        history_id, library, destination = self._setup_fetch_to_folder(
            "single_url")
        targets = [{
            "destination": destination,
            "items_from": "archive",
            "src": "url",
            "url": "https://raw.githubusercontent.com/galaxyproject/galaxy/dev/test-data/4.bed.zip",
        }]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
        }
        self.dataset_populator.fetch(payload)
        dataset = self.library_populator.get_library_contents_with_path(
            library["id"], "/4.bed")
        assert dataset["file_size"] == 61, dataset

    @unittest.skip("reference URLs changed, checksums now invalid.")
    def test_fetch_bagit_archive_to_folder(self):
        history_id, library, destination = self._setup_fetch_to_folder(
            "bagit_archive")
        example_bag_path = self.test_data_resolver.get_filename(
            "example-bag.zip")
        targets = [{
            "destination": destination,
            "items_from": "bagit_archive",
            "src": "files",
        }]
        payload = {
            "history_id": history_id,  # TODO: Shouldn't be needed :(
            "targets": json.dumps(targets),
            "__files": {
                "files_0|file_data": open(example_bag_path)
            },
        }
        self.dataset_populator.fetch(payload)
        dataset = self.library_populator.get_library_contents_with_path(
            library["id"], "/README.txt")
        assert dataset["file_size"] == 66, dataset

        dataset = self.library_populator.get_library_contents_with_path(
            library["id"], "/bdbag-profile.json")
        assert dataset["file_size"] == 723, dataset

    def _setup_fetch_to_folder(self, test_name):
        return self.library_populator.setup_fetch_to_folder(test_name)

    def test_create_dataset_in_folder(self):
        library = self.library_populator.new_private_library(
            "ForCreateDatasets")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id,
                                                    content="1 2 3")['id']
        payload = {'from_hda_id': hda_id}
        create_response = self._post("folders/%s/contents" % folder_id,
                                     payload)
        self._assert_status_code_is(create_response, 200)
        self._assert_has_keys(create_response.json(), "name", "id")

    def test_create_dataset_in_subfolder(self):
        library = self.library_populator.new_private_library(
            "ForCreateDatasets")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        subfolder_response = self._create_subfolder(folder_id)
        self._assert_status_code_is(subfolder_response, 200)
        subfolder_id = subfolder_response.json()['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id,
                                                    content="1 2 3 sub")['id']
        payload = {'from_hda_id': hda_id}
        create_response = self._post("folders/%s/contents" % subfolder_id,
                                     payload)
        self._assert_status_code_is(create_response, 200)
        self._assert_has_keys(create_response.json(), "name", "id")
        dataset_update_time = create_response.json()['update_time']
        container_fetch_response = self.galaxy_interactor.get(
            "folders/%s/contents" % folder_id)
        container_update_time = container_fetch_response.json()['folder_contents'][0]['update_time']
        assert dataset_update_time == container_update_time, container_fetch_response

    def test_update_dataset_in_folder(self):
        ld = self._create_dataset_in_folder_in_library("ForUpdateDataset")
        data = {
            'name': 'updated_name',
            'file_ext': 'fastq',
            'misc_info': 'updated_info',
            'genome_build': 'updated_genome_build'
        }
        create_response = self._patch(
            "libraries/datasets/%s" % ld.json()["id"], data=data)
        self._assert_status_code_is(create_response, 200)
        self._assert_has_keys(create_response.json(), "name", "file_ext",
                              "misc_info", "genome_build")

    def test_update_dataset_tags(self):
        ld = self._create_dataset_in_folder_in_library("ForTagtestDataset")
        data = {"tags": ["#Lancelot", "name:Holy Grail", "blue"]}
        create_response = self._patch(
            "libraries/datasets/%s" % ld.json()["id"], data=data)
        self._assert_status_code_is(create_response, 200)
        self._assert_has_keys(create_response.json(), "tags")
        assert create_response.json()["tags"] == "name:Lancelot, name:HolyGrail, blue"

    def test_invalid_update_dataset_in_folder(self):
        ld = self._create_dataset_in_folder_in_library(
            "ForInvalidUpdateDataset")
        data = {'file_ext': 'nonexisting_type'}
        create_response = self._patch(
            "libraries/datasets/%s" % ld.json()["id"], data=data)
        self._assert_status_code_is(create_response, 400)
        assert 'This Galaxy does not recognize the datatype of:' in create_response.json()['err_msg']

    def test_detect_datatype_of_dataset_in_folder(self):
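        # 'data' forces the generic datatype without sniffing; 'auto' re-runs datatype detection.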
        ld = self._create_dataset_in_folder_in_library("ForDetectDataset")
        # Wait for metadata job to finish.
        time.sleep(2)
        data = {'file_ext': 'data'}
        create_response = self._patch(
            "libraries/datasets/%s" % ld.json()["id"], data=data)
        self._assert_status_code_is(create_response, 200)
        self._assert_has_keys(create_response.json(), "file_ext")
        assert create_response.json()["file_ext"] == "data"
        # Wait for metadata job to finish.
        time.sleep(2)
        data = {'file_ext': 'auto'}
        create_response = self._patch(
            "libraries/datasets/%s" % ld.json()["id"], data=data)
        self._assert_status_code_is(create_response, 200)
        self._assert_has_keys(create_response.json(), "file_ext")
        assert create_response.json()["file_ext"] == "txt"

    def test_ldda_collection_import_to_history(self):
        self._import_to_history(visible=True)

    def test_ldda_collection_import_to_history_hide_source(self):
        self._import_to_history(visible=False)

    def test_import_paired_collection(self):
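        # Builds a list:paired collection, reusing the same LDDA as both forward and reverse.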
        ld = self._create_dataset_in_folder_in_library(
            "ForHistoryImport").json()
        history_id = self.dataset_populator.new_history()
        url = "histories/%s/contents" % history_id
        collection_name = 'Paired-end data (from library)'
        payload = {
            'name': collection_name,
            'collection_type': 'list:paired',
            'type': 'dataset_collection',
            'element_identifiers': json.dumps([{
                'src': 'new_collection',
                'name': 'pair1',
                'collection_type': 'paired',
                'element_identifiers': [{
                    'name': 'forward',
                    'src': 'ldda',
                    'id': ld['id']
                }, {
                    'name': 'reverse',
                    'src': 'ldda',
                    'id': ld['id']
                }]
            }])
        }
        new_collection = self._post(url, payload).json()
        assert new_collection['name'] == collection_name
        pair = new_collection['elements'][0]
        assert pair['element_identifier'] == 'pair1'
        assert pair['object']['elements'][0]['object']['history_id'] == history_id

    def _import_to_history(self, visible=True):
        ld = self._create_dataset_in_folder_in_library(
            "ForHistoryImport").json()
        history_id = self.dataset_populator.new_history()
        url = "histories/%s/contents" % history_id
        collection_name = 'new_collection_name'
        element_identifier = 'new_element_identifier'
        payload = {
            "collection_type": "list",
            "history_content_type": "dataset_collection",
            "model_class": "HistoryDatasetCollectionAssociation",
            "history_id": history_id,
            "name": collection_name,
            "hide_source_items": not visible,
            "element_identifiers": json.dumps([{
                "id": ld['id'],
                "name": element_identifier,
                "src": "ldda"
            }]),
            "type": "dataset_collection",
            "elements": []
        }
        new_collection = self._post(url, payload).json()
        assert new_collection['name'] == collection_name
        assert new_collection['element_count'] == 1
        element = new_collection['elements'][0]
        assert element['element_identifier'] == element_identifier
        assert element['object']['visible'] == visible

    def test_create_datasets_in_library_from_collection(self):
        library = self.library_populator.new_private_library(
            "ForCreateDatasetsFromCollection")
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hdca_id = self.dataset_collection_populator.create_list_in_history(
            history_id, contents=["xxx", "yyy"],
            direct_upload=True).json()["outputs"][0]["id"]
        payload = {
            'from_hdca_id': hdca_id,
            'create_type': 'file',
            'folder_id': folder_id
        }
        create_response = self._post("libraries/%s/contents" % library['id'],
                                     payload)
        self._assert_status_code_is(create_response, 200)

    def test_create_datasets_in_folder_from_collection(self):
        library = self.library_populator.new_private_library(
            "ForCreateDatasetsFromCollection")
        history_id = self.dataset_populator.new_history()
        hdca_id = self.dataset_collection_populator.create_list_in_history(
            history_id, contents=["xxx", "yyy"],
            direct_upload=True).json()["outputs"][0]["id"]
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        payload = {'from_hdca_id': hdca_id}
        create_response = self._post("folders/%s/contents" % folder_id,
                                     payload)
        self._assert_status_code_is(create_response, 200)
        assert len(create_response.json()) == 2
        # Also test that anything different from a flat dataset collection list
        # is refused
        hdca_pair_id = self.dataset_collection_populator.create_list_of_pairs_in_history(
            history_id).json()["outputs"][0]['id']
        payload = {'from_hdca_id': hdca_pair_id}
        create_response = self._post("folders/%s/contents" % folder_id,
                                     payload)
        self._assert_status_code_is(create_response, 501)
        assert create_response.json()['err_msg'] == 'Cannot add nested collections to library. Please flatten your collection first.'

    def _create_folder(self, library):
        create_data = dict(
            folder_id=library["root_folder_id"],
            create_type="folder",
            name="New Folder",
        )
        return self._post("libraries/%s/contents" % library["id"],
                          data=create_data)

    def _create_subfolder(self, containing_folder_id):
        create_data = dict(
            description="new subfolder desc",
            name="New Subfolder",
        )
        return self._post("folders/%s" % containing_folder_id,
                          data=create_data)

    def _create_dataset_in_folder_in_library(self, library_name):
        library = self.library_populator.new_private_library(library_name)
        folder_response = self._create_folder(library)
        self._assert_status_code_is(folder_response, 200)
        folder_id = folder_response.json()[0]['id']
        history_id = self.dataset_populator.new_history()
        hda_id = self.dataset_populator.new_dataset(history_id,
                                                    content="1 2 3")['id']
        payload = {
            'from_hda_id': hda_id,
            'create_type': 'file',
            'folder_id': folder_id
        }
        ld = self._post("libraries/%s/contents" % folder_id, payload)
        return ld
Example #2
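This excerpt likewise needs a few imports from the test framework; a minimal sketch, assuming the same `galaxy_test` package layout as in Example #1:

from typing import (
    Any,
    List,
    Optional,
)

from galaxy_test.api._framework import ApiTestCase  # import path assumed
from galaxy_test.base.populators import (
    DatasetCollectionPopulator,
    DatasetPopulator,
    LibraryPopulator,
)
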
class FolderContentsApiTestCase(ApiTestCase):
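    """Tests for the library folder contents API (/api/folders/{id}/contents)."""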

    def setUp(self):
        super().setUp()
        self.dataset_populator = DatasetPopulator(self.galaxy_interactor)
        self.dataset_collection_populator = DatasetCollectionPopulator(self.galaxy_interactor)
        self.library_populator = LibraryPopulator(self.galaxy_interactor)

        self.history_id = self.dataset_populator.new_history()
        self.library = self.library_populator.new_private_library("FolderContentsTestsLibrary")
        self.root_folder_id = self._create_folder_in_library("Test Folder Contents")

    def test_create_hda_with_ldda_message(self):
        hda_id = self._create_hda()
        ldda_message = "Test message"
        data = {
            "from_hda_id": hda_id,
            "ldda_message": ldda_message,
        }
        ldda = self._create_content_in_folder_with_payload(self.root_folder_id, data)
        self._assert_has_keys(ldda, "name", "id")

    def test_create_hdca_with_ldda_message(self):
        contents = ["dataset01", "dataset02"]
        hdca_id = self._create_hdca_with_contents(contents)
        ldda_message = "Test message"
        data = {
            "from_hdca_id": hdca_id,
            "ldda_message": ldda_message,
        }
        lddas = self._create_content_in_folder_with_payload(self.root_folder_id, data)
        assert len(contents) == len(lddas)

    def test_index(self):
        folder_id = self._create_folder_in_library("Test Folder Contents Index")

        self._create_dataset_in_folder(folder_id)

        response = self._get(f"folders/{folder_id}/contents")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == 1

    def test_index_include_deleted(self):
        folder_name = "Test Folder Contents Index include deleted"
        folder_id = self._create_folder_in_library(folder_name)

        hda_id = self._create_dataset_in_folder(folder_id)
        self._delete_library_dataset(hda_id)

        response = self._get(f"folders/{folder_id}/contents")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == 0

        include_deleted = True
        response = self._get(f"folders/{folder_id}/contents?include_deleted={include_deleted}")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == 1
        assert contents[0]["deleted"] is True

    def test_index_limit_offset(self):
        folder_name = "Test Folder Contents Index limit"
        folder_id = self._create_folder_in_library(folder_name)

        num_subfolders = 5
        for index in range(num_subfolders):
            self._create_subfolder_in(folder_id, name=f"Folder_{index}")

        num_datasets = 5
        for _ in range(num_datasets):
            self._create_dataset_in_folder(folder_id)

        total_items = num_datasets + num_subfolders

        response = self._get(f"folders/{folder_id}/contents")
        self._assert_status_code_is(response, 200)
        original_contents = response.json()["folder_contents"]
        assert len(original_contents) == total_items

        limit = 7
        response = self._get(f"folders/{folder_id}/contents?limit={limit}")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == limit

        offset = 3
        response = self._get(f"folders/{folder_id}/contents?offset={offset}")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == total_items - offset

        limit = 4
        offset = 4
        response = self._get(f"folders/{folder_id}/contents?limit={limit}&offset={offset}")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == limit
        expected_query_result = original_contents[offset:offset + limit]
        for index in range(limit):
            assert contents[index]["id"] == expected_query_result[index]["id"]

    def test_index_search_text(self):
        folder_name = "Test Folder Contents Index search text"
        folder_id = self._create_folder_in_library(folder_name)

        dataset_names = ["AB", "BC", "ABC"]
        for name in dataset_names:
            self._create_dataset_in_folder(folder_id, name)

        subfolder_names = ["Folder_A", "Folder_C"]
        for name in subfolder_names:
            self._create_subfolder_in(folder_id, name)

        all_names = dataset_names + subfolder_names

        search_terms = ["A", "B", "C"]
        for search_text in search_terms:
            response = self._get(f"folders/{folder_id}/contents?search_text={search_text}")
            self._assert_status_code_is(response, 200)
            contents = response.json()["folder_contents"]
            matching_names = [name for name in all_names if search_text in name]
            assert len(contents) == len(matching_names)

    def test_index_permissions_include_deleted(self):

        folder_name = "Test Folder Contents Index permissions include deteleted"
        folder_id = self._create_folder_in_library(folder_name)

        num_subfolders = 5
        subfolder_ids: List[str] = []
        deleted_subfolder_ids: List[str] = []
        for index in range(num_subfolders):
            subfolder_id = self._create_subfolder_in(folder_id, name=f"Folder_{index}")
            subfolder_ids.append(subfolder_id)

        for index, subfolder_id in enumerate(subfolder_ids):
            if index % 2 == 0:
                self._delete_subfolder(subfolder_id)
                deleted_subfolder_ids.append(subfolder_id)

        num_datasets = 5
        datasets_ids: List[str] = []
        deleted_datasets_ids: List[str] = []
        for _ in range(num_datasets):
            ldda_id = self._create_dataset_in_folder(folder_id)
            datasets_ids.append(ldda_id)

        for index, ldda_id in enumerate(datasets_ids):
            if index % 2 == 0:
                self._delete_library_dataset(ldda_id)
                deleted_datasets_ids.append(ldda_id)

        num_total_contents = num_subfolders + num_datasets
        num_non_deleted = num_total_contents - len(deleted_subfolder_ids) - len(deleted_datasets_ids)

        # Verify deleted contents are not listed
        include_deleted = False
        response = self._get(f"folders/{folder_id}/contents?include_deleted={include_deleted}")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == num_non_deleted

        include_deleted = True
        # Admins can see everything...
        response = self._get(f"folders/{folder_id}/contents?include_deleted={include_deleted}", admin=True)
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == num_total_contents

        # Owner can see everything too
        response = self._get(f"folders/{folder_id}/contents?include_deleted={include_deleted}")
        self._assert_status_code_is(response, 200)
        contents = response.json()["folder_contents"]
        assert len(contents) == num_total_contents

        # Users with access but no modify permission can't see deleted
        with self._different_user():
            different_user_role_id = self.dataset_populator.user_private_role_id()

        self._allow_library_access_to_user_role(different_user_role_id)

        with self._different_user():
            response = self._get(f"folders/{folder_id}/contents?include_deleted={include_deleted}")
            self._assert_status_code_is(response, 200)
            contents = response.json()["folder_contents"]
            assert len(contents) == num_non_deleted

    def _create_folder_in_library(self, name: str) -> str:
        root_folder_id = self.library["root_folder_id"]
        return self._create_subfolder_in(root_folder_id, name)

    def _create_subfolder_in(self, folder_id: str, name: str) -> str:
        data = {
            "name": name,
            "description": f"The description of {name}",
        }
        create_response = self._post(f"folders/{folder_id}", data=data)
        self._assert_status_code_is(create_response, 200)
        folder = create_response.json()
        return folder["id"]

    def _create_dataset_in_folder(self, folder_id: str, name: Optional[str] = None) -> str:
        hda_id = self._create_hda(name)
        data = {
            "from_hda_id": hda_id,
        }
        ldda = self._create_content_in_folder_with_payload(folder_id, data)
        return ldda["id"]

    def _create_content_in_folder_with_payload(self, folder_id: str, payload) -> Any:
        create_response = self._post(f"folders/{folder_id}/contents", data=payload)
        self._assert_status_code_is(create_response, 200)
        return create_response.json()

    def _create_hda(self, name: Optional[str] = None) -> str:
        hda = self.dataset_populator.new_dataset(self.history_id, name=name)
        hda_id = hda["id"]
        return hda_id

    def _create_hdca_with_contents(self, contents: List[str]) -> str:
        hdca = self.dataset_collection_populator.create_list_in_history(
            self.history_id, contents=contents, direct_upload=True
        ).json()["outputs"][0]
        hdca_id = hdca["id"]
        return hdca_id

    def _delete_library_dataset(self, ldda_id: str) -> None:
        delete_response = self._delete(f"libraries/datasets/{ldda_id}")
        self._assert_status_code_is(delete_response, 200)

    def _delete_subfolder(self, folder_id: str) -> None:
        delete_response = self._delete(f"folders/{folder_id}")
        self._assert_status_code_is(delete_response, 200)

    def _allow_library_access_to_user_role(self, role_id: str):
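        # Grants the given role access to the library via the legacy permissions endpoint.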
        library_id = self.library["id"]
        action = "set_permissions"
        data = {
            "access_ids[]": role_id,
        }
        response = self._post(f"libraries/{library_id}/permissions?action={action}", data=data, admin=True, json=True)
        self._assert_status_code_is(response, 200)