    def test_split_upload_files_should_generate_upload_files_list_for_only_one_file(self, getsize):
        # GIVEN
        entry = UploadEntry("/tmp/test.gz", "test.gz")
        size = 10 * self.MAX_PACKAGE_SIZE
        getsize.return_value = size

        # EXPECT
        expected = UploadPackage()
        expected.update(entry, size)
        self.assertEqual(list(split_upload_files([entry], max_package_size=self.MAX_PACKAGE_SIZE)), [expected])

    def test_split_upload_files_should_not_generate_empty_packages(self, getsize):
        # GIVEN
        entry = UploadEntry("/tmp/test.gz", "test.gz")
        # AND
        upload_entry = UploadEntry(entry.source_path, entry.target_path)
        size = 10 * self.MAX_PACKAGE_SIZE
        getsize.return_value = size

        # EXPECT
        expected = UploadPackage()
        expected.update(entry, size)
        for package in split_upload_files([upload_entry], max_package_size=self.MAX_PACKAGE_SIZE):
            self.assertFalse(package.is_empty())
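
The `getsize` parameter in the methods above is injected by a mock patch; the surrounding test case is not shown. A minimal sketch of the assumed scaffolding follows, with a hypothetical class name, patch target, and package-size value:

import unittest
from unittest import mock


# Hypothetical scaffolding: the class name, the patch target, and the
# MAX_PACKAGE_SIZE value are assumptions; only the test bodies above
# come from the source.
@mock.patch("os.path.getsize")
class TestSplitUploadFiles(unittest.TestCase):
    MAX_PACKAGE_SIZE = 1024

    def test_patched_getsize_returns_configured_size(self, getsize):
        getsize.return_value = 10 * self.MAX_PACKAGE_SIZE
        self.assertEqual(getsize("/tmp/test.gz"), 10 * self.MAX_PACKAGE_SIZE)


if __name__ == "__main__":
    unittest.main()
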
Example #3
    def test_split_upload_files_should_not_generate_empty_packages(
            self, getsize):
        # GIVEN
        entry = UploadEntry("/tmp/test.gz", "test.gz")
        # AND
        upload_entry = UploadEntry(entry.source_path, entry.target_path)
        size = 10 * self.MAX_PACKAGE_SIZE
        config = AttributeUploadConfiguration(size)
        getsize.return_value = size

        # EXPECT
        expected = UploadPackage()
        expected.update(entry, size)
        for package in split_upload_files(upload_entries={upload_entry},
                                          upload_configuration=config):
            self.assertFalse(package.is_empty())
Example #4
    def test_split_upload_files_should_generate_upload_files_list_for_only_one_file(
            self, getsize):
        # GIVEN
        entry = UploadEntry("/tmp/test.gz", "test.gz")
        size = 10 * self.MAX_PACKAGE_SIZE
        config = AttributeUploadConfiguration(size)
        getsize.return_value = size

        # EXPECT
        expected = UploadPackage()
        expected.update(entry, size)
        self.assertEqual(
            list(
                split_upload_files(upload_entries={entry},
                                   upload_configuration=config)),
            [expected],
        )
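
For context, here is a minimal, hypothetical splitter that reproduces the behavior the tests above assert: entries are grouped greedily up to a size limit, an oversized single entry still gets a package of its own, and no empty package is emitted. It is an illustration only, not neptune-client's actual `split_upload_files`.

import os
from typing import Iterable, List, Tuple


def split_entries(paths: Iterable[str], max_package_size: int) -> List[List[Tuple[str, int]]]:
    # Greedy grouping: start a new package when adding the next file would
    # exceed the limit, but never drop a file and never emit an empty package.
    packages: List[List[Tuple[str, int]]] = []
    current: List[Tuple[str, int]] = []
    current_size = 0
    for path in paths:
        size = os.path.getsize(path)
        if current and current_size + size > max_package_size:
            packages.append(current)
            current, current_size = [], 0
        current.append((path, size))
        current_size += size
    if current:
        packages.append(current)
    return packages
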
Example #5
def upload_file_set_attribute(
    swagger_client: SwaggerClient,
    container_id: str,
    attribute: str,
    file_globs: Iterable[str],
    reset: bool,
    multipart_config: Optional[MultipartConfig],
) -> List[NeptuneException]:
    unique_upload_entries = get_unique_upload_entries(file_globs)

    try:
        upload_configuration = DEFAULT_UPLOAD_CONFIG
        for package in split_upload_files(
            upload_entries=unique_upload_entries,
            upload_configuration=upload_configuration,
        ):
            if package.is_empty() and not reset:
                continue

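            # Decide how to ship this package: multiple entries, a single
            # directory entry, or an empty package (possible only while a
            # reset is still pending) are sent as one tar.gz archive; a
            # single regular file goes through the chunked/multipart path.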
            uploading_multiple_entries = package.len > 1
            creating_a_single_empty_dir = (
                package.len == 1
                and not package.items[0].is_stream()
                and os.path.isdir(package.items[0].source_path)
            )

            if (
                uploading_multiple_entries
                or creating_a_single_empty_dir
                or package.is_empty()
            ):
                data = compress_to_tar_gz_in_memory(upload_entries=package.items)
                url = build_operation_url(
                    swagger_client.swagger_spec.api_url,
                    swagger_client.api.uploadFileSetAttributeTar.operation.path_name,
                )
                result = upload_raw_data(
                    http_client=swagger_client.swagger_spec.http_client,
                    url=url,
                    data=data,
                    headers={"Content-Type": "application/octet-stream"},
                    query_params={
                        "experimentId": container_id,
                        "attribute": attribute,
                        "reset": str(reset),
                    },
                )
                _attribute_upload_response_handler(result)
            else:
                upload_entry = package.items[0]
                if multipart_config is None:
                    # the legacy upload procedure
                    url = build_operation_url(
                        swagger_client.swagger_spec.api_url,
                        swagger_client.api.uploadFileSetAttributeChunk.operation.path_name,
                    )
                    file_chunk_stream = FileChunkStream(
                        upload_entry=upload_entry,
                        upload_configuration=upload_configuration,
                    )
                    _upload_loop(
                        file_chunk_stream=file_chunk_stream,
                        http_client=swagger_client.swagger_spec.http_client,
                        url=url,
                        query_params={
                            "experimentId": container_id,
                            "attribute": attribute,
                            "reset": str(reset),
                            "path": upload_entry.target_path,
                        },
                    )
                else:
                    _multichunk_upload(
                        upload_entry,
                        query_params={
                            "experimentIdentifier": container_id,
                            "attribute": attribute,
                            "subPath": upload_entry.target_path,
                        },
                        swagger_client=swagger_client,
                        multipart_config=multipart_config,
                        target=FileUploadTarget.FILE_SET,
                    )

            reset = False
    except MetadataInconsistency as e:
        if len(e.args) == 1:
            return [e]
        else:
            return [MetadataInconsistency(desc) for desc in e.args]

    # No upload errors: return an empty list to match the annotated return type.
    return []
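
The archive-or-direct branch above reduces to a small predicate. A self-contained sketch of that decision, simplified from the original (it ignores stream entries, and the function name is hypothetical):

import os
from typing import List


def should_upload_as_tar(paths: List[str]) -> bool:
    # More than one entry, no entries at all, or a single directory all go
    # into one tar.gz archive; a single regular file is uploaded directly.
    if len(paths) != 1:
        return True
    return os.path.isdir(paths[0])


print(should_upload_as_tar(["a.py", "b.py"]))  # True
print(should_upload_as_tar([]))                # True
print(should_upload_as_tar(["a.py"]))          # False unless "a.py" is a directory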