def test_piping_upload_and_download_large_file(self):
        """Round-trip an 8MB file through azcopy pipes and verify via MD5.

        "Large" means azcopy must rotate its internal buffers while uploading.
        """
        # an 8MB source file forces buffer rotation during the piped upload
        name = "test_8mb_blob_piping_upload.txt"
        src_path = util.create_test_file(name, 8 * 1024 * 1024)

        # remember the source hash for the final comparison
        src_md5 = compute_md5(src_path)

        # pipe the file into azcopy; block sizes below 1 MB are no longer
        # supported, hence the 8 MB file with a 1 MB block size
        dst_url = util.get_resource_sas(name)
        upload_cmd = (util.Command("copy")
                      .add_arguments(dst_url)
                      .add_flags("block-size-mb", '1')
                      .add_flags("from-to", "PipeBlob")
                      .string())
        self.assertTrue(execute_command_with_pipe(upload_cmd, source_file_to_pipe=src_path))

        # pipe the blob back out of azcopy into a local file
        download_cmd = (util.Command("copy")
                        .add_arguments(dst_url)
                        .add_flags("block-size-mb", '1')
                        .add_flags("from-to", "BlobPipe")
                        .string())
        dst_path = util.test_directory_path + "/test_8mb_blob_piping_download.txt"
        self.assertTrue(execute_command_with_pipe(download_cmd, destination_file_to_pipe=dst_path))

        # the downloaded content must hash identically to the source
        self.assertEqual(src_md5, compute_md5(dst_path))
    def recursive_download_blob(self):
        """Upload a directory recursively, delete it locally, re-download, verify."""
        # make a directory holding 5 files of 1KB each
        dir_name = "dir_" + str(10) + "_files"
        local_dir = util.create_test_n_files(1024, 5, dir_name)

        # recursive upload of the whole directory into the container
        self.assertTrue(
            util.Command("copy").add_arguments(local_dir).add_arguments(util.test_container_url)
                .add_flags("log-level", "info").add_flags("recursive", "true")
                .execute_azcopy_copy_command())

        # validator confirms every file landed in the virtual directory
        destination_sas = util.get_resource_sas(dir_name)
        self.assertTrue(
            util.Command("testBlob").add_arguments(local_dir).add_arguments(destination_sas)
                .add_flags("is-object-dir", "true").execute_azcopy_verify())

        # wipe the local copy so the download genuinely recreates it
        try:
            shutil.rmtree(local_dir)
        except OSError:
            self.fail('error removing the file ' + local_dir)

        # recursive download of the virtual directory back to disk
        self.assertTrue(
            util.Command("copy").add_arguments(destination_sas).add_arguments(util.test_directory_path)
                .add_flags("log-level", "info").add_flags("recursive", "true")
                .execute_azcopy_copy_command())

        # validator compares the re-downloaded directory against the blobs
        self.assertTrue(
            util.Command("testBlob").add_arguments(local_dir).add_arguments(destination_sas)
                .add_flags("is-object-dir", "true").execute_azcopy_verify())
    def test_block_size(self):
        """Upload a 63MB file with a 4MB block size and check the block count."""
        block_size = 4 * 1024 * 1024
        file_size = 63 * 1024 * 1024

        # create the 63MB source file
        filename = "test63Mb_blob.txt"
        file_path = util.create_test_file(filename, file_size)

        # upload with an explicit block size (in bytes)
        destination_sas = util.get_resource_sas(filename)
        upload_ok = util.Command("copy").add_arguments(file_path).add_arguments(destination_sas) \
            .add_flags("log-level", "info").add_flags("block-size", str(block_size)) \
            .add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertTrue(upload_ok)

        # expected committed block count is ceil(file_size / block_size)
        number_of_blocks = -(-file_size // block_size)

        # validator checks both the content and the number of committed blocks
        verify_ok = util.Command("testBlob").add_arguments(file_path) \
            .add_arguments(destination_sas).add_flags("verify-block-size", "true") \
            .add_flags("number-blocks-or-pages", str(number_of_blocks)).execute_azcopy_verify()
        self.assertTrue(verify_ok)
    def test_blob_download_63mb_in_4mb(self):
        """Upload and download a 63MB blob using 4MB blocks, verifying both ways."""
        # 63MB source file
        file_name = "test_63mb_in4mb_upload.txt"
        file_path = util.create_test_file(file_name, 63 * 1024 * 1024)

        # upload with block-size-mb set to 4
        destination_sas = util.get_resource_sas(file_name)
        self.assertTrue(
            util.Command("copy").add_arguments(file_path).add_arguments(destination_sas)
                .add_flags("log-level", "info").add_flags("block-size-mb", "4")
                .execute_azcopy_copy_command())

        # validator confirms the uploaded blob matches the local file
        self.assertTrue(
            util.Command("testBlob").add_arguments(file_path)
                .add_arguments(destination_sas).execute_azcopy_verify())

        # download the blob back, again in parallel 4MB blocks
        download_file = util.test_directory_path + "/test_63mb_in4mb_download.txt"
        self.assertTrue(
            util.Command("copy").add_arguments(destination_sas).add_arguments(download_file)
                .add_flags("log-level", "info").add_flags("block-size-mb", "4")
                .execute_azcopy_copy_command())

        # validator confirms the downloaded file matches the blob
        self.assertTrue(
            util.Command("testBlob").add_arguments(download_file)
                .add_arguments(destination_sas).execute_azcopy_verify())
# Example #5
# 0
    def test_piping_upload_and_download_large_file(self):
        """Round-trip a 4MB file through azcopy pipes and compare MD5 hashes.

        "Large" means azcopy must rotate its internal buffers while uploading.
        """
        # 4MB source file forces buffer rotation during the piped upload
        filename = "test_4mb_blob_piping_upload.txt"
        source_file_path = util.create_test_file(filename, 4 * 1024 * 1024)

        # hash the source for the final comparison
        source_file_md5 = compute_md5(source_file_path)

        # pipe the file into azcopy with a 1KB block size
        destination_url = util.get_resource_sas(filename)
        azcopy_cmd = (util.Command("copy")
                      .add_arguments(destination_url)
                      .add_flags("block-size", '1024')
                      .string())
        self.assertTrue(
            execute_command_with_pipe(azcopy_cmd, source_file_to_pipe=source_file_path))

        # reuse the same command string to pipe the blob back into a local file
        destination_file_path = util.test_directory_path + "/test_1kb_blob_piping_download.txt"
        self.assertTrue(
            execute_command_with_pipe(azcopy_cmd, destination_file_to_pipe=destination_file_path))

        # the downloaded content must hash identically to the source
        destination_file_md5 = compute_md5(destination_file_path)
        self.assertEqual(source_file_md5, destination_file_md5)
    def util_test_1GB_blob_upload(self, use_oauth_session=False):
        """Upload a 1GB file (SAS or OAuth destination) and verify the blob.

        Args:
            use_oauth_session: when True, upload to the OAuth container and
                validate through the SAS-equipped validation URL.
        """
        # 1GB source file
        filename = "test_1G_blob.txt"
        file_path = util.create_test_file(filename, 1 * 1024 * 1024 * 1024)

        # pick the destination and validation URL according to the auth mode
        if use_oauth_session:
            dest = util.get_resource_from_oauth_container(filename)
            dest_validate = util.get_resource_from_oauth_container_validate(filename)
        else:
            dest = util.get_resource_sas(filename)
            dest_validate = dest

        # upload with a 100MB block size
        upload_ok = util.Command("copy").add_arguments(file_path).add_arguments(dest) \
            .add_flags("log-level", "info").add_flags("block-size", "104857600") \
            .add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertTrue(upload_ok)

        # validator: local file path first, blob URL second
        verify_ok = util.Command("testBlob").add_arguments(file_path) \
            .add_arguments(dest_validate).execute_azcopy_verify()
        self.assertTrue(verify_ok)
    def util_test_1kb_blob_upload(self, use_oauth_session=False):
        """Upload a single 1KB file (SAS or OAuth destination) and verify it."""
        # one 1KB source file
        filename = "test1KB.txt"
        file_path = util.create_test_file(filename, 1024)

        # pick the destination and validation URL according to the auth mode
        src = file_path
        if use_oauth_session:
            dest = util.get_resource_from_oauth_container(filename)
            dest_validate = util.get_resource_from_oauth_container_validate(filename)
        else:
            dest = util.get_resource_sas(filename)
            dest_validate = dest

        # upload the file
        self.assertTrue(
            util.Command("copy").add_arguments(src).add_arguments(dest)
                .add_flags("log-level", "info").add_flags("recursive", "true")
                .execute_azcopy_copy_command())

        # validator: local file path first, blob URL second
        self.assertTrue(
            util.Command("testBlob").add_arguments(file_path)
                .add_arguments(dest_validate).execute_azcopy_verify())
    def util_test_n_1kb_blob_upload(self,
                                    number_of_files,
                                    use_oauth_session=False):
        """Upload a directory of `number_of_files` 1KB files and verify it."""
        # build dir_<n>_files containing the 1KB files
        dir_name = "dir_" + str(number_of_files) + "_files"
        src_dir = util.create_test_n_files(1024, number_of_files, dir_name)

        # destination container and validation URL depend on the auth mode
        if use_oauth_session:
            dest = util.test_oauth_container_url
            dest_validate = util.get_resource_from_oauth_container_validate(dir_name)
        else:
            dest = util.test_container_url
            dest_validate = util.get_resource_sas(dir_name)

        # recursive upload of the whole directory
        self.assertTrue(
            util.Command("copy").add_arguments(src_dir).add_arguments(dest)
                .add_flags("recursive", "true").add_flags("log-level", "info")
                .execute_azcopy_copy_command())

        # validator compares the local directory with the virtual directory
        self.assertTrue(
            util.Command("testBlob").add_arguments(src_dir).add_arguments(dest_validate)
                .add_flags("is-object-dir", "true").execute_azcopy_verify())
    def test_blob_download_preserve_last_modified_time(self):
        """Download with preserve-last-modified-time and verify the local mtime."""
        # 2KB source file
        filename = "test_upload_preserve_last_mtime.txt"
        file_path = util.create_test_file(filename, 2048)

        # upload the file
        destination_sas = util.get_resource_sas(filename)
        self.assertTrue(
            util.Command("copy").add_arguments(file_path).add_arguments(destination_sas)
                .add_flags("log-level", "info").add_flags("recursive", "true")
                .execute_azcopy_copy_command())

        # validator confirms the upload
        self.assertTrue(
            util.Command("testBlob").add_arguments(file_path)
                .add_arguments(destination_sas).execute_azcopy_verify())

        # wait so a fresh download timestamp would clearly differ from the
        # blob's last-modified time
        time.sleep(5)

        # download with the preserve-last-modified-time flag enabled
        download_file_name = util.test_directory_path + "/test_download_preserve_last_mtime.txt"
        self.assertTrue(
            util.Command("copy").add_arguments(destination_sas).add_arguments(download_file_name)
                .add_flags("log-level", "info").add_flags("preserve-last-modified-time", "true")
                .execute_azcopy_copy_command())

        # validator checks the local mtime matches the blob's last-modified time
        self.assertTrue(
            util.Command("testBlob").add_arguments(download_file_name)
                .add_arguments(destination_sas)
                .add_flags("preserve-last-modified-time", "true").execute_azcopy_verify())
    def test_piping_upload_and_download_small_file(self):
        """Round-trip a 1KB file through azcopy pipes and compare MD5 hashes.

        "Small" means the file fits in a single buffer, so azcopy never has to
        rotate buffers (the default block size is assumed to be much larger
        than 1KB).
        """
        # 1KB source file
        filename = "test_1kb_blob_piping_upload.txt"
        source_path = util.create_test_file(filename, 1024)

        # hash the source for the final comparison
        source_md5 = compute_md5(source_path)

        # pipe the file into azcopy
        destination_url = util.get_resource_sas(filename)
        upload_cmd = (util.Command("copy")
                      .add_arguments(destination_url)
                      .add_flags("from-to", "PipeBlob")
                      .string())
        self.assertTrue(execute_command_with_pipe(upload_cmd, source_file_to_pipe=source_path))

        # pipe the blob back out into a local file
        download_cmd = (util.Command("copy")
                        .add_arguments(destination_url)
                        .add_flags("from-to", "BlobPipe")
                        .string())
        download_path = util.test_directory_path + "/test_1kb_blob_piping_download.txt"
        self.assertTrue(execute_command_with_pipe(download_cmd, destination_file_to_pipe=download_path))

        # the downloaded content must hash identically to the source
        self.assertEqual(source_md5, compute_md5(download_path))
    def test_sync_blob_download_without_wildcards(self):
        """Upload a directory, mirror it locally, then check sync has no work.

        After the download (with preserve-last-modified-time) the local copy and
        the container are already in sync, so the final `sync` command is
        expected to report failure (nothing to transfer) — hence assertFalse.
        """
        # create a directory with 10 files of 1KB each
        dir_name = "sync_download_without_wildcards"
        dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

        # recursive upload of the directory
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # validator confirms the upload
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # download back over the source, preserving last-modified times so the
        # local files carry the blobs' timestamps
        result = util.Command("copy").add_arguments(dir_sas).add_arguments(util.test_directory_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output", "json"). \
            add_flags("preserve-last-modified-time", "true").execute_azcopy_copy_command_get_output()
        # assertNotEqual: assertNotEquals is a deprecated alias removed in
        # Python 3.12
        self.assertNotEqual(result, None)

        # validator confirms the downloaded directory
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # source and destination are already identical, so sync should have
        # nothing to do and report failure
        result = util.Command("sync").add_arguments(dir_sas).add_arguments(dir_n_files_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertFalse(result)
    def test_blob_download_with_special_characters(self):
        """Download a blob whose name contains special characters and verify it."""
        filename_special_characters = "abc|>rd*"
        # pre-encode '*' so the request URL is valid
        resource_url = util.get_resource_sas(
            filename_special_characters.replace("*", "%2A"))

        # create a 1KB blob with the special-character name directly in the service
        created = util.Command("create").add_arguments(resource_url) \
            .add_flags("serviceType", "Blob").add_flags("resourceType", "SingleFile") \
            .add_flags("blob-size", "1024").execute_azcopy_verify()
        self.assertTrue(created)

        # download that blob into the test directory
        downloaded = util.Command("copy").add_arguments(resource_url) \
            .add_arguments(util.test_directory_path).add_flags("log-level", "info") \
            .execute_azcopy_copy_command()
        self.assertTrue(downloaded)

        # on Windows the special characters are percent-encoded in the local name
        expected_filename = filename_special_characters
        if os.name == "nt":
            expected_filename = urllib.parse.quote_plus(filename_special_characters)

        # the downloaded file must exist under the expected (possibly encoded) name
        filepath = util.test_directory_path + "/" + expected_filename
        self.assertTrue(os.path.isfile(filepath))

        # validator compares the local file against the blob
        verified = util.Command("testBlob").add_arguments(filepath) \
            .add_arguments(resource_url).execute_azcopy_verify()
        self.assertTrue(verified)
# Example #13
# 0
    def test_sync_entire_directory_with_local(self):
        """Sync a container virtual directory down to local and back up again."""
        # directory with 10 files plus a sub-directory holding 10 more
        dir_name = "dir_sync_test"
        dir_path = util.create_test_n_files(1024, 10, dir_name)
        sub_dir_name = os.path.join(dir_name, "sub_dir_sync_test")
        util.create_test_n_files(1024, 10, sub_dir_name)

        # recursive upload of all 20 files
        self.assertTrue(
            util.Command("copy").add_arguments(dir_path).add_arguments(util.test_container_url)
                .add_flags("recursive", "true").add_flags("log-level", "info")
                .execute_azcopy_copy_command())

        # validator confirms the upload
        vdir_sas = util.get_resource_sas(dir_name)
        self.assertTrue(
            util.Command("testBlob").add_arguments(dir_path).add_arguments(vdir_sas)
                .add_flags("is-object-dir", "true").execute_azcopy_verify())

        # sync container -> local
        self.assertTrue(
            util.Command("sync").add_arguments(vdir_sas).add_arguments(dir_path)
                .add_flags("log-level", "info").execute_azcopy_copy_command())

        # recreate the sub-directory files (fresh lmt) and sync local -> container
        util.create_test_n_files(1024, 10, sub_dir_name)
        self.assertTrue(
            util.Command("sync").add_arguments(dir_path).add_arguments(vdir_sas)
                .add_flags("log-level", "info").execute_azcopy_copy_command())
    def test_set_block_blob_tier(self):
        """Upload blobs with each block-blob tier and verify the tier was set."""

        def upload_and_check(filename, tier_flag, expected_tier):
            # upload a 10KB file with the requested block-blob-tier
            file_path = util.create_test_file(filename, 10 * 1024)
            destination_sas = util.get_resource_sas(filename)
            uploaded = util.Command("copy").add_arguments(file_path) \
                .add_arguments(destination_sas).add_flags("log-level", "info") \
                .add_flags("block-blob-tier", tier_flag).execute_azcopy_copy_command()
            self.assertTrue(uploaded)
            # validator checks the blob now carries the expected tier
            verified = util.Command("testBlob").add_arguments(file_path) \
                .add_arguments(destination_sas) \
                .add_flags("blob-tier", expected_tier).execute_azcopy_verify()
            self.assertTrue(verified)

        upload_and_check("test_hot_block_blob_tier.txt", "Hot", "Hot")
        upload_and_check("test_cool_block_blob_tier.txt", "Cool", "Cool")
        # the tier flag is passed lowercase here while the validator expects
        # "Archive", matching the original test's inputs exactly
        upload_and_check("test_archive_block_blob_tier.txt", "archive", "Archive")
# Example #15
# 0
    def test_sync_single_blob(self):
        """Upload a 1KB blob, then sync it down to local and back up."""
        # 1KB local file and its blob URL
        filename = "test_1kb_blob_sync.txt"
        file_path = util.create_test_file(filename, 1024)
        blob_path = util.get_resource_sas(filename)

        # upload the file
        self.assertTrue(
            util.Command("cp").add_arguments(file_path).add_arguments(blob_path)
                .add_flags("log-level", "info").execute_azcopy_copy_command())

        # validator: local path first, blob URL second
        resource_url = util.get_resource_sas(filename)
        self.assertTrue(
            util.Command("testBlob").add_arguments(file_path)
                .add_arguments(resource_url).execute_azcopy_verify())

        # sync blob -> local
        self.assertTrue(
            util.Command("sync").add_arguments(blob_path).add_arguments(file_path)
                .add_flags("log-level", "info").execute_azcopy_copy_command())

        # recreate the local file to reset its lmt, then sync local -> blob
        util.create_test_file(filename, 1024)
        self.assertTrue(
            util.Command("sync").add_arguments(file_path).add_arguments(blob_path)
                .add_flags("log-level", "info").execute_azcopy_copy_command())
    def test_download_1kb_blob_to_null(self):
        """Upload a 1KB blob, then download it to os.devnull.

        Downloading to the null device exercises the download (and md5-check)
        path without writing a real file. TODO: there is no direct check-md5
        verification yet; remove this note when fault induction is introduced.
        """
        # 1KB source file
        filename = "test_1kb_blob_upload_download_null.txt"
        file_path = util.create_test_file(filename, 1024)

        # upload the file to the container
        src = file_path
        dst = util.test_container_url
        result = util.Command("copy").add_arguments(src).add_arguments(dst). \
            add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # validator confirms the upload
        resource_url = util.get_resource_sas(filename)
        result = util.Command("testBlob").add_arguments(
            file_path).add_arguments(resource_url).execute_azcopy_verify()
        self.assertTrue(result)

        # download the blob to the null device; the original code built this
        # command but never executed or asserted it, so the download path was
        # silently untested
        src = util.get_resource_sas(filename)
        dst = os.devnull
        result = util.Command("copy").add_arguments(src).add_arguments(dst). \
            add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)
    def test_guess_mime_type(self):
        """Upload an HTML file and verify azcopy guessed its content-type."""
        # create a test html file
        filename = "test_guessmimetype.html"
        file_path = util.create_test_html_file(filename)

        # upload the html file
        destination_sas = util.get_resource_sas(filename)
        result = util.Command("copy").add_arguments(file_path).add_arguments(destination_sas).add_flags("log-level", "info"). \
            add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertTrue(result)

        # run the validator to verify the content-type; the original code only
        # built the Command object (always truthy) and never executed it, so
        # the assertion below could never fail
        result = util.Command("testBlob").add_arguments(file_path).add_arguments(destination_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_verify()
        self.assertTrue(result)
    def test_63mb_blob_upload(self):
        """Upload a single blob with a 100MB block size and verify it.

        With the block size larger than the file, the blob commits as a single
        block.
        """
        # NOTE(review): despite the test name and filename, the file created
        # here is 8MB, not 63MB — confirm whether the size was reduced
        # intentionally.
        filename = "test63Mb_blob.txt"
        file_path = util.create_test_file(filename, 8 * 1024 * 1024)

        # execute azcopy copy upload.
        dest = util.get_resource_sas(filename)
        result = util.Command("copy").add_arguments(file_path).add_arguments(dest) \
            .add_flags("log-level", "info").add_flags("block-size-mb", "100").add_flags("recursive", "true"). \
            execute_azcopy_copy_command()
        self.assertTrue(result)

        # Verifying the uploaded blob
        # calling the testBlob validator to verify whether blob has been successfully uploaded or not
        result = util.Command("testBlob").add_arguments(file_path).add_arguments(dest).execute_azcopy_verify()
        self.assertTrue(result)
    def test_0KB_blob_upload(self):
        """Upload an empty (0-byte) file and verify the resulting blob."""
        # zero-byte source file
        filename = "test0KB.txt"
        file_path = util.create_test_file(filename, 0)

        # upload the empty file
        src = file_path
        dest = util.get_resource_sas(filename)
        self.assertTrue(
            util.Command("copy").add_arguments(src).add_arguments(dest)
                .add_flags("log-level", "info").add_flags("recursive", "true")
                .execute_azcopy_copy_command())

        # validator: local path first, blob URL second
        self.assertTrue(
            util.Command("testBlob").add_arguments(file_path)
                .add_arguments(dest).execute_azcopy_verify())
    def test_page_range_for_complete_sparse_file(self):
        """Upload a fully sparse 4MB page blob and expect zero page ranges."""
        # completely sparse (all-zero) 4MB vhd
        file_name = "sparse_file.vhd"
        file_path = util.create_complete_sparse_file(file_name, 4 * 1024 * 1024)

        # upload as a page blob with a 4MB block size
        destination_sas = util.get_resource_sas(file_name)
        self.assertTrue(
            util.Command("copy").add_arguments(file_path).add_arguments(destination_sas)
                .add_flags("log-level", "info").add_flags("block-size", "4194304")
                .add_flags("blobType", "PageBlob").execute_azcopy_copy_command())

        # a fully sparse file uploads no data, so the blob must have 0 page ranges
        self.assertTrue(
            util.Command("testBlob").add_arguments(file_path).add_arguments(destination_sas)
                .add_flags("blob-type", "PageBlob").add_flags("verify-block-size", "true")
                .add_flags("number-blocks-or-pages", "0").execute_azcopy_verify())
# Example #21
# 0
    def test_remove_virtual_directory(self):
        """Remove a virtual directory recursively and confirm it is empty."""
        # upload dir_10_files_rm containing 10 files of 1KB each
        dir_name = "dir_" + str(10) + "_files_rm"
        dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)
        self.assertTrue(
            util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url)
                .add_flags("recursive", "true").add_flags("log-level", "info")
                .execute_azcopy_copy_command())

        # recursively remove the virtual directory
        destination = util.get_resource_sas(dir_name)
        self.assertTrue(
            util.Command("rm").add_arguments(destination)
                .add_flags("recursive", "true").execute_azcopy_copy_command())

        # listing the removed directory should now report zero resources
        self.assertTrue(
            util.Command("list").add_arguments(destination)
                .add_flags("resource-num", "0").execute_azcopy_verify())
    def test_page_blob_upload_partial_sparse_file(self):
        """Upload a half-sparse 16MB page blob and verify its page-range count."""
        # partially sparse 16MB vhd
        file_name = "test_partial_sparse_file.vhd"
        total_size = 16 * 1024 * 1024
        file_path = util.create_partial_sparse_file(file_name, total_size)

        # upload as a page blob with a 4MB block size
        destination_sas = util.get_resource_sas(file_name)
        self.assertTrue(
            util.Command("copy").add_arguments(file_path).add_arguments(destination_sas)
                .add_flags("log-level", "info").add_flags("block-size", "4194304")
                .add_flags("blobType", "PageBlob").execute_azcopy_copy_command())

        # half of the 4MB regions contain data, so expect (size / blockSize) / 2
        # page ranges
        page_size = 4 * 1024 * 1024
        number_of_page_ranges = int((total_size / page_size) / 2)
        self.assertTrue(
            util.Command("testBlob").add_arguments(file_path).add_arguments(destination_sas)
                .add_flags("blob-type", "PageBlob").add_flags("verify-block-size", "true")
                .add_flags("number-blocks-or-pages", str(number_of_page_ranges))
                .execute_azcopy_verify())
    def test_upload_download_file_non_ascii_characters(self):
        """Upload and re-download a file whose name has non-ASCII characters.

        Parses azcopy's JSON summary output and asserts exactly one transfer
        completed with no failures, in each direction.
        """
        file_name = u"Espa\u00F1a"
        file_path = util.create_file_in_path(util.test_directory_path,
                                             file_name, "non ascii characters")
        # upload the file, capturing JSON output
        result = util.Command("copy").add_arguments(file_path).add_arguments(util.test_container_url).\
                add_flags("log-level", "Info").add_flags("output", "json").execute_azcopy_copy_command_get_output()
        result = util.parseAzcopyOutput(result)
        try:
            # parse the JSON summary into an attribute-accessible tuple
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except Exception:  # narrowed from bare except: don't swallow SystemExit
            self.fail('error parsing the output in Json Format')

        # assertEqual: assertEquals is a deprecated alias removed in Python 3.12
        self.assertEqual(x.TransfersCompleted, 1)
        self.assertEqual(x.TransfersFailed, 0)

        # download into a fresh directory (recreate it if it already exists)
        dir_path = os.path.join(util.test_directory_path, "non-ascii-dir")
        try:
            shutil.rmtree(dir_path)
        except OSError:
            pass  # directory did not exist yet; nothing to remove
        finally:
            os.mkdir(dir_path)
        destination_url = util.get_resource_sas(file_name)
        result = util.Command("copy").add_arguments(destination_url).add_arguments(dir_path).\
                add_flags("log-level", "Info").add_flags("output", "json").execute_azcopy_copy_command_get_output()
        result = util.parseAzcopyOutput(result)
        try:
            # parse the JSON summary of the download
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except Exception:
            self.fail('error parsing the output in Json Format')
        self.assertEqual(x.TransfersCompleted, 1)
        self.assertEqual(x.TransfersFailed, 0)
    def util_test_page_blob_upload_1mb(self, use_oauth=False):
        """Upload a 1MB page blob (SAS or OAuth destination) and verify it."""
        # 1MB vhd source file
        file_name = "test_page_blob_1mb.vhd"
        file_path = util.create_test_file(file_name, 1024 * 1024)

        # pick the destination and validation URL according to the auth mode
        if use_oauth:
            dest = util.get_resource_from_oauth_container(file_name)
            dest_validate = util.get_resource_from_oauth_container_validate(file_name)
        else:
            dest = util.get_resource_sas(file_name)
            dest_validate = dest

        # upload as a page blob with a 4MB block size
        self.assertTrue(
            util.Command("copy").add_arguments(file_path).add_arguments(dest)
                .add_flags("log-level", "info").add_flags("block-size", "4194304")
                .add_flags("blobType", "PageBlob").execute_azcopy_copy_command())

        # validator checks the uploaded page blob
        self.assertTrue(
            util.Command("testBlob").add_arguments(file_path).add_arguments(dest_validate)
                .add_flags("blob-type", "PageBlob").execute_azcopy_verify())
    def test_blob_metaData_content_encoding_content_type(self):
        """Upload a blob with custom metadata, content-type and
        content-encoding, then verify all three round-trip correctly."""
        # Create a 2 KB test file.
        filename = "test_mcect.txt"
        file_path = util.create_test_file(filename, 2048)

        # Upload with explicit metadata, content-type and content-encoding;
        # no-guess-mime-type stops azcopy from overriding the content-type.
        destination_sas = util.get_resource_sas(filename)
        upload_cmd = util.Command("copy").add_arguments(file_path).add_arguments(destination_sas)
        upload_cmd.add_flags("log-level", "info").add_flags("recursive", "true")
        upload_cmd.add_flags("metadata", "author=prjain;viewport=width;description=test file")
        upload_cmd.add_flags("content-type", "testctype").add_flags("content-encoding", "testenc")
        upload_cmd.add_flags("no-guess-mime-type", "true")
        self.assertTrue(upload_cmd.execute_azcopy_copy_command())

        # Validate the upload: source file is the first argument to the
        # validator, destination blob the second.
        verify_cmd = util.Command("testBlob").add_arguments(file_path).add_arguments(destination_sas)
        verify_cmd.add_flags("metadata", "author=prjain;viewport=width;description=test file")
        verify_cmd.add_flags("content-type", "testctype").add_flags("content-encoding", "testenc")
        verify_cmd.add_flags("no-guess-mime-type", "true")
        self.assertTrue(verify_cmd.execute_azcopy_verify())
    def test_blob_download_list_of_files_flag(self):
        """Verify azcopy download behavior when blobs are selected with the
        list-of-files flag: first the whole directory, then a sub-directory.
        """
        # Create a directory with 10 files of 1 KB each.
        dir_name = "dir_download_list_of_files_flag"
        dir_path = util.create_test_n_files(1024, 10, dir_name)

        # Create a sub-directory ("logs") with 10 more files.
        sub_dir_name_1 = os.path.join(dir_name, "logs")
        sub_dir_path_1 = util.create_test_n_files(1024, 10, sub_dir_name_1)

        # Create a sub-directory inside the sub-directory ("logs/abc") with 10 more files.
        sub_dir_name_2 = os.path.join(sub_dir_name_1, "abc")
        sub_dir_path_2 = util.create_test_n_files(1024, 10, sub_dir_name_2)

        # Upload the directory (30 files in total) recursively.
        result = util.Command("copy").add_arguments(dir_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # Validate the uploaded directory.
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_path).add_arguments(dir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # Download the entire directory via the list-of-files flag.
        # (renamed from 'dict', which shadowed the builtin)
        files_to_download = {"Files": [dir_name]}
        list_file_path = util.create_json_file("testfile", files_to_download)
        result = util.Command("copy").add_arguments(util.test_container_url).add_arguments(dir_path). \
            add_flags("log-level", "Info").add_flags("output","json").add_flags("recursive","true") \
            .add_flags("list-of-files", list_file_path).execute_azcopy_copy_command_get_output()
        # Parse the result to get the last job progress summary.
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
        except Exception:
            self.fail('error parsing the output in Json Format')
        # The entire directory is downloaded, so all 30 transfers should succeed.
        self.assertEqual(x.TransfersCompleted, 30)
        self.assertEqual(x.TransfersFailed, 0)

        # Recreate the resource sas and download only the "logs" sub-directory.
        dir_sas = util.get_resource_sas(dir_name)
        files_to_download = {"Files": ["logs"]}
        list_file_path = util.create_json_file("testfile", files_to_download)
        result = util.Command("copy").add_arguments(dir_sas).add_arguments(dir_path). \
            add_flags("log-level", "Info").add_flags("output","json").add_flags("recursive","true"). \
            add_flags("list-of-files", list_file_path).execute_azcopy_copy_command_get_output()
        # Parse the result to get the last job progress summary.
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
        except Exception:
            self.fail('error parsing the output in Json Format')
        # Only the "logs" sub-tree (20 files) is downloaded.
        self.assertEqual(x.TransfersCompleted, 20)
        self.assertEqual(x.TransfersFailed, 0)
    def test_blob_download_wildcard_recursive_true_1(self):
        """Verify azcopy download behavior when wildcards are provided in
        the source and the recursive flag is set to true.
        example: src = https://<container>/<vd-1>/*?<sig> recursive = true
        (The original comment said recursive=false, contradicting the code.)
        """
        # Create a directory with 10 files of 1 KB each.
        dir_name = "dir_download_wildcard_recursive=true"
        dir_path = util.create_test_n_files(1024, 10, dir_name)

        # Create a sub-directory ("logs") with 10 more files.
        sub_dir_name_1 = os.path.join(dir_name, "logs")
        sub_dir_path_1 = util.create_test_n_files(1024, 10, sub_dir_name_1)

        # Create a sub-directory inside the sub-directory ("logs/abc") with 10 more files.
        sub_dir_name_2 = os.path.join(sub_dir_name_1, "abc")
        sub_dir_path_2 = util.create_test_n_files(1024, 10, sub_dir_name_2)

        # Upload the directory (30 files in total) recursively.
        result = util.Command("copy").add_arguments(dir_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # Validate the uploaded directory.
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_path).add_arguments(dir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # Create the resource sas with a trailing '*' wildcard and download.
        dir_sas_with_wildcard = util.get_resource_sas(dir_name + "/*")
        result = util.Command("copy").add_arguments(dir_sas_with_wildcard).add_arguments(dir_path). \
            add_flags("log-level", "Info").add_flags("output","json").add_flags("recursive","true").\
            execute_azcopy_copy_command_get_output()
        # Parse the result to get the last job progress summary.
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
        except Exception:
            self.fail('error parsing the output in Json Format')
        # '*' at the end of the sas plus recursive=true downloads every file
        # in dir and in all sub-dirs: 30 expected transfers.
        self.assertEqual(x.TransfersCompleted, 30)
        self.assertEqual(x.TransfersFailed, 0)

        # Create the resource sas with a '*/*' wildcard and download.
        dir_sas_with_wildcard = util.get_resource_sas(dir_name + "/*/*")
        result = util.Command("copy").add_arguments(dir_sas_with_wildcard).add_arguments(dir_path). \
            add_flags("log-level", "Info").add_flags("output","json") \
            .add_flags("recursive", "true").execute_azcopy_copy_command_get_output()
        # Parse the result to get the last job progress summary.
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
        except Exception:
            self.fail('error parsing the output in Json Format')
        # '*/*' skips the files immediately inside dir; only the sub-dir logs
        # and the sub-dir abc inside logs are downloaded: 20 expected transfers.
        self.assertEqual(x.TransfersCompleted, 20)
        self.assertEqual(x.TransfersFailed, 0)
    def test_sync_blob_download_with_wildcards(self):
        """Verify sync (blob -> local) behavior when the source sas contains
        wildcards: sync fails when both sides match, and transfers only the
        locally-deleted files otherwise."""
        # Create a directory with 10 files of 1 KB each.
        dir_name = "sync_download_with_wildcards"
        dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

        # Upload the directory recursively.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # Validate the uploaded directory.
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # Download the destination to the source to match the last modified time.
        result = util.Command("copy").add_arguments(dir_sas).add_arguments(util.test_directory_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output", "json"). \
            add_flags("preserve-last-modified-time", "true").execute_azcopy_copy_command_get_output()
        self.assertNotEqual(result, None)

        # Validate the downloaded dir.
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # Add "*" at the end of the dir sas; since both source and destination
        # are in sync, the sync command is expected to fail.
        dir_sas = util.append_text_path_resource_sas(dir_sas, "*")
        result = util.Command("sync").add_arguments(dir_sas).add_arguments(dir_n_files_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertFalse(result)

        # Add two sub-directories with 10 files each.
        subdir1 = os.path.join(dir_name, "subdir1")
        subdir1_file_path = util.create_test_n_files(1024, 10, subdir1)

        subdir2 = os.path.join(dir_name, "subdir2")
        subdir2_file_path = util.create_test_n_files(1024, 10, subdir2)

        # Upload the directory again (now 30 files).
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # Validate the uploaded directory.
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # Download the directory to match the blob modified time.
        result = util.Command("copy").add_arguments(dir_sas).add_arguments(util.test_directory_path). \
            add_flags("log-level", "Info").add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertTrue(result)

        # Sync with extra wildcards; both sides are still in sync, so it fails.
        dir_sas = util.append_text_path_resource_sas(dir_sas, "*/*.txt")
        result = util.Command("sync").add_arguments(dir_sas).add_arguments(dir_n_files_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertFalse(result)

        # Delete 5 files inside each sub-directory locally.
        for r in range(5, 10):
            filename = "test101024_" + str(r) + ".txt"
            filepath = os.path.join(subdir1_file_path, filename)
            try:
                os.remove(filepath)
            except OSError:
                self.fail('error deleting the file ' + filepath)
            filepath = os.path.join(subdir2_file_path, filename)
            try:
                os.remove(filepath)
            except OSError:
                self.fail('error deleting the file ' + filepath)
        # 10 files have been deleted locally; syncing remote to local should
        # download those 10 files.
        result = util.Command("sync").add_arguments(dir_sas).add_arguments(dir_n_files_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output","json").\
            execute_azcopy_copy_command_get_output()
        # Parse the result to get the last job progress summary.
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
        except Exception:
            self.fail('error parsing the output in Json Format')
        # Number of expected transfers is 10 since 10 files were deleted.
        self.assertEqual(x.TransfersCompleted, 10)
        self.assertEqual(x.TransfersFailed, 0)
    def test_sync_local_to_blob_with_wildCards(self):
        """Verify sync (local -> blob) behavior when the local source path
        contains wildcards: sync fails when both sides match, and transfers
        only the locally-deleted files otherwise."""
        # Create 10 files inside the dir 'sync_local_blob_wc'.
        dir_name = "sync_local_blob_wc"
        dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

        # Create sub-dir inside dir sync_local_blob_wc with 10 files of 1 KB.
        sub_dir_1 = os.path.join(dir_name, "sub_dir_1")
        sub_dir1_n_file_path = util.create_test_n_files(1024, 10, sub_dir_1)

        # Create a second sub-dir inside dir sync_local_blob_wc.
        sub_dir_2 = os.path.join(dir_name, "sub_dir_2")
        sub_dir2_n_file_path = util.create_test_n_files(1024, 10, sub_dir_2)

        # Upload the directory with 30 files in it.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # Validate the uploaded directory.
        destination = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(destination). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # Download the destination to the source to match the last modified time.
        result = util.Command("copy").add_arguments(destination).add_arguments(util.test_directory_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output", "json"). \
            add_flags("preserve-last-modified-time", "true").execute_azcopy_copy_command_get_output()
        self.assertNotEqual(result, None)

        # Add a wildcard at the end of the dir path and sync; since source and
        # destination are in sync, azcopy should exit with an error code.
        dir_n_files_path_wcard = os.path.join(dir_n_files_path, "*")
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("sync").add_arguments(dir_n_files_path_wcard).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertFalse(result)

        # Sync all files ending with .txt inside all sub-dirs (dir/*/*.txt);
        # still in sync, so it should fail again.
        sd_dir_n_files_path_wcard = os.path.join(dir_n_files_path_wcard,
                                                 "*.txt")
        result = util.Command("sync").add_arguments(sd_dir_n_files_path_wcard).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertFalse(result)

        # Remove 5 files inside both sub-directories.
        for r in range(5, 10):
            filename = "test101024_" + str(r) + ".txt"
            filepath = os.path.join(sub_dir1_n_file_path, filename)
            try:
                os.remove(filepath)
            except OSError:
                self.fail('error deleting the file ' + filepath)
            filepath = os.path.join(sub_dir2_n_file_path, filename)
            try:
                os.remove(filepath)
            except OSError:
                self.fail('error deleting the file ' + filepath)
        # 5 files were deleted in each sub-dir, so the sync should perform
        # 10 transfers (10 blobs deleted from the container).
        sd_dir_n_files_path_wcard = os.path.join(dir_n_files_path_wcard,
                                                 "*.txt")
        result = util.Command("sync").add_arguments(sd_dir_n_files_path_wcard).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output",
                                                                                  "json").execute_azcopy_copy_command_get_output()
        # Parse the result to get the last job progress summary.
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except Exception:
            self.fail('error parsing the output in Json Format')
        # Number of expected transfers is 10 since 10 files were deleted.
        self.assertEqual(x.TransfersCompleted, 10)
        self.assertEqual(x.TransfersFailed, 0)
    def test_sync_local_to_blob_without_wildCards(self):
        """Verify sync (local -> blob) behavior without wildcards: sync fails
        when both sides match, then mirrors sub-dir deletion, file deletion,
        and a modified-time change."""
        # Create 10 files inside the dir 'sync_local_blob'.
        dir_name = "sync_local_blob"
        dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

        # Create a sub-dir inside dir sync_local_blob with 10 files of 1 KB.
        sub_dir_name = os.path.join(dir_name, "sub_dir_sync_local_blob")
        sub_dir_n_file_path = util.create_test_n_files(1024, 10, sub_dir_name)

        # Upload the directory with 20 files in it.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)
        # Validate the uploaded directory.
        destination = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(destination). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # Download the destination to the source to match the last modified time.
        result = util.Command("copy").add_arguments(destination).add_arguments(util.test_directory_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output", "json"). \
            add_flags("preserve-last-modified-time", "true").execute_azcopy_copy_command_get_output()
        self.assertNotEqual(result, None)

        # Execute a sync command; since source and destination are in sync,
        # there is nothing to do and azcopy should exit with an error code.
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("sync").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertFalse(result)
        try:
            shutil.rmtree(sub_dir_n_file_path)
        except OSError:
            self.fail('error deleting the directory ' + sub_dir_n_file_path)

        # The entire sub-dir was deleted locally; sync should delete it on the
        # container, so the number of successful transfers should be 10.
        result = util.Command("sync").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output",
                                                                                  "json").execute_azcopy_copy_command_get_output()
        # Parse the result to get the last job progress summary.
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except Exception:
            self.fail('error parsing the output in Json Format')

        # Number of expected transfers is 10 since the deleted sub-dir had 10 files.
        self.assertEqual(x.TransfersCompleted, 10)
        self.assertEqual(x.TransfersFailed, 0)

        # Delete 5 files inside the directory.
        for r in range(5, 10):
            filename = "test101024_" + str(r) + ".txt"
            filepath = os.path.join(dir_n_files_path, filename)
            try:
                os.remove(filepath)
            except OSError:
                self.fail('error deleting the file ' + filepath)

        # Sync should delete the removed files on the container; the number
        # of successful transfers should be 5.
        result = util.Command("sync").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output",
                                                                                  "json").execute_azcopy_copy_command_get_output()
        # Parse the result to get the last job progress summary.
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except Exception:
            self.fail('error parsing the output in Json Format')

        # Number of expected transfers is 5 since 5 files were deleted.
        # (The original comment incorrectly said 10.)
        self.assertEqual(x.TransfersCompleted, 5)
        self.assertEqual(x.TransfersFailed, 0)

        # Bump the modified time of one file and sync again; expected number
        # of transfers is 1.
        filepath = os.path.join(dir_n_files_path, "test101024_0.txt")
        st = os.stat(filepath)
        atime = st[ST_ATIME]  # access time
        mtime = st[ST_MTIME]  # modification time
        new_mtime = mtime + (4 * 3600)  # new modification time, 4 hours later
        os.utime(filepath, (atime, new_mtime))
        # Sync source to destination.
        result = util.Command("sync").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output",
                                                                                  "json").execute_azcopy_copy_command_get_output()
        # Parse the result to get the last job progress summary.
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except Exception:
            self.fail('error parsing the output in Json Format')
        # Number of expected transfers is 1 since 1 file's modified time changed.
        self.assertEqual(x.TransfersCompleted, 1)
        self.assertEqual(x.TransfersFailed, 0)