Code example #1
    def test_download_blob_include_flag(self):
        # create dir and 10 files of size 1024 inside it
        dir_name = "dir_include_flag_set_download"
        dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

        # create sub-dir inside dir dir_include_flag_set_download
        # create 10 files inside the sub-dir of size 1024
        sub_dir_name = os.path.join(dir_name,
                                    "sub_dir_include_flag_set_download")
        sub_dir_n_file_path = util.create_test_n_files(1024, 10, sub_dir_name)

        # uploading the directory with 20 files in it.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # execute the validator to validate the uploaded directory.
        destination = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(destination). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # download from container with include flags
        destination_sas = util.get_resource_sas(dir_name)
        result = util.Command("copy").add_arguments(destination_sas).add_arguments(util.test_directory_path). \
            add_flags("recursive", "true").add_flags("log-level", "info").add_flags("output-type", "json"). \
            add_flags("include-pattern", "test101024_1.txt;test101024_2.txt;test101024_3.txt"). \
            execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        self.assertEquals(x.TransfersCompleted, "6")
        self.assertEquals(x.TransfersFailed, "0")

        # download from container with sub-dir in include flags
        # TODO: Make this use include-path in the DL refactor
        destination_sas = util.get_resource_sas(dir_name)
        result = util.Command("copy").add_arguments(destination_sas).add_arguments(util.test_directory_path). \
            add_flags("recursive", "true").add_flags("log-level", "info").add_flags("output-type", "json"). \
            add_flags("include-path", "sub_dir_include_flag_set_download/"). \
            execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        self.assertEquals(x.TransfersCompleted, "10")
        self.assertEquals(x.TransfersFailed, "0")
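
Every example on this page ends with the same parsing idiom: util.parseAzcopyOutput reduces the raw azcopy output to the JSON of the last job progress summary, and json.loads with a namedtuple object_hook turns it into an object exposing TransfersCompleted / TransfersFailed. A small helper could factor that idiom out of the tests; the sketch below is hypothetical and not part of the existing util module.

import json
from collections import namedtuple


def parse_last_job_summary(test_case, summary_json):
    # summary_json is the string returned by util.parseAzcopyOutput, i.e. the
    # JSON of the last job progress summary printed by azcopy.
    try:
        return json.loads(summary_json,
                          object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    except ValueError:
        test_case.fail('error parsing the output in JSON format')

A test would then replace its try/except block with x = parse_last_job_summary(self, result).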
Code example #2
    def test_download_blob_exclude_flag(self):
        # create dir and 10 files of size 1024 inside it
        dir_name = "dir_exclude_flag_set_download"
        dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

        # create sub-dir inside dir dir_exclude_flag_set_download
        # create 10 files inside the sub-dir of size 1024
        sub_dir_name = os.path.join(dir_name, "sub_dir_exclude_flag_set_download")
        sub_dir_n_file_path = util.create_test_n_files(1024, 10, sub_dir_name)

        # uploading the directory with 20 files in it.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # execute the validator to validate the uploaded directory.
        destination = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(destination). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # download from container with exclude flags
        destination_sas = util.get_resource_sas(dir_name)
        result = util.Command("copy").add_arguments(destination_sas).add_arguments(util.test_directory_path). \
            add_flags("recursive", "true").add_flags("log-level", "info").add_flags("output", "json"). \
            add_flags("exclude", "test101024_1.txt;test101024_2.txt;test101024_3.txt"). \
            execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
        except:
            self.fail('error parsing the output in JSON Format')
        # The expected number of successful transfers is 17, since three files in the directory are excluded
        self.assertEquals(x.TransfersCompleted, 17)
        self.assertEquals(x.TransfersFailed, 0)

        # download from container with sub-dir in exclude flags
        destination_sas = util.get_resource_sas(dir_name)
        result = util.Command("copy").add_arguments(destination_sas).add_arguments(util.test_directory_path). \
            add_flags("recursive", "true").add_flags("log-level", "info").add_flags("output", "json"). \
            add_flags("exclude", "sub_dir_exclude_flag_set_download/*"). \
            execute_azcopy_copy_command_get_output()

        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
        except:
            self.fail('error parsing the output in Json Format')

        # The expected number of transfers is 10, since the excluded sub-dir contains 10 files.
        self.assertEquals(x.TransfersCompleted, 10)
        self.assertEquals(x.TransfersFailed, 0)
Code example #3
    def test_upload_block_blob_exclude_flag(self):
        dir_name = "dir_exclude_flag_set_upload"
        # create 10 files inside the directory
        dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

        # create sub-directory inside the  dir_exclude_flag_set_upload
        sub_dir_name = os.path.join(dir_name,
                                    "sub_dir_exclude_flag_set_upload")
        # create 10 files inside the sub-dir
        sub_dir_n_file_path = util.create_test_n_files(1024, 10, sub_dir_name)

        # uploading the directory with 2 files in the exclude flag.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info") \
            .add_flags("exclude", "test101024_2.txt;test101024_3.txt").add_flags("output",
                                                                                 "json").execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        # The number of successful transfers should be 18 and there should be no failed transfers,
        # since the total number of files inside dir_exclude_flag_set_upload is 20 and 2 files are
        # excluded, so the total number of transfers should be 18
        self.assertEquals(x.TransfersCompleted, 18)
        self.assertEquals(x.TransfersFailed, 0)

        # uploading the directory with sub-dir in the exclude flag.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info") \
            .add_flags("exclude", "sub_dir_exclude_flag_set_upload/*").add_flags("output",
                                                                               "json").execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')

        # The number of successful transfers should be 10 and there should be no failed transfers,
        # since the total number of files in dir_exclude_flag_set_upload is 20 and the
        # sub_dir_exclude_flag_set_upload sub-dir is excluded, so the total number of transfers will be 10
        self.assertEquals(x.TransfersCompleted, 10)
        self.assertEquals(x.TransfersFailed, 0)
Code example #4
    def test_overwrite_flag_set_to_if_source_new_upload(self):
        # creating directory with 20 files in it.
        dir_name = "dir_overwrite_flag_set_upload"
        dir_n_files_path = util.create_test_n_files(1024, 20, dir_name)

        # uploading the directory with 20 files in it. Wait a bit so that the lmt of the source is in the past
        time.sleep(2)
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # uploading the directory again with the overwrite flag set to ifSourceNewer.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("overwrite", "ifSourceNewer").add_flags("log-level", "info"). \
            add_flags("output-type", "json").execute_azcopy_copy_command_get_output()
        self.assertNotEquals(result, None)

        # parsing the json and comparing the number of failed and successful transfers.
        result = util.parseAzcopyOutput(result)
        try:
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        self.assertEquals(x.TransfersSkipped, "20")
        self.assertEquals(x.TransfersCompleted, "0")

        time.sleep(10)
        # refresh the lmts of the source files so that they appear newer
        for filename in os.listdir(dir_n_files_path):
            # update the lmts of the files to the latest
            os.utime(os.path.join(dir_n_files_path, filename), None)

        # uploading the directory again with the overwrite flag set to ifSourceNewer.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("overwrite", "ifSourceNewer").add_flags("log-level", "info"). \
            add_flags("output-type", "json").execute_azcopy_copy_command_get_output()
        self.assertNotEquals(result, None)

        # parsing the json and comparing the number of failed and successful transfers.
        result = util.parseAzcopyOutput(result)
        try:
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        self.assertEquals(x.TransfersSkipped, "0")
        self.assertEquals(x.TransfersCompleted, "20")
Code example #5
    def test_upload_hidden_file(self):
        # Create directory for storing the hidden files
        dir_name = "dir_hidden_files"
        dir_path = os.path.join(util.test_directory_path, dir_name)
        try:
            shutil.rmtree(dir_path)
        except:
            print("")
        finally:
            os.mkdir(dir_path)
        for i in range(0, 10):
            file_name = "hidden_" + str(i) + ".txt"
            util.create_hidden_file(dir_path, file_name, "hidden file text")

        result = util.Command("copy").add_arguments(dir_path).add_arguments(util.test_container_url). \
            add_flags("log-level", "Info").add_flags("recursive", "true").add_flags("output", "json").execute_azcopy_copy_command_get_output()

        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        self.assertEquals(x.TransfersCompleted, 10)
        self.assertEquals(x.TransfersFailed, 0)
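
util.create_hidden_file is a helper from this test suite's own util module. As a rough illustration only, a minimal sketch of such a helper might hide the file with a leading dot on POSIX and with the hidden attribute on Windows; the real implementation may differ.

import ctypes
import os
import sys

FILE_ATTRIBUTE_HIDDEN = 0x02


def create_hidden_file(dir_path, file_name, contents):
    # On POSIX, a leading dot conventionally hides the file; on Windows, the
    # hidden attribute has to be set explicitly after the file is created.
    if sys.platform != 'win32':
        file_name = '.' + file_name
    file_path = os.path.join(dir_path, file_name)
    with open(file_path, 'w') as f:
        f.write(contents)
    if sys.platform == 'win32':
        ctypes.windll.kernel32.SetFileAttributesW(file_path, FILE_ATTRIBUTE_HIDDEN)
    return file_path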
Code example #6
    def test_follow_symlinks_upload(self):
        link_name = "dir_link"
        outside_dir = "dir_outside_linkto"
        home_dir = "dir_home_follow_symlink_upload"

        # produce all necessary paths
        outside_path = util.create_test_n_files(1024, 10, outside_dir)
        home_path = util.create_test_dir(home_dir)
        link_path = os.path.join(home_path, link_name)

        # Create the symlink
        os.symlink(outside_path, link_path, target_is_directory=True)

        # Upload home path
        result = util.Command("copy").add_arguments(home_path).add_arguments(util.test_container_url). \
            add_flags("log-level", "Info").add_flags("recursive", "true").add_flags("output-type", "json"). \
            add_flags("follow-symlinks", "true").execute_azcopy_copy_command_get_output()
        result = util.parseAzcopyOutput(result)

        try:
            # parse the JSON output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in JSON format')

        self.assertEquals(x.TransfersCompleted, "10")
        self.assertEquals(x.TransfersFailed, "0")
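
os.symlink can fail on Windows when the test account lacks the symlink privilege (no developer mode, no elevation). The original test does not guard against this; a defensive variant of the setup step, shown here purely as a sketch, would skip the test instead of erroring out.

import os


def make_dir_symlink_or_skip(test_case, target_path, link_path):
    # Create a directory symlink, or skip the test on platforms where the
    # current user is not allowed to create symlinks.
    try:
        os.symlink(target_path, link_path, target_is_directory=True)
    except (OSError, NotImplementedError):
        test_case.skipTest("symlink creation is not permitted on this platform")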
Code example #7
    def test_upload_block_blob_include_flag(self):
        dir_name = "dir_include_flag_set_upload"
        # create 10 files inside the directory
        dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

        # create sub-directory inside the  dir_include_flag_set_upload
        sub_dir_name = os.path.join(dir_name,
                                    "sub_dir_include_flag_set_upload")
        # create 10 files inside the sub-dir
        sub_dir_n_file_path = util.create_test_n_files(1024, 10, sub_dir_name)

        # uploading the directory with 2 file names (4 files) in the include flag.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info") \
            .add_flags("include-pattern", "test101024_2.txt;test101024_3.txt").add_flags("output-type",
                                                                                 "json").execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        # parse the Json Output
        try:
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing output in Json format')
        # The number of successful transfers should be 4 and there should be no failed transfers
        self.assertEquals(x.TransfersCompleted, "4")
        self.assertEquals(x.TransfersFailed, "0")

        # uploading the directory with sub-dir in the include flag.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info") \
            .add_flags("include-path", "sub_dir_include_flag_set_upload/").add_flags("output-type",
                                                                               "json").execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        # The number of successful transfers should be 10 and there should be no failed transfers
        self.assertEquals(x.TransfersCompleted, "10")
        self.assertEquals(x.TransfersFailed, "0")
Code example #8
    def test_upload_download_file_non_ascii_characters(self):
        file_name = u"Espa\u00F1a"
        #file_name = "abc.txt"
        file_path = util.create_file_in_path(util.test_directory_path,
                                             file_name, "non ascii characters")
        # Upload the file
        result = util.Command("copy").add_arguments(file_path).add_arguments(util.test_container_url).\
                add_flags("log-level", "Info").add_flags("output", "json").execute_azcopy_copy_command_get_output()
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')

        self.assertEquals(x.TransfersCompleted, 1)
        self.assertEquals(x.TransfersFailed, 0)

        #download the file
        dir_path = os.path.join(util.test_directory_path, "non-ascii-dir")
        try:
            shutil.rmtree(dir_path)
        except:
            print("")
        finally:
            os.mkdir(dir_path)
        destination_url = util.get_resource_sas(file_name)
        result = util.Command("copy").add_arguments(destination_url).add_arguments(dir_path).\
                add_flags("log-level", "Info").add_flags("output", "json").execute_azcopy_copy_command_get_output()
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        self.assertEquals(x.TransfersCompleted, 1)
        self.assertEquals(x.TransfersFailed, 0)
Code example #9
    def test_long_file_path_upload_with_nested_directories(self):
        dir_name = "dir_lfpupwnds"
        dir_path = util.create_test_n_files(1024, 10, dir_name)
        parent_dir = dir_name
        for i in range(0, 30):
            sub_dir_name = "s_" + str(i)
            parent_dir = os.path.join(parent_dir, sub_dir_name)
            util.create_test_n_files(1024, 10, parent_dir)

        # Upload the directory
        result = util.Command("copy").add_arguments(dir_path).add_arguments(util.test_container_url). \
            add_flags("log-level", "Info").add_flags("output-type", "json").add_flags("recursive", "true").execute_azcopy_copy_command_get_output()
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
        except:
            self.fail('error parsing the output in Json Format')

        self.assertEquals(x.TransfersCompleted, 310)
        self.assertEquals(x.TransfersFailed, 0)
Code example #10
    def test_sync_local_to_blob_with_wildCards(self):
        # create 10 files inside the dir 'sync_local_blob'
        dir_name = "sync_local_blob_wc"
        dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

        # create sub-dir inside dir sync_local_blob_wc
        # create 10 files inside the sub-dir of size 1024
        sub_dir_1 = os.path.join(dir_name, "sub_dir_1")
        sub_dir1_n_file_path = util.create_test_n_files(1024, 10, sub_dir_1)

        # create sub-dir inside dir sync_local_blob_wc
        sub_dir_2 = os.path.join(dir_name, "sub_dir_2")
        sub_dir2_n_file_path = util.create_test_n_files(1024, 10, sub_dir_2)

        # uploading the directory with 30 files in it.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # execute the validator to validate the uploaded directory.
        destination = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(destination). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # download the destination to the source to match the last modified time
        result = util.Command("copy").add_arguments(destination).add_arguments(util.test_directory_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output", "json"). \
            add_flags("preserve-last-modified-time", "true").execute_azcopy_copy_command_get_output()
        self.assertNotEquals(result, None)

        # add wildcard at the end of dirpath
        dir_n_files_path_wcard = os.path.join(dir_n_files_path, "*")
        # execute a sync command
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("sync").add_arguments(dir_n_files_path_wcard).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
        # since the source and destination are already in sync, nothing should be synced and azcopy should exit with an error code
        self.assertFalse(result)

        # sync all the files that end with the .txt extension inside all the sub-dirs
        # sd_dir_n_files_path_wcard is in the format dir/*/*.txt
        sd_dir_n_files_path_wcard = os.path.join(dir_n_files_path_wcard,
                                                 "*.txt")
        result = util.Command("sync").add_arguments(sd_dir_n_files_path_wcard).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
        # since the source and destination are already in sync, nothing should be synced and azcopy should exit with an error code
        self.assertFalse(result)

        # remove 5 files inside both the sub-directories
        for r in range(5, 10):
            filename = "test101024_" + str(r) + ".txt"
            filepath = os.path.join(sub_dir1_n_file_path, filename)
            try:
                os.remove(filepath)
            except:
                self.fail('error deleting the file ' + filepath)
            filepath = os.path.join(sub_dir2_n_file_path, filename)
            try:
                os.remove(filepath)
            except:
                self.fail('error deleting the file ' + filepath)
        # sync all the files that end with the .txt extension inside all the sub-dirs
        # since 5 files inside each sub-dir were deleted, the sync will have 10 transfers in total:
        # 10 files will be deleted from the container
        sd_dir_n_files_path_wcard = os.path.join(dir_n_files_path_wcard,
                                                 "*.txt")
        result = util.Command("sync").add_arguments(sd_dir_n_files_path_wcard).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output",
                                                                                  "json").execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        # The expected number of transfers is 10, since 10 files were deleted
        self.assertEquals(x.TransfersCompleted, 10)
        self.assertEquals(x.TransfersFailed, 0)
Code example #11
    def test_sync_local_to_blob_without_wildCards(self):
        # create 10 files inside the dir 'sync_local_blob'
        dir_name = "sync_local_blob"
        dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

        # create sub-dir inside dir sync_local_blob
        # create 10 files inside the sub-dir of size 1024
        sub_dir_name = os.path.join(dir_name, "sub_dir_sync_local_blob")
        sub_dir_n_file_path = util.create_test_n_files(1024, 10, sub_dir_name)

        # uploading the directory with 20 files in it.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)
        # execute the validator to validate the uploaded directory.
        destination = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(destination). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # download the destination to the source to match the last modified time
        result = util.Command("copy").add_arguments(destination).add_arguments(util.test_directory_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output", "json"). \
            add_flags("preserve-last-modified-time", "true").execute_azcopy_copy_command_get_output()
        self.assertNotEquals(result, None)

        # execute a sync command
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("sync").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
        # since the source and destination are already in sync, nothing should be synced and azcopy should exit with an error code
        self.assertFalse(result)
        try:
            shutil.rmtree(sub_dir_n_file_path)
        except:
            self.fail('error deleting the directory ' + sub_dir_n_file_path)

        # the entire sub-dir inside the dir created above has been deleted;
        # syncing the source and destination should delete the sub-dir from the container
        # and the number of successful transfers should be 10
        result = util.Command("sync").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output",
                                                                                  "json").execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')

        # The expected number of transfers is 10, since the deleted sub-dir contained 10 files.
        self.assertEquals(x.TransfersCompleted, 10)
        self.assertEquals(x.TransfersFailed, 0)

        # delete 5 files inside the directory
        for r in range(5, 10):
            filename = "test101024_" + str(r) + ".txt"
            filepath = os.path.join(dir_n_files_path, filename)
            try:
                os.remove(filepath)
            except:
                self.fail('error deleting the file ' + filepath)

        # syncing the source and destination should delete the removed files from the container;
        # the number of successful transfers should be 5
        result = util.Command("sync").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output",
                                                                                  "json").execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')

        # The expected number of transfers is 5, since 5 files were deleted
        self.assertEquals(x.TransfersCompleted, 5)
        self.assertEquals(x.TransfersFailed, 0)

        # change the modified time of file
        # perform the sync
        # expected number of transfer is 1
        filepath = os.path.join(dir_n_files_path, "test101024_0.txt")
        st = os.stat(filepath)
        atime = st[ST_ATIME]  # access time
        mtime = st[ST_MTIME]  # modification time
        new_mtime = mtime + (4 * 3600)  # new modification time
        os.utime(filepath, (atime, new_mtime))
        # sync source to destination
        result = util.Command("sync").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output",
                                                                                  "json").execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        # The expected number of transfers is 1, since one file's modified time was changed
        self.assertEquals(x.TransfersCompleted, 1)
        self.assertEquals(x.TransfersFailed, 0)
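
The last step above advances a file's modification time using the ST_ATIME / ST_MTIME indices from the standard stat module. An equivalent form using the named attributes of os.stat_result is a little easier to read; this is only a refactoring sketch, not a change to the test.

import os


def bump_mtime(file_path, hours=4):
    # Advance only the modification time and keep the access time unchanged,
    # so that a subsequent sync sees the file as newer than its destination copy.
    st = os.stat(file_path)
    os.utime(file_path, (st.st_atime, st.st_mtime + hours * 3600))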
Code example #12
    def test_force_flag_set_to_false_download(self):
        # creating directory with 20 files in it.
        dir_name = "dir_force_flag_set_download"
        dir_n_files_path = util.create_test_n_files(1024, 20, dir_name)
        # uploading the directory with 20 files in it.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # execute the validator to validate the uploaded directory.
        destination = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(destination). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # removing the directory dir_force_flag_set_download
        try:
            shutil.rmtree(dir_n_files_path)
        except:
            self.fail('error removing the directory ' + dir_n_files_path)

        # downloading the directory created from container through azcopy with recursive flag to true.
        result = util.Command("copy").add_arguments(destination).add_arguments(
            util.test_directory_path).add_flags("log-level", "info").add_flags(
                "recursive", "true").execute_azcopy_copy_command()
        self.assertTrue(result)

        # verify downloaded blob.
        result = util.Command("testBlob").add_arguments(
            dir_n_files_path).add_arguments(destination).add_flags(
                "is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # downloading the directory from the container through azcopy with the recursive flag set to true and the overwrite flag set to false.
        result = util.Command("copy").add_arguments(destination).add_arguments(util.test_directory_path).add_flags(
            "log-level", "info"). \
            add_flags("recursive", "true").add_flags("overwrite", "false").add_flags("output",
                                                                                 "json").execute_azcopy_copy_command_get_output()
        result = util.parseAzcopyOutput(result)
        try:
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        # Since all files exist locally and the overwrite flag is set to false, all 20 transfers will be skipped
        self.assertEquals(x.TransfersSkipped, 20)
        self.assertEquals(x.TransfersCompleted, 0)

        # removing 5 files with suffix from 10 to 14
        for index in range(10, 15):
            file_path_remove = dir_n_files_path + os.sep + "test201024" + "_" + str(
                index) + ".txt"
            try:
                os.remove(file_path_remove)
            except:
                self.fail('error deleting the file ' + file_path_remove)

        # downloading the directory from the container again with the recursive flag set to true and the overwrite flag set to false.
        # The 5 deleted files should be downloaded; the number of skipped transfers should be 15 and the number of completed transfers should be 5
        result = util.Command("copy").add_arguments(destination).add_arguments(util.test_directory_path).add_flags(
            "log-level", "info"). \
            add_flags("recursive", "true").add_flags("overwrite", "false").add_flags("output",
                                                                                 "json").execute_azcopy_copy_command_get_output()
        result = util.parseAzcopyOutput(result)
        try:
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        self.assertEquals(x.TransfersSkipped, 15)
        self.assertEquals(x.TransfersCompleted, 5)
Code example #13
    def test_remove_files_with_Wildcard(self):
        # create dir dir_remove_files_with_wildcard
        # create 40 files inside the dir
        dir_name = "dir_remove_files_with_wildcard"
        dir_n_files_path = util.create_test_n_files(1024, 40, dir_name)

        # Upload the directory by azcopy
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("log-level", "Info").add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertTrue(result)

        # destination is the remote URL of the uploaded dir
        destination = util.get_resource_sas(dir_name)
        # Verify the Uploaded directory
        # execute the validator.
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(destination). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # remove the files whose names end with 4.txt
        destination_sas_with_wildcard = util.append_text_path_resource_sas(
            destination, "*4.txt")
        result = util.Command("rm").add_arguments(destination_sas_with_wildcard).add_flags("log-level", "Info"). \
            add_flags("recursive", "true").add_flags("output", "json").execute_azcopy_operation_get_output()
        # Get the latest Job Summary
        result = util.parseAzcopyOutput(result)
        try:
            # Parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        self.assertEquals(x.TransfersFailed, 0)
        self.assertEquals(x.TransfersCompleted, 4)

        # remove the files whose names start with test
        destination_sas_with_wildcard = util.append_text_path_resource_sas(
            destination, "test*")
        result = util.Command("rm").add_arguments(destination_sas_with_wildcard).add_flags("log-level", "Info"). \
            add_flags("recursive", "true").add_flags("output", "json").execute_azcopy_operation_get_output()
        # Get the latest Job Summary
        result = util.parseAzcopyOutput(result)
        try:
            # Parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')

        # The expected number of successful transfers is 36, since 4 of the 40 files have already been deleted
        self.assertEquals(x.TransfersCompleted, 36)
        self.assertEquals(x.TransfersFailed, 0)

        # Create directory dir_remove_all_files_with_wildcard
        dir_name = "dir_remove_all_files_with_wildcard"
        dir_n_files_path = util.create_test_n_files(1024, 40, dir_name)

        # Upload the directory using Azcopy
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("log-level", "Info").add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertTrue(result)

        # destination is the remote URL of the uploaded dir
        destination = util.get_resource_sas(dir_name)
        # Validate the Uploaded directory
        # execute the validator.
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(destination). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # add * at the end of destination sas
        # destination_sas_with_wildcard = https://<container-name>/<dir-name>/*?<sig>
        destination_sas_with_wildcard = util.append_text_path_resource_sas(
            destination, "*")
        result = util.Command("rm").add_arguments(destination_sas_with_wildcard).add_flags("log-level", "Info"). \
            add_flags("recursive", "true").add_flags("output", "json").execute_azcopy_operation_get_output()
        # Get the latest Job Summary
        result = util.parseAzcopyOutput(result)
        try:
            # Parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        # The expected number of successful transfers is 40, since all files will be deleted
        self.assertEquals(x.TransfersFailed, 0)
        self.assertEquals(x.TransfersCompleted, 40)

        # removing multiple directories with the use of wildcards
        for i in range(1, 4):
            dir_name = "rdir" + str(i)
            dir_n_files_path = util.create_test_n_files(1024, 40, dir_name)
            # Upload the directory
            result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
                add_flags("log-level", "Info").add_flags("recursive", "true").execute_azcopy_copy_command()
            self.assertTrue(result)

            # execute the validator
            destination = util.get_resource_sas(dir_name)
            result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(destination). \
                add_flags("is-object-dir", "true").execute_azcopy_verify()
            self.assertTrue(result)

        destination_sas_with_wildcard = util.append_text_path_resource_sas(
            util.test_container_url, "rdir*")
        result = util.Command("rm").add_arguments(destination_sas_with_wildcard).add_flags("log-level", "Info"). \
            add_flags("output", "json").add_flags("recursive", "true").execute_azcopy_operation_get_output()

        # Get the latest Job Summary
        result = util.parseAzcopyOutput(result)
        try:
            # Parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the Output in Json Format')
        # The expected number of successful transfers is 120, since all three directories (40 files each) will be deleted
        self.assertEquals(x.TransfersFailed, 0)
        self.assertEquals(x.TransfersCompleted, 120)
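
util.append_text_path_resource_sas is used throughout these removal tests to splice a wildcard segment into a resource URL that already carries a SAS query string. A plausible sketch, assuming the helper simply inserts the extra path segment before the '?' (the real implementation may differ):

def append_text_path_resource_sas(resource_sas, text):
    # Split the SAS URL into its path and query string, append the extra
    # segment (e.g. "*4.txt" or "rdir*") to the path, and reassemble the URL.
    parts = resource_sas.split("?", 1)
    new_url = parts[0].rstrip("/") + "/" + text
    if len(parts) > 1:
        new_url += "?" + parts[1]
    return new_url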
Code example #14
    def test_blob_download_wildcard_recursive_true_1(self):
        # This test verifies the azcopy behavior when wildcards are
        # provided in the source and the recursive flag is set to true
        # example src = https://<container>/<vd-1>/*?<sig> recursive = true
        dir_name = "dir_download_wildcard_recursive=true"
        dir_path = util.create_test_n_files(1024, 10, dir_name)

        #create sub-directory inside directory
        sub_dir_name_1 = os.path.join(dir_name, "logs")
        sub_dir_path_1 = util.create_test_n_files(1024, 10, sub_dir_name_1)

        #create sub-directory inside sub-directory
        sub_dir_name_2 = os.path.join(sub_dir_name_1, "abc")
        sub_dir_path_2 = util.create_test_n_files(1024, 10, sub_dir_name_2)

        # upload the directory with 30 files in it
        # execute azcopy copy command
        result = util.Command("copy").add_arguments(dir_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # execute the validator.
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_path).add_arguments(dir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # create the resource sas
        dir_sas_with_wildcard = util.get_resource_sas(dir_name + "/*")
        #download the directory
        result = util.Command("copy").add_arguments(dir_sas_with_wildcard).add_arguments(dir_path). \
            add_flags("log-level", "Info").add_flags("output","json").add_flags("recursive","true").\
            execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        # since the wildcard '*' is at the end of dir_name in the sas
        # and recursive is set to true, all the files inside the dir and
        # inside its sub-dirs will be downloaded
        # The expected number of transfers is 30
        self.assertEquals(x.TransfersCompleted, 30)
        self.assertEquals(x.TransfersFailed, 0)

        # create the resource sas
        dir_sas_with_wildcard = util.get_resource_sas(dir_name + "/*/*")
        #download the directory
        result = util.Command("copy").add_arguments(dir_sas_with_wildcard).add_arguments(dir_path). \
            add_flags("log-level", "Info").add_flags("output","json") \
            .add_flags("recursive", "true").execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        # since the wildcard '*/*' is at the end of dir_name in the sas
        # and recursive is set to true, the files immediately inside the dir will not be downloaded,
        # but the files inside the sub-dir logs and its nested sub-dir abc will be downloaded
        # The expected number of transfers is 20
        self.assertEquals(x.TransfersCompleted, 20)
        self.assertEquals(x.TransfersFailed, 0)
Code example #15
    def test_overwrite_flag_set_to_if_source_new_download(self):
        # creating directory with 20 files in it.
        dir_name = "dir_overwrite_flag_set_download_setup"
        dir_n_files_path = util.create_test_n_files(1024, 20, dir_name)
        # uploading the directory with 20 files in it.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # case 1: destination is empty
        # download the directory with the overwrite flag set to ifSourceNewer.
        # target an empty folder, so the download should succeed normally
        # sleep a bit so that the lmts of the source blobs are in the past
        time.sleep(10)
        source = util.get_resource_sas(dir_name)
        destination = os.path.join(util.test_directory_path,
                                   "dir_overwrite_flag_set_download")
        os.mkdir(destination)
        result = util.Command("copy").add_arguments(source).add_arguments(destination). \
            add_flags("recursive", "true").add_flags("overwrite", "ifSourceNewer").add_flags("log-level", "info"). \
            add_flags("output-type", "json").execute_azcopy_copy_command_get_output()
        self.assertNotEquals(result, None)

        # parsing the json and comparing the number of failed and successful transfers.
        result = util.parseAzcopyOutput(result)
        try:
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        self.assertEquals(x.TransfersSkipped, "0")
        self.assertEquals(x.TransfersCompleted, "20")

        # case 2: local files are newer
        # download the directory again with the overwrite flag set to ifSourceNewer.
        # this time, since the local files are newer, no download should occur
        result = util.Command("copy").add_arguments(source).add_arguments(destination). \
            add_flags("recursive", "true").add_flags("overwrite", "ifSourceNewer").add_flags("log-level", "info"). \
            add_flags("output-type", "json").execute_azcopy_copy_command_get_output()
        self.assertNotEquals(result, None)

        # parsing the json and comparing the number of failed and successful transfers.
        result = util.parseAzcopyOutput(result)
        try:
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        self.assertEquals(x.TransfersSkipped, "20")
        self.assertEquals(x.TransfersCompleted, "0")

        # re-uploading the directory with 20 files in it, to refresh the lmts of the source
        time.sleep(2)
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # case 3: source blobs are newer now, so download should proceed
        result = util.Command("copy").add_arguments(source).add_arguments(destination). \
            add_flags("recursive", "true").add_flags("overwrite", "ifSourceNewer").add_flags("log-level", "info"). \
            add_flags("output-type", "json").execute_azcopy_copy_command_get_output()
        self.assertNotEquals(result, None)

        # parsing the json and comparing the number of failed and successful transfers.
        result = util.parseAzcopyOutput(result)
        try:
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        self.assertEquals(x.TransfersSkipped, "0")
        self.assertEquals(x.TransfersCompleted, "20")
Code example #16
    def test_sync_blob_download_with_wildcards(self):
        # create a directory and 10 files of size 1024 inside it
        dir_name = "sync_download_with_wildcards"
        dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

        # upload the directory
        # execute azcopy command
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # execute the validator.
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # download the destination to the source to match the last modified time
        result = util.Command("copy").add_arguments(dir_sas).add_arguments(util.test_directory_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output", "json"). \
            add_flags("preserve-last-modified-time", "true").execute_azcopy_copy_command_get_output()
        self.assertNotEquals(result, None)

        # execute the validator and verify the downloaded dir
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # add "*" at the end of dir sas
        # since both the source and destination are in sync, it will fail
        dir_sas = util.append_text_path_resource_sas(dir_sas, "*")
        # sync the source and destination
        result = util.Command("sync").add_arguments(dir_sas).add_arguments(dir_n_files_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertFalse(result)

        subdir1 = os.path.join(dir_name, "subdir1")
        subdir1_file_path = util.create_test_n_files(1024, 10, subdir1)

        subdir2 = os.path.join(dir_name, "subdir2")
        subdir2_file_path = util.create_test_n_files(1024, 10, subdir2)

        # upload the directory
        # execute azcopy command
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # execute the validator.
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # Download the directory to match the blob modified time
        result = util.Command("copy").add_arguments(dir_sas).add_arguments(util.test_directory_path). \
            add_flags("log-level", "Info").add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertTrue(result)

        # sync the source and destination
        # add extra wildcards
        # since source and destination both are in sync, it will fail
        dir_sas = util.append_text_path_resource_sas(dir_sas, "*/*.txt")
        result = util.Command("sync").add_arguments(dir_sas).add_arguments(dir_n_files_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertFalse(result)

        # delete 5 files inside each sub-directories locally
        for r in range(5, 10):
            filename = "test101024_" + str(r) + ".txt"
            filepath = os.path.join(subdir1_file_path, filename)
            try:
                os.remove(filepath)
            except:
                self.fail('error deleting the file ' + filepath)
            filepath = os.path.join(subdir2_file_path, filename)
            try:
                os.remove(filepath)
            except:
                self.fail('error deleting the file ' + filepath)
        # 10 files have been deleted across the two sub-dirs
        # sync remote to local
        # 10 files will be downloaded
        result = util.Command("sync").add_arguments(dir_sas).add_arguments(dir_n_files_path). \
            add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output","json").\
            execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        # The expected number of transfers is 10, since 10 files were deleted
        self.assertEquals(x.TransfersCompleted, 10)
        self.assertEquals(x.TransfersFailed, 0)
Code example #17
    def test_blob_download_wildcard_recursive_false_1(self):
        #This test verifies the azcopy behavior when wildcards are
        # provided in the source and recursive flag is set to false
        # example src = https://<container>/<vd-1>/*?<sig> recursive = false
        dir_name = "dir_download_wildcard_recursive_false_1"
        dir_path = util.create_test_n_files(1024, 10, dir_name)

        #create sub-directory inside directory
        sub_dir_name = os.path.join(
            dir_name, "sub_dir_download_wildcard_recursive_false_1")
        sub_dir_path = util.create_test_n_files(1024, 10, sub_dir_name)

        # upload the directory with 20 files in it
        # execute azcopy copy command
        result = util.Command("copy").add_arguments(dir_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # execute the validator.
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_path).add_arguments(dir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # attempt to download the dir dir_download_wildcard_recursive_false_1 inside the container;
        # since the recursive flag is false, no files will be downloaded and the command fails
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("copy").add_arguments(dir_sas).add_arguments(dir_path). \
            add_flags("log-level", "Info").add_flags("output-type","json").execute_azcopy_copy_command()
        self.assertEquals(result, False)

        # create the resource sas
        dir_sas = util.get_resource_sas(dir_name + "/*")
        #download the directory
        result = util.Command("copy").add_arguments(dir_sas).add_arguments(dir_path). \
            add_flags("log-level", "Info").add_flags("output-type","json").\
            execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        # since the wildcard '*' exists at the end of dir_name in the sas
        # and recursive is set to false, only the files directly inside the dir
        # are downloaded, not the files inside the sub-dir
        # number of expected transfers should be 10
        self.assertEquals(x.TransfersCompleted, "10")
        self.assertEquals(x.TransfersFailed, "0")

        # create the resource sas with a wildcard on the sub-directory
        dir_sas = util.get_resource_sas(
            dir_name + "/sub_dir_download_wildcard_recursive_false_1/*")
        # download the .txt files inside the sub-directory
        result = util.Command("copy").add_arguments(dir_sas).add_arguments(dir_path).\
            add_flags("log-level", "Info").add_flags("output-type","json").add_flags("include-pattern", "*.txt").\
            execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        # since the wildcard '*' exists at the end of the sub-dir in the sas,
        # the include-pattern is '*.txt', and recursive is set to false,
        # only the .txt files directly inside the sub-dir are downloaded
        # number of expected transfers should be 10
        self.assertEquals(x.TransfersCompleted, "10")
        self.assertEquals(x.TransfersFailed, "0")
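The wildcard behavior exercised above hinges on how the source URL is built: the test appends "/*" to the directory (or sub-directory) name before requesting the SAS. util.get_resource_sas is not shown in this listing, so the helper below is only an assumed sketch of the resulting URL shape, not the actual implementation.

def build_wildcard_source(container_url_with_sas, resource_path):
    # hypothetical helper: splice the resource path and a trailing wildcard
    # into a container URL of the form https://<account>/<container>?<sig>
    base, _, sas = container_url_with_sas.partition("?")
    url = base.rstrip("/") + "/" + resource_path + "/*"
    return url + ("?" + sas if sas else "")

# e.g. build_wildcard_source("https://acct.blob.core.windows.net/cont?sig=...",
#                            "dir_download_wildcard_recursive_false_1")
# -> "https://acct.blob.core.windows.net/cont/dir_download_wildcard_recursive_false_1/*?sig=..."

With recursive left false, the trailing wildcard matches only the blobs directly under that virtual directory, which is why each of the two downloads above transfers exactly 10 files.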
Code example #18
    def test_blob_download_list_of_files_flag(self):
        # This test verifies the azcopy behavior when blobs are downloaded using
        # the list-of-files flag
        dir_name = "dir_download_list_of_files_flag"
        dir_path = util.create_test_n_files(1024, 10, dir_name)

        #create sub-directory inside directory
        sub_dir_name_1 = os.path.join(dir_name, "logs")
        sub_dir_path_1 = util.create_test_n_files(1024, 10, sub_dir_name_1)

        #create sub-directory inside sub-directory
        sub_dir_name_2 = os.path.join(sub_dir_name_1, "abc")
        sub_dir_path_2 = util.create_test_n_files(1024, 10, sub_dir_name_2)

        # upload the directory (30 files in total) with the azcopy copy command
        result = util.Command("copy").add_arguments(dir_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # execute the validator.
        dir_sas = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_path).add_arguments(dir_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # download the entire directory with the list-of-files flag
        # (avoid shadowing the built-in dict)
        files_payload = {"Files": [dir_name]}
        filePath = util.create_json_file("testfile", files_payload)
        result = util.Command("copy").add_arguments(util.test_container_url).add_arguments(dir_path). \
            add_flags("log-level", "Info").add_flags("output","json").add_flags("recursive","true") \
            .add_flags("list-of-files", filePath).execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        # since the entire directory (30 files) is downloaded
        self.assertEquals(x.TransfersCompleted, 30)
        self.assertEquals(x.TransfersFailed, 0)

        # create the resource sas
        dir_sas = util.get_resource_sas(dir_name)
        # download only the logs sub-directory inside the dir
        files_payload = {"Files": ["logs"]}
        filePath = util.create_json_file("testfile", files_payload)
        result = util.Command("copy").add_arguments(dir_sas).add_arguments(dir_path). \
            add_flags("log-level", "Info").add_flags("output","json").add_flags("recursive","true"). \
            add_flags("list-of-files", filePath).execute_azcopy_copy_command_get_output()
        # parse the result to get the last job progress summary
        result = util.parseAzcopyOutput(result)
        try:
            # parse the Json Output
            x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        # since only the logs sub-directory (including its abc sub-directory) is downloaded, transfers will be 20
        self.assertEquals(x.TransfersCompleted, 20)
        self.assertEquals(x.TransfersFailed, 0)
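The list-of-files scenario above depends on util.create_json_file, which is not shown in this listing. The sketch below is only an assumed equivalent: it serializes the {"Files": [...]} payload to a temporary .json file and returns the path that is then passed to the list-of-files flag.

import json
import os
import tempfile

def create_json_file(file_name, payload):
    # hypothetical stand-in for util.create_json_file: write the payload as
    # JSON into the temp directory and hand back the file path
    file_path = os.path.join(tempfile.gettempdir(), file_name + ".json")
    with open(file_path, "w") as f:
        json.dump(payload, f)
    return file_path

# usage mirroring the test: list only the "logs" sub-directory
# file_path = create_json_file("testfile", {"Files": ["logs"]})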
Code example #19
    def test_force_flag_set_to_false_upload(self):
        # creating directory with 20 files in it.
        dir_name = "dir_force_flag_set_upload"
        dir_n_files_path = util.create_test_n_files(1024, 20, dir_name)
        # uploading the directory with 20 files in it.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # execute the validator and validating the uploaded directory.
        destination = util.get_resource_sas(dir_name)
        result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(destination). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # uploading the directory again with the overwrite flag set to false.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("overwrite", "false").add_flags("log-level", "info"). \
            add_flags("output", "json").execute_azcopy_copy_command_get_output()
        self.assertNotEquals(result, None)

        # parsing the json and comparing the number of skipped and successful transfers.
        result = util.parseAzcopyOutput(result)
        try:
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in Json Format')
        self.assertEquals(x.TransfersSkipped, 20)
        self.assertEquals(x.TransfersCompleted, 0)

        # creating a sub-directory with 20 files inside the above dir, then uploading the dir again.
        # total number of files inside the dir is now 40
        sub_dir_name = os.path.join(dir_name,
                                    "sub_dir_force_flag_set_upload")
        sub_dir_n_files_path = util.create_test_n_files(1024, 20, sub_dir_name)

        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

        # execute the validator and verifying the uploaded sub directory.
        sub_directory_resource_sas = util.get_resource_sas(sub_dir_name)

        result = util.Command("testBlob").add_arguments(sub_dir_n_files_path).add_arguments(sub_directory_resource_sas). \
            add_flags("is-object-dir", "true").execute_azcopy_verify()
        self.assertTrue(result)

        # removing the sub directory.
        result = util.Command("rm").add_arguments(sub_directory_resource_sas). \
            add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
        self.assertTrue(result)

        # uploading the directory again with the overwrite flag set to false.
        result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
            add_flags("recursive", "true").add_flags("overwrite", "false").add_flags("log-level", "info"). \
            add_flags("output", "json").execute_azcopy_copy_command_get_output()
        self.assertNotEquals(result, None)

        # parsing the json and comparing the number of skipped and successful transfers.
        # Number of skipped transfers should be 20 and number of successful transfers should be 20.
        result = util.parseAzcopyOutput(result)
        try:
            x = json.loads(result,
                           object_hook=lambda d: namedtuple('X', d.keys())
                           (*d.values()))
        except:
            self.fail('error parsing the output in json format')
        self.assertEquals(x.TransfersCompleted, 20)
        self.assertEquals(x.TransfersSkipped, 20)
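The overwrite=false checks above repeat the same skipped-vs-completed assertion. A small helper (not part of the original suite, shown here only as a sketch) could capture that check:

def assert_overwrite_summary(test_case, summary, expected_completed, expected_skipped):
    # with overwrite set to false, blobs that already exist at the destination
    # are reported as skipped rather than failed
    test_case.assertEqual(summary.TransfersCompleted, expected_completed)
    test_case.assertEqual(summary.TransfersSkipped, expected_skipped)

# e.g. assert_overwrite_summary(self, x, expected_completed=20, expected_skipped=20)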