def test_sync_entire_vdir_to_vdir(self):
    content_dir_name = "dir_sync_test"
    content_dir_path = util.create_test_n_files(1024, 10, content_dir_name)
    src_vdir_path = util.get_resource_sas("srcdir")
    dst_vdir_path = util.get_resource_sas("dstdir")

    # create sub-directory inside directory
    sub_dir_name = os.path.join(content_dir_name, "sub_dir_sync_test")
    util.create_test_n_files(1024, 10, sub_dir_name)

    # upload the directory with 20 files
    result = util.Command("copy").add_arguments(content_dir_path).add_arguments(src_vdir_path). \
        add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
    self.assertTrue(result)

    # execute the validator.
    result = util.Command("testBlob").add_arguments(content_dir_path).add_arguments(src_vdir_path). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)

    # sync to destination
    result = util.Command("sync").add_arguments(src_vdir_path).add_arguments(dst_vdir_path)\
        .add_flags("log-level", "info").execute_azcopy_copy_command()
    self.assertTrue(result)
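# Every test in this file leans on util.create_test_n_files(size, n, dir_name).
# Below is a minimal sketch of what such a helper is assumed to do; the file-name
# scheme and the util.test_directory_path root are inferred from how the tests
# use it (e.g. the "test101024_1.txt" include patterns), not the actual suite code.
import os


def create_test_n_files_sketch(size_in_bytes, n, dir_name):
    # create dir_name under the local test root and fill it with n files of
    # the given size, returning the directory path for later comparisons.
    dir_path = os.path.join(util.test_directory_path, dir_name)
    os.makedirs(dir_path, exist_ok=True)
    for i in range(1, n + 1):
        # assumed naming scheme: "test" + n + size + "_" + index, e.g. test101024_1.txt
        file_name = "test%d%d_%d.txt" % (n, size_in_bytes, i)
        with open(os.path.join(dir_path, file_name), "wb") as f:
            f.write(os.urandom(size_in_bytes))
    return dir_path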
def test_copy_entire_dir_to_dir(self):
    content_dir_name_src = "dir_file_copy_test_src"
    content_dir_path_src = util.create_test_n_files(1024, 10, content_dir_name_src)
    content_dir_name_dst = "dir_file_copy_test_dst"

    # create sub-directory inside directory
    sub_dir_name = os.path.join(content_dir_name_src, "sub_dir_copy_test")
    util.create_test_n_files(1024, 10, sub_dir_name)

    # upload to the source
    result = util.Command("copy").add_arguments(content_dir_path_src).add_arguments(util.test_share_url). \
        add_flags("recursive", "true").add_flags("log-level", "debug").execute_azcopy_copy_command()
    self.assertTrue(result)

    # copy to destination
    remote_src_dir_path = util.get_resource_sas_from_share(content_dir_name_src)
    remote_dst_dir_path = util.get_resource_sas_from_share(content_dir_name_dst)
    result = util.Command("copy").add_arguments(util.get_resource_sas_from_share(content_dir_name_src + "/*"))\
        .add_arguments(remote_dst_dir_path).add_flags("log-level", "debug").add_flags("recursive", "true")\
        .execute_azcopy_copy_command()
    self.assertTrue(result)

    # execute the validator to make sure the copy worked; both remote src and dst should match the local src
    result = util.Command("testFile").add_arguments(content_dir_path_src).add_arguments(remote_src_dir_path). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)
    result = util.Command("testFile").add_arguments(content_dir_path_src).add_arguments(remote_dst_dir_path). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)
def util_test_n_1kb_file_in_dir_upload_to_azure_directory(self, number_of_files, recursive):
    # create dir dir_n_files and 1 KB files inside the dir.
    dir_name = "dir_" + str(number_of_files) + "_files"
    sub_dir_name = "dir_subdir_" + str(number_of_files) + "_files"

    # create n test files in dir
    src_dir = util.create_test_n_files(1024, number_of_files, dir_name)

    # create n test files in subdir, subdir is contained in dir
    util.create_test_n_files(1024, number_of_files, os.path.join(dir_name, sub_dir_name))

    # prepare destination directory.
    # TODO: note that azcopy v2 currently only supports an existing directory and share.
    dest_azure_dir_name = "dest azure_dir_name"
    dest_azure_dir = util.get_resource_sas_from_share(dest_azure_dir_name)
    result = util.Command("create").add_arguments(dest_azure_dir).add_flags("serviceType", "File"). \
        add_flags("resourceType", "Bucket").execute_azcopy_create()
    self.assertTrue(result)

    # execute azcopy command
    result = util.Command("copy").add_arguments(src_dir).add_arguments(dest_azure_dir). \
        add_flags("recursive", recursive).add_flags("log-level", "info").execute_azcopy_copy_command()
    self.assertTrue(result)

    # execute the validator.
    dest_azure_dir_to_compare = util.get_resource_sas_from_share(dest_azure_dir_name + "/" + dir_name)
    result = util.Command("testFile").add_arguments(src_dir).add_arguments(dest_azure_dir_to_compare). \
        add_flags("is-object-dir", "true").add_flags("is-recursive", recursive).execute_azcopy_verify()
    self.assertTrue(result)
def test_sync_entire_directory_with_local(self):
    dir_name = "dir_sync_test"
    dir_path = util.create_test_n_files(1024, 10, dir_name)

    # create sub-directory inside directory
    sub_dir_name = os.path.join(dir_name, "sub_dir_sync_test")
    util.create_test_n_files(1024, 10, sub_dir_name)

    # upload the directory with 20 files
    result = util.Command("copy").add_arguments(dir_path).add_arguments(util.test_container_url). \
        add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
    self.assertTrue(result)

    # execute the validator.
    vdir_sas = util.get_resource_sas(dir_name)
    result = util.Command("testBlob").add_arguments(dir_path).add_arguments(vdir_sas). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)

    # sync to local
    src = vdir_sas
    dst = dir_path
    result = util.Command("sync").add_arguments(src).add_arguments(dst).add_flags("log-level", "info")\
        .execute_azcopy_copy_command()
    self.assertTrue(result)

    # sync back to blob after recreating the files
    util.create_test_n_files(1024, 10, sub_dir_name)
    src = dir_path
    dst = vdir_sas
    result = util.Command("sync").add_arguments(src).add_arguments(dst).add_flags("log-level", "info") \
        .execute_azcopy_copy_command()
    self.assertTrue(result)
def util_test_n_1kb_file_in_dir_upload_download_azure_directory(self, number_of_files, recursive):
    # create dir dir_n_files and 1 KB files inside the dir.
    dir_name = "util_test_n_1kb_file_in_dir_upload_download_azure_directory_" + recursive + "_" + str(
        number_of_files) + "_files"
    sub_dir_name = "dir_subdir_" + str(number_of_files) + "_files"

    # create n test files in dir
    src_dir = util.create_test_n_files(1024, number_of_files, dir_name)

    # create n test files in subdir, subdir is contained in dir
    util.create_test_n_files(1024, number_of_files, os.path.join(dir_name, sub_dir_name))

    # prepare destination directory.
    # TODO: note that azcopy v2 currently only supports an existing directory and share.
    dest_azure_dir_name = "dest azure_dir_name"
    dest_azure_dir = util.get_resource_sas_from_share(dest_azure_dir_name)
    result = util.Command("create").add_arguments(dest_azure_dir).add_flags("serviceType", "File"). \
        add_flags("resourceType", "Bucket").execute_azcopy_create()
    self.assertTrue(result)

    # execute azcopy command
    result = util.Command("copy").add_arguments(src_dir).add_arguments(dest_azure_dir). \
        add_flags("recursive", recursive).add_flags("log-level", "info").execute_azcopy_copy_command()
    self.assertTrue(result)

    # execute the validator.
    dest_azure_dir_to_compare = util.get_resource_sas_from_share(dest_azure_dir_name + "/" + dir_name)
    result = util.Command("testFile").add_arguments(src_dir).add_arguments(dest_azure_dir_to_compare). \
        add_flags("is-object-dir", "true").add_flags("is-recursive", recursive).execute_azcopy_verify()
    self.assertTrue(result)

    download_azure_src_dir = dest_azure_dir_to_compare
    download_local_dest_dir = src_dir + "_download"

    # start from an empty local destination directory
    try:
        if os.path.exists(download_local_dest_dir) and os.path.isdir(download_local_dest_dir):
            shutil.rmtree(download_local_dest_dir)
    except:
        print("error removing " + download_local_dest_dir)
    finally:
        os.makedirs(download_local_dest_dir)

    # download the directory created from the azure file directory through azcopy with the recursive flag set.
    result = util.Command("copy").add_arguments(download_azure_src_dir).add_arguments(
        download_local_dest_dir).add_flags("log-level", "info"). \
        add_flags("recursive", recursive).execute_azcopy_copy_command()
    self.assertTrue(result)

    # verify the downloaded files.
    # TODO: ensure the comparison here
    result = util.Command("testFile").add_arguments(os.path.join(download_local_dest_dir, dir_name)).add_arguments(
        download_azure_src_dir). \
        add_flags("is-object-dir", "true").add_flags("is-recursive", recursive).execute_azcopy_verify()
    self.assertTrue(result)
def util_test_copy_n_files_from_x_dir_to_x_dir_strip_top_dir(
        self, srcBucketURL, srcType, dstBucketURL, dstType, n=10, sizeInKB=1, recursive=True):
    # create source bucket
    result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
        add_flags("resourceType", "Bucket").execute_azcopy_create()
    self.assertTrue(result)

    # create n test files in the source directory, plus one file in a sub-directory
    src_dir_name = "copy_%d_%dKB_files_from_%s_dir_to_%s_dir_recursive_%s" % (
        n, sizeInKB, srcType, dstType, recursive)
    src_dir_path = util.create_test_n_files(sizeInKB * 1024, n, src_dir_name)
    src_sub_dir_name = src_dir_name + "/" + "subdir"
    util.create_test_n_files(sizeInKB * 1024, 1, src_sub_dir_name)

    self.util_upload_to_src(src_dir_path, srcType, srcBucketURL, True)

    # the trailing /* strips the top-level directory from the copied paths
    if srcType == "GCP":
        src_dir_url = util.get_object_without_sas(srcBucketURL, src_dir_name + "/*")
    else:
        src_dir_url = util.get_object_sas(srcBucketURL, src_dir_name + "/*")
    dstDirURL = util.get_object_sas(dstBucketURL, src_dir_name)

    if recursive:
        result = util.Command("copy").add_arguments(src_dir_url).add_arguments(dstDirURL). \
            add_flags("log-level", "info").add_flags("recursive", "true"). \
            execute_azcopy_copy_command()
        self.assertTrue(result)
    else:
        result = util.Command("copy").add_arguments(src_dir_url).add_arguments(dstDirURL). \
            add_flags("log-level", "info").execute_azcopy_copy_command()
        self.assertTrue(result)

    # download the copied files for validation
    validate_dir_name = "validate_copy_%d_%dKB_files_from_%s_dir_to_%s_dir" % (
        n, sizeInKB, srcType, dstType)
    local_validate_dest = util.create_test_dir(validate_dir_name)
    result = util.Command("copy").add_arguments(dstDirURL).add_arguments(local_validate_dest). \
        add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # in the recursive case the whole tree must match; otherwise the sub-directory
    # must have been skipped, so only the n top-level files should be present
    if recursive:
        result = self.util_are_dir_trees_equal(
            src_dir_path, os.path.join(local_validate_dest, src_dir_name))
    else:
        dirs_cmp = filecmp.dircmp(
            src_dir_path, os.path.join(local_validate_dest, src_dir_name))
        result = len(dirs_cmp.left_only) > 0 and len(dirs_cmp.common_files) == n
    self.assertTrue(result)
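# util_are_dir_trees_equal is used throughout these copy tests but not shown in
# this section. Below is a minimal sketch of how such a recursive comparison
# could be built on filecmp; it is an assumption for illustration, not the
# suite's actual implementation.
import filecmp
import os


def are_dir_trees_equal_sketch(dir1, dir2):
    # two trees are equal when no entry exists on only one side, no common file
    # differs by content, and every common sub-directory is recursively equal.
    dirs_cmp = filecmp.dircmp(dir1, dir2)
    if dirs_cmp.left_only or dirs_cmp.right_only or dirs_cmp.funny_files:
        return False
    _, mismatch, errors = filecmp.cmpfiles(dir1, dir2, dirs_cmp.common_files, shallow=False)
    if mismatch or errors:
        return False
    return all(
        are_dir_trees_equal_sketch(os.path.join(dir1, d), os.path.join(dir2, d))
        for d in dirs_cmp.common_dirs)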
def test_download_blob_include_flag(self):
    # create dir and 10 files of size 1024 inside it
    dir_name = "dir_include_flag_set_download"
    dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

    # create sub-dir inside dir_include_flag_set_download
    # create 10 files of size 1024 inside the sub-dir
    sub_dir_name = os.path.join(dir_name, "sub_dir_include_flag_set_download")
    sub_dir_n_file_path = util.create_test_n_files(1024, 10, sub_dir_name)

    # upload the directory with 20 files in it.
    result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
        add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
    self.assertTrue(result)

    # execute the validator and validate the uploaded directory.
    destination = util.get_resource_sas(dir_name)
    result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(destination). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)

    # download from the container with the include-pattern flag
    destination_sas = util.get_resource_sas(dir_name)
    result = util.Command("copy").add_arguments(destination_sas).add_arguments(util.test_directory_path). \
        add_flags("recursive", "true").add_flags("log-level", "info").add_flags("output-type", "json"). \
        add_flags("include-pattern", "test101024_1.txt;test101024_2.txt;test101024_3.txt"). \
        execute_azcopy_copy_command_get_output()

    # parse the result to get the last job progress summary
    result = util.parseAzcopyOutput(result)
    try:
        # parse the JSON output
        x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    except:
        self.fail('error parsing the output in JSON format')

    # the three patterns match three files in the dir and three in the sub-dir
    self.assertEquals(x.TransfersCompleted, "6")
    self.assertEquals(x.TransfersFailed, "0")

    # download from the container with the sub-dir in the include-path flag
    # TODO: make this use include-path in the DL refactor
    destination_sas = util.get_resource_sas(dir_name)
    result = util.Command("copy").add_arguments(destination_sas).add_arguments(util.test_directory_path). \
        add_flags("recursive", "true").add_flags("log-level", "info").add_flags("output-type", "json"). \
        add_flags("include-path", "sub_dir_include_flag_set_download/"). \
        execute_azcopy_copy_command_get_output()

    # parse the result to get the last job progress summary
    result = util.parseAzcopyOutput(result)
    try:
        # parse the JSON output
        x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    except:
        self.fail('error parsing the output in JSON format')

    # only the 10 files under the included sub-dir should be transferred
    self.assertEquals(x.TransfersCompleted, "10")
    self.assertEquals(x.TransfersFailed, "0")
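# The parse-and-assert dance on azcopy's JSON summary repeats across many of
# these tests. A minimal sketch of a shared helper is shown below; the helper
# name is an illustrative assumption, though util.parseAzcopyOutput and the
# namedtuple trick come straight from the tests themselves.
import json
from collections import namedtuple


def parse_job_summary_sketch(raw_output):
    # extract the last job progress summary from raw azcopy output and expose
    # its fields (TransfersCompleted, TransfersFailed, TransfersSkipped, ...)
    # as attributes of an ad-hoc namedtuple, as the tests above do inline.
    summary_json = util.parseAzcopyOutput(raw_output)
    return json.loads(
        summary_json,
        object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))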
def test_download_blob_exclude_flag(self):
    # create dir and 10 files of size 1024 inside it
    dir_name = "dir_exclude_flag_set_download"
    dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

    # create sub-dir inside dir_exclude_flag_set_download
    # create 10 files of size 1024 inside the sub-dir
    sub_dir_name = os.path.join(dir_name, "sub_dir_exclude_flag_set_download")
    sub_dir_n_file_path = util.create_test_n_files(1024, 10, sub_dir_name)

    # upload the directory with 20 files in it.
    result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
        add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
    self.assertTrue(result)

    # execute the validator and validate the uploaded directory.
    destination = util.get_resource_sas(dir_name)
    result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(destination). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)

    # download from the container with the exclude flag
    destination_sas = util.get_resource_sas(dir_name)
    result = util.Command("copy").add_arguments(destination_sas).add_arguments(util.test_directory_path). \
        add_flags("recursive", "true").add_flags("log-level", "info").add_flags("output", "json"). \
        add_flags("exclude", "test101024_1.txt;test101024_2.txt;test101024_3.txt"). \
        execute_azcopy_copy_command_get_output()

    # parse the result to get the last job progress summary
    result = util.parseAzcopyOutput(result)
    try:
        # parse the JSON output
        x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    except:
        self.fail('error parsing the output in JSON format')

    # the number of expected successful transfers is 17, since 3 of the 20 files are excluded
    self.assertEquals(x.TransfersCompleted, 17)
    self.assertEquals(x.TransfersFailed, 0)

    # download from the container with the sub-dir in the exclude flag
    destination_sas = util.get_resource_sas(dir_name)
    result = util.Command("copy").add_arguments(destination_sas).add_arguments(util.test_directory_path). \
        add_flags("recursive", "true").add_flags("log-level", "info").add_flags("output", "json"). \
        add_flags("exclude", "sub_dir_exclude_flag_set_download/*"). \
        execute_azcopy_copy_command_get_output()

    # parse the result to get the last job progress summary
    result = util.parseAzcopyOutput(result)
    try:
        # parse the JSON output
        x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    except:
        self.fail('error parsing the output in JSON format')

    # the number of expected transfers is 10, since the excluded sub-dir holds the other 10 files
    self.assertEquals(x.TransfersCompleted, 10)
    self.assertEquals(x.TransfersFailed, 0)
def util_test_n_1kb_file_in_dir_upload_download_share(self, number_of_files):
    # create dir dir_n_files and 1 KB files inside the dir.
    dir_name = "dir_test_n_1kb_file_in_dir_upload_download_share_" + str(number_of_files) + "_files"
    sub_dir_name = "dir subdir_" + str(number_of_files) + "_files"

    # create n test files in dir
    src_dir = util.create_test_n_files(1024, number_of_files, dir_name)

    # create n test files in subdir, subdir is contained in dir
    util.create_test_n_files(1024, number_of_files, os.path.join(dir_name, sub_dir_name))

    # execute azcopy command
    dest_share = util.test_share_url
    result = util.Command("copy").add_arguments(src_dir).add_arguments(dest_share). \
        add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
    self.assertTrue(result)

    # execute the validator.
    dest_azure_dir = util.get_resource_sas_from_share(dir_name)
    result = util.Command("testFile").add_arguments(src_dir).add_arguments(dest_azure_dir). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)

    download_azure_src_dir = dest_azure_dir
    download_local_dest_dir = src_dir + "_download"

    # start from an empty local destination directory
    try:
        if os.path.exists(download_local_dest_dir) and os.path.isdir(download_local_dest_dir):
            shutil.rmtree(download_local_dest_dir)
    except:
        self.fail("error removing " + download_local_dest_dir)
    finally:
        os.makedirs(download_local_dest_dir)

    # download the directory created from the azure file share through azcopy with recursive set to true.
    result = util.Command("copy").add_arguments(download_azure_src_dir).add_arguments(
        download_local_dest_dir).add_flags("log-level", "info").add_flags(
        "recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # verify the downloaded files.
    result = util.Command("testFile").add_arguments(
        os.path.join(download_local_dest_dir, dir_name)).add_arguments(download_azure_src_dir).add_flags(
        "is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)
def test_upload_block_blob_exclude_flag(self):
    dir_name = "dir_exclude_flag_set_upload"
    # create 10 files inside the directory
    dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

    # create sub-directory inside dir_exclude_flag_set_upload
    sub_dir_name = os.path.join(dir_name, "sub_dir_exclude_flag_set_upload")
    # create 10 files inside the sub-dir
    sub_dir_n_file_path = util.create_test_n_files(1024, 10, sub_dir_name)

    # upload the directory with 2 files in the exclude flag.
    result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
        add_flags("recursive", "true").add_flags("log-level", "info") \
        .add_flags("exclude", "test101024_2.txt;test101024_3.txt").add_flags("output", "json") \
        .execute_azcopy_copy_command_get_output()

    # parse the result to get the last job progress summary
    result = util.parseAzcopyOutput(result)
    try:
        # parse the JSON output
        x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    except:
        self.fail('error parsing the output in JSON format')

    # the number of successful transfers should be 18 with no failed transfers:
    # dir_exclude_flag_set_upload holds 20 files in total and 2 are excluded
    self.assertEquals(x.TransfersCompleted, 18)
    self.assertEquals(x.TransfersFailed, 0)

    # upload the directory with the sub-dir in the exclude flag.
    result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
        add_flags("recursive", "true").add_flags("log-level", "info") \
        .add_flags("exclude", "sub_dir_exclude_flag_set_upload/*").add_flags("output", "json") \
        .execute_azcopy_copy_command_get_output()

    # parse the result to get the last job progress summary
    result = util.parseAzcopyOutput(result)
    try:
        # parse the JSON output
        x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    except:
        self.fail('error parsing the output in JSON format')

    # the number of successful transfers should be 10 with no failed transfers:
    # of the 20 files, the 10 inside the excluded sub-dir are skipped
    self.assertEquals(x.TransfersCompleted, 10)
    self.assertEquals(x.TransfersFailed, 0)
def test_blobfs_upload_100_1Kb_file(explictFromTo=False, forceOAuthLogin=False, tenantID="", aadEndpoint=""):
    # create dir with 100 1KB files inside it
    dir_name = "dir_blobfs_100_1K"
    dir_n_file_path = util.create_test_n_files(1024, 100, dir_name)

    # upload the directory with 100 files inside it
    cmd = util.Command("copy").add_arguments(dir_n_file_path).add_arguments(util.test_bfs_account_url). \
        add_flags("log-level", "Info").add_flags("recursive", "true")
    util.process_oauth_command(cmd, "LocalBlobFS" if explictFromTo else "", forceOAuthLogin, tenantID, aadEndpoint)
    if forceOAuthLogin:
        result = cmd.execute_azcopy_command_interactive()
    else:
        result = cmd.execute_azcopy_copy_command()
    if not result:
        print("test_blobfs_upload_100_1Kb_file failed uploading the dir ", dir_name, " to the filesystem")
        sys.exit(1)

    # validate the uploaded directory
    dirUrl = util.test_bfs_account_url + dir_name
    result = util.Command("testBlobFS").add_arguments(dir_n_file_path).add_arguments(dirUrl).\
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    if not result:
        print("test_blobfs_upload_100_1Kb_file failed while validating the file upload")
        sys.exit(1)

    print("test_blobfs_upload_100_1Kb_file successfully passed")
def util_test_blobfs_download_100_1Kb_file(self, explictFromTo=False):
    # create dir with 100 1KB files inside it
    dir_name = "dir_blobfs_d_100_1K"
    dir_n_file_path = util.create_test_n_files(1024, 100, dir_name)

    # upload the directory with 100 files inside it
    cmd = util.Command("copy").add_arguments(dir_n_file_path).add_arguments(util.test_bfs_account_url). \
        add_flags("log-level", "Info").add_flags("recursive", "true")
    util.process_oauth_command(cmd, "LocalBlobFS" if explictFromTo else "")
    result = cmd.execute_azcopy_copy_command()
    self.assertTrue(result)

    # validate the uploaded directory
    dirUrl = util.test_bfs_account_url + dir_name
    result = util.Command("testBlobFS").add_arguments(dir_n_file_path).add_arguments(dirUrl). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)

    # delete the local directory created
    try:
        shutil.rmtree(dir_n_file_path)
    except:
        self.fail('error deleting the directory ' + dir_n_file_path)

    # download the directory
    cmd = util.Command("copy").add_arguments(dirUrl).add_arguments(util.test_directory_path).\
        add_flags("log-level", "Info").add_flags("recursive", "true")
    util.process_oauth_command(cmd, "BlobFSLocal" if explictFromTo else "")
    result = cmd.execute_azcopy_copy_command()
    self.assertTrue(result)

    # validate the downloaded directory
    result = util.Command("testBlobFS").add_arguments(dir_n_file_path).add_arguments(dirUrl).\
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)
def recursive_download_blob(self):
    # create a directory and 5 files of 1KB inside that directory.
    dir_name = "dir_" + str(10) + "_files"
    dir1_path = util.create_test_n_files(1024, 5, dir_name)

    # upload the directory to the container through azcopy with recursive set to true.
    result = util.Command("copy").add_arguments(dir1_path).add_arguments(util.test_container_url).\
        add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # verify the uploaded files.
    destination_sas = util.get_resource_sas(dir_name)
    result = util.Command("testBlob").add_arguments(dir1_path).add_arguments(destination_sas).\
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)

    # remove the local source so the download can be verified from scratch
    try:
        shutil.rmtree(dir1_path)
    except OSError as e:
        self.fail('error removing the directory ' + dir1_path)

    # download the directory created from the container through azcopy with recursive set to true.
    result = util.Command("copy").add_arguments(destination_sas).add_arguments(util.test_directory_path).add_flags(
        "log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # verify the downloaded blobs.
    result = util.Command("testBlob").add_arguments(dir1_path).add_arguments(destination_sas).\
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)
def test_blobfs_sas_download_100_1Kb_file(self):
    # create dir with 100 1KB files inside it
    dir_name = "dir_blobfs_sas_d_100_1K"
    dir_n_file_path = util.create_test_n_files(1024, 100, dir_name)

    # upload the directory with 100 files inside it
    result = util.Command("copy").add_arguments(dir_n_file_path).add_arguments(util.test_bfs_sas_account_url). \
        add_flags("log-level", "Info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # validate the uploaded directory
    dirUrl = util.get_resource_sas_from_bfs(dir_name)
    dirUrl_nosas = util.test_bfs_account_url + dir_name
    result = util.Command("testBlobFS").add_arguments(dir_n_file_path).add_arguments(dirUrl). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)

    # delete the local files
    try:
        shutil.rmtree(dir_n_file_path)
    except:
        self.fail('error deleting the directory ' + dir_n_file_path)

    # download the directory
    result = util.Command("copy").add_arguments(dirUrl).add_arguments(util.test_directory_path). \
        add_flags("log-level", "Info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # validate the downloaded directory
    result = util.Command("testBlobFS").add_arguments(dir_n_file_path).add_arguments(dirUrl). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)
def test_sync_blob_download_without_wildcards(self):
    # create a directory and 10 files inside the directory
    dir_name = "sync_download_without_wildcards"
    dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

    # upload the directory
    result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
        add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
    self.assertTrue(result)

    # execute the validator.
    dir_sas = util.get_resource_sas(dir_name)
    result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)

    # download the destination to the source to match the last modified times
    result = util.Command("copy").add_arguments(dir_sas).add_arguments(util.test_directory_path). \
        add_flags("log-level", "info").add_flags("recursive", "true").add_flags("output", "json"). \
        add_flags("preserve-last-modified-time", "true").execute_azcopy_copy_command_get_output()
    self.assertNotEquals(result, None)

    # execute the validator and verify the downloaded dir
    result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(dir_sas). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)

    # sync the source and destination; they already match, so the sync
    # schedules no transfers and is expected to report failure
    result = util.Command("sync").add_arguments(dir_sas).add_arguments(dir_n_files_path). \
        add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertFalse(result)
def util_test_copy_n_files_from_gcp_bucket_to_blob_account(self, srcBucketURL, dstAccountURL, n=10, sizeInKB=1):
    srcType = "GCP"

    # create source bucket
    result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
        add_flags("resourceType", "Bucket").execute_azcopy_create()
    self.assertTrue(result)

    # create n test files in the source directory and upload them
    src_dir_name = "copy_%d_%dKB_files_from_gcp_bucket_to_blob_account" % (n, sizeInKB)
    src_dir_path = util.create_test_n_files(sizeInKB * 1024, n, src_dir_name)
    self.util_upload_to_src(src_dir_path, srcType, srcBucketURL, True)

    # copy the whole bucket to the destination account
    result = util.Command("copy").add_arguments(srcBucketURL).add_arguments(dstAccountURL). \
        add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # download the copied files for validation
    validate_dir_name = "validate_copy_%d_%dKB_files_from_gcp_bucket_to_blob_account" % (n, sizeInKB)
    local_validate_dest = util.create_test_dir(validate_dir_name)
    validateDstBucketURL = util.get_object_sas(dstAccountURL, self.bucket_name)
    dst_directory_url = util.get_object_sas(validateDstBucketURL, src_dir_name)
    result = util.Command("copy").add_arguments(dst_directory_url).add_arguments(local_validate_dest). \
        add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # verify the downloaded files against the local source
    result = self.util_are_dir_trees_equal(
        src_dir_path, os.path.join(local_validate_dest, src_dir_name))
    self.assertTrue(result)
def test_follow_symlinks_upload(self):
    link_name = "dir_link"
    outside_dir = "dir_outside_linkto"
    home_dir = "dir_home_follow_symlink_upload"

    # produce all necessary paths
    outside_path = util.create_test_n_files(1024, 10, outside_dir)
    home_path = util.create_test_dir(home_dir)
    link_path = os.path.join(home_path, link_name)

    # create the symlink
    os.symlink(outside_path, link_path, target_is_directory=True)

    # upload the home path
    result = util.Command("copy").add_arguments(home_path).add_arguments(util.test_container_url). \
        add_flags("log-level", "Info").add_flags("recursive", "true").add_flags("output-type", "json"). \
        add_flags("follow-symlinks", "true").execute_azcopy_copy_command_get_output()

    result = util.parseAzcopyOutput(result)
    try:
        # parse the JSON output
        x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    except:
        self.fail('error parsing the output in JSON format')

    # the 10 files behind the followed symlink should be transferred
    self.assertEquals(x.TransfersCompleted, "10")
    self.assertEquals(x.TransfersFailed, "0")
def util_test_n_1kb_blob_upload(self, number_of_files, use_oauth_session=False):
    # create dir dir_n_files and 1 KB files inside the dir.
    dir_name = "dir_" + str(number_of_files) + "_files"
    dir_n_files_path = util.create_test_n_files(1024, number_of_files, dir_name)

    if not use_oauth_session:
        dest = util.test_container_url
        dest_validate = util.get_resource_sas(dir_name)
    else:
        dest = util.test_oauth_container_url
        dest_validate = util.get_resource_from_oauth_container_validate(dir_name)

    # execute azcopy command
    result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(dest). \
        add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
    self.assertTrue(result)

    # execute the validator.
    result = util.Command("testBlob").add_arguments(dir_n_files_path).add_arguments(dest_validate). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)
def test_upload_block_blob_include_flag(self):
    dir_name = "dir_include_flag_set_upload"
    # create 10 files inside the directory
    dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

    # create sub-directory inside dir_include_flag_set_upload
    sub_dir_name = os.path.join(dir_name, "sub_dir_include_flag_set_upload")
    # create 10 files inside the sub-dir
    sub_dir_n_file_path = util.create_test_n_files(1024, 10, sub_dir_name)

    # upload the directory with 2 file names (4 files) in the include flag.
    result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
        add_flags("recursive", "true").add_flags("log-level", "info") \
        .add_flags("include-pattern", "test101024_2.txt;test101024_3.txt").add_flags("output-type", "json") \
        .execute_azcopy_copy_command_get_output()

    # parse the result to get the last job progress summary
    result = util.parseAzcopyOutput(result)
    try:
        # parse the JSON output
        x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    except:
        self.fail('error parsing the output in JSON format')

    # the two patterns match two files in the dir and two in the sub-dir,
    # so 4 successful transfers are expected with no failures
    self.assertEquals(x.TransfersCompleted, "4")
    self.assertEquals(x.TransfersFailed, "0")

    # upload the directory with the sub-dir in the include flag.
    result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
        add_flags("recursive", "true").add_flags("log-level", "info") \
        .add_flags("include-path", "sub_dir_include_flag_set_upload/").add_flags("output-type", "json") \
        .execute_azcopy_copy_command_get_output()

    # parse the result to get the last job progress summary
    result = util.parseAzcopyOutput(result)
    try:
        # parse the JSON output
        x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    except:
        self.fail('error parsing the output in JSON format')

    # only the 10 files under the included sub-dir should be transferred, with no failures
    self.assertEquals(x.TransfersCompleted, "10")
    self.assertEquals(x.TransfersFailed, "0")
def test_recursive_download_blob_oauth(forceOAuthLogin=False, tenantID="", aadEndpoint=""):
    # create a directory and 5 files of 1KB inside that directory.
    dir_name = "dir_" + str(10) + "_files"
    dir1_path = util.create_test_n_files(1024, 5, dir_name)
    dest = util.test_oauth_container_url

    # upload the directory to the container through azcopy with recursive set to true.
    cmd = util.Command("copy").add_arguments(dir1_path).add_arguments(dest).add_flags("log-level", "info") \
        .add_flags("recursive", "true")
    util.process_oauth_command(cmd, "", forceOAuthLogin, tenantID, aadEndpoint)
    if forceOAuthLogin:
        result = cmd.execute_azcopy_command_interactive()
    else:
        result = cmd.execute_azcopy_copy_command()
    if not result:
        print("error uploading recursive dir ", dir1_path)
        return

    # verify the uploaded files.
    dest_validate = util.get_resource_from_oauth_container_validate(dir_name)
    result = util.Command("testBlob").add_arguments(dir1_path).add_arguments(
        dest_validate).add_flags("is-object-dir", "true").execute_azcopy_verify()
    if not result:
        print("error verifying the recursive dir ", dir1_path, " upload")
        return

    # remove the local source so the download can be verified from scratch
    try:
        shutil.rmtree(dir1_path)
    except OSError as e:
        print("error removing the uploaded files. ", e)
        return

    # download the directory created from the container through azcopy with recursive set to true.
    src_download = util.get_resource_from_oauth_container(dir_name)
    cmd = util.Command("copy").add_arguments(src_download).add_arguments(
        util.test_directory_path).add_flags("log-level", "info").add_flags("recursive", "true")
    util.process_oauth_command(cmd, "", forceOAuthLogin, tenantID, aadEndpoint)
    if forceOAuthLogin:
        result = cmd.execute_azcopy_command_interactive()
    else:
        result = cmd.execute_azcopy_copy_command()
    if not result:
        print("error downloading recursive dir ", dir1_path)
        return

    # verify the downloaded blobs.
    result = util.Command("testBlob").add_arguments(dir1_path).add_arguments(
        dest_validate).add_flags("is-object-dir", "true").execute_azcopy_verify()
    if not result:
        print("error verifying the recursive download")
        return

    print("test_recursive_download_blob successfully passed")
def test_blobfs_download_100_1Kb_file(explictFromTo=False):
    # create dir with 100 1KB files inside it
    dir_name = "dir_blobfs_d_100_1K"
    dir_n_file_path = util.create_test_n_files(1024, 100, dir_name)

    # upload the directory with 100 files inside it
    cmd = util.Command("copy").add_arguments(dir_n_file_path).add_arguments(util.test_bfs_account_url). \
        add_flags("log-level", "Info").add_flags("recursive", "true")
    util.process_oauth_command(cmd, "LocalBlobFS" if explictFromTo else "")
    result = cmd.execute_azcopy_copy_command()
    if not result:
        print("test_blobfs_download_100_1Kb_file failed uploading the dir ", dir_name, " to the filesystem")
        sys.exit(1)

    # validate the uploaded directory
    dirUrl = util.test_bfs_account_url + dir_name
    result = util.Command("testBlobFS").add_arguments(dir_n_file_path).add_arguments(dirUrl). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    if not result:
        print("test_blobfs_download_100_1Kb_file failed while validating the uploaded directory")
        sys.exit(1)

    # delete the local directory created
    try:
        shutil.rmtree(dir_n_file_path)
    except:
        print("test_blobfs_download_100_1Kb_file failed while deleting the local directory")
        sys.exit(1)

    # download the directory
    cmd = util.Command("copy").add_arguments(dirUrl).add_arguments(util.test_directory_path).\
        add_flags("log-level", "Info").add_flags("recursive", "true")
    util.process_oauth_command(cmd, "BlobFSLocal" if explictFromTo else "")
    result = cmd.execute_azcopy_copy_command()
    if not result:
        print("test_blobfs_download_100_1Kb_file failed while downloading the directory")
        sys.exit(1)

    # validate the downloaded directory
    result = util.Command("testBlobFS").add_arguments(dir_n_file_path).add_arguments(dirUrl).\
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    if not result:
        print("test_blobfs_download_100_1Kb_file failed while validating the downloaded directory")
        sys.exit(1)

    print("test_blobfs_download_100_1Kb_file successfully passed")
def test_load_entire_directory(self):
    dir_name = "dir_load_test"
    dir_path = util.create_test_n_files(1024, 10, dir_name)

    # create sub-directory inside directory
    sub_dir_name = os.path.join(dir_name, "sub_dir_load_test")
    util.create_test_n_files(1024, 10, sub_dir_name)

    # clean out the container; ignore the error since the container might already be empty
    util.Command("rm").add_arguments(util.test_container_url). \
        add_flags("recursive", "true").execute_azcopy_copy_command()

    # invoke the load command
    state_path = os.path.join(util.test_directory_path, "clfsload-state")
    result = util.Command("load clfs").add_arguments(dir_path).add_arguments(util.test_container_url). \
        add_flags("max-errors", "8").add_flags("state-path", state_path).add_flags("preserve-hardlinks", "true") \
        .add_flags("compression-type", "LZ4").execute_azcopy_copy_command()
    self.assertTrue(result)
def util_test_copy_n_files_from_x_dir_to_x_dir(self, srcBucketURL, srcType, dstBucketURL, dstType, n=10, sizeInKB=1):
    # create source bucket
    result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
        add_flags("resourceType", "Bucket").execute_azcopy_create()
    self.assertTrue(result)

    # create n files of sizeInKB KB each in a newly created directory.
    src_dir_name = "copy_%d_%dKB_files_from_%s_dir_to_%s_dir" % (n, sizeInKB, srcType, dstType)
    src_dir_path = util.create_test_n_files(sizeInKB * 1024, n, src_dir_name)

    # upload files using azcopy.
    # TODO: note that S3/Google need special logic
    result = util.Command("copy").add_arguments(src_dir_path).add_arguments(srcBucketURL). \
        add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    srcDirURL = util.get_object_sas(srcBucketURL, src_dir_name)
    dstDirURL = util.get_object_sas(dstBucketURL, src_dir_name)

    # copy files using azcopy from srcURL to destURL
    result = util.Command("copy").add_arguments(srcDirURL).add_arguments(dstDirURL). \
        add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # download the copied files for validation
    validate_dir_name = "validate_copy_%d_%dKB_files_from_%s_dir_to_%s_dir" % (
        n, sizeInKB, srcType, dstType)
    local_validate_dest = util.create_test_dir(validate_dir_name)
    result = util.Command("copy").add_arguments(dstDirURL).add_arguments(local_validate_dest). \
        add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # verify the downloaded blobs; copying dir-to-dir nests the source directory
    # inside the destination directory, hence the doubled src_dir_name below.
    # TODO: this is special behavior that needs confirmation
    result = self.util_are_dir_trees_equal(
        src_dir_path, os.path.join(local_validate_dest, src_dir_name, src_dir_name))
    # result = self.util_are_dir_trees_equal(src_dir_path, local_validate_dest)
    self.assertTrue(result)

    # clean up both source and destination buckets
    util.Command("clean").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
        add_flags("resourceType", "Bucket").execute_azcopy_create()
    util.Command("clean").add_arguments(dstBucketURL).add_flags("serviceType", dstType). \
        add_flags("resourceType", "Bucket").execute_azcopy_create()
def test_long_file_path_upload_with_nested_directories(self):
    dir_name = "dir_lfpupwnds"
    dir_path = util.create_test_n_files(1024, 10, dir_name)

    # nest 30 sub-directories, each holding 10 more files
    parent_dir = dir_name
    for i in range(0, 30):
        sub_dir_name = "s_" + str(i)
        parent_dir = os.path.join(parent_dir, sub_dir_name)
        util.create_test_n_files(1024, 10, parent_dir)

    # upload the directory
    result = util.Command("copy").add_arguments(dir_path).add_arguments(util.test_container_url). \
        add_flags("log-level", "Info").add_flags("output-type", "json").add_flags("recursive", "true") \
        .execute_azcopy_copy_command_get_output()

    result = util.parseAzcopyOutput(result)
    try:
        # parse the JSON output
        x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    except:
        self.fail('error parsing the output in JSON format')

    # 10 files in the root plus 10 in each of the 30 nested sub-directories = 310 transfers
    self.assertEquals(x.TransfersCompleted, 310)
    self.assertEquals(x.TransfersFailed, 0)
def util_test_n_1kb_file_in_dir_upload_to_share(self, number_of_files):
    # create dir dir_n_files and 1 KB files inside the dir.
    dir_name = "dir_" + str(number_of_files) + "_files"
    sub_dir_name = "dir subdir_" + str(number_of_files) + "_files"

    # create n test files in dir
    src_dir = util.create_test_n_files(1024, number_of_files, dir_name)

    # create n test files in subdir, subdir is contained in dir
    util.create_test_n_files(1024, number_of_files, os.path.join(dir_name, sub_dir_name))

    # execute azcopy command
    dest_share = util.test_share_url
    result = util.Command("copy").add_arguments(src_dir).add_arguments(dest_share). \
        add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
    self.assertTrue(result)

    # execute the validator.
    dest_azure_dir = util.get_resource_sas_from_share(dir_name)
    result = util.Command("testFile").add_arguments(src_dir).add_arguments(dest_azure_dir). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)
def util_test_copy_files_from_x_account_to_x_account(self, srcAccountURL, srcType, dstAccountURL, dstType):
    src_container_url = util.get_object_sas(srcAccountURL, self.bucket_name)
    validate_dst_container_url = util.get_object_sas(dstAccountURL, self.bucket_name)

    # create source bucket
    result = util.Command("create").add_arguments(src_container_url).add_flags("serviceType", srcType). \
        add_flags("resourceType", "Bucket").execute_azcopy_create()
    self.assertTrue(result)

    # create 100 files of 1 KB each.
    src_dir_name = "copy_files_from_%s_account_to_%s_account" % (srcType, dstType)
    src_dir_path = util.create_test_n_files(1 * 1024, 100, src_dir_name)

    # upload files using azcopy.
    # TODO: note that S3/Google need special logic
    result = util.Command("copy").add_arguments(src_dir_path).add_arguments(src_container_url). \
        add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # copy files using azcopy from srcURL to destURL
    result = util.Command("copy").add_arguments(srcAccountURL).add_arguments(dstAccountURL). \
        add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # download the copied files for validation
    validate_dir_name = "validate_copy_files_from_%s_account_to_%s_account" % (srcType, dstType)
    local_validate_dest = util.create_test_dir(validate_dir_name)
    dst_container_url = util.get_object_sas(dstAccountURL, self.bucket_name)
    dst_directory_url = util.get_object_sas(dst_container_url, src_dir_name)
    result = util.Command("copy").add_arguments(dst_directory_url).add_arguments(local_validate_dest). \
        add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # verify the downloaded blobs
    result = self.util_are_dir_trees_equal(
        src_dir_path, os.path.join(local_validate_dest, src_dir_name))
    self.assertTrue(result)

    # clean up both source and destination buckets
    util.Command("clean").add_arguments(src_container_url).add_flags("serviceType", srcType). \
        add_flags("resourceType", "Bucket").execute_azcopy_create()
    util.Command("clean").add_arguments(validate_dst_container_url).add_flags("serviceType", dstType). \
        add_flags("resourceType", "Bucket").execute_azcopy_create()
def test_overwrite_flag_set_to_if_source_new_upload(self):
    # create a directory with 20 files in it.
    dir_name = "dir_overwrite_flag_set_upload"
    dir_n_files_path = util.create_test_n_files(1024, 20, dir_name)

    # wait a bit so that the lmt of the source is in the past, then upload the directory.
    time.sleep(2)
    result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
        add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
    self.assertTrue(result)

    # upload the directory again with the overwrite flag set to ifSourceNewer.
    result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
        add_flags("recursive", "true").add_flags("overwrite", "ifSourceNewer").add_flags("log-level", "info"). \
        add_flags("output-type", "json").execute_azcopy_copy_command_get_output()
    self.assertNotEquals(result, None)

    # parse the JSON and compare the number of skipped and completed transfers.
    result = util.parseAzcopyOutput(result)
    try:
        x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    except:
        self.fail('error parsing the output in JSON format')
    # nothing changed locally, so all 20 transfers should be skipped
    self.assertEquals(x.TransfersSkipped, "20")
    self.assertEquals(x.TransfersCompleted, "0")

    time.sleep(10)
    # refresh the lmts of the source files so that they appear newer
    for filename in os.listdir(dir_n_files_path):
        # update the lmts of the files to the current time
        os.utime(os.path.join(dir_n_files_path, filename), None)

    # upload the directory again with the overwrite flag set to ifSourceNewer.
    result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
        add_flags("recursive", "true").add_flags("overwrite", "ifSourceNewer").add_flags("log-level", "info"). \
        add_flags("output-type", "json").execute_azcopy_copy_command_get_output()
    self.assertNotEquals(result, None)

    # parse the JSON and compare the number of skipped and completed transfers.
    result = util.parseAzcopyOutput(result)
    try:
        x = json.loads(result, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    except:
        self.fail('error parsing the output in JSON format')
    # the sources are now newer than the destination blobs, so all 20 should be re-uploaded
    self.assertEquals(x.TransfersSkipped, "0")
    self.assertEquals(x.TransfersCompleted, "20")
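# os.utime(path, None) above stamps the file with the current time. As a
# minimal sketch of the alternative, an explicit timestamp can be set instead
# of "now"; the helper name and offset below are illustrative assumptions,
# not part of the suite.
import os
import time


def backdate_file_sketch(path, seconds_in_past=60):
    # set both access and modification times to a point in the past,
    # e.g. to make a destination look newer than the source.
    past = time.time() - seconds_in_past
    os.utime(path, (past, past))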
def test_blobfs_download_200_1Kb_file(self):
    # create dir with 200 1KB files inside it
    dir_name = "dir_blobfs_200_1K"
    dir_n_file_path = util.create_test_n_files(1024, 200, dir_name)

    # upload the directory with 200 files inside it
    result = util.Command("copy").add_arguments(dir_n_file_path).add_arguments(util.test_bfs_account_url). \
        add_flags("log-level", "Info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # validate the uploaded directory
    dirUrl = util.test_bfs_account_url + dir_name
    result = util.Command("testBlobFS").add_arguments(dir_n_file_path).add_arguments(dirUrl). \
        add_flags("is-object-dir", "true").execute_azcopy_verify()
    self.assertTrue(result)
def test_remove_virtual_directory(self):
    # create dir dir_10_files_rm and 1 KB files inside the dir.
    dir_name = "dir_" + str(10) + "_files_rm"
    dir_n_files_path = util.create_test_n_files(1024, 10, dir_name)

    # execute azcopy command
    result = util.Command("copy").add_arguments(dir_n_files_path).add_arguments(util.test_container_url). \
        add_flags("recursive", "true").add_flags("log-level", "info").execute_azcopy_copy_command()
    self.assertTrue(result)

    # remove the uploaded virtual directory
    destination = util.get_resource_sas(dir_name)
    result = util.Command("rm").add_arguments(destination).add_flags(
        "recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # listing the removed directory should find no resources
    result = util.Command("list").add_arguments(destination).add_flags(
        "resource-num", "0").execute_azcopy_verify()
    self.assertTrue(result)
def util_test_copy_n_files_from_x_dir_to_x_dir(self, srcBucketURL, srcType, dstBucketURL, dstType, n=10, sizeInKB=1):
    # create source bucket
    result = util.Command("create").add_arguments(srcBucketURL).add_flags("serviceType", srcType). \
        add_flags("resourceType", "Bucket").execute_azcopy_create()
    self.assertTrue(result)

    # create n test files in the source directory and upload them
    src_dir_name = "copy_%d_%dKB_files_from_%s_dir_to_%s_dir" % (n, sizeInKB, srcType, dstType)
    src_dir_path = util.create_test_n_files(sizeInKB * 1024, n, src_dir_name)
    self.util_upload_to_src(src_dir_path, srcType, srcBucketURL, True)

    if srcType == "GCP":
        srcDirURL = util.get_object_without_sas(srcBucketURL, src_dir_name)
    else:
        srcDirURL = util.get_object_sas(srcBucketURL, src_dir_name)
    dstDirURL = util.get_object_sas(dstBucketURL, src_dir_name)

    # copy the directory from the source bucket to the destination bucket
    result = util.Command("copy").add_arguments(srcDirURL).add_arguments(dstDirURL). \
        add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # download the copied files for validation
    validate_dir_name = "validate_copy_%d_%dKB_files_from_%s_dir_to_%s_dir" % (
        n, sizeInKB, srcType, dstType)
    local_validate_dest = util.create_test_dir(validate_dir_name)
    result = util.Command("copy").add_arguments(dstDirURL).add_arguments(local_validate_dest). \
        add_flags("log-level", "info").add_flags("recursive", "true").execute_azcopy_copy_command()
    self.assertTrue(result)

    # copying dir-to-dir nests the source directory inside the destination
    # directory, hence the doubled src_dir_name in the comparison path
    print(src_dir_path)
    print(os.path.join(local_validate_dest, src_dir_name, src_dir_name))
    result = self.util_are_dir_trees_equal(
        src_dir_path, os.path.join(local_validate_dest, src_dir_name, src_dir_name))
    self.assertTrue(result)