def test_unpack_zip_directory(self):
    """unpack() on a zip with nested dirs extracts only files under internal_path."""
    outer_dir = "first_dir"
    inner_dir = "second_dir"
    outer_path = os.path.join(test_dir, outer_dir)
    os.makedirs(os.path.join(outer_path, inner_dir))

    compressed_file_path = os.path.join(test_dir, "bad_ext_dep_zip.zip")
    destination = test_dir
    internal_path = os.path.join(outer_dir, inner_dir)
    compression_type = "zip"

    # Only files inside internal_path should be there after unpack.
    # Each entry: (file path, is this file expected to be unpacked?)
    test_files = [
        (os.path.join(test_dir, internal_path, "bad_json_file.json"), True),
        (os.path.join(test_dir, outer_dir, "json_file.json"), False),
    ]

    # Lay the payload files down on disk.
    for path, _expected in test_files:
        with open(path, "w+") as ext_dep_file:
            ext_dep_file.write(bad_json_file)

    # Archive both files, preserving their path relative to test_dir.
    with zipfile.ZipFile(compressed_file_path, 'w') as _zip:
        for path, _expected in test_files:
            _zip.write(path, arcname=path.split(test_dir)[1])

    # Remove the originals so only unpack() can restore them.
    shutil.rmtree(outer_path)
    self.assertFalse(os.path.isdir(outer_path))

    WebDependency.unpack(compressed_file_path, destination, internal_path, compression_type)

    # Files under internal_path must exist; files outside it must not.
    for path, expected in test_files:
        if expected:
            self.assertTrue(os.path.isfile(path))
        else:
            self.assertFalse(os.path.isfile(path))
def test_unpack_tar_file(self):
    """unpack() restores a single file from a gzipped tarball."""
    archive_path = os.path.join(test_dir, "bad_ext_dep_zip.tar.gz")
    destination = test_dir
    internal_path = "bad_ext_dep.json"
    compression_type = "tar"
    payload_path = os.path.join(test_dir, internal_path)

    # Create the payload file and tar it up under its bare name.
    with open(payload_path, "w+") as ext_dep_file:
        ext_dep_file.write(bad_json_file)
    with tarfile.open(archive_path, "w:gz") as _tar:
        _tar.add(payload_path, arcname=os.path.basename(payload_path))

    # Delete the original so only unpack() can bring it back.
    os.remove(payload_path)
    self.assertFalse(os.path.isfile(payload_path))

    WebDependency.unpack(archive_path, destination, internal_path, compression_type)
    self.assertTrue(os.path.isfile(payload_path))
def test_unpack_zip_file(self):
    """unpack() restores a single file from a zip archive."""
    archive_path = os.path.join(test_dir, "bad_ext_dep_zip.zip")
    destination = test_dir
    internal_path = "bad_ext_dep.json"
    compression_type = "zip"
    payload_path = os.path.join(test_dir, internal_path)

    # Create the payload file and zip it up under its bare name.
    with open(payload_path, "w+") as ext_dep_file:
        ext_dep_file.write(bad_json_file)
    with zipfile.ZipFile(archive_path, 'w') as _zip:
        _zip.write(payload_path, arcname=os.path.basename(payload_path))

    # Delete the original so only unpack() can bring it back.
    os.remove(payload_path)
    self.assertFalse(os.path.isfile(payload_path))

    WebDependency.unpack(archive_path, destination, internal_path, compression_type)
    self.assertTrue(os.path.isfile(payload_path))
def test_multi_level_directory(self):
    """Exercise unpack() with internal_path set at each depth of a 5-level tree.

    For every depth, a fresh tar archive of the whole tree is built and
    unpacked with internal_path pointing one level deeper each iteration;
    only the files at or below internal_path should be extracted.
    """
    global test_dir
    number_of_layers = 5
    directory_name = "test"
    file_name = "file"
    compression_type = "tar"
    # Index 0 is a placeholder so internal_paths[i] matches depth i.
    internal_paths = [""]

    # Set up internal_paths list....
    # It will look like:
    # ["test", "test/testtest", "test/testtest/testtesttest"]
    # To describe the file structure:
    # test_dir/
    #   > test/
    #     >> testtest/
    #       >>> testtesttest/
    #         >>>> testtesttesttest/
    for i in range(1, number_of_layers):
        # Directory name at depth i is the name repeated i times.
        internal_path = (directory_name * i)
        if i - 1 > 0:
            # Nest under the previous depth's path.
            internal_path = os.path.join(internal_paths[i - 1], internal_path)
        internal_paths.insert(i, internal_path)

    # We will pick internal_path each iteration and make sure
    # only the files INSIDE the internal_path were unpacked.
    # If the second level directory is the internal_path, the first level
    # file SHOULD NOT be unpacked because it is out of scope.
    for internal_path_level in range(1, number_of_layers):
        destination = test_dir
        # NOTE(review): extension is .tar but the archive is written gzip
        # compressed ("w:gz") below — presumably unpack() autodetects; confirm.
        compressed_file_path = os.path.join(test_dir, "bad_ext_dep_zip.tar")
        # Creating the deepest path creates every intermediate directory too.
        os.makedirs(os.path.join(test_dir, internal_paths[-1]))

        # create files in each folder
        # files[k] is the file at depth k; index 0 is a placeholder.
        files = [""]
        for file_list_counter in range(1, number_of_layers):
            files.insert(
                file_list_counter,
                os.path.join(test_dir,
                             internal_paths[file_list_counter],
                             file_name * file_list_counter))
            with open(files[file_list_counter], "w+") as ext_dep_file:
                ext_dep_file.write(bad_json_file)

        # zip up the whole thing
        with tarfile.open(compressed_file_path, "w:gz") as _tar:
            for file in files[1:]:
                # Archive names are the paths relative to test_dir.
                _tar.add(file, arcname=file.split(test_dir)[1])

        # Remove the tree so only unpack() can restore any of it.
        shutil.rmtree(os.path.join(test_dir, directory_name))
        self.assertFalse(
            os.path.isdir(os.path.join(test_dir, directory_name)))

        # The internal path moves down the directory structure each iteration
        internal_path = internal_paths[internal_path_level]

        WebDependency.unpack(compressed_file_path, destination, internal_path, compression_type)

        # the file should be unpacked if file_list_counter >= internal_path_level
        for file_list_counter in range(1, number_of_layers):
            if internal_path_level <= file_list_counter:
                self.assertTrue(os.path.isfile(files[file_list_counter]))
            else:
                self.assertFalse(os.path.isfile(files[file_list_counter]))

        # Reset the workspace so the next depth iteration starts clean.
        clean_workspace()
        prep_workspace()