def test_fail_with_bad_url(self):
    """Fetching a web dependency whose URL is bad must raise urllib HTTPError.

    Writes a descriptor file containing a known-bad URL, builds a
    WebDependency from it, and verifies fetch() surfaces the HTTP failure.
    """
    ext_dep_file_path = os.path.join(test_dir, "bad_ext_dep.json")
    with open(ext_dep_file_path, "w+") as ext_dep_file:
        ext_dep_file.write(bad_json_file)
    ext_dep_descriptor = EDF.ExternDepDescriptor(
        ext_dep_file_path).descriptor_contents
    ext_dep = WebDependency(ext_dep_descriptor)
    # assertRaises already fails the test when no HTTPError is raised, so the
    # old trailing self.fail() inside this context manager was dead code and
    # has been removed.
    with self.assertRaises(urllib.error.HTTPError):
        ext_dep.fetch()
def test_tar_uses_linux_path_sep(self):
    """Verify tar member names use forward slashes, never Windows backslashes.

    Builds a two-level directory tree, tars a file from the inner level,
    and checks the stored archive name matches the linuxized form of the
    Windows-style path rather than the backslash-joined form.
    """
    top_dir = "first_dir"
    nested_dir = "second_dir"
    top_path = os.path.join(test_dir, top_dir)
    nested_path = os.path.join(top_path, nested_dir)
    os.makedirs(nested_path)

    archive_path = os.path.join(test_dir, "bad_ext_dep_zip.zip")
    relative_dir = os.path.join(top_dir, nested_dir)
    # The same relative path, but joined the Windows way.
    relative_dir_win = "\\".join((top_dir, nested_dir))

    payload_path = os.path.join(test_dir, relative_dir, "bad_json_file.json")
    with open(payload_path, "w+") as payload:
        payload.write(bad_json_file)

    with tarfile.open(archive_path, "w:gz") as archive:
        archive.add(payload_path, arcname=payload_path.split(test_dir)[1])

    with tarfile.open(archive_path, "r:*") as archive:
        namelist = archive.getnames()

    self.assertTrue(len(namelist) == 1)
    # The raw backslash form must not appear in the stored member name...
    self.assertFalse(relative_dir_win in namelist[0])
    # ...but its forward-slash (linuxized) form must.
    self.assertTrue(
        WebDependency.linuxize_path(relative_dir_win) in namelist[0])
def test_single_file(self):
    """Fetch a single-file web dependency and confirm the file landed on disk."""
    descriptor_path = os.path.join(test_dir, "good_ext_dep.json")
    # Serialize the descriptor dict out to a JSON file for the parser.
    with open(descriptor_path, "w+") as descriptor_file:
        descriptor_file.write(json.dumps(single_file_extdep))

    contents = EDF.ExternDepDescriptor(descriptor_path).descriptor_contents
    dependency = WebDependency(contents)
    dependency.fetch()

    extdep_dir_name = single_file_extdep['name'] + "_extdep"
    fetched_file = os.path.join(
        test_dir, extdep_dir_name, single_file_extdep['internal_path'])
    if not os.path.isfile(fetched_file):
        self.fail("The downloaded file isn't there")
def test_get_internal_path_root_flat(self):
    """A single-level internal path roots directly under the outer directory."""
    inner_name = "inner_dir"
    expected_root = os.path.join(test_dir, inner_name)
    self.assertEqual(
        WebDependency.get_internal_path_root(test_dir, inner_name),
        expected_root)
def test_sha256_lowercase_single_file(self):
    """A lowercase sha256 in the descriptor must still validate and fetch."""
    descriptor_path = os.path.join(test_dir, "good_ext_dep.json")
    # Copy the fixture so we can lowercase its hash without mutating it.
    descriptor = jquery_json_file.copy()
    descriptor["sha256"] = descriptor["sha256"].lower()

    with open(descriptor_path, "w+") as descriptor_file:
        descriptor_file.write(json.dumps(descriptor))

    contents = EDF.ExternDepDescriptor(descriptor_path).descriptor_contents
    dependency = WebDependency(contents)
    dependency.fetch()

    fetched_file = os.path.join(
        test_dir, descriptor['name'] + "_extdep", descriptor['internal_path'])
    if not os.path.isfile(fetched_file):
        self.fail("The downloaded file isn't there")
def test_sha256_whole_tar_directory(self):
    """Fetch a sha256-validated tar-directory dependency; expect its README."""
    descriptor_path = os.path.join(test_dir, "good_ext_dep.json")
    with open(descriptor_path, "w+") as descriptor_file:
        descriptor_file.write(json.dumps(tar_directory_extdep))

    contents = EDF.ExternDepDescriptor(descriptor_path).descriptor_contents
    dependency = WebDependency(contents)
    dependency.fetch()

    extdep_folder = os.path.join(
        test_dir, tar_directory_extdep['name'] + "_extdep")
    if not os.path.exists(os.path.join(extdep_folder, "README")):
        # Log the folder we looked in to make the failure debuggable.
        logging.warning(extdep_folder)
        self.fail()
def test_unpack_zip_directory(self):
    """Unpacking a zip with an internal_path must extract only files inside it.

    A file outside the requested internal path is zipped alongside one
    inside it; after unpack, only the inside file may exist on disk.
    """
    top_dir = "first_dir"
    nested_dir = "second_dir"
    top_path = os.path.join(test_dir, top_dir)
    os.makedirs(os.path.join(top_path, nested_dir))

    archive_path = os.path.join(test_dir, "bad_ext_dep_zip.zip")
    destination = test_dir
    internal_path = os.path.join(top_dir, nested_dir)
    compression_type = "zip"

    # (file path, is this file expected to be unpacked?) — only files under
    # internal_path should exist after unpack.
    cases = [
        (os.path.join(test_dir, internal_path, "bad_json_file.json"), True),
        (os.path.join(test_dir, top_dir, "json_file.json"), False),
    ]

    for path, _ in cases:
        with open(path, "w+") as payload:
            payload.write(bad_json_file)

    with zipfile.ZipFile(archive_path, 'w') as archive:
        for path, _ in cases:
            archive.write(path, arcname=path.split(test_dir)[1])

    # Wipe the source tree so anything present afterwards came from unpack.
    shutil.rmtree(top_path)
    self.assertFalse(os.path.isdir(top_path))

    WebDependency.unpack(
        archive_path, destination, internal_path, compression_type)

    for path, should_exist in cases:
        if should_exist:
            self.assertTrue(os.path.isfile(path))
        else:
            self.assertFalse(os.path.isfile(path))
def test_unpack_tar_file(self):
    """Round-trip a single file through a gzipped tar via WebDependency.unpack."""
    archive_path = os.path.join(test_dir, "bad_ext_dep_zip.tar.gz")
    destination = test_dir
    internal_path = "bad_ext_dep.json"
    compression_type = "tar"

    staged_file = os.path.join(test_dir, internal_path)
    with open(staged_file, "w+") as payload:
        payload.write(bad_json_file)

    with tarfile.open(archive_path, "w:gz") as archive:
        archive.add(staged_file, arcname=os.path.basename(staged_file))

    # Delete the original so its presence afterwards proves unpack worked.
    os.remove(staged_file)
    self.assertFalse(os.path.isfile(staged_file))

    WebDependency.unpack(
        archive_path, destination, internal_path, compression_type)
    self.assertTrue(os.path.isfile(staged_file))
def test_unpack_zip_file(self):
    """Round-trip a single file through a zip archive via WebDependency.unpack."""
    archive_path = os.path.join(test_dir, "bad_ext_dep_zip.zip")
    destination = test_dir
    internal_path = "bad_ext_dep.json"
    compression_type = "zip"

    staged_file = os.path.join(test_dir, internal_path)
    with open(staged_file, "w+") as payload:
        payload.write(bad_json_file)

    with zipfile.ZipFile(archive_path, 'w') as archive:
        archive.write(staged_file, arcname=os.path.basename(staged_file))

    # Delete the original so its presence afterwards proves unpack worked.
    os.remove(staged_file)
    self.assertFalse(os.path.isfile(staged_file))

    WebDependency.unpack(
        archive_path, destination, internal_path, compression_type)
    self.assertTrue(os.path.isfile(staged_file))
def test_get_internal_path_root_with_subfolders(self):
    """A nested internal path roots at its first component under the outer dir."""
    top_dir = "first_dir"
    nested_dir = "second_dir"
    expected_root = os.path.join(test_dir, top_dir)
    nested_relative = os.path.join(top_dir, nested_dir)
    self.assertEqual(
        WebDependency.get_internal_path_root(test_dir, nested_relative),
        expected_root)
def ExtDepFactory(descriptor):
    """Instantiate the external-dependency handler matching descriptor['type'].

    Raises ValueError for an unrecognized type string.
    """
    # Import all supported external dependencies here (at call time) to
    # avoid import errors.
    from edk2toolext.environment.extdeptypes.web_dependency import WebDependency
    from edk2toolext.environment.extdeptypes.nuget_dependency import NugetDependency
    from edk2toolext.environment.extdeptypes.git_dependency import GitDependency

    # Dispatch table: type string -> dependency class.
    handlers = {
        NugetDependency.TypeString: NugetDependency,
        WebDependency.TypeString: WebDependency,
        GitDependency.TypeString: GitDependency,
    }
    dep_type = descriptor['type']
    if dep_type in handlers:
        return handlers[dep_type](descriptor)
    raise ValueError("Unknown extdep type '%s' requested!" % dep_type)
def test_multi_level_directory(self):
    """Unpack a deep tar with progressively deeper internal paths.

    For each candidate internal_path level, rebuilds the nested tree,
    tars it, deletes the tree, unpacks with that internal_path, and
    asserts only files at or below the internal_path level reappear.
    The workspace is reset between iterations.
    """
    global test_dir
    number_of_layers = 5
    directory_name = "test"
    file_name = "file"
    compression_type = "tar"
    internal_paths = [""]

    # Set up internal_paths list....
    # It will look like:
    # ["test", "test/testtest", "test/testtest/testtesttest"]
    # To describe the file structure:
    # test_dir/
    #   > test/
    #     >> testtest/
    #       >>> testtesttest/
    #         >>>> testtesttesttest/
    for i in range(1, number_of_layers):
        # Level i's directory name is "test" repeated i times.
        internal_path = (directory_name * i)
        if i - 1 > 0:
            # Chain onto the previous level to form the nested relative path.
            internal_path = os.path.join(internal_paths[i - 1], internal_path)
        internal_paths.insert(i, internal_path)

    # We will pick internal_path each iteration and make sure
    # only the files INSIDE the internal_path were unpacked.
    # If the second level directory is the internal_path, the first level
    # file SHOULD NOT be unpacked because it is out of scope.
    for internal_path_level in range(1, number_of_layers):
        destination = test_dir
        compressed_file_path = os.path.join(test_dir, "bad_ext_dep_zip.tar")
        # Creating the deepest path creates all parent levels too.
        os.makedirs(os.path.join(test_dir, internal_paths[-1]))

        # create files in each folder
        files = [""]
        for file_list_counter in range(1, number_of_layers):
            # Level i's file name is "file" repeated i times.
            files.insert(file_list_counter,
                         os.path.join(test_dir,
                                      internal_paths[file_list_counter],
                                      file_name * file_list_counter))
            with open(files[file_list_counter], "w+") as ext_dep_file:
                ext_dep_file.write(bad_json_file)

        # zip up the whole thing
        with tarfile.open(compressed_file_path, "w:gz") as _tar:
            for file in files[1:]:
                # Archive names are relative to test_dir.
                _tar.add(file, arcname=file.split(test_dir)[1])

        # Remove the source tree so anything on disk after unpack came
        # from the archive.
        shutil.rmtree(os.path.join(test_dir, directory_name))
        self.assertFalse(
            os.path.isdir(os.path.join(test_dir, directory_name)))

        # The internal path moves down the directory structure each iteration
        internal_path = internal_paths[internal_path_level]

        WebDependency.unpack(compressed_file_path,
                             destination,
                             internal_path,
                             compression_type)

        # the file should be unpacked if file_list_counter >= internal_path_level
        for file_list_counter in range(1, number_of_layers):
            if internal_path_level <= file_list_counter:
                self.assertTrue(os.path.isfile(files[file_list_counter]))
            else:
                self.assertFalse(os.path.isfile(files[file_list_counter]))

        # Reset the workspace before the next internal_path level.
        clean_workspace()
        prep_workspace()