def test_good_files():
    """Benign archives under test/data/good must extract without raising.

    Each good archive contains an ``evil.sh`` entry at a *safe* path; after
    extraction the file must exist in the working directory and is removed
    so the next archive starts from a clean state.
    """
    for entry in os.listdir("./test/data/good"):
        with TarSafe.open(f"./test/data/good/{entry}", "r") as archive:
            archive.extractall()
        assert os.path.exists("./evil.sh")
        os.remove("./evil.sh")
def create_model_package(
    self,
    model_archive_path: Union[Text, Path],
    model_configuration: GraphModelConfiguration,
    domain: Domain,
) -> ModelMetadata:
    """Creates model package (see parent class for full docstring)."""
    logger.debug(f"Starting to create model package for path '{model_archive_path}'.")

    with tempfile.TemporaryDirectory() as temp_dir:
        temporary_directory = Path(temp_dir)

        # Snapshot the current storage contents into the package layout.
        shutil.copytree(
            self._storage_path, temporary_directory / MODEL_ARCHIVE_COMPONENTS_DIR
        )

        model_metadata = self._create_model_metadata(domain, model_configuration)
        self._persist_metadata(model_metadata, temporary_directory)

        if isinstance(model_archive_path, str):
            model_archive_path = Path(model_archive_path)

        # exist_ok=True avoids the check-then-create race of the previous
        # `if not ...exists(): mkdir(...)` pattern.
        model_archive_path.parent.mkdir(parents=True, exist_ok=True)

        # arcname="" places the package contents at the archive root.
        with TarSafe.open(model_archive_path, "w:gz") as tar:
            tar.add(temporary_directory, arcname="")

    logger.debug(f"Model package created in path '{model_archive_path}'.")

    return model_metadata
def _extract_archive_to_directory(
    model_archive_path: Union[Text, Path],
    temporary_directory: Path,
) -> None:
    """Unpacks a gzipped model archive into *temporary_directory*.

    Raises if the unpacked content turns out to be a Rasa 2 model archive.
    """
    with TarSafe.open(model_archive_path, mode="r:gz") as archive:
        archive.extractall(temporary_directory)
    LocalModelStorage._assert_not_rasa2_archive(temporary_directory)
def test_read_from_rasa2_resource(tmp_path_factory: TempPathFactory):
    # Only the fingerprint file is inspected - the nlu and core folders
    # don't even need to exist for the version check to trigger.
    model_dir = tmp_path_factory.mktemp("model_dir")
    old_version = "2.8.5"
    rasa.shared.utils.io.dump_obj_as_json_to_file(
        model_dir / "fingerprint.json",
        {"version": old_version, "irrelevant-other-key": "bla"},
    )

    model_zips = tmp_path_factory.mktemp("model_zips")
    archive_path = model_zips / "model"
    with TarSafe.open(archive_path, "w:gz") as tar:
        tar.add(model_dir, arcname="")

    storage_dir = tmp_path_factory.mktemp("storage_dir")
    storage = LocalModelStorage(storage_path=storage_dir)

    # Both entry points must refuse the Rasa 2 archive and surface the
    # offending version in the error message.
    with pytest.raises(UnsupportedModelVersionError, match=f".*{old_version}.*"):
        storage.from_model_archive(
            storage_path=storage_dir, model_archive_path=archive_path
        )

    with pytest.raises(UnsupportedModelVersionError, match=f".*{old_version}.*"):
        storage.metadata_from_archive(model_archive_path=archive_path)
def lookup_by_name(self, name: str) -> List[Dict[str, Any]]:
    """Returns all vulnerability records in the DB archive matching *name*.

    Args:
        name: Package name to look up (treated as a literal string).

    Returns:
        The parsed JSON documents of every potentially matching record.
    """
    potential_vuln_paths = []
    potential_vulns = []
    # Escape the package name before interpolating it into the regex:
    # names such as "c++" contain metacharacters that would otherwise
    # corrupt the pattern or match the wrong members.  Compile once
    # instead of re-matching the raw pattern per archive member.
    vuln_regex = re.compile(VULN_PATTERN.format(re.escape(name)))
    with TarSafe.open(self.latest_db_path, "r:gz") as tar:
        for member in tar.getmembers():
            if vuln_regex.match(member.name):
                potential_vuln_paths.append(member.name)
        for vuln_path in potential_vuln_paths:
            potential_vulns.append(
                json.loads(tar.extractfile(vuln_path).read()))
    self._logger.debug(
        f"Found {len(potential_vulns)} vulnerabilities potentially affecting package: {name}"
    )
    return potential_vulns
def test_bad_files():
    """Malicious archives under test/data/bad must be rejected by TarSafe."""
    files = os.listdir("./test/data/bad")
    for file_ in files:
        # The unused `as ex` binding was dropped; only the raise matters.
        with pytest.raises(TarSafeException):
            with TarSafe.open(f"./test/data/bad/{file_}", "r") as tar:
                tar.extractall()