def __init__(self, path: Union[str, Path]):
    self._path = Path(path)
    # In case the given path is a pack directory and not a zipped pack - we init the metadata from the pack.
    if not str(path).endswith('.zip'):
        self._metadata = PackMetaData(self._path.joinpath('metadata.json'))
    self._filter_items_by_id_set = False
    self._pack_info_from_id_set: Dict[Any, Any] = {}
def _create_target_dump_dir(self, dest_dir: Optional[Union[Path, str]] = None) -> Path:
    """Create the destination directory. The destination must be a valid directory;
    if not specified, dump in the path of the origin object.

    Args:
        dest_dir: Destination directory to dump the object to.

    Returns:
        Path: Destination directory.

    Raises:
        DumpContentObjectError: If the path is not a valid directory - not a directory or does not exist.
    """
    if dest_dir:
        dest_dir = Path(dest_dir)
        if dest_dir.exists() and not Path(dest_dir).is_dir():
            raise exc.ContentDumpError(self, self._path, "Destination is not a valid directory path")
        else:
            dest_dir.mkdir(parents=True, exist_ok=True)
    else:
        dest_dir = self._path.parent

    return dest_dir
def _create_failed_packs_report(lint_status: dict, path: str):
    """
    Creates and saves a file containing all packs that failed lint.

    :param lint_status: dict
        Dictionary mapping each type of failure to the corresponding failing packs. Looks like this:
         lint_status = {
            "fail_packs_flake8": [],
            "fail_packs_bandit": [],
            "fail_packs_mypy": [],
            "fail_packs_vulture": [],
            "fail_packs_pylint": [],
            "fail_packs_pytest": [],
            "fail_packs_pwsh_analyze": [],
            "fail_packs_pwsh_test": [],
            "fail_packs_image": []
        }
    :param path: str
        The path in which to save the report.
    """
    failed_ut: set = set()
    for key in lint_status:
        if key.startswith('fail'):
            failed_ut = failed_ut.union(lint_status[key])
    if path and failed_ut:
        file_path = Path(path) / "failed_lint_report.txt"
        file_path.write_text('\n'.join(failed_ut))
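# A minimal usage sketch for _create_failed_packs_report; the pack names and report
# directory below are hypothetical, not from the source.
lint_status_example = {
    "fail_packs_flake8": ["HelloWorld"],
    "fail_packs_mypy": ["HelloWorld", "CommonScripts"],
}
_create_failed_packs_report(lint_status=lint_status_example, path="/tmp/lint-reports")
# Writes /tmp/lint-reports/failed_lint_report.txt with the de-duplicated pack names.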
def _filter_changed_packages(content_repo: git.Repo, pkgs: List[Path]) -> List[Path]:
    """Checks which packages have changes according to git (working tree, index, and the
    diff between HEAD and master) and therefore should run on lint.

    Args:
        pkgs(List[Path]): pkgs to check

    Returns:
        List[Path]: A list of packages that should run.
    """
    print(f"Comparing to {Colors.Fg.cyan}{content_repo.remote()}/master{Colors.reset} using branch {Colors.Fg.cyan}"
          f"{content_repo.active_branch}{Colors.reset}")
    staged_files = {
        content_repo.working_dir / Path(item.b_path).parent
        for item in content_repo.active_branch.commit.tree.diff(None, paths=pkgs)
    }
    last_common_commit = content_repo.merge_base(content_repo.active_branch.commit,
                                                 content_repo.remote().refs.master)
    changed_from_master = {
        content_repo.working_dir / Path(item.b_path).parent
        for item in content_repo.active_branch.commit.tree.diff(last_common_commit, paths=pkgs)
    }
    all_changed = staged_files.union(changed_from_master)
    pkgs_to_check = all_changed.intersection(pkgs)

    return list(pkgs_to_check)
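# A hedged GitPython sketch of the comparison above, assuming a clone whose remote
# has a 'master' ref: merge_base finds the last commit shared by the active branch
# and remote master, and diffing the branch head against it lists branch changes.
import git

repo = git.Repo(".")
common = repo.merge_base(repo.active_branch.commit, repo.remote().refs.master)
for diff_item in repo.active_branch.commit.tree.diff(common):
    print(diff_item.b_path)  # path of a file changed on the branch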
def _sanitize_patterns(self):
    self.match_patterns = [str(Path(pattern)) for pattern in self.match_patterns]
    self.skip_patterns = [str(Path(pattern)) for pattern in self.skip_patterns]
def match_pattern(path: Path, pattern: str) -> bool:
    """Checks whether a path matches a `Unix`-like pattern, e.g. `**/*.txt`."""
    enhanced_path = EnhancedPath(str(path))
    return any([
        enhanced_path.match(pattern),
        enhanced_path.globmatch(pattern, flags=GLOBSTAR)
    ])
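# A usage sketch for match_pattern, assuming EnhancedPath is wcmatch's pathlib Path
# (globmatch plus the GLOBSTAR flag lets '**' cross directory boundaries):
assert match_pattern(Path("Packs/Sample/README.md"), "**/*.md")
assert not match_pattern(Path("Packs/Sample/script.py"), "**/*.md")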
def should_watch_file(self, entry):
    entry = Path(entry)
    return any(
        entry.globmatch(pattern, flags=GLOBSTAR)
        for pattern in self.match_patterns
    ) and not any(
        entry.globmatch(pattern, flags=GLOBSTAR)
        for pattern in self.skip_patterns
    )
def _get_packages(self, content_repo: git.Repo, input: str, git: bool, all_packs: bool) -> List[Path]:
    """Get packages paths to run lint command on.

    Args:
        content_repo(git.Repo): Content repository object.
        input(str): A pack directory specified as an argument.
        git(bool): Perform lint and test only on changed packs.
        all_packs(bool): Whether to run on all packages.

    Returns:
        List[Path]: Pkgs to run lint on.
    """
    pkgs: list
    if all_packs or git:
        pkgs = LintManager._get_all_packages(content_dir=content_repo.working_dir)
    elif not all_packs and not git and not input:
        pkgs = [Path().cwd()]
    else:
        pkgs = [Path(item) for item in input.split(',')]
    total_found = len(pkgs)
    if git:
        pkgs = LintManager._filter_changed_packages(content_repo=content_repo, pkgs=pkgs)
        for pkg in pkgs:
            print_v(f"Found changed package {Colors.Fg.cyan}{pkg}{Colors.reset}",
                    log_verbose=self._verbose)
    print(f"Execute lint and test on {Colors.Fg.cyan}{len(pkgs)}/{total_found}{Colors.reset} packages")

    return pkgs
def empty_directory(filepath: Path, ctx: dict, directory: str):
    directory = Path(directory)
    tmp_nm = directory.parent / ("__" + directory.name)
    directory.rename(tmp_nm)
    shutil.rmtree(tmp_nm)
    directory.mkdir()
    return ctx
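# Design note: renaming before deleting makes the directory vanish from its original
# path in a single atomic rename, while the potentially slow recursive delete runs on
# the temporary '__'-prefixed name. A hypothetical call (paths are illustrative):
ctx = empty_directory(Path("trigger.txt"), ctx={}, directory="build/output")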
def linter_obj(mocker) -> Linter:
    mocker.patch.object(linter, 'docker')
    return Linter(pack_dir=Path(__file__).parent / 'content' / 'Integrations' / 'Sample_integration',
                  content_repo=Path(__file__).parent / 'data',
                  req_3=["pytest==3.0"],
                  req_2=["pytest==2.0"],
                  docker_engine=True)
def test_integration_create_content_artifacts_zip(mock_git):
    with temp_dir() as temp:
        runner = CliRunner(mix_stderr=False)
        result = runner.invoke(main, [ARTIFACTS_CMD, '-a', temp])

        assert Path(temp / 'content_new.zip').exists()
        assert Path(temp / 'all_content.zip').exists()
        assert Path(temp / 'content_packs.zip').exists()
        assert Path(temp / 'content_test.zip').exists()
        assert result.exit_code == 0
def test_integration_create_content_artifacts_zip(mock_git, repo):
    with ChangeCWD(repo.path):
        dir_path = repo.make_dir()
        runner = CliRunner(mix_stderr=False)
        result = runner.invoke(main, [ARTIFACTS_CMD, '-a', dir_path])
        dir_path = Path(dir_path)

        assert Path(dir_path / 'content_new.zip').exists()
        assert Path(dir_path / 'all_content.zip').exists()
        assert Path(dir_path / 'content_packs.zip').exists()
        assert Path(dir_path / 'content_test.zip').exists()
        assert result.exit_code == 0
def _serialize(self, dest_dir: Path, zip: bool = True) -> List[Path]:
    """Serialize Agent tool.

    Args:
        dest_dir: Destination directory.
        zip: True if the agent tool should be zipped when serializing.

    Notes:
        1. An agent tool should be zipped when delivered for installation.
        2. A comment should be added to the zip when it is a system agent tool - not a contribution.

    Returns:
        List[Path]: Paths of the newly created files.
    """
    created_files: List[Path] = []
    if zip:
        zip_file = (dest_dir / self.normalize_file_name()).with_suffix('.zip')
        created_files.append(zip_file)
        with zipfile.ZipFile(zip_file, 'w', zipfile.ZIP_DEFLATED) as zipf:
            zipf.comment = b'{ "system": true }'
            for root, _, files in os.walk(self.path):
                for file_name in files:
                    zipf.write(os.path.join(root, file_name), file_name)
    else:
        created_files.extend(Path(copytree(src=self.path, dst=dest_dir / self.normalize_file_name())).iterdir())

    return created_files
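# A hedged sketch of reading back the zip comment written above to tell system agent
# tools from contributions; the archive path is hypothetical.
import json
import zipfile

with zipfile.ZipFile("dist/sample-tool.zip") as zipf:
    flags = json.loads(zipf.comment or b"{}")
    print(flags.get("system", False))  # True for a system agent tool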
def __init__(self, artifacts_path: str, content_version: str, suffix: str, zip: bool, packs: bool, cpus: int):
    """Content artifacts configuration.

    Args:
        artifacts_path: Existing destination directory for creating artifacts.
        content_version: Release content version.
        packs: Create only content_packs artifacts if True.
        suffix: Suffix to add to all files we create.
        zip: True to zip all content artifacts into 3 different zip files in the same structure, else False.
        cpus: Available cpus on the machine.
    """
    self.suffix = suffix
    self.content_version = content_version
    self.zip_artifacts = zip
    self.only_content_packs = packs
    self.artifacts_path = Path(artifacts_path)
    self.content_new_path = self.artifacts_path / 'content_new'
    self.content_test_path = self.artifacts_path / 'content_test'
    self.content_packs_path = self.artifacts_path / 'content_packs'
    self.content_all_path = self.artifacts_path / 'all_content'
    self.cpus = cpus
    self.execution_start = time.time()
    self.content = Content.from_cwd()
    self.exit_code = EX_SUCCESS
def initiate_linter(demisto_content, integration_path, docker_engine=False):
    return linter.Linter(content_repo=demisto_content,
                         pack_dir=Path(integration_path),
                         req_2=[],
                         req_3=[],
                         docker_engine=docker_engine,
                         docker_timeout=60)
def _unify(self, dest_dir: Path) -> List[Path]:
    """Unify YAMLContentUnfiedObject in destination dir.

    Args:
        dest_dir: Destination directory.

    Returns:
        List[Path]: List of newly created files.

    TODO:
        1. Add Exception raising in unify module.
        2. Verbosity to quiet mode option in unify module.
    """
    # Directory configuration - Integrations or Scripts
    unify_dir = ENTITY_TYPE_TO_DIR[self._content_type.value]
    # Unify step
    unifier: Union[IntegrationScriptUnifier, RuleUnifier]
    if self._content_type in [FileType.SCRIPT, FileType.INTEGRATION]:
        unifier = IntegrationScriptUnifier(input=str(self.path.parent), dir_name=unify_dir,
                                           output=dest_dir, force=True,
                                           yml_modified_data=self.to_dict())
    elif self._content_type in [FileType.PARSING_RULE, FileType.MODELING_RULE]:
        unifier = RuleUnifier(input=str(self.path.parent), output=dest_dir, force=True)
    created_files: List[str] = unifier.unify()
    # Validate that unify succeeded - no exception is raised in the unify module.
    if not created_files:
        raise exc.ContentDumpError(self, self.path, "Unable to unify object")

    return [Path(path) for path in created_files]
def _get_all_packages(content_dir: str) -> List[str]:
    """Gets all integration and script packages, both in the legacy content directories and in packs.

    Returns:
        list: A list of integration and script package paths.
    """
    # Get packages from main content path
    content_main_pkgs: set = set(Path(content_dir).glob(['Integrations/*/', 'Scripts/*/']))
    # Get packages from packs path
    packs_dir: Path = Path(content_dir) / 'Packs'
    content_packs_pkgs: set = set(packs_dir.glob(['*/Integrations/*/', '*/Scripts/*/']))
    all_pkgs = content_packs_pkgs.union(content_main_pkgs)

    return list(all_pkgs)
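# Note that glob is called with a list of patterns, which standard pathlib does not
# accept - this assumes the wcmatch pathlib backend. A minimal sketch under that
# assumption (the 'content' directory is hypothetical):
from wcmatch.pathlib import Path as WcPath

pkg_dirs = list(WcPath("content").glob(['Packs/*/Integrations/*/', 'Packs/*/Scripts/*/']))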
def load_img_file():
    for idx in tqdm(range(len(img_files))):
        try:
            img_path = img_files[idx % len(img_files)].rstrip()
            img = Image.open(img_path).convert('RGB')
            # if img.size[0] != 416 or img.size[1] != 416:
            #     img = img.resize((416, 416), resample=3)
            img = np.array(img, dtype=np.uint8)
        except Exception:
            logger.opt(colors=True).info(f'{img_path} damaged or non-existent')
            continue
        try:
            label_path = label_files[idx % len(img_files)].rstrip()
            boxes = np.loadtxt(label_path).reshape(-1, 5)
        except Exception:
            print(f"Could not read label '{label_path}'.")
            continue
        # dict_to_save = {'image': img, 'bbox': boxes, 'img_path': img_path}
        p = Path(img_path)
        path_to_save = serial_path / p.parts[-2]
        path_to_save.mkdir(parents=True, exist_ok=True)
        fname = path_to_save / f'{p.stem}.pcl'
        # serial = m.packb(img, default=m.encode)
        with open(fname, 'wb') as f:
            pickle.dump({'image': img, 'boxes': boxes, 'img_path': img_path}, f)
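# A minimal sketch for loading one of the pickled samples written above; the file
# name is hypothetical.
with open(serial_path / "train" / "000001.pcl", "rb") as f:
    sample = pickle.load(f)
img, boxes = sample['image'], sample['boxes']  # HxWx3 uint8 array, Nx5 label array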
def _unify(self, dest_dir: Path) -> List[Path]:
    """Unify YAMLContentUnfiedObject in destination dir.

    Args:
        dest_dir: Destination directory.

    Returns:
        List[Path]: List of newly created files.

    TODO:
        1. Add Exception raising in unify module.
        2. Verbosity to quiet mode option in unify module.
    """
    # Directory configuration - Integrations or Scripts
    unify_dir = SCRIPTS_DIR if self._content_type == FileType.SCRIPT else INTEGRATIONS_DIR
    # Unify step
    unifier = Unifier(input=str(self.path.parent), dir_name=unify_dir, output=dest_dir, force=True)
    created_files: List[str] = unifier.merge_script_package_to_yml()
    # Validate that unify succeeded - no exception is raised in the unify module.
    if not created_files:
        raise exc.ContentDumpError(self, self.path, "Unable to unify object")

    return [Path(path) for path in created_files]
def run_task_on_matches(filepath: Path, ctx: dict, task: Task, match_pattern: str):
    for filepath_ in glob(str(match_pattern), recursive=True):
        filepath_ = Path(filepath_)
        if task.should_run(filepath_):
            logging.info(f"  Running task {task.name} on {filepath_}:")
            ctx = task.run(deepcopy(ctx), filepath_)
    return ctx
def _split_yaml_4_5_0(self, dest_dir: Path) -> List[Path]:
    """Split YAMLContentUnfiedObject in destination dir.

    Args:
        dest_dir: Destination directory.

    Returns:
        List[Path]: List of newly created files.

    Notes:
        1. If the object contains a docker_image_4_5 key with a value, it should be split into:
            a. <original_file>
            b. <original_file_name>_4_5.yml

    TODO:
        1. Add Exception raising in unify module.
        2. Verbosity to quiet mode option in unify module.
    """
    # Directory configuration - Integrations or Scripts
    unify_dir = ENTITY_TYPE_TO_DIR[self._content_type.value]
    # Split step
    unifier = IntegrationScriptUnifier(input=str(self.path.parent), dir_name=unify_dir,
                                       output=str(dest_dir / self.path.name), force=True)
    yaml_dict = self.to_dict()
    yaml_dict_copy = copy.deepcopy(yaml_dict)
    script_object = self.script
    created_files: List[str] = list(unifier.write_yaml_with_docker(yaml_dict_copy, yaml_dict,
                                                                   script_object).keys())
    # Validate that the split succeeded - no exception is raised in the unify module.
    if not created_files:
        raise exc.ContentDumpError(self, self.path, "Unable to split object")

    return [Path(path) for path in created_files]
def _fix_path(path: Union[Path, str]):
    """Find and validate that the object path is valid.

    Rules:
        1. Path exists.
        2. One of the following options:
            a. Path is a file.
            b. Path is a directory and a file with a yml/yaml suffix exists in the given directory.
        3. File suffix equals "yml" or "yaml".

    Returns:
        Path: valid file path.

    Raises:
        ContentInitializeError: If the path is not valid.
    """
    path = Path(path)
    if path.is_dir():
        try:
            path = next(path.glob(patterns=r'@(*.yml|*yaml|!*unified*)', flags=EXTGLOB | NEGATE))
        except StopIteration:
            raise exc.ContentInitializeError(path, path,
                                             "Can't find yaml or yml file in path (excluding unified).")
    elif not (path.is_file() and path.suffix in [".yaml", ".yml"]):
        raise exc.ContentInitializeError(path, path, "file suffix isn't yaml or yml.")

    return path
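# A hedged sketch of the glob pattern used above, assuming the wcmatch pathlib
# backend: EXTGLOB enables the '@(...)' alternation and NEGATE lets the '!*unified*'
# member exclude unified YAMLs; the directory path is hypothetical.
from wcmatch.pathlib import Path as WcPath, EXTGLOB, NEGATE

yml_files = WcPath("Packs/Sample/Integrations/Sample").glob(r'@(*.yml|*yaml|!*unified*)',
                                                            flags=EXTGLOB | NEGATE)
print(next(yml_files, None))  # first non-unified yml/yaml file, or None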
def path_to_pack_object(path: Union[Path, str]) -> GeneralObject:
    """Create a content object from a path, by the following steps:
        1. Try deterministic file names -> pack_metadata.json, .secrets-ignore, .pack-ignore, reputations.json.
        2. If 'Tools' is in the path -> the object is an AgentTool.
        3. If the file name starts with 'doc-' -> the object is Documentation.
        4. Let find_type determine the object type.

    Args:
        path: File path from which to determine the object type.

    Returns:
        object: Content object.

    Raises:
        ContentFactoryError: If unable to determine the object type from the file path.
    """
    path = Path(path)
    # Deterministic conversion by file name.
    object_type = TYPE_CONVERSION_BY_FILE_NAME.get(path.name)
    # Tools in path
    if not object_type and 'Tools' in path.parts:
        object_type = AgentTool
    # File name starts with doc-*
    if not object_type and path.name.startswith('doc-'):
        object_type = Documentation
    # find_type handling
    if not object_type:
        file_type = find_type(str(path))
        object_type = TYPE_CONVERSION_BY_FileType.get(file_type)
    # Raise exception if we did not succeed
    if not object_type:
        raise ContentFactoryError(None, path, "Unable to get object type from path.")

    return object_type(path)
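# A usage sketch for path_to_pack_object; the pack paths are hypothetical.
tool = path_to_pack_object("Packs/Sample/Tools/sample-tool")   # 'Tools' in parts -> AgentTool
doc = path_to_pack_object("Documentation/doc-howto.json")      # 'doc-' prefix -> Documentation
meta = path_to_pack_object("Packs/Sample/pack_metadata.json")  # resolved by deterministic file name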
def _fix_path(path: Union[Path, str]) -> Path:
    """Find and validate that the object path is valid.

    Rules:
        1. Path exists.
        2. One of the following options:
            a. Path is a file.
            b. Path is a directory and a file with a json suffix exists in the given directory.
        3. File suffix equals ".json".

    Returns:
        Path: valid file path.

    Raises:
        ContentInitializeError: If the path is not valid.
    """
    path = Path(path)
    if path.is_dir():
        try:
            path = next(path.glob(["*.json"]))
        except StopIteration:
            raise exc.ContentInitializeError(JSONObject, path)
    elif not (path.is_file() and path.suffix in [".json"]) and path.name != 'metadata.json':
        raise exc.ContentInitializeError(JSONObject, path)

    return path
def modify_common_server_constants(code_path: Path, content_version: str, branch_name: Optional[str] = None):
    """Modify the content/Packs/Base/Scripts/CommonServerPython.py global variables:
        a. CONTENT_RELEASE_VERSION to the given content version flag.
        b. CONTENT_BRANCH_NAME to the active branch.

    Args:
        code_path: Packs/Base/Scripts/CommonServerPython.py full code path.
        branch_name: Branch name to set in CONTENT_BRANCH_NAME.
        content_version: Content version to set in CONTENT_RELEASE_VERSION.
    """
    file_content_new = re.sub(r"CONTENT_RELEASE_VERSION = '\d\.\d\.\d'",
                              f"CONTENT_RELEASE_VERSION = '{content_version}'",
                              code_path.read_text())
    file_content_new = re.sub(r"CONTENT_BRANCH_NAME = '\w+'",
                              f"CONTENT_BRANCH_NAME = '{branch_name}'",
                              file_content_new)
    code_path.write_text(file_content_new)
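# A worked example of the substitutions above; the file content is illustrative.
code = "CONTENT_RELEASE_VERSION = '0.0.0'\nCONTENT_BRANCH_NAME = 'master'"
code = re.sub(r"CONTENT_RELEASE_VERSION = '\d\.\d\.\d'",
              "CONTENT_RELEASE_VERSION = '20.5.0'", code)
code = re.sub(r"CONTENT_BRANCH_NAME = '\w+'", "CONTENT_BRANCH_NAME = 'my_branch'", code)
# code is now: CONTENT_RELEASE_VERSION = '20.5.0'\nCONTENT_BRANCH_NAME = 'my_branch'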
def write_text_to_file(
    filepath: Path,
    ctx: dict,
    text: str,
    out_dir: str,
    ext: Optional[str] = None,
    relative_to: Optional[str] = None,
):
    if relative_to is not None:
        filepath = filepath.relative_to(relative_to)
    new_filepath = Path(out_dir) / filepath
    new_filepath.parent.mkdir(parents=True, exist_ok=True)
    if ext is not None:
        new_filepath = new_filepath.with_suffix(ext)
    with open(new_filepath, "w") as f:
        f.write(text)
    return ctx
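# A hypothetical call: writes the rendered text to out/docs/page.html, mirroring the
# source tree under out_dir and swapping the suffix via ext.
ctx = write_text_to_file(Path("docs/page.md"), ctx={}, text="<h1>Page</h1>",
                         out_dir="out", ext=".html")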
def _get_packages(self, content_repo: git.Repo, input: str, git: bool, all_packs: bool, base_branch: str) \
        -> List[Path]:
    """Get packages paths to run lint command on.

    Args:
        content_repo(git.Repo): Content repository object.
        input(str): Pack directories specified as an argument (comma-separated).
        git(bool): Perform lint and test only on changed packs.
        all_packs(bool): Whether to run on all packages.
        base_branch (str): Name of the branch to run the diff against.

    Returns:
        List[Path]: Pkgs to run lint on.
    """
    pkgs: list
    if all_packs or git:
        pkgs = LintManager._get_all_packages(content_dir=content_repo.working_dir)
    elif not all_packs and not git and not input:
        pkgs = [Path().cwd()]
    else:
        pkgs = []
        for item in input.split(','):
            is_pack = os.path.isdir(item) and os.path.exists(os.path.join(item, PACKS_PACK_META_FILE_NAME))
            if is_pack:
                pkgs.extend(LintManager._get_all_packages(content_dir=item))
            else:
                pkgs.append(Path(item))
    total_found = len(pkgs)
    if git:
        pkgs = self._filter_changed_packages(content_repo=content_repo, pkgs=pkgs,
                                             base_branch=base_branch)
        for pkg in pkgs:
            print_v(f"Found changed package {Colors.Fg.cyan}{pkg}{Colors.reset}",
                    log_verbose=self._verbose)
    print(f"Execute lint and test on {Colors.Fg.cyan}{len(pkgs)}/{total_found}{Colors.reset} packages")

    return pkgs
def copy_files(filepath: Path, ctx: dict, out_dir: str, relative_to: Optional[str] = None):
    out_dir = Path(out_dir)
    if relative_to is not None:
        new_filepath = out_dir / filepath.relative_to(relative_to)
    else:
        new_filepath = out_dir / filepath
    if filepath.is_file():
        new_filepath.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(filepath, new_filepath)
    elif filepath.is_dir():
        if new_filepath.exists():
            shutil.rmtree(new_filepath)
        shutil.copytree(filepath, new_filepath)
    return ctx
def __init__(self, path: Union[str, Path]):
    """Content object.

    Args:
        path: Path to the content item.

    Notes:
        1. Path validity is not validated.

    TODO:
        1. Add an attribute which inits only objects changed by git.
    """
    self._path = Path(path)
def __init__(self, artifacts_path: str, zip: bool, packs: bool, content_version: str, suffix: str, cpus: int,
             id_set_path: str = '', pack_names: str = 'all', signature_key: str = '',
             sign_directory: Path = None, remove_test_playbooks: bool = True):
    """Content artifacts configuration.

    Args:
        artifacts_path: Existing destination directory for creating artifacts.
        zip: True to zip all content artifacts into 3 different zip files in the same structure, else False.
        packs: Create only content_packs artifacts if True.
        content_version: Release content version.
        suffix: Suffix to add to all files we create.
        cpus: Available cpus on the machine.
        id_set_path: The full path of id_set.json.
        pack_names: Packs to create artifacts for.
        signature_key: Base64 encoded signature key used for signing packs.
        sign_directory: Path to the signDirectory executable file.
        remove_test_playbooks: Whether to remove test playbooks from content packs.
    """
    # options arguments
    self.artifacts_path = Path(artifacts_path)
    self.zip_artifacts = zip
    self.only_content_packs = packs
    self.content_version = content_version
    self.suffix = suffix
    self.cpus = cpus
    self.id_set_path = id_set_path
    self.pack_names = arg_to_list(pack_names)
    self.signature_key = signature_key
    self.signDirectory = sign_directory
    self.remove_test_playbooks = remove_test_playbooks
    # run related arguments
    self.content_new_path = self.artifacts_path / 'content_new'
    self.content_test_path = self.artifacts_path / 'content_test'
    self.content_packs_path = self.artifacts_path / 'content_packs'
    self.content_all_path = self.artifacts_path / 'all_content'
    self.content_uploadable_zips_path = self.artifacts_path / 'uploadable_packs'
    # inits
    self.content = Content.from_cwd()
    self.execution_start = time.time()
    self.exit_code = EX_SUCCESS