def validate(config, **kwargs):
    """Run content validation on the requested path (or repo-wide per flags).

    Returns 1 when an explicit target path does not exist on disk; otherwise
    the exit code produced by ValidateManager.run_validation().
    """
    sys.path.append(config.configuration.env_dir)
    target = kwargs['path'] or kwargs['input']
    # Guard clause: an explicit target that is neither a file nor a directory.
    if target and not (os.path.isfile(target) or os.path.isdir(target)):
        print_error(f'File {target} was not found')
        return 1
    validator = ValidateManager(
        is_backward_check=not kwargs['no_backward_comp'],
        only_committed_files=kwargs['post_commit'],
        prev_ver=kwargs['prev_ver'],
        skip_conf_json=kwargs['no_conf_json'],
        use_git=kwargs['use_git'],
        file_path=target,
        validate_all=kwargs.get('validate_all'),
        validate_id_set=kwargs['id_set'],
        skip_pack_rn_validation=kwargs['skip_pack_release_notes'],
        print_ignored_errors=kwargs['print_ignored_errors'],
        is_external_repo=tools.is_external_repository(),
        print_ignored_files=kwargs['print_ignored_files'],
        no_docker_checks=kwargs['no_docker_checks'],
        silence_init_prints=kwargs['silence_init_prints'],
    )
    return validator.run_validation()
def main():
    """Mark unreleased release-notes entries as released for a given version.

    Parses CLI args (version, git sha1 to diff against, server version and an
    optional release date), collects content files changed since the given
    commit, and stamps existing release-notes files (or creates new ones) with
    the release header, then stages them with git.
    """
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('version', help='Release version')
    arg_parser.add_argument('git_sha1', help='commit sha1 to compare changes with')
    arg_parser.add_argument('server_version', help='Server version')
    # BUGFIX: argparse %-formats help strings, so a literal '%' must be doubled —
    # a bare '%Y-%m-%d' makes `--help` raise "unsupported format character".
    arg_parser.add_argument('-d', '--date', help='release date in the format %%Y-%%m-%%d', required=False)
    args = arg_parser.parse_args()

    date = args.date if args.date else datetime.now().strftime('%Y-%m-%d')

    # get changed yaml/json files (filter only relevant changed files)
    validate_manager = ValidateManager()
    change_log = run_command('git diff --name-status {}'.format(args.git_sha1))
    modified_files, added_files, _, _, _ = validate_manager.filter_changed_files(change_log)

    for file_path in get_changed_content_entities(modified_files, added_files):
        if not should_clear(file_path, args.server_version):
            continue
        rn_path = get_release_notes_file_path(file_path)
        if os.path.isfile(rn_path):
            # if file exists, mark the current notes as release relevant
            with open(rn_path, 'r+') as rn_file:
                text = rn_file.read()
                rn_file.seek(0)
                text = text.replace(UNRELEASE_HEADER, CHANGE_LOG_FORMAT.format(version=args.version, date=date))
                rn_file.write(text)
                # BUGFIX: truncate in case the rewritten text is shorter than the
                # original, otherwise stale trailing bytes remain in the file.
                rn_file.truncate()
        else:
            # if file doesn't exist, create it with new header
            with open(rn_path, 'w') as rn_file:
                text = CHANGE_LOG_FORMAT.format(version=args.version, date=date) + get_new_header(file_path)
                rn_file.write(text)
        run_command('git add {}'.format(rn_path))
def test_update_release_on_matadata_change(demisto_client, mocker, repo):
    """
    Given
    - change only in metadata

    When
    - Running demisto-sdk update-release-notes command.

    Then
    - Ensure not find changes which would belong in release notes .
    """
    # Build a pack whose metadata is the only changed file.
    pack = repo.create_pack('FeedAzureValid')
    pack.pack_metadata.write_json(open('demisto_sdk/tests/test_files/1.pack_metadata.json').read())
    validate_manager = ValidateManager(skip_pack_rn_validation=True, silence_init_prints=True, skip_conf_json=True,
                                       check_is_unskipped=False)
    # Any truthy value works — only checked for "is git configured".
    validate_manager.git_util = "Not None"
    mocker.patch.object(UpdateRN, 'is_bump_required', return_value=True)
    # Git reports the metadata file as the sole modification.
    mocker.patch.object(ValidateManager, 'get_unfiltered_changed_files_from_git',
                        return_value=({pack.pack_metadata.path}, set(), set()))
    mocker.patch.object(UpdateReleaseNotesManager, 'setup_validate_manager', return_value=validate_manager)
    mocker.patch.object(ValidateManager, 'setup_git_params', return_value='')
    mocker.patch.object(GitUtil, 'get_current_working_branch', return_value="branch_name")
    mocker.patch.object(UpdateRN, 'get_pack_metadata', return_value={'currentVersion': '1.0.0'})
    mocker.patch('demisto_sdk.commands.common.tools.get_pack_name', return_value='FeedAzureValid')
    mocker.patch('demisto_sdk.commands.common.tools.get_pack_names_from_files', return_value={'FeedAzureValid'})

    with ChangeCWD(repo.path):
        runner = CliRunner(mix_stderr=False)
        result = runner.invoke(main, [UPDATE_RN_COMMAND, "-g"])
        # Metadata-only changes must not trigger release-notes generation.
        assert result.exit_code == 0
        assert 'No changes that require release notes were detected. If such changes were made, ' \
               'please commit the changes and rerun the command' in result.stdout
def run_validate(file_path: str, json_output_file: str) -> None:
    """Run the SDK validators on ``file_path``, writing results to ``json_output_file``.

    Creates a minimal ``Tests/id_set.json`` stub so ValidateManager can run
    outside a full content repository, and disables the SDK version check.
    """
    os.environ['DEMISTO_SDK_SKIP_VERSION_CHECK'] = '1'
    tests_dir = 'Tests'
    # exist_ok avoids the check-then-create race of the old `if not exists` form.
    os.makedirs(tests_dir, exist_ok=True)
    with open(f'{tests_dir}/id_set.json', 'w') as f:
        json.dump({}, f)
    v_manager = ValidateManager(
        is_backward_check=False, prev_ver=None, use_git=False, only_committed_files=False,
        print_ignored_files=False, skip_conf_json=True, validate_id_set=False, file_path=file_path,
        validate_all=False, is_external_repo=False, skip_pack_rn_validation=False,
        print_ignored_errors=False, silence_init_prints=False, no_docker_checks=False,
        skip_dependencies=False, id_set_path=None, staged=False, json_file_path=json_output_file,
        skip_schema_check=True, create_id_set=False)
    v_manager.run_validation()
def test_validate_no_old_format__without_toversion(self, mocker):
    """
    Given:
        - an old format_file without toversion
    When:
        - running validate_no_old_format on the file
    Then:
        - return a False as the file is invalid
    """
    # Force handle_error to return a truthy (non-ignored) result for the file.
    mocker.patch.object(BaseValidator, "handle_error", return_value="not-a-non-string")
    manager = ValidateManager()
    assert not manager.validate_no_old_format({"demisto_sdk/tests/test_files/script-valid.yml"})
def test_validate_no_old_format__with_toversion(self):
    """
    Given:
        - an old format_file with toversion
    When:
        - running validate_no_old_format on the file
    Then:
        - return a True as the file is valid
    """
    legacy_files = {
        "demisto_sdk/tests/test_files/Unifier/SampleScriptPackage/"
        "script-SampleScriptPackageSanityDocker45_45.yml"
    }
    assert ValidateManager().validate_no_old_format(legacy_files)
def test_verify_no_dup_rn__validate_manager(self, added_files: set, expected: bool):
    """
    Given:
        - A list of added files
    When:
        - verifying there are no other new release notes.
    Then:
        - return a validation response
    Case 1: Release notes in different packs.
    Case 2: Release notes where one is in the same pack
    """
    manager = ValidateManager(skip_conf_json=True)
    assert manager.validate_no_duplicated_release_notes(added_files) is expected
def test_validate_invalid_pack_dependencies__validate_manager(self, ):
    """
    Given:
        - A file path with invalid pack dependencies
    When:
        - checking validity of pack dependencies for added or modified files
    Then:
        - return a False validation response
    """
    id_set_path = os.path.normpath(
        os.path.join(__file__, git_path(), 'demisto_sdk', 'tests', 'test_files', 'id_set', 'id_set.json'))
    manager = ValidateManager(skip_conf_json=True)
    assert not manager.validate_pack_unique_files('QRadar', pack_error_ignore_list={}, id_set_path=id_set_path)
def test_is_py_or_yml__validate_manager(self):
    """
    Given:
        - A file path which contains a python script
    When:
        - validating the associated yml file
    Then:
        - return a False validation response
    """
    test_files_root = os.path.normpath(
        os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files'))
    yml_path = os.path.join(test_files_root, 'CortexXDR',
                            'Integrations/PaloAltoNetworks_XDR/PaloAltoNetworks_XDR.yml')
    assert ValidateManager()._is_py_script_or_integration(yml_path) is False
def test_is_py_or_yml_invalid__validate_manager(self):
    """
    Given:
        - A file path which contains a python script in a legacy yml schema
    When:
        - verifying the yml is valid using validate manager
    Then:
        - return a False validation response
    """
    test_files_root = os.path.normpath(
        os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files'))
    legacy_yml = os.path.join(test_files_root,
                              'UnifiedIntegrations/Integrations/integration-Symantec_Messaging_Gateway.yml')
    assert ValidateManager()._is_py_script_or_integration(legacy_yml) is False
def __init__(self, input: str = '', output: str = '', path: str = '', from_version: str = '',
             no_validate: bool = False, verbose: bool = False, assume_yes: bool = False,
             interactive: bool = True, clear_cache: bool = False, **kwargs):
    """Load the source file, resolve the output path and prepare validation state.

    Raises:
        Exception: when no source file is provided, or when it cannot be parsed.
    """
    self.source_file = input
    self.output_file = self.set_output_file_path(output)
    self.verbose = verbose
    _, self.relative_content_path = is_file_from_content_repo(self.output_file)
    # Prefer the repo-relative path when resolving the previous (remote) version.
    self.old_file = self.is_old_file(
        self.relative_content_path if self.relative_content_path else self.output_file,
        self.verbose)
    self.schema_path = path
    self.from_version = from_version
    self.no_validate = no_validate
    self.assume_yes = assume_yes
    self.interactive = interactive
    self.updated_ids: Dict = {}
    if not self.no_validate:
        # Minimal validator setup — only used after formatting completes.
        self.validate_manager = ValidateManager(silence_init_prints=True, skip_conf_json=True,
                                                skip_dependencies=True, skip_pack_rn_validation=True,
                                                check_is_unskipped=False, validate_id_set=False)
    if not self.source_file:
        raise Exception('Please provide <source path>, <optional - destination path>.')
    try:
        self.data, self.file_type = get_dict_from_file(self.source_file, clear_cache=clear_cache)
    except Exception:
        raise Exception(F'Provided file {self.source_file} is not a valid file.')
    # 'fromversion' (yml) vs 'fromVersion' (json).
    self.from_version_key = self.set_from_version_key_name()
    self.id_set_file, _ = get_dict_from_file(path=kwargs.get('id_set_path'))  # type: ignore[arg-type]
def test_get_error_ignore_list__validate_manager(self, mocker):
    """
    Given:
        - A file path to pack ignore
    When:
        - running get_error_ignore_list from validate manager
    Then:
        - verify that the created ignored_errors list is correct
    """
    pack_ignore_path = os.path.join(
        os.path.normpath(os.path.join(__file__, f'{git_path()}/demisto_sdk/tests', 'test_files')),
        'fake_pack/.pack-ignore')
    mocker.patch.object(ValidateManager, 'get_pack_ignore_file_path', return_value=pack_ignore_path)
    ignored = ValidateManager().get_error_ignore_list("fake")
    assert ignored['file_name'] == ['BA101', 'IF107']
    # Non-ignorable codes must not leak into the list.
    assert 'SC100' not in ignored['file_name']
def test_validate_no_missing_release_notes__missing_rn(self, repo):
    """
    Given:
        - 2 packs with modified files and release notes for only one
    When:
        - running validate_no_missing_release_notes on the files
    Then:
        - return a False as there are release notes missing
    """
    first_pack = repo.create_pack('PackName1')
    first_field = first_pack.create_incident_field('incident-field', content=INCIDENT_FIELD)
    second_pack = repo.create_pack('PackName2')
    second_field = second_pack.create_incident_field('incident-field', content=INCIDENT_FIELD)
    manager = ValidateManager()
    changed = {first_field.get_path_from_pack(), second_field.get_path_from_pack()}
    # Release notes exist only for PackName1 — PackName2's are missing.
    rn_added = {'Packs/PackName1/ReleaseNotes/1_0_0.md'}
    with ChangeCWD(repo.path):
        assert manager.validate_no_missing_release_notes(changed, rn_added) is False
def test_setup_git_params(self, mocker):
    """Release branches are always-valid with a two-dot diff; master uses two-dot
    without always-valid; any other branch uses a three-dot diff."""
    mocker.patch.object(ValidateManager, 'get_content_release_identifier', return_value='')

    # Version-style branch name -> always valid, two-dot compare.
    mocker.patch.object(ValidateManager, 'get_current_working_branch', return_value='20.0.7')
    manager = ValidateManager()
    manager.setup_git_params()
    assert manager.always_valid
    assert manager.compare_type == '..'

    # master -> not always valid, still two-dot compare.
    mocker.patch.object(ValidateManager, 'get_current_working_branch', return_value='master')
    # resetting always_valid flag
    manager.always_valid = False
    manager.setup_git_params()
    assert not manager.always_valid
    assert manager.compare_type == '..'

    # Any feature branch -> three-dot compare.
    mocker.patch.object(ValidateManager, 'get_current_working_branch', return_value='not-master-branch')
    manager.setup_git_params()
    assert not manager.always_valid
    assert manager.compare_type == '...'
def filter_to_relevant_files(self, file_set: set, validate_manager: ValidateManager) -> Tuple[set, set, bool]:
    """
    Given a file set, filter it to only files which require RN and if given, from a specific pack
    """
    def _entry_path(entry) -> str:
        # Renamed files arrive as (old_path, new_path) tuples; use the new path.
        return str(entry[1]) if isinstance(entry, tuple) else str(entry)

    if not self.given_pack:
        relevant = set(file_set)
    else:
        relevant = set()
        for entry in file_set:
            pack_name = get_pack_name(_entry_path(entry))
            if pack_name and pack_name in self.given_pack:
                relevant.add(entry)
    return validate_manager.filter_to_relevant_files(relevant)
class BaseUpdate:
    """BaseUpdate is the base class for all format commands.

    Attributes:
        source_file (str): the path to the file we are updating at the moment.
        output_file (str): the desired file name to save the updated version of the YML to.
        relative_content_path (str): Relative content path of output path.
        old_file (dict): Data of old file from content repo, if exist.
        schema_path (str): Schema path of file.
        from_version (str): Value of Wanted fromVersion key in file.
        data (dict): Dictionary of loaded file.
        file_type (str): Whether the file is yml or json.
        from_version_key (str): The fromVersion key in file, different between yml and json files.
        verbose (bool): Whether to print a verbose log
        assume_yes (bool): Whether to assume "yes" as answer to all prompts and run non-interactively
        interactive (bool): Whether to run the format interactively or not (usually for contribution management)
    """

    def __init__(self, input: str = '', output: str = '', path: str = '', from_version: str = '',
                 no_validate: bool = False, verbose: bool = False, assume_yes: bool = False,
                 interactive: bool = True, clear_cache: bool = False, **kwargs):
        """Load the source file, resolve the output path and prepare validation state.

        Raises:
            Exception: when no source file is provided, or when it cannot be parsed.
        """
        self.source_file = input
        self.output_file = self.set_output_file_path(output)
        self.verbose = verbose
        _, self.relative_content_path = is_file_from_content_repo(self.output_file)
        # Prefer the repo-relative path when looking up the previous (remote) version.
        self.old_file = self.is_old_file(
            self.relative_content_path if self.relative_content_path else self.output_file,
            self.verbose)
        self.schema_path = path
        self.from_version = from_version
        self.no_validate = no_validate
        self.assume_yes = assume_yes
        self.interactive = interactive
        self.updated_ids: Dict = {}
        if not self.no_validate:
            # Minimal validator configuration — used only by initiate_file_validator().
            self.validate_manager = ValidateManager(silence_init_prints=True, skip_conf_json=True,
                                                    skip_dependencies=True, skip_pack_rn_validation=True,
                                                    check_is_unskipped=False, validate_id_set=False)
        if not self.source_file:
            raise Exception('Please provide <source path>, <optional - destination path>.')
        try:
            self.data, self.file_type = get_dict_from_file(self.source_file, clear_cache=clear_cache)
        except Exception:
            raise Exception(F'Provided file {self.source_file} is not a valid file.')
        # 'fromversion' (yml) vs 'fromVersion' (json).
        self.from_version_key = self.set_from_version_key_name()
        self.id_set_file, _ = get_dict_from_file(path=kwargs.get('id_set_path'))  # type: ignore[arg-type]

    def set_output_file_path(self, output_file_path) -> str:
        """Creates and format the output file name according to user input.
        Args:
            output_file_path: The output file name the user defined.
        Returns:
            str. the full formatted output file name.
        """
        if not output_file_path:
            source_dir = os.path.dirname(self.source_file)
            file_name = os.path.basename(self.source_file)
            if self.__class__.__name__ == 'PlaybookYMLFormat':
                # Legacy (non-pack) playbooks must carry the 'playbook-' prefix.
                if "Pack" not in source_dir:
                    if not file_name.startswith('playbook-'):
                        file_name = F'playbook-{file_name}'
            return os.path.join(source_dir, file_name)
        else:
            return output_file_path

    def set_version_to_default(self, location=None):
        """Set the 'version' key to DEFAULT_VERSION (optionally inside `location`)."""
        self.set_default_value('version', DEFAULT_VERSION, location)

    def set_default_value(self, key: str, value: Any, location=None):
        """Replaces the version to default."""
        if self.verbose:
            # NOTE(review): the ternary binds over the '+', so the whole message is
            # suppressed when `location` is falsy — looks unintended; confirm.
            click.echo(f'Setting {key} to default={value}' + ' in custom location' if location else '')
        if location:
            location[key] = value
        else:
            self.data[key] = value

    def remove_unnecessary_keys(self):
        """Removes keys that are in file but not in schema of file type"""
        with open(self.schema_path, 'r') as file_obj:
            schema = yaml.load(file_obj)
        # Inline all `schema;name` sub-schemas so the whole schema is one mapping.
        extended_schema = self.recursive_extend_schema(schema, schema)
        if self.verbose:
            print('Removing Unnecessary fields from file')
        if isinstance(extended_schema, dict):
            self.recursive_remove_unnecessary_keys(extended_schema.get('mapping', {}), self.data)

    @staticmethod
    def recursive_extend_schema(current_schema: Union[str, bool, list, dict],
                                full_schema: dict) -> Union[str, bool, list, dict]:
        """
        Parses partial schemas into one schema.
        Removing the `schema;(schema-name)` and include syntax.
        See here for more info https://pykwalify.readthedocs.io/en/unstable/partial-schemas.html#schema-schema-name.
        This method recursively returns the unified scheme
        Args:
            current_schema: The current analyzed recursive schema
            full_schema: The original schema
        Returns:
            The unified schema with out the `schema;(schema-name)` and include syntax.
        """
        # This is the base condition, if the current schema is str or bool we can safely return it.
        if isinstance(current_schema, str) or isinstance(current_schema, bool):
            return current_schema
        # If the current schema is a list - we will return the extended schema of each of it's elements
        if isinstance(current_schema, list):
            return [BaseUpdate.recursive_extend_schema(value, full_schema) for value in current_schema]
        # If the current schema is a dict this is the main condition we will handle
        if isinstance(current_schema, dict):
            modified_schema = {}
            for key, value in current_schema.items():
                # There is no need to add the sub-schemas themselves, as we want to drop them
                if key.startswith('schema;'):
                    continue
                # If this is a reference to a sub-schema - we will replace the reference with the original.
                if isinstance(value, str) and key == 'include':
                    extended_schema: dict = full_schema.get(f'schema;{value}')  # type: ignore
                    if extended_schema is None:
                        # NOTE(review): the second positional arg of click.echo is
                        # `file`, not a color — passing LOG_COLORS.YELLOW here looks
                        # suspicious; confirm intended behavior.
                        click.echo(f"Could not find sub-schema for {value}", LOG_COLORS.YELLOW)
                    # sometimes the sub-schema can have it's own sub-schemas so we need to unify that too
                    return BaseUpdate.recursive_extend_schema(deepcopy(extended_schema), full_schema)
                else:
                    # This is the mapping case in which we can let the recursive method do it's thing on the values
                    modified_schema[key] = BaseUpdate.recursive_extend_schema(value, full_schema)
            return modified_schema

    def recursive_remove_unnecessary_keys(self, schema: dict, data: dict) -> None:
        """Recursively removes all the unnecessary fields in the file

        Args:
            schema: The schema with which we can check if a field should be removed
            data: The actual data of the file from which we will want to remove the fields.
        """
        data_fields = set(data.keys())
        for field in data_fields:
            if field not in schema.keys():
                # check if one of the schema keys is a regex that matches the data field - for example refer to the
                # tasks key in playbook.yml schema where a field should match the regex (^[0-9]+$)
                matching_key = self.regex_matching_key(field, schema.keys())
                if matching_key:
                    mapping = schema.get(matching_key, {}).get('mapping')
                    if mapping:
                        self.recursive_remove_unnecessary_keys(
                            schema.get(matching_key, {}).get('mapping'),
                            data.get(field, {}))
                else:
                    if self.verbose:
                        print(f'Removing {field} field')
                    data.pop(field, None)
            else:
                mapping = schema.get(field, {}).get('mapping')
                if mapping:  # type: ignore
                    self.recursive_remove_unnecessary_keys(
                        schema.get(field, {}).get('mapping'),
                        data.get(field, {}))
                # In case we have a sequence with a mapping key in its first element it's a continuation of the
                # schema and we need to remove unnecessary keys from it too.
                # In any other case there is nothing to do with the sequence
                else:
                    sequence = schema.get(field, {}).get('sequence', [])
                    if sequence and sequence[0].get('mapping'):
                        if data[field] is None:
                            if self.verbose:
                                print(f'Adding an empty array - `[]` as the value of the `{field}` field')
                            data[field] = []
                        else:
                            for list_element in data[field]:
                                self.recursive_remove_unnecessary_keys(
                                    sequence[0].get('mapping'),
                                    list_element)

    def regex_matching_key(self, field, schema_keys):
        """
        Checks if the given data field matches a regex key in the schema.
        Args:
            field: the data field that should be matched.
            schema_keys: the keys in the schema that the data field should be checked against.
        Returns:
            the schema-key that is a regex which matches the given data field, if such a key exists, otherwise None.
        """
        regex_keys = [regex_key for regex_key in schema_keys if 'regex;' in regex_key]
        for reg in regex_keys:
            if re.match(reg.split(';')[1], field):
                return reg
        return None

    def get_answer(self, promote):
        """Prompt the user (in red) and return the raw input string."""
        click.secho(promote, fg='red')
        return input()

    def ask_user(self):
        """Ask whether to set fromVersion to the default; returns True on yes."""
        user_answer = self.get_answer(
            'Either no fromversion is specified in your file,'
            ' or it is lower than the minimal fromversion for this content type, would you like to set it to the default? [Y/n]')
        if user_answer and user_answer.lower() in ['y', 'yes']:
            return True
        else:
            click.secho('Skipping update of fromVersion', fg='yellow')
            return False

    def set_default_from_version(self, default_from_version: str, current_fromversion_value: str, file_type: str):
        """
        Sets the default fromVersion key in the file:
        In case the user approved it:
            Set the fromversion to 5.0.0 for old content items.
            Set/update the fromversion to the input default if supplied.(checks if it is the highest one).
            In any other case set it to the general one.
        Args:
            default_from_version: default fromVersion specific to the content type.
            current_fromversion_value: current from_version if exists in the file.
            file_type: the file type.
        """
        max_version = get_max_version([GENERAL_DEFAULT_FROMVERSION, default_from_version, current_fromversion_value])
        # Only prompt/update when the computed maximum actually differs.
        if max_version != current_fromversion_value and (self.assume_yes or self.ask_user()):
            self.data[self.from_version_key] = max_version

    def set_fromVersion(self, default_from_version='', file_type: str = ''):
        """Sets fromVersion key in the file.
        Args:
            default_from_version: default fromVersion specific to the content type.
            file_type: the file type.
        """
        current_fromversion_value = self.data.get(self.from_version_key, '')
        if self.verbose:
            click.echo('Setting fromVersion field')
        # Precedence: explicit CLI value > old file's value > legacy default > computed default.
        if self.from_version:
            self.data[self.from_version_key] = self.from_version
        elif self.old_file.get(self.from_version_key):
            if not current_fromversion_value:
                self.data[self.from_version_key] = self.old_file.get(self.from_version_key)
        elif file_type and file_type in OLD_FILE_TYPES:
            self.data[self.from_version_key] = VERSION_5_5_0
        else:
            self.set_default_from_version(default_from_version, current_fromversion_value, file_type)

    def arguments_to_remove(self) -> Set[str]:
        """ Finds diff between keys in file and schema of file type.
        Returns:
            List of keys that should be deleted in file
        """
        with open(self.schema_path, 'r') as file_obj:
            a = yaml.load(file_obj)
        schema_fields = a.get('mapping').keys()
        arguments_to_remove = set(self.data.keys()) - set(schema_fields)
        return arguments_to_remove

    def set_from_version_key_name(self) -> Union[str, None]:
        """fromversion key is different between yml and json , in yml file : fromversion, in json files : fromVersion"""
        if self.file_type == "yml":
            return 'fromversion'
        elif self.file_type == "json":
            return 'fromVersion'
        return None

    @staticmethod
    def is_old_file(path: str, verbose: bool = False) -> dict:
        """Check whether the file is in git repo or new file.
        """
        if path:
            data = get_remote_file(path, suppress_print=not verbose)
            if not data:
                return {}
            else:
                return data
        return {}

    def remove_copy_and_dev_suffixes_from_name(self):
        """Removes any _dev and _copy suffixes in the file.
        When developer clones playbook/integration/script it will automatically add _copy or _dev suffix.
        """
        if self.verbose:
            click.echo('Removing _dev and _copy suffixes from name, id and display tags')
        # NOTE(review): direct key access — raises KeyError when 'name' is absent; confirm.
        if self.data['name']:
            self.data['name'] = self.data.get('name', '').replace('_copy', '').replace('_dev', '')
        if self.data.get('display'):
            self.data['display'] = self.data.get('display', '').replace('_copy', '').replace('_dev', '')
        if self.data.get('id'):
            self.data['id'] = self.data.get('id', '').replace('_copy', '').replace('_dev', '')

    def initiate_file_validator(self) -> int:
        """ Run schema validate and file validate of file
        Returns:
            int 0 in case of success
            int 1 in case of error
            int 2 in case of skip
        """
        if self.no_validate:
            if self.verbose:
                click.secho(f'Validator Skipped on file: {self.output_file} , no-validate flag was set.', fg='yellow')
            return SKIP_RETURN_CODE
        else:
            self.validate_manager.file_path = self.output_file
            # Existing (tracked) files are validated against git; new ones directly.
            if self.is_old_file(self.output_file):
                validation_result = self.validate_manager.run_validation_using_git()
            else:
                validation_result = self.validate_manager.run_validation_on_specific_files()
            if not validation_result:
                return ERROR_RETURN_CODE
            else:
                return SUCCESS_RETURN_CODE

    def sync_data_to_master(self):
        """Re-apply the local changes on top of the master (old) version of the file."""
        if self.old_file:
            diff = dictdiffer.diff(self.old_file, self.data)
            self.data = dictdiffer.patch(diff, self.old_file)
def setup_validate_manager(self):
    """Build a ValidateManager configured for release-notes filtering."""
    manager_settings = dict(
        skip_pack_rn_validation=True,
        prev_ver=self.prev_ver,
        silence_init_prints=True,
        skip_conf_json=True,
        check_is_unskipped=False,
        file_path=self.given_pack,
    )
    return ValidateManager(**manager_settings)
def filter_changed_files(files_string, tag='master', print_ignored_files=False):
    """Get lists of the modified files in your branch according to the files string.

    Args:
        files_string (string): String that was calculated by git using `git diff` command.
        tag (string): String of git tag used to update modified files.
        print_ignored_files (bool): should print ignored files.

    Returns:
        Tuple of sets: (modified, added, deleted, old-format, changed-meta, ignored, new packs).
    """
    all_files = files_string.split('\n')
    deleted_files = set()
    added_files_list = set()
    modified_files_list = set()
    old_format_files = set()
    changed_meta_files = set()
    ignored_files = set()
    new_packs = set()
    for f in all_files:
        # Each git line is '<status>\t<path>' (renames: '<status>\t<old>\t<new>').
        file_data: list = list(filter(None, f.split('\t')))
        if not file_data:
            continue

        file_status = file_data[0]
        file_path = file_data[1]

        if file_status.lower().startswith('r'):
            # Rename status arrives as e.g. 'R100'; normalize and use the new path.
            file_status = 'r'
            file_path = file_data[2]

        try:
            file_type = find_type(file_path)
            # if the file is a code file - change path to
            # the associated yml path to trigger release notes validation.
            if file_status.lower() != 'd' and \
                    file_type in [FileType.POWERSHELL_FILE, FileType.PYTHON_FILE] and \
                    not (file_path.endswith('_test.py') or file_path.endswith('.Tests.ps1')):
                # naming convention - code file and yml file in packages must have same name.
                file_path = os.path.splitext(file_path)[0] + '.yml'

            # ignore changes in JS files and unit test files.
            elif file_path.endswith('.js') or file_path.endswith('.py') or file_path.endswith('.ps1'):
                if file_path not in ignored_files:
                    ignored_files.add(file_path)
                    if print_ignored_files:
                        click.secho('Ignoring file path: {} - code file'.format(file_path), fg="yellow")
                continue

            # ignore changes in TESTS_DIRECTORIES files.
            elif any(test_dir in file_path for test_dir in TESTS_AND_DOC_DIRECTORIES):
                if file_path not in ignored_files:
                    ignored_files.add(file_path)
                    if print_ignored_files:
                        click.secho('Ignoring file path: {} - test file'.format(file_path), fg="yellow")
                continue

            # identify deleted files
            if file_status.lower() == 'd' and not file_path.startswith('.'):
                deleted_files.add(file_path)

            # ignore directories
            elif not os.path.isfile(file_path):
                if print_ignored_files:
                    click.secho('Ignoring file path: {} - directory'.format(file_path), fg="yellow")
                continue

            # changes in old scripts and integrations - unified python scripts/integrations
            elif file_status.lower() in ['m', 'a', 'r'] and \
                    file_type in [FileType.INTEGRATION, FileType.SCRIPT] and \
                    ValidateManager.is_old_file_format(file_path, file_type):
                old_format_files.add(file_path)

            # identify modified files
            elif file_status.lower() == 'm' and file_type and not file_path.startswith('.'):
                modified_files_list.add(file_path)

            # identify added files
            elif file_status.lower() == 'a' and file_type and not file_path.startswith('.'):
                added_files_list.add(file_path)

            # identify renamed files
            elif file_status.lower().startswith('r') and file_type:
                # if a code file changed, take the associated yml file.
                if file_type in [FileType.POWERSHELL_FILE, FileType.PYTHON_FILE]:
                    modified_files_list.add(file_path)
                else:
                    # file_data[1] = old name, file_data[2] = new name
                    modified_files_list.add((file_data[1], file_data[2]))

            elif file_status.lower() not in KNOWN_FILE_STATUSES:
                click.secho('{} file status is an unknown one, please check. File status was: {}'
                            .format(file_path, file_status), fg="bright_red")

            # handle meta data file changes
            elif file_path.endswith(PACKS_PACK_META_FILE_NAME):
                if file_status.lower() == 'a':
                    new_packs.add(get_pack_name(file_path))
                elif file_status.lower() == 'm':
                    changed_meta_files.add(file_path)

            else:
                # pipefile and pipelock files should not enter to ignore_files
                if 'Pipfile' not in file_path:
                    if file_path not in ignored_files:
                        ignored_files.add(file_path)
                        if print_ignored_files:
                            click.secho('Ignoring file path: {} - system file'.format(file_path), fg="yellow")
                    else:
                        if print_ignored_files:
                            click.secho('Ignoring file path: {} - system file'.format(file_path), fg="yellow")

        # handle a case where a file was deleted locally though recognised as added against master.
        except FileNotFoundError:
            if file_path not in ignored_files:
                ignored_files.add(file_path)
                if print_ignored_files:
                    click.secho('Ignoring file path: {} - File not found'.format(file_path), fg="yellow")

    modified_files_list, added_files_list, deleted_files = filter_packagify_changes(
        modified_files_list, added_files_list, deleted_files, tag)

    return modified_files_list, added_files_list, deleted_files, old_format_files, \
        changed_meta_files, ignored_files, new_packs
def test_filter_changed_files(self, mocker):
    """
    Given:
        - A string of git diff results
    When:
        - running filter_changed_files on the string
    Then:
        - Ensure the modified files are recognized correctly.
        - Ensure the added files are recognized correctly.
        - Ensure the renamed file is in a tup;e in the modified files.
        - Ensure modified metadata files are in the changed_meta_files and that the added one is not.
        - Ensure the added code and meta files are not in added files.
        - Ensure old format file is recognized correctly.
        - Ensure deleted file is recognized correctly.
        - Ensure ignored files are set correctly.
    """
    mocker.patch.object(os.path, 'isfile', return_value=True)
    mocker.patch.object(ValidateManager, '_is_py_script_or_integration', return_value=True)
    # Fields are tab-separated, matching `git diff --name-status` output.
    diff_string = "M\tPacks/CommonTypes/IncidentFields/incidentfield-Detection_URL.json\n" \
                  "M\tPacks/EWS/Classifiers/classifier-EWS_v2.json\n" \
                  "M\tPacks/Elasticsearch/Integrations/Elasticsearch_v2/Elasticsearch_v2.py\n" \
                  "M\tPacks/Elasticsearch/Integrations/integration-Elasticsearch.yml\n" \
                  "M\tPacks/F5/pack_metadata.json\n" \
                  "R100\tPacks/EclecticIQ/Integrations/EclecticIQ/EclecticIQ.yml\t" \
                  "Packs/EclecticIQ/Integrations/EclecticIQ_new/EclecticIQ_new.yml\n" \
                  "A\tPacks/MyNewPack/.pack-ignore\n" \
                  "A\tPacks/MyNewPack/.secrets-ignore\n" \
                  "A\tPacks/MyNewPack/Integrations/MyNewIntegration/MyNewIntegration.py\n" \
                  "A\tPacks/MyNewPack/Integrations/MyNewIntegration/MyNewIntegration.yml\n" \
                  "A\tPacks/MyNewPack/Integrations/MyNewIntegration/MyNewIntegration_description.md\n" \
                  "A\tPacks/MyNewPack/Integrations/MyNewIntegration/MyNewIntegration_image.png\n" \
                  "A\tPacks/MyNewPack/Integrations/MyNewIntegration/MyNewIntegration_test.py\n" \
                  "A\tPacks/MyNewPack/Integrations/MyNewIntegration/Pipfile\n" \
                  "A\tPacks/MyNewPack/Integrations/MyNewIntegration/Pipfile.lock\n" \
                  "A\tPacks/MyNewPack/Integrations/MyNewIntegration/README.md\n" \
                  "A\tPacks/MyNewPack/README.md\n" \
                  "A\tPacks/MyNewPack/pack_metadata.json\n" \
                  "D\tPacks/DeprecatedContent/Scripts/script-ExtractURL.yml"

    validate_manager = ValidateManager()
    modified_files, added_files, deleted_files, old_format_files, changed_meta_files = validate_manager.\
        filter_changed_files(files_string=diff_string, print_ignored_files=True)

    # checking that modified files are recognized correctly
    assert 'Packs/CommonTypes/IncidentFields/incidentfield-Detection_URL.json' in modified_files
    assert 'Packs/EWS/Classifiers/classifier-EWS_v2.json' in modified_files
    assert ('Packs/EclecticIQ/Integrations/EclecticIQ/EclecticIQ.yml',
            'Packs/EclecticIQ/Integrations/EclecticIQ_new/EclecticIQ_new.yml') in modified_files

    # check that the modified code file is not there but the yml file is
    assert 'Packs/Elasticsearch/Integrations/Elasticsearch_v2/Elasticsearch_v2.yml' in modified_files
    assert 'Packs/Elasticsearch/Integrations/Elasticsearch_v2/Elasticsearch_v2.py' not in modified_files

    # check that the modified metadata file is in the changed_meta_files but the added one is not
    assert 'Packs/F5/pack_metadata.json' in changed_meta_files
    assert 'Packs/MyNewPack/pack_metadata.json' not in changed_meta_files

    # check that the added files are recognized correctly
    assert 'Packs/MyNewPack/Integrations/MyNewIntegration/README.md' in added_files
    assert 'Packs/MyNewPack/Integrations/MyNewIntegration/MyNewIntegration.yml' in added_files

    # check that the added code files and meta file are not in the added_files
    assert 'Packs/MyNewPack/Integrations/MyNewIntegration/MyNewIntegration.py' not in added_files
    assert 'Packs/MyNewPack/Integrations/MyNewIntegration/MyNewIntegration_test.py' not in added_files
    assert 'Packs/MyNewPack/pack_metadata.json' not in added_files

    # check that non-image, pipfile, description or schema are in the ignored files and the rest are
    assert 'Packs/MyNewPack/Integrations/MyNewIntegration/Pipfile' not in validate_manager.ignored_files
    assert 'Packs/MyNewPack/Integrations/MyNewIntegration/Pipfile.lock' not in validate_manager.ignored_files
    assert 'Packs/MyNewPack/Integrations/MyNewIntegration/MyNewIntegration_description.md' not \
           in validate_manager.ignored_files
    assert 'Packs/MyNewPack/Integrations/MyNewIntegration/MyNewIntegration_image.png' not \
           in validate_manager.ignored_files
    assert 'Packs/MyNewPack/.secrets-ignore' in validate_manager.ignored_files
    assert 'Packs/MyNewPack/Integrations/MyNewIntegration/MyNewIntegration_test.py' in \
           validate_manager.ignored_files
    assert 'Packs/MyNewPack/.pack-ignore' in validate_manager.ignored_files

    # check recognized old-format file
    assert 'Packs/Elasticsearch/Integrations/integration-Elasticsearch.yml' in old_format_files

    # check recognized deleted file
    assert 'Packs/DeprecatedContent/Scripts/script-ExtractURL.yml' in deleted_files
def test_files_validator_validate_pack_unique_files__validate_manager(self, ):
    """A valid pack's unique files should pass validation."""
    manager = ValidateManager(skip_conf_json=True)
    assert manager.validate_pack_unique_files(VALID_PACK, pack_error_ignore_list={})
def test_create_ignored_errors_list__validate_manager(self):
    """Codes outside the checked prefixes/codes should come back as ignored."""
    checked_codes = ["IN", "SC", "CJ", "DA", "DB", "DO", "ID", "DS", "IM", "IF",
                     "IT", "RN", "RM", "PA", "PB", "WD", "RP", "BA100", "BC100",
                     "ST", "CL", "MP"]
    manager = ValidateManager()
    assert manager.create_ignored_errors_list(checked_codes) == [
        "BA101", "BA102", "BA103", "BA104", "BC101", "BC102", "BC103", "BC104"]