def get_new_and_modified_integration_files(git_sha1):
    """Return 2 lists - list of new integrations and list of modified integrations since the commit of the git_sha1.

    Args:
        git_sha1 (str): The git sha of the commit against which we will run the 'git diff' command.

    Returns:
        (tuple): Returns a tuple of two lists, the file paths of the new integrations and modified integrations.
    """
    # get changed yaml files (filter only added and modified files)
    tag = get_last_release_version()
    file_validator = FilesValidator()
    change_log = run_command('git diff --name-status {}'.format(git_sha1))
    modified_files, added_files, removed_files, old_format_files = file_validator.get_modified_files(change_log, tag)

    # Copy before extending: calling .extend() directly on YML_INTEGRATION_REGEXES
    # would mutate the shared module-level constant for every subsequent caller.
    all_integration_regexes = list(YML_INTEGRATION_REGEXES)
    all_integration_regexes.extend([INTEGRATION_REGEX, PACKS_INTEGRATION_REGEX, BETA_INTEGRATION_REGEX])

    new_integration_files = [
        file_path for file_path in added_files
        if checked_type(file_path, all_integration_regexes)
    ]

    # modified_files may also contain (old_path, new_path) rename tuples - only
    # plain string paths are integration candidates here.
    modified_integration_files = [
        file_path for file_path in modified_files
        if isinstance(file_path, str) and checked_type(file_path, all_integration_regexes)
    ]

    return new_integration_files, modified_integration_files
def __init__(self, file_path):
    """Resolve the image path to validate.

    If *file_path* already points at an image-bearing file, it is used as-is;
    for a package-style integration, the sibling .png next to the yml is used.
    Sets self._is_valid to False when a package provides no .png image.
    """
    self._is_valid = True
    points_at_image = checked_type(file_path, INTEGRATION_REGXES) or re.match(IMAGE_REGEX, file_path, re.IGNORECASE)
    if points_at_image:
        self.file_path = file_path
    elif checked_type(file_path, YML_INTEGRATION_REGEXES):
        png_candidates = glob.glob(os.path.join(os.path.dirname(file_path), '*.png'))
        if png_candidates:
            self.file_path = png_candidates[0]
        else:
            self._is_valid = False
            print_error("You've created/modified a package but failed to provide an image as a .png file, "
                        "please add an image in order to proceed.")
def is_valid_scheme(self):
    # type: () -> bool
    """Validate the file scheme according to the scheme we have saved in SCHEMAS_PATH.

    Returns:
        bool. Whether the scheme is valid on self.file_path.
    """
    # File kinds that have no yml/json schema are trivially valid.
    if self.scheme_name in [None, FileType.IMAGE, FileType.README, FileType.RELEASE_NOTES, FileType.TEST_PLAYBOOK]:
        return True
    # ignore reputations.json
    if checked_type(self.file_path, JSON_ALL_REPUTATIONS_INDICATOR_TYPES_REGEXES):
        return True
    try:
        # disabling messages of level INFO and beneath of pykwalify such as: INFO:pykwalify.core:validation.valid
        log = logging.getLogger('pykwalify.core')
        log.setLevel(logging.WARNING)
        # Beta integrations are validated against the regular integration schema.
        scheme_file_name = 'integration' if self.scheme_name.value == 'betaintegration' else self.scheme_name.value
        # Schema files live under SCHEMAS_PATH relative to this module's parent package.
        path = os.path.normpath(
            os.path.join(__file__, "..", "..", self.SCHEMAS_PATH, '{}.yml'.format(scheme_file_name)))
        core = Core(source_file=self.file_path, schema_files=[path])
        core.validate(raise_exception=True)
    except Exception as err:
        try:
            # Try to produce a detailed, parsed error message with a suggested fix.
            error_message, error_code = self.parse_error_msg(err)
            if self.handle_error(error_message, error_code, self.file_path,
                                 suggested_fix=Errors.suggest_fix(self.file_path)):
                self.is_valid = False
                return False
        except Exception:
            # parse_error_msg itself failed - fall back to a generic pykwalify error.
            error_message, error_code = Errors.pykwalify_general_error(err)
            if self.handle_error(error_message, error_code, self.file_path):
                self.is_valid = False
                return False
    # Either validation passed, or handle_error decided the error is ignorable.
    return True
def is_valid_scheme(self):
    # type: () -> bool
    """Validate the file scheme according to the scheme we have saved in SCHEMAS_PATH.

    Returns:
        bool. Whether the scheme is valid on self.file_path.
    """
    # File kinds that have no yml/json schema are trivially valid.
    if self.scheme_name in [None, 'image', 'readme', 'changelog']:
        return True
    # ignore reputations.json
    if checked_type(self.file_path, JSON_ALL_REPUTATIONS_INDICATOR_TYPES_REGEXES):
        return True
    try:
        # disabling messages of level INFO and beneath of pykwalify such as: INFO:pykwalify.core:validation.valid
        log = logging.getLogger('pykwalify.core')
        log.setLevel(logging.WARNING)
        # Schema files live under SCHEMAS_PATH relative to this module's parent package.
        path = os.path.normpath(
            os.path.join(__file__, "..", "..", self.SCHEMAS_PATH, '{}.yml'.format(self.scheme_name)))
        core = Core(source_file=self.file_path, schema_files=[path])
        core.validate(raise_exception=True)
    except Exception as err:
        try:
            # Prefer a parsed, human-readable error with a suggested fix.
            print_error(self.parse_error_msg(err))
            print_error(Errors.suggest_fix(self.file_path))
        except Exception:
            # parse_error_msg itself failed - print the raw pykwalify error.
            print_error('Failed: {} failed.\nin {}'.format(
                self.file_path, str(err)))
        self.is_valid = False
        return False
    return True
def get_modified_files(files_string):
    """Get lists of the modified files in your branch according to the files string.

    Args:
        files_string (string): String that was calculated by git using `git diff` command.

    Returns:
        (yml_files, md_files). Tuple of sets.
    """
    yml_files = set()
    md_files = set()
    for line in files_string.split('\n'):
        parts = line.split()
        if not parts:
            continue
        status = parts[0].lower()
        path = parts[1]
        # code files are handled by other checks, skip them here
        if path.endswith('.js') or path.endswith('.py'):
            continue
        # renamed entries carry the new path in the third column
        if status.startswith('r'):
            path = parts[2]
        if status in ('m', 'a') or status.startswith('r'):
            if checked_type(path, SPELLCHECK_FILE_TYPES):
                yml_files.add(path)
            elif re.match(DESCRIPTION_REGEX, path, re.IGNORECASE):
                md_files.add(path)
    return yml_files, md_files
def get_related_yml_contents(self, file_path):
    """Return the related yml contents for a script/readme file, or '' when not applicable.

    The yml is searched for in order to retrieve the temp white list.
    """
    # Only integration documentation files / supported extensions have a related yml.
    if not checked_type(file_path, REQUIRED_YML_FILE_TYPES):
        return ''
    return self.retrieve_related_yml(os.path.dirname(file_path))
def __init__(self, file_path, ignored_errors=None, print_as_warnings=False):
    # Resolve the image path this validator should check: either the given path
    # itself (unified integration / image file) or the package's sibling .png.
    super().__init__(ignored_errors=ignored_errors,
                     print_as_warnings=print_as_warnings)
    self._is_valid = True  # flipped to False when a required image is missing
    if checked_type(file_path, INTEGRATION_REGXES) or re.match(
            IMAGE_REGEX, file_path, re.IGNORECASE):
        self.file_path = file_path
    else:
        if checked_type(file_path, YML_INTEGRATION_REGEXES):
            try:
                # Package layout: the image lives as a sibling *.png of the yml.
                self.file_path = glob.glob(
                    os.path.join(os.path.dirname(file_path), '*.png'))[0]
            except IndexError:
                # No .png found in the package directory.
                error_message, error_code = Errors.no_image_given()
                # NOTE(review): self.file_path has not been assigned yet on this
                # path - confirm the base class sets it, otherwise this attribute
                # access raises AttributeError before the error is reported.
                if self.handle_error(error_message, error_code,
                                     file_path=self.file_path):
                    self._is_valid = False
                self.file_path = ''
def validate_committed_files(self):
    """Validate that all the committed files in your branch are valid"""
    modified_files, added_files, old_format_files, packs = self.get_modified_and_added_files()

    # A rename entry is a (old, new) tuple - compare against the new path.
    schema_changed = any(
        checked_type(entry[1] if isinstance(entry, tuple) else entry, [SCHEMA_REGEX])
        for entry in modified_files
    )

    # Ensure schema change did not break BC
    if schema_changed:
        print("Schema changed, validating all files")
        self.validate_all_files()
    else:
        self.validate_modified_files(modified_files)
        self.validate_added_files(added_files)
        self.validate_no_old_format(old_format_files)
        self.validate_pack_unique_files(packs)
def verify(acceptable, unacceptable, matched_regex):
    """Check that every acceptable path matches *matched_regex* and no unacceptable path does."""
    for good_path in acceptable:
        assert checked_type(good_path, compared_regexes=matched_regex)

    for bad_path in unacceptable:
        assert not checked_type(bad_path, compared_regexes=matched_regex)
def validate_added_files(self, added_files, file_type: str = None):  # noqa: C901
    """Validate the added files from your branch.

    In case we encounter an invalid file we set the self._is_valid param to False.

    Args:
        added_files (set): A set of the modified files in the current branch.
        file_type (str): Used only with -p flag (the type of the file).
    """
    for file_path in added_files:
        print('Validating {}'.format(file_path))

        # Test playbooks are skipped, unless an explicit file_type was forced.
        if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE) and not file_type:
            continue
        elif 'README' in file_path:
            readme_validator = ReadMeValidator(file_path)
            if not readme_validator.is_valid_file():
                self._is_valid = False
            continue

        # Structural (schema) validation runs for every remaining file.
        structure_validator = StructureValidator(
            file_path, is_new_file=True, predefined_scheme=file_type)
        if not structure_validator.is_valid_file():
            self._is_valid = False

        # NOTE(review): this `if` heads the same elif-chain as the per-type
        # validators below, so when validate_id_set is truthy only the id-set
        # checks run and the type-specific branches are skipped - confirm this
        # coupling is intentional.
        if self.validate_id_set:
            if not self.id_set_validator.is_file_valid_in_set(file_path):
                self._is_valid = False
            if self.id_set_validator.is_file_has_used_id(file_path):
                self._is_valid = False

        elif re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE) or file_type == 'playbook':
            playbook_validator = PlaybookValidator(structure_validator)
            if not playbook_validator.is_valid_playbook():
                self._is_valid = False

        elif checked_type(
                file_path, YML_INTEGRATION_REGEXES) or file_type == 'integration':
            image_validator = ImageValidator(file_path)
            # if file_type(non git path) the image is not in a separate path
            image_validator.file_path = file_path if file_type else image_validator.file_path
            if not image_validator.is_valid():
                self._is_valid = False

            description_validator = DescriptionValidator(file_path)
            if not description_validator.is_valid():
                self._is_valid = False

            integration_validator = IntegrationValidator(
                structure_validator)
            # Release-notes validation is skipped when the type was forced via -p.
            if not integration_validator.is_valid_file(
                    validate_rn=not file_type):
                self._is_valid = False

        elif checked_type(
                file_path, PACKAGE_SCRIPTS_REGEXES) or file_type == 'script':
            unifier = Unifier(os.path.dirname(file_path))
            yml_path, _ = unifier.get_script_package_data()
            # Set file path to the yml file
            structure_validator.file_path = yml_path
            script_validator = ScriptValidator(structure_validator)
            if not script_validator.is_valid_file(
                    validate_rn=not file_type):
                self._is_valid = False

        elif re.match(BETA_INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                re.match(BETA_INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
            description_validator = DescriptionValidator(file_path)
            if not description_validator.is_valid_beta_description():
                self._is_valid = False
            integration_validator = IntegrationValidator(
                structure_validator)
            if not integration_validator.is_valid_beta_integration():
                self._is_valid = False

        elif re.match(IMAGE_REGEX, file_path, re.IGNORECASE):
            image_validator = ImageValidator(file_path)
            if not image_validator.is_valid():
                self._is_valid = False

        # incident fields and indicator fields are using the same scheme.
        elif checked_type(file_path, JSON_INDICATOR_AND_INCIDENT_FIELDS) or \
                file_type in ('incidentfield', 'indicatorfield'):
            incident_field_validator = IncidentFieldValidator(
                structure_validator)
            if not incident_field_validator.is_valid_file(
                    validate_rn=not file_type):
                self._is_valid = False

        elif checked_type(file_path, [REPUTATION_REGEX]) or file_type == 'reputation':
            reputation_validator = ReputationValidator(structure_validator)
            if not reputation_validator.is_valid_file(
                    validate_rn=not file_type):
                self._is_valid = False

        elif checked_type(
                file_path, JSON_ALL_LAYOUT_REGEXES) or file_type == 'layout':
            layout_validator = LayoutValidator(structure_validator)
            if not layout_validator.is_valid_layout(
                    validate_rn=not file_type):
                self._is_valid = False

        elif checked_type(
                file_path, JSON_ALL_DASHBOARDS_REGEXES) or file_type == 'dashboard':
            dashboard_validator = DashboardValidator(structure_validator)
            if not dashboard_validator.is_valid_dashboard(
                    validate_rn=not file_type):
                self._is_valid = False

        elif checked_type(file_path, JSON_ALL_INCIDENT_TYPES_REGEXES):
            incident_type_validator = IncidentTypeValidator(
                structure_validator)
            if not incident_type_validator.is_valid_incident_type(
                    validate_rn=not file_type):
                self._is_valid = False

        elif 'CHANGELOG' in file_path:
            self.is_valid_release_notes(file_path)

        # Recognized content type that needs no extra validation here.
        elif checked_type(file_path, CHECKED_TYPES_REGEXES):
            pass

        else:
            print_error(
                "The file type of {} is not supported in validate command".
                format(file_path))
            print_error(
                "validate command supports: Integrations, Scripts, Playbooks, "
                "Incident fields, Indicator fields, Images, Release notes, Layouts and Descriptions"
            )
            self._is_valid = False
def validate_modified_files(self, modified_files):  # noqa: C901
    """Validate the modified files from your branch.

    In case we encounter an invalid file we set the self._is_valid param to False.

    Args:
        modified_files (set): A set of the modified files in the current branch.
    """
    for file_path in modified_files:
        old_file_path = None
        # Renamed files arrive as (old_path, new_path) tuples.
        if isinstance(file_path, tuple):
            old_file_path, file_path = file_path
        print('Validating {}'.format(file_path))
        if not checked_type(file_path):
            print_warning(
                '- Skipping validation of non-content entity file.')
            continue

        if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
            continue
        elif 'README' in file_path:
            readme_validator = ReadMeValidator(file_path)
            if not readme_validator.is_valid_file():
                self._is_valid = False
            continue

        # Structural (schema) validation runs for every remaining file.
        structure_validator = StructureValidator(
            file_path, old_file_path=old_file_path)
        if not structure_validator.is_valid_file():
            self._is_valid = False

        # NOTE(review): this `if` heads the same elif-chain as the per-type
        # validators below, so when validate_id_set is truthy the type-specific
        # branches are skipped - confirm this coupling is intentional.
        if self.validate_id_set:
            if not self.id_set_validator.is_file_valid_in_set(file_path):
                self._is_valid = False

        elif checked_type(file_path, YML_INTEGRATION_REGEXES):
            image_validator = ImageValidator(file_path)
            if not image_validator.is_valid():
                self._is_valid = False

            description_validator = DescriptionValidator(file_path)
            if not description_validator.is_valid():
                self._is_valid = False

            integration_validator = IntegrationValidator(
                structure_validator)
            # Backward-compatibility check is optional (is_backward_check flag).
            if self.is_backward_check and not integration_validator.is_backward_compatible(
            ):
                self._is_valid = False
            if not integration_validator.is_valid_file():
                self._is_valid = False

        elif checked_type(file_path, YML_BETA_INTEGRATIONS_REGEXES):
            image_validator = ImageValidator(file_path)
            if not image_validator.is_valid():
                self._is_valid = False

            description_validator = DescriptionValidator(file_path)
            if not description_validator.is_valid_beta_description():
                self._is_valid = False

            integration_validator = IntegrationValidator(
                structure_validator)
            if not integration_validator.is_valid_beta_integration():
                self._is_valid = False

        elif checked_type(file_path, [SCRIPT_REGEX]):
            script_validator = ScriptValidator(structure_validator)
            if self.is_backward_check and not script_validator.is_backward_compatible(
            ):
                self._is_valid = False
            if not script_validator.is_valid_file():
                self._is_valid = False

        elif checked_type(file_path, PLAYBOOKS_REGEXES_LIST):
            playbook_validator = PlaybookValidator(structure_validator)
            if not playbook_validator.is_valid_playbook(
                    is_new_playbook=False):
                self._is_valid = False

        elif checked_type(file_path, PACKAGE_SCRIPTS_REGEXES):
            unifier = Unifier(os.path.dirname(file_path))
            yml_path, _ = unifier.get_script_package_data()
            # Set file path to the yml file
            structure_validator.file_path = yml_path
            script_validator = ScriptValidator(structure_validator)
            if self.is_backward_check and not script_validator.is_backward_compatible(
            ):
                self._is_valid = False
            if not script_validator.is_valid_file():
                self._is_valid = False

        elif re.match(IMAGE_REGEX, file_path, re.IGNORECASE):
            image_validator = ImageValidator(file_path)
            if not image_validator.is_valid():
                self._is_valid = False

        # incident fields and indicator fields are using the same scheme.
        elif checked_type(file_path, JSON_INDICATOR_AND_INCIDENT_FIELDS):
            incident_field_validator = IncidentFieldValidator(
                structure_validator)
            if not incident_field_validator.is_valid_file(
                    validate_rn=True):
                self._is_valid = False
            if self.is_backward_check and not incident_field_validator.is_backward_compatible(
            ):
                self._is_valid = False

        elif checked_type(file_path, [REPUTATION_REGEX]):
            reputation_validator = ReputationValidator(structure_validator)
            if not reputation_validator.is_valid_file(validate_rn=True):
                self._is_valid = False

        elif checked_type(file_path, JSON_ALL_LAYOUT_REGEXES):
            layout_validator = LayoutValidator(structure_validator)
            if not layout_validator.is_valid_layout(validate_rn=True):
                self._is_valid = False

        elif checked_type(file_path, JSON_ALL_DASHBOARDS_REGEXES):
            dashboard_validator = DashboardValidator(structure_validator)
            if not dashboard_validator.is_valid_dashboard(
                    validate_rn=True):
                self._is_valid = False

        elif checked_type(file_path, JSON_ALL_INCIDENT_TYPES_REGEXES):
            incident_type_validator = IncidentTypeValidator(
                structure_validator)
            if not incident_type_validator.is_valid_incident_type(
                    validate_rn=True):
                self._is_valid = False
            if self.is_backward_check and not incident_type_validator.is_backward_compatible(
            ):
                self._is_valid = False

        elif 'CHANGELOG' in file_path:
            self.is_valid_release_notes(file_path)

        # Recognized content type that needs no extra validation here.
        elif checked_type(file_path, CHECKED_TYPES_REGEXES):
            pass

        else:
            print_error(
                "The file type of {} is not supported in validate command".
                format(file_path))
            print_error(
                "'validate' command supports: Integrations, Scripts, Playbooks, "
                "Incident fields, Indicator fields, Images, Release notes, Layouts and Descriptions"
            )
            self._is_valid = False
def get_modified_files(files_string, tag='master', print_ignored_files=False):
    """Get lists of the modified files in your branch according to the files string.

    Args:
        files_string (string): String that was calculated by git using `git diff` command.
        tag (string): String of git tag used to update modified files.
        print_ignored_files (bool): should print ignored files.

    Returns:
        (modified_files_list, added_files_list, deleted_files, old_format_files). Tuple of sets.
    """
    all_files = files_string.split('\n')
    deleted_files = set([])
    added_files_list = set([])
    modified_files_list = set([])
    old_format_files = set([])
    for f in all_files:
        file_data = f.split()
        if not file_data:
            continue

        file_status = file_data[0]
        file_path = file_data[1]

        # Rename entries ('Rxxx old new'): normalize status, use the new path.
        if file_status.lower().startswith('r'):
            file_status = 'r'
            file_path = file_data[2]

        if checked_type(file_path, CODE_FILES_REGEX) and file_status.lower() != 'd' \
                and not file_path.endswith('_test.py'):
            # naming convention - code file and yml file in packages must have same name.
            file_path = os.path.splitext(file_path)[0] + '.yml'
        elif file_path.endswith('.js') or file_path.endswith('.py'):
            continue

        if file_status.lower() == 'd' and checked_type(
                file_path) and not file_path.startswith('.'):
            deleted_files.add(file_path)
        elif not os.path.isfile(file_path):
            # Skip directories / paths that no longer exist in the working tree.
            continue
        elif file_status.lower() in ['m', 'a', 'r'] and checked_type(file_path, OLD_YML_FORMAT_FILE) and \
                FilesValidator._is_py_script_or_integration(file_path):
            old_format_files.add(file_path)
        elif file_status.lower() == 'm' and checked_type(
                file_path) and not file_path.startswith('.'):
            modified_files_list.add(file_path)
        elif file_status.lower() == 'a' and checked_type(
                file_path) and not file_path.startswith('.'):
            added_files_list.add(file_path)
        elif file_status.lower().startswith('r') and checked_type(
                file_path):
            # if a code file changed, take the associated yml file.
            if checked_type(file_data[2], CODE_FILES_REGEX):
                modified_files_list.add(file_path)
            else:
                # Track the rename as an (old_path, new_path) tuple.
                modified_files_list.add((file_data[1], file_data[2]))
        elif checked_type(file_path, [SCHEMA_REGEX]):
            modified_files_list.add(file_path)
        elif file_status.lower() not in KNOWN_FILE_STATUSES:
            print_error(
                '{} file status is an unknown one, please check. File status was: {}'
                .format(file_path, file_status))
        elif print_ignored_files and not checked_type(
                file_path, IGNORED_TYPES_REGEXES):
            print_warning('Ignoring file path: {}'.format(file_path))

    # Collapse packagify moves (package-ified files) out of the raw git result.
    modified_files_list, added_files_list, deleted_files = filter_packagify_changes(
        modified_files_list, added_files_list, deleted_files, tag)

    return modified_files_list, added_files_list, deleted_files, old_format_files
def test_packs_regex(acceptable, non_acceptable, regex):
    """Check that every acceptable path matches *regex* and no non-acceptable path does."""
    for matching_path in acceptable:
        assert checked_type(matching_path, compared_regexes=regex)

    for non_matching_path in non_acceptable:
        assert not checked_type(non_matching_path, compared_regexes=regex)
def run_all_validations_on_file(self, file_path: str, file_type: str = None) -> None:
    """Runs all validations on file specified in 'file_path'

    Args:
        file_path: A relative content path to a file to be validated
        file_type: The output of 'find_type' method
    """
    file_extension = os.path.splitext(file_path)[-1]
    # We validate only yml json and .md files
    if file_extension not in ['.yml', '.json', '.md']:
        return
    # Ignoring changelog and description files since these are checked on the integration validation
    if 'changelog' in file_path.lower(
    ) or 'description' in file_path.lower():
        return
    # unified files should not be validated
    if file_path.endswith('_unified.yml'):
        return
    print(f'Validating {file_path}')

    if 'README' in file_path:
        readme_validator = ReadMeValidator(file_path)
        if not readme_validator.is_valid_file():
            self._is_valid = False
        return

    # Structural (schema) validation; per-type validators run only if it passes.
    structure_validator = StructureValidator(file_path,
                                             predefined_scheme=file_type)
    if not structure_validator.is_valid_file():
        self._is_valid = False
    elif re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
        playbook_validator = PlaybookValidator(structure_validator)
        if not playbook_validator.is_valid_playbook():
            self._is_valid = False
    elif re.match(PLAYBOOK_REGEX, file_path,
                  re.IGNORECASE) or file_type == 'playbook':
        playbook_validator = PlaybookValidator(structure_validator)
        if not playbook_validator.is_valid_playbook(validate_rn=False):
            self._is_valid = False
    elif checked_type(file_path,
                      INTEGRATION_REGXES) or file_type == 'integration':
        integration_validator = IntegrationValidator(structure_validator)
        if not integration_validator.is_valid_file(validate_rn=False):
            self._is_valid = False
    elif checked_type(file_path,
                      YML_ALL_SCRIPTS_REGEXES) or file_type == 'script':
        # Set file path to the yml file
        structure_validator.file_path = file_path
        script_validator = ScriptValidator(structure_validator)
        if not script_validator.is_valid_file(validate_rn=False):
            self._is_valid = False
    elif checked_type(file_path,
                      YML_BETA_INTEGRATIONS_REGEXES
                      ) or file_type == 'betaintegration':
        integration_validator = IntegrationValidator(structure_validator)
        if not integration_validator.is_valid_beta_integration():
            self._is_valid = False
    # incident fields and indicator fields are using the same scheme.
    elif checked_type(file_path, JSON_INDICATOR_AND_INCIDENT_FIELDS) or \
            file_type in ('incidentfield', 'indicatorfield'):
        incident_field_validator = IncidentFieldValidator(
            structure_validator)
        if not incident_field_validator.is_valid_file(validate_rn=False):
            self._is_valid = False
    elif checked_type(
            file_path,
            JSON_ALL_INDICATOR_TYPES_REGEXES) or file_type == 'reputation':
        reputation_validator = ReputationValidator(structure_validator)
        if not reputation_validator.is_valid_file(validate_rn=False):
            self._is_valid = False
    elif checked_type(file_path,
                      JSON_ALL_LAYOUT_REGEXES) or file_type == 'layout':
        layout_validator = LayoutValidator(structure_validator)
        if not layout_validator.is_valid_layout(validate_rn=False):
            self._is_valid = False
    elif checked_type(
            file_path,
            JSON_ALL_DASHBOARDS_REGEXES) or file_type == 'dashboard':
        dashboard_validator = DashboardValidator(structure_validator)
        if not dashboard_validator.is_valid_dashboard(validate_rn=False):
            self._is_valid = False
    elif checked_type(file_path,
                      JSON_ALL_INCIDENT_TYPES_REGEXES
                      ) or file_type == 'incidenttype':
        incident_type_validator = IncidentTypeValidator(
            structure_validator)
        if not incident_type_validator.is_valid_incident_type(
                validate_rn=False):
            self._is_valid = False
    # Recognized content type with no dedicated validator.
    elif checked_type(file_path, CHECKED_TYPES_REGEXES):
        print(f'Could not find validations for file {file_path}')
    else:
        print_error(
            'The file type of {} is not supported in validate command'.
            format(file_path))
        print_error(
            'validate command supports: Integrations, Scripts, Playbooks, dashboards, incident types, '
            'reputations, Incident fields, Indicator fields, Images, Release notes, '
            'Layouts and Descriptions')
        self._is_valid = False
def validate_added_files(self, added_files):  # noqa: C901
    """Validate the added files from your branch.

    In case we encounter an invalid file we set the self._is_valid param to False.

    Args:
        added_files (set): A set of the modified files in the current branch.
    """
    for file_path in added_files:
        print('Validating {}'.format(file_path))

        if re.match(TEST_PLAYBOOK_REGEX, file_path, re.IGNORECASE):
            continue

        # Structural (schema) validation runs for every remaining file.
        structure_validator = StructureValidator(file_path, is_new_file=True)
        if not structure_validator.is_valid_file():
            self._is_valid = False

        # NOTE(review): this `if` heads the same elif-chain as the per-type
        # validators below, so when validate_id_set is truthy the type-specific
        # branches are skipped - confirm this coupling is intentional.
        if self.validate_id_set:
            if not self.id_set_validator.is_file_valid_in_set(file_path):
                self._is_valid = False
            if self.id_set_validator.is_file_has_used_id(file_path):
                self._is_valid = False

        elif re.match(PLAYBOOK_REGEX, file_path, re.IGNORECASE):
            playbook_validator = PlaybookValidator(structure_validator)
            if not playbook_validator.is_valid_playbook():
                self._is_valid = False

        elif checked_type(file_path, YML_INTEGRATION_REGEXES):
            image_validator = ImageValidator(file_path)
            if not image_validator.is_valid():
                self._is_valid = False

            description_validator = DescriptionValidator(file_path)
            if not description_validator.is_valid():
                self._is_valid = False

            integration_validator = IntegrationValidator(structure_validator)
            if not integration_validator.is_valid_file(validate_rn=False):
                self._is_valid = False

        elif checked_type(file_path, PACKAGE_SCRIPTS_REGEXES):
            unifier = Unifier(os.path.dirname(file_path))
            yml_path, _ = unifier.get_script_package_data()
            # Set file path to the yml file
            structure_validator.file_path = yml_path
            script_validator = ScriptValidator(structure_validator)
            if not script_validator.is_valid_file(validate_rn=False):
                self._is_valid = False

        elif re.match(BETA_INTEGRATION_REGEX, file_path, re.IGNORECASE) or \
                re.match(BETA_INTEGRATION_YML_REGEX, file_path, re.IGNORECASE):
            description_validator = DescriptionValidator(file_path)
            if not description_validator.is_valid_beta_description():
                self._is_valid = False
            integration_validator = IntegrationValidator(structure_validator)
            if not integration_validator.is_valid_beta_integration():
                self._is_valid = False

        elif re.match(IMAGE_REGEX, file_path, re.IGNORECASE):
            image_validator = ImageValidator(file_path)
            if not image_validator.is_valid():
                self._is_valid = False

        # incident fields and indicator fields are using the same scheme.
        elif checked_type(file_path, JSON_INDICATOR_AND_INCIDENT_FIELDS):
            incident_field_validator = IncidentFieldValidator(structure_validator)
            if not incident_field_validator.is_valid_file():
                self._is_valid = False

        elif checked_type(file_path, JSON_ALL_LAYOUT_REGEXES):
            layout_validator = LayoutValidator(structure_validator)
            if not layout_validator.is_valid_layout():
                self._is_valid = False

        elif 'CHANGELOG' in file_path:
            self.is_valid_release_notes(file_path)

        elif checked_type(file_path, [REPUTATION_REGEX]):
            # Reputation files currently have no dedicated validation.
            print_color(
                F'Skipping validation for file {file_path} since no validation is currently defined.',
                LOG_COLORS.YELLOW)

        # Recognized content type that needs no extra validation here.
        elif checked_type(file_path, CHECKED_TYPES_REGEXES):
            pass

        else:
            print_error("The file type of {} is not supported in validate command".format(file_path))
            print_error("validate command supports: Integrations, Scripts, Playbooks, "
                        "Incident fields, Indicator fields, Images, Release notes, Layouts and Descriptions")
            self._is_valid = False
def filter_changed_files(self, files_string, tag='master', print_ignored_files=False):
    """Get lists of the modified files in your branch according to the files string.

    Args:
        files_string (string): String that was calculated by git using `git diff` command.
        tag (string): String of git tag used to update modified files.
        print_ignored_files (bool): should print ignored files.

    Returns:
        Tuple of sets: (modified_files_list, added_files_list, deleted_files,
        old_format_files, changed_meta_files).
    """
    all_files = files_string.split('\n')
    deleted_files = set()
    added_files_list = set()
    modified_files_list = set()
    old_format_files = set()
    changed_meta_files = set()
    for f in all_files:
        # git --name-status output is tab-separated; drop empty fields.
        file_data = list(filter(None, f.split('\t')))

        if not file_data:
            continue

        file_status = file_data[0]
        file_path = file_data[1]

        # Rename entries ('Rxxx old new'): normalize status, use the new path.
        if file_status.lower().startswith('r'):
            file_status = 'r'
            file_path = file_data[2]

        # if the file is a code file - change path to the associated yml path.
        if checked_type(file_path, CODE_FILES_REGEX) and file_status.lower() != 'd' \
                and not (file_path.endswith('_test.py') or file_path.endswith('.Tests.ps1')):
            # naming convention - code file and yml file in packages must have same name.
            file_path = os.path.splitext(file_path)[0] + '.yml'

        # ignore changes in JS files and unit test files.
        elif file_path.endswith('.js') or file_path.endswith('.py') or file_path.endswith('.ps1'):
            self.ignored_files.add(file_path)
            continue

        # identify deleted files
        if file_status.lower() == 'd' and checked_type(file_path) and not file_path.startswith('.'):
            deleted_files.add(file_path)

        # ignore directories
        elif not os.path.isfile(file_path):
            continue

        # changes in old scripts and integrations - unified python scripts/integrations
        elif file_status.lower() in ['m', 'a', 'r'] and checked_type(file_path, OLD_YML_FORMAT_FILE) and \
                self._is_py_script_or_integration(file_path):
            old_format_files.add(file_path)

        # identify modified files
        elif file_status.lower() == 'm' and checked_type(file_path) and not file_path.startswith('.'):
            modified_files_list.add(file_path)

        # identify added files
        elif file_status.lower() == 'a' and checked_type(file_path) and not file_path.startswith('.'):
            added_files_list.add(file_path)

        # identify renamed files
        elif file_status.lower().startswith('r') and checked_type(file_path):
            # if a code file changed, take the associated yml file.
            if checked_type(file_data[2], CODE_FILES_REGEX):
                modified_files_list.add(file_path)
            else:
                # file_data[1] = old name, file_data[2] = new name
                modified_files_list.add((file_data[1], file_data[2]))

        # detect changes in schema
        elif checked_type(file_path, [SCHEMA_REGEX]):
            modified_files_list.add(file_path)
            self.changes_in_schema = True

        elif file_status.lower() not in KNOWN_FILE_STATUSES:
            click.secho('{} file status is an unknown one, please check. File status was: {}'
                        .format(file_path, file_status), fg="bright_red")

        elif file_path.endswith(PACKS_PACK_META_FILE_NAME):
            # Pack metadata: new packs are tracked by name, modified ones by path.
            if file_status.lower() == 'a':
                self.new_packs.add(get_pack_name(file_path))
            elif file_status.lower() == 'm':
                changed_meta_files.add(file_path)

        elif print_ignored_files and not checked_type(file_path, IGNORED_TYPES_REGEXES):
            if file_path not in self.ignored_files:
                self.ignored_files.add(file_path)
                click.secho('Ignoring file path: {}'.format(file_path), fg="yellow")

    # Collapse packagify moves (package-ified files) out of the raw git result.
    modified_files_list, added_files_list, deleted_files = filter_packagify_changes(
        modified_files_list,
        added_files_list,
        deleted_files,
        tag)

    return modified_files_list, added_files_list, deleted_files, old_format_files, changed_meta_files