def _is_valid_version(self):
    # type: () -> bool
    """Check that the file's root-level version equals the expected default.

    Return:
        True if version is valid, else False
    """
    if self.current_file.get('version') == self.DEFAULT_VERSION:
        return True
    # Version mismatch - report it; the file is only marked invalid when the
    # error is not ignored by the error handler.
    error_message, error_code = Errors.wrong_version(self.DEFAULT_VERSION)
    if self.handle_error(error_message, error_code, file_path=self.file_path,
                         suggested_fix=Errors.suggest_fix(self.file_path)):
        self.is_valid = False
        return False
    return True
def test_integration_is_skipped__comment(self):
    """Verify the skipped-integration message contains both the id and the skip comment."""
    integration_id = "dummy_integration"
    skip_comment = "Issue 00000"
    expected = f"The integration {integration_id} is currently in skipped. Please add working tests and " + \
        f"unskip. Skip comment: {skip_comment}"
    actual = Errors.integration_is_skipped(integration_id, skip_comment)
    assert expected == actual[0]
def _is_id_equals_name(self, file_type):
    """Validate that the file's id field matches its name field.

    Args:
        file_type (str): the file type. can be 'integration', 'script', 'playbook', 'dashboard', 'id'

    Returns:
        bool. Whether the file's id is equal to its name
    """
    current_id = _get_file_id(file_type, self.current_file)
    current_name = self.current_file.get('name', '')
    if current_id == current_name:
        return True
    error_message, error_code = Errors.id_should_equal_name(current_name, current_id)
    if self.handle_error(error_message, error_code, file_path=self.file_path,
                         suggested_fix=Errors.suggest_fix(self.file_path)):
        return False
    return True
def is_incident_field_exist(self, id_set_file, is_circle) -> bool:
    """Check that every incident field referenced by the layout exists in the content.

    Args:
        id_set_file (dict): Parsed id_set.json content.
        is_circle (bool): Whether running in CI; validation is skipped otherwise.

    Returns:
        bool. True if incident field is valid (or validation was skipped), else False.
    """
    if not is_circle:
        return True

    if not id_set_file:
        click.secho("Skipping mapper incident field validation. Could not read id_set.json.", fg="yellow")
        return True

    # Container fields actually present in the file (comprehension instead of a
    # manual append loop).
    layout_container_items = [
        layout_container_field for layout_container_field in LAYOUT_CONTAINER_FIELDS
        if self.current_file.get(layout_container_field)
    ]

    # Collect every fieldId referenced in tabs -> sections -> items, with the
    # 'incident_' prefix stripped.
    layout_incident_fields = []
    for layout_container_item in layout_container_items:
        layout = self.current_file.get(layout_container_item, {})
        for layout_tab in layout.get('tabs', []):
            for section in layout_tab.get('sections', []):
                if section and section.get('items'):
                    for item in section.get('items', []):
                        layout_incident_fields.append(item.get('fieldId', '').replace('incident_', ''))

    content_incident_fields = get_all_incident_and_indicator_fields_from_id_set(id_set_file, 'layout')
    built_in_fields = [field.lower() for field in BUILT_IN_FIELDS] + LAYOUT_AND_MAPPER_BUILT_IN_FIELDS

    invalid_inc_fields_list = []
    for inc_field in layout_incident_fields:
        if inc_field and inc_field.lower() not in built_in_fields and inc_field not in content_incident_fields:
            # Plain `if` replaces a conditional expression that was used only for
            # its side effect; the membership test keeps the list duplicate-free.
            if inc_field not in invalid_inc_fields_list:
                invalid_inc_fields_list.append(inc_field)

    if invalid_inc_fields_list:
        error_message, error_code = Errors.invalid_incident_field_in_layout(invalid_inc_fields_list)
        if self.handle_error(error_message, error_code, file_path=self.file_path):
            return False
    return True
def is_valid_unsearchable_key(self):
    # type: () -> bool
    """Validate that the unsearchable key is set to true

    Returns:
        bool. Whether the file's unsearchable key is set to true.
    """
    # Missing key defaults to True, i.e. only an explicit falsy value fails.
    if self.current_file.get('unsearchable', True):
        return True
    error_message, error_code = Errors.unsearchable_key_should_be_true_generic_field()
    if self.handle_error(error_message, error_code, file_path=self.file_path,
                         suggested_fix=Errors.suggest_fix(self.file_path)):
        return False
    return True
def __init__(self, input: str, dir_name=INTEGRATIONS_DIR, output: str = '', image_prefix=DEFAULT_IMAGE_PREFIX, force: bool = False): directory_name = '' # Changing relative path to current abspath fixed problem with default output file name. if input == '.': input = os.path.abspath(input) for optional_dir_name in DIR_TO_PREFIX: if optional_dir_name in input: directory_name = optional_dir_name if not directory_name: print_error( 'You have failed to provide a legal file path, a legal file path ' 'should contain either Integrations or Scripts directories') self.image_prefix = image_prefix self.package_path = input self.use_force = force if self.package_path.endswith(os.sep): self.package_path = self.package_path.rstrip(os.sep) self.dest_path = output yml_paths, self.yml_path = get_yml_paths_in_dir( self.package_path, Errors.no_yml_file(self.package_path)) for path in yml_paths: # The plugin creates a unified YML file for the package. # In case this script runs locally and there is a unified YML file in the package we need to ignore it. # Also, # we don't take the unified file by default because # there might be packages that were not created by the plugin. if 'unified' not in path and os.path.basename( os.path.dirname(path)) not in [ SCRIPTS_DIR, INTEGRATIONS_DIR ]: self.yml_path = path break self.ryaml = YAML() self.ryaml.preserve_quotes = True self.ryaml.width = 50000 # make sure long lines will not break (relevant for code section) if self.yml_path: with io.open(self.yml_path, 'r', encoding='utf8') as yml_file: self.yml_data = self.ryaml.load(yml_file) else: self.yml_data = {} print_error(f'No yml found in path: {self.package_path}') # script key for scripts is a string. # script key for integrations is a dictionary. self.is_script_package = isinstance(self.yml_data.get('script'), str) self.dir_name = SCRIPTS_DIR if self.is_script_package else dir_name
def is_name_not_empty(self):
    """Validate that the job's name exists and is not whitespace-only."""
    name = self.current_file.get('name')
    if name and not name.isspace():
        return True
    error_message, error_code = Errors.empty_or_missing_job_name()
    if self.handle_error(error_message, error_code, file_path=self.file_path):
        self._errors.append(error_message)
        return False
    return True
def check_for_spaces_in_file_name(self):
    """Validate that the file's base name contains no spaces.

    Returns:
        bool. True if the name has no spaces (or the error was ignored), else False.
    """
    file_name = os.path.basename(self.file_path)
    # Membership test instead of count(' ') > 0 - same result, idiomatic and
    # short-circuits on the first space.
    if ' ' in file_name:
        error_message, error_code = Errors.file_name_include_spaces_error(file_name)
        if self.handle_error(error_message, error_code, self.file_path):
            return False
    return True
def contains_forbidden_fields(self):
    # type: () -> bool
    """Return if root and widgets exclude the unnecessary fields.

    Returns:
        True if exclude, else False.
    """
    accumulated_errors = ""
    valid = True
    forbidden_fields = [
        'system', 'isCommon', 'shared', 'owner', 'sortValues',
        'vcShouldIgnore', 'commitMessage', 'shouldCommit'
    ]
    dashboard_widgets = self.get_widgets_from_dashboard(self.current_file)

    for field in forbidden_fields:
        # Root-level: any explicit value (even a falsy one) is forbidden.
        if self.current_file.get(field) is not None:
            error_message, error_code = Errors.remove_field_from_dashboard(field)
            formatted = self.handle_error(error_message, error_code,
                                          file_path=self.file_path,
                                          should_print=False)
            if formatted:
                valid = False
                accumulated_errors += formatted
        # Widget-level: only truthy values are flagged.
        if dashboard_widgets:
            for widget in dashboard_widgets:
                if widget.get(field):
                    error_message, error_code = Errors.remove_field_from_widget(field, widget)
                    formatted = self.handle_error(error_message, error_code,
                                                  file_path=self.file_path,
                                                  should_print=False)
                    if formatted:
                        valid = False
                        accumulated_errors += formatted

    # All collected messages are printed together at the end.
    if accumulated_errors:
        print_error(accumulated_errors)
    return valid
def parse_docker_image(self, docker_image):
    """Verify that the docker image is of demisto format & parse the name and tag

    Args:
        docker_image: String representation of the docker image name and tag

    Returns:
        The name and the tag of the docker image
    """
    if not docker_image:
        # No docker image in the yml - provide a default one with its latest numeric tag.
        default_image = 'demisto/python' if self.py_version == 'python2' else 'demisto/python3'
        return default_image, self.get_docker_image_latest_tag(default_image, None)

    tag = ''
    image = ''
    try:
        matches = re.findall(r'(demisto\/.+)', docker_image, re.IGNORECASE)
        if matches:
            image = matches[0]
            if ':' in image:
                parts = image.split(':')
                image, tag = parts[0], parts[1]
            else:
                # demisto/... image without an explicit tag.
                error_message, error_code = Errors.no_docker_tag(docker_image)
                self.handle_error(error_message, error_code, file_path=self.file_path)
    except IndexError:
        error_message, error_code = Errors.docker_not_formatted_correctly(docker_image)
        self.handle_error(error_message, error_code, file_path=self.file_path)
    return image, tag
def is_valid_scheme(self):
    # type: () -> bool
    """Validate the file scheme according to the scheme we have saved in SCHEMAS_PATH.

    Returns:
        bool. Whether the scheme is valid on self.file_path.
    """
    # ignore schema checks for unsupported file types, reputations.json or is skip-schema-check is set.
    if self.scheme_name in [None, FileType.IMAGE, FileType.README, FileType.RELEASE_NOTES, FileType.TEST_PLAYBOOK] \
            or self.skip_schema_check or (self.scheme_name == FileType.REPUTATION
                                          and os.path.basename(self.file_path) == OLD_REPUTATION):
        return True
    click.secho(f'Validating scheme for {self.file_path}')
    try:
        # disabling messages of level INFO and beneath of pykwalify such as: INFO:pykwalify.core:validation.valid
        log = logging.getLogger('pykwalify.core')
        log.setLevel(logging.WARNING)
        if self.suppress_print:
            logging.disable(logging.CRITICAL)
        # Beta integrations are validated against the regular integration schema.
        scheme_file_name = 'integration' if self.scheme_name.value == 'betaintegration' else self.scheme_name.value  # type: ignore
        # Schema file lives relative to this module, under SCHEMAS_PATH.
        path = os.path.normpath(
            os.path.join(__file__, "..", "..", self.SCHEMAS_PATH,
                         '{}.yml'.format(scheme_file_name)))
        core = Core(source_file=self.file_path, schema_files=[path])
        core.validate(raise_exception=True)
    except Exception as err:
        # First try to produce a friendly, parsed error; if parsing itself
        # fails, fall back to the generic pykwalify error.
        try:
            error_message, error_code = self.parse_error_msg(err)
            if self.handle_error(error_message, error_code, self.file_path,
                                 suggested_fix=Errors.suggest_fix(self.file_path)):
                self.is_valid = False
                return False
        except Exception:
            error_message, error_code = Errors.pykwalify_general_error(err)
            if self.handle_error(error_message, error_code, self.file_path):
                self.is_valid = False
                return False
    return True
def is_valid_feed_fields(self):
    """Validate the isFeed / selectedFeeds / isAllFeeds combination of a job.

    A feed job must set exactly one of selectedFeeds or isAllFeeds; a non-feed
    job must set neither. Selected feeds themselves are validated in the id_set.
    """
    is_feed = self.current_file.get('isFeed')
    selected_feeds = self.current_file.get('selectedFeeds')
    is_all_feeds = self.current_file.get('isAllFeeds')

    if is_feed:
        if selected_feeds and is_all_feeds:
            error_message, error_code = Errors.invalid_both_selected_and_all_feeds_in_job()
        elif selected_feeds or is_all_feeds:
            # Exactly one of the two is set - valid.
            return True
        else:
            # Neither selectedFeeds nor isAllFeeds was provided.
            error_message, error_code = Errors.missing_field_values_in_feed_job()
    else:
        if not (selected_feeds or is_all_feeds):
            return True
        error_message, error_code = Errors.unexpected_field_values_in_non_feed_job(
            bool(selected_feeds), bool(is_all_feeds))

    # Shared error-emission tail for every invalid combination.
    if self.handle_error(error_message, error_code, file_path=self.file_path):
        self._errors.append(error_message)
        return False
    return True
def is_docker_image_same_as_yml(self) -> bool:
    """
    Iterates on all modified yaml files, checking if the yaml is related to one of the sections in
    the RN and if there's a docker-image version update mentioned in the RN. If so, make sure the
    versions match.

    Return:
        True if for all the modified yaml files, if there was a change in the docker image in the
        RN, it's the same version as the yaml. Otherwise, return False and a
        release_notes_docker_image_not_match_yaml Error
    """
    release_notes_categories = self.get_categories_from_rn("\n" + self.latest_release_notes)
    # renamed files will appear in the modified list as a tuple: (old path, new path)
    modified_files_list = [
        file[1] if isinstance(file, tuple) else file
        for file in (self.modified_files or [])
    ]
    modified_yml_list = [file for file in modified_files_list if file.endswith('.yml')]
    rn_file_name = self.release_notes_file_path[self.release_notes_file_path.rindex('/') + 1:]
    error_list = []
    # `entity_type` instead of `type`, which shadowed the builtin.
    for entity_type, field in zip(['Integrations', 'Scripts'], ['display', 'name']):
        if entity_type in release_notes_categories:
            split_release_notes_entities = self.get_entities_from_category(
                f'\n{release_notes_categories.get(entity_type)}')
            for modified_yml_file in modified_yml_list:
                modified_yml_dict = get_yaml(modified_yml_file) or {}
                if modified_yml_dict.get(field) in split_release_notes_entities:
                    entity_content = split_release_notes_entities.get(
                        modified_yml_dict.get(field, {}), '') + "\n"
                    docker_version = self.get_docker_version_from_rn(entity_content)
                    # Scripts keep dockerimage at the top level; integrations nest it
                    # under the 'script' key.
                    yml_docker_version = modified_yml_dict.get("dockerimage") if entity_type == 'Scripts' else \
                        modified_yml_dict.get("script", {}).get("dockerimage", '')
                    if docker_version and yml_docker_version and yml_docker_version != docker_version:
                        error_list.append({
                            'name': modified_yml_dict.get(field),
                            'rn_version': docker_version,
                            'yml_version': yml_docker_version
                        })
    if error_list:
        error_message, error_code = Errors.release_notes_docker_image_not_match_yaml(
            rn_file_name, error_list, self.pack_path)
        if self.handle_error(error_message, error_code, file_path=self.release_notes_file_path):
            return False
    return True
def _name_has_no_beta_substring(self):
    # type: () -> bool
    """Check that the 'name' field does not contain the substring 'beta'."""
    current_name = self.current_file.get('name', '')
    if 'beta' not in current_name.lower():
        return True
    error_message, error_code = Errors.beta_in_name()
    if self.handle_error(error_message, error_code, file_path=self.file_path):
        return False
    return True
def _is_valid_deprecated_integration_display_name(self) -> bool:
    """Validate that a deprecated integration's display name ends with '(Deprecated)'."""
    if not self.current_file.get('deprecated', False):
        return True
    display_name = self.current_file.get('display', '')
    if display_name.endswith('(Deprecated)'):
        return True
    error_message, error_code = Errors.invalid_deprecated_integration_display_name()
    if self.handle_error(error_message, error_code, file_path=self.file_path):
        return False
    return True
def is_not_default_image(self):
    """Check if the image is the default one"""
    current_image = self.load_image()
    if current_image not in [DEFAULT_IMAGE_BASE64, DEFAULT_DBOT_IMAGE_BASE64]:  # disable-secrets-detection
        return True
    error_message, error_code = Errors.default_image_error()
    if self.handle_error(error_message, error_code, file_path=self.file_path):
        self._is_valid = False
        return False
    return True
def is_valid_required(self) -> bool:
    """Validate that the incident field is not required."""
    # due to a current platform limitation, incident fields can not be set to required
    # after it will be fixed, need to validate that required field are not associated to all incident types
    # as can be seen in this pr: https://github.com/demisto/content/pull/5682
    if not self.current_file.get('required'):
        return True
    error_message, error_code = Errors.new_field_required()
    # Invalid only when the error is actually reported (not ignored).
    return not self.handle_error(error_message, error_code, file_path=self.file_path)
def is_field_mapping_removed(self):
    """Check whether incident types or incident fields were removed from the mapper.

    Returns:
        bool. True if a removal was detected and reported (also sets
        self.is_valid = False), else False.
    """
    old_mapper = self.old_file.get('mapping', {})
    current_mapper = self.current_file.get('mapping', {})

    # set(dict) is the idiomatic spelling of {k for k in dict}.
    old_incidents_types = set(old_mapper)
    current_incidents_types = set(current_mapper)
    if not old_incidents_types.issubset(current_incidents_types):
        # Whole incident types disappeared - report them with their old mappings.
        removed_incident_types = old_incidents_types - current_incidents_types
        removed_dict = {removed: old_mapper[removed] for removed in removed_incident_types}
        error_message, error_code = Errors.removed_incident_types(removed_dict)
        if self.handle_error(error_message, error_code, file_path=self.file_path):
            self.is_valid = False
            return True
    else:
        # Same incident types - check for removed fields inside each type.
        removed_incident_fields = {}
        for inc in old_incidents_types:
            old_fields = set(old_mapper[inc].get('internalMapping', {}))
            current_fields = set(current_mapper[inc].get('internalMapping', {}))
            if not old_fields.issubset(current_fields):
                removed_incident_fields[inc] = old_fields - current_fields
        if removed_incident_fields:
            error_message, error_code = Errors.changed_incident_field_in_mapper(removed_incident_fields)
            if self.handle_error(error_message, error_code, file_path=self.file_path):
                self.is_valid = False
                return True
    return False
def is_valid_feed(self):
    # type: () -> bool
    """Validate a feed integration: fromversion must be at least 5.5.0 and all
    feed parameters must exist. Non-feed integrations are always valid here."""
    if not self.current_file.get("script", {}).get("feed"):
        return True
    from_version_ok = True
    from_version = self.current_file.get("fromversion", "0.0.0")
    if not from_version or server_version_compare("5.5.0", from_version) == 1:
        error_message, error_code = Errors.feed_wrong_from_version(from_version)
        suggested = Errors.suggest_fix(self.file_path, '--from-version', '5.5.0')
        if self.handle_error(error_message, error_code, file_path=self.file_path,
                             suggested_fix=suggested):
            from_version_ok = False
    return from_version_ok and self.all_feed_params_exist()
def is_right_usage_of_usecase_tag(self):
    """Checks whether Use Case tag in pack_metadata is used properly

    Return:
        bool: True if the Pack contains at least one PB, Incident Type or Layout, otherwise False
    """
    try:
        # Both the metadata read and the content check stay inside the try so a
        # ValueError/TypeError from either is treated as wrong usage.
        metadata = self._read_metadata_content()
        wrong_usage = "Use Case" in metadata['tags'] and not self._contains_use_case()
    except (ValueError, TypeError):
        wrong_usage = True
    if wrong_usage and self._add_error(Errors.is_wrong_usage_of_usecase_tag(), self.pack_meta_file):
        return False
    return True
def are_integrations_mapped_to_dependency_packs(integrations: set) -> bool:
    """Check that every wizard integration is known and its pack is a declared dependency.

    Args:
        integrations (set): Integration names referenced by the wizard.

    Returns:
        bool. True if every integration maps to a pack present in the wizard's
        dependency packs, else False.
    """
    integration_in_dependency_packs = True
    for integration in integrations:
        if integration not in integrations_to_pack:
            # Unknown integration - not mapped to any pack.
            error_message, error_code = Errors.invalid_integration_in_wizard(integration)
            if self.handle_error(error_message, error_code, file_path=self.file_path):
                integration_in_dependency_packs = False
        elif (pack := integrations_to_pack[integration]) not in self._pack_deps:
            # Known integration whose pack is missing from the dependency list.
            error_message, error_code = Errors.missing_dependency_pack_in_wizard(
                pack, f'integration "{integration}"')
            if self.handle_error(error_message, error_code, file_path=self.file_path):
                integration_in_dependency_packs = False
    # Bug fix: the computed result was never returned (the function implicitly
    # returned None despite the `-> bool` annotation).
    return integration_in_dependency_packs
def test_file_name_includes_spaces(self):
    """
    Given: File Name with spaces
    When: Returning an error message
    Then: Return error message with the input value as a tuple containing error and error code.
    """
    file_name = "test file.gif"
    expected_result = ("Please remove spaces from the file's name: 'test file.gif'.", 'BA103')
    actual = Errors.file_name_include_spaces_error(file_name)
    assert actual == expected_result
def is_valid_max_fetch_and_first_fetch(self) -> bool:
    """
    validate that the max_fetch and first_fetch params exist in the yml and the max_fetch has default value
    Returns:
        bool. True if the integration is defined as well False otherwise.
    """
    if self.current_file.get('script', {}).get('isfetch') is not True:
        # Only fetching integrations need these parameters.
        return True

    params_valid = True
    configuration = self.current_file.get('configuration', [])
    first_fetch_param = None
    max_fetch_param = None
    for param in configuration:
        # the common names for the first_fetch param
        param_name = param.get('name')
        if param_name == FIRST_FETCH:
            first_fetch_param = param
        elif param_name == MAX_FETCH:
            max_fetch_param = param

    if not first_fetch_param:
        error_message, error_code = Errors.parameter_missing_from_yml_not_community_contributor(
            'first_fetch', yaml.dump(FIRST_FETCH_PARAM))
        if self.handle_error(error_message, error_code, file_path=self.file_path):
            params_valid = False

    if not max_fetch_param:
        error_message, error_code = Errors.parameter_missing_from_yml_not_community_contributor(
            'max_fetch', yaml.dump(MAX_FETCH_PARAM))
        if self.handle_error(error_message, error_code, file_path=self.file_path):
            params_valid = False
    elif not max_fetch_param.get("defaultvalue"):
        error_message, error_code = Errors.no_default_value_in_parameter('max_fetch')
        if self.handle_error(error_message, error_code, file_path=self.file_path):
            params_valid = False

    return params_valid
def _is_pack_ignore_file_structure_valid(self):
    """Check if .pack-ignore structure is parse-able"""
    try:
        parsed = self._parse_file_into_list(self.pack_ignore_file)
    except re.error:
        # Unparseable file: report it; if the error is suppressed (add_error
        # returns falsy) the file still counts as valid.
        return not self._add_error(
            Errors.pack_file_bad_format(self.pack_ignore_file), self.pack_ignore_file)
    return bool(parsed)
def _has_beta_param(self):
    # type: () -> bool
    """Check that the integration has a 'beta' field set to a truthy value."""
    if self.current_file.get('beta', False):
        return True
    error_message, error_code = Errors.beta_field_not_found()
    return not self.handle_error(error_message, error_code, file_path=self.file_path)
def _is_layouts_container_scripts_found(self, layouts_container_data, layouts_container_file_path=None):
    """Check if scripts of a layouts container is in the id_set

    Args:
        layouts_container_data (dict): Dictionary that holds the extracted details from the given layouts container.
        layouts_container_file_path (str): Path to the file.

    Returns:
        bool. Whether the scripts are in the id_set or not.
    """
    container_id = list(layouts_container_data.keys())[0]
    container = layouts_container_data.get(container_id, {})
    container_name = container.get('name', container_id)
    container_tabs = self._get_layouts_container_tabs(container)

    script_ids = set(get_layouts_scripts_ids(container_tabs))
    # Only query the id_set when the container actually references scripts.
    missing_scripts = self._get_scripts_that_are_not_in_id_set(script_ids) if script_ids else set()

    if not missing_scripts:
        return True

    # Report the missing scripts; if the error is ignored the container is
    # still considered valid (mirrors the original handle_error negation).
    error_message, error_code = Errors.layouts_container_non_existent_script_id(
        container_name, ', '.join(missing_scripts))
    return not self.handle_error(
        error_message, error_code,
        file_path=layouts_container_file_path,
        suggested_fix=Errors.suggest_fix_non_existent_script_id())
def is_valid_file_path(self) -> bool:
    """Validate that the layout file name starts with the 'layout-' prefix."""
    base_name = os.path.basename(self.file_path)
    if base_name.startswith('layout-'):
        return True
    error_message, error_code = Errors.invalid_file_path_layout(base_name)
    return not self.handle_error(error_message, error_code, file_path=self.file_path)
def validate_support_details_exist(self, pack_meta_file_content):
    """Validate either email or url exist in contributed pack details."""
    # Guard clauses preserve the original short-circuit: the email key is only
    # accessed when the url value is falsy.
    if pack_meta_file_content[PACK_METADATA_URL]:
        return True
    if pack_meta_file_content[PACK_METADATA_EMAIL]:
        return True
    if self._add_error(Errors.pack_metadata_missing_url_and_email(), self.pack_meta_file):
        return False
    return True
def is_valid_as_deprecated(self) -> bool:
    """Validate that a hidden playbook's description starts with 'Deprecated.'."""
    if not self.current_file.get('hidden', False):
        return True
    description = self.current_file.get('description', '')
    if description.startswith('Deprecated.'):
        return True
    error_message, error_code = Errors.invalid_deprecated_playbook()
    return not self.handle_error(error_message, error_code, file_path=self.file_path)
def test_wrong_required_value(self):
    """
    Given: Param value
    When: Returning an error message
    Then: Return error message with the input value as a tuple containing error and error code.
    """
    param_name = "test param"
    expected_result = ("The required field of the test param parameter should be False", "IN102")
    actual = Errors.wrong_required_value(param_name)
    assert expected_result == actual