def is_valid_default_arguments(self):
    # type: () -> bool
    """Validate that every reputation command (domain/email/file/ip/url) has an
    argument named like the command whose 'default' flag is not set to False.

    Returns:
        bool. True when all reputation commands carry a valid default argument.
    """
    all_valid = True
    for command in self.current_file.get('script', {}).get('commands', []):
        command_name = command.get('name')
        if command_name not in BANG_COMMAND_NAMES:
            continue
        found_matching_arg = False
        for argument in command.get('arguments', []):
            argument_name = argument.get('name')
            if argument_name != command_name:
                continue
            found_matching_arg = True
            # An explicit default=False on the matching argument is invalid.
            if argument.get('default') is False:
                self.is_valid = False
                all_valid = False
                print_error(Errors.wrong_default_argument(self.file_path, argument_name, command_name))
        if not found_matching_arg:
            print_error(Errors.no_default_arg(self.file_path, command_name))
            all_valid = False
    return all_valid
def is_valid_param(self, param_name, param_display):
    # type: (str, str) -> bool
    """Check that the configuration parameter *param_name* is set up correctly:
    expected display name, empty/'false' default value, not required, type 8.

    Args:
        param_name (str): name of the configuration parameter to inspect.
        param_display (str): expected display name for the parameter.

    Returns:
        bool. True if the parameter configuration is valid, else False.
    """
    err_msgs = []
    configuration = self.current_file.get('configuration', [])
    for configuration_param in configuration:
        configuration_param_name = configuration_param['name']
        if configuration_param_name == param_name:
            # Run every check independently so ALL misconfigurations are
            # collected; the original elif chain stopped at the first failing
            # check and hid the remaining problems.
            if configuration_param['display'] != param_display:
                err_msgs.append(Errors.wrong_display_name(param_name, param_display))
            if configuration_param.get('defaultvalue', '') not in ('false', ''):
                err_msgs.append(Errors.wrong_default_parameter(param_name))
            if configuration_param.get('required', False):
                err_msgs.append(Errors.wrong_required_value(param_name))
            if configuration_param.get('type') != 8:
                err_msgs.append(Errors.wrong_required_type(param_name))
    if err_msgs:
        print_error(
            '{} Received the following error for {} validation:\n{}'.
            format(self.file_path, param_name, '\n'.join(err_msgs)))
        self.is_valid = False
        return False
    return True
def is_not_valid_display_configuration(self):
    """Validate display settings: non-hidden parameters must carry a display
    value, while type-17 (expiration) parameters must not.

    Returns:
        bool. True when an invalid display configuration was found.
    """
    for param in self.current_file.get('configuration', []):
        display_value = param.get('display')
        hidden = param.get('hidden', False)
        # Expiration-type parameters never use the display value.
        if param['type'] == self.EXPIRATION_FIELD_TYPE:
            if display_value:
                print_error(Errors.not_used_display_name(self.file_path, param['name']))
                self.is_valid = False
                return True
        elif not hidden and not display_value:
            print_error(Errors.empty_display_configuration(self.file_path, param['name']))
            self.is_valid = False
            return True
    return False
def validate_all_files(self): """Validate all files in the repo are in the right format.""" # go over packs for root, dirs, _ in os.walk(PACKS_DIR): for dir_in_dirs in dirs: for directory in PACKS_DIRECTORIES: for inner_root, inner_dirs, files in os.walk( os.path.join(root, dir_in_dirs, directory)): for inner_dir in inner_dirs: if inner_dir.startswith('.'): continue project_dir = os.path.join(inner_root, inner_dir) _, file_path = get_yml_paths_in_dir( os.path.normpath(project_dir), Errors.no_yml_file(project_dir)) if file_path: print("Validating {}".format(file_path)) structure_validator = StructureValidator( file_path) if not structure_validator.is_valid_scheme(): self._is_valid = False # go over regular content entities for directory in DIR_LIST_FOR_REGULAR_ENTETIES: print_color('Validating {} directory:'.format(directory), LOG_COLORS.GREEN) for root, dirs, files in os.walk(directory): for file_name in files: file_path = os.path.join(root, file_name) # skipping hidden files if not file_name.endswith('.yml'): continue print('Validating ' + file_name) structure_validator = StructureValidator(file_path) if not structure_validator.is_valid_scheme(): self._is_valid = False # go over regular PACKAGE_SUPPORTING_DIRECTORIES entities for directory in PACKAGE_SUPPORTING_DIRECTORIES: for root, dirs, files in os.walk(directory): for inner_dir in dirs: if inner_dir.startswith('.'): continue project_dir = os.path.join(root, inner_dir) _, file_path = get_yml_paths_in_dir( project_dir, Errors.no_yml_file(project_dir)) if file_path: print('Validating ' + file_path) structure_validator = StructureValidator(file_path) if not structure_validator.is_valid_scheme(): self._is_valid = False
def is_outputs_for_reputations_commands_valid(self):
    # type: () -> bool
    """Check if a reputation command (domain/email/file/ip/url) has the correct DBotScore outputs
    according to the context standard
    https://github.com/demisto/content/blob/master/docs/context_standards/README.MD

    Returns:
        bool. Whether a reputation command holds valid outputs
    """
    context_standard = "https://github.com/demisto/content/blob/master/docs/context_standards/README.MD"
    commands = self.current_file.get('script', {}).get('commands', [])
    output_for_reputation_valid = True
    for command in commands:
        command_name = command.get('name')
        # look for reputations commands
        if command_name in BANG_COMMAND_NAMES:
            # collect the command's declared context paths and descriptions
            context_outputs_paths = set()
            context_outputs_descriptions = set()
            for output in command.get('outputs', []):
                context_outputs_paths.add(output.get('contextPath'))
                context_outputs_descriptions.add(output.get('description'))
            # validate DBotScore outputs and descriptions
            missing_outputs = set()
            missing_descriptions = set()
            for dbot_score_output in DBOT_SCORES_DICT:
                if dbot_score_output not in context_outputs_paths:
                    # missing DBotScore path fails the validation
                    missing_outputs.add(dbot_score_output)
                    self.is_valid = False
                    output_for_reputation_valid = False
                else:
                    # DBot Score output path is in the outputs
                    if DBOT_SCORES_DICT.get(dbot_score_output) not in context_outputs_descriptions:
                        missing_descriptions.add(dbot_score_output)
                        # self.is_valid = False - Do not fail build over wrong description
            if missing_outputs:
                print_error(Errors.dbot_invalid_output(
                    self.file_path, command_name, missing_outputs, context_standard))
            if missing_descriptions:
                # descriptions only warn; they do not fail the validation
                print_warning(Errors.dbot_invalid_description(
                    self.file_path, command_name, missing_descriptions, context_standard))
            # validate the IOC output: at least one expected path must exist
            reputation_output = IOC_OUTPUTS_DICT.get(command_name)
            if reputation_output and not reputation_output.intersection(context_outputs_paths):
                self.is_valid = False
                output_for_reputation_valid = False
                print_error(Errors.missing_reputation(
                    self.file_path, command_name,
                    reputation_output, context_standard))
    return output_for_reputation_valid
def is_valid_version(self):
    # type: () -> bool
    """Return True when commonfields.version equals DEFAULT_VERSION."""
    version = self.current_file.get("commonfields", {}).get('version')
    if version != self.DEFAULT_VERSION:
        self.is_valid = False
        print_error(Errors.wrong_version(self.file_path))
        return False
    return True
def get_common_server_python(self) -> bool:
    """Download CommonServerPython into the project directory if it is missing.

    Sets self.common_server_created to True when a file was downloaded.

    Returns:
        bool. True if the file exists or was created, else False.
    """
    target_path = os.path.join(self.project_dir, self.common_server_target_path)
    # If the CommonServerPython file is already present, nothing to do.
    if not os.path.isfile(target_path):
        # Get file from git
        try:
            # NOTE(review): verify=False disables TLS certificate checks —
            # kept as-is for backwards compatibility, but worth revisiting.
            res = requests.get(self.common_server_remote_path, verify=False)
            # Fail on HTTP errors (e.g. 404): without this, the error page
            # body was written into the target file and success was reported.
            res.raise_for_status()
            with open(target_path, "w+") as f:
                f.write(res.text)
            self.common_server_created = True
        except requests.exceptions.RequestException:
            # raise_for_status raises HTTPError, a RequestException subclass,
            # so both network and HTTP failures land here.
            print_error(
                Errors.no_common_server_python(
                    self.common_server_remote_path))
            return False
    return True
def _name_has_no_beta_substring(self):
    # type: () -> bool
    """Check that the 'name' field does not include the substring 'beta'."""
    integration_name = self.current_file.get('name', '')
    if 'beta' not in integration_name.lower():
        return True
    print_error(Errors.beta_in_name(self.file_path))
    return False
def _has_beta_param(self):
    # type: () -> bool
    """Check that the integration declares a truthy 'beta' field."""
    if self.current_file.get('beta', False):
        return True
    print_error(Errors.beta_field_not_found(self.file_path))
    return False
def _is_display_contains_beta(self):
    # type: () -> bool
    """Check that the 'display' field includes the substring 'beta'."""
    display_name = self.current_file.get('display', '')
    if 'beta' in display_name.lower():
        return True
    print_error(Errors.no_beta_in_display(self.file_path))
    return False
def is_valid_feed(self):
    # type: () -> bool
    """Feed integrations must declare a fromversion of at least 5.5.0."""
    if not self.current_file.get("feed"):
        # Not a feed integration - nothing to validate.
        return True
    from_version = self.current_file.get("fromversion", "0.0.0")
    if not from_version or server_version_compare("5.5.0", from_version) == 1:
        print_error(Errors.feed_wrong_from_version(self.file_path, from_version))
        return False
    return True
def is_valid_category(self):
    # type: () -> bool
    """Check that the integration category is one of the known categories."""
    category = self.current_file.get('category', None)
    if category in INTEGRATION_CATEGORIES:
        return True
    self.is_valid = False
    print_error(Errors.wrong_category(self.file_path, category))
    return False
def _id_has_no_beta_substring(self):
    # type: () -> bool
    """Check that the commonfields 'id' does not include the substring 'beta'."""
    integration_id = self.current_file.get('commonfields', {}).get('id', '')
    if 'beta' not in integration_id.lower():
        return True
    print_error(Errors.beta_in_id(self.file_path))
    return False
def is_valid_subtype(self):
    """Validate that a python integration declares subtype python2 or python3."""
    if self.current_file.get('type') != 'python':
        # Only python content carries a subtype.
        return True
    if self.current_file.get('subtype') in PYTHON_SUBTYPES:
        return True
    print_error(Errors.wrong_subtype(self.file_path))
    return False
def is_added_required_fields(self):
    # type: () -> bool
    """Check whether any field was added as required or became required."""
    current_required = self._get_field_to_required_dict(self.current_file)
    old_required = self._get_field_to_required_dict(self.old_file)
    added_required = False
    for field, required in current_required.items():
        if not required:
            continue
        # Newly-added required field, or an existing field that turned required.
        if field not in old_required or old_required[field] != required:
            print_error(Errors.added_required_fields(self.file_path, field))
            self.is_valid = False
            added_required = True
    return added_required
def is_valid_version(self):
    # type: () -> bool
    """Return whether the layout's version equals DEFAULT_VERSION.

    Returns:
        True if version is valid, else False.
    """
    layout_version = self.current_file.get('layout', {}).get('version')
    if layout_version == self.DEFAULT_VERSION:
        return True
    print_error(Errors.wrong_version(self.file_path, self.DEFAULT_VERSION))
    return False
def merge_script_package_to_yml(self):
    """Merge the various components to create an output yml file

    Combines the package's yml, script code, and (for integrations) image and
    description into a single unified yml written via write_yaml_with_docker.

    Returns:
        tuple: (list of output file paths, yml_path, script_path, image_path, desc_path)
    """
    print("Merging package: {}".format(self.package_path))
    if self.package_path.endswith('/'):
        self.package_path = self.package_path.rstrip('/')
    package_dir_name = os.path.basename(self.package_path)
    output_filename = '{}-{}.yml'.format(DIR_TO_PREFIX[self.dir_name], package_dir_name)
    if self.dest_path:
        self.dest_path = os.path.join(self.dest_path, output_filename)
    else:
        self.dest_path = os.path.join(self.dir_name, output_filename)
    yml_paths, yml_path = get_yml_paths_in_dir(
        self.package_path, Errors.no_yml_file(self.package_path))
    for path in yml_paths:
        # The plugin creates a unified YML file for the package.
        # In case this script runs locally and there is a unified YML file in the package we need to ignore it.
        # Also,
        # we don't take the unified file by default because
        # there might be packages that were not created by the plugin.
        if 'unified' not in path:
            yml_path = path
            break
    with open(yml_path, 'r') as yml_file:
        yml_data = yaml.safe_load(yml_file)
    script_obj = yml_data
    # For non-script content the script definition is nested under 'script'.
    if self.dir_name != SCRIPTS_DIR:
        script_obj = yml_data['script']
    script_type = TYPE_TO_EXTENSION[script_obj['type']]
    # Re-read the raw text so insertions preserve the original formatting.
    with io.open(yml_path, mode='r', encoding='utf-8') as yml_file:
        yml_text = yml_file.read()
    yml_text, script_path = self.insert_script_to_yml(
        script_type, yml_text, yml_data)
    image_path = None
    desc_path = None
    # Only integrations carry an image and a detailed description.
    if self.dir_name in (INTEGRATIONS_DIR, BETA_INTEGRATIONS_DIR):
        yml_text, image_path = self.insert_image_to_yml(yml_data, yml_text)
        yml_text, desc_path = self.insert_description_to_yml(
            yml_data, yml_text)
    output_map = self.write_yaml_with_docker(yml_text, yml_data, script_obj)
    unifier_outputs = list(
        output_map.keys()), yml_path, script_path, image_path, desc_path
    print_color("Created unified yml: {}".format(unifier_outputs[0][0]),
                LOG_COLORS.GREEN)
    return unifier_outputs
def is_arg_changed(self):
    # type: () -> bool
    """Report whether a script argument was removed or renamed."""
    current_arg_names = [arg['name'] for arg in self.current_file.get('args', [])]
    previous_arg_names = [arg['name'] for arg in self.old_file.get('args', [])]
    if self._is_sub_set(current_arg_names, previous_arg_names):
        return False
    print_error(Errors.breaking_backwards_arg_changed(self.file_path))
    return True
def is_docker_image_changed(self):
    """Report whether the Docker image was modified (pre-5.0.0 content only)."""
    # Docker image changes are only checked for content below server 5.0.0.
    if server_version_compare(self.old_file.get('fromversion', '0'), '5.0.0') >= 0:
        return False
    previous_image = get_dockerimage45(self.old_file.get('script', {}))
    current_image = get_dockerimage45(self.current_file.get('script', {}))
    if previous_image != current_image:
        print_error(Errors.breaking_backwards_docker(self.file_path, previous_image, current_image))
        self.is_valid = False
        return True
    return False
def is_changed_subtype(self):
    """Validate that a python integration's subtype was not changed."""
    if self.current_file.get('type') != 'python':
        return False
    if not self.old_file:
        # Nothing to compare against.
        return False
    current_subtype = self.current_file.get('subtype')
    previous_subtype = self.old_file.get('subtype', "")
    if previous_subtype and previous_subtype != current_subtype:
        print_error(Errors.breaking_backwards_subtype(self.file_path))
        return True
    return False
def _is_valid_version(self):
    # type: () -> bool
    """Base is_valid_version method for files whose version sits at the root.

    Return:
        True if version is valid, else False
    """
    if self.current_file.get('version') == self.DEFAULT_VERSION:
        return True
    print_error(Errors.wrong_version(self.file_path, self.DEFAULT_VERSION))
    self.is_valid = False
    return False
def is_file_id_without_slashes(self):
    # type: () -> bool
    """Check that the file's ID contains no slashes ('/').

    Returns:
        bool. True when the ID is missing or slash-free, else False.
    """
    file_id = self.get_file_id_from_loaded_file_data(self.current_file)
    if not file_id or '/' not in file_id:
        return True
    self.is_valid = False
    print_error(Errors.file_id_contains_slashes())
    return False
def is_docker_image_changed(self):
    # type: () -> bool
    """Report whether the docker image changed (pre-5.0.0 content only)."""
    # Docker image changes are only checked for content below server 5.0.0.
    if server_version_compare(self.old_file.get('fromversion', '0'), '5.0.0') >= 0:
        return False
    previous_image = get_dockerimage45(self.old_file)
    current_image = get_dockerimage45(self.current_file)
    if previous_image != current_image:
        print_error(
            Errors.breaking_backwards_docker(self.file_path, previous_image, current_image))
        return True
    return False
def is_added_required_args(self):
    """Check whether any argument was added as required or became required."""
    current_required = self._get_arg_to_required_dict(self.current_file)
    previous_required = self._get_arg_to_required_dict(self.old_file)
    for arg_name, is_required in current_required.items():
        if not is_required:
            continue
        # New required argument, or an existing one that turned required.
        if arg_name not in previous_required or previous_required[arg_name] != is_required:
            print_error(Errors.added_required_fields(self.file_path, arg_name))
            return True
    return False
def is_valid_version(self):
    # type: () -> bool
    """Validate that every reputation entry carries version DEFAULT_VERSION (-1)."""
    all_valid = True
    for reputation in self.current_file.get('reputations', []):
        if reputation.get('version') != self.DEFAULT_VERSION:
            print_error(
                Errors.wrong_version_reputations(
                    self.file_path, reputation.get('id'), self.DEFAULT_VERSION))
            all_valid = False
            self.is_valid = False
    return all_valid
def is_context_path_changed(self):
    # type: () -> bool
    """Report whether any context path was removed or renamed."""
    current_paths = [entry['contextPath'] for entry in self.current_file.get('outputs', [])]
    previous_paths = [entry['contextPath'] for entry in self.old_file.get('outputs', [])]
    if self._is_sub_set(current_paths, previous_paths):
        return False
    print_error(Errors.breaking_backwards_context(self.file_path))
    return True
def is_changed_context_path(self):
    # type: () -> bool
    """Check whether a command's context path was removed or renamed.

    Returns:
        bool. True when a breaking context-path change was found.
    """
    current_map = self._get_command_to_context_paths(self.current_file)
    previous_map = self._get_command_to_context_paths(self.old_file)
    for command_name, previous_paths in previous_map.items():
        if command_name not in current_map:
            # Removed commands are handled by other checks.
            continue
        if not self._is_sub_set(current_map[command_name], previous_paths):
            print_error(Errors.breaking_backwards_command(self.file_path, command_name))
            self.is_valid = False
            return True
    return False
def is_changed_command_name_or_arg(self):
    # type: () -> bool
    """Check whether a command name or argument was removed or renamed.

    Returns:
        bool. True when a breaking command/argument change was found.
    """
    current_map = self._get_command_to_args(self.current_file)
    previous_map = self._get_command_to_args(self.old_file)
    for command_name, previous_args in previous_map.items():
        command_removed = command_name not in previous_map.keys() or command_name not in current_map.keys()
        if command_removed or not self.is_subset_dictionary(current_map[command_name], previous_args):
            print_error(Errors.breaking_backwards_command_arg_changed(self.file_path, command_name))
            self.is_valid = False
            return True
    return False
def is_there_duplicate_params(self):
    # type: () -> bool
    """Check if the integration defines the same configuration parameter more than once.

    Returns:
        bool. True if there are duplicates, False otherwise.
    """
    has_duplicates = False
    seen_params = []  # type: list
    for configuration_param in self.current_file.get('configuration', []):
        param_name = configuration_param['name']
        if param_name in seen_params:
            self.is_valid = False
            has_duplicates = True
            print_error(Errors.duplicate_param(self.file_path, param_name))
        else:
            seen_params.append(param_name)
    # Track duplicates with a local flag: the original `return not self.is_valid`
    # reported duplicates whenever ANY earlier validation had already failed.
    return has_duplicates
def is_there_duplicate_args(self):
    # type: () -> bool
    """Check if any command defines the same argument name more than once.

    Returns:
        bool. True if there are duplicates, False otherwise.
    """
    has_duplicates = False
    for command in self.current_file.get('script', {}).get('commands', []):
        seen_arg_names = set()  # type: set
        for arg in command.get('arguments', []):
            arg_name = arg['name']
            # Compare by name: two args with the same name but differing other
            # fields (e.g. description) are still duplicates. The original
            # compared whole dicts, letting such cases slip through.
            if arg_name in seen_arg_names:
                self.is_valid = False
                has_duplicates = True
                print_error(Errors.duplicate_arg_in_file(self.file_path, arg_name, command['name']))
            else:
                seen_arg_names.add(arg_name)
    return has_duplicates