def _read_group_vars(self):
    """
    Read all files in {base_dir}/piedpiper.d/{vars_dir}/group_vars.d/
    and return a list of variable configurations.

    all.yml (if present in a directory) is parsed first so that it is
    the first item in the list, allowing the other group_vars files to
    override the values in all.yml.

    :return: list of dicts of the form {'file': name, 'config': parsed}
    """
    group_vars_dir = f'{self.base_config.vars_dir}/group_vars.d'
    if not os.path.isdir(group_vars_dir):
        message = f'Failed to read group_vars in {self.base_config.vars_dir}.'
        util.sysexit_with_message(message)

    group_configs = []
    for root, dirs, files in os.walk(f'{group_vars_dir}/'):
        if not files:
            message = f'No group_vars found in {self.base_config.vars_dir}'
            util.sysexit_with_message(message)
        # Sort so that all.yml is parsed first (as documented above);
        # the remaining files follow in deterministic alphabetical order.
        # os.walk alone yields files in arbitrary order, which broke the
        # documented "all.yml first" contract.
        for file in sorted(files, key=lambda name: (name != 'all.yml', name)):
            with open(os.path.join(root, file)) as f:
                group_configs.append({
                    'file': file,
                    'config': util.safe_load(f.read()),
                })
    return group_configs
def _build_file_definitions(self): file_definitions = [] for config in self.config: file_list = self._build_file_list(config) file_definition_list = [] for file in file_list: if os.path.isdir(file): continue try: file_definition = { 'file': os.path.relpath(file, self.base_config.base_dir), 'styler': config['styler'] if 'styler' in config else None, 'sast': config['sast'] if 'sast' in config else None, } # Clear values that are none. file_definition = { key: value for key, value in file_definition.items() if value is not None } except KeyError as e: message = f"Invalid key found in run_vars.{self.config} " \ f"\n\n{e}" util.sysexit_with_message(message) file_definition_list.append(file_definition) file_definitions.append(file_definition_list) return [ definition for element in file_definitions for definition in element ]
def execute(self):
    """
    Execute the SAST analyzer.

    This default implementation zips every file in the
    configuration.files list and POSTs the archive to the configured
    SAST analyzer function. Exits the process on any network or HTTP
    error; otherwise logs the analyzer's response text.

    :return: None
    """
    LOG.info(f"Executing SAST analyzer: {self.name}")
    with tempfile.TemporaryDirectory() as temp_dir:
        archive = self.zip_files(temp_dir)
        with open(archive.filename, 'rb') as file:
            payload = [('files', file)]
            try:
                if self.config.debug:
                    LOG.info(f'Sending zipfile to {self.url}')
                response = requests.post(self.url, files=payload)
            except requests.exceptions.RequestException as e:
                message = f"Failed to execute SAST analyzer {self.name}. \n\n{e}"
                util.sysexit_with_message(message)
        try:
            response.raise_for_status()
        except requests.exceptions.HTTPError as e:
            message = f'Failed to execute SAST analyzer. \n\n{e}'
            util.sysexit_with_message(message)
        else:
            LOG.warn(response.text)
def _validate(self):
    """
    Validate the loaded configuration object against the schema
    defined in model/base_schema.py.

    :return: None. Exits the process if validation errors are found.
    """
    validation_errors = base_schema.validate(self.config)
    if not validation_errors:
        return
    util.sysexit_with_message(
        f"Failed to validate. \n\n{validation_errors.messages}"
    )
def piedpiper_dir(self):
    """
    Property defining the location of the piedpiper.d directory.

    :return: String path to the piedpiper.d directory. Exits the
        process if the directory does not exist.
    """
    piedpiper_dir = os.path.join(self.base_dir, 'piedpiper.d')
    # Direct use of the string: the original wrapped it in a
    # placeholder-only f-string for no effect.
    if os.path.isdir(piedpiper_dir):
        return piedpiper_dir
    message = f"Piedpiper directory doesn't exist in {piedpiper_dir}."
    util.sysexit_with_message(message)
def get_sequence(step):
    """
    Map a picli step name to the ordered list of stages to run.

    :param step: one of 'validate', 'style', 'lint', 'sast'
    :return: list of stage names. Exits the process for unknown steps.
    """
    sequences = {
        'validate': ['validate'],
        'style': ['style'],
        'lint': ['validate', 'style', 'sast'],
        'sast': ['sast'],
    }
    try:
        return sequences[step]
    except KeyError:
        util.sysexit_with_message(f"picli sequence not found for {step}")
def vars_dir(self):
    """
    Property defining the vars directory to use. By default this will
    be {base_dir}/piedpiper.d/default_vars.d

    :return: String path to the vars directory. Exits the process if
        the directory does not exist.
    """
    # NOTE(review): the two components are concatenated directly, so
    # global_vars['vars_dir'] is expected to begin with a path
    # separator (or piedpiper_dir to end with one) — confirm callers.
    vars_dir = self.piedpiper_dir + self.global_vars['vars_dir']
    if os.path.isdir(vars_dir):
        return vars_dir
    # BUG FIX: the original adjacent f-strings produced
    # "...exist in X.You gave Y." with no space between sentences.
    message = (
        f"Piedpiper vars directory doesn't exist in {self.piedpiper_dir}. "
        f"You gave {self.global_vars['vars_dir']}."
    )
    util.sysexit_with_message(message)
def _read_config(self, config):
    """
    Read the pi_global_vars configuration file and return the parsed
    YAML object.

    :param config: Path to the configuration file
    :return: parsed YAML object. Exits the process on read failure.
    """
    try:
        with open(config) as config_file:
            parsed = util.safe_load(config_file)
    except IOError as e:
        util.sysexit_with_message(f"Failed to parse config. \n\n{e}")
    else:
        return parsed
def _build_pipe_config(self):
    """
    Read pipe_vars.d for the configuration file for the pipe.

    Each child class has its own pi_{self.name}.yml file located in
    {vars_dir}/pipe_vars.d/ which is read during creation of the
    child class object.

    :return: Configuration dictionary for the pipe. Exits the process
        if the file cannot be read.
    """
    pipe_vars_file = (
        f'{self.base_config.vars_dir}/pipe_vars.d/pi_{self.name}.yml'
    )
    try:
        with open(pipe_vars_file) as config:
            return util.safe_load(config)
    except IOError as e:
        message = f"Failed to parse pi_{self.name}.yml. \n\n{e}"
        util.sysexit_with_message(message)
def zip_files(self, destination):
    """
    Create a zipfile containing the run variables of PiCli.

    :param destination: Directory to write the zipfile to
    :return: The (closed) ZipFile; its ``.filename`` attribute points
        at the archive on disk. Exits the process on failure.
    """
    try:
        # Context manager guarantees the archive is closed (and its
        # central directory flushed) even if writestr fails; the
        # original leaked an open handle on error.
        with zipfile.ZipFile(f'{destination}/validation.zip', 'w',
                             zipfile.ZIP_DEFLATED) as zip_file:
            if self.pipe_config.debug:
                LOG.info('Writing run_vars.yml to zip')
            zip_file.writestr("run_vars.yml", self.pipe_config.dump_configs())
        return zip_file
    except Exception as e:
        # Broad catch is deliberate: any failure while zipping aborts
        # the run with a clear message rather than a traceback.
        message = f"Zipping failed in validator. \n\n{e}"
        util.sysexit_with_message(message)
def _read_file_vars(self): """ Read all files in {base_dir}/piedpiper.d/{vars_dir}/files_vars.d/ :return: iterator """ if os.path.isdir(f'{self.base_config.vars_dir}/file_vars.d/'): for root, dirs, files in os.walk( f'{self.base_config.vars_dir}/file_vars.d/' ): for file in files: file_name = os.path.join(root, file) with open(file_name) as f: file_config = f.read() yield (file_config, file_name) else: message = f"Failed to read file_vars.d in {self.base_config.vars_dir}/file_vars.d/." util.sysexit_with_message(message)
def execute(self):
    """
    Execute the validator.

    Zips the run variables, POSTs the archive to the validator
    endpoint, and parses the returned results. Exits the process on
    any network or HTTP error.

    :return: None
    """
    with tempfile.TemporaryDirectory() as temp_dir:
        zip_file = self.zip_files(temp_dir)
        # BUG FIX: the zip handle was opened with a bare open() and
        # never closed; 'with' closes it once the upload completes
        # (matching the SAST analyzer's execute()).
        with open(zip_file.filename, 'rb') as file:
            files = [('files', file)]
            try:
                if self.pipe_config.debug:
                    LOG.info(f'Sending zipfile to {self.url}')
                r = requests.post(self.url, files=files)
            except requests.exceptions.RequestException as e:
                message = f'Failed to execute validator. \n\n{e}'
                util.sysexit_with_message(message)
        try:
            r.raise_for_status()
        except requests.exceptions.HTTPError as e:
            message = f'Failed to execute validator. \n\n{e}'
            util.sysexit_with_message(message)
        else:
            results = r.json()
            self._parse_results(results)
def _parse_results(self, results):
    """
    Parse the results returned from the execute request.

    Collects every stage result that reported errors; when any are
    found, aborts if policy enforcement is on, otherwise logs them as
    a warning. (A stage result appears once per errored value, as in
    the original implementation.)

    :param results: JSON object from execute
    :return: None
    """
    failed = [
        stage_result
        for result in results.values()
        for stage_result in result
        for value in stage_result.values()
        if value['errors']
    ]
    if failed:
        report = json.dumps(failed, indent=4)
        if self.pipe_config.policy_enforcing:
            util.sysexit_with_message(report)
        else:
            LOG.warn(report)
    LOG.success("Validation completed successfully.")
def _build_file_list(self, group): """ Build a list of files based on the glob pattern given in group_vars.d/{pipe}. The glob will be applied to a path relative to the specified vars directory. :return: """ try: file_glob = group['name'] except KeyError as e: message = f'Invalid group_vars file found. \n{e}' util.sysexit_with_message(message) file_list = \ glob.glob(f'{self.base_config.base_dir}/{file_glob}', recursive=True) if not file_list: message = \ f'File Glob {file_glob} returned nothing ' \ f'in {self.base_config.base_dir}' LOG.warn(message) return file_list
def _build_group_configs(self): """ Performs the merging of variables defined in file_vars with those found in group_vars. The inspiration for this functionality was taken from Ansible's group_vars and file_vars. If a file definition exists in file_vars.d/ that also exists in a group_vars RunConfig, we overwrite the group_vars RunConfig variable with the one found in file_vars. :return: list """ group_configs = [] for group in self._read_group_vars(): for step, config in group.items(): if step == f'pi_{self.name}': run_config = RunConfig(config, self.base_config) for file_definition in run_config.files: for file, file_name in self._read_file_vars(): file_config = util.safe_load(file) try: if file_definition['file'] == file_config['file']: file_definition.update(file_config) except KeyError as e: message = f'Invalid file_vars config in {file_name}. \n\nInvalid Key: {e}' util.sysexit_with_message(message) group_configs.append(run_config) elif self.name == 'validate': run_config = RunConfig(config, self.base_config) for file_definition in run_config.files: for file, file_name in self._read_file_vars(): file_config = util.safe_load(file) try: if file_definition['file'] == file_config['file']: file_definition.update(file_config) except KeyError as e: message = f'Invalid file_vars config in {file_name}. \n\nInvalid Key: {e}' util.sysexit_with_message(message) group_configs.append(run_config) return group_configs
def _read_file_vars(self): """ Read all files in {base_dir}/piedpiper.d/{vars_dir}/files_vars.d/ :return: iterator """ if os.path.isdir(f'{self.base_config.vars_dir}/file_vars.d/'): for root, dirs, files in os.walk( f'{self.base_config.vars_dir}/file_vars.d/'): for file in files: if file.endswith(".yml") or file.endswith(".yaml"): file_name = os.path.join(root, file) with open(file_name) as f: file_config = f.read() yield (file_config, file_name) else: message = f"Skipping invalid file_vars.d file " \ f"{os.path.join(root,file)}" LOG.debug(message) else: message = f"Failed to read file_vars.d in" \ f"{self.base_config.vars_dir}/file_vars.d/." util.sysexit_with_message(message)
def _validate(self):
    """
    Validate the pipe configuration against sast_pipeconfig_schema.

    :return: None. Exits the process if validation errors are found.
    """
    validation_errors = sast_pipeconfig_schema.validate(self.pipe_config)
    if not validation_errors:
        return
    util.sysexit_with_message(
        f"Failed to validate SAST Pipe Config. \n\n{validation_errors.messages}"
    )