def run(self, root_folder, external_checks_dir=None, files=None, runner_filter=None, collect_skip_comments=True):
    """Parse Terraform sources and run all registered checks over them.

    :param root_folder: directory to parse recursively; may be falsy when only
        explicit files are given.
    :param external_checks_dir: optional list of directories containing extra
        checks to register before scanning.
    :param files: optional list of individual file paths; only ``.tf`` files
        are parsed.
    :param runner_filter: RunnerFilter controlling module download/variable
        evaluation options; a fresh instance is created per call when omitted.
    :param collect_skip_comments: forwarded to ``check_tf_definition``.
    :return: the populated Report (including any parsing errors).
    """
    # Fix: the original signature used `runner_filter=RunnerFilter()`, a mutable
    # default evaluated once and shared across every call. Create per call instead.
    if runner_filter is None:
        runner_filter = RunnerFilter()
    report = Report(self.check_type)
    self.tf_definitions = {}
    parsing_errors = {}
    if external_checks_dir:
        for directory in external_checks_dir:
            resource_registry.load_external_checks(directory, runner_filter)
    if root_folder:
        root_folder = os.path.abspath(root_folder)
        self.parser.parse_directory(
            directory=root_folder,
            out_definitions=self.tf_definitions,
            out_evaluations_context=self.evaluations_context,
            out_parsing_errors=parsing_errors,
            download_external_modules=runner_filter.download_external_modules,
            external_modules_download_path=runner_filter.external_modules_download_path,
            evaluate_variables=runner_filter.evaluate_variables,
        )
        self.check_tf_definition(report, root_folder, runner_filter, collect_skip_comments)
    if files:
        files = [os.path.abspath(file) for file in files]
        # os.path.commonprefix is character-based; os.path.split drops any
        # partial final component so root_folder is a usable directory prefix.
        root_folder = os.path.split(os.path.commonprefix(files))[0]
        for file in files:
            if file.endswith(".tf"):
                file_parsing_errors = {}
                self.tf_definitions[file] = self.parser.parse_file(file=file, parsing_errors=file_parsing_errors)
                if file_parsing_errors:
                    # Record per-file parsing errors in the aggregate map.
                    # NOTE(review): the original ended this branch with a redundant
                    # `continue` (it was the last statement of the loop body); removed.
                    parsing_errors.update(file_parsing_errors)
        self.check_tf_definition(report, root_folder, runner_filter, collect_skip_comments)
    report.add_parsing_errors(parsing_errors.keys())
    return report
def run(self, root_folder, external_checks_dir=None, files=None, runner_filter=None, collect_skip_comments=True):
    """Parse Terraform sources (directory and/or explicit files) and scan them.

    :param root_folder: directory to parse via the HCL2 parser; may be falsy.
    :param external_checks_dir: optional list of directories with extra checks.
    :param files: optional list of file paths; only ``.tf`` files are parsed.
    :param runner_filter: RunnerFilter passed to check loading/scanning; a new
        instance is created per call when omitted.
    :param collect_skip_comments: forwarded to ``check_tf_definition``.
    :return: the populated Report, including parsing errors.
    """
    # Fix: the original signature used `runner_filter=RunnerFilter()`, a mutable
    # default shared across calls; instantiate a fresh filter per call instead.
    if runner_filter is None:
        runner_filter = RunnerFilter()
    report = Report(self.check_type)
    self.tf_definitions = {}
    parsing_errors = {}
    if external_checks_dir:
        for directory in external_checks_dir:
            resource_registry.load_external_checks(directory, runner_filter)
    if root_folder:
        root_folder = os.path.abspath(root_folder)
        self.parser.hcl2(directory=root_folder, tf_definitions=self.tf_definitions, parsing_errors=parsing_errors)
        self.check_tf_definition(report, root_folder, runner_filter, collect_skip_comments)
    if files:
        files = [os.path.abspath(file) for file in files]
        # Character-level common prefix; os.path.split trims a partial last component.
        root_folder = os.path.split(os.path.commonprefix(files))[0]
        for file in files:
            if file.endswith(".tf"):
                self.tf_definitions[file] = self.parser.parse_file(file=file, parsing_errors=parsing_errors)
        self.check_tf_definition(report, root_folder, runner_filter, collect_skip_comments)
    report.add_parsing_errors(parsing_errors.keys())
    return report
def run(self, root_folder, external_checks_dir=None, files=None):
    """Scan a Terraform root folder and/or a list of individual files.

    Each explicit file is parsed into its own definitions map and checked
    separately; directory scans parse everything into one shared map.
    Returns the populated Report with any parsing errors attached.
    """
    report = Report()
    definitions = {}
    errors = {}
    # Register any user-supplied external checks first.
    for checks_dir in external_checks_dir or []:
        resource_registry.load_external_checks(checks_dir)
    if root_folder:
        Parser().hcl2(directory=root_folder, tf_definitions=definitions, parsing_errors=errors)
        self.check_tf_definition(report, root_folder, definitions)
    if files:
        # NOTE(review): char-based common prefix; presumably acceptable because
        # it is only used to derive the displayed relative path — confirm.
        root_folder = os.path.commonprefix(files)
        for path in files:
            per_file_definitions = {}
            Parser().parse_file(file=path, tf_definitions=per_file_definitions, parsing_errors=errors)
            self.check_tf_definition(report, root_folder, per_file_definitions)
    report.add_parsing_errors(errors.keys())
    return report
def run(self, root_folder, external_checks_dir=None, file=None):
    """Scan either a single Terraform file or a whole directory.

    When ``file`` is given it is parsed alone and ``root_folder`` is derived
    from its location; otherwise ``root_folder`` is parsed recursively.
    Resource and data blocks are dispatched to ``run_block``.
    """
    report = Report()
    definitions = {}
    errors = {}
    # Register any user-supplied external checks first.
    for checks_dir in external_checks_dir or []:
        resource_registry.load_external_checks(checks_dir)
    if file:
        Parser().parse_file(file=file, tf_definitions=definitions, parsing_errors=errors)
        root_folder = os.path.dirname(file)
    else:
        Parser().hcl2(directory=root_folder, tf_definitions=definitions, parsing_errors=errors)
    report.add_parsing_errors(errors.keys())
    for full_file_path, blocks in definitions.items():
        definition_context = parser_registry.enrich_definitions_context((full_file_path, blocks))
        # Path relative to the scanned root, for display in the report.
        scanned_file = full_file_path.split(root_folder)[1]
        logging.debug("Scanning file: %s", scanned_file)
        for block_type, block_conf in blocks.items():
            if block_type in ('resource', 'data'):
                self.run_block(block_conf, definition_context, full_file_path, report, scanned_file, block_type)
    return report
def run(self, root_folder, external_checks_dir=None):
    """Parse every Terraform file under ``root_folder`` and scan all resources.

    For each ``resource`` block found, runs the registered checks and adds a
    Record per result to the report. Returns the populated Report.
    """
    report = Report()
    definitions = {}
    errors = {}
    # Register any user-supplied external checks first.
    for checks_dir in external_checks_dir or []:
        resource_registry.load_external_checks(checks_dir)
    Parser().hcl2(directory=root_folder, tf_definitions=definitions, parsing_errors=errors)
    report.add_parsing_errors(errors.keys())
    for full_file_path, blocks in definitions.items():
        definition_context = parser_registry.enrich_definitions_context((full_file_path, blocks))
        # Path relative to the scanned root, for display in the report.
        scanned_file = full_file_path.split(root_folder)[1]
        logging.debug("Scanning file: %s", scanned_file)
        for resource in blocks.get('resource', []):
            # Each resource dict has a single type key wrapping a single name key.
            resource_type = next(iter(resource))
            resource_name = next(iter(resource[resource_type]))
            resource_id = "{}.{}".format(resource_type, resource_name)
            context = definition_context[full_file_path][resource_type][resource_name]
            lines_range = [context['start_line'], context['end_line']]
            code_lines = context['code_lines']
            skipped_checks = context.get('skipped_checks')
            results = resource_registry.scan(resource, scanned_file, skipped_checks)
            for check, check_result in results.items():
                report.add_record(record=Record(
                    check_id=check.id,
                    check_name=check.name,
                    check_result=check_result,
                    code_block=code_lines,
                    file_path=scanned_file,
                    file_line_range=lines_range,
                    resource=resource_id,
                    check_class=check.__class__.__module__,
                ))
    return report
def run(self, root_folder=None, external_checks_dir=None, files=None, runner_filter=None, collect_skip_comments=True):
    """Scan Terraform plan JSON files and run the registered checks.

    :param root_folder: unused here; kept for interface parity with other runners.
    :param external_checks_dir: optional list of directories with extra checks.
    :param files: list of ``.json`` plan files to scan; non-JSON entries are skipped.
    :param runner_filter: RunnerFilter passed to check loading/scanning; a new
        instance is created per call when omitted.
    :param collect_skip_comments: unused here; kept for interface parity.
    :return: the populated Report.
    """
    # Fix: the original signature used `runner_filter=RunnerFilter()`, a mutable
    # default shared across all calls; create a fresh instance per call instead.
    if runner_filter is None:
        runner_filter = RunnerFilter()
    report = Report(self.check_type)
    self.tf_definitions = {}
    parsing_errors = {}
    if external_checks_dir:
        for directory in external_checks_dir:
            resource_registry.load_external_checks(directory, runner_filter)
    if files:
        files = [os.path.abspath(file) for file in files]
        for file in files:
            if file.endswith(".json"):
                # Each plan file replaces self.tf_definitions / self.template_lines
                # before being checked; only the last file's data remains afterwards.
                tf_definitions, template_lines = parse_tf_plan(file)
                self.tf_definitions = tf_definitions
                self.template_lines = template_lines
                self.check_tf_definition(report, runner_filter)
    # parsing_errors is never populated in this runner; kept for interface parity.
    report.add_parsing_errors(parsing_errors.keys())
    return report
def run(self, root_folder=None, external_checks_dir=None, files=None, runner_filter=None, collect_skip_comments=True):
    """Discover and scan Terraform plan JSON files.

    Walks ``root_folder`` (when given) collecting ``.json`` files that look like
    Terraform plan output, merges them with any explicitly passed ``files``, and
    runs the registered checks over each parsed plan.

    :param runner_filter: RunnerFilter passed to check loading/scanning; a new
        instance is created per call when omitted.
    :return: the populated Report.
    """
    # Fix: the original signature used `runner_filter=RunnerFilter()`, a mutable
    # default shared across all calls; create a fresh instance per call instead.
    if runner_filter is None:
        runner_filter = RunnerFilter()
    report = Report(self.check_type)
    self.tf_definitions = {}
    parsing_errors = {}
    if external_checks_dir:
        for directory in external_checks_dir:
            resource_registry.load_external_checks(directory, runner_filter)
    if root_folder:
        files = [] if not files else files
        for root, d_names, f_names in os.walk(root_folder):
            for file in f_names:
                file_ending = os.path.splitext(file)[1]
                if file_ending == '.json':
                    try:
                        with open(f'{root}/{file}') as f:
                            content = json.load(f)
                            # Heuristic: only plan output carries a top-level
                            # 'terraform_version' key.
                            if isinstance(content, dict) and content.get('terraform_version'):
                                files.append(os.path.join(root, file))
                    except Exception as e:
                        # Best-effort discovery: unreadable/invalid JSON is
                        # skipped rather than failing the whole scan.
                        logging.debug(f'Failed to load json file {root}/{file}, skipping')
                        logging.debug('Failure message:')
                        logging.debug(e, stack_info=True)
    if files:
        files = [os.path.abspath(file) for file in files]
        for file in files:
            if file.endswith(".json"):
                tf_definitions, template_lines = parse_tf_plan(file)
                if not tf_definitions:
                    continue
                # Each plan replaces the runner state before being checked;
                # only the last file's definitions remain afterwards.
                self.tf_definitions = tf_definitions
                self.template_lines = template_lines
                self.check_tf_definition(report, runner_filter)
            else:
                logging.debug(f'Failed to load {file} as is not a .json file, skipping')
    # parsing_errors is never populated in this runner; kept for interface parity.
    report.add_parsing_errors(parsing_errors.keys())
    return report
def load_external_checks(self, external_checks_dir: List[str]):
    """Register custom checks from each directory with both registries.

    Every directory in ``external_checks_dir`` is loaded into the resource
    registry and the graph registry; a falsy argument is a no-op.
    """
    if not external_checks_dir:
        return
    for checks_dir in external_checks_dir:
        resource_registry.load_external_checks(checks_dir)
        graph_registry.load_external_checks(checks_dir)
def run(self, root_folder: Optional[str] = None, external_checks_dir: Optional[List[str]] = None,
        files: Optional[List[str]] = None, runner_filter: Optional[RunnerFilter] = None,
        collect_skip_comments: bool = True) -> Report:
    """Discover, scan, and graph-check Terraform plan JSON files.

    Walks ``root_folder`` (respecting ``runner_filter.excluded_paths``) for
    JSON files that look like Terraform plan output, merges them with any
    explicit ``files``, runs resource checks per plan, then builds a graph
    from the last parsed definitions and merges the graph-check report.

    :param runner_filter: RunnerFilter controlling excluded paths and check
        selection; a new instance is created per call when omitted.
    :return: the merged Report.
    """
    # Fix: the original annotation/default `runner_filter: RunnerFilter = RunnerFilter()`
    # is a mutable default shared across all calls; create one per call instead.
    if runner_filter is None:
        runner_filter = RunnerFilter()
    report = Report(self.check_type)
    self.tf_definitions = {}
    parsing_errors = {}
    if external_checks_dir:
        for directory in external_checks_dir:
            resource_registry.load_external_checks(directory)
            self.graph_registry.load_external_checks(directory)
    if root_folder:
        files = [] if not files else files
        for root, d_names, f_names in os.walk(root_folder):
            # Prune excluded directories/files in place before inspection.
            filter_ignored_paths(root, d_names, runner_filter.excluded_paths)
            filter_ignored_paths(root, f_names, runner_filter.excluded_paths)
            for file in f_names:
                file_ending = os.path.splitext(file)[1]
                if file_ending == '.json':
                    try:
                        with open(f'{root}/{file}') as f:
                            content = json.load(f)
                            # Heuristic: plan output carries 'terraform_version'.
                            if isinstance(content, dict) and content.get('terraform_version'):
                                files.append(os.path.join(root, file))
                    except Exception as e:
                        # Best-effort discovery: invalid JSON is skipped, not fatal.
                        logging.debug(f'Failed to load json file {root}/{file}, skipping')
                        logging.debug('Failure message:')
                        logging.debug(e, stack_info=True)
    if files:
        files = [os.path.abspath(file) for file in files]
        for file in files:
            if file.endswith(".json"):
                tf_definitions, template_lines = parse_tf_plan(file)
                if not tf_definitions:
                    continue
                # Each plan replaces the runner state before being checked;
                # only the last file's definitions remain afterwards.
                self.tf_definitions = tf_definitions
                self.template_lines = template_lines
                self.check_tf_definition(report, runner_filter)
            else:
                logging.debug(f'Failed to load {file} as is not a .json file, skipping')
    # parsing_errors is never populated in this runner; kept for interface parity.
    report.add_parsing_errors(parsing_errors.keys())
    if self.tf_definitions:
        # Graph checks run on the (last) parsed definitions, without variable
        # rendering — plan JSON values are already resolved.
        graph = self.graph_manager.build_graph_from_definitions(self.tf_definitions, render_variables=False)
        self.graph_manager.save_graph(graph)
        graph_report = self.get_graph_checks_report(root_folder, runner_filter)
        merge_reports(report, graph_report)
    return report