def test_blocks_from_local_graph_module(self):
    """Verify resource and module-call block counts parsed from the stacks module tree."""
    stacks_dir = os.path.realpath(os.path.join(TEST_DIRNAME, '../resources/modules/stacks'))
    module, _ = Parser().parse_hcl_module(stacks_dir, self.source)

    def count_blocks(block_type, name):
        # Count parsed blocks matching both type and fully-qualified name.
        return sum(1 for block in module.blocks
                   if block.block_type == block_type and block.name == name)

    self.assertEqual(count_blocks(BlockType.RESOURCE, 'aws_s3_bucket.inner_s3'), 3)
    self.assertEqual(count_blocks(BlockType.MODULE, 'inner_module_call'), 3)
    self.assertEqual(count_blocks(BlockType.MODULE, 's3'), 3)
    self.assertEqual(count_blocks(BlockType.MODULE, 'sub-module'), 1)
def run(self, root_folder, external_checks_dir=None, file=None):
    """Scan Terraform definitions from either a single file or a directory tree.

    :param root_folder: directory to scan (ignored for path-relativization when
        ``file`` is given — then the file's own directory is used instead)
    :param external_checks_dir: optional iterable of directories with extra checks
    :param file: optional single .tf file to scan instead of the directory
    :return: a populated Report including any parsing errors
    """
    report = Report()
    tf_definitions = {}
    parsing_errors = {}
    if external_checks_dir:
        for directory in external_checks_dir:
            resource_registry.load_external_checks(directory)
    if file:
        Parser().parse_file(file=file, tf_definitions=tf_definitions, parsing_errors=parsing_errors)
        # Relativize scanned paths against the file's directory, not the original root.
        root_folder = os.path.dirname(file)
    else:
        Parser().hcl2(directory=root_folder, tf_definitions=tf_definitions, parsing_errors=parsing_errors)
    report.add_parsing_errors(parsing_errors.keys())
    # Unpack items directly instead of indexing definition[0]/definition[1].
    for full_file_path, blocks_by_type in tf_definitions.items():
        definition_context = parser_registry.enrich_definitions_context((full_file_path, blocks_by_type))
        scanned_file = full_file_path.split(root_folder)[1]
        logging.debug("Scanning file: %s", scanned_file)
        for block_type, blocks in blocks_by_type.items():
            # Only resource and data blocks are scanned by this runner.
            if block_type in ('resource', 'data'):
                self.run_block(blocks, definition_context, full_file_path, report,
                               scanned_file, block_type)
    return report
def test_module_dependencies(self):
    """Check the parser's module_dependency_map for the stacks resource tree."""
    stacks_dir = os.path.realpath(os.path.join(TEST_DIRNAME, '../resources/modules/stacks'))
    module, _ = Parser().parse_hcl_module(stacks_dir, self.source)
    dependency_map = module.module_dependency_map

    # Top-level stacks have no module dependencies of their own.
    self.assertEqual(dependency_map[f'{stacks_dir}/prod'], [[]])
    self.assertEqual(dependency_map[f'{stacks_dir}/stage'], [[]])
    self.assertEqual(dependency_map[f'{stacks_dir}/test'], [[]])

    # sub-prod is only reachable through prod's main.tf.
    self.assertEqual(dependency_map[f'{stacks_dir}/prod/sub-prod'],
                     [[f'{stacks_dir}/prod/main.tf']])

    expected_inner_modules = [
        [f'{stacks_dir}/prod/main.tf', f'{stacks_dir}/prod/sub-prod/main.tf'],
        [f'{stacks_dir}/stage/main.tf'],
        [f'{stacks_dir}/test/main.tf'],
    ]
    modules_root = os.path.dirname(stacks_dir)
    self.assertEqual(dependency_map[f'{modules_root}/s3_inner_modules'], expected_inner_modules)
    # The inner module inherits every chain, each extended by s3_inner_modules/main.tf.
    self.assertEqual(
        dependency_map[f'{modules_root}/s3_inner_modules/inner'],
        [chain + [f'{modules_root}/s3_inner_modules/main.tf'] for chain in expected_inner_modules])
def test_load_inner_registry_module(self):
    """Download a registry module with a nested inner module and verify every parsed file path."""
    parser = Parser()
    directory = os.path.join(self.resources_dir, "registry_security_group_inner_module")
    self.external_module_path = os.path.join(self.tmp_path, DEFAULT_EXTERNAL_MODULES_DIR)
    out_definitions = {}
    parser.parse_directory(directory=directory, out_definitions=out_definitions,
                           out_evaluations_context={}, download_external_modules=True,
                           external_modules_download_path=self.external_module_path)
    # len(dict) — no need to materialize list(keys()).
    self.assertEqual(11, len(out_definitions))
    expected_remote_module_path = f'{self.external_module_path}/github.com/terraform-aws-modules/terraform-aws-security-group/v4.0.0'
    expected_inner_remote_module_path = f'{expected_remote_module_path}/modules/http-80'
    expected_main_file = os.path.join(directory, 'main.tf')
    expected_inner_main_file = os.path.join(directory, expected_inner_remote_module_path, 'main.tf')
    expected_file_names = [
        expected_main_file,
        os.path.join(directory, expected_inner_remote_module_path, f'auto_values.tf[{expected_main_file}#0]'),
        os.path.join(directory, expected_inner_remote_module_path, f'main.tf[{expected_main_file}#0]'),
        os.path.join(directory, expected_inner_remote_module_path, f'outputs.tf[{expected_main_file}#0]'),
        os.path.join(directory, expected_inner_remote_module_path, f'variables.tf[{expected_main_file}#0]'),
        os.path.join(directory, expected_inner_remote_module_path, f'versions.tf[{expected_main_file}#0]'),
        os.path.join(directory, expected_remote_module_path, f'main.tf[{expected_inner_main_file}#0]'),
        os.path.join(directory, expected_remote_module_path, f'outputs.tf[{expected_inner_main_file}#0]'),
        os.path.join(directory, expected_remote_module_path, f'rules.tf[{expected_inner_main_file}#0]'),
        os.path.join(directory, expected_remote_module_path, f'variables.tf[{expected_inner_main_file}#0]'),
        os.path.join(directory, expected_remote_module_path, f'versions.tf[{expected_inner_main_file}#0]'),
    ]
    for expected_file_name in expected_file_names:
        # assertIn on the dict replaces the manual membership test against a
        # freshly-built key list followed by self.fail(); same check, better message.
        self.assertIn(expected_file_name, out_definitions)
def test_encryption_aws(self):
    """Every encryptable AWS resource vertex must carry the right encryption attributes."""
    resources_dir = os.path.realpath(os.path.join(TEST_DIRNAME, '../resources/encryption'))
    module, module_dependency_map, _ = Parser().parse_hcl_module(resources_dir, self.source)
    graph = LocalGraph(module, module_dependency_map)
    graph._create_vertices()
    graph.calculate_encryption_attribute()
    for attrs in (vertex.get_attribute_dict() for vertex in graph.vertices):
        resource_type, resource_name = decode_graph_property_value(
            attrs[CustomAttributes.ID]).split(".")
        if resource_type not in ENCRYPTION_BY_RESOURCE_TYPE:
            # Types outside the mapping must not get encryption attributes at all.
            self.assertIsNone(attrs.get(CustomAttributes.ENCRYPTION))
            self.assertIsNone(attrs.get(CustomAttributes.ENCRYPTION_DETAILS))
            continue
        is_encrypted = attrs[CustomAttributes.ENCRYPTION]
        details = attrs[CustomAttributes.ENCRYPTION_DETAILS]
        # Naming convention in the fixture: "encrypted*" resources are encrypted.
        expected = (EncryptionValues.ENCRYPTED.value if resource_name.startswith("encrypted")
                    else EncryptionValues.UNENCRYPTED.value)
        self.assertEqual(is_encrypted, expected, f'failed for "{resource_type}.{resource_name}"')
        if is_encrypted != EncryptionValues.ENCRYPTED.value:
            self.assertEqual(details, "")
        elif 'kms_key_id' in attrs or 'kms_master_key_id' in attrs:
            self.assertEqual(details, EncryptionTypes.KMS_VALUE.value,
                             f'Bad encryption details for "{resource_type}.{resource_name}"')
        else:
            self.assertIn(details,
                          [EncryptionTypes.AES256.value, EncryptionTypes.KMS_VALUE.value,
                           EncryptionTypes.NODE_TO_NODE.value, EncryptionTypes.DEFAULT_KMS.value],
                          f'Bad encryption details for "{resource_type}.{resource_name}"')
def run(self, root_folder, external_checks_dir=None, files=None):
    """Scan a directory and/or an explicit list of files; return the combined Report."""
    report = Report()
    tf_definitions = {}
    parsing_errors = {}
    if external_checks_dir:
        for checks_dir in external_checks_dir:
            resource_registry.load_external_checks(checks_dir)
    if root_folder:
        Parser().hcl2(directory=root_folder, tf_definitions=tf_definitions,
                      parsing_errors=parsing_errors)
        self.check_tf_definition(report, root_folder, tf_definitions)
    if files:
        # NOTE(review): commonprefix is character-based and can return a
        # non-directory prefix (e.g. '/a/foo' for '/a/foo1.tf' and '/a/foo2.tf');
        # confirm this is the intended relativization base.
        root_folder = os.path.commonprefix(files)
        for file_path in files:
            per_file_definitions = {}
            Parser().parse_file(file=file_path, tf_definitions=per_file_definitions,
                                parsing_errors=parsing_errors)
            self.check_tf_definition(report, root_folder, per_file_definitions)
    report.add_parsing_errors(parsing_errors.keys())
    return report
def go(dir_name):
    """Run one parser scenario: parse the directory and diff against expected.json / eval.json."""
    dir_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                            f"resources/parser_scenarios/{dir_name}")
    assert os.path.exists(dir_path)
    expected_data = TestParserScenarios.load_expected_data("expected.json", dir_path)
    assert expected_data is not None, f"{dir_name}: expected.json file not found"
    evaluation_data = TestParserScenarios.load_expected_data("eval.json", dir_path)

    actual_data = {}
    actual_eval_data = {}
    errors = {}
    Parser().parse_directory(dir_path, actual_data, actual_eval_data, errors,
                             download_external_modules=True)
    assert not errors, f"{dir_name}: Unexpected errors: {errors}"

    def check_roundtrip(actual, expected, label):
        # Serialize with the project encoder and reload so both sides are plain JSON types.
        serialized = json.dumps(actual, indent=2, default=json_encoder)
        assert json.loads(serialized) == expected, \
            f"{dir_name}: {label}:\n" \
            f" Expected: \n{json.dumps(expected, indent=2, default=json_encoder)}\n\n" \
            f" Actual: \n{serialized}"

    check_roundtrip(actual_data, expected_data, "Data mismatch")
    if evaluation_data is not None:
        check_roundtrip(actual_eval_data, evaluation_data, "Evaluation data mismatch")
def test_hcl_parsing_consistent_old_new(self):
    """The same config in modern and legacy HCL should parse to identical definitions."""
    cur_dir = os.path.dirname(os.path.realpath(__file__))
    comparison_root = f'{cur_dir}/../resources/tf_parsing_comparison'
    _, new_definitions = Parser().parse_hcl_module(f'{comparison_root}/tf_regular', 'AWS')
    _, old_definitions = Parser().parse_hcl_module(f'{comparison_root}/tf_old', 'AWS')
    self.assertDictEqual(new_definitions[f'{comparison_root}/tf_regular/main.tf'],
                         old_definitions[f'{comparison_root}/tf_old/main.tf'])
def test_file_dir_parser_results_match(self):
    """Parsing a single file and parsing its directory should produce the same definitions."""
    parser = Parser()
    current_dir = os.path.dirname(os.path.realpath(__file__))
    dir_path = current_dir + '/resources/parse_file_vs_dir'
    file_path = current_dir + '/resources/parse_file_vs_dir/main.tf'
    tf_definitions_file = parser.parse_file(file_path)
    _, tf_definitions_dir = parser.parse_hcl_module(dir_path, 'terraform')
    # Directory parsing keys results by file path; compare against its first (only) entry.
    self.assertDictEqual(tf_definitions_file, next(iter(tf_definitions_dir.values())))
def build_graph_from_tf_definitions(self, tf_definitions, render_variables=True):
    """Build and return a LocalGraph directly from already-parsed tf_definitions."""
    module, module_dependency_map, _ = Parser().parse_hcl_module_from_tf_definitions(
        tf_definitions, '', self.source)
    graph = LocalGraph(module, module_dependency_map)
    graph.build_graph(render_variables=render_variables)
    return graph
def test_bool_parsing_sort_only_lists_with_bools(self):
    """_clean_parser_types converts 'true'/'false' strings to bools and sorts mixed lists."""
    conf = {'enabled_metrics': [['a', 'true', 'false'], 'b', 'true', 'false']}
    expected = {'enabled_metrics': [[True, False, 'a'], True, False, 'b']}
    self.assertDictEqual(expected, Parser()._clean_parser_types(conf))
def test_vertices_from_local_graph_module(self):
    """Building the stacks module graph with variable rendering yields the expected edge count."""
    stacks_dir = os.path.realpath(os.path.join(TEST_DIRNAME, '../resources/modules/stacks'))
    module, module_dependency_map, _ = Parser().parse_hcl_module(stacks_dir, self.source)
    graph = LocalGraph(module, module_dependency_map)
    graph.build_graph(render_variables=True)
    self.assertEqual(12, len(graph.edges))
def test_creating_graph(self):
    """A parsed module's vertex graph can be saved through the networkx connector."""
    encryption_dir = os.path.realpath(os.path.join(TEST_DIRNAME, '../resources/encryption'))
    module, _ = Parser().parse_hcl_module(encryption_dir, 'AWS')
    graph = TerraformLocalGraph(module)
    graph._create_vertices()
    connector = NetworkxConnector()
    connector.save_graph(graph)
def build_graph_from_source_directory(self, source_dir, render_variables=True,
                                      local_graph_class=LocalGraph, parsing_errors=None):
    """Parse source_dir and build a graph of the requested class.

    Returns a (graph, tf_definitions) tuple.
    """
    module, module_dependency_map, tf_definitions = Parser().parse_hcl_module(
        source_dir, self.source, parsing_errors)
    graph = local_graph_class(module, module_dependency_map)
    graph.build_graph(render_variables=render_variables)
    return graph, tf_definitions
def _load_and_test(self, test_file, test_expected_file):
    """Parse test_file and assert it matches the JSON in test_expected_file with no parse errors.

    :param test_file: path (relative to this test's directory) of the .tf file to parse
    :param test_expected_file: relative path of the JSON file with the expected result
    """
    current_dir = os.path.dirname(os.path.realpath(__file__))
    parser = Parser()
    parse_errors = {}
    result = parser.parse_file(f'{current_dir}/{test_file}', parse_errors)
    with open(f'{current_dir}/{test_expected_file}', 'r') as f:
        expected_result = json.load(f)
    # Assert against the dict itself: shows which file failed on error, unlike len(keys()).
    self.assertEqual({}, parse_errors)
    # Compare parsed structures, not json.dumps strings — string comparison is
    # sensitive to key ordering and produces unreadable failure output.
    self.assertEqual(expected_result, result)
def test_malformed_output_blocks(self):
    """Malformed output blocks are skipped; only the two valid outputs are kept."""
    parser = Parser()
    directory = os.path.join(self.resources_dir, "malformed_outputs")
    self.external_module_path = os.path.join(directory, DEFAULT_EXTERNAL_MODULES_DIR)
    out_definitions = {}
    parser.parse_directory(directory=directory, out_definitions=out_definitions,
                           out_evaluations_context={}, download_external_modules=True,
                           external_modules_download_path=DEFAULT_EXTERNAL_MODULES_DIR)
    # Use the unpacked definitions directly instead of re-indexing out_definitions[file_path]
    # and wrapping the result in a redundant list().
    _file_path, entity_definitions = next(iter(out_definitions.items()))
    self.assertEqual(2, len(entity_definitions['output']))
def test_invalid_module_sources(self):
    """Modules with bad addresses must not be downloaded; only the root file parses."""
    parser = Parser()
    directory = os.path.join(self.resources_dir, "failing_module_address")
    self.external_module_path = os.path.join(directory, DEFAULT_EXTERNAL_MODULES_DIR)
    out_definitions = {}
    parser.parse_directory(directory=directory, out_definitions=out_definitions,
                           out_evaluations_context={}, download_external_modules=True,
                           external_modules_download_path=DEFAULT_EXTERNAL_MODULES_DIR)
    # check that only the original file was parsed successfully without getting bad external modules
    # len(dict) gives the same count without materializing list(keys()).
    self.assertEqual(1, len(out_definitions))
def test_vertices_from_local_graph(self):
    """Graph vertices can be converted back into tf_definitions plus breadcrumb data."""
    resources_dir = os.path.realpath(
        os.path.join(TEST_DIRNAME, '../resources/variable_rendering/render_from_module_vpc'))
    module, module_dependency_map, _ = Parser().parse_hcl_module(resources_dir, self.source)
    graph = LocalGraph(module, module_dependency_map)
    graph._create_vertices()
    definitions, breadcrumbs = convert_graph_vertices_to_tf_definitions(graph.vertices, resources_dir)
    self.assertIsNotNone(definitions)
    self.assertIsNotNone(breadcrumbs)
def test_set_parsing_to_list(self):
    """_clean_parser_types turns sets into lists while normalizing bool-like strings."""
    conf = {
        'enabled_metrics': [['a', 'true', 'false'], 'b', 'true', 'false'],
        'example_set': [{'1', '2', '3'}],
    }
    expected = {
        'enabled_metrics': [[True, False, 'a'], True, False, 'b'],
        'example_set': [['1', '2', '3']],
    }
    self.assertDictEqual(expected, Parser()._clean_parser_types(conf))
def test_load_registry_module(self):
    """Downloading a registry module places it under the external modules path."""
    parser = Parser()
    directory = os.path.join(self.resources_dir, "registry_security_group")
    self.external_module_path = os.path.join(directory, DEFAULT_EXTERNAL_MODULES_DIR)
    out_definitions = {}
    parser.parse_directory(directory=directory, out_definitions=out_definitions,
                           out_evaluations_context={}, download_external_modules=True,
                           external_modules_download_path=DEFAULT_EXTERNAL_MODULES_DIR)
    external_aws_modules_path = os.path.join(
        self.external_module_path,
        'github.com/terraform-aws-modules/terraform-aws-security-group/v3.18.0')
    # assertTrue instead of a bare assert: consistent with the unittest style used
    # elsewhere and not stripped when Python runs with -O.
    self.assertTrue(os.path.exists(external_aws_modules_path))
def build_graph_from_source_directory(self, source_dir, render_variables=True,
                                      local_graph_class=LocalGraph, parsing_errors=None,
                                      download_external_modules=False,
                                      excluded_paths: List[str] = None):
    """Parse source_dir (optionally downloading external modules) and build a graph.

    Returns a (graph, tf_definitions) tuple.
    """
    logging.info('Parsing HCL files in source dir')
    module, module_dependency_map, tf_definitions = Parser().parse_hcl_module(
        source_dir, self.source, download_external_modules, parsing_errors,
        excluded_paths=excluded_paths)
    logging.info('Building graph from parsed module')
    graph = local_graph_class(module, module_dependency_map)
    graph.build_graph(render_variables=render_variables)
    return graph, tf_definitions
def test_tree_parsing_to_str(self):
    """_clean_parser_types stringifies Tree values while cleaning the rest of the config."""
    conf = {
        'enabled_metrics': [['a', 'true', 'false'], 'b', 'true', 'false'],
        'example_set': Tree("data", ["child1", "child2"]),
    }
    expected = {
        'enabled_metrics': [[True, False, 'a'], True, False, 'b'],
        'example_set': "Tree('data', ['child1', 'child2'])",
    }
    self.assertDictEqual(expected, Parser()._clean_parser_types(conf))
def _assert_variable_defaults(self, local_graph, expected_defaults):
    """Assert each variable vertex's 'default' attribute equals the expected value.

    Handles the parser wrapping scalar defaults in a single-element list.
    """
    for var_name, expected in expected_defaults.items():
        vertex_index = local_graph.vertices_block_name_map[BlockType.VARIABLE].get(var_name)[0]
        default_val = local_graph.vertices[vertex_index].attributes['default']
        if isinstance(default_val, list):
            self.assertEqual(expected, default_val[0])
        else:
            self.assertEqual(expected, default_val)

def test_set_variables_values_from_modules(self):
    """Variable defaults are overridden by module-call values once the graph is built."""
    resources_dir = os.path.realpath(
        os.path.join(TEST_DIRNAME, '../resources/variable_rendering/render_from_module_vpc'))
    hcl_config_parser = Parser()
    module, module_dependency_map, tf_definitions = hcl_config_parser.parse_hcl_module(
        resources_dir, source=self.source)
    local_graph = LocalGraph(module, module_dependency_map)
    local_graph._create_vertices()
    # Before the graph is built, variables carry their declared defaults.
    variables_before_module_definitions = {
        "cidr": "0.0.0.0/0",
        "private_subnets": [],
        "public_subnets": [],
        "enable_nat_gateway": False,
        "single_nat_gateway": False,
        "enable_dns_hostnames": False,
        "public_subnet_tags": {},
        "private_subnet_tags": {},
    }
    self._assert_variable_defaults(local_graph, variables_before_module_definitions)
    local_graph.build_graph(resources_dir)
    # After building, the module-call arguments have replaced the defaults.
    expected_variables_after = {
        "cidr": "172.16.0.0/16",
        "private_subnets": ["172.16.1.0/24", "172.16.2.0/24", "172.16.3.0/24"],
        "public_subnets": ["172.16.4.0/24", "172.16.5.0/24", "172.16.6.0/24"],
        "enable_nat_gateway": True,
        "single_nat_gateway": True,
        "enable_dns_hostnames": True,
        "public_subnet_tags": {"kubernetes.io/cluster/${local.cluster_name}": "shared", "kubernetes.io/role/elb": "1"},
        "private_subnet_tags": {"kubernetes.io/cluster/${local.cluster_name}": "shared", "kubernetes.io/role/internal-elb": "1"},
    }
    self._assert_variable_defaults(local_graph, expected_variables_after)
def test_bool_parsing_avoid_remove_non_existing(self):
    """Only genuine 'true'/'false' strings are converted; other values stay untouched."""
    conf = {'test': ['Bool'], 'variable': ['aws:SecureTransport'], 'values': [['false']]}
    expected = {'test': ['Bool'], 'variable': ['aws:SecureTransport'], 'values': [[False]]}
    self.assertDictEqual(expected, Parser()._clean_parser_types(conf))
def test_load_local_module(self):
    """Local module references are resolved without external downloads."""
    # given
    parser = Parser()
    local_module_dir = os.path.join(self.resources_dir, "local_module")
    definitions = {}
    # when
    parser.parse_directory(directory=local_module_dir, out_definitions=definitions,
                           out_evaluations_context={})
    # then
    self.assertEqual(len(definitions), 3)  # root file + 2x module file
    self.assertEqual(len(parser.loaded_files_map), 2)  # root file + 1x module file
def run(self, root_folder, external_checks_dir=None):
    """Parse all HCL under root_folder and scan every resource block.

    :param root_folder: directory containing the Terraform files to scan
    :param external_checks_dir: optional iterable of directories with extra checks
    :return: a Report with one Record per (resource, check) result, plus parsing errors
    """
    report = Report()
    tf_definitions = {}
    parsing_errors = {}
    if external_checks_dir:
        for directory in external_checks_dir:
            resource_registry.load_external_checks(directory)
    # hcl2 fills tf_definitions/parsing_errors in place.
    Parser().hcl2(directory=root_folder, tf_definitions=tf_definitions, parsing_errors=parsing_errors)
    report.add_parsing_errors(parsing_errors.keys())
    # definition is a (file_path, blocks_by_type) tuple.
    for definition in tf_definitions.items():
        full_file_path = definition[0]
        definition_context = parser_registry.enrich_definitions_context(definition)
        # Path relative to the scanned root (split keeps the trailing portion).
        scanned_file = definition[0].split(root_folder)[1]
        logging.debug("Scanning file: %s", scanned_file)
        if 'resource' in definition[1]:
            for resource in definition[1]['resource']:
                # Each resource dict has a single type key wrapping a single name key.
                resource_type = list(resource.keys())[0]
                resource_name = list(list(resource.values())[0].keys())[0]
                resource_id = "{}.{}".format(resource_type, resource_name)
                # Context carries line ranges, source lines, and any skip annotations.
                resource_context = definition_context[full_file_path][resource_type][resource_name]
                resource_lines_range = [resource_context['start_line'], resource_context['end_line']]
                resource_code_lines = resource_context['code_lines']
                skipped_checks = resource_context.get('skipped_checks')
                results = resource_registry.scan(resource, scanned_file, skipped_checks)
                for check, check_result in results.items():
                    record = Record(check_id=check.id, check_name=check.name,
                                    check_result=check_result,
                                    code_block=resource_code_lines,
                                    file_path=scanned_file,
                                    file_line_range=resource_lines_range,
                                    resource=resource_id,
                                    check_class=check.__class__.__module__)
                    report.add_record(record=record)
    return report
def test_clean_definitions(self):
    """Malformed blocks in skip_bad_tf_example.tf are dropped; valid ones survive."""
    current_dir = os.path.dirname(os.path.realpath(__file__))
    definitions = Parser._parse_tf_definitions(f'{current_dir}/skip_bad_tf_example.tf')
    variables = definitions['variable']
    assert len(variables) == 1
    assert 'okay' in variables[0]
    modules = definitions['module']
    assert len(modules) == 2
    assert 'bar' in modules[0]
    assert 'okay' in modules[1]
def test_hcl_parsing_sorting(self):
    """Scope lists inside parsed resources come back in sorted order."""
    source_dir = os.path.realpath(os.path.join(
        TEST_DIRNAME, '../resources/tf_parsing_comparison/modifications_diff'))
    _, tf_definitions = Parser().parse_hcl_module(source_dir, 'AWS')
    expected = [
        'https://www.googleapis.com/auth/devstorage.read_only',
        'https://www.googleapis.com/auth/logging.write',
        'https://www.googleapis.com/auth/monitoring.write',
        'https://www.googleapis.com/auth/service.management.readonly',
        'https://www.googleapis.com/auth/servicecontrol',
        'https://www.googleapis.com/auth/trace.append',
    ]
    instance = tf_definitions[source_dir + '/main.tf']['resource'][0]['google_compute_instance']
    scopes = instance['tfer--test3']['service_account'][0]['scopes'][0]
    self.assertListEqual(scopes, expected)
def setUp(self):
    """Parse the default-evaluation resources and cache the enriched definitions context."""
    test_root_dir = os.path.dirname(os.path.realpath(__file__)) + '/../evaluation/resources/default_evaluation/'
    tf_definitions = {}
    parsing_errors = {}
    Parser().parse_directory(directory=test_root_dir, out_definitions=tf_definitions,
                             out_evaluations_context={}, out_parsing_errors=parsing_errors)
    # The registry accumulates context across calls; the last return value holds it all.
    for definition_item in tf_definitions.items():
        enriched_context = parser_registry.enrich_definitions_context(definition_item)
    self.definitions_context = enriched_context
def setUp(self):
    """Parse the evaluation fixture via hcl2 and cache the enriched definitions context."""
    test_root_dir = 'tests/terraform/evaluation/resources/default_evaluation'
    tf_definitions = {}
    parsing_errors = {}
    Parser().hcl2(directory=test_root_dir, tf_definitions=tf_definitions,
                  parsing_errors=parsing_errors)
    # The registry accumulates context across calls; the last return value holds it all.
    for definition_item in tf_definitions.items():
        enriched_context = parser_registry.enrich_definitions_context(definition_item)
    self.definitions_context = enriched_context