def _match_exports_with_imports(
        self, exports: List[StackExport]) -> Iterable[StackExport]:
    for export in exports:
        export_name = export.export_name
        should_paginate = True
        next_token = None
        imports: List[str] = []
        try:
            while should_paginate:
                logger.debug(
                    f"Gather import stacks for export name: {export_name}")
                # TODO investigate paginators
                result = (self.cfn_client.list_imports(
                    ExportName=export_name, NextToken=next_token)
                          if next_token else self.cfn_client.list_imports(
                              ExportName=export_name))
                next_token = result.get("NextToken", None)
                imports.extend(result["Imports"])
                if not next_token:
                    should_paginate = False
        except ClientError as e:
            # Exports that nothing imports raise a ClientError; skip them.
            if "is not imported by any stack" in str(e):
                continue
            raise
        yield StackExport(
            export_name=export_name,
            exporting_stack_name=export.exporting_stack_name,
            export_value=export.export_value,
            importing_stacks=imports,
        )
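# The TODO above flags the hand-rolled NextToken loop: boto3 ships a
# built-in "list_imports" paginator that does the token bookkeeping
# itself. Below is a minimal sketch of that alternative, assuming a
# plain boto3 CloudFormation client; the list_import_stacks helper
# name is hypothetical, not part of this codebase.

from typing import List

import boto3


def list_import_stacks(export_name: str) -> List[str]:
    """Collect every stack importing the given export, letting boto3's
    built-in list_imports paginator handle NextToken bookkeeping."""
    cfn_client = boto3.client("cloudformation")
    imports: List[str] = []
    for page in cfn_client.get_paginator("list_imports").paginate(
            ExportName=export_name):
        imports.extend(page["Imports"])
    return imports

# Note: the "is not imported by any stack" ClientError would still need
# the same handling as above, since the paginator raises it as well.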
def test_export_filled(self, tmp_path):
    """Graph :: can be exported without failure with contents"""
    # GIVEN a filled infra
    stack_infos = [
        StackInfo(
            stack_name="dev-teamName-api",
            service_name="api",
            component_name="service",
            resources=[],
        ),
        StackInfo(
            stack_name="dev-teamName-etl",
            service_name="etl",
            component_name="task",
            parameters=[
                StackParameter(
                    name="datawarehouseHost",
                    value="fake",
                    external_dependency=ExternalDependency(
                        "data", "Snowflake"),
                )
            ],
            resources=[],
        ),
    ]
    stack_exports = [
        StackExport(
            export_name="etl-data-path",
            export_value="fake",
            exporting_stack_name="dev-teamName-etl",
            importing_stacks=["dev-teamName-api"],
            importing_services=["api"],
            export_service="etl",
        )
    ]
    graph_exporter = InfraGraphExporter(
        env="dev",
        project_name="testTeam",
        config_path="tests/test_config.hocon",
        output_folder=str(tmp_path),
        data_extractor=FakeDataExtractor(stack_infos, stack_exports),
    )

    # WHEN I try to export it
    graph_exporter.export(refresh=True, cluster_stack_graph=False)

    # THEN it should create output files
    resulting_files = {file.name for file in tmp_path.iterdir()}
    expect(resulting_files).to_equal(EXPECTED_OUTPUT_FILES)

    with open(tmp_path / "export-services.gv") as file:
        contents = file.read()

    # AND the expected dependencies should be in the output
    expect(contents).to_contain("Snowflake -> etl")
    expect(contents).to_contain("etl -> api")
    expect(contents).to_contain("api -> ExternalService")
def _extract_export(self, raw_export: Dict):
    # ExportingStackId is a full stack ARN; the regex pulls out the
    # stack name between the last two slashes.
    stack_id = raw_export["ExportingStackId"]
    match = re.search(".*/(.*)/.*", stack_id)
    if match:
        stack = match.group(1)
        name = raw_export["Name"]
        value = raw_export["Value"]
        if stack.startswith(self._get_stack_prefix()):
            return StackExport(export_name=name,
                               exporting_stack_name=stack,
                               export_value=value)
    # Exports from other prefixes or unparsable stack IDs yield None.
    return None
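# For context, a stack ARN has the shape
# arn:aws:cloudformation:<region>:<account>:stack/<stack-name>/<guid>,
# so the regex captures the <stack-name> segment. A quick illustration
# with a made-up ARN:

import re

stack_id = ("arn:aws:cloudformation:eu-west-1:123456789012"
            ":stack/dev-teamName-etl/1a2b3c4d-5e6f-7890-abcd-ef1234567890")
match = re.search(".*/(.*)/.*", stack_id)
assert match is not None
assert match.group(1) == "dev-teamName-etl"  # the stack name segment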
def _enrich_service_name(
        exports_enriched: List[StackExport],
        stack_infos: List[StackInfo]) -> Iterable[StackExport]:
    # Index service names by stack name for quick lookups below.
    grouped_by_stack = {}
    for stack_info in stack_infos:
        if stack_info.service_name is not None:
            grouped_by_stack[stack_info.stack_name] = stack_info.service_name

    for export in exports_enriched:
        service_name = grouped_by_stack[export.exporting_stack_name]
        # Resolve importers to services, dropping stacks without a
        # known service name.
        importing_services = [
            grouped_by_stack[importing_stack]
            for importing_stack in export.importing_stacks
            if importing_stack in grouped_by_stack
        ]
        yield StackExport(
            export_name=export.export_name,
            exporting_stack_name=export.exporting_stack_name,
            export_value=export.export_value,
            export_service=service_name,
            importing_stacks=export.importing_stacks,
            importing_services=importing_services,
        )
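# A small usage sketch, assuming _enrich_service_name can be called
# directly (it takes no self, so presumably a staticmethod or
# module-level helper) and reusing the constructor shapes from the
# test above; all values are illustrative only.

stack_infos = [
    StackInfo(stack_name="dev-teamName-etl", service_name="etl",
              component_name="task", resources=[]),
]
stack_exports = [
    StackExport(export_name="etl-data-path",
                export_value="fake",
                exporting_stack_name="dev-teamName-etl",
                importing_stacks=["dev-teamName-api"]),
]
enriched = list(_enrich_service_name(stack_exports, stack_infos))
assert enriched[0].export_service == "etl"
# dev-teamName-api has no StackInfo here, so no service is resolved for it.
assert enriched[0].importing_services == []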