def main():
    """Parse link dependencies; pass 'show' as first argument to open the result."""
    logging.basicConfig(stream=sys.stderr, level=logging.INFO)
    # logging.getLogger("commons.configurator").setLevel(logging.DEBUG)
    Configurator().default()
    # boolean flag derived directly from the command line
    show = len(sys.argv) > 1 and sys.argv[1] == 'show'
    Configurator().create_instance(ParseLinkDependencies).process(show=show)
def main():
    """Render the file-level include dependency graph between two modules.

    Expects two positional arguments: the source module and the target module.
    """
    logging.basicConfig(stream=sys.stderr, level=logging.WARNING)
    Configurator().default()
    source_module = sys.argv[1]
    target_module = sys.argv[2]
    dependency_graph = FileIncludeDepsProcessor().graph_for_modules(
        source_module, target_module)
    output_basename = os.path.join(
        config_basic.get_results_directory(),
        'include_links_for_modules-%s-%s' % (source_module, target_module))
    decorators = DecoratorSet(
        node_label_decorators=(FilenameURLNodeDecorator(), ),
        edge_label_decorators=(),
        node_tooltip_decorators=(),
        edge_tooltip_decorators=())
    graph_description = GraphDescriptionImpl(
        description="file-level include dependencies",
        basename=output_basename,
        extra="(%s->%s)" % (source_module, target_module))
    DependencyFilterOutputter(decorators).output(
        filter=None,
        graph=dependency_graph,
        module_group_conf=DefaultNodeGroupingConfiguration(),
        description=graph_description)
def main():
    """Cluster the file-level dependencies of the given modules and render the
    lifted, SCC-merged cluster graph.

    argv[1] is a comma-separated list of module names.
    """
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    Configurator().default()
    if len(sys.argv) == 1:
        print("call: list_module_include_deps_per_target_file TargetModule")
        return
    modulenames = set(sys.argv[1].split(","))
    # hard-coded exception pairs for one known problematic module
    exceptions = ()
    if "BtcCommonsCore" in modulenames:
        exceptions = set((("stringnumconvint", "hashmap"), ))
    file_to_module_map = config_file_to_module_map_supply.generate_file_to_module_map(
    )
    file_name_mapper = FileNameMapper(file_to_module_map, modulenames)
    generator = FileLevelPseudoModuleDependencyGenerator(
        file_to_module_map=file_to_module_map)
    # file-level graph restricted to the requested modules (external deps ignored)
    merged_graph = generator.get_file_level_pseudomodule_graph(
        modulenames=modulenames,
        mapper=file_name_mapper,
        ignore_external=True,
        exceptions=exceptions)
    clustering_processor = MyClusteringProcessor(modulenames)
    clustering_processor.output_clusters()
    # lift file-level edges to cluster-level edges, then collapse cycles
    cluster_graph = ClusterDependencyLifter(
        file_name_mapper).get_cluster_dependency_graph(
            clustering_result=clustering_processor.get_clustering_result(),
            base_dependency_graph=merged_graph)
    cluster_graph = SCCMerger(cluster_graph).get_scc_merged_graph()
    basename = os.path.join(
        config_basic.get_results_directory(),
        'cluster_graph_for_module-%s' % ("-".join(modulenames), ))
    # node size = aggregated size of the files behind each pseudomodule
    size_fun = PseudoModuleSizer(
        file_name_mapper,
        path_resolver=FixedBaseDirPathResolver(
            config_basic.get_local_source_base_dir())).module_size
    decorator_config = DecoratorSet(
        node_label_decorators=(
            NodeSizeLabelDecorator(size_func=size_fun),
            NodeGroupDecorator(),
            NodeSizeScalingDecorator(min_render_size=1.5,
                                     max_render_size=8,
                                     size_func=size_fun),
        ),
        edge_label_decorators=(SCCEdgeDecorator(), ),
        node_tooltip_decorators=(),
        edge_tooltip_decorators=())
    description = GraphDescriptionImpl(
        description=
        "file-level pseudomodule cluster graph based on include dependencies",
        basename=basename,
        extra="(%s)" % (", ".join(modulenames), ))
    DependencyFilterOutputter(decorator_config).output(
        filter=None,
        graph=cluster_graph,
        module_group_conf=DefaultNodeGroupingConfiguration(
            node_grouper=NullNodeGrouper()),
        description=description)
    clustering_processor.output_refcounts()
def main():
    """Find and report header files that appear in more than one module spec."""
    logging.basicConfig(stream=sys.stderr, level=logging.INFO)
    Configurator().default()
    duplicates = determine_duplicates()
    # BUGFIX: message said "in more than than module spec"; also use logging's
    # lazy %-style arguments instead of eager string formatting.
    logging.info("%i duplicate (i.e. in more than one module spec) headers found",
                 len(duplicates))
    print_duplicate_headers(duplicates)
def main():
    """Lift include links; argv[1] optionally restricts the node set (CSV list)."""
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    Configurator().default()
    # no argument means "no restriction"
    restriction = set(sys.argv[1].split(",")) if len(sys.argv) >= 2 else None
    LiftIncludeLinksRunner().process(restriction)
def main():
    """Run the file repair processor for C++ sources; returns its exit status."""
    logging.basicConfig(stream=sys.stderr, level=logging.INFO)
    # the configuration machinery dispatches on this environment variable
    os.environ['LANGUAGE'] = 'cpp'
    Configurator().default()
    runner = FileRepairProcessorRunner(
        output=sys.stderr,
        basic_config=config_basic,
        cpp_file_configuration=config_cpp_file_configuration)
    return runner.run(argv=sys.argv[1:])
def main():
    """Check file include links against the configured include rules and print
    the result to stdout.

    If argv[1] is given it is read as a CSV file of links; otherwise the links
    come from the configured include-deps supply.
    """
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    Configurator().default()
    input_file = None
    if len(sys.argv) > 1:
        input_file = open(sys.argv[1])
        file_links = csv.reader(input_file, delimiter=',')
    else:
        file_links = config_file_include_deps_supply.get_file_include_deps()
    try:
        illegal_links, total_count, rule_violations = \
            IncludeRuleCheckerProcessor().check_links(
                file_links, config_checker.get_include_rules())
    finally:
        # BUGFIX: the CSV input file was opened but never closed
        if input_file is not None:
            input_file.close()
    IncludeRuleCheckerOutputter().output(sys.stdout, illegal_links, total_count,
                                         rule_violations)
def main():
    """Write the HTML overview page (index.html) listing generated graphs."""
    Configurator().default()
    output_file = open(
        os.path.join(config_basic.get_results_directory(), 'index.html'), "wt")
    try:
        version_info = FallbackVersionDescriber(
            config_version_describer).describe_local_version(
                basepath=config_basic.get_local_source_base_dir(),
                detailed=False)
        generation_log = config_generation_log_supply.get_generation_log_iter()
        writer = EPMHTMLGeneratedGraphOverviewWriter(
            output_file, generation_log,
            (config_basic.get_system(), config_basic.get_version(),
             version_info[2], config_basic.get_local_source_base_dir_subset()))
        writer.write()
    finally:
        # BUGFIX: the output file was never closed
        output_file.close()
def main(): logging.basicConfig(stream=sys.stderr, level=logging.INFO) Configurator().default() parser = __create_option_parser() (options, args) = parser.parse_args() logger = logging.getLogger(main.__module__) if len(args) > 0: for target in FileIncludeDepsListerFacade(include_deps_supply=config_file_include_deps, closure=options.closure, sort=options.sort).required_files(args): print target else: print >>sys.stderr, "No valid input files specified!" sys.exit(1)
def main():
    """Check include links against the include rules and write a text report
    into the results directory, registering it in the generation log."""
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    Configurator().default()
    file_links = config_file_include_deps_supply.get_file_include_deps()
    report_filename = os.path.join(config_basic.get_results_directory(),
                                   "IncludeDeps", "include_rule_report.txt")
    illegal_links, total_count, rule_violations = \
        IncludeRuleCheckerProcessor().check_links(
            file_links, config_checker.get_include_rules())
    report_file = open(report_filename, "w")
    try:
        IncludeRuleCheckerOutputter().output(report_file, illegal_links,
                                             total_count, rule_violations)
    finally:
        # BUGFIX: the report file was opened inline and never closed
        report_file.close()
    description = GraphDescriptionImpl(description="file-level include deps",
                                       category="report")
    config_generation_log.add_generated_file(description=description,
                                             filename=report_filename)
def main(): logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) Configurator().default() if len(sys.argv) > 1: module_grouper_class = ClassLoader.get_class( qualified_class_name=sys.argv[1]) else: module_grouper_class = config_module_grouper module_grouper = module_grouper_class( config_module_list_supply.get_module_list()) for prefix in sorted(module_grouper.node_group_prefixes()): print "%s (%s)" % ( prefix, list(module for module in config_module_list_supply.get_module_list() if prefix == module_grouper.get_node_group_prefix(module)))
def main():
    """Render the module-internal file-level include dependencies of one
    hard-coded module (BtcCommonsCore)."""
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    Configurator().default()
    module_name = 'BtcCommonsCore'
    restrict_to_headers = False
    file_to_module_map = config_file_to_module_map_supply.generate_file_to_module_map(
        False)
    all_file_links = config_file_include_deps_supply.get_file_include_deps()

    def _originates_in_module(path):
        # keep a link only when its source file maps into the target module
        # (optionally restricted to header files)
        if path not in file_to_module_map:
            return False
        if VirtualModuleTypes.remove_suffixes(
                file_to_module_map[path]) != module_name:
            return False
        return not restrict_to_headers or path.endswith('.h')

    module_file_links = (AttributedEdge(from_node=source, to_node=target)
                         for (source, target) in all_file_links
                         if _originates_in_module(source))
    # TODO is this correct? does the output graph contain anything if the module list is empty?
    dependency_filter = DefaultDependencyFilter(
        config=MyDependencyFilterConfiguration(), module_list=())
    graph = DependencyFilterHelper.filter_graph(
        dependency_filter=dependency_filter,
        graph=AttributeGraph(edges=module_file_links))
    decorators = DecoratorSet(
        edge_label_decorators=[SCCEdgeDecorator()],
        node_label_decorators=[
            NodeSizeLabelDecorator(size_func=FileSizer.get_file_size),
            NodeSizeScalingDecorator(size_func=FileSizer.get_file_size,
                                     min_render_size=3,
                                     max_render_size=14,
                                     scale_type=ScalingTypes.RADICAL),
            ModuleColorNodeDecorator(EPMNodeColorer()),
        ])
    graph_description = GraphDescriptionImpl(
        description="internal include dependencies of %s" % module_name,
        basename=os.path.join(config_basic.get_results_directory(),
                              module_name),
        section='module-internal')
    DependencyFilterOutputterTools.output_detail_and_overview_graph(
        graph=graph,
        description=graph_description,
        outputter=DependencyFilterOutputter(decorator_config=decorators),
        module_group_conf=MyNodeGroupingConfiguration(MyNodeGrouper()))
def main(): logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) Configurator().default() print "Checking EPM physical module dependency rules..." module_link_deps_supply = config_module_link_deps_supply() #module_link_deps_supply = OnTheFlyModuleLinkDepsSupply() module_link_deps_graph = module_link_deps_supply.get_module_link_deps_graph() module_grouper = config_module_grouper(modules=module_link_deps_graph.node_names()) rule_violations = config_style_checker(module_grouper).physical_rule_violations(module_link_deps_supply) rule_checker_outputter = Configurator().create_instance(EPMRuleCheckerOutputter, rule_violations, module_link_deps_graph) rule_checker_outputter.output_report() #for edge in sorted(rule_violations): # print "%s -> %s" % (edge.get_from_node(), edge.get_to_node()) rule_checker_outputter.output_graph()
def main(): logging.basicConfig(stream=sys.stderr, level=logging.INFO) Configurator().default() vcproj_lines = config_msvc_data_supply().get_vcproj_list() dirs_to_vcprojs_map = SetValuedDictTools.convert_from_itemiterator( (os.path.dirname(line[0]), line[0]) for line in vcproj_lines) os.chdir(config_cpp_paths.get_module_spec_basedir()) mapper = VcprojToModuleNameMapper() for (dirname, vcprojs) in sorted(dirs_to_vcprojs_map.iteritems()): if dirname == '': # this is an entry containing solution folders, not projects # TODO this should be filtered somewhere else continue if len(vcprojs) == 1: top_vcproj = vcprojs.pop() else: top_vcproj = max(vcprojs, key=lambda name: os.stat(name).st_size) # TODO this has not the correct case... better: extract module name from vcproj file # das ist doch schon gefixed, oder nicht?? if len(dirname): print "%s:%s" % (dirname, mapper.get_module_name(top_vcproj))
# NOTE(review): this chunk begins mid-expression, inside a graph-outputter method
# whose header lies outside this view; it is kept byte-identical.  It writes the
# DOT digraph header (rank separation, filled Calibri nodes), defines
# _output_tail (closing '}'), sets module-level grouper/list defaults, defines
# PydepRunner (runs PydepProcessor over the given paths and renders the filtered
# dependency graph to stdout), and provides the script entry point.
len(tuple(self._graph().edges())))) self.file().write('digraph G {\n') #f.write('concentrate = true;\n') #f.write('ordering = out;\n') self.file().write('ranksep=1.0;\n') self.file().write('node [style=filled,fontname=Calibri,fontsize=24];\n') def _output_tail(self): self.file().write('}\n') config_module_grouper = ModuleGrouper config_module_list = ModuleListSupply() class PydepRunner(object): def run(self, paths, dep_filter=None): if dep_filter is None: dep_filter = DefaultDependencyFilter(config=NullDependencyFilterConfiguration(), module_list=config_module_list.get_module_list()) processor = PydepProcessor() processor.process(paths) processor.output(dep_filter) PydepGraphOutputter(colored=True, types=processor.types(), output_groups=True, graph=dep_filter.graph(), outfile=sys.stdout).output_all() if __name__ == '__main__': logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) Configurator().default() PydepRunner().run(sys.argv[1:])
# NOTE(review): this chunk begins mid-method (inside the VCXProj processor's
# parse_file reference-printing loop) and is kept byte-identical.  It emits
# "from,to,AR" CSV rows for assembly references, logs assemblies with no
# references, prints the source-file list, logs unparsable files (Python-2
# `except ExpatError, exc` syntax), defines parse_files(), and provides the
# entry point that reads file names from argv or, failing that, from stdin.
for to_proj in parser.get_assembly_references(): print "%s,%s,AR" % (from_proj, to_proj) if to_proj == None: print "%s,None,None" % (from_proj) self.__logger.info( "Assembly %s has no references (project file %s)" % (from_proj, filename)) print list(parser.get_source_files()) except ExpatError, exc: self.__logger.error("Unparsable file %s: %s" % (filename, exc)) def parse_files(self, filenames): for filename in filenames: self.parse_file(filename) if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG) configurator = Configurator() configurator.default() vcxproj_processor = configurator.create_instance(VCXProjProcessor) if len(sys.argv) > 1: logging.info( "Processing %i files to process specified as cmdline arguments" % (len(sys.argv) - 1)) vcxproj_processor.parse_files(sys.argv[1:]) else: logging.info("Reading list of files to process from stdin") vcxproj_processor.parse_files(x.strip() for x in sys.stdin)
def run(self, argv):
    # Drive one file-repair run: parse options, resolve the transformation
    # mode, optionally scan the project recursively for input files, then
    # process them and print statistics.  Returns 0 on success, 1 on a
    # usage/processing error (shell exit-code convention).
    #
    # TODO abstract from UsedSymbolsLister + SymbolScanner + HeaderLister, instead allow to use directly the
    # correct list of files to include
    # TODO also copy all non-source files to external target directory (when working recursively)
    # Naming convention for different kinds of paths (with \\ as separator unless stated otherwise):
    #   absolute server path: *_path_server
    #   absolute local path: *_path_local
    #   path relative to the base directory (prinsmod) with / as separator: *_path_rel_to_root_unix
    #   path relative to another file (deprecated)
    #   basename: *_basename
    # (all paths should always be normalized with os.path.normpath)
    # TODO detect files with a generated include list and compare timestamps
    # PRINS specifics:
    # TODO honor #pragma pack
    # TODO honor #define INCL_STS_CLIB_*
    # TODO insert the #include list after #pragma warning
    # TODO special handling for existing .co includes - are those not actually
    #   redundant, since exactly one file includes them?
    # TODO daus.df,antablis.df,auskopip.df presumably contain too many includes (all PSM includes)
    # TODO mssql.df ... is probably a wrapper around external headers; probably needs no fixing
    parser = self.__create_option_parser()
    (options, args) = parser.parse_args(args=argv)
    # --normalize-only and --list-includes are mutually exclusive modes
    if options.normalize_only and options.list_includes:
        print >> self.__output, "Cannot use both --normalize-only and --list-includes"
        return 1
    if options.normalize_only:
        mode = FileTransformationModes.NormalizeOnly
    elif options.list_includes:
        mode = FileTransformationModes.ListIncludes
    else:
        mode = FileTransformationModes.Repair
    # explicit --source-dir wins over the configured base directory
    local_source_base_dir = options.source_dir if options.source_dir else self.__basic_config.get_local_source_base_dir(
    )
    self.__log_and_print(
        logging.INFO,
        "Using source base directory %s" % local_source_base_dir)
    if options.debug:
        BaseFileNormalizer.debug = True
        logging.getLogger().setLevel(logging.DEBUG)
    else:
        BaseFileNormalizer.debug = False
        #logging.getLogger().setLevel(logging.INFO)
    if len(args) == 0 or options.help:
        print >> self.__output, "No input paths given"
        parser.print_help(file=self.__output)
        return 0
    if options.recursive:
        # TODO filelist can be used here too
        args = CppProjectUtil.scan_project_files(
            args, local_source_base_dir, self.__cpp_file_configuration,
            self.__logger)
        self.__logger.debug("Result of file scan: %s" % (args))
    if options.target == "inplace" and options.target_dir:
        print >> self.__output, "When using target==inplace, no target directory must be given"
        return 1
    if len(args) > 0:
        # Repair mode needs a calculator for the required include files and an
        # include-list generator; the other modes run without them.
        # NOTE(review): grouping of the two else-branches reconstructed from a
        # whitespace-mangled original - verify against upstream history.
        if mode == FileTransformationModes.Repair:
            if not options.required_includes:
                required_include_files_calculator_class = Configurator(
                ).get_concrete_adapter(RequiredIncludeFilesCalculator)
            else:
                required_include_files_calculator_class = None
            include_list_generator_factory = IncludeListGenerator
        else:
            required_include_files_calculator_class = None
            include_list_generator_factory = None
        file_repair_processor = self.__file_repair_processor_factory.create_file_repair_processor(
            local_source_base_dir=local_source_base_dir,
            target=options.target,
            target_dir=options.target_dir,
            mode=mode,
            required_includes=options.required_includes,
            closure=options.closure,
            invalidate=options.invalidate,
            required_include_files_calculator_class=
            required_include_files_calculator_class,
            include_list_generator_factory=include_list_generator_factory)
        runner = FileRepairProcessorRunnerHelper(
            file_repair_processor=file_repair_processor)
        statistics = runner.process_files(args)
        if options.list_includes:
            assert hasattr(file_repair_processor, "get_include_map")
            include_map_items = file_repair_processor.get_include_map()
            for line in IncludeCollectorProcessor.format_include_map(
                    local_source_base_dir, include_map_items):
                print line
        self.__print_statistics(statistics)
        #total = reduce(lambda x, y: x+y, statistics)
        #print "Total files: %i" % total
        #print "%i successful, %i skipped, %i with error, %i with unexpected exception" % statistics
    else:
        print >> self.__output, "No valid input files specified!"
        return 1
# NOTE(review): this chunk begins mid-statement (inside a demo method building a
# small test graph) and is kept byte-identical.  It constructs demo edges,
# colors one edge red, renders the graph through the configured graphical graph
# outputter with a CAB-style module grouper, and provides the entry point that
# runs GraphOutputDemo with argv[1] (default "test") as the output file name.
AttributedEdge(from_node="BTC.CAB.X.ClassC", to_node="BTC.CAB.Y.B"), AttributedEdge(from_node="BTC.CAB.Z.ClassD", to_node="BTC.CAB.Y.B"), )) graph.lookup_edge("BTC.CAB.X.ClassA", "BTC.CAB.Y.B").set_attrs( {EdgeAttributes.COLOR: Colors.RED}) outfile = open(output_filename, "w") outputter = self.__graph_outputter_class( graph=graph, outfile=outfile, decorator_config=DecoratorSet( node_label_decorators=(MyNodeColorDecorator(), ), ), output_groups=True, node_grouper=CABStyleFinestLevelModuleGrouperInternal( modules=graph.node_names(), internal_modules=graph.node_names(), min_parts=3), description=GraphDescriptionImpl(description="test", basename=output_filename)) outputter.output_all() if __name__ == '__main__': logging.basicConfig(stream=sys.stderr, level=logging.INFO) configurator = Configurator() configurator.default() GraphOutputDemo(graph_outputter_class=configurator.get_concrete_adapter( GraphicalGraphOutputter)).run( output_filename=sys.argv[1] if len(sys.argv) > 1 else "test")
def main(show_combined_graph):
    """Check consistency between link-level and include-level module
    dependencies, write a report, and render the combined graphs.

    show_combined_graph: forwarded to the graph outputter to control whether
    the combined graph is opened/shown.
    """
    logging.basicConfig(stream=sys.stderr, level=logging.INFO)
    configurator = Configurator()
    configurator.default()
    dependency_filter_config_class = Configurator().get_concrete_adapter(
        DependencyFilterConfiguration)
    dep_filter_config = dependency_filter_config_class()
    file_to_module_map_supply = OnTheFlyHeaderExceptionOnlyFileToModuleMapSupply(
    )
    link_deps_supply = OnTheFlyModuleLinkDepsSupply(dep_filter_config)
    link_deps_graph = link_deps_supply.get_module_link_deps_graph()
    # unfiltered include graph; the filtered variant is derived below
    full_include_graph = OnTheFlyModuleIncludeDepsSupply(
        outputter_config=NullDependencyFilterConfiguration(),
        file_to_module_map_supply=file_to_module_map_supply
    ).get_module_include_deps_graph()
    module_grouper = config_module_grouper(
        modules=full_include_graph.node_names())
    include_deps_graph = DependencyFilterHelper.filter_graph(
        dependency_filter_configuration=dep_filter_config,
        graph=full_include_graph)
    ec = ConsistencyChecker(
        link_deps_graph,
        include_deps_graph,
        node_group_conf=config_node_group_conf(module_grouper))
    report_filename = os.path.join(config_basic.get_results_directory(),
                                   "IncludeDeps",
                                   "include_link_dep_consistency_report.txt")
    report_output = configurator.create_instance(
        cls=ConsistencyCheckerReportOutputter,
        file_to_module_map_supply=file_to_module_map_supply,
        module_grouper=module_grouper,
        report_filename=report_filename)
    # report: link deps missing from the include graph, and irregular ones
    report_output.print_all(
        missing_link_deps_graph=ec.get_missing_link_deps_graph(),
        irregular_link_deps_graph=ec.get_irregular_link_deps_graph())
    IncludeDepsGraphOutputter(
        module_grouper=module_grouper).output_include_deps_graph(
            ec.get_module_include_deps_graph())
    result_output = configurator.create_instance(
        cls=ConsistencyCheckerGraphOutputter,
        file_to_module_map_supply=file_to_module_map_supply,
        module_grouper=module_grouper)
    result_output.output_combined_graph(
        show_combined_graph,
        ec.get_combined_graph(),
        ec.get_overview_combined_graph(),
    )
    # optional per-group drill-down graphs, toggled via raw argv flag
    if '--focus_on_each_group' in sys.argv:
        PerGroupOutputter.output_focus_on_each_group(
            module_grouper=module_grouper,
            full_graph=full_include_graph,
            base_name=FileModuleIncludeDepsSupply(
            ).get_module_include_deps_basename(),
            base_description=IncludeDepsGraphOutputter.BASE_DESCRIPTION,
            dependency_filter_config_class=dependency_filter_config_class)
def main():
    """Parse link dependencies (optionally producing graphs) and, with
    --list_modules, additionally generate the HTML/XML module and rule
    reports, registering each in the generation log."""
    logging.basicConfig(stream=sys.stderr, level=logging.INFO)
    #logging.getLogger("commons.configurator").setLevel(logging.DEBUG)
    Configurator().default()
    usage = "usage: %prog [options]"
    parser = OptionParser(usage)
    # parser.add_option("-s", "--show", dest="show",
    #                   help="open the resulting graph output after generation")
    parser.add_option("",
                      "--no_graphs",
                      action="store_false",
                      dest="generate_graphs",
                      help="disable graph generation completely")
    parser.add_option("-g",
                      "--focus_on_each_group",
                      action="store_true",
                      dest="focus_on_each_group",
                      help="output separate graphs for each module group")
    parser.add_option("-l",
                      "--list_modules",
                      action="store_true",
                      dest="list_modules",
                      help="generate a module list")
    parser.set_defaults(show=False,
                        focus_on_each_group=False,
                        list_modules=False,
                        generate_graphs=True)
    (options, _args) = parser.parse_args()
    Configurator().create_instance(ParseLinkDependencies).process(
        show=options.show,
        focus_on_each_group=options.focus_on_each_group,
        generate_graphs=options.generate_graphs)
    # NOTE(review): block structure below reconstructed from whitespace-mangled
    # source; all report generation appears to be gated by --list_modules
    # (everything uses generation_logger, which is only bound here) - confirm.
    if options.list_modules:
        generation_logger = config_generation_log
        # TODO: could this be solved more nicely with a for loop? (translated)
        report_filename = os.path.join(config_basic.get_results_directory(),
                                       "module_list.html")
        if (HTMLModuleLister(output_file=open(report_filename, "w"),
                             analysis_info=(config_basic.get_system(),
                                            config_basic.get_version(),
                                            "Unknown"),
                             module_list_supply=config_module_list).write()):
            description = GraphDescriptionImpl(description="module list",
                                               category="report")
            generation_logger.add_generated_file(description=description,
                                                 filename=report_filename)
        report_filename = os.path.join(config_basic.get_results_directory(),
                                       "results_per_rule_list.html")
        if (HTMLRuleLister(
                output_file=open(report_filename, "w"),
                analysis_info=(
                    config_basic.get_system(),
                    config_basic.get_version(),
                    "Unknown",
                ),
                module_list_supply=config_module_list,
                output_type=HTMLRuleListerOutputType.RESULTS_PER_RULE).write()
            ):
            description = GraphDescriptionImpl(description="results per rule",
                                               category="report")
            generation_logger.add_generated_file(description=description,
                                                 filename=report_filename)
        report_filename = os.path.join(config_basic.get_results_directory(),
                                       "results_per_subject_list.html")
        if (HTMLRuleLister(output_file=open(report_filename, "w"),
                           analysis_info=(config_basic.get_system(),
                                          config_basic.get_version(),
                                          "Unknown"),
                           module_list_supply=config_module_list,
                           output_type=HTMLRuleListerOutputType.
                           RESULTS_PER_SUBJECT).write()):
            description = GraphDescriptionImpl(
                description="results per subject", category="report")
            generation_logger.add_generated_file(description=description,
                                                 filename=report_filename)
        report_filename = os.path.join(config_basic.get_results_directory(),
                                       "rule_overview.html")
        if (HTMLRuleLister(
                output_file=open(report_filename, "w"),
                analysis_info=(config_basic.get_system(),
                               config_basic.get_version(), "Unknown"),
                module_list_supply=config_module_list,
                output_type=HTMLRuleListerOutputType.RULE_OVERVIEW).write()):
            description = GraphDescriptionImpl(description="rules",
                                               category="overview")
            generation_logger.add_generated_file(description=description,
                                                 filename=report_filename)
        report_filename = os.path.join(config_basic.get_results_directory(),
                                       "checkstyle_result.xml")
        if (XMLRuleLister(
                output_file=open(report_filename, "w"),
                module_list_supply=config_module_list,
                output_type=XMLRuleListerOutputType.CHECKSTYLE_XML).write()):
            description = GraphDescriptionImpl(
                description="results per subject", category="report")
            generation_logger.add_generated_file(description=description,
                                                 filename=report_filename)
        report_filename = os.path.join(config_basic.get_results_directory(),
                                       "revengtools_result.xml")
        if (XMLRuleLister(
                output_file=open(report_filename, "w"),
                module_list_supply=config_module_list,
                output_type=XMLRuleListerOutputType.REVENGTOOLS_XML).write()):
            description = GraphDescriptionImpl(
                description="results per subject", category="report")
            generation_logger.add_generated_file(description=description,
                                                 filename=report_filename)
        # the schema file is only registered, never (re)generated here
        report_filename = os.path.join(config_basic.get_results_directory(),
                                       "revengtools.xsd")
        description = GraphDescriptionImpl(description="results per subject",
                                           category="overview")
        generation_logger.add_generated_file(description=description,
                                             filename=report_filename)
def main():
    """Run the configured include-dependency generator."""
    logging.basicConfig(stream=sys.stderr, level=logging.INFO)
    #pydevd.settrace()
    Configurator().default()
    generator = config_include_deps_generator()
    generator.generate()
def main():
    """Link all headers known to the include-links header list supply."""
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    Configurator().default()
    headers = IncludeLinksHeaderListSupply().get_header_list()
    linker = config_header_linker(outputter=ProcessableOutput(separate_inc=0))
    linker.link_all_headers(headers)
# NOTE(review): this chunk begins mid-method (the CSProj counterpart of the
# VCXProj chunk above) and is kept byte-identical; in its mangled single-line
# form the leading '#' comments the whole physical line.  Originally it printed
# "from,to,PR"/"from,to,AR" CSV rows for project and assembly references,
# logged assemblies without references and unparsable files, defined
# parse_files(), and provided the entry point reading file names from argv or
# stdin into a CSProjProcessor.
#if not checker.has_errors(): from_proj = parser.get_assembly_name() to_proj = None for to_proj in IndividualModuleResolver().calc_resolved_modules(from_proj, parser.get_project_references()): print "%s,%s,PR" % (from_proj, to_proj) for to_proj in parser.get_assembly_references(): print "%s,%s,AR" % (from_proj, to_proj) if to_proj == None: print "%s,None,None" % (from_proj) self.__logger.info("Assembly %s has no references (project file %s)" % (from_proj, filename)) print list(parser.get_source_files()) except ExpatError, exc: self.__logger.error("Unparsable file %s: %s" % (filename, exc)) def parse_files(self, filenames): for filename in filenames: self.parse_file(filename) if __name__ == "__main__": logging.basicConfig(level=logging.DEBUG) configurator = Configurator() configurator.default() cs_proj_processor = configurator.create_instance(CSProjProcessor) if len(sys.argv) > 1: logging.info("Processing %i files to process specified as cmdline arguments" % (len(sys.argv)-1)) cs_proj_processor.parse_files(sys.argv[1:]) else: logging.info("Reading list of files to process from stdin") cs_proj_processor.parse_files(x.strip() for x in sys.stdin)
# -*- coding: UTF-8 -*- from commons.configurator import Configurator from configuration.revengtools_config import RevEngToolsConfigParser import os.path import logging.config # configure logging logging.config.fileConfig(os.path.join(RevEngToolsConfigParser().revengtools_basepath(), "configuration", "logging.conf")) # TODO das sollte noch etwas ausgeklügelter funktionieren, mit Sub-Technologien # (z.B. cpp.msvc vs. cpp.cmake u.ä.) bzw. Kombinationen von Technologien (cast+cpp vs. cdep+cpp) # TODO Sicherstellen, dass spätere autowire-Konfigurationsdateien frühere Mappings überschreiben config_parser = RevEngToolsConfigParser() # technology-specific config Configurator().get_autowire_config_finder().add_autowire_config_in("autowire.config.%s" % (RevEngToolsConfigParser().get("LANGUAGE"),)) # system-specific config Configurator().get_autowire_config_finder().add_autowire_config_in("autowire.config.%s" % (RevEngToolsConfigParser().get("SYSTEM"),)) if config_parser.has("FLAVORS"): flavors = config_parser.get("FLAVORS").split(",") Configurator().get_autowire_config_finder().add_flavors(flavors)
def main():
    """Compute file-level component dependency metrics (CCD/nCCD/ACD) for the
    modules named in argv[1]; argv[2] may hold options (graph,external,merge).
    """
    logging.basicConfig(stream=sys.stderr, level=logging.WARNING)
    Configurator().default()
    # BUGFIX: was "len(sys.argv) < 1", which is never true (argv[0] always
    # exists), so calling without arguments crashed with IndexError below
    # instead of printing the usage text.
    if len(sys.argv) < 2:
        print(
            "usage: %s <comma-separated list of module names> <comma-separated list of options>"
            % (sys.argv[0]))
        print("valid options:")
        print(
            " graph: Output a graph (otherwise, only dependency metrics are calculated)"
        )
        print(" external: Include include dependency across module bounds")
        print(" merge: Merge cycles into one node")
        sys.exit(1)
    modulenames = set(sys.argv[1].split(","))
    # TODO !!! hard-coded exception pairs for one known problematic module
    exceptions = set()
    if "BtcCommonsCore" in modulenames:
        exceptions = set((("stringnumconvint", "hashmap"), ))
    output_graph = False
    ignore_external = True
    merge_sccs = False
    if len(sys.argv) > 2:
        options = set(sys.argv[2].split(","))
        output_graph = 'graph' in options
        ignore_external = 'external' not in options
        merge_sccs = 'merge' in options
    file_to_module_map = config_file_to_module_map_supply.generate_file_to_module_map(
    )
    file_name_mapper = FileNameMapper(file_to_module_map, modulenames)
    generator = FileLevelPseudoModuleDependencyGenerator(
        file_to_module_map=file_to_module_map)
    merged_graph = generator.get_file_level_pseudomodule_graph(
        modulenames=modulenames,
        mapper=file_name_mapper,
        ignore_external=ignore_external,
        exceptions=exceptions)
    logging.debug(merged_graph)
    cdm = ComponentDependencyMetric(merged_graph)
    print("Internal file-level CCD of %s is %i, nCCD=%f, ACD=%f" %
          (", ".join(modulenames), cdm.calculate_ccd(), cdm.calculate_nccd(),
           cdm.calculate_acd()))
    if merge_sccs:
        merger = SCCMerger(merged_graph)
        merged_graph = merger.get_scc_merged_graph()
        # BUGFIX: get_sccs_iter() returns an iterator, which is always truthy;
        # materialize it so the "SCCs:" header only appears when SCCs exist.
        sccs = list(merger.get_sccs_iter())
        if sccs:
            print("SCCs:")
            for (i, scc) in enumerate(sccs):
                print(" SCC %i: %s" % (i, ", ".join(scc)))
            print()
    if output_graph:
        node_grouper = config_node_grouper_class()
        node_grouper.configure_nodes(merged_graph.node_names_iter())
        basename = os.path.join(
            config_basic.get_results_directory(),
            'include_links_for_module-%s' % ("-".join(modulenames), ))
        # node size = aggregated size of the files behind each pseudomodule
        size_fun = PseudoModuleSizer(
            file_name_mapper,
            path_resolver=FixedBaseDirPathResolver(
                config_basic.get_local_source_base_dir())).module_size
        decorator_config = DecoratorSet(
            node_label_decorators=(
                NodeSizeLabelDecorator(size_func=size_fun),
                NodeGroupDecorator(),
                NodeSizeScalingDecorator(min_render_size=1.5,
                                         max_render_size=8,
                                         size_func=size_fun),
            ),
            edge_label_decorators=(SCCEdgeDecorator(), ),
            node_tooltip_decorators=(),
            edge_tooltip_decorators=())
        description = GraphDescriptionImpl(
            description="file-level pseudomodule include dependencies",
            basename=basename,
            extra="(%s->*)" % (", ".join(modulenames), ))
        DependencyFilterOutputter(decorator_config).output(
            filter=None,
            graph=merged_graph,
            module_group_conf=DefaultNodeGroupingConfiguration(
                node_grouper=node_grouper),
            description=description)
def main():
    """Run the module lister with the raw command line."""
    logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
    Configurator().default()
    runner = ListModulesRunner(module_list_supply=config_module_list_supply)
    runner.process(sys.argv)