def worker(input_tree, dep_info, build_info_file, output_dep_file,
           mongo_path, timer=False, client_build=False):
    """Parse the scons tree output, ingest libdeps info, build the dependency
    graph, attach version/build metadata, and export it."""
    with Timer('parsing', timer):
        results = parse_tree(input_tree, mongo_path)
    with Timer('importing dep info', timer):
        ingest_deps(dep_info, results)
    with Timer('generating graph', timer):
        g = generate_edges(results, client_build)
    with Timer('adding version and build info', timer):
        version_and_build_info = {}
        version_and_build_info['version_info'] = get_version_info(mongo_path)
        with open(build_info_file) as build_info:
            version_and_build_info['build_info'] = json.loads(build_info.read())
        g.set_extra_info(version_and_build_info)
    with Timer('writing output file', timer):
        g.export(output_dep_file)
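# Every stage above is wrapped in a Timer context manager. Timer itself is not
# shown in this section; the sketch below is an assumption about how such a
# helper could look (the name and two-argument signature come from the call
# sites, the body is hypothetical): it prints the elapsed time for the named
# stage only when the enabled flag is truthy.
import time
from contextlib import contextmanager


@contextmanager
def _timer_sketch(name, enabled=True):
    start = time.time()
    try:
        yield
    finally:
        if enabled:
            print('[timer]: {0} took {1:.2f}s'.format(name, time.time() - start))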
def export(self, fn='.depgraph.json'):
    data_dir = os.path.dirname(fn)
    with Timer('constructing object for export', self.timer):
        o = self.fetch()
    with ThreadPool() as p:
        results = []
        for k, v in o.items():
            if k == 'main':
                part_fn = fn
            else:
                part_fn = os.path.join(data_dir, '.'.join([k, 'json']))
            with Timer('writing file name ' + part_fn, self.timer):
                results.append(p.apply_async(dump_to_file, args=[part_fn, v]))
        # Wait for every writer to finish before the pool is torn down on exit.
        for r in results:
            r.get()
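# export() hands each graph part to dump_to_file on a worker thread. That
# helper is not shown here; the sketch below is an assumption about its shape
# (the name comes from the call site, the body is hypothetical): it serializes
# one part of the fetched object to JSON at the given path.
import json


def _dump_to_file_sketch(part_fn, data):
    with open(part_fn, 'w') as out:
        out.write(json.dumps(data))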
def get_required_libs(args):
    if args.g is None:
        g = get_graph(args)
    else:
        g = args.g
    with Timer('get libs needed query', args.timers):
        render(find_libraries_needed_multi(g, args.names))
def get_circle_deps(args):
    if args.g is None:
        g = get_graph(args)
    else:
        g = args.g
    with Timer('get circular deps query', args.timers):
        render(find_libraries_needed_full(g, args.names))
def get_file_family_tree(args):
    if args.g is None:
        g = get_graph(args)
    else:
        g = args.g
    with Timer('get file family tree query', args.timers):
        render(file_family_tree(g, args.name, args.depth))
def return_unneeded_archive_dependencies():
    archive = request.args.get('archive')
    if archive is not None:
        archive = archive.split(',')
    with Timer('render all extra archives', True):
        unneeded_deps = find_all_extra_archives(app.g)
        return render(unneeded_deps.narrow(archive).render())
def resolve_leak_info(g, names, depth, timers, source_names):
    with Timer('generating direct leak list', timers):
        direct_leaks = find_direct_leaks(g, names, source_names)
    for leak in direct_leaks:
        del leak['parents']
        del leak['type']
        leak['sources'] = symbol_family_tree(g, leak['symbol'], depth)
    return direct_leaks
def get_leaks(args):
    if args.g is None:
        g = get_graph(args)
    else:
        g = args.g
    with Timer('leaks tree query', args.timers):
        render(
            resolve_leak_info(g, args.names, args.depth, args.timers,
                              args.source_names))
def get_symbol_family_tree(args):
    if args.g is None:
        g = get_graph(args)
    else:
        g = args.g
    if len(args.name) > 1:
        raise Exception('Currently only one symbol is allowed')
    with Timer('get symbol family tree query', args.timers):
        render(symbol_family_tree(g, args.name[0], args.depth))
def get_unneeded_libdeps(args):
    g = get_graph(args)
    ct = 0
    for filename in g.files:
        if filename.endswith(".a"):
            ct += 1
    print('[wil]: total number of archives: ' + str(ct))
    with Timer('find unneeded libdeps', args.timers):
        render(list(find_extra_archives(g, args.name)))
def get_file_graph(args):
    g = get_graph(args)
    with Timer('get file graph query', args.timers):
        render(generate_file_graph(g))
def export_temporary_test_graph(graph):
    if not os.path.exists(temp_data_dir):
        os.mkdir(temp_data_dir)
    with Timer('exporting graph {0}'.format(temp_data_file), True):
        graph.export(temp_data_file)
def start_app(args):
    with Timer('importing data into global app state'):
        app.g = MultiGraph(timers=False).load(args.data)
    app.debug = True
    app.run()
def get_interface(args):
    g = get_graph(args)
    with Timer('find interface query', args.timers):
        render(find_interface(g, args.names))
def get_test_graph():
    with Timer('loading graph {0}'.format(data_dir), True):
        g = MultiGraph(timers=True).load(data_dir)
    return g
def data_collector(args):
    """Run a patched SCons build and collect dependency data into args.data."""
    # Output of scons dependency tree
    tree_output = os.path.join(args.data, 'dependency_tree.txt')
    # Output from our libdeps patch
    libdeps_output = os.path.join(args.data, 'deps.json')
    # Output for our scons build flags
    build_info = os.path.join(args.data, 'build_info.json')

    # TODO: Just remove the whole data directory
    for fn in [
            libdeps_output, build_info,
            os.path.join(args.data, 'dep_graph.json'), tree_output
    ]:
        if os.path.exists(fn):
            os.remove(fn)
    print('[wil]: cleaned up previous artifacts.')

    command('git checkout SConstruct', cwd=args.mongo)
    command('git checkout site_scons/libdeps.py', cwd=args.mongo)
    print('[wil]: checked out clean SCons files.')

    patch_path = os.path.join(args.cwd, 'assets', 'print_scons_libdeps.patch')
    command('git apply {0}'.format(patch_path), cwd=args.mongo)
    print('[wil]: applied patch to SCons file.')

    print('[wil]: running SCons all build.')
    with Timer('running scons', args.timers):
        tree = command('scons {0} --tree=all,prune all'.format(' '.join(args.scons)),
                       cwd=args.mongo,
                       capture=True)

    command('git checkout SConstruct', cwd=args.mongo)
    command('git checkout site_scons/libdeps.py', cwd=args.mongo)
    print('[wil]: checked out clean SCons files.')

    print('[wil]: gathering dependency information from SCons output.')
    if not os.path.exists(args.data):
        os.mkdir(args.data)

    with open(tree_output, 'w') as f:
        with Timer('writing data to ' + tree_output, args.timers):
            f.write(tree['out'])

    ct = 0
    tree_data = tree['out'].split('\n')
    dep_object_regex = re.compile("^({.*}).*")
    with open(libdeps_output, 'w') as f:
        with Timer('filtering out dep info from scons output', args.timers):
            for ln in tree_data:
                m = dep_object_regex.search(ln)
                if m is not None:
                    ct += 1
                    f.write(m.group(1))
                    f.write('\n')

    with open(build_info, 'w') as f:
        with Timer('writing data to ' + build_info, args.timers):
            f.write(
                json.dumps({
                    'flags': args.scons,
                    'platform': util.get_platform()
                }))

    print('[wil]: collected {0} dependencies.'.format(ct))
    print('[wil]: data collection complete!')
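# Illustration of the filtering step in data_collector: dep_object_regex keeps
# only the leading {...} JSON object from lines that the libdeps patch injects
# into the `scons --tree` output. The sample line below is made up for this
# sketch; the real format is whatever print_scons_libdeps.patch emits.
import re

_sample_line = '{"node": "build/foo.o", "deps": ["build/libbar.a"]} | +-build/foo.o'
_m = re.compile("^({.*}).*").search(_sample_line)
assert _m is not None
print(_m.group(1))  # prints just the leading JSON object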
def get_graph(args):
    with Timer('loading graph {0}'.format(args.data), args.timers):
        g = MultiGraph(timers=args.timers).load(args.data)
    return g
def load_temporary_test_graph():
    with Timer('loading graph {0}'.format(temp_data_file), True):
        g = MultiGraph(timers=True).load(temp_data_file)
    return g
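# A minimal usage sketch for the temporary test-graph helpers above, assuming
# export_temporary_test_graph and load_temporary_test_graph are importable
# together and that MultiGraph round-trips through export()/load(); the helper
# name below is hypothetical and the body is illustrative only.
def _round_trip_sketch(graph):
    export_temporary_test_graph(graph)  # write the graph to temp_data_file
    return load_temporary_test_graph()  # read it back as a fresh MultiGraph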