def get_paths(g, archive_target, archive_sources):
    """Yield every dependency path from archive_sources to archive_target.

    Breadth-first walk over the dependency graph ``g``.  ``queue`` holds
    plain archive names on the first level and ``(parents, name)`` tuples
    (built by ``add_path_info``) on deeper levels, and it grows while the
    loop below iterates over it.  Whenever the walk reaches the (single)
    expanded target archive a result dict is yielded:

        {'target': ..., 'source': ..., 'parents': [path taken]}

    If ``archive_target`` does not expand to exactly one file, an error is
    printed and the generator terminates without yielding anything.
    """
    queue = expand_file_names(g, archive_sources)
    # Every node ever enqueued; prevents revisiting a node (cycle guard).
    traversed = []
    traversed.extend(queue)

    full_archive_targets = expand_file_names(g, archive_target)
    if len(full_archive_targets) != 1:
        print("Error: did not get exactly one valid archive target")
        print(archive_target)
        print(full_archive_targets)
        # PEP 479: 'raise StopIteration' inside a generator becomes a
        # RuntimeError on Python 3.7+; a bare return ends it cleanly.
        return
    full_archive_target = full_archive_targets[0]

    # Nodes remaining on the current BFS level / queued for the next level.
    current_level_children = len(queue)
    next_level_children = 0
    parents = None
    for parent_archive in queue:  # queue grows during iteration (BFS)
        if isinstance(parent_archive, tuple):
            # Deeper-level entry: (path-so-far, archive name).
            parents = parent_archive[0]
            parent_archive = parent_archive[1]
        elif parents is None:
            parents = [parent_archive]

        # Reached the end of a path: yield it and keep exploring.
        if parent_archive == full_archive_target:
            yield {
                'target': full_archive_target,
                'source': parent_archive,
                'parents': parents
            }

        def add_path_info(item):
            # Tag each child with a copy of the path taken to reach it.
            # NOTE(review): identity test ('is not'), not equality — assumes
            # the same string object flows through; confirm before changing.
            if parent_archive is not parents[-1]:
                parents.append(parent_archive)
            return list(parents), item

        next_level_libs_needed = find_libraries_needed(g, parent_archive)
        # Drop anything already seen.
        next_level_libs_needed = [
            lib for lib in next_level_libs_needed if lib not in traversed
        ]
        traversed.extend(next_level_libs_needed)

        next_level_nodes = map(add_path_info, next_level_libs_needed)
        # Measure how many nodes extend() actually added.
        next_level_nodes_count = len(queue)
        queue.extend(next_level_nodes)
        next_level_nodes_count = len(queue) - next_level_nodes_count
        next_level_children += next_level_nodes_count

        current_level_children -= 1
        if current_level_children == 0:
            # Current BFS level exhausted; promote the next level's count.
            current_level_children = next_level_children
            next_level_children = 0
def get_paths(g, archive_target, archive_sources):
    # Yield every dependency path from the expanded archive_sources to the
    # single archive named by archive_target, as dicts of the form
    # {'target': ..., 'source': ..., 'parents': [path taken]}.
    #
    # Breadth-first search: `queue` holds plain archive names on the first
    # level and (parents, name) tuples (built by add_path_info) on deeper
    # levels, and it GROWS while the for-loop below iterates over it.
    queue = expand_file_names(g, archive_sources)
    # Every node ever enqueued; prevents revisiting a node (cycle guard).
    traversed = []
    traversed.extend(queue)
    full_archive_targets = expand_file_names(g, archive_target)
    if len(full_archive_targets) != 1:
        print "Error: did not get exactly one valid archive target"
        print archive_target
        print full_archive_targets
        # NOTE(review): Python 2 idiom for ending a generator early; under
        # PEP 479 (Python 3.7+) this becomes RuntimeError — should be
        # a bare `return`.
        raise StopIteration
    full_archive_target = full_archive_targets[0]
    # Nodes remaining on the current BFS level / queued for the next level.
    current_level_children = len(queue)
    next_level_children = 0
    parents = None
    for parent_archive in queue:  # queue grows during iteration (BFS)
        if isinstance(parent_archive, tuple):
            # Deeper-level entry: (path-so-far, archive name).
            parents = parent_archive[0]
            parent_archive = parent_archive[1]
        else:
            if parents is None:
                parents = [ parent_archive ]
        # Check to see if we've reached the end of our path. If so, return an object and continue
        # iteration
        if parent_archive == full_archive_target:
            yield { 'target' : full_archive_target, 'source' : parent_archive, 'parents': parents }

        def add_path_info(item):
            # Tag each child with a copy of the path taken to reach it.
            # NOTE(review): identity test ('is not'), not equality — assumes
            # the same string object flows through; confirm before changing.
            if parent_archive is not parents[-1]:
                parents.append(parent_archive)
            return list(parents), item

        next_level_libs_needed = find_libraries_needed(g, parent_archive)
        # Drop anything already seen.
        next_level_libs_needed = [ lib for lib in next_level_libs_needed if lib not in traversed ]
        traversed.extend(next_level_libs_needed)
        next_level_nodes = map(add_path_info, next_level_libs_needed)
        # Measure how many nodes extend() actually added.
        next_level_nodes_count = len(queue)
        queue.extend(next_level_nodes)
        next_level_nodes_count = len(queue) - next_level_nodes_count
        next_level_children += next_level_nodes_count
        current_level_children -= 1
        if current_level_children == 0:
            # Current BFS level exhausted; promote the next level's count.
            current_level_children = next_level_children
            next_level_children = 0
def get_relationship_node(args):
    """Render the graph nodes related to args.name via args.relationship.

    Relationship descriptors come from get_relationship_types(): index 0 is
    the graph edge name, index 1 says whether the lookup key is a 'symbol'
    or a file.  The name is expanded accordingly, each expanded name is
    mapped to its related nodes, and the mapping is rendered.

    On any KeyError (unknown relationship or missing graph entry) a
    '[wil]: there is no ...' message is printed instead.
    """
    g = get_graph(args)
    try:
        rel = args.relationship
        if rel is None:
            raise Exception('invalid relationship type.')
        # Hoisted: look the descriptor up once instead of once per name.
        # A KeyError here (unknown relationship) is still caught below.
        descriptor = get_relationship_types()[rel]
        edge_name = descriptor[0]
        if descriptor[1] == 'symbol':
            full_names = expand_symbol_names(g, args.name)
        else:
            full_names = expand_file_names(g, args.name)
        result = {
            full_name: g.get(edge_name, full_name)
            for full_name in full_names
        }
        render(result)
    except KeyError:
        print('[wil]: there is no {0} named {1}'.format(args.thing, args.name))
def get_executable_list(graph, file_name):
    """Return the known executables that (transitively) contain file_name.

    Expands file_name, then walks *up* the dependency graph through
    get_parent_objects(), collecting every ancestor whose name matches one
    of the hard-coded executable names.  Returns the matches as a list
    (unordered, duplicates removed).
    """
    file_names = expand_file_names(graph, file_name)
    # Guard against enqueueing the same ancestor twice.  Seeded with the
    # starting names so an initial entry cannot be re-appended as a parent
    # (the original left these out and could process them twice; the
    # result set is unchanged).
    checked_file_names = set(file_names)

    # TODO: don't hard code these executable lists. Instead, use something like this:
    #     wil.py list files "" | grep -v "\."
    # This is getting the list of actual things that are built that have no file extension
    supported_binaries = ["mongod", "mongos"]
    tool_binaries = [
        "mongotop", "mongodump", "mongoexport", "mongoimport", "mongobridge",
        "mongoperf", "bsondump", "mongofiles", "mongosniff", "mongorestore",
        "mongostat", "mongooplog"
    ]
    dbtests = ["test", "perftest"]
    client_examples = [
        "firstExample", "rsExample", "authTest", "httpClientTest",
        "tutorial", "clientTest", "whereExample", "secondExample"
    ]
    # frozenset: O(1) membership per node instead of scanning a list.
    known_executables = frozenset(
        supported_binaries + tool_binaries + dbtests + client_examples)

    result_binaries = set()
    for current_file_name in file_names:  # grows as parents are appended
        if current_file_name in known_executables:
            result_binaries.add(current_file_name)
        # Add the parents to the list we are iterating
        for parent in get_parent_objects(graph, current_file_name):
            if parent not in checked_file_names:
                file_names.append(parent)
                checked_file_names.add(parent)
    return list(result_binaries)
def get_executable_list(graph, file_name):
    """Return the known executables that (transitively) contain file_name.

    Expands file_name and walks upward through parent objects, collecting
    any ancestor whose name is one of the hard-coded executable names.
    """
    file_names = expand_file_names(graph, file_name)
    checked_file_names = set()

    # TODO: don't hard code these executable lists. Instead, use something like this:
    #     wil.py list files "" | grep -v "\."
    # This is getting the list of actual things that are built that have no file extension
    supported_binaries = [
        "mongod", "mongos",
        # tools
        "mongotop", "mongodump", "mongoexport", "mongoimport", "mongobridge",
        "mongoperf", "bsondump", "mongofiles", "mongosniff", "mongorestore",
        "mongostat", "mongooplog",
        # dbtests
        "test", "perftest",
        # client examples
        "firstExample", "rsExample", "authTest", "httpClientTest",
        "tutorial", "clientTest", "whereExample", "secondExample",
    ]

    result_binaries = set()
    for candidate in file_names:  # grows while iterating: parents appended
        if candidate in supported_binaries:
            result_binaries.add(candidate)
        # Add the parents to the list we are iterating
        for parent in get_parent_objects(graph, candidate):
            if parent not in checked_file_names:
                file_names.append(parent)
                checked_file_names.add(parent)
    return list(result_binaries)
def test_get_full_filenames(self):
    """expand_file_names resolves 'memcmp.o' to the expected full names."""
    actual = expand_file_names(self.graph, "memcmp.o")
    expected = [
        "provides_strlen_needs_memcmp.o",
        "provides_malloc_needs_memcmp.o",
        "provides_free_needs_memcmp.o",
        "provides_memcmp.o",
    ]
    # dict_compare reports details through self.fail on mismatch.
    self.assertTrue(dict_compare(expected, actual, reporter=self.fail))
def test_get_full_filenames(self):
    # expand_file_names should resolve the short name "memcmp.o" to the
    # full object-file names present in the test graph.
    expected = [
        "provides_strlen_needs_memcmp.o",
        "provides_malloc_needs_memcmp.o",
        "provides_free_needs_memcmp.o",
        "provides_memcmp.o",
    ]
    # dict_compare reports mismatch details through self.fail.
    self.assertTrue(
        dict_compare(expected, expand_file_names(self.graph, "memcmp.o"),
                     reporter=self.fail))
def find_libraries_needed_full(graph, archive_names):
    """Return dependency paths from archive_names to the archives they need.

    Resolves the symbols the given archives depend on to defining object
    files, maps those object files to the archives containing them, then
    uses get_paths() to produce the path records from each expanded archive
    name to that set of needed archives.
    """
    # Get all symbols needed by this archive
    symbols_needed = get_symbol_info(graph, archive_names, search_depth=1,
                                     symbol_type='dependency')

    # GOAL: Get list of archives needed
    # STEP 1: Get object files for each symbol
    # Basic object files needed
    objects_needed = []
    # Object files that we need that have some problem (multiple
    # definitions).
    # NOTE(review): collected but never returned or read — kept for parity
    # with the original; consider surfacing it to callers or dropping it.
    bad_objects_needed = []
    for symbol_needed in symbols_needed:
        symbol_locations = graph.get('symbol_to_file_sources',
                                     symbol_needed['symbol'])
        # Ignore client_build copies of the sources.
        symbol_locations = [
            loc for loc in symbol_locations if "client_build" not in loc
        ]
        # If this symbol was defined in more than one place, record the
        # objects as a dict with a description.
        if len(symbol_locations) > 1:
            bad_objects_needed.append({
                "objects": symbol_locations,
                "multiple_definitions": True
            })
        elif symbol_locations:
            objects_needed.append(symbol_locations[0])

    # STEP 2: Get archives containing each object file
    archives_needed = []
    for object_needed in objects_needed:
        archives_needed.extend(graph.get('dependency_to_targets',
                                         object_needed))

    results = []
    for archive_name in expand_file_names(graph, archive_names):
        results.extend(
            get_paths(graph, archive_name, list(set(archives_needed))))
    return results
def find_extra_archives(graph, archive_name):
    """Yield, per expanded archive, its dependencies flagged as 'extra'.

    For each full name archive_name expands to, yields
    {'archive': full_name, 'extras': [...]}.

    NOTE(review): the flag condition does not depend on the individual
    dependency — it compares the symbols *defined* for archive_name against
    the symbols *needed* by the expanded archive, so either every dependency
    is reported as extra or none is.  That looks suspicious (per-dependency
    symbol analysis was probably intended); behavior is preserved here, but
    confirm the original intent.
    """
    symbols_defined = {
        s['symbol']
        for s in get_symbol_info(
            graph, [archive_name], search_depth=1, symbol_type='definition')
    }
    # Loop over the full names of this file.
    for full_archive_name in expand_file_names(graph, archive_name):
        # All symbols needed by this archive.
        symbols_needed = {
            s['symbol']
            for s in get_symbol_info(graph, [full_archive_name],
                                     search_depth=1,
                                     symbol_type='dependency')
        }
        dependencies = list(
            graph.get('target_to_dependencies', full_archive_name))
        # Hoisted: the subset test is loop-invariant (see NOTE above), so
        # evaluate it once instead of once per dependency.
        if not symbols_defined.issubset(symbols_needed):
            extra_archives = dependencies
        else:
            extra_archives = []
        yield {'archive': full_archive_name, 'extras': extra_archives}
def find_libraries_needed_full(graph, archive_names):
    # Return dependency paths from archive_names to the archives they need:
    # resolve needed symbols to defining object files, map the objects to
    # their archives, then run get_paths() from each expanded archive name
    # to that archive set.

    # Get all symbols needed by this archive
    symbols_needed = get_symbol_info(graph, archive_names, search_depth=1, symbol_type='dependency')
    # GOAL: Get list of archives needed
    # STEP1: Get object files for each symbol
    # Basic object files needed
    objects_needed = []
    # Object files that we need that has some problem (in this case multiple definitions)
    # NOTE(review): collected but never returned or read — dead data.
    bad_objects_needed = []
    for symbol_needed in symbols_needed:
        symbol_locations = graph.get('symbol_to_file_sources', symbol_needed['symbol'])
        # Ignore client_build copies of the sources.
        symbol_locations = [ symbol_location for symbol_location in symbol_locations if symbol_location.find("client_build") == -1 ]
        # If this symbol was defined in more than one place, add the objects as a dict with a
        # description
        if len(symbol_locations) > 1:
            bad_objects_needed.append({ "objects" : symbol_locations, "multiple_definitions" : True })
        elif len(symbol_locations) != 0:
            objects_needed.append(symbol_locations[0])
    # STEP2: Get archives containing each object file
    archives_needed = []
    for object_needed in objects_needed:
        archives_needed.extend(graph.get('dependency_to_targets', object_needed))
    results = []
    for archive_name in expand_file_names(graph, archive_names):
        results.extend(get_paths(graph, archive_name, list(set(archives_needed))))
    return results
def get_relationship_node(args):
    """Render the graph nodes related to args.name via args.relationship.

    Relationship descriptors come from get_relationship_types(): index 0 is
    the graph edge name, index 1 says whether the lookup key is a 'symbol'
    or a file.  On any KeyError a '[wil]: there is no ...' message is
    printed instead of rendering.
    """
    g = get_graph(args)
    try:
        name = args.name
        rel = args.relationship
        if rel is None:
            raise Exception('invalid relationship type.')
        if get_relationship_types()[rel][1] == 'symbol':
            result = {
                full_symbol_name:
                g.get(get_relationship_types()[rel][0], full_symbol_name)
                for full_symbol_name in expand_symbol_names(g, args.name)
            }
        else:
            result = {
                full_file_name:
                g.get(get_relationship_types()[rel][0], full_file_name)
                for full_file_name in expand_file_names(g, args.name)
            }
        render(result)
    except KeyError:
        print('[wil]: there is no {0} named {1}'.format(args.thing, args.name))
def find_extra_archives(graph, archive_name):
    """Yield {'archive': name, 'extras': [...]} for each expanded archive.

    Compares the symbols defined for archive_name against the symbols
    needed by each of its full names; dependencies are reported as extras
    when the defined set is not a subset of the needed set.
    """
    defined = set()
    for info in get_symbol_info(graph, [archive_name], search_depth=1,
                                symbol_type='definition'):
        defined.add(info['symbol'])

    # Loop over the full names of this file.
    for full_name in expand_file_names(graph, archive_name):
        # Gather every symbol this archive needs.
        needed = set()
        for info in get_symbol_info(graph, [full_name], search_depth=1,
                                    symbol_type='dependency'):
            needed.add(info['symbol'])

        extras = list()
        for dependency in graph.get('target_to_dependencies', full_name):
            if not defined.issubset(needed):
                extras.append(dependency)
        yield {'archive': full_name, 'extras': extras}
def get_symbol_info(g, build_object_names, search_depth=None, symbol_type='dependency'):
    """Yield symbol records for build objects reachable from build_object_names.

    Breadth-first walk through 'target_to_dependencies' edges.  symbol_type
    selects 'dependency' (symbols needed) or 'definition' (symbols
    provided); search_depth, when given, limits how many BFS levels are
    visited.  Each yielded dict has 'symbol', 'type', 'object' and
    'parents' keys, plus 'archive' when the symbols come from an archive
    member.
    """
    queue = expand_file_names(g, build_object_names)
    # Nodes remaining on the current BFS level / queued for the next level.
    current_level_children = len(queue)
    next_level_children = 0
    parents = None
    for full_build_object_name in queue:  # queue grows during iteration
        if isinstance(full_build_object_name, tuple):
            # Deeper-level entry: (path-so-far, node name).
            parents = full_build_object_name[0]
            full_build_object_name = full_build_object_name[1]
        elif parents is None:
            parents = [full_build_object_name]

        if detect_type(full_build_object_name) == "object":
            # Plain object file: report its symbols directly.
            if symbol_type == "dependency":
                for symbol_needed in g.get('file_to_symbol_dependencies',
                                           full_build_object_name):
                    yield {
                        'symbol': symbol_needed,
                        'type': 'dependency',
                        'object': full_build_object_name,
                        'parents': parents
                    }
            elif symbol_type == "definition":
                for symbol_defined in g.get('file_to_symbol_definitions',
                                            full_build_object_name):
                    yield {
                        'symbol': symbol_defined,
                        'type': 'definition',
                        'object': full_build_object_name,
                        'parents': parents
                    }
        else:
            # Archive: report the symbols of each member object file.
            for object_file in g.get('archives_to_components',
                                     full_build_object_name):
                if symbol_type == "dependency":
                    for symbol_needed in g.get('file_to_symbol_dependencies',
                                               object_file):
                        yield {
                            'symbol': symbol_needed,
                            'type': 'dependency',
                            'object': object_file,
                            'archive': full_build_object_name,
                            'parents': parents
                        }
                elif symbol_type == "definition":
                    for symbol_defined in g.get('file_to_symbol_definitions',
                                                object_file):
                        yield {
                            'symbol': symbol_defined,
                            'type': 'definition',
                            'object': object_file,
                            'archive': full_build_object_name,
                            'parents': parents
                        }

        def add_path_info(item):
            # Tag each child with a copy of the path taken to reach it.
            # NOTE(review): identity test ('is not'), not equality — assumes
            # the same string object flows through; confirm before changing.
            if full_build_object_name is not parents[-1]:
                parents.append(full_build_object_name)
            return list(parents), item

        next_level_nodes = map(
            add_path_info,
            g.get('target_to_dependencies', full_build_object_name))
        # Measure how many nodes extend() actually added.
        next_level_nodes_count = len(queue)
        queue.extend(next_level_nodes)
        next_level_nodes_count = len(queue) - next_level_nodes_count
        next_level_children += next_level_nodes_count

        current_level_children -= 1
        if current_level_children == 0:
            # Current BFS level exhausted; apply the depth limit, then
            # promote the next level's count.
            if search_depth is not None:
                search_depth -= 1
                if search_depth == 0:
                    # PEP 479: 'raise StopIteration' inside a generator is a
                    # RuntimeError on Python 3.7+; return ends it cleanly.
                    return
            current_level_children = next_level_children
            next_level_children = 0
def file_family_tree(g, file_name, depth=None):
    """Build a flipped family tree rooted at the files matching file_name."""
    relations = expand_file_names(g, file_name)
    return family_tree_base(
        graph=g,
        relations=relations,
        depth=depth,
        flipped=True,
    )
def get_symbol_info(g, build_object_names, search_depth=None, symbol_type='dependency'):
    # Yield symbol records for every build object reachable from
    # build_object_names, breadth-first through 'target_to_dependencies'
    # edges.  symbol_type selects 'dependency' (symbols needed) or
    # 'definition' (symbols provided); search_depth, when given, limits how
    # many BFS levels are visited.  Each yielded dict has 'symbol', 'type',
    # 'object', 'parents' keys, plus 'archive' for archive members.
    queue = expand_file_names(g, build_object_names)
    # Nodes remaining on the current BFS level / queued for the next level.
    current_level_children = len(queue)
    next_level_children = 0
    parents = None
    for full_build_object_name in queue:  # queue grows during iteration
        if isinstance(full_build_object_name, tuple):
            # Deeper-level entry: (path-so-far, node name).
            parents = full_build_object_name[0]
            full_build_object_name = full_build_object_name[1]
        else:
            if parents is None:
                parents = [full_build_object_name]
        if detect_type(full_build_object_name) == "object":
            # Plain object file: report its symbols directly.
            if symbol_type == "dependency":
                for symbol_needed in g.get('file_to_symbol_dependencies',
                                           full_build_object_name):
                    yield {
                        'symbol': symbol_needed,
                        'type': 'dependency',
                        'object': full_build_object_name,
                        'parents': parents
                    }
            elif symbol_type == "definition":
                for symbol_defined in g.get('file_to_symbol_definitions',
                                            full_build_object_name):
                    yield {
                        'symbol': symbol_defined,
                        'type': 'definition',
                        'object': full_build_object_name,
                        'parents': parents
                    }
        else:
            # Archive: report the symbols of each member object file.
            for object_file in g.get('archives_to_components',
                                     full_build_object_name):
                if symbol_type == "dependency":
                    for symbol_needed in g.get('file_to_symbol_dependencies',
                                               object_file):
                        yield {
                            'symbol': symbol_needed,
                            'type': 'dependency',
                            'object': object_file,
                            'archive': full_build_object_name,
                            'parents': parents
                        }
                elif symbol_type == "definition":
                    for symbol_defined in g.get('file_to_symbol_definitions',
                                                object_file):
                        yield {
                            'symbol': symbol_defined,
                            'type': 'definition',
                            'object': object_file,
                            'archive': full_build_object_name,
                            'parents': parents
                        }

        def add_path_info(item):
            # Tag each child with a copy of the path taken to reach it.
            # NOTE(review): identity test ('is not'), not equality — assumes
            # the same string object flows through; confirm before changing.
            if full_build_object_name is not parents[-1]:
                parents.append(full_build_object_name)
            return list(parents), item

        next_level_nodes = map(
            add_path_info,
            g.get('target_to_dependencies', full_build_object_name))
        # Measure how many nodes extend() actually added.
        next_level_nodes_count = len(queue)
        queue.extend(next_level_nodes)
        next_level_nodes_count = len(queue) - next_level_nodes_count
        next_level_children += next_level_nodes_count
        current_level_children -= 1
        if current_level_children == 0:
            # Current BFS level exhausted; apply the depth limit, then
            # promote the next level's count.
            if search_depth is not None:
                search_depth -= 1
                if search_depth == 0:
                    # NOTE(review): under PEP 479 (Python 3.7+) this raises
                    # RuntimeError; should be a bare `return`.
                    raise StopIteration
            current_level_children = next_level_children
            next_level_children = 0