def _GetEncodedRangesFromStrings(self, string_data):
  """Resolves stored string-piece chunks against |string_data|.

  Returns a list with one result per chunk in
  self._encoded_strings_by_path_chunks.
  """
  # Wrap each chunk in a 1-tuple so it is passed as a single positional arg.
  job_params = ((piece_chunk,)
                for piece_chunk in self._encoded_strings_by_path_chunks)
  # Job order is irrelevant: every job owns a disjoint set of paths, and the
  # output dict is keyed by path.
  job_results = concurrent.BulkForkAndCall(
      string_extract.ResolveStringPieces, job_params, string_data=string_data)
  return list(job_results)
def AnalyzePaths(self, paths):
  """Runs nm jobs over |paths|, recording symbol paths and string addresses.

  Updates self._paths_by_name (name -> list of paths) and appends any
  non-empty encoded string dicts to
  self._encoded_string_addresses_by_path_chunks.
  """
  def _GenerateJobParams():
    # .a archives are submitted one-per-job; the string-resolution step
    # relies upon .a paths not being grouped. Everything else is batched.
    loose_objects = []
    for p in paths:
      if p.endswith('.a'):
        yield p, self._tool_prefix, self._output_directory
      else:
        loose_objects.append(p)
    BATCH_SIZE = 50  # Chosen arbitrarily.
    for start in xrange(0, len(loose_objects), BATCH_SIZE):
      yield (loose_objects[start:start + BATCH_SIZE], self._tool_prefix,
             self._output_directory)

  job_params = list(_GenerateJobParams())
  # Job order is irrelevant: every job owns a disjoint set of paths, and the
  # output dict is keyed by path.
  job_results = concurrent.BulkForkAndCall(_RunNmOnIntermediates, job_params)
  paths_by_name = self._paths_by_name
  for encoded_syms, encoded_strs in job_results:
    names_by_path = concurrent.DecodeDictOfLists(encoded_syms)
    for path, symbol_names in names_by_path.iteritems():
      for symbol_name in symbol_names:
        paths_by_name[symbol_name].append(path)
    if encoded_strs != concurrent.EMPTY_ENCODED_DICT:
      self._encoded_string_addresses_by_path_chunks.append(encoded_strs)
  logging.debug('worker: AnalyzePaths() completed.')
def AnalyzeStringLiterals(self, elf_path, elf_string_positions):
  """Reads string bytes from |elf_path| and resolves string pieces per path.

  Stores the per-position results in
  self._list_of_encoded_elf_string_positions_by_path.
  """
  logging.debug('worker: AnalyzeStringLiterals() started.')
  # Read string_data from elf_path once here, so the forked processes share it.
  address, offset, _ = string_extract.LookupElfRodataInfo(
      elf_path, self._tool_prefix)
  # Convert .rodata virtual addresses into file offsets.
  delta = address - offset
  file_positions = ((addr - delta, size)
                    for addr, size in elf_string_positions)
  string_data = string_extract.ReadFileChunks(elf_path, file_positions)

  job_params = ((addr_chunk,)
                for addr_chunk in self._encoded_string_addresses_by_path_chunks)
  # Job order is irrelevant: every job owns a disjoint set of paths, and the
  # output dict is keyed by path.
  job_results = list(concurrent.BulkForkAndCall(
      string_extract.ResolveStringPieces, job_params,
      string_data=string_data, tool_prefix=self._tool_prefix,
      output_directory=self._output_directory))

  # Transpose: merge the i-th encoded dict from every job into one dict,
  # producing one entry per elf_string_position.
  final_result = [
      concurrent.JoinEncodedDictOfLists([r[i] for r in job_results])
      for i in xrange(len(elf_string_positions))
  ]
  self._list_of_encoded_elf_string_positions_by_path = final_result
  logging.debug('worker: AnalyzeStringLiterals() completed.')
def testBulkForkAndCall_many_kwargs(self):
  """100 jobs sharing kwargs (one of them unpicklable) all complete."""
  parent_pid = os.getpid()
  job_args = [(1, 2)] * 100
  results = concurrent.BulkForkAndCall(
      _ForkTestHelper, job_args, pickle_me_not=Unpicklable(),
      test_instance=self, parent_pid=parent_pid)
  self.assertEquals([3] * 100, list(results))
def _DoBulkFork(self, runner, batches):
  """Fans |batches| out to |runner| in forked subprocesses."""
  # Job order is irrelevant: every job owns a disjoint set of paths, and the
  # output dict is keyed by path.
  fork_kwargs = {
      'tool_prefix': self._tool_prefix,
      'output_directory': self._output_directory,
  }
  return concurrent.BulkForkAndCall(runner, batches, **fork_kwargs)
def testBulkForkAndCall_few_kwargs(self):
  """Two jobs with shared kwargs; results may arrive in any order."""
  parent_pid = os.getpid()
  job_args = [(1, 2, Unpicklable()), (3, 4, None)]
  results = concurrent.BulkForkAndCall(
      _ForkTestHelper, job_args, test_instance=self, parent_pid=parent_pid)
  self.assertEquals({3, 7}, set(results))
def AnalyzePaths(self, paths):
  """Collects symbol names for |paths| and appends a name->paths mapping.

  The resulting defaultdict(list) is appended to self._batches.
  """
  def _GenerateJobParams():
    # .a archives are forked one-per-job; object files are batched.
    loose_objects = []
    for p in paths:
      if p.endswith('.a'):
        yield p, self._tool_prefix, self._output_directory
      else:
        loose_objects.append(p)
    BATCH_SIZE = 50  # Chosen arbitrarily.
    for start in xrange(0, len(loose_objects), BATCH_SIZE):
      yield (loose_objects[start:start + BATCH_SIZE], self._tool_prefix,
             self._output_directory)

  paths_by_name = collections.defaultdict(list)
  job_params = list(_GenerateJobParams())
  for encoded_ret in concurrent.BulkForkAndCall(_BatchCollectNames,
                                                job_params):
    names_by_path = concurrent.DecodeDictOfLists(*encoded_ret)
    for path, symbol_names in names_by_path.iteritems():
      for symbol_name in symbol_names:
        paths_by_name[symbol_name].append(path)
  self._batches.append(paths_by_name)
def testBulkForkAndCall_exception(self):
  """An exception raised in a forked job surfaces when results are consumed."""
  pid = os.getpid()
  results = concurrent.BulkForkAndCall(_ForkTestHelper, [(self, pid, 1, 'a')])
  self.assertRaises(TypeError, results.next)
def testBulkForkAndCall_many(self):
  """The same job repeated 100 times yields 100 results."""
  parent_pid = os.getpid()
  # One shared tuple (and one shared Unpicklable instance) for every job.
  job = (self, parent_pid, 1, 2, Unpicklable())
  results = concurrent.BulkForkAndCall(_ForkTestHelper, [job] * 100)
  self.assertEquals([3] * 100, list(results))
def testBulkForkAndCall_few(self):
  """Two plain jobs; results may arrive in any order."""
  pid = os.getpid()
  jobs = [
      (self, pid, 1, 2, Unpicklable()),
      (self, pid, 3, 4),
  ]
  results = concurrent.BulkForkAndCall(_ForkTestHelper, jobs)
  self.assertEquals({3, 7}, set(results))
def testBulkForkAndCall_none(self):
  """An empty job list produces an empty result iterator."""
  empty_results = concurrent.BulkForkAndCall(_ForkTestHelper, [])
  self.assertEquals([], list(empty_results))