def _GetEncodedRangesFromStrings(self, string_data):
    """Resolves string pieces for each path chunk, fanned out across workers.

    Args:
      string_data: Raw string data handed to every worker job.

    Returns:
      A list with one result per chunk in
      self._encoded_strings_by_path_chunks.
    """
    # Each job owns an independent set of paths and the output is a dict
    # keyed by path, so the order in which jobs complete is irrelevant.
    job_args = [(path_chunk,)
                for path_chunk in self._encoded_strings_by_path_chunks]
    return list(
        parallel.BulkForkAndCall(
            string_extract.ResolveStringPieces,
            job_args,
            string_data=string_data))
def testBulkForkAndCall_few_kwargs(self):
    """Two jobs mixing positional args with shared keyword args.

    Each _ForkTestHelper job returns the sum of its first two positional
    args, so the collected results are {1+2, 3+4} = {3, 7}.
    """
    parent_pid = os.getpid()
    results = parallel.BulkForkAndCall(
        _ForkTestHelper, [(1, 2, Unpicklable()), (3, 4, None)],
        test_instance=self, parent_pid=parent_pid)
    # Fix: assertEquals is a deprecated alias (removed in Python 3.12);
    # use assertEqual to match the other tests in this file.
    self.assertEqual({3, 7}, set(results))
def _DoBulkFork(self, runner, batches):
    """Fans |batches| out to |runner| across forked worker processes.

    Args:
      runner: Callable executed once per batch in a worker process.
      batches: Iterable of positional-argument tuples, one per job.

    Returns:
      The iterable of results from parallel.BulkForkAndCall.
    """
    # Each job owns an independent set of paths and the output is a dict
    # keyed by path, so job completion order does not matter.
    common_kwargs = {
        'tool_prefix': self._tool_prefix,
        'output_directory': self._output_directory,
    }
    return parallel.BulkForkAndCall(runner, batches, **common_kwargs)
def testBulkForkAndCall_many_kwargs(self):
    """100 identical jobs whose extra state travels via keyword args only.

    Every job sums its two positional args (1 + 2), so all 100 results
    are 3. The unpicklable kwarg verifies data is inherited via fork
    rather than serialized.
    """
    pid = os.getpid()
    arg_tuples = [(1, 2)] * 100
    results = parallel.BulkForkAndCall(
        _ForkTestHelper, arg_tuples,
        pickle_me_not=Unpicklable(), test_instance=self, parent_pid=pid)
    self.assertEqual([3] * 100, list(results))
def testBulkForkAndCall_exception(self):
    """A job whose args trigger a TypeError propagates it to the consumer.

    Summing 1 + 'a' inside the worker raises TypeError, which should
    surface when the lazy results iterator is advanced.
    """
    bad_args = [(1, 'a', None, self, os.getpid())]
    results = parallel.BulkForkAndCall(_ForkTestHelper, bad_args)
    with self.assertRaises(TypeError):
        next(results)
def testBulkForkAndCall_many(self):
    """Stress: 100 jobs, each carrying an unpicklable positional arg.

    Each job returns 1 + 2 = 3; the unpicklable arg confirms job inputs
    are inherited through fork rather than pickled.
    """
    pid = os.getpid()
    job_args = [(1, 2, Unpicklable(), self, pid) for _ in range(100)]
    summed = list(parallel.BulkForkAndCall(_ForkTestHelper, job_args))
    self.assertEqual([3] * 100, summed)
def testBulkForkAndCall_few(self):
    """Two jobs with purely positional args; results compared as a set.

    Jobs return 1+2 and 3+4 respectively, so the unordered results are
    {3, 7}.
    """
    pid = os.getpid()
    job_args = [
        (1, 2, Unpicklable(), self, pid),
        (3, 4, None, self, pid),
    ]
    outcome = parallel.BulkForkAndCall(_ForkTestHelper, job_args)
    self.assertEqual({3, 7}, set(outcome))
def testBulkForkAndCall_none(self):
    """An empty job list produces an empty (but valid) result iterable."""
    outcome = parallel.BulkForkAndCall(_ForkTestHelper, [])
    self.assertEqual([], list(outcome))