def test_parallel(self):
    """Exhaust a 125-point search space with parallel duplicate-checked runs.

    Seeds ``n_procs`` workflows with the identical guess ``[1, 5, 3]`` —
    deliberately the worst case, forcing ``n_procs`` duplicates — runs them
    in parallel, then launches serially until the space is exhausted.
    Afterwards every point must be present both in the collection and in the
    matrices returned by ``fetch_matrices``.
    """
    n_procs = 10
    self.mc.configure(
        wf_creator=wf_creator_accuracy,
        dimensions=self.dims_parallel,
        opt_label='test_parallel',
        maximize=True,
        **common_kwargs)
    for i in range(n_procs):
        # Assume the worst case, with n_procs forced duplicates
        launchpad.add_wf(wf_creator_accuracy([1, 5, 3]))
    launch_multiprocess(launchpad, None, 'INFO', 12, n_procs, 0)
    try:
        # Serial launches mop up whatever remains of the space; the loop is
        # expected to terminate early once the space is exhausted.
        for _ in range(10):
            launch_rocket(launchpad)
    except ExhaustedSpaceError:
        pass
    # The full space (125 points) must have a 'y' value (real or reserved).
    self.assertEqual(125, self.c.count_documents({'y': {'$exists': 1}}))
    all_x, all_y = self.mc.fetch_matrices(include_reserved=True)
    self.assertEqual(len(all_x), 125)
    # BUG FIX: this assertion previously re-checked len(all_x); it must
    # verify that all_y covers the full space as well (mirroring the
    # include_reserved=False checks below, which test both matrices).
    self.assertEqual(len(all_y), 125)
    # Without reserved points some entries are excluded; 114 is the
    # project-chosen lower bound here (presumably 125 minus the points that
    # may still be reserved — TODO confirm against optimizer internals).
    all_x, all_y = self.mc.fetch_matrices(include_reserved=False)
    self.assertGreaterEqual(len(all_x), 114)
    self.assertGreaterEqual(len(all_y), 114)
def test_parallel(self):
    """Exhaust the 125-point space under parallel duplicate-checked launches.

    Every worker is seeded with the same guess ([1, 5, 3]) — the worst case
    for duplicate checking — then serial launches finish off the space. The
    suggested points plus any still-reserved points must cover all 125.
    """
    n_procs = 10
    self.mc.configure(
        wf_creator=wf_creator_accuracy,
        dimensions=self.dims_parallel,
        opt_label='test_parallel',
        maximize=True,
        **common_kwargs)
    # Assume the worst case, with n_procs forced duplicates
    for _ in range(n_procs):
        launchpad.add_wf(wf_creator_accuracy([1, 5, 3]))
    launch_multiprocess(launchpad, None, 'INFO', 12, n_procs, 0)
    try:
        for _ in range(10):
            launch_rocket(launchpad)
    except ExhaustedSpaceError:
        pass
    self.assertEqual(125, self.c.count_documents({'y': {'$exists': 1}}))
    # Gather every suggested point, then append the points still reserved.
    suggested = [doc['x_new'] for doc in self.c.find({'x_new': {"$exists": 1}})]
    reserved = [doc['x'] for doc in self.c.find({'y': 'reserved'})]
    all_x_unique = suggested + reserved
    self.assertEqual(len(all_x_unique), 125)
def test_parallel(self):
    """Run duplicate-seeded workflows in parallel until the space is spent.

    All n_procs workflows start from the identical guess ([1, 5, 3]) —
    forcing worst-case duplicates — and the multiprocess launch is allowed
    to raise ExhaustedSpaceError once all 125 points are consumed.
    """
    n_procs = 10
    self.lp.reset(password=None, require_password=False)
    # Assume the worst case, with n_procs forced duplicates
    for _ in range(n_procs):
        self.lp.add_wf(wf_creator_parallel([1, 5, 3], self.lp))
    try:
        launch_multiprocess(self.lp, None, 'INFO', 13, n_procs, 0)
    except ExhaustedSpaceError:
        pass
    collection = self.db.test_parallel
    self.assertEqual(
        collection.count_documents({'y': {'$exists': 1}}), 125)
    # Suggested points plus still-reserved points must cover the whole space.
    X_unique = [doc['x_new'] for doc in collection.find({'x_new': {"$exists": 1}})]
    X_unique += [doc['x'] for doc in collection.find({'y': 'reserved'})]
    self.assertEqual(len(X_unique), 125)
name='rsled', duplicate_check=True, opt_label="opt_parallel")
    ], spec=spec)
    # NOTE(review): the two lines above are the tail of a workflow-creator
    # function whose header lies before this chunk — the kwargs configure
    # the 'rsled' database name, duplicate checking, and the 'opt_parallel'
    # collection label; confirm against the full definition.
    return Workflow([firework1])


# try a parallel implementation of rocketsled
def load_parallel_wfs(n_processes):
    # Seed one workflow per worker process, each from a random starting
    # point in the search space.
    for i in range(n_processes):
        launchpad.add_wf(wf_creator(random_guess(dims)))


if __name__ == "__main__":
    TESTDB_NAME = 'rsled'
    launchpad = LaunchPad(name=TESTDB_NAME)
    # Wipe any previous state so the run starts from an empty database.
    launchpad.reset(password=None, require_password=False)
    n_processes = 10
    n_runs = 10
    # Should throw an 'Exhausted' error when n_processes*n_runs > 125
    # (the total space size)
    load_parallel_wfs(n_processes)
    launch_multiprocess(launchpad, None, 'INFO', n_runs, n_processes, 0)
    # tear down database
    # launchpad.connection.drop_database(TESTDB_NAME)