def engine_helper(self, engine, sleep_times):
    """Submit one dummy job per sleep time, wait for all of them,
    and verify that each job's result equals its sleep time.

    If the engine cleans up after itself, also verify that every
    aggregator file has been removed from disk.
    """
    dc = DummyComputation(engine)
    # Submit every job up front; keep the aggregator handle of each.
    aggregators = [dc.go_to_bed(t) for t in sleep_times]
    self.assertEqual(len(aggregators), len(sleep_times))
    engine.wait_for_all()

    # Collect the final result of every job, releasing resources as we go.
    results = []
    for agg in aggregators:
        agg.finalize()
        results.append(agg.get_final_result().result)
        agg.clean_up()

    for got, expected in zip(results, sleep_times):
        self.assertEqual(got, expected)

    if engine.do_clean_up:
        # Use a fresh shell for the existence check (NFS cache issues).
        for agg in aggregators:
            self.assertFalse(
                FileSystem.file_exists_new_shell(agg.filename))
def dispatch(filename):
    """Block until *filename* is visible, then deserialize and run the job.

    The existence check deliberately spawns a new python shell each time
    to work around NFS attribute caching.
    """
    while not FileSystem.file_exists_new_shell(filename):
        time.sleep(1)
    # File is there: load the serialized job and execute it.
    Serialization.deserialize_object(filename).compute()
def dispatch(filename):
    """Wait for *filename* to appear on disk, then run the job stored in it.

    A fresh python shell is used per existence probe to dodge stale
    NFS caches.
    """
    # Poll once per second until the filesystem reports the file.
    while not FileSystem.file_exists_new_shell(filename):
        time.sleep(1)
    serialized_job = Serialization.deserialize_object(filename)
    serialized_job.compute()
def test_file_not_exists1(self):
    """A relative path with an explicit './' prefix is reported absent."""
    path = "./temp.bin"
    # Make sure the file is gone; an OSError just means it already was.
    try:
        os.remove(path)
    except OSError:
        pass
    self.assertFalse(FileSystem.file_exists_new_shell(path))
def test_file_not_exists2(self):
    """A bare relative filename is reported absent."""
    path = "temp.bin"
    # Ensure a clean slate; OSError means it did not exist, which is fine.
    try:
        os.remove(path)
    except OSError:
        pass
    self.assertFalse(FileSystem.file_exists_new_shell(path))
def test_file_exists2(self):
    """A file we just created is reported as present by FileSystem."""
    # NOTE(review): this file defines test_file_exists2 twice; Python keeps
    # only the last definition, so one copy never runs -- consider renaming
    # one of them (e.g. to test_file_exists1).
    filename = "temp.bin"
    # 'with' guarantees the handle is closed before the existence check,
    # even if the open itself partially fails.
    with open(filename, 'w'):
        pass
    self.assertTrue(FileSystem.file_exists_new_shell(filename))
    # Best-effort cleanup; ignore the error if the file is already gone.
    try:
        os.remove(filename)
    except OSError:
        pass
def test_file_exists2(self):
    """A freshly created file must be visible to FileSystem's check."""
    # NOTE(review): this file defines test_file_exists2 twice; Python keeps
    # only the last definition, so one copy never runs -- consider renaming
    # one of them (e.g. to test_file_exists1).
    filename = "temp.bin"
    # Use a context manager instead of open()/close() so the handle is
    # always released before we probe for existence.
    with open(filename, 'w'):
        pass
    self.assertTrue(FileSystem.file_exists_new_shell(filename))
    # Best-effort cleanup; a missing file is not an error here.
    try:
        os.remove(filename)
    except OSError:
        pass
def engine_helper(self, engine, sleep_times):
    """Run one dummy computation per entry of *sleep_times* on *engine*
    and assert that each computation returns its own sleep time.

    When the engine performs cleanup, additionally assert that no
    aggregator file remains on disk afterwards.
    """
    computation = DummyComputation(engine)
    num_jobs = len(sleep_times)

    # Fire off all submissions and remember their aggregators.
    aggregators = [computation.go_to_bed(sleep) for sleep in sleep_times]
    self.assertEqual(len(aggregators), num_jobs)
    engine.wait_for_all()

    # Drain every aggregator: finalize, read result, release it.
    results = []
    for aggregator in aggregators:
        aggregator.finalize()
        results.append(aggregator.get_final_result().result)
        aggregator.clean_up()

    for idx in range(num_jobs):
        self.assertEqual(results[idx], sleep_times[idx])

    if engine.do_clean_up:
        # New-shell probe works around NFS caching.
        for aggregator in aggregators:
            self.assertFalse(
                FileSystem.file_exists_new_shell(aggregator.filename))
def _check_job_done(self, job_name):
    """Return True once the job's aggregator file is visible on disk.

    A race condition is acceptable here, but the check spawns a new
    python shell because of NFS cache problems otherwise.
    """
    return FileSystem.file_exists_new_shell(
        self.get_aggregator_filename(job_name))
def _check_job_done(self, job_name):
    """Report whether *job_name* has produced its aggregator file yet.

    Racing with the job is fine here; the existence probe uses a fresh
    python shell to sidestep NFS caching.
    """
    aggregator_path = self.get_aggregator_filename(job_name)
    return FileSystem.file_exists_new_shell(aggregator_path)