def test_read(self):
    """Tests that data put to MockBlockingConn by put_bytes method can be
    read from it.
    """

    class LineReader(threading.Thread):
        """A test class that launches a thread, calls readline on the
        specified conn repeatedly and puts the read data to the specified
        queue.
        """

        def __init__(self, conn, queue):
            threading.Thread.__init__(self)
            self._queue = queue
            self._conn = conn
            # Use the daemon attribute directly: Thread.setDaemon() is
            # deprecated since Python 3.10 (the attribute works on all
            # supported versions, including Python 2 via six).
            self.daemon = True
            self.start()

        def run(self):
            # Forward every line read from the connection into the queue
            # so the test body can assert on it synchronously.
            while True:
                data = self._conn.readline()
                self._queue.put(data)

    conn = mock.MockBlockingConn()
    queue = six.moves.queue.Queue()
    reader = LineReader(conn, queue)
    # Nothing has been written yet, so the reader must not have produced data.
    self.assertTrue(queue.empty())
    conn.put_bytes(b'Foo bar\r\n')
    # queue.get() blocks until the reader thread has seen the bytes.
    read = queue.get()
    self.assertEqual(b'Foo bar\r\n', read)
def _summary_writer_worker(dir, queue, done_event):
    """Worker loop that owns the (global) SummaryWriter.

    Drains ``(op, value, step)`` tuples from *queue* and applies each as
    ``op(step, value)``; exits once *done_event* is set and the queue has
    been fully drained.
    """
    global summary_writer
    summary_writer = SummaryWriter(dir)
    # Keep consuming until shutdown is signalled AND nothing is left queued.
    while not (done_event.is_set() and queue.empty()):
        try:
            op, value, step = queue.get(timeout=cg_general.TIMEOUT)
            op(step, value)
        except six.moves.queue.Empty:
            # Timed out waiting for work; re-check the exit condition.
            continue
def kill_processes(queue, processes):
    """Signal worker processes to shut down, drain the shared queue, then
    terminate the processes.

    :param queue: multiprocessing-style queue shared with the workers
                  (must support ``empty()`` and ``get(block)``).
    :param processes: iterable of worker handles exposing ``shutdown()``
                      and ``terminate()``.
    """
    # Local import keeps the fix self-contained; multiprocessing queues
    # raise the stdlib queue.Empty on a failed non-blocking get.
    from queue import Empty

    print('[INFO] %s Send signal to processes' % (dt.now()))
    for p in processes:
        p.shutdown()
    print('[INFO] %s Empty queue' % (dt.now()))
    while not queue.empty():
        sleep(0.5)
        try:
            # empty() is only a hint: a worker may drain the queue between
            # the check and this get(), so guard the race instead of crashing.
            queue.get(False)
        except Empty:
            continue
    print('[INFO] %s kill processes' % (dt.now()))
    for p in processes:
        p.terminate()
def kill_processes(queue, processes):
    """Signal worker processes to shut down, drain the shared queue, then
    terminate the processes.

    :param queue: multiprocessing-style queue shared with the workers
                  (must support ``empty()`` and ``get(block)``).
    :param processes: iterable of worker handles exposing ``shutdown()``
                      and ``terminate()``.
    """
    # Local import keeps the fix self-contained; multiprocessing queues
    # raise the stdlib queue.Empty on a failed non-blocking get.
    from queue import Empty

    print('Signal processes')
    for p in processes:
        p.shutdown()
    print('Empty queue')
    while not queue.empty():
        time.sleep(0.5)
        try:
            # empty() is only a hint: a worker may drain the queue between
            # the check and this get(), so guard the race instead of crashing.
            queue.get(False)
        except Empty:
            continue
    print('kill processes')
    for p in processes:
        p.terminate()
def kill_data_processes(queue, processes):
    """Signal data-loader processes to shut down, drain and close the
    shared queue, then terminate the processes.

    :param queue: multiprocessing.Queue shared with the workers (must
                  support ``empty()``, ``get(block)`` and ``close()``).
    :param processes: iterable of worker handles exposing ``shutdown()``
                      and ``terminate()``.
    """
    # Local import keeps the fix self-contained; multiprocessing queues
    # raise the stdlib queue.Empty on a failed non-blocking get.
    from queue import Empty

    print('Signal processes')
    for p in processes:
        p.shutdown()
    # Give workers a moment to react to the shutdown signal.
    time.sleep(0.5)
    while not queue.empty():
        try:
            # empty() is only a hint: a worker may drain the queue between
            # the check and this get(), so guard the race instead of crashing.
            queue.get(False)
        except Empty:
            pass
        time.sleep(0.5)
    # Close the queue
    queue.close()
    for p in processes:
        p.terminate()
def _fn(*args, **kwargs):
    """Run *fn* against partial traces popped from *queue* until one replay
    completes, returning that replay's result.

    On each attempt the highest-priority partial trace is taken from the
    queue; if the replay escapes (NonlocalExit), the escaping site is
    enumerated and the extended traces are pushed back onto the queue.
    NOTE(review): `fn`, `queue`, `sample_escape` etc. come from the
    enclosing scope — presumably a Pyro-style enumeration helper; confirm
    against the caller.
    """
    # Bounded retry loop (1e6 attempts) so a bug cannot spin forever.
    for i in range(int(1e6)):
        # A blocking get() on an empty queue would hang the whole search.
        assert not queue.empty(), \
            "trying to get() from an empty queue will deadlock"
        priority, next_trace = queue.get()
        try:
            # Replay fn under next_trace, escaping (via sample_escape) at
            # sites the partial trace does not yet cover.
            ftr = poutine.trace(
                poutine.escape(poutine.replay(fn, next_trace),
                               functools.partial(sample_escape, next_trace)))
            # If no site escapes, this replay is complete — return its value.
            return ftr(*args, **kwargs)
        except NonlocalExit as site_container:
            site_container.reset_stack()
            # Enumerate the escaped site and requeue each extended trace,
            # prioritised by its (negated-noise-adjusted) log probability.
            for tr in poutine.util.enum_extend(ftr.trace.copy(),
                                               site_container.site):
                # add a little bit of noise to the priority to break ties...
                queue.put((tr.log_prob_sum().item()
                           - torch.rand(1).item() * 1e-2, tr))
    raise ValueError("max tries ({}) exceeded".format(str(1e6)))
def kill_processes(queue, processes):
    """Signal worker processes to shut down, drain the shared queue, then
    terminate the processes.

    :param queue: multiprocessing-style queue shared with the workers
                  (must support ``empty()``, ``get(block)`` and ``qsize()``).
    :param processes: iterable of worker handles exposing ``shutdown()``
                      and ``terminate()``.
    """
    print('signal processes to shutdown')
    for p in processes:
        p.shutdown()
    print("empty queue")
    #################################################
    ## The get method will be successful only when
    ## the item stored in the queue are not tensor, but numpy array
    ## otherwise, we cannot run queue.get(False):
    ## ref: https://discuss.pytorch.org/t/using-torch-tensor-over-multiprocessing-queue-process-fails/2847/2
    #################################################
    while not queue.empty():  # If queue is not empty
        time.sleep(0.5)
        try:
            queue.get(False)
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # can still interrupt the drain loop; a failed get is reported
            # but otherwise best-effort.
            print('now queue size is {0}'.format(queue.qsize()))
    print('killing processes.')
    for p in processes:
        p.terminate()
def test__get_ftdc_file_path(self):
    """
    Test that a given directory is correctly searched for diagnostic.data
    directories and that the output is of the correct format
    """
    dir_path = "test_reports"
    # Start from a clean slate in case a previous run left the tree behind.
    if os.path.exists(dir_path):
        shutil.rmtree(dir_path)
    # Nested-dict description of the directory tree to materialize on disk:
    # a value of None means "create a file", a dict means "create a
    # subdirectory with these contents".
    directory_structure = {
        "test_reports": {
            "graphs": {
                "test_false.txt": None
            },
            "fio": {
                "mongod.0": {
                    "diagnostic.data": {
                        "metrics.2019-09-09T17-24-55Z-00000": None,
                        "metrics.2019-09-09T17-24-25Z-00000": None,
                    },
                    "mongod.log": None,
                },
                "mongod.1": {
                    "diagnostic.data": {},
                    "mongod.log": None
                },
            },
            "test_false.txt": None,
            "iperf": {
                "db-correctness": {
                    "db-hash-check": {
                        "test_false.txt": None
                    }
                },
                "mongod.0": {
                    "diagnostic.data": {
                        "metrics.2019-09-09T17-24-55Z-00000": None
                    },
                    "mongod.log": None,
                },
                "mongod.1": {
                    "diagnostic.data": {
                        "metrics.2019-09-09T17-24-55Z-00000": None
                    },
                    "mongod.log": None,
                },
                "test_false.txt": None,
            },
            "_post_task": {
                "mongod.0": {
                    "mdiag.sh": None
                },
                "mongod.1": {
                    "mdiag.sh": None
                }
            },
        }
    }
    curr_dir = directory_structure[dir_path]
    # Breadth-first walk of the description above, creating directories and
    # placeholder files as we go.
    queue = six.moves.queue.Queue()
    queue.put((dir_path, curr_dir))
    while not queue.empty():
        path, curr_dir = queue.get()
        os.mkdir(path)
        for sub_dir in curr_dir:
            if curr_dir[sub_dir] is None:
                # Leaf entry: create a small placeholder file.
                with open(os.path.join(path, sub_dir), "w") as handle:
                    handle.write("test")
            else:
                # Directory entry: enqueue for creation on a later pass.
                queue.put((os.path.join(path, sub_dir), curr_dir[sub_dir]))
    ftdc_metric_paths = ftdc_analysis._get_ftdc_file_paths(dir_path)
    # Only hosts with a non-empty diagnostic.data dir should appear, keyed
    # by host then by test name; ties on multiple metrics files resolve to
    # a single path (see the fio/mongod.0 case).
    expected_result = {
        "mongod.0": {
            "iperf": os.path.abspath(
                "test_reports/iperf/mongod.0/diagnostic.data/metrics.2019-09-09T17-24-55Z-00000"
            ),
            "fio": os.path.abspath(
                "test_reports/fio/mongod.0/diagnostic.data/metrics.2019-09-09T17-24-55Z-00000"
            ),
        },
        "mongod.1": {
            "iperf": os.path.abspath(
                "test_reports/iperf/mongod.1/diagnostic.data/metrics.2019-09-09T17-24-55Z-00000"
            )
        },
    }
    self.assertEqual(ftdc_metric_paths, expected_result)
    # Clean up the fixture tree created above.
    shutil.rmtree(dir_path)