def test_zmq_topic_filtering_works(caplog):
    """A Listener subscribed to 'results' must receive only 'results' records."""

    class ThreeEmitsProcedure(Procedure):
        def execute(self):
            self.emit('results', 'Data 1')
            self.emit('progress', 33)
            self.emit('results', 'Data 2')
            self.emit('progress', 66)
            self.emit('results', 'Data 3')
            self.emit('progress', 99)

    proc = ThreeEmitsProcedure()
    results = Results(proc, tempfile.mktemp())
    worker = Worker(results, port=5888, log_level=logging.DEBUG)
    listener = Listener(port=5888, topic='results', timeout=4.0)
    # leave time for subscriber and publisher to establish a connection
    sleep(4.0)
    worker.start()

    collected = []
    while listener.message_waiting():
        topic, record = listener.receive()
        collected.append((topic, record))

    worker.join(timeout=20.0)  # give it enough time to finish the procedure
    assert proc.status == proc.FINISHED
    assert len(collected) == 3
    assert all(topic == 'results' for topic, _ in collected)
def test_worker_stop_random_procedure():
    """Stopping a started worker must set its should-stop flag.

    Renamed from ``test_worker_stop``: the module defines another
    ``test_worker_stop`` later (using ``procedure.TestProcedure``), which
    shadowed this definition so pytest never collected or ran it.
    """
    procedure = RandomProcedure()
    file = tempfile.mktemp()
    results = Results(procedure, file)
    worker = Worker(results)
    worker.start()
    worker.stop()
    assert worker.should_stop()
    worker.join()
def test_worker_finish_test_procedure():
    """A joined worker thread must no longer be alive.

    Renamed from ``test_worker_finish``: the module defines another
    ``test_worker_finish`` later (using ``RandomProcedure``), which shadowed
    this definition so pytest never collected or ran it. Also replaces the
    unidiomatic ``== False`` comparison with ``not``.
    """
    p = procedure.TestProcedure()
    f = tempfile.mktemp()
    r = Results(p, f)
    w = Worker(r)
    w.start()
    w.join()
    sleep(2)  # small grace period after join before checking liveness
    assert not w.is_alive()
def test_worker_stop():
    """After ``stop()`` is requested, the worker reports it should stop."""
    proc = procedure.TestProcedure()
    tmp_path = tempfile.mktemp()
    res = Results(proc, tmp_path)
    wrk = Worker(res)
    wrk.start()
    wrk.stop()
    assert wrk.should_stop()
    wrk.join()
def test_procedure_wrapper():
    """Pickling a Results round-trips the instance parameter value while
    leaving the class-level default untouched."""
    assert RandomProcedure.iterations.value == 100
    proc = RandomProcedure()
    proc.iterations = 101
    results = Results(proc, tempfile.mktemp())
    restored = pickle.loads(pickle.dumps(results))
    assert hasattr(restored, 'procedure')
    assert restored.procedure.iterations == 101
    # the class default must not have been mutated by the instance override
    assert RandomProcedure.iterations.value == 100
def test_regression_param_str_should_not_include_newlines(self, tmpdir):
    """An array-valued parameter must not break the CSV header across lines:
    reloading and parsing the file must succeed and round-trip the value."""

    class DummyProcedure(Procedure):
        par = Parameter('Generic Parameter with newline chars')
        DATA_COLUMNS = ['Foo', 'Bar', 'Baz']

    proc = DummyProcedure()
    proc.par = np.linspace(1, 100, 17)
    filename = os.path.join(str(tmpdir), 'header_linebreak_test.csv')
    result = Results(proc, filename)

    result.reload()  # must not raise
    pd.read_csv(filename, comment="#")  # must not raise
    assert (result.parameters['par'].value == np.linspace(1, 100, 17)).all()
def test_worker_closes_file_after_finishing():
    """The worker must release its output file handle once it finishes."""
    proc = RandomProcedure()
    proc.iterations = 100
    proc.delay = 0.001
    tmp_path = tempfile.mktemp()
    wrk = Worker(Results(proc, tmp_path))
    wrk.start()
    wrk.join(timeout=5)
    # Deleting the file checks that the worker closed it properly
    # (on Windows an open handle would make this raise).
    os.remove(tmp_path)
def test_worker_finish():
    """A finished worker must have written all rows; reloading the file
    yields a (iterations, 2) data table."""
    proc = RandomProcedure()
    proc.iterations = 100
    proc.delay = 0.001
    tmp_path = tempfile.mktemp()
    wrk = Worker(Results(proc, tmp_path))
    wrk.start()
    wrk.join(timeout=5)
    reloaded = Results.load(tmp_path, procedure_class=RandomProcedure)
    assert reloaded.data.shape == (100, 2)
def test_zmq_does_not_crash_worker(caplog):
    """Check that a ZMQ serialisation usage error does not cause a crash.

    See https://github.com/ralph-group/pymeasure/issues/168
    """
    procedure = RandomProcedure()
    file = tempfile.mktemp()
    results = Results(procedure, file)
    # If we define a port here we get ZMQ communication
    # if cloudpickle is installed
    worker = Worker(results, port=5888, log_level=logging.DEBUG)
    worker.start()
    worker.join(timeout=20.0)  # give it enough time to finish the procedure
    assert procedure.status == procedure.FINISHED
    # make sure to clean up, to reduce the possibility of this test
    # interfering with other tests (presumably via the shared ZMQ port)
    del worker
def test_regression_attr_data_when_up_to_date_should_retain_dtype(self, read_csv_mock, path_exists_mock):
    """When no new rows arrive, the zero-row frame returned by read_csv
    (dtype object) must not degrade the dtype of the cached data."""
    procedure_mock = mock.MagicMock(spec=Procedure)
    result = Results(procedure_mock, 'test.csv')

    read_csv_mock.return_value = [pd.DataFrame(data={
        'A': [1, 2, 3, 4, 5, 6, 7],
        'B': [2, 3, 4, 5, 6, 7, 8],
    })]
    first_data = result.data

    # if no updates, read_csv returns a zero-row dataframe
    read_csv_mock.return_value = [pd.DataFrame(data={'A': [], 'B': []},
                                               dtype=object)]
    second_data = result.data

    assert second_data.iloc[:, 0].dtype is not object
    assert first_data.iloc[:, 0].dtype is second_data.iloc[:, 0].dtype