async def handle_client(self, websocket, path):
    """Serve one monitor client connection.

    Sends the current ensemble snapshot immediately, then reacts to
    cancel/done signals from the client until it disconnects.
    """
    with self.store_client(websocket):
        message = self.create_snapshot_msg(
            self._ee_id, self._iter, self._snapshot, self.event_index()
        )
        await websocket.send(message)
        async for message in websocket:
            client_event = from_json(
                message, data_unmarshaller=serialization.evaluator_unmarshaller
            )
            logger.debug(f"got message from client: {client_event}")
            if client_event["type"] == identifiers.EVTYPE_EE_USER_CANCEL:
                logger.debug(f"Client {websocket.remote_address} asked to cancel.")
                if self._ensemble.is_cancellable():
                    # The evaluator will stop after the ensemble has
                    # indicated it has been cancelled.
                    self._ensemble.cancel()
                else:
                    # Ensemble cannot be cancelled; stop the evaluator directly.
                    self._stop()
            if client_event["type"] == identifiers.EVTYPE_EE_USER_DONE:
                logger.debug(f"Client {websocket.remote_address} signalled done.")
                self._stop()
        # NOTE: due to how the monitor is implemented, a monitor that
        # signals will open a connection for each signal and
        # immediately exit after signalling. Consequently, it should be
        # harmless to remove the client from the pool.
        # If https://github.com/equinor/ert/issues/1538 is solved, then
        # this necessarily needs to change.
        self._clients.remove(websocket)
def test_from_json_base64(specversion):
    """A data_base64 payload must be decoded back to the original object."""
    raw_data = {"data-key": "val"}
    encoded = base64.b64encode(json.dumps(raw_data).encode()).decode()

    payload = {
        "type": "com.example.string",
        "source": "https://example.com/event-producer",
        "id": "1234",
        "specversion": specversion,
        "data_base64": encoded,
    }

    event = from_json(json.dumps(payload))

    # data_base64 is unmarshalled into the original object; every other
    # attribute round-trips verbatim.
    for key, expected in payload.items():
        if key == "data_base64":
            assert event.data == raw_data
        else:
            assert event[key] == expected
async def handle_client(self, websocket, path):
    """Serve one monitor client: push the current snapshot as a cloud
    event, then process cancel/done signals until disconnect."""
    with self.store_client(websocket):
        event = self._create_cloud_event(identifiers.EVTYPE_EE_SNAPSHOT, self._ensemble.snapshot.to_dict())
        await websocket.send(event)
        async for message in websocket:
            client_event = from_json(
                message, data_unmarshaller=serialization.evaluator_unmarshaller)
            logger.debug(f"got message from client: {client_event}")
            if client_event["type"] == identifiers.EVTYPE_EE_USER_CANCEL:
                logger.debug(
                    f"Client {websocket.remote_address} asked to cancel.")
                if self._ensemble.is_cancellable():
                    # The evaluator will stop after the ensemble has
                    # indicated it has been cancelled.
                    self._ensemble.cancel()
                else:
                    # Not cancellable: stop the evaluator directly.
                    self._stop()
            if client_event["type"] == identifiers.EVTYPE_EE_USER_DONE:
                logger.debug(
                    f"Client {websocket.remote_address} signalled done.")
                self._stop()
async def _handler(websocket, path):
    """Record incoming raw events until a stage-success event arrives."""
    while True:
        raw = await websocket.recv()
        events.append(raw)
        if from_json(raw)["type"] == "com.equinor.ert.forward_model_stage.success":
            break
async def handle_dispatch(self, websocket, path):
    """Forward dispatcher events to the dispatch handler until the
    ensemble reports it has stopped."""
    async with self.count_dispatcher():
        async for raw in websocket:
            event = from_json(raw)
            await self._dispatch.handle_event(self, event)
            # A stopped ensemble means no further events will arrive.
            if event["type"] == identifiers.EVTYPE_ENSEMBLE_STOPPED:
                return
def raw_pubsub_cloud_event_output(marshalled_pubsub_request):
    """Build the expected CloudEvent for a raw pubsub push request."""
    event = PUBSUB_CLOUD_EVENT.copy()
    # The raw request nests the payload under "message" and echoes the
    # event's id/time back as messageId/publishTime.
    message = marshalled_pubsub_request["data"]
    message["messageId"] = event["id"]
    message["publishTime"] = event["time"]
    event["data"] = {"message": message}
    return from_json(json.dumps(event))
async def _send():
    """Fetch one pickled result message over a secured websocket and
    return its data payload."""
    async with websockets.connect(
        self._result_uri,
        ssl=self._ssl_context,
        extra_headers=self._extra_headers,
    ) as websocket:
        raw = await websocket.recv()
        # Results are pickled rather than JSON-encoded.
        return from_json(raw, lambda blob: pickle.loads(blob)).data
def test_json_can_talk_to_itself(specversion):
    """to_json output must be readable by from_json without loss."""
    round_tripped = from_json(to_json(CloudEvent(test_attributes, test_data)))
    for key, expected in test_attributes.items():
        assert round_tripped[key] == expected
    assert round_tripped.data == test_data
async def handle_dispatch(self, websocket, path):
    """Consume dispatcher events, dropping those from other evaluators,
    until the ensemble reports stopped or failed."""
    async with self.count_dispatcher():
        async for msg in websocket:
            try:
                event = from_json(
                    msg, data_unmarshaller=serialization.evaluator_unmarshaller)
            except cloudevents.exceptions.DataUnmarshallerError:
                # Some producers pickle their payloads instead of using the
                # evaluator serialization; fall back accordingly.
                # NOTE(review): pickle.loads on network input is unsafe if
                # this endpoint is reachable by untrusted peers — confirm.
                event = from_json(msg, data_unmarshaller=pickle.loads)
            # Ignore events belonging to a different evaluator instance.
            if self._get_ee_id(event["source"]) != self._ee_id:
                logger.info(
                    f"Got event from evaluator {self._get_ee_id(event['source'])} with source {event['source']}, ignoring since I am {self._ee_id}"
                )
                continue
            await self._dispatcher.handle_event(event)
            if event["type"] in [
                identifiers.EVTYPE_ENSEMBLE_STOPPED,
                identifiers.EVTYPE_ENSEMBLE_FAILED,
            ]:
                # Terminal ensemble state: close this dispatch connection.
                return
def from_json(
    self,
    data: Union[str, bytes],
    data_unmarshaller: types.UnmarshallerType = None,
) -> CloudEvent:
    """Deserialize a JSON-encoded CloudEvent, selecting an unmarshaller
    by the event's datacontenttype when none is supplied."""
    raw_ce = json.loads(data)
    if not data_unmarshaller:
        # Fall back to the default content type when the event carries none.
        content_type = raw_ce.get(_DATACONTENTTYPE, _DEFAULT_DATACONTETYPE)
        data_unmarshaller = self._unmarshallers.get(content_type)
    return from_json(data, data_unmarshaller=data_unmarshaller)
def get_event(self, sub_id):
    """Poll the listen endpoint until a real (non-heartbeat) event arrives."""
    path = '/'.join(['listen', str(sub_id)])
    while True:
        data = self._get(path).json().get('data')
        if not data:
            continue
        event = from_json(data)
        # 'BEEP' payloads are keep-alives; skip them.
        if event.data != 'BEEP':
            return event
def test_json_can_talk_to_itself_base64(specversion):
    """Binary payloads must survive a to_json/from_json round trip."""
    data = b"test123"
    round_tripped = from_json(to_json(CloudEvent(test_attributes, data)))
    for key, expected in test_attributes.items():
        assert round_tripped[key] == expected
    assert round_tripped.data == data
def track(self):
    """Yield events received from the evaluator until it terminates.

    Creates a websocket duplexer on demand; ExitStack guarantees a
    locally-created duplexer is stopped when the generator exits.
    """
    with ExitStack() as stack:
        duplexer = self._ws_duplexer
        if not duplexer:
            duplexer = SyncWebsocketDuplexer(self._client_uri, self._base_uri, self._cert, self._token)
            # Only a duplexer we created here is ours to stop.
            stack.callback(duplexer.stop)
        for message in duplexer.receive():
            try:
                event = from_json(
                    message, data_unmarshaller=serialization.evaluator_unmarshaller)
            except DataUnmarshallerError:
                # Fallback for payloads that were pickled rather than
                # serialized with the evaluator serialization.
                event = from_json(message, data_unmarshaller=pickle.loads)
            yield event
            if event["type"] == identifiers.EVTYPE_EE_TERMINATED:
                logger.debug(
                    f"monitor-{self._id} client received terminated")
                break
async def test_happy_path(tmpdir, unused_tcp_port, event_loop, make_ensemble_builder, queue_config, caplog):
    """End-to-end: run one realization through the queue adaptor and
    verify the first and last events seen by a mock websocket server."""
    asyncio.set_event_loop(event_loop)
    host = "localhost"
    url = f"ws://{host}:{unused_tcp_port}"
    done = asyncio.get_event_loop().create_future()
    # The mock server records every event it receives until `done` resolves.
    mock_ws_task = asyncio.get_event_loop().create_task(
        mock_ws(host, unused_tcp_port, done))
    await wait(url, 10)
    ensemble = make_ensemble_builder(tmpdir, 1, 1).build()
    queue = queue_config.create_job_queue()
    for real in ensemble.get_reals():
        queue.add_ee_stage(real.get_stages()[0])
    queue.submit_complete()
    adaptor = QueueAdaptor(queue, {"dispatch_url": url}, "ee_id_123")
    execute_task = asyncio.get_event_loop().create_task(
        adaptor.execute_queue(threading.BoundedSemaphore(value=10), None))
    await execute_task
    done.set_result(None)
    await mock_ws_task
    # Re-raise any exceptions captured inside the tasks.
    mock_ws_task.result()
    execute_task.result()
    assert mock_ws_task.done()
    # First event: the realization's stage enters the waiting state.
    event_0 = from_json(mock_ws_task.result()[0])
    assert event_0["source"] == "/ert/ee/0/real/0/stage/0"
    assert event_0["type"] == "com.equinor.ert.forward_model_stage.waiting"
    assert event_0.data == {"queue_event_type": "JOB_QUEUE_WAITING"}
    # Last event: the stage finished successfully.
    end_event_index = len(mock_ws_task.result()) - 1
    end_event = from_json(mock_ws_task.result()[end_event_index])
    assert end_event["type"] == "com.equinor.ert.forward_model_stage.success"
    assert end_event.data == {"queue_event_type": "JOB_QUEUE_SUCCESS"}
async def _receive(self):
    """Pump events from the client websocket into the incoming queue,
    stopping once the evaluator reports termination."""
    logger.debug(f"monitor-{self._id} starting receive")
    async with websockets.connect(self._client_uri, max_size=2**26,
                                  max_queue=500) as websocket:
        async for raw in websocket:
            event = from_json(raw)
            self._incoming.put_nowait(event)
            # Terminated ends the stream; stop listening.
            if event["type"] == identifiers.EVTYPE_EE_TERMINATED:
                logger.debug(
                    f"monitor-{self._id} client received terminated")
                break
    logger.debug(f"monitor-{self._id} disconnected")
async def test_happy_path(tmpdir, unused_tcp_port, event_loop, make_ensemble_builder, queue_config, caplog):
    """End-to-end: execute one realization's step through the queue and
    verify the first and last events seen by a mock websocket server."""
    asyncio.set_event_loop(event_loop)
    host = "localhost"
    url = f"ws://{host}:{unused_tcp_port}"
    done = get_event_loop().create_future()
    # The mock server records every event it receives until `done` resolves.
    mock_ws_task = get_event_loop().create_task(
        mock_ws(host, unused_tcp_port, done))
    await wait_for_evaluator(base_url=url, timeout=5)
    ensemble = make_ensemble_builder(tmpdir, 1, 1).build()
    queue = queue_config.create_job_queue()
    for real in ensemble.reals:
        queue.add_ee_stage(real.steps[0], None)
    queue.submit_complete()
    await queue.execute_queue_async(url, "ee_0",
                                    threading.BoundedSemaphore(value=10), None)
    done.set_result(None)
    await mock_ws_task
    # Re-raise any exception captured inside the mock server task.
    mock_ws_task.result()
    assert mock_ws_task.done()
    # First event: the step enters the waiting state.
    event_0 = from_json(mock_ws_task.result()[0])
    assert event_0["source"] == "/ert/ee/ee_0/real/0/step/0"
    assert event_0["type"] == "com.equinor.ert.forward_model_step.waiting"
    assert event_0.data == {"queue_event_type": "JOB_QUEUE_WAITING"}
    # Last event: the step finished successfully.
    end_event_index = len(mock_ws_task.result()) - 1
    end_event = from_json(mock_ws_task.result()[end_event_index])
    assert end_event["type"] == "com.equinor.ert.forward_model_step.success"
    assert end_event.data == {"queue_event_type": "JOB_QUEUE_SUCCESS"}
def test_from_json(specversion):
    """from_json must restore every attribute and the data payload."""
    payload = {
        "type": "com.example.string",
        "source": "https://example.com/event-producer",
        "id": "1234",
        "specversion": specversion,
        "data": {"data-key": "val"},
    }
    event = from_json(json.dumps(payload))
    for key, expected in payload.items():
        actual = event.data if key == "data" else event[key]
        assert actual == expected
def pubsub_cloudevent_output():
    """Expected CloudEvent for a pubsub messagePublished payload."""
    payload = {
        "specversion": "1.0",
        "id": "1215011316659232",
        "source": "//pubsub.googleapis.com/projects/sample-project/topics/gcf-test",
        "time": "2020-05-18T12:13:19Z",
        "type": "google.cloud.pubsub.topic.v1.messagePublished",
        "datacontenttype": "application/json",
        "data": {"message": {"data": "10"}},
    }
    return from_json(json.dumps(payload))
def test_data_marshaller_and_unmarshaller():
    """datetime payloads must survive a marshal/unmarshal round trip."""
    payload = {"start_time": datetime.datetime.now()}
    original = CloudEvent(
        {
            "type": "com.equinor.ert.ee.snapshot",
            "source": f"/ert/ee/{0}",
            "id": 0,
        },
        payload,
    )
    serialized = to_json(
        original, data_marshaller=serialization.evaluator_marshaller)
    restored = from_json(
        serialized, data_unmarshaller=serialization.evaluator_unmarshaller)
    # The timestamp must come back as a real datetime, not a string.
    assert isinstance(restored.data["start_time"], datetime.datetime)
    assert original == restored
async def test_interaction(unused_tcp_port):
    """Drive a consumer/provider narrative over a real websocket and
    verify the provider answers a start event with an end event."""
    narrative = (
        Consumer("Consumer")
        .forms_narrative_with(Provider("Provider"))
        .given("some data exists")
        .receives("a request")
        .cloudevents_in_order([EventDescription(type_="start", source="/consumer")])
        .responds_with("an end response")
        .cloudevents_in_order([EventDescription(type_="end", source="/provider")])
        .on_uri(f"ws://localhost:{unused_tcp_port}")
    )
    async with narrative:
        async with websockets.connect(narrative.uri) as websocket:
            await websocket.send(
                to_json(CloudEvent({"id": "0", "source": "/consumer", "type": "start"}))
            )
            end = await websocket.recv()
            assert from_json(end)["type"] == "end"
async def _receive(self):
    """Stream events from a secured client websocket into the incoming
    queue, stopping once the evaluator reports termination."""
    logger.debug(f"monitor-{self._id} starting receive")
    connection = websockets.connect(
        self._client_uri,
        ssl=self._ssl_context,
        extra_headers=self._extra_headers,
        max_size=2**26,
        max_queue=500,
    )
    async with connection as websocket:
        async for raw in websocket:
            event = from_json(
                raw, data_unmarshaller=serialization.evaluator_unmarshaller)
            self._incoming.put_nowait(event)
            # Terminated ends the stream; stop listening.
            if event["type"] == identifiers.EVTYPE_EE_TERMINATED:
                logger.debug(
                    f"monitor-{self._id} client received terminated")
                break
    logger.debug(f"monitor-{self._id} disconnected")
async def handle_client(self, websocket, path):
    """Serve one client: send a snapshot, then react to cancel/done
    signals until the client disconnects."""
    with self.store_client(websocket):
        snapshot_msg = self.create_snapshot_msg(self._ee_id, self._snapshot,
                                                self.event_index())
        await websocket.send(snapshot_msg)
        async for raw in websocket:
            client_event = from_json(raw)
            logger.debug(f"got message from client: {client_event}")
            event_type = client_event["type"]
            if event_type == identifiers.EVTYPE_EE_USER_CANCEL:
                logger.debug(
                    f"Client {websocket.remote_address} asked to cancel.")
                if self._ensemble.is_cancellable():
                    # The evaluator will stop after the ensemble has
                    # indicated it has been cancelled.
                    self._ensemble.cancel()
                else:
                    self._stop()
            if event_type == identifiers.EVTYPE_EE_USER_DONE:
                logger.debug(
                    f"Client {websocket.remote_address} signalled done.")
                self._stop()
async def checkout_order(request: Request):
    """Parse the request body as a CloudEvent and print its payload."""
    raw_body: bytes = await request.body()
    event: CloudEvent = from_json(raw_body)
    print(event.data)
def firebase_db_cloud_event_output():
    """Load the expected firebase-db CloudEvent from the test fixture."""
    fixture_path = TEST_DATA_DIR / "firebase-db-cloud-event-output.json"
    with open(fixture_path, "r") as fixture:
        return from_json(fixture.read())
def pubsub_cloudevent_output():
    """Expected CloudEvent built from the canned pubsub payload."""
    serialized = json.dumps(PUBSUB_CLOUD_EVENT)
    return from_json(serialized)
def get_event(self, sub_id):
    """Fetch and deserialize a single event for the given subscription."""
    response = self._get('/'.join(['listen', str(sub_id)]))
    return from_json(response.json().get('data'))
def raw_pubsub_cloudevent_output(marshalled_pubsub_request):
    """Expected CloudEvent for a raw pubsub request."""
    event = PUBSUB_CLOUD_EVENT.copy()
    # The raw form wraps the marshalled request data in a "message" envelope.
    event["data"] = {"message": marshalled_pubsub_request["data"]}
    return from_json(json.dumps(event))
def firebase_auth_cloudevent_output():
    """Load the expected firebase-auth CloudEvent from the test fixture."""
    fixture_path = TEST_DATA_DIR / "firebase-auth-cloudevent-output.json"
    with open(fixture_path, "r") as fixture:
        return from_json(fixture.read())
async def _send():
    """Receive one pickled result message and return its data payload."""
    async with websockets.connect(self._result_uri) as websocket:
        raw = await websocket.recv()
        # Results are pickled rather than JSON-encoded.
        return from_json(raw, lambda blob: pickle.loads(blob)).data