async def run_strategy(self, data_socket: azmq.Socket, order_socket: azmq.Socket):
    """Consume one data message, then trade towards the new target positions."""
    bars = self.panel.update(str_to_datum(await data_socket.recv_string()))
    if bars is not None:
        timestamp, prices = bars
        new_positions = self.strategy.generate_positions(timestamp, prices)
        if new_positions is not None:
            # round towards 0 to avoid over-execution
            position_delta = np.fix(new_positions - self.positions)
            # send order, dropping deltas smaller than one unit
            order = {
                k: v
                for k, v in zip(self.asset_strings, position_delta)
                if abs(v) >= 1
            }
            order['Strategy'] = self.name
            await order_socket.send_json(order)
            self.logger.info(f'Sent order {order}')
            executions = await utils.wait_for_reply(order_socket, self.wait_time,
                                                    const.DCode, 'Order execution')
            # for post processing
            self.equity_curve.update(timestamp, executions, prices['close'][-1])
        else:
            self.equity_curve.update(timestamp, {}, prices['close'][-1])

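# The np.fix call above truncates each position delta toward zero, so a
# fractional target never rounds up into a larger trade than intended.
# A minimal standalone sketch of that behaviour (the tickers and numbers
# below are made up for illustration, not taken from the strategy):

import numpy as np

new_positions = np.array([10.9, -3.7, 0.4])
current_positions = np.array([8.0, -1.0, 0.0])

# np.fix rounds toward zero: 2.9 -> 2.0, -2.7 -> -2.0, 0.4 -> 0.0
delta = np.fix(new_positions - current_positions)
print(delta)  # [ 2. -2.  0.]

# deltas smaller than one unit are dropped, mirroring the abs(v) >= 1 filter
order = {k: v for k, v in zip(['ES', 'NQ', 'CL'], delta) if abs(v) >= 1}
print(order)  # {'ES': 2.0, 'NQ': -2.0}
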
async def process_subscription(self, socket: azmq.Socket):
    """
    Receive a request of the form "asset1:ticker1,asset2:ticker2,..."
    and subscribe to the broker.

    TODO: add exception handling. Currently this always succeeds.
    """
    msg = await socket.recv_string()  # format: asset1:ticker1,asset2:ticker2,...
    self.logger.info(f'Received subscription request: {msg}')
    # filter out assets that are already subscribed
    asset_strings = msg.split(',')
    assets = [Asset(s) for s in asset_strings]
    assets = [asset for asset in assets if asset not in self.subscribed]
    if len(assets) > 0:
        # submit subscription
        await utils.wait_for(self.subscribe(assets),
                             self.subscription_wait_time,
                             JobError('Data subscription timeout'))
        # remember the newly added subscriptions
        self.subscribed.update(dict.fromkeys(assets))
        self.logger.info(f'Subscribed: {str(assets)}')
    else:
        self.logger.info('No new subscription is needed')
    await socket.send_json({'code': const.DCode.Succeeded.value})

async def read_file_handler(store: StorageServerStore, argframes: List[Frame],
                            sock: Socket, id_frame: Frame):
    # the first argument frame carries the file name
    filename = str(argframes.pop(0).bytes, 'utf8')
    vfile = store.files.get(filename, None)
    if vfile:
        # reply: identity, empty delimiter, status byte, file content
        await sock.send_multipart([id_frame, Frame(), bytes([0]), vfile.content])
    else:
        # unknown file: same envelope (the identity frame is required for the
        # reply to be routable), but no content frame
        await sock.send_multipart([id_frame, Frame(), bytes([0])])

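# A hypothetical client for this handler, assuming a DEALER socket talking to
# the ROUTER that dispatches read_file_handler. The endpoint, command frame
# and request framing below are illustrative assumptions, not taken from the
# source; only the reply layout (delimiter, status byte, optional content)
# mirrors the handler above.

import zmq
import zmq.asyncio

async def read_file(endpoint: str, filename: str) -> bytes:
    ctx = zmq.asyncio.Context.instance()
    sock = ctx.socket(zmq.DEALER)
    sock.connect(endpoint)  # e.g. 'tcp://127.0.0.1:5555' (illustrative)
    try:
        # hypothetical request framing: empty delimiter, command, argument
        await sock.send_multipart([b'', b'read_file', filename.encode('utf8')])
        # reply arrives as [empty delimiter, status byte, content?]
        _, status, *rest = await sock.recv_multipart()
        if not rest:
            raise FileNotFoundError(filename)
        return rest[0]
    finally:
        sock.close()
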
async def iopub_listener(send, context):
    client = context.jupyter_client
    socket = Socket(context=Context(),
                    socket_type=client.iopub_channel.socket.socket_type)
    try:
        with open(client.connection_file) as f:
            connection_info = json.load(f)
        socket.connect(
            f'{connection_info["transport"]}://{connection_info["ip"]}:{connection_info["iopub_port"]}'
        )
        socket.subscribe(b'')
        while True:
            message = await socket.recv_multipart()
            # frames: topic, delimiter, signature, header, parent_header,
            # metadata, content, buffers...
            _, _, _, _, parent_header, _, content, *_ = message
            parent_header = json.loads(parent_header)
            content = json.loads(content)
            execution_state = content.get('execution_state', None)
            if context.evaluation_state is not RESTARTING:
                if context.iopub_buffer is not None:
                    logger.debug('buffered %s', content)
                    context.iopub_buffer.append(content)
                else:
                    logger.debug('unbuffered %s', content)
                    await send('IOPub', content)
            if execution_state == 'idle':
                msg_id = parent_header['msg_id']
                context.pending_messages.discard(msg_id)
                logger.info('removing message id %s; %s', msg_id,
                            context.pending_messages)
                if context.kernel_restart_completion_id == msg_id:
                    if context.realtime_evaluation_mode:
                        context.job_a = asyncio.create_task(job_a(context))
                    else:
                        set_state(context, IDLE)
                elif not context.pending_messages:
                    # only change state when there are no pending messages
                    # awaiting a response
                    evaluation_state = context.evaluation_state
                    if evaluation_state is A_RUNNING:
                        if context.b_queued:
                            context.job_b = asyncio.create_task(
                                job_b(send, context))
                        else:
                            set_state(context, IDLE)
                    elif evaluation_state is B_RUNNING:
                        asyncio.create_task(reset_kernel(context))
    except asyncio.CancelledError:
        socket.close()
        logger.info('iopub socket closed')

async def subscribe(self, sub_socket: azmq.Socket, broadcast_socket: azmq.Socket):
    """
    Send a subscription request to the data server and subscribe to the broadcast.

    This job can't be run as part of the startup sequence; otherwise we would
    lose the subscribed assets.
    """
    # send subscription
    msg = ','.join(self.asset_strings)
    self.logger.info(f'Subscribing {msg}')
    await sub_socket.send_string(msg)
    await utils.wait_for_reply(sub_socket, self.wait_time, const.DCode,
                               'Data subscription')  # use DCode
    self.logger.info('Data subscribed')
    # subscribe to the ticker of every asset
    for asset_string in self.asset_strings:
        broadcast_socket.subscribe(asset_string)

async def _cothread_connection_monitor(self, physical_addr: PhysicalAddress,
                                       connection: Socket):
    # track the connection state through the socket's monitor channel
    monitor_socket: Socket = connection.get_monitor_socket()
    while True:
        messages = await monitor_socket.recv_multipart()
        evd = parse_monitor_message(messages)
        evtype = evd["event"]
        if evtype == zmq.EVENT_DISCONNECTED:
            physical_addr.status = PhysicalAddressStatus.OFFLINE
        elif evtype == zmq.EVENT_CONNECTED:
            physical_addr.status = PhysicalAddressStatus.CONNECTED
        elif evtype == zmq.EVENT_CLOSED:
            # the socket is gone for good: mark it offline, drop it and stop
            # monitoring
            physical_addr.status = PhysicalAddressStatus.OFFLINE
            self.physical_connections.pop(physical_addr.address)
            return

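# get_monitor_socket() is pyzmq's built-in socket monitoring: it attaches an
# inproc PAIR socket that streams lifecycle events for the monitored socket.
# A self-contained sketch of the same pattern outside this class (the socket
# type and endpoint are illustrative):

import zmq
import zmq.asyncio
from zmq.utils.monitor import parse_monitor_message

async def watch_connection(endpoint: str):
    ctx = zmq.asyncio.Context.instance()
    sock = ctx.socket(zmq.DEALER)
    monitor = sock.get_monitor_socket()  # inproc PAIR carrying event frames
    sock.connect(endpoint)
    while True:
        evd = parse_monitor_message(await monitor.recv_multipart())
        if evd["event"] == zmq.EVENT_CONNECTED:
            print("connected to", evd["endpoint"])
        elif evd["event"] in (zmq.EVENT_DISCONNECTED, zmq.EVENT_CLOSED):
            print("lost", evd["endpoint"])
            break
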
async def wait_for_reply(socket: azmq.Socket, wait_time: float,
                         code: Type[Union[const.CCode, const.DCode]], desc: str):
    """
    We duplicate the code from "wait_for" because we intend to replace
    "wait_for" entirely.

    :param socket: socket from which we wait for the reply
    :param wait_time: timeout in seconds
    :param code: which reply code class to use
    :param desc: request description, used when raising errors
    """
    try:
        reply = await asyncio.wait_for(socket.recv_json(), wait_time)  # type: dict
    except asyncio.TimeoutError:
        raise JobError(f'{desc} timed out')
    # negative codes signal failure; translate them into a readable error name
    if reply.get('code', 0) < 0:
        raise JobError(code(reply['code']).name)
    return reply

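# On the calling side the pattern is always send-then-wait, as in subscribe
# above. A condensed usage sketch; sub_socket, utils and const come from the
# surrounding codebase, and the asset list is made up:

async def resubscribe(sub_socket, wait_time: float):
    await sub_socket.send_string('ES,NQ')  # made-up asset list
    # raises JobError('Data subscription timed out') after wait_time seconds,
    # or a JobError carrying the decoded DCode name if the server replies
    # with a negative code
    return await utils.wait_for_reply(sub_socket, wait_time, const.DCode,
                                      'Data subscription')
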
def apply_tcp_sock_options(sock: Socket):
    # ZMQ_LINGER is in milliseconds: allow at most 1 ms to flush pending
    # messages when the socket is closed
    sock.setsockopt(zmq.LINGER, 1)

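# Where this hook fits: apply the options right after creating the socket and
# before connecting. A minimal sketch (the socket type and endpoint are
# illustrative):

import zmq

def open_tcp_connection(endpoint: str) -> zmq.Socket:
    ctx = zmq.Context.instance()
    sock = ctx.socket(zmq.DEALER)
    apply_tcp_sock_options(sock)  # sets ZMQ_LINGER to 1 ms
    sock.connect(endpoint)  # e.g. 'tcp://127.0.0.1:5555'
    return sock  # sock.close() now blocks for at most ~1 ms
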
async def _cothread_user_socket_proxy(self, router_info: RouterInfo, lease: Lease,
                                      socket: Socket):
    current_physical_address: Optional[PhysicalAddress] = \
        await self.get_socket_for_lease(router_info, lease)
    if not current_physical_address:
        socket.close()
        return
    poller = Poller()
    poller.register(current_physical_address.socket, zmq.POLLIN)
    poller.register(socket, zmq.POLLIN)
    # frames whose forwarding timed out, kept for retry once the connection recovers
    buffer: List[Tuple[Socket, List[Frame]]] = []
    while True:
        if (not current_physical_address) or (
                not current_physical_address.is_connected):
            # the physical connection dropped: acquire a fresh one for this lease
            if current_physical_address:
                poller.unregister(current_physical_address.socket)
            current_physical_address = await self.get_socket_for_lease(
                router_info, lease)
            if not current_physical_address:
                socket.close()
                return
            poller.register(current_physical_address.socket, zmq.POLLIN)
        if not current_physical_address.socket:
            socket.close()
            return
        if buffer:
            # flush buffered frames first, preserving their order
            buffer_pointer = -1
            for target_socket, frames in buffer:
                # re-resolve the target so a stale physical socket is replaced
                # by the current one
                target_socket = (current_physical_address.socket
                                 if target_socket != socket else socket)
                try:
                    await asyncio.wait_for(
                        target_socket.send_multipart(frames, copy=False,
                                                     track=True),
                        len(frames) * 5,  # allow 5 seconds per frame
                    )
                    buffer_pointer += 1
                except asyncio.TimeoutError:
                    break
            if buffer_pointer != (len(buffer) - 1):
                # keep only the frames that have not been sent yet
                buffer = buffer[buffer_pointer + 1:]
            else:
                buffer = []
        else:
            pevents: List[Tuple[Socket, int]] = await poller.poll()
            for sock, ev in pevents:
                # forward user traffic to the physical socket and vice versa
                target_sock = (current_physical_address.socket
                               if sock == socket else socket)
                if ev & zmq.POLLIN:
                    frames = await sock.recv_multipart(copy=False)
                    try:
                        await asyncio.wait_for(
                            target_sock.send_multipart(frames, copy=False,
                                                       track=True),
                            len(frames) * 5,  # allow 5 seconds per frame
                        )
                    except asyncio.TimeoutError:
                        buffer.append((target_sock, frames))

async def get_spec(socket: Socket, request_id: str) -> APISpec:
    """
    Construct the OpenAPI spec by interrogating FlowMachine.

    Parameters
    ----------
    socket : Socket
    request_id : str
        Unique id of the request

    Returns
    -------
    APISpec
        The specification object
    """
    msg = {"request_id": request_id, "action": "get_query_schemas"}
    await socket.send_json(msg)
    # Get the reply.
    reply = await socket.recv_json()
    flowmachine_query_schemas = reply["payload"]["query_schemas"]
    # Need to mark query_kind as a required field.
    # This is a workaround because the marshmallow-oneOf plugin strips
    # the query_kind off, which means it can't be required from the
    # marshmallow side without raising an error.
    for schema, schema_dict in flowmachine_query_schemas.items():
        try:
            if "query_kind" in schema_dict["properties"]:
                schema_dict["required"].append("query_kind")
        except KeyError:
            pass  # Doesn't have any properties
    spec = APISpec(
        title="FlowAPI",
        version=__version__,
        openapi_version="3.0.1",
        info=dict(
            description="FlowKit Analytical API",
            license=dict(name="MPLv2",
                         url="https://www.mozilla.org/en-US/MPL/2.0/"),
            contact=dict(email="*****@*****.**"),
        ),
    )
    spec.components.schemas.update(flowmachine_query_schemas)
    spec.components.security_scheme(
        "token",
        {
            "type": "http",
            "scheme": "bearer",
            "bearerFormat": "JWT",
            "x-security-scopes": sorted(schema_to_scopes(spec.to_dict())),
            "x-audience": current_app.config["JWT_DECODE_AUDIENCE"],
        },
    )
    # Loop over all the registered views and try to parse a yaml
    # openapi spec from their docstrings
    for rule in current_app.url_map.iter_rules():
        try:
            func = current_app.view_functions[rule.endpoint]
            operations = yaml_utils.load_operations_from_docstring(func.__doc__)
            if len(operations) > 0:
                for method, op in operations.items():
                    op["operationId"] = f"{rule.endpoint}.{method}"
                spec.path(
                    path=rule.rule,
                    operations=operations,
                )
        except Exception:
            pass  # Don't include in API
    return spec

async def get_spec(socket: Socket, request_id: str) -> APISpec:
    """
    Construct the OpenAPI spec by interrogating FlowMachine.

    Parameters
    ----------
    socket : Socket
    request_id : str
        Unique id of the request

    Returns
    -------
    APISpec
        The specification object
    """
    msg = {"request_id": request_id, "action": "get_query_schemas"}
    await socket.send_json(msg)
    # Get the reply.
    reply = await socket.recv_json()
    flowmachine_query_schemas = reply["payload"]["query_schemas"]
    # Need to mark query_kind as a required field.
    # This is a workaround because the marshmallow-oneOf plugin strips
    # the query_kind off, which means it can't be required from the
    # marshmallow side without raising an error.
    for schema, schema_dict in flowmachine_query_schemas.items():
        try:
            schema_dict["required"].append("query_kind")
        except KeyError:
            pass  # Doesn't have any properties
    spec = APISpec(
        title="FlowAPI",
        version=__version__,
        openapi_version="3.0.1",
        info=dict(
            description="FlowKit Analytical API",
            license=dict(name="MPLv2",
                         url="https://www.mozilla.org/en-US/MPL/2.0/"),
            contact=dict(email="*****@*****.**"),
        ),
    )
    # register the schemas wholesale on the spec's components
    spec.components._schemas = flowmachine_query_schemas
    spec.components.security_scheme(
        "token", dict(type="http", scheme="bearer", bearerFormat="JWT")
    )
    # Loop over all the registered views and try to parse a yaml
    # openapi spec from their docstrings
    for endpoint_func_name, rule in current_app.url_map.endpoints.items():
        try:
            func = current_app.view_functions[endpoint_func_name]
            operations = yaml_utils.load_operations_from_docstring(func.__doc__)
            if len(operations) > 0:
                for method, op in operations.items():
                    op["operationId"] = f"{endpoint_func_name}.{method}"
                spec.path(
                    # In theory, multiple rules could match, but there will
                    # only be a single one here
                    path=rule[0].rule,
                    operations=operations,
                )
        except Exception:
            pass  # Don't include in API
    return spec

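# Both variants of get_spec expect an already-connected zmq.asyncio socket
# speaking FlowMachine's JSON request protocol, and they read current_app, so
# they must run inside the web application's context. A minimal sketch of
# wiring one up; the REQ socket type and the endpoint are assumptions for
# illustration, not taken from this file:

import zmq
import zmq.asyncio

async def build_spec(request_id: str) -> APISpec:
    ctx = zmq.asyncio.Context.instance()
    sock = ctx.socket(zmq.REQ)
    sock.connect("tcp://localhost:5555")  # assumed FlowMachine endpoint
    try:
        return await get_spec(sock, request_id)
    finally:
        sock.close()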