def startUpdater(self, mpUpdater):
    # Command and data queues shared with the updater process
    queueCmd = aioprocessing.AioQueue()
    queueData = aioprocessing.AioQueue()
    self.updater = aioprocessing.AioProcess(
        name='updater', target=startcoUpdater, args=(queueCmd, queueData))
    self.updater.start()
    # Run the asyncio-side handler alongside the child process
    StartCoroutine(mpUpdater(self.updater, queueCmd, queueData), self)
    return queueCmd, queueData

def start(self):
    self.pending_cmds = {}
    # One end of the pipe stays in the parent; the other goes to the child
    self.channel, client_channel = aioprocessing.AioPipe()
    # Queues carrying the child's stdout/stderr back to the parent
    self.stdout_q = sout = aioprocessing.AioQueue()
    self.stderr_q = serr = aioprocessing.AioQueue()
    # Background task listening for messages from the child
    self.listener = asyncio.ensure_future(self.listen_task())
    loop = asyncio.get_event_loop()
    kernel = self.slave(client_channel, sout, serr)
    self.process = p = aioprocessing.AioProcess(target=kernel.start)
    p.start()

async def example(queue, event, lock):
    l = [1, 2, 3, 4, 5]
    p = aioprocessing.AioProcess(target=func, args=(queue, event, lock, l))
    p.start()
    # Consume results from the worker until it sends a None sentinel
    while True:
        result = await queue.coro_get()
        print('pie')
        if result is None:
            break
        print("Got result {}".format(result))
    # Wait for the child process to exit without blocking the event loop
    await p.coro_join()

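# `func` is not defined in the excerpt above; a minimal sketch of a compatible
# worker (an assumption, modeled on the usual aioprocessing producer pattern)
# puts each item on the queue from the child process and finishes with a None
# sentinel so the consumer loop above knows when to stop. Inside the child,
# the Aio* objects are used through their regular blocking methods.
def func(queue, event, lock, items):
    with lock:
        event.set()
        for item in items:
            queue.put(item)
    queue.put(None)  # sentinel that ends the consumer loop
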
def test_pickle_queue(self):
    t = ("a", "b")
    q = multiprocessing.Queue()
    p = aioprocessing.AioProcess(target=f, args=(q, ) + t)
    p.start()

    async def join():
        await p.coro_join()

    self.loop.run_until_complete(join())
    self.assertEqual(q.get(), t)

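# The test above relies on a module-level helper `f` that the excerpt does not
# show; a plausible sketch (an assumption, not the library's actual test
# helper) is a function that puts its remaining arguments on the queue as a
# tuple, so q.get() in the parent returns ("a", "b"):
def f(q, *args):
    q.put(args)
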
def __init__(self, enabled=True):
    super(ProcessingQueueNode, self).__init__(enabled)

    # Queues and synchronization primitives shared with the worker process
    self.read_queue = aioprocessing.AioQueue()
    self.write_queue = aioprocessing.AioQueue()
    self.lock = aioprocessing.AioLock()
    self.exit_event = aioprocessing.AioEvent()

    self.process = aioprocessing.AioProcess(
        target=self.processor_heavy_fn,
        args=(self.exit_event, self.lock, self.write_queue, self.read_queue))

def __init__(self, loop, pool, func, consumer, start_event, terminate_event):
    self.start_event = start_event
    self.terminate_event = terminate_event
    self.pool = pool
    self.func = func
    self.queue = aioprocessing.AioQueue()
    self.lock = aioprocessing.AioLock()
    self._event = aioprocessing.AioEvent()
    self.consumer = consumer
    self.loop = loop
    self.process = aioprocessing.AioProcess(
        target=GeneratorWorker.func_run,
        args=(self.loop, self.queue, self.lock, self._event, self.func))

def main():
    # Setup logging
    logging.basicConfig(level='INFO', format='[%(levelname)s] %(message)s')

    # Setup event loop
    loop = asyncio.get_event_loop()

    # Setup UDP server
    logging.info('Starting UDP server listening on: %s#%d' % (host, port))
    udp_listen = loop.create_datagram_endpoint(UdpDohProtocol, local_addr=(host, port))
    udp, protocol = loop.run_until_complete(udp_listen)

    # Setup TCP server
    if args.tcp:
        logging.info('Starting TCP server listening on %s#%d' % (host, port))
        tcp_listen = loop.create_server(TcpDohProtocol, host, port)
        tcp = loop.run_until_complete(tcp_listen)

    # # Connect to upstream servers
    # for upstream in upstreams:
    #     logging.info('Connecting to upstream server: %s' % (upstream))
    #     conns.append(loop.run_until_complete(upstream_connect()))

    # Serve forever
    try:
        for _ in range(3):
            aioprocessing.AioProcess(target=forwarder, daemon=True).start()
        loop.run_forever()
    except (KeyboardInterrupt, SystemExit):
        pass

    # # Close upstream connections
    # for conn in conns:
    #     loop.run_until_complete(upstream_close(conn))

    # Close listening servers and event loop
    udp.close()
    if args.tcp:
        tcp.close()
    loop.close()

def main():
    port_identifier = None
    for port in list_ports.comports():
        # Check for VendorID and ProductID in hardware id of antenna
        if "0403:6001" in port.hwid:
            # port identifier is first part of str(port) (until whitespace)
            port_identifier = str(port).split()[0]
            print(f"Portnumber: {port_identifier}")
    if port_identifier is None:
        raise IOError("Antenna is not connected!")

    left_pipe_end, right_pipe_end = aioprocessing.AioPipe()

    # separate process that contains communication and data processing;
    # pass one end of the pipe so data can be sent to this process
    p = aioprocessing.AioProcess(target=dataproducer.communication,
                                 args=(port_identifier, left_pipe_end))
    p.start()

    # pass other end of the pipe to the websocket server, to receive telemetry
    # from the other process
    websocket.start_server(right_pipe_end)

def is_prime(n):
    if n < 2:
        return False
    if n == 2:
        return True
    if n % 2 == 0:
        return False
    sqrt_n = int(math.floor(math.sqrt(n)))
    for i in range(3, sqrt_n + 1, 2):
        if n % i == 0:
            return False
    return True


def on_next(x):
    print("on_next = {}".format(x))


def on_error(err):
    print("on_error = {}".format(err))


def on_completed():
    print("on_completed = {}".format("complete"))


if __name__ == "__main__":
    multiprocessing.freeze_support()
    # Run the primality check in a separate process and expose its completion
    # to Rx as a future that resolves when the process joins
    p = aioprocessing.AioProcess(target=is_prime, args=(115797848077099, ))
    p.start()
    fut = asyncio.ensure_future(p.coro_join())
    rx.from_future(fut).subscribe(on_next, on_error, on_completed)
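    # The Rx subscription above only fires once `fut` completes, which requires
    # the asyncio event loop to actually run it; the excerpt ends at the
    # subscribe call, so the following two lines are an assumed continuation
    # rather than part of the original script:
    loop = asyncio.get_event_loop()
    loop.run_until_complete(fut)
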
self.scheduler.add_job(self.supervisor, trigger='cron', minute='*')
await self.schedule_monitors()
# while True:
#     for k, v in self.tunnel._container.items():
#         print(f'{k}: {v.qsize()}')
#     print(f'{len(self.tunnel.keys())} workers: {len(self._worked_tunnel)}')
#     await asyncio.sleep(5)


def main(exchange_info):
    try:
        import logging
        logging.basicConfig(level=logging.WARNING)
        loop = asyncio.get_event_loop()
        m = Main(exchange_info)
        loop.run_until_complete(m.main())
        loop.run_forever()
    except KeyboardInterrupt:
        return


if __name__ == '__main__':
    import os
    import aioprocessing

    processes = []
    chunk_num = len(settings['EXCHANGES']) // os.cpu_count()
    # Split the exchanges into chunks so there is roughly one worker process per CPU core
    for exchange_info in chunk(settings['EXCHANGES'].items(), chunk_num):
        p = aioprocessing.AioProcess(target=main, args=(exchange_info, ))
        processes.append(p)
    for p in processes:
        p.start()

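# `chunk` and `settings` come from the surrounding project and are not shown in
# the excerpt; a generic sketch of such a chunking helper (an assumption, not
# the project's actual implementation) could look like this:
def chunk(iterable, size):
    """Yield successive lists of at most `size` items from `iterable`."""
    items = list(iterable)
    size = max(size, 1)
    for i in range(0, len(items), size):
        yield items[i:i + size]
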
reporter_args = args.reporter_args.split()
reporter = create_reporter(args.reporter_name, *reporter_args)
reporter_queue = start_reporter(reporter)

synchronize_args = override_defaults(default_synchronize_args, vars(args))
synchronize = get_synchronize_fn(**synchronize_args)
demultiplex_flows = get_demultiplex_flow_fn(seed_fn, sequence_cls,
                                            synchronize, reporter_queue)

# Print settings
logging.info('Starting UDP observer...')
logging.info(f'sequence_args={sequence_args}')
logging.info(f'reporter={args.reporter_name} {reporter_args}')

# Start libtrace process
queue = aioprocessing.AioQueue()
lt_process = aioprocessing.AioProcess(target=try_run_libtrace,
                                      args=(args.in_uri, queue))
lt_process.start()

# Start observing flows
loop = asyncio.get_event_loop()
try:
    loop.run_until_complete(demultiplex_flows(queue))
except KeyboardInterrupt:
    logging.info('Stopping observer...')
finally:
    reporter_queue.join()
    loop.run_until_complete(reporter.cleanup())
    pending = asyncio.Task.all_tasks()
    cancel_pending_tasks()
    loop.run_until_complete(lt_process.coro_join())

def __init__(self, width, height, address=("10.76.76.1", 80), enabled=True,
             enable_images=True):
    super(WebsiteClient, self).__init__(enabled)
    # http://user:[email protected]/api/robot/rightcam

    self.address = address

    self.requested_width = width
    self.requested_height = height
    self.width = width
    self.height = height
    self.num_frames = 0

    self.reader = None
    self.writer = None

    self.enable_images = enable_images

    # Binary protocol constants for parsing the camera stream
    self.response_start_header = b'\xbb\x08'
    self.message_start_header = b'\xde\xad\xbe\xef'
    self.frame_len = 4
    self.timestamp_len = 8
    self.width_len = 2
    self.height_len = 2
    self.endian = 'big'
    self.chunk_size = int(self.width * self.height / 2)

    self.fps = 30.0
    self.length_sec = 0.0
    self.fps_sum = 0.0
    self.fps_avg = 30.0
    self.prev_t = None

    self.credentials = base64.b64encode(b'robot:naboris').decode('ascii')
    # self.manager = aioprocessing.AioSyncManager()

    self.connection = HTTPConnection("%s:%s" % (self.address[0], self.address[1]))
    self.headers = {
        'Content-type': 'image/jpeg',
        'Authorization': 'Basic %s' % self.credentials
    }

    # Image stream: request the camera endpoint and hand the open response
    # to a separate process for parsing
    if self.enable_images:
        self.connection.request("GET", "/api/robot/rightcam_meta", headers=self.headers)
        response = self.connection.getresponse()
    else:
        response = None
    self.image_process = aioprocessing.AioProcess(
        target=self.retrieve_images, args=(response, ))
    self.image_queue = aioprocessing.AioQueue()

    # Command stream: same pattern for the /cmd endpoint
    self.connection.request("GET", "/cmd", headers=self.headers)
    response = self.connection.getresponse()
    self.command_process = aioprocessing.AioProcess(
        target=self.retrieve_commands, args=(response, ))
    self.command_queue = aioprocessing.AioQueue()

    self.exit_event = aioprocessing.AioEvent()

    self.command_service_tag = "commands"
    self.define_service(self.command_service_tag, str)