def test_default_event_loop(self):
    """RPC client/server created with loop=None must pick up the default
    event loop installed by ZmqEventLoopPolicy.

    Modernized: ``@asyncio.coroutine`` / ``yield from`` were removed in
    Python 3.11; ``async def`` / ``await`` are the direct replacement.
    """
    port = find_unused_port()
    asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
    self.addCleanup(asyncio.set_event_loop_policy, None)
    self.addCleanup(asyncio.set_event_loop, None)

    async def create():
        # loop=None forces the library to fall back to the default loop.
        server = await aiozmq.rpc.serve_rpc(
            MyHandler(self.loop),
            bind='tcp://127.0.0.1:{}'.format(port),
            loop=None)
        client = await aiozmq.rpc.connect_rpc(
            connect='tcp://127.0.0.1:{}'.format(port),
            loop=None)
        return client, server

    self.loop = loop = asyncio.get_event_loop()
    self.client, self.server = loop.run_until_complete(create())

    async def communicate():
        ret = await self.client.call.func(1)
        self.assertEqual(2, ret)

    loop.run_until_complete(communicate())
def upload(request):
    """View to handle file uploads. They are sent to Xbus.

    :raises HTTPBadRequest: when request parameters are missing, the
        emission profile is invalid or unauthorized, or the emitter
        reports a ``FileEmitterException``.
    :return: dict carrying the ID of the envelope created by Xbus.
    """
    # Check request parameters.
    emission_profile_id = request.params.get('emission_profile_id')
    file = request.params.get('file')
    if not emission_profile_id or file is None:
        raise HTTPBadRequest(
            json_body={'error': 'No emission profile selected'},
        )

    # Get emission profile data from the database.
    emission_profile = DBSession.query(EmissionProfile).filter(
        EmissionProfile.id == emission_profile_id).first()
    if not emission_profile:
        raise HTTPBadRequest(json_body={'error': 'Invalid emission profile'}, )

    # Ensure execution of the emission profile is authorized for the current
    # user.
    if emission_profile.owner_id != get_logged_user_id(request):
        raise HTTPBadRequest(
            json_body={'error': 'Emission profile unauthorized'},
        )

    # Fetch the input descriptor.
    descriptor = emission_profile.input_descriptor.descriptor.decode('utf-8')

    # TODO Use the selected encoding when decoding the file.

    front_url = request.registry.settings['xbus.broker.front.url']
    login = request.registry.settings['xbus.broker.front.login']
    password = request.registry.settings['xbus.broker.front.password']

    # Use a temporary file to store the upload.
    # TODO Use a pipe or some such?
    with NamedTemporaryFile(prefix='xbus-monitor-upload-') as f_temp:
        while True:
            buf = file.file.read(io.DEFAULT_BUFFER_SIZE)
            f_temp.write(buf)
            if len(buf) == 0:
                break

        # Open the file as text; close it deterministically (the original
        # left this handle dangling until garbage collection).
        f_temp.flush()
        with open(f_temp.name, 'r', newline='') as f_temp_text:

            # Send our data via 0mq to the Xbus front-end.
            zmq_loop = aiozmq.ZmqEventLoopPolicy().new_event_loop()
            try:
                emitter = FileEmitter(front_url, login, password,
                                      [descriptor], loop=zmq_loop)
                zmq_loop.run_until_complete(emitter.login())
                envelope_id = zmq_loop.run_until_complete(
                    emitter.send_files([(f_temp_text, None)]))
            except FileEmitterException as e:
                raise HTTPBadRequest(json_body={'error': str(e)})
            finally:
                # The loop is request-scoped: close it so its 0mq sockets
                # and selector are released instead of leaking per upload.
                zmq_loop.close()

    return {'envelope_id': envelope_id}
def setUp(self):
    """Install the aiozmq policy and give each test its own private loop."""
    asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
    self.loop = asyncio.new_event_loop()
    # Feed None to asyncio.set_event_loop() to directly specify the
    # fact that the library should not rely on global loop existence
    # and safely work by explicit loop passing.
    asyncio.set_event_loop(None)
    self.front_socket = 'inproc://#test'
def test_default_event_loop(self):
    """A pipeline pair built without an explicit loop must be bound to the
    policy's default event loop."""
    asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
    self.addCleanup(asyncio.set_event_loop_policy, None)
    # Registers closing of the current (setUp) loop before self.loop is
    # rebound below — the bound method captures the old loop object.
    self.addCleanup(self.loop.close)
    self.loop = asyncio.get_event_loop()
    client, server = self.make_pipeline_pair(use_loop=False)
    self.client = client
    self.server = server
    for endpoint in (client, server):
        self.assertIs(endpoint._loop, self.loop)
def main():
    """Wire an emitter, a worker and a consumer together and run them.

    With ``multiprocess`` enabled the worker and consumer each get their
    own event loop driven from a child process; otherwise everything
    shares a single loop.

    Fix: ``asyncio. async (...)`` no longer parses — ``async`` became a
    keyword in Python 3.7 and ``asyncio.async`` was removed in 3.10;
    ``asyncio.ensure_future`` is the documented replacement.
    """
    frnt_url = 'tcp://127.0.0.1:1984'
    cons_url = 'tcp://127.0.0.1:9148'
    work_url = 'tcp://127.0.0.1:8419'
    back_url = 'tcp://127.0.0.1:4891'
    emit_log = 'test_emitter'
    emit_pwd = 'password'
    work_log = 'worker_role'
    work_pwd = 'password'
    cons_log = 'consumer_role'
    cons_pwd = 'password'

    multiprocess = True

    loop = aiozmq.ZmqEventLoopPolicy().new_event_loop()
    if multiprocess:
        emit_loop = loop
        work_loop = aiozmq.ZmqEventLoopPolicy().new_event_loop()
        cons_loop = aiozmq.ZmqEventLoopPolicy().new_event_loop()
    else:
        work_loop = cons_loop = emit_loop = loop

    emitter = coro_emitter(frnt_url, emit_log, emit_pwd, 5, 1000, emit_loop)
    worker = coro_worker(work_url, back_url, work_log, work_pwd, work_loop)
    consumer = coro_consumer(cons_url, back_url, cons_log, cons_pwd,
                             cons_loop)

    if multiprocess:
        asyncio.ensure_future(emitter, loop=emit_loop)
        asyncio.ensure_future(worker, loop=work_loop)
        asyncio.ensure_future(consumer, loop=cons_loop)
        # NOTE(review): driving loops created in the parent from forked
        # child processes is fragile — confirm this works as intended.
        work_run_proc = multiprocessing.Process(target=work_loop.run_forever)
        cons_run_proc = multiprocessing.Process(target=cons_loop.run_forever)
        time.sleep(1)
        emit_run_proc = multiprocessing.Process(target=emit_loop.run_forever)
        emit_run_proc.start()
        work_run_proc.start()
        cons_run_proc.start()
    else:
        loop.run_until_complete(asyncio.gather(worker, consumer, loop=loop))
        loop.run_until_complete(emitter)
        loop.run_forever()
        worker.close()
        consumer.close()
def test_default_event_loop(self):
    """An RPC pair built without an explicit loop must use the default one."""
    asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
    self.addCleanup(asyncio.set_event_loop_policy, None)
    # Registers closing of the current (setUp) loop before self.loop is
    # rebound below.
    self.addCleanup(self.loop.close)
    loop = asyncio.get_event_loop()
    self.loop = loop
    client, server = self.make_rpc_pair(use_loop=False)

    async def communicate():
        self.assertEqual(2, await client.call.func(1))

    loop.run_until_complete(communicate())
def test_default_event_loop(self):
    """A pipeline served/connected with loop=None must use the default loop.

    Modernized: ``@asyncio.coroutine`` / ``yield from`` were removed in
    Python 3.11; ``async def`` / ``await`` are the direct replacement.
    """
    asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())

    async def create():
        server = await aiozmq.rpc.serve_pipeline(
            MyHandler(self.queue, self.loop),
            bind='tcp://127.0.0.1:*',
            loop=None)
        # The server bound to an ephemeral port; recover the actual
        # endpoint from its transport bindings.
        connect = next(iter(server.transport.bindings()))
        client = await aiozmq.rpc.connect_pipeline(connect=connect,
                                                   loop=None)
        return client, server

    self.loop = loop = asyncio.get_event_loop()
    self.client, self.server = loop.run_until_complete(create())
def __init__(self, url: str, back_url: str, login: str, password: str,
             loop=None):
    """Store the endpoint settings and prepare the connection state.

    :param url: URL of the front-end this object talks to.
    :param back_url: URL of the back-end.
    :param login: Login used when authenticating.
    :param password: Password used when authenticating.
    :param loop: Optional event loop; a fresh aiozmq loop is created
        when None is given.
    """
    self.url = url
    self.back_url = back_url
    self.login = login
    self.password = password
    self.loop = (aiozmq.ZmqEventLoopPolicy().new_event_loop()
                 if loop is None else loop)
    # Filled in later, once connected/authenticated.
    self.client = None
    self.token = None
    # NOTE(review): super(rpc.AttrHandler, self) skips AttrHandler's own
    # __init__ and starts lookup above it — confirm this is intentional.
    super(rpc.AttrHandler, self).__init__()
def _send_item_request(request, data):
    """Send a data clearing item request to Xbus.

    :param data: Data (of any type) to send to the Xbus consumer.

    :return: The result of an "end_event" Xbus API call.
    """
    _ensure_item_clearing_event_type(request)

    front_url = request.registry.settings['xbus.broker.front.url']
    login = request.registry.settings['xbus.broker.front.login']
    password = request.registry.settings['xbus.broker.front.password']

    # Send our request via 0mq to the Xbus front-end.
    zmq_loop = aiozmq.ZmqEventLoopPolicy().new_event_loop()
    try:
        future = _send_item_request_(front_url, login, password, data,
                                     zmq_loop)
        return zmq_loop.run_until_complete(future)
    finally:
        # The loop is created per call: close it so its 0mq sockets and
        # selector are released instead of leaking on every request.
        zmq_loop.close()
def test_default_event_loop(self):
    """A pub/sub pair built without an explicit loop uses the default one."""
    asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
    self.addCleanup(asyncio.set_event_loop_policy, None)
    # Registers closing of the current (setUp) loop before self.loop is
    # rebound below.
    self.addCleanup(self.loop.close)
    # Should use the default loop, not the closed one.
    loop = asyncio.get_event_loop()
    self.loop = loop
    self.queue = asyncio.Queue()
    self.client, self.server = self.make_pubsub_pair(
        use_loop=False, subscribe="topic")

    async def communicate():
        await self.client.publish("topic").func(1)
        self.assertEqual(2, await self.queue.get())

    loop.run_until_complete(communicate())
def replay_envelope(request):
    """Attempt to send failed envelopes into Xbus again.

    :raises HTTPBadRequest: when no envelope is selected.
    :return: dict with the envelope ID and the emission logs.
    """
    envelope_id = request.params.get('envelope_id')
    if not envelope_id:
        raise HTTPBadRequest(json_body={'error': 'No envelope selected'}, )

    front_url = request.registry.settings['xbus.broker.front.url']
    login = request.registry.settings['xbus.broker.front.login']
    password = request.registry.settings['xbus.broker.front.password']

    # Send our data via 0mq to the Xbus front-end.
    zmq_loop = aiozmq.ZmqEventLoopPolicy().new_event_loop()
    try:
        emitter = _coro_emitter(front_url, login, password, envelope_id,
                                zmq_loop)
        envelope_id, logs = zmq_loop.run_until_complete(emitter)
    finally:
        # The loop is request-scoped: close it so its 0mq sockets and
        # selector are released instead of leaking per request.
        zmq_loop.close()

    return {'envelope_id': envelope_id, 'logs': logs}
def test_default_event_loop(self):
    """Pub/sub served/connected with loop=None must use the default loop.

    Modernized: ``@asyncio.coroutine`` / ``yield from`` were removed in
    Python 3.11; ``async def`` / ``await`` are the direct replacement.
    """
    port = find_unused_port()
    asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
    self.addCleanup(asyncio.set_event_loop_policy, None)
    self.addCleanup(asyncio.set_event_loop, None)

    queue = asyncio.Queue()

    async def create():
        server = await aiozmq.rpc.serve_pubsub(
            MyHandler(queue),
            bind='tcp://127.0.0.1:{}'.format(port),
            loop=None,
            subscribe='topic')
        client = await aiozmq.rpc.connect_pubsub(
            connect='tcp://127.0.0.1:{}'.format(port),
            loop=None)
        return client, server

    self.loop = loop = asyncio.get_event_loop()
    self.client, self.server = loop.run_until_complete(create())

    async def communicate():
        # Subscription setup is asynchronous: retry a few times until the
        # server acknowledges the 'started' probe message.
        for i in range(3):
            try:
                await self.client.publish('topic').start()
                ret = await asyncio.wait_for(queue.get(), 0.1)
                self.assertEqual(ret, 'started')
                break
            except asyncio.TimeoutError:
                self.assertLess(i, 3)
        else:
            self.fail('Cannot connect')

        await self.client.publish('topic').func(1)
        ret = await queue.get()
        self.assertEqual(2, ret)

    loop.run_until_complete(communicate())
def refresh_consumers(request):
    """Ask Xbus for a fresh new list of Xbus consumers."""
    # Global arrays we are going to refresh.
    global _consumers
    global _consumer_clearing_sessions

    front_url = request.registry.settings['xbus.broker.front.url']
    login = request.registry.settings['xbus.broker.front.login']
    password = request.registry.settings['xbus.broker.front.password']

    # Send our request via 0mq to the Xbus front-end.
    zmq_loop = aiozmq.ZmqEventLoopPolicy().new_event_loop()
    try:
        emitter = _request_consumers(front_url, login, password, zmq_loop)
        consumer_data = zmq_loop.run_until_complete(emitter)
    finally:
        # The loop is created per call: close it so its 0mq sockets and
        # selector are released instead of leaking on every refresh.
        zmq_loop.close()

    # consumer_data: List of 2-element tuples (metadata dict, feature dict).
    # feature dict: {feature name: feature data}
    # data clearing feature data: 2-element tuple (feature support, DB URL).

    # Fill the consumer cache.
    _consumers = [
        {
            'clearing': bool(consumer_info[1]['clearing'][0]),
            'id': uuid.uuid4().hex,  # Just make one on-the-fly.
            'name': consumer_info[0]['name'],
        }
        for consumer_info in consumer_data
    ]

    # Refresh the cache of consumers with data clearing.
    _consumer_clearing_sessions = {
        _consumers[consumer_index]['id']:
        (_make_session(consumer_info[1]['clearing'][1]))
        for consumer_index, consumer_info in enumerate(consumer_data)
        if consumer_info[1]['clearing'][0]
    }
def setUp(self):
    """Create a fresh aiozmq event-loop policy for each test."""
    self.policy = aiozmq.ZmqEventLoopPolicy()
import asyncio

import aiozmq
import zmq
from jsonrpcserver import method, async_dispatch as dispatch


@method
async def ping():
    """JSON-RPC method returning a static liveness reply."""
    return "pong"


async def main():
    """Serve JSON-RPC requests over a 0mq REP socket, forever."""
    stream = await aiozmq.create_zmq_stream(zmq.REP, bind="tcp://*:5000")
    while True:
        parts = await stream.read()
        # The request payload is the first frame of the multipart message.
        reply = await dispatch(parts[0].decode())
        stream.write((str(reply).encode(), ))


if __name__ == "__main__":
    asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
    asyncio.get_event_loop().run_until_complete(main())
def main():
    """Install the aiozmq loop policy, run go() to completion, then report."""
    asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
    loop = asyncio.get_event_loop()
    loop.run_until_complete(go())
    print("DONE")
def prepare_event_loop():
    """Make aiozmq's loop policy the process-wide asyncio policy."""
    asyncio.set_event_loop_policy(aiozmq.ZmqEventLoopPolicy())
def setUp(self):
    """Create an aiozmq policy configured with five 0mq I/O threads."""
    self.policy = aiozmq.ZmqEventLoopPolicy(io_threads=5)