def test_ctor_loop(self):
    """The loop passed via the ``loop`` keyword is stored on the event."""
    mock_loop = mock.Mock()
    self.assertIs(asyncio.Event(loop=mock_loop)._loop, mock_loop)
    self.assertIs(asyncio.Event(loop=self.loop)._loop, self.loop)
def __init__(self, config_file):
    """Initialize the agent from a JSON configuration file.

    Loads ``config_file``, attaches a ``WatchedFileHandler`` to the root
    logger per the ``log_format``/``log_file``/``log_level`` settings,
    then loads plugin configuration from the config file's directory.

    Parameters
    ----------
    config_file : str
        Path to the JSON configuration file.
    """
    super(Tourbillon, self).__init__()
    self._aio_run_event = asyncio.Event()
    self._thr_run_event = threading.Event()
    self._loop = asyncio.get_event_loop()
    self._tasks = []
    self._pluginconfig = {}

    with open(config_file, 'r') as f:
        self._config = json.load(f)

    formatter = logging.Formatter(fmt=self._config['log_format'])
    handler = logging.handlers.WatchedFileHandler(
        self._config['log_file'])
    handler.setFormatter(formatter)
    handler.setLevel(getattr(logging, self._config['log_level']))
    logging.getLogger().addHandler(handler)
    logging.getLogger().setLevel(
        getattr(logging, self._config['log_level']))
    logger.info('Use config file: %s', config_file)

    self._load_plugins_config(os.path.abspath(
        os.path.dirname(config_file)))
    # Fix: was a bare Python-2 ``print`` statement writing to stdout;
    # route the startup banner through the logger configured above
    # (also removes a Python-3 syntax error).
    logger.info('[Sentinel.la Daemon]')
def test_blocking_put_wait(self):
    """put() on a full queue must block until a slot is freed."""

    # Deterministic test clock: expects exactly one call_later(0.01, ...)
    # and then advances the loop's time by 0.01.
    @asyncio.coroutine
    def gen():
        when = yield
        self.assertAlmostEqual(0.01, when)
        yield 0.01

    loop = self.new_test_loop(gen)

    q = asyncio.Queue(maxsize=1, loop=loop)
    started = asyncio.Event(loop=loop)
    non_local = {'finished': False}  # py2-compatible ``nonlocal``

    @asyncio.coroutine
    def queue_put():
        started.set()
        yield From(q.put(1))
        # Queue is now full (maxsize=1); this put blocks until
        # queue_get drains a slot via q.get_nowait below.
        yield From(q.put(2))
        non_local['finished'] = True

    @asyncio.coroutine
    def queue_get():
        queue_put_task = asyncio.Task(queue_put(), loop=loop)
        yield From(None)
        # Free a slot 10 ms later so the blocked put can complete.
        loop.call_later(0.01, q.get_nowait)
        yield From(started.wait())
        self.assertFalse(non_local['finished'])
        yield From(queue_put_task)
        self.assertTrue(non_local['finished'])

    loop.run_until_complete(queue_get())
    self.assertAlmostEqual(0.01, loop.time())
def read_frame(signals):
    '''
    :py:mod:`trollius` `asyncio` wrapper to return a single frame produced by
    a ``frame-ready`` event signalled by
    :func:`dropbot_chip_qc.video.chip_video_process()`.

    Parameters
    ----------
    signals : blinker.Namespace
        DMF chip webcam monitor signals (see
        :func:`dropbot_chip_qc.video.chip_video_process()`).

    Returns
    -------
    dict
        Keyword arguments of the first ``frame-ready`` signal received.
    '''
    loop = asyncio.get_event_loop()
    frame_ready = asyncio.Event()
    response = {}

    def on_frame_ready(sender, **message):
        # May run on the video thread: copy the payload, then wake the
        # waiting coroutine on the event loop's thread.
        response.update(message)
        loop.call_soon_threadsafe(frame_ready.set)

    # NOTE(review): the handler is never disconnected, so later frames
    # keep updating ``response`` after this coroutine returns — confirm
    # that is intended.
    signals.signal('frame-ready').connect(on_frame_ready)
    yield asyncio.From(frame_ready.wait())
    raise asyncio.Return(response)
def sock_connect(self, sock, address):
    """Coroutine: connect a non-blocking socket to ``address``.

    Returns (via ``Return``) on success; raises ``socket.error`` if the
    connection ultimately fails.
    """
    assert sock.gettimeout() == 0.0
    try:
        # A local/fast connect may succeed synchronously; ``Return`` is
        # not a socket.error, so it propagates past the except below.
        result = sock.connect(address)
        raise Return()
    except socket.error as e:
        if e.args[0] != errno.EINPROGRESS:
            raise e
        pass  # ignore, as we need to wait
    # Nope, now we have to wait.
    event = asyncio.Event()
    try:
        # Connect completion is signalled by writability (or an
        # exceptional condition) on the socket.
        self.add_writer(sock.fileno(), event.set)
        self.add_exception(sock.fileno(), event.set)
        yield From(event.wait())
    finally:
        self.remove_writer(sock.fileno())
        self.remove_exception(sock.fileno())
    # Fetch the final connect status from the socket itself.
    error = sock.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
    if error != 0:
        # TODO jpieper: This doesn't seem to result in a usable
        # error message.
        raise socket.error(error)
    raise Return()
def __init__(self, config_file):
    """Initialize the agent and its InfluxDB connection.

    Loads ``config_file``, attaches a ``WatchedFileHandler`` to the root
    logger per the ``log_*`` settings, loads plugin configuration from
    the config file's directory, then connects to InfluxDB and records
    the names of the existing databases.

    Parameters
    ----------
    config_file : str
        Path to the JSON configuration file.
    """
    super(Tourbillon, self).__init__()
    self._aio_run_event = asyncio.Event()
    self._thr_run_event = threading.Event()
    self._loop = asyncio.get_event_loop()
    self._tasks = []
    self._pluginconfig = {}

    with open(config_file, 'r') as f:
        self._config = json.load(f)

    formatter = logging.Formatter(fmt=self._config['log_format'])
    handler = logging.handlers.WatchedFileHandler(
        self._config['log_file'])
    handler.setFormatter(formatter)
    handler.setLevel(getattr(logging, self._config['log_level']))
    logging.getLogger().addHandler(handler)
    logging.getLogger().setLevel(
        getattr(logging, self._config['log_level']))
    logger.info('Use config file: %s', config_file)

    self._load_plugins_config(os.path.abspath(
        os.path.dirname(config_file)))

    self._influxdb = InfluxDBClient(**self._config['database'])
    self._databases = [i['name']
                       for i in self._influxdb.get_list_database()]
    # Fix: stray debug ``print`` replaced with a logger call so the
    # database list lands in the configured log file, not stdout.
    logger.debug('existing databases: %s', self._databases)
def test_clear_with_waiters(self):
    """clear() after set() leaves the event unset; waiter bookkeeping holds."""
    ev = asyncio.Event(loop=self.loop)
    result = []

    @asyncio.coroutine
    def c1(result):
        if (yield From(ev.wait())):
            result.append(1)
        raise Return(True)

    t = asyncio.Task(c1(result), loop=self.loop)
    test_utils.run_briefly(self.loop)
    self.assertEqual([], result)

    # set() immediately followed by clear(): the event ends up unset.
    ev.set()
    ev.clear()
    self.assertFalse(ev.is_set())

    # A redundant second set() must not register extra waiters.
    ev.set()
    ev.set()
    self.assertEqual(1, len(ev._waiters))

    test_utils.run_briefly(self.loop)
    self.assertEqual([1], result)
    self.assertEqual(0, len(ev._waiters))

    self.assertTrue(t.done())
    self.assertTrue(t.result())
def test_blocking_get_wait(self):
    """get() on an empty queue must block until an item is put."""

    # Deterministic test clock: expects one call_later(0.01, ...) and
    # then advances the loop's time by 0.01.
    def gen():
        when = yield
        self.assertAlmostEqual(0.01, when)
        yield 0.01

    loop = self.new_test_loop(gen)

    q = asyncio.Queue(loop=loop)
    started = asyncio.Event(loop=loop)
    non_local = {'finished': False}  # py2-compatible ``nonlocal``

    @asyncio.coroutine
    def queue_get():
        started.set()
        # Queue is empty; blocks until queue_put schedules put_nowait.
        res = yield From(q.get())
        non_local['finished'] = True
        raise Return(res)

    @asyncio.coroutine
    def queue_put():
        # Deliver an item 10 ms later so the blocked get can complete.
        loop.call_later(0.01, q.put_nowait, 1)
        queue_get_task = asyncio.Task(queue_get(), loop=loop)
        yield From(started.wait())
        self.assertFalse(non_local['finished'])
        res = yield From(queue_get_task)
        self.assertTrue(non_local['finished'])
        raise Return(res)

    res = loop.run_until_complete(queue_put())
    self.assertEqual(1, res)
    self.assertAlmostEqual(0.01, loop.time())
def _sync(f):
    '''
    Wrap callback ``f`` so its completion can be awaited as a coroutine.

    Parameters
    ----------
    f : function or functools.partial
        Callback to wrap.

    Returns
    -------
    coroutine function
        Coroutine that invokes ``f`` and resolves with its return value.
    '''
    loop = ensure_event_loop()
    done = asyncio.Event()

    @async_wrapper
    def _wrapped(*args):
        # Stash the return value on the Event object itself, then wake
        # the waiting coroutine (threadsafe, since this may run on
        # another thread).
        done.result = f(*args)
        loop.call_soon_threadsafe(done.set)
        return False

    _wrapped.loop = loop
    _wrapped.done = done

    # Preserve the wrapped callable's metadata (unwrap partials first,
    # since ``functools.wraps`` needs a real function).
    wraps_func = f.func if isinstance(f, partial) else f

    @wraps(wraps_func)
    @asyncio.coroutine
    def _synced(*args):
        _wrapped(*args)
        yield asyncio.From(_wrapped.done.wait())
        raise asyncio.Return(_wrapped.done.result)

    return _synced
def test_wait_cancel(self):
    """Cancelling a task blocked in wait() clears it from the waiter list."""
    event = asyncio.Event(loop=self.loop)
    waiter = asyncio.Task(event.wait(), loop=self.loop)
    self.loop.call_soon(waiter.cancel)
    self.assertRaises(asyncio.CancelledError,
                      self.loop.run_until_complete, waiter)
    self.assertFalse(event._waiters)
def tornado_future_adapter(future):
    """Coroutine: wait for a Tornado future and yield its result.

    A trollius Event is set from the future's done-callback; once it
    fires, the future's result (or exception) is propagated.
    """
    done = trollius.Event()
    future.add_done_callback(lambda _ignored: done.set())
    yield From(done.wait())
    raise Return(future.result())
def __init__(self, coro, intv=0.0):
    """Schedule ``coro`` to be run by this runner's ``run()`` task.

    Parameters
    ----------
    coro : coroutine function
        Coroutine invoked by ``run()``.
    intv : float, optional
        Initial interval in seconds, applied via ``restart()``.
    """
    self.done = False
    self.loop = trollius.get_event_loop()
    self.coro = coro
    self.running = trollius.Event()
    # Placeholder; restart() below applies the requested interval.
    # NOTE(review): assumes restart() assigns self.interval — confirm.
    self.interval = 0.0
    self.restart(intv)
    # Fix: removed commented-out dead call ``#self.run()`` — the task
    # is created on the loop instead.
    self.loop.create_task(self.run())
def test_clear(self):
    """set() then clear() toggles is_set() back to False."""
    event = asyncio.Event(loop=self.loop)
    self.assertFalse(event.is_set())

    event.set()
    self.assertTrue(event.is_set())

    event.clear()
    self.assertFalse(event.is_set())
def actuate_channels(self, channels, allow_disabled=True):
    '''
    Parameters
    ----------
    channels : list
        List of channel numbers to actuate.
    allow_disabled : bool, optional
        If ``False``, verify actuated channels match specified channels
        _exactly_.  Otherwise, ensure that all actuated channels belong
        to the specified set of channels, _even if_ not _all_ specified
        channels are actuated.  This supports attempting to actuate
        channels that are disabled.

    Returns
    -------
    list
        List of actuated channels.  If :data:`allow_disabled` is
        ``True``, the returned list of channels may differ from the
        specified list of channels.

    Raises
    ------
    RuntimeError
        If list actuated channels does not match the requested channels
        (missing disabled channels are ignored if ``allowed_disabled``
        is `True`).
    '''
    loop = asyncio.get_event_loop()
    channels_updated = asyncio.Event()

    def _on_channels_updated(message):
        # Record the actuated channels on the Event object and wake the
        # waiting coroutine (threadsafe: signal may fire off-loop).
        channels_updated.actuated = message.get('actuated')
        loop.call_soon_threadsafe(channels_updated.set)

    # Enable `channels-updated` DropBot signal.
    self.enable_event(db.proxy.EVENT_CHANNELS_UPDATED)
    # Request to be notified when the set of actuated channels changes.
    signal = self.signals.signal('channels-updated')
    signal.connect(_on_channels_updated)
    # Request actuation of the specified channels.
    self.set_state_of_channels(pd.Series(1, index=channels),
                               append=False)
    yield asyncio.From(channels_updated.wait())
    if not allow_disabled and (set(channels_updated.actuated) !=
                               set(channels)):
        raise RuntimeError('Actuated channels `%s` do not match '
                           'expected channels `%s`' %
                           (channels_updated.actuated, channels))
    elif set(channels_updated.actuated) - set(channels):
        # Disabled channels are allowed.
        raise RuntimeError('Actuated channels `%s` are not included in'
                           ' expected channels `%s`' %
                           (channels_updated.actuated, channels))
    raise asyncio.Return(channels_updated.actuated)
def _exec_task():
    """Connect the ZeroMQ plugin to the hub and park until cancelled.

    NOTE(review): ``zmq_ready``, ``self`` and ``asyncio`` come from the
    enclosing scope (not visible here).
    """
    self.zmq_plugin = ZmqPlugin('microdrop', get_hub_uri())
    self.zmq_plugin.reset()
    # Signal the caller that the plugin is connected and ready.
    zmq_ready.set()
    event = asyncio.Event()
    try:
        # This event is never set; wait here until the task is
        # cancelled externally.
        yield asyncio.From(event.wait())
    except asyncio.CancelledError:
        _L().info('closing ZeroMQ execution event loop')
def __init__(self, gait, servo):
    """Store collaborators and start idle with no pending command."""
    self.gait = gait
    self.servo = servo
    self.state = None
    self.mode = self.IDLE
    self.next_command = None
    # This event should be set anytime a next_command is present.
    self.command_event = asyncio.Event()
def sock_recv(self, sock, nbytes):
    """Coroutine: receive up to ``nbytes`` from a non-blocking socket.

    Waits for the socket to become readable, then performs a single
    ``recv`` and yields the data via ``Return``.
    """
    assert sock.gettimeout() == 0.0
    readable = asyncio.Event()
    try:
        self.add_reader(sock.fileno(), readable.set)
        yield From(readable.wait())
    finally:
        # Always drop the reader callback, even on cancellation.
        self.remove_reader(sock.fileno())
    raise Return(sock.recv(nbytes))
def test_repr(self):
    """repr(Event) reflects set/unset state and pending waiter count."""
    ev = asyncio.Event(loop=self.loop)
    self.assertTrue(repr(ev).endswith('[unset]>'))
    match = RGX_REPR.match(repr(ev))
    self.assertEqual(match.group('extras'), 'unset')

    ev.set()
    self.assertTrue(repr(ev).endswith('[set]>'))
    self.assertTrue(RGX_REPR.match(repr(ev)))

    # Fake a pending waiter; repr should report the count.
    ev._waiters.append(mock.Mock())
    self.assertTrue('waiters:1' in repr(ev))
    self.assertTrue(RGX_REPR.match(repr(ev)))
def sock_sendall(self, sock, data):
    """Coroutine: send all of ``data`` on a non-blocking socket.

    Repeatedly waits for writability and sends the remaining bytes
    until none are left.
    """
    assert sock.gettimeout() == 0.0
    writable = asyncio.Event()
    pending = data[:]  # work on a copy; caller's buffer is untouched
    try:
        self.add_writer(sock.fileno(), writable.set)
        while len(pending):
            yield From(writable.wait())
            writable.clear()
            sent = sock.send(pending)
            pending = pending[sent:]
    finally:
        # Always drop the writer callback, even on cancellation.
        self.remove_writer(sock.fileno())
def __init__(self, config_file):
    """Initialize the Sentinel.la agent from a JSON config file.

    Loads ``config_file``, attaches a ``WatchedFileHandler`` to the
    root logger per the ``log_*`` settings, loads plugin configuration
    from the config file's directory, and records which OpenStack
    service processes to monitor.

    Parameters
    ----------
    config_file : str
        Path to the JSON configuration file.
    """
    super(Tourbillon, self).__init__()
    self._aio_run_event = asyncio.Event()
    self._thr_run_event = threading.Event()
    self._loop = asyncio.get_event_loop()
    self._tasks = []
    self._pluginconfig = {}
    self.agent_version = '1.1'

    with open(config_file, 'r') as f:
        self._config = json.load(f)

    formatter = logging.Formatter(fmt=self._config['log_format'])
    handler = logging.handlers.WatchedFileHandler(
        self._config['log_file'])
    handler.setFormatter(formatter)
    handler.setLevel(getattr(logging, self._config['log_level']))
    logging.getLogger().addHandler(handler)
    logging.getLogger().setLevel(
        getattr(logging, self._config['log_level']))
    logger.info('Use config file: %s', config_file)

    self._load_plugins_config(os.path.abspath(
        os.path.dirname(config_file)))

    self.api_url = 'https://sf-c01.sentinel.la:5556'
    self.nova_api_version = 2
    # Numeric codes reported to the backend for each OpenStack
    # instance state.
    self.openstack_status = {
        'STOPPED': 0,
        'ACTIVE': 1,
        'SHUTOFF': 2,
        'BUILDING': 3,
        'DELETED': 4,
        'ERROR': 5,
        'SOFT_DELETED': 6,
        'PAUSED': 7,
        'SUSPEND': 8,
        'SHELVED': 9,
        'RESCUED': 10,
        'RESIZED': 11,
        'SHELVED_OFFLOADED': 12,
    }
    self.processes = []
    # Fix: ``.items()`` instead of the Python-2-only ``.iteritems()``
    # keeps this loop portable across Python versions.
    for key, value in self._config['openstack_services'].items():
        if value:
            self.processes.append(value['process'])
    # Fix: stray Python-2 ``print`` statement replaced with a logger
    # call so output goes to the configured log file, not stdout.
    logger.debug('api_url: %s', self.api_url)
def wait_for_result(client, verb, prefix, name, *args, **kwargs):
    '''
    Coroutine: publish an MQTT request and wait for the matching result.

    Example
    -------

    >>> client = Client(client_id='DropBot MQTT bridge')
    >>> ...
    >>> # bind MQTT client to DropBot monitor blinker signals namespace...
    >>> ...
    >>> prefix = '/dropbot/' + str(dropbot.uuid)
    >>> call = ft.partial(wait_for_result, client, 'call', prefix)
    >>> property = ft.partial(wait_for_result, client, 'property', prefix)
    >>> ...
    >>> loop = asyncio.get_event_loop()
    >>> loop.run_until_complete(property('voltage', 80))
    '''
    loop = asyncio.get_event_loop()
    result = asyncio.Event()

    def on_received(client, userdata, message):
        # Decode the result payload; fall back to plain JSON if the
        # ``jt`` decoder rejects it.
        try:
            payload = message.payload or 'null'
            result.data = jt.loads(payload)
        except Exception:
            result.data = json.loads(payload)
        # If the payload describes a typed instance, reconstruct it;
        # on any failure keep the raw decoded data.
        try:
            module_name = '.'.join(result.data['__instance_type__'][:-1])
            class_name = result.data['__instance_type__'][-1]
            module = __import__(module_name)
            cls = getattr(module, class_name)
            result.data = cls(**result.data['attributes'])
        except Exception:
            pass
        # Wake the waiting coroutine (MQTT callbacks run off-loop).
        loop.call_soon_threadsafe(result.set)

    client.message_callback_add('%s/result/%s' % (prefix, name),
                                on_received)
    try:
        payload = jt.dumps({'args': args, 'kwargs': kwargs})
        client.publish('%s/%s/%s' % (prefix, verb, name),
                       payload=payload, qos=1)
        yield asyncio.From(result.wait())
    finally:
        # Always unhook the result callback, even on cancellation.
        client.message_callback_remove(sub='%s/result/%s' % (prefix, name))
    raise asyncio.Return(result.data)
def write(self, data):
    """Coroutine: write all of ``data`` to the serial port.

    Caller must already hold ``write_lock``.  Repeatedly waits for the
    port to become writable and sends the remaining bytes.
    """
    assert self.write_lock.locked()
    loop = asyncio.get_event_loop()
    writable = asyncio.Event()
    loop.add_writer(self.raw_serial.fileno(), writable.set)
    try:
        while len(data):
            yield From(writable.wait())
            writable.clear()
            count = self.raw_serial.write(data)
            data = data[count:]
    finally:
        # Always drop the writer callback, even on cancellation.
        loop.remove_writer(self.raw_serial.fileno())
    raise Return()
def read(self, size):
    """Coroutine: read exactly ``size`` bytes from the serial port.

    Caller must already hold ``read_lock``.  Repeatedly waits for the
    port to become readable until the requested amount is accumulated.
    """
    assert self.read_lock.locked()
    loop = asyncio.get_event_loop()
    readable = asyncio.Event()
    buf = ''
    loop.add_reader(self.raw_serial.fileno(), readable.set)
    try:
        while len(buf) < size:
            yield From(readable.wait())
            readable.clear()
            buf += self.raw_serial.read(size - len(buf))
    finally:
        # Always drop the reader callback, even on cancellation.
        loop.remove_reader(self.raw_serial.fileno())
    raise Return(buf)
def sock_accept(self, sock):
    """Coroutine: accept a connection on a non-blocking listening socket.

    Yields the ``(conn, address)`` pair via ``Return``, with ``conn``
    set non-blocking.
    """
    assert sock.gettimeout() == 0.0
    while True:
        event = asyncio.Event()
        try:
            self.add_reader(sock.fileno(), event.set)
            yield From(event.wait())
        finally:
            self.remove_reader(sock.fileno())
        try:
            # ``Return`` is not a socket.error, so it propagates past
            # the except clause below.
            result = sock.accept()
            result[0].setblocking(False)
            raise Return(result)
        except socket.error as e:
            # Spurious wakeup: loop and wait again.
            if (e.args[0] != errno.EINPROGRESS and
                    e.args[0] != errno.EAGAIN):
                raise e
            pass  # ignore, as we need to wait
def wait_on_capacitance(proxy, callback):
    '''Return once callback returns `True`.

    Parameters
    ----------
    proxy
        DropBot proxy exposing a ``signals`` namespace with a
        ``capacitance-updated`` signal.
    callback
        Callback function accepting a list of ``capacitance-updated``
        messages as only argument.

    Returns
    -------
    list
        List of DropBot ``capacitance-updated`` messages containing the
        following keys::

        - ``event``: ``"capacitance-updated"``
        - ``new_value``: capacitance value in Farads
        - ``time_us``: DropBot microsecond 32-bit counter
        - ``n_samples``: number of samples used for RMS measurement
        - ``V_a``: measured actuation voltage during capacitance reading
    '''
    move_done = asyncio.Event()
    loop = asyncio.get_event_loop()
    messages = []

    def _on_capacitance(message):
        # message.keys == ['event', 'new_value', 'time_us', 'n_samples', 'V_a']
        # Added by `co_target_capacitance()`: 'actuation_uuid1', 'actuated_channels'
        try:
            messages.append(message)
            if callback(messages):
                # Condition satisfied: wake the waiting coroutine from
                # whatever thread delivered the signal.
                loop.call_soon_threadsafe(move_done.set)
        except Exception:
            # Swallow callback errors so one bad message cannot kill
            # the handler; keep waiting for further updates.
            logging.debug('capacitance event error.', exc_info=True)
            return

    proxy.signals.signal('capacitance-updated').connect(_on_capacitance)
    yield asyncio.From(move_done.wait())
    raise asyncio.Return(messages)
def execute(plugin_kwargs, signals):
    '''
    XXX Coroutine XXX

    Parameters
    ----------
    plugin_kwargs : dict
        Plugin settings as JSON serializable dictionary.
    signals : blinker.Namespace
        Signals namespace.
    '''
    # Nothing to do if no settings are registered for this plugin.
    if NAME not in plugin_kwargs:
        raise asyncio.Return([])
    else:
        kwargs = plugin_kwargs[NAME]

    # Wait for plugins to connect to signals as necessary.
    event = asyncio.Event()
    signals.signal('signals-connected').connect(lambda *args: event.set(),
                                                weak=False)
    yield asyncio.From(event.wait())

    voltage = kwargs['Voltage (V)']
    frequency = kwargs['Frequency (Hz)']
    duration_s = kwargs['Duration (s)']
    static_states = kwargs.get('electrode_states', pd.Series())
    dynamic = kwargs.get('dynamic', True)

    result = yield asyncio.From(
        execute_actuations(signals, static_states, voltage, frequency,
                           duration_s, dynamic=dynamic))

    logger = _L()  # use logger with function context
    # NOTE(review): both counts below are ``len(result)``, so this
    # always logs "N/N"; presumably the second argument should be the
    # number of *requested* actuations — confirm against
    # ``execute_actuations()``.
    logger.info('%d/%d actuations completed', len(result), len(result))
    logger.debug('completed actuations: `%s`', result)
    raise asyncio.Return(result)
def test_wait(self):
    """A single set() releases every coroutine blocked in wait()."""
    ev = asyncio.Event(loop=self.loop)
    self.assertFalse(ev.is_set())

    result = []

    @asyncio.coroutine
    def c1(result):
        if (yield From(ev.wait())):
            result.append(1)

    @asyncio.coroutine
    def c2(result):
        if (yield From(ev.wait())):
            result.append(2)

    @asyncio.coroutine
    def c3(result):
        if (yield From(ev.wait())):
            result.append(3)

    t1 = asyncio.Task(c1(result), loop=self.loop)
    t2 = asyncio.Task(c2(result), loop=self.loop)
    test_utils.run_briefly(self.loop, 2)
    self.assertEqual([], result)

    # Start the third waiter only after the first two are parked.
    t3 = asyncio.Task(c3(result), loop=self.loop)

    ev.set()
    test_utils.run_briefly(self.loop, 2)
    # Waiters are released in the order they started waiting.
    self.assertEqual([1, 2, 3], result)

    self.assertTrue(t1.done())
    self.assertIsNone(t1.result())
    self.assertTrue(t2.done())
    self.assertIsNone(t2.result())
    self.assertTrue(t3.done())
    self.assertIsNone(t3.result())
def show_chip(signals, title='DMF chip'):
    '''
    Display raw webcam view and corresponding perspective-corrected chip
    view.

    Press ``q`` key to close window.

    Parameters
    ----------
    signals : blinker.Namespace
        DMF chip webcam monitor signals (see
        :func:`dropbot_chip_qc.video.chip_video_process()`).
    title : str, optional
        Window title.

    See also
    --------
    dropbot_chip_qc.video.chip_video_process()
    '''
    print('Press "q" to quit')
    loop = asyncio.get_event_loop()
    frame_ready = asyncio.Event()

    def on_frame_ready(sender, **message):
        # Stash the latest frame payload on the Event and wake the
        # display loop (signal may fire on the video thread).
        frame_ready.record = message
        loop.call_soon_threadsafe(frame_ready.set)

    signals.signal('frame-ready').connect(on_frame_ready)

    while True:
        try:
            # Poll with a 10 ms timeout so the OpenCV window stays
            # responsive between frames.
            # NOTE(review): other coroutines in this codebase wrap
            # awaited coroutines in ``asyncio.From(...)``; confirm this
            # bare ``yield asyncio.wait_for(...)`` is intentional for
            # the asyncio variant in use.
            yield asyncio.wait_for(frame_ready.wait(), .01)
            cv2.imshow(title, frame_ready.record['frame'])
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
        except asyncio.TimeoutError:
            continue
def test_wait_on_set(self):
    """wait() on an already-set event completes immediately with True."""
    event = asyncio.Event(loop=self.loop)
    event.set()
    outcome = self.loop.run_until_complete(event.wait())
    self.assertTrue(outcome)
def test_ctor_noloop(self):
    """Event() without an explicit loop picks up the current event loop."""
    asyncio.set_event_loop(self.loop)
    event = asyncio.Event()
    self.assertIs(event._loop, self.loop)