def connect(self):
    '''Establish a connection.

    Trollius coroutine.  Requires the connection to be in the ``ready``
    state; arms the idle-close timer once the socket is up.
    '''
    _logger.debug(__('Connecting to {0}.', self._address))

    if self._state != ConnectionState.ready:
        raise Exception('Closed connection must be reset before reusing.')

    if self._sock:
        # Wrap an already-open socket.
        connection_future = trollius.open_connection(
            sock=self._sock, **self._connection_kwargs()
        )
    else:
        # TODO: maybe we don't want to ignore flow-info and scope-id?
        host, port = self._address[0], self._address[1]
        connection_future = trollius.open_connection(
            host, port, **self._connection_kwargs()
        )

    self.reader, self.writer = yield From(
        self.run_network_operation(
            connection_future,
            wait_timeout=self._connect_timeout,
            name='Connect')
    )

    if self._timeout is not None:
        self._close_timer = CloseTimer(self._timeout, self)
    else:
        self._close_timer = DummyCloseTimer()

    self._state = ConnectionState.created
    _logger.debug('Connected.')
def connect(self):
    '''Establish a connection.

    Trollius coroutine.  Must only be called while the connection is in
    the ``ready`` state.
    '''
    _logger.debug(__('Connecting to {0}.', self._address))

    if self._state != ConnectionState.ready:
        raise Exception('Closed connection must be reset before reusing.')

    if self._sock:
        # Reuse the provided socket object directly.
        connection_future = trollius.open_connection(
            sock=self._sock, **self._connection_kwargs())
    else:
        # TODO: maybe we don't want to ignore flow-info and scope-id?
        host = self._address[0]
        port = self._address[1]
        connection_future = trollius.open_connection(
            host, port, **self._connection_kwargs())

    self.reader, self.writer = yield From(
        self.run_network_operation(connection_future,
                                   wait_timeout=self._connect_timeout,
                                   name='Connect'))

    # Arm the idle-close timer (a no-op dummy when no timeout is set).
    self._close_timer = (CloseTimer(self._timeout, self)
                         if self._timeout is not None
                         else DummyCloseTimer())

    self._state = ConnectionState.created
    _logger.debug('Connected.')
def activity(self):
    """Main client loop: connect (with capped exponential backoff),
    flush queued requests, then pump responses until an error, after
    which pending requests are requeued and the loop reconnects.

    Trollius coroutine.  NOTE(review): reads the module-global ``args``
    for ``max_backoff`` — confirm it is set before this runs.
    """
    backoff = 0
    while True:
        try:
            self.reader, self.writer = yield From(asyncio.open_connection(
                self.host, self.port, ssl=self.sslctx, loop=self.loop))
        except Exception as err:
            # Grow the delay by ~50% each failure, capped at max_backoff.
            backoff = min(args.max_backoff, backoff + (backoff // 2) + 1)
            logging.info('Error connecting: %r; sleep %s', err, backoff)
            yield From(asyncio.sleep(backoff, loop=self.loop))
            continue

        backoff = 0
        self.next_id = 0
        self.pending = {}
        self.initialized = True
        try:
            # Drain the backlog accumulated while disconnected.
            while self.todo:
                payload, waiter = self.todo.pop()
                if not waiter.done():
                    yield From(self.send(payload, waiter))
            # Dispatch responses to their waiting futures.
            while True:
                resp_id, resp = yield From(self.process())
                if resp_id in self.pending:
                    payload, waiter = self.pending.pop(resp_id)
                    if not waiter.done():
                        waiter.set_result(resp)
        except Exception as err:
            self.initialized = False
            self.writer.close()
            # Requeue everything still in flight so the next connection
            # can retry it.
            while self.pending:
                _, entry = self.pending.popitem()
                payload, waiter = entry
                if not waiter.done():
                    self.todo.add(entry)
            logging.info('Error processing: %r', err)
def connect(self, username, password, **kw):
    """Connect to the server, authenticate, and feed incoming
    msgpack-encoded messages to ``self.inform`` until EOF.

    Trollius coroutine.  Always closes the client via ``self.close()``
    on exit, whether the connection succeeded or not.
    """
    logging.info('Connecting...')
    try:
        reader, writer = yield From(
            asyncio.open_connection(self.host, self.port))
        self.reader = reader
        self.writer = writer
        self.send_msg(
            dict(type="connect", username=username, password=password,
                 observe=True))
        self.sockname = writer.get_extra_info('sockname')
        unpacker = msgpack.Unpacker(encoding='utf-8')
        logging.info("reader eof? {}".format(repr(reader.at_eof())))
        while not reader.at_eof():
            pack = yield From(reader.read(1024))
            unpacker.feed(pack)
            for msg in unpacker:
                self.inform(*msg)
        logging.info('The server closed the connection')
        self.writer = None
    except ConnectionRefusedError as e:
        logging.info('Connection refused: {}'.format(e))
    except Exception as e:
        logging.error("WTF did just happend?")
        # BUG FIX: traceback.format_exception() requires explicit
        # (type, value, tb) arguments, so the old call raised TypeError
        # inside the handler; format_exc() formats the current exception.
        logging.error(traceback.format_exc())
    finally:
        logging.info("close ...")
        self.close()
def _connect(self):
    """Open the connection and install the disconnect hook.

    NOTE(review): reaches into the writer's private ``transport._protocol``
    to override connection_lost — fragile across asyncio versions; confirm.
    """
    self.reader, self.writer = yield asyncio.From(
        asyncio.open_connection(self.addy, self.port))
    proto = self.writer.transport._protocol
    proto.connection_lost = self._onDisconnected
    self.connected = True
    if self.onConnected:
        self.onConnected()
def open(self):
    """Open a session to the i2p router.

    Schedules the connection attempt and fires ``_cb_connected`` when
    it completes (callback style — does not block).
    """
    self._log.debug('connecting...')
    task = self._async(asyncio.open_connection(
        self._i2cp_host, self._i2cp_port, loop=self._loop))
    task.add_done_callback(self._cb_connected)
def test_open_connection_no_loop_ssl(self):
    # Exercise open_connection() against a TLS-enabled test server;
    # the shared base helper performs the actual read/write checks.
    with test_utils.run_test_server(use_ssl=True) as httpd:
        conn_fut = asyncio.open_connection(
            *httpd.address,
            ssl=test_utils.dummy_ssl_context(),
            loop=self.loop)
        self._basetest_open_connection_no_loop_ssl(conn_fut)
def connect(self):
    """Open the (optionally TLS) connection and record the canonical
    peer address as this connection's cache key."""
    self.reader, self.writer = yield From(asyncio.open_connection(
        self.host, self.port, ssl=self.ssl))
    addr = self.writer.get_extra_info('peername')
    if not addr:
        self.log(1, 'NO PEERNAME???', self.host, self.port, self.ssl)
    else:
        # Normalize to the address actually connected to.
        self.host, self.port = addr[:2]
    self.key = self.host, self.port, self.ssl
def connect(self):
    """Connect and derive the (host, port, ssl) pooling key from the
    resolved peer address."""
    self.reader, self.writer = yield From(
        asyncio.open_connection(self.host, self.port, ssl=self.ssl))
    peername = self.writer.get_extra_info('peername')
    if peername:
        self.host, self.port = peername[:2]
    else:
        self.log(1, 'NO PEERNAME???', self.host, self.port, self.ssl)
    self.key = (self.host, self.port, self.ssl)
def client(addr):
    # Connect, send one line, and hand back the echoed response.
    reader, writer = yield From(
        asyncio.open_connection(*addr, loop=self.loop))
    writer.write(b"hello world!\n")
    echoed = yield From(reader.readline())
    writer.close()
    raise Return(echoed)
def client(addr):
    """Echo-client helper: writes one line and returns what comes back."""
    reader, writer = yield From(asyncio.open_connection(
        *addr, loop=self.loop))
    # send a line
    writer.write(b"hello world!\n")
    # read it back
    msgback = yield From(reader.readline())
    writer.close()
    raise Return(msgback)
def measure_connection_once(args, rate, num_heaps, required_heaps):
    """Run a single send benchmark against the remote receiver.

    Coordinates with the receiver over a TCP control channel ("start" /
    "stop" JSON commands), streams ``num_heaps`` heaps at ``rate``, and
    returns ``(received_heaps >= required_heaps, actual_rate)``.

    Trollius coroutine.
    """
    def write(s):
        writer.write(s.encode('ascii'))

    reader, writer = yield From(trollius.open_connection(args.host, args.port))
    write(json.dumps({'cmd': 'start', 'args': vars(args)}) + '\n')
    # Wait for "ready" response
    response = yield From(reader.readline())
    assert response == b'ready\n'
    if args.send_affinity is not None and len(args.send_affinity) > 0:
        spead2.ThreadPool.set_affinity(args.send_affinity[0])
        thread_pool = spead2.ThreadPool(
            1, args.send_affinity[1:] + args.send_affinity[:1])
    else:
        thread_pool = spead2.ThreadPool()
    # BUG FIX: an unconditional `thread_pool = spead2.ThreadPool()` here
    # previously discarded the affinity-configured pool built above.
    config = spead2.send.StreamConfig(
        max_packet_size=args.packet,
        burst_size=args.burst,
        rate=rate,
        max_heaps=num_heaps + 1,
        burst_rate_ratio=args.burst_rate_ratio)
    # Send to the multicast group when one is configured.
    host = args.host
    if args.multicast is not None:
        host = args.multicast
    if 'send_ibv' in args and args.send_ibv is not None:
        stream = spead2.send.trollius.UdpIbvStream(
            thread_pool, host, args.port, config, args.send_ibv,
            args.send_buffer, 1, args.send_ibv_vector, args.send_ibv_max_poll)
    else:
        stream = spead2.send.trollius.UdpStream(
            thread_pool, host, args.port, config, args.send_buffer)
    item_group = spead2.send.ItemGroup(
        flavour=spead2.Flavour(4, 64, args.addr_bits, 0))
    item_group.add_item(id=None, name='Test item',
                        description='A test item with arbitrary value',
                        shape=(args.heap_size,), dtype=np.uint8,
                        value=np.zeros((args.heap_size,), dtype=np.uint8))
    start = timeit.default_timer()
    transferred = yield From(send_stream(item_group, stream, num_heaps))
    end = timeit.default_timer()
    elapsed = end - start
    actual_rate = transferred / elapsed
    # Give receiver time to catch up with any queue
    yield From(trollius.sleep(0.1))
    write(json.dumps({'cmd': 'stop'}) + '\n')
    # Read number of heaps received
    response = yield From(reader.readline())
    response = json.loads(response.decode('ascii'))
    received_heaps = response['received_heaps']
    yield From(trollius.sleep(0.5))
    yield From(writer.drain())
    writer.close()
    logging.debug("Received %d/%d heaps in %f seconds, rate %.3f Gbps",
                  received_heaps, num_heaps, elapsed, actual_rate * 8e-9)
    raise Return(received_heaps >= required_heaps, actual_rate)
def echo_client():
    """Send two lines plus the END sentinel and echo replies until the
    sentinel (or EOF) comes back."""
    reader, writer = yield From(asyncio.open_connection('localhost', 8000))
    for chunk in (b'Hello, world\n', b'What a fine day it is.\n', END):
        writer.write(chunk)
    while True:
        line = yield From(reader.readline())
        print('received:', line)
        if line == END or not line:
            break
    writer.close()
def handle_ident(self, message, host, port):
    """Perform an ident-style lookup: send ``message`` to host:port and
    return the first response line (stripped), or None on any failure
    (connection error or 6-second timeout).

    Trollius coroutine.
    """
    try:
        client_reader, client_writer = yield From(
            asyncio.open_connection(host, port))
        request = "{}\r\n".format(message)
        client_writer.write(request.encode())
        data = yield From(asyncio.wait_for(client_reader.readline(),
                                           timeout=6.0))
        data = data.decode().rstrip()
    except Exception:
        # BUG FIX: the old bare `except:` wrapped the `raise Return(data)`
        # too; since trollius Return derives from StopIteration, every
        # successful lookup was swallowed and converted to Return(None).
        # (An unreachable debug print after the raise was also removed.)
        raise Return(None)
    raise Return(data)
def wget(host):
    """Fetch '/' from ``host`` over plain HTTP and print the response
    headers; the body is ignored."""
    reader, writer = yield From(asyncio.open_connection(host, 80))
    request = 'GET / HTTP/1.0\r\nHost: %s\r\n\r\n' % host
    writer.write(request.encode('utf-8'))
    yield From(writer.drain())
    while True:
        line = yield From(reader.readline())
        if line == b'\r\n':
            # Blank line terminates the header section.
            break
        print('%s header > %s' % (host, line.decode('utf-8').rstrip()))
    # Ignore the body, close the socket
    writer.close()
def open(self):
    """Open a session to the i2p router.

    This is a coroutine.  On success, sends the I2CP protocol version
    and schedules session setup on the event loop; on failure, logs an
    error and returns.
    """
    self._log.debug('connecting...')
    self._reader, self._writer = yield From(asyncio.open_connection(
        self._i2cp_host, self._i2cp_port, loop=self._loop))
    if self._reader and self._writer:
        self._log.info("connected")
        self._loop.call_soon_threadsafe(
            self._async, self._send_raw(util.PROTOCOL_VERSION))
        self._loop.call_soon_threadsafe(self._begin_session)
    else:
        # BUG FIX: was `self.log.error(...)`, but the logger attribute
        # used everywhere else in this method is `self._log`, so the
        # failure path raised AttributeError instead of logging.
        self._log.error("could not connect to router")
        return
def get_memcached_stats(agent):
    """Periodically poll a memcached server with the ``stats`` command
    and push the whitelisted STATS_FIELDS values to the agent database.

    Runs until ``agent.run_event`` is cleared.  Trollius coroutine.
    """
    yield From(agent.run_event.wait())
    config = agent.config['memcached']
    logger.info('starting "get_memcached_stats" task for "%s"',
                config['hostname'])
    db_config = config['database']
    yield From(agent.async_create_database(**db_config))
    memcached_host = config['hostname']
    memcached_port = config['port']
    while agent.run_event.is_set():
        yield From(asyncio.sleep(config['frequency']))
        try:
            logger.debug('open connection to memcached server')
            reader, writer = yield From(asyncio.open_connection(
                memcached_host, memcached_port))
            # FIX: was bytes('stats\n'.encode('ascii')) — encode()
            # already yields bytes; the extra bytes() was a useless copy.
            writer.write('stats\n'.encode('ascii'))
            logger.debug('command stats sent')
            results = []
            while True:
                buf = yield From(reader.read(1024))
                logger.debug('data read from socket')
                # FIX: guard against EOF before END arrives, which
                # previously looped forever on empty reads.
                if not buf:
                    break
                data = buf.decode()
                results.append(data)
                if data.endswith('END\r\n'):
                    logger.debug('data read finished')
                    break
            writer.close()
            # Drop the trailing END line and the final blank entry.
            results = ''.join(results).split('\r\n')[0:-2]
            points = [{
                'measurement': 'memcached_stats',
                'tags': {
                    'host': memcached_host,
                },
                'fields': {
                }
            }]
            logger.debug('parsing results')
            for line in results:
                stat, key, val = line.split(' ')
                if key in STATS_FIELDS:
                    # STATS_FIELDS maps field name -> converter callable.
                    points[0]['fields'][key] = STATS_FIELDS[key](val)
            logger.debug(points)
            yield From(agent.async_push(points, db_config['name']))
        except Exception:
            # FIX: narrowed from a bare `except:` which also swallowed
            # SystemExit/KeyboardInterrupt.
            logger.exception('cannot get the memcached stats')
    logger.info('get_memcached_stats terminated')
def get_uwsgi_stats(agent):
    """Periodically poll a uwsgi stats socket and push per-worker deltas
    (requests/exceptions/tx) plus gauges (rss/vsz/avg_rt/status) to the
    agent database.

    A change in the first worker's ``last_spawn`` is treated as a uwsgi
    restart: the stored baseline is reset and that cycle is skipped.
    Runs until ``agent.run_event`` is cleared.  Trollius coroutine.
    """
    yield From(agent.run_event.wait())
    config = agent.config['uwsgi']
    logger.info('starting "get_uwsgi_stats" task for "%s"',
                config['hostname'])
    db_config = config['database']
    yield From(agent.async_create_database(**db_config))
    workers_stats = None
    uwsgi_host = config['hostname']
    uwsgi_port = config['port']
    while agent.run_event.is_set():
        yield From(asyncio.sleep(config['frequency']))
        try:
            logger.debug('open connection to uwsgi stats server')
            reader, writer = yield From(
                asyncio.open_connection(uwsgi_host, uwsgi_port))
            data = yield From(reader.read())
            d = json.loads(data.decode('utf-8'))
            if workers_stats is None:
                logger.debug('first run, no previous statistics in memory')
                workers_stats = dict()
                for worker in d['workers']:
                    workers_stats[worker['id']] = worker
                logger.debug('current statistcs: %s', workers_stats)
                continue
            ref_worker = d['workers'][0]
            stored_last_spawn = workers_stats[ref_worker['id']]['last_spawn']
            received_last_spawn = ref_worker['last_spawn']
            if stored_last_spawn != received_last_spawn:
                # FIX: logger.warn is a deprecated alias of warning.
                logger.warning(
                    'a restart of the uwsgi server "%s" '
                    'has been detected', uwsgi_host)
                workers_stats = dict()
                for worker in d['workers']:
                    workers_stats[worker['id']] = worker
                continue
            points = []
            for worker in d['workers']:
                logger.debug('process worker data...')
                stored_wk_data = workers_stats[worker['id']]
                # Counters are cumulative in uwsgi; push the delta.
                requests = worker['requests'] - stored_wk_data['requests']
                exceptions = worker['exceptions'] - \
                    stored_wk_data['exceptions']
                tx = worker['tx'] - stored_wk_data['tx']
                points.append({
                    'measurement': 'uwsgi_stats',
                    'tags': {
                        'host': config['hostname'],
                        'worker': worker['id']
                    },
                    'fields': {
                        'requests': requests,
                        'exceptions': exceptions,
                        'tx': tx,
                        'rss': worker['rss'],
                        'vsz': worker['vsz'],
                        'avg_rt': worker['avg_rt'],
                        'wid': worker['id'],
                        'status': worker['status']
                    }
                })
                workers_stats[worker['id']] = worker
            yield From(agent.async_push(points, db_config['name']))
        except Exception:
            # FIX: narrowed from a bare `except:`.
            logger.exception('cannot get the uwsgi stats')
    logger.info('get_uwsgi_stats terminated')
def get_uwsgi_stats(agent):
    """Poll a uwsgi stats socket on a fixed frequency and push per-worker
    counter deltas and gauges to the agent database.

    Resets its stored baseline when a uwsgi restart is detected (the
    first worker's ``last_spawn`` changed).  Runs until
    ``agent.run_event`` is cleared.  Trollius coroutine.
    """
    yield From(agent.run_event.wait())
    config = agent.config['uwsgi']
    logger.info('starting "get_uwsgi_stats" task for "%s"',
                config['hostname'])
    db_config = config['database']
    yield From(agent.async_create_database(**db_config))
    workers_stats = None
    uwsgi_host = config['hostname']
    uwsgi_port = config['port']
    while agent.run_event.is_set():
        yield From(asyncio.sleep(config['frequency']))
        try:
            logger.debug('open connection to uwsgi stats server')
            reader, writer = yield From(asyncio.open_connection(uwsgi_host,
                                                                uwsgi_port))
            data = yield From(reader.read())
            d = json.loads(data.decode('utf-8'))
            if workers_stats is None:
                logger.debug('first run, no previous statistics in memory')
                workers_stats = dict()
                for worker in d['workers']:
                    workers_stats[worker['id']] = worker
                logger.debug('current statistcs: %s', workers_stats)
                continue
            ref_worker = d['workers'][0]
            stored_last_spawn = workers_stats[ref_worker['id']]['last_spawn']
            received_last_spawn = ref_worker['last_spawn']
            if stored_last_spawn != received_last_spawn:
                # FIX: logger.warn is a deprecated alias of warning.
                logger.warning('a restart of the uwsgi server "%s" '
                               'has been detected', uwsgi_host)
                workers_stats = dict()
                for worker in d['workers']:
                    workers_stats[worker['id']] = worker
                continue
            points = []
            for worker in d['workers']:
                logger.debug('process worker data...')
                stored_wk_data = workers_stats[worker['id']]
                # uwsgi exposes cumulative counters; store the delta.
                requests = worker['requests'] - stored_wk_data['requests']
                exceptions = worker['exceptions'] - \
                    stored_wk_data['exceptions']
                tx = worker['tx'] - stored_wk_data['tx']
                points.append({
                    'measurement': 'uwsgi_stats',
                    'tags': {
                        'host': config['hostname'],
                        'worker': worker['id']
                    },
                    'fields': {
                        'requests': requests,
                        'exceptions': exceptions,
                        'tx': tx,
                        'rss': worker['rss'],
                        'vsz': worker['vsz'],
                        'avg_rt': worker['avg_rt'],
                        'wid': worker['id'],
                        'status': worker['status']
                    }
                })
                workers_stats[worker['id']] = worker
            yield From(agent.async_push(points, db_config['name']))
        except Exception:
            # FIX: narrowed from a bare `except:`.
            logger.exception('cannot get the uwsgi stats')
    logger.info('get_uwsgi_stats terminated')
def test_open_connection(self):
    # Smoke-test open_connection() against the local test HTTP server.
    with test_utils.run_test_server() as httpd:
        conn_fut = asyncio.open_connection(*httpd.address, loop=self.loop)
        self._basetest_open_connection(conn_fut)