def connect(self):
    '''Establish a connection.'''
    _logger.debug(__('Connecting to {0}.', self._address))

    # A connection object is single-use: refuse anything but the pristine state.
    if self._state != ConnectionState.ready:
        raise Exception('Closed connection must be reset before reusing.')

    if self._sock:
        # Wrap a pre-existing socket (handed over from elsewhere).
        connection_future = asyncio.open_connection(
            sock=self._sock, **self._connection_kwargs()
        )
    else:
        # TODO: maybe we don't want to ignore flow-info and scope-id?
        host = self._address[0]
        port = self._address[1]
        connection_future = asyncio.open_connection(
            host, port, **self._connection_kwargs()
        )

    # run_network_operation applies the connect timeout and error mapping.
    self.reader, self.writer = yield from \
        self.run_network_operation(
            connection_future,
            wait_timeout=self._connect_timeout,
            name='Connect')

    # Arm an idle-close timer only when a timeout was configured.
    if self._timeout is not None:
        self._close_timer = CloseTimer(self._timeout, self)
    else:
        self._close_timer = DummyCloseTimer()

    self._state = ConnectionState.created
    _logger.debug('Connected.')
def _connect_hub(self, host, port):
    """ connect out to a hub """
    hub = '{} {}'.format(host, port)
    # Register this hub for reconnect bookkeeping (0 = no failures yet).
    self.persist_hubs[hub] = 0
    prnt('connecting to hub at {} port {}'.format(host, port))
    if hasattr(self, 'use_socks') and self.use_socks:
        # Connect through the configured SOCKS proxy, then tunnel to the hub.
        r, w = yield from asyncio.open_connection(self.socks_host, int(self.socks_port), loop=self.loop)
        result = yield from self._socks_handshake(r, w, host, int(port))
        self.log.debug('socks = {}'.format(result))
    else:
        try:
            r, w = yield from asyncio.open_connection(host, int(port), loop=self.loop)
        except Exception as e:
            prnt('error connecting to {} {} {}'.format(host, port, e))
            return
        else:
            result = True
    if result is True:
        prnt('connected to hub at {} port {}'.format(host, port))
        con = self._new_hub_connection(r, w)
        con.addr = hub
        return con
    else:
        # SOCKS handshake failed; drop the proxy connection.
        prnt('connection to hub at {} port {} failed'.format(host, port))
        w.close()
def checkURLResource(r, counter):
    """Issue an HTTP(S) HEAD request for the URL in row *r* and report problems.

    r       -- record; r[13] is the URL, r[11] the creator, r[0] the DOI id.
    counter -- index used by handleErrors / testingArray bookkeeping.
    """
    url_str = r[13].strip()
    creator = r[11]
    doi_id = r[0].strip()
    try:
        url = urllib.parse.urlsplit(url_str)
        if url.scheme.find('http') != 0:
            handleErrors(creator, 'Not http: DOI_ID: %s URL: %s' % (doi_id, url_str), counter)
            return
        urlPath = url.path if url.query == '' else url.path + "?" + url.query
        # Throttle politely between requests.  BUGFIX: the original called
        # asyncio.sleep(0.3) without "yield from", so the coroutine was
        # created but never awaited and no delay ever happened.
        yield from asyncio.sleep(0.3)
        if url.scheme.find('https') == 0:
            port = url.port if url.port else 443
            reader, writer = yield from asyncio.open_connection(url.hostname, port, ssl=myconfig.context)
        else:
            port = url.port if url.port else 80
            reader, writer = yield from asyncio.open_connection(url.hostname, port)
        query = ('HEAD ' + urlPath + ' HTTP/1.0\r\n'
                 'Host: {url.hostname}\r\n'
                 'User-agent: Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US; rv:1.8.1.6) Gecko/20070725 Firefox/2.0.0.6\r\n'
                 'Accept: text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5\r\n'
                 'Accept-Language: en-us,en;q=0.5\r\n'
                 'Accept-Encoding: gzip,deflate\r\n'
                 'Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7\r\n'
                 'Keep-Alive: 300\r\n'
                 '\r\n').format(url=url)
        writer.write(query.encode("utf-8"))
        # Scan the response headers for status, content type and redirects.
        while True:
            line = yield from reader.readline()
            if not line:
                break
            line = line.decode("utf-8").rstrip()
            if line:
                if line.find('Content-Type') == 0:
                    mType = line
                if line.find('HTTP/1.') == 0:
                    mStatus = line
                if (line.find('Location:') == 0) or (line.find('location:') == 0):
                    location = line.split()[1]
        # If no status line arrived, mStatus is unbound and the
        # UnboundLocalError handler below reports it as an error.
        if mStatus:
            statusCode = int(mStatus.split()[1])
            if statusCode > 399:
                handleErrors(creator, '4/500s: DOI_ID: %s URL: %s Status %s' % (doi_id, url_str, mStatus), counter)
            elif statusCode == 301 or statusCode == 302:
                location = constructAbsolutePath(url.scheme, url.hostname, url.port, location)
                if url_str != location:
                    yield from checkRedirect(location, creator, doi_id, counter)
                else:
                    handleErrors(creator, 'Error Redirect url same as original: DOI_ID: %s URL: %s' % (doi_id, url_str), counter)
            else:
                # Success: remove this entry from the outstanding-test table.
                try:
                    del testingArray[counter]
                except KeyError:
                    pass
    except UnboundLocalError as e:
        handleErrors(creator, 'Error DOI_ID: %s URL: %s exception %s' % (doi_id, url_str, repr(e)), counter)
    except Exception as e:
        handleErrors(creator, 'Error DOI_ID: %s URL: %s exception %s' % (doi_id, url_str, repr(e)), counter)
def mycoro():
    """Wrap both ends of a socket pair in streams and echo a payload across."""
    client_reader, client_writer = yield from asyncio.open_connection(sock=c_sock)
    server_reader, server_writer = yield from asyncio.open_connection(sock=s_sock)

    payload = b'Echo... Echo... Echo...'
    server_writer.write(payload)
    yield from server_writer.drain()

    echoed = yield from client_reader.readexactly(len(payload))
    assert payload == echoed
    server_writer.close()
async def open_connection(self, host, port, local_addr, lbind):
    """Open (or reuse) an outbound connection and return (reader, writer).

    For reusable/SSH tunnels a shared future caches the streams so that
    concurrent callers piggyback on a single connection attempt.
    """
    if self.reuse or self.ssh:
        # First caller (or first after completion, for reuse without an
        # active handler) creates a fresh future; everyone else awaits it.
        if self.streams is None or self.streams.done() and (self.reuse and not self.handler):
            self.streams = asyncio.get_event_loop().create_future()
        else:
            if not self.streams.done():
                await self.streams
            return self.streams.result()
    try:
        if self.direct:
            if host == 'tunnel':
                raise Exception('Unknown tunnel endpoint')
            # NOTE(review): this branch honours the *parameters* lbind /
            # local_addr, while the later branches use self.lbind — confirm
            # the asymmetry is intentional.
            local_addr = local_addr if lbind == 'in' else (lbind, 0) if lbind else None
            family = 0 if local_addr is None else socket.AF_INET6 if ':' in local_addr[0] else socket.AF_INET
            wait = asyncio.open_connection(host=host, port=port, local_addr=local_addr, family=family)
        elif self.ssh:
            try:
                import asyncssh
                # Graft asyncio's reader helpers onto asyncssh's SSHReader so
                # both stream types expose the same read API downstream.
                for s in ('read_', 'read_n', 'read_until'):
                    setattr(asyncssh.SSHReader, s, getattr(asyncio.StreamReader, s))
            except Exception:
                raise Exception('Missing library: "pip3 install asyncssh"')
            username, password = self.auth.decode().split(':', 1)
            # A leading ':' in the password field means "client key path".
            if password.startswith(':'):
                client_keys = [password[1:]]
                password = None
            else:
                client_keys = None
            local_addr = local_addr if self.lbind == 'in' else (self.lbind, 0) if self.lbind else None
            family = 0 if local_addr is None else socket.AF_INET6 if ':' in local_addr[0] else socket.AF_INET
            conn = await asyncssh.connect(host=self.host_name, port=self.port, local_addr=local_addr,
                                          family=family, x509_trusted_certs=None, known_hosts=None,
                                          username=username, password=password, client_keys=client_keys)
            if not self.streams.done():
                self.streams.set_result((conn, None))
            return conn, None
        elif self.backward:
            wait = self.backward.open_connection()
        elif self.unix:
            wait = asyncio.open_unix_connection(path=self.bind, ssl=self.sslclient,
                                                server_hostname='' if self.sslclient else None)
        else:
            local_addr = local_addr if self.lbind == 'in' else (self.lbind, 0) if self.lbind else None
            family = 0 if local_addr is None else socket.AF_INET6 if ':' in local_addr[0] else socket.AF_INET
            wait = asyncio.open_connection(host=self.host_name, port=self.port,
                                           ssl=self.sslclient, local_addr=local_addr, family=family)
        reader, writer = await asyncio.wait_for(wait, timeout=SOCKET_TIMEOUT)
    except Exception as ex:
        # Propagate the failure to any piggybacking waiters, then drop the
        # cached future so the next caller retries from scratch.
        if self.reuse:
            self.streams.set_exception(ex)
            self.streams = None
        raise
    return reader, writer
async def _get_remote(self, target): """ Getting response from web server. Args: target: The target address. Returns: The response is a tuple in form of (resp_hdr, resp_cont) """ # Parsing url into different parts url = urllib.parse.urlsplit(target) # Setting up connection according to protocol type conn = asyncio.open_connection(url.hostname, 443, ssl=True) \ if url.scheme == 'https' else \ asyncio.open_connection(url.hostname, 80) reader, writer = await conn # Assembling request header req = ('GET {path} HTTP/1.0\r\n' 'Host: {hostname} \r\n' '\r\n').format(path = url.path or '/', hostname = url.hostname) # Sending request writer.write(req.encode('utf-8')) # Reading response resp_hdr = '' resp_cont = '' content_flag = False while True: line = await reader.readline() if not line: break else: if content_flag: resp_cont += line.decode('utf-8').rstrip() else: if line.decode('utf-8') \ == '<?xml version="1.0" encoding="utf-8"?>\n': content_flag = True resp_cont += line.decode('utf-8').rstrip() else: resp_hdr += line.decode('utf-8') return (resp_hdr, resp_cont)
async def connect(self, ssl=False):
    """Open the plain TCP connection, or (ssl=True) wrap the existing
    socket with TLS.

    Raises ProxyTimeoutError / ProxyConnError on failure; always counts
    the attempt in self.stat and logs the outcome.
    """
    err = None
    msg = '%s' % 'SSL: ' if ssl else ''
    stime = time.time()
    self.log('%sInitial connection' % msg)
    try:
        if ssl:
            # Re-wrap the already-open 'conn' socket with TLS.
            _type = 'ssl'
            sock = self._writer['conn'].get_extra_info('socket')
            params = {'ssl': self._ssl_context, 'sock': sock,
                      'server_hostname': self.host}
        else:
            _type = 'conn'
            params = {'host': self.host, 'port': self.port}
        self._reader[_type], self._writer[_type] = \
            await asyncio.wait_for(asyncio.open_connection(**params),
                                   timeout=self._timeout)
    except asyncio.TimeoutError:
        msg += 'Connection: timeout'
        err = ProxyTimeoutError(msg)
        raise err
    except (ConnectionRefusedError, OSError, _ssl.SSLError):
        msg += 'Connection: failed'
        err = ProxyConnError(msg)
        raise err
    # except asyncio.CancelledError:
    #     log.debug('Cancelled in proxy.connect()')
    #     raise ProxyConnError()
    else:
        msg += 'Connection: success'
        self._closed = False
    finally:
        # Count every attempt, successful or not, and log the result.
        self.stat['requests'] += 1
        self.log(msg, stime, err=err)
def connect(self):
    """Open the TCP or unix-socket connection, optionally wrapped in TLS."""
    is_unix_socket = (self.options.family == getattr(socket, 'AF_UNIX', None))
    if self.options.use_ssl:
        # TODO: cache at Pool level.
        ctx = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        if self.options.certfile is not None:
            ctx.load_cert_chain(self.options.certfile, self.options.keyfile)
        if self.options.ca_certs is not None:
            ctx.load_verify_locations(self.options.ca_certs)
        if self.options.cert_reqs is not None:
            ctx.verify_mode = self.options.cert_reqs
        # Hostname checking only makes sense when certificates are verified.
        if ctx.verify_mode in (ssl.CERT_OPTIONAL, ssl.CERT_REQUIRED):
            ctx.check_hostname = True
    else:
        ctx = None
    if is_unix_socket:
        path = self.options.address[0]
        reader, writer = yield from asyncio.open_unix_connection(
            path, loop=self.loop, ssl=ctx)
    else:
        host, port = self.options.address
        reader, writer = yield from asyncio.open_connection(
            host=host, port=port, ssl=ctx, loop=self.loop)
    sock = writer.transport.get_extra_info('socket')
    # Latency: disable Nagle.  Liveness: keepalive per configuration.
    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE,
                    self.options.socket_keepalive)
    self._reader, self._writer = reader, writer
def data_client(host, port):
    # Drive a scripted request/response exchange.  HANDLERS appears to
    # alternate between zero-arg request builders (even indexes) and one-arg
    # response consumers (odd indexes) — NOTE(review): inferred from the
    # call shapes below; confirm against the HANDLERS definition.
    reader, writer = yield from asyncio.open_connection(host, port)
    i = 0
    if len(HANDLERS) == i:
        return
    # Send the first request.
    writer.write(HANDLERS[i]())
    print()
    while True:
        # Read one framed message: fixed-size header, then sized body.
        msg_head = yield from reader.read(msg.HEADER_SIZE)
        (msg_type, msg_size) = msg.unpack_head(msg_head)
        msg_body = yield from reader.read(msg_size)
        i += 1
        if len(HANDLERS) == i:
            break
        HANDLERS[i](msg_body)  # consume the response
        print()
        i += 1
        if len(HANDLERS) == i:
            break
        writer.write(HANDLERS[i]())  # send the next request
        print()
    writer.close()
    print('\nOKAY\n')
def test_pack_ping(self):
    """pack_ping output must round-trip through rpc.read_command."""
    config = kademlia.utils.load_config("config.json")
    loop = asyncio.get_event_loop()
    loop.set_debug(config['debug']['asyncio']['enabled'])
    service = kademlia.Service(config, loop)
    echo = kademlia.utils.get_echo_bytes()
    # Connected socket pair: raw bytes go in one end (wsock), the stream
    # reader consumes them from the other (rsock).
    rsock, wsock = socketpair()
    reader, writer = loop.run_until_complete(
        asyncio.open_connection(sock=rsock, loop=loop)
    )
    wsock.send(
        service.tcpService.rpc.pack_ping(service.tcpService.node, echo)
    )
    _command, _echo, _remoteNode, _data = loop.run_until_complete(
        asyncio.ensure_future(
            service.tcpService.rpc.read_command(reader)
        )
    )
    writer.close()
    wsock.close()
    self.assertEqual(_command, kademlia.const.kad.command.PING)
    self.assertEqual(echo, _echo)
def _select_next_server(self):
    """
    Looks up in the server pool for an available server
    and attempts to connect.
    """
    srv = None
    now = time.monotonic()
    for s in self._server_pool:
        # Skip servers that exhausted their reconnect budget.
        if s.reconnects > self.options["max_reconnect_attempts"]:
            continue
        # Back off before re-dialing a server we recently connected to.
        if s.did_connect and now > s.last_attempt + self.options["reconnect_time_wait"]:
            yield from asyncio.sleep(self.options["reconnect_time_wait"], loop=self._loop)
        try:
            s.last_attempt = time.monotonic()
            r, w = yield from asyncio.open_connection(
                s.uri.hostname,
                s.uri.port,
                loop=self._loop,
                limit=DEFAULT_BUFFER_SIZE)
            srv = s
            self._io_reader = r
            self._io_writer = w
            s.did_connect = True
            break
        except Exception as e:
            # Remember the failure and try the next server in the pool.
            self._err = e
    if srv is None:
        raise ErrNoServers
    self._current_server = srv
async def _ssl_wrap_connection(self):
    """Upgrade the existing plain connection to SSL.

    Returns True on success, False on SSL protocol errors, and None on
    reset/OS errors or timeout.  NOTE(review): the bare ``return`` paths
    yield None rather than False — confirm callers treat both as failure.
    """
    # like aiohttp/connector.py ProxyConnector._create_connection()
    stime = time.time()
    msg = ''
    try:
        # self._writer.transport.pause_reading()
        # Re-wrap the already-connected socket with TLS.
        conn = asyncio.open_connection(
            ssl=self.sslContext,
            sock=self._writer.get_extra_info('socket'),
            server_hostname=self.host)
        # NOTE(review): name-mangled self.__reader/__writer differ from the
        # self._writer used above — confirm this is intentional.
        self.__reader['ssl'], self.__writer['ssl'] = \
            await asyncio.wait_for(conn, timeout=self._timeout)
    except (ConnectionResetError, OSError) as e:
        msg = 'SSL: failed'
        return
    except asyncio.TimeoutError:
        msg = 'SSL: timeout'
        return
    except ssl.SSLError as e:
        msg = 'SSL: %s' % e
        return False
    else:
        msg = 'SSL: enabled'
        return True
    finally:
        # Log the outcome regardless of which path was taken.
        self.log(msg, stime)
def connect(wsurl, *, loop = None, limit = None, **kwds):
    """ Connect to a websocket server.

    Connect will automatically carry out a websocket handshake.

    :param wsurl: Websocket uri. See `RFC6455 URIs. <https://tools.ietf.org/html/rfc6455#section-3>`_
    :param kwargs: See `open_connection. \
<https://docs.python.org/3.4/library/asyncio-stream.html#asyncio.open_connection>`_
    :return: :class:`Websocket` object on success.
    :raises Exception: When there is an error during connection or handshake.
    """
    writer = None
    try:
        url = urllib.parse.urlparse(wsurl)
        port = 80
        if url.port:
            port = url.port
        use_ssl = False
        # BUGFIX: urlparse yields the bare scheme ('wss', no '://'), so the
        # original check url.scheme.startswith('wss://') could never match
        # and wss URLs without an explicit port wrongly defaulted to 80.
        if url.scheme == 'wss':
            use_ssl = True
            if not url.port:
                port = 443
        # NOTE(review): use_ssl is not forwarded to open_connection here;
        # callers presumably pass ssl= via **kwds — confirm.
        reader, writer = yield from asyncio.open_connection(host = url.hostname,
                                                            port = port,
                                                            loop = loop,
                                                            **kwds)
        response = yield from handshake_with_server(reader, writer, url)
        websocket = Websocket(reader, writer)
        # Clients must mask outgoing frames per RFC 6455.
        websocket._mask = True
        websocket.reponse = response
        return websocket
    except BaseException as exp:
        # Make sure a half-open socket is not leaked on any failure.
        if writer:
            writer.close()
        raise exp
def client(host, port):
    """Connect and write b"foo\n" forever (simple traffic generator)."""
    stream_reader, stream_writer = yield from asyncio.open_connection(
        host, port, loop=self.loop)
    payload = b"foo\n"
    while True:
        stream_writer.write(payload)
        yield from stream_writer.drain()
def hmland(manager, host, port):
    """Maintain a persistent connection to hmland, reconnecting every 2s.

    Reads lines from the hmland service and feeds them, decoded and
    stripped, to *manager*.handle().
    """
    while True:
        print("Connecting to hmland")
        try:
            # BUGFIX: the original built a connection by hand with
            # loop.create_connection() and then opened a SECOND connection
            # with asyncio.open_connection(), leaking the first one on every
            # iteration.  A single open_connection() call is sufficient.
            reader, writer = yield from asyncio.open_connection(host, port)
            # Enable TCP keepalive via the public accessor instead of poking
            # the private transport._sock attribute.
            sock = writer.get_extra_info('socket')
            sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
            manager.setWriter(writer)
            # BUGFIX: asyncio.async() is a SyntaxError since Python 3.7
            # ("async" became a keyword); ensure_future() is the replacement.
            asyncio.ensure_future(manager.update_locks())
            while True:
                line = yield from reader.readline()
                if not line:
                    break
                yield from manager.handle(line.decode("UTF-8").strip())
        except Exception:
            # BUGFIX: narrowed from a bare "except:" that also swallowed
            # CancelledError/KeyboardInterrupt, making the loop uncancellable.
            pass
        yield from asyncio.sleep(2)
def activity(self):
    """Main client loop: connect with capped exponential backoff, flush the
    queued requests, then dispatch responses to their waiters until an
    error forces a reconnect."""
    backoff = 0
    while True:
        try:
            self.reader, self.writer = yield from asyncio.open_connection(
                self.host, self.port, ssl=self.sslctx, loop=self.loop)
        except Exception as exc:
            # Grow ~1.5x per failure, capped at args.max_backoff seconds.
            backoff = min(args.max_backoff, backoff + (backoff//2) + 1)
            logging.info('Error connecting: %r; sleep %s', exc, backoff)
            yield from asyncio.sleep(backoff, loop=self.loop)
            continue
        backoff = 0
        self.next_id = 0
        self.pending = {}
        self. initialized = True
        try:
            # First drain anything queued while we were disconnected.
            while self.todo:
                payload, waiter = self.todo.pop()
                if not waiter.done():
                    yield from self.send(payload, waiter)
            # Then match incoming responses to pending requests by id.
            while True:
                resp_id, resp = yield from self.process()
                if resp_id in self.pending:
                    payload, waiter = self.pending.pop(resp_id)
                    if not waiter.done():
                        waiter.set_result(resp)
        except Exception as exc:
            self.initialized = False
            self.writer.close()
            # Re-queue in-flight requests so they retry after reconnect.
            while self.pending:
                req_id, pair = self.pending.popitem()
                payload, waiter = pair
                if not waiter.done():
                    self.todo.add(pair)
            logging.info('Error processing: %r', exc)
def server_loop(self):
    """
    Main server loop. As clients connect to the proxy, pass the connection
    on to the upstream server and bind it to a 'connection'. Start sniffing
    all packets as they fly by.
    :return:
    """
    # Dial the upstream server this proxied client will talk to.
    (self._client_reader, self._client_writer) = \
        yield from asyncio.open_connection(self.config['upstream_host'],
                                           self.config['upstream_port'])
    # Pump traffic in the other direction concurrently.
    self._client_loop_future = asyncio.ensure_future(self.client_loop())
    try:
        while True:
            packet = yield from read_packet(self._reader, Direction.TO_SERVER)
            # Break in case of emergencies:
            # if packet['type'] not in [17, 40, 43, 48, 51]:
            #     logger.debug('c->s {}'.format(packet['type']))
            # Only forward packets the plugins approve of.
            if (yield from self.check_plugins(packet)):
                yield from self.write_client(packet)
    except asyncio.IncompleteReadError:
        # Pass on these errors. These occur when a player disconnects badly
        pass
    except Exception as err:
        logger.error('Server loop exception occurred:'
                     '{}: {}'.format(err.__class__.__name__, err))
    finally:
        self.die()
def connect_to_master(self, ip):
    """Open a connection to the master node, record it, and start its
    handler task."""
    reader, writer = yield from asyncio.open_connection(
        host=ip, port=self.args.port, loop=self.loop)
    print('Connected to master node')
    self.nodes.append((reader, writer))
    self.master = (reader, writer)
    # BUGFIX: asyncio.async() is a SyntaxError since Python 3.7 ("async"
    # became a reserved keyword); ensure_future() is the drop-in replacement.
    asyncio.ensure_future(self.handle_node(reader, writer))
def issue_client(ip, port, params, ipub, keypair, public_attr, private_attr, loop, repeat=1): """ Implements a client for the ISSUE protocol. """ # Part 2. Send the encrypted attributes to server user_token = cred_secret_issue_user(params, keypair, private_attr) (pub, EGenc, sig_u) = user_token t0 = time.monotonic() for _ in range(repeat): ## Setup the channel reader, writer = yield from asyncio.open_connection( ip, port, loop=loop) sr = SReader(reader, writer) # Send the FULL command sr.put("ISSUE") sr.put( (user_token, public_attr) ) # Part 3. Get the credential back cred = yield from sr.get() t1 = time.monotonic() if repeat > 1: print("CORE ISSUE time (1): %.3f sec (repeat=%s)" % ((t1-t0) / repeat, repeat)) (u, EncE, sig_s) = cred mac = cred_secret_issue_user_decrypt(params, keypair, u, EncE, ipub, public_attr, EGenc, sig_s) return mac, user_token, cred
def connect_to_remote(self, remote, **kwargs):
    """Open a stream connection to ``remote``, a (host, port) pair.

    Extra keyword arguments are forwarded to asyncio.open_connection.
    """
    self.debug("Connecting to %s", remote)
    target_host, target_port = remote
    conn = yield from asyncio.open_connection(
        host=target_host,
        port=target_port,
        **kwargs
    )
    return conn
def sample_cli(loop):
    """Demo client: read the ProtoHelo greeting, then echo 101 messages."""
    reader, writer = yield from asyncio.open_connection(
        '127.0.0.1', 8888, loop=loop
    )
    print('Connected.')
    # Receive the fixed-size ProtoHelo struct directly into a cffi buffer.
    helo = ffi.new('ProtoHelo[]', 1)
    ffi.buffer(helo)[:] = yield from reader.read(ffi.sizeof(helo))
    print('Received Helo: {}, {}'.format(
        helo[0].cmd, helo[0].version
    ))
    for i in range(0, 100+1):
        sendMsg = 'msg_{}'.format(i)
        sendEcho = ffi.new('ProtoEcho[]',
                           [(i, len(sendMsg), sendMsg.encode('utf-8'))])
        writer.write(bytes(ffi.buffer(sendEcho)))
        yield from writer.drain()
        recvEcho = ffi.new('ProtoEcho[]', 1)
        try:
            # A short read makes the buffer-slice assignment raise ValueError.
            ffi.buffer(recvEcho)[:] = yield from reader.read(ffi.sizeof(recvEcho))
        except ValueError as e:
            print('ValueError: ', e)
            break
        print('Received {}, {}, {}'.format(
            recvEcho[0].cmd,
            recvEcho[0].msgLen,
            ffi.string(recvEcho[0].msg).decode('utf-8')
        ))
    writer.close()
def threadWriter(host, message, Loop):
    """Send *message* (a str) to host = (address, port) and close.

    Best-effort: connection failures are reported on stdout, not raised.
    """
    try:
        reader, writer = yield from asyncio.open_connection(host=host[0],
                                                            port=int(host[1]),
                                                            loop=Loop)
        writer.write(message.encode())
        yield from writer.drain()
        writer.close()
        print("Message Send Complete")
    except Exception:
        # BUGFIX: narrowed from a bare "except:" which also swallowed
        # KeyboardInterrupt and task cancellation.
        print("Lost connection to a process")
    # BUGFIX: the original had an unreachable blocking-socket send path
    # (socket.socket / connect / send) after this return; it was dead code
    # and has been removed.
    return
def test_del_stream_before_sock_closing(self):
    """Dropping reader/writer without close() must warn and close the socket."""
    messages = []
    # Capture loop exception-handler callbacks instead of logging them.
    self.loop.set_exception_handler(lambda loop, ctx: messages.append(ctx))

    with test_utils.run_test_server() as httpd:
        rd, wr = self.loop.run_until_complete(
            asyncio.open_connection(*httpd.address, loop=self.loop))
        sock = wr.get_extra_info('socket')
        self.assertNotEqual(sock.fileno(), -1)

        wr.write(b'GET / HTTP/1.0\r\n\r\n')
        f = rd.readline()
        data = self.loop.run_until_complete(f)
        self.assertEqual(data, b'HTTP/1.0 200 OK\r\n')

        # drop refs to reader/writer
        del rd
        del wr
        gc.collect()
        # make a chance to close the socket
        test_utils.run_briefly(self.loop)

        self.assertEqual(1, len(messages))
        # GC must have closed the underlying socket...
        self.assertEqual(sock.fileno(), -1)

    self.assertEqual(1, len(messages))
    # ...and reported exactly one "open stream" warning via the handler.
    self.assertEqual('An open stream object is being garbage '
                     'collected; call "stream.close()" explicitly.',
                     messages[0]['message'])
def test_if_user_gets_disconnected_on_close(self):
    """A client issuing close() must be removed from the server's client list."""
    yield from self.mainserver.run_server()
    rd, wr = yield from asyncio.open_connection(self.mainserver.host,
                                                self.mainserver.port)
    wr.write(b'close()\n')
    # Drain the stream until the server hangs up on us.
    while not rd.at_eof():
        yield from rd.readline()
    self.assertFalse(self.mainserver.clients)
def connect(self):
    """Reconnect loop: dial the tunnel endpoint until self.running clears,
    with an 8s connect timeout and an 8s pause after refusals."""
    while self.running:
        self.logger.info('connecting to {}:{}'.format(self.host, self.port))
        try:
            connect_coro = asyncio.open_connection(self.host, self.port, loop=self.loop)
            reader, writer = yield from asyncio.wait_for(connect_coro, 8.0)
            # Hand the established streams to the tunnel handler; returning
            # from it means the connection ended, so we loop and redial.
            yield from self.handler(reader, writer)
        except asyncio.TimeoutError:
            self.logger.info('connection timeout')
        except ConnectionRefusedError:
            self.logger.info('connection refused')
            yield from asyncio.sleep(8.0)
        except KeyboardInterrupt:
            self.loop.stop()
            self.logger.debug('execution aborted')
            break
        except asyncio.CancelledError:
            break
        except:
            # NOTE(review): bare except — also catches SystemExit /
            # GeneratorExit; narrowing to Exception would change shutdown
            # behavior, so it is left as-is and logged.
            self.logger.error('unknown tunnel exception: \n{}'.format(traceback.format_exc()))
            break
def connect(self, loop=None):
    """Return an awaitable that opens a stream to the first address in
    ``self.addrs``.

    Supports 'unix' (path / tmpdir / abstract), 'launchd' (socket path read
    from an environment variable) and 'tcp' address descriptors; raises for
    anything else.
    """
    loop = loop or self.loop
    for addr in self.addrs:
        if addr['type'] == 'unix':
            # Precedence: explicit path, then tmpdir, then abstract socket.
            to = addr.get('path')
            to = to or addr.get('tmpdir')
            to = to or addr.get('abstract')
            return aio.open_unix_connection(to, loop=loop)
        elif addr['type'] == 'launchd':
            # launchd publishes the socket path through an env variable.
            env = addr.get('env')
            to = environ.get(env, None)
            return aio.open_unix_connection(to, loop=loop)
        elif addr['type'] == 'tcp':
            # BUGFIX: the original tested addr['tcp'] == 'launchd', which
            # raised KeyError for every TCP descriptor and could never match;
            # the intended key is 'type' with value 'tcp'.
            host = addr.get('host', '127.0.0.1')
            bind = addr.get('bind', None)
            port = addr.get('port', None)
            family = addr.get('family', None)
            return aio.open_connection(
                host=host,
                port=port,
                ssl=True if port == '443' else None,
                family=AF_INET6 if family == 'ipv6' else AF_INET,
                local_addr=bind
            )
    raise Exception('UNKNOWN ADDRESS TYPE')
def test_if_eof_is_set(self):
    """After the server force-closes its clients, the reader reports EOF."""
    yield from self.mainserver.run_server()
    rd, wr = yield from asyncio.open_connection(self.mainserver.host,
                                                self.mainserver.port)
    self.mainserver.close_clients()
    # Consume everything until the connection is torn down.
    while not rd.at_eof():
        yield from rd.readline()
    self.assertTrue(rd.at_eof())
def connect_upstream(self, client_reader, client_writer, user):
    """Open the upstream connection for *user* and start proxying both ways.

    On timeout or error the client connection is closed and the failure
    logged; on success two proxy_data tasks shuttle the streams.
    """
    cph, cpp = client_writer.get_extra_info('peername')
    csh, csp = client_writer.get_extra_info('sockname')
    try:
        coro = asyncio.open_connection(host=self.upstream_ip,
                                       port=self.upstream_port)
        remote_reader, remote_writer = yield from asyncio.wait_for(coro, \
            timeout=self.upstream_timeout)
        rph, rps = remote_writer.get_extra_info('peername')
        rsh, rsp = remote_writer.get_extra_info('sockname')
    except asyncio.TimeoutError:
        s = '[{}] Timeout connecting to: {}:{} ({} seconds)'
        log.error(s.format(user, self.upstream_ip, self.upstream_port,
                           self.upstream_timeout))
        log.info('[{}] Closing connection to {}:{}'.format(user, cph, cpp))
        client_writer.close()
    except Exception as e:
        s = '[{}] Error connecting to {}:{}: {}'
        log.error(s.format(user, self.upstream_ip, self.upstream_port, e))
        log.info('[{}] Closing connection to {}:{}'.format(user, cph, cpp))
        client_writer.close()
    else:
        # BUGFIX: asyncio.async() is a SyntaxError since Python 3.7 ("async"
        # became a keyword); ensure_future() is the drop-in replacement.
        asyncio.ensure_future(self.proxy_data(client_reader, remote_writer,
                                              rph, rps,
                                              'upstream -> pipecast', user))
        asyncio.ensure_future(self.proxy_data(remote_reader, client_writer,
                                              cph, cpp,
                                              'pipecast -> client', user))
        s = '[{}] Opened proxy from [client {}:{} -> {}:{}] -> pipe ->' + \
            ' [{}:{} -> {}:{} upstream]'
        log.info(s.format(user, cph, cpp, csh, csp, rsh, rsp, rph, rps))
def start_irc_bot():
    """Connect to IRC, register, and pump lines into triskelion.irc_handle;
    reconnect after 10 seconds whenever the connection drops."""
    while True:
        irc_reader, irc_writer = yield from asyncio.open_connection(host=config.irc_host, port=config.irc_port, **config.irc_kwargs)
        def _send_line(line):
            # Append CRLF per the IRC protocol and log outbound traffic.
            msg = '%s\r\n' % line
            irc_writer.write(msg.encode('utf-8'))
            logging.debug('[IRC] >>> {0}'.format(line))
        # Expose the sender globally so other modules can emit IRC lines.
        global send_line
        send_line = _send_line
        send_line('USER {0} 8 * :{0}'.format(config.irc_nick))
        send_line('NICK {0}'.format(config.irc_nick))
        while True:
            try:
                line = yield from irc_reader.readline()
                line = line.rstrip().decode('utf-8', 'ignore')
            except EOFError:
                # NOTE(review): StreamReader.readline signals EOF with b'',
                # not EOFError — this handler likely never fires; the empty
                # line check below is what actually detects disconnect.
                break
            if not line:
                break
            logging.debug('[IRC] <<< {0}'.format(line))
            try:
                triskelion.irc_handle(line)
            except Exception:
                # Report handler failures into the channel instead of dying.
                ty, exc, tb = sys.exc_info()
                send_line('PRIVMSG %s :ERROR! %s %s' % (config.irc_channel, ty, exc))
                traceback.print_exception(ty, exc, tb)
        yield from asyncio.sleep(10)
def client_connection(self):
    """ Coroutine to open the client connection """
    conn_reader, conn_writer = yield from asyncio.open_connection(
        self.ip,
        self.port,
        loop=self.loop
    )
    return conn_reader, conn_writer
def connect_client(server):
    """Open a client connection to *server*'s first listening socket and
    wrap it in a QDataStreamProtocol."""
    address = server.sockets[0].getsockname()
    reader, writer = yield from asyncio.open_connection(*address)
    return QDataStreamProtocol(reader, writer)
def connect(self):
    """Open the stream connection once; later calls are no-ops."""
    if self.open:
        return
    self._reader, self._writer = yield from asyncio.open_connection(
        self.host, self.port, loop=self.loop)
    self.open = True
def open_connection(host, port, *args, **kwargs):
    """Open a KNX/EIB connection and send the group-connect handshake."""
    streams = yield from aio.open_connection(host, port, *args, **kwargs)
    rd, wr = streams
    wr.write(encode_data('HHB', [EIB_OPEN_GROUPCON, 0, 0]))
    return rd, wr
def create_connection(address, *, password=None, encoding=None, parser=None, loop=None, timeout=None, connect_cls=None, reusable=True): ''' 创建SSDB数据库连接 :param address: 类似于socket的地址,如果是tuple或者list,则应该是(host, port)这种形式, 但是不支持unix socket :param password: SSDB数据库的密码,默认是None :param encoding: 用于将读取的数据从bytes解码成str,默认为None :param parser: 根据SSDB协议解析返回数据的解析器,默认会使用自定义的SSDBParser :param loop: :param timeout: 默认情况,timeout会在连接状态下应用限制等待时间, 也可以使用这个参数来定义创建连接所花的时间 :param connect_cls: :param reusable: 设置端口重用,默认为True :return: 返回一个SSDBConnection对象,如果传递了connect_cls,则会返回这个类的实例 ''' # 首先判断address assert isinstance(address, (tuple, list)), "tuple or list expected" # 判断timeout if timeout is not None and timeout <= 0: raise ValueError("Timeout has to be None or a number greater than 0") # 默认connect_cls是SSDBConnection # TODO: 判断传入的类是否是可以应用的连接类型 if connect_cls is None: connect_cls = SSDBConnection # 开始连接 host, port = address logger.debug("Creating tcp connection to %r", address) # asyncio.open_connection创建套接字连接,返回reader和writer对象,它也是一个协程 # 实际调用的是loop.create_connection # wait_for函数提供等待Future或者协程完成直到超时的功能,返回协程或者Future的结果 reader, writer = yield from asyncio.wait_for(asyncio.open_connection( host, port, loop=loop), timeout, loop=loop) sock = writer.transport.get_extra_info('socket') if sock is not None: # 设置端口重用 if reusable: sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # 设置TCP无延迟,其相对是 Nagle’s Algorithm sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) address = sock.getpeername() address = tuple(address[:2]) conn = connect_cls(reader, writer, encoding=encoding, address=address, parser=parser, loop=loop) try: if password is not None: yield from conn.auth(password) except Exception: conn.close() yield from conn.wait_closed() raise return conn
async def connect(self):
    """Connect to the DIMM controller's TCP/IP."""
    async with self.cmd_lock:
        self.log.debug(f"connecting to: {self.host}:{self.port}")
        if self.connected:
            self.log.error("Already connected.")
            self.status["status"] = DIMMStatus["ERROR"]
            return
        try:
            self.connect_task = asyncio.open_connection(
                host=self.host, port=self.port
            )
            self.reader, self.writer = await asyncio.wait_for(
                self.connect_task, timeout=self.connection_timeout
            )
            # Read welcome message
            read_bytes = await asyncio.wait_for(
                self.reader.readuntil("\n".encode()), timeout=self.read_timeout
            )
            # The controller identifies itself with a "TPL" banner.
            if "TPL" not in read_bytes.decode().rstrip():
                raise RuntimeError("No welcome message from controller.")
            self.log.debug(
                f"connected: {read_bytes.decode().rstrip()} : Starting authentication"
            )
            if not self.auto_auth:
                auth_str = f"AUTH PLAIN {self.user} {self.password}\r\n"
                # Write authentication
                self.writer.write(auth_str.encode())
                await self.writer.drain()
            # Get reply from auth. This is published even in auto_auth mode
            read_bytes = await asyncio.wait_for(
                self.reader.readuntil("\n".encode()), timeout=self.read_timeout
            )
            # Expected reply shape: "AUTH OK <read_level> <write_level>\n".
            s = re.search(
                r"AUTH\s+(?P<AUTH>\S+)\s+(?P<read_level>\d)\s+(?P<write_level>\d)\n",
                read_bytes.decode(),
            )
            if not s or s.group("AUTH") != "OK":
                await self.disconnect()
                raise RuntimeError("Not authorized.")
            self.read_level = int(s.group("read_level"))
            self.write_level = int(s.group("write_level"))
            # Start loop to monitor replied.
            self.log.debug("Start controller reply handler.")
            # NOTE(review): "reply_hander" looks like a typo for
            # "reply_handler", but it must match the method defined elsewhere.
            self.reply_handler_loop = asyncio.create_task(self.reply_hander())
            # Start status loop
            self.log.debug("Start status loop.")
            self.run_status_loop = True
            self.status_loop_future = asyncio.create_task(self.status_loop())
        except Exception:
            self.log.exception("Error connecting to DIMM controller.")
            self.status["status"] = DIMMStatus["ERROR"]
        else:
            self.status["status"] = DIMMStatus["RUNNING"]
def handle_message(self, reader, writer):
    """Dispatch one incoming command: IAMAT (client location update),
    WHATSAT (Places lookup near another client) or SERVERMSGAT (flooded
    server-to-server update).  Responses and the full exchange are also
    appended to this server's log file."""
    data = yield from reader.read(100)
    message = data.decode()
    addr = writer.get_extra_info('peername')
    print(self.name, 'received message', message)
    res = ''
    #try:
    command = message.split()[0]
    #first possible command
    if (command == 'IAMAT'):
        clientID, latlng, time = message.split()[1:]
        #check latlng: must be +/-lat immediately followed by +/-lng
        assert latlng[0] == '+' or latlng[0] == '-'
        index = max(latlng[1:].find('-'), latlng[1:].find('+')) + 1
        assert (index != 0)
        float(latlng[:index])
        float(latlng[index:])
        #check clientTime
        clientTime = datetime.datetime.utcfromtimestamp(float(time))
        currTime = datetime.datetime.utcnow()
        timeDiff = (currTime - clientTime).total_seconds()
        #response: AT <server> <skew> <client> <latlng> <time>
        sign = '+' if timeDiff > 0 else ''
        res = "AT {} {}{} {} {} {}".format(self.name, sign, timeDiff, clientID, latlng, time)
        #spread to others
        self.spread_message("SERVERMSG" + res)
    elif (command == "WHATSAT"):
        radius = message.split()[2]
        infoBound = int(message.split()[3])
        clientID = message.split()[1]
        _, otherClientInfo = self.find_most_recent_message(clientID)
        if (otherClientInfo == ''):
            #try to wait for propagation
            yield from asyncio.sleep(3)
            _, otherClientInfo = self.find_most_recent_message(clientID)
        assert otherClientInfo != ''
        latlng = otherClientInfo.split()[-2]
        index = max(latlng[1:].find('-'), latlng[1:].find('+')) + 1
        latlngstr = latlng[:index] + ',' + latlng[index:]
        key = 'AIzaSyBLb-TWOXTNMqa1Carx5XJG8U8fM6q6P3I'
        url = "https://maps.googleapis.com/maps/api/place/nearbysearch/json?key={}&location={}&radius={}".format(
            key, latlngstr, radius)
        url = urllib.parse.urlsplit(url)
        # Hand-rolled HTTPS GET to the Places API over a raw TLS stream.
        reader, writer = yield from asyncio.open_connection(url.hostname, 443, ssl=True)
        query = ('GET {path}?{query} HTTP/1.0\r\n'
                 'Host: {hostname}\r\n'
                 '\r\n').format(path=url.path,
                                query=url.query,
                                hostname=url.hostname)
        writer.write(query.encode('latin1'))
        lines = yield from reader.read()
        writer.close()
        lines = lines.decode()
        # Strip HTTP headers: keep everything from the first '{' onward.
        lines = lines[lines.find('{'):]
        d = json.loads(lines)
        d['results'] = d['results'][:infoBound]
        result = json.dumps(d, indent=1)
        res = otherClientInfo + '\n' + result + '\n'
    elif (command == "SERVERMSGAT"):
        #don't respond
        res = 'NONE'
        #spread to others
        self.spread_message(message)
    else:
        # NOTE(review): a bare `raise` with no active exception raises
        # RuntimeError here; presumably intended for the commented-out
        # try/except below to catch malformed commands — confirm.
        raise
    #except: #error
        #print("Unexpected error:", sys.exc_info())
        # res="? {}".format(message)
    #finally:
    if (res != 'NONE'):
        print(res)
        writer.write(res.encode())
        yield from writer.drain()
        writer.close()
    with open(self.name + ".txt", "a") as file:
        file.write("Received: {}\n".format(message))
        file.write("Responded: {}\n".format(res))
def test_open_connection(self):
    """Smoke-test asyncio.open_connection against the local test server."""
    with test_utils.run_test_server() as httpd:
        future = asyncio.open_connection(*httpd.address, loop=self.loop)
        self._basetest_open_connection(future)
def _process_connect_init(self):
    """
    Process the INFO line received from the server and CONNECT to the
    server with authentication. Also responsible for setting up the
    reading, ping-interval and flusher tasks for the client.

    Raises NatsError if TLS is required but no context was supplied, if
    the transport exposes no socket, or if the server answers with -ERR.
    """
    self._status = Client.CONNECTING

    # FIXME: Add readline timeout
    info_line = yield from self._io_reader.readline()
    # Line format: "INFO <json>\r\n" -- keep only the JSON payload.
    _, info = info_line.split(INFO_OP + _SPC_, 1)
    self._server_info = json.loads(info.decode())
    self._max_payload = self._server_info["max_payload"]

    if self._server_info['tls_required']:
        ssl_context = self.options.get('tls')
        if not ssl_context:
            raise NatsError('no ssl context provided')
        transport = self._io_writer.transport
        sock = transport.get_extra_info('socket')
        if not sock:
            # This shouldn't happen
            raise NatsError('unable to get socket')

        yield from self._io_writer.drain()  # just in case something is left

        # Upgrade the existing plaintext socket to TLS: passing `sock` to
        # open_connection negotiates TLS on the established connection
        # rather than dialing a new one.
        self._io_reader, self._io_writer = \
            yield from asyncio.open_connection(
                loop=self._loop,
                limit=DEFAULT_BUFFER_SIZE,
                sock=sock,
                ssl=ssl_context,
                server_hostname=self._current_server.uri.hostname,
            )

    # Refresh state of parser upon reconnect.
    if self.is_reconnecting:
        self._ps.reset()

    # Send CONNECT followed by a PING; the matching PONG confirms the
    # handshake succeeded.
    connect_cmd = self._connect_command()
    self._io_writer.write(connect_cmd)
    self._io_writer.write(PING_PROTO)
    yield from self._io_writer.drain()

    # FIXME: Add readline timeout
    next_op = yield from self._io_reader.readline()
    if self.options["verbose"] and OK_OP in next_op:
        # In verbose mode the server echoes +OK first; skip it.
        next_op = yield from self._io_reader.readline()

    if ERR_OP in next_op:
        err_line = next_op.decode()
        _, err_msg = err_line.split(" ", 1)
        # FIXME: Maybe handling could be more special here,
        # checking for ErrAuthorization for example.
        # yield from self._process_err(err_msg)
        raise NatsError("nats: " + err_msg.rstrip('\r\n'))

    if PONG_PROTO in next_op:
        self._status = Client.CONNECTED

    self._reading_task = self._loop.create_task(self._read_loop())
    self._pongs = []
    self._pings_outstanding = 0
    self._ping_interval_task = self._loop.create_task(
        self._ping_interval())

    # Task for kicking the flusher queue
    self._flusher_task = self._loop.create_task(self._flusher())
def _client():
    """Connect to the local test server, then half-close and drop the socket."""
    reader, writer = yield from asyncio.open_connection('127.0.0.1',
                                                        unused_tcp_port)
    assert isinstance(reader, asyncio.StreamReader)
    writer.write_eof()
    writer.close()
def _client():
    """Send garbage to the local server and expect an HTTP 400 response."""
    reader, writer = yield from asyncio.open_connection('127.0.0.1',
                                                        unused_tcp_port)
    assert isinstance(reader, asyncio.StreamReader)
    writer.write(b"a98asdfyhsfhhb2l3irjwef\n")
    payload = yield from asyncio.wait_for(reader.read(1024), 1.0)
    assert payload.startswith(b"HTTP/1.1 400 Bad Request")
def _check_URL_resource(self, ssl_context, timestamp, r, counter,
                        testing_array, test_results):
    """Check one URL resource.

    Request the header for each resource and try to determine if it is
    resolvable. Record a log entry if an exception occurs, or the server
    returns a 400/500 error.

    Note the extreme care to close all opened sockets: the calls to
    writer.close() all throughout the body. If any sockets were to remain
    open, the asyncio system attempts to close them at the end of the
    program, and this fails, causing exceptions to be generated.

    Arguments:
    ssl_context -- The SSL context to use when making HTTP requests.
    timestamp -- The timestamp to use when storing the test result.
    r -- The tuple containing a row from the doi_object table with the
        details of the link to be tested.
    counter -- The key of testing_array corresponding to this test. If the
        key is valid, and the link is valid, the key/value pair will be
        removed from testing_array.
    testing_array -- The dict containing the details of the current batch
        of tests.
    test_results -- The dict containing the details of test results.
    """
    # Hmm, why did we put the data in testing_array?
    # See _run_tests for the same code.
    url_str = url_str_original = r['link']
    SCHEME_NOT_HTTP_FORMAT = ('Error: Scheme is not http(s): ')
    URL_PARSE_ERROR_FORMAT = ('Error: Parsing URL failed')
    STATUS_ERROR_FORMAT = '4/500s: Status {}'
    NO_STATUS_ERROR_FORMAT = ('Error: Server did not return an '
                              'HTTP status code')
    REDIRECT_SAME_FORMAT = ('Error: Redirect URL same as original: ')
    EXCEPTION_FORMAT = 'Error: {}'
    TOO_MANY_REDIRECTS_FORMAT = ('Error: too many redirects: '
                                 'FINAL URL: {}')
    SUCCESS = 'SUCCESS'
    try:
        # First time round (i.e., before attempting to follow any
        # redirects), do a small sleep. This helps avoid
        # DoS attacking the server.
        # NB This "should" say "yield from asyncio.sleep(0.3)",
        # but we do really want the whole system to pause at
        # this point, to give a delay between each
        # connection initiation.
        time.sleep(0.3)
        for redirect_count in range(0, self.ATTEMPTS_MAX):
            url = urllib.parse.urlsplit(url_str)
            if not url.scheme.startswith('http'):
                # The scheme must begin with "http",
                # i.e., be either "http" or "https".
                self._handle_one_error(url_str_original,
                                       SCHEME_NOT_HTTP_FORMAT,
                                       timestamp, testing_array,
                                       counter, test_results)
                if self._debug:
                    print("DEBUG:", counter, "Not opening writer",
                          file=sys.stderr)
                return
            if not url.hostname:
                # Something wrong with the parsing of the URL,
                # possibly "http:/only-one-slash.com".
                self._handle_one_error(url_str_original,
                                       URL_PARSE_ERROR_FORMAT,
                                       timestamp, testing_array,
                                       counter, test_results)
                if self._debug:
                    print("DEBUG:", counter, "Not opening writer",
                          file=sys.stderr)
                return
            # Scheme OK, so now construct the query path to be sent to the
            # server in a HEAD request.
            url_path = url.path
            # Handle the case of "http://hostname.but.no.trailing.slash"
            if url_path == '':
                url_path = '/'
            if url.query != '':
                url_path += "?" + url.query
            if self._debug:
                print('DEBUG: Counter:', counter,
                      'redirect_count:', redirect_count,
                      'url_str:', url_str,
                      file=sys.stderr)
            # Determine the port to use for the connection.
            # Since 'https' contains 'http' as a prefix,
            # check for the former.
            if url.scheme.startswith('https'):
                # For HTTPS, default to port 443.
                port = url.port if url.port else 443
                if self._debug:
                    print("DEBUG: Opening HTTPS connection to "
                          "host {}, port {}".format(url.hostname, port),
                          file=sys.stderr)
                reader, writer = yield from \
                    asyncio.open_connection(url.hostname, port,
                                            ssl=ssl_context)
            else:
                # "Plain" HTTP request; port defaults to 80.
                port = url.port if url.port else 80
                if self._debug:
                    print("DEBUG: Opening HTTP connection to "
                          "host {}, port {}".format(url.hostname, port),
                          file=sys.stderr)
                reader, writer = yield from \
                    asyncio.open_connection(url.hostname, port)
            query = self.HEAD_QUERY_FORMAT.format(
                url_path=url_path, url=url)
            if self._debug:
                print("DEBUG:", counter, "Sending query string: ", query,
                      file=sys.stderr)
            try:
                writer.write(query.encode("utf-8"))
            except Exception as e:
                if self._debug:
                    print("DEBUG:", counter, "Got exception attempting "
                          "to write", file=sys.stderr)
                    print("DEBUG:", counter, "Closing writer",
                          file=sys.stderr)
                writer.close()
                return
            # Await and read the response.
            while True:
                line = yield from reader.readline()
                if not line:
                    # End of file read.
                    break
                # readline() returns a bytes, so it must be decoded.
                line = line.decode("utf-8").rstrip()
                if line.startswith('<'):
                    # Oh dear, the server is now sending the page.
                    # This has been seen with an IIS/6.0 server.
                    break
                if line:
                    # The next two lines are not used for now,
                    # but might be useful in the future.
                    # Apparently, there are some pages that are
                    # "soft 404s", i.e., they return a status code of
                    # (say) 200, but the content of the page is text
                    # which says "No such page" or the like.
                    # So in future, we may
                    # scrape pages to see if the page returned actually
                    # reports that the page is missing/deleted.
                    # if line.startswith('Content-Type'):
                    #     mType = line
                    if self._debug:
                        print('DEBUG:', counter, line, file=sys.stderr)
                    if line.startswith('HTTP/1.'):
                        mStatus = line
                    if line.startswith(('Location:', 'location:')):
                        # NOTE(review): on a 301/302 without a Location
                        # header, `location` stays unbound and the generic
                        # except below catches the UnboundLocalError.
                        location = line.split()[1]
                else:
                    # Empty line was read; end of headers.
                    break
            if 'mStatus' not in locals():
                # Made it through the loop without setting mStatus,
                # which means (for some reason) we didn't get
                # an HTTP status code.
                self._handle_one_error(url_str_original,
                                       NO_STATUS_ERROR_FORMAT,
                                       timestamp, testing_array,
                                       counter, test_results)
                if self._debug:
                    print("DEBUG:", counter, "Closing writer",
                          file=sys.stderr)
                writer.close()
                return
            if mStatus:
                # The status line is "HTTP/1.x 300 ....", so the status
                # code is the second field after split,
                # i.e., at position 1.
                status_code = int(mStatus.split()[1])
                # Now treat the different status codes as appropriate.
                if status_code > 399:
                    # Status > 399 is an error, e.g., a "404".
                    self._handle_one_error(url_str_original,
                                           STATUS_ERROR_FORMAT.format(
                                               mStatus),
                                           timestamp, testing_array,
                                           counter, test_results)
                    if self._debug:
                        print("DEBUG:", counter, "Closing writer",
                              file=sys.stderr)
                    writer.close()
                    return
                elif status_code == 301 or status_code == 302:
                    # Handle a redirection.
                    location = self.construct_absolute_path(url.scheme,
                                                            url.hostname,
                                                            url.port,
                                                            location)
                    if url_str != location:
                        # Follow a redirect.
                        url_str = location
                        # This is the only branch that falls through and
                        # leads to the next iteration of the for loop.
                    else:
                        # The redirected URL was the same as the original.
                        # Don't proceed any further.
                        self._handle_one_error(url_str_original,
                                               REDIRECT_SAME_FORMAT,
                                               timestamp, testing_array,
                                               counter, test_results)
                        if self._debug:
                            print("DEBUG:", counter, "Closing writer",
                                  file=sys.stderr)
                        writer.close()
                        return
                else:
                    # Success. This is indicated by deleting
                    # the corresponding element of testing_array.
                    try:
                        self._mark_status_and_timestamp(url_str_original,
                                                        SUCCESS,
                                                        timestamp)
                        del testing_array[counter]
                    except KeyError:
                        pass
                    if self._debug:
                        print("DEBUG:", counter, "Closing writer",
                              file=sys.stderr)
                    writer.close()
                    return
            # Broken out of the loop: we may be about to follow a
            # redirect. Close the existing writer.
            if self._debug:
                print("DEBUG:", counter, "Closing writer",
                      file=sys.stderr)
            writer.close()
        # "Successful" conclusion of the for loop. But this means
        # we have now followed too many redirects.
        self._handle_one_error(url_str_original,
                               TOO_MANY_REDIRECTS_FORMAT.format(
                                   url_str),
                               timestamp, testing_array,
                               counter, test_results)
        if self._debug:
            print("DEBUG:", counter, "Closing writer",
                  file=sys.stderr)
        writer.close()
        return
    # An UnboundLocalError occurs if mStatus is tested without
    # having been set. Handle this using the catch-all handler
    # below.
    # except UnboundLocalError as e:
    #     _handle_one_error(result_list, error_count, testing_array,
    #                       creator,
    #                       EXCEPTION_FORMAT.format(doi_id,
    #                                               url_str, repr(e)),
    #                       counter)
    except asyncio.futures.CancelledError:
        # This is caused by _run_tests() cancelling the task
        # because of a timeout.
        if self._debug:
            print("DEBUG:", counter, "Cancelled task due to timeout",
                  file=sys.stderr)
        if 'writer' in locals():
            if self._debug:
                print("DEBUG:", counter, "Closing writer",
                      file=sys.stderr)
            writer.close()
    except Exception as e:
        if 'writer' in locals():
            if self._debug:
                print("DEBUG:", counter, "Closing writer",
                      file=sys.stderr)
            writer.close()
        self._handle_one_error(url_str_original,
                               EXCEPTION_FORMAT.format(e),
                               timestamp, testing_array,
                               counter, test_results)
async def connect(
        self) -> Tuple[asyncio.StreamReader, asyncio.StreamWriter]:
    """Dial the remote peer's TCP endpoint.

    The dial is wrapped in cancellable_wait so it honours both the
    cancel token and REPLY_TIMEOUT.
    """
    address = self.remote.address
    dial_coro = asyncio.open_connection(host=address.ip,
                                        port=address.tcp_port)
    return await self.cancel_token.cancellable_wait(
        dial_coro, timeout=REPLY_TIMEOUT)
def open_connection(host=None, port=None, *,
                    loop=None, limit=_DEFAULT_LIMIT, **kwds):
    """Open a plain asyncio stream and wrap both ends in LEAP adapters.

    Returns a (LeapReader, LeapWriter) pair; extra keyword arguments are
    forwarded to asyncio.open_connection().
    """
    reader, writer = yield from asyncio.open_connection(
        host, port, loop=loop, limit=limit, **kwds)
    return LeapReader(reader), LeapWriter(writer)
def _check_local_connection(self, listen_port, delay=None):
    """Open a local connection and test if an input line is echoed back."""
    stream = yield from asyncio.open_connection(None, listen_port)
    yield from self._check_echo_line(stream[0], stream[1], delay=delay)
# Scratch script mixing curio and asyncio experiments.
from curio import run, run_in_thread
import time
import asyncio


def echo(var):
    """Sleep one second (blocking), then print the given value."""
    time.sleep(1)
    print('echo', var)

# run(echo, 'Yi')
# Run the blocking echo() in a curio worker thread.
g = run_in_thread(echo, 'Yi')
run(g)
# NOTE(review): `host` is never defined anywhere in this script, so the
# next line raises NameError at runtime. Even with a valid host, the
# coroutine returned by open_connection() is never awaited.
asyncio.open_connection(host, 80)
loop = asyncio.events.get_event_loop()
# NOTE(review): create_connection() requires at least a protocol factory
# (plus host/port) and returns a coroutine that is never awaited here --
# presumably leftover exploratory code.
loop.create_connection()
async def _start(self):
    """Main peer-connection loop.

    Repeatedly takes a peer from the available_peers queue, opens a TCP
    connection, performs the BitTorrent handshake, and consumes the
    peer's message stream until stopped or the connection fails.
    """
    while 'stopped' not in self.my_state:
        # print("<<<<<<<<<<<<<<<<<<<<<<<========================PEER CONNECTION=========================>>>>>>>>>>>>>>>>>>>>>>>>>>>>>")
        ip, port = await self.available_peers.get()
        self.ip = ip
        self.port = port
        logging.info('Got assigned peer with: {ip}:{port}'.format(
            ip=ip, port=port))
        try:
            # TODO For some reason it does not seem to work to open a new
            # connection if the first one drops (i.e. second loop).
            # open_connection returns a coroutine immediately, no blocking yet
            connect = asyncio.open_connection(ip, port)
            # awaiting here yields control back to the event loop
            self.reader, self.writer = await connect
            logging.info('Connection open to peer: {ip}'.format(ip=ip))

            # It's our responsibility to initiate the handshake.
            buffer = await self._send_handshake()

            # TODO Add support for sending data
            # Sending BitField is optional and not needed when client does
            # not have any pieces. Thus we do not send any bitfield message

            # The default state for a connection is that peer is not
            # interested and we are choked
            self.my_state.append('choked')

            # Let the peer know we're interested in downloading pieces
            await self._send_interested()
            self.my_state.append('interested')

            # Start reading responses as a stream of messages for as
            # long as the connection is open and data is transmitted
            async for message in PeerStreamIterator(self.reader, buffer):
                # print("i am alive {peer}".format(peer=self.remote_id))
                if 'stopped' in self.my_state:
                    break
                if type(message) is BitField:
                    # logging.info("receive BitField from peer {peer}".format(peer=self.remote_id))
                    self.piece_manager.add_peer(self.remote_id,
                                                message.bitfield)
                # elif type(message) is Interested:
                #     # logging.info("receive Interested from peer {peer}".format(peer=self.remote_id))
                #     self.peer_state.append('interested')
                # elif type(message) is NotInterested:
                #     # logging.info("receive NotInterested from peer {peer}".format(peer=self.remote_id))
                #     if 'interested' in self.peer_state:
                #         self.peer_state.remove('interested')
                elif type(message) is Choke:
                    # logging.info("receive Choke from peer {peer}".format(peer=self.remote_id))
                    self.my_state.append('choked')
                elif type(message) is Unchoke:
                    # logging.info("receive Unchoke from peer {peer}".format(peer=self.remote_id))
                    if 'choked' in self.my_state:
                        self.my_state.remove('choked')
                elif type(message) is Have:
                    # logging.info("receive Have from peer {peer}".format(peer=self.remote_id))
                    self.piece_manager.update_peer(self.remote_id,
                                                   message.index)
                elif type(message) is KeepAlive:
                    # logging.info("receive KeepAlive from peer {peer}".format(peer=self.remote_id))
                    await asyncio.sleep(1)
                    pass
                elif type(message) is Piece:
                    # logging.info("receive Piece from peer {peer}".format(peer=self.remote_id))
                    self.my_state.remove('pending_request')
                    self.on_block_cb(remote_id=self.remote_id,
                                     piece_index=message.index,
                                     block_offset=message.begin,
                                     data=message.block)
                elif type(message) is Request:
                    logging.info("receive Request from peer {peer}".format(
                        peer=self.remote_id))
                    # TODO Add support for sending data
                    logging.info('Ignoring the received Request message.')
                elif type(message) is Cancel:
                    logging.info("receive Cancel from peer {peer}".format(
                        peer=self.remote_id))
                    # TODO Add support for sending data
                    logging.info('Ignoring the received Cancel message.')

                # Send block request to remote peer if we're interested
                if 'choked' not in self.my_state:
                    if 'interested' in self.my_state:
                        if 'pending_request' not in self.my_state:
                            self.my_state.append('pending_request')
                            await self._request_piece()
        except ProtocolError as e:
            logging.exception(
                'Connection to peer with: {ip}:{port} Protocol error'.
                format(ip=ip, port=port))
            # logging.warning('Connection to peer with: {ip}:{port} Protocol error'.format(ip=ip, port=port))
        except (ConnectionRefusedError):
            logging.warning(
                'Connection to peer with: {ip}:{port} refused'.format(
                    ip=ip, port=port))
        except (TimeoutError):
            logging.warning(
                'Connection to peer with: {ip}:{port} timeout'.format(
                    ip=ip, port=port))
        except (ConnectionResetError, CancelledError):
            logging.warning(
                'Connection to peer with: {ip}:{port} closed'.format(
                    ip=ip, port=port))
        except Exception as e:
            logging.exception('An error occurred')
            self.cancel()
            raise e
        # Clean up this connection before looping for the next peer.
        self.cancel()
async def httpproxy(self, req):
    """Serve one proxied HTTP request.

    CONNECT requests are tunnelled (or, if proxy_ssl_intercept is set,
    handled by the -- incomplete -- interception branch); all other
    methods are forwarded as plain HTTP, with optional keep-alive.
    """
    # self.session.invisible!!!!
    if req.method == 'CONNECT':
        rhost, rport = req.uri.split(':')
        # https://tools.ietf.org/html/rfc7231#section-4.3.6
        if not self.session.proxy_ssl_intercept:
            # not intercepting SSL traffic, acting as a generic proxy
            try:
                remote_reader, remote_writer = await asyncio.wait_for(
                    asyncio.open_connection(host=rhost, port=int(rport)),
                    timeout=1)
            except Exception as e:
                await self.logger.exception(
                    'Failed to create remote connection to %s:%s!'
                    % (rhost, rport))
                return
            # indicating to the client that TCP socket has opened towards
            # the remote host
            await asyncio.wait_for(
                self.send_data(HTTP200Resp().to_bytes()), timeout=1)
            # Pump bytes in both directions until the session is closed.
            self.loop.create_task(
                self.proxy_forwarder(
                    remote_reader, self.cwriter,
                    '%s:%d' % (rhost, int(rport)),
                    self.session.connection.get_local_address()))
            self.loop.create_task(
                self.proxy_forwarder(
                    self.creader, remote_writer,
                    self.session.connection.get_local_address(),
                    '%s:%d' % (rhost, int(rport))))
            await asyncio.wait_for(
                self.session.proxy_closed.wait(), timeout=None)
        else:
            # NOTE(review): this interception branch looks unfinished --
            # `remote_reader`/`remote_writer` are never opened here, so the
            # first loop iteration raises NameError. The bare print()
            # statements appear to be leftover debugging.
            print('a')
            while not self.session.close_session.is_set():
                print('aa')
                data = await self.creader.read(-1)
                print('=====request======')
                print(data)
                # sending data to remote host
                remote_writer.write(data)
                await remote_writer.drain()
                data_return = await remote_reader.read()
                print('=======response===============')
                print(data_return)
                await asyncio.wait_for(
                    self.send_data(data_return), timeout=1)
                # req = await asyncio.wait_for(self.parse_message(), timeout = 10)
    else:
        while not self.session.close_session.is_set():
            o = urlparse(req.uri)
            # Split "host:port" if a port is present; default to 80.
            if o.netloc.find(':') != -1:
                rhost, rport = o.netloc.split(':')
            else:
                rhost = o.netloc
                rport = 80
            if o.query != '':
                uri = '?'.join([o.path, o.query])
            else:
                uri = o.path
            # removing proxy authorization header
            req.remove_header('proxy-authorization')
            req_new = HTTPRequest.construct(req.method, uri, req.headers,
                                            req.body, req.version)
            await self.log('======== request sent ============',
                           logging.DEBUG)
            # print(req_new)
            try:
                remote_reader, remote_writer = await asyncio.wait_for(
                    asyncio.open_connection(host=rhost, port=int(rport)),
                    timeout=1)
            except Exception as e:
                # NOTE(review): logger.exception() is called without a
                # message here -- presumably it requires one; verify.
                await self.logger.exception()
                return
            # sending data to remote host
            remote_writer.write(req_new.to_bytes())
            await remote_writer.drain()
            resp = await asyncio.wait_for(
                HTTPResponse.from_streamreader(remote_reader), timeout=1)
            await self.log('=== proxyying response ====', logging.DEBUG)
            await asyncio.wait_for(self.send_data(resp.to_bytes()),
                                   timeout=None)
            await self.log('=== PROXY === \r\n %s \r\n %s ======'
                           % (req_new, resp))
            if req.props.connection is not None and \
                    req.props.connection == HTTPConnection.KEEP_ALIVE:
                # Keep-alive: read the next request on the same client
                # connection and loop again.
                req = await asyncio.wait_for(
                    self.parse_message(timeout=None), timeout=None)
                if req is None:
                    self.session.close_session.set()
                    return
            else:
                await self.log('Closing connection!', logging.DEBUG)
                self.session.close_session.set()
                remote_writer.close()
                self.cwriter.close()
                return
async def open_connection(self, host, port, local_addr, lbind):
    """Open (or reuse) an outbound stream for this remote endpoint.

    Depending on configuration, this dials directly, tunnels over SSH,
    uses a backward connection, a unix socket, or a (possibly TLS)
    TCP connection. When reuse/ssh is enabled, the first caller creates
    the streams and later callers await the cached future.
    """
    if self.reuse or self.ssh:
        if self.streams is None or self.streams.done() and (
                self.reuse and not self.handler):
            # First caller (or a stale cached result): create a fresh
            # future that concurrent callers will await below.
            self.streams = asyncio.get_event_loop().create_future()
        else:
            if not self.streams.done():
                await self.streams
            return self.streams.result()
    try:
        # Resolve the local bind address: instance-level lbind wins over
        # the per-call one; the literal 'in' means "inherit local_addr".
        local_addr = local_addr if self.lbind == 'in' else (self.lbind, 0) if self.lbind else \
                     local_addr if lbind == 'in' else (lbind, 0) if lbind else None
        # Pick the socket family from the bind address (':' => IPv6).
        family = 0 if local_addr is None else socket.AF_INET6 if ':' in local_addr[
            0] else socket.AF_INET
        if self.direct:
            if host == 'tunnel':
                raise Exception('Unknown tunnel endpoint')
            wait = asyncio.open_connection(host=host, port=port,
                                           local_addr=local_addr,
                                           family=family)
        elif self.ssh:
            try:
                import asyncssh
                # Graft asyncio.StreamReader helpers onto asyncssh readers
                # so both stream types expose the same read_* interface.
                for s in ('read_', 'read_n', 'read_until'):
                    setattr(asyncssh.SSHReader, s,
                            getattr(asyncio.StreamReader, s))
            except Exception:
                raise Exception('Missing library: "pip3 install asyncssh"')
            username, password = self.auth.decode().split(':', 1)
            # A password beginning with ':' denotes a client key path.
            if password.startswith(':'):
                client_keys = [password[1:]]
                password = None
            else:
                client_keys = None
            conn = await asyncssh.connect(host=self.host_name,
                                          port=self.port,
                                          local_addr=local_addr,
                                          family=family,
                                          x509_trusted_certs=None,
                                          known_hosts=None,
                                          username=username,
                                          password=password,
                                          client_keys=client_keys)
            if not self.streams.done():
                self.streams.set_result((conn, None))
            return conn, None
        elif self.backward:
            wait = self.backward.open_connection()
        elif self.unix:
            wait = asyncio.open_unix_connection(
                path=self.bind, ssl=self.sslclient,
                server_hostname='' if self.sslclient else None)
        else:
            wait = asyncio.open_connection(host=self.host_name,
                                           port=self.port,
                                           ssl=self.sslclient,
                                           local_addr=local_addr,
                                           family=family)
        reader, writer = await asyncio.wait_for(wait,
                                                timeout=SOCKET_TIMEOUT)
    except Exception as ex:
        if self.reuse:
            # Propagate the failure to any waiters, then drop the cache
            # so the next call retries.
            self.streams.set_exception(ex)
            self.streams = None
        raise
    return reader, writer
def create_connection(address, *, db=None, password=None, ssl=None,
                      encoding=None, loop=None):
    """Creates redis connection.

    Opens connection to Redis server specified by address argument.
    Address argument is similar to socket address argument, ie:
    * when address is a tuple it represents (host, port) pair;
    * when address is a str it represents unix domain socket path.
    (no other address formats supported)

    SSL argument is passed through to asyncio.create_connection.
    By default SSL/TLS is not used.

    Encoding argument can be used to decode byte-replies to strings.
    By default no decoding is done.

    Return value is RedisConnection instance.

    This function is a coroutine.
    """
    assert isinstance(address, (tuple, list, str)), "tuple or str expected"

    if isinstance(address, (list, tuple)):
        host, port = address
        logger.debug("Creating tcp connection to %r", address)
        reader, writer = yield from asyncio.open_connection(
            host, port, ssl=ssl, loop=loop)
        sock = writer.transport.get_extra_info('socket')
        if sock is not None:
            # Disable Nagle's algorithm: small request/response packets
            # should be sent immediately.
            sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
            # Record the actual peer we connected to.
            address = sock.getpeername()
        address = tuple(address[:2])
    else:
        logger.debug("Creating unix connection to %r", address)
        reader, writer = yield from asyncio.open_unix_connection(
            address, ssl=ssl, loop=loop)
        sock = writer.transport.get_extra_info('socket')
        if sock is not None:
            address = sock.getpeername()
    conn = RedisConnection(reader, writer, encoding=encoding,
                           address=address, loop=loop)

    try:
        # Authenticate and select the database before handing the
        # connection to the caller.
        if password is not None:
            yield from conn.auth(password)
        if db is not None:
            yield from conn.select(db)
    except Exception:
        # Never leak a half-initialised connection: close and wait for
        # teardown before re-raising.
        conn.close()
        yield from conn.wait_closed()
        raise
    return conn
def mock_open_unix_connection(self, expected_path, sock, path):
    """Test double for open_unix_connection: verify the requested path,
    then wrap the pre-made socket via asyncio.open_connection."""
    self.assertEqual(expected_path, path)
    stream_coro = asyncio.open_connection(sock=sock)
    return stream_coro
async def do_connect_to_padre(loop):
    """Open a TCP stream to the local padre instance and record it."""
    dial_coro = asyncio.open_connection("127.0.0.1", context.padre.port)
    streams = await asyncio.wait_for(dial_coro, int(TIMEOUT), loop=loop)
    context.connections.append(streams)
def _request_authentication(self):
    """Send the MySQL HandshakeResponse packet and complete authentication.

    Upgrades the connection to TLS first when an SSL context is
    configured (MySQL negotiates TLS mid-stream, after a short SSL
    request packet), then builds and sends the auth packet and handles
    a possible AuthSwitchRequest from the server.
    """
    # https://dev.mysql.com/doc/internals/en/connection-phase-packets.html#packet-Protocol::HandshakeResponse
    if int(self.server_version.split('.', 1)[0]) >= 5:
        self.client_flag |= CLIENT.MULTI_RESULTS

    if self.user is None:
        raise ValueError("Did not specify a username")

    if self._ssl_context:
        # capablities, max packet, charset
        data = struct.pack('<IIB', self.client_flag, 16777216, 33)
        data += b'\x00' * (32 - len(data))
        self.write_packet(data)

        # Stop sending events to data_received
        self._writer.transport.pause_reading()

        # Get the raw socket from the transport
        raw_sock = self._writer.transport.get_extra_info('socket',
                                                         default=None)
        if raw_sock is None:
            raise RuntimeError("Transport does not expose socket instance")

        raw_sock = raw_sock.dup()
        self._writer.transport.close()
        # MySQL expects TLS negotiation to happen in the middle of a
        # TCP connection not at start. Passing in a socket to
        # open_connection will cause it to negotiate TLS on an existing
        # connection not initiate a new one.
        self._reader, self._writer = yield from asyncio.open_connection(
            sock=raw_sock, ssl=self._ssl_context, loop=self._loop,
            server_hostname=self._host)

    charset_id = charset_by_name(self.charset).id
    if isinstance(self.user, str):
        _user = self.user.encode(self.encoding)

    data_init = struct.pack('<iIB23s', self.client_flag, 1,
                            charset_id, b'')
    data = data_init + _user + b'\0'

    authresp = b''

    auth_plugin = self._client_auth_plugin
    if not self._client_auth_plugin:
        # Contains the auth plugin from handshake
        auth_plugin = self._server_auth_plugin

    if auth_plugin in ('', 'mysql_native_password'):
        authresp = _scramble(self._password.encode('latin1'), self.salt)
    elif auth_plugin in ('', 'mysql_clear_password'):
        # NOTE(review): '' already matched the previous branch, so the
        # empty-string case here is unreachable -- verify intent.
        authresp = self._password.encode('latin1') + b'\0'

    if self.server_capabilities & CLIENT.PLUGIN_AUTH_LENENC_CLIENT_DATA:
        data += lenenc_int(len(authresp)) + authresp
    elif self.server_capabilities & CLIENT.SECURE_CONNECTION:
        data += struct.pack('B', len(authresp)) + authresp
    else:  # pragma: no cover
        # not testing against servers without secure auth (>=5.0)
        data += authresp + b'\0'

    if self._db and self.server_capabilities & CLIENT.CONNECT_WITH_DB:
        if isinstance(self._db, str):
            db = self._db.encode(self.encoding)
        else:
            db = self._db
        data += db + b'\0'

    if self.server_capabilities & CLIENT.PLUGIN_AUTH:
        name = auth_plugin
        if isinstance(name, str):
            name = name.encode('ascii')
        data += name + b'\0'

    self._auth_plugin_used = auth_plugin

    self.write_packet(data)
    auth_packet = yield from self._read_packet()

    # if authentication method isn't accepted the first byte
    # will have the octet 254
    if auth_packet.is_auth_switch_request():
        # https://dev.mysql.com/doc/internals/en/
        # connection-phase-packets.html#packet-Protocol::AuthSwitchRequest
        auth_packet.read_uint8()  # 0xfe packet identifier
        plugin_name = auth_packet.read_string()
        if (self.server_capabilities & CLIENT.PLUGIN_AUTH and
                plugin_name is not None):
            auth_packet = yield from self._process_auth(
                plugin_name, auth_packet)
        else:
            # send legacy handshake
            data = _scramble_323(self._password.encode('latin1'),
                                 self.salt) + b'\0'
            self.write_packet(data)
            auth_packet = yield from self._read_packet()
# head[1]="https://ceit.aut.ac.ir/~9431018/filter.html" # return logger.info('%sWARPING <%s %s>' % ('[%s] ' % ident if verbose >= 1 else '', head[0], head[1])) new_head = ' '.join([head[0], path, head[2]]) m = REGEX_HOST.search(phost) if m: host = m.group(1) port = int(m.group(2)) else: host = phost port = 80 try: req_reader, req_writer = yield from asyncio.open_connection(host, port, flags=TCP_NODELAY, loop=loop) req_writer.write(('%s\r\n' % new_head).encode()) yield from req_writer.drain() yield from asyncio.sleep(0.2, loop=loop) def generate_dummyheaders(): def generate_rndstrs(strings, length): return ''.join(random.choice(strings) for _ in range(length)) import string return ['X-%s: %s\r\n' % (generate_rndstrs(string.ascii_uppercase, 16), generate_rndstrs(string.ascii_letters + string.digits, 128)) for _ in range(32)] req_writer.writelines(list(map(lambda x: x.encode(), generate_dummyheaders()))) yield from req_writer.drain() req_writer.write(b'Host: ')
def connect(self):
    """Open a TCP stream to the mochad daemon on its fixed port (1099)."""
    self.reader, self.writer = yield from asyncio.open_connection(
        self.host, 1099)
def test_telnet_server_given_shell(
        event_loop, bind_host, unused_tcp_port):
    """Iterate all state-reading commands of default telnet_server_shell.

    Drives a real server with a raw client socket and compares the
    byte-exact shell output for each command in cmd_output_table.
    """
    from telnetlib3.telopt import IAC, WILL, DO, WONT, ECHO, SGA, BINARY, TTYPE
    from telnetlib3 import telnet_server_shell

    # given
    _waiter = asyncio.Future()

    yield from telnetlib3.create_server(
        host=bind_host, port=unused_tcp_port,
        shell=telnet_server_shell,
        _waiter_connected=_waiter,
        connect_maxwait=0.05,
        timeout=0.25,
        loop=event_loop, limit=1337)

    reader, writer = yield from asyncio.open_connection(
        host=bind_host, port=unused_tcp_port, loop=event_loop)

    # Server opens telnet negotiation by requesting the terminal type.
    expected = IAC + DO + TTYPE
    result = yield from asyncio.wait_for(
        reader.readexactly(len(expected)), 0.5)
    assert result == expected

    # Refuse TTYPE so the server falls through to the plain shell prompt.
    writer.write(IAC + WONT + TTYPE)

    expected = b'Ready.\r\ntel:sh> '
    result = yield from asyncio.wait_for(
        reader.readexactly(len(expected)), 0.5)
    assert result == expected

    server = yield from asyncio.wait_for(_waiter, 0.5)
    server_port = str(server._transport.get_extra_info('peername')[1])

    # (client input bytes, exact bytes the shell must echo back)
    cmd_output_table = (
        # exercise backspace in input for help command
        ((b'\bhel\blp\r'), (
            b'\r\nquit, writer, slc, toggle [option|all], reader, proto'
            b'\r\ntel:sh> '
        )),
        (b'writer\r\x00', (
            b'\r\n<TelnetWriter server mode:local +lineflow -xon_any +slc_sim>'
            b'\r\ntel:sh> '
        )),
        (b'reader\r\n', (
            b"\r\n<TelnetReaderUnicode encoding='US-ASCII' limit=1337 buflen=1 eof=False>"
            b'\r\ntel:sh> '
        )),
        (b'proto\n', (
            b'\r\n<Peer ' + bind_host.encode('ascii') + b' ' +
            server_port.encode('ascii') + b'>' +
            b'\r\ntel:sh> '
        )),
        (b'slc\r\n', (
            b'\r\nSpecial Line Characters:'
            b'\r\n SLC_AO: (^O, variable|flushout)'
            b'\r\n SLC_EC: (^?, variable)'
            b'\r\n SLC_EL: (^U, variable)'
            b'\r\n SLC_EW: (^W, variable)'
            b'\r\n SLC_IP: (^C, variable|flushin|flushout)'
            b'\r\n SLC_RP: (^R, variable)'
            b'\r\n SLC_AYT: (^T, variable)'
            b'\r\n SLC_EOF: (^D, variable)'
            b'\r\n SLC_XON: (^Q, variable)'
            b'\r\n SLC_SUSP: (^Z, variable|flushin)'
            b'\r\n SLC_XOFF: (^S, variable)'
            b'\r\n SLC_ABORT: (^\, variable|flushin|flushout)'
            b'\r\n SLC_LNEXT: (^V, variable)'
            b'\r\nUnset by client: SLC_BRK, SLC_EOR, SLC_SYNCH'
            b'\r\nNot supported by server: SLC_EBOL, SLC_ECR, SLC_EEOL, '
            b'SLC_EWR, SLC_FORW1, SLC_FORW2, SLC_INSRT, SLC_MCBOL, '
            b'SLC_MCEOL, SLC_MCL, SLC_MCR, SLC_MCWL, SLC_MCWR, SLC_OVER'
            b'\r\ntel:sh> '
        )),
        (b'toggle\n', (
            b'\r\nbinary off'
            b'\r\necho off'
            b'\r\ngoahead ON'
            b'\r\ninbinary off'
            b'\r\nlflow ON'
            b'\r\noutbinary off'
            b'\r\nxon-any off'
            b'\r\ntel:sh> '
        )),
        (b'toggle not-an-option\r', (
            b'\r\ntoggle: not an option.'
            b'\r\ntel:sh> '
        )),
        (b'toggle all\r\n', (
            b'\r\n' +
            # negotiation options received,
            # though ignored by our dumb client.
            IAC + WILL + ECHO +
            IAC + WILL + SGA +
            IAC + WILL + BINARY +
            IAC + DO + BINARY +
            b'will echo.'
            b'\r\nwill suppress go-ahead.'
            b'\r\nwill outbinary.'
            b'\r\ndo inbinary.'
            b'\r\nxon-any enabled.'
            b'\r\nlineflow disabled.'
            b'\r\ntel:sh> '
        )),
        (b'toggle\n', (
            # and therefor the same state values remain unchanged --
            # with exception of lineflow and xon-any, which are
            # states toggled by the shell directly (and presumably
            # knows what to do with it!)
            b'\r\nbinary off'
            b'\r\necho off'
            b'\r\ngoahead ON'
            b'\r\ninbinary off'
            b'\r\nlflow off'  # flipped
            b'\r\noutbinary off'
            b'\r\nxon-any ON'  # flipped
            b'\r\ntel:sh> '
        )),
        (b'\r\n', (
            b'\r\ntel:sh> '
        )),
        (b'not-a-command\n', (
            b'\r\nno such command.'
            b'\r\ntel:sh> '
        )),
        (b'quit\r', b'\r\nGoodbye.\r\n'),
    )

    for (cmd, output_expected) in cmd_output_table:
        writer.write(cmd)
        try:
            result = yield from asyncio.wait_for(
                reader.readexactly(len(output_expected)), 0.5)
        except asyncio.streams.IncompleteReadError as err:
            # Final 'quit' closes the connection; compare what we got.
            result = err.partial
        assert result == output_expected

    # nothing more to read.
    result = yield from reader.read()
    assert result == b''
def process_warp(client_reader, client_writer, *, loop=None):
    """Handle one inbound proxy request: read the HTTP request head, then for
    CONNECT requests check the target against a filter list and, if allowed,
    tunnel bytes in both directions between client and destination.

    NOTE(review): this excerpt ends inside the CONNECT branch's ``try:`` --
    the matching except/finally (and any non-CONNECT handling) lies beyond
    this view.
    """
    # short hex id used as a per-task log tag.
    ident = str(hex(id(client_reader)))[-6:]
    header = ''
    payload = b''
    try:
        RECV_MAX_RETRY = 3
        recvRetry = 0
        while True:
            line = yield from client_reader.readline()
            if not line:
                if len(header) == 0 and recvRetry < RECV_MAX_RETRY:
                    # handle the case when the client make connection but
                    # sending data is delayed for some reasons
                    recvRetry += 1
                    yield from asyncio.sleep(0.2, loop=loop)
                    continue
                else:
                    break
            if line == b'\r\n':
                # blank line terminates the header section.
                break
            if line != b'':
                header += line.decode()

        # if a Content-Length header was seen, drain that many body bytes.
        m = REGEX_CONTENT_LENGTH.search(header)
        if m:
            cl = int(m.group(1))
            while (len(payload) < cl):
                payload += yield from client_reader.read(1024)
    except:
        # NOTE(review): bare except silently swallows all errors here;
        # consider narrowing to (OSError, asyncio.IncompleteReadError).
        print_exc()

    if len(header) == 0:
        logger.debug('[%s] !!! Task reject (empty request)' % ident)
        return

    req = header.split('\r\n')[:-1]
    if len(req) < 4:
        logger.debug('[%s] !!! Task reject (invalid request)' % ident)
        return
    head = req[0].split(' ')
    if head[0] == 'CONNECT':  # https proxy
        try:
            # NOTE(review): this handle is never read nor closed (leak);
            # the filter strings actually come from `full_list` below.
            searchfile2 = open("link.txt", "r")
            # NOTE(review): loop variable shadows the `list` builtin.
            for list in full_list:
                if (full_list.index(list)==1):
                    for s in list:
                        if str(head[1]).find(s) == -1:
                            print("No here! ",str(head[1]),s)
                        else:
                            #webbrowser.open('https://ceit.aut.ac.ir/~9431018/filter.html',new=2)
                            print("Found string.")
                            # Windows-only popup warning the user; blocks this task.
                            ctypes.windll.user32.MessageBoxW(0, "------This Site is Filter------", "Warning", 1)
                            print("FILTER", s)
                            return
            # #print(head[1])
            # if str(head[1]).find(str(line)) == -1:
            #     print("No here! ",str(head[1]),str(line))
            #
            # else:
            #     print("Found string.")
            #     print("FILTER", line)
            #     return
            logger.info('%sBYPASSING <%s %s> (SSL connection)' % ('[%s] ' % ident if verbose >= 1 else '', head[0], head[1]))
            # split "host:port" from the CONNECT target.
            m = REGEX_HOST.search(head[1])
            host = m.group(1)
            port = int(m.group(2))
            req_reader, req_writer = yield from asyncio.open_connection(host, port, ssl=False, loop=loop)
            client_writer.write(b'HTTP/1.1 200 Connection established\r\n\r\n')

            @asyncio.coroutine
            def relay_stream(reader, writer):
                # pump bytes one way until EOF; errors are logged, not raised.
                try:
                    while True:
                        line = yield from reader.read(1024)
                        if len(line) == 0:
                            break
                        writer.write(line)
                except:
                    print_exc()

            # NOTE(review): `asyncio.async` is a SyntaxError on Python 3.7+
            # (async became a keyword); modern code uses asyncio.ensure_future.
            tasks = [
                asyncio.async(relay_stream(client_reader, req_writer), loop=loop),
                asyncio.async(relay_stream(req_reader, client_writer), loop=loop),
            ]
            yield from asyncio.wait(tasks, loop=loop)
def _connect_coro(self):
    """Open a transport to the MQTT broker and run the CONNECT handshake.

    Parses ``self.session.broker_uri`` to pick plain TCP (mqtt/mqtts) or
    websocket (ws/wss) transport, fills in credentials and default ports,
    builds an SSL context for the secure schemes, then attaches the
    protocol handler and performs the MQTT connect.

    Returns the CONNACK return code on success.
    Raises ConnectException on rejection or any connection failure.

    NOTE(review): generator coroutine using ``yield from`` and ``loop=``
    keyword arguments -- targets pre-3.8 asyncio APIs.
    """
    kwargs = dict()

    # Decode URI attributes
    uri_attributes = urlparse(self.session.broker_uri)
    scheme = uri_attributes.scheme
    secure = True if scheme in ('mqtts', 'wss') else False
    # Credentials: URI values win unless the session already has them;
    # config supplies the fallback.
    if not self.session.username:
        self.session.username = self.config.get('username', uri_attributes.username)
    if not self.session.password:
        self.session.password = self.config.get('password', uri_attributes.password)
    self.session.remote_address = uri_attributes.hostname
    self.session.remote_port = uri_attributes.port
    # Standard default ports per scheme when the URI omits one.
    if scheme in ('mqtt', 'mqtts') and not self.session.remote_port:
        self.session.remote_port = 8883 if scheme == 'mqtts' else 1883
    if scheme in ('ws', 'wss') and not self.session.remote_port:
        self.session.remote_port = 443 if scheme == 'wss' else 80
    if scheme in ('ws', 'wss'):
        # Rewrite URI to conform to https://tools.ietf.org/html/rfc6455#section-3
        uri = (scheme, self.session.remote_address + ":" + str(self.session.remote_port), uri_attributes[2], uri_attributes[3], uri_attributes[4], uri_attributes[5])
        self.session.broker_uri = urlunparse(uri)
    # Init protocol handler
    #if not self._handler:
    self._handler = ClientProtocolHandler(self.plugins_manager, loop=self._loop)

    if secure:
        # Build an SSL context from session CA material; optional client
        # cert and check_hostname come from config.
        sc = ssl.create_default_context(
            ssl.Purpose.SERVER_AUTH,
            cafile=self.session.cafile,
            capath=self.session.capath,
            cadata=self.session.cadata)
        if 'certfile' in self.config and 'keyfile' in self.config:
            sc.load_cert_chain(self.config['certfile'], self.config['keyfile'])
        if 'check_hostname' in self.config and isinstance(
                self.config['check_hostname'], bool):
            sc.check_hostname = self.config['check_hostname']
        kwargs['ssl'] = sc

    try:
        reader = None
        writer = None
        self._connected_state.clear()
        # Open connection
        if scheme in ('mqtt', 'mqtts'):
            conn_reader, conn_writer = \
                yield from asyncio.open_connection(
                    self.session.remote_address,
                    self.session.remote_port, loop=self._loop, **kwargs)
            reader = StreamReaderAdapter(conn_reader)
            writer = StreamWriterAdapter(conn_writer)
        elif scheme in ('ws', 'wss'):
            websocket = yield from websockets.connect(
                self.session.broker_uri,
                subprotocols=['mqtt'],
                loop=self._loop,
                extra_headers=self.extra_headers,
                ping_timeout=None,
                **kwargs)
            reader = WebSocketsReader(websocket)
            writer = WebSocketsWriter(websocket)
        # Start MQTT protocol
        self._handler.attach(self.session, reader, writer)
        return_code = yield from self._handler.mqtt_connect()
        if return_code is not CONNECTION_ACCEPTED:
            # Broker refused the CONNECT; surface the return code to caller.
            self.session.transitions.disconnect()
            self.logger.warning("Connection rejected with code '%s'" % return_code)
            exc = ConnectException("Connection rejected by broker")
            exc.return_code = return_code
            raise exc
        else:
            # Handle MQTT protocol
            yield from self._handler.start()
            self.session.transitions.connect()
            self._connected_state.set()
            self.logger.debug(
                "connected to %s:%s" % (self.session.remote_address, self.session.remote_port))
        return return_code
    except InvalidURI as iuri:
        self.logger.warning("connection failed: invalid URI '%s'" % self.session.broker_uri)
        self.session.transitions.disconnect()
        raise ConnectException(
            "connection failed: invalid URI '%s'" % self.session.broker_uri, iuri)
    except InvalidHandshake as ihs:
        self.logger.warning(
            "connection failed: invalid websocket handshake")
        self.session.transitions.disconnect()
        raise ConnectException(
            "connection failed: invalid websocket handshake", ihs)
    except (ProtocolHandlerException, ConnectionError, OSError) as e:
        # Any transport/protocol level failure becomes a ConnectException.
        self.logger.warning("MQTT connection failed: %r" % e)
        self.session.transitions.disconnect()
        raise ConnectException(e)
def create_connection(host, port, *, loop=None):
    """Connect to a Redis server at ``(host, port)``.

    Opens an asyncio stream pair and returns a :class:`RedisClient`
    wrapping it, bound to the same event loop.
    """
    stream_pair = yield from asyncio.open_connection(host, port, loop=loop)
    in_stream, out_stream = stream_pair
    return RedisClient(in_stream, out_stream, loop=loop)
def write_numpy(color_array, depth_array, loop):
    """Serialize the color and depth arrays via ``build_payload`` and send
    the result to the module-level ``(IP, PORT)`` endpoint over a fresh TCP
    connection, closing the socket once the write has drained.
    """
    _reader, sender = yield from asyncio.open_connection(IP, PORT, loop=loop)
    packet = build_payload(color_array, depth_array)
    sender.write(packet)
    yield from sender.drain()
    sender.close()
async def worker(id_: int, host, send, recv, event, delimiter, timeout=None):
    """Port-probing worker: repeatedly announce readiness on *send*, pull a
    port number from *recv*, try to connect to ``(host, port)``, and report
    the outcome.  Runs until *event* is set or a stop signal arrives.

    A non-integer item on *recv* acts as the stop signal (ValueError path).
    NOTE(review): the bare annotations below are documentation only; ``q``
    in particular is never assigned or used in this function.
    """
    q: asyncio.Queue
    send: asyncio.Queue
    recv: asyncio.Queue
    event: asyncio.Event
    try:
        # if one thread crashes, will trigger event and gradually stop all threads.
        while not event.is_set():
            # announce server that the worker is ready.
            print(f"[CS{id_:2}][INFO] Worker {id_:2} READY.")
            await send.put(id_)
            try:
                p = await asyncio.wait_for(recv.get(), timeout=timeout)
                p = int(p)
                recv.task_done()
            except asyncio.TimeoutError:
                print(
                    SharedData.red(
                        f"[CS{id_:2}][WARN] Worker {id_:2} timeout fetching from Queue."
                    ))
                continue
            except ValueError:
                print(
                    SharedData.cyan(
                        f"[CS{id_:2}][INFO] Stop Signal received!"))
                break

            print(f"[CS{id_:2}][INFO] Connecting Port {p}.")
            # open_connection yields (reader, writer); named child_recv /
            # child_send respectively here.
            try:
                child_recv, child_send = await asyncio.wait_for(
                    asyncio.open_connection(host, p), timeout)
            except asyncio.TimeoutError:
                print(
                    SharedData.purple(f"[CS{id_:2}][INFO] Port {p} timeout."))
            except OSError:
                print(
                    SharedData.red(
                        f"[CS{id_:2}][WARN] Port {p} connection refused."))
            else:
                # connected: read a response via tcp_recv; the writer is
                # always closed in the finally clause below.
                try:
                    print(await tcp_recv(child_recv, delimiter, timeout=timeout))
                except asyncio.TimeoutError:
                    print(
                        SharedData.purple(
                            f"[CS{id_:2}][INFO] Port {p} timeout."))
                except asyncio.IncompleteReadError:
                    print(
                        SharedData.red(
                            f"[CS{id_:2}][WARN] Port {p} disconnected!"))
                else:
                    print(f"[CS{id_:2}][INFO] Port {p} open.")
                    print(
                        SharedData.green(
                            f"[CS{id_:2}][INFO] Port {p} is available."))
                finally:
                    child_send.close()
                    await child_send.wait_closed()
    except Exception:
        # trigger event to stop all threads.
        print(SharedData.red(f"[CS{id_:2}][CRIT] Exception Event set!."))
        event.set()
        raise

    print(SharedData.bold(f"[CS{id_:2}][INFO] Task Finished."))