Example #1
 def _assembleConnection(self, reader: asyncio.StreamReader,
                         writer: asyncio.StreamWriter) -> Connection:
     sock = writer.get_extra_info('socket')
     credentials = sock.getsockopt(socket.SOL_SOCKET, socket.SO_PEERCRED,
                                   struct.calcsize('3i'))
     pid, uid, gid = struct.unpack('3i', credentials)
     return Connection(socket=sock,
                       sslctx=writer.get_extra_info('sslcontext')
                       if self.sslctx else None,
                       sslobj=writer.get_extra_info('ssl_object')
                       if self.sslctx else None,
                       pid=pid,
                       uid=uid,
                       gid=gid,
                       peer_cert=writer.get_extra_info('peercert')
                       if self.sslctx else None,
                       peer_address=None,
                       host_address=None,
                       reader=reader,
                       writer=writer,
                       read_handlers=set(),
                       write_handlers=set(),
                       state={
                           'mode': self.mode or ConnectionType.PERSISTENT,
                           'created': time.time(),
                           'updated': time.time(),
                           'closed': None
                       })
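Example #1 above relies on SO_PEERCRED, which is Linux-specific and only meaningful for Unix-domain sockets. A self-contained sketch of the same pattern (the socket path /tmp/demo.sock is an assumption for illustration):

import asyncio
import socket
import struct

async def on_connect(reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
    # 'socket' exposes the transport's underlying socket object.
    sock = writer.get_extra_info('socket')
    # On Linux, SO_PEERCRED yields the peer's pid, uid and gid as three native ints.
    creds = sock.getsockopt(socket.SOL_SOCKET, socket.SO_PEERCRED, struct.calcsize('3i'))
    pid, uid, gid = struct.unpack('3i', creds)
    writer.write(f'pid={pid} uid={uid} gid={gid}\n'.encode())
    await writer.drain()
    writer.close()
    await writer.wait_closed()

async def main() -> None:
    server = await asyncio.start_unix_server(on_connect, path='/tmp/demo.sock')
    async with server:
        await server.serve_forever()

# asyncio.run(main())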
Example #2
async def relay(
        dreader: asyncio.StreamReader,
        dwriter: asyncio.StreamWriter,
        ureader: asyncio.StreamReader,
        uwriter: asyncio.StreamWriter,
) -> None:
    """Pass data/EOF from dreader to uwriter, and ureader to dwriter.

    Both writers are ensured to be closed upon exiting this function.
    """
    _logger.debug(
        'Relaying %r <=> %r', dwriter.get_extra_info('peername'),
        uwriter.get_extra_info('peername'))
    utask = asyncio.create_task(_relay_data_side(dreader, uwriter))
    dtask = asyncio.create_task(_relay_data_side(ureader, dwriter))
    async with contexts.aclosing_multiple_writers(dwriter, uwriter):
        try:
            await asyncio.gather(utask, dtask)
            _logger.debug(
                'Relay %r <=> %r ended normally',
                dwriter.get_extra_info('peername'),
                uwriter.get_extra_info('peername'))
        except:
            dtask.cancel()
            utask.cancel()
            raise
        finally:
            await asyncio.wait({dtask, utask})
            for t in (dtask, utask):
                if t.exception():
                    _logger.debug(
                        'Relay task %r caught exception %r', t, t.exception())
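The helper _relay_data_side() used above is not part of this excerpt. A plausible minimal version (an assumption, not the project's actual helper) copies bytes one way and forwards EOF so the other side sees a half-close:

import asyncio

async def _relay_data_side(reader: asyncio.StreamReader,
                           writer: asyncio.StreamWriter) -> None:
    # Pump data from reader to writer until EOF, then propagate the EOF.
    while True:
        data = await reader.read(65536)
        if not data:
            break
        writer.write(data)
        await writer.drain()
    writer.write_eof()   # assumes the transport supports half-close (plain TCP does)
    await writer.drain()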
Example #3
async def relay(
    dreader: asyncio.StreamReader,
    dwriter: asyncio.StreamWriter,
    ureader: asyncio.StreamReader,
    uwriter: asyncio.StreamWriter,
) -> None:
    """Pass data/EOF from dreader to uwriter, and ureader to dwriter.

    Both writers are ensured to be closed upon exiting this function.
    """
    _logger.debug('Relaying %r <=> %r', dwriter.get_extra_info('peername'),
                  uwriter.get_extra_info('peername'))
    utask = asyncio.create_task(_relay_data_side(dreader, uwriter))
    dtask = asyncio.create_task(_relay_data_side(ureader, dwriter))
    async with contexts.aclosing_multiple_writers(dwriter, uwriter):
        try:
            await asyncio.gather(utask, dtask)
            _logger.debug('Relay %r <=> %r ended normally',
                          dwriter.get_extra_info('peername'),
                          uwriter.get_extra_info('peername'))
        except:
            dtask.cancel()
            utask.cancel()
            raise
        finally:
            await asyncio.wait({dtask, utask})
            for t in (dtask, utask):
                if t.exception():
                    _logger.debug('Relay task %r caught exception %r', t,
                                  t.exception())
Example #4
async def handle_client_connection(
        adapter: adapters.ClientAdapter,
        transport: str,
        upstream_host: str,
        upstream_port: int,
        args: Dict[str, str],
        reader: asyncio.StreamReader,
        writer: asyncio.StreamWriter,
) -> None:
    handler_logger.debug(
        'Accepted connection for transport %s from %r on %r',
        transport,
        writer.get_extra_info('peername'), writer.get_extra_info('sockname'))
    async with contexts.log_unhandled_exc(handler_logger), \
               contexts.aclosing_multiple_writers(writer) as writers:
        try:
            ureader, uwriter = await adapter.open_transport_connection(
                transport, upstream_host, upstream_port, args)
        except exceptions.PTConnectError as e:
            handler_logger.warning(
                'PT reported error while connecting to upstream '
                '(%r, %r): %r', upstream_host, upstream_port, e)
            writer.transport.abort()
            return
        writers.add(uwriter)
        logname = (f'{writer.get_extra_info("peername")!r} ==> '
                   f'({upstream_host!r}, {upstream_port})')
        handler_logger.info('[%s] %s', transport, logname)
        try:
            await relays.relay(reader, writer, ureader, uwriter)
        except OSError as e:
            handler_logger.warning(
                '[%s] %s caught %r', transport, logname, e)
Example #5
async def handle_client(reader: asyncio.StreamReader,
                        writer: asyncio.StreamWriter):
    global client_counter
    this_client = client_counter
    client_counter += 1

    logging.info(f"Client {client_counter} connected")

    while True:
        try:
            command = await reader.readexactly(1)
        except asyncio.IncompleteReadError:
            logging.info(f"Client {this_client} closed the connection")
            break

        if command == b'0':
            response = str(writer.get_extra_info("peername"))
        elif command == b'1':
            response = str(writer.get_extra_info("sockname"))
        elif command == b'2':
            response = str(this_client)
        elif command == b'3':
            response = str(threading.active_count())
        elif command == b'4':
            response = ', '.join(t.name for t in threading.enumerate())
        elif command == b'q':
            response = "Goodbye"
        else:
            response = "Not recognized."
            logging.warning(
                f"Client {this_client} sent an unrecognized command {command}")

        writer.write(f"{response}\n".encode())
        await writer.drain()
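A minimal client sketch for the command server above (the host and port are assumptions; the excerpt does not show where the server listens):

import asyncio

async def query(command: bytes) -> str:
    reader, writer = await asyncio.open_connection('127.0.0.1', 8000)
    writer.write(command)            # single-byte command, e.g. b'0' for the peer name
    await writer.drain()
    line = await reader.readline()   # the server answers with one text line
    writer.close()
    await writer.wait_closed()
    return line.decode().rstrip()

# print(asyncio.run(query(b'2')))   # ask the server for this client's number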
Example #6
async def close_connection(writer: asyncio.StreamWriter):
    global CLIENTS
    addr = writer.get_extra_info('peername')
    logging.warning(f"Closed connection: {addr}")
    CLIENTS.remove(addr)
    writer.close()
    await writer.wait_closed()
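For context, a matching accept-side sketch (assuming CLIENTS is a module-level set of peer addresses, as the removal above implies; the per-client logic is a placeholder):

import asyncio
import logging

CLIENTS: set = set()   # peer addresses of currently connected clients

async def handle_connection(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    addr = writer.get_extra_info('peername')
    CLIENTS.add(addr)                   # register the peer address on accept
    logging.info(f"New connection: {addr}")
    try:
        await reader.read(1024)         # placeholder for real per-client work
    finally:
        await close_connection(writer)  # the coroutine shown above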
Example #7
    async def handle(self, reader: asyncio.StreamReader,
                     writer: asyncio.StreamWriter) -> None:
        """
        Main method for the request handler, performs the following:

            1. Read the request bytes from the reader stream
            2. Parse the request and generate response data
            3. Write the response bytes to the writer stream
        """
        self.reader = reader
        self.writer = writer
        self.remote_addr = writer.get_extra_info("peername")[0]
        self.client_cert = writer.get_extra_info("peercert")
        self.received_timestamp = time.localtime()

        try:
            await self.parse_header()
        except Exception:
            # Malformed request, throw it away and exit immediately
            self.write_status(Status.BAD_REQUEST, "Malformed request")
            return await self.close_connection()

        try:
            environ = self.build_environ()
            app = self.app(environ, self.write_status)
            for data in app:
                await self.write_body(data)
        except Exception:
            self.write_status(Status.CGI_ERROR, "An unexpected error occurred")
            raise
        finally:
            await self.close_connection()
Example #8
 def _assembleConnection(self, reader: asyncio.StreamReader,
                         writer: asyncio.StreamWriter) -> Connection:
     sock = writer.get_extra_info('socket')
     return Connection(socket=sock,
                       sslctx=writer.get_extra_info('sslcontext')
                       if self.sslctx else None,
                       sslobj=writer.get_extra_info('ssl_object')
                       if self.sslctx else None,
                       pid=os.getpid(),
                       uid=os.getuid(),
                       gid=os.getgid(),
                       peer_cert=writer.get_extra_info('peercert')
                       if self.sslctx else None,
                       peer_address=sock.getpeername(),
                       host_address=sock.getsockname(),
                       reader=reader,
                       writer=writer,
                       read_handlers=set(),
                       write_handlers=set(),
                       state={
                           'mode': self.mode or ConnectionType.PERSISTENT,
                           'created': time.time(),
                           'updated': time.time(),
                           'closed': None
                       })
Example #9
async def handle_client_connection(
    adapter: adapters.ClientAdapter,
    transport: str,
    upstream_host: str,
    upstream_port: int,
    args: Dict[str, str],
    reader: asyncio.StreamReader,
    writer: asyncio.StreamWriter,
) -> None:
    handler_logger.debug('Accepted connection for transport %s from %r on %r',
                         transport, writer.get_extra_info('peername'),
                         writer.get_extra_info('sockname'))
    async with contexts.log_unhandled_exc(handler_logger), \
               contexts.aclosing_multiple_writers(writer) as writers:
        try:
            ureader, uwriter = await adapter.open_transport_connection(
                transport, upstream_host, upstream_port, args)
        except exceptions.PTConnectError as e:
            handler_logger.warning(
                'PT reported error while connecting to upstream '
                '(%r, %r): %r', upstream_host, upstream_port, e)
            writer.transport.abort()
            return
        writers.add(uwriter)
        logname = (f'{writer.get_extra_info("peername")!r} ==> '
                   f'({upstream_host!r}, {upstream_port})')
        handler_logger.info('[%s] %s', transport, logname)
        try:
            await relays.relay(reader, writer, ureader, uwriter)
        except OSError as e:
            handler_logger.warning('[%s] %s caught %r', transport, logname, e)
Example #10
async def handler(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """Client handler"""
    try:
        addr = writer.get_extra_info("peername")
        print("Incoming connection from: {}:{}".format(addr[0], addr[1]))

        encryption = Encryptor()
        pub = encryption.create_rsa_pair()

        writer.write(pub)
        await writer.drain()

        aes_key = encryption.rsa_decrypt(await reader.read(1024))
        encryption.set_aes(aes_key)

        writer.write(encryption.aes_encrypt(b"ok"))
        await writer.drain()

        db_conn = sqlite3.connect("db.sqlite")
        db_cursor = db_conn.cursor()

        while True:
            req = await reader.read(4096)
            if not req:
                raise ConnectionResetError
            data = encryption.aes_decrypt(req)
            if data:
                if data[0] == "0".encode()[0]:
                    type, short_name, *key, level, hash = data.split(b":")
                    key = b":".join(key)

                    db_cursor.execute(
                        "INSERT INTO Files "
                        "(FileName, Hash, AccessLvl, Key) "
                        "VALUES (?, ?, ?, ?)",
                        (short_name.decode(), hash.decode(), level.decode(),
                         key))
                    db_conn.commit()
                    writer.write(encryption.aes_encrypt(b"ok"))
                    await writer.drain()
                elif data[0] == "1".encode()[0]:
                    type, key, hash = data.split(b":")
                    qu = """SELECT FileName, Key FROM Files WHERE Hash = ?
     AND AccessLvl <= (SELECT AccessLvl From Users WHERE Keys = ?)"""
                    res = db_cursor.execute(
                        qu, (hash.decode(), key.decode())).fetchone()
                    if res:
                        file_name, file_key = res
                        resp = b":".join((file_name.encode(), file_key))
                        writer.write(encryption.aes_encrypt(resp))
                        await writer.drain()
                    else:
                        writer.write(encryption.aes_encrypt(b"err"))

        writer.close()
    except ConnectionResetError as err:
        addr = writer.get_extra_info("peername")
        print("Connection closed: {}:{}".format(addr[0], addr[1]))
        writer.close()
Example #11
    async def handle_connection(self, reader: asyncio.StreamReader,
                                writer: asyncio.StreamWriter):
        """
            Handles a new client connection using NIO.
            When reading, it invokes the suitable request callback depending
            on the JSON request.

            :param: reader, the stream reader to read data from (in bytes)
            :param: writer, the stream writer to write data to (in bytes)
        """
        log.info("Incoming client connection from {}".format(
            writer.get_extra_info('peername')))
        await self.bot.change_presence(status=discord.Status.online,
                                       game=discord.Game(name="Connected"))

        try:
            self.__writer = writer

            while self.__alive:

                # I suspect this is blocking too long...however I think this is wrong.
                #data = await asyncio.wait_for(reader.readuntil(b"\r\n"), timeout=10.0)
                data = await reader.readuntil(b"\r\n")
                try:
                    data = data.decode('ascii')
                except UnicodeDecodeError:
                    continue

                log.info("Received data from {}:{}".format(
                    writer.get_extra_info('peername'), data))

                try:
                    # try parse the JSON, then get the type of request
                    data = json.loads(data)

                    # if it's a /terminate/ type, terminate the server
                    if (data['type'] == 'terminate'):
                        self.stop()

                    # invoke a callback belonging to a request type if it is available
                    funcs = request_type.get_types()
                    if (funcs.get(data['type'])):
                        log.info(
                            "Invoking request callback, %s, with default timeout=%d."
                            % (data['type'], REQUEST_TIMEOUT))
                        await asyncio.wait_for(funcs[data['type']](self, data),
                                               timeout=REQUEST_TIMEOUT)

                except:
                    pass

        except (ConnectionError, asyncio.streams.IncompleteReadError):
            self.__writer = None
            log.info("There was a connection error. The client has timed out.")
            await self.bot.change_presence(
                status=discord.Status.do_not_disturb,
                game=discord.Game(name="Not connected"))
Example #12
 def _get_connection_identity_display_name(connection_writer: asyncio.StreamWriter) -> str:
     """
     Try to query the connection_writer to identify the connection source and return it.
     If the source cannot be identified, return "Unknown".
     """
     client_process: subprocess.Popen = connection_writer.get_extra_info('subprocess')
     client_peername = connection_writer.get_extra_info('peername')
     if client_process is not None:
         return f'PID `{client_process.pid}`'
     elif client_peername is not None:
         return f'Socket `{client_peername}`'
     return 'Unknown'
Example #13
    async def _verify_tcp_req(self, reader: asyncio.StreamReader,
                              writer: asyncio.StreamWriter) -> Receiver:
        conn = TcpReceiverConn(writer=writer, reader=reader)
        addr, _ = writer.get_extra_info('peername')
        if isinstance(addr, str) and self._blacklist.should_be_banned(addr):
            print(f'Rejecting connection from blacklisted IP {addr}')
            self._blacklist.refresh(addr)
            raise tcp_req_exception.BanError(conn)

        json_data = await conn.recv_json()

        if json_data is not None and isinstance(json_data,
                                                dict) and 'data' in json_data:
            data = json_data['data']
            if isinstance(data, dict) and 'key' in data:
                try:
                    orig_key = str(data['key'])
                    key_index = self._key_handler.verify_key(orig_key)
                    print(f'User {key_index[:5]} from IP {addr} verified successfully')
                    return Receiver(user_conn=conn, user_key_index=key_index)
                except KeyCheckMaxError:
                    print(f'Too many users already connected with the key used by IP {addr}')
                    raise tcp_req_exception.MaxError(conn)
                except KeyCheckVerificationError:
                    self._blacklist.refresh(addr)
                    print(f'IP {addr} tried an invalid key')
                    raise tcp_req_exception.VerificationError(conn)
                except:
                    raise tcp_req_exception.DataError(conn)
        raise tcp_req_exception.DataError(conn)
Example #14
 async def receive_handshake(
     self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
 ) -> None:
     ip, port, *_ = writer.get_extra_info("peername")
     remote_address = Address(ip, port)
     if self.peer_pool.chk_dialin_blacklist(remote_address):
         Logger.info_every_n(
             "{} has been blacklisted, refusing connection".format(remote_address),
             100,
         )
         reader.feed_eof()
         writer.close()
         return
     expected_exceptions = (
         TimeoutError,
         PeerConnectionLost,
         HandshakeFailure,
         asyncio.IncompleteReadError,
         HandshakeDisconnectedFailure,
     )
     try:
         await self._receive_handshake(reader, writer)
     except expected_exceptions as e:
         self.logger.debug("Could not complete handshake: %s", e)
         Logger.error_every_n("Could not complete handshake: {}".format(e), 100)
         reader.feed_eof()
         writer.close()
     except OperationCancelled:
         self.logger.error("OperationCancelled")
         reader.feed_eof()
         writer.close()
     except Exception as e:
         self.logger.exception("Unexpected error handling handshake")
         reader.feed_eof()
         writer.close()
Example #15
 async def run(self, reader: StreamReader,
               writer: StreamWriter) -> NoReturn:
     Logger.get_instance().debug_item('New connection from {}'.format(
         writer.get_extra_info('peername')))
     self.add_connection(writer)
     while True:
         try:
             msgs_bytes = await self.wait_message(reader)
             reader._eof = False
             # if len(msgs_bytes) == 1 and msgs_bytes[0] is self.EMPTY_BYTES:
             #     reader._eof = True
             #     await self.drop_connection(writer)
             #     Logger.get_instance().debug_item(
             #         'Connection with {} has been dropped'.format(writer.get_extra_info('peername')))
             #     break
             for msg in msgs_bytes:
                 await self.handle(msg, writer)
         except Exception as e:
             traceback.print_exc()
             Logger.get_instance().debug_item(
                 'An error has occurred: {}'.format(e.args[0]),
                 LogLevels.ERROR)
             await PubSub().remove_all()
             await self.drop_connection(writer)
         except KeyboardInterrupt:
             return
Example #16
async def build_scope(reader: asyncio.StreamReader,
                      writer: asyncio.StreamWriter) -> dict:
    request_line = await reader.readline()
    method, path, protocol = request_line.decode().rstrip().split(" ", 2)
    url = urlparse(path)
    path = url.path
    query_string = url.query.encode()
    __, http_version = protocol.split("/")

    headers = []
    while True:
        header_line = await reader.readline()
        header = header_line.decode().rstrip()
        if not header:
            break
        key, value = header.split(": ", 1)
        headers.append((key.encode(), value.encode()))

    sock = writer.get_extra_info("socket")

    return {
        "type": "http",
        "http_version": http_version,
        "method": method,
        "scheme": "http",
        "path": path,
        "query_string": query_string,
        "headers": [],
        "client": sock.getpeername(),
        "server": sock.getsockname(),
    }
Example #17
async def handle_request(
    reader: asyncio.StreamReader, writer: asyncio.StreamWriter,
    buff=4096, cd='utf8') -> None:

    while True:
        request = await reader.read(buff)
        request = request.decode(cd)

        addr = writer.get_extra_info('peername')
        print(f"Received {request!r} from {addr!r}")
        if not request:
            break
        else:
            if PUT_PATTERN.match(request):
                response = handle_put(request)
            elif GET_PATTERN.match(request):
                response = handle_get(request)
            else:
                response = raise_error()

            print(f"Send {response} at {addr}")
            writer.write(response.encode())
            await writer.drain()

    print(f"Closing connection with {addr}")
    writer.close()
Example #18
    async def handle_rpc_call(self, reader: StreamReader,
                              writer: StreamWriter):
        ''' handle rpc call async '''
        while True:
            peer = writer.get_extra_info('socket').getpeername()
            request_raw = await self.read(reader)
            logging.info(f"get data from {peer}")
            if not request_raw:
                break  # Client close connection, Clean close
            request_raw = request_raw.decode()

            # check for invalid json first
            request_json = None
            try:
                request_json = json.loads(request_raw)
            except (json.JSONDecodeError, TypeError) as e:
                response = ErrorResponse(ParseError("Parse error"), None)
                self.send_response(writer, response)
            else:
                if isinstance(request_json, list):
                    response = await self.handle_batched_rpc_call(request_json)
                else:
                    response = await self.handle_simple_rpc_call(request_json)

                if response:
                    self.send_response(writer, response)
Example #19
async def handler(accept: AcceptFnType, connect: ConnectFnType,
                  relay: RelayFnType, dreader: asyncio.StreamReader,
                  dwriter: asyncio.StreamWriter) -> None:
    """Main server handler."""
    logger = logging.getLogger('handler')
    dname = repr(dwriter.get_extra_info('peername'))
    log_name = '{!s} <=> ()'.format(dname)
    try:
        async with contextlib.AsyncExitStack() as stack:
            logger.debug('%s received connection', log_name)
            await stack.enter_async_context(closing_writer(dwriter))
            uhost, uport, ureader, uwriter = await accept(
                dreader, dwriter, connect)
            await stack.enter_async_context(closing_writer(uwriter))
            uname = '({!r}, {!r})'.format(uhost, uport)
            log_name = '{!s} <=> {!s}'.format(dname, uname)
            logger.info('%s relaying', log_name)
            await relay(dreader, dwriter, ureader, uwriter, uname)
            logger.info('%s done', log_name)
    except asyncio.CancelledError:
        logger.info('%s handler cancelled', log_name)
        raise
    except (OSError, ValueError, TimeoutError, HappyEyeballsConnectError) as e:
        logger.info('%s exception: %r', log_name, e)
    except Exception as e:
        logger.error('%s exception:', log_name, exc_info=e)
Example #20
async def client(reader: StreamReader, writer: StreamWriter):
    # The client() coroutine function will produce a long-lived coroutine for each
    # new connection. Think of it as a callback for the TCP server started in main().
    # On this line, I’ve shown how the host and port of the remote peer can be
    # obtained, for example, for logging.
    peername = writer.get_extra_info("peername")
    subscribe_chan = await read_msg(reader)
    SUBSCRIBERS[subscribe_chan].append(writer)

    send_task = asyncio.create_task(send_client(writer, SEND_QUEUES[writer]))
    print(f"Remote {peername} subscribed to {subscribe_chan}")

    try:
        while channel_name := await read_msg(reader):
            data = await read_msg(reader)

            if channel_name not in CHAN_QUEUES:
                CHAN_QUEUES[channel_name] = Queue(maxsize=10)
                asyncio.create_task(chan_sender(channel_name))
            await CHAN_QUEUES[channel_name].put(data)
    except asyncio.CancelledError:
        print(f"Remote {peername} closing connection.")
    except asyncio.IncompleteReadError:
        print(f"Remote {peername} disconnected.")
    finally:
        print(f"Remote {peername} closed.")
        await SEND_QUEUES[writer].put(None)
        await send_task
        del SEND_QUEUES[writer]
        SUBSCRIBERS[subscribe_chan].remove(writer)
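read_msg() and its sending counterpart are not shown in this excerpt. A common wire format for this kind of pub/sub demo, and the assumption made here, is a 4-byte big-endian length prefix followed by the payload:

import asyncio
from asyncio import StreamReader, StreamWriter

async def read_msg(reader: StreamReader) -> bytes:
    size_bytes = await reader.readexactly(4)        # fixed-size length header
    size = int.from_bytes(size_bytes, byteorder='big')
    return await reader.readexactly(size)           # then exactly that many bytes

async def send_msg(writer: StreamWriter, data: bytes) -> None:
    writer.write(len(data).to_bytes(4, byteorder='big') + data)
    await writer.drain()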
Example #21
 async def _conn_handler(self, reader: StreamReader, writer: StreamWriter):
     client_ip = writer.get_extra_info('peername')[0]
     if client_ip not in self.peers:  # only allow connections from known peers
         writer.close()
         print(f"Refuse connection from {client_ip}")
         return
     # update online status
     self.peers[client_ip].update({"is_online": True})
     # get message
     msg_type = MsgType(int.from_bytes(await reader.readexactly(1), "big"))
     msg_length = int.from_bytes(await reader.readexactly(8), "big")
     if msg_type == MsgType.REQ_INDEX and self._event_listener[
             "on_request_index"]:
         client_index = pickle.loads(await reader.readexactly(msg_length))
         print(f"{client_ip} request index exchange")
         await self._event_listener["on_request_index"](writer,
                                                        client_index)
     elif msg_type == MsgType.REQ_INDEX_UPDATE and self._event_listener[
             "on_request_index_update"]:
         client_index = pickle.loads(await reader.readexactly(msg_length))
         print(f"{client_ip} request index update")
         await self._event_listener["on_request_index_update"](writer,
                                                               client_index)
     elif msg_type == MsgType.REQ_FILE and self._event_listener[
             "on_request_file"]:
         msg = pickle.loads(await reader.readexactly(msg_length))
         print(
             f"{client_ip} request file {msg['file_path']} blk:{msg['block_index']}"
         )
         await self._event_listener["on_request_file"](writer,
                                                       msg["file_path"],
                                                       msg["block_index"])
     else:
         writer.close()
         print(f"Invalid message from {client_ip}")
Example #22
 def _assembleConnection(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> Connection:
     '''
     Template method: Assemble a connection object based on the info we get from the reader/writer.
     Might be overridden by a subclass method to set different parameters
     '''
     return Connection(
         socket=writer.get_extra_info('socket'),
         sslctx=None,
         sslobj=None,
         pid=None,
         uid=None,
         gid=None,
         cert=None,
         peer_address=None,
         host_address=None,
         reader=reader,
         writer=writer,
         read_handlers=set(),
         write_handlers=set(),
         state={
             'mode': self.mode or ConnectionType.PERSISTENT,
             'created': time.time(),
             'updated': time.time(),
             'closed': None
             }
         )
Example #23
    async def handle_client(self, reader: asyncio.StreamReader,
                            writer: asyncio.StreamWriter):
        addr = writer.get_extra_info('peername')
        log.info("Client connected %r", addr)

        while not reader.at_eof():
            try:
                async with async_timeout.timeout(5):
                    line = await reader.readline()

                if line:
                    metric = line.decode()
                    name, value, timestamp = metric.split(" ", 3)
                    timestamp = float(timestamp)

                    if value == 'nan':
                        value = None
                    else:
                        value = float(value) if '.' in value else int(value)

                    await self.storage.write_async((name, (timestamp, value)))
            except asyncio.CancelledError:
                log.info('Client connection closed after timeout')
                break
            except:  # noqa
                continue

        log.info("Client disconnected %r", addr)
Example #24
 async def _handle(self, connection: StreamWriter,
                   message: RegisterMessage):
     bn_address = self.format_address(connection.get_extra_info('peername'))
     already_registered: Token = Token.find_one_by_address(
         bn_address, message.current_epoch)
     if already_registered:
         Logger.get_instance().debug_item('Found a valid token!',
                                          LogLevels.INFO)
         login_message = LoginMessage(already_registered.base,
                                      already_registered.proof,
                                      self.config.get_address())
         await self.send(connection, login_message)
     else:
         Logger.get_instance().debug_item('Computing a valid PoW ...',
                                          LogLevels.INFO)
         pow_solution = Hashcash.new(message.difficulty,
                                     message.puzzle.encode('utf-8'))
         Logger.get_instance().debug_item(
             'PoW found! Salt: {}, percentile: {}'.format(
                 pow_solution.salt.hex(), pow_solution.percentile()),
             LogLevels.INFO)
         login_message = LoginMessage(message.puzzle,
                                      pow_solution.salt.hex(),
                                      self.config.get_address())
         await self.send(connection, login_message)
Example #25
    async def handle_broadcast(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter) -> None:
        # log address
        addr = writer.get_extra_info("peername")
        logging.info(f"Broadcasting to {addr!r}")

        while self._broadcasting:
            # wait for data from serial port
            try:
                await asyncio.wait_for(self._datavalid.wait(), timeout=0.5) # set timeout such that there is never pileup
            except asyncio.TimeoutError:
                continue
            # take lock of db and prepare packet
            with self._dblock:
                values: List[float] = self._values
                alarms = self._alarms if len(self._alarms) > 0 else None

            broadcast_packet = {}
            broadcast_packet["sensors"] = values
            broadcast_packet["alarms"] = alarms # add alarms key/value pair

            logging.info(f"Send: {json.dumps(broadcast_packet,indent=4)}")

            try:
                writer.write(json.dumps(broadcast_packet).encode())
                await writer.drain()
            except (ConnectionResetError, BrokenPipeError):
                # Connection lost, stop trying to broadcast and free up socket
                logging.warning(f"Connection lost with {addr!r}")
                self._broadcasting = False
            # take control of datavalid and reset it
            with self._dvlock:
                self._datavalid.clear()

        self._broadcasting = True
        writer.close()
Example #26
 async def accept_handler(self, reader: asyncio.StreamReader,
                          writer: asyncio.StreamWriter) -> None:
     # First, we need the peer_addr (and maybe the peer_port) to determine
     # the right BGP session settings.
     peername = writer.get_extra_info('peername')
     peer_addr: str
     peer_port: int
     peer_addr = peername[0]
     peer_port = peername[1]
     try:
         net, base_session = self.peers.lookup(netaddr.IPAddress(peer_addr))
     except KeyError:
         return
     # Create ServerSession from session template
     session = self.create_session(base_session, server=self)
     session.reader = reader
     session.writer = writer
     session.reader_task = asyncio.current_task()
     session.peername = peername
     session.active = False
     try:
         base_session.connect_retry_timer.force_stop()
         # Run session
         await self.run_session(reader, writer, session)
     finally:
         base_session.last_error = session.last_error
         if base_session.active:
             base_session.connect_retry_timer.start()
Example #27
 async def _handle(self, connection: StreamWriter, message: LoginMessage):
     is_valid_registration, id = self.is_valid_proof(message)
     if is_valid_registration:
         Logger.get_instance().debug_item(
             'Valid PoW received! Crafting token...')
         Registration.update_registration(message.base, message.proof)
         current_view_peers = ViewMessage.get_current_view()
         view_message = ViewMessage(peer_list=current_view_peers,
                                    epoch=self.get_current_epoch().epoch)
         Logger.get_instance().debug_list(view_message.peer_list,
                                          separator='\n')
         token_message = self.create_token(message.base, message.proof)
         view_message.set_token(token_message)
         await self.send(connection, view_message)
         peer_address = self.format_address(
             connection.get_extra_info('peername'))
         peer_public_key = message.get_public_key()
         new_peer = Peer(address=peer_address,
                         public_key=peer_public_key,
                         registration=id,
                         public_address=message.address)
         Peer.find_or_add(new_peer)
         next_epoch = self.get_next_epoch()
         View.add(View(peer=new_peer.id, epoch_id=next_epoch.id))
         Logger.get_instance().debug_item('View message sent!')
Example #28
    async def _incoming(self, reader: asyncio.StreamReader,
                        writer: asyncio.StreamWriter) -> None:
        """
        Accept an incoming connection and signal the upper_half.

        This method does the minimum necessary to accept a single
        incoming connection. It signals back to the upper_half ASAP so
        that any errors during session initialization can occur
        naturally in the caller's stack.

        :param reader: Incoming `asyncio.StreamReader`
        :param writer: Incoming `asyncio.StreamWriter`
        """
        peer = writer.get_extra_info('peername', 'Unknown peer')
        self.logger.debug("Incoming connection from %s", peer)

        if self._reader or self._writer:
            # Sadly, we can have more than one pending connection
            # because of https://bugs.python.org/issue46715
            # Close any extra connections we don't actually want.
            self.logger.warning("Extraneous connection inadvertently accepted")
            writer.close()
            return

        # A connection has been accepted; stop listening for new ones.
        assert self._accepted is not None
        await self._stop_server()
        self._reader, self._writer = (reader, writer)
        self._accepted.set()
Example #29
    async def _server_handler(self, reader: asyncio.StreamReader,
                              writer: asyncio.StreamWriter):
        """Handler for the server loop. Extracts incoming requests, executes
        them, and returns the result back to the sender node.

        Args:
            reader (asyncio.StreamReader): Stream reader object for
            incoming node.

            writer (asyncio.StreamWriter): Stream writer object for
            incoming node.
        """

        # Replaces the 0.0.0.0 with the host's actual IP
        sock = writer.get_extra_info("socket")
        self.address.host = sock.getsockname()[0]
        self.logger.debug(f"Received connection from {sock.getpeername()[0]}")

        # Read request and parse for a response
        request = await reader.read(self.__buf)
        response = await self.parse_request(request)

        # Return response back to sender
        writer.write(response.encode("utf-8"))
        await writer.drain()
        self.logger.debug(f"Sent response back to {sock.getpeername()[0]}")

        # Close down writer
        writer.close()
        await writer.wait_closed()
Example #30
  async def forward_buffer_msg(self, writer: asyncio.StreamWriter):
    ip, port, *_ = writer.get_extra_info('peername')
    message_id = get_connection_id((ip, port))
    try:
      logger.debug(f"Start forwarding {hex(message_id)}")
      while message_id in self.buffers:
        message = await self.buffers[message_id].get()
        logger.debug(f"Receive message: {len(message)}")

        if len(message) == 0:
          return

        writer.write(message)
        await writer.drain()
    except asyncio.CancelledError:
      logger.debug(f"Cancelled")
    except ConnectionResetError:
      logger.debug(f"Connection lost from {(ip, port)}")
      pass
    finally:
      del self.buffers[message_id]
      writer.close()
      await writer.wait_closed()
      logger.debug(f"Finish forwarding {hex(message_id)}")
      pass
Example #31
  async def server_remote_serve(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):

    logger.info(f"Connection from {writer.get_extra_info('peername')}")

    if not self.ws or self.ws.closed:
      logger.error(f"Client is not started. ")
      writer.close()
      return
    if len(self.buffers) > self.max_connections:
      logger.error(f"Max connection {self.max_connections} exceeded. ")
      writer.close()
      return

    peer_name = writer.get_extra_info('peername')
    ip, port, *_ = peer_name
    message_id = get_connection_id((ip, port))
    self.buffers[message_id] = asyncio.Queue()

    done, pending = await asyncio.wait([asyncio.wait([self.forward_buffer_msg(writer),
                                                      self.forward_remote_msg(reader, writer)]),
                                        asyncio.ensure_future(self.wait_event(self.client_down))],
                                        return_when=asyncio.FIRST_COMPLETED)

    for coro in pending:
      coro.cancel()
Example #32
  async def forward_remote_msg(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    try:
      ip, port, *_ = writer.get_extra_info('peername')
      message_id = get_connection_id((ip, port))
      seq = 0
      while not reader.at_eof():
        data: bytes = await reader.read(4096)

        if seq == 0:
          tag = BEGIN
        else:
          tag = MID

        if tag == BEGIN or len(data) > 0:
          message = TCPMessage(id=message_id, tag=tag, data=data)
          await self.ws.send(message.to_bytes())

        seq += 1
      message = TCPMessage(id=message_id, tag=END, data=b'')
      await self.ws.send(message.to_bytes())
    except asyncio.CancelledError:
      pass
    finally:
      writer.close()
      await writer.wait_closed()
Example #33
def QueueSender(reader: asyncio.StreamReader, writer: asyncio.StreamWriter, queue_name: str):
    """
    A coroutine for pulling items from the Queue to the streams.
    """
    client = writer.get_extra_info("peername")
    sclient = ':'.join(str(_) for _ in client)
    while True:
        try:
            data = yield from reader.read(65536)
        except ConnectionResetError:
            rlogger.info("Client {} closed connection".format(sclient))
            return
        if not data:
            slogger.info("Client {} closed connection".format(sclient))
            return
        # Unpack data
        try:
            sub_data = msgpack.unpackb(data, encoding='utf-8')
        except (msgpack.UnpackException, ValueError) as e:
            slogger.error("Recieved non-msgpack pull from {}".format(sclient))
            continue
        action = sub_data.get("action", -1)
        if not action == 1:
            slogger.error("Recieved non-pull action on pull channel from client (action: {})"
                          .format(sclient, action))
            continue
        queue = queues[queue_name][1]
        assert isinstance(queue, asyncio.Queue)
        data = yield from queue.get()
        slogger.debug("Packing data {} for queue {}".format(data[1], queue_name))
        response = {"status": 0, "data": data[1], "msgnum": data[0]}
        msgpack.pack(response, writer)
Example #34
def QueueWaiter(reader: asyncio.StreamReader, writer: asyncio.StreamWriter, queue_name: str):
    """
    A coroutine for waiting upon new items to be placed into the Queue.
    """
    client = writer.get_extra_info("peername")
    sclient = ':'.join(str(_) for _ in client)
    while True:
        try:
            data = yield from reader.read(65536)
        except ConnectionResetError:
            rlogger.info("Client {} closed connection".format(sclient))
            return
        if not data:
            rlogger.info("Client {} closed connection".format(sclient))
            return
        # Unpack
        try:
            sub_data = msgpack.unpackb(data, encoding="utf-8")
        except (msgpack.UnpackException, ValueError) as e:
            rlogger.error("Recieved non-msgpack push from {}".format(sclient))
            continue
        rlogger.debug("Recieved data from client {}: {}".format(sclient, sub_data))
        assert isinstance(sub_data, dict)
        action = sub_data.get("action", -1)
        if not action == 0:
            rlogger.error("Recieved non-push action on push channel from client {} (action: {})"
                          .format(sclient, action))
            continue
        # Get data to place
        data = sub_data.get("data", None)
        if not data:
            rlogger.error("Recieved no data on push channel from client {}".format(sclient))
            continue
        # Increment and get message number
        queues[queue_name][0] += 1
        msgnum = queues[queue_name][0]
        queue = queues[queue_name][1]
        # Put it on the queue
        assert isinstance(queue, asyncio.Queue)
        yield from queue.put([msgnum, data])
        # Respond to the client
        response = {"msgnum": msgnum, "status": 0}
        rlogger.debug("Sending response with message number {}".format(msgnum))
        msgpack.pack(response, writer)
Example #35
    async def _accept(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
        addr = writer.get_extra_info('peername')
        peer = Peer(addr[0], addr[1])

        client = PeerTCPClient(self._our_peer_id, peer)

        try:
            info_hash = await client.accept(reader, writer)
            if info_hash not in self._torrent_managers:
                raise ValueError('Unknown info_hash')
        except Exception as e:
            client.close()

            if isinstance(e, asyncio.CancelledError):
                raise
            else:
                logger.debug("%s wasn't accepted because of %r", peer, e)
        else:
            self._torrent_managers[info_hash].accept_client(peer, client)
Example #36
def connected_cb(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """
    A callback for connected clients.
    """
    client = writer.get_extra_info("peername")
    sclient = ':'.join(str(_) for _ in client)
    logger.info("Recieved connection from {}:{}".format(*client))
    # Read a subscription message.
    try:
        sub = yield from reader.read(65536)
    except ConnectionResetError:
        rlogger.info("Client {} closed connection".format(sclient))
        return
    if not sub:
        logger.error("Client {} terminated connection abnormally".format(sclient))
        return
    try:
        sub_data = msgpack.unpackb(sub)
    except (msgpack.UnpackException, ValueError) as e:
        logger.error("Recieved unknown subscription message from {}:{}".format(*client))
        yield from writer.drain()
        writer.close()
        return
    # Get the data from the subscription message.
    if b'queue' not in sub_data:
        logger.error("Received null queue from {}".format(sclient))
        yield from writer.drain()
        writer.close()
        return
    queue_to_sub = sub_data[b"queue"]
    action = sub_data.get(b"action", 0)
    queue_created = False
    if queue_to_sub not in queues:
        queues[queue_to_sub] = [0, asyncio.Queue()]
        logger.debug("Created queue {}".format(queue_to_sub))
        queue_created = True
    logger.debug("Client {} subscribed to queue {} in mode {} ({})".format(sclient, queue_to_sub,
                                                                           action, "push" if not action else "pull"))
    if action == 0:
        loop.create_task(QueueWaiter(reader, writer, queue_to_sub))
    else:
        loop.create_task(QueueSender(reader, writer, queue_to_sub))
    msgpack.pack({"created": queue_created}, writer)
Example #37
 async def handle_client(self, reader: StreamReader, writer: StreamWriter) -> None:
     connection = h11.Connection(h11.SERVER)
     body = None  # type: StreamReader
     while True:
         data = await reader.read(65536)
         connection.receive_data(data)
         event = connection.next_event()
         if event is h11.NEED_DATA:
             continue
         elif isinstance(event, h11.Request):
             body = StreamReader()
             headers = CIMultiDict((key.decode('ascii'), value.decode('iso-8859-1'))
                                   for key, value in event.headers)
             peername = writer.get_extra_info('peername')
             peercert = writer.get_extra_info('peercert')
             parsed = urlparse(event.target, allow_fragments=False)
             query = unquote(parsed.query.decode('ascii'))
             request = HTTPRequest(
                 event.http_version.decode('ascii'), event.method.decode('ascii'),
                 parsed.path.decode('utf-8'), query, headers, body, bool(self.tls_context),
                 peername, peercert)
         elif isinstance(event, h11.Data):
             body.feed_data(event.data)
         elif isinstance(event, h11.EndOfMessage):
             body.feed_eof()