Example 1
    def _read_stream(self, stream: asyncio.StreamReader) -> None:
        """

        :param stream: asyncio.StreamReader
        :return: None
        """
        while not stream.at_eof():
            data = yield from stream.read(1)
            if data == b'\n':
                self.__new_element()
            else:
                self.__update_last_item_text(data)
Example 2
    def __init__(self, loop, config):
        super().__init__(loop=loop)
        self._loop = loop
        self._config = config

        self._transport = None
        self._write_pending_data_topic = []     # tuple (data, topic)
        self._connected = False

        self._encryptor = cryptor.Cryptor(self._config['password'], self._config['method'])

        self._peername = None

        self._reader_task = None
        self._data_task = None
        self._keepalive_task = None
        self._keepalive_timeout = self._config['timeout']
        self._reader_ready = None
        self._reader_stopped = asyncio.Event(loop=self._loop)
        self._stream_reader = StreamReader(loop=self._loop)
        self._stream_writer = None
        self._reader = None

        self._topic_to_clients = {}

        self._queue = Queue(loop=loop)
Example 3
async def _handshake(
    initiator: "HandshakeInitiator",
    reader: asyncio.StreamReader,
    writer: asyncio.StreamWriter,
    token: CancelToken,
) -> Tuple[bytes, bytes, keccak_256, keccak_256]:
    """See the handshake() function above.

    This code was factored out into this helper so that we can create Peers with directly
    connected readers/writers for our tests.
    """
    initiator_nonce = keccak(os.urandom(HASH_LEN))
    auth_msg = initiator.create_auth_message(initiator_nonce)
    auth_init = initiator.encrypt_auth_message(auth_msg)
    writer.write(auth_init)

    auth_ack = await token.cancellable_wait(
        reader.read(ENCRYPTED_AUTH_ACK_LEN), timeout=REPLY_TIMEOUT)

    if reader.at_eof():
        # This is what happens when Parity nodes have blacklisted us
        # (https://github.com/ethereum/py-evm/issues/901).
        raise HandshakeFailure(
            f"{initiator.remote!r} disconnected before sending auth ack")

    ephemeral_pubkey, responder_nonce = initiator.decode_auth_ack_message(
        auth_ack)
    aes_secret, mac_secret, egress_mac, ingress_mac = initiator.derive_secrets(
        initiator_nonce, responder_nonce, ephemeral_pubkey, auth_init,
        auth_ack)

    return aes_secret, mac_secret, egress_mac, ingress_mac
Example 4
    async def connect(self, aio_loop):
        self._allow_states(*ServiceState.halted_states())

        self.script = Queue(loop=aio_loop)
        self.in_buffer = StreamReader(loop=aio_loop)
        self.out_buffer = StreamReader(loop=aio_loop)

        # Transfer deferred actions.
        for action in self._deferred_actions:
            self._add_action_now(action)

        self._deferred_actions = []

        # If the client wishes to delay startup of the scripted actions (e.g. to register an EventStream first),
        # we set this up here.
        if self.defer_script:
            self._defer_event = Event(loop=aio_loop)

        # We're inside of an asyncio task, but need a synchronous start. Hack up
        # an asyncio version of it.
        started = Event(loop=aio_loop)

        def async_start():
            self.sync_start()
            self.aio_loop.call_soon_threadsafe(started.set)

        Thread(target=async_start).start()

        await started.wait()

        return self.out_buffer, self
Example 5
async def tcp_recv(reader: asyncio.StreamReader,
                   delimiter: bytes = b'\n',
                   timeout=None) -> str:
    """
    Receives string result. Handle Incomplete error on your own.
    """

    try:
        data_length: bytes = await asyncio.wait_for(
            reader.readuntil(delimiter), timeout=timeout)
    except TypeError:
        msg = "tcp_recv: expects"
        if not isinstance(delimiter, bytes):
            print(msg,
                  f"<bytes> for delimiter, got {type(delimiter)} instead.")

        if not isinstance(timeout, Number) and timeout is not None:
            print(msg,
                  f"<numbers> for delimiter, got {type(timeout)} instead.")

        raise

    # Why is this handler skipped? Why do we get ConnectionResetError directly?
    except ConnectionResetError:
        print("tcp_recv: Disconnected from Server.")
        raise

    except asyncio.IncompleteReadError:
        print("tcp_recv: Incomplete read error.")
        raise

    data = await asyncio.wait_for(reader.readexactly(
        int(data_length.strip(delimiter))),
                                  timeout=timeout)
    return data.decode()
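A hypothetical counterpart (not part of the original module) can make the framing explicit: tcp_recv() above expects the payload length in ASCII, terminated by the delimiter, followed by exactly that many bytes.

async def tcp_send(writer: asyncio.StreamWriter, data: str,
                   delimiter: bytes = b'\n', timeout=None) -> None:
    # Sketch of a matching sender: ASCII length, delimiter, then the payload itself.
    payload = data.encode()
    writer.write(str(len(payload)).encode() + delimiter + payload)
    await asyncio.wait_for(writer.drain(), timeout=timeout)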
Example 6
    async def _handle_client(self, reader: asyncio.StreamReader,
                             writer: asyncio.StreamWriter):
        while not reader.at_eof():
            try:
                array = await reader.readline()

                if reader.at_eof():
                    break

                assert array[:1] == b'*'

                count = parse_int(array[1:-2])

                items = [await read_bulk_string(reader) for _ in range(count)]
                command = items[0]
                params = items[1:]

                try:
                    with timeout(REDIS_SERVER_TIMEOUT):
                        await self._execute(command, params, writer)
                except Exception:
                    logger.exception(f"Command failed: {command}")
                    writer.write(encode_error("Command failed"))
            except Exception:
                logger.exception(f"Invalid command: {array}")
                writer.write(encode_error("Invalid command"))
                writer.close()
Example 7
async def read_stream(reader: StreamReader,
                      writer: StreamWriter,
                      log: bool = True) -> AsyncGenerator[str, None]:
    """Reading line from stream."""

    reconnect_counter = 0

    try:
        while not reader.at_eof():
            data = await asyncio.wait_for(reader.readline(), timeout=60)
            message = data.decode('utf-8').strip()
            if log:
                logger.debug(message)
            yield message

    except asyncio.TimeoutError:
        logger.exception('TimeoutError -> StreamReader timed out waiting for a message',
                         exc_info=False)
    except Exception as ex:
        logger.exception(f'OtherError -> {ex.__class__.__name__}: {ex}',
                         exc_info=False)

    finally:
        reconnect_counter += 1
        await asyncio.sleep(60 if reconnect_counter > 60 else reconnect_counter
                            )
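A minimal usage sketch (assumed, not from the original project): iterate over the generator and hand each non-empty line to a handler; handle_message is a placeholder name.

async def consume_messages(reader: StreamReader, writer: StreamWriter) -> None:
    async for message in read_stream(reader, writer):
        if message:
            handle_message(message)  # hypothetical per-line handler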
Example 8
    async def proxy(self, client_reader: StreamReader,
                    client_writer: StreamWriter, remote_reader: StreamReader,
                    remote_writer: StreamWriter):

        client_read_task = create_task(client_reader.read(READ_BYTES_DEFAULT))
        remote_read_task = create_task(remote_reader.read(READ_BYTES_DEFAULT))

        while client_read_task and remote_read_task:

            done, pending = await asyncio.wait(
                {client_read_task, remote_read_task},
                return_when=FIRST_COMPLETED)

            if client_read_task in done:
                client_read_task = await self._proxy_connection(
                    in_read=client_read_task,
                    out_read=remote_read_task,
                    in_reader=client_reader,
                    out_writer=remote_writer)

            if remote_read_task in done:
                remote_read_task = await self._proxy_connection(
                    in_read=remote_read_task,
                    out_read=client_read_task,
                    in_reader=remote_reader,
                    out_writer=client_writer)

        if client_read_task:
            client_read_task.cancel()

        if remote_read_task:
            remote_read_task.cancel()

        remote_writer.close()
Example 9
    async def handler(reader: asyncio.StreamReader,
                      writer: asyncio.StreamWriter):

        nonlocal data

        header_size = struct.calcsize('!L')

        while not reader.at_eof():
            try:
                header = await reader.readexactly(header_size)
            except asyncio.IncompleteReadError:
                break

            payload_size = struct.unpack("!L", header)[0]

            try:
                payload = await reader.readexactly(payload_size)
            except asyncio.IncompleteReadError:
                break

            for metric in pickle.loads(payload):
                data.append(metric)

        if len(data) == count:
            event.set()
        writer.close()
        reader.feed_eof()
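A hypothetical sender-side helper (not part of the original test) that shows the framing handler() above expects: a 4-byte big-endian length header followed by a pickled list of metrics.

import pickle
import struct

def frame_metrics(metrics: list) -> bytes:
    # Serialize the batch and prepend the '!L' header that handler() unpacks.
    payload = pickle.dumps(metrics)
    return struct.pack('!L', len(payload)) + payload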
Example 10
async def json_lines_with_timeout(reader: asyncio.StreamReader, timeout: 'seconds' = DEFAULT_TIMEOUT):
    while not reader.at_eof():
        line = await asyncio.wait_for(reader.readline(), timeout)
        try:
            yield json.loads(line)
        except json.JSONDecodeError:
            pass  # skip lines that are not valid JSON
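A usage sketch (assumed): consume the async generator and pass each decoded object to a handler; process is a placeholder name.

async def consume_json_lines(reader: asyncio.StreamReader) -> None:
    async for obj in json_lines_with_timeout(reader):
        process(obj)  # hypothetical per-object handler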
Example 11
async def _connect_streams(reader: asyncio.StreamReader,
                           writer: asyncio.StreamWriter,
                           queue: "asyncio.Queue[int]",
                           token: CancelToken) -> None:
    try:
        while not token.triggered:
            if reader.at_eof():
                break

            try:
                size = queue.get_nowait()
            except asyncio.QueueEmpty:
                await asyncio.sleep(0)
                continue
            data = await token.cancellable_wait(reader.readexactly(size))
            writer.write(data)
            queue.task_done()
            await token.cancellable_wait(writer.drain())
    except OperationCancelled:
        pass
    finally:
        writer.write_eof()

    if reader.at_eof():
        reader.feed_eof()
Example 12
    async def decrypt_stream(self, reader: StreamReader) -> StreamReader:
        data_length = await reader.read(ENCRYPTED_DATA_LENGTH)

        if not data_length:
            raise SecurityError('Connection closed')

        data_length_int = int.from_bytes(data_length,
                                         byteorder='little') + AUTH_TAG_LENGTH

        encrypted_data = await reader.read(data_length_int)

        chacha = ChaCha20Poly1305(self.context['decrypt_key'])

        nonce = b'\x00\x00\x00\x00' + self.encrypted_request_count.to_bytes(
            8, byteorder='little')
        try:
            decrypted_data = chacha.decrypt(nonce, encrypted_data, data_length)
        except InvalidTag:
            decrypted_data = None

        if not decrypted_data:
            raise SecurityError('Unable to decrypt encrypted data')

        self.encrypted_request_count += 1

        decrypted_reader = StreamReader()
        decrypted_reader.feed_data(decrypted_data)
        return decrypted_reader
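A hypothetical sender-side counterpart (not from the original class), assuming ENCRYPTED_DATA_LENGTH and AUTH_TAG_LENGTH are the same constants used above and that AUTH_TAG_LENGTH is the 16-byte Poly1305 tag: the little-endian length prefix doubles as the associated data, and the nonce is four zero bytes plus a 64-bit little-endian message counter.

from cryptography.hazmat.primitives.ciphers.aead import ChaCha20Poly1305

def encrypt_frame(key: bytes, request_count: int, plaintext: bytes) -> bytes:
    # Length prefix (also the AAD), then ciphertext + 16-byte tag.
    length = len(plaintext).to_bytes(ENCRYPTED_DATA_LENGTH, byteorder='little')
    nonce = b'\x00\x00\x00\x00' + request_count.to_bytes(8, byteorder='little')
    return length + ChaCha20Poly1305(key).encrypt(nonce, plaintext, length)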
Example 13
    async def _wait_for_either_until_neither(
            self, stdout: asyncio.StreamReader,
            stderr: asyncio.StreamReader) -> None:
        """
        Wait for a line of data from either stdout or stderr and log this data as received.
        When both are EOF then exit.

        :returns: Tuple of stdout, stderr
        """
        future_out = asyncio.ensure_future(stdout.readline())
        future_err = asyncio.ensure_future(stderr.readline())

        pending = set([future_out,
                       future_err])  # type: typing.Set[asyncio.Future]
        done = set()  # type: typing.Set[asyncio.Future]

        while len(pending) > 0:

            done, pending = await asyncio.wait(pending)

            for future_done in done:
                result = future_done.result().strip()
                if len(result) > 0:
                    line = result.decode(errors='replace')
                    if future_done == future_err:
                        future_err = asyncio.ensure_future(stderr.readline())
                        pending.add(future_err)
                        self._logger.error(line)
                    else:
                        future_out = asyncio.ensure_future(stdout.readline())
                        pending.add(future_out)
                        self._logger.info(line.strip())
Example 14
    def _handler(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
        logger.debug('handle')

        while True:
            cmd = Command.unpack((yield from reader.readexactly(len(Command))))
            method = cmd.key.lower()

            if method not in self.implements:
                yield from self.response(
                    writer,
                    Code.INTERNAL_SERVER_ERROR,
                    ('ERROR : method `%s` not implemented\n' % method.strip()).encode()
                )

                continue

            args_length = Types.ulong.unpack((
                yield from reader.readexactly(len(Types.ulong))
            ))

            kwargs = msgpack.loads((yield from reader.readexactly(args_length)))

            try:
                code, result = yield from asyncio.coroutine(getattr(self, method))(**kwargs)
                yield from self.response(writer, code, result)
            except Exception as e:
                yield from self.response(writer, Code.INTERNAL_SERVER_ERROR, ('ERROR : %s' % e).encode())
                logger.exception("Exception when handling request")
Example 15
    def connection_made(self, transport):
        if self.kill_active:
            return

        sock = transport.get_extra_info('socket')
        set_tcp_keepalive(sock,
                          opts=dict(
                              tcp_keepalive=True,
                              tcp_keepalive_idle=self.tcp_keepalive_time,
                              tcp_keepalive_intvl=self.tcp_keepalive_interval,
                              tcp_keepalive_cnt=self.tcp_keepalive_probes,
                          ))
        # https://eklitzke.org/the-caveats-of-tcp-nodelay
        sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        if hasattr(socket, 'TCP_USER_TIMEOUT'):
            logger.debug('Setting TCP_USER_TIMEOUT to %s',
                         self.tcp_user_timeout_seconds * 1000)
            sock.setsockopt(socket.SOL_TCP, socket.TCP_USER_TIMEOUT,
                            self.tcp_user_timeout_seconds * 1000)
        self.transport = transport
        self._reader = StreamReader()
        self._reader.set_transport(transport)

        loop = asyncio.get_event_loop()
        loop.create_task(self.receive_commands())
        super().connection_made(transport)
Example 16
async def _capture_subprocess_output(
    stream: asyncio.StreamReader, ) -> List[bytes]:
    lines = []
    while not stream.at_eof():
        line = await stream.readline()
        if line or not stream.at_eof():
            lines.append(line.rstrip(b'\n'))
    return lines
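A minimal usage sketch (assumed): spawn a child process with a piped stdout and collect its output with the helper above.

import asyncio

async def run_and_capture(*cmd: str) -> List[bytes]:
    proc = await asyncio.create_subprocess_exec(*cmd, stdout=asyncio.subprocess.PIPE)
    lines = await _capture_subprocess_output(proc.stdout)
    await proc.wait()
    return lines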
Example 17
async def connection_loop(execute_rpc: Callable[[Any], Any],
                          reader: asyncio.StreamReader,
                          writer: asyncio.StreamWriter,
                          logger: logging.Logger,
                          cancel_token: CancelToken) -> None:
    # TODO: we should look into using an io.StringIO here for more efficient
    # writing to the end of the string.
    raw_request = ''
    while True:
        request_bytes = b''
        try:
            request_bytes = await cancel_token.cancellable_wait(reader.readuntil(b'}'))
        except asyncio.LimitOverrunError as e:
            logger.info("Client request was too long. Erasing buffer and restarting...")
            request_bytes = await cancel_token.cancellable_wait(reader.read(e.consumed))
            await cancel_token.cancellable_wait(write_error(
                writer,
                f"reached limit: {e.consumed} bytes, starting with '{request_bytes[:20]!r}'",
            ))
            continue

        raw_request += request_bytes.decode()

        bad_prefix, raw_request = strip_non_json_prefix(raw_request)
        if bad_prefix:
            logger.info("Client started request with non json data: %r", bad_prefix)
            await cancel_token.cancellable_wait(
                write_error(writer, 'Cannot parse json: ' + bad_prefix),
            )

        try:
            request = json.loads(raw_request)
        except json.JSONDecodeError:
            # invalid json request, keep reading data until a valid json is formed
            logger.debug("Invalid JSON, waiting for rest of message: %r", raw_request)
            continue

        # reset the buffer for the next message
        raw_request = ''

        if not request:
            logger.debug("Client sent empty request")
            await cancel_token.cancellable_wait(
                write_error(writer, 'Invalid Request: empty'),
            )
            continue

        try:
            result = await execute_rpc(request)
        except Exception as e:
            logger.exception("Unrecognized exception while executing RPC")
            await cancel_token.cancellable_wait(
                write_error(writer, "unknown failure: " + str(e)),
            )
        else:
            writer.write(result.encode())

        await cancel_token.cancellable_wait(writer.drain())
Example 18
def handle_stream(reader: StreamReader, writer: StreamWriter):
    addr = writer.get_extra_info('peername')
    print("Connect from %r" % (addr, ))

    labels_list = load_labels('labels/labels.txt')
    load_graph('model/my_frozen_graph_okyonsei.pb')

    model = load_model()

    count = 0
    detected_count = 0
    last_data_dict = dict()

    try:
        while True:
            count += 1
            data = yield from reader.readexactly(8)
            user_id, body_len = struct.unpack("!II", data)
            data = yield from reader.readexactly(body_len)

            last_data = last_data_dict.get(user_id)
            last_data_dict[user_id] = data

            if last_data is None:
                continue

            print(count)
            wav_data, np_data = convert_pcm_to_wav(last_data + data)

            with open(f'data/{count:06}.wav', 'wb') as fout:
                fout.write(wav_data)

            detected, prediction = run_graph(wav_data, labels_list, 3)

            if detected:
                if detected_count:
                    detected_count = 0
                    continue

                try:
                    start_p = max(np_data.argmax() - 300, 0)
                    name = find_who(model, np_data[start_p:])
                    # msg translates to: "Hello, {name} [accuracy: NN %]"
                    msg = f'안녕하세요. {name} [정확도: {int(prediction * 100)} %]'
                    msg_encoded = str.encode(msg)
                    header = struct.pack("!II", user_id, len(msg_encoded))
                    writer.write(header + msg_encoded)
                    yield from writer.drain()
                    detected_count += 1
                except Exception:
                    pass
            else:
                detected_count = 0

    except Exception as e:
        print(e)
        writer.close()
Example 19
async def read_lines_until(reader: StreamReader, timeout: float):
    lines = []
    while not reader.at_eof():
        try:
            line = await shield(wait_for(reader.readuntil(b'\r'), timeout))
            lines.append(line.decode("ascii"))
        except concurrent.futures.TimeoutError:
            break
    return lines
Example 20
async def test_BufferedReader_overflow():
    stream_data = b"1234"

    stream_reader = StreamReader()
    stream_reader.feed_data(stream_data)
    reader = BufferedReader(stream_reader, 1)

    result = await reader.read_bytes(4)

    assert result == stream_data
Example 21
async def receive_message(reader: asyncio.StreamReader):
    """ Get the initial command message from the agent.
     Used only when both containers are not on a shared-kernel """
    buf = bytearray()
    while len(buf) != 4 and not reader.at_eof():
        buf += await reader.read(4 - len(buf))
    length = struct.unpack('!I', bytes(buf))[0]
    buf = bytearray()
    while len(buf) != length and not reader.at_eof():
        buf += await reader.read(length - len(buf))
    return msgpack.unpackb(bytes(buf), use_list=False)
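A hypothetical counterpart (not in the original source) that frames a message the way receive_message() above reads it: a 4-byte big-endian length followed by the msgpack payload.

import struct
import msgpack

def encode_message(message) -> bytes:
    payload = msgpack.packb(message, use_bin_type=True)
    return struct.pack('!I', len(payload)) + payload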
Example 22
    async def _client_accept(
        self,
        reader: StreamReader,
        writer: StreamWriter,
        read_ahead: bytes = None,
    ) -> None:
        """ Accept new clients and inform the tunnel about connections """
        host, port = writer.get_extra_info("peername")[:2]
        ip = ipaddress.ip_address(host)

        # Reject connections coming from blocked networks
        if self.block(ip):
            reader.feed_eof()
            writer.close()
            await writer.wait_closed()

            _logger.info("Connection from %s blocked", ip)
            return

        self.connections[ip].hits += 1

        # Create the client object and generate a unique token
        client = Connection(reader, writer, self.protocol,
                            utils.generate_token())
        self.add(client)

        _logger.info("Client %s connected on %s:%s", client.uuid, host, port)

        # Inform the tunnel about the new client
        pkg = package.ClientInitPackage(ip, port, client.token)
        await self.tunnel.tun_write(pkg)

        # Send the buffer read ahead of initialization through the tunnel
        if read_ahead:
            await self.tunnel.tun_data(client.token, read_ahead)

        # Serve data from the client
        while True:
            data = await client.read(self.chunk_size)
            # Client disconnected. Inform the tunnel
            if not data:
                break

            await self.tunnel.tun_data(client.token, data)

        if self.server and self.server.is_serving():
            pkg = package.ClientClosePackage(client.token)
            await self.tunnel.tun_write(pkg)

        await self._disconnect_client(client.token)
Example 23
 async def receive_handshake(
     self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
 ) -> None:
     ip, socket, *_ = writer.get_extra_info("peername")
     remote_address = Address(ip, socket)
     if self.peer_pool.chk_dialin_blacklist(remote_address):
         Logger.info_every_n(
             "{} has been blacklisted, refusing connection".format(remote_address),
             100,
         )
         reader.feed_eof()
         writer.close()
     expected_exceptions = (
         TimeoutError,
         PeerConnectionLost,
         HandshakeFailure,
         asyncio.IncompleteReadError,
         HandshakeDisconnectedFailure,
     )
     try:
         await self._receive_handshake(reader, writer)
     except expected_exceptions as e:
         self.logger.debug("Could not complete handshake: %s", e)
         Logger.error_every_n("Could not complete handshake: {}".format(e), 100)
         reader.feed_eof()
         writer.close()
     except OperationCancelled:
         self.logger.error("OperationCancelled")
         reader.feed_eof()
         writer.close()
     except Exception as e:
         self.logger.exception("Unexpected error handling handshake")
         reader.feed_eof()
         writer.close()
Example 24
async def _log_subprocess_output(
    pid: int,
    stream: asyncio.StreamReader,
    logger: logging.Logger,
    level: int,
    log_processor: Optional[Callable[[str], Tuple[str, int]]] = None,
) -> List[bytes]:
    while not stream.at_eof():
        line = await stream.readline()
        if line or not stream.at_eof():
            log_line = line.rstrip(b'\n').decode()
            if log_processor is not None:
                log_line, level = log_processor(log_line)
            logger.log(level, log_line, extra={"process": pid})
    return []
Example 25
 async def _wait_for_event(
     self,
     event_name: str,
     event_type: str,
     reader: asyncio.StreamReader,
 ) -> Any:
     matching = self._match_event(
         event_name,
         event_type,
     )
     if matching:
         return matching
     while True:
         data = await reader.readline()
         line = data.decode('utf-8').rstrip()
         if not len(line) and reader.at_eof():
             raise Exception(
                 'Reached end of output waiting for {0}/{1}'.format(
                 event_name,
                 event_type,
             ))
         log.info(line)
         event = json.loads(line)
         matching = self._match_event(
             event_name,
             event_type,
             event,
         )
         if matching:
             return matching
Example 26
 async def _pipe(self, reader: asyncio.StreamReader,
                 writer: asyncio.StreamWriter):
     try:
         while not reader.at_eof():
             writer.write(await reader.read(self.CHUNK_SIZE))
     finally:
         writer.close()
Example 27
    async def handler(self, reader: asyncio.StreamReader,
                      writer: asyncio.StreamWriter):
        while not reader.at_eof():
            self.data += await reader.read(1)

        if self.data:
            self.event.set()
Example 28
    async def _get_message(cls, reader: asyncio.StreamReader,
                           writer: asyncio.StreamWriter) -> str:
        """Gets the next message.

        Args:
            reader: The reader to receive messages from the client.
            writer: The writer to send messages to the client.

        Returns:
            The next message received from the client.
        """
        message = ""

        try:
            line = await asyncio.wait_for(reader.readline(), timeout=5)
            message = line.decode("latin-1").strip()

        except asyncio.TimeoutError:
            pass

        # Check that we're still connected if we didn't get a message
        if not message and not await cls._send(writer, ["S,SERVER CONNECTED"]):
            raise BrokenPipeError("Client disconnected")

        return message
Example 29
async def test_authenticate(transport, protocol):
    conn = AsyncTelnetTransport(
        "localhost",
        auth_password="******",
        auth_username="******",
        timeout_ops=10,
        timeout_socket=5,
    )
    conn.stdout = StreamReader()
    conn.stdin = StreamWriter(transport,
                              protocol,
                              reader=None,
                              loop=asyncio.get_event_loop())
    # feed data in w/ the username prompt already in it -- auth should then send the username and
    # clear the buffer... then it should read and see the password that we are inserting later
    data = b"\xff\xfd\x18\xff\xfd \xff\xfd#username:\xff\xfd'\xff\xfb\x03\xff\xfd\x01\xff\xfd\x1f\xff\xfb\x05\xff\xfd!\xff\xfb\x03\xff\xfb\x01"
    conn.stdout.feed_data(data)

    async def _put_login_data_in_reader():
        # this sleep is enough that we should *always* see an additional return char that we send
        # when we don't get any output on telnet connections
        await asyncio.sleep(1.5)
        conn.stdout.feed_data(b"Password:"******"lookforthisusername" in conn.stdin.transport.out_buf
    assert b"lookforthispassword" in conn.stdin.transport.out_buf
    # a return for the username, one for the password, and one for the extra return we send when we
    # don't see the password prompt soon enough
    assert conn.stdin.transport.out_buf.count(b"\n") >= 3
Example 30
 async def _listen_for_messages(self, username: str,
                                reader: StreamReader):  #D
     try:
         while (data := await asyncio.wait_for(reader.readline(),
                                               60)) != b'':
             await self._notify_all(f'{username}: {data.decode()}')
         await self._notify_all(f'{username} has left the chat\n')
     except (asyncio.TimeoutError, ConnectionError):
         # Assumed completion: the original except block is truncated in the source;
         # a read timeout or connection error is treated as the client leaving.
         await self._notify_all(f'{username} has left the chat\n')
Example 31
async def test_isalive_false_not_auth():
    conn = AsyncTelnetTransport("localhost")
    conn._isauthenticated = False
    conn.stdout = StreamReader()
    data = b"somethingtoread"
    conn.stdout.feed_data(data)
    assert conn.isalive() is False
Example 32
async def test_read_timeout():
    conn = AsyncTelnetTransport("localhost")
    conn.stdout = StreamReader()
    conn._stdout_binary_transmission = True
    conn.set_timeout(0.01)
    with pytest.raises(ScrapliTimeout):
        await conn.read()
Example 33
 def __init__(self, headers, parser, transport, **kw):
     self.headers = headers
     self.parser = parser
     self.reader = StreamReader(**kw)
     self.reader.set_transport(transport)
     self.feed_data = self.reader.feed_data
     self.feed_eof = self.reader.feed_eof
Example 34
    async def handle_client(self, reader: asyncio.StreamReader,
                            writer: asyncio.StreamWriter):
        addr = writer.get_extra_info('peername')
        log.info("Client connected %r", addr)

        while not reader.at_eof():
            try:
                async with async_timeout.timeout(5):
                    line = await reader.readline()

                if line:
                    metric = line.decode()
                    name, value, timestamp = metric.split(" ", 3)
                    timestamp = float(timestamp)

                    if value == 'nan':
                        value = None
                    else:
                        value = float(value) if '.' in value else int(value)

                    await self.storage.write_async((name, (timestamp, value)))
            except asyncio.CancelledError:
                log.info('Client connection closed after timeout')
                break
            except:  # noqa
                continue

        log.info("Client disconnected %r", addr)
Example 35
 def from_stream(cls, reader: asyncio.StreamReader, fixed_header: MQTTFixedHeader,
                 variable_header: MQTTVariableHeader):
     data = bytearray()
     data_length = fixed_header.remaining_length - variable_header.bytes_length
     length_read = 0
     while length_read < data_length:
         buffer = yield from reader.read(data_length - length_read)
         data.extend(buffer)
         length_read = len(data)
     return cls(data)
Example 36
    def test_get(self, loop):
        result = 39
        result_queue = Queue()
        called_queue = Queue()
        reader = StreamReader()
        unix_socket = UnixSocket(None, loop)
        unix_socket.reader = reader

        async def readline():
            called_queue.put(True)
            return unix_socket.encode(result)

        reader.readline = readline

        async def run():
            unix_socket.ready.set()
            result = await unix_socket.get()
            result_queue.put(result)

        loop.run_until_complete(run())
        check_queue(called_queue, True)
        check_queue(result_queue, result)
Example 37
 def handle_stdin(self, reader: asyncio.StreamReader):
     """
     Handle messages from the agent
     """
     try:
         while not reader.at_eof():
             buf = bytearray()
             while len(buf) != 4:
                 buf += yield from reader.read(4 - len(buf))
             length = struct.unpack('I', bytes(buf))[0]
             buf = bytearray()
             while len(buf) != length:
                 buf += yield from reader.read(length - len(buf))
             message = msgpack.unpackb(bytes(buf), encoding="utf8", use_list=False)
             yield from self.handle_stdin_message(message)
     except asyncio.CancelledError:
         return
     except KeyboardInterrupt:
         return
     except:
         self._logger.exception("Exception occured while reading stdin")
         os._exit(1)  # DIE!
Example 38
def read_fdms_packet(reader: asyncio.StreamReader) -> bytes:
    buffer = bytearray()
    ba = yield from reader.read(1)
    b = ba[0]
    if b > 0x7f:
        b &= 0x7f

    buffer.append(b)

    if b == STX:
        got_etx = False
        while True:
            ba = yield from reader.read(1)
            b = ba[0]
            if b > 0x7f:
                b &= 0x7f
            buffer.append(b)
            if got_etx:
                break
            else:
                got_etx = (b == ETX)

    return buffer
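A hypothetical frame builder (not in the original module), assuming the same STX/ETX constants: it wraps a payload in the framing read_fdms_packet() parses, appending the LRC as the XOR of every byte after STX up to and including ETX, which is the check recomputed in Example 43 below.

def build_fdms_packet(payload: bytes) -> bytes:
    body = payload + bytes((ETX,))
    lrc = 0
    for b in body:
        lrc ^= b
    return bytes((STX,)) + body + bytes((lrc,))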
Example 39
def QueueWaiter(reader: asyncio.StreamReader, writer: asyncio.StreamWriter, queue_name: str):
    """
    A coroutine for waiting upon new items to be placed into the Queue.
    """
    client = writer.get_extra_info("peername")
    sclient = ':'.join(str(_) for _ in client)
    while True:
        try:
            data = yield from reader.read(65536)
        except ConnectionResetError:
            rlogger.info("Client {} closed connection".format(sclient))
            return
        if not data:
            rlogger.info("Client {} closed connection".format(sclient))
            return
        # Unpack
        try:
            sub_data = msgpack.unpackb(data, encoding="utf-8")
        except (msgpack.UnpackException, ValueError) as e:
            rlogger.error("Recieved non-msgpack push from {}".format(sclient))
            continue
        rlogger.debug("Recieved data from client {}: {}".format(sclient, sub_data))
        assert isinstance(sub_data, dict)
        action = sub_data.get("action", -1)
        if action != 0:
            rlogger.error("Received non-push action on push channel from client {} (action: {})"
                          .format(sclient, action))
            continue
        # Get data to place
        data = sub_data.get("data", None)
        if not data:
            rlogger.error("Recieved no data on push channel from client {}".format(sclient))
            continue
        # Increment and get message number
        queues[queue_name][0] += 1
        msgnum = queues[queue_name][0]
        queue = queues[queue_name][1]
        # Put it on the queue
        assert isinstance(queue, asyncio.Queue)
        yield from queue.put([msgnum, data])
        # Respond to the client
        response = {"msgnum": msgnum, "status": 0}
        rlogger.debug("Sending response with message number {}".format(msgnum))
        msgpack.pack(response, writer)
Example 40
def connected_cb(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    """
    A callback for connected clients.
    """
    client = writer.get_extra_info("peername")
    sclient = ':'.join(str(_) for _ in client)
    logger.info("Recieved connection from {}:{}".format(*client))
    # Read a subscription message.
    try:
        sub = yield from reader.read(65536)
    except ConnectionResetError:
        rlogger.info("Client {} closed connection".format(sclient))
        return
    if not sub:
        logger.error("Client {} terminated connection abnormally".format(sclient))
        return
    try:
        sub_data = msgpack.unpackb(sub)
    except (msgpack.UnpackException, ValueError) as e:
        logger.error("Recieved unknown subscription message from {}:{}".format(*client))
        yield from writer.drain()
        writer.close()
        return
    # Get the data from the subscription message.
    if b'queue' not in sub_data:
        logger.error("Received null queue from {}".format(sclient))
        yield from writer.drain()
        writer.close()
        return
    queue_to_sub = sub_data[b"queue"]
    action = sub_data.get(b"action", 0)
    queue_created = False
    if queue_to_sub not in queues:
        queues[queue_to_sub] = [0, asyncio.Queue()]
        logger.debug("Created queue {}".format(queue_to_sub))
        queue_created = True
    logger.debug("Client {} subscribed to queue {} in mode {} ({})".format(sclient, queue_to_sub,
                                                                           action, "push" if not action else "pull"))
    if action == 0:
        loop.create_task(QueueWaiter(reader, writer, queue_to_sub))
    else:
        loop.create_task(QueueSender(reader, writer, queue_to_sub))
    msgpack.pack({"created": queue_created}, writer)
Example 41
async def read_lines_matching(reader: StreamReader, *patterns: Pattern):
    waiting_for = {}
    for (index, pattern) in enumerate(patterns):
        if isinstance(pattern, str):
            pattern = re.compile(pattern)
        waiting_for[pattern] = index

    values = [None] * len(waiting_for)
    while not reader.at_eof():
        if all(values):
            break
        line = await reader.readuntil(b'\r')
        line = line.decode("ascii").strip()
        logger.debug("got line: %s", line)
        for pattern in waiting_for:
            index = waiting_for[pattern]
            if not values[index]:
                match = pattern.fullmatch(line)
                if match:
                    values[index] = match[1]

    return values
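A usage sketch with illustrative patterns (the names are assumptions, not from the original code): each pattern must expose the value of interest as group 1, since read_lines_matching() collects match[1].

import re

async def read_device_info(reader: StreamReader):
    version, serial = await read_lines_matching(
        reader,
        re.compile(r"VERSION=(\S+)"),
        re.compile(r"SERIAL=(\S+)"),
    )
    return version, serial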
Example 42
class HttpBodyReader():
    _expect_sent = None
    _waiting = None

    def __init__(self, headers, parser, transport, **kw):
        self.headers = headers
        self.parser = parser
        self.reader = StreamReader(**kw)
        self.reader.set_transport(transport)
        self.feed_data = self.reader.feed_data
        self.feed_eof = self.reader.feed_eof

    def waiting_expect(self):
        '''``True`` when the client is waiting for 100 Continue.
        '''
        if self._expect_sent is None:
            if (not self.reader.at_eof() and
                    self.headers.has('expect', '100-continue')):
                return True
            self._expect_sent = ''
        return False

    def can_continue(self):
        if self.waiting_expect():
            if self.parser.get_version() < (1, 1):
                raise HttpException(status=417)
            else:
                msg = '%s 100 Continue\r\n\r\n' % http_protocol(self.parser)
                self._expect_sent = msg
                self.reader._transport.write(msg.encode(DEFAULT_CHARSET))

    def fail(self):
        if self.waiting_expect():
            raise HttpException(status=417)

    def read(self, n=-1):
        self.can_continue()
        return self.reader.read(n=n)

    def readline(self):
        self.can_continue()
        return self.reader.readline()

    def readexactly(self, n):
        self.can_continue()
        return self.reader.readexactly(n)
Example 43
def fdms_session(reader: asyncio.StreamReader, writer: asyncio.StreamWriter):
    online = None
    ''':type: (FdmsHeader, FdmsTransaction)'''
    add_on = None
    ''':type: (FdmsHeader, FdmsTransaction)'''
    offline = list()

    writer.write(bytes((ENQ,)))
    yield from writer.drain()

    while True:

        # Get Request
        attempt = 0
        while True:
            try:
                if attempt > 4:
                    return

                request = yield from asyncio.wait_for(read_fdms_packet(reader), timeout=15.0)
                if len(request) == 0:
                    return

                control_byte = request[0]
                if control_byte == STX:
                    lrs = functools.reduce(lambda x, y: x ^ int(y), request[2:-1], int(request[1]))
                    if lrs != request[-1]:
                        raise ValueError('LRS sum')

                    pos, header = parse_header(request)
                    txn = header.create_txn()
                    txn.parse(request[pos:-2])
                    if header.txn_type == FdmsTransactionType.Online.value:
                        if online is None:
                            online = (header, txn)
                        else:
                            add_on = (header, txn)
                    else:
                        offline.append((header, txn))

                    if header.protocol_type == '2':
                        break

                    # Respond with ACK
                    attempt = 0
                    writer.write(bytes((ACK,)))

                elif control_byte == EOT:
                    break

            # Close session
            except asyncio.TimeoutError:
                return

            # Respond with NAK
            except Exception as e:
                logging.getLogger(LOG_NAME).debug('Request error: %s', str(e))
                attempt += 1
                writer.write(bytes((NAK,)))

            yield from writer.drain()

        if online is None:
            return

        # Process Transactions & Send Response
        for txn in offline:
            rs = process_txn(txn)
        offline.clear()

        if add_on is not None:
            process_add_on_txn(online, add_on)
        add_on = None

        rs = process_txn(online)

        # Send Response
        rs_bytes = rs.response()

        if rs.action_code == FdmsActionCode.HostSpecificPoll or rs.action_code == FdmsActionCode.RevisionInquiry:
            writer.write(rs_bytes)
            yield from writer.drain()
        else:
            attempt = 0
            while True:
                if attempt >= 4:
                    return

                writer.write(rs_bytes)
                yield from writer.drain()

                control_byte = 0
                try:
                    while True:
                        rs_head = yield from asyncio.wait_for(reader.read(1), timeout=4.0)
                        if len(rs_head) == 0:
                            return
                        control_byte = rs_head[0] & 0x7f
                        if control_byte == ACK:
                            break
                        elif control_byte == NAK:
                            break
                # Close session
                except asyncio.TimeoutError as e:
                    return

                if control_byte == ACK:
                    break
                else:
                    attempt += 1

            if online[0].wcc in {'B', 'C'}:
                # Send ENQ
                writer.write(bytes((ENQ,)))
                yield from writer.drain()
                continue
            else:
                break

    writer.write(bytes((EOT,)))
    yield from writer.drain()
    if writer.can_write_eof():
        writer.write_eof()