Example 1
    def test_join_aiters(self):
        int_vals = [1, 2, 3, 4]
        str_vals = "abcdefg"

        list_of_lists = [int_vals, str_vals]
        iter_of_aiters = [iter_to_aiter(_) for _ in list_of_lists]
        aiter_of_aiters = iter_to_aiter(iter_of_aiters)
        r = run(get_n(join_aiters(aiter_of_aiters)))

        r1 = [_ for _ in r if isinstance(_, int)]
        r2 = [_ for _ in r if isinstance(_, str)]
        self.assertEqual(r1, int_vals)
        self.assertEqual(r2, list(str_vals))
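The test above drives join_aiters through the get_n and run test helpers. Stripped of that harness, the same behaviour can be observed with plain asyncio; the following is a minimal sketch assuming iter_to_aiter and join_aiters are importable from the aiter package.

import asyncio
from aiter import iter_to_aiter, join_aiters

async def demo():
    # wrap each plain iterable in its own async iterator
    sources = [iter_to_aiter([1, 2, 3, 4]), iter_to_aiter("abcdefg")]
    # join_aiters merges an aiter of aiters into a single stream;
    # order is preserved within each source, but not across sources
    async for item in join_aiters(iter_to_aiter(sources)):
        print(item)

asyncio.run(demo())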
Example 2
async def main():
    server, aiter = await start_server_aiter(7777)

    line_writer_aiter_aiter = map_aiter(
        functools.partial(stream_reader_writer_to_line_writer_aiter, server),
        aiter)
    line_writer_aiter = join_aiters(line_writer_aiter_aiter)
    completed_event_aiter = map_aiter(handle_event,
                                      line_writer_aiter,
                                      worker_count=5)

    async for line in completed_event_aiter:
        print(line)
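Here map_aiter is given worker_count=5, so up to five events can be processed by handle_event concurrently. As a rough standalone sketch of that behaviour (with a hypothetical slow_handler standing in for the example's handle_event):

import asyncio
from aiter import iter_to_aiter, map_aiter

async def slow_handler(event):
    # stand-in for handle_event: pretend each event needs 0.1 s of I/O
    await asyncio.sleep(0.1)
    return f"handled {event}"

async def demo():
    events = iter_to_aiter(range(10))
    # with worker_count=5, up to five handlers run at once,
    # so results may arrive out of source order
    async for result in map_aiter(slow_handler, events, worker_count=5):
        print(result)

asyncio.run(demo())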
Example 3
async def main():
    server, aiter = await start_server_aiter(7777)
    line_writer_aiter_aiter = map_aiter(
        stream_reader_writer_to_line_writer_aiter, aiter)
    line_writer_aiter = join_aiters(line_writer_aiter_aiter)

    async for line, sw in line_writer_aiter:
        print(line)
        sw.write(line)
        await sw.drain()
        if line == b"\n":
            sw.close()
        if line == b"quit\n":
            server.close()
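The loop above behaves as a line-based echo server. If it is running locally, a throwaway client such as the hypothetical sketch below (plain asyncio, not part of the example) can be used to exercise it, assuming the server is listening on port 7777.

import asyncio

async def poke_server():
    # assumes the echo server from the example above is running on localhost:7777
    reader, writer = await asyncio.open_connection("127.0.0.1", 7777)
    writer.write(b"hello\n")
    await writer.drain()
    print(await reader.readline())  # expect the echoed b"hello\n"
    writer.close()
    await writer.wait_closed()

asyncio.run(poke_server())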
Example 4
def rws_to_event_aiter(rws_aiter, reader_to_message_stream):

    def rws_to_reader_event_template_adaptor(rws):
        return rws, rws["reader"]

    def reader_event_template_to_event_stream_adaptor(rws_reader):
        rws, reader = rws_reader
        return message_stream_to_event_stream(rws, reader_to_message_stream(reader))

    def adaptor(rws):
        return reader_event_template_to_event_stream_adaptor(
            rws_to_reader_event_template_adaptor(rws))

    return join_aiters(map_aiter(adaptor, rws_aiter))
Example 5
    def test_join_aiters_1(self):
        # make sure nothing's dropped
        # even if lots of events come in at once
        main_aiter = push_aiter()
        child_aiters = []
        aiter = join_aiters(main_aiter)

        child_aiters.append(push_aiter())
        child_aiters[0].push(100)
        main_aiter.push(child_aiters[0])

        t = run(get_n(aiter, 1))
        self.assertEqual(t, [100])

        child_aiters.append(push_aiter())
        child_aiters[0].push(101)
        child_aiters[1].push(200)
        child_aiters[1].push(201)
        main_aiter.push(child_aiters[1])

        t = run(get_n(aiter, 3))
        self.assertEqual(set(t), set([101, 200, 201]))

        for _ in range(3):
            child_aiters.append(push_aiter())
            main_aiter.push(child_aiters[-1])
        for i, ca in enumerate(child_aiters):
            ca.push((i + 1) * 100)
            ca.push((i + 1) * 100 + 1)

        t = run(get_n(aiter, len(child_aiters) * 2))
        self.assertEqual(
            set(t), set([100, 101, 200, 201, 300, 301, 400, 401, 500, 501]))

        child_aiters[-1].push(5000)
        main_aiter.stop()
        t = run(get_n(aiter, 1))
        self.assertEqual(t, [5000])

        for ca in child_aiters:
            ca.push(99)
            ca.stop()
        t = run(get_n(aiter))
        self.assertEqual(t, [99] * len(child_aiters))
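The test leans on push_aiter, which lets ordinary synchronous code feed an async iterator with push() and terminate it with stop(). A minimal sketch of the nested pattern being tested (child push_aiters delivered through an outer push_aiter and merged by join_aiters) might look like this:

import asyncio
from aiter import push_aiter, join_aiters

async def demo():
    outer = push_aiter()   # stream of child streams
    child = push_aiter()   # one child stream
    outer.push(child)

    child.push("first")
    child.push("second")
    child.stop()           # no more items from this child
    outer.stop()           # no more children will arrive

    # join_aiters drains every child and ends once all of them have stopped
    async for item in join_aiters(outer):
        print(item)

asyncio.run(demo())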
Example 6
    def initialize_pipeline(self, aiter, api: Any,
                            server_port: int) -> asyncio.Task:
        """
        A pipeline that starts with (StreamReader, StreamWriter), maps it through to
        connections, messages, executes a local API call, and returns responses.
        """

        # Maps a stream reader, writer and NodeType to a Connection object
        connections_aiter = map_aiter(
            partial_func.partial_async(self.stream_reader_writer_to_connection,
                                       server_port),
            aiter,
        )
        # Performs a handshake with the peer
        handshaked_connections_aiter = join_aiters(
            map_aiter(self.perform_handshake, connections_aiter))
        forker = aiter_forker(handshaked_connections_aiter)
        handshake_finished_1 = forker.fork(is_active=True)
        handshake_finished_2 = forker.fork(is_active=True)

        # Reads messages one at a time from the TCP connection
        messages_aiter = join_aiters(
            map_aiter(self.connection_to_message, handshake_finished_1, 100))

        # Handles each message one at a time, and yields responses to send back or broadcast
        responses_aiter = join_aiters(
            map_aiter(
                partial_func.partial_async_gen(self.handle_message, api),
                messages_aiter,
                100,
            ))

        # Uses a forked aiter, and calls the on_connect function to send some initial messages
        # as soon as the connection is established
        on_connect_outbound_aiter = join_aiters(
            map_aiter(self.connection_to_outbound, handshake_finished_2, 100))

        # Also uses the instance variable _outbound_aiter, which clients can use to send messages
        # at any time, not just on_connect.
        outbound_aiter_mapped = map_aiter(lambda x: (None, x),
                                          self._outbound_aiter)

        responses_aiter = join_aiters(
            iter_to_aiter([
                responses_aiter, on_connect_outbound_aiter,
                outbound_aiter_mapped
            ]))

        # For each outbound message, replicate for each peer that we need to send to
        expanded_messages_aiter = join_aiters(
            map_aiter(self.expand_outbound_messages, responses_aiter, 100))

        # This will run forever. Sends each message through the TCP connection, using the
        # length encoding and CBOR serialization
        async def serve_forever():
            async for connection, message in expanded_messages_aiter:
                log.info(
                    f"-> {message.function} to peer {connection.get_peername()}"
                )
                try:
                    await connection.send(message)
                except (
                        ConnectionResetError,
                        BrokenPipeError,
                        RuntimeError,
                        TimeoutError,
                ) as e:
                    log.error(
                        f"Cannot write to {connection}, already closed. Error {e}."
                    )
                    self.global_connections.close(connection, True)

        # We will return a task for this, so the caller of start_chia_server or
        # start_chia_client can wait until the server is closed.
        return asyncio.get_running_loop().create_task(serve_forever())
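The pipeline forks the handshaked connection stream with aiter_forker so that two downstream stages (message reading and on_connect sends) each see every connection. A small sketch of aiter_forker in isolation, consuming both forks concurrently as the pipeline does, could look like the following; the exact buffering behaviour of forks is an assumption here.

import asyncio
from aiter import aiter_forker, iter_to_aiter

async def consume(name, fork):
    async for item in fork:
        print(name, item)

async def demo():
    source = iter_to_aiter([1, 2, 3])
    forker = aiter_forker(source)
    # each active fork is expected to receive every item from the source
    fork_a = forker.fork(is_active=True)
    fork_b = forker.fork(is_active=True)
    await asyncio.gather(consume("a", fork_a), consume("b", fork_b))

asyncio.run(demo())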
Example 7
async def initialize_pipeline(
    srwt_aiter,
    api: Any,
    server_port: int,
    outbound_aiter: push_aiter,
    global_connections: PeerConnections,
    local_type: NodeType,
    node_id: bytes32,
    network_id: bytes32,
    log: logging.Logger,
):
    """
    A pipeline that starts with (StreamReader, StreamWriter), maps it through to
    connections, messages, executes a local API call, and returns responses.
    """
    # Maps a stream reader, writer and NodeType to a Connection object
    connections_aiter = map_aiter(
        partial_func.partial_async(
            stream_reader_writer_to_connection, server_port, local_type, log,
        ),
        join_aiters(srwt_aiter),
    )

    def add_global_connections(connection):
        return connection, global_connections

    connections_with_global_connections_aiter = map_aiter(
        add_global_connections, connections_aiter
    )

    # Performs a handshake with the peer

    outbound_handshake = Message(
        "handshake",
        Handshake(
            network_id, protocol_version, node_id, uint16(server_port), local_type,
        ),
    )

    handshaked_connections_aiter = join_aiters(
        map_aiter(
            lambda _: perform_handshake(_, srwt_aiter, outbound_handshake),
            connections_with_global_connections_aiter,
        )
    )
    forker = aiter_forker(handshaked_connections_aiter)
    handshake_finished_1 = forker.fork(is_active=True)
    handshake_finished_2 = forker.fork(is_active=True)

    # Reads messages one at a time from the TCP connection
    messages_aiter = join_aiters(
        map_aiter(connection_to_message, handshake_finished_1, 100)
    )

    # Handles each message one at a time, and yields responses to send back or broadcast
    responses_aiter = join_aiters(
        map_aiter(
            partial_func.partial_async_gen(handle_message, api), messages_aiter, 100,
        )
    )

    # Uses a forked aiter, and calls the on_connect function to send some initial messages
    # as soon as the connection is established
    on_connect_outbound_aiter = join_aiters(
        map_aiter(connection_to_outbound, handshake_finished_2, 100)
    )

    # Also uses the instance variable _outbound_aiter, which clients can use to send messages
    # at any time, not just on_connect.
    outbound_aiter_mapped = map_aiter(
        lambda x: (None, x, global_connections), outbound_aiter
    )

    responses_aiter = join_aiters(
        iter_to_aiter(
            [responses_aiter, on_connect_outbound_aiter, outbound_aiter_mapped]
        )
    )

    # For each outbound message, replicate for each peer that we need to send to
    expanded_messages_aiter = join_aiters(
        map_aiter(expand_outbound_messages, responses_aiter, 100)
    )

    # This will run forever. Sends each message through the TCP connection, using the
    # length encoding and CBOR serialization
    async for connection, message in expanded_messages_aiter:
        if message is None:
            # Does not ban the peer, this is just a graceful close of connection.
            global_connections.close(connection, True)
            continue
        if connection.is_closing():
            connection.log.info(
                f"Closing, so will not send {message.function} to peer {connection.get_peername()}"
            )
            continue
        connection.log.info(
            f"-> {message.function} to peer {connection.get_peername()}"
        )
        try:
            await connection.send(message)
        except (RuntimeError, TimeoutError, OSError) as e:
            connection.log.warning(
                f"Cannot write to {connection}, already closed. Error {e}."
            )
            global_connections.close(connection, True)