def test_make_delayed_pipeline(self):
        def make_wait_index(idx):
            # Build an async transform that sleeps proportionally to item[idx]
            # and then passes the item through unchanged.
            async def wait(item):
                await asyncio.sleep(item[idx] / 10.)
                return item

            return wait

        TEST_CASE = [
            (0, 0, 0, 7),
            (5, 0, 0, 0),
            (0, 0, 1, 0),
            (1, 1, 1, 1),
            (2, 0, 0, 1),
            (3, 1, 2, 0),
        ]

        q = push_aiter()
        # Chain four delay stages around q (innermost stage keys on index 3),
        # each with a worker count of 10 — equivalent to the nested
        # map_aiter(make_wait_index(0), map_aiter(..., q, 10), ..., 10) form.
        aiter = q
        for idx in (3, 2, 1, 0):
            aiter = map_aiter(make_wait_index(idx), aiter, 10)
        q.push(*TEST_CASE)
        q.stop()
        results = run(get_n(aiter))
        # Items should arrive ordered by total delay, i.e. by tuple sum.
        expected = sorted(results, key=lambda t: sum(t))
        self.assertEqual(results, expected)
示例#2
0
async def api_server(rws_aiter, api, workers=1):
    """
    Serve `api` over the connections streamed by `rws_aiter`.

    An rws_aiter is an aiter which streams (StreamReader, StreamWriter, SocketServer)
    tuples. For a given rws_aiter, create a task which fetches messages from the
    StreamReader, parses them, and turns them into api calls on api.

    You can wait forever on this task. If you close the socket, once all connected clients
    drop off, the task will complete.

    :param rws_aiter: aiter of reader/writer/server bundles, one per connection
    :param api: object whose methods are invoked for incoming messages
    :param workers: number of parallel event handlers (>1 enables parallel_map_aiter)
    """
    # Expand each connection into its stream of parsed (CBOR) events.
    event_aiter = rws_to_event_aiter(rws_aiter, reader_to_cbor_stream)

    response_writer_for_event = make_response_map_for_api(api)

    # Optionally fan event handling out over several workers.
    if workers > 1:
        response_writer_aiter = parallel_map_aiter(response_writer_for_event, workers, event_aiter)
    else:
        response_writer_aiter = map_aiter(response_writer_for_event, event_aiter)

    def to_cbor(response_writer_pair):
        # Serialize a (response, writer) pair into (writer, cbor bytes).
        response, writer = response_writer_pair
        try:
            msg = xform_to_cbor_message(response)
        except Exception as ex:
            # Best effort: report the serialization failure to the client
            # instead of letting one bad response kill the whole pipeline.
            msg = xform_to_cbor_message("problem streaming message: %s" % ex)
        return writer, msg

    cbor_msg_aiter = map_aiter(to_cbor, response_writer_aiter)

    # Drain the pipeline forever, writing each serialized response back out.
    async for writer, cbor_msg in cbor_msg_aiter:
        writer.write(cbor_msg)
async def main():
    """
    Listen on port 7777 and print each completed event.

    NOTE(review): `handle_event` and `stream_reader_writer_to_line_writer_aiter`
    are defined elsewhere; presumably each yielded item is a processed line —
    confirm against their definitions.
    """
    server, aiter = await start_server_aiter(7777)

    # One line-writer aiter per accepted connection...
    line_writer_aiter_aiter = map_aiter(
        functools.partial(stream_reader_writer_to_line_writer_aiter, server),
        aiter)
    # ...merged into a single stream of work items.
    line_writer_aiter = join_aiters(line_writer_aiter_aiter)
    # Handle up to 5 events concurrently.
    completed_event_aiter = map_aiter(handle_event,
                                      line_writer_aiter,
                                      worker_count=5)

    async for line in completed_event_aiter:
        print(line)
示例#4
0
async def start_server(
        self: "ChiaServer",
        on_connect: OnConnectFunc = None) -> asyncio.AbstractServer:
    """
    Launches a listening server on host and port specified, to connect to NodeType nodes. On each
    connection, the on_connect asynchronous generator will be called, and responses will be sent.
    Whenever a new TCP connection is made, a new srwt tuple is sent through the pipeline.

    Returns the underlying asyncio server object.
    """
    # Full nodes and introducers accept anonymous peers; every other node
    # type must present a client certificate.
    require_cert = self._local_type not in (NodeType.FULL_NODE,
                                            NodeType.INTRODUCER)
    ssl_context = ssl_context_for_server(self.root_path,
                                         self.config,
                                         require_cert=require_cert)

    server, aiter = await start_server_aiter(self._port,
                                             host=None,
                                             reuse_address=True,
                                             ssl=ssl_context)

    def add_connection_type(
        srw: Tuple[asyncio.StreamReader, asyncio.StreamWriter]
    ) -> Tuple[asyncio.StreamReader, asyncio.StreamWriter, OnConnectFunc]:
        # Log the peer's TLS identity, then tag the stream pair with the
        # on_connect callback so the pipeline can invoke it later.
        ssl_object = srw[1].get_extra_info(name="ssl_object")
        peer_cert = ssl_object.getpeercert()
        self.log.info(f"Client authed as {peer_cert}")
        return (srw[0], srw[1], on_connect)

    srwt_aiter = map_aiter(add_connection_type, aiter)

    # Push aiters that come from the server into the pipeline
    # (skipped if the pipeline source has already been stopped).
    if not self._srwt_aiter.is_stopped():
        self._srwt_aiter.push(srwt_aiter)

    self.log.info(f"Server started on port {self._port}")
    return server
示例#5
0
    async def start_server(
        self,
        host: str,
        on_connect: OnConnectFunc = None,
    ) -> bool:
        """
        Launches a listening server on host and port specified, to connect to NodeType nodes. On each
        connection, the on_connect asynchronous generator will be called, and responses will be sent.
        Whenever a new TCP connection is made, a new srwt tuple is sent through the pipeline.

        Returns False if a server is already running or the pipeline task has
        finished; True once the listener has been started.
        """
        # Refuse to start twice, or after the processing pipeline has ended.
        if self._server is not None or self._pipeline_task.done():
            return False
        self._host = host

        # NOTE(review): `host` is stored on self but start_server_aiter is
        # called with host=None (bind all interfaces) — confirm intentional.
        self._server, aiter = await start_server_aiter(self._port,
                                                       host=None,
                                                       reuse_address=True)
        if on_connect is not None:
            self._on_inbound_connect = on_connect

        def add_connection_type(
            srw: Tuple[asyncio.StreamReader, asyncio.StreamWriter]
        ) -> Tuple[asyncio.StreamReader, asyncio.StreamWriter, None]:
            # Inbound connections carry no per-connection on_connect callback.
            return (srw[0], srw[1], None)

        srwt_aiter = map_aiter(add_connection_type, aiter)

        # Push all aiters that come from the server, into the pipeline
        asyncio.create_task(self._add_to_srwt_aiter(srwt_aiter))

        log.info(f"Server started on port {self._port}")
        return True
示例#6
0
    def test_filter_pipeline(self):
        """A map stage that flattens one level and drops zeros, followed by
        flatten_aiter, should yield the non-zero items in order."""

        # Renamed from `filter` to avoid shadowing the builtin; flattening
        # plus filtering is expressed as a single comprehension.
        async def drop_zeros(item_list_of_lists):
            return [item
                    for sub_list in item_list_of_lists
                    for item in sub_list
                    if item != 0]

        TEST_CASE = [
            (0, 0, 0, 7),
            (5, 0, 0, 0),
            (0, 0, 1, 0),
            (1, 1, 1, 1),
            (2, 0, 0, 1),
            (3, 1, 2, 0),
        ]

        q = push_aiter()
        aiter = flatten_aiter(map_aiter(drop_zeros, q))
        q.push(TEST_CASE)
        q.stop()
        r = run(get_n(aiter, 12))
        # The 12 non-zero entries of TEST_CASE, row-major order.
        r1 = [7, 5, 1, 1, 1, 1, 1, 2, 1, 3, 1, 2]
        self.assertEqual(r, r1)
示例#7
0
def make_client_server():
    """
    Start a ledger API server on a temporary unix socket and return a client
    proxy connected to it.
    """
    init_logging()
    run = asyncio.get_event_loop().run_until_complete
    path = pathlib.Path(tempfile.mkdtemp(), "port")
    server, aiter = run(start_unix_server_aiter(path))
    # Bundle each (reader, writer) pair with the server for the pipeline.
    rws_aiter = map_aiter(lambda rw: dict(
        reader=rw[0], writer=rw[1], server=server), aiter)
    # Initial block hash: 31 zero bytes followed by 0x01.
    initial_block_hash = bytes(([0] * 31) + [1])
    ledger = ledger_api.LedgerAPI(initial_block_hash, RAM_DB())
    server_task = asyncio.ensure_future(api_server(rws_aiter, ledger))
    remote = run(proxy_for_unix_connection(path))
    # make sure server_task isn't garbage collected
    remote.server_task = server_task
    return remote
async def main():
    """
    Line-echo server on port 7777: prints and echoes each received line,
    closes a connection on a bare newline, and closes the whole server when
    a client sends "quit".
    """
    server, aiter = await start_server_aiter(7777)
    # One (line, writer) stream per connection, merged into a single aiter.
    line_writer_aiter_aiter = map_aiter(
        stream_reader_writer_to_line_writer_aiter, aiter)
    line_writer_aiter = join_aiters(line_writer_aiter_aiter)

    async for line, sw in line_writer_aiter:
        print(line)
        await sw.drain()
        if line == b"\n":
            sw.close()
        # NOTE(review): the echo write below still runs after close() on a
        # blank line — confirm this ordering is intentional.
        sw.write(line)
        if line == b"quit\n":
            server.close()
示例#9
0
def rws_to_event_aiter(rws_aiter, reader_to_message_stream):
    """
    Expand every rws dict streamed by `rws_aiter` into its stream of events
    and merge the per-connection streams into a single aiter.
    """

    def adaptor(rws):
        # Parse messages from this connection's reader, wrapping each one
        # with the rws dict as the event template.
        message_stream = reader_to_message_stream(rws["reader"])
        return message_stream_to_event_stream(rws, message_stream)

    return join_aiters(map_aiter(adaptor, rws_aiter))
示例#10
0
def message_stream_to_event_stream(event_template, message_stream):
    """
    Wrap each item from message_stream in a dictionary copied from
    event_template, storing the original message under the "message" key.
    """

    # Snapshot the template once so later mutations of event_template
    # don't leak into generated events.
    base = dict(event_template)

    def wrap(message):
        return dict(base, message=message)

    return map_aiter(wrap, message_stream)
示例#11
0
    def test_make_pipe(self):
        """Squares pushed values through a map_aiter pipe; results may be
        reordered by the per-item delay, so compare against a sorted list."""

        async def square_slowly(x):
            # Delay proportional to x so larger inputs finish later.
            await asyncio.sleep(x / 100.0)
            return x * x

        q = push_aiter()
        aiter = map_aiter(square_slowly, q)
        for value in range(4):
            q.push(value)
        for value in range(3, 9):
            q.push(value)
        got = run(get_n(aiter, 10))
        q.stop()
        got.extend(run(get_n(aiter)))
        pushed = list(range(4)) + list(range(3, 9))
        expected = sorted(v * v for v in pushed)
        self.assertEqual(got, expected)
示例#12
0
    async def start_server(self, on_connect: OnConnectFunc = None) -> bool:
        """
        Launches a listening server on host and port specified, to connect to NodeType nodes. On each
        connection, the on_connect asynchronous generator will be called, and responses will be sent.
        Whenever a new TCP connection is made, a new srwt tuple is sent through the pipeline.

        Returns False if a server is already running or the pipeline task has
        completed; True once the listener is up.
        """
        if self._server is not None or self._pipeline_task.done():
            return False

        # TLS context built without hostname verification; trust is decided
        # by verify_mode below instead.
        ssl_context = ssl._create_unverified_context(
            purpose=ssl.Purpose.CLIENT_AUTH)
        private_cert, private_key = self.loadSSLConfig("ssl", self.root_path,
                                                       self.config)
        ssl_context.load_cert_chain(certfile=private_cert, keyfile=private_key)
        # NOTE(review): the CA store is loaded with our own cert, so clients
        # are apparently expected to present that same cert — confirm.
        ssl_context.load_verify_locations(private_cert)

        # Public-facing node types accept anonymous clients; everything else
        # requires a client certificate.
        if (self._local_type == NodeType.FULL_NODE
                or self._local_type == NodeType.INTRODUCER):
            ssl_context.verify_mode = ssl.CERT_NONE
        else:
            ssl_context.verify_mode = ssl.CERT_REQUIRED

        self._server, aiter = await start_server_aiter(self._port,
                                                       host=None,
                                                       reuse_address=True,
                                                       ssl=ssl_context)

        if on_connect is not None:
            self._on_inbound_connect = on_connect

        def add_connection_type(
            srw: Tuple[asyncio.StreamReader, asyncio.StreamWriter]
        ) -> Tuple[asyncio.StreamReader, asyncio.StreamWriter, None]:
            # Log the peer's TLS certificate, then tag the stream pair with
            # None (inbound peers get no per-connection on_connect).
            ssl_object = srw[1].get_extra_info(name="ssl_object")
            peer_cert = ssl_object.getpeercert()
            self.log.info(f"Client authed as {peer_cert}")
            return (srw[0], srw[1], None)

        srwt_aiter = map_aiter(add_connection_type, aiter)

        # Push all aiters that come from the server, into the pipeline
        self._tasks.append(
            asyncio.create_task(self._add_to_srwt_aiter(srwt_aiter)))

        self.log.info(f"Server started on port {self._port}")
        return True
示例#13
0
def test_client_server():
    """End-to-end smoke test: run the ledger API over a unix socket and
    exercise it via client_test, then tear the server down."""
    init_logging()

    run = asyncio.get_event_loop().run_until_complete

    socket_path = pathlib.Path(tempfile.mkdtemp(), "port")

    server, aiter = run(start_unix_server_aiter(socket_path))

    def to_rws(rw):
        # Bundle each (reader, writer) pair with the server object.
        return dict(reader=rw[0], writer=rw[1], server=server)

    rws_aiter = map_aiter(to_rws, aiter)

    # Initial block hash: 31 zero bytes followed by 0x01.
    initial_block_hash = bytes(([0] * 31) + [1])
    ledger = ledger_api.LedgerAPI(initial_block_hash, RAM_DB())
    server_task = asyncio.ensure_future(api_server(rws_aiter, ledger))

    run(client_test(socket_path))
    server_task.cancel()
示例#14
0
    def test_syncmap(self):
        """A synchronous (non-async) map function should work in map_aiter,
        recording side effects in push order."""

        def make_sync_transformation_f(seen):
            # Record each item as a side effect, then map it to item + 1.
            def sync_transformation_f(item):
                seen.append(item)
                return item + 1
            return sync_transformation_f

        seen = []
        q = push_aiter()
        q.push(5, 4, 3)
        q.stop()
        # available_iter exposes queued items without consuming the aiter.
        self.assertEqual(list(q.available_iter()), [5, 4, 3])
        aiter = map_aiter(make_sync_transformation_f(seen), q)
        self.assertEqual(run(get_n(aiter)), [6, 5, 4])
        self.assertEqual(seen, [5, 4, 3])
示例#15
0
    def initialize_pipeline(self, aiter, api: Any,
                            server_port: int) -> asyncio.Task:
        """
        A pipeline that starts with (StreamReader, StreamWriter), maps it though to
        connections, messages, executes a local API call, and returns responses.

        :param aiter: stream of (StreamReader, StreamWriter) pairs
        :param api: object whose methods handle incoming messages
        :param server_port: local port, passed to connection construction
        :return: the forever-running send task for the outbound side
        """

        # Maps a stream reader, writer and NodeType to a Connection object
        connections_aiter = map_aiter(
            partial_func.partial_async(self.stream_reader_writer_to_connection,
                                       server_port),
            aiter,
        )
        # Performs a handshake with the peer
        handshaked_connections_aiter = join_aiters(
            map_aiter(self.perform_handshake, connections_aiter))
        # Fork the handshaked stream: one branch feeds message reading,
        # the other triggers on_connect outbound messages.
        forker = aiter_forker(handshaked_connections_aiter)
        handshake_finished_1 = forker.fork(is_active=True)
        handshake_finished_2 = forker.fork(is_active=True)

        # Reads messages one at a time from the TCP connection
        # (up to 100 connections serviced concurrently)
        messages_aiter = join_aiters(
            map_aiter(self.connection_to_message, handshake_finished_1, 100))

        # Handles each message one at a time, and yields responses to send back or broadcast
        responses_aiter = join_aiters(
            map_aiter(
                partial_func.partial_async_gen(self.handle_message, api),
                messages_aiter,
                100,
            ))

        # Uses a forked aiter, and calls the on_connect function to send some initial messages
        # as soon as the connection is established
        on_connect_outbound_aiter = join_aiters(
            map_aiter(self.connection_to_outbound, handshake_finished_2, 100))

        # Also uses the instance variable _outbound_aiter, which clients can use to send messages
        # at any time, not just on_connect.
        # (None means "no specific connection"; see expand_outbound_messages.)
        outbound_aiter_mapped = map_aiter(lambda x: (None, x),
                                          self._outbound_aiter)

        # Merge the three outbound sources into one stream.
        responses_aiter = join_aiters(
            iter_to_aiter([
                responses_aiter, on_connect_outbound_aiter,
                outbound_aiter_mapped
            ]))

        # For each outbound message, replicate for each peer that we need to send to
        expanded_messages_aiter = join_aiters(
            map_aiter(self.expand_outbound_messages, responses_aiter, 100))

        # This will run forever. Sends each message through the TCP connection, using the
        # length encoding and CBOR serialization
        async def serve_forever():
            async for connection, message in expanded_messages_aiter:
                log.info(
                    f"-> {message.function} to peer {connection.get_peername()}"
                )
                try:
                    await connection.send(message)
                except (
                        ConnectionResetError,
                        BrokenPipeError,
                        RuntimeError,
                        TimeoutError,
                ) as e:
                    # Peer vanished mid-send: log and drop the connection.
                    log.error(
                        f"Cannot write to {connection}, already closed. Error {e}."
                    )
                    self.global_connections.close(connection, True)

        # We will return a task for this, so user of start_chia_server or start_chia_client can wait until
        # the server is closed.
        return asyncio.get_running_loop().create_task(serve_forever())
示例#16
0
def run_ledger_api(server, aiter):
    """Return the api_server coroutine serving a fresh in-RAM ledger over
    the connections streamed by `aiter`."""
    db = RAM_DB()
    # Initial block hash: 31 zero bytes followed by 0x01.
    INITIAL_BLOCK_HASH = bytes(([0] * 31) + [1])
    ledger = ledger_api.LedgerAPI(INITIAL_BLOCK_HASH, db)

    def to_rws(rw):
        return dict(reader=rw[0], writer=rw[1], server=server)

    rws_aiter = map_aiter(to_rws, aiter)
    return api_server(rws_aiter, ledger)
示例#17
0
def serve_api_on_unix_port(api, path):
    """Start serving `api` on a unix socket at `path`; returns the
    api_server future."""
    run = asyncio.get_event_loop().run_until_complete
    server, aiter = run(start_unix_server_aiter(path))

    def to_rws(rw):
        return dict(reader=rw[0], writer=rw[1], server=server)

    rws_aiter = map_aiter(to_rws, aiter)
    # 20 workers handle API calls concurrently.
    return asyncio.ensure_future(api_server(rws_aiter, api, 20))
示例#18
0
async def initialize_pipeline(
    srwt_aiter,
    api: Any,
    server_port: int,
    outbound_aiter: push_aiter,
    global_connections: PeerConnections,
    local_type: NodeType,
    node_id: bytes32,
    network_id: bytes32,
    log: logging.Logger,
):
    """
    A pipeline that starts with (StreamReader, StreamWriter), maps it though to
    connections, messages, executes a local API call, and returns responses.

    :param srwt_aiter: aiter of aiters of (reader, writer, on_connect) tuples
    :param api: object whose methods handle incoming messages
    :param server_port: local port, advertised in the handshake
    :param outbound_aiter: push_aiter clients use to send messages at any time
    :param global_connections: registry used to track/close peer connections
    :param local_type: this node's NodeType, sent in the handshake
    :param node_id: this node's id, sent in the handshake
    :param network_id: network identifier, sent in the handshake
    :param log: logger for pipeline construction

    Runs until the expanded-messages stream ends, sending each outbound
    message over its TCP connection.
    """
    # Maps a stream reader, writer and NodeType to a Connection object
    connections_aiter = map_aiter(
        partial_func.partial_async(
            stream_reader_writer_to_connection, server_port, local_type, log,
        ),
        join_aiters(srwt_aiter),
    )

    def add_global_connections(connection):
        # Pair each connection with the shared registry for later stages.
        return connection, global_connections

    connections_with_global_connections_aiter = map_aiter(
        add_global_connections, connections_aiter
    )

    # Performs a handshake with the peer

    outbound_handshake = Message(
        "handshake",
        Handshake(
            network_id, protocol_version, node_id, uint16(server_port), local_type,
        ),
    )

    handshaked_connections_aiter = join_aiters(
        map_aiter(
            lambda _: perform_handshake(_, srwt_aiter, outbound_handshake),
            connections_with_global_connections_aiter,
        )
    )
    # Fork the handshaked stream: one branch reads messages, the other
    # triggers on_connect outbound messages.
    forker = aiter_forker(handshaked_connections_aiter)
    handshake_finished_1 = forker.fork(is_active=True)
    handshake_finished_2 = forker.fork(is_active=True)

    # Reads messages one at a time from the TCP connection
    # (up to 100 connections serviced concurrently)
    messages_aiter = join_aiters(
        map_aiter(connection_to_message, handshake_finished_1, 100)
    )

    # Handles each message one at a time, and yields responses to send back or broadcast
    responses_aiter = join_aiters(
        map_aiter(
            partial_func.partial_async_gen(handle_message, api), messages_aiter, 100,
        )
    )

    # Uses a forked aiter, and calls the on_connect function to send some initial messages
    # as soon as the connection is established
    on_connect_outbound_aiter = join_aiters(
        map_aiter(connection_to_outbound, handshake_finished_2, 100)
    )

    # Also uses the instance variable _outbound_aiter, which clients can use to send messages
    # at any time, not just on_connect.
    # (None means "no specific connection"; see expand_outbound_messages.)
    outbound_aiter_mapped = map_aiter(
        lambda x: (None, x, global_connections), outbound_aiter
    )

    # Merge the three outbound sources into one stream.
    responses_aiter = join_aiters(
        iter_to_aiter(
            [responses_aiter, on_connect_outbound_aiter, outbound_aiter_mapped]
        )
    )

    # For each outbound message, replicate for each peer that we need to send to
    expanded_messages_aiter = join_aiters(
        map_aiter(expand_outbound_messages, responses_aiter, 100)
    )

    # This will run forever. Sends each message through the TCP connection, using the
    # length encoding and CBOR serialization
    async for connection, message in expanded_messages_aiter:
        if message is None:
            # Does not ban the peer, this is just a graceful close of connection.
            global_connections.close(connection, True)
            continue
        if connection.is_closing():
            connection.log.info(
                f"Closing, so will not send {message.function} to peer {connection.get_peername()}"
            )
            continue
        connection.log.info(
            f"-> {message.function} to peer {connection.get_peername()}"
        )
        try:
            await connection.send(message)
        except (RuntimeError, TimeoutError, OSError,) as e:
            # Peer vanished mid-send: warn and drop the connection.
            connection.log.warning(
                f"Cannot write to {connection}, already closed. Error {e}."
            )
            global_connections.close(connection, True)