Example #1
async def test_gui_launches(nursery: trio.Nursery, tmp_path: pathlib.Path,
                            launch_command: typing.List[str]) -> None:
    # TODO: this should be written to use logging features to see where the GUI has
    # traversed to.

    debug_path = tmp_path.joinpath("debug_file")
    debug_bytes = b"lkjflkjnlknrlfaljfdsaoivjxcewa\n981439874298785379876298349887\n"

    async def run() -> None:
        # Remember that many exceptions will be caught sufficiently to present in
        # a dialog which will keep the process running indefinitely.

        await trio.run_process(
            [*launch_command, "gui"],
            env={
                **os.environ,
                "SSST_DEBUG_FILE": os.fspath(debug_path),
                "SSST_DEBUG_BYTES": debug_bytes.decode("ASCII"),
            },
        )

    nursery.start_soon(run)

    with trio.fail_after(seconds=40):
        while True:
            await trio.sleep(0.2)

            if debug_path.exists():
                break

    assert debug_path.read_bytes() == debug_bytes
Example #2
async def noise_conn_factory(
        nursery: trio.Nursery
) -> AsyncIterator[Tuple[ISecureConn, ISecureConn]]:
    local_transport = cast(
        NoiseTransport, noise_transport_factory(create_secp256k1_key_pair()))
    remote_transport = cast(
        NoiseTransport, noise_transport_factory(create_secp256k1_key_pair()))

    local_secure_conn: Optional[ISecureConn] = None
    remote_secure_conn: Optional[ISecureConn] = None

    async def upgrade_local_conn() -> None:
        nonlocal local_secure_conn
        local_secure_conn = await local_transport.secure_outbound(
            local_conn, remote_transport.local_peer)

    async def upgrade_remote_conn() -> None:
        nonlocal remote_secure_conn
        remote_secure_conn = await remote_transport.secure_inbound(remote_conn)

    async with raw_conn_factory(nursery) as conns:
        local_conn, remote_conn = conns
        async with trio.open_nursery() as nursery:
            nursery.start_soon(upgrade_local_conn)
            nursery.start_soon(upgrade_remote_conn)
        if local_secure_conn is None or remote_secure_conn is None:
            raise Exception(
                "local or remote secure conn has not been successfully upgraded"
                f"local_secure_conn={local_secure_conn}, remote_secure_conn={remote_secure_conn}"
            )
        yield local_secure_conn, remote_secure_conn
Example #3
    async def receive_load_blocks(
            self, blocks: List[BlockAccess],
            nursery: trio.Nursery) -> "MemoryReceiveChannel[BlockAccess]":
        """
           Raises:
               FSError
               FSRemoteBlockNotFound
               FSBackendOfflineError
               FSWorkspaceInMaintenance
           """
        blocks_iter = iter(blocks)

        send_channel, receive_channel = open_memory_channel[BlockAccess](
            math.inf)

        async def _loader(
                send_channel: "MemorySendChannel[BlockAccess]") -> None:
            async with send_channel:
                while True:
                    access = next(blocks_iter, None)
                    if access is None:
                        break
                    await self.load_block(access)
                    await send_channel.send(access)

        async with send_channel:
            for _ in range(4):
                nursery.start_soon(_loader, send_channel.clone())

        return receive_channel
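
A hedged consumption sketch for the method above: loader and accesses are hypothetical stand-ins for an object exposing receive_load_blocks and a list of BlockAccess values; only the signature shown above is relied on.

async def prefetch(loader, accesses, nursery: trio.Nursery) -> None:
    loaded = []
    receive_channel = await loader.receive_load_blocks(accesses, nursery)
    async with receive_channel:
        # The four _loader tasks close their send-channel clones once blocks_iter is
        # exhausted, which ends this loop.
        async for access in receive_channel:
            loaded.append(access)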
Example #4
def run(nursery: trio.Nursery, async_fn: Callable[..., Awaitable[Any]], *args) -> Future:
    """Run an async function and capture its result in a Future. This is the main entrypoint for trio-future.

    Note that this is a synchronous function; it will immediately return a :class:`Future` object. This object
    can be used to access the return value of ``async_fn``. However, the function will *not* have begun execution.
    Under the hood, we will pass the function to ``nursery.start_soon``; its execution will begin when we
    next defer to the scheduler.

    :param nursery: Nursery in which to run the function
    :type nursery: trio.Nursery
    :param async_fn: A trio-flavored async function to run. Positional arguments may be passed as trailing args, keyword arguments must use ``functools.partial`` or similar.
    :type async_fn: Callable[..., Awaitable[Any]]
    :return: A Future object allowing access to the return value of ``async_fn``.
    :rtype: Future
    """
    # Set buffer size to 1 so that the producer can send a single result
    # without blocking on a receiver.
    send_chan, recv_chan = trio.open_memory_channel(1)

    async def producer(*args):
        return_val = await outcome.acapture(async_fn, *args)
        # Shield sending the result from parent cancellation, so that the Future can still
        # record the outcome of the operation (including that it was cancelled).
        # Note that the channel is buffered and only sent to from here, so it should not block.
        with trio.CancelScope(shield=True):
            async with send_chan:
                await send_chan.send(return_val)

    nursery.start_soon(producer, *args)

    return Future(recv_chan, nursery)
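
A minimal usage sketch for run(): fetch_data and its URL are hypothetical, and reading the value back out of the returned Future goes through the library's own Future API (Example #9 shows gather() reading future.result_chan internally), which is not exercised here.

async def fetch_data(url: str) -> str:
    await trio.sleep(1)
    return f"data from {url}"

async def main() -> None:
    async with trio.open_nursery() as nursery:
        # run() returns immediately; fetch_data only starts at the next checkpoint.
        future = run(nursery, fetch_data, "https://example.org")
        # The result is retrieved later through the Future object (not shown here).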
Example #5
    async def _bg_key_exchange(
        self,
        nursery: trio.Nursery,
        whitelist: Optional[List[PublicKey]] = None
            ) -> None:

        # send key right away and await remote key
        nursery.start_soon(
            partial(
                self.send_raw,
                bytes(self.key.public_key),
                encrypted=False
                )
            )

        async with self.inbound.subscribe(
            lambda *args: isinstance(args[0], bytes) and (len(args[0]) == 32),
            history=True
                ) as pkqueue:
            data = await pkqueue.receive()

        rkey = PublicKey(data)

        # drop context if not in whitelist
        if (whitelist is not None) and \
                (rkey not in whitelist):
            await self.inbound.send(UDPContext.F_DROPPED)
            return

        self.remote_pkey = rkey
        self.boxes[self.addr] = Box(self.key, self.remote_pkey)

        await self.inbound.send(UDPContext.F_KEYEX)

        return None
Example #6
def jsonrpc_client(
    transport: BaseTransport,
    nursery: trio.Nursery,
) -> JsonRpcConnection:
    """ Create a JSON-RPC peer instance using the specified transport. """
    peer = JsonRpcConnection(transport, JsonRpcConnectionType.CLIENT)
    nursery.start_soon(peer._background_task)
    return peer
Example #7
    async def worker(self, nursery: trio.Nursery) -> None:
        while True:
            url = await self._queue.get()
            request_delay = await self._get_request_delay(url)
            if request_delay == -1:  # url is not accessible
                self.robots_excluded_urls.add(url)
                self._queue.task_done()
                continue
            nursery.start_soon(self._handle_url, url)
            await trio.sleep(request_delay)
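
A hedged sketch of how several such workers might be launched, assuming a hypothetical crawler object exposing the worker method above and a caller-chosen concurrency count; the loop runs until the surrounding scope is cancelled.

async def crawl(crawler, concurrency: int = 5) -> None:
    async with trio.open_nursery() as nursery:
        # Each worker pulls URLs from the shared queue and spawns _handle_url tasks
        # into this same nursery, pacing itself with the per-URL request delay.
        for _ in range(concurrency):
            nursery.start_soon(crawler.worker, nursery)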
Example #8
def jsonrpc_server(
    transport: BaseTransport,
    nursery: trio.Nursery,
    request_buffer_len: int = 1,
) -> JsonRpcConnection:
    """ Create a JSON-RPC peer instance using the specified transport. """
    request_buffer_send, request_buffer_recv = trio.open_memory_channel(
        request_buffer_len)

    peer = JsonRpcConnection(transport, JsonRpcConnectionType.SERVER)
    nursery.start_soon(peer._background_task)
    return peer
Example #9
def gather(nursery: trio.Nursery, futures: List[Future]) -> Future:
    """Concurrently run multiple Futures.

    This function will allow the provided Futures to run concurrently, and gather their results
    into a single :class:`Future` containing the results of all the inputs.
    That is, if each input Future contained an ``int``, then the returned Future will contain a ``List[int]``. In practice,
    the types need not be homogenous. The list enclosed in the returned Future will have the same ordering as the input list.

    If any Futures throw an exception, then the output Future will contain those exceptions, wrapped in a ``trio.MultiError``.

    :param nursery: Nursery that manages the concurrent execution of the provided futures
    :type nursery: trio.Nursery
    :param futures: Futures to run
    :type futures: List[Future]
    :return: A Future containing the results of all the provided futures.
    :rtype: Future
    """
    result_list = [None] * len(futures)
    parent_send_chan, parent_recv_chan = trio.open_memory_channel(0)
    child_send_chan, child_recv_chan = trio.open_memory_channel(0)

    async def producer():
        async with child_send_chan:
            for i in range(len(futures)):
                nursery.start_soon(child_producer, i, child_send_chan.clone())

    async def child_producer(i: int, out_chan):
        async with futures[i].result_chan:
            return_val = await futures[i].result_chan.receive()
            result_list[i] = return_val
            async with out_chan:
                await out_chan.send(i)

    async def receiver():
        async with child_recv_chan:
            async for i in child_recv_chan:
                # Just consume all results from the channel until exhausted
                pass
        # And then wrap up the result and push it to the parent channel
        errors = [e.error for e in result_list if isinstance(e, outcome.Error)]
        if len(errors) > 0:
            result = outcome.Error(trio.MultiError(errors))
        else:
            result = outcome.Value([o.unwrap() for o in result_list])
        async with parent_send_chan:
            await parent_send_chan.send(result)

    # Start parent producer, which will in turn start all children
    # (doing this inside the nursery because it needs to act async)
    nursery.start_soon(producer)
    nursery.start_soon(receiver)
    return Future(parent_recv_chan, nursery)
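
A hedged sketch combining run() from Example #4 with gather(): the work coroutine is hypothetical, and the result_chan attribute is only inferred from the code above (child_producer reads futures[i].result_chan); the library's public accessor may differ.

async def work(i: int) -> int:
    await trio.sleep(0.1 * i)
    return i * i

async def main() -> None:
    async with trio.open_nursery() as nursery:
        futures = [run(nursery, work, i) for i in range(3)]
        combined = gather(nursery, futures)
        # The parent channel is unbuffered, so the combined outcome must be received;
        # unwrapping it yields [0, 1, 4], in input order.
        results = (await combined.result_chan.receive()).unwrap()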
Example #10
    def start_bgkeyex(
        self,
        nursery: trio.Nursery,
        whitelist: Optional[List[PublicKey]] = None
            ) -> None:

        nursery.start_soon(
            partial(
                self._bg_key_exchange,
                nursery,
                whitelist=whitelist
                )
            )

        return None
Example #11
    async def recv_message(self, nursery: trio.Nursery):
        self._log.debug("Listening for new message")
        num_bytes = await self._sock.recv_into(self.__recv_view)
        data_view = self.__recv_view[:num_bytes]

        self._log.debug(f"Received {num_bytes} bytes")

        wrapper_packet = SSL_WrapperPacket()
        wrapper_packet.ParseFromString(data_view)

        self._log.debug("Wrapper packet", wrapper_packet=wrapper_packet)
        if wrapper_packet.HasField(
                "detection") and self.detection_messages is not None:
            self._log.debug("Queueing detection message",
                            detection=wrapper_packet.detection)
            nursery.start_soon(self.detection_messages.send,
                               wrapper_packet.detection)

        if wrapper_packet.HasField(
                "geometry") and self.geometry_messages is not None:
            self._log.debug("Queueing geometry message",
                            detection=wrapper_packet.geometry)
            nursery.start_soon(self.geometry_messages.send,
                               wrapper_packet.geometry)
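
A hedged sketch of driving recv_message in a loop, assuming a hypothetical client object built from the class above with its detection_messages and geometry_messages channels already set up.

async def receive_forever(client) -> None:
    async with trio.open_nursery() as nursery:
        while True:
            # Each datagram is parsed and fanned out to the per-field channels
            # via tasks started in this nursery.
            await client.recv_message(nursery)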
Example #12
async def _shared_logic(
    websocket: trio_websocket.WebSocketConnection,
    data: _ShardData,
    nursery: trio.Nursery,
    hello: typing.Dict[str, typing.Any],
    after_start: typing.Callable[[], typing.Awaitable[None]],
) -> bool:
    # the return value is whether or not to resume next time.

    async for message in _stream(websocket):
        if message['op'] == 0:
            if message['t'] == 'READY':
                data.session_id = message['d']['session_id']

            seq = message['s']

            if data.seq and seq < data.seq:
                raise _NonMonotonicHeartbeat()

            data.seq = seq

            # https://discord.com/channels/613425648685547541/697489244649816084/870221091849793587
            if message['t'] == 'GUILD_APPLICATION_COMMAND_COUNTS_UPDATE':
                continue
            # TODO: ???
            elif message['t'] == 'APPLICATION_COMMAND_PERMISSIONS_UPDATE':
                continue
            # https://github.com/discord/discord-api-docs/pull/3871
            elif message['t'] == 'GUILD_JOIN_REQUEST_DELETE':
                continue

            try:
                model: object = data.converter.structure(
                    message['d'], tags_to_model[message['t']])
                reverse: typing.Dict[str, object] = data.converter.unstructure(
                    model)

                if not _skip_differences(message['t']):
                    differences = _diff_differences(
                        reverse, message['d']) - _allowed_differences(
                            message['t'])

                    differences = differences - {
                        # https://github.com/discord/discord-api-docs/issues/1789
                        'guild_hashes',
                        # TODO: what's this attribute?
                        'hashes',
                    }

                    if differences:
                        raise _MissingKey(message['t'], message['d'],
                                          differences)
            except Exception as e:
                _LOGGER.exception('improper payload', exc_info=e)

            await data.substrate.broadcast(model)

        elif message['op'] == 1:
            await websocket.send_message(json.dumps({'op': 1, 'd': data.seq}))

        elif message['op'] == 7:
            return True

        elif message['op'] == 9:
            return bool(message['d'])

        elif message['op'] == 10:
            nursery.start_soon(_heartbeat, websocket,
                               message['d']['heartbeat_interval'] / 1000, data)
            await websocket.send_message(json.dumps(hello))
            nursery.start_soon(after_start)

        elif message['op'] == 11:
            data.have_acked = True

        else:
            _LOGGER.warning('UNIMPLEMENTED %r', message['op'])
            _never(message)

    # early return? the backoff will handle identifying eventually, so a RESUME is fine
    return True
Example #13
async def parse_package(
    nursery: trio.Nursery,
    pack: Package,
    obj_override: dict[Type[PakObject], dict[str, list[ParseData]]],
    loader: LoadScreen,
    has_tag: bool = False,
    has_mel: bool = False,
) -> None:
    """Parse through the given package to find all the components."""
    from packages import template_brush  # Avoid circular imports
    for pre in pack.info.find_children('Prerequisites'):
        # Special case - disable these packages when the music isn't copied.
        if pre.value == '<TAG_MUSIC>':
            if not has_tag:
                return
        elif pre.value == '<MEL_MUSIC>':
            if not has_mel:
                return
        elif pre.value.casefold() not in packages:
            LOGGER.warning(
                'Package "{pre}" required for "{id}" - '
                'ignoring package!',
                pre=pre.value,
                id=pack.id,
            )
            return

    desc: list[str] = []

    for obj in pack.info:
        await trio.sleep(0)
        if obj.name in ['prerequisites', 'id', 'name']:
            # Not object IDs.
            continue
        if obj.name in ['desc', 'description']:
            desc.extend(obj.as_array())
            continue
        if not obj.has_children():
            LOGGER.warning(
                'Unknown package option "{}" with value "{}"!',
                obj.real_name,
                obj.value,
            )
            continue
        if obj.name in ('templatebrush', 'brushtemplate'):
            LOGGER.warning(
                'TemplateBrush {} no longer needs to be defined in info.txt',
                obj['id', '<NO ID>'],
            )
            continue
        if obj.name == 'overrides':
            for over_prop in obj:
                if over_prop.name in ('templatebrush', 'brushtemplate'):
                    LOGGER.warning(
                        'TemplateBrush {} no longer needs to be defined in info.txt',
                        over_prop['id', '<NO ID>'],
                    )
                    continue
                try:
                    obj_type = OBJ_TYPES[over_prop.name]
                except KeyError:
                    LOGGER.warning('Unknown object type "{}" with ID "{}"!',
                                   over_prop.real_name, over_prop['id',
                                                                  '<NO ID>'])
                    continue
                try:
                    obj_id = over_prop['id']
                except LookupError:
                    raise ValueError('No ID for "{}" object type!'.format(
                        obj_type)) from None
                obj_override[obj_type][obj_id].append(
                    ParseData(pack.fsys, obj_id, over_prop, pack.id, True))
        else:
            try:
                obj_type = OBJ_TYPES[obj.name]
            except KeyError:
                LOGGER.warning('Unknown object type "{}" with ID "{}"!',
                               obj.real_name, obj['id', '<NO ID>'])
                continue
            try:
                obj_id = obj['id']
            except LookupError:
                raise ValueError(
                    'No ID for "{}" object type in "{}" package!'.format(
                        obj_type, pack.id)) from None
            if obj_id in all_obj[obj_type]:
                if obj_type.allow_mult:
                    # Pretend this is an override
                    obj_override[obj_type][obj_id].append(
                        ParseData(pack.fsys, obj_id, obj, pack.id, True))
                    # Don't continue to parse and overwrite
                    continue
                else:
                    raise Exception('ERROR! "' + obj_id + '" defined twice!')
            all_obj[obj_type][obj_id] = ObjData(
                pack.fsys,
                obj,
                pack.id,
                pack.disp_name,
            )

    pack.desc = '\n'.join(desc)

    for template in pack.fsys.walk_folder('templates'):
        await trio.sleep(0)
        if template.path.casefold().endswith('.vmf'):
            nursery.start_soon(template_brush.parse_template, pack.id,
                               template)
    loader.step('PAK')
Example #14
async def find_packages(nursery: trio.Nursery, pak_dir: Path) -> None:
    """Search a folder for packages, recursing if necessary."""
    found_pak = False
    try:
        contents = list(pak_dir.iterdir())
    except FileNotFoundError:
        LOGGER.warning('Package search location "{}" does not exist!', pak_dir)
        return

    for name in contents:  # Both files and dirs
        folded = name.stem.casefold()
        if folded.endswith('.vpk') and not folded.endswith('_dir.vpk'):
            # _000.vpk files, useless without the directory
            continue

        if name.is_dir():
            filesys = RawFileSystem(name)
        else:
            ext = name.suffix.casefold()
            if ext in ('.bee_pack', '.zip'):
                filesys = await trio.to_thread.run_sync(ZipFileSystem,
                                                        name,
                                                        cancellable=True)
            elif ext == '.vpk':
                filesys = await trio.to_thread.run_sync(VPKFileSystem,
                                                        name,
                                                        cancellable=True)
            else:
                LOGGER.info('Extra file: {}', name)
                continue

        LOGGER.debug('Reading package "{}"', name)

        # Valid packages must have an info.txt file!
        try:
            info = await trio.to_thread.run_sync(filesys.read_prop,
                                                 'info.txt',
                                                 cancellable=True)
        except FileNotFoundError:
            if name.is_dir():
                # This isn't a package, so check the subfolders too...
                LOGGER.debug('Checking subdir "{}" for packages...', name)
                nursery.start_soon(find_packages, nursery, name)
            else:
                LOGGER.warning('ERROR: package "{}" has no info.txt!', name)
            # Don't continue to parse this "package"
            continue
        pak_id = info['ID']

        if pak_id.casefold() in packages:
            raise ValueError(
                f'Duplicate package with id "{pak_id}"!\n'
                'If you just updated the mod, delete any old files in packages/.'
            ) from None

        PACKAGE_SYS[pak_id.casefold()] = filesys

        packages[pak_id.casefold()] = Package(
            pak_id,
            filesys,
            info,
            name,
        )
        found_pak = True

    if not found_pak:
        LOGGER.info('No packages in folder {}!', pak_dir)
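
A hedged sketch of kicking off the package search above, assuming a hypothetical packages directory; the nursery is passed in so that nested folders can be scanned concurrently, exactly as the recursive start_soon call inside the function does.

async def load_all_packages() -> None:
    async with trio.open_nursery() as nursery:
        nursery.start_soon(find_packages, nursery, Path('packages'))
    # The nursery exits once every find_packages task, including the recursive
    # subdirectory scans, has finished.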