Example #1
1
 def __init__(self, *args: typing.Any, **kw: typing.Any):
     self._static_mappings = kw.pop('static_mappings', None)
     self._src_dir = kw.pop('src_dir', None)
     base = kw.pop('base_href', None)
     assert not base or (base[0:4] == 'http' and base[-1] == '/'), base
     self._base_href = base
     super().__init__(*args, **kw)
Example #2
0
def _process(typeinfo: typecheck.Type, val: typing.Any, make: bool) -> typing.Any:
    if val is None:
        return None
    elif make and hasattr(typeinfo, "from_state"):
        return typeinfo.from_state(val)
    elif not make and hasattr(val, "get_state"):
        return val.get_state()

    typename = str(typeinfo)

    if typename.startswith("typing.List"):
        T = typecheck.sequence_type(typeinfo)
        return [_process(T, x, make) for x in val]
    elif typename.startswith("typing.Tuple"):
        Ts = typecheck.tuple_types(typeinfo)
        if len(Ts) != len(val):
            raise ValueError("Invalid data. Expected {}, got {}.".format(Ts, val))
        return tuple(
            _process(T, x, make) for T, x in zip(Ts, val)
        )
    elif typename.startswith("typing.Dict"):
        k_cls, v_cls = typecheck.mapping_types(typeinfo)
        return {
            _process(k_cls, k, make): _process(v_cls, v, make)
            for k, v in val.items()
        }
    elif typename.startswith("typing.Any"):
        # FIXME: Remove this when we remove flow.metadata
        assert isinstance(val, (int, str, bool, bytes))
        return val
    else:
        return typeinfo(val)
Example #3
0
def pytest_addoption(parser: typing.Any) -> None:
    """Add force option to pytest."""
    parser.addoption(
        "--zpool",
        action="store",
        help="Select a ZFS pool for the unit tests"
    )
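A minimal sketch of how a test might consume this option; the fixture name and the skip logic are hypothetical, only pytest's standard addoption/getoption API is assumed.
import typing

import pytest


@pytest.fixture
def zpool_name(request: typing.Any) -> typing.Optional[str]:
    # Read back the value passed via --zpool (None when the flag was omitted).
    return request.config.getoption("--zpool")


def test_pool_selected(zpool_name: typing.Optional[str]) -> None:
    if zpool_name is None:
        pytest.skip("no ZFS pool selected; pass --zpool to run this test")
    assert isinstance(zpool_name, str)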
Example #4
0
def play_sound(sound: typing.Any):
    """
    Play a previously loaded sound.
    """

    load_sound_library()
    sound.play()
Example #5
0
def exec(
    command: typing.List[str],
    logger: typing.Optional['libioc.Logger.Logger'] = None,
    ignore_error: bool = False,
    **subprocess_args: typing.Any
) -> typing.Tuple[
    typing.Optional[str],
    typing.Optional[str],
    int
]:
    """Execute a shell command."""
    if isinstance(command, str):
        command = [command]

    command_str = " ".join(command)

    if logger is not None:
        logger.log(f"Executing: {command_str}", level="spam")

    subprocess_args["stdout"] = subprocess_args.get("stdout", subprocess.PIPE)
    subprocess_args["stderr"] = subprocess_args.get("stderr", subprocess.PIPE)
    subprocess_args["shell"] = subprocess_args.get("shell", False)

    child = subprocess.Popen(  # nosec: TODO: #113
        command,
        **subprocess_args
    )

    stdout, stderr = child.communicate()

    if stderr is not None:
        stderr = stderr.decode("UTF-8").strip()

    if stdout is not None:
        stdout = stdout.decode("UTF-8").strip()
        if logger:
            logger.spam(_prettify_output(stdout))

    returncode = child.wait()
    if returncode > 0:
        if logger:
            log_level = "spam" if ignore_error else "warn"
            logger.log(
                f"Command exited with {returncode}: {command_str}",
                level=log_level
            )
            if stderr:
                logger.log(_prettify_output(stderr), level=log_level)

        if ignore_error is False:
            raise libioc.errors.CommandFailure(
                returncode=returncode,
                logger=logger
            )

    return stdout, stderr, returncode
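A hedged usage sketch for the helper above; it assumes only that this exec function is importable from the surrounding module, and the command shown is illustrative.
# Hypothetical usage: run a command, tolerating failure, and inspect the result.
stdout, stderr, returncode = exec(["echo", "hello"], ignore_error=True)
if returncode == 0:
    print(stdout)  # decoded, stripped standard output
else:
    print(f"failed ({returncode}): {stderr}")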
Example #6
0
 def mount(self, member: t.Any, *, force=False, name=None) -> None:
     if member in self._seen:
         return
     if name is not None:
         member = _Alias(member, name=name)
     self._seen.add(member)
     self.members.append(member.on_mount(self))
     if is_namespace(member):
         self.children[member.get_name()] = member
Example #7
0
def json_serial(obj: typing.Any) -> typing.Optional[str]:
    """JSON serializer for objects not serializable by default json code"""

    if isinstance(obj, datetime.datetime):
        if obj.tzinfo is not None:
            serial = obj.astimezone(datetime.timezone.utc).isoformat()
        else:
            serial = obj.isoformat()

        return serial

    raise TypeError('Type not serializable - {}'.format(str(type(obj))))
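A short sketch of how such a serializer is typically wired into json.dumps via the default hook; only the standard library is assumed and the payload is made up.
import datetime
import json

payload = {"created": datetime.datetime(2021, 1, 1, tzinfo=datetime.timezone.utc)}
# json.dumps calls json_serial only for objects it cannot encode itself.
print(json.dumps(payload, default=json_serial))
# -> {"created": "2021-01-01T00:00:00+00:00"}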
Example #8
0
    def _pop_object(self,
                    yaml_data: typing.Any,
                    key: str,
                    typ: type) -> typing.Any:
        """Get a global object from the given data."""
        if not isinstance(yaml_data, dict):
            desc = configexc.ConfigErrorDesc("While loading data",
                                             "Toplevel object is not a dict")
            raise configexc.ConfigFileErrors('autoconfig.yml', [desc])

        if key not in yaml_data:
            desc = configexc.ConfigErrorDesc(
                "While loading data",
                "Toplevel object does not contain '{}' key".format(key))
            raise configexc.ConfigFileErrors('autoconfig.yml', [desc])

        data = yaml_data.pop(key)

        if not isinstance(data, typ):
            desc = configexc.ConfigErrorDesc(
                "While loading data",
                "'{}' object is not a {}".format(key, typ.__name__))
            raise configexc.ConfigFileErrors('autoconfig.yml', [desc])

        return data
Example #9
0
def json_serial(obj: typing.Any) -> typing.Optional[str]:
    """JSON serializer for objects not serializable by default json code"""

    if isinstance(obj, datetime.datetime):
        serial = obj.isoformat()
        return serial

    return None
Example #10
 def url_for(context: dict, name: str, **path_params: typing.Any) -> str:
     request = context["request"]
     path_params = {
         k: v
         for k, v in path_params.items()
         if not isinstance(v, Undefined) and v is not None
     }
     return request.url_for(name, **path_params)
Example #11
0
    def __hook_module(self, module: typing.Any) -> typing.Any:

        class _Module(module.__class__, _HookedModule):

            pass

        module.__class__ = _Module
        return module
Example #12
0
def parse_none(
    data: typing.Any,
    none_matches: typing.List[str] = ["none", "-", ""]
) -> None:
    """Raise if the input does not translate to None."""
    if data is None:
        return None
    if isinstance(data, str) and (data.lower() in none_matches):
        return None
    raise TypeError("Value is not None")
Example #13
0
 def is_valid(self, manager: _CommandBase, typ: typing.Any, val: typing.Any) -> bool:
     if not isinstance(val, str):
         return False
     parts = [x.strip() for x in val.split(",")]
     for p in parts:
         for pref in self.valid_prefixes:
             if p.startswith(pref):
                 break
         else:
             return False
     return True
Example #14
0
 def sub(obj: typing.Any, res: list) -> typing.Iterator[typing.Tuple[str, typing.Any]]:
     if type(obj) == dict:
         for k, v in obj.items():
             yield from sub(v, res + [k])
     elif type(obj) == list:
         for v in obj:
             yield from sub(v, res)
     elif obj is None:
         yield ("_".join(res), '')
     else:
         yield ("_".join(res), obj)
Example #15
0
def ensure_str(var: typing.Any) -> str:
    """
    Try to convert passed variable to a str object.
    """
    if isinstance(var, str):
        return var
    if var is None:
        return ""
    if isinstance(var, (bytes, bytearray)):
        strvar = var.decode("utf-8")
    else:
        strvar = var
    return str(strvar)
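A few illustrative conversions through the helper above:
ensure_str("abc")            # 'abc'  (already a str)
ensure_str(None)             # ''     (None maps to the empty string)
ensure_str(b"caf\xc3\xa9")   # 'café' (bytes are decoded as UTF-8)
ensure_str(42)               # '42'   (anything else goes through str())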
Example #16
0
def _process(typeinfo: typecheck.Type, val: typing.Any, make: bool) -> typing.Any:
    if val is None:
        return None
    elif make and hasattr(typeinfo, "from_state"):
        return typeinfo.from_state(val)
    elif not make and hasattr(val, "get_state"):
        return val.get_state()

    typename = str(typeinfo)

    if typename.startswith("typing.List"):
        T = typecheck.sequence_type(typeinfo)
        return [_process(T, x, make) for x in val]
    elif typename.startswith("typing.Tuple"):
        Ts = typecheck.tuple_types(typeinfo)
        if len(Ts) != len(val):
            raise ValueError("Invalid data. Expected {}, got {}.".format(Ts, val))
        return tuple(
            _process(T, x, make) for T, x in zip(Ts, val)
        )
    elif typename.startswith("typing.Dict"):
        k_cls, v_cls = typecheck.mapping_types(typeinfo)
        return {
            _process(k_cls, k, make): _process(v_cls, v, make)
            for k, v in val.items()
        }
    elif typename.startswith("typing.Any"):
        # This requires a bit of explanation. We can't import our IO layer here,
        # because it causes a circular import. Rather than restructuring the
        # code for this, we use JSON serialization, which has similar primitive
        # type restrictions as tnetstring, to check for conformance.
        try:
            json.dumps(val)
        except TypeError:
            raise ValueError(f"Data not serializable: {val}")
        return val
    else:
        return typeinfo(val)
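A tiny illustration of the JSON-based conformance check described in the comment above; the values are made up and only the standard library is assumed.
import json

json.dumps({"k": [1, "two", True]})   # fine: primitive types only
try:
    json.dumps({"k": object()})       # not representable -> TypeError
except TypeError:
    print("rejected non-primitive value")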
Example #17
0
def _generate_tokens(pat: GenericAny, text: str) -> Iterator[Token]:
    """Generate a sequence of tokens from `text` that match `pat`

    Parameters
    ----------
    pat : compiled regex
        The pattern to use for tokenization
    text : str
        The text to tokenize

    """
    rules = _TYPE_RULES
    keys = _TYPE_KEYS
    groupindex = pat.groupindex
    scanner = pat.scanner(text)
    for m in iter(scanner.match, None):
        lastgroup = m.lastgroup
        func = rules[keys[groupindex[lastgroup] - 1]]
        if func is not None:
            yield func(m.group(lastgroup))
Example #18
0
 def format_embedded_scheme_value(
     value: typing.Any, force_quotes: bool = False
 ) -> str:
     """
     Formats embedded Scheme ``value``.
     """
     if isinstance(
         value, (enums.HorizontalAlignment, enums.VerticalAlignment)
     ):
         return "#" + repr(value).lower()
     result = Scheme.format_scheme_value(value, force_quotes=force_quotes)
     if isinstance(value, bool):
         result = "#" + result
     elif isinstance(value, str) and value.startswith("#"):
         pass
     elif isinstance(value, str) and not force_quotes:
         result = "#" + result
     elif isinstance(value, Scheme):
         result = "#" + result
     return result
Example #19
0
    def __contains__(self, key: typing.Any) -> bool:
        """Return whether a (nested) key is included in the dict."""
        if not isinstance(key, str):
            return False
        data = self
        while True:
            keys = dict.keys(data)
            try:
                i = key.index(self.delimiter)
            except ValueError:
                return key in keys

            current = key[0:i]
            if current not in keys:
                return False
            key = key[(i + 1):]
            data = dict.__getitem__(data, current)
Example #20
0
 def __call__(self, *args: tp.Any, **kwargs: tp.Any) -> str:
     """Call the cammand line with addidional arguments
     The keyword arguments will be sent as --{key}={val}
     The logs are bufferized. They will be printed if the job fails, or sent as output of the function
     Errors are provided with the internal stderr
     """
     # TODO make the following command more robust (probably fails in multiple cases)
     full_command = self.command + [str(x) for x in args] + ["--{}={}".format(x, y) for x, y in kwargs.items()]
     if self.verbose:
         print(f"The following command is sent: {full_command}")
     outlines: tp.List[str] = []
     with subprocess.Popen(full_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                           shell=False, cwd=self.cwd, env=self.env) as process:
         try:
             assert process.stdout is not None
             for line in iter(process.stdout.readline, b''):
                 if not line:
                     break
                 outlines.append(line.decode().strip())
                 if self.verbose:
                     print(outlines[-1], flush=True)
         except Exception:  # pylint: disable=broad-except
             process.kill()
             process.wait()
             raise FailedJobError("Job got killed for an unknown reason.")
         stderr = process.communicate()[1]  # we already got stdout
         stdout = "\n".join(outlines)
         retcode = process.poll()
         if stderr and (retcode or self.verbose):
             print(stderr.decode(), file=sys.stderr)
         if retcode:
             subprocess_error = subprocess.CalledProcessError(retcode, process.args, output=stdout, stderr=stderr)
             raise FailedJobError(stderr.decode()) from subprocess_error
     return stdout
Example #21
0
 def construct_yaml_object(self, node: t.Any, cls: t.Any) -> t.Any:
     state = self.construct_mapping(node, deep=True)
     data = cls.__new__(cls, **state)
     if hasattr(data, '__setstate__'):
         data.__setstate__(state)
     yield data
Example #22
0
 def get_attr(key: dtype.Constant[str],
              dataclass: typing.Any) -> typing.Any:
     return dataclass.__getattribute__(key)
Example #23
0
 def setUpClass(cls: t.Any) -> None:
     cls.BBLFSH_SERVER_EXISTED = ensure_bblfsh_is_running()
Example #24
0
def load_and_map(stream: typing.Any,
                 schema: 'yamap.schema.yamatchable') -> typing.Any:
    ''' Iterative stack based implementation of the mapper.

        Visits each non-branch node once and each branch node (that
        being sequence or mapping) twice: Once before and once after
        working through all its children.

        The first visit will try to map all children to schema types
        using the method appropriate to the current type and push all of
        them on the stack to be evaluated in the iterations directly
        following.

        The second visit (or the only one, for non-branching nodes) will
        evaluate the actual value of the node using the children
        prepared before as context. '''

    if not hasattr(stream, 'read') and hasattr(stream, 'open'):
        stream = stream.open('rb')

    loader = Loader(stream)
    node = loader.get_single_node()
    stack = [stackitem(node, schema.matches(node), None)]

    while stack:
        # peak at top of stack
        top = stack[-1]

        # first time visiting this node
        if not top.branch_visited:
            children = top.schema.match_children(top.node)

            # and it is a branching node
            if children is not None:
                # push stackitems of its children
                for child_node, child_schema in children:
                    stack.append(stackitem(child_node, child_schema, top))

                # mark node as visited
                top.branch_visited = True

                # and iterate
                continue

        # otherwise pop from stack
        stack.pop()

        # second time visiting a branching node
        if top.branch_visited:
            value = top.children

        # visiting a leaf node
        else:
            value = top.schema.matches(top.node).construct_leaf(
                loader, top.node)

        # convert type
        value = top.schema.resolve(value)

        if top.parent:
            top.parent.children.append(value)
        else:
            return value
Example #25
0
 def _revision_history(alembic_config: typing.Any):
     for revision in revision_history:
         alembic_config.print_stdout(f"none -> {revision}, revision name")
Example #26
0
 def __contains__(self, key: typing.Any) -> bool:
     get_header_key = key.lower().encode(self.encoding)
     for header_key, _ in self._list:
         if header_key == get_header_key:
             return True
     return False
Example #27
0
    def validate(self,
                 value: typing.Any,
                 *,
                 strict: bool = False) -> typing.Any:
        if value is None and self.allow_null:
            return None
        elif value is None:
            raise self.validation_error("null")
        elif not isinstance(value, (dict, typing.Mapping)):
            raise self.validation_error("type")

        validated = {}
        error_messages = []

        # Ensure all property keys are strings.
        for key in value.keys():
            if not isinstance(key, str):
                text = self.get_error_text("invalid_key")
                message = Message(text=text, code="invalid_key", index=[key])
                error_messages.append(message)
            elif self.property_names is not None:
                _, error = self.property_names.validate_or_error(key)
                if error is not None:
                    text = self.get_error_text("invalid_property")
                    message = Message(text=text,
                                      code="invalid_property",
                                      index=[key])
                    error_messages.append(message)

        # Min/Max properties
        if self.min_properties is not None:
            if len(value) < self.min_properties:
                if self.min_properties == 1:
                    raise self.validation_error("empty")
                else:
                    raise self.validation_error("min_properties")
        if self.max_properties is not None:
            if len(value) > self.max_properties:
                raise self.validation_error("max_properties")

        # Required properties
        for key in self.required:
            if key not in value:
                text = self.get_error_text("required")
                message = Message(text=text, code="required", index=[key])
                error_messages.append(message)

        # Properties
        for key, child_schema in self.properties.items():
            if key not in value:
                if child_schema.has_default():
                    validated[key] = child_schema.get_default_value()
                continue
            item = value[key]
            child_value, error = child_schema.validate_or_error(item,
                                                                strict=strict)
            if not error:
                validated[key] = child_value
            else:
                error_messages += error.messages(add_prefix=key)

        # Pattern properties
        if self.pattern_properties:
            for key in list(value.keys()):
                for pattern, child_schema in self.pattern_properties.items():
                    if isinstance(key, str) and re.search(pattern, key):
                        item = value[key]
                        child_value, error = child_schema.validate_or_error(
                            item, strict=strict)
                        if not error:
                            validated[key] = child_value
                        else:
                            error_messages += error.messages(add_prefix=key)

        # Additional properties
        validated_keys = set(validated.keys())
        error_keys = set(
            [message.index[0] for message in error_messages if message.index])

        remaining = [
            key for key in value.keys()
            if key not in validated_keys | error_keys
        ]

        if self.additional_properties is True:
            for key in remaining:
                validated[key] = value[key]
        elif self.additional_properties is False:
            for key in remaining:
                text = self.get_error_text("invalid_property")
                message = Message(text=text, code="invalid_property", key=key)
                error_messages.append(message)
        elif self.additional_properties is not None:
            assert isinstance(self.additional_properties, Field)
            child_schema = self.additional_properties
            for key in remaining:
                item = value[key]
                child_value, error = child_schema.validate_or_error(
                    item, strict=strict)
                if not error:
                    validated[key] = child_value
                else:
                    error_messages += error.messages(add_prefix=key)

        if error_messages:
            raise ValidationError(messages=error_messages)

        return validated
Example #28
0
 def __call__(self, location: Location, system_context: SystemContext,
              *args: typing.Any, **kwargs: typing.Any) -> None:
     """Execute command."""
     protocol = kwargs.get("protocol", "tcp")
     comment = kwargs.get("comment", None)
     open_port(system_context, args[0], protocol=protocol, comment=comment)
Example #29
0
    def _get_result(self, context: UserContext, source: typing.Any) -> Result:
        if source.disabled_syntaxes and 'syntax_names' not in context:
            context['syntax_names'] = get_syn_names(self._vim)

        ctx = copy.deepcopy(context)

        charpos = source.get_complete_position(ctx)
        if charpos >= 0 and source.is_bytepos:
            charpos = bytepos2charpos(ctx['encoding'], ctx['input'], charpos)

        ctx['char_position'] = charpos
        ctx['complete_position'] = charpos2bytepos(ctx['encoding'],
                                                   ctx['input'], charpos)
        ctx['complete_str'] = ctx['input'][ctx['char_position']:]

        if charpos < 0 or self._is_skip(ctx, source):
            if source.name in self._prev_results:
                self._prev_results.pop(source.name)
            # Skip
            return {}

        if (source.name in self._prev_results and self._use_previous_result(
                context, self._prev_results[source.name], source.is_volatile,
                source.is_async)):
            return self._prev_results[source.name]

        ctx['is_async'] = False
        ctx['is_refresh'] = True
        ctx['max_abbr_width'] = min(source.max_abbr_width,
                                    ctx['max_abbr_width'])
        ctx['max_kind_width'] = min(source.max_kind_width,
                                    ctx['max_kind_width'])
        ctx['max_info_width'] = source.max_info_width
        ctx['max_menu_width'] = min(source.max_menu_width,
                                    ctx['max_menu_width'])
        if ctx['max_abbr_width'] > 0:
            ctx['max_abbr_width'] = max(20, ctx['max_abbr_width'])
        if ctx['max_kind_width'] > 0:
            ctx['max_kind_width'] = max(10, ctx['max_kind_width'])
        if ctx['max_info_width'] > 0:
            ctx['max_info_width'] = max(10, ctx['max_info_width'])
        if ctx['max_menu_width'] > 0:
            ctx['max_menu_width'] = max(10, ctx['max_menu_width'])

        # Gathering
        self._profile_start(ctx, source.name)
        ctx['vars'] = self._vim.vars
        ctx['candidates'] = source.gather_candidates(ctx)
        if ctx['is_async']:
            source.is_async = True
        ctx['vars'] = None
        self._profile_end(source.name)

        if ctx['candidates'] is None:
            return {}

        ctx['candidates'] = convert2candidates(ctx['candidates'])

        return {
            'name': source.name,
            'source': source,
            'context': ctx,
            'is_async': ctx['is_async'],
            'prev_linenr': ctx['position'][1],
            'prev_input': ctx['input'],
            'input': ctx['input'],
            'complete_position': ctx['complete_position'],
            'candidates': ctx['candidates'],
        }
Example #30
0
 def _write(self, stdout: typing.Any, expr: typing.Any) -> None:
     stdout.buffer.write(self._packer.pack(expr))
     stdout.flush()
Example #31
0
 def add_child(self, child: t.Any):
     child._parent = self
     self.children.append(child)
     child.update()
Example #32
0
    def _cast(self, value: ty.Any, path: t.Path, *args: ty.Any, **context: ty.Any) -> ty.Any:
        if isinstance(value, str):
            return [content_type.strip() for content_type in value.split(',')]

        return super(CommaDelimitedArrayType, self)._cast(value, path, **context)
Example #33
0
 def can_get_messages(channel: typing.Any) -> bool:
     if not isinstance(channel, discord.TextChannel):
         return False
     return channel.permissions_for(channel.guild.me).read_message_history
Example #34
0
def aws_to_py(in_type: t.Type[T], aws_field: t.Any) -> T:
    """
    Convert an AWS item back into its py equivalent

    This might not even be strictly required, but we check that
    all the types are roughly what we expect, and convert
    Decimals back into ints/floats
    """
    origin = t.get_origin(in_type)
    args = t.get_args(in_type)

    check_type = origin
    if in_type is float:
        check_type = Decimal
    elif in_type is int:
        check_type = (int, Decimal)
    elif is_dataclass(in_type):
        check_type = dict
    elif check_type is set and args:
        if args[0] not in (str, float, int, Decimal):
            check_type = list

    if not isinstance(aws_field, check_type or in_type):
        # If you are getting random deserialization errors in tests that you did
        # not touch, have a look at
        # https://github.com/facebook/ThreatExchange/issues/697
        raise AWSSerializationFailure(
            "Deserialization error: "
            f"Expected {in_type} got {type(aws_field)} ({aws_field!r})")

    if in_type is int:  # N
        return int(aws_field)  # type: ignore # mypy/issues/10003
    if in_type is float:  # N
        return float(aws_field)  # type: ignore # mypy/issues/10003
    if in_type is Decimal:  # N
        return aws_field  # type: ignore # mypy/issues/10003
    if in_type is str:  # S
        return aws_field  # type: ignore # mypy/issues/10003
    if in_type is bool:  # BOOL
        return aws_field  # type: ignore # mypy/issues/10003
    if in_type is t.Set[str]:  # SS
        return aws_field  # type: ignore # mypy/issues/10003
    if in_type is t.Set[int]:  # SN
        return {int(s) for s in aws_field}  # type: ignore # mypy/issues/10003
    if in_type is t.Set[float]:  # SN
        return {float(s)
                for s in aws_field}  # type: ignore # mypy/issues/10003

    if origin is set:  # L - special case
        return {aws_to_py(args[0], v)
                for v in aws_field}  # type: ignore # mypy/issues/10003
    if origin is list:  # L
        return [aws_to_py(args[0], v)
                for v in aws_field]  # type: ignore # mypy/issues/10003
    # It would be possible to add support for nested dataclasses here, which
    # just become maps with the keys as their attributes
    # Another option would be adding a new class that adds methods to convert
    # to an AWS-friendly struct and back
    if origin is dict and args[0] is str:  # M
        # check if value type of map origin is explicitly set
        return {k: aws_to_py(args[1], v)
                for k, v in aws_field.items()
                }  # type: ignore # mypy/issues/10003
    if is_dataclass(in_type):
        kwargs = {}
        for field in fields(in_type):
            if not field.init:
                continue
            val = aws_field.get(field.name)
            if val is None:
                continue  # Hopefully missing b/c default or version difference
            kwargs[field.name] = aws_to_py(field.type, val)
        return in_type(
            **kwargs)  # type: ignore  # No idea how to correctly type this

    raise AWSSerializationFailure(
        f"Missing deserialization logic for {in_type!r}")
Example #35
0
 def test(dat: typing.Any) -> bool:
     type_test(dict)(dat)
     for dom, rng in dat.items():
         dom_test(dom)
         rng_test(rng)
     return True
Example #36
0
def py_to_aws(py_field: t.Any, in_type: t.Optional[t.Type[T]] = None) -> T:
    """
    Convert a py item into its AWS equivalent.

    Should exactly inverse aws_to_py
    """
    if in_type is None:
        in_type = type(py_field)
    origin = t.get_origin(in_type)
    args = t.get_args(in_type)

    check_type = origin or in_type

    if isinstance(check_type, t.ForwardRef):
        raise AWSSerializationFailure(
            "Serialization error: "
            f"Expected no forward refs, but detected {check_type}. "
            "Rework your dataclasses to avoid forward references.")

    if not isinstance(py_field, check_type):
        raise AWSSerializationFailure(
            "Serialization error: "
            f"Expected {check_type} got {type(py_field)} ({py_field!r})")

    if in_type == int:  # N
        # Technically, this also needs to be converted to decimal,
        # but the boto3 translator seems to handle it fine
        return py_field  # type: ignore # mypy/issues/10003
    if in_type == float:  # N
        # WARNING WARNING
        # floating point is not truly supported in dynamodb
        # We can fake it for numbers without too much precision
        # but Decimal("3.4") != float(3.4)
        return Decimal(str(py_field))  # type: ignore # mypy/issues/10003
    if in_type == Decimal:  # N
        return py_field  # type: ignore # mypy/issues/10003
    if in_type == str:  # S
        return py_field  # type: ignore # mypy/issues/10003
    if in_type == bool:  # BOOL
        return py_field  # type: ignore # mypy/issues/10003
    if in_type == t.Set[str]:  # SS
        return py_field  # type: ignore # mypy/issues/10003
    if in_type == t.Set[int]:  # SN
        return {i for i in py_field}  # type: ignore # mypy/issues/10003
    if in_type == t.Set[float]:  # SN
        # WARNING WARNING
        # floating point is not truly supported in dynamodb
        # See note above
        return {Decimal(str(s))
                for s in py_field}  # type: ignore # mypy/issues/10003

    if origin is list:  # L
        return [py_to_aws(v, args[0])
                for v in py_field]  # type: ignore # mypy/issues/10003
    # Various simple collections that don't fit into the
    # special cases above can likely be coerced into a list.
    if origin is set:  # L - Special case
        return [py_to_aws(v, args[0])
                for v in py_field]  # type: ignore # mypy/issues/10003

    if origin is dict and args[0] is str:  # M
        return {k: py_to_aws(v, args[1])
                for k, v in py_field.items()
                }  # type: ignore # mypy/issues/10003
    if is_dataclass(in_type):
        return {
            field.name: py_to_aws(getattr(py_field, field.name), field.type)
            for field in fields(in_type)
        }  # type: ignore # mypy/issues/10003

    raise AWSSerializationFailure(
        f"Missing Serialization logic for {in_type!r}")
Example #37
0
 def test_Any(self):
     o = object()
     self.assertIs(Any(o), o)
     s = 'x'
     self.assertIs(Any(s), s)
Example #38
0
 def render(self, content: typing.Any) -> bytes:
     if isinstance(content, bytes):
         return content
     return content.encode(self.charset)
Example #39
0
async def _unittest_serial_transport(caplog: typing.Any) -> None:
    from pyuavcan.transport import MessageDataSpecifier, ServiceDataSpecifier, PayloadMetadata, Transfer, TransferFrom
    from pyuavcan.transport import Priority, Timestamp, InputSessionSpecifier, OutputSessionSpecifier
    from pyuavcan.transport import ProtocolParameters

    get_monotonic = asyncio.get_event_loop().time

    service_multiplication_factor = 2

    with pytest.raises(ValueError):
        _ = SerialTransport(serial_port="loop://", local_node_id=None, mtu=1)

    with pytest.raises(ValueError):
        _ = SerialTransport(serial_port="loop://", local_node_id=None, service_transfer_multiplier=10000)

    with pytest.raises(pyuavcan.transport.InvalidMediaConfigurationError):
        _ = SerialTransport(serial_port=serial.serial_for_url("loop://", do_not_open=True), local_node_id=None)

    tr = SerialTransport(serial_port="loop://", local_node_id=None, mtu=1024)

    assert tr.loop is asyncio.get_event_loop()
    assert tr.local_node_id is None
    assert tr.serial_port.is_open

    assert tr.input_sessions == []
    assert tr.output_sessions == []

    assert tr.protocol_parameters == ProtocolParameters(
        transfer_id_modulo=2 ** 64,
        max_nodes=4096,
        mtu=1024,
    )

    assert tr.sample_statistics() == SerialTransportStatistics()

    sft_capacity = 1024

    payload_single = [_mem("qwertyui"), _mem("01234567")] * (sft_capacity // 16)
    assert sum(map(len, payload_single)) == sft_capacity

    payload_x3 = (payload_single * 3)[:-1]
    payload_x3_size_bytes = sft_capacity * 3 - 8
    assert sum(map(len, payload_x3)) == payload_x3_size_bytes

    #
    # Instantiate session objects.
    #
    meta = PayloadMetadata(10000)

    broadcaster = tr.get_output_session(OutputSessionSpecifier(MessageDataSpecifier(2345), None), meta)
    assert broadcaster is tr.get_output_session(OutputSessionSpecifier(MessageDataSpecifier(2345), None), meta)

    subscriber_promiscuous = tr.get_input_session(InputSessionSpecifier(MessageDataSpecifier(2345), None), meta)
    assert subscriber_promiscuous is tr.get_input_session(InputSessionSpecifier(MessageDataSpecifier(2345), None), meta)

    subscriber_selective = tr.get_input_session(InputSessionSpecifier(MessageDataSpecifier(2345), 3210), meta)
    assert subscriber_selective is tr.get_input_session(InputSessionSpecifier(MessageDataSpecifier(2345), 3210), meta)

    server_listener = tr.get_input_session(
        InputSessionSpecifier(ServiceDataSpecifier(333, ServiceDataSpecifier.Role.REQUEST), None), meta
    )
    assert server_listener is tr.get_input_session(
        InputSessionSpecifier(ServiceDataSpecifier(333, ServiceDataSpecifier.Role.REQUEST), None), meta
    )

    client_listener = tr.get_input_session(
        InputSessionSpecifier(ServiceDataSpecifier(333, ServiceDataSpecifier.Role.RESPONSE), 3210), meta
    )
    assert client_listener is tr.get_input_session(
        InputSessionSpecifier(ServiceDataSpecifier(333, ServiceDataSpecifier.Role.RESPONSE), 3210), meta
    )

    print("INPUTS:", tr.input_sessions)
    print("OUTPUTS:", tr.output_sessions)
    assert set(tr.input_sessions) == {subscriber_promiscuous, subscriber_selective, server_listener, client_listener}
    assert set(tr.output_sessions) == {broadcaster}
    assert tr.sample_statistics() == SerialTransportStatistics()

    #
    # Message exchange test.
    #
    assert await broadcaster.send(
        Transfer(
            timestamp=Timestamp.now(), priority=Priority.LOW, transfer_id=77777, fragmented_payload=payload_single
        ),
        monotonic_deadline=get_monotonic() + 5.0,
    )

    rx_transfer = await subscriber_promiscuous.receive(get_monotonic() + 5.0)
    print("PROMISCUOUS SUBSCRIBER TRANSFER:", rx_transfer)
    assert isinstance(rx_transfer, TransferFrom)
    assert rx_transfer.priority == Priority.LOW
    assert rx_transfer.transfer_id == 77777
    assert rx_transfer.fragmented_payload == [b"".join(payload_single)]

    print(tr.sample_statistics())
    assert tr.sample_statistics().in_bytes >= 32 + sft_capacity + 2
    assert tr.sample_statistics().in_frames == 1
    assert tr.sample_statistics().in_out_of_band_bytes == 0
    assert tr.sample_statistics().out_bytes == tr.sample_statistics().in_bytes
    assert tr.sample_statistics().out_frames == 1
    assert tr.sample_statistics().out_transfers == 1
    assert tr.sample_statistics().out_incomplete == 0

    with pytest.raises(pyuavcan.transport.OperationNotDefinedForAnonymousNodeError):
        # Anonymous nodes can't send multiframe transfers.
        assert await broadcaster.send(
            Transfer(
                timestamp=Timestamp.now(), priority=Priority.LOW, transfer_id=77777, fragmented_payload=payload_x3
            ),
            monotonic_deadline=get_monotonic() + 5.0,
        )

    assert None is await subscriber_selective.receive(get_monotonic() + 0.1)
    assert None is await subscriber_promiscuous.receive(get_monotonic() + 0.1)
    assert None is await server_listener.receive(get_monotonic() + 0.1)
    assert None is await client_listener.receive(get_monotonic() + 0.1)

    #
    # Service exchange test.
    #
    with pytest.raises(pyuavcan.transport.OperationNotDefinedForAnonymousNodeError):
        # Anonymous nodes can't emit service transfers.
        tr.get_output_session(
            OutputSessionSpecifier(ServiceDataSpecifier(333, ServiceDataSpecifier.Role.REQUEST), 3210), meta
        )

    #
    # Replace the transport with a different one where the local node-ID is not None.
    #
    tr = SerialTransport(serial_port="loop://", local_node_id=3210, mtu=1024)
    assert tr.local_node_id == 3210

    #
    # Re-instantiate session objects because the transport instances have been replaced.
    #
    broadcaster = tr.get_output_session(OutputSessionSpecifier(MessageDataSpecifier(2345), None), meta)
    assert broadcaster is tr.get_output_session(OutputSessionSpecifier(MessageDataSpecifier(2345), None), meta)

    subscriber_promiscuous = tr.get_input_session(InputSessionSpecifier(MessageDataSpecifier(2345), None), meta)

    subscriber_selective = tr.get_input_session(InputSessionSpecifier(MessageDataSpecifier(2345), 3210), meta)

    server_listener = tr.get_input_session(
        InputSessionSpecifier(ServiceDataSpecifier(333, ServiceDataSpecifier.Role.REQUEST), None), meta
    )

    server_responder = tr.get_output_session(
        OutputSessionSpecifier(ServiceDataSpecifier(333, ServiceDataSpecifier.Role.RESPONSE), 3210), meta
    )
    assert server_responder is tr.get_output_session(
        OutputSessionSpecifier(ServiceDataSpecifier(333, ServiceDataSpecifier.Role.RESPONSE), 3210), meta
    )

    client_requester = tr.get_output_session(
        OutputSessionSpecifier(ServiceDataSpecifier(333, ServiceDataSpecifier.Role.REQUEST), 3210), meta
    )
    assert client_requester is tr.get_output_session(
        OutputSessionSpecifier(ServiceDataSpecifier(333, ServiceDataSpecifier.Role.REQUEST), 3210), meta
    )

    client_listener = tr.get_input_session(
        InputSessionSpecifier(ServiceDataSpecifier(333, ServiceDataSpecifier.Role.RESPONSE), 3210), meta
    )
    assert client_listener is tr.get_input_session(
        InputSessionSpecifier(ServiceDataSpecifier(333, ServiceDataSpecifier.Role.RESPONSE), 3210), meta
    )

    assert set(tr.input_sessions) == {subscriber_promiscuous, subscriber_selective, server_listener, client_listener}
    assert set(tr.output_sessions) == {broadcaster, server_responder, client_requester}
    assert tr.sample_statistics() == SerialTransportStatistics()

    assert await client_requester.send(
        Transfer(timestamp=Timestamp.now(), priority=Priority.HIGH, transfer_id=88888, fragmented_payload=payload_x3),
        monotonic_deadline=get_monotonic() + 5.0,
    )

    rx_transfer = await server_listener.receive(get_monotonic() + 5.0)
    print("SERVER LISTENER TRANSFER:", rx_transfer)
    assert isinstance(rx_transfer, TransferFrom)
    assert rx_transfer.priority == Priority.HIGH
    assert rx_transfer.transfer_id == 88888
    assert len(rx_transfer.fragmented_payload) == 3
    assert b"".join(rx_transfer.fragmented_payload) == b"".join(payload_x3)

    assert None is await subscriber_selective.receive(get_monotonic() + 0.1)
    assert None is await subscriber_promiscuous.receive(get_monotonic() + 0.1)
    assert None is await server_listener.receive(get_monotonic() + 0.1)
    assert None is await client_listener.receive(get_monotonic() + 0.1)

    print(tr.sample_statistics())
    assert tr.sample_statistics().in_bytes >= (32 * 3 + payload_x3_size_bytes + 2) * service_multiplication_factor
    assert tr.sample_statistics().in_frames == 3 * service_multiplication_factor
    assert tr.sample_statistics().in_out_of_band_bytes == 0
    assert tr.sample_statistics().out_bytes == tr.sample_statistics().in_bytes
    assert tr.sample_statistics().out_frames == 3 * service_multiplication_factor
    assert tr.sample_statistics().out_transfers == 1 * service_multiplication_factor
    assert tr.sample_statistics().out_incomplete == 0

    #
    # Write timeout test.
    #
    assert not await broadcaster.send(
        Transfer(
            timestamp=Timestamp.now(), priority=Priority.IMMEDIATE, transfer_id=99999, fragmented_payload=payload_x3
        ),
        monotonic_deadline=get_monotonic() - 5.0,  # The deadline is in the past.
    )

    assert None is await subscriber_selective.receive(get_monotonic() + 0.1)
    assert None is await subscriber_promiscuous.receive(get_monotonic() + 0.1)
    assert None is await server_listener.receive(get_monotonic() + 0.1)
    assert None is await client_listener.receive(get_monotonic() + 0.1)

    print(tr.sample_statistics())
    assert tr.sample_statistics().in_bytes >= (32 * 3 + payload_x3_size_bytes + 2) * service_multiplication_factor
    assert tr.sample_statistics().in_frames == 3 * service_multiplication_factor
    assert tr.sample_statistics().in_out_of_band_bytes == 0
    assert tr.sample_statistics().out_bytes == tr.sample_statistics().in_bytes
    assert tr.sample_statistics().out_frames == 3 * service_multiplication_factor
    assert tr.sample_statistics().out_transfers == 1 * service_multiplication_factor
    assert tr.sample_statistics().out_incomplete == 1  # INCREMENTED HERE

    #
    # Selective message exchange test.
    #
    assert await broadcaster.send(
        Transfer(
            timestamp=Timestamp.now(), priority=Priority.IMMEDIATE, transfer_id=99999, fragmented_payload=payload_x3
        ),
        monotonic_deadline=get_monotonic() + 5.0,
    )

    rx_transfer = await subscriber_promiscuous.receive(get_monotonic() + 5.0)
    print("PROMISCUOUS SUBSCRIBER TRANSFER:", rx_transfer)
    assert isinstance(rx_transfer, TransferFrom)
    assert rx_transfer.priority == Priority.IMMEDIATE
    assert rx_transfer.transfer_id == 99999
    assert b"".join(rx_transfer.fragmented_payload) == b"".join(payload_x3)

    rx_transfer = await subscriber_selective.receive(get_monotonic() + 1.0)
    print("SELECTIVE SUBSCRIBER TRANSFER:", rx_transfer)
    assert isinstance(rx_transfer, TransferFrom)
    assert rx_transfer.priority == Priority.IMMEDIATE
    assert rx_transfer.transfer_id == 99999
    assert b"".join(rx_transfer.fragmented_payload) == b"".join(payload_x3)

    assert None is await subscriber_selective.receive(get_monotonic() + 0.1)
    assert None is await subscriber_promiscuous.receive(get_monotonic() + 0.1)
    assert None is await server_listener.receive(get_monotonic() + 0.1)
    assert None is await client_listener.receive(get_monotonic() + 0.1)

    #
    # Out-of-band data test.
    #
    with caplog.at_level(logging.CRITICAL, logger=pyuavcan.transport.serial.__name__):
        stats_reference = tr.sample_statistics()

        # The frame delimiter is needed to force a new frame into the state machine.
        grownups = b"Aren't there any grownups at all? - No grownups!\x00"
        tr.serial_port.write(grownups)
        stats_reference.in_bytes += len(grownups)
        stats_reference.in_out_of_band_bytes += len(grownups)

        # Wait for the reader thread to catch up.
        assert None is await subscriber_selective.receive(get_monotonic() + 0.2)
        assert None is await subscriber_promiscuous.receive(get_monotonic() + 0.2)
        assert None is await server_listener.receive(get_monotonic() + 0.2)
        assert None is await client_listener.receive(get_monotonic() + 0.2)

        print(tr.sample_statistics())
        assert tr.sample_statistics() == stats_reference

        # The frame delimiter is needed to force a new frame into the state machine.
        tr.serial_port.write(bytes([0xFF, 0xFF, SerialFrame.FRAME_DELIMITER_BYTE]))
        stats_reference.in_bytes += 3
        stats_reference.in_out_of_band_bytes += 3

        # Wait for the reader thread to catch up.
        assert None is await subscriber_selective.receive(get_monotonic() + 0.2)
        assert None is await subscriber_promiscuous.receive(get_monotonic() + 0.2)
        assert None is await server_listener.receive(get_monotonic() + 0.2)
        assert None is await client_listener.receive(get_monotonic() + 0.2)

        print(tr.sample_statistics())
        assert tr.sample_statistics() == stats_reference

    #
    # Termination.
    #
    assert set(tr.input_sessions) == {subscriber_promiscuous, subscriber_selective, server_listener, client_listener}
    assert set(tr.output_sessions) == {broadcaster, server_responder, client_requester}

    subscriber_promiscuous.close()
    subscriber_promiscuous.close()  # Idempotency.

    assert set(tr.input_sessions) == {subscriber_selective, server_listener, client_listener}
    assert set(tr.output_sessions) == {broadcaster, server_responder, client_requester}

    broadcaster.close()
    broadcaster.close()  # Idempotency.

    assert set(tr.input_sessions) == {subscriber_selective, server_listener, client_listener}
    assert set(tr.output_sessions) == {server_responder, client_requester}

    tr.close()
    tr.close()  # Idempotency.

    assert not set(tr.input_sessions)
    assert not set(tr.output_sessions)

    with pytest.raises(pyuavcan.transport.ResourceClosedError):
        _ = tr.get_output_session(OutputSessionSpecifier(MessageDataSpecifier(2345), None), meta)

    with pytest.raises(pyuavcan.transport.ResourceClosedError):
        _ = tr.get_input_session(InputSessionSpecifier(MessageDataSpecifier(2345), None), meta)

    await asyncio.sleep(1)  # Let all pending tasks finalize properly to avoid stack traces in the output.
Example #40
0
def url_for(endpoint: str, **values: t.Any) -> str:
    """Generates a URL to the given endpoint with the method provided.

    Variable arguments that are unknown to the target endpoint are appended
    to the generated URL as query arguments.  If the value of a query argument
    is ``None``, the whole pair is skipped.  In case blueprints are active
    you can shortcut references to the same blueprint by prefixing the
    local endpoint with a dot (``.``).

    This will reference the index function local to the current blueprint::

        url_for('.index')

    See :ref:`url-building`.

    Configuration values ``APPLICATION_ROOT`` and ``SERVER_NAME`` are only used when
    generating URLs outside of a request context.

    To integrate applications, :class:`Flask` has a hook to intercept URL build
    errors through :attr:`Flask.url_build_error_handlers`.  The `url_for`
    function results in a :exc:`~werkzeug.routing.BuildError` when the current
    app does not have a URL for the given endpoint and values.  When it does, the
    :data:`~flask.current_app` calls its :attr:`~Flask.url_build_error_handlers` if
    it is not ``None``, which can return a string to use as the result of
    `url_for` (instead of `url_for`'s default to raise the
    :exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception.
    An example::

        def external_url_handler(error, endpoint, values):
            "Looks up an external URL when `url_for` cannot build a URL."
            # This is an example of hooking the build_error_handler.
            # Here, lookup_url is some utility function you've built
            # which looks up the endpoint in some external URL registry.
            url = lookup_url(endpoint, **values)
            if url is None:
                # External lookup did not have a URL.
                # Re-raise the BuildError, in context of original traceback.
                exc_type, exc_value, tb = sys.exc_info()
                if exc_value is error:
                    raise exc_type(exc_value).with_traceback(tb)
                else:
                    raise error
            # url_for will use this result, instead of raising BuildError.
            return url

        app.url_build_error_handlers.append(external_url_handler)

    Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and
    `endpoint` and `values` are the arguments passed into `url_for`.  Note
    that this is for building URLs outside the current application, and not for
    handling 404 NotFound errors.

    .. versionadded:: 0.10
       The `_scheme` parameter was added.

    .. versionadded:: 0.9
       The `_anchor` and `_method` parameters were added.

    .. versionadded:: 0.9
       Calls :meth:`Flask.handle_build_error` on
       :exc:`~werkzeug.routing.BuildError`.

    :param endpoint: the endpoint of the URL (name of the function)
    :param values: the variable arguments of the URL rule
    :param _external: if set to ``True``, an absolute URL is generated. Server
      address can be changed via ``SERVER_NAME`` configuration variable which
      falls back to the `Host` header, then to the IP and port of the request.
    :param _scheme: a string specifying the desired URL scheme. The `_external`
      parameter must be set to ``True`` or a :exc:`ValueError` is raised. The default
      behavior uses the same scheme as the current request, or
      :data:`PREFERRED_URL_SCHEME` if no request context is available.
      This also can be set to an empty string to build protocol-relative
      URLs.
    :param _anchor: if provided this is added as anchor to the URL.
    :param _method: if provided this explicitly specifies an HTTP method.
    """
    appctx = _app_ctx_stack.top
    reqctx = _request_ctx_stack.top

    if appctx is None:
        raise RuntimeError(
            "Attempted to generate a URL without the application context being"
            " pushed. This has to be executed when application context is"
            " available."
        )

    # If request specific information is available we have some extra
    # features that support "relative" URLs.
    if reqctx is not None:
        url_adapter = reqctx.url_adapter
        blueprint_name = request.blueprint

        if endpoint[:1] == ".":
            if blueprint_name is not None:
                endpoint = f"{blueprint_name}{endpoint}"
            else:
                endpoint = endpoint[1:]

        external = values.pop("_external", False)

    # Otherwise go with the url adapter from the appctx and make
    # the URLs external by default.
    else:
        url_adapter = appctx.url_adapter

        if url_adapter is None:
            raise RuntimeError(
                "Application was not able to create a URL adapter for request"
                " independent URL generation. You might be able to fix this by"
                " setting the SERVER_NAME config variable."
            )

        external = values.pop("_external", True)

    anchor = values.pop("_anchor", None)
    method = values.pop("_method", None)
    scheme = values.pop("_scheme", None)
    appctx.app.inject_url_defaults(endpoint, values)

    # This is not the best way to deal with this but currently the
    # underlying Werkzeug router does not support overriding the scheme on
    # a per build call basis.
    old_scheme = None
    if scheme is not None:
        if not external:
            raise ValueError("When specifying _scheme, _external must be True")
        old_scheme = url_adapter.url_scheme
        url_adapter.url_scheme = scheme

    try:
        try:
            rv = url_adapter.build(
                endpoint, values, method=method, force_external=external
            )
        finally:
            if old_scheme is not None:
                url_adapter.url_scheme = old_scheme
    except BuildError as error:
        # We need to inject the values again so that the app callback can
        # deal with that sort of stuff.
        values["_external"] = external
        values["_anchor"] = anchor
        values["_method"] = method
        values["_scheme"] = scheme
        return appctx.app.handle_url_build_error(error, endpoint, values)

    if anchor is not None:
        rv += f"#{url_quote(anchor)}"
    return rv
Example #41
0
 def make_parser(*args: typing.Any, **kwargs: typing.Any) -> HTMLMinParser:
     kwargs.update(kw)
     return HTMLMinParser(*args, **kwargs)
Example #42
0
    def cast(self, value: t.Any, safe=True) -> bool:
        if isinstance(value, str):
            value = value.lower().strip()
            return self._mapping_dict.get(value, bool(value))

        return bool(value)
Example #43
0
def test_experiment_remains_disabled_after_user_disables_it(
        selenium: typing.Any, addon_ids: dict, pytestconfig: typing.Any,
        pings: typing.Any):
    """Disable experiment, restart Firefox to make sure it stays disabled."""
    selenium.get("about:addons")

    selenium.execute_script(
        """
            const { AddonManager } = ChromeUtils.import(
            "resource://gre/modules/AddonManager.jsm"
            );

            async function callit(add_on) {
                let addon = await AddonManager.getAddonByID(add_on);
                console.log(add_on)
                await addon.disable();
            }
            callit(arguments[0]);
        """,
        list(addon_ids.keys())[0],
    )
    toolbar = ToolBar(selenium)
    for item in toolbar.toolbar_items:
        if list(addon_ids)[0] not in item._id:
            continue
        else:
            raise AssertionError("Extension is Found")
    selenium.quit()
    # Start firefox again with new selenium driver
    # Build new Firefox Instance with appropriate profile and binary
    if pytestconfig.getoption("--run-update-test"):
        profile = FirefoxProfile(
            f'{os.path.abspath("utilities/klaatu-profile")}')
        options = Options()
        options.add_argument("-profile")
        options.add_argument(f'{os.path.abspath("utilities/klaatu-profile")}')
        options.headless = True
        binary = os.path.abspath(
            "utilities/firefox-old-nightly/firefox/firefox-bin")
        options.binary = binary
    elif pytestconfig.getoption("--run-firefox-release"):
        profile = FirefoxProfile(
            f'{os.path.abspath("utilities/klaatu-profile-release-firefox")}')
        options = Options()
        options.add_argument("-profile")
        options.add_argument(
            f'{os.path.abspath("utilities/klaatu-profile-release-firefox")}')
        options.headless = True
        binary = os.path.abspath(
            "utilities/firefox-release/firefox/firefox-bin")
        options.binary = binary
    else:
        profile = FirefoxProfile(
            f'{os.path.abspath("utilities/klaatu-profile-current-nightly")}')
        options = Options()
        options.add_argument("-profile")
        options.add_argument(
            f'{os.path.abspath("utilities/klaatu-profile-current-nightly")}')
        options.headless = True
        binary = "/usr/bin/firefox"
        options.binary = binary

    # Start Firefox and test
    selenium = webdriver.Firefox(firefox_profile=profile,
                                 firefox_options=options,
                                 firefox_binary=binary)
    selenium.get("about:addons")

    # Make sure experiment is still not enabled
    toolbar = ToolBar(selenium)
    for item in toolbar.toolbar_items:
        if list(addon_ids)[0] not in item._id:
            continue
        else:
            raise AssertionError("Extension is Found")

    # Check last telemetry ping
    pings = pings.get_pings()
    addons = pings[-1]["environment"]["addons"]["activeAddons"]
    for item in addons:
        if list(addon_ids)[0] in item:
            assert True
        else:
            continue
    selenium.quit()
Example #44
0
async def test_select_user(tables: t.Any, sa_engine: t.Any) -> None:
    async with sa_engine.acquire() as conn:
        res = await select_user_by_id(conn, 1)

    assert res.id == 1
Example #45
0
 def construct_yaml_object(self, node: t.Any, cls: t.Any) -> t.Any:
     state = self.construct_mapping(node, deep=True)
     data = cls.__new__(cls, **state)
     if hasattr(data, '__setstate__'):
         data.__setstate__(state)
     yield data
Example #46
0
 def default(self, obj: typing.Any) -> typing.Any:
     if isinstance(obj, datetime):
         return obj.isoformat()
     return super().default(obj)
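For context, a minimal self-contained sketch of how such a default override is usually attached to a json.JSONEncoder subclass; the encoder class name here is hypothetical.
import json
import typing
from datetime import datetime


class DateTimeEncoder(json.JSONEncoder):
    def default(self, obj: typing.Any) -> typing.Any:
        # Serialize datetimes as ISO-8601 strings, defer everything else.
        if isinstance(obj, datetime):
            return obj.isoformat()
        return super().default(obj)


print(json.dumps({"ts": datetime(2021, 1, 1)}, cls=DateTimeEncoder))
# -> {"ts": "2021-01-01T00:00:00"}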
Example #47
0
def fixture_labels_file_write(tmpdir: typing.Any) -> str:
    """Return a filepath to a temporary file."""
    labels_file = tmpdir.join("labels.toml")
    return str(labels_file)
Example #48
0
 def _check(self, file: _Any) -> None:
     if self._rolled: return
     max_size = self._max_size
     if max_size and file.tell() > max_size:
         self.rollover()
Example #49
0
async def _unittest_serial_transport_capture(caplog: typing.Any) -> None:
    from pyuavcan.transport import MessageDataSpecifier, ServiceDataSpecifier, PayloadMetadata, Transfer
    from pyuavcan.transport import Priority, Timestamp, OutputSessionSpecifier

    get_monotonic = asyncio.get_event_loop().time

    tr = SerialTransport(serial_port="loop://", local_node_id=42, mtu=1024, service_transfer_multiplier=2)
    sft_capacity = 1024
    payload_single = [_mem("qwertyui"), _mem("01234567")] * (sft_capacity // 16)
    assert sum(map(len, payload_single)) == sft_capacity
    payload_x3 = (payload_single * 3)[:-1]
    payload_x3_size_bytes = sft_capacity * 3 - 8
    assert sum(map(len, payload_x3)) == payload_x3_size_bytes

    broadcaster = tr.get_output_session(
        OutputSessionSpecifier(MessageDataSpecifier(2345), None), PayloadMetadata(10000)
    )
    client_requester = tr.get_output_session(
        OutputSessionSpecifier(ServiceDataSpecifier(333, ServiceDataSpecifier.Role.REQUEST), 3210),
        PayloadMetadata(10000),
    )

    events: typing.List[SerialCapture] = []
    events2: typing.List[pyuavcan.transport.Capture] = []

    def append_events(cap: pyuavcan.transport.Capture) -> None:
        assert isinstance(cap, SerialCapture)
        events.append(cap)

    tr.begin_capture(append_events)
    tr.begin_capture(events2.append)
    assert events == []
    assert events2 == []

    #
    # Multi-frame message.
    #
    ts = Timestamp.now()
    assert await broadcaster.send(
        Transfer(timestamp=ts, priority=Priority.LOW, transfer_id=777, fragmented_payload=payload_x3),
        monotonic_deadline=get_monotonic() + 5.0,
    )
    await asyncio.sleep(0.1)
    assert events == events2
    # Send three, receive three.
    # Sorting is required because the ordering of the events in the middle is not defined: arrival events
    # may or may not be registered before the emission event depending on how the serial loopback is operating.
    a, b, c, d, e, f = sorted(events, key=lambda x: x.direction == SerialCapture.Direction.RX)
    assert isinstance(a, SerialCapture) and a.direction == SerialCapture.Direction.TX
    assert isinstance(b, SerialCapture) and b.direction == SerialCapture.Direction.TX
    assert isinstance(c, SerialCapture) and c.direction == SerialCapture.Direction.TX
    assert isinstance(d, SerialCapture) and d.direction == SerialCapture.Direction.RX
    assert isinstance(e, SerialCapture) and e.direction == SerialCapture.Direction.RX
    assert isinstance(f, SerialCapture) and f.direction == SerialCapture.Direction.RX

    def parse(x: SerialCapture) -> SerialFrame:
        out = SerialFrame.parse_from_cobs_image(x.fragment)
        assert out is not None
        return out

    assert parse(a).transfer_id == 777
    assert parse(b).transfer_id == 777
    assert parse(c).transfer_id == 777
    assert a.timestamp.monotonic >= ts.monotonic
    assert b.timestamp.monotonic >= ts.monotonic
    assert c.timestamp.monotonic >= ts.monotonic
    assert parse(a).index == 0
    assert parse(b).index == 1
    assert parse(c).index == 2
    assert not parse(a).end_of_transfer
    assert not parse(b).end_of_transfer
    assert parse(c).end_of_transfer

    assert a.fragment.tobytes().strip(b"\x00") == d.fragment.tobytes().strip(b"\x00")
    assert b.fragment.tobytes().strip(b"\x00") == e.fragment.tobytes().strip(b"\x00")
    assert c.fragment.tobytes().strip(b"\x00") == f.fragment.tobytes().strip(b"\x00")

    events.clear()
    events2.clear()

    #
    # Single-frame service request with dual frame duplication.
    #
    ts = Timestamp.now()
    assert await client_requester.send(
        Transfer(timestamp=ts, priority=Priority.HIGH, transfer_id=888, fragmented_payload=payload_single),
        monotonic_deadline=get_monotonic() + 5.0,
    )
    await asyncio.sleep(0.1)
    assert events == events2
    # Send two, receive two.
    # Sorting is required because the order of the two events in the middle is not defined: the arrival event
    # may or may not be registered before the emission event depending on how the serial loopback is operating.
    a, b, c, d = sorted(events, key=lambda x: x.direction == SerialCapture.Direction.RX)
    assert isinstance(a, SerialCapture) and a.direction == SerialCapture.Direction.TX
    assert isinstance(b, SerialCapture) and b.direction == SerialCapture.Direction.TX
    assert isinstance(c, SerialCapture) and c.direction == SerialCapture.Direction.RX
    assert isinstance(d, SerialCapture) and d.direction == SerialCapture.Direction.RX

    assert parse(a).transfer_id == 888
    assert parse(b).transfer_id == 888
    assert a.timestamp.monotonic >= ts.monotonic
    assert b.timestamp.monotonic >= ts.monotonic
    assert parse(a).index == 0
    assert parse(b).index == 0
    assert parse(a).end_of_transfer
    assert parse(b).end_of_transfer

    assert a.fragment.tobytes().strip(b"\x00") == c.fragment.tobytes().strip(b"\x00")
    assert b.fragment.tobytes().strip(b"\x00") == d.fragment.tobytes().strip(b"\x00")

    events.clear()
    events2.clear()

    #
    # Out-of-band data.
    #
    grownups = b"Aren't there any grownups at all? - No grownups!\x00"
    with caplog.at_level(logging.CRITICAL, logger=pyuavcan.transport.serial.__name__):
        # The frame delimiter is needed to force a new frame into the state machine.
        tr.serial_port.write(grownups)
        await asyncio.sleep(1)
    assert events == events2
    (oob,) = events
    assert isinstance(oob, SerialCapture)
    assert oob.direction == SerialCapture.Direction.RX
    assert bytes(oob.fragment) == grownups

    events.clear()
    events2.clear()
Example #50
0
def sprint(text: str, *args: typing.Any, **kwargs: typing.Any) -> None:
    file = kwargs.pop("file", sys.stdout)
    return print(style(text, file=file, *args, **kwargs), file=file)
Example #51
0
def init_app(app: t.Any) -> None:
    """Initialize the flask app by setting an error handler.

    :param app: The app to initialize
    """

    def handle_api_error(error: APIException) -> Response:
        """Handle an :class:`APIException` by converting it to a
        :class:`flask.Response`.

        :param APIException error: The error that occurred
        :returns: A response with the JSON serialized error as content.
        :rtype: flask.Response
        """
        response = jsonify(error)
        response.status_code = error.status_code
        logger.warning(
            'APIException occurred',
            api_exception=error.__to_json__(),
            exc_info=True,
        )

        psef.models.db.session.rollback()

        return response

    app.register_error_handler(APIException, handle_api_error)

    def handle_parse_error(error: rqa.SimpleParseError) -> Response:
        return handle_api_error(
            APIException(
                'The request body contained invalid data',
                str(error),
                APICodes.INVALID_PARAM,
                400,
                parse_error=error.to_dict(),
            ),
        )

    app.register_error_handler(rqa.SimpleParseError, handle_parse_error)
    app.register_error_handler(rqa.MultipleParseErrors, handle_parse_error)

    # Coverage is disabled for the next two handlers as they should never
    # run. If they do run, there is a bug in the application, so we can't
    # really test them.

    @app.errorhandler(404)
    def handle_404(_: object) -> JSONResponse[APIException]:  # pylint: disable=unused-variable; #pragma: no cover
        logger.warning('An unknown route was requested')

        api_exp = APIException(
            'The request route was not found',
            f'The route "{request.path}" does not exist',
            APICodes.ROUTE_NOT_FOUND, 404
        )

        psef.models.db.session.rollback()

        return jsonify(api_exp, status_code=404)

    @app.errorhandler(Exception)
    def __handle_unknown_error(
        _: Exception
    ) -> JSONResponse[APIException]:  # pragma: no cover
        """Handle an unhandled error.

        This function should never really be called, as it means our code
        contains a bug.
        """

        logger.error(
            'Unknown exception occurred', exc_info=True, report_to_sentry=True
        )

        api_exp = APIException(
            f'Something went wrong (id: {g.request_id})', (
                'The reason for this is unknown, '
                'please contact the system administrator'
            ), APICodes.UNKOWN_ERROR, 500
        )

        psef.models.db.session.rollback()

        return jsonify(api_exp, status_code=500)
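A short usage sketch of how init_app might be wired into a Flask application factory; the factory itself is illustrative and not part of the source.

from flask import Flask

def create_app() -> Flask:
    app = Flask(__name__)
    # Registers the APIException, parse-error, 404, and 500 handlers defined above.
    init_app(app)
    return app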
Example #52
0
    def format_scheme_value(
        value: typing.Any, force_quotes: bool = False, verbatim: bool = False
    ) -> str:
        r"""
        Formats ``value`` as Scheme would.

        ..  container:: example

            Some basic values:

            >>> abjad.Scheme.format_scheme_value(1)
            '1'

            >>> abjad.Scheme.format_scheme_value('foo')
            'foo'

            >>> abjad.Scheme.format_scheme_value('bar baz')
            '"bar baz"'

            >>> abjad.Scheme.format_scheme_value([1.5, True, False])
            '(1.5 #t #f)'

        ..  container:: example

            Strings without whitespace can be forcibly quoted via the
            ``force_quotes`` keyword:

            >>> abjad.Scheme.format_scheme_value(
            ...     'foo',
            ...     force_quotes=True,
            ...     )
            '"foo"'

        ..  container:: example

            Set ``verbatim`` to true to format the value exactly (with only a
            hash prepended):

            >>> string = '(lambda (grob) (grob-interpret-markup grob'
            >>> string += r' #{ \markup \musicglyph #"noteheads.s0harmonic" #}))'
            >>> abjad.Scheme.format_scheme_value(string, verbatim=True)
            '(lambda (grob) (grob-interpret-markup grob #{ \\markup \\musicglyph #"noteheads.s0harmonic" #}))'

        ..  container:: example

            A hash symbol at the beginning of a string does not result in
            quoted output:

            >>> string = '#1-finger'
            >>> abjad.Scheme.format_scheme_value(string)
            '#1-finger'

        """
        if isinstance(value, str) and verbatim:
            return value
        elif isinstance(value, str) and not verbatim:
            value = value.replace('"', r"\"")
            if value.startswith("#"):
                pass
            elif value.startswith("\\"):
                pass
            elif force_quotes or " " in value or "#" in value:
                return f'"{value}"'
            return value
        elif value is True:
            return "#t"
        elif value is False:
            return "#f"
        elif isinstance(value, (list, tuple)):
            string = " ".join(Scheme.format_scheme_value(_) for _ in value)
            return f"({string})"
        elif isinstance(value, Scheme):
            return str(value)
        elif value is None:
            return "#f"
        return str(value)
Example #53
0
 def __release_local__(storage: t.Any) -> None:
     # Can remove when support for non-stdlib ContextVars is
     # removed, see "Fake" version below.
     storage.set({})
Example #54
 async def send_jsonb(self, data: t.Any, **dump_kwargs):
     data = json.dumps(data, **dump_kwargs)
     await self.send({"type": SendEvent.SEND, "bytes": data.encode()})
Example #55
0
def version_option(
    version: t.Optional[str] = None,
    *param_decls: str,
    package_name: t.Optional[str] = None,
    prog_name: t.Optional[str] = None,
    message: t.Optional[str] = None,
    **kwargs: t.Any,
) -> t.Callable[[FC], FC]:
    """Add a ``--version`` option which immediately prints the version
    number and exits the program.

    If ``version`` is not provided, Click will try to detect it using
    :func:`importlib.metadata.version` to get the version for the
    ``package_name``. On Python < 3.8, the ``importlib_metadata``
    backport must be installed.

    If ``package_name`` is not provided, Click will try to detect it by
    inspecting the stack frames. This will be used to detect the
    version, so it must match the name of the installed package.

    :param version: The version number to show. If not provided, Click
        will try to detect it.
    :param param_decls: One or more option names. Defaults to the single
        value ``"--version"``.
    :param package_name: The package name to detect the version from. If
        not provided, Click will try to detect it.
    :param prog_name: The name of the CLI to show in the message. If not
        provided, it will be detected from the command.
    :param message: The message to show. The values ``%(prog)s``,
        ``%(package)s``, and ``%(version)s`` are available. Defaults to
        ``"%(prog)s, version %(version)s"``.
    :param kwargs: Extra arguments are passed to :func:`option`.
    :raise RuntimeError: ``version`` could not be detected.

    .. versionchanged:: 8.0
        Add the ``package_name`` parameter, and the ``%(package)s``
        value for messages.

    .. versionchanged:: 8.0
        Use :mod:`importlib.metadata` instead of ``pkg_resources``. The
        version is detected based on the package name, not the entry
        point name. The Python package name must match the installed
        package name, or be passed with ``package_name=``.
    """
    if message is None:
        message = _("%(prog)s, version %(version)s")

    if version is None and package_name is None:
        frame = inspect.currentframe()
        f_back = frame.f_back if frame is not None else None
        f_globals = f_back.f_globals if f_back is not None else None
        # break reference cycle
        # https://docs.python.org/3/library/inspect.html#the-interpreter-stack
        del frame

        if f_globals is not None:
            package_name = f_globals.get("__name__")

            if package_name == "__main__":
                package_name = f_globals.get("__package__")

            if package_name:
                package_name = package_name.partition(".")[0]

    def callback(ctx: Context, param: Parameter, value: bool) -> None:
        if not value or ctx.resilient_parsing:
            return

        nonlocal prog_name
        nonlocal version

        if prog_name is None:
            prog_name = ctx.find_root().info_name

        if version is None and package_name is not None:
            metadata: t.Optional[types.ModuleType]

            try:
                from importlib import metadata  # type: ignore
            except ImportError:
                # Python < 3.8
                import pipenv.vendor.importlib_metadata as metadata  # type: ignore

            try:
                version = metadata.version(package_name)  # type: ignore
            except metadata.PackageNotFoundError:  # type: ignore
                raise RuntimeError(
                    f"{package_name!r} is not installed. Try passing"
                    " 'package_name' instead.") from None

        if version is None:
            raise RuntimeError(
                f"Could not determine the version for {package_name!r} automatically."
            )

        echo(
            t.cast(str, message) % {
                "prog": prog_name,
                "package": package_name,
                "version": version
            },
            color=ctx.color,
        )
        ctx.exit()

    if not param_decls:
        param_decls = ("--version", )

    kwargs.setdefault("is_flag", True)
    kwargs.setdefault("expose_value", False)
    kwargs.setdefault("is_eager", True)
    kwargs.setdefault("help", _("Show the version and exit."))
    kwargs["callback"] = callback
    return option(*param_decls, **kwargs)
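A usage sketch, assuming the decorator above behaves like Click's own click.version_option; the command name, program name, and version string are illustrative.

import click

@click.command()
@version_option(version="1.2.3", prog_name="mytool")
def cli() -> None:
    """Example entry point."""

if __name__ == "__main__":
    cli()
# Running `mytool --version` prints "mytool, version 1.2.3" and exits.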