Example #1
class NodeInclude(NodeVoid):
    """@API
    Node {% include ... %}"""
    incfile = attr.ib(default=None, init=False)
    vars = attr.ib(default=attr.Factory(dict), init=False)
    LIQUID_INCLUDE_SOURCE = '_liquid_include_source'

    def start(self):
        if not self.attrs:
            raise LiquidSyntaxError("No file to include", self.context)
        parts = safe_split(self.attrs, " ", limit=1)
        if not check_quotes(parts[0]):  # pragma: no cover
            # safe_split makes it impossible to fail here
            raise LiquidSyntaxError(
                "Incorrectly quoted inclusion file: "
                f"{parts[0]!r}", self.context)
        parts[0] = unquote(parts[0])
        # also scan the directory containing the current template file
        incdirs = (self.config.include +
                   [Path(self.context.filename).resolve().parent])
        self.incfile = scan_file(parts[0], incdirs)
        incdirstr = ""
        if LOGGER.level < LIQUID_LOGLEVELID_DETAIL:
            incdirstr = "\nInclusion directories:\n"
            incdirstr += "\n".join(f"- {incdir}" for incdir in incdirs)
        if not self.incfile:
            raise LiquidSyntaxError(
                "Cannot find file for inclusion: "
                f"{parts[0]}{incdirstr}", self.context)
        if not self.incfile.is_file():
            raise LiquidSyntaxError(
                "File not exists for inclusion: "
                f"{parts[0]}{incdirstr}", self.context)
        if len(parts) > 1:
            vars_witheq = safe_split(parts[1], ',')
            for vareq in vars_witheq:
                if not vareq:
                    raise LiquidSyntaxError(
                        "Empty variable item in "
                        f"{self.name!r} node", self.context)
                parts_eq = safe_split(vareq, '=', limit=1)
                if len(parts_eq) > 1:
                    self.vars[parts_eq[0]] = self.try_mixed(parts_eq[1])
                elif parts_eq[0].isidentifier():
                    self.vars[parts_eq[0]] = parts_eq[0]
                else:
                    raise LiquidSyntaxError(
                        "A variable or a kwarg needed "
                        "for variables passed to "
                        f"{self.name!r} node", self.context)
        self.vars[LIQUID_RENDERED_APPEND] = LIQUID_RENDERED_APPEND
        self.vars[LIQUID_RENDERED_EXTEND] = LIQUID_RENDERED_EXTEND

    def parse_node(self):
        super().parse_node()
        funcname = f"{NodeInclude.LIQUID_INCLUDE_SOURCE}_{id(self)}"
        varnames = ", ".join(self.vars)
        kwargs = ", ".join(f"{key}={val}" for key, val in self.vars.items())
        inccode = LiquidCode()
        self.code.add_line('')
        self.code.add_line(f"def {funcname}({varnames}):")
        self.code.indent()
        self.code.add_line("'''Build source from included file'''")
        self.code.add_line('')
        self.code.add_code(inccode)
        self.code.add_line('')
        self.code.dedent()
        self.code.add_line(f"{funcname}({kwargs})")
        with self.config.tear() as teared_config:
            parser = self.context.parser.__class__(
                stream=None,
                code=inccode,
                shared_code=self.shared_code,
                filename=self.incfile,
                prev=(self.context.lineno, self.context.parser),
                config=teared_config)
            parser.parse()
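
For reference, parse_node() above does not render the included file in place: it emits a small wrapper function into the generated template code and immediately calls it with the collected variables (plus the internal append/extend helpers added in start()). A hedged, self-contained sketch of roughly what that emitted source looks like; the numeric suffix comes from id(self) and the variable names here are purely illustrative:

def _liquid_include_source_140356(title, user):
    '''Build source from included file'''
    # the body generated from the parsed include file would go here;
    # a stub return is used so this sketch runs on its own
    return f"<header>{title} - {user}</header>"

print(_liquid_include_source_140356(title="Home", user="alice"))
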
Example #2
class DFE:
    """
    Class representing a "Distribution of Fitness Effects", i.e., a DFE.
    The class records the different *mutation types*, and the *proportions*
    with which they occur. The overall rate of mutations will be determined
    by the Contig to which the DFE is applied (see :meth:`.Contig.add_dfe`).

    Instances of this class are constructed by DFE implementors, following the
    :ref:`developer documentation <sec_development_dfe_model>`. To instead
    obtain a pre-specified model as listed in the :ref:`sec_catalog`,
    see :meth:`Species.get_dfe`.

    ``proportions`` and ``mutation_types`` must be lists of the same length,
    and ``proportions`` should be nonnegative numbers summing to 1.

    :ivar ~.mutation_types: A list of MutationTypes associated with the DFE.
    :vartype ~.mutation_types: list
    :ivar ~.proportions: A list of the proportions of new mutations that
        fall in to each of the mutation types (must sum to 1).
    :vartype ~.proportions: list
    :ivar ~.id: The unique identifier for this model. DFE IDs should be
        short and memorable, and conform to the stdpopsim
        :ref:`naming conventions <sec_development_naming_conventions>`
        for DFE models.
    :vartype ~.id: str
    :ivar ~.description: A short description of this model as it would be used in
        written text, e.g., "Lognormal DFE". This should
        describe the DFE itself and not contain author or year information.
    :vartype ~.description: str
    :ivar long_description: A concise, but detailed, summary of the DFE model.
    :vartype long_description: str
    :ivar citations: A list of :class:`Citations <.Citation>`, that describe the primary
        reference(s) for the DFE model.
    :vartype citations: list of :class:`Citation`
    """

    id = attr.ib()
    description = attr.ib()
    long_description = attr.ib()
    mutation_types = attr.ib(default=attr.Factory(list))
    proportions = attr.ib(default=attr.Factory(list))
    citations = attr.ib(default=attr.Factory(list))

    def __attrs_post_init__(self):
        self.citations = [] if self.citations is None else self.citations
        if self.proportions == [] and len(self.mutation_types) == 1:
            self.proportions = [1]

        if not (isinstance(self.proportions, collections.abc.Collection)
                and isinstance(self.mutation_types, collections.abc.Collection)
                and len(self.proportions) == len(self.mutation_types)):
            raise ValueError(
                "proportions and mutation_types must be lists of the same length."
            )

        for p in self.proportions:
            if not isinstance(p, (float, int)) or p < 0:
                raise ValueError("proportions must be nonnegative numbers.")
        sum_p = sum(self.proportions)
        if not np.isclose(sum_p, 1):
            raise ValueError("proportions must sum 1.0.")

        for m in self.mutation_types:
            if not isinstance(m, MutationType):
                raise ValueError(
                    "mutation_types must be a list of MutationType objects.")

    def __str__(self):
        long_desc_lines = [
            line.strip()
            for line in textwrap.wrap(textwrap.dedent(self.long_description))
        ]
        long_desc = "\n║                     ".join(long_desc_lines)
        s = (
            "DFE:\n"
            f"║  id               = {self.id}\n"
            f"║  description      = {self.description}\n"
            f"║  long_description = {long_desc}\n"
            f"║  citations        = {[cite.doi for cite in self.citations]}\n")
        return s
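
A minimal construction sketch for the class above, assuming MutationType can be instantiated with no arguments (its real parameters live alongside this class); the id and descriptions are illustrative:

neutral = MutationType()        # assumed no-arg constructor
deleterious = MutationType()    # parameters omitted in this sketch

dfe = DFE(
    id="ExampleDFE",
    description="Example DFE",
    long_description="A two-class DFE used only to illustrate construction.",
    mutation_types=[neutral, deleterious],
    proportions=[0.3, 0.7],     # nonnegative and sums to 1, as required above
)
print(dfe)
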
Example #3
class FormattedExcinfo:
    """ presenting information about failing Functions and Generators. """

    # for traceback entries
    flow_marker = ">"
    fail_marker = "E"

    showlocals = attr.ib(type=bool, default=False)
    style = attr.ib(type="_TracebackStyle", default="long")
    abspath = attr.ib(type=bool, default=True)
    tbfilter = attr.ib(type=bool, default=True)
    funcargs = attr.ib(type=bool, default=False)
    truncate_locals = attr.ib(type=bool, default=True)
    chain = attr.ib(type=bool, default=True)
    astcache = attr.ib(default=attr.Factory(dict), init=False, repr=False)

    def _getindent(self, source: "Source") -> int:
        # figure out indent for given source
        try:
            s = str(source.getstatement(len(source) - 1))
        except KeyboardInterrupt:
            raise
        except BaseException:
            try:
                s = str(source[-1])
            except KeyboardInterrupt:
                raise
            except BaseException:
                return 0
        return 4 + (len(s) - len(s.lstrip()))

    def _getentrysource(self, entry: TracebackEntry) -> Optional["Source"]:
        source = entry.getsource(self.astcache)
        if source is not None:
            source = source.deindent()
        return source

    def repr_args(self, entry: TracebackEntry) -> Optional["ReprFuncArgs"]:
        if self.funcargs:
            args = []
            for argname, argvalue in entry.frame.getargs(var=True):
                args.append((argname, saferepr(argvalue)))
            return ReprFuncArgs(args)
        return None

    def get_source(
        self,
        source: "Source",
        line_index: int = -1,
        excinfo: Optional[ExceptionInfo] = None,
        short: bool = False,
    ) -> List[str]:
        """ return formatted and marked up source lines. """
        lines = []
        if source is None or line_index >= len(source.lines):
            source = Source("???")
            line_index = 0
        if line_index < 0:
            line_index += len(source)
        space_prefix = "    "
        if short:
            lines.append(space_prefix + source.lines[line_index].strip())
        else:
            for line in source.lines[:line_index]:
                lines.append(space_prefix + line)
            lines.append(self.flow_marker + "   " + source.lines[line_index])
            for line in source.lines[line_index + 1:]:
                lines.append(space_prefix + line)
        if excinfo is not None:
            indent = 4 if short else self._getindent(source)
            lines.extend(self.get_exconly(excinfo, indent=indent,
                                          markall=True))
        return lines

    def get_exconly(self,
                    excinfo: ExceptionInfo,
                    indent: int = 4,
                    markall: bool = False) -> List[str]:
        lines = []
        indentstr = " " * indent
        # get the real exception information out
        exlines = excinfo.exconly(tryshort=True).split("\n")
        failindent = self.fail_marker + indentstr[1:]
        for line in exlines:
            lines.append(failindent + line)
            if not markall:
                failindent = indentstr
        return lines

    def repr_locals(self, locals: Mapping[str,
                                          object]) -> Optional["ReprLocals"]:
        if self.showlocals:
            lines = []
            keys = [loc for loc in locals if loc[0] != "@"]
            keys.sort()
            for name in keys:
                value = locals[name]
                if name == "__builtins__":
                    lines.append("__builtins__ = <builtins>")
                else:
                    # This formatting could all be handled by the
                    # _repr() function, which is only reprlib.Repr in
                    # disguise, so is very configurable.
                    if self.truncate_locals:
                        str_repr = saferepr(value)
                    else:
                        str_repr = safeformat(value)
                    # if len(str_repr) < 70 or not isinstance(value,
                    #                            (list, tuple, dict)):
                    lines.append("{:<10} = {}".format(name, str_repr))
                    # else:
                    #    self._line("%-10s =\\" % (name,))
                    #    # XXX
                    #    pprint.pprint(value, stream=self.excinfowriter)
            return ReprLocals(lines)
        return None

    def repr_traceback_entry(
            self,
            entry: TracebackEntry,
            excinfo: Optional[ExceptionInfo] = None) -> "ReprEntry":
        lines = []  # type: List[str]
        style = entry._repr_style if entry._repr_style is not None else self.style
        if style in ("short", "long"):
            source = self._getentrysource(entry)
            if source is None:
                source = Source("???")
                line_index = 0
            else:
                line_index = entry.lineno - entry.getfirstlinesource()
            short = style == "short"
            reprargs = self.repr_args(entry) if not short else None
            s = self.get_source(source, line_index, excinfo, short=short)
            lines.extend(s)
            if short:
                message = "in %s" % (entry.name)
            else:
                message = excinfo and excinfo.typename or ""
            path = self._makepath(entry.path)
            reprfileloc = ReprFileLocation(path, entry.lineno + 1, message)
            localsrepr = self.repr_locals(entry.locals)
            return ReprEntry(lines, reprargs, localsrepr, reprfileloc, style)
        elif style == "value":
            if excinfo:
                lines.extend(str(excinfo.value).split("\n"))
            return ReprEntry(lines, None, None, None, style)
        else:
            if excinfo:
                lines.extend(self.get_exconly(excinfo, indent=4))
            return ReprEntry(lines, None, None, None, style)

    def _makepath(self, path):
        if not self.abspath:
            try:
                np = py.path.local().bestrelpath(path)
            except OSError:
                return path
            if len(np) < len(str(path)):
                path = np
        return path

    def repr_traceback(self, excinfo: ExceptionInfo) -> "ReprTraceback":
        traceback = excinfo.traceback
        if self.tbfilter:
            traceback = traceback.filter()

        if excinfo.errisinstance(RecursionError):
            traceback, extraline = self._truncate_recursive_traceback(
                traceback)
        else:
            extraline = None

        last = traceback[-1]
        entries = []
        if self.style == "value":
            reprentry = self.repr_traceback_entry(last, excinfo)
            entries.append(reprentry)
            return ReprTraceback(entries, None, style=self.style)

        for index, entry in enumerate(traceback):
            einfo = (last == entry) and excinfo or None
            reprentry = self.repr_traceback_entry(entry, einfo)
            entries.append(reprentry)
        return ReprTraceback(entries, extraline, style=self.style)

    def _truncate_recursive_traceback(
            self, traceback: Traceback) -> Tuple[Traceback, Optional[str]]:
        """
        Truncate the given recursive traceback trying to find the starting point
        of the recursion.

        The detection is done by going through each traceback entry and finding the
        point in which the locals of the frame are equal to the locals of a previous frame (see ``recursionindex()``.

        Handle the situation where the recursion process might raise an exception (for example
        comparing numpy arrays using equality raises a TypeError), in which case we do our best to
        warn the user of the error and show a limited traceback.
        """
        try:
            recursionindex = traceback.recursionindex()
        except Exception as e:
            max_frames = 10
            extraline = (
                "!!! Recursion error detected, but an error occurred locating the origin of recursion.\n"
                "  The following exception happened when comparing locals in the stack frame:\n"
                "    {exc_type}: {exc_msg}\n"
                "  Displaying first and last {max_frames} stack frames out of {total}."
            ).format(
                exc_type=type(e).__name__,
                exc_msg=str(e),
                max_frames=max_frames,
                total=len(traceback),
            )  # type: Optional[str]
            # Type ignored because adding two instances of a List subtype
            # currently incorrectly has type List instead of the subtype.
            traceback = traceback[:max_frames] + traceback[
                -max_frames:]  # type: ignore
        else:
            if recursionindex is not None:
                extraline = "!!! Recursion detected (same locals & position)"
                traceback = traceback[:recursionindex + 1]
            else:
                extraline = None

        return traceback, extraline

    def repr_excinfo(self, excinfo: ExceptionInfo) -> "ExceptionChainRepr":
        repr_chain = (
            []
        )  # type: List[Tuple[ReprTraceback, Optional[ReprFileLocation], Optional[str]]]
        e = excinfo.value
        excinfo_ = excinfo  # type: Optional[ExceptionInfo]
        descr = None
        seen = set()  # type: Set[int]
        while e is not None and id(e) not in seen:
            seen.add(id(e))
            if excinfo_:
                reprtraceback = self.repr_traceback(excinfo_)
                reprcrash = (
                    excinfo_._getreprcrash() if self.style != "value" else None
                )  # type: Optional[ReprFileLocation]
            else:
                # fallback to native repr if the exception doesn't have a traceback:
                # ExceptionInfo objects require a full traceback to work
                reprtraceback = ReprTracebackNative(
                    traceback.format_exception(type(e), e, None))
                reprcrash = None

            repr_chain += [(reprtraceback, reprcrash, descr)]
            if e.__cause__ is not None and self.chain:
                e = e.__cause__
                excinfo_ = (ExceptionInfo(
                    (type(e), e,
                     e.__traceback__)) if e.__traceback__ else None)
                descr = "The above exception was the direct cause of the following exception:"
            elif (e.__context__ is not None and not e.__suppress_context__
                  and self.chain):
                e = e.__context__
                excinfo_ = (ExceptionInfo(
                    (type(e), e,
                     e.__traceback__)) if e.__traceback__ else None)
                descr = "During handling of the above exception, another exception occurred:"
            else:
                e = None
        repr_chain.reverse()
        return ExceptionChainRepr(repr_chain)
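
A hedged usage sketch: pytest wraps a caught exception in an ExceptionInfo and hands it to this formatter. ExceptionInfo.from_current() and the _pytest._code import path are assumptions about pytest internals and may vary between versions:

from _pytest._code import ExceptionInfo

try:
    1 / 0
except ZeroDivisionError:
    excinfo = ExceptionInfo.from_current()   # assumed helper for the active exception

formatter = FormattedExcinfo(showlocals=True, style="short")
chain_repr = formatter.repr_excinfo(excinfo)   # ExceptionChainRepr for reporting
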
Example #4
class BasePath(object):
    path = attr.ib(default=None)  # type: Path
    _children = attr.ib(default=attr.Factory(dict),
                        cmp=False)  # type: Dict[str, PathEntry]
    only_python = attr.ib(default=False)  # type: bool
    name = attr.ib(type=str)
    _py_version = attr.ib(default=None,
                          cmp=False)  # type: Optional[PythonVersion]
    _pythons = attr.ib(default=attr.Factory(defaultdict),
                       cmp=False)  # type: DefaultDict[str, PathEntry]
    _is_dir = attr.ib(default=None, cmp=False)  # type: Optional[bool]
    _is_executable = attr.ib(default=None, cmp=False)  # type: Optional[bool]
    _is_python = attr.ib(default=None, cmp=False)  # type: Optional[bool]

    def __str__(self):
        # type: () -> str
        return fs_str("{0}".format(self.path.as_posix()))

    def __lt__(self, other):
        # type: ("BasePath") -> bool
        return self.path.as_posix() < other.path.as_posix()

    def __le__(self, other):
        # type: ("BasePath") -> bool
        return self.path.as_posix() <= other.path.as_posix()

    def __gt__(self, other):
        # type: ("BasePath") -> bool
        return self.path.as_posix() > other.path.as_posix()

    def __ge__(self, other):
        # type: ("BasePath") -> bool
        return self.path.as_posix() >= other.path.as_posix()

    def which(self, name):
        # type: (str) -> Optional[PathEntry]
        """Search in this path for an executable.

        :param name: The name of an executable to search for.
        :type name: str
        :returns: :class:`~pythonfinder.models.PathEntry` instance.
        """

        valid_names = [name] + [
            "{0}.{1}".format(name, ext).lower()
            if ext else "{0}".format(name).lower() for ext in KNOWN_EXTS
        ]
        children = self.children
        found = None
        if self.path is not None:
            found = next(
                (children[(self.path / child).as_posix()]
                 for child in valid_names
                 if (self.path / child).as_posix() in children),
                None,
            )
        return found

    def __del__(self):
        for key in ["_is_dir", "_is_python", "_is_executable", "_py_version"]:
            if getattr(self, key, None):
                try:
                    delattr(self, key)
                except Exception:
                    print("failed deleting key: {0}".format(key))
        self._children = {}
        for key in list(self._pythons.keys()):
            del self._pythons[key]
        self._pythons = None
        self._py_version = None
        self.path = None

    @property
    def children(self):
        # type: () -> Dict[str, PathEntry]
        if not self.is_dir:
            return {}
        return self._children

    @property
    def as_python(self):
        # type: () -> PythonVersion
        py_version = None
        if self.py_version:
            return self.py_version
        if not self.is_dir and self.is_python:
            try:
                from .python import PythonVersion

                py_version = PythonVersion.from_path(  # type: ignore
                    path=self, name=self.name)
            except (ValueError, InvalidPythonVersion):
                pass
        self.py_version = py_version
        return py_version  # type: ignore

    @name.default
    def get_name(self):
        # type: () -> Optional[str]
        if self.path:
            return self.path.name
        return None

    @property
    def is_dir(self):
        # type: () -> bool
        if self._is_dir is None:
            if not self.path:
                ret_val = False
            else:
                try:
                    ret_val = self.path.is_dir()
                except OSError:
                    ret_val = False
            self._is_dir = ret_val
        return self._is_dir

    @is_dir.setter
    def is_dir(self, val):
        # type: (bool) -> None
        self._is_dir = val

    @is_dir.deleter
    def is_dir(self):
        # type: () -> None
        self._is_dir = None

    @property
    def is_executable(self):
        # type: () -> bool
        if self._is_executable is None:
            if not self.path:
                self._is_executable = False
            else:
                self._is_executable = path_is_known_executable(self.path)
        return self._is_executable

    @is_executable.setter
    def is_executable(self, val):
        # type: (bool) -> None
        self._is_executable = val

    @is_executable.deleter
    def is_executable(self):
        # type: () -> None
        self._is_executable = None

    @property
    def is_python(self):
        # type: () -> bool
        if self._is_python is None:
            if not self.path:
                self._is_python = False
            else:
                self._is_python = self.is_executable and (looks_like_python(
                    self.path.name))
        return self._is_python

    @is_python.setter
    def is_python(self, val):
        # type: (bool) -> None
        self._is_python = val

    @is_python.deleter
    def is_python(self):
        # type: () -> None
        self._is_python = None

    def get_py_version(self):
        # type: () -> Optional[PythonVersion]
        from ..environment import IGNORE_UNSUPPORTED

        if self.is_dir:
            return None
        if self.is_python:
            py_version = None
            from .python import PythonVersion

            try:
                py_version = PythonVersion.from_path(  # type: ignore
                    path=self, name=self.name)
            except (InvalidPythonVersion, ValueError):
                py_version = None
            except Exception:
                if not IGNORE_UNSUPPORTED:
                    raise
            return py_version
        return None

    @property
    def py_version(self):
        # type: () -> Optional[PythonVersion]
        if not self._py_version:
            py_version = self.get_py_version()
            self._py_version = py_version
        else:
            py_version = self._py_version
        return py_version

    @py_version.setter
    def py_version(self, val):
        # type: (Optional[PythonVersion]) -> None
        self._py_version = val

    @py_version.deleter
    def py_version(self):
        # type: () -> None
        self._py_version = None

    def _iter_pythons(self):
        # type: () -> Iterator
        if self.is_dir:
            for entry in self.children.values():
                if entry is None:
                    continue
                elif entry.is_dir:
                    for python in entry._iter_pythons():
                        yield python
                elif entry.is_python and entry.as_python is not None:
                    yield entry
        elif self.is_python and self.as_python is not None:
            yield self  # type: ignore

    @property
    def pythons(self):
        # type: () -> DefaultDict[Union[str, Path], PathEntry]
        if not self._pythons:
            from .path import PathEntry

            self._pythons = defaultdict(PathEntry)
            for python in self._iter_pythons():
                python_path = python.path.as_posix()  # type: ignore
                self._pythons[python_path] = python
        return self._pythons

    def __iter__(self):
        # type: () -> Iterator
        for entry in self.children.values():
            yield entry

    def __next__(self):
        # type: () -> Generator
        return next(iter(self))

    def next(self):
        # type: () -> Generator
        return self.__next__()

    def find_all_python_versions(
            self,
            major=None,  # type: Optional[Union[str, int]]
            minor=None,  # type: Optional[int]
            patch=None,  # type: Optional[int]
            pre=None,  # type: Optional[bool]
            dev=None,  # type: Optional[bool]
            arch=None,  # type: Optional[str]
            name=None,  # type: Optional[str]
    ):
        # type: (...) -> List[PathEntry]
        """Search for a specific python version on the path. Return all copies

        :param major: Major python version to search for.
        :type major: int
        :param int minor: Minor python version to search for, defaults to None
        :param int patch: Patch python version to search for, defaults to None
        :param bool pre: Search for prereleases (default None) - prioritize releases if None
        :param bool dev: Search for devreleases (default None) - prioritize releases if None
        :param str arch: Architecture to include, e.g. '64bit', defaults to None
        :param str name: The name of a python version, e.g. ``anaconda3-5.3.0``
        :return: A list of :class:`~pythonfinder.models.PathEntry` instances matching the version requested.
        :rtype: List[:class:`~pythonfinder.models.PathEntry`]
        """

        call_method = "find_all_python_versions" if self.is_dir else "find_python_version"
        sub_finder = operator.methodcaller(call_method, major, minor, patch,
                                           pre, dev, arch, name)
        if not self.is_dir:
            return sub_finder(self)
        unnested = [sub_finder(path) for path in expand_paths(self)]
        version_sort = operator.attrgetter("as_python.version_sort")
        unnested = [
            p for p in unnested if p is not None and p.as_python is not None
        ]
        paths = sorted(unnested, key=version_sort, reverse=True)
        return list(paths)

    def find_python_version(
            self,
            major=None,  # type: Optional[Union[str, int]]
            minor=None,  # type: Optional[int]
            patch=None,  # type: Optional[int]
            pre=None,  # type: Optional[bool]
            dev=None,  # type: Optional[bool]
            arch=None,  # type: Optional[str]
            name=None,  # type: Optional[str]
    ):
        # type: (...) -> Optional[PathEntry]
        """Search or self for the specified Python version and return the first match.

        :param major: Major version number.
        :type major: int
        :param int minor: Minor python version to search for, defaults to None
        :param int patch: Patch python version to search for, defaults to None
        :param bool pre: Search for prereleases (default None) - prioritize releases if None
        :param bool dev: Search for devreleases (default None) - prioritize releases if None
        :param str arch: Architecture to include, e.g. '64bit', defaults to None
        :param str name: The name of a python version, e.g. ``anaconda3-5.3.0``
        :returns: A :class:`~pythonfinder.models.PathEntry` instance matching the version requested.
        """

        version_matcher = operator.methodcaller("matches",
                                                major,
                                                minor,
                                                patch,
                                                pre,
                                                dev,
                                                arch,
                                                python_name=name)
        if not self.is_dir:
            if self.is_python and self.as_python and version_matcher(
                    self.py_version):
                return self  # type: ignore

        matching_pythons = [
            [entry, entry.as_python.version_sort]
            for entry in self._iter_pythons()
            if (entry is not None and entry.as_python is not None
                and version_matcher(entry.py_version))
        ]
        results = sorted(matching_pythons,
                         key=operator.itemgetter(1, 0),
                         reverse=True)
        return next(iter(r[0] for r in results if r is not None), None)
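
BasePath is the shared base for pythonfinder's concrete path entries. A hedged sketch of the typical query pattern; PathEntry and its create() classmethod are assumptions about the surrounding package rather than part of the snippet above:

from pathlib import Path

entry = PathEntry.create(path=Path("/usr/bin"), is_root=True)   # assumed factory helper
match = entry.find_python_version(3, minor=8)
if match is not None:
    print(match.path, match.py_version)
print(len(entry.find_all_python_versions(3)))
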
Example #5
class Visual(object):
    highlights = attr.ib(default=attr.Factory(list))
    focus = attr.ib(default=None)
Example #6
class EventBuilder:
    """A format independent event builder used to build up the event content
    before signing the event.

    (Note that while objects of this class are frozen, the
    content/unsigned/internal_metadata fields are still mutable)

    Attributes:
        room_version: Version of the target room
        room_id
        type
        sender
        content
        unsigned
        internal_metadata

        _state
        _event_auth_handler
        _store
        _clock
        _hostname: The hostname of the server creating the event
        _signing_key: The signing key to use to sign the event as the server
    """

    _state: StateHandler
    _event_auth_handler: "EventAuthHandler"
    _store: DataStore
    _clock: Clock
    _hostname: str
    _signing_key: SigningKey

    room_version: RoomVersion

    room_id: str
    type: str
    sender: str

    content: JsonDict = attr.Factory(dict)
    unsigned: JsonDict = attr.Factory(dict)

    # These only exist on a subset of events, so they raise AttributeError if
    # someone tries to get them when they don't exist.
    _state_key: Optional[str] = None
    _redacts: Optional[str] = None
    _origin_server_ts: Optional[int] = None

    internal_metadata: _EventInternalMetadata = attr.Factory(
        lambda: _EventInternalMetadata({})
    )

    @property
    def state_key(self) -> str:
        if self._state_key is not None:
            return self._state_key

        raise AttributeError("state_key")

    def is_state(self) -> bool:
        return self._state_key is not None

    async def build(
        self,
        prev_event_ids: List[str],
        auth_event_ids: Optional[List[str]],
        depth: Optional[int] = None,
    ) -> EventBase:
        """Transform into a fully signed and hashed event

        Args:
            prev_event_ids: The event IDs to use as the prev events
            auth_event_ids: The event IDs to use as the auth events.
                Should normally be set to None, which will cause them to be calculated
                based on the room state at the prev_events.
            depth: Override the depth used to order the event in the DAG.
                Should normally be set to None, which will cause the depth to be calculated
                based on the prev_events.

        Returns:
            The signed and hashed event.
        """
        if auth_event_ids is None:
            state_ids = await self._state.get_current_state_ids(
                self.room_id, prev_event_ids
            )
            auth_event_ids = self._event_auth_handler.compute_auth_events(
                self, state_ids
            )

        format_version = self.room_version.event_format
        if format_version == EventFormatVersions.V1:
            # The types of auth/prev events changes between event versions.
            auth_events: Union[
                List[str], List[Tuple[str, Dict[str, str]]]
            ] = await self._store.add_event_hashes(auth_event_ids)
            prev_events: Union[
                List[str], List[Tuple[str, Dict[str, str]]]
            ] = await self._store.add_event_hashes(prev_event_ids)
        else:
            auth_events = auth_event_ids
            prev_events = prev_event_ids

        # If no depth was given, progress the depth as normal
        if depth is None:
            (
                _,
                most_recent_prev_event_depth,
            ) = await self._store.get_max_depth_of(prev_event_ids)

            depth = most_recent_prev_event_depth + 1

        # we cap depth of generated events, to ensure that they are not
        # rejected by other servers (and so that they can be persisted in
        # the db)
        depth = min(depth, MAX_DEPTH)

        event_dict: Dict[str, Any] = {
            "auth_events": auth_events,
            "prev_events": prev_events,
            "type": self.type,
            "room_id": self.room_id,
            "sender": self.sender,
            "content": self.content,
            "unsigned": self.unsigned,
            "depth": depth,
            "prev_state": [],
        }

        if self.is_state():
            event_dict["state_key"] = self._state_key

        if self._redacts is not None:
            event_dict["redacts"] = self._redacts

        if self._origin_server_ts is not None:
            event_dict["origin_server_ts"] = self._origin_server_ts

        return create_local_event_from_event_dict(
            clock=self._clock,
            hostname=self._hostname,
            signing_key=self._signing_key,
            room_version=self.room_version,
            event_dict=event_dict,
            internal_metadata_dict=self.internal_metadata.get_dict(),
        )
Example #7
class TremoloPickingEffect:
    """A tremolo picking effect."""

    duration: Duration = attr.Factory(Duration)
Example #8
class LicenseScanner(ScanPlugin):
    """
    Scan a Resource for licenses.
    """

    resource_attributes = OrderedDict([
        ('licenses', attr.ib(default=attr.Factory(list))),
        ('license_expressions', attr.ib(default=attr.Factory(list))),
    ])

    sort_order = 2

    options = [
        CommandLineOption(('-l', '--license'),
                          is_flag=True,
                          help='Scan <input> for licenses.',
                          help_group=SCAN_GROUP,
                          sort_order=10),
        CommandLineOption(
            ('--license-score', ),
            type=int,
            default=0,
            show_default=True,
            required_options=['license'],
            help=
            'Do not return license matches with a score lower than this score. '
            'A number between 0 and 100.',
            help_group=SCAN_OPTIONS_GROUP),
        CommandLineOption(('--license-text', ),
                          is_flag=True,
                          required_options=['license'],
                          help='Include the detected licenses matched text.',
                          help_group=SCAN_OPTIONS_GROUP),
        CommandLineOption(
            ('--license-url-template', ),
            default=DEJACODE_LICENSE_URL,
            show_default=True,
            required_options=['license'],
            help='Set the template URL used for the license reference URLs. '
            'Curly braces ({}) are replaced by the license key.',
            help_group=SCAN_OPTIONS_GROUP),
        CommandLineOption(
            ('--license-diag', ),
            is_flag=True,
            required_options=['license'],
            help='Include diagnostic information in license scan results.',
            help_group=SCAN_OPTIONS_GROUP),
        CommandLineOption(
            ('--reindex-licenses', ),
            is_flag=True,
            is_eager=True,
            callback=reindex_licenses,
            help=
            'Check the license index cache and reindex if needed and exit.',
            help_group=MISC_GROUP)
    ]

    def is_enabled(self, license, **kwargs):  # NOQA
        return license

    def setup(self, **kwargs):
        """
        Warm up the license index cache so that child processes inherit it.
        """
        from licensedcode.cache import get_index
        get_index(return_value=False)

    def get_scanner(self,
                    license_score=0,
                    license_text=False,
                    license_url_template=DEJACODE_LICENSE_URL,
                    license_diag=False,
                    **kwargs):

        from scancode.api import get_licenses
        return partial(get_licenses,
                       min_score=license_score,
                       include_text=license_text,
                       diag=license_diag,
                       license_url_template=license_url_template)
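
get_scanner() returns a functools.partial over scancode.api.get_licenses with the CLI options pre-bound. A hedged sketch of how the scan pipeline would use it, assuming the plugin can be instantiated directly and that get_licenses takes the file path as a location argument:

plugin = LicenseScanner()
if plugin.is_enabled(license=True):
    scanner = plugin.get_scanner(license_score=50, license_text=True)
    # the pipeline then invokes the bound partial once per scanned file:
    detections = scanner(location="/path/to/some/file.c")
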
Example #9
class SubOverwrite(Base):
    x = attr.ib(default=attr.Factory(list))
Example #10
class SaltVirtMinionContainerFactory(SaltMinion):

    host_uuid = attr.ib(default=attr.Factory(uuid.uuid4))
    ssh_port = attr.ib(
        default=attr.Factory(ports.get_unused_localhost_port), repr=False
    )
    sshd_port = attr.ib(default=attr.Factory(ports.get_unused_localhost_port))
    libvirt_tcp_port = attr.ib(
        default=attr.Factory(ports.get_unused_localhost_port), repr=False
    )
    libvirt_tls_port = attr.ib(
        default=attr.Factory(ports.get_unused_localhost_port), repr=False
    )

    uri = attr.ib(init=False)
    ssh_uri = attr.ib(init=False)
    tcp_uri = attr.ib(init=False)
    tls_uri = attr.ib(init=False)

    def __attrs_post_init__(self):
        self.uri = "localhost:{}".format(self.sshd_port)
        self.ssh_uri = "qemu+ssh://{}/system".format(self.uri)
        self.tcp_uri = "qemu+tcp://localhost:{}/system".format(self.libvirt_tcp_port)
        self.tls_uri = "qemu+tls://127.0.0.1:{}/system".format(self.libvirt_tls_port)

        # pylint: disable=access-member-before-definition
        if self.check_ports is None:
            self.check_ports = []
        # pylint: enable=access-member-before-definition
        self.check_ports.extend(
            [self.sshd_port, self.libvirt_tcp_port, self.libvirt_tls_port]
        )
        if "environment" not in self.container_run_kwargs:
            self.container_run_kwargs["environment"] = {}
        self.container_run_kwargs["environment"].update(
            {
                "SSH_PORT": str(self.ssh_port),
                "SSHD_PORT": str(self.sshd_port),
                "LIBVIRT_TCP_PORT": str(self.libvirt_tcp_port),
                "LIBVIRT_TLS_PORT": str(self.libvirt_tls_port),
                "NO_START_MINION": "1",
                "HOST_UUID": self.host_uuid,
                "PYTHONDONTWRITEBYTECODE": "1",
            }
        )
        if "ports" not in self.container_run_kwargs:
            self.container_run_kwargs["ports"] = {}
        self.container_run_kwargs["ports"].update(
            {
                "{}/tcp".format(self.ssh_port): self.ssh_port,
                "{}/tcp".format(self.sshd_port): self.sshd_port,
                "{}/tcp".format(self.libvirt_tcp_port): self.libvirt_tcp_port,
                "{}/tcp".format(self.libvirt_tls_port): self.libvirt_tls_port,
            }
        )
        if "volumes" not in self.container_run_kwargs:
            self.container_run_kwargs["volumes"] = {}
        self.container_run_kwargs["volumes"].update(
            {
                RUNTIME_VARS.CODE_DIR: {"bind": "/salt", "mode": "z"},
                RUNTIME_VARS.CODE_DIR: {"bind": RUNTIME_VARS.CODE_DIR, "mode": "z"},
            }
        )
        self.container_run_kwargs["working_dir"] = RUNTIME_VARS.CODE_DIR
        self.container_run_kwargs["network_mode"] = "host"
        self.container_run_kwargs["cap_add"] = ["ALL"]
        self.container_run_kwargs["privileged"] = True
        super().__attrs_post_init__()
        self.python_executable = "python3"

    def _container_start_checks(self):
        # Once we're able to ls the salt-minion script it means the container
        # has salt installed
        ret = self.run("ls", "-lah", self.get_script_path())
        if ret.exitcode == 0:
            return True
        time.sleep(1)
        return False
Example #11
class Place:
    name = attr.ib()
    aliases = attr.ib(default=attr.Factory(set), converter=set)
    comment = attr.ib(default="")
    tags = attr.ib(default=attr.Factory(dict))
    matches = attr.ib(default=attr.Factory(list))
    acquired = attr.ib(default=None)
    acquired_resources = attr.ib(default=attr.Factory(list))
    allowed = attr.ib(default=attr.Factory(set), converter=set)
    created = attr.ib(default=attr.Factory(time.time))
    changed = attr.ib(default=attr.Factory(time.time))
    reservation = attr.ib(default=None)

    def asdict(self):
        # in the coordinator, we have resource objects, otherwise just a path
        acquired_resources = []
        for resource in self.acquired_resources:  # pylint: disable=not-an-iterable
            if isinstance(resource, (tuple, list)):
                acquired_resources.append(resource)
            else:
                acquired_resources.append(resource.path)

        return {
            'aliases': list(self.aliases),
            'comment': self.comment,
            'tags': self.tags,
            'matches': [attr.asdict(x) for x in self.matches],
            'acquired': self.acquired,
            'acquired_resources': acquired_resources,
            'allowed': list(self.allowed),
            'created': self.created,
            'changed': self.changed,
            'reservation': self.reservation,
        }

    def update(self, config):
        fields = attr.fields_dict(type(self))
        for k, v in config.items():
            assert k in fields
            if k == 'name':
                # we cannot rename places
                assert v == self.name
                continue
            setattr(self, k, v)

    def show(self, level=0):
        indent = '  ' * level
        if self.aliases:
            print(indent +
                  "aliases: {}".format(', '.join(sorted(self.aliases))))
        if self.comment:
            print(indent + "comment: {}".format(self.comment))
        if self.tags:
            print(indent + "tags: {}".format(', '.join(
                k + "=" + v for k, v in sorted(self.tags.items()))))
        print(indent + "matches:")
        for match in sorted(self.matches):  # pylint: disable=not-an-iterable
            print(indent + "  {}".format(match))
        print(indent + "acquired: {}".format(self.acquired))
        print(indent + "acquired resources:")
        # in the coordinator, we have resource objects, otherwise just a path
        for resource in sorted(self.acquired_resources):  # pylint: disable=not-an-iterable
            if isinstance(resource, (tuple, list)):
                resource_path = resource
            else:
                resource_path = resource.path
            match = self.getmatch(resource_path)
            if match.rename:
                print(
                    indent +
                    "  {} -> {}".format('/'.join(resource_path), match.rename))
            else:
                print(indent + "  {}".format('/'.join(resource_path)))
        if self.allowed:
            print(indent + "allowed: {}".format(', '.join(self.allowed)))
        print(indent +
              "created: {}".format(datetime.fromtimestamp(self.created)))
        print(indent +
              "changed: {}".format(datetime.fromtimestamp(self.changed)))
        if self.reservation:
            print(indent + "reservation: {}".format(self.reservation))

    def getmatch(self, resource_path):
        """Return the ResourceMatch object for the given resource path or None if not found.

        A resource_path has the structure (exporter, group, cls, name).
        """
        for match in self.matches:  # pylint: disable=not-an-iterable
            if match.ismatch(resource_path):
                return match

        return None

    def hasmatch(self, resource_path):
        """Return True if this place as a ResourceMatch object for the given resource path.

        A resource_path has the structure (exporter, group, cls, name).
        """
        return self.getmatch(resource_path) is not None

    def unmatched(self, resource_paths):
        """Returns a match which could not be matched to the list of resource_path

        A resource_path has the structure (exporter, group, cls, name).
        """
        for match in self.matches:
            if not any(
                [match.ismatch(resource) for resource in resource_paths]):
                return match

    def touch(self):
        self.changed = time.time()
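
A minimal usage sketch for the class above (matches would normally hold ResourceMatch objects; they are left empty here):

place = Place(name="rack1-rpi3", tags={"board": "rpi3"}, comment="lab rack 1")
place.update({"comment": "lab rack 1, shelf 2"})
place.touch()                      # refresh the 'changed' timestamp
print(place.asdict())
place.show()
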
Example #12
class DummyShell(object):

    """Dummy Shell"""

    _env = attr.ib(default=attr.Factory(dict))

    _cwd = attr.ib(default="")

    def clone(self):
        """Return a copy of the shell"""
        return attr.evolve(self, env=dict(self._env))

    def setenv(self, key, value):
        """Set an environment variable"""
        self._env[key] = value

    def chdir(self, path):
        """change directory"""
        self._cwd = os.path.join(self._cwd, path)

    def getenv(self, key):
        """Get an environment variable"""
        return self._env[key]

    def batch(self, *args, **kwargs):
        """(Pretend to) run a command in batch mode"""
        if args == ('git rev-parse HEAD'.split(),) and kwargs == {}:
            return '777', ''
        if args == ('docker-machine env --shell cmd confluent'.split(),) and kwargs == {}:
            return ('SET DOCKER_TLS_VERIFY=1\n'
                    'SET DOCKER_HOST=tcp://192.168.99.103:2376\n'
                    'SET DOCKER_CERT_PATH=/Users/u/.docker/machine/machines/confluent\n'
                    'SET DOCKER_MACHINE_NAME=confluent\n'
                    'REM Run this command to configure your shell: \n'
                    'REM 	@FOR /f "tokens=*" %i IN (\'docker-machine env --shell cmd confluent\') '
                    'DO @%i\n', '')
        if (len(args) == 1 and args[0][:2] == 'docker run'.split() and
                set(args[0][2:-1]) == set('--interactive --remove --terminal'.split()) and
                args[0][-1] == 'a-machine:a-tag' and
                self._env['DOCKER_MACHINE_NAME'] == 'confluent' and
                self._env['DOCKER_CERT_PATH'] == '/Users/u/.docker/machine/machines/confluent' and
                self._env['DOCKER_TLS_VERIFY'] == '1' and
                self._env['DOCKER_HOST'] == 'tcp://192.168.99.103:2376'):
            return 'hello\r\n', ''
        if args == ('pip install attrs'.split(),):
            return 'attrs installed', ''
        if (args == ('pip install a-local-package'.split(),) and
                self._env['VIRTUAL_ENV'] == '/appenv'):
            return 'a-local-package installed', ''
        if args == ('apt-get update'.split(),):
            return 'update finished successfully', ''
        if args == ('echo hello'.split(),):
            return 'hello\n', ''
        if (len(args) == 1 and args[0][:2] == 'pip install'.split() and
                args[0][-1] == 'attrs' and
                '--trusted-host' in args[0] and
                args[0][args[0].index('--trusted-host')+1] == 'orbifold.xyz' and
                '--extra-index-url' in args[0] and
                args[0][args[0].index('--extra-index-url')+1] == 'http://orbifold.xyz'):
            return 'attrs installed from orbifold', ''
        if (len(args) == 1 and args[0][:2] == 'conda install'.split() and
                set(args[0][2:-1]) == set('--show-channel-urls --quiet --yes'.split()) and
                args[0][-1] == 'numpy'):
            return 'numpy installed', ''
        if (len(args) == 1 and args[0][:2] == 'docker run'.split() and
                args[0][2] == '--env' and
                args[0][4] == '--env' and
                set([args[0][3], args[0][5]]) == set(['SONG=awesome', 'SPECIAL=emett']) and
                args[0][-1] == 'lego:1'):
            return 'everything', ''
        if args == ('do-stuff special --verbosity 5'.split(),):
            return 'doing stuff very specially', ''
        if args == ('do-stuff special --verbose'.split(),):
            return 'doing stuff slightly more verbosely', ''
        if (len(args) == 1 and args[0][:2] == 'chat mention'.split() and
                args[0][2] == '--person' and
                args[0][4] == '--person' and
                set([args[0][3], args[0][5]]) == set(['emett', 'lucy'])):
            return 'mentioning folks', ''
        if args == (['pwd'],):
            return self._cwd, ''
        if args == ('docker exec 3433 echo yay'.split(),):
            return 'yay\r\n', ''
        if (args[0][:2] == 'git show'.split() and
                '--no-patch' in args[0] and
                '--format=%ct' in args[0] and
                len(args[0]) == 4):
            return '1496798292', ''
        raise ValueError(self, args, kwargs)

    def interactive(self, *args, **kwargs):
        """(Pretend to) run a command in interactive mode"""
        if args == (['python'],):
            return
        raise ValueError(self, args, kwargs)

    def popen(self, *args, **kwargs):
        """(Pretend to) run a command in popen mode"""
        if args == (['grep', 'foo'],):
            return
        raise ValueError(self, args, kwargs)

    def redirect(self, cmd, outfp, errfp, *args, **kwargs):
        """(Pretend to) redirect a command"""
        if cmd == ['docker', 'run', 'confluent'] and outfp == 1 and errfp == 2:
            return
        raise ValueError(self, cmd, outfp, errfp, args, kwargs)
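
A short sketch of how tests exercise the canned responses above, assuming the usual attrs decorator that these snippets omit:

shell = DummyShell()
shell.setenv("VIRTUAL_ENV", "/appenv")
out, err = shell.batch("pip install a-local-package".split())
assert out == "a-local-package installed"

clone = shell.clone()              # independent copy of the environment
clone.chdir("src")
assert clone.batch(["pwd"])[0] == "src"
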
Example #13
class WaitpidState:
    pid = attr.ib()
    event = attr.ib(default=attr.Factory(Event))
    outcome = attr.ib(default=None)
Example #14
class Board:
    # Board holds all the positional data from turn to turn; it is the groundwork the game is built on.
    board = attr.ib(attr.Factory(list))

    def setup(self, start: str = ""):
        """
        Generate the 64 squares of the board, starting at the top left (a white square) and moving to the bottom right, with ids 0-63.
        """
        for i in range(0, 64):
            self.board.append(Square(i, None, None))
            self.board[i].colour_setter()
        if not start:
            # FEN representation of a starting setup in chess
            self.FEN_translator_in(
                "RNBQKBNR/PPPPPPPP/8/8/8/8/pppppppp/rnbqkbnr")
        else:
            self.FEN_translator_in(start)

    def FEN_translator_in(self, FEN_string: str):
        id_pointer = 0
        print(FEN_string)
        pieces_dict = {
            "P": "pawn_white",
            "N": "knight_white",
            "B": "bishop_white",
            "R": "rook_white",
            "Q": "queen_white",
            "K": "king_white",
            "p": "pawn_black",
            "n": "knight_black",
            "b": "bishop_black",
            "r": "rook_black",
            "q": "queen_black",
            "k": "king_black",
        }

        for character in range(len(FEN_string)):
            if FEN_string[character] == "/":
                continue
            if FEN_string[character].isdigit():
                for i in range(int(FEN_string[character])):
                    self.board[id_pointer].piece = None
                    id_pointer += 1
            else:
                print("this has been called")

                self.board[id_pointer].piece = pieces_dict[
                    FEN_string[character]]
                id_pointer += 1

    # DEPRECATED: debugging helper only; this needs a complete redesign to scale.
    def board_printer(self):

        id_pointer = 0

        while id_pointer != len(self.board):
            print(f"|{' ' * ((21 * 8) - 1)}|")
            print_string = ""
            for _ in range(8):
                if self.board[id_pointer].piece is not None:
                    print_string += f"|{self.board[id_pointer].piece:^20}"

                else:
                    print_string += f"|{'Square':^20}"

                id_pointer += 1

            print_string += "|"
            print(print_string)
            print(f"|{' ' * ((21 * 8) - 1)}|")
            print("-" * 21 * 8)
Example #15
class EntryQueue:
    # This used to use a queue.Queue, but that was broken, because Queues are
    # implemented in Python, and not reentrant -- so it was thread-safe, but
    # not signal-safe. deque is implemented in C, so each operation is atomic
    # WRT threads (and this is guaranteed in the docs), AND each operation is
    # atomic WRT signal delivery (signal handlers can run on either side, but
    # not *during* a deque operation). dict makes similar guarantees - and on
    # CPython 3.6 and PyPy, it's even ordered!
    queue = attr.ib(default=attr.Factory(deque))
    idempotent_queue = attr.ib(default=attr.Factory(dict))

    wakeup = attr.ib(default=attr.Factory(WakeupSocketpair))
    done = attr.ib(default=False)
    # Must be a reentrant lock, because it's acquired from signal handlers.
    # RLock is signal-safe as of cpython 3.2. NB that this does mean that the
    # lock is effectively *disabled* when we enter from signal context. The
    # way we use the lock this is OK though, because when
    # run_sync_soon is called from a signal it's atomic WRT the
    # main thread -- it just might happen at some inconvenient place. But if
    # you look at the one place where the main thread holds the lock, it's
    # just to make 1 assignment, so that's atomic WRT a signal anyway.
    lock = attr.ib(default=attr.Factory(threading.RLock))

    async def task(self):
        assert _core.currently_ki_protected()
        # RLock has two implementations: a signal-safe version in _thread, and
        # a signal-UNsafe version in threading. We need the signal safe
        # version. Python 3.2 and later should always use this anyway, but,
        # since the symptoms if this goes wrong are just "weird rare
        # deadlocks", then let's make a little check.
        # See:
        #     https://bugs.python.org/issue13697#msg237140
        assert self.lock.__class__.__module__ == "_thread"

        def run_cb(job):
            # We run this with KI protection enabled; it's the callback's
            # job to disable it if it wants it disabled. Exceptions are
            # treated like system task exceptions (i.e., converted into
            # TrioInternalError and cause everything to shut down).
            sync_fn, args = job
            try:
                sync_fn(*args)
            except BaseException as exc:

                async def kill_everything(exc):
                    raise exc

                _core.spawn_system_task(kill_everything, exc)
            return True

        # This has to be carefully written to be safe in the face of new items
        # being queued while we iterate, and to do a bounded amount of work on
        # each pass:
        def run_all_bounded():
            for _ in range(len(self.queue)):
                run_cb(self.queue.popleft())
            for job in list(self.idempotent_queue):
                del self.idempotent_queue[job]
                run_cb(job)

        try:
            while True:
                run_all_bounded()
                if not self.queue and not self.idempotent_queue:
                    await self.wakeup.wait_woken()
                else:
                    await _core.checkpoint()
        except _core.Cancelled:
            # Keep the work done with this lock held as minimal as possible,
            # because it doesn't protect us against concurrent signal delivery
            # (see the comment above). Notice that this code would still be
            # correct if written like:
            #   self.done = True
            #   with self.lock:
            #       pass
            # because all we want is to force run_sync_soon
            # to either be completely before or completely after the write to
            # done. That's why we don't need the lock to protect
            # against signal handlers.
            with self.lock:
                self.done = True
            # No more jobs will be submitted, so just clear out any residual
            # ones:
            run_all_bounded()
            assert not self.queue
            assert not self.idempotent_queue

    def close(self):
        self.wakeup.close()

    def size(self):
        return len(self.queue) + len(self.idempotent_queue)

    def spawn(self):
        name = "<TrioToken.run_sync_soon task>"
        _core.spawn_system_task(self.task, name=name)

    def run_sync_soon(self, sync_fn, *args, idempotent=False):
        with self.lock:
            if self.done:
                raise _core.RunFinishedError("run() has exited")
            # We have to hold the lock all the way through here, because
            # otherwise the main thread might exit *while* we're doing these
            # calls, and then our queue item might not be processed, or the
            # wakeup call might trigger an OSError b/c the IO manager has
            # already been shut down.
            if idempotent:
                self.idempotent_queue[(sync_fn, args)] = None
            else:
                self.queue.append((sync_fn, args))
            self.wakeup.wakeup_thread_and_signal_safe()
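
A minimal sketch of how this queue is normally reached from user code through
trio's public API (assuming a recent trio, where trio.lowlevel.current_trio_token()
returns a TrioToken whose run_sync_soon feeds the class above); the callback and
worker thread are purely illustrative.

import threading
import trio

async def main():
    token = trio.lowlevel.current_trio_token()
    done = threading.Event()

    def from_other_thread():
        # Safe to call from any thread (or a signal handler): the callback is
        # queued and later executed inside the Trio run loop.
        token.run_sync_soon(print, "hello from the trio thread")
        done.set()

    threading.Thread(target=from_other_thread).start()
    # Wait for the worker thread without blocking the event loop.
    await trio.to_thread.run_sync(done.wait)

trio.run(main)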
Exemple #16
0
class C2Slots(object):
    x = attr.ib(default=foo)
    y = attr.ib(default=attr.Factory(list))
Exemple #17
0
class CopyrightTest(object):
    """
    A copyright detection test is used to verify that copyright detection works
    correctly.

    It consists of two files with the same file name: a .yml file with test data
    and a test file with any other extension (and the same name when removing the
    .yml extension) that needs to be tested for detection.

    The following data are loaded based on or from the .yml file:
     - a test file to scan for copyrights (based on file name conventions),
     - what to test,
     - a list of expected copyrights, authors or holders to detect,
     - optional notes,
     - a list of expected_failures.

    If a list of expected data is not provided or empty, then this test should
    not detect any such data in the test file.
    """
    data_file = attr.ib(default=None)
    test_file = attr.ib(default=None)
    # one of holders, copyrights, authors
    what = attr.ib(default=attr.Factory(list))
    copyrights = attr.ib(default=attr.Factory(list))
    holders = attr.ib(default=attr.Factory(list))
    authors = attr.ib(default=attr.Factory(list))

    holders_summary = attr.ib(default=attr.Factory(list))
    copyrights_summary = attr.ib(default=attr.Factory(list))
    authors_summary = attr.ib(default=attr.Factory(list))

    expected_failures = attr.ib(default=attr.Factory(list))
    notes = attr.ib(default=None)

    def __attrs_post_init__(self, *args, **kwargs):
        if self.data_file:
            try:
                with io.open(self.data_file, encoding='utf-8') as df:
                    for key, value in saneyaml.load(df.read()).items():
                        if value:
                            setattr(self, key, value)
            except:
                import traceback
                msg = 'file://' + self.data_file + '\n' + repr(self) + '\n' + traceback.format_exc()
                raise Exception(msg)

        # fix counts to be ints: saneyaml loads everything as string
        for holders_sum in self.holders_summary:
            holders_sum['count'] = int(holders_sum['count'])

        for copyrs_sum in self.copyrights_summary:
            copyrs_sum['count'] = int(copyrs_sum['count'])

        for auths_sum in self.authors_summary:
            auths_sum['count'] = int(auths_sum['count'])

    def to_dict(self):
        """
        Serialize self to an ordered mapping.
        """
        filtered = [field for field in attr.fields(CopyrightTest)
                    if '_file' in field.name]
        fields_filter = attr.filters.exclude(*filtered)
        data = attr.asdict(self, filter=fields_filter, dict_factory=dict)
        return dict([
            (key, value) for key, value in data.items()
            # do not dump false and empties
            if value])

    def dumps(self):
        """
        Return a string representation of self in YAML block format.
        """
        return saneyaml.dump(self.to_dict())

    def dump(self, check_exists=False):
        """
        Dump a representation of self to a .yml data_file in YAML block format.
        """
        if check_exists and path.exists(self.data_file):
            raise Exception(self.data_file)
        with io.open(self.data_file, 'w', encoding='utf-8') as df:
            df.write(self.dumps())
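
A minimal usage sketch for the test class above, assuming its module context
(saneyaml, io, os.path) is importable; the paths and expected values are invented.

test = CopyrightTest(
    test_file='tests/data/example.c',           # hypothetical test file
    what=['copyrights', 'holders'],
    copyrights=['Copyright (c) 2024 Example Corp.'],
    holders=['Example Corp.'],
)
print(test.dumps())                             # YAML block with only non-empty fields
test.data_file = 'tests/data/example.c.yml'     # hypothetical .yml location
test.dump(check_exists=True)                    # writes the YAML next to the test file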
Exemple #18
0
class Statement(str):  # type: ignore[override]
    """String subclass with additional attributes to store the results of parsing.

    The ``cmd`` module in the standard library passes commands around as a
    string. To retain backwards compatibility, ``cmd2`` does the same. However,
    we need a place to capture the additional output of the command parsing, so
    we add our own attributes to this subclass.

    Instances of this class should not be created by anything other than the
    :meth:`cmd2.parsing.StatementParser.parse` method, nor should any of the
    attributes be modified once the object is created.

    The string portion of the class contains the arguments, but not the
    command, nor the output redirection clauses.

    Tips:

    1. `argparse <https://docs.python.org/3/library/argparse.html>`_ is your
       friend for anything complex. ``cmd2`` has the decorator
       (:func:`~cmd2.decorators.with_argparser`) which you can
       use to make your command method receive a namespace of parsed arguments,
       whether positional or denoted with switches.

    2. For commands with simple positional arguments, use
       :attr:`~cmd2.Statement.args` or :attr:`~cmd2.Statement.arg_list`

    3. If you don't want to have to worry about quoted arguments, see
       :attr:`argv` for a trick which strips quotes off for you.
    """

    # the arguments, but not the command, nor the output redirection clauses.
    args: str = attr.ib(default='', validator=attr.validators.instance_of(str))

    # string containing exactly what was input by the user
    raw: str = attr.ib(default='', validator=attr.validators.instance_of(str))

    # the command, i.e. the first whitespace delimited word
    command: str = attr.ib(default='',
                           validator=attr.validators.instance_of(str))

    # list of arguments to the command, not including any output redirection or terminators; quoted args remain quoted
    arg_list: List[str] = attr.ib(default=attr.Factory(list),
                                  validator=attr.validators.instance_of(list))

    # if the command is a multiline command, the name of the command, otherwise empty
    multiline_command: str = attr.ib(
        default='', validator=attr.validators.instance_of(str))

    # the character which terminated the multiline command, if there was one
    terminator: str = attr.ib(default='',
                              validator=attr.validators.instance_of(str))

    # characters appearing after the terminator but before output redirection, if any
    suffix: str = attr.ib(default='',
                          validator=attr.validators.instance_of(str))

    # if output was piped to a shell command, the shell command as a string
    pipe_to: str = attr.ib(default='',
                           validator=attr.validators.instance_of(str))

    # if output was redirected, the redirection token, i.e. '>>'
    output: str = attr.ib(default='',
                          validator=attr.validators.instance_of(str))

    # if output was redirected, the destination file token (quotes preserved)
    output_to: str = attr.ib(default='',
                             validator=attr.validators.instance_of(str))

    def __new__(cls, value: object, *pos_args: Any,
                **kw_args: Any) -> 'Statement':
        """Create a new instance of Statement.

        We must override __new__ because we are subclassing `str` which is
        immutable and takes a different number of arguments as Statement.

        NOTE:  attrs takes care of initializing other members in the __init__ it
        generates.
        """
        stmt = super().__new__(cls, value)
        return stmt

    @property
    def command_and_args(self) -> str:
        """Combine command and args with a space separating them.

        Quoted arguments remain quoted. Output redirection and piping are
        excluded, as are any command terminators.
        """
        if self.command and self.args:
            rtn = f'{self.command} {self.args}'
        elif self.command:
            # there were no arguments to the command
            rtn = self.command
        else:
            rtn = ''
        return rtn

    @property
    def post_command(self) -> str:
        """A string containing any ending terminator, suffix, and redirection chars"""
        rtn = ''
        if self.terminator:
            rtn += self.terminator

        if self.suffix:
            rtn += ' ' + self.suffix

        if self.pipe_to:
            rtn += ' | ' + self.pipe_to

        if self.output:
            rtn += ' ' + self.output
            if self.output_to:
                rtn += ' ' + self.output_to

        return rtn

    @property
    def expanded_command_line(self) -> str:
        """Concatenate :meth:`~cmd2.Statement.command_and_args`
        and :meth:`~cmd2.Statement.post_command`"""
        return self.command_and_args + self.post_command

    @property
    def argv(self) -> List[str]:
        """a list of arguments a-la ``sys.argv``.

        The first element of the list is the command after shortcut and macro
        expansion. Subsequent elements of the list contain any additional
        arguments, with quotes removed, just like bash would. This is very
        useful if you are going to use ``argparse.parse_args()``.

        If you want to strip quotes from the input, you can use ``argv[1:]``.
        """
        if self.command:
            rtn = [utils.strip_quotes(self.command)]
            for cur_token in self.arg_list:
                rtn.append(utils.strip_quotes(cur_token))
        else:
            rtn = []

        return rtn
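
Purely for illustration (the docstring above notes that real instances come from
StatementParser.parse rather than direct construction, and argv relies on the
cmd2 module's quote-stripping helper), a sketch of how the derived properties
combine; the command and arguments are invented.

stmt = Statement(
    '"hello world" --shout',
    raw='greet "hello world" --shout > out.txt',
    command='greet',
    arg_list=['"hello world"', '--shout'],
    output='>',
    output_to='out.txt',
)
print(stmt.command_and_args)       # greet "hello world" --shout
print(stmt.argv)                   # ['greet', 'hello world', '--shout']
print(stmt.expanded_command_line)  # greet "hello world" --shout > out.txt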
Exemple #19
0
class TrillEffect:
    """A trill effect."""

    fret: int = 0
    duration: Duration = attr.Factory(Duration)
Exemple #20
0
class Device:
    definition_path = attr.ib(converter=pathlib.Path)
    definition = attr.ib(default=None)
    canmatrix = attr.ib(default=None)
    neo = attr.ib(default=None)
    nvs = attr.ib(default=None)
    bus = attr.ib(default=None)
    cyclic_frames = attr.ib(default=attr.Factory(set))
    default_elevated_access_level = attr.ib(default=None)
    default_access_level_password = attr.ib(default=None)
    save_nv = attr.ib(default=None)
    save_nv_value = attr.ib(default=None)
    uuid = attr.ib(default=uuid.uuid4)

    def load(self):
        if self.definition is not None:
            raise AlreadyLoadedError("The definition has already been loaded")

        with epyqlib.updateepc.updated(self.definition_path) as updated:
            self.definition = Definition.loadp(updated)
            matrix = self.definition.load_can()

        node_id_adjust = functools.partial(
            epyqlib.device.node_id_types[self.definition.node_id_type],
            device_id=self.definition.node_id,
            controller_id=self.definition.controller_id,
        )

        self.neo = epyqlib.canneo.Neo(
            matrix=matrix,
            node_id_adjust=node_id_adjust,
        )

        nv_neo = epyqlib.canneo.Neo(
            matrix=matrix,
            frame_class=epyqlib.nv.Frame,
            signal_class=epyqlib.nv.Nv,
            strip_summary=False,
            node_id_adjust=node_id_adjust,
        )
        self.nvs = epyqlib.nv.Nvs(
            neo=nv_neo,
            configuration=self.definition.nv_configuration,
            access_level_path=self.definition.access_level_path,
            access_password_path=self.definition.access_password_path,
        )

        self.save_nv = self.nv(
            self.nvs.save_frame.mux_name,
            self.nvs.save_signal.name,
        )
        self.save_nv_value = self.nvs.save_value

    def set_bus(self, bus):
        if self.bus is not None:
            raise BusAlreadySetError()

        try:
            self.neo.set_bus(bus=bus)
            self.nvs.set_bus(bus=bus)
        except:
            # TODO: actually rollback a partial setting
            self.bus = object()
            raise

        self.bus = bus
        self.bus.notifier.add(self.neo)
        # TODO: really think through what is proper...  won't this keep the
        #       nv objects from getting updated?
        # self.bus.notifier.add(self.nvs)

    # @functools.lru_cache(maxsize=512)
    def signal(self, *path):
        return Signal(
            signal=self.neo.signal_by_path(*path),
            device=self,
        )

    def signal_from_uuid(self, uuid_):
        return Signal(
            signal=self.neo.signal_from_uuid[uuid_],
            device=self,
        )

    # @functools.lru_cache(maxsize=512)
    def nv(self, *path):
        return Nv(
            nv=self.nvs.signal_from_names(*path),
            device=self,
        )

    def nv_from_uuid(self, uuid_):
        return Nv(
            nv=self.nvs.nv_from_uuid[uuid_],
            device=self,
        )

    def parameter_from_uuid(self, uuid_):
        try:
            return self.nv_from_uuid(uuid_=uuid_)
        except KeyError:
            return self.signal_from_uuid(uuid_=uuid_)

    @twisted.internet.defer.inlineCallbacks
    def active_to_nv(self, wait=False):
        # TODO: dedupe 8795477695t46542676781543768139
        yield twisted.internet.defer.ensureDeferred(
            self.save_nv.set(value=self.save_nv_value),
        )

        if wait:
            yield self.wait_for_nv_save_completion()

    @twisted.internet.defer.inlineCallbacks
    def wait_for_nv_save_completion(self):
        nv = self.nv("StatusWarnings", "eeSaveInProgress")

        yield epyqlib.utils.twisted.sleep(2)

        yield nv.wait_for(
            op="==",
            value=0,
            timeout=120,
            ignore_read_failures=True,
        )

    def cyclic_send_signal(self, signal, period):
        frame = signal.signal.frame
        frame.cyclic_request(self.uuid, period)
        if period is not None:
            self.cyclic_frames.add(frame)
        else:
            self.cyclic_frames.discard(frame)

    def cancel_all_cyclic_sends(self):
        for frame in self.cyclic_frames:
            frame.cyclic_request(self.uuid, None)

    @twisted.internet.defer.inlineCallbacks
    def get_access_level(self):
        nv = Nv(nv=self.nvs.access_level_node, device=self)
        access_level = yield nv.get()
        return access_level

    async def get_check_limits(self):
        nv = self.nv_from_uuid(
            uuid.UUID("bd7c3c96-bde9-4b4b-a646-e1d06a7cc24f"),
        )
        value = await nv.get()

        return value

    async def get_password(self):
        nv = self.nv_from_uuid(
            uuid.UUID("cc438574-bec0-4443-8a25-785e41240c1b"),
        )
        value = await nv.get()

        return value

    @twisted.internet.defer.inlineCallbacks
    def set_access_level(self, level=None, password=None, check_limits=True):
        if level is None:
            level = self.default_elevated_access_level

        if password is None:
            password = self.default_access_level_password

        self.nvs.password_node.set_value(password)
        self.nvs.access_level_node.set_value(level)
        check_limits_nv = self.nv_from_uuid(
            uuid.UUID("bd7c3c96-bde9-4b4b-a646-e1d06a7cc24f"),
        )
        check_limits_nv.nv.set_value(check_limits)

        selected_nodes = tuple(
            node
            for node in (
                self.nvs.password_node,
                self.nvs.access_level_node,
                check_limits_nv.nv,
            )
            if node is not None
        )

        yield self.nvs.write_all_to_device(
            only_these=selected_nodes,
            meta=[epyqlib.nv.MetaEnum.value],
        )

    @contextlib.asynccontextmanager
    async def temporary_access_level(
        self,
        level=None,
        password=None,
        check_limits=True,
    ):
        access_level_parameter = self.nv(*self.definition.access_level_path[1:])
        original_access_level = await access_level_parameter.get()

        check_limits_nv = self.nv_from_uuid(
            uuid.UUID("bd7c3c96-bde9-4b4b-a646-e1d06a7cc24f"),
        )
        original_check_limits = await check_limits_nv.get()

        try:
            await self.set_access_level(
                level=level,
                password=password,
                check_limits=check_limits,
            )
            yield
        finally:
            try:
                await self.set_access_level(
                    level=level,
                    password=password,
                    check_limits=original_check_limits,
                )
            finally:
                await self.set_access_level(
                    level=original_access_level,
                    password=password,
                )

    async def reset(self, timeout=10, sleep=0):
        # SoftwareReset:InitiateReset
        reset_parameter = self.parameter_from_uuid(
            uuid_=uuid.UUID("b582085d-7734-4260-ab97-47e50a41b06c"),
        )

        # Serial Number
        a_parameter_that_can_be_read = self.parameter_from_uuid(
            uuid_=uuid.UUID("390f27ea-6f28-4313-b183-5f37d007ccd1"),
        )

        # TODO: just accept the 1s or whatever default timeout?  A set without
        #       waiting for the response could be nice.  (or embedded sending
        #       a response)
        with contextlib.suppress(epyqlib.twisted.nvs.RequestTimeoutError):
            await reset_parameter.set(value=1)

        if sleep > 0:
            await epyqlib.utils.twisted.sleep(sleep)

        end = time.monotonic() + timeout
        while True:
            try:
                await a_parameter_that_can_be_read.get()
            except epyqlib.twisted.nvs.RequestTimeoutError:
                if time.monotonic() > end:
                    raise
                continue
            else:
                break

    async def wait_through_power_on_reset(self):
        status_signal = self.signal_from_uuid(
            uuid_=uuid.UUID("6392782a-b886-45a0-9642-dd4f47cd2a59"),
        )

        await status_signal.wait_for(
            op="!=",
            # TODO: stop comparing strings...
            value="Power On Reset",
            timeout=60,
        )

    async def to_nv(self):
        # TODO: dedupe 8795477695t46542676781543768139
        await self.nvs.module_to_nv()

    async def get_serial_number(self):
        nv = self.nv_from_uuid(
            uuid_=uuid.UUID("390f27ea-6f28-4313-b183-5f37d007ccd1"),
        )
        value = await nv.get()
        return value

    async def clear_faults(self):
        clear_faults_signal = self.signal_from_uuid(
            uuid_=uuid.UUID("62b6dc82-c93a-454a-a643-dd8a7b2a220e"),
        )
        clear_faults_status_signal = self.signal_from_uuid(
            uuid_=uuid.UUID("d84e5184-696d-487c-8850-cc904a7c018f"),
        )

        clear_faults_signal.set(value=False)
        await clear_faults_status_signal.wait_for(
            op="==",
            value="Normal",
            timeout=1,
        )

        clear_faults_signal.set(value=True)
        await clear_faults_status_signal.wait_for(
            op="==",
            value="Clear Faults",
            timeout=1,
        )

        clear_faults_signal.set(value=False)
        await clear_faults_status_signal.wait_for(
            op="==",
            value="Normal",
            timeout=1,
        )
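
A short usage sketch for the access-level context manager above; the level and
password are placeholder values, and device is assumed to be a loaded Device
with its bus already set.

async def elevate_and_save(device):
    async with device.temporary_access_level(level=2, password="1234"):
        # Runs with elevated access; the original level is restored afterwards,
        # even if to_nv() raises.
        await device.to_nv()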
Exemple #21
0
class WindowsFinder(BaseFinder):
    paths = attr.ib(default=attr.Factory(list), type=list)
    version_list = attr.ib(default=attr.Factory(list), type=list)
    _versions = attr.ib()  # type: DefaultDict[Tuple, PathEntry]
    _pythons = attr.ib()  # type: DefaultDict[str, PathEntry]

    def find_all_python_versions(
            self,
            major=None,  # type: Optional[Union[str, int]]
            minor=None,  # type: Optional[int]
            patch=None,  # type: Optional[int]
            pre=None,  # type: Optional[bool]
            dev=None,  # type: Optional[bool]
            arch=None,  # type: Optional[str]
            name=None,  # type: Optional[str]
    ):
        # type: (...) -> List[PathEntry]
        version_matcher = operator.methodcaller("matches",
                                                major,
                                                minor,
                                                patch,
                                                pre,
                                                dev,
                                                arch,
                                                python_version=name)
        py_filter = filter(
            None, filter(lambda c: version_matcher(c), self.version_list))
        version_sort = operator.attrgetter("version_sort")
        return [
            c.comes_from
            for c in sorted(py_filter, key=version_sort, reverse=True)
        ]

    def find_python_version(
            self,
            major=None,  # type: Optional[Union[str, int]]
            minor=None,  # type: Optional[int]
            patch=None,  # type: Optional[int]
            pre=None,  # type: Optional[bool]
            dev=None,  # type: Optional[bool]
            arch=None,  # type: Optional[str]
            name=None,  # type: Optional[str]
    ):
        # type: (...) -> Optional[PathEntry]
        return next(
            iter(v for v in self.find_all_python_versions(
                major=major,
                minor=minor,
                patch=patch,
                pre=pre,
                dev=dev,
                arch=arch,
                name=name,
            )), None)

    @_versions.default
    def get_versions(self):
        # type: () -> DefaultDict[Tuple, PathEntry]
        versions = defaultdict(
            PathEntry)  # type: DefaultDict[Tuple, PathEntry]
        from pythonfinder._vendor.pep514tools import environment as pep514env

        env_versions = pep514env.findall()
        path = None
        for version_object in env_versions:
            install_path = getattr(version_object.info, "install_path", None)
            if install_path is None:
                continue
            try:
                path = ensure_path(install_path.__getattr__(""))
            except AttributeError:
                continue
            try:
                py_version = PythonVersion.from_windows_launcher(
                    version_object)
            except InvalidPythonVersion:
                continue
            if py_version is None:
                continue
            self.version_list.append(py_version)
            python_path = py_version.comes_from.path if py_version.comes_from else py_version.executable
            python_kwargs = {
                python_path: py_version
            } if python_path is not None else {}
            base_dir = PathEntry.create(
                path,
                is_root=True,
                only_python=True,
                pythons=python_kwargs,
            )
            versions[py_version.version_tuple[:5]] = base_dir
            self.paths.append(base_dir)
        return versions

    @property
    def versions(self):
        # type: () -> DefaultDict[Tuple, PathEntry]
        if not self._versions:
            self._versions = self.get_versions()
        return self._versions

    @_pythons.default
    def get_pythons(self):
        # type: () -> DefaultDict[str, PathEntry]
        pythons = defaultdict()  # type: DefaultDict[str, PathEntry]
        for version in self.version_list:
            _path = ensure_path(version.comes_from.path)
            pythons[_path.as_posix()] = version.comes_from
        return pythons

    @property
    def pythons(self):
        # type: () -> DefaultDict[str, PathEntry]
        return self._pythons

    @pythons.setter
    def pythons(self, value):
        # type: (DefaultDict[str, PathEntry]) -> None
        self._pythons = value

    @classmethod
    def create(cls, *args, **kwargs):
        # type: (Type[FinderType], Any, Any) -> FinderType
        return cls()
Exemple #22
0
class SunSpecDevice:
    model_path = attr.ib(converter=pathlib.Path)
    device = attr.ib(default=None)
    cyclic_frames = attr.ib(default=attr.Factory(set))
    default_elevated_access_level = attr.ib(default=None)
    default_access_level_password = attr.ib(default=None)
    save_nv = attr.ib(default=None)
    save_nv_value = attr.ib(default=None)
    uuid_to_point = attr.ib(default=None)
    uuid_to_model = attr.ib(default=None)
    uuid = attr.ib(default=uuid.uuid4)

    def load(
        self,
        slave_id=1,
        device_type=sunspec.core.client.RTU,
        name="/dev/ttyUSB0",
        baudrate=115200,
        timeout=1,
        parity=sunspec.core.client.PARITY_NONE,
        ipaddr=None,
        ipport=502,
        pathlist=None,
        trace=False,
    ):
        message = (
            "SunSpecDevice.load() method is deprecated and will be"
            + " removed in the future. Use SunSpecDevice.load_rtu()"
            + " or SunSpecDevice.load_tcp() instead."
        )
        warnings.warn(message=message, category=DeprecationWarning)

        with epcsunspecdemo.utils.fresh_smdx_path(self.model_path):
            self.device = sunspec.core.client.SunSpecClientDevice(
                slave_id=slave_id,
                device_type=device_type,
                name=name,
                baudrate=baudrate,
                timeout=timeout,
                parity=parity,
                ipaddr=ipaddr,
                ipport=ipport,
                pathlist=pathlist,
                trace=trace,
            )

    def load_rtu(
        self,
        slave_id=1,
        name="/dev/ttyUSB0",
        baudrate=115200,
        timeout=1,
        parity=sunspec.core.client.PARITY_NONE,
    ):
        with epcsunspecdemo.utils.fresh_smdx_path(self.model_path):
            self.device = sunspec.core.client.SunSpecClientDevice(
                slave_id=slave_id,
                device_type=sunspec.core.client.RTU,
                name=name,
                baudrate=baudrate,
                timeout=timeout,
                parity=parity,
            )

    def load_tcp(
        self,
        ipaddr,
        slave_id=1,
        timeout=1,
        ipport=None,
    ):
        with epcsunspecdemo.utils.fresh_smdx_path(self.model_path):
            self.device = sunspec.core.client.SunSpecClientDevice(
                slave_id=slave_id,
                device_type=sunspec.core.client.TCP,
                timeout=timeout,
                ipaddr=ipaddr,
                ipport=ipport,
            )

    # def signal_from_uuid(self, uuid_) -> SunSpecNv:
    #     return self.nv_from_uuid(uuid_=uuid_)

    def nv_from_uuid(self, uuid_) -> SunSpecNv:
        return SunSpecNv(
            nv=self.uuid_to_point[uuid_],
            model=self.uuid_to_model[uuid_],
            # device=self,
        )

    # no 'signals' so just alias
    parameter_from_uuid = nv_from_uuid

    def map_uuids(self):
        def get_uuid(block, point):
            comment = point.point_type.notes

            for index in itertools.count():
                comment, uuid = epyqlib.canneo.strip_uuid_from_comment(
                    comment,
                )

                if uuid is None:
                    return uuid

                if block.type == "fixed":
                    return uuid

                if block.type == "repeating" and index == block.index - 1:
                    return uuid

        points = [
            [model, block, point]
            for model in self.device.device.models_list
            for block in model.blocks
            for point in [*block.points_list, *block.points_sf.values()]
            if point.point_type.notes is not None
        ]

        self.uuid_to_point = {
            get_uuid(block=block, point=point): point for model, block, point in points
        }

        self.uuid_to_model = {
            get_uuid(block=block, point=point): model for model, block, point in points
        }

    async def get_access_level(self):
        access_level_point = self.device.epc_control.model.points["AccLvl"]

        self.device.epc_control.read()

        return access_level_point.value

    async def get_check_limits(self):
        point = self.device.epc_control.model.points["ChkLmts"]
        self.device.epc_control.read()

        return point.value

    async def get_password(self):
        point = self.device.epc_control.model.points["Passwd"]
        self.device.epc_control.read()

        return point.value

    async def set_access_level(self, level=None, password=None, check_limits=True):
        if level is None:
            level = self.default_elevated_access_level

        if password is None:
            password = self.default_access_level_password

        access_level_point = self.device.epc_control.model.points["AccLvl"]
        password_point = self.device.epc_control.model.points["Passwd"]
        check_limits_point = self.device.epc_control.model.points["ChkLmts"]
        submit_point = self.device.epc_control.model.points["SubAccLvl"]

        epcsunspecdemo.demos.send_val(access_level_point, level)
        epcsunspecdemo.demos.send_val(password_point, password)
        epcsunspecdemo.demos.send_val(check_limits_point, check_limits)

        epcsunspecdemo.demos.send_val(submit_point, True)

    @contextlib.asynccontextmanager
    async def temporary_access_level(
        self,
        level=None,
        password=None,
        check_limits=True,
    ):
        check_limits_point = self.device.epc_control.model.points["ChkLmts"]

        original_access_level = await self.get_access_level()
        self.device.epc_control.read()
        original_check_limits = check_limits_point.value

        try:
            await self.set_access_level(
                level=level,
                password=password,
                check_limits=check_limits,
            )
            yield
        finally:
            await self.set_access_level(
                level=original_access_level,
                password=password,
                check_limits=original_check_limits,
            )

    async def reset(self, timeout=10, sleep=0):
        # SoftwareReset:InitiateReset
        reset_parameter = self.parameter_from_uuid(
            uuid_=uuid.UUID("b582085d-7734-4260-ab97-47e50a41b06c"),
        )

        # Serial Number
        a_parameter_that_can_be_read = self.parameter_from_uuid(
            uuid_=uuid.UUID("390f27ea-6f28-4313-b183-5f37d007ccd1"),
        )

        # TODO: just accept the 1s or whatever default timeout?  A set without
        #       waiting for the response could be nice.  (or embedded sending
        #       a response)
        with contextlib.suppress(sunspec.core.client.SunSpecClientError):
            await reset_parameter.set(value=1)

        if sleep > 0:
            await epyqlib.utils.twisted.sleep(sleep)

        end = time.monotonic() + timeout
        while True:
            try:
                await a_parameter_that_can_be_read.get()
            except sunspec.core.client.SunSpecClientError:
                if time.monotonic() > end:
                    raise
                continue
            else:
                break

    async def to_nv(self, timeout=10):
        save_command_parameter = self.parameter_from_uuid(
            uuid.UUID("2c768acc-f88e-431c-8fc1-ea8d5b2ba253"),
        )
        save_in_progress_parameter = self.parameter_from_uuid(
            uuid.UUID("5d623539-a564-4374-b00d-492a0fbb2f55"),
        )

        await save_command_parameter.set(1)
        await epyqlib.utils.twisted.sleep(0.250)

        end = time.monotonic() + timeout
        while time.monotonic() < end:
            try:
                saving = await save_in_progress_parameter.get()
            except sunspec.core.client.SunSpecClientError:
                continue

            if not saving:
                break
        else:
            raise Exception()
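
A hypothetical usage sketch for the class above: open a Modbus TCP connection,
build the UUID maps, and save settings to non-volatile memory. The model path
and network address are made up.

async def save_settings():
    dev = SunSpecDevice(model_path="models/smdx")
    dev.load_tcp(ipaddr="192.168.1.40", ipport=502)
    dev.map_uuids()
    await dev.to_nv()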
Exemple #23
0
class Glyph:
    name: str = attr.ib()
    unicodes: List[str] = attr.ib(default=attr.Factory(list))

    leftKerningGroup: str = attr.ib(default="")
    rightKerningGroup: str = attr.ib(default="")
    bottomKerningGroup: str = attr.ib(default="")
    topKerningGroup: str = attr.ib(default="")

    _layers: List[Layer] = attr.ib(default=attr.Factory(list))

    color: Optional[Tuple] = attr.ib(default=None)
    _extraData: Optional[Dict] = attr.ib(default=None)

    _lastModified: Optional[float] = attr.ib(default=None, init=False)
    _parent: Optional[Any] = attr.ib(default=None, init=False)
    selected: bool = attr.ib(default=False, init=False)

    def __attrs_post_init__(self):
        for layer in self._layers:
            layer._parent = self

    def __repr__(self):
        return "%s(%r, %d layers)" % (self.__class__.__name__, self.name,
                                      len(self._layers))

    def __setattr__(self, key, value):
        try:
            font = self._parent
        except AttributeError:
            pass
        else:
            if font is not None and key[0] != "_" \
                                and key != "selected":
                oldValue = getattr(self, key)
                if value != oldValue:
                    obj_setattr(self, key, value)
                    self._lastModified = time()
                return
        obj_setattr(self, key, value)

    @property
    def extraData(self):
        extraData = self._extraData
        if extraData is None:
            extraData = self._extraData = {}
        return extraData

    @property
    def font(self):
        return self._parent

    @property
    def lastModified(self):
        return self._lastModified

    @property
    def layers(self):
        return GlyphLayersList(self)

    @property
    def unicode(self):
        unicodes = self.unicodes
        if unicodes:
            return unicodes[0]
        return None

    def layerForMaster(self, master):
        if master is None:
            font = self._parent
            if font is not None:
                name = font.selectedMaster.name
            else:
                raise ValueError("unreachable fallback master")
        elif master.__class__ is str:
            name = master
        else:
            name = master.name
        layers = self._layers
        for layer in layers:
            if not layer._name and layer.masterName == name:
                return layer
        layer = Layer(masterName=name)
        layer._parent = self
        layers.append(layer)
        return layer
Exemple #24
0
class FakeChannel:
    """
    A fake Twisted Web Channel (the part that interfaces with the
    wire).
    """

    site: Union[Site, "FakeSite"]
    _reactor: MemoryReactor
    result: dict = attr.Factory(dict)
    _ip: str = "127.0.0.1"
    _producer: Optional[Union[IPullProducer, IPushProducer]] = None
    resource_usage: Optional[ContextResourceUsage] = None
    _request: Optional[Request] = None

    @property
    def request(self) -> Request:
        assert self._request is not None
        return self._request

    @request.setter
    def request(self, request: Request) -> None:
        assert self._request is None
        self._request = request

    @property
    def json_body(self):
        return json.loads(self.text_body)

    @property
    def text_body(self) -> str:
        """The body of the result, utf-8-decoded.

        Raises an exception if the request has not yet completed.
        """
        if not self.is_finished():
            raise Exception("Request not yet completed")
        return self.result["body"].decode("utf8")

    def is_finished(self) -> bool:
        """check if the response has been completely received"""
        return self.result.get("done", False)

    @property
    def code(self):
        if not self.result:
            raise Exception("No result yet.")
        return int(self.result["code"])

    @property
    def headers(self) -> Headers:
        if not self.result:
            raise Exception("No result yet.")
        h = Headers()
        for i in self.result["headers"]:
            h.addRawHeader(*i)
        return h

    def writeHeaders(self, version, code, reason, headers):
        self.result["version"] = version
        self.result["code"] = code
        self.result["reason"] = reason
        self.result["headers"] = headers

    def write(self, content):
        assert isinstance(content, bytes), "Should be bytes! " + repr(content)

        if "body" not in self.result:
            self.result["body"] = b""

        self.result["body"] += content

    def registerProducer(self, producer, streaming):
        self._producer = producer
        self.producerStreaming = streaming

        def _produce():
            if self._producer:
                self._producer.resumeProducing()
                self._reactor.callLater(0.1, _produce)

        if not streaming:
            self._reactor.callLater(0.0, _produce)

    def unregisterProducer(self):
        if self._producer is None:
            return

        self._producer = None

    def requestDone(self, _self):
        self.result["done"] = True
        if isinstance(_self, SynapseRequest):
            self.resource_usage = _self.logcontext.get_resource_usage()

    def getPeer(self):
        # We give an address so that getClientAddress/getClientIP returns a non-null entry,
        # causing us to record the MAU
        return address.IPv4Address("TCP", self._ip, 3423)

    def getHost(self):
        # this is called by Request.__init__ to configure Request.host.
        return address.IPv4Address("TCP", "127.0.0.1", 8888)

    def isSecure(self):
        return False

    @property
    def transport(self):
        return self

    def await_result(self, timeout_ms: int = 1000) -> None:
        """
        Wait until the request is finished.
        """
        end_time = self._reactor.seconds() + timeout_ms / 1000.0
        self._reactor.run()

        while not self.is_finished():
            # If there's a producer, tell it to resume producing so we get content
            if self._producer:
                self._producer.resumeProducing()

            if self._reactor.seconds() > end_time:
                raise TimedOutException("Timed out waiting for request to finish.")

            self._reactor.advance(0.1)

    def extract_cookies(self, cookies: MutableMapping[str, str]) -> None:
        """Process the contents of any Set-Cookie headers in the response

        Any cookies found are added to the given dict
        """
        headers = self.headers.getRawHeaders("Set-Cookie")
        if not headers:
            return

        for h in headers:
            parts = h.split(";")
            k, v = parts[0].split("=", maxsplit=1)
            cookies[k] = v
Exemple #25
0
class OpenTSDBTarget(object):
    """Generates OpenTSDB target JSON structure.

    Grafana docs on using OpenTSDB:
    http://docs.grafana.org/features/datasources/opentsdb/
    OpenTSDB docs on querying or reading data:
    http://opentsdb.net/docs/build/html/user_guide/query/index.html


    :param metric: OpenTSDB metric name
    :param refId: target reference id
    :param aggregator: defines metric aggregator.
        The list of opentsdb aggregators:
        http://opentsdb.net/docs/build/html/user_guide/query/aggregators.html#available-aggregators
    :param alias: legend alias. Use patterns like $tag_tagname to replace part
        of the alias for a tag value.
    :param isCounter: defines if rate function results should
        be interpreted as a counter
    :param counterMax: defines rate counter max value
    :param counterResetValue: defines rate counter reset value
    :param disableDownsampling: defines if downsampling should be disabled.
        OpenTSDB docs on downsampling:
        http://opentsdb.net/docs/build/html/user_guide/query/index.html#downsampling
    :param downsampleAggregator: defines downsampling aggregator
    :param downsampleFillPolicy: defines downsampling fill policy
    :param downsampleInterval: defines downsampling interval
    :param filters: defines the list of metric query filters.
        OpenTSDB docs on filters:
        http://opentsdb.net/docs/build/html/user_guide/query/index.html#filters
    :param shouldComputeRate: defines if rate function should be used.
        OpenTSDB docs on rate function:
        http://opentsdb.net/docs/build/html/user_guide/query/index.html#rate
    :param currentFilterGroupBy: defines if grouping should be enabled for
        current filter
    :param currentFilterKey: defines current filter key
    :param currentFilterType: defines current filter type
    :param currentFilterValue: defines current filter value
    """

    metric = attr.ib()
    refId = attr.ib(default="")
    aggregator = attr.ib(default='sum')
    alias = attr.ib(default=None)
    isCounter = attr.ib(default=False, validator=instance_of(bool))
    counterMax = attr.ib(default=None)
    counterResetValue = attr.ib(default=None)
    disableDownsampling = attr.ib(default=False, validator=instance_of(bool))
    downsampleAggregator = attr.ib(default=OTSDB_AGG_SUM)
    downsampleFillPolicy = attr.ib(
        default=OTSDB_DOWNSAMPLING_FILL_POLICY_DEFAULT,
        validator=is_in(OTSDB_DOWNSAMPLING_FILL_POLICIES))
    downsampleInterval = attr.ib(default=None)
    filters = attr.ib(default=attr.Factory(list))
    shouldComputeRate = attr.ib(default=False, validator=instance_of(bool))
    currentFilterGroupBy = attr.ib(default=False, validator=instance_of(bool))
    currentFilterKey = attr.ib(default="")
    currentFilterType = attr.ib(default=OTSDB_QUERY_FILTER_DEFAULT)
    currentFilterValue = attr.ib(default="")

    def to_json_data(self):

        return {
            'aggregator': self.aggregator,
            'alias': self.alias,
            'isCounter': self.isCounter,
            'counterMax': self.counterMax,
            'counterResetValue': self.counterResetValue,
            'disableDownsampling': self.disableDownsampling,
            'downsampleAggregator': self.downsampleAggregator,
            'downsampleFillPolicy': self.downsampleFillPolicy,
            'downsampleInterval': self.downsampleInterval,
            'filters': self.filters,
            'metric': self.metric,
            'refId': self.refId,
            'shouldComputeRate': self.shouldComputeRate,
            'currentFilterGroupBy': self.currentFilterGroupBy,
            'currentFilterKey': self.currentFilterKey,
            'currentFilterType': self.currentFilterType,
            'currentFilterValue': self.currentFilterValue,
        }
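
A brief usage sketch of the target above; the metric name, alias, and filter
dict are invented (the filter layout follows the OpenTSDB filter JSON linked in
the docstring).

target = OpenTSDBTarget(
    metric='sys.cpu.user',
    alias='$tag_host user cpu',
    aggregator='avg',
    shouldComputeRate=True,
    filters=[{'type': 'literal_or', 'tagk': 'host',
              'filter': 'web01', 'groupBy': True}],
)
print(target.to_json_data()['aggregator'])   # avg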
Exemple #26
0
class PythonPackageGraphSolver:
    """A wrapper to manipulate with Python packages using pure PackageVersion object interface."""

    graph = attr.ib(type=GraphDatabase, kw_only=True)
    runtime_environment = attr.ib(
        type=RuntimeEnvironment,
        kw_only=True,
        default=attr.Factory(RuntimeEnvironment.from_dict),
    )
    # Do not instantiate multiple objects for same python package tuple to optimize memory usage.
    _package_versions = attr.ib(
        type=Dict[Tuple[str, str, str], PackageVersion],
        default=attr.Factory(dict),
        kw_only=True,
    )
    # Have just one instance of Source object per python package source index url.
    _sources = attr.ib(type=Dict[str, Source],
                       default=attr.Factory(dict),
                       kw_only=True)
    _solver = attr.ib(type=PythonGraphSolver, default=None, kw_only=True)

    @property
    def solver(self) -> PythonGraphSolver:
        """Retrieve solver instance resolving using graph database."""
        if not self._solver:
            self._solver = PythonGraphSolver(
                dependency_parser=PackageVersionDependencyParser(),
                releases_fetcher=GraphReleasesFetcher(
                    graph=self.graph,
                    runtime_environment=self.runtime_environment),
            )

        return self._solver

    def solve(self,
              dependencies: List[PackageVersion],
              graceful: bool = True) -> Dict[str, List[PackageVersion]]:
        """Solve the given dependencies and return object representation of packages."""
        result = {}
        # First, construct the map for checking packages.
        dependencies_map = {
            dependency.name: dependency
            for dependency in dependencies
        }

        resolved = self.solver.solve(dependencies, graceful=graceful)
        if not resolved:
            return {}

        for package_name, versions in resolved.items():
            # If this pop fails, it means that the package name has changed over the resolution.
            original_package = dependencies_map.pop(package_name)
            result_versions = []
            for version, index_url in versions:
                package_tuple = (original_package.name, version, index_url)
                package_version = self._package_versions.get(package_tuple)
                if not package_version:
                    source = self._sources.get(index_url)
                    if not source:
                        source = Source(index_url)
                        self._sources[index_url] = source

                    package_version = PackageVersion(
                        name=original_package.name,
                        version="==" + version,
                        index=source,
                        develop=original_package.develop,
                    )

                result_versions.append(package_version)

            result[original_package.name] = result_versions

        return result
Exemple #27
0
class HealthCheckState(object):
    valid_examples = attr.ib(default=0)
    invalid_examples = attr.ib(default=0)
    overrun_examples = attr.ib(default=0)
    draw_times = attr.ib(default=attr.Factory(list))
Exemple #28
0
class PanCompleter(Completer):
    """Completer for panctl commands."""

    commands = attr.ib(type=List[str])
    ctl = attr.ib()
    devices = attr.ib()
    rooms = attr.ib(init=False, default=attr.Factory(lambda: defaultdict(set)))
    path_completer = PathCompleter(expanduser=True)

    def complete_commands(self, last_word):
        """Complete the available commands."""
        compl_words = self.filter_words(self.commands, last_word)
        for compl_word in compl_words:
            yield Completion(compl_word, -len(last_word))

    def complete_users(self, last_word, pan_user):
        devices = self.devices.List(pan_user)
        users = set(device["user_id"] for device in devices)
        compl_words = self.filter_words(users, last_word)

        for compl_word in compl_words:
            yield Completion(compl_word, -len(last_word))

        return ""

    def complete_devices(self, last_word, pan_user, user_id):
        devices = self.devices.ListUserDevices(pan_user, user_id)
        device_ids = [device["device_id"] for device in devices]
        compl_words = self.filter_words(device_ids, last_word)

        for compl_word in compl_words:
            yield Completion(compl_word, -len(last_word))

        return ""

    def filter_words(self, words, last_word):
        compl_words = []

        for word in words:
            if last_word in word:
                compl_words.append(word)

        return compl_words

    def complete_pan_users(self, last_word):
        servers = self.ctl.ListServers()
        users = [item[0] for sublist in servers.values() for item in sublist]
        compl_words = self.filter_words(users, last_word)

        for compl_word in compl_words:
            yield Completion(compl_word, -len(last_word))

    def complete_verification(self, command, last_word, words):
        if len(words) == 2:
            return self.complete_pan_users(last_word)
        elif len(words) == 3:
            pan_user = words[1]
            return self.complete_users(last_word, pan_user)
        elif len(words) == 4:
            pan_user = words[1]
            user_id = words[2]
            return self.complete_devices(last_word, pan_user, user_id)

        return ""

    def complete_key_file_cmds(self, document, complete_event, command,
                               last_word, words):
        if len(words) == 2:
            return self.complete_pan_users(last_word)
        elif len(words) == 3:
            return self.path_completer.get_completions(Document(last_word),
                                                       complete_event)

        return ""

    def complete_rooms(self, pan_user, last_word, words):
        rooms = self.rooms[pan_user]
        compl_words = self.filter_words(list(rooms), last_word)

        for compl_word in compl_words:
            yield Completion(compl_word, -len(last_word))

        return ""

    def complete_send_cmds(self, last_word, words):
        if len(words) == 2:
            return self.complete_pan_users(last_word)
        elif len(words) == 3:
            pan_user = words[1]
            return self.complete_rooms(pan_user, last_word, words)

        return ""

    def complete_list_devices(self, last_word, words):
        if len(words) == 2:
            return self.complete_pan_users(last_word)
        elif len(words) == 3:
            pan_user = words[1]
            return self.complete_users(last_word, pan_user)

        return ""

    def get_completions(self, document, complete_event):
        """Build the completions."""
        text_before_cursor = document.text_before_cursor
        text_before_cursor = str(text_before_cursor)
        words = text_before_cursor.split(" ")

        last_word = words[-1]

        if len(words) == 1:
            return self.complete_commands(last_word)

        if len(words) > 1:
            command = words[0]

            if command in [
                    "start-verification",
                    "accept-verification",
                    "confirm-verification",
                    "cancel-verification",
                    "verify-device",
                    "unverify-device",
                    "blacklist-device",
                    "unblacklist-device",
            ]:
                return self.complete_verification(command, last_word, words)

            elif command in ["export-keys", "import-keys"]:
                return self.complete_key_file_cmds(document, complete_event,
                                                   command, last_word, words)

            elif command in ["send-anyways", "cancel-sending"]:
                return self.complete_send_cmds(last_word, words)

            elif command == "list-devices":
                return self.complete_list_devices(last_word, words)

            elif command == "help":
                if len(words) == 2:
                    return self.complete_commands(last_word)
                else:
                    return ""

            elif command in ["cancel-keyshare", "continue-keyshare"]:
                return self.complete_verification(command, last_word, words)

        return ""
Exemple #29
0
class VlobAtom:
    id = attr.ib()
    read_trust_seed = attr.ib(default=attr.Factory(generate_trust_seed))
    write_trust_seed = attr.ib(default=attr.Factory(generate_trust_seed))
    blob = attr.ib(default=b'')
    version = attr.ib(default=1)
Exemple #30
0
class AreaEntry:
    """Area Registry Entry."""

    name = attr.ib(type=str, default=None)
    id = attr.ib(type=str, default=attr.Factory(lambda: uuid.uuid4().hex))