Example #1
    # pyre-fixme[11]: Annotation `Json` is not defined as a type.
    # pyre-fixme[11]: Annotation `Json` is not defined as a type.
    # pyre-fixme[11]: Annotation `Json` is not defined as a type.
    sent: Optional[Json]
    received: Optional[Json]

    def __repr__(self) -> str:
        return (
            "TranscriptEntry"
            + pprint.pformat({"sent": self.sent, "received": self.received})
            + "\n"
        )


Transcript = Mapping[str, TranscriptEntry]
U = TypeVar("U", bound="LspCommandProcessor")


class LspCommandProcessor:
    def __init__(
        self,
        # pyre-fixme[24]: Generic type `subprocess.Popen` expects 1 type parameter.
        proc: subprocess.Popen,
        reader: JsonRpcStreamReader,
        writer: JsonRpcStreamWriter,
    ) -> None:
        self.proc = proc
        self.reader = reader
        self.writer = writer

    @classmethod
Example #2
from dataclasses import dataclass
from typing import TypeVar, Optional, Dict, Any

from dbt.adapters.base.column import Column

Self = TypeVar('Self', bound='MySQLColumn')


@dataclass
class MySQLColumn(Column):
    table_database: Optional[str] = None
    table_schema: Optional[str] = None
    table_name: Optional[str] = None
    table_type: Optional[str] = None
    table_owner: Optional[str] = None
    table_stats: Optional[Dict[str, Any]] = None
    column_index: Optional[int] = None

    @property
    def quoted(self) -> str:
        return '`{}`'.format(self.column)

    def __repr__(self) -> str:
        return "<MySQLColumn {} ({})>".format(self.name, self.data_type)
Example #3
from limits.storage import Storage  # type: ignore
from limits.storage import MemoryStorage, storage_from_string
from limits.strategies import STRATEGIES, RateLimiter  # type: ignore
from starlette.applications import Starlette
from starlette.config import Config
from starlette.exceptions import HTTPException
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request
from starlette.responses import JSONResponse, Response

from .errors import RateLimitExceeded
from .util import get_ipaddr
from .wrappers import Limit, LimitGroup

# used to annotate get_app_config method
T = TypeVar("T")
# Define an alias for the most commonly used type
StrOrCallableStr = Union[str, Callable[..., str]]


class C:
    ENABLED = "RATELIMIT_ENABLED"
    HEADERS_ENABLED = "RATELIMIT_HEADERS_ENABLED"
    STORAGE_URL = "RATELIMIT_STORAGE_URL"
    STORAGE_OPTIONS = "RATELIMIT_STORAGE_OPTIONS"
    STRATEGY = "RATELIMIT_STRATEGY"
    GLOBAL_LIMITS = "RATELIMIT_GLOBAL"
    DEFAULT_LIMITS = "RATELIMIT_DEFAULT"
    APPLICATION_LIMITS = "RATELIMIT_APPLICATION"
    HEADER_LIMIT = "RATELIMIT_HEADER_LIMIT"
    HEADER_REMAINING = "RATELIMIT_HEADER_REMAINING"
Example #4
from idom.utils import Ref

from .component import AbstractComponent

__all__ = [
    "use_state",
    "use_effect",
    "use_reducer",
    "use_callback",
    "use_ref",
    "use_memo",
]

logger = getLogger(__name__)

_StateType = TypeVar("_StateType")


def use_state(
    initial_value: Union[_StateType, Callable[[], _StateType]],
) -> Tuple[_StateType, Callable[
    [Union[_StateType, Callable[[_StateType], _StateType]]], None]]:
    """See the full :ref:`use_state` docs for details

    Parameters:
        initial_value:
            Defines the initial value of the state. A callable (accepting no arguments)
            can be used as a constructor function to avoid re-creating the initial value
            on each render.

    Returns:
Example #5
from mypyc.errors import Errors
from mypyc.options import CompilerOptions
from mypyc.ir.rtypes import none_rprimitive
from mypyc.ir.module_ir import ModuleIR, ModuleIRs
from mypyc.ir.func_ir import FuncIR, FuncDecl, FuncSignature
from mypyc.irbuild.prebuildvisitor import PreBuildVisitor
from mypyc.irbuild.vtable import compute_vtable
from mypyc.irbuild.prepare import build_type_map
from mypyc.irbuild.builder import IRBuilder
from mypyc.irbuild.visitor import IRBuilderVisitor
from mypyc.irbuild.mapper import Mapper


# The stubs for callable contextmanagers are busted so cast it to the
# right type...
F = TypeVar('F', bound=Callable[..., Any])
strict_optional_dec = cast(Callable[[F], F], strict_optional_set(True))


@strict_optional_dec  # Turn on strict optional for any type manipulations we do
def build_ir(modules: List[MypyFile],
             graph: Graph,
             types: Dict[Expression, Type],
             mapper: 'Mapper',
             options: CompilerOptions,
             errors: Errors) -> ModuleIRs:
    """Build IR for a set of modules that have been type-checked by mypy."""

    build_type_map(mapper, modules, graph, types, options, errors)

    result = OrderedDict()  # type: ModuleIRs
Example #6
import numpy as np
import tensorflow as tf
import torch

from fastestimator.backend.argmax import argmax
from fastestimator.backend.concat import concat
from fastestimator.backend.get_image_dims import get_image_dims
from fastestimator.backend.reduce_max import reduce_max
from fastestimator.backend.squeeze import squeeze
from fastestimator.trace.trace import Trace
from fastestimator.util.data import Data
from fastestimator.util.img_data import ImgData
from fastestimator.util.traceability_util import traceable
from fastestimator.util.util import to_number

Tensor = TypeVar('Tensor', tf.Tensor, torch.Tensor, np.ndarray)


@traceable()
class EigenCAM(Trace):
    """A trace which draws EigenCAM heatmaps on top of images.

    These are useful for visualizing the outputs of the feature extractor component of a model. They are relatively
    insensitive to adversarial attacks, so don't use them to try and detect those. See https://arxiv.org/abs/2008.00299
    for more details.

    Args:
        images: The key corresponding to images onto which to draw the CAM outputs.
        activations: The key corresponding to outputs from a convolution layer from which to draw the CAM outputs. You
            can easily extract these from any model by using the 'intermediate_layers' variable in a ModelOp.
        n_components: How many principal components to visualize. If you pass a float between 0 and 1 it will instead
Example #7
# Python bytecode 2.7 (decompiled from Python 2.7)
# Embedded file name: scripts/common/items/components/c11n_components.py
import items
import items.vehicles as iv
from items.components import shared_components
from soft_exception import SoftException
from items.components.c11n_constants import ApplyArea, SeasonType, ItemTags, CustomizationType, MAX_CAMOUFLAGE_PATTERN_SIZE, DecalType
from typing import List, Dict, Type, Tuple, Optional, Union, TypeVar
Item = TypeVar('Item')

class BaseCustomizationItem(object):
    __slots__ = ('id', 'tags', 'filter', 'parentGroup', 'season', 'historical', 'i18n', 'priceGroup', 'requiredToken', 'priceGroupTags', 'maxNumber')
    allSlots = __slots__
    itemType = 0

    def __init__(self, parentGroup=None):
        self.id = 0
        self.tags = frozenset()
        self.filter = None
        self.season = SeasonType.ALL
        self.historical = False
        self.i18n = None
        self.priceGroup = ''
        self.priceGroupTags = frozenset()
        self.requiredToken = ''
        self.maxNumber = 0
        if parentGroup and parentGroup.itemPrototype:
            for field in self.allSlots:
                setattr(self, field, getattr(parentGroup.itemPrototype, field))

        self.parentGroup = parentGroup
Example #8
class _HypothesisTail(object):
    """
    Hold some bookkeeping about a hypothesis.
    """

    # use slots because we don't want dynamic attributes here
    __slots__ = ['timestep', 'hypid', 'score', 'tokenid']

    def __init__(self, timestep, hypid, score, tokenid):
        self.timestep = timestep
        self.hypid = hypid
        self.score = score
        self.tokenid = tokenid


TSType = TypeVar('TSType', bound='TreeSearch')


class TreeSearch(object):
    """
    Abstract Tree Search class.

    It keeps information about beam_size concurrent, developing hypotheses. Concrete
    implementations make choices about which token to explore next at each point in the
    tree. Different choices result in different generation algorithms.
    """

    def __init__(
        self,
        beam_size,
        block_ngram=-1,
Example #9
import inspect
from typing import Any, Dict, Optional, Type, TypeVar, Union

from fastapi_ext.view import View
from fastapi_ext.view._routes import (
    APIRouteEntry,
    APIWebsocketRouteEntry,
    RouteEntryManager,
)
from fastapi_ext.view._utils import is_any_method
from fastapi_ext.view.decorators.types import MemberType

ExtendMemberType = Union[MemberType, type]
ExtendDecoratedMember = TypeVar("ExtendDecoratedMember", bound=ExtendMemberType)


class ModifyDecorator:
    def __call__(self, type_or_fn: ExtendDecoratedMember) -> ExtendDecoratedMember:
        if isinstance(type_or_fn, type):
            if not issubclass(type_or_fn, View) or type_or_fn is View:
                raise TypeError("Decorated class should be View subclass")
            self.extend_view(type_or_fn)
        elif inspect.isfunction(type_or_fn) or is_any_method(type_or_fn):
            self._extend_func(type_or_fn)
        else:
            raise TypeError(f"Not compatible type to decorate {type(type_or_fn)}")
        return type_or_fn

    def _extend_func(self, func: MemberType):
        for entry in RouteEntryManager.find(func):
            if isinstance(entry, APIRouteEntry):
Example #10
from pathlib import Path
from typing import Any, Callable, Dict, Optional, Type, TypeVar

import jinja2
from typing_extensions import Protocol

from ..base_context import ExecutionContext
from .core import ensure_path, resolve_path

DEFAULT_TEMPLATE_VARIABLE_PREFIX = "qwikstart"
TEMPLATE_VARIABLE_META_PREFIX = "_meta_"
TRenderer = TypeVar("TRenderer", bound="TemplateRenderer")


class TemplateContext(Protocol):
    @property
    def execution_context(self) -> ExecutionContext:
        pass  # pragma: no cover

    @property
    def template_variables(self) -> Dict[str, Any]:
        pass  # pragma: no cover

    @property
    def template_variable_prefix(self) -> str:
        pass  # pragma: no cover


class TemplateRenderer:
    def __init__(
        self,
Example #11
import torch
from texar.data.data.data_base import DataBase
from texar.data.data.dataset_utils import Batch
from texar.utils.types import MaybeSeq
from texar.utils.utils import ceildiv

__all__ = [
    "DataIterator",
    "TrainTestDataIterator",
    "BatchingStrategy",
    "TokenCountBatchingStrategy",
]

DatasetsType = Union[Dict[str, DataBase], MaybeSeq[DataBase]]
Example = TypeVar('Example')

# pylint: disable=attribute-defined-outside-init
# TODO: Remove this when Pylint fixes the bug. If the `disable` directive is not
#  added, Pylint incorrectly reports this error for `self.size` in subclasses of
#  `SamplerBase` in Python 3.6 due to use of the Generic class.
#  See Pylint issue: https://github.com/PyCQA/pylint/issues/2981


class SamplerBase(torch_sampler.Sampler, Generic[Example]):
    r"""A subclass of :torch_docs:`~torch.utils.data.Sampler
    <data.html#torch.utils.data.Sampler>` that supports:

    - Returning raw examples when required.
    - Creating iterators with unknown dataset size.
Example #12
Example usage:

  @logged_retry_on_retriable_http_error()
  def function_to_retry_on_retriable_http_error():
    pass
"""

import functools
import logging
from typing import Callable, TypeVar

from airflow import exceptions
from googleapiclient import errors
import tenacity

_RT = TypeVar('_RT')  # General return variable

_RETRY_UTILS_MAX_RETRIES = 5

TOO_MANY_REQUESTS_ERROR = 429
INTERNAL_SERVER_ERROR = 500
SERVICE_UNAVAILABLE_ERROR = 503
RETRY_UTILS_RETRIABLE_STATUS_CODES = (TOO_MANY_REQUESTS_ERROR,
                                      INTERNAL_SERVER_ERROR,
                                      SERVICE_UNAVAILABLE_ERROR)

_LOGGER = logging.getLogger(__name__)


def _is_retriable_http_error(error: errors.HttpError) -> bool:
    """Checks if HttpError is in RETRY_UTILS_RETRIABLE_STATUS_CODES.
Example #13
@castable.register(Any, Any)
@castable.register(Null, Any)
@castable.register(Integer, Category)
@castable.register(Integer, (Floating, Decimal))
@castable.register(Floating, Decimal)
@castable.register((Date, Timestamp), (Date, Timestamp))
def can_cast_any(source: DataType, target: DataType, **kwargs) -> bool:
    return True


@castable.register(Null, DataType)
def can_cast_null(source: DataType, target: DataType, **kwargs) -> bool:
    return target.nullable


Integral = TypeVar('Integral', SignedInteger, UnsignedInteger)


@castable.register(SignedInteger, UnsignedInteger)
@castable.register(UnsignedInteger, SignedInteger)
def can_cast_to_differently_signed_integer_type(source: Integral,
                                                target: Integral,
                                                value: Optional[int] = None,
                                                **kwargs) -> bool:
    if value is None:
        return False
    bounds = target.bounds
    return bounds.lower <= value <= bounds.upper


@castable.register(SignedInteger, SignedInteger)
Example #14
from typing import TypeVar, List, Tuple
from collections import Counter
from scratch.linear_algebra import distance
import pandas as pd

X = TypeVar('X')  # generic type to represent a data point

Vector = List[float]
airquality_lst = ['best', 'better', 'good', 'normal', 'bad', 'worse', 'serious', 'worst']


def split_data(data: List[X], prob: float) -> Tuple[List[X], List[X]]:
    """Split data into fractions [prob, 1 - prob]"""
    data = data[:]  # make a shallow copy
    # random.shuffle(data)  # left commented out because shuffle mutates the list in place
    cut = int(len(data) * prob)  # use prob to pick the cut position
    return data[:cut], data[cut:]  # split the list at that point


def majority_vote(labels):
    """labels는 가장 가까운 데이터부터 가장 먼 데이터 순서로 정렬되어 있다고 가정"""
    vote_counts = Counter(labels)
    winner, winner_count = vote_counts.most_common(1)[0]
    num_winners = len([count
                       for count in vote_counts.values()
                       if count == winner_count])

    if num_winners == 1:
        return winner  # unique winner, so return it
    else:
        return majority_vote(labels[:-1])  # drop the farthest label and try again
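
A small usage sketch for the two helpers above (the label values are made up; majority_vote assumes labels are ordered nearest-first):

train, test = split_data(list(range(10)), 0.8)
assert len(train) == 8 and len(test) == 2  # cut = int(10 * 0.8)

# 'good' wins 3 to 2, so there is a single winner and no tie-breaking recursion.
assert majority_vote(['good', 'bad', 'good', 'bad', 'good']) == 'good'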
Example #15
        self.lr = 0.003
        self.batch_size = 100


class BikeNYCConfig:
    def __init__(self):
        self.T = 24  # timestep
        self.W = 6  # convolution window size (convolution filter height)
        self.n = 6  # the number of the long-term memory series
        self.highway_window = 12  # the window size of ar model

        self.D = 256  # input's variable dimension (convolution filter width)
        self.K = 256  # output's variable dimension

        self.horizon = 1  # the horizon of predicted value

        self.en_conv_hidden_size = 32
        self.en_rnn_hidden_sizes = [
            32, 32
        ]  # last size is equal to en_conv_hidden_size

        self.input_keep_prob = 0.8
        self.output_keep_prob = 1.0

        self.lr = 0.003
        self.batch_size = 32


ConfigType = TypeVar("ConfigType", BikeNYCConfig, TaxiNYConfig,
                     SolarEnergyConfig, BJpmConfig)
Example #16
class InstantiationResult(NamedTuple):
    """Information about whether a class can be instantiated or not. """

    result: bool
    error_message: Optional[str]


# Type definitions

AsyncFunc = Callable[..., Awaitable]
Job = Callable[[], Any]
UpstreamType = Union[List[Job], Job, None]
LockPlaceholder = Union[Lock, Semaphore, None]
DependencyType = Dict[Job, Set[Job]]

RetType = TypeVar("RetType", covariant=True)


class SupportsAsyncWith(Protocol[RetType]):
    """Type definition for objects that support ``async with``. """

    async def __aenter__(self) -> RetType:
        pass

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc: Optional[BaseException],
        tb: Optional[types.TracebackType],
    ) -> None:
        pass
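
A minimal sketch of a type that structurally satisfies SupportsAsyncWith (the class below is hypothetical, not part of the original module):

class NullAsyncContext:
    async def __aenter__(self) -> None:
        return None

    async def __aexit__(self, exc_type, exc, tb) -> None:
        return None

ctx: SupportsAsyncWith[None] = NullAsyncContext()  # accepted by structural typing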
Example #17
from typing import List, Type, TypeVar


T = TypeVar('T', bound='RegisteredObjectBase')


class RegisteredObjectBase:
    registered_map = {}

    def __init_subclass__(cls, name: str = None):
        if name is not None:
            cls.registered_map[name] = cls

    @classmethod
    def registered_names(cls) -> List[str]:
        return list(cls.registered_map.keys())

    @classmethod
    def find_registered_class(cls: Type[T], name: str) -> Type[T]:
        return cls.registered_map[name]
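
A short sketch of the registration hook above in use (the subclass name is hypothetical):

class Widget(RegisteredObjectBase, name='widget'):
    pass

assert 'widget' in RegisteredObjectBase.registered_names()
assert RegisteredObjectBase.find_registered_class('widget') is Widget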
Example #18
# Standard library
from functools import (
    wraps, )
from typing import (
    Any,
    Callable,
    TypeVar,
)

# Third party libraries
from starlette.responses import (
    JSONResponse, )

# Constants
TFun = TypeVar('TFun', bound=Callable[..., Any])


def api_error_boundary(function: TFun) -> TFun:
    @wraps(function)
    async def wrapper(*args: Any, **kwargs: Any) -> Any:
        try:
            return await function(*args, **kwargs)
        except Exception as exc:
            return JSONResponse(
                content={
                    'ok': False,
                    'error': str(exc),
                    'error_type':
                    f'{type(exc).__module__}.{type(exc).__name__}',
                },
                status_code=400,
Example #19
  intersect:
    The intersection between two or more Regions
    It is more general. An overlap is an intersect,
    but an intersect is not an overlap.

Abstract Classes:
- RIGraph
"""

from abc import ABCMeta, abstractmethod
from collections import abc
from typing import Any, Dict, Generic, Iterator, Tuple, TypeVar, Union

from ..shapes import Region, RegionIdPair, RegionPair

G = TypeVar('G')


class RIGraph(Generic[G], abc.Sized):  # pylint: disable=E1136
    """
  Abstract Class

  A graph representation of intersecting and overlapping Regions.
  Provides a programming interface for accessing and constructing
  the data representation.

  Generic:
    G:  The underlying graph representation and
        concrete implementation.

  Extends:
Example #20
from zerver.models import Realm, UserProfile, get_client, get_user_profile_by_api_key

if settings.ZILENCER_ENABLED:
    from zilencer.models import (
        RateLimitedRemoteZulipServer,
        RemoteZulipServer,
        get_remote_server_by_uuid,
    )

rate_limiter_logger = logging.getLogger("zerver.lib.rate_limiter")

webhook_logger = logging.getLogger("zulip.zerver.webhooks")
webhook_unsupported_events_logger = logging.getLogger(
    "zulip.zerver.webhooks.unsupported")

FuncT = TypeVar("FuncT", bound=Callable[..., object])


def cachify(method: FuncT) -> FuncT:
    dct: Dict[Tuple[object, ...], object] = {}

    def cache_wrapper(*args: object) -> object:
        tup = tuple(args)
        if tup in dct:
            return dct[tup]
        result = method(*args)
        dct[tup] = result
        return result

    return cast(FuncT,
                cache_wrapper)  # https://github.com/python/mypy/issues/1927
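
A hedged usage sketch for cachify (the decorated function is made up); repeated calls with the same positional arguments are served from the internal dict.

@cachify
def fib(n: int) -> int:
    return n if n < 2 else fib(n - 1) + fib(n - 2)

assert fib(10) == 55  # recursive calls also go through the memoizing wrapper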
Example #21
        "image_options": {},
        "compression_type": None,
        "other_transformations": [],
        "num_shards": None,
        "shard_id": None,
        "data_name": None,
        "@no_typecheck": [
            "files",
            "feature_original_types",
            "feature_convert_types",
            "image_options"
        ],
    }


RawExample = TypeVar('RawExample')


class PickleDataSource(DataSource[RawExample]):
    r"""Data source for reading from (multiple) pickled binary files. Each file
    could contain multiple pickled objects, and each object is yielded as an
    example.

    This data source does not support indexing.

    Args:
        file_paths (str or list[str]): Paths to pickled binary files.
        lists_are_examples (bool): If `True`, lists will be treated as
            a single example; if `False`, each element in the list will be
            treated as separate examples. Default is `True`. Set this to
            `False` if the entire pickled binary file is a list.
Example #22
from abc import ABC, abstractmethod
import graphviz
from typing import (Dict, Iterable, Generic, Sequence, Tuple, Mapping,
                    Optional, TypeVar)
from collections import defaultdict
import numpy as np
from pprint import pprint

from rl.distribution import (Categorical, Choose, Distribution,
                             FiniteDistribution, SampledDistribution)

S = TypeVar('S')

Transition = Mapping[S, Optional[FiniteDistribution[S]]]


class MarkovProcess(ABC, Generic[S]):
    '''A Markov process with states of type S.
    '''
    @abstractmethod
    def transition(self, state: S) -> Optional[Distribution[S]]:
        '''Given a state of the process, returns a distribution of
        the next states.  Returning None means we are in a terminal state.
        '''

    def is_terminal(self, state: S) -> bool:
        '''Return whether the given state is a terminal state.

        The default implementation of is_terminal calculates a transition
        from the current state, so it could be worth overloading this
        method if your process has a cheaper way of determining whether
Example #23
from typing import Dict, TypeVar, Callable

L = TypeVar("L")
R = TypeVar("R")


def values_map(v_map: Dict[L, R]) -> Callable[[L], R]:
    def converter(left: L):
        if left in v_map:
            return v_map[left]
        else:
            raise ValueError(f"Value {left} is missing in value map.")

    return converter
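
A quick usage sketch (the mapping is made up); the returned converter looks values up and raises ValueError for anything missing.

to_roman = values_map({1: 'I', 2: 'II', 3: 'III'})
assert to_roman(2) == 'II'
# to_roman(4) raises ValueError: Value 4 is missing in value map.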
Example #24
from sqlalchemy import (Boolean, Column, Date, DateTime, ForeignKey, Integer,
                        SmallInteger, String)
from sqlalchemy.ext.orderinglist import ordering_list
from sqlalchemy.orm import relationship

from .base import PkModel, PkModelWithTimestamps
from .relation_table import product_paintwork_table, product_sculptor_table

__all__ = [
    "ProductOfficialImage",
    "ProductReleaseInfo",
    "Product"
]


P = TypeVar('P', bound='ProductOfficialImage')


class ProductOfficialImage(PkModel):
    __tablename__ = "product_official_image"

    url = Column(String)
    order = Column(Integer)
    product_id = Column(Integer, ForeignKey("product.id", ondelete="CASCADE"), nullable=False)

    @classmethod
    def create_image_list(cls: Type[P], image_urls: list[str]) -> list[P]:
        images = []

        for url in image_urls:
            image = ProductOfficialImage(url=url)
Example #25
                    Window(width=self.padding_right, char=char),
                ]),
                Window(height=self.padding_bottom, char=char),
            ],
            width=width,
            height=height,
            style=style,
            modal=modal,
            key_bindings=None,
        )

    def __pt_container__(self) -> Container:
        return self.container


_T = TypeVar("_T")


class _DialogList(Generic[_T]):
    """
    Common code for `RadioList` and `CheckboxList`.
    """

    open_character: str = ""
    close_character: str = ""
    container_style: str = ""
    default_style: str = ""
    selected_style: str = ""
    checked_style: str = ""
    multiple_selection: bool = False
    show_scrollbar: bool = True
Example #26
class QEMUMonitorProtocol:
    """
    Provide an API to connect to QEMU via the QEMU Monitor Protocol (QMP) and
    then handle commands and events.
    """

    #: Logger object for debugging messages
    logger = logging.getLogger('QMP')

    def __init__(self, address: SocketAddrT,
                 server: bool = False,
                 nickname: Optional[str] = None):
        """
        Create a QEMUMonitorProtocol class.

        @param address: QEMU address, can be either a unix socket path (string)
                        or a tuple in the form ( address, port ) for a TCP
                        connection
        @param server: server mode listens on the socket (bool)
        @raise OSError on socket connection errors
        @note No connection is established, this is done by the connect() or
              accept() methods
        """
        self.__events: List[QMPMessage] = []
        self.__address = address
        self.__sock = self.__get_sock()
        self.__sockfile: Optional[TextIO] = None
        self._nickname = nickname
        if self._nickname:
            self.logger = logging.getLogger('QMP').getChild(self._nickname)
        if server:
            self.__sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            self.__sock.bind(self.__address)
            self.__sock.listen(1)

    def __get_sock(self) -> socket.socket:
        if isinstance(self.__address, tuple):
            family = socket.AF_INET
        else:
            family = socket.AF_UNIX
        return socket.socket(family, socket.SOCK_STREAM)

    def __negotiate_capabilities(self) -> QMPMessage:
        greeting = self.__json_read()
        if greeting is None or "QMP" not in greeting:
            raise QMPConnectError
        # Greeting seems ok, negotiate capabilities
        resp = self.cmd('qmp_capabilities')
        if resp and "return" in resp:
            return greeting
        raise QMPCapabilitiesError

    def __json_read(self, only_event: bool = False) -> Optional[QMPMessage]:
        assert self.__sockfile is not None
        while True:
            data = self.__sockfile.readline()
            if not data:
                return None
            # By definition, any JSON received from QMP is a QMPMessage,
            # and we are asserting only at static analysis time that it
            # has a particular shape.
            resp: QMPMessage = json.loads(data)
            if 'event' in resp:
                self.logger.debug("<<< %s", resp)
                self.__events.append(resp)
                if not only_event:
                    continue
            return resp

    def __get_events(self, wait: Union[bool, float] = False) -> None:
        """
        Check for new events in the stream and cache them in __events.

        @param wait (bool): block until an event is available.
        @param wait (float): If wait is a float, treat it as a timeout value.

        @raise QMPTimeoutError: If a timeout float is provided and the timeout
                                period elapses.
        @raise QMPConnectError: If wait is True but no events could be
                                retrieved or if some other error occurred.
        """

        # Current timeout and blocking status
        current_timeout = self.__sock.gettimeout()

        # Check for new events regardless and pull them into the cache:
        self.__sock.settimeout(0)  # i.e. setblocking(False)
        try:
            self.__json_read()
        except OSError as err:
            # EAGAIN: No data available; not critical
            if err.errno != errno.EAGAIN:
                raise
        finally:
            self.__sock.settimeout(current_timeout)

        # Wait for new events, if needed.
        # if wait is 0.0, this means "no wait" and is also implicitly false.
        if not self.__events and wait:
            if isinstance(wait, float):
                self.__sock.settimeout(wait)
            try:
                ret = self.__json_read(only_event=True)
            except socket.timeout as err:
                raise QMPTimeoutError("Timeout waiting for event") from err
            except Exception as err:
                msg = "Error while reading from socket"
                raise QMPConnectError(msg) from err
            finally:
                self.__sock.settimeout(current_timeout)

            if ret is None:
                raise QMPConnectError("Error while reading from socket")

    T = TypeVar('T')

    def __enter__(self: T) -> T:
        # Implement context manager enter function.
        return self

    def __exit__(self,
                 # pylint: disable=duplicate-code
                 # see https://github.com/PyCQA/pylint/issues/3619
                 exc_type: Optional[Type[BaseException]],
                 exc_val: Optional[BaseException],
                 exc_tb: Optional[TracebackType]) -> None:
        # Implement context manager exit function.
        self.close()

    @classmethod
    def parse_address(cls, address: str) -> SocketAddrT:
        """
        Parse a string into a QMP address.

        Figure out if the argument is in the host:port form.
        If it's not, it's probably a file path.
        """
        components = address.split(':')
        if len(components) == 2:
            try:
                port = int(components[1])
            except ValueError:
                msg = f"Bad port: '{components[1]}' in '{address}'."
                raise QMPBadPortError(msg) from None
            return (components[0], port)

        # Treat as filepath.
        return address

    def connect(self, negotiate: bool = True) -> Optional[QMPMessage]:
        """
        Connect to the QMP Monitor and perform capabilities negotiation.

        @return QMP greeting dict, or None if negotiate is false
        @raise OSError on socket connection errors
        @raise QMPConnectError if the greeting is not received
        @raise QMPCapabilitiesError if fails to negotiate capabilities
        """
        self.__sock.connect(self.__address)
        self.__sockfile = self.__sock.makefile(mode='r')
        if negotiate:
            return self.__negotiate_capabilities()
        return None

    def accept(self, timeout: Optional[float] = 15.0) -> QMPMessage:
        """
        Await connection from QMP Monitor and perform capabilities negotiation.

        @param timeout: timeout in seconds (nonnegative float number, or
                        None). The value passed will set the behavior of the
                        underneath QMP socket as described in [1].
                        Default value is set to 15.0.

        @return QMP greeting dict
        @raise OSError on socket connection errors
        @raise QMPConnectError if the greeting is not received
        @raise QMPCapabilitiesError if fails to negotiate capabilities

        [1]
        https://docs.python.org/3/library/socket.html#socket.socket.settimeout
        """
        self.__sock.settimeout(timeout)
        self.__sock, _ = self.__sock.accept()
        self.__sockfile = self.__sock.makefile(mode='r')
        return self.__negotiate_capabilities()

    def cmd_obj(self, qmp_cmd: QMPMessage) -> QMPMessage:
        """
        Send a QMP command to the QMP Monitor.

        @param qmp_cmd: QMP command to be sent as a Python dict
        @return QMP response as a Python dict
        """
        self.logger.debug(">>> %s", qmp_cmd)
        self.__sock.sendall(json.dumps(qmp_cmd).encode('utf-8'))
        resp = self.__json_read()
        if resp is None:
            raise QMPConnectError("Unexpected empty reply from server")
        self.logger.debug("<<< %s", resp)
        return resp

    def cmd(self, name: str,
            args: Optional[Dict[str, object]] = None,
            cmd_id: Optional[object] = None) -> QMPMessage:
        """
        Build a QMP command and send it to the QMP Monitor.

        @param name: command name (string)
        @param args: command arguments (dict)
        @param cmd_id: command id (dict, list, string or int)
        """
        qmp_cmd: QMPMessage = {'execute': name}
        if args:
            qmp_cmd['arguments'] = args
        if cmd_id:
            qmp_cmd['id'] = cmd_id
        return self.cmd_obj(qmp_cmd)

    def command(self, cmd: str, **kwds: object) -> QMPReturnValue:
        """
        Build and send a QMP command to the monitor, report errors if any
        """
        ret = self.cmd(cmd, kwds)
        if 'error' in ret:
            raise QMPResponseError(ret)
        if 'return' not in ret:
            raise QMPProtocolError(
                "'return' key not found in QMP response '{}'".format(str(ret))
            )
        return cast(QMPReturnValue, ret['return'])

    def pull_event(self,
                   wait: Union[bool, float] = False) -> Optional[QMPMessage]:
        """
        Pulls a single event.

        @param wait (bool): block until an event is available.
        @param wait (float): If wait is a float, treat it as a timeout value.

        @raise QMPTimeoutError: If a timeout float is provided and the timeout
                                period elapses.
        @raise QMPConnectError: If wait is True but no events could be
                                retrieved or if some other error occurred.

        @return The first available QMP event, or None.
        """
        self.__get_events(wait)

        if self.__events:
            return self.__events.pop(0)
        return None

    def get_events(self, wait: bool = False) -> List[QMPMessage]:
        """
        Get a list of available QMP events.

        @param wait (bool): block until an event is available.
        @param wait (float): If wait is a float, treat it as a timeout value.

        @raise QMPTimeoutError: If a timeout float is provided and the timeout
                                period elapses.
        @raise QMPConnectError: If wait is True but no events could be
                                retrieved or if some other error occurred.

        @return The list of available QMP events.
        """
        self.__get_events(wait)
        return self.__events

    def clear_events(self) -> None:
        """
        Clear current list of pending events.
        """
        self.__events = []

    def close(self) -> None:
        """
        Close the socket and socket file.
        """
        if self.__sock:
            self.__sock.close()
        if self.__sockfile:
            self.__sockfile.close()

    def settimeout(self, timeout: Optional[float]) -> None:
        """
        Set the socket timeout.

        @param timeout (float): timeout in seconds (non-zero), or None.
        @note This is a wrap around socket.settimeout

        @raise ValueError: if timeout was set to 0.
        """
        if timeout == 0:
            msg = "timeout cannot be 0; this engages non-blocking mode."
            msg += " Use 'None' instead to disable timeouts."
            raise ValueError(msg)
        self.__sock.settimeout(timeout)

    def get_sock_fd(self) -> int:
        """
        Get the socket file descriptor.

        @return The file descriptor number.
        """
        return self.__sock.fileno()

    def is_scm_available(self) -> bool:
        """
        Check if the socket allows for SCM_RIGHTS.

        @return True if SCM_RIGHTS is available, otherwise False.
        """
        return self.__sock.family == socket.AF_UNIX
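
A minimal, hedged usage sketch; the socket path is hypothetical and assumes QEMU was started with a QMP server on that UNIX socket (e.g. -qmp unix:/tmp/qmp.sock,server,nowait).

qmp = QEMUMonitorProtocol(QEMUMonitorProtocol.parse_address('/tmp/qmp.sock'))
qmp.connect()                         # greeting + capabilities negotiation
status = qmp.command('query-status')  # returns the 'return' payload as a dict
print(status.get('status'))           # e.g. 'running'
qmp.close()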
Example #27
        extract_root = download_root
    if not filename:
        filename = os.path.basename(url)

    download_url(url, download_root, filename, md5)

    archive = os.path.join(download_root, filename)
    print("Extracting {} to {}".format(archive, extract_root))
    extract_archive(archive, extract_root, remove_finished)


def iterable_to_str(iterable: Iterable) -> str:
    return "'" + "', '".join([str(item) for item in iterable]) + "'"


T = TypeVar("T", str, bytes)


def verify_str_arg(
    value: T,
    arg: Optional[str] = None,
    valid_values: Iterable[T] = None,
    custom_msg: Optional[str] = None,
) -> T:
    if not isinstance(value, torch._six.string_classes):
        if arg is None:
            msg = "Expected type str, but got type {type}."
        else:
            msg = "Expected type str for argument {arg}, but got type {type}."
        msg = msg.format(type=type(value), arg=arg)
        raise ValueError(msg)
Example #28
import dataclasses
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, Optional, Type, TypeVar, Union

from .class_validators import gather_all_validators
from .error_wrappers import ValidationError
from .errors import DataclassTypeError
from .fields import Required
from .main import create_model, validate_model
from .typing import AnyType

if TYPE_CHECKING:
    from .main import BaseModel  # noqa: F401

    DataclassT = TypeVar('DataclassT', bound='DataclassType')

    class DataclassType:
        __pydantic_model__: Type[BaseModel]
        __initialised__: bool

        def __init__(self, *args: Any, **kwargs: Any) -> None:
            pass

        @classmethod
        def __validate__(cls: Type['DataclassT'], v: Any) -> 'DataclassT':
            pass

        def __call__(self: 'DataclassT', *args: Any, **kwargs: Any) -> 'DataclassT':
            pass


def _validate_dataclass(cls: Type['DataclassT'], v: Any) -> 'DataclassT':
Example #29
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling

from .. import models as _models

if TYPE_CHECKING:
    # pylint: disable=unused-import,ungrouped-imports
    from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union

    T = TypeVar('T')
    ClsType = Optional[Callable[
        [PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]


class ExpressRoutePortsOperations(object):
    """ExpressRoutePortsOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2019_09_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
Example #30
from typing import TypeVar, Sequence, Dict, Type, List, Union, cast, Any
from logging import Logger
from .. import db, app
from sqlalchemy.orm import joinedload, subqueryload, Query
from datetime import datetime, date
from flask_restplus.errors import ValidationError

X = TypeVar('X', bound=db.Model)


class GetByID():

    _joined_load = []  # type: List[str]
    _subquery_load = []  # type: List[str]

    @classmethod
    def prepare_query(cls: Type[X], lazy: bool=False) -> Query:
        query = cls.query
        if lazy:
            return query
        options = []
        for attr in cls._joined_load:
            options.append(joinedload(attr))
        for attr in cls._subquery_load:
            options.append(subqueryload(attr))

        if options:
            query = query.options(*options)
        return query

    @staticmethod