Example #1
    """
    # TODO: support a setter for `msg` to create an instance of a type
    # according to `msg_type`
    typename = MESSAGE_RESPONSE
    schema = (
        (f.MSG_TYPE.nm, ChooseField(values=MessageReq.allowed_types)),
        (f.PARAMS.nm, AnyMapField()),
        (f.MSG.nm, AnyField())
    )


ThreePhaseType = (PrePrepare, Prepare, Commit)
ThreePhaseMsg = TypeVar("3PhaseMsg", *ThreePhaseType)

ThreePhaseKey = NamedTuple("ThreePhaseKey", [
    f.VIEW_NO,
    f.PP_SEQ_NO
])


class BatchCommitted(MessageBase):
    """
    Purpose: pass to Observable after each batch is committed
    (so that Observable can propagate the data to Observers using ObservedData msg)
    """
    typename = BATCH_COMMITTED
    schema = (
        (f.REQUESTS.nm,
         IterableField(ClientMessageValidator(
             operation_schema_is_strict=OPERATION_SCHEMA_IS_STRICT))),
        (f.LEDGER_ID.nm, LedgerIdField()),
        (f.INST_ID.nm, NonNegativeNumberField()),
Example #2
from urllib3.exceptions import HTTPError

from hax.exception import HAConsistencyException, InterruptedException
from hax.types import (ConfHaProcess, Fid, FsStatsWithTime, ObjT,
                       ServiceHealth, Profile)

__all__ = [
    'ConsulUtil', 'create_process_fid', 'create_service_fid',
    'create_sdev_fid', 'create_drive_fid'
]

LOG = logging.getLogger('hax')

# XXX What is the difference between `ip_addr` and `address`?
# The names are hard to discern.
ServiceData = NamedTuple('ServiceData', [('node', str), ('fid', Fid),
                                         ('ip_addr', str), ('address', str)])

FidWithType = NamedTuple('FidWithType', [('fid', Fid), ('service_type', str)])


def mkServiceData(service: Dict[str, Any]) -> ServiceData:
    return ServiceData(
        node=service['Node'],
        fid=mk_fid(
            ObjT.PROCESS,  # XXX s/PROCESS/SERVICE/ ?
            int(service['ServiceID'])),
        ip_addr=service['Address'],
        address='{}:{}'.format(service['ServiceAddress'],
                               service['ServicePort']))
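
The following is a hedged usage sketch, not part of the original module: it shows the shape of the Consul health-API entry this function consumes (payload values invented; mk_fid is defined elsewhere in the module). Note how the node IP lands in ip_addr while the service address/port pair becomes address.

consul_entry = {
    'Node': 'srvnode-1',           # Consul node name
    'ServiceID': '12',             # numeric id, parsed with int()
    'Address': '192.168.0.28',     # node IP -> ServiceData.ip_addr
    'ServiceAddress': '192.168.0.28',
    'ServicePort': '4001',
}
data = mkServiceData(consul_entry)
# data.node    == 'srvnode-1'
# data.ip_addr == '192.168.0.28'
# data.address == '192.168.0.28:4001'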

Example #3
        response = mk_eval(response)
    except Exception:
        # The remote site will send non-Python data in case of an error.
        raise MKAutomationException("%s: <pre>%s</pre>" %
                                    (_("Got invalid data"), response))
    return response


def push_user_profiles_to_site(site, user_profiles):
    return do_remote_automation(site,
                                "push-profiles",
                                [("profiles", repr(user_profiles))],
                                timeout=60)


PushUserProfilesRequest = NamedTuple("PushUserProfilesRequest",
                                     [("user_profiles", dict)])


@automation_command_registry.register
class PushUserProfilesToSite(AutomationCommand):
    def command_name(self):
        return "push-profiles"

    def get_request(self):
        return PushUserProfilesRequest(
            ast.literal_eval(
                html.request.get_ascii_input_mandatory("profiles")))

    def execute(self, request):
        user_profiles = request.user_profiles
Example #4
# -*- coding: utf-8 -*-
# Copyright (C) 2020 tribe29 GmbH - License: GNU General Public License v2
# This file is part of Checkmk (https://checkmk.com). It is subject to the terms and
# conditions defined in the file COPYING, which is part of this source code package.

import copy
from typing import NamedTuple, List, Tuple, Set, Dict, Optional

from cmk.utils.type_defs import ServiceName
from cmk.utils.bi.bi_lib import RequiredBIElement, BIHostSpec
from cmk.utils.bi.bi_trees import BICompiledRule, BICompiledAggregation, NodeResultBundle

BIAggregationFilter = NamedTuple("BIAggregationFilter", [
    ("hosts", List[BIHostSpec]),
    ("services", List[Tuple[BIHostSpec, ServiceName]]),
    ("aggr_ids", List[str]),
    ("aggr_titles", List[str]),
    ("group_names", List[str]),
    ("group_path_prefix", List[str]),
])


class BIComputer:
    def __init__(self, compiled_aggregations, bi_status_fetcher):
        self._compiled_aggregations: Dict[str, BICompiledAggregation] = compiled_aggregations
        self._bi_status_fetcher = bi_status_fetcher
        self._legacy_branch_cache = {}

    def compute_aggregation_result(
        self,
        aggr_id: str,
        title: str,
Example #5
PKG_DB = BUILDDIR / 'packages.conf'  # package db

PackageName = NewType('PackageName', str)
Version = NewType('Version', str)
SHA256Hash = NewType('SHA256Hash', str)


class PackageSource(Enum):
    HACKAGE = 'hackage'
    LOCAL = 'local'


url = str

BuiltinDep = NamedTuple('BuiltinDep', [
    ('package', PackageName),
    ('version', Version),
])

BootstrapDep = NamedTuple(
    'BootstrapDep',
    [
        ('package', PackageName),
        ('version', Version),
        ('source', PackageSource),
        # source tarball SHA256
        ('src_sha256', Optional[SHA256Hash]),
        # `revision` is only valid when source == HACKAGE.
        ('revision', Optional[int]),
        ('cabal_sha256', Optional[SHA256Hash]),
        ('flags', List[str]),
    ])
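
A hedged construction sketch (package name, version, and revision invented, not taken from the original script): a Hackage dependency may carry a cabal-file revision, which per the comment above is meaningless for LOCAL sources.

dep = BootstrapDep(
    package=PackageName('text'),
    version=Version('1.2.4.1'),
    source=PackageSource.HACKAGE,
    src_sha256=None,    # would normally hold the source tarball's SHA256
    revision=1,         # only valid because source == HACKAGE
    cabal_sha256=None,
    flags=[],
)
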
Example #6
class FromStepOutput(
        NamedTuple(
            "_FromStepOutput",
            [
                ("step_output_handle", StepOutputHandle),
                ("solid_handle", SolidHandle),
                ("input_name", str),
                ("fan_in", bool),
            ],
        ),
        StepInputSource,
):
    """This step input source is the output of a previous step"""
    def __new__(cls, step_output_handle, solid_handle, input_name, fan_in):
        return super(FromStepOutput, cls).__new__(
            cls,
            step_output_handle=check.inst_param(step_output_handle,
                                                "step_output_handle",
                                                StepOutputHandle),
            solid_handle=check.inst_param(solid_handle, "solid_handle",
                                          SolidHandle),
            input_name=check.str_param(input_name, "input_name"),
            fan_in=check.bool_param(fan_in, "fan_in"),
        )

    @property
    def step_key_dependencies(self) -> Set[str]:
        return {self.step_output_handle.step_key}

    @property
    def step_output_handle_dependencies(self) -> List[StepOutputHandle]:
        return [self.step_output_handle]

    def get_load_context(
            self,
            step_context: "SystemStepExecutionContext") -> "InputContext":
        io_manager_key = step_context.execution_plan.get_manager_key(
            self.step_output_handle)
        resource_config = step_context.environment_config.resources[
            io_manager_key].config
        resources = build_resources_for_manager(io_manager_key, step_context)

        input_def = self.get_input_def(step_context.pipeline_def)

        solid_config = step_context.environment_config.solids.get(
            str(self.solid_handle))
        config_data = solid_config.inputs.get(
            self.input_name) if solid_config else None

        return step_context.for_input_manager(
            input_def.name,
            config_data,
            input_def.metadata,
            input_def.dagster_type,
            self.step_output_handle,
            resource_config,
            resources,
        )

    def load_input_object(
        self, step_context: "SystemStepExecutionContext"
    ) -> Iterator["DagsterEvent"]:
        from dagster.core.events import DagsterEvent
        from dagster.core.storage.intermediate_storage import IntermediateStorageAdapter

        source_handle = self.step_output_handle
        manager_key = step_context.execution_plan.get_manager_key(
            source_handle)
        input_manager = step_context.get_io_manager(source_handle)
        check.invariant(
            isinstance(input_manager, IOManager),
            f'Input "{self.input_name}" for step "{step_context.step.key}" is depending on '
            f'the manager of upstream output "{source_handle.output_name}" from step '
            f'"{source_handle.step_key}" to load it, but that manager is not an IOManager. '
            f"Please ensure that the resource returned for resource key "
            f'"{manager_key}" is an IOManager.',
        )
        yield _load_input_with_input_manager(
            input_manager, self.get_load_context(step_context))
        yield DagsterEvent.loaded_input(
            step_context,
            input_name=self.input_name,
            manager_key=manager_key,
            upstream_output_name=source_handle.output_name,
            upstream_step_key=source_handle.step_key,
            message_override=
            f'Loaded input "{self.input_name}" using intermediate storage'
            if isinstance(input_manager, IntermediateStorageAdapter) else None,
        )

    def compute_version(
        self,
        step_versions: Dict[str, Optional[str]],
        pipeline_def: PipelineDefinition,
        environment_config: EnvironmentConfig,
    ) -> Optional[str]:
        if (self.step_output_handle.step_key not in step_versions
                or not step_versions[self.step_output_handle.step_key]):
            return None
        else:
            return join_and_hash(
                step_versions[self.step_output_handle.step_key],
                self.step_output_handle.output_name)

    def required_resource_keys(self,
                               _pipeline_def: PipelineDefinition) -> Set[str]:
        return set()

    def get_asset_lineage(
            self, step_context: "SystemStepExecutionContext"
    ) -> List[AssetLineageInfo]:
        source_handle = self.step_output_handle
        input_manager = step_context.get_io_manager(source_handle)
        load_context = self.get_load_context(step_context)

        # check input_def
        input_def = self.get_input_def(step_context.pipeline_def)
        if input_def.is_asset:
            lineage_info = _get_asset_lineage_from_fns(
                load_context, input_def.get_asset_key,
                input_def.get_asset_partitions)
            return [lineage_info] if lineage_info else []

        # check io manager
        io_lineage_info = _get_asset_lineage_from_fns(
            load_context,
            input_manager.get_input_asset_key,
            input_manager.get_input_asset_partitions,
        )
        if io_lineage_info is not None:
            return [io_lineage_info]

        # check output_def
        upstream_output = step_context.execution_plan.get_step_output(
            self.step_output_handle)
        if upstream_output.is_asset:
            lineage_info = _get_asset_lineage_from_fns(
                load_context.upstream_output,
                upstream_output.get_asset_key,
                upstream_output.get_asset_partitions,
            )
            return [lineage_info] if lineage_info else []

        return []
Example #7
class FromPendingDynamicStepOutput(
        NamedTuple(
            "_FromPendingDynamicStepOutput",
            [
                ("step_output_handle", StepOutputHandle),
                ("solid_handle", SolidHandle),
                ("input_name", str),
            ],
        ), ):
    """
    This step input source models being directly downstream of a step with dynamic output.
    Once that step completes successfully, this will resolve once per DynamicOutput.
    """
    def __new__(
        cls,
        step_output_handle: StepOutputHandle,
        solid_handle: SolidHandle,
        input_name: str,
    ):
        # Model the unknown mapping key from known execution step
        # using a StepOutputHandle with None mapping_key.
        check.inst_param(step_output_handle, "step_output_handle",
                         StepOutputHandle)
        check.invariant(step_output_handle.mapping_key is None)

        return super(FromPendingDynamicStepOutput, cls).__new__(
            cls,
            step_output_handle=step_output_handle,
            solid_handle=check.inst_param(solid_handle, "solid_handle",
                                          SolidHandle),
            input_name=check.str_param(input_name, "input_name"),
        )

    @property
    def resolved_by_step_key(self) -> str:
        return self.step_output_handle.step_key

    @property
    def resolved_by_output_name(self) -> str:
        return self.step_output_handle.output_name

    def resolve(self, mapping_key) -> FromStepOutput:
        check.str_param(mapping_key, "mapping_key")
        return FromStepOutput(
            step_output_handle=StepOutputHandle(
                step_key=self.step_output_handle.step_key,
                output_name=self.step_output_handle.output_name,
                mapping_key=mapping_key,
            ),
            solid_handle=self.solid_handle,
            input_name=self.input_name,
            fan_in=False,
        )

    def get_step_output_handle_dep_with_placeholder(self) -> StepOutputHandle:
        # None mapping_key on StepOutputHandle acts as placeholder
        return self.step_output_handle

    def required_resource_keys(self,
                               _pipeline_def: PipelineDefinition) -> Set[str]:
        return set()

    def get_input_def(self,
                      pipeline_def: PipelineDefinition) -> InputDefinition:
        return pipeline_def.get_solid(self.solid_handle).input_def_named(
            self.input_name)
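
A hedged sketch of the resolution flow; the SolidHandle constructor call is an assumption, while the StepOutputHandle keywords mirror the ones used in resolve() above:

pending = FromPendingDynamicStepOutput(
    step_output_handle=StepOutputHandle(
        step_key="dynamic_step", output_name="result", mapping_key=None),
    solid_handle=SolidHandle("downstream_solid", None),  # assumed signature
    input_name="numbers",
)
concrete = pending.resolve("key_a")
# resolve() produced a FromStepOutput whose handle now carries the mapping key
assert concrete.step_output_handle.mapping_key == "key_a"
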
Example #8
    with_args,
)
from prefect.utilities.logging import create_diagnostic_logger

if TYPE_CHECKING:
    from prefect.core import Flow
    import requests
JSONLike = Union[bool, dict, list, str, int, float, None]

# type definitions for GraphQL results

TaskRunInfoResult = NamedTuple(
    "TaskRunInfoResult",
    [
        ("id", str),
        ("task_id", str),
        ("task_slug", str),
        ("version", int),
        ("state", "prefect.engine.state.State"),
    ],
)

FlowRunInfoResult = NamedTuple(
    "FlowRunInfoResult",
    [
        ("id", str),
        ("name", str),
        ("flow_id", str),
        ("parameters", Dict[str, Any]),
        ("context", Dict[str, Any]),
        ("version", int),
        ("scheduled_start_time", datetime.datetime),
Example #9
# model's eval_input_receiver_fn returns.
# pyformat: disable
AddMetricsCallbackType = Any
# pyformat: enable

# Type of keys we support for prediction, label and features dictionaries.
FPLKeyType = Union[Text, Tuple[Text, ...]]

# Dictionary of Tensor values fetched. The dictionary maps original dictionary
# keys => ('node' => value). This type exists for backward compatibility with
# FeaturesPredictionsLabels, new code should use DictOfTensorValue instead.
DictOfFetchedTensorValues = Dict[FPLKeyType, Dict[Text, TensorValue]]

FeaturesPredictionsLabels = NamedTuple(
    'FeaturesPredictionsLabels', [('input_ref', int),
                                  ('features', DictOfFetchedTensorValues),
                                  ('predictions', DictOfFetchedTensorValues),
                                  ('labels', DictOfFetchedTensorValues)])

# Used in building the model diagnostics table, a MaterializedColumn is a value
# inside of Extracts that will be emitted to file. Note that for strings, the
# values are raw byte strings rather than unicode strings. This is by design, as
# features can have arbitrary bytes values.
MaterializedColumn = NamedTuple(
    'MaterializedColumn',
    [('name', Text),
     ('value', Union[List[bytes], List[int], List[float], bytes, int, float])])

# Extracts represent data extracted during pipeline processing. In order to
# provide a flexible API, these types are just dicts where the keys are defined
# (reserved for use) by different extractor implementations. For example, the
Example #10
    NoneTyp,
    TupleType,
    TypeList,
)
from mypy.visitor import NodeVisitor

MYPY = False
if MYPY:
    from typing_extensions import Final

Options = NamedTuple('Options', [
    ('pyversion', Tuple[int, int]),
    ('no_import', bool),
    ('doc_dir', str),
    ('search_path', List[str]),
    ('interpreter', str),
    ('modules', List[str]),
    ('ignore_errors', bool),
    ('recursive', bool),
    ('include_private', bool),
    ('output_dir', str),
])


class CantImport(Exception):
    pass


def generate_stub_for_module(module: str,
                             output_dir: str,
                             quiet: bool = False,
                             add_header: bool = False,
Example #11
#.
#   .--Gettext i18n--------------------------------------------------------.
#   |           ____      _   _            _     _ _  ___                  |
#   |          / ___| ___| |_| |_ _____  _| |_  (_) |( _ ) _ __            |
#   |         | |  _ / _ \ __| __/ _ \ \/ / __| | | |/ _ \| '_ \           |
#   |         | |_| |  __/ |_| ||  __/>  <| |_  | | | (_) | | | |          |
#   |          \____|\___|\__|\__\___/_/\_\\__| |_|_|\___/|_| |_|          |
#   |                                                                      |
#   +----------------------------------------------------------------------+
#   | Handling of the regular localization of the GUI                      |
#   '----------------------------------------------------------------------'

# NullTranslations is the base class used by all translation classes in gettext
Translation = NamedTuple("Translation", [
    ("translation", gettext_module.NullTranslations),
    ("name", str),
])

# Current active translation object
_translation: Optional[Translation] = None


def _(message: str) -> str:
    if _translation:
        return _translation.translation.gettext(message)
    return str(message)
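
A minimal sketch of how activation is meant to work (catalog name and locale are invented, not from the original module). While _translation is None, _() is a pass-through:

assert _("Host") == "Host"  # no catalog installed yet

# With a compiled catalog on disk, activation would look roughly like:
# trans = gettext_module.translation("multisite", localedir, ["de"])
# _translation = Translation(translation=trans, name="de")
# after which _("Host") returns the German string from the catalog.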


def _l(string: str) -> str:
    """Like _() but the string returned is lazy which means it will be translated when it is used as
    an actual string."""
Example #12
from ..utils import try_import_pytorch

torch = try_import_pytorch()
from torch.optim import Adam, lr_scheduler  # noqa
from torch.utils.data import DataLoader  # noqa
from torch.utils.tensorboard import SummaryWriter  # noqa

__all__ = [
    'fit',
]

train_val_result = NamedTuple('train_val_result', [
    ('model', torch.nn.Module),
    ('training_dataset', torch.utils.data.Dataset),
    ('validation_dataset', torch.utils.data.Dataset),
    ('evaluation', List[dict]),
    ('y_train_true', np.ndarray),
    ('y_train_pred', np.ndarray),
    ('y_val_true', np.ndarray),
    ('y_val_pred', np.ndarray),
])


def _train_and_validate_model(model: torch.nn.Module,
                              criterion,
                              optimizer,
                              scheduler,
                              data_loaders: dict,
                              *,
                              num_epochs=2,
                              tensorboard_exp=None,
                              stop_after: int = None,
Example #13
    elif config.type == "location":
        return LocationAttention(config.input_previous_word, max_seq_len)
    elif config.type == "mlp":
        return MlpAttention(input_previous_word=config.input_previous_word,
                            attention_num_hidden=config.num_hidden,
                            layer_normalization=config.layer_normalization)
    elif config.type == "coverage":
        return MlpAttention(input_previous_word=config.input_previous_word,
                            attention_num_hidden=config.num_hidden,
                            layer_normalization=config.layer_normalization,
                            config_coverage=config.config_coverage)
    else:
        raise ValueError("Unknown attention type %s" % config.type)


AttentionInput = NamedTuple('AttentionInput', [('seq_idx', int),
                                               ('query', mx.sym.Symbol)])
"""
Input to attention callables.

:param seq_idx: Decoder time step / sequence index.
:param query: Query input to attention mechanism, e.g. decoder hidden state (plus previous word).
"""

AttentionState = NamedTuple('AttentionState', [
    ('context', mx.sym.Symbol),
    ('probs', mx.sym.Symbol),
    ('dynamic_source', mx.sym.Symbol),
])
"""
Results returned from attention callables.
Example #14
File: config.py  Project: gpsbird/sphinx
if PY3:
    CONFIG_SYNTAX_ERROR += "\nDid you change the syntax from 2.x to 3.x?"
CONFIG_ERROR = "There is a programable error in your configuration file:\n\n%s"
CONFIG_EXIT_ERROR = "The configuration file (or one of the modules it imports) " \
                    "called sys.exit()"
CONFIG_ENUM_WARNING = "The config value `{name}` has to be a one of {candidates}, " \
                      "but `{current}` is given."
CONFIG_PERMITTED_TYPE_WARNING = "The config value `{name}' has type `{current.__name__}', " \
                                "expected to {permitted}."
CONFIG_TYPE_WARNING = "The config value `{name}' has type `{current.__name__}', " \
                      "defaults to `{default.__name__}'."

if PY3:
    unicode = str  # special alias for static typing...

ConfigValue = NamedTuple('ConfigValue', [('name', str), ('value', Any),
                                         ('rebuild', Union[bool, unicode])])


class ENUM(object):
    """represents the config value should be a one of candidates.

    Example:
        app.add_config_value('latex_show_urls', 'no', None, ENUM('no', 'footnote', 'inline'))
    """
    def __init__(self, *candidates):
        # type: (unicode) -> None
        self.candidates = candidates

    def match(self, value):
        # type: (Union[unicode,List,Tuple]) -> bool
        if isinstance(value, (list, tuple)):
Example #15
from stp_zmq.authenticator import MultiZapAuthenticator
from zmq.utils import z85
from zmq.utils.monitor import recv_monitor_message

import zmq
from stp_core.common.log import getlogger
from stp_core.network.network_interface import NetworkInterface
from stp_zmq.util import createEncAndSigKeys, \
    moveKeyFilesToCorrectLocations, createCertsFromKeys
from stp_zmq.remote import Remote, set_keepalive, set_zmq_internal_queue_size
from plenum.common.exceptions import InvalidMessageExceedingSizeException, BaseExc
from stp_core.validators.message_length_validator import MessageLenValidator

logger = getlogger()

Quota = NamedTuple("Quota", [("count", int), ("size", int)])


# TODO: Use Async io
# TODO: There a number of methods related to keys management,
# they can be moved to some class like KeysManager
class ZStack(NetworkInterface):
    # Assuming only one listener per stack for now.

    PublicKeyDirName = 'public_keys'
    PrivateKeyDirName = 'private_keys'
    VerifKeyDirName = 'verif_keys'
    SigKeyDirName = 'sig_keys'

    sigLen = 64
    pingMessage = 'pi'
Example #16
class EvalSharedModel(
    NamedTuple(
        'EvalSharedModel',
        [
            ('model_path', Text),
            ('add_metrics_callbacks',
             List[Callable]),  # List[AnyMetricsCallbackType]
            ('include_default_metrics', bool),
            ('example_weight_key', Union[Text, Dict[Text, Text]]),
            ('additional_fetches', List[Text]),
            ('model_loader', ModelLoader),
            ('model_name', Text),
            ('model_type', Text),
            ('rubber_stamp', bool),
        ])):
  # pyformat: disable
  """Shared model used during extraction and evaluation.

  Attributes:
    model_path: Path to EvalSavedModel (containing the saved_model.pb file).
    add_metrics_callbacks: Optional list of callbacks for adding additional
      metrics to the graph. The names of the metrics added by the callbacks
      should not conflict with existing metrics. See below for more details
      about what each callback should do. The callbacks are only used during
      evaluation.
    include_default_metrics: True to include the default metrics that are part
      of the saved model graph during evaluation.
    example_weight_key: Example weight key (single-output model) or dict of
      example weight keys (multi-output model) keyed by output_name.
    additional_fetches: Prefixes of additional tensors stored in
      signature_def.inputs that should be fetched at prediction time. The
      "features" and "labels" tensors are handled automatically and should not
      be included in this list.
    model_loader: Model loader.
    model_name: Model name (should align with ModelSpecs.name).
    model_type: Model type (tfma.TF_KERAS, tfma.TF_LITE, tfma.TF_ESTIMATOR, ..).
    rubber_stamp: True if this model is being rubber stamped. When a
      model is rubber stamped diff thresholds will be ignored if an associated
      baseline model is not passed.


  More details on add_metrics_callbacks:

    Each add_metrics_callback should have the following prototype:
      def add_metrics_callback(features_dict, predictions_dict, labels_dict):

    Note that features_dict, predictions_dict and labels_dict are not
    necessarily dictionaries - they might also be Tensors, depending on what the
    model's eval_input_receiver_fn returns.

    It should create and return a metric_ops dictionary, such that
    metric_ops['metric_name'] = (value_op, update_op), just as in the Trainer.

    Short example:

    def add_metrics_callback(features_dict, predictions_dict, labels):
      metric_ops = {}
      metric_ops['mean_label'] = tf.metrics.mean(labels)
      metric_ops['mean_probability'] = tf.metrics.mean(tf.slice(
        predictions_dict['probabilities'], [0, 1], [2, 1]))
      return metric_ops
  """
  # pyformat: enable

  def __new__(
      cls,
      model_path: Optional[Text] = None,
      add_metrics_callbacks: Optional[List[AddMetricsCallbackType]] = None,
      include_default_metrics: Optional[bool] = True,
      example_weight_key: Optional[Union[Text, Dict[Text, Text]]] = None,
      additional_fetches: Optional[List[Text]] = None,
      model_loader: Optional[ModelLoader] = None,
      model_name: Text = '',
      model_type: Text = '',
      rubber_stamp: bool = False,
      construct_fn: Optional[Callable[[], Any]] = None):
    if not add_metrics_callbacks:
      add_metrics_callbacks = []
    if model_loader and construct_fn:
      raise ValueError(
          'only one of model_loader or construct_fn should be used')
    if construct_fn:
      model_loader = ModelLoader(tags=None, construct_fn=construct_fn)
    if model_path is not None:
      model_path = six.ensure_str(model_path)
    return super(EvalSharedModel,
                 cls).__new__(cls, model_path, add_metrics_callbacks,
                              include_default_metrics, example_weight_key,
                              additional_fetches, model_loader, model_name,
                              model_type, rubber_stamp)
Example #17
        self.srcdir = "/"
        self.translator = translator
        self.verbosity = 0
        self._warncount = 0
        self.warningiserror = False

        self.config.add('autosummary_context', {}, True, None)
        self.config.add('autosummary_filename_map', {}, True, None)
        self.config.init_values()

    def emit_firstresult(self, *args: Any) -> None:
        pass


AutosummaryEntry = NamedTuple('AutosummaryEntry', [('name', str),
                                                   ('path', str),
                                                   ('template', str),
                                                   ('recursive', bool)])


def setup_documenters(app: Any) -> None:
    from sphinx.ext.autodoc import (AttributeDocumenter, ClassDocumenter,
                                    DataDocumenter, DecoratorDocumenter,
                                    ExceptionDocumenter, FunctionDocumenter,
                                    MethodDocumenter, ModuleDocumenter,
                                    NewTypeAttributeDocumenter,
                                    NewTypeDataDocumenter, PropertyDocumenter)
    documenters = [
        ModuleDocumenter,
        ClassDocumenter,
        ExceptionDocumenter,
        DataDocumenter,
Example #18
File: cell.py  Project: yy8yy/Cirq
        return self._basis_change

    def operations(self) -> 'cirq.OP_TREE':
        return self._operations

    def controlled_by(self, qubit: 'cirq.Qid') -> 'ExplicitOperationsCell':
        return ExplicitOperationsCell(
            [op.controlled_by(qubit) for op in self._operations],
            self._basis_change)


CELL_SIZES = range(1, 17)

CellMakerArgs = NamedTuple('CellMakerArgs', [
    ('qubits', Sequence['cirq.Qid']),
    ('value', Any),
    ('row', int),
    ('col', int),
])

CellMaker = NamedTuple('CellMaker', [
    ('identifier', str),
    ('size', int),
    ('maker', Callable[[CellMakerArgs], Union[None, 'Cell',
                                              'cirq.Operation']]),
])
CellMaker.__doc__ = """Turns Quirk identifiers into Cirq operations.

Attributes:
    identifier: A string that identifies the cell type, such as "X" or "QFT3".
    size: The height of the operation. The number of qubits it covers.
    maker: A function that takes a `cirq.contrib.quirk.cells.CellMakerArgs` and
Example #19
class FromMultipleSources(
        NamedTuple(
            "_FromMultipleSources",
            [
                ("solid_handle", SolidHandle),
                ("input_name", str),
                ("sources", List[StepInputSource]),
            ],
        ),
        StepInputSource,
):
    """This step input is fans-in multiple sources in to a single input. The input will receive a list."""
    def __new__(cls, solid_handle: SolidHandle, input_name: str, sources):
        check.list_param(sources, "sources", StepInputSource)
        for source in sources:
            check.invariant(
                not isinstance(source, FromMultipleSources),
                "Can not have multiple levels of FromMultipleSources StepInputSource",
            )
        return super(FromMultipleSources,
                     cls).__new__(cls,
                                  solid_handle=solid_handle,
                                  input_name=input_name,
                                  sources=sources)

    @property
    def step_key_dependencies(self):
        keys = set()
        for source in self.sources:
            keys.update(source.step_key_dependencies)

        return keys

    @property
    def step_output_handle_dependencies(self):
        handles = []
        for source in self.sources:
            handles.extend(source.step_output_handle_dependencies)

        return handles

    def _step_output_handles_no_output(self, step_context):
        # FIXME https://github.com/dagster-io/dagster/issues/3511
        # this is a stopgap which asks the instance to check the event logs to find out step skipping
        step_output_handles_with_output = set()
        for event_record in step_context.instance.all_logs(
                step_context.run_id):
            if event_record.dagster_event and event_record.dagster_event.is_successful_output:
                step_output_handles_with_output.add(
                    event_record.dagster_event.event_specific_data.
                    step_output_handle)
        return set(self.step_output_handle_dependencies).difference(
            step_output_handles_with_output)

    def load_input_object(self, step_context):
        from dagster.core.events import DagsterEvent

        values = []

        # some upstream steps may have skipped and we allow fan-in to continue in their absence
        source_handles_to_skip = self._step_output_handles_no_output(
            step_context)

        for inner_source in self.sources:
            if (inner_source.step_output_handle_dependencies and
                    inner_source.step_output_handle in source_handles_to_skip):
                continue

            for event_or_input_value in ensure_gen(
                    inner_source.load_input_object(step_context)):
                if isinstance(event_or_input_value, DagsterEvent):
                    yield event_or_input_value
                else:
                    values.append(event_or_input_value)

        yield values

    def required_resource_keys(self,
                               pipeline_def: PipelineDefinition) -> Set[str]:
        resource_keys: Set[str] = set()
        for source in self.sources:
            resource_keys = resource_keys.union(
                source.required_resource_keys(pipeline_def))
        return resource_keys

    def compute_version(self, step_versions, pipeline_def,
                        environment_config) -> Optional[str]:
        return join_and_hash(*[
            inner_source.compute_version(step_versions, pipeline_def,
                                         environment_config)
            for inner_source in self.sources
        ])

    def get_asset_lineage(
            self, step_context: "SystemStepExecutionContext"
    ) -> List[AssetLineageInfo]:
        return [
            relation for source in self.sources
            for relation in source.get_asset_lineage(step_context)
        ]
Example #20
from collections import deque
from concurrent.futures import ThreadPoolExecutor
from typing import List, Tuple, Optional, Sequence, Union, Dict, Deque, NamedTuple, Iterator, Set

import uvloop

from hivemind.client import RemoteExpert
from hivemind.dht.node import DHTNode, DHTID, DHTExpiration
from hivemind.dht.routing import get_dht_time, DHTValue
from hivemind.dht.storage import ValueWithExpiration
from hivemind.utils import MPFuture, Endpoint, get_logger

logger = get_logger(__name__)

ExpertUID, ExpertPrefix, Coordinate, Score = str, str, int, float
UidEndpoint = NamedTuple("UidEndpoint", [('uid', ExpertUID),
                                         ('endpoint', Endpoint)])
UID_DELIMITER = '.'  # when declaring experts, DHT store all prefixes of that expert's uid, split over this prefix
FLAT_EXPERT = -1  # grid prefix reserved for storing 1d expert uids. Used to speed up find_best_experts in 1d case.
UID_PATTERN = re.compile('^(([^.])+)([.](?:[0]|([1-9]([0-9]*))))+$'
                         )  # e.g. ffn_expert.98.76.54 - prefix + some dims
PREFIX_PATTERN = re.compile('^(([^.])+)([.](?:[0]|([1-9]([0-9]*))))*[.]$'
                            )  # e.g. expert. or ffn.45. (ends with ".")
#  formally, prefixes = {uid.split(UID_DELIMITER)[:length] for length in range(1, uid.count(UID_DELIMITER) + 2)}


def is_valid_uid(maybe_uid: str) -> bool:
    return bool(UID_PATTERN.fullmatch(maybe_uid))


def is_valid_prefix(maybe_prefix: str) -> bool:
    return bool(PREFIX_PATTERN.fullmatch(maybe_prefix))
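
A few checks that follow directly from the patterns above: a UID is a prefix plus at least one dot-separated dimension and must not end with the delimiter, whereas a prefix must end with it.

assert is_valid_uid("ffn_expert.98.76.54")
assert not is_valid_uid("ffn_expert.98.")   # trailing delimiter -> not a UID
assert is_valid_prefix("ffn.45.")           # ends with "." -> valid prefix
assert not is_valid_prefix("ffn.45")
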
Example #21
class FromUnresolvedStepOutput(
        NamedTuple(
            "_FromUnresolvedStepOutput",
            [
                ("unresolved_step_output_handle", UnresolvedStepOutputHandle),
                ("solid_handle", SolidHandle),
                ("input_name", str),
            ],
        ), ):
    """
    This step input source models being downstream of another unresolved step,
    for example indirectly downstream from a step with dynamic output.
    """
    def __new__(
        cls,
        unresolved_step_output_handle: UnresolvedStepOutputHandle,
        solid_handle: SolidHandle,
        input_name: str,
    ):
        return super(FromUnresolvedStepOutput, cls).__new__(
            cls,
            unresolved_step_output_handle=check.inst_param(
                unresolved_step_output_handle,
                "unresolved_step_output_handle",
                UnresolvedStepOutputHandle,
            ),
            solid_handle=check.inst_param(solid_handle, "solid_handle",
                                          SolidHandle),
            input_name=check.str_param(input_name, "input_name"),
        )

    @property
    def resolved_by_step_key(self) -> str:
        return self.unresolved_step_output_handle.resolved_by_step_key

    @property
    def resolved_by_output_name(self) -> str:
        return self.unresolved_step_output_handle.resolved_by_output_name

    def resolve(self, mapping_key: str) -> FromStepOutput:
        check.str_param(mapping_key, "mapping_key")
        return FromStepOutput(
            step_output_handle=self.unresolved_step_output_handle.resolve(
                mapping_key),
            solid_handle=self.solid_handle,
            input_name=self.input_name,
            fan_in=False,
        )

    def get_step_output_handle_dep_with_placeholder(self) -> StepOutputHandle:
        return self.unresolved_step_output_handle.get_step_output_handle_with_placeholder(
        )

    def required_resource_keys(self,
                               _pipeline_def: PipelineDefinition) -> Set[str]:
        return set()

    def get_input_def(self,
                      pipeline_def: PipelineDefinition) -> InputDefinition:
        return pipeline_def.get_solid(self.solid_handle).input_def_named(
            self.input_name)
Example #22
from plenum.common.looper import Looper
from plenum.common.request import Request
from plenum.common.txn import REPLY, REQACK, TXN_ID, REQNACK
from plenum.common.types import OP_FIELD_NAME, \
    Reply, f, PrePrepare
from plenum.common.util import getMaxFailures, \
    checkIfMoreThanFSameItems, checkPortAvailable
from plenum.server.node import Node
from plenum.test.msgs import randomMsg
from plenum.test.spy_helpers import getLastClientReqReceivedForNode, getAllArgs, \
    getAllReturnVals
from plenum.test.test_client import TestClient, genTestClient
from plenum.test.test_node import TestNode, TestReplica, TestNodeSet, \
    checkPoolReady, checkNodesConnected, ensureElectionsDone, NodeRef

DelayRef = NamedTuple("DelayRef", [("op", Optional[str]),
                                   ("frm", Optional[str])])

RaetDelay = NamedTuple("RaetDelay", [("tk", Optional[TrnsKind]),
                                     ("pk", Optional[PcktKind]),
                                     ("fromPort", Optional[int])])

logger = getlogger()

# noinspection PyUnresolvedReferences


def ordinal(n):
    return "%d%s" % (n, "tsnrhtdd"[(n / 10 % 10 != 1) *
                                   (n % 10 < 4) * n % 10::4])

Example #23
        self.__dict__.update(state)
        self._compiled = compile(self._code, repr(self), "eval")

    ##  Expose a custom function to the code executed by SettingFunction
    #
    #   \param name What identifier to use in the executed code.
    #   \param operator A callable that implements the actual logic to execute.
    @classmethod
    def registerOperator(cls, name: str, operator: Callable) -> None:
        cls.__operators[name] = operator
        _SettingExpressionVisitor._knownNames.add(name)

    __operators = {"debug": _debug_value}


_VisitResult = NamedTuple("_VisitResult", [("values", Set[str]),
                                           ("keys", Set[str])])


# Helper class used to analyze a parsed function.
#
# It walks a Python AST generated from a Python expression. It will analyze the AST and
# produce two sets, one set of "used keys" and one set of "used values". "used keys" are
# setting keys (strings) that are used by the expression, whereas "used values" are
# actual variable references that are needed for the function to be executed.
class _SettingExpressionVisitor(ast.NodeVisitor):
    def __init__(self) -> None:
        super().__init__()
        self.values = set()  # type: Set[str]
        self.keys = set()  # type: Set[str]

    def visit(self, node: ast.AST) -> _VisitResult:
Example #24
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from typing import NamedTuple
from lte.protos.pipelined_pb2 import ActivateFlowsRequest, \
            DeactivateFlowsRequest

FARRuleEntry = NamedTuple('FARRuleEntry', [('apply_action', int),
                                           ('o_teid', int),
                                           ('gnb_ip_addr', str)])

PDRRuleEntry = NamedTuple('PDRRuleEntry',
                          [('pdr_id', int), ('pdr_version', int),
                           ('pdr_state', int), ('precedence', int),
                           ('local_f_teid', int), ('ue_ip_addr', str),
                           ('del_qos_enforce_rule', DeactivateFlowsRequest),
                           ('add_qos_enforce_rule', ActivateFlowsRequest),
                           ('far_action', FARRuleEntry)])


# Create the NamedTuple for the FAR entry
def far_create_rule_entry(far_entry) -> FARRuleEntry:
    o_teid = 0
    fwd_gnb_ip_addr = None
Example #25
    # Write to File
    with open(filePath, "w+") as f:
        f.writelines(["{}\n".format(int(val)) for val in signal])


#############################################################
# Data Generation
#############################################################
inFmt = PsiFixFmt(1, 0, 16)
outFmt = PsiFixFmt(1, 0, 17)

np.random.seed(0)
t = np.linspace(0, TEND, SAMPLES)

CfgInfo = NamedTuple("CfgInfo", [("order", int), ("ratio", int),
                                 ("diffDel", int), ("gainCorr", bool)])
configs = []
configs.append(CfgInfo(order=3, ratio=10, diffDel=1, gainCorr=True))
configs.append(CfgInfo(order=4, ratio=9, diffDel=2, gainCorr=True))
configs.append(CfgInfo(order=4, ratio=6, diffDel=2, gainCorr=False))

inSig = []
outSig = []
for cfg in configs:
    inSig0 = sps.chirp(t, 0, TEND, FREQ_SAMPLE / cfg.ratio)
    inSig0 = PsiFixFromReal(inSig0, inFmt, errSat=False)
    inSig1 = np.zeros_like(inSig0)
    inSig1[10] = PsiFixFromReal(0.5, inFmt, errSat=False)
    inSig2 = np.ones_like(inSig0)
    inSig2 = PsiFixFromReal(inSig2, inFmt, errSat=False)
    model = psi_fix_cic_dec(cfg.order, cfg.ratio, cfg.diffDel, inFmt, outFmt,
Example #26
File: data.py  Project: Stoick01/bluebird
==============

Designed to feed the network data for training.

Iterators return the Batch datatype, which is a named tuple with inputs and targets tensors:

``Batch = NamedTuple("Batch", [("inputs", Tensor), ("targets", Tensor)])``
"""

from typing import Iterator, NamedTuple

import numpy as np

from .tensor import Tensor

Batch = NamedTuple("Batch", [("inputs", Tensor), ("targets", Tensor)])


class DataIterator:
    """
    Base class that every data iterator inherits.

    Example::

        class CustomDataIterator(DataIterator):
            def __call__(self, inputs: Tensor, targets: Tensor) -> Iterator[Batch]:
                for inp, targ in zip(inputs, targets):
                    yield Batch(inp, targ)
    """
    def __call__(self, inputs: Tensor, targets: Tensor) -> Iterator[Batch]:
        raise NotImplementedError
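
A minimal concrete iterator, sketched here for illustration (not part of the original module), that slices inputs and targets into fixed-size batches:

class SimpleBatchIterator(DataIterator):
    """Yields consecutive fixed-size Batch slices."""
    def __init__(self, batch_size: int = 32) -> None:
        self.batch_size = batch_size

    def __call__(self, inputs: Tensor, targets: Tensor) -> Iterator[Batch]:
        for start in range(0, len(inputs), self.batch_size):
            end = start + self.batch_size
            yield Batch(inputs[start:end], targets[start:end])
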
Example #27
from typing import NamedTuple
from collections import namedtuple

user = namedtuple('User',
                  'name age code')  # a namedtuple can be accessed by indices
gabriel = user('Gabriel', 18, 123333)


class User(NamedTuple):
    """Classe que extende de namedtuple"""
    name: str
    age: int
    code: int


User2 = NamedTuple('User2', [('name', str), ('age', int), ('code', int)])
c = User2('example', 22, 233445)

jh = ('jhon', 680, 1111)

print(c)
print(gabriel)
print(jh[0], jh[1], jh[2])
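
All three flavors build real tuple subclasses, so positional access, attribute access, and _asdict() behave the same on each; a quick check:

assert gabriel[0] == gabriel.name == 'Gabriel'
assert dict(c._asdict()) == {'name': 'example', 'age': 22, 'code': 233445}
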
Example #28
Loading resources can be a source of errors, so we use typing and NamedTuple to facilitate development and testing.
"""
import logging
import os
from typing import List, NamedTuple, Set, Tuple

from util.exception import InputFormatException

LOG = logging.getLogger(__name__)

CoordType = Tuple[int, int]
MovesType = List[str]
WallsType = Set[CoordType]

Input = NamedTuple("Input", [("board_dimension", CoordType),
                             ("initial_position", CoordType),
                             ("movements", MovesType), ("walls", WallsType)])
Resources = NamedTuple("Resources", [("input_data", Input)])
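
A hedged construction sketch (board size, moves, and walls invented) showing the shape load_input below is expected to return:

example_input = Input(
    board_dimension=(4, 4),
    initial_position=(0, 0),
    movements=['N', 'E', 'E', 'S'],  # only S/E/N/W pass validation below
    walls={(1, 1), (2, 3)},
)
resources = Resources(input_data=example_input)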


def load_input(filename) -> Input:
    """
    Load a file relative to the root folder
    :return: a NamedTuple input_data of type Input
    """
    file = os.path.join(os.path.dirname(__file__), "..", filename)

    def process_moves(line: str, line_number: int) -> MovesType:
        try:
            moves = list(line.strip())
            if any(p not in ['S', 'E', 'N', 'W'] for p in moves):
Example #29
import itertools
import operator

from typing import Dict, List, Optional, NamedTuple, Text

import apache_beam as beam
from tensorflow_model_analysis import config
from tensorflow_model_analysis.metrics import metric_types
from tensorflow_model_analysis.metrics import metric_util

CALIBRATION_HISTOGRAM_NAME = '_calibration_histogram'

DEFAULT_NUM_BUCKETS = 10000

Bucket = NamedTuple('Bucket', [('bucket_id', int), ('weighted_labels', float),
                               ('weighted_predictions', float),
                               ('weighted_examples', float)])

Histogram = List[Bucket]


def calibration_histogram(
    num_buckets: Optional[int] = None,
    left: Optional[float] = None,
    right: Optional[float] = None,
    name: Optional[Text] = None,
    eval_config: Optional[config.EvalConfig] = None,
    model_name: Text = '',
    output_name: Text = '',
    sub_key: Optional[metric_types.SubKey] = None,
    aggregation_type: Optional[metric_types.AggregationType] = None,
Example #30
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from __future__ import annotations
from typing import NamedTuple

__author__: str = 'Michael Sasser'
__email__: str = '*****@*****.**'

__all__ = ['get_prefix', 'Prefixed']

Prefixed = NamedTuple('Prefixed', [('value', float), ('prefix', str),
                                   ('divisor', float)])


def get_prefix(value: float) -> Prefixed:
    prefixes = {
        -24: 'y',  # yocto
        -21: 'z',  # zepto
        -18: 'a',  # atto
        -15: 'f',  # femto
        -12: 'p',  # pico
        -9: 'n',  # nano
        -6: 'µ',  # micro
        -3: 'm',  # milli
        # -2 : 'c',  # centi (Not used in electronics)
        # -1 : 'd',  # deci (Not used in electronics)
        0: '',  # None