Example #1
from typing import NamedTuple, Tuple, Dict, Union, List

import logging
import sys
import numpy as np

log = logging.getLogger('sampleOutput')
epsilon = sys.float_info.epsilon

ComparisonResult = NamedTuple('ComparisonResult', [('tp', int), ('fp', int),
                                                   ('tn', int), ('fn', int)])
ValidationResult = NamedTuple('ValidationResult', [('precision', float),
                                                   ('recall', float),
                                                   ('f_measure', float)])


def precision(tp: int, fp: int) -> float:
    if (tp + fp) == 0:
        return 0
    else:
        return tp / (tp + fp)


def recall(tp: int, fn: int) -> float:
    if (tp + fn) == 0:
        return 0
    else:
        return tp / (tp + fn)


def f_measure(p: float, r: float) -> float:
    # F1 score: harmonic mean of precision and recall; the epsilon defined
    # above keeps the division safe when both inputs are zero.
    return 2 * p * r / (p + r + epsilon)
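
Tying the pieces together, a short usage sketch with hypothetical counts:

counts = ComparisonResult(tp=8, fp=2, tn=5, fn=1)
p = precision(counts.tp, counts.fp)  # 0.8
r = recall(counts.tp, counts.fn)     # 8/9
result = ValidationResult(p, r, f_measure(p, r))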
Example #2
File: types.py Project: Meterius/rfb-mc
from fractions import Fraction
from typing import NamedTuple

ProbabilisticInterval = NamedTuple("ProbabilisticInterval", [
    ("lower_bound", int),
    ("upper_bound", int),
    ("confidence", Fraction),
])
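
A small usage sketch; storing the confidence as a Fraction keeps the probability exact:

interval = ProbabilisticInterval(
    lower_bound=128,
    upper_bound=256,
    confidence=Fraction(99, 100),  # true value lies in [128, 256] with probability >= 99/100
)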
Example #3
from dataclasses import dataclass
from typing import NamedTuple

# Ref, Expr and TMap come from elsewhere in this project's source.


@dataclass(frozen=True)
class Let:
    """ Here: Let is more a syntactic sugar for `Ap (Lam ref body) expr` """
    ref: Ref
    expr: Expr
    body: Expr


@dataclass(frozen=True)
class Lam:
    name: str  # Pattern
    body: Expr  # May refer to `Ref(name)`


MethodName = NamedTuple('MethodName', [('val', str)])


@dataclass(frozen=True)
class Intrin:
    name: MethodName
    args: TMap[str, Expr]


Mem = TMap[Ref, Expr]

#  ___ _____
# |_ _| ____|_  ___ __  _ __
#  | ||  _| \ \/ / '_ \| '__|
#  | || |___ >  <| |_) | |
# |___|_____/_/\_\ .__/|_|
Example #4
from typing import (Any, Dict, Iterable, List, Mapping, NamedTuple, Optional,
                    Sequence, Set, Tuple, Union)

import numpy
from numpy import count_nonzero
from scipy.stats import fisher_exact
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.ensemble import RandomForestClassifier
from sklearn.exceptions import NotFittedError
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.tree import _tree as Tree, DecisionTreeClassifier

from lookout.core.ports import Type

RuleAttribute = NamedTuple("RuleAttribute", (("feature", int), ("cmp", bool),
                                             ("threshold", float)))
"""
`feature` is the feature taken for comparison
`cmp` is the comparison type: True is "x > v", False is "x <= v"
`threshold` is "v", the threshold value
"""

RuleStats = NamedTuple("RuleStats", (("cls", int), ("conf", float)))
"""
`cls` is the predicted class
`conf` is the rule confidence \\in [0, 1], "1" means super confident
"""

Rule = NamedTuple("RuleType",
                  (("attrs", Tuple[RuleAttribute, ...]), ("stats", RuleStats)))
Example #5
import os
import sys
from typing import NamedTuple, Text

from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.select import Select

OMEGAUP_ROOT = os.path.normpath(os.path.join(__file__, '../../../..'))

PATH_WHITELIST = ('/api/grader/status/', '/js/error_handler.js')
MESSAGE_WHITELIST = ('http://staticxx.facebook.com/', '/api/grader/status/')

# This contains all the Python path-hacking to a single file instead of
# spreading it throughout all the files.
sys.path.append(os.path.join(OMEGAUP_ROOT, 'stuff'))
# pylint: disable=wrong-import-position,unused-import
import database_utils  # NOQA

Identity = NamedTuple('Identity', [('username', Text), ('password', Text)])


class StatusBarIsDismissed:
    """A class that can wait for the status bar to be dismissed."""
    def __init__(self, status_element, message_class, already_opened=False):
        self.status_element = status_element
        self.counter = int(
            self.status_element.get_attribute('data-counter') or '0')
        self.clicked = False
        self.message_class = message_class
        self.already_opened = already_opened

    def _click_button(self):
        if self.clicked:
            return
Example #6
from typing import NamedTuple, Tuple, Union

import tensorflow as tf

from neuralmonkey.decorators import tensor
from neuralmonkey.model.sequence import (EmbeddedSequence,
                                         EmbeddedFactorSequence)

# NematusGRUCell and OrthoGRUCell are neuralmonkey's custom RNN cells; their
# imports fall outside this excerpt.

RNN_CELL_TYPES = {
    "NematusGRU": NematusGRUCell,
    "GRU": OrthoGRUCell,
    "LSTM": tf.nn.rnn_cell.LSTMCell
}

RNN_DIRECTIONS = ["forward", "backward", "bidirectional"]

# pylint: disable=invalid-name
RNNCellTuple = Tuple[tf.nn.rnn_cell.RNNCell, tf.nn.rnn_cell.RNNCell]

RNNSpec = NamedTuple("RNNSpec", [("size", int), ("direction", str),
                                 ("cell_type", str)])

RNNSpecTuple = Union[Tuple[int], Tuple[int, str], Tuple[int, str, str]]
# pylint: enable=invalid-name


def _make_rnn_spec(size: int,
                   direction: str = "bidirectional",
                   cell_type: str = "GRU") -> RNNSpec:
    if size <= 0:
        raise ValueError(
            "RNN size must be a positive integer. {} given.".format(size))

    if direction not in RNN_DIRECTIONS:
        raise ValueError("RNN direction must be one of {}. {} given.".format(
            str(RNN_DIRECTIONS), direction))

    if cell_type not in RNN_CELL_TYPES:
        raise ValueError("RNN cell type must be one of {}. {} given.".format(
            str(list(RNN_CELL_TYPES)), cell_type))

    return RNNSpec(size, direction, cell_type)
Example #7

int_neg_op = int_unary_op('-', 'CPyTagged_Negate')
int_invert_op = int_unary_op('~', 'CPyTagged_Invert')

# integer comparison operation implementation related:

# Description for building int logical ops
# For each field:
# binary_op_variant: identify which BinaryIntOp to use when operands are short integers
# c_func_description: the C function to call when operands are tagged integers
# c_func_negated: whether to negate the C function call's result
# c_func_swap_operands: whether to swap lhs and rhs when calling the function
IntLogicalOpDescrption = NamedTuple(
    'IntLogicalOpDescrption', [('binary_op_variant', int),
                               ('c_func_description', CFunctionDescription),
                               ('c_func_negated', bool),
                               ('c_func_swap_operands', bool)])

# description for equal operation on two boxed tagged integers
int_equal_ = c_custom_op(arg_types=[int_rprimitive, int_rprimitive],
                         return_type=bit_rprimitive,
                         c_function_name='CPyTagged_IsEq_',
                         error_kind=ERR_NEVER)

int_less_than_ = c_custom_op(arg_types=[int_rprimitive, int_rprimitive],
                             return_type=bit_rprimitive,
                             c_function_name='CPyTagged_IsLt_',
                             error_kind=ERR_NEVER)

# Mapping from textual op to the short int op variant and the boxed int
# function description:
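
The mapping described above could then look like this sketch (BinaryIntOp.EQ and BinaryIntOp.SLT are assumed variant constants, not shown in this excerpt; only two entries given):

int_comparison_op_mapping = {
    # (variant for short ints, C function for tagged ints, negate?, swap operands?)
    '==': IntLogicalOpDescrption(BinaryIntOp.EQ, int_equal_, False, False),
    '<': IntLogicalOpDescrption(BinaryIntOp.SLT, int_less_than_, False, False),
}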
Example #8
        stage_name='EvaluateMetricsAndPlots',
        run_after=run_after,
        ptransform=_EvaluateMetricsPlotsAndValidations(
            eval_config=eval_config,
            eval_shared_models=eval_shared_models,
            metrics_key=metrics_key,
            plots_key=plots_key,
            attributions_key=attributions_key,
            schema=schema,
            random_seed_for_testing=random_seed_for_testing))


MetricComputations = NamedTuple('MetricComputations', [
    ('non_derived_computations', List[metric_types.MetricComputation]),
    ('derived_computations', List[metric_types.DerivedMetricComputation]),
    ('cross_slice_computations',
     List[metric_types.CrossSliceMetricComputation]),
    ('ci_derived_computations', List[metric_types.CIDerivedMetricComputation])
])


def _filter_and_separate_computations(
        computations: metric_types.MetricComputations) -> MetricComputations:
    """Filters duplicate computations and separates non-derived and derived.

  All metrics are based on either direct computations using combiners or are
  based on the results of one or more other computations. This code separates
  the three types of computations so that only the combiner based computations
  are passed to the main combiner call and the remainder are processed after
  those combiners have run. Filtering is required because
  DerivedMetricComputations and CrossSliceMetricComputations typically include
Example #9
File: config.py Project: Arkham32/cmpt395
if PY3:
    CONFIG_SYNTAX_ERROR += "\nDid you change the syntax from 2.x to 3.x?"
CONFIG_ERROR = "There is a programmable error in your configuration file:\n\n%s"
CONFIG_EXIT_ERROR = "The configuration file (or one of the modules it imports) " \
                    "called sys.exit()"
CONFIG_ENUM_WARNING = "The config value `{name}` has to be one of {candidates}, " \
                      "but `{current}` is given."
CONFIG_PERMITTED_TYPE_WARNING = "The config value `{name}' has type `{current.__name__}', " \
                                "expected to be {permitted}."
CONFIG_TYPE_WARNING = "The config value `{name}' has type `{current.__name__}', " \
                      "defaults to `{default.__name__}'."

if PY3:
    unicode = str  # special alias for static typing...

ConfigValue = NamedTuple('ConfigValue', [('name', str), ('value', Any),
                                         ('rebuild', Union[bool, unicode])])


class ENUM(object):
    """represents the config value should be a one of candidates.

    Example:
        app.add_config_value('latex_show_urls', 'no', None, ENUM('no', 'footnote', 'inline'))
    """
    def __init__(self, *candidates):
        # type: (unicode) -> None
        self.candidates = candidates

    def match(self, value):
        # type: (Union[unicode,List,Tuple]) -> bool
        if isinstance(value, (list, tuple)):
Example #10
File: pipeline.py Project: jfer2pi/kedro
from pathlib import Path
from typing import NamedTuple

import click

_SETUP_PY_TEMPLATE = """# -*- coding: utf-8 -*-
from setuptools import setup, find_packages

setup(
    name="{name}",
    version="{version}",
    description="Modular pipeline `{name}`",
    packages=find_packages(),
    include_package_data=True,
    package_data={package_data},
)
"""

PipelineArtifacts = NamedTuple(
    "PipelineArtifacts",
    [("pipeline_dir", Path), ("pipeline_tests", Path), ("pipeline_conf", Path)],
)


def _check_pipeline_name(ctx, param, value):  # pylint: disable=unused-argument
    if value:
        _assert_pkg_name_ok(value)
    return value


@click.group()
def pipeline():
    """Commands for working with pipelines."""


@command_with_verbosity(pipeline, "create")
Example #11

from typing import Dict, Iterable, NamedTuple

from keras.initializers import *
from keras.activations import *
from keras.layers import Dense
from keras.models import Sequential
import itertools
from data import Data

EPOCHS: int = 10
BATCH_SIZE: int = 512

denseLayerConfig = {
    'activation': ('relu', 'selu', 'tanh'),
    'kernel_initializer': ('he_normal', 'RandomNormal', 'RandomUniform')
}
LayerConfig = NamedTuple('LayerConfig',
                         [(field_name, str)
                          for field_name in denseLayerConfig.keys()])


def all_combos(in_dict: Dict[object, Iterable]):
    '''
    Yields one dict per combination: each key of `in_dict` is kept, but its
    value is a single element drawn from the iterable it originally mapped to.
    :param in_dict: mapping from keys to iterables of candidate values
    :return: generator of dicts, one per combination
    '''
    # itertools.product enumerates the Cartesian product of the value pools
    for combo in itertools.product(*in_dict.values()):
        yield dict(zip(in_dict.keys(), combo))

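For instance, the grid above expands to nine layer configurations (three activations times three initializers):

layer_configs = [LayerConfig(**combo) for combo in all_combos(denseLayerConfig)]
assert len(layer_configs) == 9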

def exhaustive_interpolate_vectors(a: float, b: float,
Example #12
"""
We'll feed inputs into our network in batches.
So here are some tools for iterating over data in batches.
"""

from typing import Iterator, NamedTuple

import numpy as np

from joelnet.tensor import Tensor

Batch = NamedTuple("Batch", [("inputs", Tensor), ("targets", Tensor)])


class DataIterator:
    def __call__(self, inputs: Tensor, targets: Tensor) -> Iterator:
        raise NotImplementedError


class BatchIterator(DataIterator):
    def __init__(self, batch_size: int = 32, shuffle: bool = True) -> None:
        self.batch_size = batch_size
        self.shuffle = shuffle

    def __call__(self, inputs: Tensor, targets: Tensor) -> Iterator:
        starts = np.arange(0, len(inputs), self.batch_size)
        if self.shuffle:
            np.random.shuffle(starts)

        for start in starts:
            end = start + self.batch_size
            # slice one batch and wrap it in the Batch named tuple
            batch_inputs = inputs[start:end]
            batch_targets = targets[start:end]
            yield Batch(batch_inputs, batch_targets)
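
A quick usage sketch with toy arrays (joelnet's Tensor is an alias for numpy's ndarray):

inputs = np.arange(10).reshape(10, 1)
targets = np.arange(10).reshape(10, 1)
for batch in BatchIterator(batch_size=4, shuffle=False)(inputs, targets):
    print(batch.inputs.shape, batch.targets.shape)  # at most 4 rows per batch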
Example #13
from typing import Any, Dict, List, NamedTuple, Optional

from recidiviz.common.constants.state.state_supervision_violation import (
    StateSupervisionViolationType, )
from recidiviz.common.constants.state.state_supervision_violation_response import (
    StateSupervisionViolationResponseRevocationType, )
from recidiviz.persistence.entity.state.entities import (
    StateIncarcerationPeriod,
    StateSupervisionPeriod,
    StateSupervisionViolationResponse,
)

RevocationDetails = NamedTuple(
    "RevocationDetails",
    [
        ("revocation_type",
         Optional[StateSupervisionViolationResponseRevocationType]),
        ("revocation_type_subtype", Optional[str]),
        ("source_violation_type", Optional[StateSupervisionViolationType]),
        ("supervising_officer_external_id", Optional[str]),
        ("level_1_supervision_location_external_id", Optional[str]),
        ("level_2_supervision_location_external_id", Optional[str]),
    ],
)


def get_revocation_details(
    incarceration_period: StateIncarcerationPeriod,
    supervision_period: Optional[StateSupervisionPeriod],
    violation_responses: List[StateSupervisionViolationResponse],
    supervision_period_to_agent_associations: Optional[Dict[int, Dict[Any,
                                                                      Any]]],
) -> RevocationDetails:
    """Identifies the attributes of the revocation return from the supervision period that was revoked, if available,
Example #14
>>> curl 'localhost:8000/atom?f=https%3A%2F%2Famericancynic.net%2Fshaarli%2F%3Fdo%3Datom&f=https%3A%2F%2Fhnrss.org%2Fnewest&n=1'


Interface
---------
"""
from feedmixer import FeedMixer
from shelfcache import ShelfCache
import falcon
from typing import NamedTuple, List
import json
import urllib
import urllib.parse

ParsedQS = NamedTuple('ParsedQS', [('f', List[str]),
                                   ('n', int),
                                   ('full', bool)])


def parse_qs(req: falcon.Request) -> ParsedQS:
    """
    Get `feeds`, `num_keep`, and `full` from the request query string.

    :param req: the Falcon request from which to parse the query string.
    """
    qs = falcon.uri.parse_query_string(req.query_string)
    feeds = qs.get('f', [])
    n = qs.get('n', 0)
    full = qs.get('full', False)
    if not isinstance(feeds, list): feeds = [feeds] # NOQA
    return ParsedQS(feeds, int(n), bool(full))
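
For the curl request quoted in the module docstring above, the parsed result would be:

ParsedQS(f=['https://americancynic.net/shaarli/?do=atom',
            'https://hnrss.org/newest'],
         n=1, full=False)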
Example #15
from typing import Dict, List, NamedTuple, Tuple

from mypy.nodes import (
    FuncDef,
    FuncBase,
    ARG_STAR,
    ARG_STAR2,
    ARG_NAMED,
    ARG_NAMED_OPT,
)
from mypy.stubgenc import parse_all_signatures, find_unique_signatures, generate_stub_for_c_module
from mypy.stubutil import is_c_module, write_header
from mypy.options import Options as MypyOptions

Options = NamedTuple('Options', [
    ('pyversion', Tuple[int, int]),
    ('no_import', bool),
    ('doc_dir', str),
    ('search_path', List[str]),
    ('interpreter', str),
    ('modules', List[str]),
    ('ignore_errors', bool),
    ('recursive', bool),
])


class CantImport(Exception):
    pass


def generate_stub_for_module(module: str,
                             output_dir: str,
                             quiet: bool = False,
                             add_header: bool = False,
                             sigs: Dict[str, str] = {},
Example #16
import logging
from typing import NamedTuple, Optional, Set

from pyramids.config import ModelConfig
from pyramids.loader import ModelLoader
from pyramids.model import Model
from pyramids.plugin import Plugin
from pyramids.tokenization import Tokenizer

__all__ = [
    'PLUGIN_ENTRY_POINT',
    'get_available_tokenizers',
    'get_available_models',
    'get_tokenizer',
    'get_model_loader',
    'load_model',
]

PluginEntry = NamedTuple('PluginEntry', [('provider', str), ('name', str)])

PLUGIN_ENTRY_POINT = 'pyramids.plugins'

LOGGER = logging.getLogger(__name__)


def get_available_tokenizers(language_name: Optional[str] = None,
                             iso639_1: Optional[str] = None,
                             iso639_2: Optional[str] = None) -> Set[PluginEntry]:
    results = set()
    for plugin_name in _PLUGINS:
        plugin: Plugin = _PLUGINS[plugin_name]
        for name, language in plugin.provided_tokenizer_types.items():
            if language_name is not None and language.name != language_name:
                continue
Example #17
from typing import NamedTuple

HttpMethods = NamedTuple('HttpMethods', [
    ('GET', str),
    ('HEAD', str),
    ('POST', str),
    ('PUT', str),
    ('DELETE', str),
    ('CONNECT', str),
    ('OPTIONS', str),
    ('TRACE', str),
    ('PATCH', str),
])

httpMethods = HttpMethods(
    'GET',
    'HEAD',
    'POST',
    'PUT',
    'DELETE',
    'CONNECT',
    'OPTIONS',
    'TRACE',
    'PATCH',
)
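
Usage is plain attribute access; each field simply holds its own name as a string:

assert httpMethods.GET == 'GET'
assert 'PATCH' in httpMethods  # tuple membership also works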
Example #18
File: controller.py Project: divelab/GPT
#   add_head: Function which takes an embedding layer of shape [batch, row,
#     column, depth] and returns an output head of shape [batch, row, column,
#     num_classes].
#   shuffle: Whether to shuffle the batch.
#   num_classes: Number of pixel classes to predict.
#   loss: The loss function to use.
#   is_train: Whether we're training this graph.
# pylint: disable=invalid-name
GetInputTargetAndPredictedParameters = NamedTuple(
    'GetInputTargetAndPredictedParameters', [
        ('dp', data_provider.DataParameters),
        ('ap', Optional[augment.AugmentParameters]),
        ('extract_patch_size', int),
        ('stride', int),
        ('stitch_patch_size', int),
        ('bp', Optional[util.BatchParameters]),
        ('core_model', Callable),
        ('add_head', Callable),
        ('shuffle', bool),
        ('num_classes', int),
        ('loss', Callable),
        ('is_train', bool),
    ])
# pylint: enable=invalid-name


@tensorcheck.well_defined()
def model(core_model: Callable,
          add_head: Callable,
          pp: model_util.PredictionParameters,
          is_train: bool,
Example #19
from typing import List, NamedTuple, Optional

from UM.Message import Message
from UM.i18n import i18nCatalog

from cura.CuraApplication import CuraApplication
from cura.LayerDataBuilder import LayerDataBuilder
from cura.LayerDataDecorator import LayerDataDecorator
from cura.LayerPolygon import LayerPolygon
from cura.Scene.CuraSceneNode import CuraSceneNode
from cura.Scene.GCodeListDecorator import GCodeListDecorator
from cura.Settings.ExtruderManager import ExtruderManager

catalog = i18nCatalog("cura")

PositionOptional = NamedTuple("PositionOptional", [("x", Optional[float]),
                                                   ("y", Optional[float]),
                                                   ("z", Optional[float]),
                                                   ("f", Optional[float]),
                                                   ("e", Optional[float])])
Position = NamedTuple("Position", [("x", float), ("y", float), ("z", float),
                                   ("f", float), ("e", List[float])])


class FlavorParser:
    """This parser is intended to interpret the common firmware codes among all the different flavors"""

    MAX_EXTRUDER_COUNT = 16

    def __init__(self) -> None:
        CuraApplication.getInstance().hideMessageSignal.connect(
            self._onHideMessage)
        self._cancelled = False
Example #20
    def __init__(self, sensors_names):
        # Build NamedTuple types dynamically: one field per (name, sensor) pair.
        self.SensorTuple = NamedTuple(
            'SensorTuple', [(name, Sensor) for name, _ in sensors_names])
        self.SensorReadingTuple = NamedTuple(
            'SensorReadingTuple', [(name, float) for name, _ in sensors_names])

        self.sensors = self.SensorTuple(*[s for _, s in sensors_names])
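
A sketch of how this dynamic-NamedTuple pattern might be exercised (the enclosing class and the sensor objects are hypothetical):

# assuming an instance built with sensors_names = [('temp', t), ('humidity', h)]
readings = obj.SensorReadingTuple(21.5, 0.44)  # obj: instance of the enclosing class
readings.temp          # -> 21.5; each field is named after its sensor
obj.sensors.humidity   # -> h, the sensor object registered under that name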
Example #21
from typing import NamedTuple

_Config = NamedTuple(
    '_Config',
    [
        # The OAuth client id string, such as '<USERNAME>@AMER.OAUTHAP'.
        ('client_id', str),

        # A redirect URI, such as 'https://127.0.0.1:8444'. Note that this must
        # match that with which you created your app in the Ameritrade API
        # interface.
        ('redirect_uri', str),

        # The location of the key PEM file.
        ('key_file', str),

        # The location of the certificate PEM file.
        ('certificate_file', str),

        # The location of the JSON file to store the OAuth token in between
        # invocations.
        ('secrets_file', str),

        # Timeout (in seconds) to wait for OAuth token response.
        ('timeout', int),

        # Safe-mode that disallows any methods that modify state. Only allows
        # getters to read data from the account.
        ('readonly', bool),

        # Cache directory; this string value must be set to a non-null directory
        # path (created on-demand if it does not already exist), if the 'read_cache'
        # or 'write_cache' options are set to true.
        # This is intended to be used during development of scripts in order to
        # avoid hitting the API so much while iterating over code. The cache is
        # indexed by method name and set of arguments.
        ('cache_dir', str),

        # Flags that decide whether we're reading from the cache or writing
        # to/updating the cache contents (if reading, on a cache miss).
        ('read_cache', bool),
        ('write_cache', bool),

        # Authenticate or refresh secrets lazily, upon first attribute access.
        ('lazy', bool),

        # Enable debug traces.
        ('debug', bool),
    ])
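
A minimal construction sketch with placeholder values (all ids and paths illustrative):

config = _Config(
    client_id='<USERNAME>@AMER.OAUTHAP',
    redirect_uri='https://127.0.0.1:8444',
    key_file='key.pem',
    certificate_file='certificate.pem',
    secrets_file='secrets.json',
    timeout=300,
    readonly=True,
    cache_dir='',          # only needed when read_cache/write_cache are True
    read_cache=False,
    write_cache=False,
    lazy=False,
    debug=False,
)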
Example #22
File: client.py Project: sj26/uchroma
from typing import NamedTuple

from traitlets import HasTraits

from uchroma.util import ArgsDict, camel_to_snake, max_keylen

from .cmd import UChromaConsoleUtil


PYTHON_ARGCOMPLETE_OK = 1

ENTER = u'\033(0'
EXIT = u'\033(B'
CHAR_HORIZ = u'\x71'
CHAR_VERT = ENTER + u'\x78' + EXIT
CHAR_CROSS = u'\x6e'

RemoteTraits = NamedTuple('RemoteTraits', [('name', str),
                                           ('description', str),
                                           ('author', str),
                                           ('version', str),
                                           ('traits', HasTraits)])


def color_block(*values):
    output = Colr('')
    for value in values:
        col = to_color(value)
        output = output.center(9, text=col.html,
                               fore=ColorUtils.inverse(col).intTuple,
                               back=col.intTuple)
    return str(output)


class AbstractCommand(object):
Example #23
class Rules:
    CompiledNegatedRules = NamedTuple("CompiledNegatedRules",
                                      (("false", numpy.ndarray),
                                       ("true", numpy.ndarray)))
    """
    Each ndarray contains the rule indices which are **false** given
    the corresponding feature, threshold value and the comparison type ("false" and "true").
    """
    CompiledFeatureRules = NamedTuple(
        "CompiledRule", (("values", numpy.ndarray),
                         ("negated", Tuple[CompiledNegatedRules, ...])))

    CompiledRulesType = Dict[int, CompiledFeatureRules]

    _log = logging.getLogger("Rules")

    def __init__(self, rules: List[Rule], origin: Mapping[str, Any]):
        """
        Initializes the rules so that it is possible to call predict() afterwards.

        :param rules: the list of rules to assign.
        :param origin: the dictionary of parameters used to train the rules.
        """
        super().__init__()
        assert rules is not None, "rules may not be None"
        self._rules = rules
        self._compiled = self._compile(rules)
        self._origin = origin

    def __str__(self):
        return "%d rules, avg.len. %.1f" % (len(self._rules), self.avg_rule_len)

    def __len__(self):
        return len(self._rules)

    def predict(
        self,
        X: numpy.ndarray,
        return_winner_indices=False
    ) -> Union[numpy.ndarray, Tuple[numpy.ndarray, numpy.ndarray]]:
        """
        Evaluates the rules against the given features.

        :param X: input features.
        :param return_winner_indices: whether to return the winning rule index for each sample.
        :return: array of the same length as X with predictions or tuple of two arrays of the same\
                 length as X containing (predictions, winner rule indices).
        """
        self._log.debug("predicting %d samples using %d rules", len(X),
                        len(self._rules))
        rules = self._rules
        _compute_triggered = self._compute_triggered
        prediction = numpy.zeros(len(X), dtype=numpy.int32)
        if return_winner_indices:
            winner_indices = numpy.zeros(len(X), dtype=numpy.int32)
        for xi, x in enumerate(X):
            ris = _compute_triggered(self._compiled, rules, x)
            if len(ris) == 0:
                # self._log.warning("no rule!")
                continue
            if len(ris) > 1:
                confs = numpy.zeros(len(ris), dtype=numpy.float32)
                for i, ri in enumerate(ris):
                    confs[i] = rules[ri].stats.conf
                winner_index = ris[numpy.argmax(confs)]
            else:
                winner_index = ris[0]
            prediction[xi] = rules[winner_index].stats.cls
            if return_winner_indices:
                winner_indices[xi] = winner_index
        if return_winner_indices:
            return prediction, winner_indices
        return prediction

    @property
    def rules(self) -> List[Rule]:
        return self._rules

    @property
    def origin(self) -> Mapping[str, Any]:
        return self._origin

    @property
    def avg_rule_len(self) -> float:
        if not self._rules:
            return 0
        return sum(len(r.attrs) for r in self._rules) / len(self._rules)

    @classmethod
    def _compile(cls, rules: Sequence[Rule]) -> CompiledRulesType:
        cls._log.debug("compiling %d rules", len(rules))
        attrs = defaultdict(lambda: defaultdict(lambda: [[], []]))
        for i, (branch, _) in enumerate(rules):
            for rule in branch:
                attrs[rule.feature][rule.threshold][int(rule.cmp)].append(i)
        compiled_attrs = {}
        for key, attr in attrs.items():
            vals = sorted(attr)
            false_rules = set()
            true_rules = set()
            vr = [[None, None] for _ in vals]
            for i in range(len(vals)):
                false_rules.update(attr[vals[i]][False])
                true_rules.update(attr[vals[len(vals) - i - 1]][True])
                vr[i][False] = numpy.array(sorted(false_rules))
                vr[len(vr) - i - 1][True] = numpy.array(sorted(true_rules))
            compiled_attrs[key] = cls.CompiledFeatureRules(
                numpy.array(vals, dtype=numpy.float32),
                tuple(cls.CompiledNegatedRules(*v) for v in vr))
        return compiled_attrs

    @classmethod
    def _compute_triggered(cls, compiled_rules: CompiledRulesType,
                           rules: Sequence[Rule],
                           x: numpy.ndarray) -> numpy.ndarray:
        searchsorted = numpy.searchsorted
        triggered = numpy.full(len(rules), 0xff, dtype=numpy.int8)
        for i, v in enumerate(x):
            try:
                vals, arules = compiled_rules[i]
            except KeyError:
                continue
            border = searchsorted(vals, v)
            if border > 0:
                indices = arules[border - 1][False]
                if len(indices):
                    triggered[indices] = 0
            if border < len(arules):
                indices = arules[border][True]
                if len(indices):
                    triggered[indices] = 0
        return numpy.nonzero(triggered)[0]
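
A usage sketch, reusing Rule, RuleAttribute and RuleStats from Example #4 (hypothetical values):

rules = Rules(
    [Rule(attrs=(RuleAttribute(feature=0, cmp=True, threshold=0.5),),
          stats=RuleStats(cls=1, conf=0.9))],
    origin={},
)
X = numpy.array([[0.7], [0.2]], dtype=numpy.float32)
rules.predict(X)  # -> array([1, 0]): the rule fires for 0.7 > 0.5, nothing fires for 0.2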
Example #24
from typing import NamedTuple

from AnyQt.QtWidgets import (
    QWidget,
    QToolButton,
    QAction,
    QAbstractButton,
    QWIDGETSIZE_MAX,
)
from AnyQt.QtGui import (QIcon, QFontMetrics, QPainter, QPalette, QBrush, QPen,
                         QColor, QFont)
from AnyQt.QtCore import (Qt, QObject, QSize, QRect, QPoint, QSignalMapper)
from AnyQt.QtCore import Signal, Property

from ..utils import set_flag
from .utils import brush_darker, ScrollBar

__all__ = ["ToolBox"]

_ToolBoxPage = NamedTuple("_ToolBoxPage", [
    ("index", int),
    ("widget", QWidget),
    ("action", QAction),
    ("button", QAbstractButton),
])


class ToolBoxTabButton(QToolButton):
    """
    A tab button for an item in a :class:`ToolBox`.
    """
    def setNativeStyling(self, state):
        # type: (bool) -> None
        """
        Render tab buttons as native (or css styled) :class:`QToolButtons`.
        If set to `False` (default) the button is painted using a custom
        paint routine.
        """
Example #25
File: conftest.py Project: selten/checkmk
import threading
from typing import Any, NamedTuple
from unittest.mock import MagicMock

import pytest
from typing_extensions import Literal
from werkzeug.test import create_environ

import cmk.gui.config as config
import cmk.gui.htmllib as htmllib
import cmk.gui.login as login
from cmk.gui.globals import AppContext, RequestContext
from cmk.gui.http import Request
from cmk.gui.plugins.userdb import htpasswd
from cmk.gui.utils import get_random_string
from cmk.gui.watolib.users import delete_users, edit_users
from cmk.gui.wsgi import make_app

SPEC_LOCK = threading.Lock()

Automation = NamedTuple("Automation", [
    ("automation", MagicMock),
    ("local_automation", MagicMock),
    ("remote_automation", MagicMock),
    ("responses", Any),
])

HTTPMethod = Literal[
    "get", "put", "post", "delete",
    "GET", "PUT", "POST", "DELETE",
]  # yapf: disable

@pytest.fixture(scope='function')
def register_builtin_html():
    """This fixture registers a global htmllib.html() instance just like the regular GUI"""
    environ = create_environ()
    with AppContext(DummyApplication(environ, None)), \
            RequestContext(htmllib.html(Request(environ))):
        yield
Example #26
    """
    # TODO: support a setter for `msg` to create an instance of a type
    # according to `msg_type`
    typename = MESSAGE_RESPONSE
    schema = (
        (f.MSG_TYPE.nm, ChooseField(values=MessageReq.allowed_types)),
        (f.PARAMS.nm, AnyMapField()),
        (f.MSG.nm, AnyField())
    )


ThreePhaseType = (PrePrepare, Prepare, Commit)
ThreePhaseMsg = TypeVar("ThreePhaseMsg", *ThreePhaseType)

ThreePhaseKey = NamedTuple("ThreePhaseKey", [
    f.VIEW_NO,
    f.PP_SEQ_NO
])


class BatchCommitted(MessageBase):
    """
    Purpose: pass to Observable after each batch is committed
    (so that Observable can propagate the data to Observers using ObservedData msg)
    """
    typename = BATCH_COMMITTED
    schema = (
        (f.REQUESTS.nm,
         IterableField(ClientMessageValidator(
             operation_schema_is_strict=OPERATION_SCHEMA_IS_STRICT))),
        (f.LEDGER_ID.nm, LedgerIdField()),
        (f.INST_ID.nm, NonNegativeNumberField()),
Example #27
from typing import AbstractSet, Any, Callable, NamedTuple, Optional


class HookDefinition(
        NamedTuple(
            "_HookDefinition",
            [
                ("name", str),
                ("hook_fn", Callable),
                ("required_resource_keys", AbstractSet[str]),
                ("decorated_fn", Callable),
            ],
        )):
    """Define a hook which can be triggered during a op execution (e.g. a callback on the step
    execution failure event during a op execution).

    Args:
        name (str): The name of this hook.
        hook_fn (Callable): The callback function that will be triggered.
        required_resource_keys (Optional[AbstractSet[str]]): Keys for the resources required by the
            hook.
    """
    def __new__(
        cls,
        name: str,
        hook_fn: Callable[..., Any],
        required_resource_keys: Optional[AbstractSet[str]] = None,
        decorated_fn: Optional[Callable[..., Any]] = None,
    ):
        return super(HookDefinition, cls).__new__(
            cls,
            name=check_valid_name(name),
            hook_fn=check.callable_param(hook_fn, "hook_fn"),
            required_resource_keys=frozenset(
                check.opt_set_param(required_resource_keys,
                                    "required_resource_keys",
                                    of_type=str)),
            decorated_fn=check.callable_param(decorated_fn, "decorated_fn"),
        )

    def __call__(self, *args, **kwargs):
        """This is invoked when the hook is used as a decorator.

        We currently support hooks to decorate the following:

        - PipelineDefinition: when the hook decorates a job definition, it will be added to
            all the op invocations within the job.

        Example:
            .. code-block:: python

                @success_hook
                def slack_message_on_success(_):
                    ...

                @slack_message_on_success
                @job
                def a_job():
                    foo(bar())

        """
        from ..execution.context.hook import HookContext
        from .graph_definition import GraphDefinition
        from .hook_invocation import hook_invocation_result
        from .pipeline_definition import PipelineDefinition

        if len(args) > 0 and isinstance(args[0],
                                        (PipelineDefinition, GraphDefinition)):
            # when it decorates a pipeline, we apply this hook to all the solid invocations within
            # the pipeline.
            return args[0].with_hooks({self})
        else:
            if not self.decorated_fn:
                raise DagsterInvalidInvocationError(
                    "Only hook definitions created using one of the hook decorators can be invoked."
                )
            fxn_args = get_function_params(self.decorated_fn)
            # If decorated fxn has two arguments, then this is an event list hook fxn, and parameter
            # names are always context and event_list
            if len(fxn_args) == 2:
                context_arg_name = fxn_args[0].name
                event_list_arg_name = fxn_args[1].name
                if len(args) + len(kwargs) != 2:
                    raise DagsterInvalidInvocationError(
                        "Decorated function expects two parameters, context and event_list, but "
                        f"{len(args) + len(kwargs)} were provided.")
                if args:
                    context = check.opt_inst_param(args[0], "context",
                                                   HookContext)
                    event_list = check.opt_list_param(
                        args[1]
                        if len(args) > 1 else kwargs[event_list_arg_name],
                        event_list_arg_name,
                    )
                else:
                    if context_arg_name not in kwargs:
                        raise DagsterInvalidInvocationError(
                            f"Could not find expected argument '{context_arg_name}'. Provided "
                            f"kwargs: {list(kwargs.keys())}")
                    if event_list_arg_name not in kwargs:
                        raise DagsterInvalidInvocationError(
                            f"Could not find expected argument '{event_list_arg_name}'. Provided "
                            f"kwargs: {list(kwargs.keys())}")
                    context = check.opt_inst_param(kwargs[context_arg_name],
                                                   context_arg_name,
                                                   HookContext)
                    event_list = check.opt_list_param(
                        kwargs[event_list_arg_name], event_list_arg_name)
                return hook_invocation_result(self, context, event_list)
            else:
                context_arg_name = fxn_args[0].name
                if len(args) + len(kwargs) != 1:
                    raise DagsterInvalidInvocationError(
                        f"Decorated function expects one parameter, {context_arg_name}, but "
                        f"{len(args) + len(kwargs)} were provided.")
                if args:
                    context = check.opt_inst_param(args[0], context_arg_name,
                                                   HookContext)
                else:
                    if context_arg_name not in kwargs:
                        raise DagsterInvalidInvocationError(
                            f"Could not find expected argument '{context_arg_name}'. Provided "
                            f"kwargs: {list(kwargs.keys())}")
                    context = check.opt_inst_param(kwargs[context_arg_name],
                                                   context_arg_name,
                                                   HookContext)
                return hook_invocation_result(self, context)
Example #28
import logging
import _thread as thread

from . import modelinfo
from . import weights
from .details import make_kernel_args, dispersion_mesh

# Hack: load in any custom distributions
# Uses ~/.sasview/weights/*.py unless SASMODELS_WEIGHTS is set in the environ.
# Override with weights.load_weights(pattern="<weights_path>/*.py")
weights.load_weights()

# pylint: disable=unused-import
try:
    from typing import (Dict, Mapping, Any, Sequence, Tuple, NamedTuple, List,
                        Optional, Union, Callable)
    from .modelinfo import ModelInfo, Parameter
    from .kernel import KernelModel
    MultiplicityInfoType = NamedTuple('MultiplicityInfo',
                                      [("number", int), ("control", str),
                                       ("choices", List[str]),
                                       ("x_axis_label", str)])
    SasviewModelType = Callable[[int], "SasviewModel"]
except ImportError:
    pass
# pylint: enable=unused-import

logger = logging.getLogger(__name__)

calculation_lock = thread.allocate_lock()

#: True if pre-existing plugins, with the old names and parameters, should
#: continue to be supported.
SUPPORT_OLD_STYLE_PLUGINS = True

# TODO: separate x_axis_label from multiplicity info
Example #29
#!/usr/bin/env python3
import json
from statistics import mean, median, stdev
import sys
import time
from typing import NamedTuple

import aqi
import click
from pygeodesy.ellipsoidalVincenty import LatLon
import requests

Sensor = NamedTuple('Sensor', [
    ('id', int),
    ('label', str),
    ('distance_m', float),
])


@click.group()
def main():
    pass


@main.command()
@click.option('--debug', is_flag=True, default=False)
@click.option('--max-count', type=int, default=8)
@click.option('--max-distance-km', type=float, default=2)
@click.argument('output_file',
                type=click.File('w', encoding='utf-8'),
                required=True)
Example #30
import random
from typing import List, NamedTuple

import numpy as np

import settings

EMPTY = 0
TIC = 7
TAC = 8

DRAW = 100

Move = NamedTuple('TicTacToeMove', row=int, col=int)

MoveHistory = NamedTuple('MoveHistory', state=np.ndarray, move=Move)
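
A tiny usage sketch of the two records:

move = Move(row=0, col=2)
entry = MoveHistory(state=np.zeros((3, 3), dtype=int), move=move)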


class TicTacToeGameEngine:
    _num_rows: int
    _num_columns: int
    _first_move: int
    _next_move: int
    _state: np.ndarray
    _state_history: List[np.ndarray]
    _game_result: int

    def __init__(self, num_rows=3, num_columns=3, starting_move=None):
        self._num_rows = num_rows
        self._num_columns = num_columns