Example 1
0
    def to_nn(self, shape, scale_pixels=False):
        """Convert data to neural network inputs/outputs.

        :arg shape: Target shape handed to the per-image converter
            ``self._to_single_nn``.
        :arg scale_pixels: When True, divide the input images (element 0 of
            the unzipped pair) by 255 to normalize pixel values.
        """

        # Pipeline: for each unique ImageId build an (input, output) pair,
        # unzip the pairs into two parallel lists, optionally run the
        # augmentation step (only when self.augment is truthy), convert each
        # list to a numpy array, optionally rescale the inputs, and finally
        # reshape the outputs for the network.
        # NOTE(review): iffy(constantly(flag), f) applies f only when flag is
        # truthy and passes the value through otherwise — presumably funcy's
        # iffy/constantly; confirm against this module's imports.
        return pipe(
            self.df.ImageId.unique(), map(self._to_single_nn(shape)),
            list_unzip, iffy(constantly(self.augment), self._augment_nn),
            map(np.array), list,
            iffy(constantly(scale_pixels),
                 lens[0].modify(lambda x: x / 255)), self._reshape_output)
Example 2
0
    def _refs_from_view(self, menu_maker, views, tooltip=constantly(None)):
        """Return an iterable of (start, end, (menu, tooltip)), running
        ``menu_maker`` across each item that comes of applying ``view`` to
        ``self.condensed`` and adding "Jump to definition" where applicable.

        :arg menu_maker: A function that takes a tree and an item from
            ``view()`` and returns a ref menu
        :arg views: An iterable of functions that take self.condensed and
            return an iterable of things to call ``menu_maker()`` on
        :arg tooltip: A function that takes one of those things from the
            iterable and emits a value to be shown in the mouseover of the ref

        """
        props = chain.from_iterable(view(self.condensed) for view in views)
        for prop in props:
            # TODO: This used to run unconditionally. Should we still try to
            # do it sometime if span isn't in prop? Both cases in test_direct
            # are examples of this. [Marcell says no.]
            if 'span' not in prop:
                continue

            definition = prop.get('defloc')
            if definition and not definition[0].startswith('/'):
                # We can look up the target of this ref, and it's inside the
                # source tree (an absolute path starting with "/" would mean
                # it's outside).
                menu = definition_menu(self.tree,
                                       path=definition[0],
                                       row=definition[1].row)
            else:
                menu = []
            menu.extend(menu_maker(self.tree, prop))

            start, end = prop['span']
            yield (self.char_offset(start.row, start.col),
                   self.char_offset(end.row, end.col),
                   Ref(menu, hover=tooltip(prop), qualname=prop.get('qualname')))
Example 3
0
def spec2graph(spec: ConcreteSpec, qdd=False, dprob=None) -> tuple:
    """Unroll ``spec``'s DFA into a probability-annotated ``nx.DiGraph``.

    :arg spec: Specification whose DFA (via ``spec._as_dfa``) is walked.
    :arg qdd: Whether to build the QDD form of the DFA; adds per-state debt
        that shifts decision levels and becomes part of each node key.
    :arg dprob: Optional callable ``(state_key, action) -> prob`` supplying
        edge probabilities at decision nodes. Defaults to a function that
        always returns None.
    :returns: A 3-tuple ``(graph, root, sinks)`` — the frozen graph, the key
        of the start state, and the list of sink-node keys. (The previous
        annotation of ``nx.DiGraph`` was wrong: a tuple is returned.)
    """
    dfa = spec._as_dfa(qdd=qdd)

    if dprob is None:
        dprob = fn.constantly(None)

    def is_sink(state) -> bool:
        # Sinks are states whose underlying BDD node has no variable.
        return state.node.var is None

    def is_decision(state) -> bool:
        lvl = state.node.level
        if qdd:
            # Debt shifts the effective level in the QDD encoding.
            lvl -= state.debt
        return spec.order.is_decision(lvl)

    def key(state):
        # QDD states are only unique up to (ref, debt).
        return (state.ref, state.debt) if qdd else state.ref

    sinks = set()
    g = nx.DiGraph()

    @fn.memoize
    def _node(state):
        # Add the state to the graph exactly once and return its key.
        decision = is_decision(state) and not is_sink(state)
        lvl = state.node.level
        var = state.label() if state.node.var is None else state.node.var
        node = key(state)
        g.add_node(node, lvl=lvl, var=var, decision=decision)
        return node

    # Depth-first exploration of the reachable state space.
    stack = [dfa.start]
    while len(stack) > 0:
        state = stack.pop()
        action2succ = {a: dfa._transition(state, a) for a in dfa.inputs}

        for action, succ in action2succ.items():
            if key(succ) not in g.nodes:
                stack.append(succ)

            if succ == state:  # Self-loop marks a sink.
                sinks.add(_node(succ))
                continue

            if qdd and state.debt > 0:
                # Debt-paying transitions are deterministic bookkeeping.
                g.add_edge(_node(state), _node(succ), action=None, prob=1)
            else:
                if is_decision(state):
                    prob = dprob(key(state), action)
                else:
                    prob = 1/2  # Chance node: uniform over two branches.

                g.add_edge(_node(state), _node(succ), action=action, prob=prob)

    # Funnel all sinks into a single terminal so downstream algorithms have
    # one absorbing node to target.
    g.add_node("DUMMY", lvl=None, var=None, decision=False)
    for sink in sinks:
        g.add_edge(sink, "DUMMY", action=None, prob=1)

    g = nx.freeze(g)
    return g, _node(dfa.start), list(sinks)
Example 4
0
class BaseCheck(metaclass=ABCMeta):
    """Abstract base for checks: subclasses implement ``check``; callers use
    ``do_check``, which normalizes the outcome to a plain bool.
    """

    # Decorator stack, applied bottom-up:
    #   post_processing(constantly(True)): any value check() returns without
    #     raising is mapped to True.
    #   ignore(Exception, default=False): any exception from check() is
    #     swallowed and do_check returns False instead.
    # Net effect: do_check(obj) is True iff check(obj) completes without
    # raising. NOTE(review): this assumes funcy's ignore/post_processing
    # semantics — confirm against this module's imports.
    @ignore(Exception, default=False)
    @post_processing(constantly(True))
    def do_check(self, obj):
        return self.check(obj)

    @abstractmethod
    def check(self, obj):
        # Subclasses perform the actual validation of obj here; raising any
        # exception signals failure (see do_check above).
        pass
Example 5
0
def representation(text_list, translation=esv, include_svd=True):
    """Vectorize a list of sentences against the vocabulary of
    ``translation`` and, unless disabled, reduce it with SVD.

    :arg text_list: Sentences to convert.
    :arg translation: Which translation's fitted vectorizer/SVD to use.
    :arg include_svd: When False, return the raw vocabulary vectors.
    """
    vectorized = vocabulary_vectorizer(translation=translation).transform(text_list)
    if include_svd:
        return svd(translation=translation).transform(vectorized)
    return vectorized
Example 6
0
    def __init__(
            self,
            bad_input_checks=(fy.constantly(True), ),
            errors=tuple(Exception.__subclasses__()),
    ):
        """Raises a random error if any input check returns True.

        :arg bad_input_checks: Predicates; if any returns True for an input,
            an error is raised.
        :arg errors: The pool of error types to raise from.
        """
        self.bad_input_checks = bad_input_checks
        self.errors = errors
        # A single predicate that is True when any individual check fires.
        self._check = fy.any_fn(*bad_input_checks)
        # Seed the RNG deterministically from the configuration so the same
        # checks + errors always produce the same sequence of raised errors.
        self._random = random.Random(hash(fy.merge(bad_input_checks, errors)))
Example 7
0
    def walk_down(self, skip=constantly(False), include_self=True):
        """Yield each node from here downward, myself included,
        in depth-first pre-order.

        :arg skip: A predicate describing nodes to not descend into. We always
            return ourselves, even if the predicate says to skip us. A skipped
            child is still yielded; its subtree is not.
        :arg include_self: A flag for including the root in the walk down.

        The AST we get from Reflect.parse is somewhat unsatisfying. It's not a
        uniform tree shape; it seems to have already been turned into more
        specialized objects. Thus, we have to traverse into different fields
        depending on node type.

        """
        if include_self:
            yield self
        for child in self.children():
            if skip(child):
                yield child
                continue
            # Delegate to the child's own walk; include_self defaults to
            # True there, so the child itself is yielded by the recursion.
            yield from child.walk_down(skip=skip)
Example 8
0
    def walk_down(self, skip=constantly(False), include_self=True):
        """Yield this node and all of its descendants in depth-first
        pre-order.

        :arg skip: Predicate naming nodes whose subtrees should not be
            entered. A skipped node is still yielded itself, and the root is
            always yielded regardless of the predicate.
        :arg include_self: Whether to yield the root node first.

        The AST we get from Reflect.parse is somewhat unsatisfying. It's not a
        uniform tree shape; it seems to have already been turned into more
        specialized objects. Thus, we have to traverse into different fields
        depending on node type.

        """
        if include_self:
            yield self
        for child in self.children():
            if not skip(child):
                # Recurse; include_self defaults to True in the child's own
                # call, so the child is yielded there.
                for descendant in child.walk_down(skip=skip):
                    yield descendant
            else:
                # Pruned subtree: emit the child but don't descend into it.
                yield child
Example 9
0
from funcy import group_by, constantly

# Each group function ignores its argument and names the bucket a node
# belongs to (see categorize/transform below).
FUNC_GROUP = constantly('function')
VAR_GROUP = constantly('variable')
CALL_GROUP = constantly('call')
SYM_GROUP = constantly('symbol')

# Map AST node types to the grouping function for their bucket.
GROUPS = {
    **{node_type: FUNC_GROUP
       for node_type in ('Function', 'FunctionExpression',
                         'ArrowExpression', 'FunctionDeclaration')},
    **{node_type: VAR_GROUP
       for node_type in ('VariableDeclaration', 'LetStatement',
                         'LetExpression')},
    'CallExpression': CALL_GROUP,
}


def categorize(node):
    """Return the group name for *node* based on its 'type'.

    Node types without an explicit entry in GROUPS fall back to SYM_GROUP
    ('symbol'). The original ``GROUPS.get(...)`` had no default, so any
    unknown type produced ``None(node)`` — a TypeError; SYM_GROUP was
    defined but never used, which was clearly the intended fallback.
    """
    return GROUPS.get(node.get('type'), SYM_GROUP)(node)


def transform(ast):
    """Walk *ast* and group its nodes by the category categorize() assigns."""
    nodes = ast.walk_down()
    return group_by(categorize, nodes)
Example 10
0
def passages_by_uuid(uuid, include_text=False):
    """Build passage objects for the rows matching *uuid*.

    :arg uuid: Identifier used to look up reference rows.
    :arg include_text: When True, also attach the passage text.
    """
    rows = find_by_uuid(uuid)
    references = rows.apply(reference.init_raw_row, axis=1).tolist()
    passages = passage.init(references)
    if include_text:
        return passage.text(passages)
    return passages
Example 11
0
def show_experiments(all_experiments,
                     pager=True,
                     no_timestamp=False,
                     **kwargs):
    """Render the experiments table, styling metric/param columns.

    :arg all_experiments: Experiment data passed to experiments_table.
    :arg pager: Whether to page the rendered output.
    :arg no_timestamp: When True, drop the "Created" column.
    :arg kwargs: include/exclude metric/param filters plus sort_by,
        sort_order and precision for the table builder.
    """
    include_metrics = _parse_filter_list(kwargs.pop("include_metrics", []))
    exclude_metrics = _parse_filter_list(kwargs.pop("exclude_metrics", []))
    include_params = _parse_filter_list(kwargs.pop("include_params", []))
    exclude_params = _parse_filter_list(kwargs.pop("exclude_params", []))

    metric_names, param_names = _collect_names(
        all_experiments,
        include_metrics=include_metrics,
        exclude_metrics=exclude_metrics,
        include_params=include_params,
        exclude_params=exclude_params,
    )
    metric_headers = _normalize_headers(metric_names)
    param_headers = _normalize_headers(param_names)

    td = experiments_table(
        all_experiments,
        metric_headers,
        metric_names,
        param_headers,
        param_names,
        kwargs.get("sort_by"),
        kwargs.get("sort_order"),
        kwargs.get("precision"),
    )

    if no_timestamp:
        td.drop("Created")

    # Bold the baseline row; iffy maps truthy is_baseline values through the
    # style dict and everything else to the empty default.
    baseline_styler = iffy(constantly({"style": "bold"}), default={})
    row_styles = lmap(baseline_styler, td.column("is_baseline"))
    td.drop("is_baseline")

    merge_headers = ["Experiment", "queued", "ident_guide", "parent"]
    td.column("Experiment")[:] = map(prepare_exp_id, td.as_dict(merge_headers))
    td.drop(*merge_headers[1:])

    headers = {"metrics": metric_headers, "params": param_headers}
    styles = {
        "Experiment": {
            "no_wrap": True,
            "header_style": "black on grey93"
        },
        "Created": {
            "header_style": "black on grey93"
        },
    }
    header_bg_colors = {"metrics": "cornsilk1", "params": "light_cyan1"}
    styles.update({
        header: {
            # BUG FIX: the justify value was previously
            # '"left" if typ == "metrics" else "params"', which placed the
            # literal string "params" in a justify slot. Metrics are
            # right-justified and params left-justified, matching the other
            # show_experiments implementation in this file.
            "justify": "right" if typ == "metrics" else "left",
            "header_style": f"black on {header_bg_colors[typ]}",
            "collapse": idx != 0,
            "no_wrap": typ == "metrics",
        }
        for typ, hs in headers.items() for idx, header in enumerate(hs)
    })

    td.render(
        pager=pager,
        borders=True,
        rich_table=True,
        header_styles=styles,
        row_styles=row_styles,
    )
Example 12
0
def show_experiments(all_experiments,
                     pager=True,
                     no_timestamp=False,
                     **kwargs):
    """Render the experiments table with per-column header styles.

    :arg all_experiments: Experiment data passed to experiments_table.
    :arg pager: Whether to page the rendered output.
    :arg no_timestamp: When True, drop the "Created" column and its style.
    :arg kwargs: include/exclude metric/param filters plus sort_by,
        sort_order and precision for the table builder.
    """
    # Pull the four include/exclude filters out of kwargs and parse them.
    filters = {
        name: _parse_filter_list(kwargs.pop(name, []))
        for name in ("include_metrics", "exclude_metrics",
                     "include_params", "exclude_params")
    }

    metric_names, param_names = _collect_names(all_experiments, **filters)
    metric_headers = _normalize_headers(metric_names)
    param_headers = _normalize_headers(param_names)

    td = experiments_table(
        all_experiments,
        metric_headers,
        metric_names,
        param_headers,
        param_names,
        kwargs.get("sort_by"),
        kwargs.get("sort_order"),
        kwargs.get("precision"),
    )

    # Positional header styles: Experiment, Created, then one entry per
    # metric column and one per param column. Only the first column of each
    # group stays expanded ("collapse": idx != 0).
    experiment_style = {
        "no_wrap": True,
        "header_style": "black on grey93"
    }
    created_style = {
        "header_style": "black on grey93"
    }
    metric_styles = [{
        "justify": "right",
        "header_style": "black on cornsilk1",
        "no_wrap": True,
        "collapse": idx != 0,
    } for idx, _ in enumerate(metric_headers)]
    param_styles = [{
        "justify": "left",
        "header_style": "black on light_cyan1",
        "collapse": idx != 0,
    } for idx, _ in enumerate(param_headers)]
    styles = [experiment_style, created_style, *metric_styles, *param_styles]

    if no_timestamp:
        td.drop("Created")
        # Keep styles aligned with the remaining columns.
        styles.pop(1)

    # Bold the baseline row; everything else gets the empty default style.
    baseline_styler = iffy(constantly({"style": "bold"}), default={})
    row_styles = lmap(baseline_styler, td.column("is_baseline"))
    td.drop("is_baseline")

    td.render(
        pager=pager,
        borders=True,
        rich_table=True,
        header_styles=styles,
        row_styles=row_styles,
    )