Example #1
    def _feedback(self, inputs, outputs, initials=None, latches=None,
                  keep_outputs=False):
        # TODO: remove in next version bump and put into wire.
        if latches is None:
            latches = inputs

        def blast(bmap, vals):
            # Expand each bundle name into its underlying bit names.
            return fn.lmapcat(bmap.get, vals)

        lmap = BundleMap(
            {l: self.imap[i].size for i, l in zip(inputs, latches)}
        )

        if initials is not None:
            l2init = dict(self.aig.latch2init)
            l2init.update(
                {l: v for l, v in zip(latches, initials) if v is not None}
            )
            initials = fn.lcat(l2init[l] for l in latches)

        aig = rebundle_aig(self.aig.feedback(
            inputs=blast(self.imap, inputs), outputs=blast(self.omap, outputs),
            latches=blast(lmap, latches), keep_outputs=keep_outputs,
            initials=initials,
        ))

        return aig
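
Example #1 leans on funcy's flatteners: `lcat` concatenates one level of nesting and `lmapcat` maps then concatenates. A minimal sketch of what `blast` does, with a plain dict standing in for a real `BundleMap` (the `bmap` contents here are invented):

import funcy as fn

assert fn.lcat([[1, 2], [3], []]) == [1, 2, 3]
assert fn.lmapcat(lambda n: [n, n], [1, 2]) == [1, 1, 2, 2]

# `blast` looks each bundle name up and concatenates the bit names it maps to.
bmap = {"x": ["x[0]", "x[1]"], "y": ["y[0]"]}
assert fn.lmapcat(bmap.get, ["x", "y"]) == ["x[0]", "x[1]", "y[0]"]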
Example #2
File: loader.py Project: urahua/dvc
    def load_stage(cls, dvcfile, name, stage_data, lock_data=None):
        assert all([name, dvcfile, dvcfile.repo, dvcfile.path])
        assert stage_data and isinstance(stage_data, dict)

        path, wdir = resolve_paths(dvcfile.path,
                                   stage_data.get(Stage.PARAM_WDIR))
        stage = loads_from(PipelineStage, dvcfile.repo, path, wdir, stage_data)
        stage.name = name

        deps = project(stage_data, [stage.PARAM_DEPS, stage.PARAM_PARAMS])
        fill_stage_dependencies(stage, **deps)

        outs = project(
            stage_data,
            [stage.PARAM_OUTS, stage.PARAM_METRICS, stage.PARAM_PLOTS],
        )
        stage.outs = lcat(
            output.load_from_pipeline(stage, data, typ=key)
            for key, data in outs.items())

        if lock_data:
            stage.cmd_changed = lock_data.get(Stage.PARAM_CMD) != stage.cmd

        cls.fill_from_lock(stage, lock_data)
        return stage
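
`project` here is funcy's key filter: it keeps only the listed keys of a mapping, silently skipping missing ones. A quick sketch with invented stage data:

from funcy import project

stage_data = {"cmd": "python train.py", "deps": ["data.csv"], "outs": ["model.pkl"]}

# Missing keys ("params", "metrics", "plots") are simply absent from the result.
assert project(stage_data, ["deps", "params"]) == {"deps": ["data.csv"]}
assert project(stage_data, ["outs", "metrics", "plots"]) == {"outs": ["model.pkl"]}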
Example #3
def chain_nodes(nodes, chain):
    """Returns nodes matched by chain."""
    if not chain:
        return nodes

    link, *rest = chain
    if link.func is Ops.const:
        return []
    elif link.func is Ops.multi:
        coll, = link.args
        if isinstance(coll, list):
            coll = dict(enumerate(coll))
        return {
            k: chain_nodes(nodes, subchain + rest)
            for k, subchain in coll.items()
        }
    elif link.func is notnone_fn:
        return ldistinct(
            lcat(
                chain_nodes(nodes, subchain + rest) for subchain in link.args))
    else:
        # Doing this manually in case the link encapsulates a chain we can unpack
        func = link.func if link.args is None else link.func(*link.args)
        if isinstance(func, Chain):
            return chain_nodes(nodes, func + rest)
        else:
            next_value = func(nodes)
            if is_elements(next_value):
                return chain_nodes(next_value, rest)
            return nodes
Example #4
    def _dnf(where):
        """
        Constructs DNF of where tree consisting of terms in form:
            (alias, attribute, value, negation)
        meaning `alias.attribute = value`
         or `not alias.attribute = value` if negation is False

        Any conditions other then eq are dropped.
        """
        if isinstance(where, Lookup):
            # If where.lhs doesn't refer to a field, don't bother
            if not hasattr(where.lhs, 'target'):
                return SOME_TREE
            # Don't bother with complex right hand side either
            if isinstance(where.rhs, (QuerySet, Query, BaseExpression)):
                return SOME_TREE
            # Skip conditions on non-serialized fields
            if where.lhs.target not in serializable_fields(
                    where.lhs.target.model):
                return SOME_TREE

            attname = where.lhs.target.attname
            if isinstance(where, Exact):
                return [[(where.lhs.alias, attname, where.rhs, True)]]
            elif isinstance(where, IsNull):
                return [[(where.lhs.alias, attname, None, where.rhs)]]
            elif isinstance(where, In) and len(
                    where.rhs) < settings.CACHEOPS_LONG_DISJUNCTION:
                return [[(where.lhs.alias, attname, v, True)]
                        for v in where.rhs]
            else:
                return SOME_TREE
        elif isinstance(where, NothingNode):
            return []
        elif isinstance(where, (ExtraWhere, SubqueryConstraint, Exists)):
            return SOME_TREE
        elif len(where) == 0:
            return [[]]
        else:
            children_dnfs = lmap(_dnf, where.children)

            if len(children_dnfs) == 0:
                return [[]]
            elif len(children_dnfs) == 1:
                result = children_dnfs[0]
            else:
                # Just unite children joined with OR
                if where.connector == OR:
                    result = lcat(children_dnfs)
                # Use Cartesian product to AND children
                else:
                    result = lmap(lcat, product(*children_dnfs))

            # Negating and expanding brackets
            if where.negated:
                result = [lmap(negate, p) for p in product(*result)]

            return result
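
The OR/AND branches at the end carry the actual DNF algebra: OR concatenates the child disjunctions, while AND distributes them with a Cartesian product. A hand-run with toy terms (plain strings standing in for the `(alias, attribute, value, negation)` tuples):

from itertools import product
from funcy import lcat, lmap

# Two children, each already in DNF: (a OR b) and (c OR d).
children_dnfs = [[["a"], ["b"]], [["c"], ["d"]]]

# OR: just concatenate the disjunctions.
assert lcat(children_dnfs) == [["a"], ["b"], ["c"], ["d"]]

# AND: distribute via the Cartesian product, giving
# (a AND c) OR (a AND d) OR (b AND c) OR (b AND d).
assert lmap(lcat, product(*children_dnfs)) == [
    ["a", "c"], ["a", "d"], ["b", "c"], ["b", "d"],
]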
Example #5
def _fleiss_kappa(sample_sets):
    # If there is only one set then it can't be measured
    if len(sample_sets) == 1:
        return float('nan')

    all_samples_annos = lcat(sample_sets)
    categories = ldistinct(sv.annotation for sv in all_samples_annos)
    # If there is only one label then it can't be measured
    if len(categories) == 1:
        return float('nan')
    category_index = {c: i for i, c in enumerate(categories)}

    stats = defaultdict(lambda: [0] * len(categories))
    for sv in all_samples_annos:
        stats[sv.sample_id][category_index[sv.annotation]] += 1

    return fleiss_kappa(list(stats.values()))
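
The `stats` dict is the usual Fleiss' kappa contingency table: one row per sample, one column per category, counting annotators. A sketch with invented (sample_id, annotation) pairs in place of the `sv` objects:

from collections import defaultdict

annos = [("s1", "cat"), ("s1", "cat"), ("s1", "dog"), ("s2", "dog"), ("s2", "dog")]
categories = ["cat", "dog"]
category_index = {c: i for i, c in enumerate(categories)}

stats = defaultdict(lambda: [0] * len(categories))
for sample_id, annotation in annos:
    stats[sample_id][category_index[annotation]] += 1

# One row per sample, one column per category:
assert list(stats.values()) == [[2, 1], [0, 2]]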
Example #6
    def feedback(self,
                 inputs,
                 outputs,
                 initials=None,
                 latches=None,
                 keep_outputs=False,
                 signed=False):
        if latches is None:
            latches = inputs

        idrop, imap = fn.lsplit(lambda x: x[0] in inputs, self.input_map)
        odrop, omap = fn.lsplit(lambda x: x[0] in outputs, self.output_map)

        wordlens = [len(vals) for i, vals in idrop]
        new_latches = [(n, common.named_indexes(k, n))
                       for k, n in zip(wordlens, latches)]

        if initials is None:
            initials = [0 for _ in inputs]
        assert len(inputs) == len(outputs) == len(initials) == len(latches)

        initials = fn.lcat(
            common.encode_int(k, i, signed)
            for k, i in zip(wordlens, initials))

        def get_names(key_vals):
            return fn.lcat(fn.pluck(1, key_vals))

        aig = self.aig.feedback(
            inputs=get_names(idrop),
            outputs=get_names(odrop),
            latches=get_names(new_latches),
            initials=initials,
            keep_outputs=keep_outputs,
        )

        imap, odrop, omap = map(frozenset, [imap, odrop, omap])
        return AIGBV(
            aig=aig,
            input_map=imap,
            output_map=omap | (odrop if keep_outputs else frozenset()),
            latch_map=self.latch_map | set(new_latches),
        )
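
`fn.lsplit` partitions a sequence into (passing, failing) lists, which is how the fed-back words are separated from the ones left alone. A toy sketch with an invented `input_map`:

import funcy as fn

input_map = [("x", ["x[0]", "x[1]"]), ("y", ["y[0]"])]
idrop, imap = fn.lsplit(lambda kv: kv[0] in {"x"}, input_map)

assert idrop == [("x", ["x[0]", "x[1]"])]  # fed back through latches
assert imap == [("y", ["y[0]"])]           # stays an ordinary input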
Example #7
def discard(data):
    # just consider all ranges, ignoring the field association
    all_fields = lcat(data["fields"].values())

    invalid_entries = set()
    invalid_field_sum = 0

    for i, ticket in enumerate(data["nearby"]):
        for value in ticket:
            if not any(lo <= value <= hi for lo, hi in all_fields):
                invalid_field_sum += value
                invalid_entries.add(i)
                break

    # remove the invalid indexes in the nearby list
    for i in sorted(invalid_entries, reverse=True):
        del data["nearby"][i]

    return dict(invalid_field_sum=invalid_field_sum, nearby=data["nearby"])
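
Deleting by index in descending order is what keeps the remaining indexes valid while the list shrinks. A minimal illustration of that pattern:

nearby = ["t0", "t1", "t2", "t3"]
invalid = {1, 3}

# Deleting from the end first means earlier indexes never shift.
for i in sorted(invalid, reverse=True):
    del nearby[i]

assert nearby == ["t0", "t2"]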
Example #8
def get_fields(graphql_schema, typename) -> Iterable[Tuple[str, str]]:
    IGNORE_FIELDS = (
        ScalarTypeDefinitionNode,
        FragmentDefinitionNode,
        EnumTypeDefinitionNode,
    )
    doc = parse_graphql_schema(graphql_schema)
    node: Node = find(doc.definitions, lambda x: x.name.value == typename)
    if getattr(node, "fields", None):
        return [(field.name.value, get_type_name(field.type)) for field in node.fields]
    else:
        if isinstance(node, (UnionTypeDefinitionNode,)):
            fields = [get_fields(graphql_schema, x.name.value) for x in node.types]
            fields = unique(lcat(fields), key=lambda x: x[0])
            return fields
        elif isinstance(node, IGNORE_FIELDS):
            print(f"ignoring {node}")
            return []
        else:
            raise Exception(f"unrecognized type for {node}")
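
For the union branch, the pattern is flatten-then-dedupe by field name; `unique` here acts like funcy's `ldistinct` with a key. A sketch with invented member fields:

from funcy import lcat, ldistinct

per_member = [
    [("id", "ID"), ("name", "String")],
    [("id", "ID"), ("email", "String")],
]

# First occurrence of each field name wins.
merged = ldistinct(lcat(per_member), key=lambda f: f[0])
assert merged == [("id", "ID"), ("name", "String"), ("email", "String")]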
Example #9
def eval_order(circ):
    return fn.lcat(toposort(_dependency_graph(circ.cones | circ.latch_cones)))
Example #10
    def containers(self):
        return lcat(pluck_attr("containers", self.replicasets.values()))
Example #11
def sink(wordlen, inputs):
    blasted_inputs = [named_indexes(wordlen, i) for i in inputs]
    return aigbv.AIGBV(
        aig=aiger.sink(fn.lcat(blasted_inputs)),
        input_map=frozenset(fn.lzip(inputs, blasted_inputs)),
    )
Example #12
def mget(keys):
    return lcat(redis_client.mget(chunk) for chunk in chunks(10000, keys))
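
`chunks` is funcy's fixed-size splitter, so the `MGET` above goes out in batches of 10000 keys and `lcat` stitches the replies back together. A toy-sized sketch:

from funcy import chunks, lcat

batches = list(chunks(3, ["k1", "k2", "k3", "k4", "k5"]))
assert batches == [["k1", "k2", "k3"], ["k4", "k5"]]

# Flattening per-chunk results preserves the original key order.
assert lcat(batches) == ["k1", "k2", "k3", "k4", "k5"]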
Example #13
def plugs(tile: np.ndarray) -> List[np.ndarray]:
    # The eight possible 1-D borders of this tile:
    # the four edges and their flipped variants.
    return lcat(edges(tile))
Example #14
def get_mbta_info(cfgs):
    return pd.DataFrame.from_dict(f.lcat(map(get_mbta_station_info, cfgs)))
Example #15
def eval_order(circ, *, concat: bool = True):
    """Return topologically sorted nodes in AIG."""
    order = toposort(_dependency_graph(circ.cones | circ.latch_cones))
    return fn.lcat(order) if concat else order
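
`toposort` (as in the PyPI toposort package) yields dependency levels as sets; `lcat` flattens them into one evaluation order. A sketch with a toy dependency graph:

from funcy import lcat
from toposort import toposort

# node -> set of nodes it depends on
deps = {"out": {"g1", "g2"}, "g1": {"in"}, "g2": {"in"}}

levels = list(toposort(deps))
assert levels == [{"in"}, {"g1", "g2"}, {"out"}]

order = lcat(levels)  # flattened evaluation order
assert order[0] == "in" and order[-1] == "out"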