Example #1
import itertools as it

import numpy as np
import toolz as tz


def _reduce_with_count(pairwise, iterator, accumulator=None):
    """Return both the result of the reduction and the number of elements.

    Parameters
    ----------
    pairwise : function (a -> b -> a)
        The function with which to reduce the `iterator` sequence.
    iterator : iterable
        The sequence being reduced.
    accumulator : type "a", optional
        An initial value with which to perform the reduction.

    Returns
    -------
    result : type "a"
        The result of the reduce operation.
    count : int
        The number of elements that were accumulated.

    Examples
    --------
    >>> x = [5, 6, 7]
    >>> _reduce_with_count(np.add, x)
    (18, 3)
    """
    def new_pairwise(a, b):
        (elem1, c1), (elem2, c2) = a, b
        return pairwise(elem1, elem2), c2
    # Pair each element with its 1-based position so the count rides along
    # with the running result.
    new_iter = zip(iterator, it.count(1))
    if accumulator is None:
        return tz.reduce(new_pairwise, new_iter)
    # Seed with (initial value, count 0); counts still come from the data.
    return tz.reduce(new_pairwise, new_iter, (accumulator, 0))
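
With the fix above, an explicit accumulator seeds the running value while the count still comes from the data; a quick doctest-style check (assuming the imports shown):

>>> _reduce_with_count(np.add, [5, 6, 7], accumulator=100)
(118, 3)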
Example #2
import itertools as it  # Python 2: provides izip

import tlz  # dispatches to cytoolz when installed, falling back to toolz


def _reduce_with_count(pairwise, iterator, accumulator=None):
    """Return both the result of the reduction and the number of elements.

    Parameters
    ----------
    pairwise : function (a -> b -> a)
        The function with which to reduce the `iterator` sequence.
    iterator : iterable
        The sequence being reduced.
    accumulator : type "a", optional
        An initial value with which to perform the reduction.

    Returns
    -------
    result : type "a"
        The result of the reduce operation.
    count : int
        The number of elements that were accumulated.

    Examples
    --------
    >>> x = [5, 6, 7]
    >>> _reduce_with_count(np.add, x)
    (18, 3)
    """
    def new_pairwise(a, b):
        (elem1, c1), (elem2, c2) = a, b
        return pairwise(elem1, elem2), c2
    # Python 2 variant of Example #1: itertools.izip was removed in
    # Python 3, where the builtin zip is already lazy.
    new_iter = it.izip(iterator, it.count(1))
    if accumulator is None:
        return tlz.reduce(new_pairwise, new_iter)
    # Seed with (initial value, count 0), as in Example #1.
    return tlz.reduce(new_pairwise, new_iter, (accumulator, 0))
Example #3
import operator
from functools import reduce


def put_in(keys, coll, val):
    """Inverse of get_in, but promotes tuples to lists along the key path."""
    if keys:
        holder = reduce(operator.getitem, keys[:-1], coll)
        if isinstance(holder, tuple):
            # Tuples are immutable: promote to a list, then splice the
            # promoted container back into `coll` before assigning.
            holder = list(holder)
            coll = put_in(keys[:-1], coll, holder)
        holder[keys[-1]] = val
    else:
        coll = val
    return coll
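
A quick usage sketch: assigning into a nested tuple returns a structure in which every tuple on the key path has been promoted to a list.

>>> put_in([0, 1], ((1, 2), 3), 99)
[[1, 99], 3]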
Example #4
def getFuncsIndex(self):
    for elt in ast.walk(self.node):
        if isinstance(elt, ast.Name):
            indice = self.getIndice(elt)
            if hasattr(elt, "func"):
                # Collect, per function, the indices at which it occurs;
                # merge_with concatenates the index lists of duplicate keys.
                self.funcs = merge_with(lambda x: reduce(add, x, []),
                                        self.funcs, {elt.func: [indice]})
    for key in self.funcs.keys():
        self.funcsIndex[key] = itemmap(lambda kv, key=key:
                                       (kv[0],
                                        (int(kv[0] in self.funcs[key]), )),
                                       self.index)
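
For context, `merge_with` (from toolz) applies its function to the list of values collected for each key, so `reduce(add, x, [])` concatenates per-key index lists; a minimal standalone sketch of that merging idiom:

from functools import reduce
from operator import add

from toolz import merge_with

d1 = {'f': [1, 2]}
d2 = {'f': [5], 'g': [7]}
# Values seen under the same key are gathered into a list and concatenated.
print(merge_with(lambda x: reduce(add, x, []), d1, d2))
# {'f': [1, 2, 5], 'g': [7]}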
Example #5
def basic_validate(net, criterion, val_batches):
    print('running validation ... ', end='')
    net.eval()
    start = time()
    with torch.no_grad():
        validate_fn = val_step(compute_loss(net, criterion))
        # Sum the per-batch (example count, loss) pairs across all batches.
        n_data, tot_loss = reduce(lambda a, b: (a[0] + b[0], a[1] + b[1]),
                                  starmap(validate_fn, val_batches), (0, 0))
    val_loss = tot_loss / n_data
    print('validation finished in {}'.format(
        timedelta(seconds=int(time() - start))))
    print('validation loss: {:.4f} ... '.format(val_loss))
    return {'loss': val_loss}
Example #6
from functools import reduce
from operator import add


def write(values, filepath):
    """Writes a sequence of values to a filepath

    Args:
        values (sequence): Values to write
        filepath (str): Full path of raster file to write

    Returns:
        tuple: (filepath, characters written)
    """

    with open(filepath, 'w+') as handle:
        # handle.write returns the count written for each value; summing
        # those counts gives the total written to the file.
        return (filepath, reduce(add, map(handle.write, values)))
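
A quick illustrative session (the path is hypothetical):

>>> write(['ab', 'cd', 'e'], '/tmp/example.txt')
('/tmp/example.txt', 5)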
Example #7
def verify_merkle_proof(root: Hash32, item: Hashable, item_index: int,
                        proof: MerkleProof) -> bool:
    """Verify a Merkle proof against a root hash."""
    leaf = keccak(item)
    branch_indices = get_branch_indices(item_index, len(proof))
    node_orderers = [
        identity if branch_index % 2 == 0 else reversed
        for branch_index in branch_indices
    ]
    # Fold upward from the leaf: each orderer puts the running hash and its
    # sibling proof node into left/right order before hashing the pair.
    proof_root = reduce(
        lambda n1, n2_and_order: _calc_parent_hash(
            *n2_and_order[1]([n1, n2_and_order[0]])),
        zip(proof, node_orderers),
        leaf,
    )
    return proof_root == root
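
`get_branch_indices` and `_calc_parent_hash` are project helpers not shown here. A plausible sketch of the parent-hash step, assuming (as in common Merkle implementations) that a parent is the keccak hash of the ordered concatenation of its children:

from eth_hash.auto import keccak  # assumed hash backend

def _calc_parent_hash(left: bytes, right: bytes) -> bytes:
    # Parent node = hash(left child || right child).
    return keccak(left + right)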
Example #8
def basic_validate(net, criterion, val_batches):
    print('running validation ... ', end='')
    net.eval()
    start = time()
    with torch.no_grad():
        validate_fn = val_step(compute_loss(net, criterion))
        # Same accumulation of (count, loss) pairs as in Example #5.
        n_data, tot_loss = reduce(
            lambda a, b: (a[0]+b[0], a[1]+b[1]),
            starmap(validate_fn, val_batches),
            (0, 0)
        )
    val_loss = tot_loss / n_data
    print(
        'validation finished in {}                                    '.format(
            timedelta(seconds=int(time()-start)))
    )
    print('validation loss: {:.4f} ... '.format(val_loss))
    return {'loss': val_loss}
Example #9
        def __getitem__(self, key: str) -> iter:
            if key not in self.map:
                raise ValueError(
                    dedent("""\
                        Key '{}' is invalid!
                        Valid keys: {}
                        """.format(
                        key,
                        reduce(lambda k1, k2: '{}, {}'.format(k1, k2),
                               map(lambda k: "'{}'".format(k), self.map)))))
            ref = self.map[key]
            if 'api' not in ref:
                ref['api'] = 'dbpy'  # default api
            api = ref['api']

            # load reader
            if key not in self.cache:
                print("Loading '{}' reader...".format(key))
                if api not in ('dbpy', 'stpy'):
                    raise ValueError("Invalid api type '{}'!".format(api))
                if 'id' not in ref:
                    ref['id'] = key  # default id
                id = ref['id']
                if api == 'dbpy':
                    self.cache[key] = fromiter(
                        read_syncdatalist_float(id, self.hi_tag,
                                                tuple(map(int,
                                                          self.low_tags))),
                        'float')
                if api == 'stpy':
                    self.cache[key] = StorageWrapper(*map(int, self.runs),
                                                     beamline=self.beamline,
                                                     id=id)
                if 'deco' not in ref:
                    ref['deco'] = identity  # default deco
                print('Loaded!')

            data = self.cache[key]
            # `deco` may be configured either as a callable or as a source
            # string to evaluate.
            deco = ref['deco'] if callable(ref['deco']) else eval(ref['deco'])
            if api == 'dbpy':
                return map(deco, data)
            if api == 'stpy':
                return map(compose(deco, data.__getitem__), self.low_tags)
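
The stpy branch maps `compose(deco, data.__getitem__)` over the tags, i.e. look each tag up, then decorate the result; a toy illustration of that composition:

>>> from toolz import compose
>>> data = {1: 10, 2: 20}
>>> list(map(compose(lambda x: x + 1, data.__getitem__), [1, 2]))
[11, 21]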
Example #10
def symmetric(datemap):
    """Returns a sequence of dates that are common to all map values if
    all datemap values are represented, else Exception.
    
    Args:
        datemap: {key: [datestrings,]}

    Returns:
        Sequence of date strings or Exception

    Example:

        >>> common({"reds":  [ds3, ds1, ds2],
                    "blues": [ds2, ds3, ds1]})
        [2, 3, 1]
        >>>
        >>> common({"reds":  [ds3, ds1],
                    "blues": [ds2, ds3, ds1]})
        Exception: reds:[3, 1] does not match blues:[2, 3, 1]
    """

    def check(a, b):
        """Reducer for efficiently comparing two unordered sequences.
        Executes in linear, O(n), time.

        Args:
            a: (key, [datestring1, datestring2...]) item pair
            b: (key, [datestring2, datestring1...]) item pair

        Returns:
            b if the date sequences match, else raises Exception with details
        """

        if f.seqeq(second(a), second(b)):
            return b
        else:
            msg = ('asymmetric dates detected - {} != {}'
                   .format(first(a), first(b)))
            msga = '{}{}'.format(first(a), second(a))
            msgb = '{}{}'.format(first(b), second(b))
            raise Exception('\n\n'.join([msg, msga, msgb]))

    return second(reduce(check, datemap.items()))
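
`f.seqeq` is a project helper not shown here; a reasonable linear-time reading of it (an assumption, not the project's actual code) compares the two sequences as multisets:

from collections import Counter

def seqeq(a, b):
    # Unordered, duplicate-aware equality in O(n) expected time.
    return Counter(a) == Counter(b)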
Example #11
def chips(x, y, acquired, ubids, url, resource='/chips'):
    """Returns chips from a Chipmunk url given x, y, date range and ubid sequence

    Args:
        x (int): projection coordinate x
        y (int): projection coordinate y
        acquired (str): ISO8601 daterange '2012-01-01/2014-01-03'
        ubids (sequence): sequence of ubids
        url (str): protocol://host:port/path
        resource (str): /chips/resource/path (default: /chips)

    Returns:
        tuple: chips

    Example:
        >>> chipmunk.chips(url='http://host:port/path',
                           x=123456,
                           y=789456,
                           acquired='2012-01-01/2014-01-03',
                           ubids=['LE07_SRB1', 'LT05_SRB1'])
        (LE07_SRB1_DATE1, LT05_SRB1_DATE2, LE07_SRB1_DATE2, ...)
    """

    url = '{}{}'.format(url, resource)

    params = [{'x': x, 'y': y, 'acquired': acquired, 'ubid': u} for u in ubids]

    def request(url, params):
        r = requests.get(url=url, params=params)
        body = r.json()

        if not r.ok:
            logger.error("{} at {} for {}".format(body, url, params))
            return None
        else:
            return body
    
    responses = [request(url=url, params=p) for p in params]
    # Keep only successful (list/tuple) responses, then concatenate the chips.
    return tuple(reduce(add, filter(lambda x: isinstance(x, (list, tuple)),
                                    responses), []))
Example #12
def multi_basic_validate(net, criterion, val_batches):
    print('running validation ... ', end='')
    net.eval()
    start = time()
    with torch.no_grad():
        validate_fn = multi_val_step(compute_loss(net, criterion))
        n, tot_loss, tot_p, tot_r, tot_f = reduce(
            # Element-wise sum of the per-batch (count, loss, p, r, f) tuples.
            lambda a, b:
            (a[0] + b[0], a[1] + b[1], a[2] + b[2], a[3] + b[3], a[4] + b[4]),
            starmap(validate_fn, val_batches), (0, 0, 0, 0, 0))
    val_loss = tot_loss / n
    val_p = tot_p / n
    val_r = tot_r / n
    val_f = tot_f / n
    print(
        'validation finished in {}                                    '.format(
            timedelta(seconds=int(time() - start))))
    print('validation loss: {:.4f} ... '.format(val_loss))
    print('validation p: {:.4f} ... '.format(val_p))
    print('validation r: {:.4f} ... '.format(val_r))
    print('validation f: {:.4f} ... '.format(val_f))
    return {'loss': val_loss}
Example #13
def flip_keys(dods):
    """Accepts a dictionary of dictionaries and flips the outer and inner keys.
    All inner dictionaries must have a consistent set of keys or key Exception
    is raised.

    Args:
        dods: dict of dicts

    Returns:
        dict of dicts with inner and outer keys flipped

    Example:
        >>> dods = {"reds":   {(0, 0): [110, 110, 234, 664],
                               (0, 1): [23, 887, 110, 111]},
                    "greens": {(0, 0): [120, 112, 224, 624],
                               (0, 1): [33, 387, 310, 511]},
                    "blues":  {(0, 0): [128, 412, 244, 654],
                               (0, 1): [73, 987, 119, 191]}}
        >>> flip_keys(dods)
        {(0, 0): {"reds":   [110, 110, 234, 664],
                  "greens": [120, 112, 224, 624],
                  "blues":  [128, 412, 244, 654], ... },
         (0, 1), {"reds":   [23, 887, 110, 111],
                  "greens": [33, 387, 310, 511],
                  "blues":  [73, 987, 119, 191], ... }}

    """

    def flip(innerkeys, outerkeys, inputs):
        for ik in innerkeys:
            yield({ik: {ok: inputs[ok][ik] for ok in outerkeys}})

    outerkeys = set(dods.keys())
    # Concatenate every inner key list, then dedupe via set.
    innerkeys = set(reduce(lambda accum, v: accum + v,
                           [list(dods[ok].keys()) for ok in outerkeys]))
    return merge(flip(innerkeys, outerkeys, dods))
Example #14
def flow(*args):
    # Compose (k, v, p) -> (k, v) steps: `p` is re-appended after each step
    # and dropped from the final result by the [0:2] slice.
    return lambda k, v, p: reduce(
        lambda tup, f: f(tup[0], tup[1], tup[2]) + (tup[2], ), args,
        (k, v, p))[0:2]
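
A usage sketch with hypothetical step functions, showing how flow threads (key, value) through each step while passing `p` along unchanged:

from functools import reduce  # needed by flow above

def upper_key(k, v, p):
    # Hypothetical step: transform the key only.
    return k.upper(), v

def scale_value(k, v, p):
    # Hypothetical step: `p` carries the scale factor.
    return k, v * p

pipeline = flow(upper_key, scale_value)
print(pipeline('mass', 2, 10))  # ('MASS', 20)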
Example #15
    def logging_once(self, step):
        """Log information at one step"""
        do = self.valid

        # Print console info
        self.logger.info(
            '[step=%04d]'
            '[dslt=%6.3f]'
            '[btpt=%6.3f],'
            '[cs1t=%6.3f]'
            '[cs2t=%6.3f]'
            '[ebpt=%6.3f],'
            '[ss1t=%6.3f]'
            '[ss2t=%6.3f]'
            '[expt=%6.3f]',  #
            step,
            do.mt.dslt.avg,
            do.mt.btpt.avg,
            do.mt.cs1t.avg,
            do.mt.cs2t.avg,
            do.mt.ebpt.avg,
            do.mt.ss1t.avg,
            do.mt.ss2t.avg,
            do.mt.expt.avg)
        self.logger.info(
            '[step=%04d]'
            '[step-1-attn-loss=%8.1fb]'
            '[step-2-attn-loss=%8.1fb]'
            '[step-3-attn-loss=%8.1fb]'
            '[step-1-area=%5.3f/%5.3f]'
            '[step-2-area=%5.3f/%5.3f]'
            '[step-3-area=%5.3f/%5.3f]',  #
            step,
            do.mt.s1al.avg,
            do.mt.s2al.avg,
            do.mt.s3al.avg,
            do.mt.c1aa.avg,
            do.mt.s1aa.avg,
            do.mt.c2aa.avg,
            do.mt.s2aa.avg,
            do.mt.c3aa.avg,
            do.mt.s3aa.avg)

        avg_embd_bytes = np.mean([msg.size for msg in do.embedded_messages])
        avg_extt_bytes = np.mean([msg.size for msg in do.extracted_messages])
        total_pixels = tz.reduce(operator.mul, do.cover_images.shape[2:])
        self.logger.info(
            '[embd-payload=%6db/%6dP(%5.2f)]'
            '[extt-payload=%6db/%8dP(%5.2f)],'
            '[msg-diff=%10.3fb/%10db(%05.2f%%)]'
            '[msg-EDis=%10.3fb/%10db(%05.2f%%)]',  #
            avg_embd_bytes * 8,
            total_pixels,
            avg_embd_bytes * 8 / total_pixels,
            avg_extt_bytes * 8,
            total_pixels,
            avg_extt_bytes * 8 / total_pixels,
            do.mt.msg_diff.avg,
            avg_embd_bytes * 8,
            do.mt.msg_diff.avg / (avg_embd_bytes * 8) * 100,
            do.mt.msg_edis.avg,
            avg_embd_bytes * 8,
            do.mt.msg_edis.avg / (avg_embd_bytes * 8) * 100)
Example #16
def mask(v):
    # OR together the per-region boolean masks computed for `v`.
    return reduce(logical_or, (__mask(*reg, v=v) for reg in regs))
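
`__mask` and `regs` come from the enclosing scope; a self-contained sketch of the same OR-reduction idiom over NumPy interval masks (all names here are hypothetical):

from functools import reduce

import numpy as np
from numpy import logical_or

def interval_mask(lo, hi, v):
    # Hypothetical stand-in for __mask: True where lo <= v < hi.
    return (v >= lo) & (v < hi)

regs = [(0, 2), (5, 7)]  # hypothetical regions
v = np.arange(10)
combined = reduce(logical_or, (interval_mask(*reg, v=v) for reg in regs))
print(v[combined])  # [0 1 5 6]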