Example #1
def stereomers(xgr):
    """ all stereomers, ignoring this graph's assignments
    """
    bool_vals = (False, True)

    def _expand_atom_stereo(sgr):
        atm_ste_keys = stereogenic_atom_keys(sgr)
        nste_atms = len(atm_ste_keys)
        sgrs = [
            _set_atom_stereo_parities(
                sgr, dict(zip(atm_ste_keys, atm_ste_par_vals)))
            for atm_ste_par_vals in _product(bool_vals, repeat=nste_atms)
        ]
        return sgrs

    def _expand_bond_stereo(sgr):
        bnd_ste_keys = stereogenic_bond_keys(sgr)
        nste_bnds = len(bnd_ste_keys)
        sgrs = [
            _set_bond_stereo_parities(
                sgr, dict(zip(bnd_ste_keys, bnd_ste_par_vals)))
            for bnd_ste_par_vals in _product(bool_vals, repeat=nste_bnds)
        ]
        return sgrs

    last_sgrs = []
    sgrs = [_without_stereo_parities(xgr)]

    while sgrs != last_sgrs:
        last_sgrs = sgrs
        sgrs = list(_chain(*map(_expand_atom_stereo, sgrs)))
        sgrs = list(_chain(*map(_expand_bond_stereo, sgrs)))

    return tuple(sorted(sgrs, key=_frozen))
Example #2
 def __init__(self, x, y, e, *args, line_options=None, error_options=None, texlabel=None, legendentry=None, **kwargs):
     super().__init__(*args, **kwargs)
     self.line = CPlot(Coordinates(zip(x, y)), texlabel=texlabel, legendentry=legendentry)
     self.line.options.add(*args, **kwargs)
     if line_options is not None:
         self.line.options.add(line_options)
     ex = _chain(x, reversed(x))
     ey = _chain((yi + ei[0] for yi, ei in zip(y, e)), (yi-ei[1] for yi, ei in reversed(list(zip(y, e)))))
     self.error = CPlot(Coordinates(zip(ex, ey)), 'fill', 'forget plot', draw='none', mark='none')
     self.error['fill opacity'] = 0.1
     self.error.options.add(*args, **kwargs)
     if error_options is not None:
         self.error.options.add(error_options)
     self.children = [self.error, self.line]
Example #3
 def __or__(self, other):
     if isinstance(self, group):
         if isinstance(other, group):
             # group() | group() -> single group
             return group(_chain(self.tasks, other.tasks), app=self.app)
         # group() | task -> chord
         return chord(self, body=other, app=self._app)
     elif isinstance(other, group):
         # task | group() -> unroll group with one member
         other = maybe_unroll_group(other)
         return chain(self, other, app=self._app)
     if not isinstance(self, chain) and isinstance(other, chain):
         # task | chain -> chain
         return chain((self, ) + other.tasks, app=self._app)
     elif isinstance(other, chain):
         # chain | chain -> chain
         return chain(*self.tasks + other.tasks, app=self._app)
     elif isinstance(self, chord):
         sig = self.clone()
         sig.body = sig.body | other
         return sig
     elif isinstance(other, Signature):
         if isinstance(self, chain):
             # chain | task -> chain
             return chain(*self.tasks + (other, ), app=self._app)
         # task | task -> chain
         return chain(self, other, app=self._app)
     return NotImplemented
Example #4
    def _resolved_entry_phase_affordancesets(self, phase, affordances):

        upstream_affordancesets = _set()
        downstream_affordances = affordances.unfrozen_copy()
        downstream_affordances.inputs = '*'
        downstream_affordances.outputs = '*'
        for resolved_input_affordances \
                in self._resolved_phase_input_affordancesets\
                    (phase, 'upstream',
                     reverse_affordances=downstream_affordances):
            upstream_affordances = resolved_input_affordances.unfrozen()
            upstream_affordances.inputs = '*'
            upstream_affordances.outputs = affordances.outputs
            upstream_affordancesets.add(upstream_affordances.frozen())

        self.logger.cond((_logging.DEBUG,
                          lambda: 'resolved upstream affordance sets {}'
                                   .format(upstream_affordancesets)))

        downstream_affordancesets = \
            _frozenset(_chain(*[self._resolved_phase_output_affordancesets
                                 (phase, 'downstream',
                                  reverse_affordances=upstream_affordances)
                                for upstream_affordances
                                in upstream_affordancesets]))

        self.logger.cond((_logging.DEBUG,
                          lambda: 'resolved downstream affordance sets {}'
                                   .format(downstream_affordancesets)))

        return downstream_affordancesets
Example #5
 def flatten_links(self):
     return list(
         chain_from_iterable(
             _chain(
                 [[self]],
                 (link.flatten_links()
                  for link in maybe_list(self.options.get("link")) or []))))
Example #6
def provisionsets_combinations(provisionsets, choose0=True):

    """
    The authentication security provision sets yielded by combinations
    of the given sets.

    :param provisionsets:
        A set of security provision sets.
    :type provisionsets: ~[:class:`ProvisionSetABC`]

    :param bool choose0:
        Whether to include the empty choice.  If true, then the empty set of
        security provisions is always included in the result.  Otherwise, it
        is included only if *provisionsets* contains it.

    :rtype: :class:`ProvisionSetABC`

    """

    if choose0:
        yield iter(provisionsets).next().__class__()

    for combination \
            in _chain(*(_combinations(provisionsets, nchoices)
                        for nchoices in range(1, len(provisionsets) + 1))):
        yield _reduce(_or, combination)
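
A minimal standalone sketch of the combine-and-reduce pattern above, with frozensets of strings standing in for ProvisionSetABC instances and set union standing in for _or (both are stand-ins, not the original types):

from functools import reduce
from itertools import chain, combinations
from operator import or_

provisionsets = [frozenset({'a'}), frozenset({'b', 'c'})]   # hypothetical sets
combos = chain(*(combinations(provisionsets, n)
                 for n in range(1, len(provisionsets) + 1)))
print([sorted(reduce(or_, combo)) for combo in combos])
# [['a'], ['b', 'c'], ['a', 'b', 'c']]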
Example #7
    def min(*iterable, **kwds):
        """
        min(iterable, *[, default, key])
        min(arg1, arg2, *args, *[, key])
        """
        allowed_kwds = ('default', 'key')
        for key in kwds:
            if key not in allowed_kwds:
                msg = "'{0}' is an invalid keyword argument for this function"
                raise TypeError(msg.format(key))

        if len(iterable) == 1:
            iterable = iterable[0]

        try:
            first_item = next(iter(iterable))
            if iter(iterable) is iterable:
                iterable = _chain([first_item], iterable)
        except StopIteration:
            if 'default' not in kwds:
                raise ValueError('min() arg is an empty sequence')
            return kwds['default']

        if 'key' in kwds:
            return _min(iterable, key=kwds['key'])
        return _min(iterable)
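
The emptiness check above peeks with next() and, when the argument is a one-shot iterator, stitches the first item back on with itertools.chain. A minimal standalone sketch of that idiom (names here are illustrative, not from the original):

from itertools import chain

def first_and_rest(iterable):
    # Peek at the first element, then rebuild an equivalent stream so that
    # a one-shot iterator is not left partially consumed.
    it = iter(iterable)
    first = next(it)
    return first, chain([first], it)

head, rest = first_and_rest(x * x for x in range(1, 5))
print(head)        # 1
print(min(rest))   # 1 -- the peeked element is still part of the stream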
Example #8
def _atom_stereo_coordinates(anchor_key, atm_key, atm_ngb_keys_dct, xyz_dct,
                             key_sorter, parity):
    """ assign atom-stereo coordinates from a stencil
    """
    stencil_xyzs = (
        (0, 0, 0),  # atm 1
        (-1, 0, 0),  # atm 1 ngb 0
        (0, 1, 0),  # atm 1 ngb 1
        (0, 0, (-1)**(not parity)),  # atm 1 ngb 2
        (0, -1, 0))  # atm 1 ngb 3

    assert atm_key in atm_ngb_keys_dct
    atm_ngb_keys = atm_ngb_keys_dct[atm_key]

    stencil_keys = list(_chain([atm_key], key_sorter(atm_ngb_keys)))

    assert len(stencil_keys) == len(stencil_xyzs)
    xyz_dct = dict.copy(xyz_dct)
    xyz_dct.update(
        _from_stencil(atm_key, anchor_key, xyz_dct, stencil_keys,
                      stencil_xyzs))

    boundary_edges = tuple((atm_key, ngb_key) for ngb_key in atm_ngb_keys
                           if ngb_key != anchor_key)

    return xyz_dct, boundary_edges
Example #9
    def _make_tables(exprs):
        def append_to_all_values(tbl, item):
            for L in tbl.itervalues(): L.append(item)
        includingUnknownReq = any(e is None for e in exprs)
        exprAndReqs = [(expr, expr.getMatchCandidateForLookAhead()) for expr in exprs]

        ns, ls = [], []
        for r in filter(None, (r for _, r in exprAndReqs)):
            if r.nodes is not ANY_ITEM: ns.extend(r.nodes)
            if r.literals is not ANY_ITEM: ls.extend(r.literals)

        ntbl = dict((lbl, []) for lbl in ns)
        unknown_nlst = []
        ltbl = dict((s, []) for s in ls)
        unknown_llst = []
        elst = []
        for expr, r in exprAndReqs:
            if r is not None:
                if r.nodes is not ANY_ITEM:
                    for lbl in r.nodes: ntbl[lbl].append(expr)
                else:
                    append_to_all_values(ntbl, expr)
                    unknown_nlst.append(expr)
                if r.literals is not ANY_ITEM:
                    for s in r.literals: ltbl[s].append(expr)
                else:
                    append_to_all_values(ltbl, expr)
                    unknown_llst.append(expr)
                if r.emptyseq: elst.append(expr)
            else:
                for L in _chain(ntbl.itervalues(), ltbl.itervalues(), [unknown_nlst, unknown_llst, elst]):
                    L.append(expr)
        return ntbl, unknown_nlst, ltbl, unknown_llst, elst, includingUnknownReq
Example #10
def _devel_searchpaths_iter(names,
                            srcs,
                            require=False,
                            include_deps=False,
                            distinction='dist',
                            extras=False,
                            update_cmd=None,
                            seen_paths=None,
                            logger=None,
                            loglevel=_logging.DEBUG):

    if seen_paths is None:
        seen_paths = set()

    missing_names = []
    for name in names:
        name = _projects_misc.normalized_project_name(name)

        path = None
        for path in _chain(*(_devel_project_searchpaths_iter(name, src=src)
                             for src in srcs)):
            if path in seen_paths:
                continue
            seen_paths.add(path)

            if update_cmd:
                _misc.update_metadata_at(path, cmd=update_cmd, logger=logger,
                                         loglevel=loglevel)

            yield path

            if include_deps:
                project = _projects.Project.from_path(path)
                env = _envs.DistEnv(project.dists)
                project_extras = \
                    _misc.normalized_extras(extras, env=env).get(name, ())
                requirements_names = \
                    (_requirements.normalized_requirement_name(requirement)
                     for requirement
                     in project.requirements(extras=project_extras))
                for dep_path \
                        in _devel_searchpaths_iter(requirements_names,
                                                   srcs=srcs,
                                                   require=False,
                                                   include_deps=include_deps,
                                                   distinction=distinction,
                                                   extras=extras,
                                                   update_cmd=update_cmd,
                                                   logger=logger,
                                                   loglevel=loglevel,
                                                   seen_paths=seen_paths):
                    yield dep_path

        if path is None:
            missing_names.append(name)
            continue

    if require and missing_names:
        tried_paths = list(_devel_searchpaths_iter(missing_names, srcs=srcs))
        raise _exc.ProjectsNotFound(missing_names, tried_paths)
Example #11
    def __init__(self, m, to, sender, subject=None, cc=None, bcc=None):
        super().__init__()

        self.m = check_m(m)

        to = to_list(to)
        cc = to_list(cc)
        bcc = to_list(bcc)
        if not subject:
            subject = '%s mailer' % (IDENT)
        subject = '%s - %s' % (subject, get_timestamp())

        self.__sender = sender
        self.__recipients = list(_chain(to, cc, bcc))

        _charset.add_charset('utf-8', _charset.QP, _charset.QP, 'UTF-8')

        self.__message = _MIMEMultipart()
        self.__message.add_header('To', ', '.join(to))
        if cc:
            self.__message.add_header('CC', ', '.join(cc))
        self.__message.add_header('From', sender)
        self.__message.add_header('Subject', subject)
        self.__message.add_header('Date', _formatdate())
        self.__message.add_header('X-Mailer', '%s mailer' % (IDENT))

        self.m('mail tool startup done',
               more=dict(to=to, cc=cc, bcc=bcc, sender=sender,
                         subject=subject),
               verbose=False)
Example #12
File: mail.py Project: spookey/photon
    def __init__(self, m, to, sender, subject=None, cc=None, bcc=None):
        super().__init__()

        self.m = check_m(m)

        to = to_list(to)
        cc = to_list(cc)
        bcc = to_list(bcc)
        if not subject:
            subject = '%s mailer' % (IDENT)
        subject = '%s - %s' % (subject, get_timestamp())

        self.__sender = sender
        self.__recipients = list(_chain(to, cc, bcc))

        _charset.add_charset('utf-8', _charset.QP, _charset.QP, 'UTF-8')

        self.__message = _MIMEMultipart()
        self.__message.add_header('To', ', '.join(to))
        if cc:
            self.__message.add_header('CC', ', '.join(cc))
        self.__message.add_header('From', sender)
        self.__message.add_header('Subject', subject)
        self.__message.add_header('Date', _formatdate())
        self.__message.add_header('X-Mailer', '%s mailer' % (IDENT))

        self.m(
            'mail tool startup done',
            more=dict(to=to, cc=cc, bcc=bcc, sender=sender, subject=subject),
            verbose=False
        )
Example #13
        def inner(*args, **kwargs):
            # zip terminates as soon as one iterator is exhausted, so we don't
            # have to worry about the case where some args are specified and
            # some use their default value: len(args) (i.e. the specified
            # args) will cause automatic termination
            args_mapping = zip(argspec.args, args)
            
            cache_keys = []
            for key, item in _chain(args_mapping, kwargs.items()):
                if key in cacheable_keys:
                    cache_keys.append((key, item))
            # lists are unhashable and can't be used as dict keys
            cache_keys.sort()
            cache_keys = tuple(val for key, val in cache_keys)
            
            recalculate = False
            cached = backend.get(cache_keys)
            if cached:
                expiry, output = cached
                expire = lifetime_func(expiry)
                if expire:
                    recalculate = True
            else:
                recalculate = True
                    
            if recalculate:
                output = func(*args, **kwargs)
                expiry = lifetime_func()
                backend[cache_keys] = expiry, output

            return output
Example #14
def _explicit_stereo(sgr):
    """ make the hydrogens at atom and bond stereo sites explicit
    """
    atm_ste_keys = atom_stereo_keys(sgr)
    bnd_ste_keys = bond_stereo_keys(sgr)
    bnd_ste_atm_keys = set(_chain(*bnd_ste_keys))
    ste_atm_keys = atm_ste_keys | bnd_ste_atm_keys
    return _explicit(sgr, atm_keys=ste_atm_keys)
Example #15
def ensure_row_length(matrix, ncols, fill=''):
    """Make sure each row in matrix has length ncols.

    If a row is shorter than ncols, it is padded with fill elements;
    longer rows are truncated to ncols.
    Returns a 2-d tuple.
    """
    tcols = (fill, ) * ncols
    return tuple(tuple(_islice(_chain(x, tcols), ncols)) for x in matrix)
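
A standalone restatement of the pad-and-slice idiom with sample data (hypothetical values, not from the original): chaining each row with a tuple of fill values and slicing to ncols pads short rows and trims long ones.

from itertools import chain, islice

rows = [(1, 2), (3, 4, 5, 6), ()]
ncols, fill = 3, 0
tcols = (fill,) * ncols
print(tuple(tuple(islice(chain(row, tcols), ncols)) for row in rows))
# ((1, 2, 0), (3, 4, 5), (0, 0, 0))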
Example #16
 def flatten_links(self):
     """Return a recursive list of dependencies (unchain if you will,
     but with links intact)."""
     return list(_chain.from_iterable(_chain(
         [[self]],
         (link.flatten_links()
             for link in maybe_list(self.options.get('link')) or [])
     )))
Example #17
def make_legacy_params_dict(req):
    """convert new-style params to old style athana params dict"""
    req.params = params = {}
    for key, values in _chain(req.form.iterlists(), req.args.iterlists()):
        value = ";".join(values)
        params[key] = value
#             params[key.encode("utf8")] = value.encode("utf8")
    params.update(req.files.iteritems())
Example #18
File: canvas.py Project: csunny/celery
 def flatten_links(self):
     """Return a recursive list of dependencies (unchain if you will,
     but with links intact)."""
     return list(_chain.from_iterable(_chain(
         [[self]],
         (link.flatten_links()
             for link in maybe_list(self.options.get('link')) or [])
     )))
Example #19
def _bond_stereo_coordinates(anchor_key, bnd_key, atm_ngb_keys_dct, xyz_dct,
                             key_sorter, parity):
    """ assign bond-stereo coordinates from a stencil
    """
    stencil1_xyzs = (
        (0, 0, 0),  # atm 1
        (-1, 0, 0),  # atm 1 ngb 0 (anchor?)
        (1, 0, 0))  # atm 1 ngb 1 (anchor?)

    stencil2_xyzs = (
        (0, 1, 0),  # atm 2
        ((-1)**(not parity), 1, 0),  # atm 2 ngb 0
        ((-1)**parity, 1, 0))  # atm 2 ngb 1

    atm1_key, atm2_key = bnd_key
    assert atm1_key in atm_ngb_keys_dct
    assert atm2_key in atm_ngb_keys_dct
    atm1_ngb_keys = atm_ngb_keys_dct[atm1_key]
    atm2_ngb_keys = atm_ngb_keys_dct[atm2_key]

    if anchor_key in atm2_ngb_keys:
        atm1_key, atm2_key = atm2_key, atm1_key
        atm1_ngb_keys, atm2_ngb_keys = atm2_ngb_keys, atm1_ngb_keys

    atm1_ngb_keys -= {atm2_key}
    atm2_ngb_keys -= {atm1_key}

    stencil_xyz_dct = {}
    stencil_xyz_dct.update(
        zip(_chain([atm1_key], key_sorter(atm1_ngb_keys)), stencil1_xyzs))
    stencil_xyz_dct.update(
        zip(_chain([atm2_key], key_sorter(atm2_ngb_keys)), stencil2_xyzs))

    xyz_dct = dict.copy(xyz_dct)
    xyz_dct.update(
        _from_stencil(atm1_key, anchor_key, xyz_dct, stencil_xyz_dct.keys(),
                      stencil_xyz_dct.values()))

    boundary_edges = tuple(
        _chain(
            ((atm1_key, ngb_key) for ngb_key in atm1_ngb_keys - {anchor_key}),
            ((atm2_key, ngb_key) for ngb_key in atm2_ngb_keys)))

    return xyz_dct, boundary_edges
Example #20
def bond_induced_subgraph(xgr, bnd_keys):
    """ the subgraph induced by a subset of the bonds
    """
    atm_keys = set(_chain(*bnd_keys))
    bnd_keys = set(bnd_keys)
    assert atm_keys <= _atom_keys(xgr)
    assert bnd_keys <= _bond_keys(xgr)
    atm_dct = _by_key(_atoms(xgr), atm_keys)
    bnd_dct = _by_key(_bonds(xgr), bnd_keys)
    return _from_atoms_and_bonds(atm_dct, bnd_dct)
Example #21
 def _zip_longest(*args, **kwds):
     # izip_longest('ABCD', 'xy', fillvalue='-') --> Ax By C- D-
     fillvalue = kwds.get('fillvalue')
     def sentinel(counter = ([fillvalue]*(len(args)-1)).pop):
         yield counter()         # yields the fillvalue, or raises IndexError
     fillers = _repeat(fillvalue)
     iters = [_chain(it, sentinel(), fillers) for it in args]
     try:
         for tup in izip(*iters):
             yield tup
     except IndexError:
         pass
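
The termination logic above is the subtle part: a shared pool of len(args) - 1 fill values means only the last iterator to run dry triggers an IndexError. A standalone sketch of that mechanism with two hand-built padded iterators (illustrative names, not from the recipe):

from itertools import chain, repeat

fillvalue = '-'
pool = [fillvalue] * 1          # len(args) - 1 shared fill values for 2 inputs
def sentinel():
    yield pool.pop()            # one fill value, or IndexError once the pool is empty

short = chain(iter('xy'),   sentinel(), repeat(fillvalue))
long_ = chain(iter('ABCD'), sentinel(), repeat(fillvalue))

out = []
try:
    while True:
        out.append((next(long_), next(short)))
except IndexError:              # raised by the last sentinel, ends the pairing
    pass
print(out)                      # [('A', 'x'), ('B', 'y'), ('C', '-'), ('D', '-')]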
Example #22
 def _zip_longest(*args, **kwds):
     # izip_longest('ABCD', 'xy', fillvalue='-') --> Ax By C- D-
     fillvalue = kwds.get('fillvalue')
     def sentinel(counter = ([fillvalue]*(len(args)-1)).pop):
         yield counter()         # yields the fillvalue, or raises IndexError
     fillers = _repeat(fillvalue)
     iters = [_chain(it, sentinel(), fillers) for it in args]
     try:
         for tup in izip(*iters):
             yield tup
     except IndexError:
         pass
Example #23
 def make_sig(*fields):
     Parameter, Signature = _inspect.Parameter, _inspect.Signature
     pos_parms = []
     kw_parms = []
     for field in fields:
         if isinstance(field, str):
             pos_parms.append(
                 Parameter(field, Parameter.POSITIONAL_OR_KEYWORD))
         else:
             name, default = field
             kw_parms.append(
                 Parameter(name, Parameter.KEYWORD_ONLY, default=default))
     return Signature(_chain(pos_parms, kw_parms))
Example #24
 def __new__(cls, clsname, bases, clsdict):
     # print(clsname, '->', bases)
     # print(clsdict)
     fields = clsdict.get('_fields', [])
     kwfields = clsdict.get('_kwfields', {})
     parameters = _chain(
         fields,
         kwfields.items(),
         *StructureMeta._get_fields(*bases),
     )
     sig = StructureMeta.make_sig(*parameters)
     clsdict['__signature__'] = sig
     clsdict['__slots__'] = tuple(sig.parameters)
     return super().__new__(cls, clsname, bases, clsdict)
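
What make_sig hands to Signature, and why tuple(sig.parameters) is usable as __slots__, can be seen in a small standalone sketch (same _inspect and _chain aliases as the snippets above; the parameter names are made up):

import inspect as _inspect
from itertools import chain as _chain

Parameter, Signature = _inspect.Parameter, _inspect.Signature
pos_parms = [Parameter('x', Parameter.POSITIONAL_OR_KEYWORD),
             Parameter('y', Parameter.POSITIONAL_OR_KEYWORD)]
kw_parms = [Parameter('z', Parameter.KEYWORD_ONLY, default=0)]

sig = Signature(_chain(pos_parms, kw_parms))
print(sig)                    # (x, y, *, z=0)
print(tuple(sig.parameters))  # ('x', 'y', 'z') -- what ends up in __slots__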
Example #25
def _interpret_range_strings(rng_strs):

    def _interpret_range_string(rng_str):
        split_rng = str(rng_str).split('-')
        if len(split_rng) == 1:
            rng = [int(split_rng[-1])]
        elif len(split_rng) == 2:
            start, stop = map(int, split_rng)
            rng = list(range(start, stop+1))
        else:
            raise ValueError("Failed to interpret index ranges")
        return rng

    return tuple(_chain(*map(_interpret_range_string, rng_strs)))
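
A standalone restatement with hypothetical input, showing how itertools.chain flattens the per-string ranges into one tuple:

from itertools import chain

def _interpret_range_string(rng_str):
    # '3' -> [3];  '7-9' -> [7, 8, 9]
    parts = str(rng_str).split('-')
    if len(parts) == 1:
        return [int(parts[0])]
    start, stop = map(int, parts)
    return list(range(start, stop + 1))

print(tuple(chain(*map(_interpret_range_string, ['3', '7-9', '12']))))
# (3, 7, 8, 9, 12)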
Example #26
    def from_path(cls, path, name=None, dists_relpaths=('.',)):

        dists_paths = [_os.path.join(path, dists_relpath)
                       for dists_relpath in dists_relpaths]
        dists = list(_chain(*(_pkgr.find_distributions(dists_path, only=True)
                              for dists_path in dists_paths)))

        if not dists:
            raise _exc.DistNotFound(location=dists_paths)

        if name is None:
            name = dists[0].project_name

        return cls(name, dists, location=path)
Example #27
    def _call(self, *, ws, pos):
        assert isinstance(ws, _SegParagraph)
        assert isinstance(pos, _SegParagraph)

        conparse_text = []
        for ws_sent, pos_sent in zip(ws, pos):
            conparse_sent_text = []
            ws_clause = []
            pos_clause = []
            for ws_token, pos_token in _chain(zip(ws_sent, pos_sent), [
                (None, None),
            ]):

                # Skip WHITESPACE
                if pos_token == 'WHITESPACE':
                    continue

                # Segment clauses by punctuations
                if pos_token is None or (pos_token.endswith('CATEGORY')
                                         and pos_token != 'PAUSECATEGORY'):
                    if ws_clause:
                        wspos_clause_text = _WsPosSentence.to_text(
                            ws_clause, pos_clause)
                        for conparse_clause_text in self._core.apply_list(
                            [wspos_clause_text]):
                            conparse_sent_text.append([
                                self._normalize(conparse_clause_text),
                                '',
                            ])

                    if ws_token:
                        if not conparse_sent_text:
                            conparse_sent_text.append([
                                None,
                                '',
                            ])
                        conparse_sent_text[-1][1] += ws_token

                    ws_clause = []
                    pos_clause = []

                else:
                    ws_clause.append(self._half2full(ws_token))
                    pos_clause.append(pos_token)

            conparse_text.append(conparse_sent_text)
        conparse = _ParseParagraph.from_list(conparse_text)

        return conparse
Example #28
def load_recursive(path,
                   encoding="cp932",
                   node_type=_OtoNode,
                   greedy_recursion=True):
    if greedy_recursion:
        inis = _glob(_path.join(path, "**/oto.ini"), recursive=True)
    else:
        inis = _chain(_glob(_path.join(path, "oto.ini")),
                      _glob(_path.join(path, "*/oto.ini")))

    oto_dict = {}
    for oto_ini in inis:
        oto_dict.update(
            load(oto_ini, root=path, encoding=encoding, node_type=node_type))
    return oto_dict
Example #29
File: canvas.py Project: zzkristy/celery
    def __or__(self, other):
        if isinstance(self, group):
            if isinstance(other, group):
                return group(_chain(self.tasks, other.tasks), app=self.app)
            return chord(self, body=other, app=self._app)
        elif isinstance(other, group):
            other = maybe_unroll_group(other)

        if not isinstance(self, chain) and isinstance(other, chain):
            return chain((self, ) + other.tasks, app=self._app)
        elif isinstance(other, chain):
            return chain(*self.tasks + other.tasks, app=self._app)
        elif isinstance(other, Signature):
            if isinstance(self, chain):
                return chain(*self.tasks + (other, ), app=self._app)
            return chain(self, other, app=self._app)
        return NotImplemented
Example #30
def _fill_atom_inchi_numbers(xgr, bbn_ich_num_dct):
    """ atom inchi number dictionary from inchi-sorted backbone keys
    """
    atm_ich_num_dct = bbn_ich_num_dct.copy()

    ich_srt_bbn_keys = _keys_sorted_by_value(bbn_ich_num_dct)
    atm_exp_hyd_keys_dct = atom_explicit_hydrogen_keys(xgr)
    ich_srt_bbn_exp_hyd_keys = _values_by_key(atm_exp_hyd_keys_dct,
                                              ich_srt_bbn_keys)
    ich_srt_exp_hyd_keys = tuple(_chain(*ich_srt_bbn_exp_hyd_keys))
    first_exp_hyd_ich_num = min(bbn_ich_num_dct.values()) - 1
    atm_ich_num_dct.update({
        exp_hyd_key: first_exp_hyd_ich_num - exp_hyd_srt_idx
        for exp_hyd_srt_idx, exp_hyd_key in enumerate(ich_srt_exp_hyd_keys)
    })
    assert set(atm_ich_num_dct.keys()) == atom_keys(xgr)
    return atm_ich_num_dct
Example #31
File: canvas.py Project: buckensl/celery
    def __or__(self, other):
        if isinstance(self, group):
            if isinstance(other, group):
                return group(_chain(self.tasks, other.tasks), app=self.app)
            return chord(self, body=other, app=self._app)
        elif isinstance(other, group):
            other = maybe_unroll_group(other)

        if not isinstance(self, chain) and isinstance(other, chain):
            return chain((self,) + other.tasks, app=self._app)
        elif isinstance(other, chain):
            return chain(*self.tasks + other.tasks, app=self._app)
        elif isinstance(other, Signature):
            if isinstance(self, chain):
                return chain(*self.tasks + (other,), app=self._app)
            return chain(self, other, app=self._app)
        return NotImplemented
Example #32
        def retrying(*args, **kwargs):
            problems = []
            for delay in _chain(DataFeed.retryDelays, [None]):
                try:
                    # attempt call
                    return func(*args, **kwargs)

                # we need to try again
                except DataFeedException as problem:
                    problems.append(problem)
                    if delay is None:
                        Logger.debug(DataFeed, problems)
                        raise MaxRetriesException('retryDelays exhausted ' + str(problem))
                    else:
                        # log exception and wait
                        Logger.debug(DataFeed, problem)
                        Logger.error(DataFeed, "No reply... -- delaying for %ds" % delay)
                        sleep(delay)
Example #33
        def retrying(*args, **kwargs):
            problems = []
            for delay in _chain(retryDelays, [None]):
                try:
                    # attempt call
                    return func(*args, **kwargs)

                # we need to try again
                except RequestException as problem:
                    problems.append(problem)
                    if delay is None:
                        logger.error(problems)
                        raise
                    else:
                        # log exception and wait
                        logger.exception(problem)
                        logger.info("-- delaying for %ds", delay)
                        sleep(delay)
Example #34
	def __init__(self):

		self.relation_count = 1
	
		if os.path.exists("wn_hierarchy_data.npy"):
			self.data = numpy.load("wn_hierarchy_data.npy")
			self.train_data = numpy.load("wn_hierarchy_train_data.npy")
			self.valid_data = numpy.load("wn_hierarchy_valid_data.npy")
			self.test_data = numpy.load("wn_hierarchy_test_data.npy")
			self.entity_count, self.id2entity, self.entity2id = pickle.load(open("wn_hierarchy_metadata.pickle", "rb"))
		else:
			self.data = list(map(lambda x: list(map(str, x.strip().split('\t'))), open("WN_HIERARCHY/noun_closure.tsv").readlines()))
			self.id2entity = dict(enumerate(sorted(list(set(list(_chain(*self.data)))))))
			self.entity2id = {v: k for k, v in self.id2entity.items()}
			self.entity_count = len(self.id2entity)
			self.data = numpy.array(list(map(lambda x: [self.entity2id[x[0]], self.entity2id[x[1]], 0], self.data)))
			numpy.random.shuffle(self.data)
			self.train_data = self.data[:-50000]
			self.valid_data = self.data[-50000:-25000]
			self.test_data = self.data[-25000:]
			numpy.save("wn_hierarchy_data", self.data)
			numpy.save("wn_hierarchy_train_data", self.train_data)
			numpy.save("wn_hierarchy_valid_data", self.valid_data)
			numpy.save("wn_hierarchy_test_data", self.test_data)
			pickle.dump((self.entity_count, self.id2entity, self.entity2id), open("wn_hierarchy_metadata.pickle", "wb"))

		if os.path.exists("wn_hierarchy_all_data.pickle"):
			self.all_data = pickle.load(open("wn_hierarchy_all_data.pickle", "rb"))
		else:
			self.all_data = set(map(tuple, self.data.tolist()))
			pickle.dump(self.all_data, open("wn_hierarchy_all_data.pickle", "wb"))

		if os.path.exists("wn_hierarchy_hierarchy.pickle"):
			self.hierarchy = pickle.load(open("wn_hierarchy_hierarchy.pickle", "rb"))
		else:
			self.hierarchy = self.create_hierarchy()
			pickle.dump(self.hierarchy, open("wn_hierarchy_hierarchy.pickle", "wb"))
			del self._children_of_all_entities

		if os.path.exists("wn_hierarchy_entity_distances.npy"):
			self.entity_distances = numpy.load("wn_hierarchy_entity_distances.npy")
		else:
			self.entity_distances = self.get_entity_distances()
			numpy.save("wn_hierarchy_entity_distances", self.entity_distances)
Example #35
def implicit(xgr, atm_keys=None):
    """ make the hydrogens at these atoms implicit
    """
    atm_keys = backbone_keys(xgr) if atm_keys is None else atm_keys
    atm_keys = list(atm_keys)
    atm_imp_hyd_vlcs = _values_by_key(_atom_implicit_hydrogen_valences(xgr),
                                      atm_keys)

    atm_exp_hyd_keys = _values_by_key(atom_explicit_hydrogen_keys(xgr),
                                      atm_keys)
    atm_exp_hyd_vlcs = tuple(map(len, atm_exp_hyd_keys))
    atm_tot_hyd_vlcs = numpy.add(atm_imp_hyd_vlcs, atm_exp_hyd_vlcs)

    exp_hyd_keys = tuple(_chain(*atm_exp_hyd_keys))

    xgr = _set_atom_implicit_hydrogen_valences(
        xgr, dict(zip(atm_keys, atm_tot_hyd_vlcs)))
    xgr = _delete_atoms(xgr, exp_hyd_keys)
    return xgr
Example #36
        def retrying(*args, **kwargs):
            problems = []
            for delay in _chain(DataFeed.retryDelays, [None]):
                try:
                    # attempt call
                    return func(*args, **kwargs)

                # we need to try again
                except PoloniexError as problem:
                    problems.append(problem)
                    if delay is None:
                        DataFeed.logger.debug(DataFeed, problems)
                        raise RetryException(
                            'retryDelays exhausted ' + str(problem))
                    else:
                        # log exception and wait
                        DataFeed.logger.debug(DataFeed, problem)
                        DataFeed.logger.info(DataFeed, "-- delaying for %ds" % delay)
                        sleep(delay)
Example #37
def python_entrypoint_reference(value: str) -> bool:
    module, _, rest = value.partition(":")
    if "[" in rest:
        obj, _, extras_ = rest.partition("[")
        if extras_.strip()[-1] != "]":
            return False
        extras = (x.strip()
                  for x in extras_.strip(string.whitespace + "[]").split(","))
        if not all(pep508_identifier(e) for e in extras):
            return False
        _logger.warning(
            f"`{value}` - using extras for entry points is not recommended")
    else:
        obj = rest

    module_parts = module.split(".")
    identifiers = _chain(module_parts,
                         obj.split(".")) if rest else module_parts
    return all(python_identifier(i.strip()) for i in identifiers)
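
The validator relies on str.partition to split a 'module:obj [extras]' reference; a standalone look at that split with a made-up value (not from the original):

value = "mypkg.cli:main [extra1,extra2]"   # hypothetical entry-point reference
module, _, rest = value.partition(":")
obj, _, extras_ = rest.partition("[")
print(module)                              # mypkg.cli
print(obj.strip())                         # main
print([x.strip() for x in extras_.strip(" []").split(",")])
# ['extra1', 'extra2']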
Example #38
        def retrying(*args, **kwargs):
            problems = []
            for delay in _chain(retryDelays, [None]):
                try:
                    # attempt call
                    return func(*args, **kwargs)

                # we need to try again
                except RequestException as problem:
                    problems.append(problem)
                    if delay is None:
                        Logger.debug(func, problems)
                        raise MaxRetriesException(
                            'retryDelays exhausted ' + str(problem))
                    else:
                        # log exception and wait
                        Logger.debug(func, problem)
                        Logger.info(func, "-- delaying for %ds" % delay)
                        sleep(delay)
Example #39
        def retrying(*args, **kwargs):
            problems = []
            for delay in _chain(retryDelays, [None]):
                try:
                    # attempt call
                    return func(*args, **kwargs)

                # we need to try again
                except RequestException as problem:
                    problems.append(problem)
                    if delay is None:
                        logger.debug(problems)
                        raise RetryException('retryDelays exhausted ' +
                                             str(problem))
                    else:
                        # log exception and wait
                        logger.debug(problem)
                        logger.info("-- retrying in %ds", delay)
                        sleep(delay)
Example #40
File: key.py Project: aperezdc/indicium
def join(components, *arg):
    """
    Joins path components to form a normalized key.

        >>> join("foo", "bar", "baz")
        "/foo/bar/baz"
        >>> join(["foo", "bar"], "baz")
        "/foo/bar/baz"

    :param components:
        An iterable value of path components, or a string (which will be
        passed to :func:`split()` to turn it into an iterable value).
    :param arg:
        Additional path components to be appended to the generated key.
    :return:
        Normalized key, as a string.
    """
    if not (isinstance(components, (list, tuple))
            or _isgenerator(components)):
        components = (c for c in components.split("/") if c)
    return "/" + "/".join(_chain(components, arg))
Example #41
    def elements(self):
        '''Iterator over elements repeating each as many times as its count.

        >>> c = Counter('ABCABC')
        >>> sorted(c.elements())
        ['A', 'A', 'B', 'B', 'C', 'C']

        # Knuth's example for prime factors of 1836:  2**2 * 3**3 * 17**1
        >>> prime_factors = Counter({2: 2, 3: 3, 17: 1})
        >>> product = 1
        >>> for factor in prime_factors.elements():     # loop over factors
        ...     product *= factor                       # and multiply them
        >>> product
        1836

        Note, if an element's count has been set to zero or is a negative
        number, elements() will ignore it.

        '''
        # Emulate Bag.do from Smalltalk and Multiset.begin from C++.
        return _chain(*_starmap(_repeat, self.iteritems()))
Example #42
def devel_searchpaths_iter(names=True,
                           src=None,
                           ext_dir=DEVEL_SRC_EXT_DIR,
                           require=False,
                           include_deps=False,
                           distinction='dist',
                           extras=False,
                           update_cmd=None,
                           logger=None,
                           loglevel=_logging.DEBUG):

    if src is None:
        src = devel_src()
    srcs = [src]
    if ext_dir is not None:
        srcs.append(_os.path.join(src, ext_dir))

    if names is True:
        for path in _chain(*([_os.path.join(src_, dirname)
                              for dirname
                              in devel_projects_dirnames_iter(src=src_)]
                             for src_ in srcs)):
            if update_cmd:
                _misc.update_metadata_at(path, cmd=update_cmd, logger=logger,
                                         loglevel=loglevel)

            yield path
        return

    for path in _devel_searchpaths_iter(names,
                                        srcs=srcs,
                                        require=require,
                                        include_deps=include_deps,
                                        distinction=distinction,
                                        extras=extras,
                                        update_cmd=update_cmd,
                                        logger=logger,
                                        loglevel=loglevel):
        yield path
Example #43
File: canvas.py Project: aliscott/celery
 def flatten_links(self):
     return list(chain_from_iterable(_chain([[self]],
             (link.flatten_links()
                 for link in maybe_list(self.options.get('link')) or []))))
Example #44
 def __hash__(self):
     return hash("MatchCandiateForLookAhead") + sum(map(hash, _chain(self.__nodes, self.__literals, self.__emptyseq)))
Example #45
from functools import partial as _partial
from itertools import chain as _chain
from .termformatconstants import FG_COLORS, BG_COLORS, STYLES
from .fmtstr import fmtstr

for att in _chain(FG_COLORS, ("on_" + x for x in BG_COLORS), STYLES):
    locals()[att] = _partial(fmtstr, style=att)
plain = _partial(fmtstr)

if __name__ == "__main__":
    import doctest

    doctest.testmod()
    print((blue("adf")))
    print((blue(on_red("ad"))))
    print((blue("asdf") + on_red("adsf")))
    print(((blue("asdf") + on_red("adsf"))[3:7]))
    f = blue("hey there") + on_red(" Tom!")
    print(f)
    f[1:3] = "ot"
    print((repr(f)))
    print(f)
    f = on_blue(red("stuff"))
    print((repr(f)))
    print((repr(str(f))))
    print(f)
    print(((f + "!")[0:6] + "?"))
Example #46
from functools import partial as _partial
from itertools import chain as _chain
from .termformatconstants import FG_COLORS, BG_COLORS, STYLES
from .formatstring import fmtstr

for att in _chain(FG_COLORS, ('on_'+x for x in BG_COLORS), STYLES):
    locals()[att] = _partial(fmtstr, style=att)
plain = _partial(fmtstr)

if __name__ == '__main__':
    import doctest
    doctest.testmod()
    print((blue('adf')))
    print((blue(on_red('ad'))))
    print((blue('asdf') + on_red('adsf')))
    print(((blue('asdf') + on_red('adsf'))[3:7]))
    f = blue('hey there') + on_red(' Tom!')
    print(f)
    f[1:3] = 'ot'
    print((repr(f)))
    print(f)
    f = on_blue(red('stuff'))
    print((repr(f)))
    print((repr(str(f))))
    print(f)
    print(((f + '!')[0:6] + '?'))
Example #47
import abc as _abc
import re as _re
from itertools import chain as _chain
from itertools import combinations as _combinations

from spruce.collections import frozenusetset as _frozenusetset

from ... import digest as _digest
from .. import _std as _std_http


_DIGEST_USER_OPTIONAL_TOKENS = ('qop', 'digest_algorithm', 'client_nonce',
                                'opaque', 'server_nonce_use_count')

_DIGEST_USER_REQUIRED_TOKENS = ('user', 'realm', 'server_nonce', 'digest_uri',
                                'digest')

_DIGEST_USER_TOKENSETS = \
    _frozenusetset(_chain(_DIGEST_USER_REQUIRED_TOKENS, optional_tokens)
                   for optional_tokens
                   in _chain((),
                             *(_combinations(_DIGEST_USER_OPTIONAL_TOKENS,
                                             n + 1)
                               for n
                               in range(len(_DIGEST_USER_OPTIONAL_TOKENS)))))


class HttpDigestClerk(_std_http.HttpStandardClerk):

    """An authentication clerk for HTTP Digest authentication"""

    __metaclass__ = _abc.ABCMeta

    _DIGEST_USER_OPTIONAL_TOKENS = _DIGEST_USER_OPTIONAL_TOKENS
Example #48
                                     'CST': -6,
                                     'MDT': -6,
                                     'MST': -7,
                                     'PDT': -7,
                                     'PST': -8,
                                     }
"""
A mapping of names to offset hours for the obsolete time zone names
specified by :rfc:`Internet Message Format Obsolete Date and Time \
<5322#section-4.3>`

:type: {:obj:`str`: :obj:`int`}

"""
for military_tz_name in (chr(ord_) for ord_ in _chain(range(65, 74),
                                                      range(75, 91),
                                                      range(97, 106),
                                                      range(107, 122))):
    RFC5322_OBSOLETE_TZ_HOURS_BY_NAME[military_tz_name] = 0
del military_tz_name

RFC5322_OBSOLETE_TZ_NAMES_RE = \
    _re.compile('|'.join(RFC5322_OBSOLETE_TZ_HOURS_BY_NAME.keys()) + '$')
"""
A regular expression that matches any of the obsolete time zone names
specified by :rfc:`Internet Message Format Obsolete Date and Time \
<5322#section-4.3>`

:type: :class:`re.RegexObject`

"""
Example #49
def tokens_combinations(tokens_or_names):
    names = tokens_names(tokens_or_names)
    return _chain(*(_combinations(names, ntokens)
                    for ntokens in range(len(names) + 1)))
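
The chain-over-combinations call is the standard power-set idiom (here including the empty choice, since the range starts at 0); a minimal standalone version:

from itertools import chain, combinations

def powerset(iterable):
    s = list(iterable)
    return chain(*(combinations(s, n) for n in range(len(s) + 1)))

print(list(powerset('ab')))   # [(), ('a',), ('b',), ('a', 'b')]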
Example #50
 def extras(self):
     return set(_chain(*(dist.extras for dist in self.dists)))
Example #51
 def requirements(self, extras=()):
     return set(_chain(*(dist.requires(extras=extras)
                         for dist in self.dists)))
Example #52
def projects_dists_iter(projects):
    return _chain(*(project.dists for project in projects))
Example #53
def tdfilesiterschain(filenames, default=_stdin):
    """return chained iterator for the sequence of files, or the default"""    
    return _chain(*tdfilesiters(filenames, default))
Example #54
 def __iter__(self):
     return _chain(*self._items_by_name.values())