def contribute_to_class(self, cls, name):
    """Django field hook: install this cached field and its helper fields/methods on *cls*.

    Adds, for a field named ``foo``:
      * ``cls.recalculate_foo()``      -- forces recomputation
      * ``cls.foo``                    -- property over the cached value
      * the real storage field         -- under ``self.cached_field_name``
      * a "recalculation needed" flag  -- BooleanField
      * (optionally) an expiration DateTimeField plus ``expire_foo_after()``
      * ``cls.flag_foo_as_stale()``
    """
    ensure_class_has_cached_field_methods(cls)
    self.name = name
    # Bound helper that recomputes the cached value on demand.
    setattr(cls, 'recalculate_{}'.format(self.name), curry(cls._recalculate_FIELD, field=self))
    # Expose the cached value through a plain property so attribute access
    # transparently reads/writes the backing field.
    setattr(
        cls, self.name,
        property(curry_for_property(cls._get_FIELD, field=self),
                 curry_for_property(cls._set_FIELD, field=self)))
    # Recover the concrete Django field class this mixin was combined with
    # (the only base besides CachedFieldMixin) and instantiate it with the
    # constructor arguments saved earlier.
    proper_field = (set(type(self).__bases__) - {CachedFieldMixin}).pop()  # :MC: ew.
    proper_field = proper_field(*self.init_args_for_field, **self.init_kwargs_for_field)
    setattr(cls, self.cached_field_name, proper_field)
    proper_field.contribute_to_class(cls, self.cached_field_name)
    # Boolean flag: does the cached value need recomputation?
    flag_field = models.BooleanField(
        default=True, db_index=self.db_index_on_recalculation_needed_field)
    setattr(cls, self.recalculation_needed_field_name, flag_field)
    flag_field.contribute_to_class(cls, self.recalculation_needed_field_name)
    if self.temporal_triggers:
        # Time-based invalidation: an expiry timestamp plus a helper to set it.
        setattr(cls, 'expire_{}_after'.format(self.name),
                curry(cls._expire_FIELD_after, field=self))
        expire_field = models.DateTimeField(
            null=True, db_index=self.db_index_on_temporal_trigger_field)
        setattr(cls, self.expiration_field_name, expire_field)
        expire_field.contribute_to_class(cls, self.expiration_field_name)
    # NOTE(review): placed outside the temporal_triggers branch — staleness
    # flagging appears unconditional; confirm against upstream source.
    setattr(cls, 'flag_{}_as_stale'.format(self.name),
            curry(cls._flag_FIELD_as_stale, field=self))
def hook(f, hmodules=HOOK_MODULES, ignore_exceptions=False,
         pre_prefix=PRE_HOOKS_PREFIX, post_prefix=POST_HOOKS_PREFIX):
    """ Decorator to run hooks before/after wrapped functions. """
    # NOTE(review): Python 2 syntax (`except Exception, e`).  Also no visible
    # `return run` at the end — presumably truncated in this chunk, since the
    # decorator must return the wrapper to be usable.
    def run(*args, **kwargs):
        # Run every hook named <prefix><f.__name__> found in each hook module.
        def run_pre_post(prefix):
            h = curry(find_hook, f, prefix)
            return [run_in_sandbox(h(m), ignore_exceptions) for m in hmodules]
        # Call a single hook, logging (and optionally returning) exceptions
        # instead of letting them escape when ignore_exceptions is set.
        def run_in_sandbox(g, ignore_exceptions=True):
            try:
                return g(*args, **kwargs)
            except Exception, e:
                logging.warn(str(e))
                if ignore_exceptions:
                    return e
                else:
                    raise
        run_pre = curry(run_pre_post, pre_prefix)
        run_post = curry(run_pre_post, post_prefix)
        # TODO: What should be done with resutls especially failed ones?
        pre_results = run_pre()
        r = f(*args, **kwargs)
        post_results = run_post()
        return r
def add_accessor_methods(self, *args, **kwargs):
    """Attach per-size accessors to this instance.

    For every registered photo size ``s``, creates bound callables
    ``get_s_size``, ``get_s_photosize``, ``get_s_url`` and
    ``get_s_filename``, each pre-bound to that size.
    """
    accessor_table = (
        ('get_%s_size', self._get_SIZE_size),
        ('get_%s_photosize', self._get_SIZE_photosize),
        ('get_%s_url', self._get_SIZE_url),
        ('get_%s_filename', self._get_SIZE_filename),
    )
    for size in PhotoSizeCache().sizes:
        for name_template, template_method in accessor_table:
            setattr(self, name_template % size,
                    curry(template_method, size=size))
def find_nearest_pickleable_exception(exc):
    """With an exception instance, iterate over its super classes (by mro)
    and find the first super exception that is pickleable. It does not
    go below :exc:`Exception` (i.e. it skips :exc:`Exception`,
    :class:`BaseException` and :class:`object`). If that happens
    you should use :exc:`UnpickleableException` instead.

    :param exc: An exception instance.

    :returns: the nearest exception if it's not :exc:`Exception` or below,
        if it is it returns ``None``.

    :rtype: :exc:`Exception`
    """
    unwanted = (Exception, BaseException, object)
    # Some classes (e.g. old-style) may not expose mro(); fall back to empty.
    mro_ = getattr(exc.__class__, "mro", lambda: [])
    for supercls in mro_():
        if any(supercls is unwanted_cls for unwanted_cls in unwanted):
            # only BaseException and object, from here on down,
            # we don't care about these.
            return None
        try:
            exc_args = getattr(exc, "args", [])
            superexc = supercls(*exc_args)
            pickle.dumps(superexc)
        # FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; constructor/pickling failures are ordinary
        # Exceptions, so catch only those.
        except Exception:
            pass
        else:
            return superexc
    return None
def insert(self, value): node = Node.new(value) # Local helper function to transpose a list of lists def transpose(lst): fn = lambda i: map(lambda arr: arr[i], lst) return map(fn, range(self.limit)) # Helper function for connecting two adjacent nodes horizontaly def _bridge(n1, n2): n1.next, n2.prev = n2, n1 return n2 # Helper function for connecting two adjacent nodes verticaly def __bridge(n1, n2): n1.down, n2.up = n2, n1 return n2 # When the line limit is reached if len(self.partial_line) == self.limit: the_line = self.partial_line[:] reduce(_bridge, the_line) # connect horizontaly each node to his nearbord # Save the line # TODO: The head of the line is the only thing we need here to access to the whole line. # Storing the complete line is irrelevant. # Find a way to write it whithout that evident messyness. self.lines.append(the_line) if len(self.lines) >= 2: map(curry(reduce, __bridge), transpose(self.lines)) self.partial_line = [] self.partial_line.append(node)
def apply_async(self, target, args=None, kwargs=None, callbacks=None,
                errbacks=None, on_ack=None, meta=None):
    """Schedule *target* on the pool, mirroring the ``apply`` built-in.

    All ``callbacks`` and ``errbacks`` should complete immediately since
    otherwise the thread which handles the result will get blocked.
    """
    args = args or []
    kwargs = kwargs or {}
    callbacks = callbacks or []
    errbacks = errbacks or []
    meta = meta or {}
    self.logger.debug("TaskPool: Apply %s (args:%s kwargs:%s)" % (
        target, args, kwargs))
    self.replace_dead_workers()
    # Bind everything the result handler needs; it receives the return
    # value as its final positional argument.
    result_handler = curry(self.on_return, callbacks, errbacks, on_ack, meta)
    return self._pool.apply_async(target, args, kwargs,
                                  callback=result_handler)
def save_post(self, post):
    """Render and save *post*, snapshot a Revision, and emit ``post_published``.

    Publication happens only when the post is new (or never published) and
    the submitted state equals the last STATE_CHOICES entry.
    """
    published = False
    # Only a brand-new or not-yet-published post may transition to published.
    if post.pk is None or Post.objects.filter(pk=post.pk, published=None).count():
        if self.cleaned_data["state"] == Post.STATE_CHOICES[-1][0]:
            post.published = timezone.now()
            published = True
    # NOTE(review): curry() with a single argument is a no-op wrapper here;
    # render_func is just the parser loaded from settings.
    render_func = curry(
        load_path_attr(settings.PINAX_BLOG_MARKUP_CHOICE_MAP[
            self.markup_choice]["parser"]))
    post.teaser_html = render_func(self.cleaned_data["teaser"])
    post.content_html = render_func(self.cleaned_data["content"])
    post.updated = timezone.now()
    post.save()
    # Snapshot the raw (unrendered) content as a Revision row.
    r = Revision()
    r.post = post
    r.title = post.title
    r.teaser = self.cleaned_data["teaser"]
    r.content = self.cleaned_data["content"]
    r.author = post.author
    r.updated = post.updated
    r.published = post.published
    r.save()
    if published:
        post_published.send(sender=Post, post=post)
    return post
def add_accessor_methods(self):
    """
    Dynamically create Meta methods.
    self.get_<type>() method
    """
    meta_fields = ('keywords', 'title', 'description', 'canonical_url')
    for meta_field in meta_fields:
        accessor_name = 'get_%s' % meta_field
        setattr(self, accessor_name, curry(self.__get_meta, meta_field))
def read(self):
    """Coroutine-style read step for an async I/O framework (weightless-style).

    Registers with the reactor, yields until readable, then reads once and
    hands the data back via ``StopIteration(message)``.
    """
    self.gio.addReader(self)
    # Ensure deregistration even if the surrounding process exits early.
    self.onExit(curry(self.gio.removeReader, self))
    try:
        yield  # suspend until the reactor resumes us when the fd is readable
        message = os.read(self._fd, self.readBufSize)
    finally:
        self.gio.removeReader(self)
    # Returns the payload to the trampoline driving this generator.
    # NOTE(review): under PEP 479 (Python 3.7+) raising StopIteration inside
    # a generator becomes RuntimeError — this is Python-2-era framework code.
    raise StopIteration(message)
def makeGrid(self):
    """Build a width x hight grid of Nodes and doubly-link them in both axes.

    NOTE(review): relies on Python 2 eager ``map`` (via ``self.map_``) and the
    ``reduce`` builtin; ``hight`` is a typo'd attribute name kept as-is.
    """
    self.nodes = [[Node(self.rep) for i in range(self.width)]
                  for j in range(self.hight)]
    # Link two vertically adjacent nodes and return the lower one (for reduce).
    def vertical_bridge(n, n_):
        n.down = n_
        n_.up = n
        return n_
    # Link two horizontally adjacent nodes and return the right one (for reduce).
    def horizontal_bridge(node_1, node_2):
        node_1.next = node_2
        node_2.prev = node_1
        return node_2
    nodes = self.transpose()  # columns of the grid
    self.map_(curry(reduce, horizontal_bridge), self.nodes)
    self.map_(curry(reduce, vertical_bridge), nodes)
    return self
def max_col_widths(xss):
    """
    @return list of max width needed for columns (:: [Int]). see an example below.

    >>> xss = [['aaa', 'bbb', 'cccccccc', 'dd'], ['aa', 'b', 'ccccc', 'ddddddd'], ['aaaa', 'bbbb', 'c', 'dd']]
    >>> max_col_widths(xss)
    [4, 4, 8, 7]
    """
    # Columnwise max of cell widths.  zip(*xss) transposes rows to columns
    # (truncating to the shortest row, matching the pairwise zipWith of the
    # original fold-based implementation) and replaces the external
    # fold/zipWith/curry helpers with the standard library.
    # Also returns [] for empty input instead of raising IndexError.
    return [max(len(cell) for cell in column) for column in zip(*xss)]
def write(self, response):
    """Coroutine-style write step: yield until writable, write, repeat until drained.

    NOTE(review): Python 2 — uses the ``buffer`` builtin (``memoryview`` in
    Python 3) to advance past the bytes already written without copying.
    """
    self.gio.addWriter(self)
    # Ensure deregistration even if the surrounding process exits early.
    self.onExit(curry(self.gio.removeWriter, self))
    buff = response
    try:
        while len(buff) > 0:
            yield  # suspend until the reactor says the fd is writable
            written = os.write(self._fd, buff)
            buff = buffer(buff, written)  # zero-copy slice of the remainder
    finally:
        self.gio.removeWriter(self)
def write(self,response):
    """Coroutine-style write step: yield until writable, write, repeat until drained.

    (Duplicate of the sibling ``write`` implementation in this file set.)
    NOTE(review): Python 2 — ``buffer`` builtin; ``memoryview`` in Python 3.
    """
    self.gio.addWriter(self)
    # Ensure deregistration even if the surrounding process exits early.
    self.onExit(curry(self.gio.removeWriter, self))
    buff = response
    try:
        while len(buff) > 0:
            yield  # suspend until the reactor says the fd is writable
            written = os.write(self._fd, buff)
            buff = buffer(buff, written)  # zero-copy slice of the remainder
    finally:
        self.gio.removeWriter(self)
def _update_attr(self, attr, value): # update the parameter in question setattr(self, attr.replace(".", "_"), value) self._attr_updated[attr] = True # and call any callbacks cb = [] for attr_name, fn, once in self._callbacks: if attr_name == attr: self.actor.call_soon(curry(fn, attr, value)) if once: continue cb.append((attr_name, fn, once)) self._callbacks = cb
def _compat_wraps(wrapped, assigned=WRAPPER_ASSIGNMENTS, updated=WRAPPER_UPDATES): """Decorator factory to apply update_wrapper() to a wrapper function Returns a decorator that invokes update_wrapper() with the decorated function as the wrapper argument and the arguments to wraps() as the remaining arguments. Default arguments are as for update_wrapper(). This is a convenience function to simplify applying curry() to update_wrapper(). """ return curry(update_wrapper, wrapped=wrapped, assigned=assigned, updated=updated)
def _citdl_from_power_node(self, node):
    """CITDL for a given python_symbols.power Node type."""
    children = node.children
    from functools import partial as curry
    # Build shorthand pattern constructors for the lib2to3 pytree matcher.
    # NOTE(review): these attach curried constructors onto the shared
    # NodePattern/LeafPattern classes on every call — harmless but wasteful.
    N = pytree.NodePattern
    N.POWER = curry(N, type=python_symbols.power)
    N.TRAILER = curry(N, type=python_symbols.trailer)
    L = pytree.LeafPattern
    L.NAME = curry(L, type=token.NAME)
    L.LPAR = curry(L, type=token.LPAR, content='(')
    L.RPAR = curry(L, type=token.RPAR, content=')')
    ANY = pytree.WildcardPattern
    # Matches a simple call:  NAME ( ... )
    funcall_pattern = N.POWER(content=[
        L.NAME(name="func"),
        N.TRAILER(content=[L.LPAR(), ANY(), L.RPAR()])
    ])
    match = {}
    if funcall_pattern.match(node, results=match):
        func = match['func'].value
        # Calls to these builtins have a known result type.
        if func in ('range', 'filter', 'map'):
            return 'list'
        elif func in ('list', 'tuple', 'dict'):
            return func
    # Otherwise reassemble a dotted/call/subscript expression textually:
    # e.g.  a.b(...)  ->  "a.b()"   and   a[...]  ->  "a[]".
    bits = [children[0].value]
    for c in children[1:]:
        if c.children and c.children[0].type == token.DOT:
            bits += ['.', c.children[1].value]
        else:
            assert c.children[0].type in (token.LPAR, token.LSQB)
            bits += [c.children[0].value, c.children[-1].value]
    return ''.join(bits)
def _citdl_from_power_node(self, node):
    """CITDL for a given python_symbols.power Node type.

    (Duplicate of the sibling implementation; differs only in formatting.)
    """
    children = node.children
    from functools import partial as curry
    # Shorthand pattern constructors for the lib2to3 pytree matcher.
    N = pytree.NodePattern
    N.POWER = curry(N, type=python_symbols.power)
    N.TRAILER = curry(N, type=python_symbols.trailer)
    L = pytree.LeafPattern
    L.NAME = curry(L, type=token.NAME)
    L.LPAR = curry(L, type=token.LPAR, content='(')
    L.RPAR = curry(L, type=token.RPAR, content=')')
    ANY = pytree.WildcardPattern
    # Matches a simple call:  NAME ( ... )
    funcall_pattern = N.POWER(content=[L.NAME(name="func"),
                                       N.TRAILER(content=[L.LPAR(), ANY(), L.RPAR()])])
    match = {}
    if funcall_pattern.match(node, results=match):
        func = match['func'].value
        # Calls to these builtins have a known result type.
        if func in ('range', 'filter', 'map'):
            return 'list'
        elif func in ('list', 'tuple', 'dict'):
            return func
    # Otherwise reassemble a dotted/call/subscript expression textually.
    bits = [children[0].value]
    for c in children[1:]:
        if c.children and c.children[0].type == token.DOT:
            bits += ['.', c.children[1].value]
        else:
            assert c.children[0].type in (token.LPAR, token.LSQB)
            bits += [c.children[0].value, c.children[-1].value]
    return ''.join(bits)
def send(self, sender, **payload):
    """Send signal and dispatch to all listeners.

    :param sender: The sender of the signal. Either a specific object
        or ``None``.
    :param payload: The data to pass on to listeners. Usually the keys
        described in :attr:`provides_args` and any additional keys you'd
        want to provide.
    """
    payload = self.prepare_payload(sender, payload)
    def dispatch(listener):
        return self._send_signal(sender, payload, listener)
    return map(dispatch, self.get_listeners(sender, payload))
def fixText(self, text, moreMacros={}):
    """Apply pre-parse textual fixes (macros, typography, CSS shorthands) to *text*.

    NOTE(review): mutable default ``moreMacros={}`` is safe here because it is
    only read (copied via ``dict()``), never mutated.
    """
    # Do several textual replacements that need to happen *before* the document is parsed as h.
    # If markdown shorthands are on, remove all `foo`s while processing,
    # so their contents don't accidentally trigger other stuff.
    # Also handle markdown escapes.
    if "markdown" in self.md.markupShorthands:
        textFunctor = MarkdownCodeSpans(text)
    else:
        textFunctor = Functor(text)
    macros = dict(self.macros, **moreMacros)
    textFunctor = textFunctor.map(curry(replaceMacros, macros=macros))
    textFunctor = textFunctor.map(fixTypography)
    if "css" in self.md.markupShorthands:
        textFunctor = textFunctor.map(replaceAwkwardCSSShorthands)
    return textFunctor.extract()
def process_request(self, request): """ Gets the current user from the request and prepares and connects a signal receiver with the user already attached to it. """ # Initialize thread local storage threadlocal.auditlog = { "signal_duid": (self.__class__, time.time()), "remote_addr": request.META.get("REMOTE_ADDR"), } # In case of proxy, set 'original' address if request.META.get("HTTP_X_FORWARDED_FOR"): threadlocal.auditlog["remote_addr"] = request.META.get("HTTP_X_FORWARDED_FOR").split(",")[0] # Connect signal for automatic logging if hasattr(request, "user") and hasattr(request.user, "is_authenticated") and request.user.is_authenticated: set_actor = curry(self.set_actor, user=request.user, signal_duid=threadlocal.auditlog["signal_duid"]) pre_save.connect(set_actor, sender=LogEntry, dispatch_uid=threadlocal.auditlog["signal_duid"], weak=False)
def fun_takes_kwargs(fun, kwlist=[]):
    """With a function, and a list of keyword arguments, returns arguments
    in the list which the function takes.

    :param fun: The function to inspect arguments of.
    :param kwlist: The list of keyword arguments.

    Examples

        >>> def foo(self, x, y, logfile=None, loglevel=None):
        ...     return x * y
        >>> fun_takes_kwargs(foo, ["logfile", "loglevel", "task_id"])
        ["logfile", "loglevel"]

        >>> def foo(self, x, y, **kwargs):
        >>> fun_takes_kwargs(foo, ["logfile", "loglevel", "task_id"])
        ["logfile", "loglevel", "task_id"]

    """
    # NOTE(review): Python 2 era — getargspec was removed in 3.11 and filter
    # here must return a list; also `!= None` should be `is not None`, and the
    # mutable default kwlist=[] is unmutated but still an anti-pattern.
    args, _varargs, keywords, _defaults = getargspec(fun)
    # A **kwargs catch-all accepts everything in kwlist.
    if keywords != None:
        return kwlist
    return filter(curry(operator.contains, args), kwlist)
def __call__(self, request):
    """Django middleware: stamp the authenticated user onto saves made during write requests.

    Connects a per-request pre_save receiver (keyed by dispatch_uid) for
    mutating HTTP methods, and always disconnects it afterwards.
    """
    if request.method not in ('GET', 'HEAD', 'OPTIONS', 'TRACE'):
        if hasattr(request, 'user') and request.user.is_authenticated:
            user = request.user
        else:
            user = None
        # Pre-bind the user; the receiver gets the model instance from the signal.
        mark_whodid = curry(self.mark_whodid, user)
        signals.pre_save.connect(mark_whodid, dispatch_uid=(
            self.__class__,
            request,
        ), weak=False)
    response = self.get_response(request)
    # Disconnect unconditionally; a no-op for read-only methods.
    signals.pre_save.disconnect(dispatch_uid=(
        self.__class__,
        request,
    ))
    return response
def _rgrep_helper(obj, grep_str): if type(obj) == list: results = list() for rec_has_bottom, rec_results in map( curry(_rgrep_helper, grep_str=grep_str), obj): if rec_has_bottom: results.append(rec_results) has_bottom = len(results) != 0 return has_bottom, results elif type(obj) == dict: results = dict() for key, val in obj.iteritems(): if re.search(grep_str, key): results[key] = val elif type(val) == dict: rec_has_bottom, rec_results = _rgrep_helper(val, grep_str) if rec_has_bottom: results[key] = rec_results has_bottom = len(results) != 0 return has_bottom, results else: raise TypeError( "rgrep only works on dicts, lists of dicts, lists of lists of dicts, etc." )
def apply_async(self, target, args=None, kwargs=None, callbacks=None,
                errbacks=None, on_ack=None, meta=None):
    """Run *target* on the process pool, mirroring the ``apply`` built-in.

    All ``callbacks`` and ``errbacks`` should complete immediately since
    otherwise the thread which handles the result will get blocked.
    """
    args = args or []
    kwargs = kwargs or {}
    callbacks = callbacks or []
    errbacks = errbacks or []
    meta = meta or {}
    # Give this task an id so the result handler can find its bookkeeping.
    task_id = gen_unique_id()
    result_handler = curry(self.on_return, task_id, callbacks, errbacks,
                           on_ack, meta)
    async_result = self._pool.apply_async(target, args, kwargs,
                                          callback=result_handler)
    self._processes[task_id] = [async_result, callbacks, errbacks, meta]
    return async_result
def decode(message, pad, otp_func):
    """Decode *message* against *pad*, applying *otp_func* to each
    (message char, pad char) pair with ``direction='decode'`` pre-bound."""
    def decode_pair(message_char, pad_char):
        return otp_func(message_char, pad_char, direction='decode')
    return map(decode_pair, message, pad)
def transcode_stream(stream, mapping):
    """Transcode every symbol of *stream* through *mapping* via ``transcode_char``."""
    def transcode(symbol):
        return transcode_char(symbol, mapping=mapping)
    return map(transcode, stream)
# NOTE(review): this chunk begins mid-class — `__call__` is the tail of a
# memoizing wrapper class whose header is outside this view.
def __call__(self, *args, **kwargs):
    # NOTE(review): caches only truthy results — a falsy return value
    # (0, "", None) is recomputed on every call; likely should compare
    # `self.value is None`.
    if not self.value:
        self.value = self.func(*args, **kwargs)
    return self.value

def n_times(n, f):
    '''Returns a function returning a list of the results of _n_ applications of _f_.'''
    # n_times(2, f)(1, 2, 3) => [ f(1, 2, 3), f(1, 2, 3) ]
    def _f(*args, **kwargs):
        # Python 2: map returns a list; under Python 3 this would be lazy.
        return map(lambda f: f(*args, **kwargs), [f] * n)
    return _f

# Convenience: call a function twice and collect both results.
twice = curry(n_times, 2)

if __name__ == '__main__':
    # Ad-hoc demo (Python 2 print statements; satisfies_any/satisfies_all
    # are defined elsewhere in the original module).
    even = lambda n: n % 2 == 0
    odd = lambda n: not even(n)
    gt1 = lambda n: n > 1
    gt2 = lambda n: n > 2
    lt1 = lambda n: n < 1
    lt2 = lambda n: n < 2
    print satisfies_any(even, odd)(3)
    print satisfies_all(even, odd)(3)
    print satisfies_any(gt1, gt2)(3)
    print satisfies_all(gt1, gt2)(3)
    print satisfies_any(lt1, lt2)(3)
class _deferred(object):
    # Memoizing callable: computes func(*args) once and replays the result.
    def __init__(self, func):
        self.func = func
        self.value = None

    def __call__(self, *args, **kwargs):
        # NOTE(review): caches only truthy results — a falsy return value
        # is recomputed every call; likely should compare `self.value is None`.
        if not self.value:
            self.value = self.func(*args, **kwargs)
        return self.value

def n_times(n, f):
    '''Returns a function returning a list of the results of _n_ applications of _f_.'''
    # n_times(2, f)(1, 2, 3) => [ f(1, 2, 3), f(1, 2, 3) ]
    def _f(*args, **kwargs):
        # Python 2: map returns a list; under Python 3 this would be lazy.
        return map(lambda f: f(*args, **kwargs), [f] * n)
    return _f

# Convenience: call a function twice and collect both results.
twice = curry(n_times, 2)

if __name__ == '__main__':
    # Ad-hoc demo (Python 2 print statements; satisfies_any/satisfies_all
    # are defined elsewhere in the original module).
    even = lambda n: n % 2 == 0
    odd = lambda n: not even(n)
    gt1 = lambda n: n>1
    gt2 = lambda n: n>2
    lt1 = lambda n: n<1
    lt2 = lambda n: n<2
    print satisfies_any(even, odd)(3)
    print satisfies_all(even, odd)(3)
    print satisfies_any(gt1, gt2)(3)
    print satisfies_all(gt1, gt2)(3)
    print satisfies_any(lt1, lt2)(3)
# NOTE(review): closure fragment — `f`, `hmodules`, `ignore_exceptions`,
# `find_hook` and `run_in_sandbox` come from an enclosing scope not visible
# in this chunk (see the sibling `hook` decorator).
def run_pre_post(prefix):
    # Locate the <prefix>-named hook for f, then run it in every hook module.
    h = curry(find_hook, f, prefix)
    return [run_in_sandbox(h(m), ignore_exceptions) for m in hmodules]
# NOTE(review): this chunk starts mid-function — the statements up to the
# `find_all` assignment are the tail of a tgrep-style query function whose
# definition lies outside this view.  Python 2 code (izip, ifilter,
# tuple-parameter lambdas).
lex.input(expression)
for tok in iter(lex.token, None):
    debug("\t%s %s", tok.type, tok.value)

queries = [yacc.parse(expression) for expression in query_callback_map.keys()]

# Run every parsed query against every node; pass captured context (if any)
# to the callback as keyword arguments.
for node in nodes(deriv):
    for query_expr, query_str in izip(queries, query_callback_map.keys()):
        context = Context()
        if query_expr.is_satisfied_by(node, context):
            if context:
                query_callback_map[query_str](node, **smash_key_case(context))
            else:
                query_callback_map[query_str](node)

find_all = tgrep
# First match only: compose take(1) over the full search.
find_first = compose(curry(take, 1), find_all)

SmallSubtreeThreshold = 10

def find_small(*args, **kwargs):
    # Filter tgrep matches down to subtrees with few leaves.
    # NOTE(review): requires 'with_context' in kwargs — raises KeyError otherwise.
    matches = tgrep(*args, **kwargs)
    with_context = kwargs['with_context']
    if with_context:
        return ifilter(lambda (match, context): match.leaf_count() <= SmallSubtreeThreshold, matches)
    else:
        return ifilter(lambda match: match.leaf_count() <= SmallSubtreeThreshold, matches)

SmallSentenceThreshold = 18

# NOTE(review): truncated in this chunk — the body continues elsewhere.
def find_small_sents(*args, **kwargs):
    deriv = args[0]
# University of Sydney # Use of this software is governed by the attached "Chinese CCGbank converter Licence Agreement" # supplied in the Chinese CCGbank conversion distribution. If the LICENCE file is missing, please # notify the maintainer Daniel Tse <*****@*****.**>. from functools import partial as curry def do_pad_split(splitter, str, sep, maxsplit): '''Splits the string, but pads the result tuple to the maximum number of allowable sub-strings, as defined by _maxsplit_.''' ret = splitter(str, sep, maxsplit) ret += [None] * (maxsplit+1 - len(ret)) return ret padded_split = curry(do_pad_split, str.split) padded_rsplit = curry(do_pad_split, str.rsplit) def nth_occurrence(seq, N, when, until): '''Given a sequence _seq_, this returns the _n_th sub-sequence for which the predicate _when_ is true, with _until_ defining the end of each sub-sequence. The returned sub-sequence does not include the line where _until_ is True.''' n = 0 buffer = [] recording = False for element in seq: if until(element): recording = False if n > N: break
def __get__(self, obj, objtype): """Support instance methods. """ return curry(self.__call__, obj)
from functools import reduce, partial as curry
from functional import foldl, compose as compose2

# n-composition, reduce expects a list, so the arguments are list-ified
compose = curry(lambda f, *x: f(x), curry(reduce, compose2))

# wraps an empty string up in a function
null = lambda: ""
nulliter = ([null])

# wraps a string up in a function
t = lambda x: lambda: x

# nicer way to run the function
render = lambda f: f()

# Lazily-constructed errors for the element builders below.
no_content = lambda: AttributeError("For this function, content should be None"
                                    )
no_at = lambda: AttributeError("For this function, at should be None")
missing_type = lambda: AttributeError(
    "The type can't be None; something bad has happened...")

def element_only(type, content, at):
    # Render a bare "<type>" tag; content/at must be the `null` sentinel.
    # NOTE(review): `content != null` compares function identity against the
    # sentinel, not emptiness; `type` shadows the builtin.
    if content != null:
        raise no_content()
    if at != null:
        raise no_at()
    return lambda: ''.join(("<", type, ">"))

# NOTE(review): truncated in this chunk — the rest of only_at continues elsewhere.
def only_at(type, content, at):
    if content != null:
        raise no_content()
from munge.util.exceptions import DocParseException
from munge.util.iter_utils import take
from functools import partial as curry

def with_paired_delimiters(pair, func, toks):
    # Consume pair[0], run the inner parser, then consume pair[1];
    # return the inner parser's value.
    shift_and_check(pair[0], toks)
    value = func(toks)
    shift_and_check(pair[1], toks)
    return value

# Inspired by Parsec's _parens_ parser combinator.
# If _func_ recognises the set of strings S, then _with_parens_
# recognises { ( s ) | s \in S }.
with_parens = curry(with_paired_delimiters, "()")
with_angles = curry(with_paired_delimiters, "<>")
with_squares = curry(with_paired_delimiters, "[]")
with_braces = curry(with_paired_delimiters, "{}")

def get_context(toks, ntokens=10):
    # Peek ahead a few tokens for error messages.
    return ", ".join(take(ntokens, toks))

def shift_and_check(tok, toks):
    """Peeks at a lexer token from the stream _toks_, throwing a DocParseException
    unless the token matches _tok_."""
    # NOTE(review): Python 2 iterator protocol (toks.next()); `next` shadows
    # the builtin.  Truncated in this chunk — the raise continues elsewhere.
    next = toks.next()
    if tok != next:
        context = get_context(toks)
# Use of this software is governed by the attached "Chinese CCGbank converter Licence Agreement" # supplied in the Chinese CCGbank conversion distribution. If the LICENCE file is missing, please # notify the maintainer Daniel Tse <*****@*****.**>. from functools import partial as curry def do_pad_split(splitter, str, sep, maxsplit): '''Splits the string, but pads the result tuple to the maximum number of allowable sub-strings, as defined by _maxsplit_.''' ret = splitter(str, sep, maxsplit) ret += [None] * (maxsplit + 1 - len(ret)) return ret padded_split = curry(do_pad_split, str.split) padded_rsplit = curry(do_pad_split, str.rsplit) def nth_occurrence(seq, N, when, until): '''Given a sequence _seq_, this returns the _n_th sub-sequence for which the predicate _when_ is true, with _until_ defining the end of each sub-sequence. The returned sub-sequence does not include the line where _until_ is True.''' n = 0 buffer = [] recording = False for element in seq: if until(element): recording = False if n > N: break
def get_mapping(model_or_queryset):
    '''Get the mapping for a given model or queryset'''
    mappings = get_mappings()
    # Accept either a queryset or a model class; derive the other.
    if isinstance(model_or_queryset, models.query.QuerySet):
        queryset = model_or_queryset
        model = model_or_queryset.model
    elif issubclass(model_or_queryset, models.Model):
        queryset = model_or_queryset.objects.all()
        model = model_or_queryset
    else:
        raise TypeError(
            'Only `django.db.model.Model` and `django.db.query.QuerySet` '
            'objects are valid arguments')
    meta = model._meta
    mapping_key = meta.app_label + '.' + meta.object_name
    mapping = mappings.get(mapping_key)
    if mapping is not None:
        # Copy so per-call mutations below don't leak into the registry.
        mapping = mapping.copy()
    else:
        raise exceptions.MappingUndefined('Unable to find mapping '
                                          'for %s' % mapping_key)
    # The callable allows for customizing the queryset on the fly
    queryset = mapping.get('queryset', queryset)
    if callable(queryset):
        queryset = queryset(mapping)
    mapping['app'] = meta.app_label
    mapping['model'] = meta.object_name
    mapping['queryset'] = queryset
    mapping.setdefault('separator', ' - ')
    # Normalize singular 'field' to a one-element 'fields' tuple.
    if 'field' in mapping:
        mapping['fields'] = mapping['field'],
    elif 'fields' not in mapping:
        raise exceptions.ConfigurationError(
            'Every mapping should have a field or fields attribute. Mapping: '
            '%r' % mapping)
    # NOTE(review): suspicious pattern — when the mapping already supplies a
    # custom split_func/join_func/filter_func, setdefault leaves the raw
    # callable in place and the curried wrapper (computed eagerly here) is
    # discarded.  Either custom funcs are expected to come pre-bound, or this
    # should be a plain assignment; confirm against the call sites.
    mapping.setdefault(
        'split_func',
        curry(
            mapping.get('split_func', split_func),
            mapping['fields'],
            mapping['separator'],
        ))
    mapping.setdefault(
        'join_func',
        curry(
            mapping.get('join_func', join_func),
            mapping['fields'],
            mapping['separator'],
        ))
    mapping.setdefault(
        'filter_func',
        curry(
            mapping.get('filter_func', filter_func),
            mapping['fields'],
            mapping['separator'],
        ))
    return mapping.copy()
# Micro-benchmark (Python 2: print statements, xrange) comparing a plain
# call, a curried call, and a generator step for the same trivial body.
def f(a):
    b = a

t0 = time()
for i in xrange(10**6):
    f('a')
t1 = time()
print 'Function call sec', t1 - t0, 'us 100%'

# Same million calls, but constructing a fresh partial each iteration.
for i in xrange(10**6):
    curry(f, 'a')()
t2 = time()
print 'With Curry', t2 - t1, 'us', (t2 - t1) / (t1 - t0) * 100, '%'

# Generator variant: create and advance one step per iteration.
def f(a):
    yield
    b = a

for i in xrange(10**6):
    f('a').next()
t3 = time()
print 'With Generator', t3 - t2, 'us', (t3 - t2) / (t1 - t0) * 100, '%'
def apply_async(task, args=None, kwargs=None, countdown=None, eta=None,
                routing_key=None, exchange=None, immediate=None,
                mandatory=None, priority=None, connection=None,
                connect_timeout=AMQP_CONNECTION_TIMEOUT, **opts):
    """Run a task asynchronously by the celery daemon(s).

    :param task: The task to run (a callable object, or a :class:`Task`
        instance

    :param args: The positional arguments to pass on to the task (a ``list``).

    :param kwargs: The keyword arguments to pass on to the task (a ``dict``)

    :param countdown: Number of seconds into the future that the task should
        execute. Defaults to immediate delivery (Do not confuse that with
        the ``immediate`` setting, they are unrelated).

    :param eta: A :class:`datetime.datetime` object that describes the
        absolute time when the task should execute. May not be specified
        if ``countdown`` is also supplied. (Do not confuse this with the
        ``immediate`` setting, they are unrelated).

    :keyword routing_key: The routing key used to route the task to a worker
        server.

    :keyword exchange: The named exchange to send the task to. Defaults to
        :attr:`celery.task.base.Task.exchange`.

    :keyword immediate: Request immediate delivery. Will raise an exception
        if the task cannot be routed to a worker immediately.
        (Do not confuse this parameter with the ``countdown`` and ``eta``
        settings, as they are unrelated).

    :keyword mandatory: Mandatory routing. Raises an exception if there's
        no running workers able to take on this task.

    :keyword connection: Re-use existing AMQP connection.
        The ``connect_timeout`` argument is not respected if this is set.

    :keyword connect_timeout: The timeout in seconds, before we give up
        on establishing a connection to the AMQP server.

    :keyword priority: The task priority, a number between ``0`` and ``9``.
    """
    args = args or []
    kwargs = kwargs or {}
    # Per-task attributes act as fallbacks for unspecified routing options.
    routing_key = routing_key or getattr(task, "routing_key", None)
    exchange = exchange or getattr(task, "exchange", None)
    immediate = immediate or getattr(task, "immediate", None)
    mandatory = mandatory or getattr(task, "mandatory", None)
    priority = priority or getattr(task, "priority", None)
    taskset_id = opts.get("taskset_id")
    publisher = opts.get("publisher")
    if countdown:
        # Relative countdown is converted to an absolute ETA.
        eta = datetime.now() + timedelta(seconds=countdown)
    from celery.conf import ALWAYS_EAGER
    if ALWAYS_EAGER:
        # Eager mode: execute locally and synchronously instead of publishing.
        return apply(task, args, kwargs)
    need_to_close_connection = False
    if not publisher:
        if not connection:
            connection = DjangoAMQPConnection(connect_timeout=connect_timeout)
            need_to_close_connection = True
        publisher = TaskPublisher(connection=connection)
    delay_task = publisher.delay_task
    if taskset_id:
        # Publish as part of a task set instead of standalone.
        delay_task = curry(publisher.delay_task_in_set, taskset_id)
    task_id = delay_task(task.name, args, kwargs,
                         routing_key=routing_key, exchange=exchange,
                         mandatory=mandatory, immediate=immediate,
                         priority=priority, eta=eta)
    if need_to_close_connection:
        # Only tear down what we created ourselves.
        publisher.close()
        connection.close()
    return AsyncResult(task_id)
# NOTE(review): orphan statement — tail of a `take(n, seq)` definition whose
# header lies in an earlier chunk.
    return islice(seq, 0, n)

def seqify(e):
    '''If _e_ is a sequence, returns an iterator over _e_. Otherwise, returns a
    single-element iterator yielding _e_.'''
    if isinstance(e, (list, tuple)):
        for el in e:
            yield el
    else:
        yield e

def single(e):
    '''Yields an iterator over a single element _e_.'''
    yield e

# First element (as a length-1 iterator).
get_first = curry(take, 1)

def intersperse(seq, spacer):
    '''Given a sequence _seq_, intersperses the given _spacer_ between each pair of
    elements.'''
    first = True
    for e in seq:
        if first:
            first = False
        else:
            yield spacer
        yield e
# NOTE(review): this chunk starts mid-function — the statements up to the
# `find_all` assignment are the tail of a tgrep-style query function whose
# definition lies outside this view.  Python 2 (izip, ifilter,
# tuple-parameter lambda).
queries = [
    yacc.parse(expression) for expression in query_callback_map.keys()
]

# Run every parsed query against every node; pass captured context (if any)
# to the callback as keyword arguments.
for node in nodes(deriv):
    for query_expr, query_str in izip(queries, query_callback_map.keys()):
        context = Context()
        if query_expr.is_satisfied_by(node, context):
            if context:
                query_callback_map[query_str](node, **smash_key_case(context))
            else:
                query_callback_map[query_str](node)

find_all = tgrep
# First match only: compose take(1) over the full search.
find_first = compose(curry(take, 1), find_all)

SmallSubtreeThreshold = 10

# NOTE(review): truncated in this chunk — the else-branch continues elsewhere.
def find_small(*args, **kwargs):
    matches = tgrep(*args, **kwargs)
    with_context = kwargs['with_context']
    if with_context:
        return ifilter(
            lambda (match, context): match.leaf_count() <= SmallSubtreeThreshold,
            matches)
    else:
        return ifilter(
# NOTE(review): orphan statements — tail of a filtering helper whose header
# lies in an earlier chunk (tee-based split, then a lazily filtered view).
    orig_seq, seq = tee(orig_seq, 2)
    return (element for element in seq if not pred(element))

def take(n, seq):
    '''Returns the first _n_ elements from the given sequence.'''
    return islice(seq, 0, n)

def seqify(e):
    '''If _e_ is a sequence, returns an iterator over _e_. Otherwise, returns a
    single-element iterator yielding _e_.'''
    if isinstance(e, (list, tuple)):
        for el in e:
            yield el
    else:
        yield e

def single(e):
    '''Yields an iterator over a single element _e_.'''
    yield e

# First element (as a length-1 iterator).
get_first = curry(take, 1)

def intersperse(seq, spacer):
    '''Given a sequence _seq_, intersperses the given _spacer_ between each pair of
    elements.'''
    first = True
    for e in seq:
        if first:
            first = False
        else:
            yield spacer
        yield e
def track_the_murder():
    """Solve the polydivisible-number puzzle over the digits in THE_SOURCE.

    NOTE(review): relies on module-level helpers not visible here — `o`
    (function composition), `curry`, `add`, `FROM_1_TO_9` (a range tuple)
    and `THE_SOURCE` (presumably the digit characters).  Python 2: `filter`
    must return a list for `.pop()` to work.
    """
    # Join a tuple of digit characters and parse it as an int.
    to_int = o(int, curry(reduce, add))
    # Polydivisibility: every length-n prefix is divisible by n.
    satisfy = lambda number: all(
        int(str(number)[:n]) % n == 0 for n in range(*FROM_1_TO_9))
    return to_int(filter(o(satisfy, to_int), permutations(THE_SOURCE)).pop())
def get_form(self, request, obj=None, **kwargs):
    """Return the admin form with *request* pre-bound into the
    formfield-for-dbfield callback, then defer to the parent class."""
    kwargs["formfield_callback"] = curry(self.formfield_for_dbfield,
                                         request=request)
    return super().get_form(request, obj, **kwargs)
from munge.util.exceptions import DocParseException from munge.util.iter_utils import take from functools import partial as curry def with_paired_delimiters(pair, func, toks): shift_and_check(pair[0], toks) value = func(toks) shift_and_check(pair[1], toks) return value # Inspired by Parsec's _parens_ parser combinator. # If _func_ recognises the set of strings S, then _with_parens_ # recognises { ( s ) | s \in S }. with_parens = curry(with_paired_delimiters, '()') with_angles = curry(with_paired_delimiters, '<>') with_squares = curry(with_paired_delimiters, '[]') with_braces = curry(with_paired_delimiters, '{}') def get_context(toks, ntokens=10): return ", ".join(take(ntokens, toks)) def shift_and_check(tok, toks): '''Peeks at a lexer token from the stream _toks_, throwing a DocParseException unless the token matches _tok_.''' next = toks.next() if tok != next: context = get_context(toks)
from functools import partial as curry
from time import time

# Micro-benchmark (Python 2: print statements, xrange) comparing a plain
# call, a curried call, and a generator step for the same trivial body.
def f(a):
    b = a

t0 = time()
for i in xrange(10**6):
    f('a')
t1 = time()
print 'Function call sec', t1-t0, 'us 100%'

# Same million calls, but constructing a fresh partial each iteration.
for i in xrange(10**6):
    curry(f, 'a')()
t2 = time()
print 'With Curry', t2-t1, 'us', (t2-t1)/(t1-t0)*100, '%'

# Generator variant: create and advance one step per iteration.
def f(a):
    yield
    b = a

for i in xrange(10**6):
    f('a').next()
t3 = time()
print 'With Generator', t3-t2, 'us', (t3-t2)/(t1-t0)*100, '%'

# NOTE(review): truncated in this chunk — the class body continues elsewhere.
class f:
class NodeManager:
    """Accumulates values into fixed-width lines of doubly-linked Nodes.

    NOTE(review): Python 2 code throughout — eager ``map``, builtin
    ``reduce`` — and at least one statement below is syntactically invalid
    at class scope; flagged inline.
    """
    def __init__(self, limit=100):
        self.partial_line = []   # nodes of the line currently being filled
        self.limit = limit       # nodes per line
        self.lines = []          # completed lines (lists of Nodes)

    def head(self):
        # First node of the first completed line, or None when empty.
        return self.lines[0][0] if self.lines else None

    def insert(self, value):
        node = Node.new(value)
        # Local helper function to transpose a list of lists
        def transpose(lst):
            fn = lambda i: map(lambda arr: arr[i], lst)
            return map(fn, range(self.limit))
        # Helper function for connecting two adjacent nodes horizontally
        def _bridge(n1, n2):
            n1.next, n2.prev = n2, n1
            return n2
        # Helper function for connecting two adjacent nodes vertically
        def __bridge(n1, n2):
            n1.down, n2.up = n2, n1
            return n2
        # When the line limit is reached
        if len(self.partial_line) == self.limit:
            the_line = self.partial_line[:]
            reduce(_bridge, the_line)  # connect horizontally each node to its neighbor
            # Save the line
            # TODO: The head of the line is the only thing we need here to access to the whole line.
            # Storing the complete line is irrelevant.
            # Find a way to write it whithout that evident messyness.
            self.lines.append(the_line)
            if len(self.lines) >= 2:
                map(curry(reduce, __bridge), transpose(self.lines))
            self.partial_line = []
        self.partial_line.append(node)

    def __walker(self, start, node, *fns):
        # Fold fns[0] over the chain produced by repeatedly applying fns[1].
        # NOTE(review): the recursive call passes `fns` without unpacking
        # (`*fns`), so the recursion as written cannot work — likely a
        # transcription error.
        return self.__walker(fns[0](start, node), fns[1](node), fns) if node else start

    # NOTE(review): invalid as written — references `self` and `node` at
    # class scope (NameError at class-definition time).  Appears to be the
    # mangled remains of two former methods building `line` (walk right,
    # concatenating values) and `whole` (walk down, concatenating lines).
    line, whole = map(curry(self.__walker, "", node),
                      [[lambda s, n: s + n.value, lambda n: n.next],
                       [lambda s, n: s + self.line, lambda n: n.down]])

    def _iter_lines(self):
        # Yield the head node of each line by following .down pointers.
        line = self.head()
        while line:
            yield line
            line = line.down

    def __iter__(self):
        # Yield every stored value, line by line, left to right.
        for line in self._iter_lines():
            node = line
            while node:
                yield node.value
                node = node.next

    def show(self):
        head = self.head()
        if not head:
            return ""
        return self.whole(head)

    @classmethod
    def new_manager(cls, *arg):
        return cls(*arg)

    def write_to_file(self, fname):
        with open(fname, "a") as outSide:
            outSide.write(self.show())

    @classmethod
    def From_file(cls, fname):
        manager = cls.new_manager()
        with open(fname, "r") as outSide:
            # NOTE(review): Python 2 — relies on eager map for the side
            # effect of inserting each character; a no-op under Python 3.
            map(manager.insert, outSide.read())
        return manager
# NOTE(review): method fragment — the enclosing class header is outside this
# chunk.  Delegates string conversion to the instance's show() method and
# reuses it for repr().
def __str__(self):
    return self.show()
__repr__ = __str__