def to_internal_value(self, data):
    """Validate and convert a raw answer payload for the current question.

    Rejects answers submitted outside the polling window, attaches the
    question instance, strips None-valued fields, and projects the payload
    onto the fields expected for the concrete question type; any leftover
    fields are reported as unknown.
    """
    question = self.get_question()
    polling = question.polling
    # Current wall-clock time in the project's default timezone.
    now = _djtz.localtime(timezone=_djtz.get_default_timezone())
    # NOTE(review): self.fail(...) presumably raises a validation error
    # (DRF serializer convention), so each fail() below ends processing
    # -- confirm against the serializer's error handling.
    if now < polling.start_time:
        self.fail('polling_not_started')
    # The polling window is [start_time, start_time + duration).
    if polling.start_time + polling.duration <= now:
        self.fail('polling_ended')
    data = super().to_internal_value(data)
    data['question'] = question
    # Drop keys whose value is None (funcy: keep values that are not None).
    data = F.select_values(F.complement(F.isnone), data)
    logger.debug('QuestionAnswerSerializer.to_internal_value(%r)', data)
    if isinstance(question, models.TextQuestion):
        # Text questions accept exactly these fields...
        kwargs = F.project(data, 'guest_id question text'.split())
        # ...everything else is collected as unexpected leftovers.
        rest = F.project(data, F.without(data, *kwargs.keys()))
    elif isinstance(question, models.ChoiceQuestion):
        # Choice questions take 'choices' instead of free text.
        kwargs = F.project(data, 'guest_id question choices'.split())
        rest = F.project(data, F.without(data, *kwargs.keys()))
    else:
        self.fail('unknown_question', question=question)
    if rest:
        self.fail('unknown_fields', fields=dict(rest))
    return data
def take(self, limit=5):
    """Fetch the next batch of posts/comments from the account history.

    May be called repeatedly; each call yields up to `limit` previously
    unseen items. When the history is exhausted it returns [].

    Returns:
        List of posts/comments in a batch of size up to `limit`.
    """
    # Keep either comments or main posts, depending on configuration.
    wanted = is_comment if self.comments_only else complement(is_comment)
    candidates = (item for item in self.history if wanted(item))
    # Reblogs carry someone else's author name; keep only our own items.
    own_items = (item for item in candidates
                 if item["author"] == self.account.name)

    # Edited posts re-appear in the history stream, so remember every
    # permlink we have already handed out and skip repeats.
    def not_seen_before(item):
        if item["permlink"] in self.seen_items:
            return False
        self.seen_items.add(item["permlink"])
        return True

    fresh = filter(not_seen_before, own_items)
    # silent(Post) maps items that fail to construct to a falsy value,
    # which the bool filter then discards.
    posts = filter(bool, map(silent(Post), fresh))
    return take(limit, posts)
def PNot(value_or_stepf: Union[Callable, Any]) -> Callable:
    """Unary negate for `LogicPiping`.

    Args:
        value_or_stepf: either a step function to negate, or any other
            value, in which case `funcy.identity` is negated instead.

    Returns:
        A callable that is the logical complement of the chosen step
        function (note: the original annotation claimed ``bool``, but
        ``funcy.complement`` always returns a callable).
    """
    if callable(value_or_stepf):
        stepf = value_or_stepf
    else:
        stepf = funcy.identity
    return funcy.complement(stepf)
def reduce_scenario(scenario, scenario_id, stream):
    """Collect the messages of one scenario from `stream` into a Scenario.

    Consumes events until `end_of_scenario` signals the scenario is over,
    accumulating each event's `.msg` into a tuple, then wraps the result.
    """
    def append_msg(acc, io_tuple):
        # Fold each event's message onto the accumulated tuple.
        return acc + (io_tuple.msg,)

    still_running = complement(end_of_scenario(scenario, scenario_id))
    return (stream
            .take_while(still_running)
            .reduce(append_msg, ())
            .map(lambda events: Scenario(scenario, scenario_id, events)))
def get_accepted_features(features: Collection[Feature],
                          proposed_feature: Feature) -> List[Feature]:
    """Deselect candidate features from list of all features

    Args:
        features: collection of all features in the ballet project: both
            accepted features and candidate ones that have not been accepted
        proposed_feature: candidate feature that has not been accepted

    Returns:
        list of features with the proposed feature not in it.

    Raises:
        ballet.exc.BalletError: Could not deselect exactly the proposed
            feature.
    """
    def is_match(feature):
        # Two features are considered equal when their sources match
        # (at least in this implementation...).
        return feature.source == proposed_feature.source

    # Keep everything that does NOT match the proposed feature.
    result = [feature for feature in features if not is_match(feature)]
    n_removed = len(features) - len(result)

    if n_removed == 1:
        return result
    if n_removed == 0:
        raise BalletError(
            "Did not find match for proposed feature within 'contrib'")
    raise BalletError(f'Unexpected condition (n_features={len(features)}, '
                      f'n_result={len(result)})')
def push_branches_to_remote(repo: git.Repo, remote_name: str,
                            branches: Iterable[str]):
    """Push selected branches to origin

    Similar to::

        $ git push origin branch1:branch1 branch2:branch2

    Raises:
        ballet.exc.BalletError: Push failed in some way
    """
    refspecs = [f'{branch}:{branch}' for branch in branches]
    push_infos = repo.remote(remote_name).push(refspecs)
    # Any PushInfo that did not succeed counts as a failure.
    failures = lfilter(complement(did_git_push_succeed), push_infos)
    if failures:
        for push_info in failures:
            logger.error(f'Failed to push ref {push_info.local_ref.name} '
                         f'to {push_info.remote_ref.name}')
        raise BalletError('Push failed')
def _push(project):
    """Push default branch and project template branch to remote

    With default config (i.e. remote and branch names), equivalent to::

        $ git push origin master:master project-template:project-template

    Raises:
        ballet.exc.BalletError: Push failed in some way
    """
    repo = project.repo
    remote_name = project.config.get('github.remote')
    remote = repo.remote(remote_name)
    result = _call_remote_push(remote)
    failures = lfilter(complement(did_git_push_succeed), result)
    if failures:
        for push_info in failures:
            # f-string instead of str.format, for consistency with
            # push_branches_to_remote elsewhere in the codebase.
            logger.error(f'Failed to push ref {push_info.local_ref.name} to '
                         f'{push_info.remote_ref.name}')
        raise BalletError('Push failed')
def needs_path(f):
    """Wraps a function that accepts path-like to give it a pathlib.Path"""
    @wraps(f)
    def wrapped(pathlike, *args, **kwargs):
        # Coerce the first positional argument before delegating.
        return f(pathlib.Path(pathlike), *args, **kwargs)

    return wrapped


def warn(msg):
    """Issue a warning message of category BalletWarning"""
    warnings.warn(msg, category=BalletWarning)


@decorator
def raiseifnone(call):
    """Decorate a function to raise a ValueError if result is None"""
    result = call()
    if result is None:
        raise ValueError
    return result


def falsy(o):
    # Only the strings 'false' (any case) and '' are considered falsy;
    # every non-string value is truthy.
    return isinstance(o, str) and o.lower() in ('false', '')


truthy = complement(falsy)
self.append(obj) else: self.extend(obj) @property def children(self): return self def as_node_set(self): return self def as_json(self): return [node.as_json() for node in self] is_nset = isa(NodeSet) is_single = any_fn(is_leaf, complement(isa(Node, NodeSet, list))) def nodify(name, data): _nodify = lambda (name, data): nodify(name, data) if isinstance(data, list): return Branch(name, NodeSet.from_seq(map(_nodify, enumerate(data)))) elif isinstance(data, dict): return Branch(name, NodeSet.from_seq(map(_nodify, data.iteritems()))) else: return Leaf(name, data) # ____ ___ __ __ ____ ___ _ _ _ _____ ___ ____ ____ # / ___/ _ \| \/ | __ )_ _| \ | | / \|_ _/ _ \| _ \/ ___|
else: self.extend(obj) @property def children(self): return self def as_node_set(self): return self def as_json(self): return [node.as_json() for node in self] is_nset = isa(NodeSet) is_single = any_fn(is_leaf, complement(isa(Node, NodeSet, list))) def nodify(name, data): _nodify = lambda (name, data): nodify(name, data) if isinstance(data, list): return Branch(name, NodeSet.from_seq(map(_nodify, enumerate(data)))) elif isinstance(data, dict): return Branch(name, NodeSet.from_seq(map(_nodify, data.iteritems()))) else: return Leaf(name, data) # ____ ___ __ __ ____ ___ _ _ _ _____ ___ ____ ____ # / ___/ _ \| \/ | __ )_ _| \ | | / \|_ _/ _ \| _ \/ ___| # | | | | | | |\/| | _ \| || \| | / _ \ | || | | | |_) \___ \
def remove_values(pred, col):
    """Return `col` with every value satisfying `pred` removed."""
    return select_values(lambda value: not pred(value), col)
def __neg__(self, stepf):
    "Does __neg__: adds the logical complement of `stepf` to the pipeline."
    # Fixed: docstring previously said "Does __ge__" (copy-paste error) and
    # the return annotation claimed None although self.logically(...) is
    # returned.
    # NOTE(review): __neg__ is normally unary (self only); the extra
    # `stepf` parameter means the `-x` operator form would raise TypeError,
    # so this appears to be called explicitly -- confirm intent.
    return self.logically(funcy.complement(stepf), False)
return fn def mkclass(name: str, bases: Tuple = (), **clsattrs: Any) -> Any: "Does mkclass" Gen = type(name, (Base, ) + bases, clsattrs) return Gen def arity(fn: Callable) -> int: "Returns the number of arguments required by `fn`." return len(inspect.signature(fn).parameters) always_tup = funcy.iffy(funcy.complement(funcy.is_seqcont), lambda x: (x, )) class Piping(pipelib.BasePiping): """Piping objects is for (ab)using Python operator overloading to build small pipeline-DSL's. The most basic one will simply refuse to do anything - you have to give it instructions/permissions on everything it's made for ;-). """ class Fresh(object): "Marker for Piping instances that never has been run" pass class Executed(object):